Enable aoti for preprocess + torch pin update (pytorch#6621)
enable aoti for preprocess ci

ghstack-source-id: bd5aaef89499f16d3fa34d2aa7a9347d1b2db3e0
Pull Request resolved: pytorch#6553

Co-authored-by: lucylq <[email protected]>
pytorchbot and lucylq authored Nov 2, 2024
1 parent f2c7700 commit 97a4600
Showing 4 changed files with 39 additions and 28 deletions.
2 changes: 1 addition & 1 deletion .ci/docker/ci_commit_pins/pytorch.txt
@@ -1 +1 @@
-e47e8794499a4a0130ff4efb8713ff93f4b40c36
+c8a648d4dffb9f0133ff4a2ea0e660b42105d3ad
25 changes: 9 additions & 16 deletions examples/models/llama3_2_vision/preprocess/export_preprocess.py
@@ -24,29 +24,22 @@ def main():
         strict=False,
     )

-    # Executorch
+    # AOTInductor. Note: export AOTI before ExecuTorch, as
+    # ExecuTorch will modify the ExportedProgram.
+    torch._inductor.aot_compile(
+        ep.module(),
+        model.get_example_inputs(),
+        options={"aot_inductor.output_path": "preprocess_aoti.so"},
+    )
+
+    # Executorch.
     edge_program = to_edge(
         ep, compile_config=EdgeCompileConfig(_check_ir_validity=False)
     )
     et_program = edge_program.to_executorch()
     with open("preprocess_et.pte", "wb") as file:
         et_program.write_to_file(file)
-
-    # Export.
-    # ep = torch.export.export(
-    #     model.get_eager_model(),
-    #     model.get_example_inputs(),
-    #     dynamic_shapes=model.get_dynamic_shapes(),
-    #     strict=False,
-    # )
-    #
-    # # AOTInductor
-    # torch._inductor.aot_compile(
-    #     ep.module(),
-    #     model.get_example_inputs(),
-    #     options={"aot_inductor.output_path": "preprocess_aoti.so"},
-    # )


 if __name__ == "__main__":
     main()
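Not part of this commit, but for context: a minimal smoke test one could append to main() in export_preprocess.py to exercise the freshly written AOTI artifact. It assumes torch._export.aot_load is the loader paired with torch._inductor.aot_compile on this nightly, and it reuses the `model` wrapper already defined in the script.

    # Load the compiled shared library and run it on the script's example inputs.
    aoti_model = torch._export.aot_load("preprocess_aoti.so", "cpu")
    aoti_image, aoti_ar = aoti_model(*model.get_example_inputs())
    print("AOTI image shape:", aoti_image.shape, "aspect ratio:", aoti_ar.tolist())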
38 changes: 28 additions & 10 deletions examples/models/llama3_2_vision/preprocess/test_preprocess.py
@@ -26,6 +26,7 @@
 )

 from PIL import Image
+from torch._inductor.package import package_aoti

 from torchtune.models.clip.inference._transform import CLIPImageTransform

@@ -55,31 +56,46 @@ def initialize_models(resize_to_max_canvas: bool) -> Dict[str, Any]:
         possible_resolutions=None,
     )

+    # Eager model.
     model = CLIPImageTransformModel(config)

+    # Exported model.
     exported_model = torch.export.export(
         model.get_eager_model(),
         model.get_example_inputs(),
         dynamic_shapes=model.get_dynamic_shapes(),
         strict=False,
     )

-    # aoti_path = torch._inductor.aot_compile(
-    #     exported_model.module(),
-    #     model.get_example_inputs(),
-    # )
+    # AOTInductor model.
+    so = torch._export.aot_compile(
+        exported_model.module(),
+        args=model.get_example_inputs(),
+        options={"aot_inductor.package": True},
+        dynamic_shapes=model.get_dynamic_shapes(),
+    )
+    aoti_path = "preprocess.pt2"
+    package_aoti(aoti_path, so)

     edge_program = to_edge(
         exported_model, compile_config=EdgeCompileConfig(_check_ir_validity=False)
     )
     executorch_model = edge_program.to_executorch()

+    # Re-export as ExecuTorch edits the ExportedProgram.
+    exported_model = torch.export.export(
+        model.get_eager_model(),
+        model.get_example_inputs(),
+        dynamic_shapes=model.get_dynamic_shapes(),
+        strict=False,
+    )
+
     return {
         "config": config,
         "reference_model": reference_model,
         "model": model,
         "exported_model": exported_model,
-        # "aoti_path": aoti_path,
+        "aoti_path": aoti_path,
         "executorch_model": executorch_model,
     }

@@ -265,11 +281,13 @@ def run_preprocess(
         ), f"Executorch model: expected {reference_ar} but got {et_ar.tolist()}"

         # Run aoti model and check it matches reference model.
-        # aoti_path = models["aoti_path"]
-        # aoti_model = torch._export.aot_load(aoti_path, "cpu")
-        # aoti_image, aoti_ar = aoti_model(image_tensor, inscribed_size, best_resolution)
-        # self.assertTrue(torch.allclose(reference_image, aoti_image))
-        # self.assertEqual(reference_ar, aoti_ar.tolist())
+        aoti_path = models["aoti_path"]
+        aoti_model = torch._inductor.aoti_load_package(aoti_path)
+        aoti_image, aoti_ar = aoti_model(image_tensor, inscribed_size, best_resolution)
+        assert_expected(aoti_image, reference_image, rtol=0, atol=1e-4)
+        assert (
+            reference_ar == aoti_ar.tolist()
+        ), f"AOTI model: expected {reference_ar} but got {aoti_ar.tolist()}"

         # This test setup mirrors the one in torchtune:
         # https://github.com/pytorch/torchtune/blob/main/tests/torchtune/models/clip/test_clip_image_transform.py
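For readers unfamiliar with the packaged-AOTI flow the updated test exercises, here is a self-contained sketch of the same round trip on a toy module. The API names (torch._export.aot_compile, package_aoti, torch._inductor.aoti_load_package) are taken from the diff above; treat exact behavior as nightly-dependent.

import torch
from torch._inductor.package import package_aoti


class Scale(torch.nn.Module):
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return x * 2.0


example_inputs = (torch.randn(4, 4),)
ep = torch.export.export(Scale(), example_inputs, strict=False)

# Compile with AOTInductor in package mode, then bundle the output into a .pt2 archive.
so = torch._export.aot_compile(
    ep.module(),
    args=example_inputs,
    options={"aot_inductor.package": True},
)
package_aoti("scale.pt2", so)

# Load the package back and call it like a regular function.
loaded = torch._inductor.aoti_load_package("scale.pt2")
print(loaded(*example_inputs))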
2 changes: 1 addition & 1 deletion install_requirements.py
@@ -112,7 +112,7 @@ def python_is_compatible():
 # NOTE: If a newly-fetched version of the executorch repo changes the value of
 # NIGHTLY_VERSION, you should re-run this script to install the necessary
 # package versions.
-NIGHTLY_VERSION = "dev20241030"
+NIGHTLY_VERSION = "dev20241101"

 # The pip repository that hosts nightly torch packages.
 TORCH_NIGHTLY_URL = "https://download.pytorch.org/whl/nightly/cpu"
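The bumped NIGHTLY_VERSION is ultimately turned into a pip install against TORCH_NIGHTLY_URL. A rough sketch of that step (illustrative only; install_requirements.py assembles the real package list, and TORCH_BASE_VERSION below is a hypothetical placeholder, not a value from the repo):

import subprocess
import sys

NIGHTLY_VERSION = "dev20241101"
TORCH_NIGHTLY_URL = "https://download.pytorch.org/whl/nightly/cpu"
TORCH_BASE_VERSION = "2.6.0"  # hypothetical base version, for illustration only

# Install the matching torch nightly from the CPU nightly index.
subprocess.run(
    [
        sys.executable, "-m", "pip", "install", "--pre",
        f"torch=={TORCH_BASE_VERSION}.{NIGHTLY_VERSION}",
        "--extra-index-url", TORCH_NIGHTLY_URL,
    ],
    check=True,
)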
