Commit 67b0ff0
executorch/extension/llm/modules/test (#7393)
Summary: Pull Request resolved: #7393

Reviewed By: avikchaudhuri, ydwu4

Differential Revision: D67383699
gmagogsfm authored and facebook-github-bot committed Dec 19, 2024
1 parent c337bef commit 67b0ff0
Showing 2 changed files with 7 additions and 2 deletions.
extension/llm/modules/test/test_attention.py (2 additions, 0 deletions)
@@ -150,6 +150,7 @@ def test_attention_export(self):
             (self.x, self.x),
             kwargs={"input_pos": self.input_pos},
             dynamic_shapes=self.dynamic_shapes,
+            strict=True,
         )
         et_res = et_mha_ep.module()(self.x, self.x, input_pos=self.input_pos)
         tt_res = self.tt_mha(self.x, self.x, input_pos=self.input_pos)
@@ -196,6 +197,7 @@ def test_attention_executorch(self):
             (self.x, self.x),
             kwargs={"input_pos": self.input_pos},
             dynamic_shapes=self.dynamic_shapes,
+            strict=True,
         )
         et_program = to_edge(
             et_mha_ep,
extension/llm/modules/test/test_position_embeddings.py (5 additions, 2 deletions)
@@ -49,14 +49,14 @@ def test_tile_positional_embedding_smoke(self):
         self.assertTrue(torch.allclose(y, ref_y))

     def test_tile_positional_embedding_export(self):
-
         tpe_ep = torch.export.export(
             self.tpe,
             (self.x, self.aspect_ratio),
             dynamic_shapes=(
                 self.dynamic_shape,
                 None,
             ), # assuming aspect ratio is static
+            strict=True,
         )

         y = tpe_ep.module()(self.x, self.aspect_ratio)
@@ -91,6 +91,7 @@ def test_tile_positional_embedding_et(self):
                 self.dynamic_shape,
                 None,
             ), # assuming aspect ratio is static
+            strict=True,
         )
         et_program = to_edge(
             tpe_ep,
@@ -148,14 +149,14 @@ def test_tiled_token_positional_embedding_smoke(self):
         assert_close(y, ref_y)

     def test_tiled_token_positional_embedding_export(self):
-
         tpe_ep = torch.export.export(
             self.tpe,
             (self.x, self.aspect_ratio),
             dynamic_shapes=(
                 self.dynamic_shape,
                 None,
             ), # assuming aspect ratio is static
+            strict=True,
         )

         y = tpe_ep.module()(self.x, self.aspect_ratio)
@@ -172,6 +173,7 @@ def test_tiled_token_positional_embedding_aoti(self):
                 self.dynamic_shape,
                 None,
             ), # assuming aspect ratio is static
+            strict=True,
         )

         with tempfile.TemporaryDirectory() as tmpdir:
@@ -195,6 +197,7 @@ def test_tiled_token_positional_embedding_et(self):
                 self.dynamic_shape,
                 None,
             ), # assuming aspect ratio is static
+            strict=True,
         )
         et_program = to_edge(
             tpe_ep,
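Every hunk makes the same change: each existing torch.export.export call gains an explicit strict=True keyword argument, presumably to pin the TorchDynamo-based (strict) capture mode rather than rely on the default. Below is a minimal standalone sketch of the same call pattern; the toy module, input shapes, and the "batch" dynamic dimension are placeholders for illustration, not code from this repository.

import torch
from torch.export import Dim, export


class ToyModule(torch.nn.Module):
    def forward(self, x):
        return x * 2


m = ToyModule()
x = torch.randn(2, 8)

# Pin the capture mode explicitly instead of relying on the default:
# strict=True traces the module with TorchDynamo.
ep = export(
    m,
    (x,),
    dynamic_shapes=({0: Dim("batch", max=16)},),  # dim 0 of the first input is dynamic
    strict=True,
)

# The exported program can be called like the original module.
print(ep.module()(x))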
