Add dim_order compat support (#7420)
Summary: Pull Request resolved: #7420

Differential Revision: D67542995
digantdesai authored and facebook-github-bot committed Dec 21, 2024
1 parent 82763a9 commit fc50a13
Showing 5 changed files with 49 additions and 1 deletion.
6 changes: 6 additions & 0 deletions backends/apple/mps/mps_preprocess.py
@@ -32,6 +32,9 @@
    CompileSpec,
    PreprocessResult,
)

from executorch.exir.passes.memory_format_ops_pass import DimOrderOpsRevertPass
from executorch.exir.program._program import _transform
from torch.export.exported_program import ExportedProgram

FORMAT = "[%(levelname)s %(asctime)s %(filename)s:%(lineno)s] %(message)s"
@@ -83,6 +86,9 @@ def preprocess(
        # FlatBuffer graph, process the `output` nodes and add their id to
        # the `output_ids` array in the schema.

        # TODO: Remove this once we have a better support for the dim-order ops.
        edge_program = _transform(edge_program, DimOrderOpsRevertPass())

        mps_graph = MPSGraph(
            version="0",
            mps_nodes=[],
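The pass applied above is the core of the compat support: it undoes the edge dialect's dim-order rewriting before the graph is serialized for MPS. As a rough illustration only (the real pass lives in executorch.exir.passes.memory_format_ops_pass; the edge op handles below are assumptions about the dialect namespace, not taken from this commit), it behaves like an ExportPass that swaps each dim-order variant back to its aten counterpart:

from executorch.exir.pass_base import ExportPass
from executorch.exir.dialects._ops import ops as exir_ops


class RevertDimOrderSketch(ExportPass):
    # Illustrative stand-in for DimOrderOpsRevertPass, not the actual implementation.
    def call_operator(self, op, args, kwargs, meta):
        if op == exir_ops.edge.dim_order_ops._to_dim_order_copy.default:
            # Sketch simplification: drop the dim_order hint and fall back to the
            # plain copy op; the real pass maps dim_order to an equivalent
            # memory_format argument instead of discarding it.
            kwargs = {k: v for k, v in kwargs.items() if k != "dim_order"}
            return super().call_operator(
                exir_ops.edge.aten._to_copy.default, args, kwargs, meta
            )
        return super().call_operator(op, args, kwargs, meta)

Because the revert runs before serialization, the node visitors added below should only ever see aten ops, which is why they can simply raise.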
16 changes: 16 additions & 0 deletions backends/apple/mps/operators/constant_ops.py
@@ -78,6 +78,22 @@ def define_node(
            )
        )

@register_node_visitor
class ToDimOrderEmptyVisitor(NodeVisitor):
    target = ["dim_order_ops._empty_dim_order.default"]

    def __init__(self, *args) -> None:
        super().__init__(*args)

    def define_node(
        self,
        node: torch.fx.Node,
        mps_graph: MPSGraph,
    ) -> None:
        # We should never get here, because DimOrderOpsRevertPass replaces this with an aten.empty.memory_format op
        # But if we do, we can't handle it ATM, so raise an exception
        raise NotImplementedError(
            "dim_order_ops._empty_dim_order.default is not supported yet"
        )


@register_node_visitor
class FullLikeVisitor(NodeVisitor):
16 changes: 16 additions & 0 deletions backends/apple/mps/operators/op_clone.py
@@ -33,3 +33,19 @@ def define_node
        )
        input_id = self.define_tensor(get_input_node(node, 0), mps_graph)
        self.tensor_to_id[node] = input_id

@register_node_visitor
class ToDimOrderCopyVisitor(NodeVisitor):
    target = ["dim_order_ops._to_dim_order_copy.default"]

    def __init__(self, *args) -> None:
        super().__init__(*args)

    def define_node(
        self,
        node: torch.fx.Node,
        mps_graph: MPSGraph,
    ) -> None:
        # We should never get here, because DimOrderOpsRevertPass replaces this with an aten._to_copy op
        # But if we do, we can't handle it ATM, so raise an exception
        raise NotImplementedError(
            "dim_order_ops._to_dim_order_copy.default is not supported yet"
        )
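Both new visitors (here and in constant_ops.py above) are guards rather than implementations: DimOrderOpsRevertPass should have rewritten every _empty_dim_order and _to_dim_order_copy node before serialization, so reaching either define_node signals a pass-ordering bug. A hedged sketch of the kind of check one could run on the transformed program before building the MPSGraph (the helper name is made up for illustration, not ExecuTorch API):

def assert_no_dim_order_ops(edge_program) -> None:
    # Scan the transformed ExportedProgram for surviving dim-order ops; if the
    # revert pass ran, this list is empty and the stub visitors are never reached.
    leftover = [
        node.target
        for node in edge_program.graph.nodes
        if node.op == "call_function" and "dim_order_ops" in str(node.target)
    ]
    assert not leftover, f"dim-order ops still reach the MPS backend: {leftover}"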
10 changes: 10 additions & 0 deletions backends/apple/mps/test/test_mps.py
@@ -1829,6 +1829,16 @@ def forward(self, x):
            Clone(), model_inputs, func_name=inspect.stack()[0].function[5:]
        )

    def test_mps_backend_to_copy(self):
        class Copy(torch.nn.Module):
            def forward(self, x):
                return (
                    torch.ops.aten._to_copy.default(
                        x + 2, memory_format=torch.contiguous_format
                    )
                    + x
                )

        model_inputs = (torch.randn(1, 3, 3),)
        self.lower_and_test_with_partitioner(
            Copy(), model_inputs, func_name=inspect.stack()[0].function[5:]
        )

    def test_mps_backend_floor(self):
        class Floor(torch.nn.Module):
            def forward(self, x):
2 changes: 1 addition & 1 deletion backends/apple/mps/test/test_mps_utils.py
@@ -219,7 +219,7 @@ def lower_module_and_test_output(
         dynamic_shapes=dynamic_shapes,
         edge_compile_config=EdgeCompileConfig(
             _check_ir_validity=False,
-            _skip_dim_order=True,  # TODO(T182928844): Delegate dim order op to backend.
+            _skip_dim_order=False,  # TODO(T182928844): Delegate dim order op to backend.
         ),
     )

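Flipping _skip_dim_order to False is what makes the compat path above meaningful: the edge dialect now keeps dim-order ops instead of lowering them away up front, and the MPS preprocess step becomes responsible for reverting them. A minimal standalone sketch of the effect, assuming current torch.export/to_edge APIs and reusing the Copy module from the new test (exact output depends on the ExecuTorch version):

import torch
from executorch.exir import EdgeCompileConfig, to_edge


class Copy(torch.nn.Module):
    def forward(self, x):
        return (
            torch.ops.aten._to_copy.default(x + 2, memory_format=torch.contiguous_format)
            + x
        )


ep = torch.export.export(Copy(), (torch.randn(1, 3, 3),))
edge = to_edge(
    ep,
    compile_config=EdgeCompileConfig(_check_ir_validity=False, _skip_dim_order=False),
)
# With _skip_dim_order=False we expect a dim_order_ops._to_dim_order_copy.default node
# in place of aten._to_copy; with _skip_dim_order=True the aten op would remain.
print(
    [n.target for n in edge.exported_program().graph.nodes if n.op == "call_function"]
)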
