Skip to content

Commit d5e02ac

Browse files
Fix bug: reject sub nodes with alpha != 1 in the XNNPACK partitioner (fixes: #11684)
1 parent 7565342 commit d5e02ac

File tree

2 files changed

+31
-0
lines changed

2 files changed

+31
-0
lines changed

backends/xnnpack/partition/config/generic_node_configs.py

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -516,6 +516,13 @@ class SubConfig(GenericNodePartitionerConfig):
516516
def supported_precision_types(self) -> List[ConfigPrecisionType]:
517517
return [ConfigPrecisionType.FP32, ConfigPrecisionType.STATIC_QUANT]
518518

519+
def check_constraints(self, node: torch.fx.Node, ep: ExportedProgram) -> bool:
520+
# No support for sub nodes with alpha != 1
521+
if "alpha" in node.kwargs and node.kwargs["alpha"] != 1:
522+
why(node, reason="Sub node doesn't support alpha != 1")
523+
return False
524+
return True
525+
519526

520527
class BMMConfig(GenericNodePartitionerConfig):
521528
"""

backends/xnnpack/test/ops/test_sub.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -152,3 +152,27 @@ def forward(self, x, y):
152152
.serialize()
153153
.run_method_and_compare_outputs()
154154
)
155+
156+
class SubWithAlpha(torch.nn.Module):
    """Module pairing a partitionable sub (alpha=1) with a non-partitionable
    one (alpha=2), used to exercise the partitioner's alpha constraint."""

    def forward(self, x, y):
        # alpha == 1 is plain subtraction, so the partitioner accepts it
        partitioned = torch.sub(x, y, alpha=1)
        # alpha != 1 must stay un-lowered on the portable ops
        unpartitioned = torch.sub(x, y, alpha=2)
        return partitioned, unpartitioned
163+
164+
def test_add_with_alpha(self):
    """End-to-end check that only the alpha==1 sub node is delegated.

    NOTE(review): the name says "add" but this exercises sub — looks like a
    copy-paste from the add tests; consider renaming to test_sub_with_alpha.
    """
    inputs = (torch.randn(1, 1, 4, 4), torch.randn(1, 1, 4, 4))
    # Unroll the fluent chain; rebinding keeps it equivalent regardless of
    # whether each stage returns self or a fresh tester object.
    tester = Tester(self.SubWithAlpha(), inputs)
    tester = tester.export()
    tester = tester.check_count({"torch.ops.aten.sub.Tensor": 2})
    tester = tester.to_edge_transform_and_lower()
    # exactly one sub survives un-lowered (the alpha=2 node)
    tester = tester.check_count(
        {"executorch_exir_dialects_edge__ops_aten_sub_Tensor": 1}
    )
    # and exactly one delegate call holds the partitioned alpha=1 node
    tester = tester.check_count(
        {"torch.ops.higher_order.executorch_call_delegate": 1}
    )
    tester = tester.to_executorch()
    tester = tester.serialize()
    tester.run_method_and_compare_outputs()

0 commit comments

Comments
 (0)