
Commit bede30b

[Backend Tester] Skip in-place activation tests due to lack of support in ET

ghstack-source-id: 154fa15
ghstack-comment-id: 3257141684
Pull-Request: #13989

1 parent 20f4516 · commit bede30b

8 files changed: +16 −0 lines changed
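Every file below receives the same two-line change: an `import unittest` near the top and a `@unittest.skip` decorator on the in-place activation test. As a rough illustration of the pattern (the `Model` class and test case here are simplified stand-ins, not the suite's actual code, which routes through `_test_op` and a `TestFlow`), a skipped in-place case looks like this:

import unittest

import torch
import torch.nn as nn


class Model(nn.Module):
    # Simplified stand-in for the per-operator Model defined in each test file.
    def __init__(self, inplace: bool = False):
        super().__init__()
        self.activation = nn.ReLU(inplace=inplace)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.activation(x)


class ExampleReluTest(unittest.TestCase):
    @unittest.skip("In place activations aren't properly defunctionalized yet.")
    def test_relu_f32_inplace(self) -> None:
        # Skipped for now: the in-place variant is not yet supported by the
        # ExecuTorch lowering path, so the backend test suite bypasses it.
        model = Model(inplace=True)
        out = model(torch.randn(3, 4, 5))
        self.assertEqual(out.shape, torch.Size([3, 4, 5]))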

backends/test/suite/operators/test_elu.py

Lines changed: 2 additions & 0 deletions
@@ -8,6 +8,7 @@


 import torch
+import unittest
 from executorch.backends.test.suite.flow import TestFlow

 from executorch.backends.test.suite.operators import (
@@ -42,5 +43,6 @@ def test_elu_f32_multi_dim(self, flow: TestFlow) -> None:
     def test_elu_f32_alpha(self, flow: TestFlow) -> None:
         self._test_op(Model(alpha=0.5), (torch.randn(3, 4, 5),), flow)

+    @unittest.skip("In place activations aren't properly defunctionalized yet.")
     def test_elu_f32_inplace(self, flow: TestFlow) -> None:
         self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)

backends/test/suite/operators/test_hardsigmoid.py

Lines changed: 2 additions & 0 deletions
@@ -8,6 +8,7 @@


 import torch
+import unittest
 from executorch.backends.test.suite.flow import TestFlow

 from executorch.backends.test.suite.operators import (
@@ -38,6 +39,7 @@ def test_hardsigmoid_f32_single_dim(self, flow: TestFlow) -> None:
     def test_hardsigmoid_f32_multi_dim(self, flow: TestFlow) -> None:
         self._test_op(Model(), (torch.randn(2, 3, 4, 5),), flow)

+    @unittest.skip("In place activations aren't properly defunctionalized yet.")
     def test_hardsigmoid_f32_inplace(self, flow: TestFlow) -> None:
         self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)


backends/test/suite/operators/test_hardswish.py

Lines changed: 2 additions & 0 deletions
@@ -8,6 +8,7 @@


 import torch
+import unittest
 from executorch.backends.test.suite.flow import TestFlow

 from executorch.backends.test.suite.operators import (
@@ -38,6 +39,7 @@ def test_hardswish_f32_single_dim(self, flow: TestFlow) -> None:
     def test_hardswish_f32_multi_dim(self, flow: TestFlow) -> None:
         self._test_op(Model(), (torch.randn(2, 3, 4, 5),), flow)

+    @unittest.skip("In place activations aren't properly defunctionalized yet.")
     def test_hardswish_f32_inplace(self, flow: TestFlow) -> None:
         self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)


backends/test/suite/operators/test_hardtanh.py

Lines changed: 2 additions & 0 deletions
@@ -8,6 +8,7 @@


 import torch
+import unittest
 from executorch.backends.test.suite.flow import TestFlow

 from executorch.backends.test.suite.operators import (
@@ -45,6 +46,7 @@ def test_hardtanh_f32_multi_dim(self, flow: TestFlow) -> None:
     def test_hardtanh_f32_custom_range(self, flow: TestFlow) -> None:
         self._test_op(Model(min_val=-2.0, max_val=2.0), (torch.randn(3, 4, 5),), flow)

+    @unittest.skip("In place activations aren't properly defunctionalized yet.")
     def test_hardtanh_f32_inplace(self, flow: TestFlow) -> None:
         self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)


backends/test/suite/operators/test_leaky_relu.py

Lines changed: 2 additions & 0 deletions
@@ -8,6 +8,7 @@


 import torch
+import unittest
 from executorch.backends.test.suite.flow import TestFlow

 from executorch.backends.test.suite.operators import (
@@ -44,6 +45,7 @@ def test_leaky_relu_f32_multi_dim(self, flow: TestFlow) -> None:
     def test_leaky_relu_f32_custom_slope(self, flow: TestFlow) -> None:
         self._test_op(Model(negative_slope=0.1), (torch.randn(3, 4, 5),), flow)

+    @unittest.skip("In place activations aren't properly defunctionalized yet.")
     def test_leaky_relu_f32_inplace(self, flow: TestFlow) -> None:
         self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)


backends/test/suite/operators/test_relu.py

Lines changed: 2 additions & 0 deletions
@@ -8,6 +8,7 @@


 import torch
+import unittest
 from executorch.backends.test.suite.flow import TestFlow

 from executorch.backends.test.suite.operators import (
@@ -38,5 +39,6 @@ def test_relu_f32_single_dim(self, flow: TestFlow) -> None:
     def test_relu_f32_multi_dim(self, flow: TestFlow) -> None:
         self._test_op(Model(), (torch.randn(2, 3, 4, 5),), flow)

+    @unittest.skip("In place activations aren't properly defunctionalized yet.")
     def test_relu_f32_inplace(self, flow: TestFlow) -> None:
         self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)

backends/test/suite/operators/test_silu.py

Lines changed: 2 additions & 0 deletions
@@ -8,6 +8,7 @@


 import torch
+import unittest
 from executorch.backends.test.suite.flow import TestFlow

 from executorch.backends.test.suite.operators import (
@@ -38,6 +39,7 @@ def test_silu_f32_single_dim(self, flow: TestFlow) -> None:
     def test_silu_f32_multi_dim(self, flow: TestFlow) -> None:
         self._test_op(Model(), (torch.randn(2, 3, 4, 5),), flow)

+    @unittest.skip("In place activations aren't properly defunctionalized yet.")
     def test_silu_f32_inplace(self, flow: TestFlow) -> None:
         self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)


backends/test/suite/operators/test_threshold.py

Lines changed: 2 additions & 0 deletions
@@ -8,6 +8,7 @@


 import torch
+import unittest
 from executorch.backends.test.suite.flow import TestFlow

 from executorch.backends.test.suite.operators import (
@@ -51,6 +52,7 @@ def test_threshold_f32_custom_value(self, flow: TestFlow) -> None:
     def test_threshold_f32_custom_threshold_value(self, flow: TestFlow) -> None:
         self._test_op(Model(threshold=0.5, value=1.0), (torch.randn(3, 4, 5),), flow)

+    @unittest.skip("In place activations aren't properly defunctionalized yet.")
     def test_threshold_f32_inplace(self, flow: TestFlow) -> None:
         self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)

