-
Notifications
You must be signed in to change notification settings - Fork 26
/
Copy path progressive_lowering_tensor.py
48 lines (39 loc) · 1.41 KB
/
progressive_lowering_tensor.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import torch
from utils import no_dispatch
from torch.utils._pytree import tree_map
from torch.testing._internal.common_utils import run_tests, TestCase
# Module-level log of intercepted ops; appended to by the tensor subclass
# below and cleared/inspected by the test case.
CALLED = []
class ProgressiveLoweringTensor(torch.Tensor):
    """Tensor subclass that records which ops reach it.

    ``torch.Tensor.relu`` is intercepted at the high-level
    ``__torch_function__`` layer; every other op falls through to the
    dispatcher, where ``__torch_dispatch__`` records the low-level aten
    op instead. Intercepted callables are appended to the module-level
    ``CALLED`` list.
    """

    @classmethod
    def wrap(cls, t):
        # Re-wrap plain tensors so results stay in this subclass; leave
        # non-tensors and already-wrapped values alone.
        if isinstance(t, torch.Tensor) and not isinstance(t, cls):
            return cls(t)
        return t

    @classmethod
    def __torch_function__(cls, func, types, args=(), kwargs=None):
        kwargs = kwargs or {}
        if func is not torch.Tensor.relu:
            # Not one of the ops handled at this layer: run the default
            # implementation (which will eventually hit __torch_dispatch__).
            with torch._C.DisableTorchFunction():
                return func(*args, **kwargs)
        # relu is handled here, at the torch-function layer.
        CALLED.append(func)
        with torch._C.DisableTorchFunction(), no_dispatch():
            return tree_map(cls.wrap, func(*args, **kwargs))

    @classmethod
    def __torch_dispatch__(cls, func, types, args=(), kwargs=None):
        # Record the low-level aten op and run it without re-entering
        # the dispatcher, re-wrapping any tensor outputs.
        CALLED.append(func)
        with no_dispatch():
            return tree_map(cls.wrap, func(*args, **kwargs))
class ProgressiveLoweringTensorTest(TestCase):
    """Checks that ops are intercepted at the expected layer."""

    def test_basic(self):
        CALLED.clear()
        t = ProgressiveLoweringTensor(torch.randn(2))
        t.add(2).relu()
        # add falls through to the dispatcher, so the low-level aten op
        # is recorded; relu is caught earlier, at the torch-function
        # layer, so the high-level callable is recorded instead.
        self.assertEqual(
            CALLED, [torch.ops.aten.add.Tensor, torch.Tensor.relu]
        )
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
    run_tests()