@@ -21,58 +21,93 @@ def enable(lib=aten_lib, unused=None, registrar=registrar):
21
21
current_work_registrar = registrar (
22
22
(
23
23
("abs" , abs , Autograd .disable ),
24
+ ("abs_" , abs_ , Autograd .disable ),
24
25
("add.Tensor" , add , Autograd .disable ),
26
+ ("add_.Tensor" , add_ , Autograd .disable ),
25
27
("addmm" , addmm , Autograd .disable ),
26
28
("arange.start_step" , arange_start , Autograd .disable ),
27
29
("arange.start" , arange_start , Autograd .disable ),
28
30
("arange" , arange , Autograd .disable ),
29
31
("batch_norm" , batch_norm , Autograd .enable ),
30
32
("bitwise_and.Tensor" , bitwise_and_tensor , Autograd .disable ),
33
+ ("bitwise_and_.Tensor" , bitwise_and_tensor_ , Autograd .disable ),
31
34
("bitwise_and.Scalar" , bitwise_and_scalar , Autograd .disable ),
35
+ ("bitwise_and_.Scalar" , bitwise_and_scalar_ , Autograd .disable ),
32
36
("bitwise_and.Scalar_Tensor" , bitwise_and_scalar_tensor , Autograd .disable ),
33
37
("bitwise_not" , bitwise_not , Autograd .disable ),
38
+ ("bitwise_not_" , bitwise_not_ , Autograd .disable ),
34
39
("bitwise_or.Tensor" , bitwise_or_tensor , Autograd .disable ),
40
+ ("bitwise_or_.Tensor" , bitwise_or_tensor_ , Autograd .disable ),
35
41
("bitwise_or.Scalar" , bitwise_or_scalar , Autograd .disable ),
42
+ ("bitwise_or_.Scalar" , bitwise_or_scalar_ , Autograd .disable ),
36
43
("bitwise_or.Scalar_Tensor" , bitwise_or_scalar_tensor , Autograd .disable ),
37
44
("bmm" , bmm , Autograd .disable ),
38
45
("clamp" , clamp , Autograd .disable ),
46
+ ("clamp_" , clamp_ , Autograd .disable ),
39
47
("clamp.Tensor" , clamp_tensor , Autograd .disable ),
48
+ ("clamp_.Tensor" , clamp_tensor_ , Autograd .disable ),
40
49
("cos" , cos , Autograd .disable ),
50
+ ("cos_" , cos_ , Autograd .disable ),
41
51
("pad" , pad , Autograd .disable ),
42
52
("constant_pad_nd" , constant_pad_nd , Autograd .disable ),
43
53
("cumsum" , cumsum , Autograd .disable ),
44
54
("cummin" , cummin , Autograd .disable ),
45
55
("div.Tensor" , true_divide , Autograd .disable ),
56
+ ("div_.Tensor" , true_divide_ , Autograd .disable ),
46
57
("div.Scalar" , true_divide , Autograd .disable ),
58
+ ("div_.Scalar" , true_divide_ , Autograd .disable ),
47
59
("div.Tensor_mode" , div_mode , Autograd .disable ),
60
+ ("div_.Tensor_mode" , div_mode_ , Autograd .disable ),
48
61
("div.Scalar_mode" , div_mode , Autograd .disable ),
62
+ ("div_.Scalar_mode" , div_mode_ , Autograd .disable ),
49
63
(
50
64
"divide.Tensor" ,
51
65
true_divide ,
52
66
Autograd .disable ,
53
67
), # divide, an alias for div
68
+ (
69
+ "divide_.Tensor" ,
70
+ true_divide_ ,
71
+ Autograd .disable ,
72
+ ), # divide, an alias for div
54
73
("divide.Scalar" , true_divide , Autograd .disable ),
74
+ ("divide_.Scalar" , true_divide_ , Autograd .disable ),
55
75
("divide.Tensor_mode" , div_mode , Autograd .disable ),
76
+ ("divide_.Tensor_mode" , div_mode_ , Autograd .disable ),
56
77
("divide.Scalar_mode" , div_mode , Autograd .disable ),
78
+ ("divide_.Scalar_mode" , div_mode_ , Autograd .disable ),
57
79
(
58
80
"true_divide.Tensor" ,
59
81
true_divide ,
60
82
Autograd .disable ,
61
83
), # true_divide, an alias for div
84
+ (
85
+ "true_divide_.Tensor" ,
86
+ true_divide_ ,
87
+ Autograd .disable ,
88
+ ), # true_divide, an alias for div
62
89
("true_divide.Scalar" , true_divide , Autograd .disable ),
90
+ ("true_divide_.Scalar" , true_divide_ , Autograd .disable ),
63
91
("floor_divide" , floor_divide , Autograd .disable ),
64
92
("floor_divide.Scalar" , floor_divide , Autograd .disable ),
65
93
("remainder.Tensor" , remainder , Autograd .disable ),
94
+ ("remainder_.Tensor" , remainder_ , Autograd .disable ),
95
+ ("remainder.Scalar" , remainder , Autograd .disable ),
96
+ ("remainder_.Scalar" , remainder_ , Autograd .disable ),
97
+ ("remainder.Scalar_Tensor" , remainder , Autograd .disable ),
66
98
("native_dropout" , native_dropout , Autograd .enable ),
67
99
("erf" , erf , Autograd .disable ),
100
+ ("erf_" , erf_ , Autograd .disable ),
68
101
("embedding" , embedding , Autograd .enable ),
69
102
("eq.Tensor" , eq , Autograd .disable ),
70
103
("eq.Scalar" , eq_scalar , Autograd .disable ),
71
104
("exp" , exp , Autograd .disable ),
105
+ ("exp_" , exp_ , Autograd .disable ),
72
106
("exponential_" , exponential_ , Autograd .disable ),
73
107
("ge.Tensor" , ge , Autograd .disable ),
74
108
("ge.Scalar" , ge_scalar , Autograd .disable ),
75
109
("gelu" , gelu , Autograd .enable ),
110
+ ("gelu_" , gelu_ , Autograd .enable ),
76
111
("native_group_norm" , group_norm , Autograd .enable ),
77
112
("_weight_norm_interface" , weight_norm_interface , Autograd .enable ),
78
113
("_weight_norm" , weight_norm , Autograd .enable ),
@@ -118,19 +153,30 @@ def enable(lib=aten_lib, unused=None, registrar=registrar):
118
153
("ne.Tensor" , ne , Autograd .disable ),
119
154
("ne.Scalar" , ne_scalar , Autograd .disable ),
120
155
("neg" , neg , Autograd .disable ),
156
+ ("neg_" , neg_ , Autograd .disable ),
121
157
("pow.Scalar" , pow_scalar , Autograd .disable ),
122
158
("pow.Tensor_Scalar" , pow_tensor_scalar , Autograd .disable ),
159
+ ("pow_.Scalar" , pow_tensor_scalar_ , Autograd .disable ),
123
160
("pow.Tensor_Tensor" , pow_tensor_tensor , Autograd .disable ),
161
+ ("pow_.Tensor" , pow_tensor_tensor_ , Autograd .disable ),
124
162
("reciprocal" , reciprocal , Autograd .disable ),
163
+ ("reciprocal_" , reciprocal_ , Autograd .disable ),
125
164
("relu" , relu , Autograd .enable ),
165
+ ("relu_" , relu_ , Autograd .enable ),
126
166
("rsqrt" , rsqrt , Autograd .disable ),
167
+ ("rsqrt_" , rsqrt_ , Autograd .disable ),
127
168
("sigmoid" , sigmoid , Autograd .enable ),
169
+ ("sigmoid_" , sigmoid_ , Autograd .enable ),
128
170
("silu" , silu , Autograd .enable ),
171
+ ("silu_" , silu_ , Autograd .enable ),
129
172
("sin" , sin , Autograd .disable ),
173
+ ("sin_" , sin_ , Autograd .disable ),
130
174
("softmax.int" , softmax , Autograd .enable ),
131
175
("sort" , sort , Autograd .disable ),
132
176
("sub.Tensor" , sub , Autograd .disable ),
177
+ ("sub_.Tensor" , sub_ , Autograd .disable ),
133
178
("tanh" , tanh , Autograd .enable ),
179
+ ("tanh_" , tanh_ , Autograd .enable ),
134
180
("triu" , triu , Autograd .disable ),
135
181
# ("topk", topk, Autograd.disable),
136
182
("var_mean.correction" , var_mean , Autograd .disable ),
0 commit comments