@@ -169,55 +169,182 @@ def aten_ops_gelu(
     )


-@dynamo_tensorrt_converter(torch.ops.aten.matmul)  # type: ignore[misc]
-@dynamo_tensorrt_converter(torch.ops.aten.mm.default)  # type: ignore[misc]
-def aten_ops_matmul(
+@dynamo_tensorrt_converter(torch.ops.aten.relu.default)
+def aten_ops_relu(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.matmul.matrix_multiply(
-        network, target, SourceIR.ATEN, name, args[0], args[1]
+    return impl.activation.relu(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
     )


-@dynamo_tensorrt_converter(torch.ops.aten.layer_norm.default)  # type: ignore[misc]
-def aten_ops_layernorm(
+@dynamo_tensorrt_converter(torch.ops.aten.sigmoid.default)
+def aten_ops_sigmoid(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.normalization.layer_norm(
+    return impl.activation.sigmoid(
         network,
         target,
         SourceIR.ATEN,
         name,
         args[0],
-        args[1],
-        args[2],
-        args[3],
-        args[4],
     )


-@dynamo_tensorrt_converter(torch.ops.aten.relu.default)  # type: ignore[misc]
-def aten_ops_relu(
+@dynamo_tensorrt_converter(torch.ops.aten.tanh.default)
+def aten_ops_tanh(
     network: TRTNetwork,
     target: Target,
     args: Tuple[Argument, ...],
     kwargs: Dict[str, Argument],
     name: str,
 ) -> Union[TRTTensor, Sequence[TRTTensor]]:
-    return impl.activation.relu(
+    return impl.activation.tanh(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.leaky_relu.default)
+def aten_ops_leaky_relu(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.leaky_relu(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        args_bounds_check(args, 1, 0.01),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.elu.default)
+def aten_ops_elu(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.elu(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        alpha=args_bounds_check(args, 1, 1.0),
+        beta=args_bounds_check(args, 2, None),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.softplus.default)
+def aten_ops_softplus(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.softplus(
         network,
         target,
         SourceIR.ATEN,
         name,
         args[0],
+        beta=args_bounds_check(args, 1, 1),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.clip.default)
+def aten_ops_clip(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.clip(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        alpha=args_bounds_check(args, 1),
+        beta=args_bounds_check(args, 2),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.hardsigmoid.default)
+def aten_ops_hard_sigmoid(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.activation.hard_sigmoid(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        alpha=args_bounds_check(args, 1, 1 / 6),
+        beta=args_bounds_check(args, 2, 1 / 2),
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.matmul)  # type: ignore[misc]
+@dynamo_tensorrt_converter(torch.ops.aten.mm.default)  # type: ignore[misc]
+def aten_ops_matmul(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.matmul.matrix_multiply(
+        network, target, SourceIR.ATEN, name, args[0], args[1]
+    )
+
+
+@dynamo_tensorrt_converter(torch.ops.aten.layer_norm.default)  # type: ignore[misc]
+def aten_ops_layernorm(
+    network: TRTNetwork,
+    target: Target,
+    args: Tuple[Argument, ...],
+    kwargs: Dict[str, Argument],
+    name: str,
+) -> Union[TRTTensor, Sequence[TRTTensor]]:
+    return impl.normalization.layer_norm(
+        network,
+        target,
+        SourceIR.ATEN,
+        name,
+        args[0],
+        args[1],
+        args[2],
+        args[3],
+        args[4],
     )

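Note on args_bounds_check: the added converters use it to fall back to the ATen schema default whenever an optional positional argument is absent from the FX node (for example 0.01 for leaky_relu's negative slope, or 1/6 and 1/2 for hardsigmoid's alpha and beta). A minimal sketch of such a helper, assuming the conventional definition in the package's converter utilities; this is illustrative, not the verbatim source:

from typing import Any, Optional, Sequence


def args_bounds_check(
    args: Sequence[Any], i: int, replacement: Optional[Any] = None
) -> Any:
    # Return args[i] when the node actually supplied that positional
    # argument; otherwise fall back to the given schema default.
    # (Assumed behavior, matching how the diff above calls it.)
    return args[i] if len(args) > i else replacement
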
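For a quick end-to-end check of the newly registered activation converters, a sketch along these lines should exercise each path. This assumes torch_tensorrt.compile with ir="dynamo" as the entry point and a CUDA-capable setup; exact option names can vary across releases:

import torch
import torch_tensorrt


class Activations(torch.nn.Module):
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Each op below should lower through one of the ATen converters
        # registered in the diff instead of falling back to eager PyTorch.
        x = torch.relu(x)
        x = torch.sigmoid(x)
        x = torch.tanh(x)
        x = torch.nn.functional.leaky_relu(x, 0.01)
        x = torch.nn.functional.elu(x, alpha=1.0)
        x = torch.nn.functional.softplus(x, beta=1)
        x = torch.clip(x, -1.0, 1.0)
        return torch.nn.functional.hardsigmoid(x)


model = Activations().eval().cuda()
inputs = [torch.randn(2, 16, device="cuda")]
trt_model = torch_tensorrt.compile(model, ir="dynamo", inputs=inputs)
print(trt_model(*inputs).shape)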