1 file changed: 3 additions, 10 deletions
py/torch_tensorrt/dynamo/conversion
@@ -33,7 +33,9 @@
 from torch_tensorrt.dynamo.conversion._ConverterRegistry import (
     DYNAMO_CONVERTERS as CONVERTERS,
 )
-from torch_tensorrt.dynamo.conversion._ConverterRegistry import CallingConvention
+from torch_tensorrt.dynamo.conversion._ConverterRegistry import (
+    CallingConvention,
+)
 from torch_tensorrt.dynamo.conversion._TRTBuilderMonitor import TRTBulderMonitor
 from torch_tensorrt.dynamo.conversion.converter_utils import (
     get_node_io,
@@ -740,10 +742,6 @@ def run(
     def run_node(self, n: torch.fx.Node) -> torch.fx.Node:
         self._cur_node_name = get_node_name(n)
         self._cur_node = n
-        # add "_itensor_to_tensor_meta"
-        kwargs = dict(n.kwargs)
-        kwargs["_itensor_to_tensor_meta"] = self._itensor_to_tensor_meta
-        n.kwargs = kwargs

         if _LOGGER.isEnabledFor(logging.DEBUG):
             _LOGGER.debug(
@@ -759,11 +757,6 @@ def run_node(self, n: torch.fx.Node) -> torch.fx.Node:
             f"Converted node {self._cur_node_name} [{n.target}] ({get_node_io(n, self.const_mapping)})"
         )

-        # remove "_itensor_to_tensor_meta"
-        kwargs = dict(n.kwargs)
-        del kwargs["_itensor_to_tensor_meta"]
-        n.kwargs = kwargs
-
         if isinstance(trt_node, trt.ITensor):
             self._itensor_to_tensor_meta[trt_node] = n.meta.get("tensor_meta")
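The deleted lines threaded the interpreter's _itensor_to_tensor_meta dictionary through each node's kwargs before conversion and stripped it back out afterwards. Below is a minimal sketch of that removed pattern, assuming only the public torch.fx API; the helper names are illustrative, not from the repository. torch.fx stores Node.kwargs as an immutable mapping, which is why the pattern copies the mapping, edits the copy, and assigns it back.

import torch.fx

def inject_side_channel(n: torch.fx.Node, key: str, value) -> None:
    # Node.kwargs is an immutable mapping: mutate a copy, then reassign.
    kwargs = dict(n.kwargs)
    kwargs[key] = value
    n.kwargs = kwargs

def strip_side_channel(n: torch.fx.Node, key: str) -> None:
    # Undo the injection so the extra entry never leaks past the call.
    kwargs = dict(n.kwargs)
    kwargs.pop(key, None)
    n.kwargs = kwargs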
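With that plumbing gone, the ITensor-to-metadata mapping lives only on the interpreter itself: the retained line self._itensor_to_tensor_meta[trt_node] = n.meta.get("tensor_meta") records the metadata after each node is converted. For illustration only, here is a self-contained sketch of the same side-table idea on a plain torch.fx.Interpreter; the class and attribute names are assumptions, not Torch-TensorRT's implementation.

import torch
import torch.fx

class MetaTrackingInterpreter(torch.fx.Interpreter):
    """Records each node's tensor_meta in a side table keyed by the produced value."""

    def __init__(self, gm: torch.fx.GraphModule):
        super().__init__(gm)
        # Hypothetical analogue of the interpreter's _itensor_to_tensor_meta dict.
        self.output_to_tensor_meta = {}

    def run_node(self, n: torch.fx.Node):
        out = super().run_node(n)
        meta = n.meta.get("tensor_meta")
        if meta is not None:
            # Keep the mapping on the interpreter; node kwargs are never touched.
            self.output_to_tensor_meta[out] = meta
        return out

Running torch.fx.passes.shape_prop.ShapeProp over the graph module first populates n.meta["tensor_meta"]; interpreting the module afterwards then fills the side table without modifying any node's kwargs.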