py/torch_tensorrt/dynamo/backend
1 file changed: +12 -5
@@ -10,7 +10,6 @@
 from torch._dynamo.backends.common import aot_autograd
 from torch._dynamo.utils import detect_fake_mode
 from torch._functorch.aot_autograd import aot_export_joint_simple
-from torch._ops import OpOverload
 from torch_tensorrt.dynamo import CompilationSettings
 from torch_tensorrt.dynamo._compiler import compile_module
 from torch_tensorrt.dynamo.lowering import (
@@ -63,11 +62,19 @@ def aot_torch_tensorrt_aten_backend(
     settings_aot_autograd["decompositions"] = get_decompositions(
         settings.enable_experimental_decompositions
     )
+    # This is added since detach lowering leads to alias nodes
+    # Error - View operation returned a tensor that is the same as the input base tensor
+    # torch nop_decompositions in torch/_decomp/decompositions.py
     # transpose key deleted since not desirable to lower it to permute
-    for key in settings_aot_autograd["decompositions"]:
-        if "transpose" in key._name:
-            to_delete = key
-    del settings_aot_autograd["decompositions"][to_delete]
+    to_delete = {
+        key
+        for key in settings_aot_autograd["decompositions"]
+        if "detach" in key._name or "transpose" in key._name
+    }
+
+    for key in to_delete:
+        del settings_aot_autograd["decompositions"][key]
+
     return aot_autograd(
         fw_compiler=_pretraced_backend_autograd,
         decompositions=settings_aot_autograd["decompositions"],
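
The rewritten block first snapshots the matching keys into a set and only then deletes them, so the decompositions dict is never mutated while it is being iterated, and every matching overload is removed rather than only the last one the old loop happened to find. A minimal sketch of that filter-then-delete pattern, using a plain dict with made-up aten op names standing in for the real OpOverload keys (nothing below is torch_tensorrt API):

# Stand-in for settings_aot_autograd["decompositions"]; the string keys
# play the role of OpOverload._name values and are purely illustrative.
decompositions = {
    "aten::detach.default": "detach_decomp",
    "aten::transpose.int": "transpose_decomp",
    "aten::relu.default": "relu_decomp",
}

# Collect the keys to drop first; deleting inside the iteration would
# raise "RuntimeError: dictionary changed size during iteration".
to_delete = {
    key for key in decompositions if "detach" in key or "transpose" in key
}
for key in to_delete:
    del decompositions[key]

print(sorted(decompositions))  # ['aten::relu.default']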