torch/_functorch/_aot_autograd (1 file changed: +5 / -1 lines)

```diff
@@ -468,12 +468,16 @@ def aot_dispatch_autograd_graph(
     # a fake tensor. Unlikely.
     # See Note: [Fake Modules and AOTAutograd]
     torch._dynamo.utils.assert_no_fake_params_or_buffers(fx_g)
+
+    # Have to copy before eliminate_dead_code otherwise the
+    # fw node match might be erased
+    copy_fwd_metadata_to_bw_nodes(fx_g)
+
     fx_g.graph.eliminate_dead_code()
     if not aot_config.disable_functionalization:
         # There should be *NO* mutating ops in the graph at this point.
         assert_functional_graph(fx_g.graph)
 
-    copy_fwd_metadata_to_bw_nodes(fx_g)
     fx_g.recompile()
 
     # TODO: in AOTAutograd, we create metadata like _indices_of_inps_to_detach to detect
```
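The reordering matters because `torch.fx`'s `Graph.eliminate_dead_code()` erases nodes with no users, so a pass that matches backward nodes against their forward counterparts can lose the forward node it needs if it runs afterward. A minimal sketch of that behavior, not taken from this PR (the module `M` and the printed node names are illustrative only):

```python
import torch
import torch.fx as fx


class M(torch.nn.Module):
    def forward(self, x):
        unused = x + 1  # traced into an `add` node that has no users
        return x * 2


gm = fx.symbolic_trace(M())
print([n.name for n in gm.graph.nodes])
# ['x', 'add', 'mul', 'output'] -- the dead `add` node is still present

gm.graph.eliminate_dead_code()
gm.recompile()
print([n.name for n in gm.graph.nodes])
# ['x', 'mul', 'output'] -- `add` has been erased; any metadata-copying pass
# that needed to match against it must already have run
```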