From 42908bfeedab08ab65c1f8962c09010b57619bbc Mon Sep 17 00:00:00 2001
From: Svetlana Karslioglu <svekars@meta.com>
Date: Thu, 1 May 2025 16:28:19 -0700
Subject: [PATCH 1/4] Reenable memory format tutorial

---
 .jenkins/validate_tutorials_built.py          |  1 -
 intermediate_source/memory_format_tutorial.py | 20 +++++++++++--------
 2 files changed, 12 insertions(+), 9 deletions(-)

diff --git a/.jenkins/validate_tutorials_built.py b/.jenkins/validate_tutorials_built.py
index f4586c19b3e..640d1932b4c 100644
--- a/.jenkins/validate_tutorials_built.py
+++ b/.jenkins/validate_tutorials_built.py
@@ -52,7 +52,6 @@
     "intermediate_source/tensorboard_profiler_tutorial", # reenable after 2.0 release.
     "advanced_source/semi_structured_sparse", # reenable after 3303 is fixed.
     "intermediate_source/torchrec_intro_tutorial", # reenable after 3302 is fixe
-    "intermediate_source/memory_format_tutorial", # causes other tutorials like torch_logs fail. "state" issue, reseting dynamo didn't help
 ]
 
 def tutorial_source_dirs() -> List[Path]:
diff --git a/intermediate_source/memory_format_tutorial.py b/intermediate_source/memory_format_tutorial.py
index 26bc5c9d53c..047e874d8a9 100644
--- a/intermediate_source/memory_format_tutorial.py
+++ b/intermediate_source/memory_format_tutorial.py
@@ -358,10 +358,14 @@ def attribute(m):
                 print(e)
 
 
-attribute(torch.Tensor)
-attribute(torch.nn.functional)
-attribute(torch)
-
+#############################
+#
+# The following code will wrap PyTorch functions to check if channels last
+# format is preserved through operations.
+#
+# attribute(torch.Tensor)
+# attribute(torch.nn.functional)
+# attribute(torch)
 
 ######################################################################
 # If you found an operator that doesn't support channels last tensors
@@ -371,10 +375,10 @@ def attribute(m):
 
 ######################################################################
 # Code below is to recover the attributes of torch.
-
-for (m, attrs) in old_attrs.items():
-    for (k, v) in attrs.items():
-        setattr(m, k, v)
+#
+#for (m, attrs) in old_attrs.items():
+#    for (k, v) in attrs.items():
+#        setattr(m, k, v)
 
 ######################################################################
 # Work to do

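The attribute() calls commented out above wrap the functions exposed by torch, torch.Tensor and torch.nn.functional to check whether the channels last format is preserved through operations. A related spot check can be done directly with Tensor.is_contiguous(memory_format=torch.channels_last); the snippet below is a minimal sketch of that check and is not part of the patch:

    import torch

    # Build a 4D (NCHW) tensor and convert it to the channels last memory format.
    x = torch.randn(2, 3, 8, 8).to(memory_format=torch.channels_last)
    print(x.is_contiguous(memory_format=torch.channels_last))  # True

    # Element-wise ops such as relu keep the layout; an operator that does not
    # support channels last would hand back a contiguous (NCHW) result instead.
    y = torch.nn.functional.relu(x)
    print(y.is_contiguous(memory_format=torch.channels_last))  # True
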
From 7079003659a617cb7362546298c80bd11fdf64b2 Mon Sep 17 00:00:00 2001
From: Svetlana Karslioglu <svekars@meta.com>
Date: Thu, 1 May 2025 16:30:02 -0700
Subject: [PATCH 2/4] Remove trailing comma from the last NOT_RUN entry

---
 .jenkins/validate_tutorials_built.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.jenkins/validate_tutorials_built.py b/.jenkins/validate_tutorials_built.py
index 640d1932b4c..bc86f7f7f59 100644
--- a/.jenkins/validate_tutorials_built.py
+++ b/.jenkins/validate_tutorials_built.py
@@ -51,7 +51,7 @@
     "intermediate_source/text_to_speech_with_torchaudio",
     "intermediate_source/tensorboard_profiler_tutorial", # reenable after 2.0 release.
     "advanced_source/semi_structured_sparse", # reenable after 3303 is fixed.
-    "intermediate_source/torchrec_intro_tutorial", # reenable after 3302 is fixe
+    "intermediate_source/torchrec_intro_tutorial" # reenable after 3302 is fixe
 ]
 
 def tutorial_source_dirs() -> List[Path]:

From 1e521f23ef2e0215a37e17caf1e48bae284c7a70 Mon Sep 17 00:00:00 2001
From: Svetlana Karslioglu <svekars@meta.com>
Date: Thu, 1 May 2025 17:13:39 -0700
Subject: [PATCH 3/4] Clarify the debugging and restore comments

---
 intermediate_source/memory_format_tutorial.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/intermediate_source/memory_format_tutorial.py b/intermediate_source/memory_format_tutorial.py
index 047e874d8a9..5388ef5a22a 100644
--- a/intermediate_source/memory_format_tutorial.py
+++ b/intermediate_source/memory_format_tutorial.py
@@ -359,8 +359,8 @@ def attribute(m):
 
 
 #############################
-#
-# The following code will wrap PyTorch functions to check if channels last
+# To debug which operators don't support channels last format, you could uncomment 
+# the following code. This will wrap PyTorch functions to check if channels last
 # format is preserved through operations.
 #
 # attribute(torch.Tensor)
@@ -369,14 +369,14 @@ def attribute(m):
 
 ######################################################################
 # If you found an operator that doesn't support channels last tensors
-# and you want to contribute, feel free to use following developers
-# guide https://github.com/pytorch/pytorch/wiki/Writing-memory-format-aware-operators.
+# and you want to contribute, see the developer guide
+# `Writing Memory Format Aware Operators <https://github.com/pytorch/pytorch/wiki/Writing-memory-format-aware-operators>`__.
 #
 
 ######################################################################
-# Code below is to recover the attributes of torch.
+# To restore the original attributes of torch after using the wrappers above, uncomment and run the code below.
 #
-#for (m, attrs) in old_attrs.items():
+# for (m, attrs) in old_attrs.items():
 #    for (k, v) in attrs.items():
 #        setattr(m, k, v)
 

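The restore loop that stays commented out above follows a plain save/patch/restore pattern: old_attrs maps each patched module to its original attributes, and setattr() puts them back. The snippet below is a minimal, self-contained sketch of that pattern; the toy module and logged_add wrapper are illustrative stand-ins, not code from the tutorial:

    import types

    toy = types.ModuleType("toy")           # stand-in for torch / torch.nn.functional
    toy.add = lambda a, b: a + b

    old_attrs = {toy: {"add": toy.add}}     # 1. remember the original attributes

    def logged_add(a, b):                   # 2. install a wrapper in place of the original
        print("toy.add called")             #    (here it only logs the call)
        return old_attrs[toy]["add"](a, b)

    toy.add = logged_add
    print(toy.add(1, 2))                    # prints "toy.add called", then 3

    for m, attrs in old_attrs.items():      # 3. the restore loop, as in the patch above
        for k, v in attrs.items():
            setattr(m, k, v)

    print(toy.add(1, 2))                    # just 3: the original add is back
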
From 1c7810c5e1121e80626da8d0f3b314f18cc94c29 Mon Sep 17 00:00:00 2001
From: Svetlana Karslioglu <svekars@meta.com>
Date: Fri, 2 May 2025 08:50:59 -0700
Subject: [PATCH 4/4] Format the attribute() calls as a code-block directive

---
 intermediate_source/memory_format_tutorial.py | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/intermediate_source/memory_format_tutorial.py b/intermediate_source/memory_format_tutorial.py
index 5388ef5a22a..ee436832dbf 100644
--- a/intermediate_source/memory_format_tutorial.py
+++ b/intermediate_source/memory_format_tutorial.py
@@ -359,13 +359,15 @@ def attribute(m):
 
 
 #############################
-# To debug which operators don't support channels last format, you could uncomment 
-# the following code. This will wrap PyTorch functions to check if channels last
-# format is preserved through operations.
+# To debug which operators don't support channels last format, execute
+# the following lines:
+#
+# .. code-block:: python
+#
+#    attribute(torch.Tensor)
+#    attribute(torch.nn.functional)
+#    attribute(torch)
 #
-# attribute(torch.Tensor)
-# attribute(torch.nn.functional)
-# attribute(torch)
 
 ######################################################################
 # If you found an operator that doesn't support channels last tensors