Skip to content

Commit 7e7e569

Browse files
tugsbayasgalan authored and pytorchmergebot committed
Suppress more warnings (pytorch#149833)
Differential Revision: [D71702307](https://our.internmc.facebook.com/intern/diff/D71702307) Pull Request resolved: pytorch#149833 Approved by: https://github.com/malfet, https://github.com/Skylion007
1 parent 790d459 commit 7e7e569

File tree

3 files changed

+19
-9
lines changed

3 files changed

+19
-9
lines changed

torch/_export/passes/lift_constants_pass.py

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# mypy: allow-untyped-defs
22
import collections
3-
import warnings
3+
import logging
44
from typing import Any, Union
55

66
import torch
@@ -19,6 +19,9 @@
1919
from torch.fx.graph_module import _get_attr
2020

2121

22+
log = logging.getLogger(__name__)
23+
24+
2225
class ConstantAttrMap(collections.abc.MutableMapping):
2326
"""A mapping class that understands how to use module constants (tensors,
2427
ScriptObjects, FakeScriptObjects) as keys. We store tensors and FakeScriptObjects normally,
@@ -213,9 +216,11 @@ def lift_constants_pass(
213216
elif isinstance(constant_val, torch.Tensor):
214217
# Remove the parameterness of constant_val
215218
if isinstance(constant_val, torch.nn.Parameter):
216-
warnings.warn(
217-
f"{node.target} created when tracing {node.meta.get('stack_trace', '<unknown stack>')} is a parameter. But"
218-
f"it's not registered with register_parameter(). export will treat it as a constant tensor"
219+
log.debug(
220+
"%s created when tracing %s is a parameter. But "
221+
"it's not registered with register_parameter(). export will treat it as a constant tensor",
222+
str(node.target),
223+
str(node.meta.get("stack_trace", "<unknown stack>")),
219224
)
220225
# We get the real data out of the parameter by disabling the surrounding fake mode.
221226
with unset_fake_temporarily():

torch/export/_unlift.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,12 @@ def _unlift_inputs_as_getattr(
105105

106106
else:
107107
with gm.graph.inserting_after(input_node):
108-
getattr_node = gm.graph.get_attr(lifted_node)
108+
# It is fine to ignore this warning because
109+
# it is guaranteed that we will populate this
110+
# attr later.
111+
with warnings.catch_warnings():
112+
warnings.simplefilter("ignore")
113+
getattr_node = gm.graph.get_attr(lifted_node)
109114
input_node.replace_all_uses_with(getattr_node)
110115
metadata = input_node.meta
111116
gm.graph.erase_node(input_node)

torch/fx/experimental/proxy_tensor.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@
1414
import traceback
1515
import typing
1616
import typing_extensions
17-
import warnings
1817
import weakref
1918
from collections import defaultdict, OrderedDict
2019
from collections.abc import Generator, Mapping, Sequence
@@ -1820,11 +1819,12 @@ def call_module(
18201819
try:
18211820
return Tracer.call_module(self, m, forward, args, kwargs)
18221821
except _ModuleNotInstalledAsSubmoduleError:
1823-
warnings.warn(
1824-
f"Unable to find the path of the module {m}. "
1822+
log.debug(
1823+
"Unable to find the path of the module %s. "
18251824
"This might be because the module was not properly registered "
18261825
"as a submodule, which is not good practice. We will trace "
1827-
"through the module without recording stack information."
1826+
"through the module without recording stack information.",
1827+
str(m),
18281828
)
18291829
return forward(*args, **kwargs)
18301830

0 commit comments

Comments (0)