
Commit 3a0801f

samestep authored and facebook-github-bot committed
[skip ci] Fix "arugment" typos (pytorch#61459)
Summary: Fixes pytorch#61455.

Pull Request resolved: pytorch#61459

Reviewed By: soulitzer

Differential Revision: D29636559

Pulled By: samestep

fbshipit-source-id: 9ad65265c0491d9e81bb303abe3a07c6843bfa4a
1 parent e5fcc90 commit 3a0801f

7 files changed: +7 -7 lines changed

aten/src/ATen/core/adaption.cpp (+1 -1)

@@ -7,7 +7,7 @@ void common_device_check_failure(optional<Device>& common_device, const at::Tens
   TORCH_CHECK(false,
       "Expected all tensors to be on the same device, but "
       "found at least two devices, ", common_device.value(), " and ", tensor.device(), "! "
-      "(when checking arugment for argument ", argName, " in method ", methodName, ")");
+      "(when checking argument for argument ", argName, " in method ", methodName, ")");
 }
 
 } // namespace impl
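
For reference, this TORCH_CHECK is what produces the familiar device-mismatch error. A minimal sketch of triggering it (assumes a CUDA-enabled build; the exact argument and method names in the message vary by op):

import torch

# Mixing CPU and GPU tensors in one op trips the corrected TORCH_CHECK
# message above (requires a CUDA-enabled build of PyTorch).
a = torch.randn(2, 2)                  # CPU tensor
b = torch.randn(2, 2, device="cuda")   # GPU tensor
try:
    torch.mm(a, b)
except RuntimeError as e:
    # Prints something like: "Expected all tensors to be on the same device,
    # but found at least two devices, cuda:0 and cpu! (when checking argument
    # for argument mat2 in method ...)"
    print(e)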

binaries/compare_models_torch.cc (+1 -1)

@@ -50,7 +50,7 @@ C10_DEFINE_string(
 C10_DEFINE_bool(
     no_inputs,
     false,
-    "Whether the model has any input. Will ignore other input arugments if true");
+    "Whether the model has any input. Will ignore other input arguments if true");
 C10_DEFINE_bool(
     use_caching_allocator,
     false,

binaries/speed_benchmark_torch.cc (+1 -1)

@@ -48,7 +48,7 @@ C10_DEFINE_string(
 C10_DEFINE_bool(
     no_inputs,
     false,
-    "Whether the model has any input. Will ignore other input arugments if true");
+    "Whether the model has any input. Will ignore other input arguments if true");
 C10_DEFINE_bool(
     use_caching_allocator,
     false,

docs/source/fx.rst (+1 -1)

@@ -247,7 +247,7 @@ multiplication after the ``F.relu``, and then clean up the original
 objects to automatically record operations into the :class:`Graph`.
 
 To use this method, we write the operations that we want inserted as regular
-PyTorch code and invoke that code with :class:`Proxy` objects as arugments.
+PyTorch code and invoke that code with :class:`Proxy` objects as arguments.
 These :class:`Proxy` objects will capture the operations that are performed
 on them and append them to the :class:`Graph`.
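
To make the Proxy pattern described in this hunk concrete, here is a minimal sketch (not taken from fx.rst itself; assumes a recent torch.fx where fx.Proxy can wrap a raw Node directly):

import torch
import torch.fx as fx

# Build a Graph directly: wrap a placeholder Node in a Proxy, run ordinary
# PyTorch code on it, and the operations are recorded into the Graph.
graph = fx.Graph()
x = fx.Proxy(graph.placeholder("x"))
y = torch.relu(x) * 2          # regular PyTorch code with Proxy arguments
graph.output(y.node)

gm = fx.GraphModule(torch.nn.Module(), graph)
print(gm.code)                 # shows the recorded relu and mul calls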

test/test_overrides.py (+1 -1)

@@ -407,7 +407,7 @@ def test_mm_semantics(self):
 
     def test_precedence_semantics(self):
         """Test semantics for __torch_function__ for functions that take
-        multiple arugments
+        multiple arguments
 
         For functions that take multiple arguments, the appropriate
         __torch_function__ implementation to call is determined by
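
The precedence rule this docstring refers to can be sketched with plain classes (hypothetical names, not taken from the test file): when several arguments define __torch_function__, a subclass's implementation runs before its parent's, regardless of argument order.

import torch

class Base:
    @classmethod
    def __torch_function__(cls, func, types, args=(), kwargs=None):
        return "handled by Base"

class Child(Base):
    @classmethod
    def __torch_function__(cls, func, types, args=(), kwargs=None):
        return "handled by Child"

# Child is a subclass of Base, so its override wins even though the
# Base instance comes first in the argument list.
print(torch.mm(Base(), Child()))   # -> "handled by Child"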

torch/csrc/jit/tensorexpr/dim_arg.h (+1 -1)

@@ -5,7 +5,7 @@ namespace torch {
 namespace jit {
 namespace tensorexpr {
 // A helper structure to store the arguments to specify dimensions. In the
-// Compute arugments for dim_args, all of the following is supported. For
+// Compute arguments for dim_args, all of the following is supported. For
 // example:
 // dim_args: {1, 2, 3, 4}
 // dim_args: {{1, "x"}, {2, "y"}, {3, "z"}}

torch/fx/operator_schemas.py (+1 -1)

@@ -317,7 +317,7 @@ def _args_kwargs_to_normalized_args_kwargs(sig : inspect.Signature, args : Tuple
 
         target (inspect.Signature): Signature object for the target
         args (Tuple): Arguments that appear at the callsite for `target`
-        kwargs (Dict): Keyword arugments that appear at the callsite for `target`
+        kwargs (Dict): Keyword arguments that appear at the callsite for `target`
         normalize_to_only_use_kwargs (bool): Whether to normalize to only use kwargs.
 
     Returns:
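
A public wrapper around this helper lives in the same module as normalize_function; a minimal sketch of the normalization the docstring describes (the printed parameter names assume F.relu's signature):

import torch
import torch.nn.functional as F
from torch.fx.operator_schemas import normalize_function

# Normalize a callsite's positional arguments into keyword arguments
# using the target's signature.
pair = normalize_function(F.relu, (torch.randn(3),),
                          normalize_to_only_use_kwargs=True)
if pair is not None:          # None when the signature cannot be matched
    print(pair.kwargs)        # e.g. {'input': tensor(...), 'inplace': False}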
