
Commit 1cd6ebe

kiszk authored and pytorchmergebot committed
Fix typos in messages under torch (pytorch#89049)

This PR fixes typos in messages in `.py` files under the `torch` directory. Only in `torch/onnx/symbolic_opset16.py` does it also fix a typo in a comment, to make the operator name correct.

Pull Request resolved: pytorch#89049
Approved by: https://github.com/lezcano
1 parent d1f48f0 commit 1cd6ebe

File tree

25 files changed (+28, -28 lines)


torch/_refs/nn/functional/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -595,7 +595,7 @@ def _nll_loss_nd(
 ) -> TensorLikeType:
     utils.check(
         input.ndim > 0 and input.ndim <= 3,
-        lambda: f"Expected input dimension to be either [1, 2, 3] but recieved {input.ndim}.",
+        lambda: f"Expected input dimension to be either [1, 2, 3] but received {input.ndim}.",
     )

     utils.check(
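
For context, `utils.check` takes the condition and a callable that produces the message, so the f-string is only built when the check fails. A minimal standalone sketch of that pattern (the `check` helper below is a stand-in for illustration, not the torch internal):

    import torch

    def check(cond, msg):
        # Build the error message lazily: msg() runs only on failure,
        # so the f-string costs nothing on the happy path.
        if not cond:
            raise RuntimeError(msg())

    x = torch.rand(2, 3, 4, 5)  # ndim == 4, outside the supported range
    check(
        x.ndim > 0 and x.ndim <= 3,
        lambda: f"Expected input dimension to be either [1, 2, 3] but received {x.ndim}.",
    )
    # RuntimeError: Expected input dimension to be either [1, 2, 3] but received 4.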

torch/ao/nn/intrinsic/qat/modules/linear_fused.py

Lines changed: 1 addition & 1 deletion

@@ -35,7 +35,7 @@ def __init__(self,
                  freeze_bn=False,
                  qconfig=None):
         nn.modules.linear.Linear.__init__(self, in_features, out_features, bias)
-        assert qconfig, 'qconfig must be provded for QAT module'
+        assert qconfig, 'qconfig must be provided for QAT module'
         self.qconfig = qconfig
         self.freeze_bn = freeze_bn if self.training else True
         self.bn = nn.BatchNorm1d(out_features, eps, momentum, True, True)
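
Constructing the fused QAT module without a qconfig trips this assertion immediately; a minimal sketch, assuming the module is exposed as `torch.ao.nn.intrinsic.qat.LinearBn1d`:

    import torch.ao.nn.intrinsic.qat as nniqat
    from torch.ao.quantization import get_default_qat_qconfig

    try:
        nniqat.LinearBn1d(4, 8)  # no qconfig
    except AssertionError as e:
        print(e)  # qconfig must be provided for QAT module

    # With a qconfig, the fused Linear+BatchNorm1d QAT module constructs normally.
    mod = nniqat.LinearBn1d(4, 8, qconfig=get_default_qat_qconfig("fbgemm"))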

torch/ao/quantization/fx/_model_report/model_report.py

Lines changed: 1 addition & 1 deletion

@@ -385,7 +385,7 @@ def _reformat_reports_for_visualizer(self) -> OrderedDict:
                     module_fqns_to_features[module_fqn] = {**new_info, **present_info}
                 else:
                     error_str = "You have the same key with different values across detectors. "
-                    error_str += "Someone incorrectly implemented a detector with conflicting keys to exisiting detectors."
+                    error_str += "Someone incorrectly implemented a detector with conflicting keys to existing detectors."
                     raise ValueError(error_str)
             else:
                 # we just set it
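
The error guards the merge of per-detector feature dicts: a key may appear across detectors only if its value agrees. A standalone sketch of that check (function name hypothetical, not the ModelReport internal):

    def merge_detector_features(existing: dict, new: dict) -> dict:
        # Reject a key that two detectors report with different values.
        for key, value in new.items():
            if key in existing and existing[key] != value:
                raise ValueError(
                    "You have the same key with different values across detectors. "
                    "Someone incorrectly implemented a detector with conflicting keys to existing detectors."
                )
        return {**existing, **new}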

torch/ao/quantization/observer.py

Lines changed: 1 addition & 1 deletion

@@ -1019,7 +1019,7 @@ def _non_linear_param_search(self) -> Tuple[torch.Tensor, torch.Tensor]:
         This follows the implementation of NormMinimization::NonlinearQuantizationParamsSearch in
         caffe2/quantization/server/norm_minimization.cc
         """
-        assert self.histogram.size()[0] == self.bins, "bins mistmatch"
+        assert self.histogram.size()[0] == self.bins, "bins mismatch"
         bin_width = (self.max_val - self.min_val) / self.bins

         # cumulative sum
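
For context, this search runs when the observer computes quantization parameters from its collected histogram; a minimal usage sketch:

    import torch
    from torch.ao.quantization import HistogramObserver

    obs = HistogramObserver(bins=2048)  # self.bins, checked by the assert above
    obs(torch.randn(1024))              # populate self.histogram
    scale, zero_point = obs.calculate_qparams()  # triggers the param search
    print(scale, zero_point)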

torch/backends/xeon/run_cpu.py

Lines changed: 1 addition & 1 deletion

@@ -598,7 +598,7 @@ def create_args(parser=None):
     _add_multi_instance_params(parser)
     # positional
     parser.add_argument("program", type=str,
-                        help="The full path to the proram/script to be launched. "
+                        help="The full path to the program/script to be launched. "
                              "followed by all the arguments for the script")

     # rest from the training program

torch/cuda/memory.py

Lines changed: 1 addition & 1 deletion

@@ -61,7 +61,7 @@ def caching_allocator_alloc(size, device: Union[Device, int] = None, stream=None
     if not isinstance(stream, int):
         raise TypeError('Invalid type for stream argument, must be '
                         '`torch.cuda.Stream` or `int` representing a pointer '
-                        'to a exisiting stream')
+                        'to a existing stream')
     with torch.cuda.device(device):
         return torch._C._cuda_cudaCachingAllocator_raw_alloc(size, stream)
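
The TypeError fires when `stream` is neither a `torch.cuda.Stream` nor an `int` pointer. A minimal usage sketch (requires a CUDA device):

    import torch

    if torch.cuda.is_available():
        s = torch.cuda.Stream()
        # Allocate 1 KiB from the caching allocator on stream s, then free it.
        ptr = torch.cuda.caching_allocator_alloc(1024, device=0, stream=s)
        torch.cuda.caching_allocator_delete(ptr)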

torch/distributed/benchmarks/benchmark_ddp_rpc.py

Lines changed: 1 addition & 1 deletion

@@ -335,7 +335,7 @@ def run_worker(rank, world_size):
         "--embedding-dim",
         type=int,
         default=EMBEDDING_DIM,
-        help="Number of embedding dimentions.",
+        help="Number of embedding dimensions.",
     )
     parser.add_argument(
         "--warmup-cycles",

torch/distributed/elastic/multiprocessing/api.py

Lines changed: 2 additions & 2 deletions

@@ -537,7 +537,7 @@ def _close(self, death_sig: signal.Signals, timeout: int = 30) -> None:
         for proc in self._pc.processes:
             if proc.is_alive():
                 log.warning(
-                    f"Unable to shutdown process {proc.pid} via {death_sig}, forcefully exitting via {_get_kill_signal()}"
+                    f"Unable to shutdown process {proc.pid} via {death_sig}, forcefully exiting via {_get_kill_signal()}"
                 )
                 try:
                     os.kill(proc.pid, _get_kill_signal())
@@ -714,7 +714,7 @@ def _close(self, death_sig: signal.Signals, timeout: int = 30) -> None:
         for handler in self.subprocess_handlers.values():
             if handler.proc.poll() is None:
                 log.warning(
-                    f"Unable to shutdown process {handler.proc.pid} via {death_sig}, forcefully exitting via {_get_kill_signal()}"
+                    f"Unable to shutdown process {handler.proc.pid} via {death_sig}, forcefully exiting via {_get_kill_signal()}"
                 )
                 handler.close(death_sig=_get_kill_signal())
                 handler.proc.wait()
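
Both warnings describe the same escalation: send the requested death signal, wait up to the timeout, then kill. A standalone sketch of that pattern for a `subprocess.Popen` child (not the elastic internals, which wrap it with the `_get_kill_signal()` helper):

    import signal
    import subprocess
    import time

    def close_process(proc: subprocess.Popen, death_sig=signal.SIGTERM, timeout=30):
        proc.send_signal(death_sig)      # ask the child to exit gracefully
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if proc.poll() is not None:
                return                   # exited on its own
            time.sleep(0.1)
        print(f"Unable to shutdown process {proc.pid} via {death_sig}, "
              f"forcefully exiting via {signal.SIGKILL}")
        proc.kill()                      # escalate to SIGKILL
        proc.wait()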

torch/distributed/elastic/rendezvous/etcd_rendezvous.py

Lines changed: 1 addition & 1 deletion

@@ -293,7 +293,7 @@ def rendezvous_barrier(self):
                 time.sleep(1)

             except RendezvousTimeoutError:
-                log.info("Rendezvous timeout occured in EtcdRendezvousHandler")
+                log.info("Rendezvous timeout occurred in EtcdRendezvousHandler")
                 raise

             except RendezvousClosedError:

torch/distributions/mixture_same_family.py

Lines changed: 1 addition & 1 deletion

@@ -60,7 +60,7 @@ def __init__(self,

         if not isinstance(self._mixture_distribution, Categorical):
             raise ValueError(" The Mixture distribution needs to be an "
-                             " instance of torch.distribtutions.Categorical")
+                             " instance of torch.distributions.Categorical")

         if not isinstance(self._component_distribution, Distribution):
             raise ValueError("The Component distribution need to be an "
