diff --git a/paddle/fluid/eager/pylayer/py_layer_node.cc b/paddle/fluid/eager/pylayer/py_layer_node.cc
index 5ac0fc4640eb0..1aacd69ecc32d 100644
--- a/paddle/fluid/eager/pylayer/py_layer_node.cc
+++ b/paddle/fluid/eager/pylayer/py_layer_node.cc
@@ -50,7 +50,7 @@ GradNodePyLayer::operator()(
   PADDLE_ENFORCE_EQ(ctx->forward_output_tensor_is_duplicable.size(),
                     grads.size(),
                     paddle::platform::errors::InvalidArgument(
-                        "%s's grad input size(%s) mast be equal with it's "
+                        "%s's grad input size(%s) must be equal to its "
                         "forward's output size(%s).",
                         name(),
                         grads.size(),
diff --git a/python/paddle/autograd/saved_tensors_hooks.py b/python/paddle/autograd/saved_tensors_hooks.py
index 5d68a90e78c89..2839f7acafbe6 100644
--- a/python/paddle/autograd/saved_tensors_hooks.py
+++ b/python/paddle/autograd/saved_tensors_hooks.py
@@ -35,7 +35,7 @@ class saved_tensors_hooks:
     backward need use the saved inputs/outputs tensors. Then you can reload
     the tensor and return it to paddle framework. The input of `unpack_hook`
     is the information returned by `pack_hook`. The output of `unpack_hook`
-    is a tensor reloaded by the information, and the tensor mast has the same
+    is a tensor reloaded by the information, and the tensor must have the same
     content as the original tensor passed as input to the corresponding
     `pack_hook`.
 
diff --git a/python/paddle/base/layers/layer_function_generator.py b/python/paddle/base/layers/layer_function_generator.py
index d71316ba90dc6..2404ef286b1f2 100644
--- a/python/paddle/base/layers/layer_function_generator.py
+++ b/python/paddle/base/layers/layer_function_generator.py
@@ -329,7 +329,7 @@ def func(x, name=None):
                 and x.is_view_var
             ):
                 raise ValueError(
-                    'Sorry about what\'s happend. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. You mast find the location of the strided API be called, and call {} = {}.assign().'.format(
-                        inplace_op_type, x.name, x.name, x.nameb
+                    'Sorry about what\'s happened. In to_static mode, {}\'s output variable {} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. You must find the location where the strided API is called, and call {} = {}.assign().'.format(
+                        inplace_op_type, x.name, x.name, x.name
                     )
                 )
diff --git a/python/paddle/utils/inplace_utils.py b/python/paddle/utils/inplace_utils.py
index 306c3fba5bf64..a5f30ab91daaa 100644
--- a/python/paddle/utils/inplace_utils.py
+++ b/python/paddle/utils/inplace_utils.py
@@ -39,7 +39,7 @@ def __impl__(*args, **kwargs):
         for arg in args:
             if hasattr(arg, "is_view_var") and arg.is_view_var:
                 raise ValueError(
-                    f'Sorry about what\'s happend. In to_static mode, {func.__name__}\'s output variable {arg.name} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. You mast find the location of the strided API be called, and call {arg.name} = {arg.name}.assign().'
+                    f'Sorry about what\'s happened. In to_static mode, {func.__name__}\'s output variable {arg.name} is a viewed Tensor in dygraph. This will result in inconsistent calculation behavior between dynamic and static graphs. You must find the location where the strided API is called, and call {arg.name} = {arg.name}.assign().'
                 )
 
         origin_func = f"{func.__module__}.{origin_api_name}"
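
The `saved_tensors_hooks` docstring above describes the pack/unpack contract: `pack_hook` may stash a saved tensor in any form, and `unpack_hook` must rebuild a tensor with the same content. A minimal sketch of that round trip, offloading to numpy and reloading (the hook bodies are illustrative and not part of this patch):

    import paddle

    def pack_hook(x):
        # Store the saved tensor outside the framework, e.g. as a numpy array.
        return x.numpy()

    def unpack_hook(data):
        # Rebuild a tensor with the same content from the packed information.
        return paddle.to_tensor(data)

    a = paddle.ones([3, 3])
    a.stop_gradient = False
    with paddle.autograd.saved_tensors_hooks(pack_hook, unpack_hook):
        y = paddle.multiply(a, a)
    y.sum().backward()  # backward unpacks and uses the reloaded tensor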
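
The two to_static error messages tell the user to break the view relationship with `.assign()` before the inplace call. A minimal sketch of that remedy, assuming `reshape` is the strided API that produced the view and `tanh_` is the inplace op (both chosen for illustration):

    import paddle

    @paddle.jit.to_static
    def forward(x):
        v = x.reshape([4])  # strided API: `v` is a view of `x` in dygraph
        v = v.assign()      # copy the view first, as the error message suggests
        return v.tanh_()    # the inplace API no longer sees a viewed Tensor

    out = forward(paddle.ones([2, 2]))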