[CodeStyle][Typos][A-[31-35]] Fix typo (argumnets, argmuments, argumet, arised, aranged, arrray) #69505

Merged: 2 commits, Nov 20, 2024
Changes from 1 commit
7 changes: 1 addition & 6 deletions _typos.toml
@@ -10,6 +10,7 @@ grad = "grad"
 arange = "arange"
 ot = 'ot'
 pash = 'pash'
+eles = 'eles'
Member: You could mention in the PR description that `eles` is an abbreviation of "elements".
 # These words need to be fixed
 ontext = 'ontext'
@@ -181,7 +182,6 @@ overrided = 'overrided'
 smll = 'smll'
 outpout = 'outpout'
 staticaly = 'staticaly'
-aranged = 'aranged'
 offets = 'offets'
 olny = 'olny'
 Continer = 'Continer'
@@ -192,7 +192,6 @@ readed = 'readed'
 Opeartion = 'Opeartion'
 shoule = 'shoule'
 inputed = 'inputed'
-arrray = 'arrray'
 positon = 'positon'
 invalide = 'invalide'
 repeatly = 'repeatly'
@@ -255,7 +254,6 @@ defition = 'defition'
 operants = 'operants'
 funcitons = 'funcitons'
 dateset = 'dateset'
-arised = 'arised'
 optimzed = 'optimzed'
 encouter = 'encouter'
 alis = 'alis'
@@ -264,7 +262,6 @@ poped = 'poped'
 parmeter = 'parmeter'
 doens = 'doens'
 cadidate = 'cadidate'
-argumnets = 'argumnets'
 inconsistence = 'inconsistence'
 Caculate = 'Caculate'
 seperator = 'seperator'
@@ -417,7 +414,6 @@ recieved = 'recieved'
 Hanlder = 'Hanlder'
 EPOCHES = 'EPOCHES'
 sequnce = 'sequnce'
-argmuments = 'argmuments'
 Iteraion = 'Iteraion'
 whill = 'whill'
 tood = 'tood'
@@ -778,7 +774,6 @@ expaned = 'expaned'
 choos = 'choos'
 whos = 'whos'
 architecuture = 'architecuture'
-argumet = 'argumet'
 coule = 'coule'
 instanciate = 'instanciate'
 distrubuted = 'distrubuted'
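For context on the file being edited: `_typos.toml` configures the `typos` spell checker, and an entry that maps a word to itself tells the checker to accept that spelling. Entries under `# These words need to be fixed` are known typos whitelisted only until their occurrences are cleaned up, which is why this PR fixes six words in the code and deletes their entries. A minimal sketch of that reading, assuming the entries live under a `[default.extend-words]` table (the table header is outside the hunks shown):

```python
# Sketch: read _typos.toml and check the net effect of this PR on the word
# list. The [default.extend-words] location is an assumption; it is not
# visible in the diff hunks above.
import tomllib  # Python 3.11+

with open("_typos.toml", "rb") as f:
    words = tomllib.load(f)["default"]["extend-words"]

# Newly whitelisted: `eles` is accepted as an abbreviation of "elements".
assert words.get("eles") == "eles"

# Fixed in this PR: their entries are gone, so `typos` flags them again.
for typo in ("aranged", "arrray", "arised", "argumnets", "argmuments", "argumet"):
    assert typo not in words
```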
@@ -286,7 +286,7 @@ void CinnJitInstruction::Run() {
         static_cast<void*>(static_cast<phi::GPUContext*>(dev_ctx_)->stream());
   }

-  // 1. prepare kernel argmuments
+  // 1. prepare kernel arguments
   fn_ptr_impl_->InitFuncArgs(tensor_args_);

   if (FLAGS_cinn_bucket_compile && need_update_shape) {
2 changes: 1 addition & 1 deletion paddle/fluid/inference/tensorrt/op_teller.cc
@@ -2397,7 +2397,7 @@ struct SimpleOpTypeSetTeller : public Teller {
     std::string padding_algorithm =
         PADDLE_GET_CONST(std::string, desc.GetAttr("padding_algorithm"));

-    // trt error is arised if conv3d_transpose and SAME
+    // trt error arises if conv3d_transpose and SAME
SigureMo marked this conversation as resolved.
     if (op_type == "conv3d_transpose" && padding_algorithm == "SAME" &&
         !with_dynamic_shape) {
       return false;
2 changes: 1 addition & 1 deletion paddle/phi/infermeta/spmd_rules/replicated.h
@@ -30,7 +30,7 @@ namespace distributed {
  * in auto parallel, and once there is a specific rule for that op, replicated
  * rule would not effect that op any more.
  *
- * Vector of input tensors and output tensors used as argumnets (for both
+ * Vector of input tensors and output tensors used as arguments (for both
  * inferfw & inferbw) to support any kind of op.
  *
  */
4 changes: 2 additions & 2 deletions paddle/phi/kernels/impl/kron_grad_kernel_impl.h
@@ -199,8 +199,8 @@ struct KronGradOpFunctor {
   p_stride_dout = stride_dout.Get();
   p_shape_y = dim_y.Get();
 #endif
-  // dout_x: dout * kron(ones(X), Y) re-aranged in shape (numel_x, numel_y)
-  // dout_y: dout * kron(X, ones(Y)) re-aranged in shape (numel_y, numel_x)
+  // dout_x: dout * kron(ones(X), Y) re-arranged in shape (numel_x, numel_y)
+  // dout_y: dout * kron(X, ones(Y)) re-arranged in shape (numel_y, numel_x)
   DenseTensor dout_x;
   T *p_dout_x = nullptr;
   if (dx) {
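The re-arranged products in the corrected comments follow directly from the definition of the Kronecker product. A small NumPy sketch of the 1-D case, illustrative only and not Paddle's actual kernel:

```python
# 1-D illustration: for z = kron(x, y), z[i*ny + j] = x[i] * y[j], so each
# gradient is an element-wise product of dout with a Kronecker factor,
# re-arranged onto a 2-D grid and reduced along one axis.
import numpy as np

x, y = np.random.randn(3), np.random.randn(4)
dout = np.random.randn(x.size * y.size)  # upstream gradient w.r.t. kron(x, y)

# dout_x: dout * kron(ones(X), Y), re-arranged in shape (numel_x, numel_y)
dx = (dout * np.kron(np.ones_like(x), y)).reshape(x.size, y.size).sum(axis=1)
# dout_y: dout * kron(X, ones(Y)), re-arranged in shape (numel_y, numel_x)
dy = (dout * np.kron(x, np.ones_like(y))).reshape(x.size, y.size).T.sum(axis=1)

# Cross-check against dz[i*ny + j]/dx[i] = y[j] and dz[i*ny + j]/dy[j] = x[i].
g = dout.reshape(x.size, y.size)
assert np.allclose(dx, g @ y) and np.allclose(dy, g.T @ x)
```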
2 changes: 1 addition & 1 deletion python/paddle/incubate/asp/utils.py
@@ -550,7 +550,7 @@ def create_mask(
     t = tensor.astype(float)

     assert isinstance(func_name, MaskAlgo), (
-        "func_name argumet of create_mask is only accepted as type MaskAlgo. "
+        "func_name argument of create_mask is only accepted as type MaskAlgo. "
         f"But got {type(func_name)}"
     )
     func = getattr(sys.modules[__name__], func_name.value, None)
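The assert above rejects anything that is not a `MaskAlgo` member. A hedged usage sketch, assuming `create_mask` accepts a NumPy array (consistent with the `tensor.astype(float)` call) and that `MaskAlgo.MASK_1D` and the `n`/`m` sparsity parameters exist as shown:

```python
# Hypothetical usage of the guarded API; names follow the diff above, but the
# MASK_1D member and the n/m parameters are assumptions about the ASP module.
import numpy as np
from paddle.incubate.asp.utils import MaskAlgo, create_mask

t = np.random.rand(32, 32)
mask = create_mask(t, func_name=MaskAlgo.MASK_1D, n=2, m=4)  # enum member: OK
# create_mask(t, func_name="MASK_1D")  # a str would trip the assert above
```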
6 changes: 3 additions & 3 deletions python/paddle/jit/dy2static/convert_operators.py
@@ -193,7 +193,7 @@ def convert_while_loop(
         _run_py_while(cond, body, getter, setter)


-def _convert_tensor_arrray_if_necessary(setterhelper, push_pop_names):
+def _convert_tensor_array_if_necessary(setterhelper, push_pop_names):
     push_pop_vars = setterhelper.get(push_pop_names)
     if push_pop_vars is None:
         return
@@ -219,7 +219,7 @@ def _run_paddle_while(
 ):
     # NOTE: loop_vars of Paddle op `control_flow.while_loop` must be Paddle Tensors.
     helper = GetterSetterHelper(getter, setter, return_name_ids, push_pop_names)
-    _convert_tensor_arrray_if_necessary(helper, push_pop_names)
+    _convert_tensor_array_if_necessary(helper, push_pop_names)

     union_name = (
         OrderedSet(return_name_ids) if return_name_ids else OrderedSet()
@@ -447,7 +447,7 @@ def _run_paddle_cond(
     helper = GetterSetterHelper(
         get_args, set_args, return_name_ids, push_pop_names
     )
-    _convert_tensor_arrray_if_necessary(helper, push_pop_names)
+    _convert_tensor_array_if_necessary(helper, push_pop_names)
     pred = cast_bool_if_necessary(pred)
     init_args = helper.get(return_name_ids)
     from paddle.jit.dy2static.program_translator import ProgramTranslator
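For readers outside dy2static: the renamed `_convert_tensor_array_if_necessary` backs the conversion of Python list push/pop inside control flow, so `_run_paddle_while` and `_run_paddle_cond` can thread those values through the static-graph ops. A hedged sketch of the user-facing behavior this enables, assuming list appends inside a `to_static` while loop are supported by this machinery:

```python
# Sketch of the behavior the helpers above implement; the exact lowering is
# internal, but a list mutated inside converted control flow is backed by a
# tensor array so while_loop can carry it across iterations.
import paddle

@paddle.jit.to_static
def collect(n):
    xs = []  # push/pop on this list is rewritten via push_pop_names
    i = paddle.zeros([1], dtype='int64')
    while i < n:  # lowered through convert_while_loop -> _run_paddle_while
        xs.append(i * 2)
        i = i + 1
    return paddle.concat(xs)

print(collect(paddle.to_tensor([4], dtype='int64')))
```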