fix docs bugs (#69766)
winffke authored Nov 28, 2024
1 parent 796db76 commit bf9019f
Showing 8 changed files with 8 additions and 15 deletions.
7 changes: 0 additions & 7 deletions _typos.toml
@@ -134,13 +134,6 @@ defind = 'defind'
 defeine = 'defeine'
 defition = 'defition'
 defination = 'defination'
-delet = 'delet'
-dependecies = 'dependecies'
-dependecy = 'dependecy'
-decprecated = 'decprecated'
-derivated = 'derivated'
-descripor = 'descripor'
-deserailize = 'deserailize'
 Destory = 'Destory'
 DEIVCE = 'DEIVCE'
 dictionnary = 'dictionnary'
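For context on the deletions above: _typos.toml is the configuration for the typos spell checker run over the PaddlePaddle sources, and a self-mapping entry such as delet = 'delet' tells the checker to accept that spelling as-is. Since this commit fixes the misspellings in the code, the matching allowlist entries are dropped so the checker will flag them again. A minimal sketch of how such a config works (the [default.extend-words] table is the typos convention; the exact layout of Paddle's file is assumed here):

[default.extend-words]
# A self-mapping entry whitelists the spelling, silencing the checker.
Destory = 'Destory'
# Removing an entry (e.g. `delet = 'delet'`) restores the checker's
# built-in correction, so any remaining occurrence fails the check.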
4 changes: 2 additions & 2 deletions paddle/common/flags.cc
@@ -1521,8 +1521,8 @@ PHI_DEFINE_EXPORTED_bool(use_shm_cache,
  * Since Version: 2.6.2
  * Value Range: bool, default=false
  * Example:
- * Note: . If True, mmap_allocator will use file descripor to open shared memory
- * operation.
+ * Note: . If True, mmap_allocator will use file descriptor to open shared
+ * memory operation.
  */
 PHI_DEFINE_EXPORTED_bool(dataloader_use_file_descriptor,
                          false,
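A usage note on the flag documented above (not part of this diff): flags declared with PHI_DEFINE_EXPORTED_bool are exported to Python, so dataloader_use_file_descriptor can normally be toggled through paddle.set_flags or the matching FLAGS_* environment variable. A minimal sketch, assuming a Paddle build recent enough (2.6.2+, per the comment) to define this flag:

import paddle

# Ask mmap_allocator to open shared memory through file descriptors.
# Assumes this Paddle build defines the flag (introduced in 2.6.2).
paddle.set_flags({'FLAGS_dataloader_use_file_descriptor': True})

# Read the flag back to confirm the setting took effect.
print(paddle.get_flags(['FLAGS_dataloader_use_file_descriptor']))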
2 changes: 1 addition & 1 deletion paddle/fluid/eager/autograd_meta.h
@@ -56,7 +56,7 @@ using AbstractAutogradMeta = paddle::AbstractAutogradMeta;
  *
  * **/
 
-// No other AutogradMeta class should be derivated from AbstractAutogradMeta.
+// No other AutogradMeta class should be derived from AbstractAutogradMeta.
 // It's only used by
 class AutogradMeta : public AbstractAutogradMeta {
  public:
2 changes: 1 addition & 1 deletion paddle/fluid/framework/ir/lock_free_optimize_pass.h
@@ -30,7 +30,7 @@ class Graph;
 
 /*
  * Remove the sum op of all gradients of the backward op.
- * And remove the dependecies of the optimizer related to the
+ * And remove the dependencies of the optimizer related to the
  * same backward op.
  *
  * Before this pass:
2 changes: 1 addition & 1 deletion paddle/phi/infermeta/spmd_rules/reshape.cc
@@ -313,7 +313,7 @@ SpmdInfo ReshapeInferSpmdReverse(const DistMetaTensor& x,
   return {{x_dist_attr}, {out_dist_attr_dst}};
 }
 
-// FIXME(dev): XShape will be decprecated in the future, so we
+// FIXME(dev): XShape will be deprecated in the future, so we
 // need unify inferSpmd into ReshapeInferSpmd function.
 SpmdInfo ReshapeInferSpmdDynamic(const DistMetaTensor& x,
                                  const std::vector<int64_t>& shape) {
2 changes: 1 addition & 1 deletion python/paddle/distributed/passes/auto_parallel_sharding.py
@@ -1303,7 +1303,7 @@ def _overlap_grad_comm(
         )
         idx += 1
 
-        # NOTE(Ruibiao): Why add dependecy here?
+        # NOTE(Ruibiao): Why add dependency here?
         # It is hack to delay GC for coalesce_var, which significantly reduce memory usage.
         # With the pattern of reduce_sum + scale, the coalesce_var is used by the reduce_sum
         # op on the comm-stream, and then released by the scale op on the comp-stream. Since
@@ -869,7 +869,7 @@ def _build_trainer_programs(self, compiled_config):
             # for startup program
             _startup = worker.fake_init_ops_pass(_startup, compiled_config)
             _startup = worker.init_from_server_pass(_startup, compiled_config)
-            _startup = worker.delet_extra_optimizes_pass(
+            _startup = worker.delete_extra_optimizes_pass(
                 _startup, compiled_config
             )
         else:
2 changes: 1 addition & 1 deletion test/cpp/inference/api/trt_dynamic_shape_test.cc
@@ -295,7 +295,7 @@ TEST(AnalysisPredictor, trt_dynamic) { TestDynamic(true); }
 TEST(AnalysisPredictor, trt_memory_serialize) {
   // serailize
   TestDynamic(true, true, true);
-  // deserailize
+  // deserialize
   TestDynamic(true, false, true);
 }
 TEST(AnalysisPredictor, trt_dynamic2) { TestDynamic2(); }
