fix typos #361

Merged
merged 1 commit on Mar 26, 2017
src/boosting/boosting.cpp (3 additions, 3 deletions)
@@ -48,11 +48,11 @@ Boosting* Boosting::CreateBoosting(const std::string& type, const char* filename
       } else if (type == std::string("goss")) {
         ret.reset(new GOSS());
       } else {
-        Log::Fatal("unknow boosting type %s", type.c_str());
+        Log::Fatal("unknown boosting type %s", type.c_str());
       }
       LoadFileToBoosting(ret.get(), filename);
     } else {
-      Log::Fatal("unknow submodel type in model file %s", filename);
+      Log::Fatal("unknown submodel type in model file %s", filename);
     }
     return ret.release();
   }
@@ -64,7 +64,7 @@ Boosting* Boosting::CreateBoosting(const char* filename) {
   if (type == std::string("tree")) {
     ret.reset(new GBDT());
   } else {
-    Log::Fatal("unknow submodel type in model file %s", filename);
+    Log::Fatal("unknown submodel type in model file %s", filename);
   }
   LoadFileToBoosting(ret.get(), filename);
   return ret.release();
src/boosting/dart.hpp (2 additions, 2 deletions)
@@ -84,7 +84,7 @@ class DART: public GBDT {
   void DroppingTrees() {
     drop_index_.clear();
     bool is_skip = random_for_drop_.NextFloat() < gbdt_config_->skip_drop;
-    // select dropping tree indexes based on drop_rate and tree weights
+    // select dropping tree indices based on drop_rate and tree weights
     if (!is_skip) {
       double drop_rate = gbdt_config_->drop_rate;
       if (!gbdt_config_->uniform_drop) {
@@ -180,7 +180,7 @@ class DART: public GBDT {
   std::vector<double> tree_weight_;
   /*! \brief sum weights of all trees */
   double sum_weight_;
-  /*! \brief The indexes of dropping trees */
+  /*! \brief The indices of dropping trees */
   std::vector<int> drop_index_;
   /*! \brief Random generator, used to select dropping trees */
   Random random_for_drop_;
src/boosting/gbdt.cpp (1 addition, 1 deletion)
@@ -370,7 +370,7 @@ bool GBDT::TrainOneIter(const score_t* gradient, const score_t* hessian, bool is
 #ifdef TIMETAG
     start_time = std::chrono::steady_clock::now();
 #endif
-    std::unique_ptr<Tree> new_tree(new Tree(2));
+    std::unique_ptr<Tree> new_tree;
     // train a new tree
     new_tree.reset(tree_learner_->Train(gradient + curr_class * num_data_, hessian + curr_class * num_data_));
 #ifdef TIMETAG
src/boosting/gbdt.h (1 addition, 3 deletions)
@@ -26,11 +26,10 @@ class GBDT: public Boosting {
   ~GBDT();
   /*!
   * \brief Initialization logic
-  * \param config Config for boosting
+  * \param gbdt_config Config for boosting
   * \param train_data Training data
   * \param object_function Training objective function
   * \param training_metrics Training metrics
-  * \param output_model_filename Filename of output model
   */
   void Init(const BoostingConfig* gbdt_config, const Dataset* train_data, const ObjectiveFunction* object_function,
             const std::vector<const Metric*>& training_metrics)
@@ -267,7 +266,6 @@ class GBDT: public Boosting {
   std::string OutputMetric(int iter);
   /*!
   * \brief Calculate feature importances
-  * \param last_iter Last tree use to calculate
   */
   std::vector<std::pair<size_t, std::string>> FeatureImportance() const;
   /*! \brief current iteration */