[WIP] Get column matrix from GHistIndex. #7072

Closed
wants to merge 1 commit into from
10 changes: 10 additions & 0 deletions src/data/gradient_index.cc
@@ -6,6 +6,7 @@
#include <limits>
#include "gradient_index.h"
#include "../common/hist_util.h"
+ #include "../common/column_matrix.h"

namespace xgboost {
void GHistIndexMatrix::Init(DMatrix* p_fmat, int max_bins) {
@@ -162,4 +163,13 @@ void GHistIndexMatrix::ResizeIndex(const size_t n_index,
index.Resize((sizeof(uint32_t)) * n_index);
}
}

+ common::ColumnMatrix const& GHistIndexMatrix::Columns(double sparse_threshold) {
+   if (!this->columns_ || sparse_threshold_ != sparse_threshold) {
+     this->columns_.reset(new common::ColumnMatrix{});
+     this->columns_->Init(*this, sparse_threshold);
+     sparse_threshold_ = sparse_threshold;
+   }
+   return *columns_;
+ }
} // namespace xgboost
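The `Columns()` accessor added above follows a lazy-cache pattern: the column matrix is built on first request and rebuilt only when the requested `sparse_threshold` differs from the cached one. Below is a minimal, self-contained sketch of the same pattern; `FakeColumnMatrix` and `LazyColumnsHolder` are illustrative names, not xgboost types.

```cpp
#include <iostream>
#include <memory>

// Illustrative stand-in for common::ColumnMatrix (not the xgboost type).
struct FakeColumnMatrix {
  double threshold;
  explicit FakeColumnMatrix(double t) : threshold(t) {}
};

// Mirrors the caching behaviour of GHistIndexMatrix::Columns() in miniature.
class LazyColumnsHolder {
 public:
  FakeColumnMatrix const& Columns(double sparse_threshold) {
    // Build on first use, or rebuild if a different threshold is requested.
    if (!columns_ || sparse_threshold_ != sparse_threshold) {
      columns_ = std::make_shared<FakeColumnMatrix>(sparse_threshold);
      sparse_threshold_ = sparse_threshold;
    }
    return *columns_;
  }

 private:
  std::shared_ptr<FakeColumnMatrix> columns_;
  double sparse_threshold_{0};
};

int main() {
  LazyColumnsHolder holder;
  auto const* first  = &holder.Columns(0.2);  // built here
  auto const* cached = &holder.Columns(0.2);  // same threshold: cached object
  std::cout << std::boolalpha << (first == cached) << "\n";  // true
  std::cout << holder.Columns(0.5).threshold << "\n";        // rebuilt: prints 0.5
}
```

One consequence worth noting: the returned reference is only valid until the next call with a different threshold, so callers should not hold it across such a call.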
14 changes: 12 additions & 2 deletions src/data/gradient_index.h
@@ -11,6 +11,10 @@
#include "../common/threading_utils.h"

namespace xgboost {
+ namespace common {
+ class ColumnMatrix;
+ } // namespace common

/*!
* \brief preprocessed global index matrix, in CSR format
*
@@ -33,8 +37,6 @@ class GHistIndexMatrix {
GHistIndexMatrix(DMatrix* x, int32_t max_bin) {
this->Init(x, max_bin);
}
- // Create a global histogram matrix, given cut
- void Init(DMatrix* p_fmat, int max_num_bins);

// specific method for sparse data as no possibility to reduce allocated memory
template <typename BinIdxType, typename GetOffset>
@@ -78,7 +80,15 @@ class GHistIndexMatrix {
return isDense_;
}

+ common::ColumnMatrix const& Columns(double sparse_threshold);

private:
+ // Create a global histogram matrix, given cut
+ void Init(DMatrix* p_fmat, int max_num_bins);
+ // shared ptr for incomplete type.
+ std::shared_ptr<common::ColumnMatrix> columns_;
+ double sparse_threshold_ {0};

std::vector<size_t> hit_count_tloc_;
bool isDense_;
};
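The header keeps `common::ColumnMatrix` as a forward declaration and stores it behind a `std::shared_ptr` (the "// shared ptr for incomplete type." comment above). `shared_ptr` works here because its deleter is type-erased and captured where the pointer is actually created, so translation units that only see this header never need the full definition; a by-value member would not compile, and `unique_ptr` would additionally require the class's destructor to be defined out of line. A small sketch of the idiom with made-up names (`Widget`/`Impl`), collapsing header and source file into one listing:

```cpp
#include <memory>

// --- widget.h: only a forward declaration of Impl is visible here ---
class Impl;  // incomplete type

class Widget {
 public:
  Impl const& Get();  // builds the Impl lazily
 private:
  // Compiles even though Impl is incomplete: shared_ptr's deleter is
  // type-erased and only captured at the make_shared call site below.
  std::shared_ptr<Impl> impl_;
};

// --- widget.cc: Impl is complete here, so it can be constructed ---
class Impl {
 public:
  int value = 42;
};

Impl const& Widget::Get() {
  if (!impl_) {
    impl_ = std::make_shared<Impl>();
  }
  return *impl_;
}

int main() {
  Widget w;
  return w.Get().value == 42 ? 0 : 1;
}
```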
58 changes: 23 additions & 35 deletions src/tree/updater_quantile_hist.cc
@@ -69,27 +69,18 @@ template<typename GradientSumT>
void QuantileHistMaker::CallBuilderUpdate(const std::unique_ptr<Builder<GradientSumT>>& builder,
HostDeviceVector<GradientPair> *gpair,
DMatrix *dmat,
- GHistIndexMatrix const& gmat,
+ GHistIndexMatrix* gmat,
const std::vector<RegTree *> &trees) {
for (auto tree : trees) {
- builder->Update(gmat, column_matrix_, gpair, dmat, tree);
+ builder->Update(gmat, gpair, dmat, tree);
}
}
void QuantileHistMaker::Update(HostDeviceVector<GradientPair> *gpair,
DMatrix *dmat,
const std::vector<RegTree *> &trees) {
- auto const &gmat =
-     *(dmat->GetBatches<GHistIndexMatrix>(
-         BatchParam{GenericParameter::kCpuId, param_.max_bin})
-         .begin());
- if (dmat != p_last_dmat_ || is_gmat_initialized_ == false) {
-   updater_monitor_.Start("GmatInitialization");
-   column_matrix_.Init(gmat, param_.sparse_threshold);
-   updater_monitor_.Stop("GmatInitialization");
-   // A proper solution is puting cut matrix in DMatrix, see:
-   // https://github.com/dmlc/xgboost/issues/5143
-   is_gmat_initialized_ = true;
- }
+ auto &gmat = *(dmat->GetBatches<GHistIndexMatrix>(
+     BatchParam{GenericParameter::kCpuId, param_.max_bin})
+     .begin());
// rescale learning rate according to size of trees
float lr = param_.learning_rate;
param_.learning_rate = lr / trees.size();
@@ -100,12 +91,12 @@ void QuantileHistMaker::Update(HostDeviceVector<GradientPair> *gpair,
if (!float_builder_) {
SetBuilder(n_trees, &float_builder_, dmat);
}
- CallBuilderUpdate(float_builder_, gpair, dmat, gmat, trees);
+ CallBuilderUpdate(float_builder_, gpair, dmat, &gmat, trees);
} else {
if (!double_builder_) {
SetBuilder(n_trees, &double_builder_, dmat);
}
- CallBuilderUpdate(double_builder_, gpair, dmat, gmat, trees);
+ CallBuilderUpdate(double_builder_, gpair, dmat, &gmat, trees);
}

param_.learning_rate = lr;
@@ -448,8 +439,7 @@ void QuantileHistMaker::Builder<GradientSumT>::BuildNodeStats(
template<typename GradientSumT>
template <bool any_missing>
void QuantileHistMaker::Builder<GradientSumT>::ExpandTree(
- const GHistIndexMatrix& gmat,
- const ColumnMatrix& column_matrix,
+ GHistIndexMatrix* gmat,
DMatrix* p_fmat,
RegTree* p_tree,
const std::vector<GradientPair>& gpair_h) {
@@ -458,7 +448,7 @@ void QuantileHistMaker::Builder<GradientSumT>::ExpandTree(

Driver<CPUExpandEntry> driver(static_cast<TrainParam::TreeGrowPolicy>(param_.grow_policy));
std::vector<CPUExpandEntry> expand;
- InitRoot<any_missing>(gmat, *p_fmat, p_tree, gpair_h, &num_leaves, &expand);
+ InitRoot<any_missing>(*gmat, *p_fmat, p_tree, gpair_h, &num_leaves, &expand);
driver.Push(expand[0]);

int depth = 0;
@@ -473,19 +463,19 @@ void QuantileHistMaker::Builder<GradientSumT>::ExpandTree(
AddSplitsToTree(expand, p_tree, &num_leaves, &nodes_for_apply_split);

if (nodes_for_apply_split.size() != 0) {
- ApplySplit<any_missing>(nodes_for_apply_split, gmat, column_matrix, hist_, p_tree);
+ ApplySplit<any_missing>(nodes_for_apply_split, gmat, hist_, p_tree);
SplitSiblings(nodes_for_apply_split, &nodes_to_evaluate, p_tree);

int starting_index = std::numeric_limits<int>::max();
int sync_count = 0;
hist_rows_adder_->AddHistRows(this, &starting_index, &sync_count, p_tree);
if (depth < param_.max_depth) {
- BuildLocalHistograms<any_missing>(gmat, p_tree, gpair_h);
+ BuildLocalHistograms<any_missing>(*gmat, p_tree, gpair_h);
hist_synchronizer_->SyncHistograms(this, starting_index, sync_count, p_tree);
}

- BuildNodeStats(gmat, *p_fmat, gpair_h, nodes_for_apply_split, p_tree);
- EvaluateSplits(nodes_to_evaluate, gmat, hist_, *p_tree);
+ BuildNodeStats(*gmat, *p_fmat, gpair_h, nodes_for_apply_split, p_tree);
+ EvaluateSplits(nodes_to_evaluate, *gmat, hist_, *p_tree);

for (size_t i = 0; i < nodes_for_apply_split.size(); ++i) {
const CPUExpandEntry candidate = nodes_for_apply_split[i];
@@ -508,8 +498,7 @@ void QuantileHistMaker::Builder<GradientSumT>::ExpandTree(

template <typename GradientSumT>
void QuantileHistMaker::Builder<GradientSumT>::Update(
- const GHistIndexMatrix &gmat,
- const ColumnMatrix &column_matrix,
+ GHistIndexMatrix* gmat,
HostDeviceVector<GradientPair> *gpair,
DMatrix *p_fmat, RegTree *p_tree) {
builder_monitor_.Start("Update");
@@ -526,12 +515,12 @@ void QuantileHistMaker::Builder<GradientSumT>::Update(
interaction_constraints_.Reset();
p_last_fmat_mutable_ = p_fmat;

- this->InitData(gmat, *p_fmat, *p_tree, gpair_ptr);
+ this->InitData(*gmat, *p_fmat, *p_tree, gpair_ptr);

- if (column_matrix.AnyMissing()) {
-   ExpandTree<true>(gmat, column_matrix, p_fmat, p_tree, *gpair_ptr);
+ if (!gmat->IsDense()) {
+   ExpandTree<true>(gmat, p_fmat, p_tree, *gpair_ptr);
} else {
-   ExpandTree<false>(gmat, column_matrix, p_fmat, p_tree, *gpair_ptr);
+   ExpandTree<false>(gmat, p_fmat, p_tree, *gpair_ptr);
}
for (int nid = 0; nid < p_tree->param.num_nodes; ++nid) {
p_tree->Stat(nid).loss_chg = snode_[nid].best.loss_chg;
@@ -923,16 +912,14 @@ void QuantileHistMaker::Builder<GradientSumT>::AddSplitsToRowSet(

template <typename GradientSumT>
template <bool any_missing>
- void QuantileHistMaker::Builder<GradientSumT>::ApplySplit(const std::vector<CPUExpandEntry> nodes,
- const GHistIndexMatrix& gmat,
- const ColumnMatrix& column_matrix,
- const HistCollection<GradientSumT>& hist,
- RegTree* p_tree) {
+ void QuantileHistMaker::Builder<GradientSumT>::ApplySplit(
+ const std::vector<CPUExpandEntry> nodes, GHistIndexMatrix* gmat,
+ const HistCollection<GradientSumT> &hist, RegTree *p_tree) {
builder_monitor_.Start("ApplySplit");
// 1. Find split condition for each split
const size_t n_nodes = nodes.size();
std::vector<int32_t> split_conditions;
- FindSplitConditions(nodes, *p_tree, gmat, &split_conditions);
+ FindSplitConditions(nodes, *p_tree, *gmat, &split_conditions);
// 2.1 Create a blocked space of size SUM(samples in each node)
common::BlockedSpace2d space(n_nodes, [&](size_t node_in_set) {
int32_t nid = nodes[node_in_set].nid;
@@ -948,6 +935,7 @@ void QuantileHistMaker::Builder<GradientSumT>::ApplySplit(const std::vector<CPUE
});
// 2.3 Split elements of row_set_collection_ to left and right child-nodes for each node
// Store results in intermediate buffers from partition_builder_
+ auto const &column_matrix = gmat->Columns(param_.sparse_threshold);
common::ParallelFor2d(space, this->nthread_, [&](size_t node_in_set, common::Range1d r) {
size_t begin = r.begin();
const int32_t nid = nodes[node_in_set].nid;
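Two behavioural points in this file are worth spelling out. First, the column matrix is no longer a member that the updater initialises up front; `ApplySplit` now asks the index matrix for it on demand via `gmat->Columns(param_.sparse_threshold)`. Second, the `any_missing` dispatch in `Builder::Update` now keys off `gmat->IsDense()` instead of `column_matrix.AnyMissing()`, which relies on a dense gradient index implying no missing values. A toy sketch of that compile-time/run-time split, with made-up names (`FakeIndexMatrix`, `ExpandTreeSketch`, `UpdateSketch`):

```cpp
#include <iostream>

// Illustrative stand-in: only the density flag matters for the dispatch.
struct FakeIndexMatrix {
  bool is_dense;
  bool IsDense() const { return is_dense; }
};

// any_missing is a compile-time flag, so each instantiation can drop the
// code it does not need (e.g. missing-value checks on the dense path).
template <bool any_missing>
void ExpandTreeSketch(FakeIndexMatrix* /*gmat*/) {
  if (any_missing) {
    std::cout << "sparse path: partitioning must handle missing values\n";
  } else {
    std::cout << "dense path: no missing-value checks needed\n";
  }
}

// Mirrors `if (!gmat->IsDense()) ExpandTree<true>(...) else ExpandTree<false>(...)`.
void UpdateSketch(FakeIndexMatrix* gmat) {
  if (!gmat->IsDense()) {
    ExpandTreeSketch<true>(gmat);
  } else {
    ExpandTreeSketch<false>(gmat);
  }
}

int main() {
  FakeIndexMatrix dense{true};
  FakeIndexMatrix sparse{false};
  UpdateSketch(&dense);
  UpdateSketch(&sparse);
}
```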
23 changes: 7 additions & 16 deletions src/tree/updater_quantile_hist.h
@@ -209,10 +209,7 @@ class QuantileHistMaker: public TreeUpdater {
CPUHistMakerTrainParam hist_maker_param_;
// training parameter
TrainParam param_;
- // column accessor
- ColumnMatrix column_matrix_;
DMatrix const* p_last_dmat_ {nullptr};
- bool is_gmat_initialized_ {false};

// data structure
struct NodeEntry {
@@ -250,11 +247,9 @@ class QuantileHistMaker: public TreeUpdater {
builder_monitor_.Init("Quantile::Builder");
}
// update one tree, growing
- virtual void Update(const GHistIndexMatrix& gmat,
- const ColumnMatrix& column_matrix,
- HostDeviceVector<GradientPair>* gpair,
- DMatrix* p_fmat,
- RegTree* p_tree);
+ virtual void Update(GHistIndexMatrix *gmat,
+ HostDeviceVector<GradientPair> *gpair, DMatrix *p_fmat,
+ RegTree *p_tree);

inline void SubtractionTrick(GHistRowT self,
GHistRowT sibling,
@@ -296,11 +291,8 @@ class QuantileHistMaker: public TreeUpdater {
const RegTree& tree);

template <bool any_missing>
- void ApplySplit(std::vector<CPUExpandEntry> nodes,
- const GHistIndexMatrix& gmat,
- const ColumnMatrix& column_matrix,
- const HistCollection<GradientSumT>& hist,
- RegTree* p_tree);
+ void ApplySplit(std::vector<CPUExpandEntry> nodes, GHistIndexMatrix *gmat,
+ const HistCollection<GradientSumT> &hist, RegTree *p_tree);

void AddSplitsToRowSet(const std::vector<CPUExpandEntry>& nodes, RegTree* p_tree);

@@ -357,8 +349,7 @@ class QuantileHistMaker: public TreeUpdater {
const std::vector<GradientPair> &gpair_h,
const std::vector<CPUExpandEntry>& nodes_for_apply_split, RegTree *p_tree);
template <bool any_missing>
- void ExpandTree(const GHistIndexMatrix& gmat,
- const ColumnMatrix& column_matrix,
+ void ExpandTree(GHistIndexMatrix* gmat,
DMatrix* p_fmat,
RegTree* p_tree,
const std::vector<GradientPair>& gpair_h);
@@ -430,7 +421,7 @@ class QuantileHistMaker: public TreeUpdater {
void CallBuilderUpdate(const std::unique_ptr<Builder<GradientSumT>>& builder,
HostDeviceVector<GradientPair> *gpair,
DMatrix *dmat,
- GHistIndexMatrix const& gmat,
+ GHistIndexMatrix* gmat,
const std::vector<RegTree *> &trees);

protected:
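On the signature changes in this header: the `GHistIndexMatrix` parameters switch from `const&` to a raw pointer, presumably because the new `Columns()` accessor is non-const (it fills a cache), so it cannot be invoked through a const reference. A minimal illustration of that constraint with a made-up `Cache` type:

```cpp
#include <memory>

struct Cache {
  // Non-const: fills the cached value on first use, similar in spirit to
  // the lazy Columns() accessor above.
  int const& Value() {
    if (!v_) v_ = std::make_shared<int>(42);
    return *v_;
  }

 private:
  std::shared_ptr<int> v_;
};

void TakesConstRef(Cache const& c) {
  // c.Value();  // would not compile: Value() is a non-const member function
  (void)c;
}

void TakesPointer(Cache* c) {
  (void)c->Value();  // fine: the callee is allowed to mutate the cache
}

int main() {
  Cache c;
  TakesConstRef(c);
  TakesPointer(&c);
}
```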