add data initialisation for training
Dmitry Razdoburdin committed Apr 25, 2024
1 parent 58513dc commit c0463ae
Showing 3 changed files with 176 additions and 1 deletion.
74 changes: 74 additions & 0 deletions plugin/sycl/tree/hist_updater.cc
@@ -50,6 +50,80 @@ void HistUpdater<GradientSumT>::InitSampling(
qu_.wait();
}

template<typename GradientSumT>
void HistUpdater<GradientSumT>::InitData(
Context const * ctx,
const common::GHistIndexMatrix& gmat,
const USMVector<GradientPair, MemoryType::on_device> &gpair,
const DMatrix& fmat,
const RegTree& tree) {
CHECK((param_.max_depth > 0 || param_.max_leaves > 0))
<< "max_depth and max_leaves cannot both be 0 (unlimited); "
<< "at least one must be positive.";
if (param_.grow_policy == xgboost::tree::TrainParam::kDepthWise) {
CHECK(param_.max_depth > 0) << "max_depth cannot be 0 (unlimited) "
<< "when grow_policy is depthwise.";
}
builder_monitor_.Start("InitData");
const auto& info = fmat.Info();

// initialize the row set
{
row_set_collection_.Clear();
USMVector<size_t, MemoryType::on_device>* row_indices = &(row_set_collection_.Data());
row_indices->Resize(&qu_, info.num_row_);
size_t* p_row_indices = row_indices->Data();
// mark subsample and build list of member rows
if (param_.subsample < 1.0f) {
CHECK_EQ(param_.sampling_method, xgboost::tree::TrainParam::kUniform)
<< "Only uniform sampling is supported, "
<< "gradient-based sampling is only support by GPU Hist.";
InitSampling(gpair, row_indices);
} else {
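// No subsampling: a single kernel enumerates every row and, in the same
// pass, flags whether any row carries a negative hessian.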
int has_neg_hess = 0;
const GradientPair* gpair_ptr = gpair.DataConst();
::sycl::event event;
{
::sycl::buffer<int, 1> flag_buf(&has_neg_hess, 1);
event = qu_.submit([&](::sycl::handler& cgh) {
auto flag_buf_acc = flag_buf.get_access<::sycl::access::mode::read_write>(cgh);
cgh.parallel_for<>(::sycl::range<1>(info.num_row_),
[=](::sycl::item<1> pid) {
const size_t idx = pid.get_id(0);
p_row_indices[idx] = idx;
if (gpair_ptr[idx].GetHess() < 0.0f) {
AtomicRef<int> has_neg_hess_ref(flag_buf_acc[0]);
has_neg_hess_ref.fetch_max(1);
}
});
});
}

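// Negative hessians were found: rebuild the list, keeping only rows with
// hess >= 0. max_idx acts as an atomic slot counter, so the kept rows land
// in non-deterministic order; depends_on(event) chains this kernel after
// the first.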
if (has_neg_hess) {
size_t max_idx = 0;
{
::sycl::buffer<size_t, 1> flag_buf(&max_idx, 1);
event = qu_.submit([&](::sycl::handler& cgh) {
cgh.depends_on(event);
auto flag_buf_acc = flag_buf.get_access<::sycl::access::mode::read_write>(cgh);
cgh.parallel_for<>(::sycl::range<1>(info.num_row_),
[=](::sycl::item<1> pid) {
const size_t idx = pid.get_id(0);
if (gpair_ptr[idx].GetHess() >= 0.0f) {
AtomicRef<size_t> max_idx_ref(flag_buf_acc[0]);
p_row_indices[max_idx_ref++] = idx;
}
});
});
}
row_indices->Resize(&qu_, max_idx, 0, &event);
}
qu_.wait_and_throw();
}
}
row_set_collection_.Init();
}

template class HistUpdater<float>;
template class HistUpdater<double>;
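The compaction above claims output slots with an atomic counter rather than a prefix sum, so the surviving row indices end up in no particular order (the test below therefore compares sets, not sequences). A minimal standalone sketch of the same idiom in plain SYCL 2020, with the plugin's AtomicRef and USMVector wrappers swapped for sycl::atomic_ref and USM allocations (names here are illustrative, not the plugin's API):

#include <sycl/sycl.hpp>
#include <cstdio>

int main() {
  sycl::queue q;
  constexpr size_t n = 8;
  // Shared USM so host and device see the same buffers.
  float* hess = sycl::malloc_shared<float>(n, q);
  size_t* kept = sycl::malloc_shared<size_t>(n, q);
  size_t* count = sycl::malloc_shared<size_t>(1, q);
  for (size_t i = 0; i < n; ++i) hess[i] = (i % 3 == 0) ? -1.0f : 1.0f;
  *count = 0;
  q.parallel_for(sycl::range<1>(n), [=](::sycl::item<1> it) {
    const size_t idx = it.get_id(0);
    if (hess[idx] >= 0.0f) {
      // Each surviving row claims a unique slot; slot order is not stable.
      sycl::atomic_ref<size_t, sycl::memory_order::relaxed,
                       sycl::memory_scope::device> slot(*count);
      kept[slot.fetch_add(1)] = idx;
    }
  }).wait();
  std::printf("kept %zu of %zu rows\n", *count, n);
  sycl::free(hess, q);
  sycl::free(kept, q);
  sycl::free(count, q);
  return 0;
}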

12 changes: 12 additions & 0 deletions plugin/sycl/tree/hist_updater.h
@@ -47,7 +47,19 @@ class HistUpdater {
void InitSampling(const USMVector<GradientPair, MemoryType::on_device> &gpair,
USMVector<size_t, MemoryType::on_device>* row_indices);

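// Initialize row_set_collection_ for training: take a uniform subsample when
// subsample < 1, otherwise keep every row with a non-negative hessian.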
void InitData(Context const * ctx,
const common::GHistIndexMatrix& gmat,
const USMVector<GradientPair, MemoryType::on_device> &gpair,
const DMatrix& fmat,
const RegTree& tree);

// --data fields--
size_t sub_group_size_;

// the internal row sets
common::RowSetCollection row_set_collection_;

const xgboost::tree::TrainParam& param_;
TreeEvaluator<GradientSumT> tree_evaluator_;
std::unique_ptr<TreeUpdater> pruner_;
91 changes: 90 additions & 1 deletion tests/cpp/plugin/test_sycl_hist_updater.cc
@@ -12,6 +12,7 @@

namespace xgboost::sycl::tree {

// Use this class to test the protected methods of HistUpdater
template <typename GradientSumT>
class TestHistUpdater : public HistUpdater<GradientSumT> {
public:
@@ -23,9 +24,18 @@ class TestHistUpdater : public HistUpdater<GradientSumT> {
int_constraints_, fmat) {}

void TestInitSampling(const USMVector<GradientPair, MemoryType::on_device> &gpair,
USMVector<size_t, MemoryType::on_device>* row_indices) {
HistUpdater<GradientSumT>::InitSampling(gpair, row_indices);
}

const auto* TestInitData(Context const * ctx,
const common::GHistIndexMatrix& gmat,
const USMVector<GradientPair, MemoryType::on_device> &gpair,
const DMatrix& fmat,
const RegTree& tree) {
HistUpdater<GradientSumT>::InitData(ctx, gmat, gpair, fmat, tree);
return &(HistUpdater<GradientSumT>::row_set_collection_.Data());
}
};

template <typename GradientSumT>
@@ -94,11 +104,90 @@ void TestHistUpdaterSampling(const xgboost::tree::TrainParam& param) {

}

template <typename GradientSumT>
void TestHistUpdaterInitData(const xgboost::tree::TrainParam& param, bool has_neg_hess) {
const size_t num_rows = 1u << 8;
const size_t num_columns = 1;
const size_t n_bins = 32;

Context ctx;
ctx.UpdateAllowUnknown(Args{{"device", "sycl"}});

DeviceManager device_manager;
auto qu = device_manager.GetQueue(ctx.Device());
ObjInfo task{ObjInfo::kRegression};

auto p_fmat = RandomDataGenerator{num_rows, num_columns, 0.0}.GenerateDMatrix();

FeatureInteractionConstraintHost int_constraints;
std::unique_ptr<TreeUpdater> pruner{TreeUpdater::Create("prune", &ctx, &task)};

TestHistUpdater<GradientSumT> updater(qu, param, std::move(pruner), int_constraints, p_fmat.get());

USMVector<GradientPair, MemoryType::on_device> gpair(&qu, num_rows);
auto* gpair_ptr = gpair.Data();
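// Fill the gradient pairs on device; hessians are uniform in
// [smallest_hess_val, 1), so has_neg_hess controls whether negatives occur.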
qu.submit([&](::sycl::handler& cgh) {
cgh.parallel_for<>(::sycl::range<1>(num_rows),
[=](::sycl::item<1> pid) {
uint64_t i = pid.get_linear_id();

constexpr uint32_t seed = 777;
oneapi::dpl::minstd_rand engine(seed, i);
GradientPair::ValueT smallest_hess_val = has_neg_hess ? -1. : 0.;
oneapi::dpl::uniform_real_distribution<GradientPair::ValueT> distr(smallest_hess_val, 1.);
gpair_ptr[i] = {distr(engine), distr(engine)};
});
}).wait();

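// Quantize the input matrix into n_bins histogram bins, as the updater expects.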
DeviceMatrix dmat;
dmat.Init(qu, p_fmat.get());
common::GHistIndexMatrix gmat;
gmat.Init(qu, &ctx, dmat, n_bins);
RegTree tree;

const auto* row_indices = updater.TestInitData(&ctx, gmat, gpair, *p_fmat, tree);

std::vector<size_t> row_indices_host(row_indices->Size());
qu.memcpy(row_indices_host.data(), row_indices->DataConst(), row_indices->Size()*sizeof(size_t)).wait();

if (!has_neg_hess) {
for (size_t i = 0; i < num_rows; ++i) {
ASSERT_EQ(row_indices_host[i], i);
}
} else {
std::vector<GradientPair> gpair_host(num_rows);
qu.memcpy(gpair_host.data(), gpair.Data(), num_rows*sizeof(GradientPair)).wait();

std::set<size_t> rows;
for (size_t i = 0; i < num_rows; ++i) {
if (gpair_host[i].GetHess() >= 0.0f) {
rows.insert(i);
}
}
ASSERT_EQ(rows.size(), row_indices_host.size());
for (size_t row_idx : row_indices_host) {
ASSERT_EQ(rows.count(row_idx), 1);
}
}
}

TEST(SyclHistUpdater, Sampling) {
xgboost::tree::TrainParam param;
param.UpdateAllowUnknown(Args{{"subsample", "0.7"}});

TestHistUpdaterSampling<float>(param);
TestHistUpdaterSampling<double>(param);
}

TEST(SyclHistUpdater, InitData) {
xgboost::tree::TrainParam param;
param.UpdateAllowUnknown(Args{{"subsample", "1"}});

TestHistUpdaterInitData<float>(param, true);
TestHistUpdaterInitData<float>(param, false);

TestHistUpdaterInitData<double>(param, true);
TestHistUpdaterInitData<double>(param, false);
}

} // namespace xgboost::sycl::tree
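These cases can be run in isolation with a gtest filter (assuming the testxgboost binary that the project's C++ test build produces):

./testxgboost --gtest_filter='SyclHistUpdater.*'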
