From c0463aea026452e48eb83ade9b320b396caaaece Mon Sep 17 00:00:00 2001
From: Dmitry Razdoburdin <>
Date: Thu, 25 Apr 2024 00:15:23 -0700
Subject: [PATCH] add data initialisation for training

---
 plugin/sycl/tree/hist_updater.cc           | 74 ++++++++++++++++++
 plugin/sycl/tree/hist_updater.h            | 12 +++
 tests/cpp/plugin/test_sycl_hist_updater.cc | 91 +++++++++++++++++++++-
 3 files changed, 176 insertions(+), 1 deletion(-)

diff --git a/plugin/sycl/tree/hist_updater.cc b/plugin/sycl/tree/hist_updater.cc
index 7ac5924f419b..a96cf3f7603c 100644
--- a/plugin/sycl/tree/hist_updater.cc
+++ b/plugin/sycl/tree/hist_updater.cc
@@ -50,6 +50,80 @@ void HistUpdater<GradientSumT>::InitSampling(
   qu_.wait();
 }
 
+template <typename GradientSumT>
+void HistUpdater<GradientSumT>::InitData(
+                                Context const * ctx,
+                                const common::GHistIndexMatrix& gmat,
+                                const USMVector<GradientPair, MemoryType::on_device> &gpair,
+                                const DMatrix& fmat,
+                                const RegTree& tree) {
+  CHECK((param_.max_depth > 0 || param_.max_leaves > 0))
+      << "max_depth or max_leaves cannot be both 0 (unlimited); "
+      << "at least one should be a positive quantity.";
+  if (param_.grow_policy == xgboost::tree::TrainParam::kDepthWise) {
+    CHECK(param_.max_depth > 0) << "max_depth cannot be 0 (unlimited) "
+                                << "when grow_policy is depthwise.";
+  }
+  builder_monitor_.Start("InitData");
+  const auto& info = fmat.Info();
+
+  // initialize the row set
+  {
+    row_set_collection_.Clear();
+    USMVector<size_t, MemoryType::on_device>* row_indices = &(row_set_collection_.Data());
+    row_indices->Resize(&qu_, info.num_row_);
+    size_t* p_row_indices = row_indices->Data();
+    // mark subsample and build list of member rows
+    if (param_.subsample < 1.0f) {
+      CHECK_EQ(param_.sampling_method, xgboost::tree::TrainParam::kUniform)
+        << "Only uniform sampling is supported, "
+        << "gradient-based sampling is only supported by GPU Hist.";
+      InitSampling(gpair, row_indices);
+    } else {
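+      // No subsampling: the kernel below fills row_indices with the identity
+      // mapping and raises a flag if any row carries a negative hessian.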
+      int has_neg_hess = 0;
+      const GradientPair* gpair_ptr = gpair.DataConst();
+      ::sycl::event event;
+      {
+        ::sycl::buffer<int, 1> flag_buf(&has_neg_hess, 1);
+        event = qu_.submit([&](::sycl::handler& cgh) {
+          auto flag_buf_acc = flag_buf.get_access<::sycl::access::mode::read_write>(cgh);
+          cgh.parallel_for<>(::sycl::range<1>(::sycl::range<1>(info.num_row_)),
+                             [=](::sycl::item<1> pid) {
+            const size_t idx = pid.get_id(0);
+            p_row_indices[idx] = idx;
+            if (gpair_ptr[idx].GetHess() < 0.0f) {
+              AtomicRef<int> has_neg_hess_ref(flag_buf_acc[0]);
+              has_neg_hess_ref.fetch_max(1);
+            }
+          });
+        });
+      }
+
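+      // flag_buf goes out of scope above, so has_neg_hess is synchronised back
+      // to the host here. If negative hessians were found, compact row_indices
+      // on the device, keeping only rows with hess >= 0 (the atomic counter
+      // does not preserve row order), and shrink the vector to the kept count.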
+      if (has_neg_hess) {
+        size_t max_idx = 0;
+        {
+          ::sycl::buffer<size_t, 1> flag_buf(&max_idx, 1);
+          event = qu_.submit([&](::sycl::handler& cgh) {
+            cgh.depends_on(event);
+            auto flag_buf_acc = flag_buf.get_access<::sycl::access::mode::read_write>(cgh);
+            cgh.parallel_for<>(::sycl::range<1>(::sycl::range<1>(info.num_row_)),
+                               [=](::sycl::item<1> pid) {
+              const size_t idx = pid.get_id(0);
+              if (gpair_ptr[idx].GetHess() >= 0.0f) {
+                AtomicRef<size_t> max_idx_ref(flag_buf_acc[0]);
+                p_row_indices[max_idx_ref++] = idx;
+              }
+            });
+          });
+        }
+        row_indices->Resize(&qu_, max_idx, 0, &event);
+      }
+      qu_.wait_and_throw();
+    }
+  }
+  row_set_collection_.Init();
+}
+
 template class HistUpdater<float>;
 template class HistUpdater<double>;
 
diff --git a/plugin/sycl/tree/hist_updater.h b/plugin/sycl/tree/hist_updater.h
index 9efc402c0c78..fb81218b6edb 100644
--- a/plugin/sycl/tree/hist_updater.h
+++ b/plugin/sycl/tree/hist_updater.h
@@ -47,7 +47,19 @@ class HistUpdater {
   void InitSampling(const USMVector<GradientPair, MemoryType::on_device> &gpair,
                     USMVector<size_t, MemoryType::on_device>* row_indices);
 
+
+  void InitData(Context const * ctx,
+                const common::GHistIndexMatrix& gmat,
+                const USMVector<GradientPair, MemoryType::on_device> &gpair,
+                const DMatrix& fmat,
+                const RegTree& tree);
+
+  // --data fields--
   size_t sub_group_size_;
+
+  // the internal row sets
+  common::RowSetCollection row_set_collection_;
+
   const xgboost::tree::TrainParam& param_;
   TreeEvaluator tree_evaluator_;
   std::unique_ptr<TreeUpdater> pruner_;
diff --git a/tests/cpp/plugin/test_sycl_hist_updater.cc b/tests/cpp/plugin/test_sycl_hist_updater.cc
index 81bb9cb7fef2..4f66c27b9f81 100644
--- a/tests/cpp/plugin/test_sycl_hist_updater.cc
+++ b/tests/cpp/plugin/test_sycl_hist_updater.cc
@@ -12,6 +12,7 @@
 
 namespace xgboost::sycl::tree {
 
+// Use this class to test the protected methods of HistUpdater
 template <typename GradientSumT>
 class TestHistUpdater : public HistUpdater<GradientSumT> {
  public:
@@ -23,9 +24,18 @@ class TestHistUpdater : public HistUpdater<GradientSumT> {
                                       int_constraints_, fmat) {}
 
   void TestInitSampling(const USMVector<GradientPair, MemoryType::on_device> &gpair,
-                    USMVector<size_t, MemoryType::on_device>* row_indices) {
+                        USMVector<size_t, MemoryType::on_device>* row_indices) {
     HistUpdater<GradientSumT>::InitSampling(gpair, row_indices);
   }
+
+  const auto* TestInitData(Context const * ctx,
+                           const common::GHistIndexMatrix& gmat,
+                           const USMVector<GradientPair, MemoryType::on_device> &gpair,
+                           const DMatrix& fmat,
+                           const RegTree& tree) {
+    HistUpdater<GradientSumT>::InitData(ctx, gmat, gpair, fmat, tree);
+    return &(HistUpdater<GradientSumT>::row_set_collection_.Data());
+  }
 };
 
 template <typename GradientSumT>
@@ -94,6 +104,73 @@ void TestHistUpdaterSampling(const xgboost::tree::TrainParam& param) {
 
 }
 
+template <typename GradientSumT>
+void TestHistUpdaterInitData(const xgboost::tree::TrainParam& param, bool has_neg_hess) {
+  const size_t num_rows = 1u << 8;
+  const size_t num_columns = 1;
+  const size_t n_bins = 32;
+
+  Context ctx;
+  ctx.UpdateAllowUnknown(Args{{"device", "sycl"}});
+
+  DeviceManager device_manager;
+  auto qu = device_manager.GetQueue(ctx.Device());
+  ObjInfo task{ObjInfo::kRegression};
+
+  auto p_fmat = RandomDataGenerator{num_rows, num_columns, 0.0}.GenerateDMatrix();
+
+  FeatureInteractionConstraintHost int_constraints;
+  std::unique_ptr<TreeUpdater> pruner{TreeUpdater::Create("prune", &ctx, &task)};
+
+  TestHistUpdater<GradientSumT> updater(qu, param, std::move(pruner), int_constraints, p_fmat.get());
+
+  USMVector<GradientPair, MemoryType::on_device> gpair(&qu, num_rows);
+  auto* gpair_ptr = gpair.Data();
+  qu.submit([&](::sycl::handler& cgh) {
+    cgh.parallel_for<>(::sycl::range<1>(::sycl::range<1>(num_rows)),
+                       [=](::sycl::item<1> pid) {
+      uint64_t i = pid.get_linear_id();
+
+      constexpr uint32_t seed = 777;
+      oneapi::dpl::minstd_rand engine(seed, i);
+      GradientPair::ValueT smallest_hess_val = has_neg_hess ? -1. : 0.;
+      oneapi::dpl::uniform_real_distribution<GradientPair::ValueT> distr(smallest_hess_val, 1.);
+      gpair_ptr[i] = {distr(engine), distr(engine)};
+    });
+  }).wait();
+
+  DeviceMatrix dmat;
+  dmat.Init(qu, p_fmat.get());
+  common::GHistIndexMatrix gmat;
+  gmat.Init(qu, &ctx, dmat, n_bins);
+  RegTree tree;
+
+  const auto* row_indices = updater.TestInitData(&ctx, gmat, gpair, *p_fmat, tree);
+
+  std::vector<size_t> row_indices_host(row_indices->Size());
+  qu.memcpy(row_indices_host.data(), row_indices->DataConst(), row_indices->Size()*sizeof(size_t)).wait();
+
+  if (!has_neg_hess) {
+    for (size_t i = 0; i < num_rows; ++i) {
+      ASSERT_EQ(row_indices_host[i], i);
+    }
+  } else {
+    std::vector<GradientPair> gpair_host(num_rows);
+    qu.memcpy(gpair_host.data(), gpair.Data(), num_rows*sizeof(GradientPair)).wait();
+
+    std::set<size_t> rows;
+    for (size_t i = 0; i < num_rows; ++i) {
+      if (gpair_host[i].GetHess() >= 0.0f) {
+        rows.insert(i);
+      }
+    }
+    ASSERT_EQ(rows.size(), row_indices_host.size());
+    for (size_t row_idx : row_indices_host) {
+      ASSERT_EQ(rows.count(row_idx), 1);
+    }
+  }
+}
+
 TEST(SyclHistUpdater, Sampling) {
   xgboost::tree::TrainParam param;
   param.UpdateAllowUnknown(Args{{"subsample", "0.7"}});
@@ -101,4 +178,16 @@ TEST(SyclHistUpdater, Sampling) {
   TestHistUpdaterSampling<float>(param);
   TestHistUpdaterSampling<double>(param);
 }
+
+TEST(SyclHistUpdater, InitData) {
+  xgboost::tree::TrainParam param;
+  param.UpdateAllowUnknown(Args{{"subsample", "1"}});
+
+  TestHistUpdaterInitData<float>(param, true);
+  TestHistUpdaterInitData<float>(param, false);
+
+  TestHistUpdaterInitData<double>(param, true);
+  TestHistUpdaterInitData<double>(param, false);
+}
+
 } // namespace xgboost::sycl::tree