[backport] Disable dense opt for distributed training. (dmlc#9272)
trivialfis committed Jun 10, 2023
1 parent a962611 commit f7e33fc
Showing 2 changed files with 7 additions and 7 deletions.
12 changes: 6 additions & 6 deletions src/tree/updater_gpu_hist.cu
@@ -286,7 +286,7 @@ struct GPUHistMakerDevice {
     matrix.feature_segments,
     matrix.gidx_fvalue_map,
     matrix.min_fvalue,
-    matrix.is_dense
+    matrix.is_dense && !collective::IsDistributed()
 };
 auto split = this->evaluator_.EvaluateSingleSplit(inputs, shared_inputs);
 return split;
@@ -300,11 +300,11 @@ struct GPUHistMakerDevice {
 std::vector<bst_node_t> nidx(2 * candidates.size());
 auto h_node_inputs = pinned2.GetSpan<EvaluateSplitInputs>(2 * candidates.size());
 auto matrix = page->GetDeviceAccessor(ctx_->gpu_id);
-EvaluateSplitSharedInputs shared_inputs{
-    GPUTrainingParam{param}, *quantiser, feature_types, matrix.feature_segments,
-    matrix.gidx_fvalue_map, matrix.min_fvalue,
-    matrix.is_dense
-};
+EvaluateSplitSharedInputs shared_inputs{GPUTrainingParam{param}, *quantiser, feature_types,
+                                        matrix.feature_segments, matrix.gidx_fvalue_map,
+                                        matrix.min_fvalue,
+                                        // is_dense represents the local data
+                                        matrix.is_dense && !collective::IsDistributed()};
 dh::TemporaryArray<GPUExpandEntry> entries(2 * candidates.size());
 // Store the feature set ptrs so they dont go out of scope before the kernel is called
 std::vector<std::shared_ptr<HostDeviceVector<bst_feature_t>>> feature_sets;
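Note on this change: matrix.is_dense describes the local data page (as the new comment points out), while in distributed mode split evaluation runs on histograms aggregated across workers, whose combined data may contain missing values even when the local shard does not, so the flag is forced to false whenever collective::IsDistributed() is true. For context, a simplified sketch of the missing-value bookkeeping that a dense-only fast path can skip; the Stats struct and MissingBucket helper are hypothetical stand-ins, not the actual EvaluateSplits code:

#include <vector>

struct Stats {
  double grad;
  double hess;
};

// Illustrative sketch: when missing values are possible, the gradient sum of
// the rows whose feature value is missing is recovered as parent minus the
// bin-wise sum of that feature's histogram, and the evaluator has to try
// sending those rows both left and right. When is_dense guarantees there are
// no missing values, this bucket is known to be empty and the extra
// enumeration can be skipped.
Stats MissingBucket(const Stats& parent, const std::vector<Stats>& feature_bins) {
  Stats scanned{0.0, 0.0};
  for (const Stats& bin : feature_bins) {
    scanned.grad += bin.grad;
    scanned.hess += bin.hess;
  }
  Stats missing{parent.grad - scanned.grad, parent.hess - scanned.hess};
  return missing;
}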
2 changes: 1 addition & 1 deletion src/tree/updater_quantile_hist.cc
@@ -78,7 +78,7 @@ CPUExpandEntry QuantileHistMaker::Builder::InitRoot(
 
 {
   GradientPairPrecise grad_stat;
-  if (p_fmat->IsDense()) {
+  if (p_fmat->IsDense() && !collective::IsDistributed()) {
     /**
      * Specialized code for dense data: For dense data (with no missing value), the sum
      * of gradient histogram is equal to snode[nid]
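Note on the CPU-side change: the specialized path relies on the identity that, for dense data, every row contributes to exactly one bin of a feature's gradient histogram, so summing those bins reproduces the node's gradient statistics without another pass over the gradient vector. Under distributed training the histogram a worker builds and the globally aggregated node statistics need not coincide, so the shortcut is skipped and the general path is taken instead. A small illustrative sketch of the identity, using simplified stand-in types rather than the XGBoost ones:

#include <vector>

struct GradPair {
  double grad;
  double hess;
};

// Illustrative sketch: for dense data every row lands in exactly one bin of
// a given feature, so the bin-wise sum of that feature's gradient histogram
// equals the gradient sum over the rows the histogram was built from, which
// is what the node statistics need.
GradPair SumFeatureHistogram(const std::vector<GradPair>& feature_hist) {
  GradPair total{0.0, 0.0};
  for (const GradPair& bin : feature_hist) {
    total.grad += bin.grad;
    total.hess += bin.hess;
  }
  return total;
}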
