From 39f56c5ff14d348b3d7e2cb1f56bd3ffcf108673 Mon Sep 17 00:00:00 2001 From: Christian Lorentzen Date: Sat, 15 Jan 2022 14:51:27 +0100 Subject: [PATCH 1/2] ENH save computations of exp in objectives --- src/objective/regression_objective.hpp | 36 ++++++++++++++++---------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/src/objective/regression_objective.hpp b/src/objective/regression_objective.hpp index e711da012066..ff9e69400256 100644 --- a/src/objective/regression_objective.hpp +++ b/src/objective/regression_objective.hpp @@ -439,17 +439,19 @@ class RegressionPoissonLoss: public RegressionL2loss { */ void GetGradients(const double* score, score_t* gradients, score_t* hessians) const override { + double exp_max_delta_step_ = std::exp(max_delta_step_); if (weights_ == nullptr) { #pragma omp parallel for schedule(static) for (data_size_t i = 0; i < num_data_; ++i) { - gradients[i] = static_cast<score_t>(std::exp(score[i]) - label_[i]); - hessians[i] = static_cast<score_t>(std::exp(score[i] + max_delta_step_)); + double exp_score = std::exp(score[i]); + gradients[i] = static_cast<score_t>(exp_score - label_[i]); + hessians[i] = static_cast<score_t>(exp_score * exp_max_delta_step_); } } else { #pragma omp parallel for schedule(static) for (data_size_t i = 0; i < num_data_; ++i) { - gradients[i] = static_cast<score_t>((std::exp(score[i]) - label_[i]) * weights_[i]); - hessians[i] = static_cast<score_t>(std::exp(score[i] + max_delta_step_) * weights_[i]); + gradients[i] = static_cast<score_t>((exp_score - label_[i]) * weights_[i]); + hessians[i] = static_cast<score_t>(exp_score * exp_max_delta_step_ * weights_[i]); } } } @@ -689,14 +691,16 @@ class RegressionGammaLoss : public RegressionPoissonLoss { if (weights_ == nullptr) { #pragma omp parallel for schedule(static) for (data_size_t i = 0; i < num_data_; ++i) { - gradients[i] = static_cast<score_t>(1.0 - label_[i] * std::exp(-score[i])); - hessians[i] = static_cast<score_t>(label_[i] * std::exp(-score[i])); + double exp_score = std::exp(-score[i]); + gradients[i] = static_cast<score_t>(1.0 - 
label_[i] * exp_score); + hessians[i] = static_cast<score_t>(label_[i] * exp_score); } } else { #pragma omp parallel for schedule(static) for (data_size_t i = 0; i < num_data_; ++i) { - gradients[i] = static_cast<score_t>((1.0 - label_[i] * std::exp(-score[i])) * weights_[i]); - hessians[i] = static_cast<score_t>(label_[i] * std::exp(-score[i]) * weights_[i]); + double exp_score = std::exp(-score[i]); + gradients[i] = static_cast<score_t>((1.0 - label_[i] * exp_score) * weights_[i]); + hessians[i] = static_cast<score_t>(label_[i] * exp_score * weights_[i]); } } } @@ -725,16 +729,20 @@ class RegressionTweedieLoss: public RegressionPoissonLoss { if (weights_ == nullptr) { #pragma omp parallel for schedule(static) for (data_size_t i = 0; i < num_data_; ++i) { - gradients[i] = static_cast<score_t>(-label_[i] * std::exp((1 - rho_) * score[i]) + std::exp((2 - rho_) * score[i])); - hessians[i] = static_cast<score_t>(-label_[i] * (1 - rho_) * std::exp((1 - rho_) * score[i]) + - (2 - rho_) * std::exp((2 - rho_) * score[i])); + double exp_1_score = std::exp((1 - rho_) * score[i]); + double exp_2_score = std::exp((2 - rho_) * score[i]); + gradients[i] = static_cast<score_t>(-label_[i] * exp_1_score + exp_2_score); + hessians[i] = static_cast<score_t>(-label_[i] * (1 - rho_) * exp_1_score + + (2 - rho_) * exp_2_score); } } else { #pragma omp parallel for schedule(static) for (data_size_t i = 0; i < num_data_; ++i) { - gradients[i] = static_cast<score_t>((-label_[i] * std::exp((1 - rho_) * score[i]) + std::exp((2 - rho_) * score[i])) * weights_[i]); - hessians[i] = static_cast<score_t>((-label_[i] * (1 - rho_) * std::exp((1 - rho_) * score[i]) + - (2 - rho_) * std::exp((2 - rho_) * score[i])) * weights_[i]); + double exp_1_score = std::exp((1 - rho_) * score[i]); + double exp_2_score = std::exp((2 - rho_) * score[i]); + gradients[i] = static_cast<score_t>((-label_[i] * exp_1_score + exp_2_score) * weights_[i]); + hessians[i] = static_cast<score_t>((-label_[i] * (1 - rho_) * exp_1_score + + (2 - rho_) * exp_2_score) * weights_[i]); } } } From d09e6fe04548ea1da749c2657f04b95477989866 Mon Sep 17 
00:00:00 2001 From: Christian Lorentzen Date: Sun, 16 Jan 2022 20:16:48 +0100 Subject: [PATCH 2/2] CLN missing declaration --- src/objective/regression_objective.hpp | 1 + 1 file changed, 1 insertion(+) diff --git a/src/objective/regression_objective.hpp b/src/objective/regression_objective.hpp index ff9e69400256..3bdeaa1de00f 100644 --- a/src/objective/regression_objective.hpp +++ b/src/objective/regression_objective.hpp @@ -450,6 +450,7 @@ class RegressionPoissonLoss: public RegressionL2loss { } else { #pragma omp parallel for schedule(static) for (data_size_t i = 0; i < num_data_; ++i) { + double exp_score = std::exp(score[i]); gradients[i] = static_cast<score_t>((exp_score - label_[i]) * weights_[i]); hessians[i] = static_cast<score_t>(exp_score * exp_max_delta_step_ * weights_[i]); }