Commit c9c23d2
Incorporate pr comments
mpetrosian committed Nov 7, 2022
1 parent b3ffd86 commit c9c23d2
Showing 4 changed files with 13 additions and 15 deletions.
2 changes: 1 addition & 1 deletion codecov.yml
@@ -22,4 +22,4 @@ ignore:
   - "src/estimagic/optimization/subsolvers/bounded_newton_quadratic_fast.py"
   - "src/estimagic/optimization/subsolvers/_trsbox_quadratic_fast.py"
   - "src/estimagic/optimization/subsolvers/_conjugate_gradient_quadratic_fast.py"
-  - "src/estimagic/optimization/subsolvers/_steinhaug_toint_quadratic_fast.py"
+  - "src/estimagic/optimization/subsolvers/_steihaug_toint_quadratic_fast.py"
15 changes: 6 additions & 9 deletions src/estimagic/optimization/subsolvers/_conjugate_gradient_quadratic_fast.py
@@ -1,6 +1,4 @@
 """Implementation of the Conjugate Gradient algorithm."""
-import math
-
 import numpy as np
 from numba import njit
 
@@ -80,14 +78,13 @@ def _update_vectors_for_next_iteration(
         direction (np.ndarray): Direction vector of shape (n,).
 
     Returns:
         (tuple) Tuple containing:
-            - x_candidate (np.ndarray): Updated candidate vector of shape (n,).
-            - residual (np.ndarray): Updated array of residuals of shape (n,).
-            - direction (np.darray): Updated direction vector of shape (n,).
-
+            x_candidate (np.ndarray): Updated candidate vector of shape (n,).
+            residual_new (np.ndarray): Updated array of residuals of shape (n,).
+            direction (np.ndarray): Updated direction vector of shape (n,).
     """
     residual_new = np.zeros(len(residual))
-    nom = 0
-    denom = 0
+    nom = 0.0
+    denom = 0.0
     for i in range(len(x_candidate)):
         x_candidate[i] = x_candidate[i] + alpha * direction[i]
         temp = 0
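The `0` to `0.0` change is relevant because this module is compiled with Numba's `njit`: a float literal pins the accumulator to float64 at its first assignment instead of relying on int-to-float type unification. A minimal sketch of the pattern, with a hypothetical helper name not taken from the diff:

```python
import numpy as np
from numba import njit

@njit
def weighted_sumsq(x, w):
    # Illustration only: the float literal keeps `total` float64 from the
    # first assignment, rather than starting as an int and being unified
    # to float by Numba's type inference inside the loop.
    total = 0.0
    for i in range(len(x)):
        total += w[i] * x[i] ** 2
    return total

print(weighted_sumsq(np.array([1.0, 2.0]), np.array([0.5, 0.5])))  # 2.5
```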
@@ -129,7 +126,7 @@ def _get_distance_to_trustregion_boundary(candidate, direction, radius):
         cc += candidate[i] ** 2
         dd += direction[i] ** 2
         cd += candidate[i] * direction[i]
-    sigma = -cd + math.sqrt(cd * cd + dd * (radius**2 - cc))
+    sigma = -cd + np.sqrt(cd * cd + dd * (radius**2 - cc))
     sigma = sigma / dd
 
     return sigma
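With `np.sqrt` replacing `math.sqrt`, the `import math` at the top of the module becomes unused, which is why the first hunk removes it. For context, the `sigma` computed here is the positive root of the quadratic `||candidate + sigma * direction||**2 == radius**2`, which expands to `dd * sigma**2 + 2 * cd * sigma + (cc - radius**2) == 0`. A vectorized sketch (not the module's API) that can be checked numerically:

```python
import numpy as np

def distance_to_boundary(candidate, direction, radius):
    # Positive root of dd * sigma**2 + 2 * cd * sigma + (cc - radius**2) == 0,
    # i.e. the step length along `direction` that lands exactly on the
    # trust-region boundary.
    cc = candidate @ candidate
    dd = direction @ direction
    cd = candidate @ direction
    return (-cd + np.sqrt(cd * cd + dd * (radius**2 - cc))) / dd

c = np.array([0.3, -0.1])
d = np.array([1.0, 2.0])
sigma = distance_to_boundary(c, d, radius=1.0)
print(np.linalg.norm(c + sigma * d))  # ~1.0, i.e. exactly on the boundary
```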
9 changes: 5 additions & 4 deletions src/estimagic/optimization/subsolvers/_trsbox_quadratic_fast.py
@@ -150,14 +150,14 @@ def minimize_trust_trsbox_fast(
             hess_g,
         )
 
-        if index_bound_active.size > 0:
+        if index_bound_active != -1:
             n_fixed_variables += 1
             if gradient_projected[index_bound_active] >= 0:
                 x_bounded[index_bound_active] = 1
             else:
                 x_bounded[index_bound_active] = -1
 
-            delta_sq = (delta_sq - x_candidate[index_bound_active] ** 2)[0]
+            delta_sq = delta_sq - x_candidate[index_bound_active] ** 2
             if delta_sq <= 0:
                 need_alt_trust_step = True
                 break
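The common thread of the trsbox changes is replacing an array-valued sentinel for the active bound with a plain integer sentinel, `-1` meaning "no bound hit". Besides being simpler under Numba, a scalar index makes `x_candidate[index_bound_active] ** 2` a scalar, which is why the trailing `[0]` on the `delta_sq` update could be dropped. An illustrative comparison (variable names are mine, not the module's):

```python
import numpy as np

x_candidate = np.array([0.5, 1.5, 2.5])

# Old sentinel style: an array index; fancy indexing yields a length-1 array.
idx_arr = np.array([1])
print(x_candidate[idx_arr] ** 2)  # array([2.25]) -> needed a trailing [0]

# New sentinel style: a plain int; indexing yields a scalar directly.
idx = 1
if idx != -1:                     # -1 encodes "no bound became active"
    print(x_candidate[idx] ** 2)  # 2.25
```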
@@ -415,7 +415,7 @@ def _update_candidate_vectors_and_reduction(
     """Update candidate vectors and the associated criterion reduction."""
     current_min = g_hess_g / gradient_projected_sumsq
 
-    if index_bound_active.size == 0 and current_min > 0:
+    if index_bound_active == -1 and current_min > 0:
         if curve_min != -1.0:
             curve_min = min(curve_min, current_min)
         else:
@@ -451,6 +451,7 @@ def _take_constrained_step_up_to_boundary(
     x_candidate, gradient_projected, step_len, lower_bounds, upper_bounds
 ):
     """Reduce step length, where boundary is hit, to preserve simple bounds."""
+    index_bound_active = -1
     for i in range(len(x_candidate)):
         if gradient_projected[i] != 0:
             if gradient_projected[i] > 0:
@@ -463,7 +464,7 @@
                 ) / gradient_projected[i]
             if step_len_constr < step_len:
                 step_len = step_len_constr
-                index_bound_active = np.array([i])
+                index_bound_active = i
 
     return step_len, index_bound_active
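Initializing `index_bound_active = -1` before the loop is what makes the scalar sentinel safe: the variable is now assigned on every execution path, and callers can test `!= -1` instead of `.size > 0`. A minimal sketch of the pattern, with hypothetical names not taken from the module:

```python
import numpy as np
from numba import njit

@njit
def first_blocking_index(step, limits):
    # Illustration only: -1 plays the role of "no constraint binds".
    # Assigning it up front means the variable is defined on every path,
    # which Numba's nopython mode requires.
    idx = -1
    for i in range(len(limits)):
        if limits[i] < step:
            step = limits[i]
            idx = i
    return step, idx

print(first_blocking_index(1.0, np.array([0.8, 0.3, 0.9])))  # (0.3, 1)
```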

2 changes: 1 addition & 1 deletion tests/optimization/tranquilo/test_minimize_trust_region.py
@@ -187,7 +187,7 @@ def test_update_candidate_vector_and_reduction():
     step_len = 0.05
     total_reduction = 0
     curve_min = -0.5
-    index_bound_active = np.array([3, 4])
+    index_bound_active = 3
     gradient_projected_sumsq = 25
     gradient_sumsq = 25
     g_hess_g = 100
