Skip to content

Commit

Permalink
Delete legacy get posterior mean (#2613)
Browse files Browse the repository at this point in the history
Summary:
Pull Request resolved: #2613

Legacy models have been deprecated for a while, and they are being removed one by one. This particular one was used only to construct an MC posterior mean (`qSimpleRegret`) acquisition function within `compute_posterior_pareto_frontier`. Updated that usage to the MBM (Modular BoTorch Model) equivalent and removed the legacy code.

Reviewed By: esantorella

Differential Revision: D60396661

fbshipit-source-id: d4232e94749f66032c7980cf1eb2075c745bb66b
  • Loading branch information
saitcakmak authored and facebook-github-bot committed Jul 29, 2024
1 parent 706d42a commit 8587b30
Show file tree
Hide file tree
Showing 4 changed files with 12 additions and 237 deletions.
119 changes: 0 additions & 119 deletions ax/models/tests/test_posterior_mean.py

This file was deleted.

84 changes: 0 additions & 84 deletions ax/models/torch/posterior_mean.py

This file was deleted.

38 changes: 12 additions & 26 deletions ax/plot/pareto_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,11 +36,11 @@
from ax.modelbridge.registry import Models
from ax.modelbridge.torch import TorchModelBridge
from ax.modelbridge.transforms.search_space_to_float import SearchSpaceToFloat
from ax.models.torch.posterior_mean import get_PosteriorMean
from ax.models.torch_base import TorchModel
from ax.utils.common.logger import get_logger
from ax.utils.common.typeutils import checked_cast
from ax.utils.stats.statstools import relativize
from botorch.acquisition.monte_carlo import qSimpleRegret
from botorch.utils.multi_objective import is_non_dominated
from botorch.utils.multi_objective.hypervolume import infer_reference_point

Expand Down Expand Up @@ -347,7 +347,6 @@ def compute_posterior_pareto_frontier(
absolute_metrics: Optional[List[str]] = None,
num_points: int = 10,
trial_index: Optional[int] = None,
chebyshev: bool = True,
) -> ParetoFrontierResults:
"""Compute the Pareto frontier between two objectives. For experiments
with batch trials, a trial index or data object must be provided.
Expand All @@ -368,16 +367,10 @@ def compute_posterior_pareto_frontier(
will be in % relative to status_quo).
num_points: The number of points to compute on the
Pareto frontier.
chebyshev: Whether to use augmented_chebyshev_scalarization
when computing Pareto Frontier points.
Returns:
ParetoFrontierResults: A NamedTuple with fields listed in its definition.
"""
model_gen_options = {
"acquisition_function_kwargs": {"chebyshev_scalarization": chebyshev}
}

if (
trial_index is None
and data is None
Expand Down Expand Up @@ -415,7 +408,7 @@ def compute_posterior_pareto_frontier(
# The weights here are just dummy weights that we pass in to construct the
# modelbridge. We set the weight to -1 if `lower_is_better` is `True` and
# 1 otherwise. This code would benefit from a serious revamp.
oc = _build_new_optimization_config(
oc = _build_scalarized_optimization_config(
weights=np.array(
[
-1 if primary_objective.lower_is_better else 1,
Expand All @@ -426,11 +419,11 @@ def compute_posterior_pareto_frontier(
secondary_objective=secondary_objective,
outcome_constraints=outcome_constraints,
)
model = Models.MOO(
model = Models.BOTORCH_MODULAR(
experiment=experiment,
data=data,
acqf_constructor=get_PosteriorMean,
optimization_config=oc,
botorch_acqf_class=qSimpleRegret,
)

status_quo = experiment.status_quo
Expand Down Expand Up @@ -463,16 +456,13 @@ def compute_posterior_pareto_frontier(
weights_list = np.stack([primary_weight, secondary_weight]).transpose()
for weights in weights_list:
outcome_constraints = outcome_constraints
oc = _build_new_optimization_config(
oc = _build_scalarized_optimization_config(
weights=weights,
primary_objective=primary_objective,
secondary_objective=secondary_objective,
outcome_constraints=outcome_constraints,
)
# TODO: (jej) T64002590 Let this serve as a starting point for optimization.
# ex. Add global spacing criterion. Implement on BoTorch side.
# pyre-fixme [6]: Expected different type for model_gen_options
run = model.gen(1, model_gen_options=model_gen_options, optimization_config=oc)
run = model.gen(1, optimization_config=oc)
param_dicts.append(run.arms[0].parameters)

# Call predict on points to get their decomposed metrics.
Expand Down Expand Up @@ -554,19 +544,15 @@ def _validate_outcome_constraints(
)


def _build_new_optimization_config(
# pyre-fixme[2]: Parameter must be annotated.
weights,
# pyre-fixme[2]: Parameter must be annotated.
primary_objective,
# pyre-fixme[2]: Parameter must be annotated.
secondary_objective,
# pyre-fixme[2]: Parameter must be annotated.
outcome_constraints=None,
def _build_scalarized_optimization_config(
weights: np.ndarray,
primary_objective: Metric,
secondary_objective: Metric,
outcome_constraints: Optional[List[OutcomeConstraint]] = None,
) -> MultiObjectiveOptimizationConfig:
obj = ScalarizedObjective(
metrics=[primary_objective, secondary_objective],
weights=weights,
weights=weights.tolist(),
minimize=False,
)
optimization_config = MultiObjectiveOptimizationConfig(
Expand Down
8 changes: 0 additions & 8 deletions sphinx/source/models.rst
Original file line number Diff line number Diff line change
Expand Up @@ -303,14 +303,6 @@ ax.models.torch.fully_bayesian_model_utils module
:undoc-members:
:show-inheritance:

ax.models.torch.posterior_mean module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. automodule:: ax.models.torch.posterior_mean
:members:
:undoc-members:
:show-inheritance:

ax.models.torch.utils module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Expand Down

0 comments on commit 8587b30

Please sign in to comment.