Deprecate no_bayesian_optimization argument in favor of force_random_search #2693

Closed · wants to merge 1 commit
16 changes: 11 additions & 5 deletions ax/modelbridge/dispatch_utils.py
@@ -23,6 +23,7 @@
 from ax.models.torch.botorch_modular.model import BoTorchModel as ModularBoTorchModel
 from ax.models.types import TConfig
 from ax.models.winsorization_config import WinsorizationConfig
+from ax.utils.common.deprecation import _validate_force_random_search
 from ax.utils.common.logger import get_logger
 from ax.utils.common.typeutils import not_none
 
@@ -295,7 +296,8 @@ def choose_generation_strategy(
         Union[WinsorizationConfig, dict[str, WinsorizationConfig]]
     ] = None,
     derelativize_with_raw_status_quo: bool = False,
-    no_bayesian_optimization: bool = False,
+    no_bayesian_optimization: Optional[bool] = None,
+    force_random_search: bool = False,
     num_trials: Optional[int] = None,
     num_initialization_trials: Optional[int] = None,
     num_completed_initialization_trials: int = 0,
@@ -347,8 +349,9 @@ def choose_generation_strategy(
             Winsorization when relative constraints are present. Note: automatic
             Winsorization will fail if this is set to `False` (or unset) and there
             are relative constraints present.
-        no_bayesian_optimization: If True, Bayesian optimization generation
-            strategy will not be suggested and quasi-random strategy will be used.
+        no_bayesian_optimization: Deprecated. Use `force_random_search`.
+        force_random_search: If True, quasi-random generation strategy will be used
+            rather than Bayesian optimization.
         num_trials: Total number of trials in the optimization, if
             known in advance.
         num_initialization_trials: Specific number of initialization trials, if wanted.
@@ -441,7 +444,10 @@ def choose_generation_strategy(
         sobol_parallelism = None  # No restriction on Sobol phase
         bo_parallelism = DEFAULT_BAYESIAN_PARALLELISM
 
-    if not no_bayesian_optimization and suggested_model is not None:
+    # TODO[T199632397] Remove
+    _validate_force_random_search(no_bayesian_optimization, force_random_search)
+
+    if not force_random_search and suggested_model is not None:
         if not enforce_sequential_optimization and (
             max_parallelism_override or max_parallelism_cap
         ):
@@ -546,7 +552,7 @@ def choose_generation_strategy(
             f" {num_remaining_initialization_trials} will take longer to generate due"
             " to model-fitting."
         )
-    else:  # `no_bayesian_optimization` is True or we could not suggest BO model
+    else:  # `force_random_search` is True or we could not suggest BO model
         if verbose is not None:
             logger.warning(
                 f"Ignoring `verbose = {verbose}` for `generation_strategy` "
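
For context on the migration this diff enables, here is a minimal caller-side sketch. The one-parameter search space is an assumption added for illustration; only `choose_generation_strategy` and its flags come from the diff above.

```python
from ax.core.parameter import ParameterType, RangeParameter
from ax.core.search_space import SearchSpace
from ax.modelbridge.dispatch_utils import choose_generation_strategy

# Minimal illustrative search space with a single float parameter.
search_space = SearchSpace(
    parameters=[
        RangeParameter(
            name="x", parameter_type=ParameterType.FLOAT, lower=0.0, upper=1.0
        )
    ]
)

# Previously: choose_generation_strategy(search_space, no_bayesian_optimization=True)
# Now: same effect, spelled with the new flag. The legacy flag still works
# (with a DeprecationWarning) as long as the two values do not conflict.
gs = choose_generation_strategy(
    search_space=search_space, force_random_search=True
)
```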
34 changes: 34 additions & 0 deletions ax/utils/common/deprecation.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+
+import warnings
+from typing import Optional, Type
+
+
+def _validate_force_random_search(
+    no_bayesian_optimization: Optional[bool] = None,
+    force_random_search: bool = False,
+    exception_cls: Type[Exception] = ValueError,
+) -> None:
+    """Helper function to validate interaction between `force_random_search`
+    and `no_bayesian_optimization` (supported until deprecation in [T199632397])
+    """
+    if no_bayesian_optimization is not None:
+        # users are effectively permitted to continue using
+        # `no_bayesian_optimization` so long as it doesn't
+        # conflict with `force_random_search`
+        if no_bayesian_optimization != force_random_search:
+            raise exception_cls(
+                "Conflicting values for `force_random_search` "
+                "and `no_bayesian_optimization`! "
+                "Please only specify `force_random_search`."
+            )
+        warnings.warn(
+            "`no_bayesian_optimization` is deprecated. Please use "
+            "`force_random_search` in the future.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
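
As a sanity check, here is a short sketch exercising the helper's three paths. It mirrors the logic in the new file above; note the positional argument order is `(no_bayesian_optimization, force_random_search)`.

```python
import warnings

from ax.utils.common.deprecation import _validate_force_random_search

# Legacy flag unset: nothing to validate, no warning, no error.
_validate_force_random_search(None, True)

# Legacy flag agrees with the new one: passes, but emits a DeprecationWarning.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    _validate_force_random_search(True, True)
assert any(issubclass(w.category, DeprecationWarning) for w in caught)

# Conflicting values: raises `exception_cls` (ValueError by default).
try:
    _validate_force_random_search(True, False)
except ValueError as err:
    print(f"rejected: {err}")
```

Passing `exception_cls` lets a caller surface the conflict as a different error type (for example, a user-input error) without duplicating the message.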