Skip to content

Commit

Permalink
Revert "[tune-telemetry] Tag searcher and scheduler types. (ray-project#33561)" (ray-project#33731)
Browse files Browse the repository at this point in the history

<img width="762" alt="Screen Shot 2023-03-26 at 7 54 30 PM" src="https://user-images.githubusercontent.com/18510752/227829626-001349f1-218e-4538-98c1-851f3dcf8a0e.png">
This reverts commit cb5bb0e.

<!-- Thank you for your contribution! Please review https://github.com/ray-project/ray/blob/master/CONTRIBUTING.rst before opening a pull request. -->

<!-- Please add a reviewer to the assignee section when you create a PR. If you don't have the access to it, we will shortly find a reviewer and assign them to your PR. -->

## Why are these changes needed?

<!-- Please give a short summary of the change and the problem this solves. -->

## Related issue number

<!-- For example: "Closes ray-project#1234" -->

## Checks

- [ ] I've signed off every commit (by using the -s flag, i.e., `git commit -s`) in this PR.
- [ ] I've run `scripts/format.sh` to lint the changes in this PR.
- [ ] I've included any doc changes needed for https://docs.ray.io/en/master/.
    - [ ] I've added any new APIs to the API Reference. For example, if I added a
           method in Tune, I've added it in `doc/source/tune/api/` under the
           corresponding `.rst` file.
- [ ] I've made sure the tests are passing. Note that there might be a few flaky tests, see the recent failures at https://flakey-tests.ray.io/
- Testing Strategy
   - [ ] Unit tests
   - [ ] Release tests
   - [ ] This PR is not tested :(

Signed-off-by: elliottower <[email protected]>
  • Loading branch information
rkooo567 authored and elliottower committed Apr 22, 2023
1 parent 913d01d commit d7636df
Show file tree
Hide file tree
Showing 10 changed files with 14 additions and 124 deletions.
99 changes: 8 additions & 91 deletions python/ray/air/_internal/usage.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,9 @@
from typing import TYPE_CHECKING, Set, Union
from typing import TYPE_CHECKING

from ray._private.usage.usage_lib import TagKey, record_extra_usage_tag

if TYPE_CHECKING:
from ray.train.trainer import BaseTrainer
from ray.tune.schedulers import TrialScheduler
from ray.tune.search import BasicVariantGenerator, Searcher

AIR_TRAINERS = {
"HorovodTrainer",
Expand All @@ -20,97 +18,16 @@
"XGBoostTrainer",
}

# searchers implemented by Ray Tune.
TUNE_SEARCHERS = {
"AxSearch",
"BayesOptSearch",
"TuneBOHB",
"DragonflySearch",
"HEBOSearch",
"HyperOptSearch",
"NevergradSearch",
"OptunaSearch",
"SkOptSearch",
"ZOOptSearch",
}

# These are just wrappers around real searchers.
# We don't want to double tag in this case, otherwise, the real tag
# will be overwritten.
TUNE_SEARCHER_WRAPPERS = {
"ConcurrencyLimiter",
"Repeater",
}

TUNE_SCHEDULERS = {
"FIFOScheduler",
"AsyncHyperBandScheduler",
"AsyncHyperBandScheduler",
"MedianStoppingRule",
"HyperBandScheduler",
"HyperBandForBOHB",
"PopulationBasedTraining",
"PopulationBasedTrainingReplay",
"PB2",
"ResourceChangingScheduler",
}


def _find_class_name(obj, allowed_module_path_prefix: str, whitelist: Set[str]):
"""Find the class name of the object. If the object is not
under `allowed_module_path_prefix` or if its class is not in the whitelist,
return "Custom".
Args:
obj: The object under inspection.
allowed_module_path_prefix: If the `obj`'s class is not under
the `allowed_module_path_prefix`, its class name will be anonymized.
whitelist: If the `obj`'s class is not in the `whitelist`,
it will be anonymized.
Returns:
The class name to be tagged with telemetry.
"""
module_path = obj.__module__
cls_name = obj.__class__.__name__
if module_path.startswith(allowed_module_path_prefix) and cls_name in whitelist:
return cls_name
else:
return "Custom"


def tag_air_trainer(trainer: "BaseTrainer"):
from ray.train.trainer import BaseTrainer

assert isinstance(trainer, BaseTrainer)
trainer_name = _find_class_name(trainer, "ray.train", AIR_TRAINERS)
record_extra_usage_tag(TagKey.AIR_TRAINER, trainer_name)


def tag_searcher(searcher: Union["BasicVariantGenerator", "Searcher"]):
from ray.tune.search import BasicVariantGenerator, Searcher

if isinstance(searcher, BasicVariantGenerator):
# Note this could be highly inflated as all train flows are treated
# as using BasicVariantGenerator.
record_extra_usage_tag(TagKey.TUNE_SEARCHER, "BasicVariantGenerator")
elif isinstance(searcher, Searcher):
searcher_name = _find_class_name(
searcher, "ray.tune.search", TUNE_SEARCHERS.union(TUNE_SEARCHER_WRAPPERS)
)
if searcher_name in TUNE_SEARCHER_WRAPPERS:
# ignore to avoid double tagging with wrapper name.
return
record_extra_usage_tag(TagKey.TUNE_SEARCHER, searcher_name)
module_path = trainer.__module__
if module_path.startswith("ray.train"):
trainer_name = trainer.__class__.__name__
if trainer_name not in AIR_TRAINERS:
trainer_name = "Custom"
else:
assert False, (
"Not expecting a non-BasicVariantGenerator, "
"non-Searcher type passed in for `tag_searcher`."
)


def tag_scheduler(scheduler: "TrialScheduler"):
from ray.tune.schedulers import TrialScheduler

assert isinstance(scheduler, TrialScheduler)
scheduler_name = _find_class_name(scheduler, "ray.tune.schedulers", TUNE_SCHEDULERS)
record_extra_usage_tag(TagKey.TUNE_SCHEDULER, scheduler_name)
trainer_name = "Custom"
record_extra_usage_tag(TagKey.AIR_TRAINER, trainer_name)
3 changes: 0 additions & 3 deletions python/ray/train/base_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,6 @@ class BaseTrainer(abc.ABC):
Note: The base ``BaseTrainer`` class cannot be instantiated directly. Only
one of its subclasses can be used.
Note to AIR developers: If a new AIR trainer is added, please update
`air/_internal/usage.py`.
**How does a trainer work?**
- First, initialize the Trainer. The initialization runs locally,
Expand Down
2 changes: 1 addition & 1 deletion python/ray/tune/schedulers/async_hyperband.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def __init__(
if mode:
assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!"

super().__init__()
FIFOScheduler.__init__(self)
self._reduction_factor = reduction_factor
self._max_t = max_t

Expand Down
2 changes: 1 addition & 1 deletion python/ray/tune/schedulers/hyperband.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def __init__(
if mode:
assert mode in ["min", "max"], "`mode` must be 'min' or 'max'!"

super().__init__()
FIFOScheduler.__init__(self)
self._eta = reduction_factor
self._s_max_1 = int(np.round(np.log(max_t) / np.log(reduction_factor))) + 1
self._max_t_attr = max_t
Expand Down
2 changes: 1 addition & 1 deletion python/ray/tune/schedulers/median_stopping_rule.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def __init__(
min_time_slice: int = 0,
hard_stop: bool = True,
):
super().__init__()
FIFOScheduler.__init__(self)
self._stopped_trials = set()
self._grace_period = grace_period
self._min_samples_required = min_samples_required
Expand Down
4 changes: 2 additions & 2 deletions python/ray/tune/schedulers/pbt.py
Original file line number Diff line number Diff line change
Expand Up @@ -391,7 +391,7 @@ def __init__(
if mode:
assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."

super().__init__()
FIFOScheduler.__init__(self)
self._metric = metric
self._mode = mode
self._metric_op = None
Expand Down Expand Up @@ -1024,7 +1024,7 @@ def _load_policy(self, policy_file: str) -> Tuple[Dict, List[Tuple[int, Dict]]]:
policy = []
last_new_tag = None
last_old_conf = None
for old_tag, new_tag, old_step, new_step, old_conf, new_conf in reversed(
for (old_tag, new_tag, old_step, new_step, old_conf, new_conf) in reversed(
raw_policy
):
if last_new_tag and old_tag != last_new_tag:
Expand Down
13 changes: 1 addition & 12 deletions python/ray/tune/schedulers/trial_scheduler.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from typing import Dict, Optional

from ray.air._internal.usage import tag_scheduler
from ray.tune.execution import trial_runner
from ray.tune.result import DEFAULT_METRIC
from ray.tune.experiment import Trial
Expand All @@ -9,11 +8,7 @@

@DeveloperAPI
class TrialScheduler:
"""Interface for implementing a Trial Scheduler class.
Note to Tune developers: If a new scheduler is added, please update
`air/_internal/usage.py`.
"""
"""Interface for implementing a Trial Scheduler class."""

CONTINUE = "CONTINUE" #: Status for continuing trial execution
PAUSE = "PAUSE" #: Status for pausing trial execution
Expand All @@ -28,9 +23,6 @@ class TrialScheduler:

_supports_buffered_results = True

def __init__(self):
tag_scheduler(self)

@property
def metric(self):
return self._metric
Expand Down Expand Up @@ -135,9 +127,6 @@ def restore(self, checkpoint_path: str):
class FIFOScheduler(TrialScheduler):
"""Simple scheduler that just runs trials in submission order."""

def __init__(self):
super().__init__()

def on_trial_add(self, trial_runner: "trial_runner.TrialRunner", trial: Trial):
pass

Expand Down
2 changes: 0 additions & 2 deletions python/ray/tune/search/basic_variant.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
import warnings
import numpy as np

from ray.air._internal.usage import tag_searcher
from ray.tune.error import TuneError
from ray.tune.experiment.config_parser import _make_parser, _create_trial_from_spec
from ray.tune.search.sample import np_random_generator, _BackwardsCompatibleNumpyRng
Expand Down Expand Up @@ -295,7 +294,6 @@ def __init__(
Union[int, "np_random_generator", np.random.RandomState]
] = None,
):
tag_searcher(self)
self._trial_generator = []
self._iterators = []
self._trial_iter = None
Expand Down
5 changes: 0 additions & 5 deletions python/ray/tune/search/searcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import warnings
from typing import Dict, Optional, List, Union, Any, TYPE_CHECKING

from ray.air._internal.usage import tag_searcher
from ray.tune.search.util import _set_search_properties_backwards_compatible
from ray.util.annotations import DeveloperAPI, PublicAPI
from ray.util.debug import log_once
Expand Down Expand Up @@ -33,9 +32,6 @@ class Searcher:
Not all implementations support multi objectives.
Note to Tune developers: If a new searcher is added, please update
`air/_internal/usage.py`.
Args:
metric: The training result objective value attribute. If
list then list of training result objective value attributes
Expand Down Expand Up @@ -80,7 +76,6 @@ def __init__(
metric: Optional[str] = None,
mode: Optional[str] = None,
):
tag_searcher(self)
self._metric = metric
self._mode = mode

Expand Down
6 changes: 0 additions & 6 deletions src/ray/protobuf/usage.proto
Original file line number Diff line number Diff line change
Expand Up @@ -118,10 +118,4 @@ enum TagKey {
// Name of AIR trainer, or "Custom" if user-defined.
// Example: "TorchTrainer"
AIR_TRAINER = 500;
// Name of Tune search algorithm or "Custom" if user-defined.
// Example: "TuneBOHB", "BasicVariantGenerator"
TUNE_SEARCHER = 501;
// Name of Tune scheduler algorithm or "Custom" if user-defined.
// Example: "FIFOScheduler"
TUNE_SCHEDULER = 502;
}

0 comments on commit d7636df

Please sign in to comment.