diff --git a/.isort.cfg b/.isort.cfg index cb3272070ed5..e737792fcf14 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -19,7 +19,7 @@ filter_files=True # python/ray/setup-dev.py # For the rest we will gradually remove them from the blacklist as we # reformat the code to follow the style guide. -skip_glob=doc/*,python/ray/__init__.py,python/ray/setup-dev.py,python/build/*,python/ray/cloudpickle/*,python/ray/thirdparty_files/*,python/ray/_private/thirdparty/*,python/ray/_private/runtime_env/agent/thirdparty_files/*,python/ray/dag/*.py,ci/*,python/ray/_private/*,python/ray/air/*,dashboard/*,python/ray/includes/*,python/ray/internal/*,python/ray/ray_operator/*,python/ray/scripts/*,python/ray/serve/generated/serve_pb2.py,python/ray/sgd/*,python/ray/streaming/*,python/ray/tests/*,python/ray/tests/*,python/ray/tune/*,python/ray/util/*,python/ray/workers/*,python/ray/workflow/*,rllib/*,release/*, +skip_glob=doc/*,python/ray/__init__.py,python/ray/setup-dev.py,python/build/*,python/ray/cloudpickle/*,python/ray/thirdparty_files/*,python/ray/_private/thirdparty/*,python/ray/_private/runtime_env/agent/thirdparty_files/*,python/ray/dag/*.py,ci/*,python/ray/_private/*,python/ray/air/*,dashboard/*,python/ray/includes/*,python/ray/internal/*,python/ray/ray_operator/*,python/ray/scripts/*,python/ray/serve/generated/serve_pb2.py,python/ray/sgd/*,python/ray/streaming/*,python/ray/tests/*,python/ray/tests/*,python/ray/util/*,python/ray/workers/*,python/ray/workflow/*,rllib/*,release/*, known_local_folder=ray known_afterray=psutil,setproctitle diff --git a/python/ray/tune/__init__.py b/python/ray/tune/__init__.py index 2490fa3a6afd..fb7a5a819606 100644 --- a/python/ray/tune/__init__.py +++ b/python/ray/tune/__init__.py @@ -1,54 +1,53 @@ +# isort: off # Try import ray[tune] core requirements (defined in setup.py) try: + import fsspec # noqa: F401 import pandas # noqa: F401 - import requests # noqa: F401 import pyarrow # noqa: F401 - import fsspec # noqa: F401 + import requests # noqa: F401 except ImportError as exc: raise ImportError( "Can't import ray.tune as some dependencies are missing. " 'Run `pip install "ray[tune]"` to fix.' 
) from exc +# isort: on - -from ray.tune.error import TuneError -from ray.tune.tune_config import ResumeConfig, TuneConfig -from ray.tune.tune import run_experiments, run -from ray.tune.syncer import SyncConfig -from ray.tune.experiment import Experiment from ray.tune.analysis import ExperimentAnalysis -from ray.tune.stopper import Stopper -from ray.tune.registry import register_env, register_trainable -from ray.tune.trainable import Trainable from ray.tune.callback import Callback -from ray.tune.search import grid_search +from ray.tune.error import TuneError +from ray.tune.execution.placement_groups import PlacementGroupFactory +from ray.tune.experiment import Experiment from ray.tune.progress_reporter import ( - ProgressReporter, CLIReporter, JupyterNotebookReporter, + ProgressReporter, ) +from ray.tune.registry import register_env, register_trainable +from ray.tune.result_grid import ResultGrid +from ray.tune.schedulers import create_scheduler +from ray.tune.search import create_searcher, grid_search from ray.tune.search.sample import ( - sample_from, - uniform, - quniform, choice, - randint, lograndint, - qrandint, - qlograndint, - randn, - qrandn, loguniform, + qlograndint, qloguniform, + qrandint, + qrandn, + quniform, + randint, + randn, + sample_from, + uniform, ) -from ray.tune.search import create_searcher -from ray.tune.schedulers import create_scheduler -from ray.tune.execution.placement_groups import PlacementGroupFactory +from ray.tune.stopper import Stopper +from ray.tune.syncer import SyncConfig +from ray.tune.trainable import Trainable from ray.tune.trainable.util import with_parameters, with_resources -from ray.tune.result_grid import ResultGrid +from ray.tune.tune import run, run_experiments +from ray.tune.tune_config import ResumeConfig, TuneConfig from ray.tune.tuner import Tuner - __all__ = [ "Trainable", "Callback", diff --git a/python/ray/tune/analysis/experiment_analysis.py b/python/ray/tune/analysis/experiment_analysis.py index 6e70a10eeb41..625118b1069d 100644 --- a/python/ray/tune/analysis/experiment_analysis.py +++ b/python/ray/tune/analysis/experiment_analysis.py @@ -2,34 +2,24 @@ import io import json import logging -from numbers import Number import os +from numbers import Number from pathlib import Path from typing import Any, Dict, List, Optional, Tuple, Union import pyarrow.fs -from ray.util.annotations import PublicAPI -from ray.air.constants import ( - EXPR_PROGRESS_FILE, - EXPR_RESULT_FILE, - TRAINING_ITERATION, -) +from ray.air.constants import EXPR_PROGRESS_FILE, EXPR_RESULT_FILE, TRAINING_ITERATION from ray.train import Checkpoint -from ray.train._internal.storage import ( - _exists_at_fs_path, - get_fs_and_path, -) +from ray.train._internal.storage import _exists_at_fs_path, get_fs_and_path from ray.tune.execution.experiment_state import _find_newest_experiment_checkpoint from ray.tune.execution.tune_controller import TuneController from ray.tune.experiment import Trial -from ray.tune.result import ( - DEFAULT_METRIC, - CONFIG_PREFIX, -) +from ray.tune.result import CONFIG_PREFIX, DEFAULT_METRIC from ray.tune.utils import flatten_dict from ray.tune.utils.serialization import TuneFunctionDecoder -from ray.tune.utils.util import is_nan_or_inf, is_nan, unflattened_lookup +from ray.tune.utils.util import is_nan, is_nan_or_inf, unflattened_lookup +from ray.util.annotations import PublicAPI try: import pandas as pd diff --git a/python/ray/tune/callback.py b/python/ray/tune/callback.py index 2d941fe90cbe..1295a0e61cef 100644 --- 
a/python/ray/tune/callback.py +++ b/python/ray/tune/callback.py @@ -1,11 +1,11 @@ -from abc import ABCMeta import glob +import warnings +from abc import ABCMeta from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple -import warnings -from ray.util.annotations import PublicAPI, DeveloperAPI from ray.tune.utils.util import _atomic_save, _load_newest_checkpoint +from ray.util.annotations import DeveloperAPI, PublicAPI if TYPE_CHECKING: from ray.train import Checkpoint diff --git a/python/ray/tune/cli/commands.py b/python/ray/tune/cli/commands.py index 99c0a559db40..09070124124e 100644 --- a/python/ray/tune/cli/commands.py +++ b/python/ray/tune/cli/commands.py @@ -1,25 +1,25 @@ -from pathlib import Path -from typing import Optional, List - -import click import logging import operator import os import shutil import subprocess from datetime import datetime +from pathlib import Path +from typing import List, Optional +import click import pandas as pd -from pandas.api.types import is_string_dtype, is_numeric_dtype +from pandas.api.types import is_numeric_dtype, is_string_dtype + +from ray._private.thirdparty.tabulate.tabulate import tabulate from ray.air.constants import EXPR_RESULT_FILE +from ray.tune import TuneError +from ray.tune.analysis import ExperimentAnalysis from ray.tune.result import ( + CONFIG_PREFIX, DEFAULT_EXPERIMENT_INFO_KEYS, DEFAULT_RESULT_KEYS, - CONFIG_PREFIX, ) -from ray.tune.analysis import ExperimentAnalysis -from ray.tune import TuneError -from ray._private.thirdparty.tabulate.tabulate import tabulate logger = logging.getLogger(__name__) diff --git a/python/ray/tune/cli/scripts.py b/python/ray/tune/cli/scripts.py index 922e2b405d97..5401d091c3ba 100644 --- a/python/ray/tune/cli/scripts.py +++ b/python/ray/tune/cli/scripts.py @@ -1,4 +1,5 @@ import click + import ray.tune.cli.commands as commands diff --git a/python/ray/tune/examples/ax_example.py b/python/ray/tune/examples/ax_example.py index c638f759b112..3a3210c35dca 100644 --- a/python/ray/tune/examples/ax_example.py +++ b/python/ray/tune/examples/ax_example.py @@ -4,9 +4,11 @@ Requires the Ax library to be installed (`pip install ax-platform sqlalchemy`). 
""" -import numpy as np + import time +import numpy as np + from ray import train, tune from ray.tune.schedulers import AsyncHyperBandScheduler from ray.tune.search.ax import AxSearch diff --git a/python/ray/tune/examples/bohb_example.py b/python/ray/tune/examples/bohb_example.py index 1975c8dbe20a..3b75b6633add 100644 --- a/python/ray/tune/examples/bohb_example.py +++ b/python/ray/tune/examples/bohb_example.py @@ -7,8 +7,8 @@ """ import json -import time import os +import time import numpy as np diff --git a/python/ray/tune/examples/cifar10_pytorch.py b/python/ray/tune/examples/cifar10_pytorch.py index cb1a44587449..572522bb8698 100644 --- a/python/ray/tune/examples/cifar10_pytorch.py +++ b/python/ray/tune/examples/cifar10_pytorch.py @@ -2,22 +2,25 @@ # fmt: off # __import_begin__ -import numpy as np import os import tempfile +from typing import Dict + +import numpy as np import torch import torch.nn as nn import torch.nn.functional as F import torch.optim as optim -from filelock import FileLock -from torch.utils.data import random_split import torchvision import torchvision.transforms as transforms -from typing import Dict +from filelock import FileLock +from torch.utils.data import random_split + import ray from ray import train, tune from ray.train import Checkpoint from ray.tune.schedulers import ASHAScheduler + # __import_end__ diff --git a/python/ray/tune/examples/hyperband_example.py b/python/ray/tune/examples/hyperband_example.py index 1720c246b879..4e3e8e675990 100755 --- a/python/ray/tune/examples/hyperband_example.py +++ b/python/ray/tune/examples/hyperband_example.py @@ -4,8 +4,8 @@ import ray from ray import train, tune -from ray.tune.utils.mock_trainable import MyTrainableClass from ray.tune.schedulers import HyperBandScheduler +from ray.tune.utils.mock_trainable import MyTrainableClass if __name__ == "__main__": parser = argparse.ArgumentParser() diff --git a/python/ray/tune/examples/hyperopt_conditional_search_space_example.py b/python/ray/tune/examples/hyperopt_conditional_search_space_example.py index 67c3b38d192f..741f9fe23be0 100644 --- a/python/ray/tune/examples/hyperopt_conditional_search_space_example.py +++ b/python/ray/tune/examples/hyperopt_conditional_search_space_example.py @@ -7,14 +7,16 @@ For an example of using a Tune search space, see :doc:`/tune/examples/hyperopt_example`. 
""" + import time +from hyperopt import hp + import ray from ray import train, tune -from ray.tune.search import ConcurrencyLimiter from ray.tune.schedulers import AsyncHyperBandScheduler +from ray.tune.search import ConcurrencyLimiter from ray.tune.search.hyperopt import HyperOptSearch -from hyperopt import hp def f_unpack_dict(dct): @@ -35,7 +37,7 @@ def f_unpack_dict(dct): """ res = {} - for (k, v) in dct.items(): + for k, v in dct.items(): if isinstance(v, dict): res = {**res, **f_unpack_dict(v)} else: diff --git a/python/ray/tune/examples/lightgbm_example.py b/python/ray/tune/examples/lightgbm_example.py index e1c1bf7d9b2e..3db060e86ec6 100644 --- a/python/ray/tune/examples/lightgbm_example.py +++ b/python/ray/tune/examples/lightgbm_example.py @@ -4,8 +4,8 @@ from sklearn.model_selection import train_test_split from ray import tune -from ray.tune.schedulers import ASHAScheduler from ray.tune.integration.lightgbm import TuneReportCheckpointCallback +from ray.tune.schedulers import ASHAScheduler def train_breast_cancer(config: dict): diff --git a/python/ray/tune/examples/mlflow_ptl.py b/python/ray/tune/examples/mlflow_ptl.py index 03dff1a71007..e1828661b98c 100644 --- a/python/ray/tune/examples/mlflow_ptl.py +++ b/python/ray/tune/examples/mlflow_ptl.py @@ -1,16 +1,16 @@ """An example showing how to use Pytorch Lightning training, Ray Tune HPO, and MLflow autologging all together.""" + import os import tempfile -import pytorch_lightning as pl - import mlflow +import pytorch_lightning as pl from ray import train, tune from ray.air.integrations.mlflow import setup_mlflow -from ray.tune.integration.pytorch_lightning import TuneReportCallback from ray.tune.examples.mnist_ptl_mini import LightningMNISTClassifier, MNISTDataModule +from ray.tune.integration.pytorch_lightning import TuneReportCallback def train_mnist_tune(config, data_dir=None, num_epochs=10, num_gpus=0): diff --git a/python/ray/tune/examples/mnist_ptl_mini.py b/python/ray/tune/examples/mnist_ptl_mini.py index d14ed9a926e8..d538eb90fc4a 100644 --- a/python/ray/tune/examples/mnist_ptl_mini.py +++ b/python/ray/tune/examples/mnist_ptl_mini.py @@ -1,21 +1,17 @@ import math - import os -import torch -from filelock import FileLock import pytorch_lightning as pl - - +import torch +from filelock import FileLock from torch.nn import functional as F from torch.utils.data import DataLoader, random_split from torchmetrics import Accuracy from torchvision import transforms from torchvision.datasets import MNIST -from ray.tune.integration.pytorch_lightning import TuneReportCheckpointCallback from ray import train, tune - +from ray.tune.integration.pytorch_lightning import TuneReportCheckpointCallback PATH_DATASETS = os.environ.get("PATH_DATASETS", ".") diff --git a/python/ray/tune/examples/mnist_pytorch.py b/python/ray/tune/examples/mnist_pytorch.py index 6a2752287fb5..e4962b185e36 100644 --- a/python/ray/tune/examples/mnist_pytorch.py +++ b/python/ray/tune/examples/mnist_pytorch.py @@ -1,13 +1,15 @@ # Original Code here: # https://github.com/pytorch/examples/blob/master/mnist/main.py -import os + import argparse -from filelock import FileLock +import os import tempfile + import torch import torch.nn as nn import torch.nn.functional as F import torch.optim as optim +from filelock import FileLock from torchvision import datasets, transforms import ray diff --git a/python/ray/tune/examples/mnist_pytorch_trainable.py b/python/ray/tune/examples/mnist_pytorch_trainable.py index 93086d7c8719..9bcf34ffba24 100644 --- 
a/python/ray/tune/examples/mnist_pytorch_trainable.py +++ b/python/ray/tune/examples/mnist_pytorch_trainable.py @@ -4,18 +4,19 @@ import argparse import os + import torch import torch.optim as optim import ray from ray import train, tune -from ray.tune.schedulers import ASHAScheduler from ray.tune.examples.mnist_pytorch import ( - train_func, - test_func, - get_data_loaders, ConvNet, + get_data_loaders, + test_func, + train_func, ) +from ray.tune.schedulers import ASHAScheduler # Change these values if you want the training to run quicker or slower. EPOCH_SIZE = 512 diff --git a/python/ray/tune/examples/nevergrad_example.py b/python/ray/tune/examples/nevergrad_example.py index 7906bc71c638..e579b781c857 100644 --- a/python/ray/tune/examples/nevergrad_example.py +++ b/python/ray/tune/examples/nevergrad_example.py @@ -4,11 +4,12 @@ Requires the Nevergrad library to be installed (`pip install nevergrad`). """ + import time from ray import train, tune -from ray.tune.search import ConcurrencyLimiter from ray.tune.schedulers import AsyncHyperBandScheduler +from ray.tune.search import ConcurrencyLimiter from ray.tune.search.nevergrad import NevergradSearch @@ -30,6 +31,7 @@ def easy_objective(config): if __name__ == "__main__": import argparse + import nevergrad as ng parser = argparse.ArgumentParser() diff --git a/python/ray/tune/examples/optuna_define_by_run_example.py b/python/ray/tune/examples/optuna_define_by_run_example.py index dd32402bbb00..443fa3549ca1 100644 --- a/python/ray/tune/examples/optuna_define_by_run_example.py +++ b/python/ray/tune/examples/optuna_define_by_run_example.py @@ -7,13 +7,14 @@ For an example of using a Tune search space, see :doc:`/tune/examples/optuna_example`. """ + import time -from typing import Dict, Optional, Any +from typing import Any, Dict, Optional import ray from ray import train, tune -from ray.tune.search import ConcurrencyLimiter from ray.tune.schedulers import AsyncHyperBandScheduler +from ray.tune.search import ConcurrencyLimiter from ray.tune.search.optuna import OptunaSearch diff --git a/python/ray/tune/examples/optuna_example.py b/python/ray/tune/examples/optuna_example.py index 3b4207f6c95f..17a7dfc7fe72 100644 --- a/python/ray/tune/examples/optuna_example.py +++ b/python/ray/tune/examples/optuna_example.py @@ -7,12 +7,13 @@ For an example of using an Optuna define-by-run function, see :doc:`/tune/examples/optuna_define_by_run_example`. 
""" + import time import ray from ray import train, tune -from ray.tune.search import ConcurrencyLimiter from ray.tune.schedulers import AsyncHyperBandScheduler +from ray.tune.search import ConcurrencyLimiter from ray.tune.search.optuna import OptunaSearch diff --git a/python/ray/tune/examples/pb2_example.py b/python/ray/tune/examples/pb2_example.py index 52bff6572e3f..ba1e94fb3d02 100644 --- a/python/ray/tune/examples/pb2_example.py +++ b/python/ray/tune/examples/pb2_example.py @@ -4,8 +4,8 @@ import ray from ray import train, tune -from ray.tune.schedulers.pb2 import PB2 from ray.tune.examples.pbt_function import pbt_function +from ray.tune.schedulers.pb2 import PB2 if __name__ == "__main__": parser = argparse.ArgumentParser() diff --git a/python/ray/tune/examples/pb2_ppo_example.py b/python/ray/tune/examples/pb2_ppo_example.py index 6ae092c4f754..75d184beefde 100644 --- a/python/ray/tune/examples/pb2_ppo_example.py +++ b/python/ray/tune/examples/pb2_ppo_example.py @@ -1,9 +1,10 @@ +import argparse import os import random -import argparse -import pandas as pd from datetime import datetime +import pandas as pd + from ray.tune import run, sample_from from ray.tune.schedulers import PopulationBasedTraining from ray.tune.schedulers.pb2 import PB2 diff --git a/python/ray/tune/examples/pbt_convnet_example.py b/python/ray/tune/examples/pbt_convnet_example.py index 5267b0d6012e..27e18e243ad8 100644 --- a/python/ray/tune/examples/pbt_convnet_example.py +++ b/python/ray/tune/examples/pbt_convnet_example.py @@ -6,17 +6,23 @@ # __tutorial_imports_begin__ import argparse import os + import numpy as np import torch import torch.optim as optim from torchvision import datasets -from ray.tune.examples.mnist_pytorch import train_func, test_func, ConvNet,\ - get_data_loaders import ray from ray import train, tune +from ray.tune.examples.mnist_pytorch import ( + ConvNet, + get_data_loaders, + test_func, + train_func, +) from ray.tune.schedulers import PopulationBasedTraining from ray.tune.utils import validate_save_restore + # __tutorial_imports_end__ diff --git a/python/ray/tune/examples/pbt_convnet_function_example.py b/python/ray/tune/examples/pbt_convnet_function_example.py index 50a3d9395f3f..b4ef69507ba6 100644 --- a/python/ray/tune/examples/pbt_convnet_function_example.py +++ b/python/ray/tune/examples/pbt_convnet_function_example.py @@ -3,14 +3,15 @@ # __tutorial_imports_begin__ import argparse import os + import numpy as np import torch import torch.optim as optim -from ray.tune.examples.mnist_pytorch import test_func, ConvNet, get_data_loaders import ray from ray import train, tune from ray.train import Checkpoint +from ray.tune.examples.mnist_pytorch import ConvNet, get_data_loaders, test_func from ray.tune.schedulers import PopulationBasedTraining # __tutorial_imports_end__ diff --git a/python/ray/tune/examples/pbt_dcgan_mnist/common.py b/python/ray/tune/examples/pbt_dcgan_mnist/common.py index 791a5aac3c4a..3f76e9e17531 100644 --- a/python/ray/tune/examples/pbt_dcgan_mnist/common.py +++ b/python/ray/tune/examples/pbt_dcgan_mnist/common.py @@ -1,6 +1,8 @@ -import ray - import os + +import matplotlib.animation as animation +import matplotlib.pyplot as plt +import numpy as np import torch import torch.nn as nn import torch.nn.parallel @@ -8,14 +10,11 @@ import torchvision.datasets as dset import torchvision.transforms as transforms import torchvision.utils as vutils -import numpy as np - +from scipy.stats import entropy from torch.autograd import Variable from torch.nn import functional as F -from 
scipy.stats import entropy -import matplotlib.pyplot as plt -import matplotlib.animation as animation +import ray # Training parameters workers = 2 diff --git a/python/ray/tune/examples/pbt_dcgan_mnist/pbt_dcgan_mnist_func.py b/python/ray/tune/examples/pbt_dcgan_mnist/pbt_dcgan_mnist_func.py index 89956b00b486..acb1edae2a85 100644 --- a/python/ray/tune/examples/pbt_dcgan_mnist/pbt_dcgan_mnist_func.py +++ b/python/ray/tune/examples/pbt_dcgan_mnist/pbt_dcgan_mnist_func.py @@ -2,34 +2,34 @@ """ Example of training DCGAN on MNIST using PBT with Tune's function API. """ -import ray -from ray import train, tune -from ray.train import Checkpoint -from ray.tune.schedulers import PopulationBasedTraining - import argparse import os -from filelock import FileLock import tempfile + +import numpy as np import torch import torch.nn as nn import torch.nn.parallel import torch.optim as optim import torch.utils.data -import numpy as np +from filelock import FileLock +import ray +from ray import train, tune +from ray.train import Checkpoint from ray.tune.examples.pbt_dcgan_mnist.common import ( - beta1, MODEL_PATH, + Discriminator, + Generator, + Net, + beta1, demo_gan, get_data_loader, plot_images, train_func, weights_init, - Discriminator, - Generator, - Net, ) +from ray.tune.schedulers import PopulationBasedTraining # __Train_begin__ diff --git a/python/ray/tune/examples/pbt_dcgan_mnist/pbt_dcgan_mnist_trainable.py b/python/ray/tune/examples/pbt_dcgan_mnist/pbt_dcgan_mnist_trainable.py index bd09149b8881..1de380c585cf 100644 --- a/python/ray/tune/examples/pbt_dcgan_mnist/pbt_dcgan_mnist_trainable.py +++ b/python/ray/tune/examples/pbt_dcgan_mnist/pbt_dcgan_mnist_trainable.py @@ -3,24 +3,33 @@ Example of training DCGAN on MNIST using PBT with Tune's Trainable Class API. 
""" -import ray -from ray import train, tune -from ray.tune.schedulers import PopulationBasedTraining - import argparse import os -from filelock import FileLock import random + +import numpy as np import torch import torch.nn as nn import torch.nn.parallel import torch.optim as optim import torch.utils.data -import numpy as np +from common import ( + MODEL_PATH, + Discriminator, + Generator, + Net, + beta1, + demo_gan, + get_data_loader, + plot_images, + train_func, + weights_init, +) +from filelock import FileLock -from common import beta1, MODEL_PATH -from common import demo_gan, get_data_loader, plot_images, train_func, weights_init -from common import Discriminator, Generator, Net +import ray +from ray import train, tune +from ray.tune.schedulers import PopulationBasedTraining # __Trainable_begin__ diff --git a/python/ray/tune/examples/pbt_example.py b/python/ray/tune/examples/pbt_example.py index feed7c8bef73..73a157b1c76c 100755 --- a/python/ray/tune/examples/pbt_example.py +++ b/python/ray/tune/examples/pbt_example.py @@ -1,9 +1,10 @@ #!/usr/bin/env python -import numpy as np import argparse import random +import numpy as np + import ray from ray import train, tune from ray.tune.schedulers import PopulationBasedTraining diff --git a/python/ray/tune/examples/pbt_memnn_example.py b/python/ray/tune/examples/pbt_memnn_example.py index 511825bd5f24..d2ae54a1c8fa 100644 --- a/python/ray/tune/examples/pbt_memnn_example.py +++ b/python/ray/tune/examples/pbt_memnn_example.py @@ -5,21 +5,29 @@ from __future__ import print_function -from tensorflow.keras.models import Sequential, Model, load_model -from tensorflow.keras.layers import Embedding -from tensorflow.keras.layers import Input, Activation, Dense, Permute, Dropout -from tensorflow.keras.layers import add, dot, concatenate -from tensorflow.keras.layers import LSTM -from tensorflow.keras.optimizers import RMSprop -from tensorflow.keras.utils import get_file -from tensorflow.keras.preprocessing.sequence import pad_sequences - -from filelock import FileLock -import os import argparse +import os +import re import tarfile + import numpy as np -import re +from filelock import FileLock +from tensorflow.keras.layers import ( + LSTM, + Activation, + Dense, + Dropout, + Embedding, + Input, + Permute, + add, + concatenate, + dot, +) +from tensorflow.keras.models import Model, Sequential, load_model +from tensorflow.keras.optimizers import RMSprop +from tensorflow.keras.preprocessing.sequence import pad_sequences +from tensorflow.keras.utils import get_file from ray import train, tune diff --git a/python/ray/tune/examples/pbt_ppo_example.py b/python/ray/tune/examples/pbt_ppo_example.py index ed66ae16c831..bcdfdff6072f 100755 --- a/python/ray/tune/examples/pbt_ppo_example.py +++ b/python/ray/tune/examples/pbt_ppo_example.py @@ -15,7 +15,6 @@ from ray.rllib.algorithms.ppo import PPO from ray.tune.schedulers import PopulationBasedTraining - if __name__ == "__main__": # Postprocess the perturbed config to ensure it's still valid def explore(config): diff --git a/python/ray/tune/examples/pbt_transformers/pbt_transformers.py b/python/ray/tune/examples/pbt_transformers/pbt_transformers.py index 886d97f82818..eb8a9d9a1b79 100644 --- a/python/ray/tune/examples/pbt_transformers/pbt_transformers.py +++ b/python/ray/tune/examples/pbt_transformers/pbt_transformers.py @@ -2,27 +2,29 @@ This example is uses the official huggingface transformers `hyperparameter_search` API. 
""" + import os -from ray import tune -from ray.train import CheckpointConfig -from ray.tune import CLIReporter -from ray.tune.examples.pbt_transformers.utils import ( - download_data, - build_compute_metrics_fn, -) -from ray.tune.schedulers import PopulationBasedTraining from transformers import ( - glue_tasks_num_labels, AutoConfig, AutoModelForSequenceClassification, AutoTokenizer, - Trainer, GlueDataset, GlueDataTrainingArguments, + Trainer, TrainingArguments, + glue_tasks_num_labels, ) +from ray import tune +from ray.train import CheckpointConfig +from ray.tune import CLIReporter +from ray.tune.examples.pbt_transformers.utils import ( + build_compute_metrics_fn, + download_data, +) +from ray.tune.schedulers import PopulationBasedTraining + def tune_transformer(num_samples=8, gpus_per_trial=0, smoke_test=False): data_dir_name = "./data" if not smoke_test else "./test_data" diff --git a/python/ray/tune/examples/pbt_transformers/utils.py b/python/ray/tune/examples/pbt_transformers/utils.py index cc41d52d6b97..ff304664b98b 100644 --- a/python/ray/tune/examples/pbt_transformers/utils.py +++ b/python/ray/tune/examples/pbt_transformers/utils.py @@ -2,9 +2,9 @@ import os from typing import Callable, Dict + import numpy as np -from transformers import EvalPrediction -from transformers import glue_compute_metrics, glue_output_modes +from transformers import EvalPrediction, glue_compute_metrics, glue_output_modes def build_compute_metrics_fn(task_name: str) -> Callable[[EvalPrediction], Dict]: diff --git a/python/ray/tune/examples/pbt_tune_cifar10_with_keras.py b/python/ray/tune/examples/pbt_tune_cifar10_with_keras.py index bdb9a81b65a3..4b89d8fc563d 100755 --- a/python/ray/tune/examples/pbt_tune_cifar10_with_keras.py +++ b/python/ray/tune/examples/pbt_tune_cifar10_with_keras.py @@ -17,8 +17,14 @@ import numpy as np import tensorflow as tf from tensorflow.keras.datasets import cifar10 -from tensorflow.keras.layers import Input, Dense, Dropout, Flatten -from tensorflow.keras.layers import Convolution2D, MaxPooling2D +from tensorflow.keras.layers import ( + Convolution2D, + Dense, + Dropout, + Flatten, + Input, + MaxPooling2D, +) from tensorflow.keras.models import Model, load_model from tensorflow.keras.preprocessing.image import ImageDataGenerator diff --git a/python/ray/tune/examples/tf_mnist_example.py b/python/ray/tune/examples/tf_mnist_example.py index f2c261728f95..f8d6e28c25da 100644 --- a/python/ray/tune/examples/tf_mnist_example.py +++ b/python/ray/tune/examples/tf_mnist_example.py @@ -13,9 +13,9 @@ import os from filelock import FileLock -from tensorflow.keras.layers import Dense, Flatten, Conv2D from tensorflow.keras import Model from tensorflow.keras.datasets.mnist import load_data +from tensorflow.keras.layers import Conv2D, Dense, Flatten from ray import train, tune diff --git a/python/ray/tune/examples/tune_mnist_keras.py b/python/ray/tune/examples/tune_mnist_keras.py index afeafff745b6..b5b521e61624 100644 --- a/python/ray/tune/examples/tune_mnist_keras.py +++ b/python/ray/tune/examples/tune_mnist_keras.py @@ -6,8 +6,8 @@ import ray from ray import train, tune -from ray.tune.schedulers import AsyncHyperBandScheduler from ray.air.integrations.keras import ReportCheckpointCallback +from ray.tune.schedulers import AsyncHyperBandScheduler def train_mnist(config): diff --git a/python/ray/tune/examples/xgboost_dynamic_resources_example.py b/python/ray/tune/examples/xgboost_dynamic_resources_example.py index f2bbb8745853..338da6ef87ea 100644 --- 
a/python/ray/tune/examples/xgboost_dynamic_resources_example.py +++ b/python/ray/tune/examples/xgboost_dynamic_resources_example.py @@ -1,15 +1,16 @@ -from typing import Dict, Any, Optional, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Dict, Optional + import sklearn.datasets import sklearn.metrics -from sklearn.model_selection import train_test_split import xgboost as xgb +from sklearn.model_selection import train_test_split import ray from ray import train, tune -from ray.tune.schedulers import ResourceChangingScheduler, ASHAScheduler from ray.tune.execution.placement_groups import PlacementGroupFactory from ray.tune.experiment import Trial from ray.tune.integration.xgboost import TuneReportCheckpointCallback +from ray.tune.schedulers import ASHAScheduler, ResourceChangingScheduler if TYPE_CHECKING: from ray.tune.execution.tune_controller import TuneController @@ -151,7 +152,7 @@ def example_resources_allocation_function( scheduler = ResourceChangingScheduler( base_scheduler=base_scheduler, - resources_allocation_function=example_resources_allocation_function + resources_allocation_function=example_resources_allocation_function, # resources_allocation_function=DistributeResources() # default ) diff --git a/python/ray/tune/examples/xgboost_example.py b/python/ray/tune/examples/xgboost_example.py index 96b4bf1a16fe..951ab8977056 100644 --- a/python/ray/tune/examples/xgboost_example.py +++ b/python/ray/tune/examples/xgboost_example.py @@ -1,15 +1,15 @@ from typing import Dict, List + +import numpy as np import sklearn.datasets import sklearn.metrics -import numpy as np -from sklearn.model_selection import train_test_split import xgboost as xgb +from sklearn.model_selection import train_test_split import ray from ray import tune -from ray.tune.schedulers import ASHAScheduler from ray.tune.integration.xgboost import TuneReportCheckpointCallback - +from ray.tune.schedulers import ASHAScheduler CHECKPOINT_FILENAME = "booster-checkpoint.json" diff --git a/python/ray/tune/execution/experiment_state.py b/python/ray/tune/execution/experiment_state.py index c0cb12aefce3..6858749c2738 100644 --- a/python/ray/tune/execution/experiment_state.py +++ b/python/ray/tune/execution/experiment_state.py @@ -1,18 +1,18 @@ -from collections import Counter import fnmatch -from pathlib import Path -from typing import Callable, Dict, Optional, Union import logging import os import time +from collections import Counter +from pathlib import Path +from typing import Callable, Dict, Optional, Union import pyarrow.fs from ray.train._internal.storage import ( StorageContext, - get_fs_and_path, _download_from_fs_path, _list_at_fs_path, + get_fs_and_path, ) from ray.tune.experiment.trial import Trial from ray.tune.impl.out_of_band_serialize_dataset import out_of_band_serialize_dataset diff --git a/python/ray/tune/execution/insufficient_resources_manager.py b/python/ray/tune/execution/insufficient_resources_manager.py index c0755914c8ac..0f788d2df7df 100644 --- a/python/ray/tune/execution/insufficient_resources_manager.py +++ b/python/ray/tune/execution/insufficient_resources_manager.py @@ -1,10 +1,10 @@ import logging -from functools import lru_cache import os -import ray import time +from functools import lru_cache from typing import Dict, Optional, Tuple +import ray from ray.tune.execution.cluster_info import _is_ray_cluster from ray.tune.experiment import Trial diff --git a/python/ray/tune/execution/placement_groups.py b/python/ray/tune/execution/placement_groups.py index 1b9b1f93503b..0848b147878d 100644 --- 
a/python/ray/tune/execution/placement_groups.py +++ b/python/ray/tune/execution/placement_groups.py @@ -1,5 +1,6 @@ import warnings from typing import Dict, Optional + from ray.air.execution.resources.request import ResourceRequest from ray.util.annotations import DeveloperAPI, PublicAPI from ray.util.placement_group import placement_group diff --git a/python/ray/tune/execution/tune_controller.py b/python/ray/tune/execution/tune_controller.py index baf7e62eb3f0..bb482a80e6e0 100644 --- a/python/ray/tune/execution/tune_controller.py +++ b/python/ray/tune/execution/tune_controller.py @@ -9,33 +9,35 @@ from datetime import datetime from functools import partial from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Union, Tuple, Set +from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union import ray from ray.air import ResourceRequest from ray.air.constants import TIME_THIS_ITER_S -from ray.air.execution import ResourceManager, PlacementGroupResourceManager +from ray.air.execution import PlacementGroupResourceManager, ResourceManager from ray.air.execution._internal import RayActorManager, TrackedActor +from ray.exceptions import RayActorError, RayTaskError from ray.train import CheckpointConfig -from ray.train._internal.session import _TrainingResult, _FutureTrainingResult +from ray.train._internal.session import _FutureTrainingResult, _TrainingResult from ray.train._internal.storage import StorageContext -from ray.exceptions import RayActorError, RayTaskError -from ray.tune.error import _AbortTrialExecution, _TuneStopTrialError +from ray.tune.callback import Callback, CallbackList +from ray.tune.error import TuneError, _AbortTrialExecution, _TuneStopTrialError from ray.tune.execution.class_cache import _ActorClassCache from ray.tune.execution.experiment_state import ( _ExperimentCheckpointManager, _find_newest_experiment_checkpoint, ) +from ray.tune.execution.insufficient_resources_manager import ( + _InsufficientResourcesManager, +) +from ray.tune.execution.placement_groups import PlacementGroupFactory +from ray.tune.experiment import Experiment, Trial from ray.tune.experiment.trial import ( _change_working_directory, + _get_trainable_kwargs, + _Location, _noop_logger_creator, _TrialInfo, - _Location, - _get_trainable_kwargs, -) -from ray.tune.experiment import Experiment -from ray.tune.execution.insufficient_resources_manager import ( - _InsufficientResourcesManager, ) from ray.tune.result import ( DEBUG_METRICS, @@ -43,25 +45,22 @@ DONE, RESULT_DUPLICATE, SHOULD_CHECKPOINT, + STDERR_FILE, + STDOUT_FILE, + TRIAL_INFO, ) -from ray.tune.result import TRIAL_INFO, STDOUT_FILE, STDERR_FILE -from ray.tune import ResumeConfig, TuneError -from ray.tune.callback import Callback, CallbackList from ray.tune.schedulers import FIFOScheduler, TrialScheduler -from ray.tune.stopper import NoopStopper, Stopper from ray.tune.search import BasicVariantGenerator, SearchAlgorithm -from ray.tune.experiment import Trial -from ray.tune.utils.log import _dedup_logs +from ray.tune.stopper import NoopStopper, Stopper +from ray.tune.tune_config import ResumeConfig +from ray.tune.utils import flatten_dict, warn_if_slow +from ray.tune.utils.log import Verbosity, _dedup_logs, has_verbosity from ray.tune.utils.object_cache import _ObjectCache from ray.tune.utils.resource_updater import _ResourceUpdater -from ray.tune.utils import warn_if_slow, flatten_dict -from ray.tune.utils.log import Verbosity, has_verbosity -from ray.tune.execution.placement_groups import 
PlacementGroupFactory from ray.tune.utils.serialization import TuneFunctionDecoder, TuneFunctionEncoder from ray.util.annotations import DeveloperAPI from ray.util.debug import log_once - logger = logging.getLogger(__name__) diff --git a/python/ray/tune/experiment/config_parser.py b/python/ray/tune/experiment/config_parser.py index 8e69ca54c12b..859f11402b9e 100644 --- a/python/ray/tune/experiment/config_parser.py +++ b/python/ray/tune/experiment/config_parser.py @@ -1,13 +1,13 @@ import argparse import json -# For compatibility under py2 to consider unicode as str -from ray.tune.utils.serialization import TuneFunctionEncoder - from ray.train import CheckpointConfig -from ray.tune import TuneError +from ray.tune.error import TuneError from ray.tune.experiment import Trial from ray.tune.resources import json_to_resources + +# For compatibility under py2 to consider unicode as str +from ray.tune.utils.serialization import TuneFunctionEncoder from ray.tune.utils.util import SafeFallbackEncoder diff --git a/python/ray/tune/experiment/experiment.py b/python/ray/tune/experiment/experiment.py index 287196bcffcd..30a8a2fd6fc2 100644 --- a/python/ray/tune/experiment/experiment.py +++ b/python/ray/tune/experiment/experiment.py @@ -1,40 +1,39 @@ import copy import datetime -from functools import partial import logging -from pathlib import Path -from pickle import PicklingError import pprint as pp import traceback +from functools import partial +from pathlib import Path +from pickle import PicklingError from typing import ( + TYPE_CHECKING, Any, + Callable, Dict, + List, + Mapping, Optional, Sequence, - Union, - Callable, Type, - List, - Mapping, - TYPE_CHECKING, + Union, ) import ray from ray.exceptions import RpcError from ray.train import CheckpointConfig, SyncConfig -from ray.train.constants import DEFAULT_STORAGE_PATH from ray.train._internal.storage import StorageContext +from ray.train.constants import DEFAULT_STORAGE_PATH from ray.tune.error import TuneError -from ray.tune.registry import register_trainable, is_function_trainable +from ray.tune.registry import is_function_trainable, register_trainable from ray.tune.stopper import CombinedStopper, FunctionStopper, Stopper, TimeoutStopper - -from ray.util.annotations import DeveloperAPI, Deprecated +from ray.util.annotations import Deprecated, DeveloperAPI if TYPE_CHECKING: import pyarrow.fs - from ray.tune.experiment import Trial from ray.tune import PlacementGroupFactory + from ray.tune.experiment import Trial logger = logging.getLogger(__name__) diff --git a/python/ray/tune/experiment/trial.py b/python/ray/tune/experiment/trial.py index d1bffb1cb0d3..257b44654e1f 100644 --- a/python/ray/tune/experiment/trial.py +++ b/python/ray/tune/experiment/trial.py @@ -1,35 +1,39 @@ import copy import json import logging -from contextlib import contextmanager -from functools import partial -from numbers import Number import os -from pathlib import Path import platform import re import time -from typing import Any, Dict, Optional, Sequence, Union, Callable, List, Tuple import uuid +from contextlib import contextmanager +from functools import partial +from numbers import Number +from pathlib import Path +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import ray +import ray.cloudpickle as cloudpickle +from ray._private.utils import binary_to_hex, hex_to_binary from ray.air.constants import ( - EXPR_ERROR_PICKLE_FILE, EXPR_ERROR_FILE, + EXPR_ERROR_PICKLE_FILE, TRAINING_ITERATION, ) - -import ray.cloudpickle as cloudpickle from 
ray.exceptions import RayActorError, RayTaskError from ray.train import Checkpoint, CheckpointConfig +from ray.train._internal.checkpoint_manager import _CheckpointManager +from ray.train._internal.session import _FutureTrainingResult, _TrainingResult +from ray.train._internal.storage import StorageContext, _exists_at_fs_path from ray.train.constants import ( RAY_CHDIR_TO_TRIAL_DIR, RAY_TRAIN_COUNT_PREEMPTION_AS_FAILURE, ) -from ray.train._internal.checkpoint_manager import _CheckpointManager -from ray.train._internal.session import _FutureTrainingResult, _TrainingResult -from ray.train._internal.storage import StorageContext, _exists_at_fs_path -from ray.tune import TuneError +from ray.tune.error import TuneError +from ray.tune.execution.placement_groups import ( + PlacementGroupFactory, + resource_dict_to_pg_factory, +) from ray.tune.logger import NoopLogger # NOTE(rkn): We import ray.tune.registry here instead of importing the names we @@ -37,25 +41,19 @@ # have been defined yet. See https://github.com/ray-project/ray/issues/1716. from ray.tune.registry import get_trainable_cls, validate_trainable from ray.tune.result import ( + DEBUG_METRICS, DONE, NODE_IP, PID, + STDERR_FILE, + STDOUT_FILE, TRIAL_ID, - DEBUG_METRICS, TRIAL_INFO, - STDOUT_FILE, - STDERR_FILE, -) -from ray.tune.execution.placement_groups import ( - PlacementGroupFactory, - resource_dict_to_pg_factory, ) from ray.tune.trainable.metadata import _TrainingRunMetadata -from ray.tune.utils.serialization import TuneFunctionDecoder, TuneFunctionEncoder from ray.tune.utils import date_str, flatten_dict -from ray.util.annotations import DeveloperAPI, Deprecated -from ray._private.utils import binary_to_hex, hex_to_binary - +from ray.tune.utils.serialization import TuneFunctionDecoder, TuneFunctionEncoder +from ray.util.annotations import Deprecated, DeveloperAPI DEBUG_PRINT_INTERVAL = 5 _DEFAULT_WIN_MAX_PATH_LENGTH = 260 diff --git a/python/ray/tune/experimental/output.py b/python/ray/tune/experimental/output.py index 9b8143f6aa4d..699217e7534a 100644 --- a/python/ray/tune/experimental/output.py +++ b/python/ray/tune/experimental/output.py @@ -1,51 +1,33 @@ import argparse -import sys -from typing import ( - Any, - Collection, - Dict, - Iterable, - List, - Optional, - Tuple, - Union, -) - import collections -from dataclasses import dataclass import datetime -from enum import IntEnum import logging import math import numbers -import numpy as np import os -import pandas as pd +import sys import textwrap import time +from dataclasses import dataclass +from enum import IntEnum +from typing import Any, Collection, Dict, Iterable, List, Optional, Tuple, Union -from ray.air._internal.usage import AirEntrypoint -from ray.train import Checkpoint -from ray.tune.search.sample import Domain -from ray.tune.utils.log import Verbosity - -try: - import rich - import rich.layout - import rich.live -except ImportError: - rich = None +import numpy as np +import pandas as pd import ray -from ray._private.dict import unflattened_lookup, flatten_dict +from ray._private.dict import flatten_dict, unflattened_lookup from ray._private.thirdparty.tabulate.tabulate import ( - tabulate, - TableFormat, - Line, DataRow, + Line, + TableFormat, + tabulate, ) +from ray.air._internal.usage import AirEntrypoint from ray.air.constants import TRAINING_ITERATION +from ray.train import Checkpoint from ray.tune.callback import Callback +from ray.tune.experiment.trial import Trial from ray.tune.result import ( AUTO_RESULT_KEYS, EPISODE_REWARD_MEAN, @@ -54,7 +36,16 @@ 
TIME_TOTAL_S, TIMESTEPS_TOTAL, ) -from ray.tune.experiment.trial import Trial +from ray.tune.search.sample import Domain +from ray.tune.utils.log import Verbosity + +try: + import rich + import rich.layout + import rich.live +except ImportError: + rich = None + logger = logging.getLogger(__name__) diff --git a/python/ray/tune/impl/placeholder.py b/python/ray/tune/impl/placeholder.py index 8020f159284c..6865b46f3f04 100644 --- a/python/ray/tune/impl/placeholder.py +++ b/python/ray/tune/impl/placeholder.py @@ -1,12 +1,11 @@ -from collections import defaultdict import hashlib +from collections import defaultdict from typing import Any, Dict, Tuple from ray.tune.search.sample import Categorical, Domain, Function from ray.tune.search.variant_generator import assign_value from ray.util.annotations import DeveloperAPI - ID_HASH_LENGTH = 8 diff --git a/python/ray/tune/impl/test_utils.py b/python/ray/tune/impl/test_utils.py index 5d5dadb3da35..1b26178e661c 100644 --- a/python/ray/tune/impl/test_utils.py +++ b/python/ray/tune/impl/test_utils.py @@ -1,7 +1,7 @@ from sklearn.datasets import load_breast_cancer from ray import tune -from ray.data import read_datasource, Dataset, Datasource, ReadTask +from ray.data import Dataset, Datasource, ReadTask, read_datasource from ray.data.block import BlockMetadata from ray.tune.impl.utils import execute_dataset @@ -59,7 +59,8 @@ def test_choice(): if __name__ == "__main__": - import pytest import sys + import pytest + sys.exit(pytest.main(["-v", "-x", __file__])) diff --git a/python/ray/tune/impl/tuner_internal.py b/python/ray/tune/impl/tuner_internal.py index b1bf6bb39634..835ff2decc3f 100644 --- a/python/ray/tune/impl/tuner_internal.py +++ b/python/ray/tune/impl/tuner_internal.py @@ -1,36 +1,35 @@ import copy import io -import math import logging +import math from pathlib import Path from typing import ( + TYPE_CHECKING, Any, Callable, Dict, List, Optional, + Tuple, Type, Union, - TYPE_CHECKING, - Tuple, ) import pyarrow.fs import ray.cloudpickle as pickle -from ray.util import inspect_serializability from ray.air._internal.uri_utils import URI from ray.air._internal.usage import AirEntrypoint from ray.air.config import RunConfig, ScalingConfig from ray.train._internal.storage import StorageContext, get_fs_and_path from ray.tune import Experiment, ExperimentAnalysis, ResumeConfig, TuneError -from ray.tune.tune import _Config from ray.tune.registry import is_function_trainable from ray.tune.result_grid import ResultGrid from ray.tune.trainable import Trainable -from ray.tune.tune import run +from ray.tune.tune import _Config, run from ray.tune.tune_config import TuneConfig from ray.tune.utils import flatten_dict +from ray.util import inspect_serializability if TYPE_CHECKING: from ray.train.trainer import BaseTrainer diff --git a/python/ray/tune/integration/lightgbm.py b/python/ray/tune/integration/lightgbm.py index 73add8d9071c..778ba5ee2318 100644 --- a/python/ray/tune/integration/lightgbm.py +++ b/python/ray/tune/integration/lightgbm.py @@ -1,8 +1,7 @@ -from ray.util.annotations import Deprecated - from ray.train.lightgbm import ( # noqa: F401 RayTrainReportCallback as TuneReportCheckpointCallback, ) +from ray.util.annotations import Deprecated @Deprecated diff --git a/python/ray/tune/integration/pytorch_lightning.py b/python/ray/tune/integration/pytorch_lightning.py index 3e0326c1a165..0ca554ebced8 100644 --- a/python/ray/tune/integration/pytorch_lightning.py +++ b/python/ray/tune/integration/pytorch_lightning.py @@ -6,15 +6,15 @@ from contextlib import 
contextmanager from typing import Dict, List, Optional, Type, Union -try: - from lightning import Callback, Trainer, LightningModule -except ModuleNotFoundError: - from pytorch_lightning import Callback, Trainer, LightningModule - from ray import train -from ray.util import log_once -from ray.util.annotations import PublicAPI, Deprecated from ray.train import Checkpoint +from ray.util import log_once +from ray.util.annotations import Deprecated, PublicAPI + +try: + from lightning import Callback, LightningModule, Trainer +except ModuleNotFoundError: + from pytorch_lightning import Callback, LightningModule, Trainer logger = logging.getLogger(__name__) diff --git a/python/ray/tune/integration/xgboost.py b/python/ray/tune/integration/xgboost.py index 04468e353135..fadb64ec4be1 100644 --- a/python/ray/tune/integration/xgboost.py +++ b/python/ray/tune/integration/xgboost.py @@ -1,7 +1,6 @@ from ray.train.xgboost import ( # noqa: F401 RayTrainReportCallback as TuneReportCheckpointCallback, ) - from ray.util.annotations import Deprecated diff --git a/python/ray/tune/logger/__init__.py b/python/ray/tune/logger/__init__.py index 052d5cb9da86..fd315308bab3 100644 --- a/python/ray/tune/logger/__init__.py +++ b/python/ray/tune/logger/__init__.py @@ -1,18 +1,21 @@ +from ray.tune.logger.csv import CSVLogger, CSVLoggerCallback +from ray.tune.logger.json import JsonLogger, JsonLoggerCallback from ray.tune.logger.logger import ( + LegacyLoggerCallback, Logger, LoggerCallback, - LegacyLoggerCallback, pretty_print, ) -from ray.tune.logger.csv import CSVLogger, CSVLoggerCallback -from ray.tune.logger.json import JsonLogger, JsonLoggerCallback from ray.tune.logger.noop import NoopLogger from ray.tune.logger.tensorboardx import TBXLogger, TBXLoggerCallback DEFAULT_LOGGERS = (JsonLogger, CSVLogger, TBXLogger) +# isort: off from ray.tune.logger.unified import UnifiedLogger # noqa: E402 +# isort: on + __all__ = [ "Logger", "LoggerCallback", diff --git a/python/ray/tune/logger/aim.py b/python/ray/tune/logger/aim.py index 9464ed98b756..863df7f46fe1 100644 --- a/python/ray/tune/logger/aim.py +++ b/python/ray/tune/logger/aim.py @@ -1,14 +1,11 @@ import logging +from typing import TYPE_CHECKING, Dict, List, Optional, Union import numpy as np -from typing import TYPE_CHECKING, Dict, Optional, List, Union from ray.air.constants import TRAINING_ITERATION from ray.tune.logger.logger import LoggerCallback -from ray.tune.result import ( - TIME_TOTAL_S, - TIMESTEPS_TOTAL, -) +from ray.tune.result import TIME_TOTAL_S, TIMESTEPS_TOTAL from ray.tune.utils import flatten_dict from ray.util.annotations import PublicAPI diff --git a/python/ray/tune/logger/csv.py b/python/ray/tune/logger/csv.py index 62c16e205d4f..5802b43f893d 100644 --- a/python/ray/tune/logger/csv.py +++ b/python/ray/tune/logger/csv.py @@ -1,7 +1,6 @@ import csv import logging from pathlib import Path - from typing import TYPE_CHECKING, Dict, TextIO from ray.air.constants import EXPR_PROGRESS_FILE diff --git a/python/ray/tune/logger/json.py b/python/ray/tune/logger/json.py index bd54322b2d50..d248a4080296 100644 --- a/python/ray/tune/logger/json.py +++ b/python/ray/tune/logger/json.py @@ -1,16 +1,12 @@ import json import logging -import numpy as np from pathlib import Path - from typing import TYPE_CHECKING, Dict, TextIO -from ray.air.constants import ( - EXPR_PARAM_FILE, - EXPR_PARAM_PICKLE_FILE, - EXPR_RESULT_FILE, -) +import numpy as np + import ray.cloudpickle as cloudpickle +from ray.air.constants import EXPR_PARAM_FILE, EXPR_PARAM_PICKLE_FILE, 
EXPR_RESULT_FILE from ray.tune.logger.logger import _LOGGER_DEPRECATION_WARNING, Logger, LoggerCallback from ray.tune.utils.util import SafeFallbackEncoder from ray.util.annotations import Deprecated, PublicAPI diff --git a/python/ray/tune/logger/logger.py b/python/ray/tune/logger/logger.py index b80bbb9db0ea..ad14069c3c20 100644 --- a/python/ray/tune/logger/logger.py +++ b/python/ray/tune/logger/logger.py @@ -2,11 +2,11 @@ import json import logging from pathlib import Path -import pyarrow - from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Type +import pyarrow import yaml + from ray.air._internal.json import SafeFallbackEncoder from ray.tune.callback import Callback from ray.util.annotations import Deprecated, DeveloperAPI, PublicAPI diff --git a/python/ray/tune/logger/tensorboardx.py b/python/ray/tune/logger/tensorboardx.py index 81577ff0e761..e0b626ae5053 100644 --- a/python/ray/tune/logger/tensorboardx.py +++ b/python/ray/tune/logger/tensorboardx.py @@ -1,17 +1,14 @@ import logging -import numpy as np - from typing import TYPE_CHECKING, Dict +import numpy as np + from ray.air.constants import TRAINING_ITERATION from ray.tune.logger.logger import _LOGGER_DEPRECATION_WARNING, Logger, LoggerCallback -from ray.util.debug import log_once -from ray.tune.result import ( - TIME_TOTAL_S, - TIMESTEPS_TOTAL, -) +from ray.tune.result import TIME_TOTAL_S, TIMESTEPS_TOTAL from ray.tune.utils import flatten_dict from ray.util.annotations import Deprecated, PublicAPI +from ray.util.debug import log_once if TYPE_CHECKING: from ray.tune.experiment.trial import Trial # noqa: F401 diff --git a/python/ray/tune/logger/unified.py b/python/ray/tune/logger/unified.py index ede689829c40..91af70cbd86c 100644 --- a/python/ray/tune/logger/unified.py +++ b/python/ray/tune/logger/unified.py @@ -1,5 +1,5 @@ import logging -from typing import Optional, List, Type, Dict, TYPE_CHECKING +from typing import TYPE_CHECKING, Dict, List, Optional, Type from ray.tune.logger import DEFAULT_LOGGERS from ray.tune.logger.json import JsonLogger diff --git a/python/ray/tune/progress_reporter.py b/python/ray/tune/progress_reporter.py index 64551525ad36..56edc5fc7062 100644 --- a/python/ray/tune/progress_reporter.py +++ b/python/ray/tune/progress_reporter.py @@ -3,12 +3,11 @@ import collections import datetime import numbers - -from pathlib import Path import sys import textwrap import time import warnings +from pathlib import Path from typing import Any, Callable, Collection, Dict, List, Optional, Tuple, Union import numpy as np @@ -17,10 +16,11 @@ import ray from ray._private.dict import flatten_dict from ray._private.thirdparty.tabulate.tabulate import tabulate -from ray.experimental.tqdm_ray import safe_print -from ray.air.util.node import _force_on_current_node from ray.air.constants import EXPR_ERROR_FILE, TRAINING_ITERATION +from ray.air.util.node import _force_on_current_node +from ray.experimental.tqdm_ray import safe_print from ray.tune.callback import Callback +from ray.tune.experiment.trial import DEBUG_PRINT_INTERVAL, Trial, _Location from ray.tune.logger import pretty_print from ray.tune.result import ( AUTO_RESULT_KEYS, @@ -36,13 +36,11 @@ TIMESTEPS_TOTAL, TRIAL_ID, ) -from ray.tune.experiment.trial import DEBUG_PRINT_INTERVAL, Trial, _Location from ray.tune.trainable import Trainable from ray.tune.utils import unflattened_lookup from ray.tune.utils.log import Verbosity, has_verbosity, set_verbosity from ray.util.annotations import DeveloperAPI, PublicAPI from ray.util.queue import Empty, Queue - 
from ray.widgets import Template try: @@ -1461,7 +1459,7 @@ def display_result(self, trial: Trial, result: Dict, error: bool, done: bool): error: True if an error has occurred, False otherwise done: True if the trial is finished, False otherwise """ - from IPython.display import display, HTML + from IPython.display import HTML, display self._last_result[trial] = result if has_verbosity(Verbosity.V3_TRIAL_DETAILS): diff --git a/python/ray/tune/registry.py b/python/ray/tune/registry.py index 4d44ed6d84c1..095e15f108fb 100644 --- a/python/ray/tune/registry.py +++ b/python/ray/tune/registry.py @@ -7,10 +7,10 @@ import ray import ray.cloudpickle as pickle from ray.experimental.internal_kv import ( + _internal_kv_del, _internal_kv_get, _internal_kv_initialized, _internal_kv_put, - _internal_kv_del, ) from ray.tune.error import TuneError from ray.util.annotations import DeveloperAPI @@ -95,8 +95,7 @@ def register_trainable(name: str, trainable: Union[Callable, Type], warn: bool = automatically converted into a class during registration. """ - from ray.tune.trainable import wrap_function - from ray.tune.trainable import Trainable + from ray.tune.trainable import Trainable, wrap_function if isinstance(trainable, type): logger.debug("Detected class for trainable.") diff --git a/python/ray/tune/resources.py b/python/ray/tune/resources.py index c45a5ce3d50c..6c6113ceac03 100644 --- a/python/ray/tune/resources.py +++ b/python/ray/tune/resources.py @@ -1,19 +1,18 @@ -from collections import namedtuple -import logging import json +import logging +from collections import namedtuple # For compatibility under py2 to consider unicode as str from typing import Optional +from ray.tune.error import TuneError from ray.tune.execution.placement_groups import ( - resource_dict_to_pg_factory, PlacementGroupFactory, + resource_dict_to_pg_factory, ) from ray.tune.utils.resource_updater import _Resources from ray.util.annotations import Deprecated, DeveloperAPI -from ray.tune import TuneError - logger = logging.getLogger(__name__) diff --git a/python/ray/tune/result.py b/python/ray/tune/result.py index 2f361b40a6eb..b4e966386a12 100644 --- a/python/ray/tune/result.py +++ b/python/ray/tune/result.py @@ -1,14 +1,14 @@ # Importing for Backward Compatibility from ray.air.constants import ( # noqa: F401 - TIMESTAMP, - TIME_THIS_ITER_S, - TRAINING_ITERATION, + EXPR_ERROR_FILE, + EXPR_ERROR_PICKLE_FILE, EXPR_PARAM_FILE, EXPR_PARAM_PICKLE_FILE, EXPR_PROGRESS_FILE, EXPR_RESULT_FILE, - EXPR_ERROR_PICKLE_FILE, - EXPR_ERROR_FILE, + TIME_THIS_ITER_S, + TIMESTAMP, + TRAINING_ITERATION, ) # fmt: off diff --git a/python/ray/tune/result_grid.py b/python/ray/tune/result_grid.py index 08dc91ddc108..7dffe6d4614a 100644 --- a/python/ray/tune/result_grid.py +++ b/python/ray/tune/result_grid.py @@ -1,6 +1,7 @@ +from typing import Optional, Union + import pandas as pd import pyarrow -from typing import Optional, Union from ray.air.result import Result from ray.exceptions import RayTaskError diff --git a/python/ray/tune/schedulers/__init__.py b/python/ray/tune/schedulers/__init__.py index 0fd6669120e7..f40125e5e50e 100644 --- a/python/ray/tune/schedulers/__init__.py +++ b/python/ray/tune/schedulers/__init__.py @@ -1,16 +1,16 @@ import inspect from ray._private.utils import get_function_args -from ray.tune.schedulers.trial_scheduler import TrialScheduler, FIFOScheduler -from ray.tune.schedulers.hyperband import HyperBandScheduler +from ray.tune.schedulers.async_hyperband import ASHAScheduler, AsyncHyperBandScheduler from 
ray.tune.schedulers.hb_bohb import HyperBandForBOHB -from ray.tune.schedulers.async_hyperband import AsyncHyperBandScheduler, ASHAScheduler +from ray.tune.schedulers.hyperband import HyperBandScheduler from ray.tune.schedulers.median_stopping_rule import MedianStoppingRule from ray.tune.schedulers.pbt import ( PopulationBasedTraining, PopulationBasedTrainingReplay, ) from ray.tune.schedulers.resource_changing_scheduler import ResourceChangingScheduler +from ray.tune.schedulers.trial_scheduler import FIFOScheduler, TrialScheduler from ray.util import PublicAPI diff --git a/python/ray/tune/schedulers/async_hyperband.py b/python/ray/tune/schedulers/async_hyperband.py index 95c477b57857..3bf8204592ba 100644 --- a/python/ray/tune/schedulers/async_hyperband.py +++ b/python/ray/tune/schedulers/async_hyperband.py @@ -1,12 +1,12 @@ import logging -from typing import Dict, Optional, Union, TYPE_CHECKING +import pickle +from typing import TYPE_CHECKING, Dict, Optional, Union import numpy as np -import pickle +from ray.tune.experiment import Trial from ray.tune.result import DEFAULT_METRIC from ray.tune.schedulers.trial_scheduler import FIFOScheduler, TrialScheduler -from ray.tune.experiment import Trial from ray.util import PublicAPI if TYPE_CHECKING: diff --git a/python/ray/tune/schedulers/hb_bohb.py b/python/ray/tune/schedulers/hb_bohb.py index 67a3d5dc4dbf..6c454d9efd35 100644 --- a/python/ray/tune/schedulers/hb_bohb.py +++ b/python/ray/tune/schedulers/hb_bohb.py @@ -1,9 +1,9 @@ import logging -from typing import Dict, Optional, TYPE_CHECKING +from typing import TYPE_CHECKING, Dict, Optional -from ray.tune.schedulers.trial_scheduler import TrialScheduler -from ray.tune.schedulers.hyperband import HyperBandScheduler from ray.tune.experiment import Trial +from ray.tune.schedulers.hyperband import HyperBandScheduler +from ray.tune.schedulers.trial_scheduler import TrialScheduler from ray.util import PublicAPI if TYPE_CHECKING: diff --git a/python/ray/tune/schedulers/hyperband.py b/python/ray/tune/schedulers/hyperband.py index fdaec21a7334..57503d97ee34 100644 --- a/python/ray/tune/schedulers/hyperband.py +++ b/python/ray/tune/schedulers/hyperband.py @@ -1,14 +1,14 @@ import collections -from typing import Dict, List, Optional, Tuple, TYPE_CHECKING +import logging +from typing import TYPE_CHECKING, Dict, List, Optional, Tuple import numpy as np -import logging -from ray.util.annotations import PublicAPI +from ray.tune.error import TuneError +from ray.tune.experiment import Trial from ray.tune.result import DEFAULT_METRIC from ray.tune.schedulers.trial_scheduler import FIFOScheduler, TrialScheduler -from ray.tune.experiment import Trial -from ray.tune.error import TuneError +from ray.util.annotations import PublicAPI if TYPE_CHECKING: from ray.tune.execution.tune_controller import TuneController diff --git a/python/ray/tune/schedulers/median_stopping_rule.py b/python/ray/tune/schedulers/median_stopping_rule.py index ffe1e31f330c..9b5cb35f7a53 100644 --- a/python/ray/tune/schedulers/median_stopping_rule.py +++ b/python/ray/tune/schedulers/median_stopping_rule.py @@ -1,11 +1,11 @@ import collections import logging -from typing import Dict, List, Optional, TYPE_CHECKING +from typing import TYPE_CHECKING, Dict, List, Optional import numpy as np -from ray.tune.result import DEFAULT_METRIC from ray.tune.experiment import Trial +from ray.tune.result import DEFAULT_METRIC from ray.tune.schedulers.trial_scheduler import FIFOScheduler, TrialScheduler from ray.util.annotations import PublicAPI diff --git 
a/python/ray/tune/schedulers/pb2.py b/python/ray/tune/schedulers/pb2.py index 564c1b20c75f..f41756635073 100644 --- a/python/ray/tune/schedulers/pb2.py +++ b/python/ray/tune/schedulers/pb2.py @@ -1,6 +1,7 @@ -from typing import Callable, Dict, Optional, Tuple, Union, TYPE_CHECKING -from copy import deepcopy import logging +from copy import deepcopy +from typing import TYPE_CHECKING, Callable, Dict, Optional, Tuple, Union + import numpy as np import pandas as pd @@ -31,12 +32,12 @@ def import_pb2_dependencies(): if GPy and has_sklearn: from ray.tune.schedulers.pb2_utils import ( + UCB, + TV_SquaredExp, normalize, optimize_acq, select_length, - UCB, standardize, - TV_SquaredExp, ) logger = logging.getLogger(__name__) diff --git a/python/ray/tune/schedulers/pb2_utils.py b/python/ray/tune/schedulers/pb2_utils.py index 07a340794afc..cdd06af9b4ef 100644 --- a/python/ray/tune/schedulers/pb2_utils.py +++ b/python/ray/tune/schedulers/pb2_utils.py @@ -1,9 +1,8 @@ -import numpy as np -from scipy.optimize import minimize - import GPy -from GPy.kern import Kern +import numpy as np from GPy.core import Param +from GPy.kern import Kern +from scipy.optimize import minimize from sklearn.metrics import pairwise_distances from sklearn.metrics.pairwise import euclidean_distances diff --git a/python/ray/tune/schedulers/pbt.py b/python/ray/tune/schedulers/pbt.py index fd854328f366..0c389f76dcd0 100644 --- a/python/ray/tune/schedulers/pbt.py +++ b/python/ray/tune/schedulers/pbt.py @@ -7,19 +7,19 @@ import shutil import warnings from pathlib import Path -from typing import Callable, Dict, List, Optional, Tuple, Union, TYPE_CHECKING +from typing import TYPE_CHECKING, Callable, Dict, List, Optional, Tuple, Union from ray.air.constants import TRAINING_ITERATION from ray.train import Checkpoint -from ray.train._internal.session import _TrainingResult, _FutureTrainingResult +from ray.train._internal.session import _FutureTrainingResult, _TrainingResult from ray.tune.error import TuneError +from ray.tune.experiment import Trial from ray.tune.result import DEFAULT_METRIC +from ray.tune.schedulers.trial_scheduler import FIFOScheduler, TrialScheduler from ray.tune.search import SearchGenerator -from ray.tune.utils.util import SafeFallbackEncoder from ray.tune.search.sample import Domain, Function -from ray.tune.schedulers import FIFOScheduler, TrialScheduler from ray.tune.search.variant_generator import format_vars -from ray.tune.experiment import Trial +from ray.tune.utils.util import SafeFallbackEncoder from ray.util import PublicAPI from ray.util.debug import log_once diff --git a/python/ray/tune/schedulers/resource_changing_scheduler.py b/python/ray/tune/schedulers/resource_changing_scheduler.py index 669139360d10..24d437cf892f 100644 --- a/python/ray/tune/schedulers/resource_changing_scheduler.py +++ b/python/ray/tune/schedulers/resource_changing_scheduler.py @@ -1,16 +1,16 @@ -from copy import deepcopy -import numpy as np import logging -from typing import Dict, Any, List, Optional, Set, Tuple, Union, Callable, TYPE_CHECKING - import pickle import warnings +from copy import deepcopy +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple, Union + +import numpy as np from ray.air.execution.resources.request import _sum_bundles -from ray.util.annotations import PublicAPI -from ray.tune.schedulers.trial_scheduler import FIFOScheduler, TrialScheduler -from ray.tune.experiment import Trial from ray.tune.execution.placement_groups import PlacementGroupFactory +from ray.tune.experiment import 
Trial +from ray.tune.schedulers.trial_scheduler import FIFOScheduler, TrialScheduler +from ray.util.annotations import PublicAPI if TYPE_CHECKING: from ray.tune.execution.tune_controller import TuneController diff --git a/python/ray/tune/schedulers/trial_scheduler.py b/python/ray/tune/schedulers/trial_scheduler.py index 6991dd1da412..d99db5d24704 100644 --- a/python/ray/tune/schedulers/trial_scheduler.py +++ b/python/ray/tune/schedulers/trial_scheduler.py @@ -1,8 +1,8 @@ -from typing import Dict, Optional, TYPE_CHECKING +from typing import TYPE_CHECKING, Dict, Optional from ray.air._internal.usage import tag_scheduler -from ray.tune.result import DEFAULT_METRIC from ray.tune.experiment import Trial +from ray.tune.result import DEFAULT_METRIC from ray.util.annotations import DeveloperAPI, PublicAPI if TYPE_CHECKING: diff --git a/python/ray/tune/search/__init__.py b/python/ray/tune/search/__init__.py index 8a20ca95d0dc..d5d9b32753d8 100644 --- a/python/ray/tune/search/__init__.py +++ b/python/ray/tune/search/__init__.py @@ -1,13 +1,11 @@ -from ray.tune.search.search_algorithm import SearchAlgorithm -from ray.tune.search.searcher import Searcher +from ray._private.utils import get_function_args +from ray.tune.search.basic_variant import BasicVariantGenerator from ray.tune.search.concurrency_limiter import ConcurrencyLimiter from ray.tune.search.repeater import Repeater - -from ray.tune.search.basic_variant import BasicVariantGenerator -from ray.tune.search.variant_generator import grid_search +from ray.tune.search.search_algorithm import SearchAlgorithm from ray.tune.search.search_generator import SearchGenerator - -from ray._private.utils import get_function_args +from ray.tune.search.searcher import Searcher +from ray.tune.search.variant_generator import grid_search from ray.util import PublicAPI diff --git a/python/ray/tune/search/_mock.py b/python/ray/tune/search/_mock.py index b602ec281aeb..f1e8eb0b2140 100644 --- a/python/ray/tune/search/_mock.py +++ b/python/ray/tune/search/_mock.py @@ -1,8 +1,8 @@ from typing import Dict, List, Optional -from ray.tune.search import Searcher, ConcurrencyLimiter -from ray.tune.search.search_generator import SearchGenerator from ray.tune.experiment import Trial +from ray.tune.search import ConcurrencyLimiter, Searcher +from ray.tune.search.search_generator import SearchGenerator class _MockSearcher(Searcher): diff --git a/python/ray/tune/search/ax/ax_search.py b/python/ray/tune/search/ax/ax_search.py index 3bc6c3dfbc65..12ff400d7b37 100644 --- a/python/ray/tune/search/ax/ax_search.py +++ b/python/ray/tune/search/ax/ax_search.py @@ -1,9 +1,17 @@ import copy -import numpy as np +import logging from typing import Dict, List, Optional, Union +import numpy as np + from ray import cloudpickle from ray.tune.result import DEFAULT_METRIC +from ray.tune.search import ( + UNDEFINED_METRIC_MODE, + UNDEFINED_SEARCH_SPACE, + UNRESOLVED_SEARCH_SPACE, + Searcher, +) from ray.tune.search.sample import ( Categorical, Float, @@ -12,12 +20,6 @@ Quantized, Uniform, ) -from ray.tune.search import ( - UNRESOLVED_SEARCH_SPACE, - UNDEFINED_METRIC_MODE, - UNDEFINED_SEARCH_SPACE, - Searcher, -) from ray.tune.search.variant_generator import parse_spec_vars from ray.tune.utils.util import flatten_dict, unflatten_list_dict @@ -34,7 +36,6 @@ except ImportError: MaxParallelismReachedException = DataRequiredError = Exception -import logging logger = logging.getLogger(__name__) diff --git a/python/ray/tune/search/basic_variant.py b/python/ray/tune/search/basic_variant.py index 
5676bdc0dbcb..c9a59bd95ffd 100644 --- a/python/ray/tune/search/basic_variant.py +++ b/python/ray/tune/search/basic_variant.py @@ -1,25 +1,26 @@ import copy import itertools import os -from pathlib import Path import uuid -from typing import Dict, List, Optional, Union, TYPE_CHECKING import warnings +from pathlib import Path +from typing import TYPE_CHECKING, Dict, List, Optional, Union + import numpy as np from ray.air._internal.usage import tag_searcher from ray.tune.error import TuneError -from ray.tune.experiment.config_parser import _make_parser, _create_trial_from_spec -from ray.tune.search.sample import np_random_generator, _BackwardsCompatibleNumpyRng +from ray.tune.experiment.config_parser import _create_trial_from_spec, _make_parser +from ray.tune.search.sample import _BackwardsCompatibleNumpyRng, np_random_generator +from ray.tune.search.search_algorithm import SearchAlgorithm from ray.tune.search.variant_generator import ( - _count_variants, _count_spec_samples, - generate_variants, - format_vars, + _count_variants, _flatten_resolved_vars, _get_preset_variants, + format_vars, + generate_variants, ) -from ray.tune.search.search_algorithm import SearchAlgorithm from ray.tune.utils.util import _atomic_save, _load_newest_checkpoint from ray.util import PublicAPI diff --git a/python/ray/tune/search/bayesopt/bayesopt_search.py b/python/ray/tune/search/bayesopt/bayesopt_search.py index 63eee05f471f..475c0f59ad01 100644 --- a/python/ray/tune/search/bayesopt/bayesopt_search.py +++ b/python/ray/tune/search/bayesopt/bayesopt_search.py @@ -1,18 +1,19 @@ -from collections import defaultdict +import json import logging import pickle -import json -from typing import Dict, List, Optional, Tuple, Any, TYPE_CHECKING +from collections import defaultdict +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple from ray.tune.result import DEFAULT_METRIC -from ray.tune.search.sample import Domain, Float, Quantized, Uniform from ray.tune.search import ( - UNRESOLVED_SEARCH_SPACE, UNDEFINED_METRIC_MODE, UNDEFINED_SEARCH_SPACE, + UNRESOLVED_SEARCH_SPACE, Searcher, ) +from ray.tune.search.sample import Domain, Float, Quantized, Uniform from ray.tune.search.variant_generator import parse_spec_vars +from ray.tune.utils import flatten_dict from ray.tune.utils.util import is_nan_or_inf, unflatten_dict try: # Python 3 only -- needed for lint test. 
@@ -20,7 +21,6 @@ except ImportError: byo = None -from ray.tune.utils import flatten_dict if TYPE_CHECKING: from ray.tune import ExperimentAnalysis diff --git a/python/ray/tune/search/bohb/bohb_search.py b/python/ray/tune/search/bohb/bohb_search.py index ee1c44868369..5fa701b700f7 100644 --- a/python/ray/tune/search/bohb/bohb_search.py +++ b/python/ray/tune/search/bohb/bohb_search.py @@ -3,13 +3,18 @@ import copy import logging import math +from typing import Dict, List, Optional, Union # use cloudpickle instead of pickle to make BOHB obj # pickleable from ray import cloudpickle -from typing import Dict, List, Optional, Union - from ray.tune.result import DEFAULT_METRIC +from ray.tune.search import ( + UNDEFINED_METRIC_MODE, + UNDEFINED_SEARCH_SPACE, + UNRESOLVED_SEARCH_SPACE, + Searcher, +) from ray.tune.search.sample import ( Categorical, Domain, @@ -20,12 +25,6 @@ Quantized, Uniform, ) -from ray.tune.search import ( - UNRESOLVED_SEARCH_SPACE, - UNDEFINED_METRIC_MODE, - UNDEFINED_SEARCH_SPACE, - Searcher, -) from ray.tune.search.variant_generator import parse_spec_vars from ray.tune.utils.util import flatten_dict, unflatten_list_dict diff --git a/python/ray/tune/search/concurrency_limiter.py b/python/ray/tune/search/concurrency_limiter.py index 383296ffa049..847db982ea88 100644 --- a/python/ray/tune/search/concurrency_limiter.py +++ b/python/ray/tune/search/concurrency_limiter.py @@ -1,12 +1,11 @@ import copy import logging -from typing import Dict, Optional, List +from typing import Dict, List, Optional from ray.tune.search.searcher import Searcher from ray.tune.search.util import _set_search_properties_backwards_compatible from ray.util.annotations import PublicAPI - logger = logging.getLogger(__name__) diff --git a/python/ray/tune/search/hebo/hebo_search.py b/python/ray/tune/search/hebo/hebo_search.py index 9f40bd8fe2a6..7145960d301c 100644 --- a/python/ray/tune/search/hebo/hebo_search.py +++ b/python/ray/tune/search/hebo/hebo_search.py @@ -6,6 +6,12 @@ import pandas as pd from ray.tune.result import DEFAULT_METRIC +from ray.tune.search import ( + UNDEFINED_METRIC_MODE, + UNDEFINED_SEARCH_SPACE, + UNRESOLVED_SEARCH_SPACE, + Searcher, +) from ray.tune.search.sample import ( Categorical, Domain, @@ -15,12 +21,6 @@ Quantized, Uniform, ) -from ray.tune.search import ( - UNRESOLVED_SEARCH_SPACE, - UNDEFINED_METRIC_MODE, - UNDEFINED_SEARCH_SPACE, - Searcher, -) from ray.tune.search.variant_generator import parse_spec_vars from ray.tune.utils.util import is_nan_or_inf, unflatten_dict, validate_warmstart diff --git a/python/ray/tune/search/hyperopt/hyperopt_search.py b/python/ray/tune/search/hyperopt/hyperopt_search.py index ef4713733368..4988325dde2d 100644 --- a/python/ray/tune/search/hyperopt/hyperopt_search.py +++ b/python/ray/tune/search/hyperopt/hyperopt_search.py @@ -1,14 +1,20 @@ -from typing import Any, Dict, List, Optional - -import numpy as np import copy import logging from functools import partial +from typing import Any, Dict, List, Optional + +import numpy as np # Use cloudpickle instead of pickle to make lambda funcs in HyperOpt pickleable from ray import cloudpickle - +from ray.tune.error import TuneError from ray.tune.result import DEFAULT_METRIC +from ray.tune.search import ( + UNDEFINED_METRIC_MODE, + UNDEFINED_SEARCH_SPACE, + UNRESOLVED_SEARCH_SPACE, + Searcher, +) from ray.tune.search.sample import ( Categorical, Domain, @@ -19,12 +25,6 @@ Quantized, Uniform, ) -from ray.tune.search import ( - UNRESOLVED_SEARCH_SPACE, - UNDEFINED_METRIC_MODE, - UNDEFINED_SEARCH_SPACE, - 
Searcher, -) from ray.tune.search.variant_generator import assign_value, parse_spec_vars from ray.tune.utils import flatten_dict @@ -37,7 +37,6 @@ hpo = None Apply = None -from ray.tune.error import TuneError logger = logging.getLogger(__name__) @@ -522,17 +521,25 @@ def resolve_value(par: str, domain: Domain) -> Any: return hpo.hp.choice( par, [ - HyperOptSearch.convert_search_space(category, prefix=par) - if isinstance(category, dict) - else HyperOptSearch.convert_search_space( - dict(enumerate(category)), prefix=f"{par}/{i}" + ( + HyperOptSearch.convert_search_space( + category, prefix=par + ) + if isinstance(category, dict) + else ( + HyperOptSearch.convert_search_space( + dict(enumerate(category)), prefix=f"{par}/{i}" + ) + if isinstance(category, list) + and len(category) > 0 + and isinstance(category[0], Domain) + else ( + resolve_value(f"{par}/{i}", category) + if isinstance(category, Domain) + else category + ) + ) ) - if isinstance(category, list) - and len(category) > 0 - and isinstance(category[0], Domain) - else resolve_value(f"{par}/{i}", category) - if isinstance(category, Domain) - else category for i, category in enumerate(domain.categories) ], ) diff --git a/python/ray/tune/search/nevergrad/nevergrad_search.py b/python/ray/tune/search/nevergrad/nevergrad_search.py index 5b57069373e5..1e463f277053 100644 --- a/python/ray/tune/search/nevergrad/nevergrad_search.py +++ b/python/ray/tune/search/nevergrad/nevergrad_search.py @@ -1,9 +1,15 @@ import inspect import logging import pickle -from typing import Dict, Optional, Type, Union, List, Sequence +from typing import Dict, List, Optional, Sequence, Type, Union from ray.tune.result import DEFAULT_METRIC +from ray.tune.search import ( + UNDEFINED_METRIC_MODE, + UNDEFINED_SEARCH_SPACE, + UNRESOLVED_SEARCH_SPACE, + Searcher, +) from ray.tune.search.sample import ( Categorical, Domain, @@ -12,12 +18,6 @@ LogUniform, Quantized, ) -from ray.tune.search import ( - UNRESOLVED_SEARCH_SPACE, - UNDEFINED_METRIC_MODE, - UNDEFINED_SEARCH_SPACE, - Searcher, -) from ray.tune.search.variant_generator import parse_spec_vars from ray.tune.utils.util import flatten_dict, unflatten_dict diff --git a/python/ray/tune/search/optuna/optuna_search.py b/python/ray/tune/search/optuna/optuna_search.py index 3a23388f044a..f1656039a4af 100644 --- a/python/ray/tune/search/optuna/optuna_search.py +++ b/python/ray/tune/search/optuna/optuna_search.py @@ -1,13 +1,20 @@ -import time +import functools import logging import pickle -import functools +import time import warnings -from packaging import version from typing import Any, Callable, Dict, List, Optional, Tuple, Union +from packaging import version + from ray.air.constants import TRAINING_ITERATION from ray.tune.result import DEFAULT_METRIC +from ray.tune.search import ( + UNDEFINED_METRIC_MODE, + UNDEFINED_SEARCH_SPACE, + UNRESOLVED_SEARCH_SPACE, + Searcher, +) from ray.tune.search.sample import ( Categorical, Domain, @@ -17,12 +24,6 @@ Quantized, Uniform, ) -from ray.tune.search import ( - UNRESOLVED_SEARCH_SPACE, - UNDEFINED_METRIC_MODE, - UNDEFINED_SEARCH_SPACE, - Searcher, -) from ray.tune.search.variant_generator import parse_spec_vars from ray.tune.utils.util import flatten_dict, unflatten_dict, validate_warmstart @@ -30,8 +31,8 @@ import optuna as ot from optuna.distributions import BaseDistribution as OptunaDistribution from optuna.samplers import BaseSampler - from optuna.trial import TrialState as OptunaTrialState from optuna.trial import Trial as OptunaTrial + from optuna.trial import TrialState 
as OptunaTrialState except ImportError: ot = None OptunaDistribution = None diff --git a/python/ray/tune/search/repeater.py b/python/ray/tune/search/repeater.py index 838b31c1bb99..c9de4a156091 100644 --- a/python/ray/tune/search/repeater.py +++ b/python/ray/tune/search/repeater.py @@ -4,7 +4,7 @@ import numpy as np -from ray.tune.search import Searcher +from ray.tune.search.searcher import Searcher from ray.tune.search.util import _set_search_properties_backwards_compatible from ray.util import PublicAPI diff --git a/python/ray/tune/search/search_algorithm.py b/python/ray/tune/search/search_algorithm.py index eec8b4d98d2c..8ae5154c976b 100644 --- a/python/ray/tune/search/search_algorithm.py +++ b/python/ray/tune/search/search_algorithm.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional, Union, TYPE_CHECKING +from typing import TYPE_CHECKING, Dict, List, Optional, Union from ray.util.annotations import DeveloperAPI diff --git a/python/ray/tune/search/search_generator.py b/python/ray/tune/search/search_generator.py index 11d900508c1b..41f267f45214 100644 --- a/python/ray/tune/search/search_generator.py +++ b/python/ray/tune/search/search_generator.py @@ -3,18 +3,17 @@ from typing import Dict, List, Optional, Union from ray.tune.error import TuneError -from ray.tune.experiment import Experiment, _convert_to_experiment_list -from ray.tune.experiment.config_parser import _make_parser, _create_trial_from_spec +from ray.tune.experiment import Experiment, Trial, _convert_to_experiment_list +from ray.tune.experiment.config_parser import _create_trial_from_spec, _make_parser from ray.tune.search.search_algorithm import SearchAlgorithm -from ray.tune.search import Searcher +from ray.tune.search.searcher import Searcher from ray.tune.search.util import _set_search_properties_backwards_compatible -from ray.tune.search.variant_generator import format_vars, _resolve_nested_dict -from ray.tune.experiment import Trial +from ray.tune.search.variant_generator import _resolve_nested_dict, format_vars from ray.tune.utils.util import ( - flatten_dict, - merge_dicts, _atomic_save, _load_newest_checkpoint, + flatten_dict, + merge_dicts, ) from ray.util.annotations import DeveloperAPI diff --git a/python/ray/tune/search/searcher.py b/python/ray/tune/search/searcher.py index 46c3ab24d538..5f018c55b27b 100644 --- a/python/ray/tune/search/searcher.py +++ b/python/ray/tune/search/searcher.py @@ -3,7 +3,7 @@ import logging import os import warnings -from typing import Dict, Optional, List, Union, Any, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union from ray.air._internal.usage import tag_searcher from ray.tune.search.util import _set_search_properties_backwards_compatible @@ -11,8 +11,8 @@ from ray.util.debug import log_once if TYPE_CHECKING: - from ray.tune.experiment import Trial from ray.tune.analysis import ExperimentAnalysis + from ray.tune.experiment import Trial logger = logging.getLogger(__name__) @@ -227,8 +227,8 @@ def add_evaluated_trials( raise NotImplementedError # lazy imports to avoid circular dependencies - from ray.tune.experiment import Trial from ray.tune.analysis import ExperimentAnalysis + from ray.tune.experiment import Trial from ray.tune.result import DONE if isinstance(trials_or_analysis, (list, tuple)): diff --git a/python/ray/tune/search/variant_generator.py b/python/ray/tune/search/variant_generator.py index 41ab39d4c3a4..4da50c92e4af 100644 --- a/python/ray/tune/search/variant_generator.py +++ b/python/ray/tune/search/variant_generator.py @@ -1,11 
+1,11 @@ import copy import logging +import random import re from collections.abc import Mapping from typing import Any, Dict, Generator, Iterable, List, Optional, Tuple import numpy -import random from ray.tune.search.sample import Categorical, Domain, Function, RandomState from ray.util.annotations import DeveloperAPI, PublicAPI @@ -472,9 +472,9 @@ def _split_resolved_unresolved_values( _resolved_children, _unresolved_children, ) = _split_resolved_unresolved_values(v) - for (path, value) in _resolved_children.items(): + for path, value in _resolved_children.items(): resolved_vars[(k,) + path] = value - for (path, value) in _unresolved_children.items(): + for path, value in _unresolved_children.items(): unresolved_vars[(k,) + path] = value elif isinstance(v, (list, tuple)): # Recurse into a list @@ -483,9 +483,9 @@ def _split_resolved_unresolved_values( _resolved_children, _unresolved_children, ) = _split_resolved_unresolved_values({i: elem}) - for (path, value) in _resolved_children.items(): + for path, value in _resolved_children.items(): resolved_vars[(k,) + path] = value - for (path, value) in _unresolved_children.items(): + for path, value in _unresolved_children.items(): unresolved_vars[(k,) + path] = value else: resolved_vars[(k,)] = v diff --git a/python/ray/tune/search/zoopt/zoopt_search.py b/python/ray/tune/search/zoopt/zoopt_search.py index b91c32affffc..d5ec4e423f97 100644 --- a/python/ray/tune/search/zoopt/zoopt_search.py +++ b/python/ray/tune/search/zoopt/zoopt_search.py @@ -5,6 +5,12 @@ import ray import ray.cloudpickle as pickle from ray.tune.result import DEFAULT_METRIC +from ray.tune.search import ( + UNDEFINED_METRIC_MODE, + UNDEFINED_SEARCH_SPACE, + UNRESOLVED_SEARCH_SPACE, + Searcher, +) from ray.tune.search.sample import ( Categorical, Domain, @@ -13,12 +19,6 @@ Quantized, Uniform, ) -from ray.tune.search import ( - UNRESOLVED_SEARCH_SPACE, - UNDEFINED_METRIC_MODE, - UNDEFINED_SEARCH_SPACE, - Searcher, -) from ray.tune.search.variant_generator import parse_spec_vars from ray.tune.utils.util import unflatten_dict diff --git a/python/ray/tune/stopper/__init__.py b/python/ray/tune/stopper/__init__.py index 0498d1ac7436..8fd3224875f4 100644 --- a/python/ray/tune/stopper/__init__.py +++ b/python/ray/tune/stopper/__init__.py @@ -1,8 +1,8 @@ -from ray.tune.stopper.stopper import Stopper, CombinedStopper from ray.tune.stopper.experiment_plateau import ExperimentPlateauStopper from ray.tune.stopper.function_stopper import FunctionStopper from ray.tune.stopper.maximum_iteration import MaximumIterationStopper from ray.tune.stopper.noop import NoopStopper +from ray.tune.stopper.stopper import CombinedStopper, Stopper from ray.tune.stopper.timeout import TimeoutStopper from ray.tune.stopper.trial_plateau import TrialPlateauStopper diff --git a/python/ray/tune/stopper/experiment_plateau.py b/python/ray/tune/stopper/experiment_plateau.py index 4e97f61f2d68..24bb1bf64c5f 100644 --- a/python/ray/tune/stopper/experiment_plateau.py +++ b/python/ray/tune/stopper/experiment_plateau.py @@ -1,7 +1,7 @@ import numpy as np -from ray.util.annotations import PublicAPI from ray.tune.stopper.stopper import Stopper +from ray.util.annotations import PublicAPI @PublicAPI diff --git a/python/ray/tune/stopper/function_stopper.py b/python/ray/tune/stopper/function_stopper.py index ef3768088916..51d53d30bd7b 100644 --- a/python/ray/tune/stopper/function_stopper.py +++ b/python/ray/tune/stopper/function_stopper.py @@ -1,7 +1,7 @@ -from typing import Dict, Callable +from typing import Callable, Dict 
-from ray.util.annotations import PublicAPI from ray.tune.stopper.stopper import Stopper +from ray.util.annotations import PublicAPI @PublicAPI diff --git a/python/ray/tune/stopper/maximum_iteration.py b/python/ray/tune/stopper/maximum_iteration.py index f71dd43b87c4..5795ba6ec49e 100644 --- a/python/ray/tune/stopper/maximum_iteration.py +++ b/python/ray/tune/stopper/maximum_iteration.py @@ -1,8 +1,8 @@ -from typing import Dict from collections import defaultdict +from typing import Dict -from ray.util.annotations import PublicAPI from ray.tune.stopper.stopper import Stopper +from ray.util.annotations import PublicAPI @PublicAPI diff --git a/python/ray/tune/stopper/noop.py b/python/ray/tune/stopper/noop.py index 911eec5390e6..3554f11b4c1c 100644 --- a/python/ray/tune/stopper/noop.py +++ b/python/ray/tune/stopper/noop.py @@ -1,5 +1,5 @@ -from ray.util.annotations import PublicAPI from ray.tune.stopper.stopper import Stopper +from ray.util.annotations import PublicAPI @PublicAPI diff --git a/python/ray/tune/stopper/timeout.py b/python/ray/tune/stopper/timeout.py index f3a69c53739d..0789669cce60 100644 --- a/python/ray/tune/stopper/timeout.py +++ b/python/ray/tune/stopper/timeout.py @@ -1,10 +1,10 @@ import datetime -from typing import Union import time +from typing import Union from ray import logger -from ray.util.annotations import PublicAPI from ray.tune.stopper.stopper import Stopper +from ray.util.annotations import PublicAPI @PublicAPI diff --git a/python/ray/tune/stopper/trial_plateau.py b/python/ray/tune/stopper/trial_plateau.py index 9757df32ef3e..eb230608e0c3 100644 --- a/python/ray/tune/stopper/trial_plateau.py +++ b/python/ray/tune/stopper/trial_plateau.py @@ -1,9 +1,10 @@ -from typing import Dict, Optional from collections import defaultdict, deque +from typing import Dict, Optional + import numpy as np -from ray.util.annotations import PublicAPI from ray.tune.stopper.stopper import Stopper +from ray.util.annotations import PublicAPI @PublicAPI diff --git a/python/ray/tune/syncer.py b/python/ray/tune/syncer.py index 8c6ceb017a5a..db1ca47e12db 100644 --- a/python/ray/tune/syncer.py +++ b/python/ray/tune/syncer.py @@ -3,7 +3,6 @@ from ray.train._internal.syncer import SyncConfig as TrainSyncConfig from ray.util.annotations import Deprecated - logger = logging.getLogger(__name__) diff --git a/python/ray/tune/tests/_test_multi_tenancy_run.py b/python/ray/tune/tests/_test_multi_tenancy_run.py index 79311f6e6b1f..59aed09fdf88 100644 --- a/python/ray/tune/tests/_test_multi_tenancy_run.py +++ b/python/ray/tune/tests/_test_multi_tenancy_run.py @@ -1,10 +1,9 @@ -from pathlib import Path import os import time +from pathlib import Path from ray import train, tune from ray.train.data_parallel_trainer import DataParallelTrainer - from ray.tune.search import BasicVariantGenerator # Hang full script until this marker is deleted diff --git a/python/ray/tune/tests/_test_trial_runner_callbacks.py b/python/ray/tune/tests/_test_trial_runner_callbacks.py index 5c44abc58d2b..5853ad6be3ac 100644 --- a/python/ray/tune/tests/_test_trial_runner_callbacks.py +++ b/python/ray/tune/tests/_test_trial_runner_callbacks.py @@ -4,25 +4,23 @@ import tempfile import time import unittest -from unittest.mock import patch from collections import OrderedDict +from unittest.mock import patch import ray from ray import tune -from ray.air._internal.checkpoint_manager import _TrackedCheckpoint, CheckpointStorage +from ray.air._internal.checkpoint_manager import CheckpointStorage, _TrackedCheckpoint from ray.air.constants 
import TRAINING_ITERATION from ray.rllib import _register_all +from ray.tune import Callback +from ray.tune.callback import warnings from ray.tune.execution.ray_trial_executor import ( + RayTrialExecutor, _ExecutorEvent, _ExecutorEventType, - RayTrialExecutor, ) - -from ray.tune.callback import warnings -from ray.tune.experiment import Trial from ray.tune.execution.trial_runner import TrialRunner -from ray.tune import Callback -from ray.tune.experiment import Experiment +from ray.tune.experiment import Experiment, Trial class TestCallback(Callback): diff --git a/python/ray/tune/tests/_test_trial_runner_pg.py b/python/ray/tune/tests/_test_trial_runner_pg.py index 0886f0a71519..82d4757a67c5 100644 --- a/python/ray/tune/tests/_test_trial_runner_pg.py +++ b/python/ray/tune/tests/_test_trial_runner_pg.py @@ -1,19 +1,20 @@ -import sys import os +import sys import time -import numpy as np import unittest +import numpy as np + import ray from ray import tune -from ray.tune.execution.ray_trial_executor import RayTrialExecutor -from ray.tune.experiment import Trial +from ray.cluster_utils import Cluster +from ray.rllib import _register_all from ray.tune import Callback -from ray.tune.execution.trial_runner import TrialRunner from ray.tune.execution.placement_groups import PlacementGroupFactory +from ray.tune.execution.ray_trial_executor import RayTrialExecutor +from ray.tune.execution.trial_runner import TrialRunner +from ray.tune.experiment import Trial from ray.util import placement_group_table -from ray.cluster_utils import Cluster -from ray.rllib import _register_all class TrialRunnerPlacementGroupTest(unittest.TestCase): diff --git a/python/ray/tune/tests/conftest.py b/python/ray/tune/tests/conftest.py index fd6fd0563759..ac67537190f3 100644 --- a/python/ray/tune/tests/conftest.py +++ b/python/ray/tune/tests/conftest.py @@ -1,14 +1,14 @@ -# Trigger pytest hook to automatically zip test cluster logs to archive dir on failure -from ray.tests.conftest import pytest_runtest_makereport # noqa -from ray.tests.conftest import propagate_logs # noqa - - import logging + import boto3 import pytest -from ray.air._internal.uri_utils import URI from ray._private.test_utils import simulate_storage +from ray.air._internal.uri_utils import URI + +# Trigger pytest hook to automatically zip test cluster logs to archive dir on failure +from ray.tests.conftest import propagate_logs # noqa +from ray.tests.conftest import pytest_runtest_makereport # noqa @pytest.fixture diff --git a/python/ray/tune/tests/execution/conftest.py b/python/ray/tune/tests/execution/conftest.py index ad1b6d49c3bd..53e343d0106c 100644 --- a/python/ray/tune/tests/execution/conftest.py +++ b/python/ray/tune/tests/execution/conftest.py @@ -1,3 +1,3 @@ # Trigger pytest hook to automatically zip test cluster logs to archive dir on failure -from ray.tests.conftest import pytest_runtest_makereport # noqa from ray.tests.conftest import propagate_logs # noqa +from ray.tests.conftest import pytest_runtest_makereport # noqa diff --git a/python/ray/tune/tests/execution/test_actor_caching.py b/python/ray/tune/tests/execution/test_actor_caching.py index 6808f6c92ffd..ebaaa6b3d93e 100644 --- a/python/ray/tune/tests/execution/test_actor_caching.py +++ b/python/ray/tune/tests/execution/test_actor_caching.py @@ -1,10 +1,10 @@ -import pytest import sys +import pytest + import ray from ray.tune import PlacementGroupFactory - -from ray.tune.tests.execution.utils import create_execution_test_objects, TestingTrial +from ray.tune.tests.execution.utils import 
TestingTrial, create_execution_test_objects @pytest.fixture diff --git a/python/ray/tune/tests/execution/test_controller_callback_integration.py b/python/ray/tune/tests/execution/test_controller_callback_integration.py index 356c775499dd..0476e158fe3d 100644 --- a/python/ray/tune/tests/execution/test_controller_callback_integration.py +++ b/python/ray/tune/tests/execution/test_controller_callback_integration.py @@ -1,16 +1,15 @@ +import sys from typing import Dict, Optional import pytest -import sys import ray from ray.air.execution import FixedResourceManager, PlacementGroupResourceManager +from ray.train.tests.util import mock_storage_context from ray.tune import Callback, ResumeConfig from ray.tune.execution.tune_controller import TuneController from ray.tune.experiment import Trial -from ray.train.tests.util import mock_storage_context - @pytest.fixture(scope="function") def ray_start_4_cpus_2_gpus_extra(): diff --git a/python/ray/tune/tests/execution/test_controller_checkpointing_integration.py b/python/ray/tune/tests/execution/test_controller_checkpointing_integration.py index ff8908a62094..b54d548fc7d0 100644 --- a/python/ray/tune/tests/execution/test_controller_checkpointing_integration.py +++ b/python/ray/tune/tests/execution/test_controller_checkpointing_integration.py @@ -1,29 +1,26 @@ import json import logging import os +import sys import tempfile +import time from unittest import mock import pytest -import sys -import time - import ray -from ray.train import CheckpointConfig -from ray.air.execution import FixedResourceManager, PlacementGroupResourceManager from ray.air.constants import TRAINING_ITERATION -from ray.train import Checkpoint +from ray.air.execution import FixedResourceManager, PlacementGroupResourceManager +from ray.train import Checkpoint, CheckpointConfig from ray.train._internal.session import _TrainingResult from ray.train._internal.storage import StorageContext +from ray.train.tests.util import mock_storage_context from ray.tune import PlacementGroupFactory, ResumeConfig from ray.tune.execution.tune_controller import TuneController from ray.tune.experiment import Trial from ray.tune.result import DONE from ray.tune.schedulers import FIFOScheduler from ray.tune.search import BasicVariantGenerator - -from ray.train.tests.util import mock_storage_context from ray.tune.tests.tune_test_util import TrialResultObserver STORAGE = mock_storage_context() diff --git a/python/ray/tune/tests/execution/test_controller_control_integration.py b/python/ray/tune/tests/execution/test_controller_control_integration.py index 22db563b30fb..a47ebc3e9b33 100644 --- a/python/ray/tune/tests/execution/test_controller_control_integration.py +++ b/python/ray/tune/tests/execution/test_controller_control_integration.py @@ -1,18 +1,15 @@ +import sys from collections import Counter import pytest -import sys import ray from ray.air.execution import FixedResourceManager, PlacementGroupResourceManager +from ray.train.tests.util import mock_storage_context from ray.tune import PlacementGroupFactory, register_trainable from ray.tune.execution.tune_controller import TuneController from ray.tune.experiment import Trial - -from ray.train.tests.util import mock_storage_context - - STORAGE = mock_storage_context() diff --git a/python/ray/tune/tests/execution/test_controller_errors_integration.py b/python/ray/tune/tests/execution/test_controller_errors_integration.py index 4f87b5e3afa0..437cdbc271e2 100644 --- a/python/ray/tune/tests/execution/test_controller_errors_integration.py +++ 
b/python/ray/tune/tests/execution/test_controller_errors_integration.py @@ -1,23 +1,21 @@ import os +import sys from collections import Counter import pytest -import sys import ray -from ray.train import CheckpointConfig from ray.air.execution import FixedResourceManager, PlacementGroupResourceManager +from ray.train import CheckpointConfig +from ray.train.tests.util import mock_storage_context from ray.tune import PlacementGroupFactory, TuneError from ray.tune.execution.tune_controller import TuneController from ray.tune.experiment import Trial from ray.tune.registry import TRAINABLE_CLASS, _global_registry from ray.tune.schedulers import FIFOScheduler from ray.tune.search import BasicVariantGenerator - -from ray.train.tests.util import mock_storage_context from ray.tune.tests.execution.utils import BudgetResourceManager - STORAGE = mock_storage_context() diff --git a/python/ray/tune/tests/execution/test_controller_resources_integration.py b/python/ray/tune/tests/execution/test_controller_resources_integration.py index 7fc22ba0fb87..eb547d04ef00 100644 --- a/python/ray/tune/tests/execution/test_controller_resources_integration.py +++ b/python/ray/tune/tests/execution/test_controller_resources_integration.py @@ -1,13 +1,14 @@ import os +import sys import time from collections import Counter import pytest -import sys import ray from ray import tune from ray.air.execution import FixedResourceManager, PlacementGroupResourceManager +from ray.train.tests.util import mock_storage_context from ray.tune import PlacementGroupFactory, TuneError from ray.tune.execution.tune_controller import TuneController from ray.tune.experiment import Trial @@ -15,9 +16,6 @@ from ray.tune.search import BasicVariantGenerator from ray.tune.utils.mock import TrialStatusSnapshot, TrialStatusSnapshotTaker -from ray.train.tests.util import mock_storage_context - - STORAGE = mock_storage_context() diff --git a/python/ray/tune/tests/execution/test_controller_resume_integration.py b/python/ray/tune/tests/execution/test_controller_resume_integration.py index 6ddadff1b8b9..a3393241f21d 100644 --- a/python/ray/tune/tests/execution/test_controller_resume_integration.py +++ b/python/ray/tune/tests/execution/test_controller_resume_integration.py @@ -1,23 +1,21 @@ import os +import sys from unittest.mock import patch import pandas as pd import pytest -import sys import ray from ray import tune from ray.air.execution import FixedResourceManager, PlacementGroupResourceManager +from ray.rllib.algorithms.callbacks import DefaultCallbacks from ray.train import CheckpointConfig +from ray.train.tests.util import mock_storage_context from ray.tune import Experiment, PlacementGroupFactory from ray.tune.execution.tune_controller import TuneController from ray.tune.experiment import Trial from ray.tune.impl.placeholder import create_resolvers_map, inject_placeholders from ray.tune.search import BasicVariantGenerator -from ray.rllib.algorithms.callbacks import DefaultCallbacks - -from ray.train.tests.util import mock_storage_context - STORAGE = mock_storage_context() diff --git a/python/ray/tune/tests/execution/test_controller_search_alg_integration.py b/python/ray/tune/tests/execution/test_controller_search_alg_integration.py index f84b928ec359..0fedd9ad60b1 100644 --- a/python/ray/tune/tests/execution/test_controller_search_alg_integration.py +++ b/python/ray/tune/tests/execution/test_controller_search_alg_integration.py @@ -1,22 +1,21 @@ import os import pickle +import sys from collections import Counter import pytest -import sys import 
ray from ray.air.constants import TRAINING_ITERATION from ray.air.execution import FixedResourceManager, PlacementGroupResourceManager +from ray.train.tests.util import mock_storage_context from ray.tune import Experiment, PlacementGroupFactory from ray.tune.execution.tune_controller import TuneController from ray.tune.experiment import Trial from ray.tune.schedulers import FIFOScheduler, TrialScheduler -from ray.tune.search import Searcher, ConcurrencyLimiter, Repeater, SearchGenerator +from ray.tune.search import ConcurrencyLimiter, Repeater, Searcher, SearchGenerator from ray.tune.search._mock import _MockSuggestionAlgorithm -from ray.train.tests.util import mock_storage_context - class TestTuneController(TuneController): def __init__(self, *args, **kwargs): diff --git a/python/ray/tune/tests/execution/utils.py b/python/ray/tune/tests/execution/utils.py index 8803ee9d07d7..c9e5cd54ff65 100644 --- a/python/ray/tune/tests/execution/utils.py +++ b/python/ray/tune/tests/execution/utils.py @@ -5,18 +5,13 @@ import ray from ray.air.execution import FixedResourceManager from ray.air.execution._internal import RayActorManager -from ray.air.execution.resources import ( - ResourceManager, - ResourceRequest, -) - from ray.air.execution._internal.tracked_actor import TrackedActor +from ray.air.execution.resources import ResourceManager, ResourceRequest +from ray.train.tests.util import mock_storage_context from ray.tune.execution.tune_controller import TuneController from ray.tune.experiment import Trial from ray.tune.utils.resource_updater import _ResourceUpdater -from ray.train.tests.util import mock_storage_context - class NoopClassCache: def get(self, trainable_name: str): diff --git a/python/ray/tune/tests/output/conftest.py b/python/ray/tune/tests/output/conftest.py index ad1b6d49c3bd..53e343d0106c 100644 --- a/python/ray/tune/tests/output/conftest.py +++ b/python/ray/tune/tests/output/conftest.py @@ -1,3 +1,3 @@ # Trigger pytest hook to automatically zip test cluster logs to archive dir on failure -from ray.tests.conftest import pytest_runtest_makereport # noqa from ray.tests.conftest import propagate_logs # noqa +from ray.tests.conftest import pytest_runtest_makereport # noqa diff --git a/python/ray/tune/tests/output/test_output.py b/python/ray/tune/tests/output/test_output.py index c028f592d316..773254357aa9 100644 --- a/python/ray/tune/tests/output/test_output.py +++ b/python/ray/tune/tests/output/test_output.py @@ -1,31 +1,28 @@ import argparse - +import sys from unittest import mock import pytest -import sys - from freezegun import freeze_time from ray import tune from ray.air.constants import TRAINING_ITERATION +from ray.tune.experiment.trial import Trial from ray.tune.experimental.output import ( + AirVerbosity, + TrainReporter, + TuneTerminalReporter, + _best_trial_str, + _current_best_trial, + _get_dict_as_table_data, _get_time_str, - _get_trials_by_state, _get_trial_info, - _infer_user_metrics, - _max_len, - _current_best_trial, - _best_trial_str, _get_trial_table_data, - _get_dict_as_table_data, + _get_trials_by_state, _infer_params, - AirVerbosity, - TrainReporter, - TuneTerminalReporter, + _infer_user_metrics, + _max_len, ) -from ray.tune.experiment.trial import Trial - LAST_RESULT = { "custom_metrics": {}, diff --git a/python/ray/tune/tests/test_actor_reuse.py b/python/ray/tune/tests/test_actor_reuse.py index 37f02a3eaa35..c81d4e91c6f6 100644 --- a/python/ray/tune/tests/test_actor_reuse.py +++ b/python/ray/tune/tests/test_actor_reuse.py @@ -1,23 +1,23 @@ import inspect import 
os -from pathlib import Path +import sys import tempfile +import time +from pathlib import Path from typing import Callable + import pytest -import sys -import time import ray -from ray import train, tune, logger +from ray import logger, train, tune from ray.train import CheckpointConfig -from ray.tune import Trainable, run_experiments, register_trainable +from ray.train.tests.util import create_dict_checkpoint, load_dict_checkpoint +from ray.tune import Trainable, register_trainable, run_experiments from ray.tune.error import TuneError from ray.tune.result_grid import ResultGrid from ray.tune.schedulers.trial_scheduler import FIFOScheduler, TrialScheduler from ray.tune.tune import _check_mixin -from ray.train.tests.util import create_dict_checkpoint, load_dict_checkpoint - @pytest.fixture def ray_start_1_cpu(): diff --git a/python/ray/tune/tests/test_api.py b/python/ray/tune/tests/test_api.py index 3bef350c66cc..fd252fdd904a 100644 --- a/python/ray/tune/tests/test_api.py +++ b/python/ray/tune/tests/test_api.py @@ -15,61 +15,56 @@ import ray from ray import train, tune -from ray.train import CheckpointConfig from ray.air.constants import TIME_THIS_ITER_S, TRAINING_ITERATION from ray.rllib import _register_all +from ray.train import CheckpointConfig from ray.train._internal.session import shutdown_session from ray.train._internal.storage import ( StorageContext, - get_fs_and_path, _create_directory, + get_fs_and_path, ) from ray.train.constants import CHECKPOINT_DIR_NAME from ray.train.tests.util import create_dict_checkpoint, load_dict_checkpoint from ray.tune import ( + Stopper, + Trainable, + TuneError, register_env, register_trainable, run, run_experiments, - Trainable, - TuneError, - Stopper, ) from ray.tune.callback import Callback -from ray.tune.experiment import Experiment -from ray.tune.trainable import wrap_function -from ray.tune.logger import Logger, LegacyLoggerCallback +from ray.tune.execution.placement_groups import PlacementGroupFactory +from ray.tune.execution.tune_controller import TuneController +from ray.tune.experiment import Experiment, Trial +from ray.tune.logger import LegacyLoggerCallback, Logger from ray.tune.result import ( - TIMESTEPS_TOTAL, DONE, + EPISODES_TOTAL, + EXPERIMENT_TAG, HOSTNAME, NODE_IP, PID, - EPISODES_TOTAL, - TIMESTEPS_THIS_ITER, TIME_TOTAL_S, + TIMESTEPS_THIS_ITER, + TIMESTEPS_TOTAL, TRIAL_ID, - EXPERIMENT_TAG, -) -from ray.tune.schedulers import ( - TrialScheduler, - FIFOScheduler, - AsyncHyperBandScheduler, ) +from ray.tune.schedulers import AsyncHyperBandScheduler, FIFOScheduler, TrialScheduler from ray.tune.schedulers.pb2 import PB2 +from ray.tune.search import BasicVariantGenerator, ConcurrencyLimiter, grid_search +from ray.tune.search._mock import _MockSuggestionAlgorithm +from ray.tune.search.ax import AxSearch +from ray.tune.search.hyperopt import HyperOptSearch from ray.tune.stopper import ( + ExperimentPlateauStopper, MaximumIterationStopper, TrialPlateauStopper, - ExperimentPlateauStopper, ) -from ray.tune.search import BasicVariantGenerator, grid_search, ConcurrencyLimiter -from ray.tune.search._mock import _MockSuggestionAlgorithm -from ray.tune.search.ax import AxSearch -from ray.tune.search.hyperopt import HyperOptSearch -from ray.tune.experiment import Trial -from ray.tune.execution.tune_controller import TuneController +from ray.tune.trainable import wrap_function from ray.tune.utils import flatten_dict -from ray.tune.execution.placement_groups import PlacementGroupFactory class TrainableFunctionApiTest(unittest.TestCase): @@ 
-1074,6 +1069,7 @@ def load_checkpoint(self, state): def testLogToFile(self): def train_fn(config): import sys + from ray import logger for i in range(10): @@ -1141,9 +1137,10 @@ def train_fn(config): self.assertIn("LOG_STDERR", content) def testTimeout(self): - from ray.tune.stopper import TimeoutStopper import datetime + from ray.tune.stopper import TimeoutStopper + def train_fn(config): for i in range(20): train.report(dict(metric=i)) diff --git a/python/ray/tune/tests/test_api_checkpoint_integration.py b/python/ray/tune/tests/test_api_checkpoint_integration.py index 20ed0aeae822..08df47e56b4e 100644 --- a/python/ray/tune/tests/test_api_checkpoint_integration.py +++ b/python/ray/tune/tests/test_api_checkpoint_integration.py @@ -1,23 +1,22 @@ import os +import sys import tempfile from pathlib import Path import pytest -import sys import ray from ray import train from ray.air import ScalingConfig -from ray.train import CheckpointConfig -from ray.air.execution import FixedResourceManager from ray.air.constants import TRAINING_ITERATION +from ray.air.execution import FixedResourceManager +from ray.train import CheckpointConfig from ray.train._internal.storage import StorageContext +from ray.train.tests.util import mock_storage_context from ray.tune import Trainable, register_trainable from ray.tune.execution.tune_controller import TuneController from ray.tune.experiment import Trial -from ray.train.tests.util import mock_storage_context - STORAGE = mock_storage_context() diff --git a/python/ray/tune/tests/test_callbacks.py b/python/ray/tune/tests/test_callbacks.py index 376908485b54..031fb6351ce1 100644 --- a/python/ray/tune/tests/test_callbacks.py +++ b/python/ray/tune/tests/test_callbacks.py @@ -1,6 +1,7 @@ -import pytest from typing import Dict, Optional +import pytest + from ray.tune.callback import Callback, CallbackList diff --git a/python/ray/tune/tests/test_cluster.py b/python/ray/tune/tests/test_cluster.py index 7216bf078030..1721515e2b3d 100644 --- a/python/ray/tune/tests/test_cluster.py +++ b/python/ray/tune/tests/test_cluster.py @@ -1,19 +1,19 @@ -import time import os - -import pytest import sys +import time from unittest.mock import MagicMock +import pytest + import ray from ray import tune -from ray.train import CheckpointConfig from ray.cluster_utils import Cluster +from ray.train import CheckpointConfig from ray.train._internal.storage import StorageContext from ray.tune.error import TuneError -from ray.tune.search import BasicVariantGenerator -from ray.tune.experiment import Trial from ray.tune.execution.tune_controller import TuneController +from ray.tune.experiment import Trial +from ray.tune.search import BasicVariantGenerator def _check_trial_running(trial): diff --git a/python/ray/tune/tests/test_commands.py b/python/ray/tune/tests/test_commands.py index 2d13c41bd6d6..cfc494ac497c 100644 --- a/python/ray/tune/tests/test_commands.py +++ b/python/ray/tune/tests/test_commands.py @@ -1,25 +1,25 @@ -import click import os -import pytest import random import subprocess import sys import time from unittest import mock -try: - from cStringIO import StringIO -except ImportError: - from io import StringIO +import click +import pytest import ray import ray.train from ray import tune from ray.rllib import _register_all +from ray.train.tests.util import create_dict_checkpoint from ray.tune.cli import commands from ray.tune.result import CONFIG_PREFIX -from ray.train.tests.util import create_dict_checkpoint +try: + from cStringIO import StringIO +except ImportError: + from io 
import StringIO class Capturing: diff --git a/python/ray/tune/tests/test_convergence.py b/python/ray/tune/tests/test_convergence.py index 949354fd5a9e..9468fb166836 100644 --- a/python/ray/tune/tests/test_convergence.py +++ b/python/ray/tune/tests/test_convergence.py @@ -1,12 +1,13 @@ import math -import numpy as np +import unittest +import numpy as np import pytest + import ray from ray import train, tune -from ray.tune.stopper import ExperimentPlateauStopper from ray.tune.search import ConcurrencyLimiter -import unittest +from ray.tune.stopper import ExperimentPlateauStopper def loss(config): @@ -98,9 +99,10 @@ def testConvergenceHyperopt(self): assert math.isclose(analysis.best_config["x"], 0, abs_tol=1e-2) def testConvergenceNevergrad(self): - from ray.tune.search.nevergrad import NevergradSearch import nevergrad as ng + from ray.tune.search.nevergrad import NevergradSearch + np.random.seed(0) searcher = NevergradSearch(optimizer=ng.optimizers.PSO) analysis = self._testConvergence(searcher, patience=50, top=5) diff --git a/python/ray/tune/tests/test_experiment.py b/python/ray/tune/tests/test_experiment.py index 0045b929708e..f4315f259cca 100644 --- a/python/ray/tune/tests/test_experiment.py +++ b/python/ray/tune/tests/test_experiment.py @@ -5,8 +5,8 @@ import ray.train from ray.train import CheckpointConfig from ray.tune import register_trainable -from ray.tune.experiment import Experiment, _convert_to_experiment_list from ray.tune.error import TuneError +from ray.tune.experiment import Experiment, _convert_to_experiment_list from ray.tune.utils import diagnose_serialization @@ -112,7 +112,8 @@ def test(config): if __name__ == "__main__": - import pytest import sys + import pytest + sys.exit(pytest.main(["-v", __file__])) diff --git a/python/ray/tune/tests/test_experiment_analysis.py b/python/ray/tune/tests/test_experiment_analysis.py index 9e6d91568e10..f85a4721e399 100644 --- a/python/ray/tune/tests/test_experiment_analysis.py +++ b/python/ray/tune/tests/test_experiment_analysis.py @@ -1,26 +1,24 @@ -from contextlib import contextmanager -import tempfile import os -from pathlib import Path import pickle -import pandas as pd -import numpy as np +import tempfile +from contextlib import contextmanager +from pathlib import Path from typing import List +import numpy as np +import pandas as pd import pytest from ray import train, tune from ray.air._internal.uri_utils import URI from ray.air.constants import EXPR_PROGRESS_FILE, EXPR_RESULT_FILE from ray.train._internal.storage import _delete_fs_path +from ray.train.tests.test_new_persistence import mock_s3_bucket_uri +from ray.train.tests.util import create_dict_checkpoint, load_dict_checkpoint from ray.tune.analysis.experiment_analysis import ExperimentAnalysis from ray.tune.experiment import Trial from ray.tune.utils import flatten_dict -from ray.train.tests.util import create_dict_checkpoint, load_dict_checkpoint -from ray.train.tests.test_new_persistence import mock_s3_bucket_uri - - NUM_TRIALS = 3 NON_NAN_VALUE = 42 PEAK_VALUE = 100 @@ -292,7 +290,8 @@ def test_pickle(experiment_analysis, tmp_path): if __name__ == "__main__": - import pytest import sys + import pytest + sys.exit(pytest.main(["-v", __file__])) diff --git a/python/ray/tune/tests/test_function_api.py b/python/ray/tune/tests/test_function_api.py index 8c533c308914..22d5fa33916c 100644 --- a/python/ray/tune/tests/test_function_api.py +++ b/python/ray/tune/tests/test_function_api.py @@ -5,22 +5,17 @@ import unittest import ray -from ray.air.constants import 
TRAINING_ITERATION -from ray.rllib import _register_all - import ray.train from ray import tune +from ray.air.constants import TRAINING_ITERATION +from ray.rllib import _register_all from ray.train import Checkpoint, CheckpointConfig -from ray.tune.logger import NoopLogger +from ray.train.tests.util import mock_storage_context from ray.tune.execution.placement_groups import PlacementGroupFactory -from ray.tune.trainable import ( - with_parameters, - wrap_function, -) +from ray.tune.logger import NoopLogger from ray.tune.result import DEFAULT_METRIC from ray.tune.schedulers import ResourceChangingScheduler - -from ray.train.tests.util import mock_storage_context +from ray.tune.trainable import with_parameters, wrap_function def creator_generator(logdir): diff --git a/python/ray/tune/tests/test_integration_pytorch_lightning.py b/python/ray/tune/tests/test_integration_pytorch_lightning.py index fe5bb958d5e6..c42961fb335e 100644 --- a/python/ray/tune/tests/test_integration_pytorch_lightning.py +++ b/python/ray/tune/tests/test_integration_pytorch_lightning.py @@ -1,14 +1,14 @@ import shutil import tempfile import unittest + import pytorch_lightning as pl -from ray.train import CheckpointConfig import torch -from ray.air.constants import TRAINING_ITERATION - from torch.utils.data import DataLoader, Dataset from ray import train, tune +from ray.air.constants import TRAINING_ITERATION +from ray.train import CheckpointConfig from ray.tune.integration.pytorch_lightning import TuneReportCheckpointCallback @@ -133,7 +133,8 @@ def train_fn(config): if __name__ == "__main__": - import pytest import sys + import pytest + sys.exit(pytest.main(sys.argv[1:] + ["-v", __file__])) diff --git a/python/ray/tune/tests/test_logger.py b/python/ray/tune/tests/test_logger.py index c9932c81e11a..5c2e7bb05e71 100644 --- a/python/ray/tune/tests/test_logger.py +++ b/python/ray/tune/tests/test_logger.py @@ -1,13 +1,14 @@ import csv -from dataclasses import dataclass import glob import json import os -import unittest +import shutil import tempfile +import unittest +from dataclasses import dataclass from pathlib import Path from typing import Optional -import shutil + import numpy as np import ray @@ -20,12 +21,12 @@ from ray.cloudpickle import cloudpickle from ray.train import Checkpoint from ray.tune.logger import ( + CSVLogger, CSVLoggerCallback, - JsonLoggerCallback, JsonLogger, - CSVLogger, - TBXLoggerCallback, + JsonLoggerCallback, TBXLogger, + TBXLoggerCallback, ) from ray.tune.logger.aim import AimLoggerCallback from ray.tune.utils import flatten_dict @@ -427,7 +428,8 @@ def testCustomConfigurations(self): if __name__ == "__main__": - import pytest import sys + import pytest + sys.exit(pytest.main(["-v", __file__] + sys.argv[1:])) diff --git a/python/ray/tune/tests/test_multi_tenancy.py b/python/ray/tune/tests/test_multi_tenancy.py index 33ee918d7d91..b4c3f6ff5b80 100644 --- a/python/ray/tune/tests/test_multi_tenancy.py +++ b/python/ray/tune/tests/test_multi_tenancy.py @@ -1,10 +1,10 @@ import os -import pytest import subprocess import sys - from pathlib import Path +import pytest + import ray diff --git a/python/ray/tune/tests/test_placeholder.py b/python/ray/tune/tests/test_placeholder.py index a080d1135a46..48f6433dea37 100644 --- a/python/ray/tune/tests/test_placeholder.py +++ b/python/ray/tune/tests/test_placeholder.py @@ -1,13 +1,14 @@ -import numpy as np import unittest +import numpy as np + from ray import tune from ray.tune.impl.placeholder import ( - inject_placeholders, - resolve_placeholders, - 
create_resolvers_map, _FunctionResolver, _RefResolver, + create_resolvers_map, + inject_placeholders, + resolve_placeholders, ) from ray.tune.search.sample import Float, Integer @@ -308,7 +309,8 @@ def testResolveFunctionAfterRef(self): if __name__ == "__main__": - import pytest import sys + import pytest + sys.exit(pytest.main(["-v", __file__])) diff --git a/python/ray/tune/tests/test_progress_reporter.py b/python/ray/tune/tests/test_progress_reporter.py index a9aaea2d02b6..2851dc007c14 100644 --- a/python/ray/tune/tests/test_progress_reporter.py +++ b/python/ray/tune/tests/test_progress_reporter.py @@ -1,28 +1,28 @@ import collections import os -import regex as re import unittest from unittest.mock import MagicMock, Mock, patch -import pytest import numpy as np +import pytest +import regex as re from ray import train, tune from ray._private.test_utils import run_string_as_driver +from ray.tune.experiment.trial import Trial from ray.tune.progress_reporter import ( CLIReporter, JupyterNotebookReporter, ProgressReporter, - _fair_filter_trials, + TuneReporterBase, _best_trial_str, _detect_reporter, + _fair_filter_trials, + _max_len, _time_passed_str, _trial_progress_str, - TuneReporterBase, - _max_len, ) from ray.tune.result import AUTO_RESULT_KEYS -from ray.tune.experiment.trial import Trial EXPECTED_RESULT_1 = """Result logdir: /foo Number of trials: 5 (1 PENDING, 3 RUNNING, 1 TERMINATED) diff --git a/python/ray/tune/tests/test_remote.py b/python/ray/tune/tests/test_remote.py index 0f8cdddceb84..1d1bf4613af5 100644 --- a/python/ray/tune/tests/test_remote.py +++ b/python/ray/tune/tests/test_remote.py @@ -4,11 +4,10 @@ import ray import ray.train -from ray.tune import register_trainable, run_experiments, run, choice +from ray.tune import choice, register_trainable, run, run_experiments +from ray.tune.experiment import Experiment, Trial from ray.tune.result import TIMESTEPS_TOTAL -from ray.tune.experiment import Experiment from ray.tune.search.hyperopt import HyperOptSearch -from ray.tune.experiment import Trial from ray.util.client.ray_client_helpers import ray_start_client_server @@ -104,7 +103,8 @@ def testRemoteRunInClient(self): if __name__ == "__main__": - import pytest import sys + import pytest + sys.exit(pytest.main(["-v", __file__])) diff --git a/python/ray/tune/tests/test_resource_updater.py b/python/ray/tune/tests/test_resource_updater.py index 3763240bb949..82d170a3123c 100644 --- a/python/ray/tune/tests/test_resource_updater.py +++ b/python/ray/tune/tests/test_resource_updater.py @@ -1,7 +1,8 @@ +from unittest import mock + import ray from ray.tests.conftest import * # noqa -from ray.tune.utils.resource_updater import _ResourceUpdater, _Resources -from unittest import mock +from ray.tune.utils.resource_updater import _Resources, _ResourceUpdater def test_resources_numerical_error(): @@ -144,6 +145,7 @@ def test_resource_updater_automatic(): if __name__ == "__main__": import sys + import pytest sys.exit(pytest.main(["-v", __file__])) diff --git a/python/ray/tune/tests/test_result_grid.py b/python/ray/tune/tests/test_result_grid.py index d4917d978a1c..31233f34ecf8 100644 --- a/python/ray/tune/tests/test_result_grid.py +++ b/python/ray/tune/tests/test_result_grid.py @@ -3,9 +3,8 @@ import ray from ray import train, tune from ray.train import Checkpoint, Result -from ray.tune.result_grid import ResultGrid - from ray.train.tests.util import create_dict_checkpoint, load_dict_checkpoint +from ray.tune.result_grid import ResultGrid @pytest.fixture @@ -98,8 +97,8 @@ def f(config): 
     result_grid = tuner.fit()
     result = result_grid[0]

-    from ray.tune.result import AUTO_RESULT_KEYS
     from ray.tune.experimental.output import BLACKLISTED_KEYS
+    from ray.tune.result import AUTO_RESULT_KEYS

     representation = result.__repr__()
     assert not any(key in representation for key in AUTO_RESULT_KEYS)
diff --git a/python/ray/tune/tests/test_run_experiment.py b/python/ray/tune/tests/test_run_experiment.py
index 942e7155b583..caacdb70218b 100644
--- a/python/ray/tune/tests/test_run_experiment.py
+++ b/python/ray/tune/tests/test_run_experiment.py
@@ -3,15 +3,13 @@
 import ray
 import ray.train
-from ray.train import CheckpointConfig
 from ray.rllib import _register_all
-
-from ray.tune.result import TIMESTEPS_TOTAL
-from ray.tune import Trainable, TuneError
-from ray.tune import register_trainable, run_experiments
-from ray.tune.logger import LegacyLoggerCallback, Logger
+from ray.train import CheckpointConfig
+from ray.tune import Trainable, TuneError, register_trainable, run_experiments
 from ray.tune.experiment import Experiment
-from ray.tune.experiment.trial import Trial, ExportFormat
+from ray.tune.experiment.trial import ExportFormat, Trial
+from ray.tune.logger import LegacyLoggerCallback, Logger
+from ray.tune.result import TIMESTEPS_TOTAL


 def train_fn(config):
@@ -241,7 +239,8 @@ def testCustomTrialString(self):


 if __name__ == "__main__":
-    import pytest
     import sys
+
+    import pytest

     sys.exit(pytest.main(["-v", __file__]))
diff --git a/python/ray/tune/tests/test_sample.py b/python/ray/tune/tests/test_sample.py
index ab7fc41f3124..a411f6d2344f 100644
--- a/python/ray/tune/tests/test_sample.py
+++ b/python/ray/tune/tests/test_sample.py
@@ -4,11 +4,11 @@
 change your pytest running directory to ray/python/ray/tune/tests/
 """
+import unittest
 from collections import defaultdict
 from unittest.mock import patch

 import numpy as np
-import unittest

 import ray
 import ray.tune.search.sample
@@ -471,9 +471,10 @@ def train_fn(config):
         self.assertSequenceEqual(choices_1, choices_2)

     def testConvertAx(self):
-        from ray.tune.search.ax import AxSearch
         from ax.service.ax_client import AxClient

+        from ray.tune.search.ax import AxSearch
+
         # Grid search not supported, should raise ValueError
         with self.assertRaises(ValueError):
             AxSearch.convert_search_space({"grid": tune.grid_search([0, 1])})
@@ -536,13 +537,14 @@ def testConvertAx(self):
         self.assertTrue(8 <= config["b"] <= 9)

     def testSampleBoundsAx(self):
-        from ray.tune.search.ax import AxSearch
-        from ax.service.ax_client import AxClient
+        from ax import Models
         from ax.modelbridge.generation_strategy import (
-            GenerationStrategy,
             GenerationStep,
+            GenerationStrategy,
         )
-        from ax import Models
+        from ax.service.ax_client import AxClient
+
+        from ray.tune.search.ax import AxSearch

         ignore = [
             "func",
@@ -686,9 +688,10 @@ def config_generator():
         self._testTuneSampleAPI(config_generator(), ignore=ignore)

     def testConvertBOHB(self):
-        from ray.tune.search.bohb import TuneBOHB
         import ConfigSpace

+        from ray.tune.search.bohb import TuneBOHB
+
         # Grid search not supported, should raise ValueError
         with self.assertRaises(ValueError):
             TuneBOHB.convert_search_space({"grid": tune.grid_search([0, 1])})
@@ -766,9 +769,10 @@ def config_generator():
         self._testTuneSampleAPI(config_generator(), ignore=ignore)

     def testConvertHEBO(self):
-        from ray.tune.search.hebo import HEBOSearch
-        from hebo.design_space.design_space import DesignSpace
         import torch
+        from hebo.design_space.design_space import DesignSpace
+
+        from ray.tune.search.hebo import HEBOSearch

         # Grid search not supported, should raise ValueError
         with self.assertRaises(ValueError):
@@ -848,9 +852,10 @@ def config_generator():
         self._testTuneSampleAPI(config_generator(), ignore=ignore)

     def testConvertHyperOpt(self):
-        from ray.tune.search.hyperopt import HyperOptSearch
         from hyperopt import hp

+        from ray.tune.search.hyperopt import HyperOptSearch
+
         # Grid search not supported, should raise ValueError
         with self.assertRaises(ValueError):
             HyperOptSearch.convert_search_space({"grid": tune.grid_search([0, 1])})
@@ -904,9 +909,10 @@ def testConvertHyperOpt(self):
         self.assertTrue(8 <= config["b"] <= 9)

     def testConvertHyperOptChooseFromListOfList(self):
-        from ray.tune.search.hyperopt import HyperOptSearch
         from hyperopt import hp

+        from ray.tune.search.hyperopt import HyperOptSearch
+
         config = {
             "a": tune.choice([[1, 2], [3, 4]]),
         }
@@ -1026,9 +1032,10 @@ def config_generator():
         self._testTuneSampleAPI(config_generator(), ignore=ignore)

     def testConvertNevergrad(self):
-        from ray.tune.search.nevergrad import NevergradSearch
         import nevergrad as ng

+        from ray.tune.search.nevergrad import NevergradSearch
+
         # Grid search not supported, should raise ValueError
         with self.assertRaises(ValueError):
             NevergradSearch.convert_search_space({"grid": tune.grid_search([0, 1])})
@@ -1098,9 +1105,10 @@ def testConvertNevergrad(self):
         self.assertTrue(8 <= config["b"] <= 9)

     def testSampleBoundsNevergrad(self):
-        from ray.tune.search.nevergrad import NevergradSearch
         import nevergrad as ng

+        from ray.tune.search.nevergrad import NevergradSearch
+
         ignore = [
             "func",
             "randn",
@@ -1130,10 +1138,11 @@ def config_generator():
         self._testTuneSampleAPI(config_generator(), ignore=ignore)

     def testConvertOptuna(self):
-        from ray.tune.search.optuna import OptunaSearch
         import optuna
         from optuna.samplers import RandomSampler

+        from ray.tune.search.optuna import OptunaSearch
+
         # Grid search not supported, should raise ValueError
         with self.assertRaises(ValueError):
             OptunaSearch.convert_search_space({"grid": tune.grid_search([0, 1])})
@@ -1335,9 +1344,10 @@ def config_generator():
         self._testTuneSampleAPI(config_generator(), ignore=ignore)

     def testConvertZOOpt(self):
-        from ray.tune.search.zoopt import ZOOptSearch
         from zoopt import ValueType

+        from ray.tune.search.zoopt import ZOOptSearch
+
         # Grid search not supported, should raise ValueError
         with self.assertRaises(ValueError):
             ZOOptSearch.convert_search_space({"grid": tune.grid_search([0, 1])})
@@ -1583,9 +1593,10 @@ def testPointsToEvaluateNevergrad(self):
             "c": ray.tune.search.sample.Float(1e-4, 1e-1).loguniform(),
         }

-        from ray.tune.search.nevergrad import NevergradSearch
         import nevergrad as ng

+        from ray.tune.search.nevergrad import NevergradSearch
+
         return self._testPointsToEvaluate(
             NevergradSearch, config, exact=False, optimizer=ng.optimizers.OnePlusOne
         )
@@ -1658,7 +1669,6 @@ def testPointsToEvaluateBasicVariantAdvanced(self):
         # grid_1 * grid_2 are 3 * 4 = 12 variants per complete grid search
         # However if one grid var is set by preset variables, that run
         # is excluded from grid search.
-
         # Point 1 overwrites grid_1, so the first trial only grid searches
         # over grid_2 (3 trials).
         # The remaining 5 trials search over the whole space (5 * 12 trials)
@@ -1854,7 +1864,8 @@ def set_search_properties(self, metric, mode, config):


 if __name__ == "__main__":
-    import pytest
     import sys
+
+    import pytest

     sys.exit(pytest.main(["-v", __file__] + sys.argv[1:]))
diff --git a/python/ray/tune/tests/test_searcher_utils.py b/python/ray/tune/tests/test_searcher_utils.py
index 984c91a692c4..174636edb5d6 100644
--- a/python/ray/tune/tests/test_searcher_utils.py
+++ b/python/ray/tune/tests/test_searcher_utils.py
@@ -1,8 +1,7 @@
 import pytest

-from ray.tune.search import BasicVariantGenerator
+from ray.tune.search import BasicVariantGenerator, ConcurrencyLimiter, Searcher
 from ray.tune.search.repeater import Repeater
-from ray.tune.search import Searcher, ConcurrencyLimiter
 from ray.tune.search.search_generator import SearchGenerator
diff --git a/python/ray/tune/tests/test_searchers.py b/python/ray/tune/tests/test_searchers.py
index d6607b602f22..9c1893d7cc6b 100644
--- a/python/ray/tune/tests/test_searchers.py
+++ b/python/ray/tune/tests/test_searchers.py
@@ -1,15 +1,16 @@
 import contextlib
-from copy import deepcopy
-import numpy as np
 import os
-from packaging.version import Version
-import pandas
-import pytest
 import shutil
 import tempfile
 import unittest
+from copy import deepcopy
 from unittest.mock import patch

+import numpy as np
+import pandas
+import pytest
+from packaging.version import Version
+
 import ray
 from ray import train, tune
 from ray.air.constants import TRAINING_ITERATION
@@ -82,9 +83,10 @@ def check_searcher_checkpoint_errors_scope(self):
         ), "Searcher checkpointing failed (unable to serialize)."

     def testAxManualSetup(self):
-        from ray.tune.search.ax import AxSearch
         from ax.service.ax_client import AxClient

+        from ray.tune.search.ax import AxSearch
+
         config = self.config.copy()
         config["mixed_list"] = [1, tune.uniform(2, 3), 4]
         converted_config = AxSearch.convert_search_space(config)
@@ -196,9 +198,10 @@ def testHyperopt(self):
         self.assertCorrectExperimentOutput(out)

     def testNevergrad(self):
-        from ray.tune.search.nevergrad import NevergradSearch
         import nevergrad as ng

+        from ray.tune.search.nevergrad import NevergradSearch
+
         np.random.seed(2020)
         # At least one nan, inf, -inf and float
         with self.check_searcher_checkpoint_errors_scope():
@@ -213,15 +216,17 @@ def testNevergrad(self):
         self.assertCorrectExperimentOutput(out)

     def testNevergradWithRequiredOptimizerKwargs(self):
-        from ray.tune.search.nevergrad import NevergradSearch
         import nevergrad as ng

+        from ray.tune.search.nevergrad import NevergradSearch
+
         NevergradSearch(optimizer=ng.optimizers.CM, optimizer_kwargs=dict(budget=16))

     def testOptuna(self):
-        from ray.tune.search.optuna import OptunaSearch
         from optuna.samplers import RandomSampler

+        from ray.tune.search.optuna import OptunaSearch
+
         np.random.seed(1000)
         # At least one nan, inf, -inf and float
         with self.check_searcher_checkpoint_errors_scope():
@@ -237,9 +242,10 @@ def testOptuna(self):
         self.assertCorrectExperimentOutput(out)

     def testOptunaReportTooOften(self):
-        from ray.tune.search.optuna import OptunaSearch
         from optuna.samplers import RandomSampler

+        from ray.tune.search.optuna import OptunaSearch
+
         searcher = OptunaSearch(
             sampler=RandomSampler(seed=1234),
             space=OptunaSearch.convert_search_space(self.config),
@@ -344,9 +350,10 @@ def run_add_evaluated_trials(self, searcher, get_len_X, get_len_y):
         searcher_copy.suggest("1")

     def testOptuna(self):
-        from ray.tune.search.optuna import OptunaSearch
         from optuna.trial import TrialState

+        from ray.tune.search.optuna import OptunaSearch
+
         searcher = OptunaSearch(
             space=self.space,
             metric="metric",
@@ -491,9 +498,10 @@ def _restore(self, searcher):
         assert "not_completed" in searcher._live_trial_mapping

     def testAx(self):
-        from ray.tune.search.ax import AxSearch
         from ax.service.ax_client import AxClient

+        from ray.tune.search.ax import AxSearch
+
         converted_config = AxSearch.convert_search_space(self.config)
         client = AxClient()
         client.create_experiment(
@@ -565,9 +573,10 @@ def testHyperopt(self):
         self._restore(searcher)

     def testNevergrad(self):
-        from ray.tune.search.nevergrad import NevergradSearch
         import nevergrad as ng

+        from ray.tune.search.nevergrad import NevergradSearch
+
         searcher = NevergradSearch(
             space=self.config,
             metric=self.metric_name,
@@ -634,9 +643,10 @@ def tearDownClass(cls):
         ray.shutdown()

     def testOptuna(self):
-        from ray.tune.search.optuna import OptunaSearch
         from optuna.samplers import RandomSampler

+        from ray.tune.search.optuna import OptunaSearch
+
         np.random.seed(1000)

         out = tune.run(
diff --git a/python/ray/tune/tests/test_stopper.py b/python/ray/tune/tests/test_stopper.py
index 4505255d87a2..8818091b2c32 100644
--- a/python/ray/tune/tests/test_stopper.py
+++ b/python/ray/tune/tests/test_stopper.py
@@ -57,7 +57,8 @@ def test_timeout_stopper_recover_after_timeout():


 if __name__ == "__main__":
-    import pytest
     import sys
+
+    import pytest

     sys.exit(pytest.main(["-v", __file__] + sys.argv[1:]))
diff --git a/python/ray/tune/tests/test_syncer.py b/python/ray/tune/tests/test_syncer.py
index 2c179f649327..098b45a8f7af 100644
--- a/python/ray/tune/tests/test_syncer.py
+++ b/python/ray/tune/tests/test_syncer.py
@@ -3,19 +3,18 @@
 import time
 from typing import List, Optional

-from freezegun import freeze_time
 import pytest
+from freezegun import freeze_time

 import ray
 import ray.cloudpickle as pickle
 from ray.train._internal.storage import (
-    _upload_to_fs_path,
     _download_from_fs_path,
-    get_fs_and_path,
     _FilesystemSyncer,
+    _upload_to_fs_path,
+    get_fs_and_path,
 )
 from ray.train._internal.syncer import _BackgroundProcess
-
 from ray.train.tests.test_new_persistence import _create_mock_custom_fs
diff --git a/python/ray/tune/tests/test_trainable.py b/python/ray/tune/tests/test_trainable.py
index 543993ce1e35..35139483baa1 100644
--- a/python/ray/tune/tests/test_trainable.py
+++ b/python/ray/tune/tests/test_trainable.py
@@ -7,9 +7,8 @@
 import ray
 from ray import train, tune
 from ray.train._internal.storage import StorageContext
-from ray.tune.trainable import wrap_function
-
 from ray.train.tests.util import create_dict_checkpoint
+from ray.tune.trainable import wrap_function


 @pytest.fixture
diff --git a/python/ray/tune/tests/test_trainable_util.py b/python/ray/tune/tests/test_trainable_util.py
index 028112dedfda..5a31dffca5da 100644
--- a/python/ray/tune/tests/test_trainable_util.py
+++ b/python/ray/tune/tests/test_trainable_util.py
@@ -1,12 +1,17 @@
 import copy
-from collections import OrderedDict
-import pytest
 import sys
 import unittest
+from collections import OrderedDict
 from unittest.mock import patch

-from ray.tune.utils.util import wait_for_gpu
-from ray.tune.utils.util import flatten_dict, unflatten_dict, unflatten_list_dict
+import pytest
+
+from ray.tune.utils.util import (
+    flatten_dict,
+    unflatten_dict,
+    unflatten_list_dict,
+    wait_for_gpu,
+)


 class FlattenDictTest(unittest.TestCase):
diff --git a/python/ray/tune/tests/test_trial.py b/python/ray/tune/tests/test_trial.py
index 5338c2133b3a..d514edafa0f0 100644
--- a/python/ray/tune/tests/test_trial.py
+++ b/python/ray/tune/tests/test_trial.py
@@ -1,14 +1,14 @@
 import sys
+
 import pytest

 from ray.exceptions import RayActorError, RayTaskError
 from ray.train import Checkpoint
-from ray.train.constants import RAY_TRAIN_COUNT_PREEMPTION_AS_FAILURE
 from ray.train._internal.session import _TrainingResult
 from ray.train._internal.storage import StorageContext
-from ray.tune.experiment import Trial
-
+from ray.train.constants import RAY_TRAIN_COUNT_PREEMPTION_AS_FAILURE
 from ray.train.tests.util import mock_storage_context
+from ray.tune.experiment import Trial


 @pytest.fixture
diff --git a/python/ray/tune/tests/test_trial_scheduler.py b/python/ray/tune/tests/test_trial_scheduler.py
index 019c4ffcf59c..caaf3da2154c 100644
--- a/python/ray/tune/tests/test_trial_scheduler.py
+++ b/python/ray/tune/tests/test_trial_scheduler.py
@@ -1,43 +1,40 @@
-from collections import Counter
-import os
-import pytest
 import json
+import os
 import random
-import unittest
-import time
-
-import numpy as np
+import shutil
 import sys
 import tempfile
-import shutil
+import time
+import unittest
+from collections import Counter
 from unittest.mock import MagicMock

+import numpy as np
+import pytest
+
 import ray
 from ray import train, tune
-from ray.train import CheckpointConfig
 from ray.air.constants import TRAINING_ITERATION
-from ray.train import Checkpoint
-from ray.train._internal.session import _TrainingResult, _FutureTrainingResult
-from ray.train._internal.storage import StorageContext
-from ray.tune import Trainable, PlacementGroupFactory
+from ray.rllib import _register_all
+from ray.train import Checkpoint, CheckpointConfig
 from ray.train._internal.checkpoint_manager import _CheckpointManager
+from ray.train._internal.session import _FutureTrainingResult, _TrainingResult
+from ray.train._internal.storage import StorageContext
+from ray.tune import PlacementGroupFactory, Trainable
+from ray.tune.experiment import Trial
 from ray.tune.experiment.trial import _TemporaryTrialState
 from ray.tune.schedulers import (
+    AsyncHyperBandScheduler,
     FIFOScheduler,
+    HyperBandForBOHB,
     HyperBandScheduler,
-    AsyncHyperBandScheduler,
-    PopulationBasedTraining,
     MedianStoppingRule,
+    PopulationBasedTraining,
     TrialScheduler,
-    HyperBandForBOHB,
 )
-
-from ray.tune.schedulers.pbt import _explore, PopulationBasedTrainingReplay
-from ray.tune.search._mock import _MockSearcher
+from ray.tune.schedulers.pbt import PopulationBasedTrainingReplay, _explore
 from ray.tune.search import ConcurrencyLimiter
-from ray.tune.experiment import Trial
-
-from ray.rllib import _register_all
+from ray.tune.search._mock import _MockSearcher
 from ray.tune.trainable.metadata import _TrainingRunMetadata

 _register_all()
diff --git a/python/ray/tune/tests/test_trial_scheduler_pbt.py b/python/ray/tune/tests/test_trial_scheduler_pbt.py
index 17720ccbeb35..cd5e1bf3aa25 100644
--- a/python/ray/tune/tests/test_trial_scheduler_pbt.py
+++ b/python/ray/tune/tests/test_trial_scheduler_pbt.py
@@ -1,35 +1,33 @@
-import tempfile
-from functools import partial
-from typing import List
-
 import json
-import numpy as np
 import os
 import pickle
-import pytest
 import random
-import unittest
 import sys
+import tempfile
 import time
+import unittest
+from functools import partial
+from typing import List
 from unittest.mock import MagicMock

+import numpy as np
+import pytest
+
 import ray
 from ray import cloudpickle, train, tune
-from ray.air.config import FailureConfig, RunConfig, CheckpointConfig
+from ray._private.test_utils import object_memory_usage
+from ray.air.config import CheckpointConfig, FailureConfig, RunConfig
 from ray.train import Checkpoint
-from ray.tune import Trainable, Callback
+from ray.tune import Callback, Trainable
 from ray.tune.experiment import Trial
 from ray.tune.schedulers import PopulationBasedTraining
-from ray.tune.schedulers.pbt import _filter_mutated_params_from_config
 from ray.tune.schedulers.pb2 import PB2
 from ray.tune.schedulers.pb2_utils import UCB
+from ray.tune.schedulers.pbt import _filter_mutated_params_from_config
 from ray.tune.tests.execution.utils import create_execution_test_objects
 from ray.tune.tune_config import TuneConfig
-from ray._private.test_utils import object_memory_usage
 from ray.tune.utils.util import flatten_dict
-

 # Import psutil after ray so the packaged version is used.
 import psutil
diff --git a/python/ray/tune/tests/test_trial_scheduler_resource_changing.py b/python/ray/tune/tests/test_trial_scheduler_resource_changing.py
index e96786396886..2f78e4b40139 100644
--- a/python/ray/tune/tests/test_trial_scheduler_resource_changing.py
+++ b/python/ray/tune/tests/test_trial_scheduler_resource_changing.py
@@ -2,18 +2,17 @@
 import tempfile
 import unittest

+from ray.train.tests.util import mock_storage_context
 from ray.tune import PlacementGroupFactory
 from ray.tune.execution.tune_controller import TuneController
-from ray.tune.schedulers.trial_scheduler import TrialScheduler
 from ray.tune.experiment import Trial
 from ray.tune.schedulers.resource_changing_scheduler import (
-    ResourceChangingScheduler,
     DistributeResources,
     DistributeResourcesToTopJob,
+    ResourceChangingScheduler,
 )
-
+from ray.tune.schedulers.trial_scheduler import TrialScheduler
 from ray.tune.tests.execution.utils import create_execution_test_objects
-from ray.train.tests.util import mock_storage_context


 class MockTuneController(TuneController):
@@ -613,7 +612,8 @@ def testDeallocateResources(self):


 if __name__ == "__main__":
-    import pytest
     import sys
+
+    import pytest

     sys.exit(pytest.main(["-v", __file__]))
diff --git a/python/ray/tune/tests/test_tune_restore.py b/python/ray/tune/tests/test_tune_restore.py
index d6996248f3d1..003d45ad58c1 100644
--- a/python/ray/tune/tests/test_tune_restore.py
+++ b/python/ray/tune/tests/test_tune_restore.py
@@ -1,34 +1,34 @@
 # coding: utf-8
-import signal
-import subprocess
-from collections import Counter
 import multiprocessing
 import os
-from pathlib import Path
-
-import pytest
 import shutil
+import signal
+import subprocess
 import tempfile
 import threading
 import time
-from typing import List
 import unittest
+from collections import Counter
+from pathlib import Path
+from typing import List
 from unittest import mock

+import pytest
+
 import ray
 import ray.train
 from ray import tune
 from ray._private.test_utils import recursive_fnmatch, run_string_as_driver
-from ray.train import CheckpointConfig, Checkpoint
 from ray.exceptions import RayTaskError
 from ray.rllib import _register_all
+from ray.train import Checkpoint, CheckpointConfig
 from ray.train._internal.session import _TrainingResult
 from ray.tune import TuneError
 from ray.tune.callback import Callback
-from ray.tune.search.basic_variant import BasicVariantGenerator
-from ray.tune.search import Searcher
-from ray.tune.experiment import Trial
 from ray.tune.execution.tune_controller import TuneController
+from ray.tune.experiment import Trial
+from ray.tune.search import Searcher
+from ray.tune.search.basic_variant import BasicVariantGenerator
 from ray.tune.utils import validate_save_restore
 from ray.tune.utils.mock_trainable import MyTrainableClass
@@ -549,16 +549,18 @@ def tearDown(self):
         _register_all()

     def testPBTKeras(self):
-        from ray.tune.examples.pbt_tune_cifar10_with_keras import Cifar10Model
         from tensorflow.keras.datasets import cifar10

+        from ray.tune.examples.pbt_tune_cifar10_with_keras import Cifar10Model
+
         cifar10.load_data()
         validate_save_restore(Cifar10Model)

     def testPyTorchMNIST(self):
-        from ray.tune.examples.mnist_pytorch_trainable import TrainMNIST
         from torchvision import datasets

+        from ray.tune.examples.mnist_pytorch_trainable import TrainMNIST
+
         datasets.MNIST("~/data", train=True, download=True)
         validate_save_restore(TrainMNIST)
diff --git a/python/ray/tune/tests/test_tune_restore_warm_start.py b/python/ray/tune/tests/test_tune_restore_warm_start.py
index 8298f6105eee..cd5e698e5f4a 100644
--- a/python/ray/tune/tests/test_tune_restore_warm_start.py
+++ b/python/ray/tune/tests/test_tune_restore_warm_start.py
@@ -1,31 +1,31 @@
 # coding: utf-8
 import os
-from packaging.version import Version
-import pandas
-import pytest
 import shutil
 import tempfile
 import unittest

 import numpy as np
+import pandas
+import pytest
+from hebo.design_space.design_space import DesignSpace as HEBODesignSpace
 from hyperopt import hp
 from nevergrad.optimization import optimizerlib
+from packaging.version import Version
 from zoopt import ValueType
-from hebo.design_space.design_space import DesignSpace as HEBODesignSpace

 import ray
 from ray import train, tune
 from ray.rllib import _register_all
+from ray.tune.schedulers.hb_bohb import HyperBandForBOHB
 from ray.tune.search import ConcurrencyLimiter
-from ray.tune.search.hyperopt import HyperOptSearch
+from ray.tune.search.ax import AxSearch
 from ray.tune.search.bayesopt import BayesOptSearch
+from ray.tune.search.bohb import TuneBOHB
+from ray.tune.search.hebo import HEBOSearch
+from ray.tune.search.hyperopt import HyperOptSearch
 from ray.tune.search.nevergrad import NevergradSearch
 from ray.tune.search.optuna import OptunaSearch
 from ray.tune.search.zoopt import ZOOptSearch
-from ray.tune.search.hebo import HEBOSearch
-from ray.tune.search.ax import AxSearch
-from ray.tune.search.bohb import TuneBOHB
-from ray.tune.schedulers.hb_bohb import HyperBandForBOHB


 class AbstractWarmStartTest:
diff --git a/python/ray/tune/tests/test_tune_save_restore.py b/python/ray/tune/tests/test_tune_save_restore.py
index eccc60b81cef..8a22bfb9ff47 100644
--- a/python/ray/tune/tests/test_tune_save_restore.py
+++ b/python/ray/tune/tests/test_tune_save_restore.py
@@ -145,7 +145,8 @@ def load_checkpoint(self, checkpoint_dir):


 if __name__ == "__main__":
-    import pytest
     import sys
+
+    import pytest

     sys.exit(pytest.main(["-v", __file__]))
diff --git a/python/ray/tune/tests/test_tuner.py b/python/ray/tune/tests/test_tuner.py
index 34602144e5d8..708e61cfcab1 100644
--- a/python/ray/tune/tests/test_tuner.py
+++ b/python/ray/tune/tests/test_tuner.py
@@ -1,23 +1,22 @@
 import os
+import unittest
 from pathlib import Path
+from typing import Optional
 from unittest.mock import patch

 import pytest
-import unittest
-from typing import Optional
-
 from sklearn.datasets import load_breast_cancer
 from sklearn.utils import shuffle

 import ray
 from ray import train, tune
+from ray.data import Dataset, Datasource, ReadTask, from_pandas, read_datasource
+from ray.data.block import BlockMetadata
 from ray.train import CheckpointConfig, RunConfig, ScalingConfig
+from ray.train.data_parallel_trainer import DataParallelTrainer
 from ray.train.examples.pytorch.torch_linear_example import (
     train_func as linear_train_func,
 )
-from ray.data import Dataset, Datasource, ReadTask, from_pandas, read_datasource
-from ray.data.block import BlockMetadata
-from ray.train.data_parallel_trainer import DataParallelTrainer
 from ray.train.torch import TorchTrainer
 from ray.train.trainer import BaseTrainer
 from ray.train.xgboost import XGBoostTrainer
diff --git a/python/ray/tune/tests/test_tuner_restore.py b/python/ray/tune/tests/test_tuner_restore.py
index 99fa9c39aa9f..1c17e7cc1d7a 100644
--- a/python/ray/tune/tests/test_tuner_restore.py
+++ b/python/ray/tune/tests/test_tuner_restore.py
@@ -9,8 +9,9 @@
 import pytest

 import ray
-from ray import train, tune
 import ray.cloudpickle as ray_pickle
+from ray import train, tune
+from ray.air._internal.uri_utils import URI
 from ray.train import (
     Checkpoint,
     CheckpointConfig,
@@ -18,9 +19,9 @@
     RunConfig,
     ScalingConfig,
 )
-from ray.air._internal.uri_utils import URI
+from ray.train._internal.storage import _download_from_fs_path, get_fs_and_path
 from ray.train.data_parallel_trainer import DataParallelTrainer
-from ray.train._internal.storage import get_fs_and_path, _download_from_fs_path
+from ray.train.tests.util import create_dict_checkpoint, load_dict_checkpoint
 from ray.tune import Callback, Trainable
 from ray.tune.analysis import ExperimentAnalysis
 from ray.tune.execution.experiment_state import _find_newest_experiment_checkpoint
@@ -31,8 +32,6 @@
 from ray.tune.tune_config import TuneConfig
 from ray.tune.tuner import Tuner

-from ray.train.tests.util import create_dict_checkpoint, load_dict_checkpoint
-

 @pytest.fixture
 def propagate_logs():
diff --git a/python/ray/tune/tests/test_util_file_transfer.py b/python/ray/tune/tests/test_util_file_transfer.py
index 05f3194d1178..e835bef6cc54 100644
--- a/python/ray/tune/tests/test_util_file_transfer.py
+++ b/python/ray/tune/tests/test_util_file_transfer.py
@@ -1,18 +1,17 @@
 import io
-import tarfile
 import os
-
-import pytest
 import shutil
+import tarfile
 import tempfile

-from ray.exceptions import RayTaskError
+import pytest

+import ray.util
+from ray.exceptions import RayTaskError
 from ray.tune.utils.file_transfer import (
     _sync_dir_between_different_nodes,
     _sync_dir_on_same_node,
 )
-import ray.util


 @pytest.fixture
diff --git a/python/ray/tune/tests/test_utils.py b/python/ray/tune/tests/test_utils.py
index cad5c91e06ac..faf8d2046af5 100644
--- a/python/ray/tune/tests/test_utils.py
+++ b/python/ray/tune/tests/test_utils.py
@@ -6,7 +6,9 @@
 import pytest

 from ray.tune.search.variant_generator import format_vars
-from ray.tune.utils.util import retry_fn, Tee, logger as util_logger
+from ray.tune.utils.util import Tee
+from ray.tune.utils.util import logger as util_logger
+from ray.tune.utils.util import retry_fn


 def test_format_vars():
diff --git a/python/ray/tune/tests/test_var.py b/python/ray/tune/tests/test_var.py
index d814af70797d..82d5700b9bca 100644
--- a/python/ray/tune/tests/test_var.py
+++ b/python/ray/tune/tests/test_var.py
@@ -1,14 +1,14 @@
 import os
-import numpy as np
 import random
 import unittest

-import ray
-from ray.rllib import _register_all
+import numpy as np

+import ray
 from ray import tune
+from ray.rllib import _register_all
 from ray.train.constants import DEFAULT_STORAGE_PATH
-from ray.tune.search import grid_search, BasicVariantGenerator
+from ray.tune.search import BasicVariantGenerator, grid_search
 from ray.tune.search.variant_generator import (
     RecursiveDependencyError,
     _resolve_nested_dict,
@@ -362,7 +362,8 @@ def testRecursiveDep(self):


 if __name__ == "__main__":
-    import pytest
     import sys
+
+    import pytest

     sys.exit(pytest.main(["-v", __file__]))
diff --git a/python/ray/tune/tests/test_warnings.py b/python/ray/tune/tests/test_warnings.py
index ad13d6ad9a34..9fd71a14da0f 100644
--- a/python/ray/tune/tests/test_warnings.py
+++ b/python/ray/tune/tests/test_warnings.py
@@ -3,8 +3,8 @@
 import ray
 from ray import tune
 from ray.data.context import DataContext
-from ray.util.scheduling_strategies import PlacementGroupSchedulingStrategy
 from ray.tune.error import TuneError
+from ray.util.scheduling_strategies import PlacementGroupSchedulingStrategy


 def test_nowarn_zero_cpu():
diff --git a/python/ray/tune/tests/tutorial.py b/python/ray/tune/tests/tutorial.py
index ac5667a183b6..0aaaa980fba8 100644
--- a/python/ray/tune/tests/tutorial.py
+++ b/python/ray/tune/tests/tutorial.py
@@ -1,4 +1,5 @@
 # flake8: noqa
+# isort: skip_file

 # Original Code: https://github.com/pytorch/examples/blob/master/mnist/main.py
 # fmt: off
diff --git a/python/ray/tune/trainable/__init__.py b/python/ray/tune/trainable/__init__.py
index 1532b4a78cac..a308feb13422 100644
--- a/python/ray/tune/trainable/__init__.py
+++ b/python/ray/tune/trainable/__init__.py
@@ -1,7 +1,6 @@
+from ray.tune.trainable.function_trainable import FunctionTrainable, wrap_function
 from ray.tune.trainable.trainable import Trainable
 from ray.tune.trainable.util import with_parameters
-from ray.tune.trainable.function_trainable import FunctionTrainable, wrap_function
-

 __all__ = [
     "Trainable",
diff --git a/python/ray/tune/trainable/function_trainable.py b/python/ray/tune/trainable/function_trainable.py
index 2f7cb9896140..7d0b03cb7930 100644
--- a/python/ray/tune/trainable/function_trainable.py
+++ b/python/ray/tune/trainable/function_trainable.py
@@ -1,36 +1,28 @@
 import inspect
 import logging
 import os
+import queue
 from functools import partial
 from numbers import Number
 from typing import Any, Callable, Dict, Optional, Type

-from ray.air._internal.util import StartTraceback, RunnerThread
-import queue
-
-from ray.air.constants import (
-    _ERROR_FETCH_TIMEOUT,
-)
 import ray.train
+from ray.air._internal.util import RunnerThread, StartTraceback
+from ray.air.constants import _ERROR_FETCH_TIMEOUT
 from ray.train._internal.checkpoint_manager import _TrainingResult
 from ray.train._internal.session import (
-    init_session,
+    TrialInfo,
+    _TrainSession,
     get_session,
+    init_session,
     shutdown_session,
-    _TrainSession,
-    TrialInfo,
 )
 from ray.tune.execution.placement_groups import PlacementGroupFactory
-from ray.tune.result import (
-    DEFAULT_METRIC,
-    RESULT_DUPLICATE,
-    SHOULD_CHECKPOINT,
-)
-from ray.tune.trainable import Trainable
+from ray.tune.result import DEFAULT_METRIC, RESULT_DUPLICATE, SHOULD_CHECKPOINT
+from ray.tune.trainable.trainable import Trainable
 from ray.tune.utils import _detect_config_single
 from ray.util.annotations import DeveloperAPI

-
 logger = logging.getLogger(__name__)

 # Time between FunctionTrainable checks when fetching
diff --git a/python/ray/tune/trainable/metadata.py b/python/ray/tune/trainable/metadata.py
index 5b1ae11a818d..a520371e4850 100644
--- a/python/ray/tune/trainable/metadata.py
+++ b/python/ray/tune/trainable/metadata.py
@@ -1,10 +1,10 @@
 import json
 from collections import deque
 from numbers import Number
-from typing import Tuple, Optional
+from typing import Optional, Tuple

 from ray.train._internal.checkpoint_manager import _CheckpointManager
-from ray.tune.utils.serialization import TuneFunctionEncoder, TuneFunctionDecoder
+from ray.tune.utils.serialization import TuneFunctionDecoder, TuneFunctionEncoder


 class _TrainingRunMetadata:
diff --git a/python/ray/tune/trainable/trainable.py b/python/ray/tune/trainable/trainable.py
index c5f1cd220bb1..cf0f2951a5cc 100644
--- a/python/ray/tune/trainable/trainable.py
+++ b/python/ray/tune/trainable/trainable.py
@@ -1,27 +1,24 @@
 import copy
-from datetime import datetime
 import logging
 import os
-from pathlib import Path
 import platform
 import sys
 import tempfile
 import time
 from contextlib import redirect_stderr, redirect_stdout
+from datetime import datetime
+from pathlib import Path
 from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union

 import ray
 import ray.cloudpickle as ray_pickle
-from ray.air._internal.util import skip_exceptions, exception_cause
-from ray.air.constants import (
-    TIMESTAMP,
-    TIME_THIS_ITER_S,
-    TRAINING_ITERATION,
-)
+from ray.air._internal.util import exception_cause, skip_exceptions
+from ray.air.constants import TIME_THIS_ITER_S, TIMESTAMP, TRAINING_ITERATION
+from ray.train import Checkpoint
 from ray.train._internal.checkpoint_manager import _TrainingResult
 from ray.train._internal.storage import StorageContext, _exists_at_fs_path
-from ray.train import Checkpoint
 from ray.train.constants import DEFAULT_STORAGE_PATH
+from ray.tune.execution.placement_groups import PlacementGroupFactory
 from ray.tune.result import (
     DEBUG_METRICS,
     DONE,
@@ -43,7 +40,6 @@
 from ray.tune.utils import UtilMonitor
 from ray.tune.utils.log import disable_ipython
 from ray.tune.utils.util import Tee
-from ray.tune.execution.placement_groups import PlacementGroupFactory
 from ray.util.annotations import DeveloperAPI, PublicAPI

 if TYPE_CHECKING:
diff --git a/python/ray/tune/trainable/util.py b/python/ray/tune/trainable/util.py
index 04c93e1cdea2..5c637fd6bc00 100644
--- a/python/ray/tune/trainable/util.py
+++ b/python/ray/tune/trainable/util.py
@@ -1,7 +1,7 @@
 import inspect
 import logging
 import types
-from typing import Any, Callable, Dict, Optional, Type, Union, TYPE_CHECKING
+from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Type, Union

 import ray
 from ray.tune.execution.placement_groups import (
diff --git a/python/ray/tune/tune.py b/python/ray/tune/tune.py
index f31511b76a2f..e30a6b7ef954 100644
--- a/python/ray/tune/tune.py
+++ b/python/ray/tune/tune.py
@@ -9,6 +9,7 @@
 import time
 import warnings
 from typing import (
+    TYPE_CHECKING,
     Any,
     Callable,
     Dict,
@@ -17,7 +18,6 @@
     Sequence,
     Type,
     Union,
-    TYPE_CHECKING,
 )

 import ray
@@ -25,25 +25,20 @@
 from ray.air._internal.usage import AirEntrypoint
 from ray.air.util.node import _force_on_current_node
 from ray.train import CheckpointConfig, SyncConfig
-from ray.train.constants import RAY_CHDIR_TO_TRIAL_DIR, _DEPRECATED_VALUE
-from ray.tune import ResumeConfig
+from ray.train.constants import _DEPRECATED_VALUE, RAY_CHDIR_TO_TRIAL_DIR
 from ray.tune.analysis import ExperimentAnalysis
 from ray.tune.callback import Callback
 from ray.tune.error import TuneError
+from ray.tune.execution.placement_groups import PlacementGroupFactory
 from ray.tune.execution.tune_controller import TuneController
-from ray.tune.experiment import Experiment, _convert_to_experiment_list
-from ray.tune.experimental.output import (
-    get_air_verbosity,
-    IS_NOTEBOOK,
-    AirVerbosity,
-)
-
+from ray.tune.experiment import Experiment, Trial, _convert_to_experiment_list
+from ray.tune.experimental.output import IS_NOTEBOOK, AirVerbosity, get_air_verbosity
 from ray.tune.impl.placeholder import create_resolvers_map, inject_placeholders
 from ray.tune.logger import TBXLoggerCallback
 from ray.tune.progress_reporter import (
     ProgressReporter,
-    _detect_reporter,
     _detect_progress_metrics,
+    _detect_reporter,
     _prepare_progress_reporter_for_ray_client,
     _stream_client_output,
 )
@@ -59,28 +54,23 @@
 from ray.tune.schedulers.util import (
     _set_search_properties_backwards_compatible as scheduler_set_search_props,
 )
-from ray.tune.stopper import Stopper
 from ray.tune.search import (
     BasicVariantGenerator,
-    SearchAlgorithm,
-    SearchGenerator,
     ConcurrencyLimiter,
+    SearchAlgorithm,
     Searcher,
+    SearchGenerator,
     create_searcher,
 )
 from ray.tune.search.util import (
     _set_search_properties_backwards_compatible as searcher_set_search_props,
 )
 from ray.tune.search.variant_generator import _has_unresolved_values
+from ray.tune.stopper import Stopper
 from ray.tune.trainable import Trainable
-from ray.tune.experiment import Trial
+from ray.tune.tune_config import ResumeConfig
 from ray.tune.utils.callback import _create_default_callbacks
-from ray.tune.utils.log import (
-    Verbosity,
-    has_verbosity,
-    set_verbosity,
-)
-from ray.tune.execution.placement_groups import PlacementGroupFactory
+from ray.tune.utils.log import Verbosity, has_verbosity, set_verbosity
 from ray.util.annotations import PublicAPI
 from ray.util.queue import Queue
diff --git a/python/ray/tune/tuner.py b/python/ray/tune/tuner.py
index 5dd85b3cc14c..c4da01e4c88a 100644
--- a/python/ray/tune/tuner.py
+++ b/python/ray/tune/tuner.py
@@ -1,27 +1,25 @@
 import logging
 import os
 from pathlib import Path
-from typing import Any, Callable, Dict, Optional, Type, Union, TYPE_CHECKING
+from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Type, Union

 import pyarrow.fs

 import ray
-from ray.air.config import RunConfig
 from ray.air._internal.usage import AirEntrypoint
+from ray.air.config import RunConfig
 from ray.air.util.node import _force_on_current_node
 from ray.train._internal.storage import _exists_at_fs_path, get_fs_and_path
 from ray.tune import ResumeConfig
-from ray.tune.experimental.output import (
-    get_air_verbosity,
-)
-from ray.tune.result_grid import ResultGrid
-from ray.tune.trainable import Trainable
-from ray.tune.impl.tuner_internal import TunerInternal, _TUNER_PKL
-from ray.tune.tune_config import TuneConfig
+from ray.tune.experimental.output import get_air_verbosity
+from ray.tune.impl.tuner_internal import _TUNER_PKL, TunerInternal
 from ray.tune.progress_reporter import (
     _prepare_progress_reporter_for_ray_client,
     _stream_client_output,
 )
+from ray.tune.result_grid import ResultGrid
+from ray.tune.trainable import Trainable
+from ray.tune.tune_config import TuneConfig
 from ray.util import PublicAPI

 logger = logging.getLogger(__name__)
diff --git a/python/ray/tune/utils/__init__.py b/python/ray/tune/utils/__init__.py
index 0333740f3107..2c1c01a05cc9 100644
--- a/python/ray/tune/utils/__init__.py
+++ b/python/ray/tune/utils/__init__.py
@@ -1,15 +1,15 @@
 from ray.tune.utils.util import (
-    deep_update,
+    UtilMonitor,
+    _detect_config_single,
     date_str,
+    deep_update,
+    diagnose_serialization,
     flatten_dict,
     merge_dicts,
     unflattened_lookup,
-    UtilMonitor,
     validate_save_restore,
-    warn_if_slow,
-    diagnose_serialization,
-    _detect_config_single,
     wait_for_gpu,
+    warn_if_slow,
 )

 __all__ = [
diff --git a/python/ray/tune/utils/callback.py b/python/ray/tune/utils/callback.py
index 87d133e4513c..b53063b85ab9 100644
--- a/python/ray/tune/utils/callback.py
+++ b/python/ray/tune/utils/callback.py
@@ -1,17 +1,16 @@
 import logging
 import os
-from typing import Collection, List, Optional, Type, Union, TYPE_CHECKING
+from typing import TYPE_CHECKING, Collection, List, Optional, Type, Union

 from ray.tune.callback import Callback, CallbackList
-
 from ray.tune.logger import (
-    CSVLoggerCallback,
     CSVLogger,
-    JsonLoggerCallback,
+    CSVLoggerCallback,
     JsonLogger,
+    JsonLoggerCallback,
     LegacyLoggerCallback,
-    TBXLoggerCallback,
     TBXLogger,
+    TBXLoggerCallback,
 )

 logger = logging.getLogger(__name__)
diff --git a/python/ray/tune/utils/file_transfer.py b/python/ray/tune/utils/file_transfer.py
index 5c223ef407de..b39ce69bf834 100644
--- a/python/ray/tune/utils/file_transfer.py
+++ b/python/ray/tune/utils/file_transfer.py
@@ -3,14 +3,12 @@
 import os
 import shutil
 import tarfile
-
-from typing import Optional, Tuple, Dict, Generator, Union, List
+from typing import Dict, Generator, List, Optional, Tuple, Union

 import ray
-from ray.util.annotations import DeveloperAPI
 from ray.air._internal.filelock import TempFileLock
-from ray.air.util.node import _get_node_id_from_node_ip, _force_on_node
-
+from ray.air.util.node import _force_on_node, _get_node_id_from_node_ip
+from ray.util.annotations import DeveloperAPI

 _DEFAULT_CHUNK_SIZE_BYTES = 500 * 1024 * 1024  # 500 MiB
 _DEFAULT_MAX_SIZE_BYTES = 1 * 1024 * 1024 * 1024  # 1 GiB
diff --git a/python/ray/tune/utils/mock.py b/python/ray/tune/utils/mock.py
index 4502fc1afb44..5d7f7c8d2624 100644
--- a/python/ray/tune/utils/mock.py
+++ b/python/ray/tune/utils/mock.py
@@ -1,8 +1,8 @@
-from collections import defaultdict
 import logging
 import os
 import random
 import time
+from collections import defaultdict
 from pathlib import Path
 from typing import Dict
@@ -37,6 +37,7 @@ def on_step_begin(self, **info):
             return
         self.last_fail_check = time.monotonic()
         import click
+
         from ray.autoscaler._private.commands import kill_node

         failures = 0
diff --git a/python/ray/tune/utils/mock_trainable.py b/python/ray/tune/utils/mock_trainable.py
index 327492014a08..6592ab737f6d 100644
--- a/python/ray/tune/utils/mock_trainable.py
+++ b/python/ray/tune/utils/mock_trainable.py
@@ -1,7 +1,8 @@
 import json
-import numpy as np
 import os

+import numpy as np
+
 from ray.tune import Trainable
diff --git a/python/ray/tune/utils/object_cache.py b/python/ray/tune/utils/object_cache.py
index 4604d9b3aa1a..99f1b5678d2a 100644
--- a/python/ray/tune/utils/object_cache.py
+++ b/python/ray/tune/utils/object_cache.py
@@ -1,5 +1,4 @@
-from collections import defaultdict, Counter
-
+from collections import Counter, defaultdict
 from typing import Dict, Generator, List, Optional, TypeVar

 # Grouping key - must be hashable
diff --git a/python/ray/tune/utils/release_test_util.py b/python/ray/tune/utils/release_test_util.py
index d3e872eef145..9120097d52d2 100644
--- a/python/ray/tune/utils/release_test_util.py
+++ b/python/ray/tune/utils/release_test_util.py
@@ -1,15 +1,16 @@
-from collections import Counter
 import json
-import numpy as np
 import os
 import pickle
 import tempfile
 import time
+from collections import Counter
+
+import numpy as np

 from ray import train, tune
+from ray._private.test_utils import safe_write_to_results_json
 from ray.train import Checkpoint
 from ray.tune.callback import Callback
-from ray._private.test_utils import safe_write_to_results_json


 class ProgressCallback(Callback):
diff --git a/python/ray/tune/utils/util.py b/python/ray/tune/utils/util.py
index 9eaa8de4c991..248cf099dc04 100644
--- a/python/ray/tune/utils/util.py
+++ b/python/ray/tune/utils/util.py
@@ -12,22 +12,21 @@
 from typing import Any, Callable, Dict, List, Optional, Sequence, Type, Union

 import numpy as np
-import psutil
+
 import ray
-from ray.util.annotations import DeveloperAPI, PublicAPI
-from ray.air._internal.json import SafeFallbackEncoder  # noqa
-from ray.air._internal.util import (  # noqa: F401
-    is_nan,
-    is_nan_or_inf,
-)
 from ray._private.dict import (  # noqa: F401
-    merge_dicts,
     deep_update,
     flatten_dict,
+    merge_dicts,
     unflatten_dict,
     unflatten_list_dict,
     unflattened_lookup,
 )
+from ray.air._internal.json import SafeFallbackEncoder  # noqa
+from ray.air._internal.util import is_nan, is_nan_or_inf  # noqa: F401
+from ray.util.annotations import DeveloperAPI, PublicAPI
+
+import psutil

 logger = logging.getLogger(__name__)
@@ -335,7 +334,7 @@ def test():
         assert diagnose_serialization(test) is True
     """
-    from ray.tune.registry import register_trainable, _check_serializability
+    from ray.tune.registry import _check_serializability, register_trainable

     def check_variables(objects, failure_set, printer):
         for var_name, variable in objects.items():