[tune] enable isort #44693

Merged: 4 commits, Apr 15, 2024
2 changes: 1 addition & 1 deletion .isort.cfg
@@ -19,7 +19,7 @@ filter_files=True
# python/ray/setup-dev.py
# For the rest we will gradually remove them from the blacklist as we
# reformat the code to follow the style guide.
skip_glob=doc/*,python/ray/__init__.py,python/ray/setup-dev.py,python/build/*,python/ray/cloudpickle/*,python/ray/thirdparty_files/*,python/ray/_private/thirdparty/*,python/ray/_private/runtime_env/agent/thirdparty_files/*,python/ray/dag/*.py,ci/*,python/ray/_private/*,python/ray/air/*,dashboard/*,python/ray/includes/*,python/ray/internal/*,python/ray/ray_operator/*,python/ray/scripts/*,python/ray/serve/generated/serve_pb2.py,python/ray/sgd/*,python/ray/streaming/*,python/ray/tests/*,python/ray/tests/*,python/ray/tune/*,python/ray/util/*,python/ray/workers/*,python/ray/workflow/*,rllib/*,release/*,
skip_glob=doc/*,python/ray/__init__.py,python/ray/setup-dev.py,python/build/*,python/ray/cloudpickle/*,python/ray/thirdparty_files/*,python/ray/_private/thirdparty/*,python/ray/_private/runtime_env/agent/thirdparty_files/*,python/ray/dag/*.py,ci/*,python/ray/_private/*,python/ray/air/*,dashboard/*,python/ray/includes/*,python/ray/internal/*,python/ray/ray_operator/*,python/ray/scripts/*,python/ray/serve/generated/serve_pb2.py,python/ray/sgd/*,python/ray/streaming/*,python/ray/tests/*,python/ray/tests/*,python/ray/util/*,python/ray/workers/*,python/ray/workflow/*,rllib/*,release/*,
Member:

Suggested change
skip_glob=doc/*,python/ray/__init__.py,python/ray/setup-dev.py,python/build/*,python/ray/cloudpickle/*,python/ray/thirdparty_files/*,python/ray/_private/thirdparty/*,python/ray/_private/runtime_env/agent/thirdparty_files/*,python/ray/dag/*.py,ci/*,python/ray/_private/*,python/ray/air/*,dashboard/*,python/ray/includes/*,python/ray/internal/*,python/ray/ray_operator/*,python/ray/scripts/*,python/ray/serve/generated/serve_pb2.py,python/ray/sgd/*,python/ray/streaming/*,python/ray/tests/*,python/ray/tests/*,python/ray/util/*,python/ray/workers/*,python/ray/workflow/*,rllib/*,release/*,
skip_glob=doc/*,python/ray/__init__.py,python/ray/setup-dev.py,python/build/*,python/ray/cloudpickle/*,python/ray/thirdparty_files/*,python/ray/_private/thirdparty/*,python/ray/_private/runtime_env/agent/thirdparty_files/*,python/ray/dag/*.py,ci/*,python/ray/_private/*,python/ray/air/*,dashboard/*,python/ray/includes/*,python/ray/internal/*,python/ray/ray_operator/*,python/ray/scripts/*,python/ray/serve/generated/serve_pb2.py,python/ray/streaming/*,python/ray/tests/*,python/ray/tests/*,python/ray/util/*,python/ray/workers/*,python/ray/workflow/*,rllib/*,release/*,

Also remove this legacy path?

Contributor Author:

Oh let me do this in a separate PR with the air directory.


known_local_folder=ray
known_afterray=psutil,setproctitle
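For context on the two settings still in view here: `known_local_folder=ray` gives `ray` imports their own group, and `known_afterray=psutil,setproctitle` defines a custom section for modules that must come after it. A minimal sketch of the resulting order in a tune file, assuming the config's `sections` list runs stdlib, third-party, `ray`, then the custom afterray group (that list lives elsewhere in `.isort.cfg` and is not shown in this hunk):

```python
# Illustration only: the grouping isort enforces under this config.
# Blank lines separate the sections.
import os   # stdlib
import sys  # stdlib

import numpy as np  # third-party

from ray import tune  # known_local_folder=ray

import psutil        # known_afterray
import setproctitle  # known_afterray
```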
51 changes: 25 additions & 26 deletions python/ray/tune/__init__.py
@@ -1,54 +1,53 @@
# isort: off
# Try import ray[tune] core requirements (defined in setup.py)
try:
    import fsspec # noqa: F401
    import pandas # noqa: F401
    import requests # noqa: F401
    import pyarrow # noqa: F401
    import fsspec # noqa: F401
    import requests # noqa: F401
except ImportError as exc:
    raise ImportError(
        "Can't import ray.tune as some dependencies are missing. "
        'Run `pip install "ray[tune]"` to fix.'
    ) from exc
# isort: on


from ray.tune.error import TuneError
from ray.tune.tune_config import ResumeConfig, TuneConfig
from ray.tune.tune import run_experiments, run
from ray.tune.syncer import SyncConfig
from ray.tune.experiment import Experiment
from ray.tune.analysis import ExperimentAnalysis
from ray.tune.stopper import Stopper
from ray.tune.registry import register_env, register_trainable
from ray.tune.trainable import Trainable
from ray.tune.callback import Callback
from ray.tune.search import grid_search
from ray.tune.error import TuneError
from ray.tune.execution.placement_groups import PlacementGroupFactory
from ray.tune.experiment import Experiment
from ray.tune.progress_reporter import (
    ProgressReporter,
    CLIReporter,
    JupyterNotebookReporter,
    ProgressReporter,
)
from ray.tune.registry import register_env, register_trainable
from ray.tune.result_grid import ResultGrid
from ray.tune.schedulers import create_scheduler
from ray.tune.search import create_searcher, grid_search
from ray.tune.search.sample import (
    sample_from,
    uniform,
    quniform,
    choice,
    randint,
    lograndint,
    qrandint,
    qlograndint,
    randn,
    qrandn,
    loguniform,
    qlograndint,
    qloguniform,
    qrandint,
    qrandn,
    quniform,
    randint,
    randn,
    sample_from,
    uniform,
)
from ray.tune.search import create_searcher
from ray.tune.schedulers import create_scheduler
from ray.tune.execution.placement_groups import PlacementGroupFactory
from ray.tune.stopper import Stopper
from ray.tune.syncer import SyncConfig
from ray.tune.trainable import Trainable
from ray.tune.trainable.util import with_parameters, with_resources
from ray.tune.result_grid import ResultGrid
from ray.tune.tune import run, run_experiments
from ray.tune.tune_config import ResumeConfig, TuneConfig
from ray.tune.tuner import Tuner


__all__ = [
"Trainable",
"Callback",
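The `# isort: off` / `# isort: on` markers at the top of this file fence the optional-dependency check off from sorting: everything between the markers is left untouched, so the friendly ImportError keeps firing before any `ray.tune` submodule import is attempted. A minimal standalone sketch of the same pattern, mirroring the hunk above (the trailing `logging` import just marks where normal sorting resumes):

```python
# isort: off
# Left exactly as written: isort skips everything between the markers.
try:
    import pandas  # noqa: F401
except ImportError as exc:
    raise ImportError(
        "Can't import ray.tune as some dependencies are missing. "
        'Run `pip install "ray[tune]"` to fix.'
    ) from exc
# isort: on

# From here on, imports are sorted according to .isort.cfg.
import logging
```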
22 changes: 6 additions & 16 deletions python/ray/tune/analysis/experiment_analysis.py
@@ -2,34 +2,24 @@
import io
import json
import logging
from numbers import Number
import os
from numbers import Number
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union

import pyarrow.fs

from ray.util.annotations import PublicAPI
from ray.air.constants import (
    EXPR_PROGRESS_FILE,
    EXPR_RESULT_FILE,
    TRAINING_ITERATION,
)
from ray.air.constants import EXPR_PROGRESS_FILE, EXPR_RESULT_FILE, TRAINING_ITERATION
from ray.train import Checkpoint
from ray.train._internal.storage import (
    _exists_at_fs_path,
    get_fs_and_path,
)
from ray.train._internal.storage import _exists_at_fs_path, get_fs_and_path
from ray.tune.execution.experiment_state import _find_newest_experiment_checkpoint
from ray.tune.execution.tune_controller import TuneController
from ray.tune.experiment import Trial
from ray.tune.result import (
    DEFAULT_METRIC,
    CONFIG_PREFIX,
)
from ray.tune.result import CONFIG_PREFIX, DEFAULT_METRIC
from ray.tune.utils import flatten_dict
from ray.tune.utils.serialization import TuneFunctionDecoder
from ray.tune.utils.util import is_nan_or_inf, is_nan, unflattened_lookup
from ray.tune.utils.util import is_nan, is_nan_or_inf, unflattened_lookup
from ray.util.annotations import PublicAPI

try:
    import pandas as pd
6 changes: 3 additions & 3 deletions python/ray/tune/callback.py
@@ -1,11 +1,11 @@
from abc import ABCMeta
import glob
import warnings
from abc import ABCMeta
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
import warnings

from ray.util.annotations import PublicAPI, DeveloperAPI
from ray.tune.utils.util import _atomic_save, _load_newest_checkpoint
from ray.util.annotations import DeveloperAPI, PublicAPI

if TYPE_CHECKING:
    from ray.train import Checkpoint
18 changes: 9 additions & 9 deletions python/ray/tune/cli/commands.py
@@ -1,25 +1,25 @@
from pathlib import Path
from typing import Optional, List

import click
import logging
import operator
import os
import shutil
import subprocess
from datetime import datetime
from pathlib import Path
from typing import List, Optional

import click
import pandas as pd
from pandas.api.types import is_string_dtype, is_numeric_dtype
from pandas.api.types import is_numeric_dtype, is_string_dtype

from ray._private.thirdparty.tabulate.tabulate import tabulate
from ray.air.constants import EXPR_RESULT_FILE
from ray.tune import TuneError
from ray.tune.analysis import ExperimentAnalysis
from ray.tune.result import (
    CONFIG_PREFIX,
    DEFAULT_EXPERIMENT_INFO_KEYS,
    DEFAULT_RESULT_KEYS,
    CONFIG_PREFIX,
)
from ray.tune.analysis import ExperimentAnalysis
from ray.tune import TuneError
from ray._private.thirdparty.tabulate.tabulate import tabulate

logger = logging.getLogger(__name__)

1 change: 1 addition & 0 deletions python/ray/tune/cli/scripts.py
@@ -1,4 +1,5 @@
import click

import ray.tune.cli.commands as commands


4 changes: 3 additions & 1 deletion python/ray/tune/examples/ax_example.py
@@ -4,9 +4,11 @@

Requires the Ax library to be installed (`pip install ax-platform sqlalchemy`).
"""
import numpy as np

import time

import numpy as np

from ray import train, tune
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.search.ax import AxSearch
2 changes: 1 addition & 1 deletion python/ray/tune/examples/bohb_example.py
@@ -7,8 +7,8 @@
"""

import json
import time
import os
import time

import numpy as np

11 changes: 7 additions & 4 deletions python/ray/tune/examples/cifar10_pytorch.py
@@ -2,22 +2,25 @@
# fmt: off

# __import_begin__
import numpy as np
import os
import tempfile
from typing import Dict

import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from filelock import FileLock
from torch.utils.data import random_split
import torchvision
import torchvision.transforms as transforms
from typing import Dict
from filelock import FileLock
from torch.utils.data import random_split

import ray
from ray import train, tune
from ray.train import Checkpoint
from ray.tune.schedulers import ASHAScheduler

# __import_end__


2 changes: 1 addition & 1 deletion python/ray/tune/examples/hyperband_example.py
@@ -4,8 +4,8 @@

import ray
from ray import train, tune
from ray.tune.utils.mock_trainable import MyTrainableClass
from ray.tune.schedulers import HyperBandScheduler
from ray.tune.utils.mock_trainable import MyTrainableClass

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
@@ -7,14 +7,16 @@
For an example of using a Tune search space, see
:doc:`/tune/examples/hyperopt_example`.
"""

import time

from hyperopt import hp

import ray
from ray import train, tune
from ray.tune.search import ConcurrencyLimiter
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.search import ConcurrencyLimiter
from ray.tune.search.hyperopt import HyperOptSearch
from hyperopt import hp


def f_unpack_dict(dct):
@@ -35,7 +37,7 @@ def f_unpack_dict(dct):
"""

res = {}
for (k, v) in dct.items():
for k, v in dct.items():
if isinstance(v, dict):
res = {**res, **f_unpack_dict(v)}
else:
2 changes: 1 addition & 1 deletion python/ray/tune/examples/lightgbm_example.py
@@ -4,8 +4,8 @@
from sklearn.model_selection import train_test_split

from ray import tune
from ray.tune.schedulers import ASHAScheduler
from ray.tune.integration.lightgbm import TuneReportCheckpointCallback
from ray.tune.schedulers import ASHAScheduler


def train_breast_cancer(config: dict):
6 changes: 3 additions & 3 deletions python/ray/tune/examples/mlflow_ptl.py
@@ -1,16 +1,16 @@
"""An example showing how to use Pytorch Lightning training, Ray Tune
HPO, and MLflow autologging all together."""

import os
import tempfile

import pytorch_lightning as pl

import mlflow
import pytorch_lightning as pl

from ray import train, tune
from ray.air.integrations.mlflow import setup_mlflow
from ray.tune.integration.pytorch_lightning import TuneReportCallback
from ray.tune.examples.mnist_ptl_mini import LightningMNISTClassifier, MNISTDataModule
from ray.tune.integration.pytorch_lightning import TuneReportCallback


def train_mnist_tune(config, data_dir=None, num_epochs=10, num_gpus=0):
10 changes: 3 additions & 7 deletions python/ray/tune/examples/mnist_ptl_mini.py
@@ -1,21 +1,17 @@
import math

import os
import torch
from filelock import FileLock

import pytorch_lightning as pl


import torch
from filelock import FileLock
from torch.nn import functional as F
from torch.utils.data import DataLoader, random_split
from torchmetrics import Accuracy
from torchvision import transforms
from torchvision.datasets import MNIST
from ray.tune.integration.pytorch_lightning import TuneReportCheckpointCallback

from ray import train, tune

from ray.tune.integration.pytorch_lightning import TuneReportCheckpointCallback

PATH_DATASETS = os.environ.get("PATH_DATASETS", ".")

6 changes: 4 additions & 2 deletions python/ray/tune/examples/mnist_pytorch.py
@@ -1,13 +1,15 @@
# Original Code here:
# https://github.com/pytorch/examples/blob/master/mnist/main.py
import os

import argparse
from filelock import FileLock
import os
import tempfile

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from filelock import FileLock
from torchvision import datasets, transforms

import ray
9 changes: 5 additions & 4 deletions python/ray/tune/examples/mnist_pytorch_trainable.py
@@ -4,18 +4,19 @@

import argparse
import os

import torch
import torch.optim as optim

import ray
from ray import train, tune
from ray.tune.schedulers import ASHAScheduler
from ray.tune.examples.mnist_pytorch import (
    train_func,
    test_func,
    get_data_loaders,
    ConvNet,
    get_data_loaders,
    test_func,
    train_func,
)
from ray.tune.schedulers import ASHAScheduler

# Change these values if you want the training to run quicker or slower.
EPOCH_SIZE = 512
4 changes: 3 additions & 1 deletion python/ray/tune/examples/nevergrad_example.py
@@ -4,11 +4,12 @@

Requires the Nevergrad library to be installed (`pip install nevergrad`).
"""

import time

from ray import train, tune
from ray.tune.search import ConcurrencyLimiter
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.search import ConcurrencyLimiter
from ray.tune.search.nevergrad import NevergradSearch


@@ -30,6 +31,7 @@ def easy_objective(config):

if __name__ == "__main__":
    import argparse

    import nevergrad as ng

    parser = argparse.ArgumentParser()
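For anyone touching these newly un-skipped files later, one way to check a file against the repository's `.isort.cfg` locally is isort's Python API. The sketch below assumes isort 5.x run from the repo root; the CLI `isort --check-only --diff <path>` does the same thing:

```python
# Sketch: verify a file against the repository's isort settings.
# Assumes isort 5.x; settings_path="." makes isort discover .isort.cfg
# in the current (repo root) directory.
import isort

ok = isort.check_file(
    "python/ray/tune/__init__.py",
    show_diff=True,   # print the changes isort would make, don't apply them
    settings_path=".",
)
print("imports sorted" if ok else "needs isort")
```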