From 8515fdd6db604ab484a22481bc74d81476b5c65f Mon Sep 17 00:00:00 2001 From: Kaushik B <45285388+kaushikb11@users.noreply.github.com> Date: Sat, 12 Feb 2022 07:15:06 +0530 Subject: [PATCH] [tune] Update Lightning examples to support PTL 1.5 (#20562) To help resolve the issues users are facing with running Lightning examples with Ray Tune PyTorchLightning/pytorch-lightning#10407 Co-authored-by: Amog Kamsetty --- python/ray/tune/examples/mnist_ptl_mini.py | 5 +++-- python/ray/tune/examples/mnist_pytorch_lightning.py | 6 +++--- python/ray/util/ray_lightning/simple_tune.py | 4 ++-- python/ray/util/ray_lightning/tune/__init__.py | 6 +++--- python/requirements/ml/requirements_tune.txt | 2 +- python/requirements/ml/requirements_upstream.txt | 2 +- 6 files changed, 13 insertions(+), 12 deletions(-) diff --git a/python/ray/tune/examples/mnist_ptl_mini.py b/python/ray/tune/examples/mnist_ptl_mini.py index ae2c9e959ae7..99243a0ab6e1 100644 --- a/python/ray/tune/examples/mnist_ptl_mini.py +++ b/python/ray/tune/examples/mnist_ptl_mini.py @@ -3,6 +3,7 @@ import torch from filelock import FileLock from torch.nn import functional as F +from torchmetrics import Accuracy import pytorch_lightning as pl from pl_bolts.datamodules.mnist_datamodule import MNISTDataModule import os @@ -24,7 +25,7 @@ def __init__(self, config, data_dir=None): self.layer_1 = torch.nn.Linear(28 * 28, layer_1) self.layer_2 = torch.nn.Linear(layer_1, layer_2) self.layer_3 = torch.nn.Linear(layer_2, 10) - self.accuracy = pl.metrics.Accuracy() + self.accuracy = Accuracy() def forward(self, x): batch_size, channels, width, height = x.size() @@ -75,7 +76,7 @@ def train_mnist_tune(config, num_epochs=10, num_gpus=0): max_epochs=num_epochs, # If fractional GPUs passed in, convert to int. 
gpus=math.ceil(num_gpus), - progress_bar_refresh_rate=0, + enable_progress_bar=False, callbacks=[TuneReportCallback(metrics, on="validation_end")], ) trainer.fit(model, dm) diff --git a/python/ray/tune/examples/mnist_pytorch_lightning.py b/python/ray/tune/examples/mnist_pytorch_lightning.py index e48c12e28340..e5fae529eb3c 100644 --- a/python/ray/tune/examples/mnist_pytorch_lightning.py +++ b/python/ray/tune/examples/mnist_pytorch_lightning.py @@ -121,7 +121,7 @@ def configure_optimizers(self): def train_mnist(config): model = LightningMNISTClassifier(config) - trainer = pl.Trainer(max_epochs=10, show_progress_bar=False) + trainer = pl.Trainer(max_epochs=10, enable_progress_bar=False) trainer.fit(model) # __lightning_end__ @@ -148,7 +148,7 @@ def train_mnist_tune(config, num_epochs=10, num_gpus=0, data_dir="~/data"): gpus=math.ceil(num_gpus), logger=TensorBoardLogger( save_dir=tune.get_trial_dir(), name="", version="."), - progress_bar_refresh_rate=0, + enable_progress_bar=False, callbacks=[ TuneReportCallback( { @@ -174,7 +174,7 @@ def train_mnist_tune_checkpoint(config, "gpus": math.ceil(num_gpus), "logger": TensorBoardLogger( save_dir=tune.get_trial_dir(), name="", version="."), - "progress_bar_refresh_rate": 0, + "enable_progress_bar": False, "callbacks": [ TuneReportCheckpointCallback( metrics={ diff --git a/python/ray/util/ray_lightning/simple_tune.py b/python/ray/util/ray_lightning/simple_tune.py index d95acb3f290f..7a5f171d7fcd 100644 --- a/python/ray/util/ray_lightning/simple_tune.py +++ b/python/ray/util/ray_lightning/simple_tune.py @@ -8,7 +8,7 @@ import pytorch_lightning as pl from ray.util.ray_lightning import RayPlugin -from ray.util.ray_lightning.tune import TuneReportCallback, get_tune_ddp_resources +from ray.util.ray_lightning.tune import TuneReportCallback, get_tune_resources num_cpus_per_actor = 1 num_workers = 1 @@ -70,7 +70,7 @@ def main(): num_samples=1, metric="loss", mode="min", - resources_per_trial=get_tune_ddp_resources( + 
resources_per_trial=get_tune_resources( num_workers=num_workers, cpus_per_worker=num_cpus_per_actor ), ) diff --git a/python/ray/util/ray_lightning/tune/__init__.py b/python/ray/util/ray_lightning/tune/__init__.py index 2cc1bd329c84..6c90b5515529 100644 --- a/python/ray/util/ray_lightning/tune/__init__.py +++ b/python/ray/util/ray_lightning/tune/__init__.py @@ -4,13 +4,13 @@ TuneReportCallback = None TuneReportCheckpointCallback = None -get_tune_ddp_resources = None +get_tune_resources = None try: from ray_lightning.tune import ( TuneReportCallback, TuneReportCheckpointCallback, - get_tune_ddp_resources, + get_tune_resources, ) except ImportError: logger.info( @@ -22,5 +22,5 @@ __all__ = [ "TuneReportCallback", "TuneReportCheckpointCallback", - "get_tune_ddp_resources", + "get_tune_resources", ] diff --git a/python/requirements/ml/requirements_tune.txt b/python/requirements/ml/requirements_tune.txt index 30bd3ac3df6e..a58a7e5a9527 100644 --- a/python/requirements/ml/requirements_tune.txt +++ b/python/requirements/ml/requirements_tune.txt @@ -29,7 +29,7 @@ nevergrad==0.4.3.post7 optuna==2.9.1 pytest-remotedata==0.3.2 lightning-bolts==0.4.0 -pytorch-lightning==1.4.9 +pytorch-lightning==1.5.10 shortuuid==1.0.1 scikit-learn==0.24.2 scikit-optimize==0.8.1 diff --git a/python/requirements/ml/requirements_upstream.txt b/python/requirements/ml/requirements_upstream.txt index 09cc189469b0..84bcee6a6937 100644 --- a/python/requirements/ml/requirements_upstream.txt +++ b/python/requirements/ml/requirements_upstream.txt @@ -2,7 +2,7 @@ # Because they depend on Ray, we can't pin the subdependencies. # So we separate its own requirements file. -ray_lightning==0.1.1 +ray_lightning==0.2.0 tune-sklearn==0.4.1 xgboost_ray==0.1.4 lightgbm_ray==0.0.2