diff --git a/ignite/engine/engine.py b/ignite/engine/engine.py
index 27a949cacca2..24e7f885ec8d 100644
--- a/ignite/engine/engine.py
+++ b/ignite/engine/engine.py
@@ -1,6 +1,5 @@
 import functools
 import logging
-import math
 import time
 import warnings
 import weakref
@@ -731,14 +730,13 @@ def load_state_dict(self, state_dict: Mapping) -> None:

     @staticmethod
     def _is_done(state: State) -> bool:
-        is_done_iters = state.max_iters is not None and state.iteration >= state.max_iters
         is_done_count = (
             state.epoch_length is not None
             and state.max_epochs is not None
             and state.iteration >= state.epoch_length * state.max_epochs
         )
         is_done_epochs = state.max_epochs is not None and state.epoch >= state.max_epochs
-        return is_done_iters or is_done_count or is_done_epochs
+        return is_done_count or is_done_epochs

     def set_data(self, data: Union[Iterable, DataLoader]) -> None:
         """Method to set data. After calling the method the next batch passed to `processing_function` is
@@ -780,14 +778,13 @@ def run(
         self,
         data: Optional[Iterable] = None,
         max_epochs: Optional[int] = None,
-        max_iters: Optional[int] = None,
         epoch_length: Optional[int] = None,
     ) -> State:
         """Runs the ``process_function`` over the passed data.

         Engine has a state and the following logic is applied in this function:

-        - At the first call, new state is defined by `max_epochs`, `max_iters`, `epoch_length`, if provided.
+        - At the first call, new state is defined by `max_epochs`, `epoch_length`, if provided.
           A timer for total and per-epoch time is initialized when Events.STARTED is handled.
         - If state is already defined such that there are iterations to run until `max_epochs` and no input arguments
           provided, state is kept and used in the function.
@@ -805,9 +802,6 @@ def run(
                 `len(data)`. If `data` is an iterator and `epoch_length` is not set, then it will be automatically
                 determined as the iteration on which data iterator raises `StopIteration`.
                 This argument should not change if run is resuming from a state.
-            max_iters: Number of iterations to run for.
-                `max_iters` and `max_epochs` are mutually exclusive; only one of the two arguments should be provided.
-
         Returns:
             State: output state.

@@ -858,6 +852,8 @@ def switch_batch(engine):

         if self.state.max_epochs is None or (self._is_done(self.state) and self._internal_run_generator is None):
             # Create new state
+            if max_epochs is None:
+                max_epochs = 1
             if epoch_length is None:
                 if data is None:
                     raise ValueError("epoch_length should be provided if data is None")
@@ -866,22 +862,9 @@ def switch_batch(engine):
             if epoch_length is not None and epoch_length < 1:
                 raise ValueError("Input data has zero size. Please provide non-empty data")

-            if max_iters is None:
-                if max_epochs is None:
-                    max_epochs = 1
-            else:
-                if max_epochs is not None:
-                    raise ValueError(
-                        "Arguments max_iters and max_epochs are mutually exclusive."
-                        "Please provide only max_epochs or max_iters."
-                    )
-                if epoch_length is not None:
-                    max_epochs = math.ceil(max_iters / epoch_length)
-
             self.state.iteration = 0
             self.state.epoch = 0
             self.state.max_epochs = max_epochs
-            self.state.max_iters = max_iters
             self.state.epoch_length = epoch_length
             # Reset generator if previously used
             self._internal_run_generator = None
@@ -1062,18 +1045,12 @@ def _run_once_on_dataset_as_gen(self) -> Generator[State, None, float]:
                     if self.state.epoch_length is None:
                         # Define epoch length and stop the epoch
                         self.state.epoch_length = iter_counter
-                        if self.state.max_iters is not None:
-                            self.state.max_epochs = math.ceil(self.state.max_iters / self.state.epoch_length)
                         break

                     # Should exit while loop if we can not iterate
                     if should_exit:
-                        if not self._is_done(self.state):
-                            total_iters = (
-                                self.state.epoch_length * self.state.max_epochs
-                                if self.state.max_epochs is not None
-                                else self.state.max_iters
-                            )
+                        if not self._is_done(self.state) and self.state.max_epochs is not None:
+                            total_iters = self.state.epoch_length * self.state.max_epochs

                             warnings.warn(
                                 "Data iterator can not provide data anymore but required total number of "
@@ -1104,10 +1081,6 @@ def _run_once_on_dataset_as_gen(self) -> Generator[State, None, float]:
                     if self.state.epoch_length is not None and iter_counter == self.state.epoch_length:
                         break

-                    if self.state.max_iters is not None and self.state.iteration == self.state.max_iters:
-                        self.should_terminate = True
-                        raise _EngineTerminateException()
-
                 except _EngineTerminateSingleEpochException:
                     self._fire_event(Events.TERMINATE_SINGLE_EPOCH, iter_counter=iter_counter)
                     self.should_terminate_single_epoch = False
@@ -1229,18 +1202,12 @@ def _run_once_on_dataset_legacy(self) -> float:
                     if self.state.epoch_length is None:
                         # Define epoch length and stop the epoch
                         self.state.epoch_length = iter_counter
-                        if self.state.max_iters is not None:
-                            self.state.max_epochs = math.ceil(self.state.max_iters / self.state.epoch_length)
                         break

                     # Should exit while loop if we can not iterate
                     if should_exit:
-                        if not self._is_done(self.state):
-                            total_iters = (
-                                self.state.epoch_length * self.state.max_epochs
-                                if self.state.max_epochs is not None
-                                else self.state.max_iters
-                            )
+                        if not self._is_done(self.state) and self.state.max_epochs is not None:
+                            total_iters = self.state.epoch_length * self.state.max_epochs

                             warnings.warn(
                                 "Data iterator can not provide data anymore but required total number of "
@@ -1271,10 +1238,6 @@ def _run_once_on_dataset_legacy(self) -> float:
                     if self.state.epoch_length is not None and iter_counter == self.state.epoch_length:
                         break

-                    if self.state.max_iters is not None and self.state.iteration == self.state.max_iters:
-                        self.should_terminate = True
-                        raise _EngineTerminateException()
-
                 except _EngineTerminateSingleEpochException:
                     self._fire_event(Events.TERMINATE_SINGLE_EPOCH, iter_counter=iter_counter)
                     self.should_terminate_single_epoch = False
diff --git a/ignite/engine/events.py b/ignite/engine/events.py
index 9dd99348492b..aebffdfe058a 100644
--- a/ignite/engine/events.py
+++ b/ignite/engine/events.py
@@ -443,7 +443,6 @@ class State:
             state.dataloader        # data passed to engine
             state.epoch_length      # optional length of an epoch
             state.max_epochs        # number of epochs to run
-            state.max_iters         # number of iterations to run
             state.batch             # batch passed to `process_function`
             state.output            # output of `process_function` after a single iteration
             state.metrics           # dictionary with defined metrics if any
@@ -470,7 +469,6 @@ def __init__(self, **kwargs: Any) -> None:
         self.epoch = 0
         self.epoch_length: Optional[int] = None
         self.max_epochs: Optional[int] = None
-        self.max_iters: Optional[int] = None
         self.output: Optional[int] = None
         self.batch: Optional[int] = None
         self.metrics: Dict[str, Any] = {}
diff --git a/ignite/handlers/lr_finder.py b/ignite/handlers/lr_finder.py
index e3840d5da7d3..3643709a1b61 100644
--- a/ignite/handlers/lr_finder.py
+++ b/ignite/handlers/lr_finder.py
@@ -105,7 +105,6 @@ def _run(
             max_iter = trainer.state.epoch_length * trainer.state.max_epochs  # type: ignore[operator]
             if max_iter < num_iter:
                 max_iter = num_iter
-                trainer.state.max_iters = num_iter
                 trainer.state.max_epochs = ceil(num_iter / trainer.state.epoch_length)  # type: ignore[operator]

         if not trainer.has_event_handler(self._reached_num_iterations):
diff --git a/tests/ignite/engine/test_engine.py b/tests/ignite/engine/test_engine.py
index 130212426504..d1cc017bf916 100644
--- a/tests/ignite/engine/test_engine.py
+++ b/tests/ignite/engine/test_engine.py
@@ -1026,47 +1026,6 @@ def switch_dataloader():

         trainer.run(data1, max_epochs=10)

-    def test_run_with_max_iters(self):
-        max_iters = 8
-        engine = Engine(lambda e, b: 1)
-        engine.run([0] * 20, max_iters=max_iters)
-        assert engine.state.iteration == max_iters
-        assert engine.state.max_iters == max_iters
-
-    def test_run_with_max_iters_greater_than_epoch_length(self):
-        max_iters = 73
-        engine = Engine(lambda e, b: 1)
-        engine.run([0] * 20, max_iters=max_iters)
-        assert engine.state.iteration == max_iters
-
-    def test_run_with_invalid_max_iters_and_max_epoch(self):
-        max_iters = 12
-        max_epochs = 2
-        engine = Engine(lambda e, b: 1)
-        with pytest.raises(
-            ValueError,
-            match=r"Arguments max_iters and max_epochs are mutually exclusive."
-            "Please provide only max_epochs or max_iters.",
-        ):
-            engine.run([0] * 20, max_iters=max_iters, max_epochs=max_epochs)
-
-    def test_epoch_events_fired_max_iters(self):
-        max_iters = 32
-        engine = Engine(lambda e, b: 1)
-
-        @engine.on(Events.EPOCH_COMPLETED)
-        def fired_event(engine):
-            assert engine.state.iteration % engine.state.epoch_length == 0
-
-        engine.run([0] * 10, max_iters=max_iters)
-
-    def test_is_done_with_max_iters(self):
-        state = State(iteration=100, epoch=1, max_epochs=3, epoch_length=100, max_iters=250)
-        assert not Engine._is_done(state)
-
-        state = State(iteration=250, epoch=1, max_epochs=3, epoch_length=100, max_iters=250)
-        assert Engine._is_done(state)
-
     @pytest.mark.skipif(torch.cuda.device_count() < 1, reason="Skip if no GPU")
     def test_batch_is_released_before_new_one_is_loaded_on_cuda(self):
         torch.cuda.empty_cache()
diff --git a/tests/ignite/handlers/test_lr_finder.py b/tests/ignite/handlers/test_lr_finder.py
index 23b823d9ce47..b64b3ab8527b 100644
--- a/tests/ignite/handlers/test_lr_finder.py
+++ b/tests/ignite/handlers/test_lr_finder.py
@@ -357,7 +357,7 @@ def test_num_iter_is_not_enough(lr_finder, to_save, dummy_engine, dataloader):
     trainer_with_finder.run(dataloader)
     assert_output_sizes(lr_finder, dummy_engine)
     assert dummy_engine.state.iteration != len(dataloader)
-    assert dummy_engine.state.iteration == 150
+    assert dummy_engine.state.iteration == 150 + 1


 def test_detach_terminates(lr_finder, to_save, dummy_engine, dataloader):
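
Migration note (not part of the patch above): with `max_iters` gone, the run budget is expressed only through `max_epochs` and `epoch_length`, and `max_epochs` now defaults to 1 when omitted. The snippet below is a minimal sketch of two ways callers might reproduce the old behaviour; the `stop_on_budget` handler name is invented for illustration, and the handler-based pattern is the same kind of early termination the LR finder relies on via `_reached_num_iterations` (which is why its test now expects one extra iteration).

from ignite.engine import Engine, Events

engine = Engine(lambda e, b: 1)
data = [0] * 20

# Previously: engine.run(data, max_iters=8)
# A budget that fits in a single pass can be phrased as one epoch of that length.
engine.run(data, max_epochs=1, epoch_length=8)
assert engine.state.iteration == 8

# A budget spanning several passes can be enforced by terminating from a handler;
# the engine stops after the iteration on which terminate() is called.
max_iters = 73

@engine.on(Events.ITERATION_COMPLETED)
def stop_on_budget(engine):
    if engine.state.iteration >= max_iters:
        engine.terminate()

engine.run(data, max_epochs=100)
assert engine.state.iteration == max_iters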