Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix Exception misuse in ignite.contrib.handlers.lr_finder.py #1187

Merged
Merged
Show file tree
Hide file tree
Changes from 10 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 6 additions & 3 deletions ignite/contrib/handlers/lr_finder.py
Original file line number Diff line number Diff line change
Expand Up @@ -294,7 +294,7 @@ def attach(
raise ValueError("Mapping to_save should contain 'optimizer' key")

if not isinstance(to_save["optimizer"], torch.optim.Optimizer):
raise ValueError(
raise TypeError(
"Object to_save['optimizer'] should be torch optimizer, but given {}".format(type(to_save["optimizer"]))
)

Expand All @@ -304,8 +304,11 @@ def attach(
raise ValueError("diverge_th should be larger than 1")
if step_mode not in ["exp", "linear"]:
raise ValueError("step_mode should be 'exp' or 'linear', but given {}".format(step_mode))
if num_iter is not None and (not isinstance(num_iter, int) or num_iter <= 0):
raise ValueError("if provided, num_iter should be a positive integer, but given {}".format(num_iter))
if num_iter is not None:
if not isinstance(num_iter, int):
raise TypeError("If provided, num_iter should be an integer, but given {}".format(num_iter))
if num_iter <= 0:
raise ValueError("If provided, num_iter should be positive, but given {}".format(num_iter))

# store to_save
with tempfile.TemporaryDirectory() as tmpdirname:
Expand Down
6 changes: 5 additions & 1 deletion tests/ignite/contrib/handlers/test_lr_finder.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,10 +89,14 @@ def test_attach_incorrect_input_args(lr_finder, dummy_engine, model, optimizer,
with lr_finder.attach(dummy_engine, to_save=to_save, diverge_th=0.0) as f:
pass

with pytest.raises(ValueError, match=r"if provided, num_iter should be a positive integer"):
with pytest.raises(TypeError, match=r"If provided, num_iter should be an integer"):
with lr_finder.attach(dummy_engine, to_save=to_save, num_iter=0.0) as f:
pass

with pytest.raises(ValueError, match="If provided, num_iter should be positive"):
with lr_finder.attach(dummy_engine, to_save=to_save, num_iter=0) as f:
pass

with lr_finder.attach(dummy_engine, to_save) as trainer_with_finder:
trainer_with_finder.run(dataloader)

Expand Down