Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

[tune] Introduce ability to turn off default logging. #4104

Merged
merged 13 commits into from
Mar 1, 2019
3 changes: 3 additions & 0 deletions doc/source/tune-usage.rst
Original file line number Diff line number Diff line change
Expand Up @@ -424,6 +424,9 @@ via the Experiment object as follows:

These loggers will be called along with the default Tune loggers. All loggers must inherit the `Logger interface <tune-package-ref.html#ray.tune.logger.Logger>`__.

Tune has default loggers for TensorBoard, CSV, and JSON formats. To turn these off, set
``use_default_loggers=False`` in the ``Experiment`` initializer.

You can also check out `logger.py <https://github.com/ray-project/ray/blob/master/python/ray/tune/logger.py>`__ for implementation details.

An example can be found in `logging_example.py <https://github.com/ray-project/ray/blob/master/python/ray/tune/examples/logging_example.py>`__.
Expand Down
6 changes: 6 additions & 0 deletions python/ray/tune/config_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,11 @@ def make_parser(parser_creator=None, **kwargs):
help="Function for syncing the local_dir to upload_dir. If string, "
"then it must be a string template for syncer to run and needs to "
"include replacement fields '{local_dir}' and '{remote_dir}'.")
parser.add_argument(
"--use-default-loggers",
default=False,
type=bool,
help="Whether to use Tune's default loggers.")
parser.add_argument(
"--custom-loggers",
default=None,
Expand Down Expand Up @@ -192,6 +197,7 @@ def create_trial_from_spec(spec, output_path, parser, **trial_kwargs):
restore_path=spec.get("restore"),
upload_dir=args.upload_dir,
trial_name_creator=spec.get("trial_name_creator"),
use_default_loggers=args.use_default_loggers,
custom_loggers=spec.get("custom_loggers"),
# str(None) doesn't create None
sync_function=spec.get("sync_function"),
Expand Down
1 change: 1 addition & 0 deletions python/ray/tune/examples/logging_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ def _restore(self, checkpoint_path):
run=MyTrainableClass,
num_samples=1,
trial_name_creator=tune.function(trial_str_creator),
use_default_loggers=False,
custom_loggers=[TestLogger],
stop={"training_iteration": 1 if args.smoke_test else 99999},
config={
Expand Down
3 changes: 3 additions & 0 deletions python/ray/tune/experiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ class Experiment(object):
to (e.g. ``s3://bucket``).
trial_name_creator (func): Optional function for generating
the trial string representation.
use_default_loggers (bool): Whether to use Tune's default loggers.
custom_loggers (list): List of custom logger creators to be used with
each Trial. See `ray/tune/logger.py`.
sync_function (func|str): Function for syncing the local_dir to
Expand Down Expand Up @@ -117,6 +118,7 @@ def __init__(self,
local_dir=None,
upload_dir=None,
trial_name_creator=None,
use_default_loggers=True,
custom_loggers=None,
sync_function=None,
checkpoint_freq=0,
Expand Down Expand Up @@ -145,6 +147,7 @@ def __init__(self,
"local_dir": os.path.expanduser(local_dir or DEFAULT_RESULTS_DIR),
"upload_dir": upload_dir or "", # argparse converts None to "null"
"trial_name_creator": trial_name_creator,
"use_default_loggers": use_default_loggers,
"custom_loggers": custom_loggers,
"sync_function": sync_function,
"checkpoint_freq": checkpoint_freq,
Expand Down
6 changes: 5 additions & 1 deletion python/ray/tune/logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ class UnifiedLogger(Logger):
config: Configuration passed to all logger creators.
logdir: Directory for all logger creators to log to.
upload_uri (str): Optional URI where the logdir is sync'ed to.
use_default_loggers (bool): Whether to use Tune's default loggers.
custom_loggers (list): List of custom logger creators.
sync_function (func|str): Optional function for syncer to run.
See ray/python/ray/tune/log_sync.py
Expand All @@ -84,9 +85,12 @@ def __init__(self,
config,
logdir,
upload_uri=None,
use_default_loggers=True,
Copy link
Contributor

@hartikainen hartikainen Feb 21, 2019

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would it make more sense to change the argument to loggers instead of use_default_loggers and then just have a default be loggers=(_JsonLogger, _TFLogger, _CSVLogger)? You could provide a DEFAULT_LOGGERS = (_JsonLogger, _TFLogger, _CSVLogger), and if the user wants to add new loggers on top of the default ones, they can just do

from ray.tune.logger import DEFAULT_LOGGERS

unified_logger = UnifiedLogger(..., loggers=(*DEFAULT_LOGGERS, custom_logger), ...)

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That way could also get rid of the custom_loggers and just have a single loggers argument.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

oh I see; yeah let me make this change.

custom_loggers=None,
sync_function=None):
self._logger_list = [_JsonLogger, _TFLogger, _CSVLogger]
self._logger_list = []
if use_default_loggers:
self._logger_list += [_JsonLogger, _TFLogger, _CSVLogger]
self._sync_function = sync_function
self._log_syncer = None
if custom_loggers:
Expand Down
3 changes: 3 additions & 0 deletions python/ray/tune/test/trial_runner_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -793,10 +793,13 @@ def on_result(self, result):
"stop": {
"training_iteration": 1
},
"use_default_loggers": False,
"custom_loggers": [CustomLogger]
}
})
self.assertTrue(os.path.exists(os.path.join(trial.logdir, "test.log")))
self.assertFalse(
os.path.exists(os.path.join(trial.logdir, "params.json")))

def testCustomTrialString(self):
[trial] = run_experiments({
Expand Down
4 changes: 4 additions & 0 deletions python/ray/tune/trial.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,6 +254,7 @@ def __init__(self,
restore_path=None,
upload_dir=None,
trial_name_creator=None,
use_default_loggers=True,
custom_loggers=None,
sync_function=None,
max_failures=0):
Expand All @@ -274,6 +275,7 @@ def __init__(self,
or self._get_trainable_cls().default_resource_request(self.config))
self.stopping_criterion = stopping_criterion or {}
self.upload_dir = upload_dir
self.use_default_loggers = use_default_loggers
self.custom_loggers = custom_loggers
self.sync_function = sync_function
validate_sync_function(sync_function)
Expand Down Expand Up @@ -331,6 +333,7 @@ def init_logger(self):
self.config,
self.logdir,
upload_uri=self.upload_dir,
use_default_loggers=self.use_default_loggers,
custom_loggers=self.custom_loggers,
sync_function=self.sync_function)

Expand Down Expand Up @@ -507,6 +510,7 @@ def __getstate__(self):
state = self.__dict__.copy()
state["resources"] = resources_to_json(self.resources)

# These are non-pickleable entries.
pickle_data = {
"_checkpoint": self._checkpoint,
"config": self.config,
Expand Down