
Commit

Support all skopt plots
Resolves #7.
evhub committed Mar 15, 2019
1 parent 883ed9c commit bb45229
Showing 15 changed files with 385 additions and 95 deletions.
28 changes: 28 additions & 0 deletions README.md
@@ -90,6 +90,10 @@ Some examples of BBopt in action:
1. [`remember`](#remember)
1. [`plot_convergence`](#plot_convergence)
1. [`plot_history`](#plot_history)
1. [`partial_dependence`](#partial_dependence)
1. [`plot_partial_dependence_1D`](#plot_partial_dependence_1d)
1. [`plot_evaluations`](#plot_evaluations)
1. [`plot_objective`](#plot_objective)
1. [`get_current_run`](#get_current_run)
1. [`get_optimal_run`](#get_optimal_run)
1. [`get_data`](#get_data)
@@ -200,6 +204,30 @@ BlackBoxOptimizer.**plot_history**(_ax_=`None`, _yscale_=`None`)

Plot the gain/loss at each point over the course of all previous trials. If passed, `ax` should be the [matplotlib axis](https://matplotlib.org/api/axes_api.html) to plot on and `yscale` should be the scale for the y axis.
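
For example, a minimal sketch (assuming an existing `BlackBoxOptimizer` instance `bb` that already has prior trial data, as in the included Keras example):

```python
from matplotlib import pyplot as plt

# plot the best-so-far metric and the per-trial metric side by side
fig, (ax1, ax2) = plt.subplots(1, 2)
bb.plot_convergence(ax1)
bb.plot_history(ax2, yscale="log")
plt.show()
```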

#### `partial_dependence`

BlackBoxOptimizer.**partial_dependence**(_i\_name_, _j\_name_=`None`, _sample\_points_=`None`, _n\_samples_=`250`, _n\_points_=`40`)

Calls [`skopt.plots.partial_dependence`](https://scikit-optimize.github.io/plots.m.html) using previous trial data. The parameters _i\_name_ and _j\_name_ should be set to the names of the parameters you want to use for the _i_ and _j_ arguments to `skopt.plots.partial_dependence`.
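
As a rough sketch (the parameter names `"x"` and `"y"` here are hypothetical; use names you actually passed to `bb.uniform`, `bb.choice`, etc.), and following `skopt.plots.partial_dependence`, the 1D call should return `(xi, yi)` and the 2D call `(xi, yi, zi)`:

```python
# 1D partial dependence of the objective on "x"
xi, yi = bb.partial_dependence("x")

# 2D partial dependence on "x" and "y" jointly
xi, yi, zi = bb.partial_dependence("x", "y")
```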

#### `plot_partial_dependence_1D`

BlackBoxOptimizer.**plot_partial_dependence_1D**(_i\_name_, _ax_=`None`, _yscale_=`None`, _sample\_points_=`None`, _n\_samples_=`250`, _n\_points_=`40`)

Plot the partial dependence of _i\_name_ on the given [matplotlib axis](https://matplotlib.org/api/axes_api.html) `ax` and with the given y axis scale `yscale`. See **partial_dependence** for the meaning of the other parameters.
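
For instance (sketch only; `"learning rate"` is just the parameter name used in the included Keras example):

```python
from matplotlib import pyplot as plt

# plot how the loss depends on the "learning rate" parameter alone
fig, ax = plt.subplots()
bb.plot_partial_dependence_1D("learning rate", ax=ax)
plt.show()
```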

#### `plot_evaluations`

BlackBoxOptimizer.**plot_evaluations**(_bins_=`20`)

Calls [`skopt.plots.plot_evaluations`](https://scikit-optimize.github.io/plots.m.html) using previous trial data.

#### `plot_objective`

BlackBoxOptimizer.**plot_objective**(_levels_=`10`, _n\_points_=`40`, _n\_samples_=`250`, _size_=`2`, _zscale_=`"linear"`)

Calls [`skopt.plots.plot_objective`](https://scikit-optimize.github.io/plots.m.html) using previous trial data.
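
A typical usage sketch for both functions, mirroring `keras_example.py` in this repository (again assuming an existing optimizer instance `bb` with prior trial data):

```python
from matplotlib import pyplot as plt

# draw the evaluations grid on one figure and the objective grid on another
plt.figure(1)
bb.plot_evaluations()

plt.figure(2)
bb.plot_objective()

plt.show()
```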

#### `get_current_run`

BlackBoxOptimizer.**get_current_run**()
10 changes: 6 additions & 4 deletions bbopt-source/backends/hyperopt.coco
@@ -136,16 +136,18 @@ class HyperoptBackend(Backend):
trials = Trials()
trials.insert_trial_docs(trial_list)

# run one iteration of hyperparameter optimization, with values saved
# to the self.set_current_values callback passed to Domain
FMinIter(
self.fmin_iter = FMinIter(
algo,
domain,
trials,
rstate,
show_progressbar=show_progressbar,
**options,
) |> next
)

# run one iteration of hyperparameter optimization, with values saved
# to the self.set_current_values callback passed to Domain
next(self.fmin_iter)

assert self.current_values is not None, self.current_values
assert set(self.current_values.keys()) == set(params), self.current_values
41 changes: 28 additions & 13 deletions bbopt-source/backends/skopt.coco
@@ -19,29 +19,37 @@ from bbopt.backends.util import (

# Utilities:

def create_dimension(name, func, *args):
"""Create a scikit-optimize dimension for the given parameter."""
def create_space(name, func, *args):
"""Create a scikit-optimize space for the given parameter."""
case func:
match "choice":
return Categorical(*args)
return Categorical(*args, name=name)
match "randrange":
start, stop, step = args
if step != 1:
raise ValueError("the scikit-optimize backend only supports a randrange step size of 1")
stop -= 1 # scikit-optimize ranges are inclusive
return Integer(start, stop)
return Integer(start, stop, name=name)
match "uniform":
return Real(*args)
return Real(*args, name=name)
raise TypeError(f"insufficiently specified parameter {name}")


def create_dimensions(params) =
"""Construct the full optimization space for the given parameters."""
[
create_space(name, func, *args)
for name, (func, args, kwargs) in sorted_items(params)
]


# Backend:

class SkoptBackend(Backend):
"""The scikit-optimize backend uses scikit-optimize for black box optimization."""
backend_name = "scikit-optimize"
implemented_funcs = (
# should match create_dimension above
# should match create_space above
"choice",
"randrange",
"uniform",
@@ -55,20 +63,27 @@ class SkoptBackend(Backend):
return

data_points, losses = split_examples(examples, params)
dimensions = [
create_dimension(name, func, *args)
for name, (func, args, kwargs) in sorted_items(params)
]
dimensions = create_dimensions(params)

if isinstance(base_estimator, str):
base_estimator = py_str(base_estimator)

optimizer = Optimizer(dimensions, base_estimator, **options)
optimizer.tell(data_points, losses)
current_point = optimizer.ask()
self.optimizer = Optimizer(dimensions, base_estimator, **options)
self.result = self.optimizer.tell(data_points, losses)
current_point = self.optimizer.ask()

self.current_values = make_values(params, current_point)

@property
def space(self) =
"""The space over which optimization was performed."""
self.optimizer.space

@property
def model(self) =
"""Get the most recently fit model."""
self.optimizer.models[-1]


# Registered names:

2 changes: 1 addition & 1 deletion bbopt-source/constants.coco
@@ -5,7 +5,7 @@ Constants for use across all of BBopt.

# Installation constants:
name = "bbopt"
version = "1.0.1"
version = "1.0.2"
description = "The easiest hyperparameter optimization you'll ever do."
long_description = """
See BBopt's GitHub_ for more information.
18 changes: 15 additions & 3 deletions bbopt-source/examples/keras_example.py
@@ -60,7 +60,7 @@ def run_trial():
l1=bb.uniform("l1", 0, 0.1, guess=0.005),
l2=bb.uniform("l2", 0, 0.1, guess=0.05),
),
activation=bb.choice("activation", ["relu", "elu"]),
activation="relu",
),
Dense(
units=y.shape[1],
@@ -133,6 +133,18 @@ def run_trial():
print("\nSummary of best run:")
pprint(bb.get_optimal_run())

print("Displaying convergence plot...")
bb.plot_convergence()
print("Displaying plots...")

fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, figsize=(10, 10))
bb.plot_convergence(ax1)
bb.plot_history(ax2)
bb.plot_partial_dependence_1D("hidden neurons", ax3)
bb.plot_partial_dependence_1D("learning rate", ax4)

plt.figure(1)
bb.plot_evaluations()

plt.figure(2)
bb.plot_objective()

plt.show()
129 changes: 103 additions & 26 deletions bbopt-source/optimizer.coco
@@ -10,7 +10,6 @@ import itertools
import time

import numpy as np
from matplotlib import pyplot as plt
from portalocker import Lock

from bbopt.registry import (
@@ -30,6 +29,7 @@ from bbopt.util import (
denumpy_all,
sorted_examples,
running_best,
plot,
)
from bbopt.constants import (
data_file_ext,
@@ -227,21 +227,31 @@
"""Whether using a gain, a loss, or no examples."""
None if not self._examples else "gain" if "gain" in self._examples[0] else "loss"

_skopt_backend_args = None
_skopt_backend = None

def _get_skopt_backend(self):
"""Get a scikit-optimize backend regardless of whether currently using one."""
from bbopt.backends.skopt import SkoptBackend

if isinstance(self.backend, SkoptBackend):
return self.backend

skopt_backend_args = (self._examples, self._old_params)
if self._skopt_backend_args == skopt_backend_args:
return self._skopt_backend

self._skopt_backend_args = skopt_backend_args
self._skopt_backend = SkoptBackend(*skopt_backend_args)
return self._skopt_backend

# Plotting functions:

def plot_convergence(self, ax=None, yscale=None):
"""Plot the best gain/loss over the history of optimization.
Based on skopt.plots.plot_convergence."""
if not self._examples:
raise ValueError("no existing data available to be plotted")
if ax is None:
ax = plt.gca()

ax.set_title(f"Convergence plot for {self._file_name}")
ax.set_xlabel("Number of trials $n$")
ax.set_ylabel(f"Best {self._metric} after $n$ trials")
ax.grid()

if yscale is not None:
ax.set_yscale(yscale)

iterations = range(1, len(self._examples) + 1)
best_metrics = (
@@ -252,24 +262,20 @@
|> list
)

ax.plot(iterations, best_metrics, marker=".", markersize=12, lw=2)

return ax
return plot(
iterations,
best_metrics,
ax=ax,
yscale=yscale,
title=f"Convergence plot for {self._file_name}",
xlabel="Number of trials $n$",
ylabel=f"Best {self._metric} after $n$ trials",
)

def plot_history(self, ax=None, yscale=None):
"""Plot the gain/loss of every point in the order in which they were sampled."""
if not self._examples:
raise ValueError("no existing data available to be plotted")
if ax is None:
ax = plt.gca()

ax.set_title(f"History plot for {self._file_name}")
ax.set_xlabel("Number of trials $n$")
ax.set_ylabel(f"The {self._metric} on the $n$th trial")
ax.grid()

if yscale is not None:
ax.set_yscale(yscale)

iterations = range(1, len(self._examples) + 1)
metrics = (
@@ -279,9 +285,80 @@
|> list
)

ax.plot(iterations, metrics, marker=".", markersize=12, lw=2)
return plot(
iterations,
metrics,
ax=ax,
yscale=yscale,
title=f"History plot for {self._file_name}",
xlabel="Number of trials $n$",
ylabel=f"The {self._metric} on the $n$th trial",
)

def partial_dependence(self, i_name, j_name=None, *args, **kwargs):
"""Calls skopt.plots.partial_dependence where i_name and j_name are parameter names."""
if not self._examples:
raise ValueError("no existing data available to be plotted")

from skopt.plots import partial_dependence

skopt_backend = self._get_skopt_backend()

sorted_names = list(sorted(self._old_params))
i = sorted_names.index(i_name)
j = None if j_name is None else sorted_names.index(j_name)

return partial_dependence(
skopt_backend.space,
skopt_backend.model,
i,
j,
*args,
**kwargs,
)

def plot_partial_dependence_1D(self, i_name, ax=None, yscale=None, **kwargs):
"""Constructs a 1D partial dependence plot using self.partial_dependence."""
xi, yi = self.partial_dependence(i_name, **kwargs)
return plot(
xi,
yi,
ax=ax,
yscale=yscale,
title=f"Partial dependence of {i_name}",
xlabel=f"Values of {i_name}",
ylabel=f"The loss at each point",
)

def plot_evaluations(self, *args, **kwargs):
"""Calls skopt.plots.plot_evaluations."""
if not self._examples:
raise ValueError("no existing data available to be plotted")

from skopt.plots import plot_evaluations

skopt_backend = self._get_skopt_backend()

return plot_evaluations(
skopt_backend.result,
*args,
**kwargs,
)

def plot_objective(self, *args, **kwargs):
"""Calls skopt.plots.plot_objective."""
if not self._examples:
raise ValueError("no existing data available to be plotted")

from skopt.plots import plot_objective

return ax
skopt_backend = self._get_skopt_backend()

return plot_objective(
skopt_backend.result,
*args,
**kwargs,
)

# Base random functions:

2 changes: 1 addition & 1 deletion bbopt-source/registry.coco
@@ -20,7 +20,7 @@ class Registry:
return self.run_gen(name)
else:
valid_names = ", ".join(repr(name) for name in self)
raise ValueError(f"unknown {obj_name}: {name} (valid {obj_name}s: {valid_names})")
raise ValueError(f"unknown {self.obj_name}: {name} (valid {self.obj_name}s: {valid_names})")

def register(self, name, value):
"""Register value under the given name."""