
ML test cases: new version #642

Merged · 13 commits · May 6, 2020
22 changes: 22 additions & 0 deletions nevergrad/benchmark/experiments.py
@@ -13,6 +13,7 @@
from nevergrad.functions import ArtificialFunction
from nevergrad.functions import FarOptimumFunction
from nevergrad.functions import MultiobjectiveFunction
from nevergrad.functions.ml import MLTuning
from nevergrad.functions import mlda as _mlda
from nevergrad.functions.photonics import Photonics
from nevergrad.functions.arcoating import ARCoating
@@ -33,6 +34,27 @@
# fmt: off


@registry.register
def mltuning(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
    seedg = create_seed_generator(seed)
    # Continuous case: a single decision-tree regression tuning problem for now.
    functions = [
        MLTuning("1d_decision_tree_regression")
    ]
    optims = ["Shiwa", "DE", "DiscreteOnePlusOne", "PortfolioDiscreteOnePlusOne", "CMA", "MetaRecentering",
              "DoubleFastGADiscreteOnePlusOne"]
    for budget in [50, 150, 500]:
        for num_workers in [1, 10, 50, 100]:
            for optim in optims:
                for function in functions:
                    xp = Experiment(function, optim, num_workers=num_workers,
                                    budget=budget, seed=next(seedg))
                    if not xp.is_incoherent:
                        yield xp
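
For context, a minimal sketch of how such a generator is typically consumed. `Experiment.run()` and the shape of its summary are assumptions about the benchmark API, not something this diff adds:

from nevergrad.benchmark.experiments import mltuning

for xp in mltuning(seed=12):
    summary = xp.run()  # assumed API: runs one optimization and returns a summary dict
    print(summary.get("loss"))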


# pylint:disable=too-many-branches
@registry.register
def yawidebbob(seed: tp.Optional[int] = None) -> tp.Iterator[Experiment]:
6 changes: 6 additions & 0 deletions nevergrad/functions/ml/__init__.py
@@ -0,0 +1,6 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from .mlfunctionlib import MLTuning as MLTuning
69 changes: 69 additions & 0 deletions nevergrad/functions/ml/mlfunctionlib.py
@@ -0,0 +1,69 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from functools import partial

import numpy as np
from sklearn.tree import DecisionTreeRegressor  # type: ignore

from nevergrad.parametrization import parameter as p
from ..base import ExperimentFunction



class MLTuning(ExperimentFunction):
    """Class for generating ML hyperparameter tuning problems."""

    # Example of an ML problem: tuning the depth of a decision tree regressor.
    def _decision_tree_parametrization(self, depth: int, noise_free: bool) -> float:
        assert isinstance(depth, int), f"depth has class {type(depth)} and value {depth}."
        # 10-fold cross-validation.
        num_data: int = 80
        result: float = 0.
        # All data: num_data evenly spaced points in [0, 1).
        X_all = np.arange(0., 1., 1. / num_data)
        for cv in range(10):
            # Training set: the nine folds whose index differs from cv.
            X = X_all[np.arange(num_data) % 10 != cv]
            X = X.reshape(-1, 1)
            y = np.sin(X).ravel()

            # Validation set or test set (noise_free is True for the test set).
            X_test = X_all[np.arange(num_data) % 10 == cv]
            X_test = X_test.reshape(-1, 1)
            if noise_free:
                # Dense, fold-independent grid for the final noise-free evaluation.
                X_test = np.arange(0., 1., 1. / 1000000).reshape(-1, 1)
            y_test = np.sin(X_test).ravel()

            # Fit the regression model.
            regr = DecisionTreeRegressor(max_depth=depth)
            regr.fit(np.asarray(X), np.asarray(y))

            # Accumulate the squared prediction error on the held-out points.
            pred_test = regr.predict(X_test)
            result += np.sum((y_test - pred_test) ** 2)
        return result / num_data

    def __init__(self, problem_type: str):
        self.problem_type = problem_type

        if problem_type == "1d_decision_tree_regression":
            # Depth is the only hyperparameter; casting makes the scalar an integer.
            parametrization = p.Instrumentation(depth=p.Scalar(lower=1, upper=1200).set_integer_casting())
            super().__init__(partial(self._decision_tree_parametrization, noise_free=False), parametrization)
            # The evaluation (test) function is the noise-free variant of the objective.
            self.evaluation_function = partial(self._decision_tree_parametrization, noise_free=True)  # type: ignore
        else:
            raise ValueError(f"Problem type {problem_type} undefined!")
        self.register_initialization(problem_type=problem_type)
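
Outside the benchmark registry, the new function can be tuned directly. A minimal sketch, assuming the standard nevergrad optimizer API (`ng.optimizers.OnePlusOne`, `minimize`, `recommendation.kwargs`); only `MLTuning` itself comes from this diff:

import nevergrad as ng
from nevergrad.functions.ml import MLTuning

func = MLTuning("1d_decision_tree_regression")
optimizer = ng.optimizers.OnePlusOne(parametrization=func.parametrization, budget=50)
recommendation = optimizer.minimize(func)
print(recommendation.kwargs)  # e.g. {"depth": ...}
# Score the recommended depth with the noise-free test variant.
print(func.evaluation_function(**recommendation.kwargs))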
23 changes: 23 additions & 0 deletions nevergrad/functions/ml/test_mlfunctionlib.py
@@ -0,0 +1,23 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import numpy as np

from . import mlfunctionlib


def test_ml_tuning() -> None:
    func = mlfunctionlib.MLTuning("1d_decision_tree_regression")
    x: int = 3
    y1 = func(x)  # returns a float
    y2 = func(x)  # returns the same float
    np.testing.assert_array_almost_equal(y1, y2)
    y3 = func.evaluation_function(x)  # returns a float
    y4 = func.evaluation_function(x)  # returns the same float
    np.testing.assert_array_almost_equal(y3, y4)  # should be equal
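
For intuition, a standalone sketch of the quantity MLTuning optimizes: the cross-validated error of a depth-limited tree on sin(x). It uses scikit-learn's `cross_val_score`, whose contiguous KFold split differs from the class's interleaved modulo-10 split, so it illustrates the technique rather than reproducing the exact code path:

import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeRegressor

X = np.arange(0.0, 1.0, 1.0 / 80).reshape(-1, 1)
y = np.sin(X).ravel()
scores = cross_val_score(DecisionTreeRegressor(max_depth=3), X, y,
                         cv=10, scoring="neg_mean_squared_error")
print(-scores.mean())  # lower is better, comparable in spirit to MLTuning's loss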

2 changes: 1 addition & 1 deletion nevergrad/functions/test_functionlib.py
@@ -85,7 +85,7 @@ def test_oracle() -> None:
    np.testing.assert_raises(AssertionError, np.testing.assert_array_almost_equal, y1, y2)
    y3 = func.evaluation_function(x)  # returns a float
    y4 = func.evaluation_function(x)  # returns the same float (no noise for oracles + sphere function is deterministic)
-    np.testing.assert_array_almost_equal(y3, y4)  # should be different
+    np.testing.assert_array_almost_equal(y3, y4)  # should be equal


def test_function_transform() -> None: