Skip to content

Commit

Permalink
Updating search space (#156)
Browse files Browse the repository at this point in the history
* Updating search space

* fix typo

* Bug fix

* Fix buggy implementation of `predict` when using the GPU

Bug fixes

Fix code style checks

Bug fix for `use_pynisher` in the base pipeline

Bug fix
  • Loading branch information
ArlindKadra authored and ravinkohli committed Mar 9, 2022
1 parent 0ea47d3 commit f8640fb
Show file tree
Hide file tree
Showing 18 changed files with 175 additions and 128 deletions.
4 changes: 3 additions & 1 deletion autoPyTorch/pipeline/base_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -500,7 +500,7 @@ def get_fit_requirements(self) -> List[FitRequirement]:
Returns:
List[NamedTuple]: List of FitRequirements
"""
fit_requirements = list() # List[FitRequirement]
fit_requirements: List[FitRequirement] = list()
for name, step in self.steps:
step_requirements = step.get_fit_requirements()
if step_requirements:
Expand Down Expand Up @@ -569,6 +569,7 @@ def get_pipeline_representation(self) -> Dict[str, str]:

@staticmethod
def get_default_pipeline_options() -> Dict[str, Any]:

return {
'num_run': 0,
'device': 'cpu',
Expand All @@ -578,5 +579,6 @@ def get_default_pipeline_options() -> Dict[str, Any]:
'torch_num_threads': 1,
'early_stopping': 10,
'use_tensorboard_logger': True,
'use_pynisher': False,
'metrics_during_training': True
}
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Any, Dict, Optional, Tuple, Union
from typing import Any, Dict, Optional, Union

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import UniformIntegerHyperparameter
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from typing import Any, Dict, Optional, Tuple, Union
from typing import Any, Dict, Optional, Union

from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import (
CategoricalHyperparameter,
Expand Down
5 changes: 3 additions & 2 deletions autoPyTorch/pipeline/components/setup/network/base_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ def predict(self, loader: torch.utils.data.DataLoader) -> torch.Tensor:
return Y_snapshot_preds_tensor.mean(dim=0).cpu().numpy()

def _predict(self, network: torch.nn.Module, loader: torch.utils.data.DataLoader) -> torch.Tensor:
network.to(self.device)
network.float()
network.eval()
# Batch prediction
Expand All @@ -136,10 +137,10 @@ def _predict(self, network: torch.nn.Module, loader: torch.utils.data.DataLoader
for i, (X_batch, Y_batch) in enumerate(loader):
# Predict on batch
X_batch = X_batch.float().to(self.device)
Y_batch_pred = network(X_batch).detach().cpu()
Y_batch_pred = network(X_batch)
if self.final_activation is not None:
Y_batch_pred = self.final_activation(Y_batch_pred)
Y_batch_preds.append(Y_batch_pred)
Y_batch_preds.append(Y_batch_pred.detach().cpu())

return torch.cat(Y_batch_preds, 0)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,13 +89,13 @@ def get_hyperparameter_search_space(
num_units: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="num_units",
value_range=(10, 1024),
default_value=200,
log=True
),
dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="dropout",
value_range=(0, 0.8),
default_value=0.5,
),
) -> ConfigurationSpace:

cs = ConfigurationSpace()

# The number of hidden layers the network will have.
Expand All @@ -116,6 +116,7 @@ def get_hyperparameter_search_space(
default_value=num_units.default_value,
log=num_units.log)
n_units_hp = get_hyperparameter(n_units_search_space, UniformIntegerHyperparameter)

cs.add_hyperparameter(n_units_hp)

if i > int(min_mlp_layers):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -113,12 +113,14 @@ def get_hyperparameter_search_space(
default_value=True,
),
multi_branch_choice: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="mb_choice",
value_range=('None', 'shake-shake', 'shake-drop'),
value_range=('None', 'shake-shake',
'shake-drop'),
default_value='shake-drop',
),
num_units: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="num_units",
value_range=(10, 1024),
default_value=200,
log=True
),
activation: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="activation",
value_range=tuple(_activations.keys()),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -96,11 +96,11 @@ def get_hyperparameter_search_space(
max_units: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="max_units",
value_range=(10, 1024),
default_value=200,
),
log=True),
output_dim: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="output_dim",
value_range=(10, 1024),
default_value=200,
),
log=True),
mlp_shape: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="mlp_shape",
value_range=('funnel', 'long_funnel',
'diamond', 'hexagon',
Expand All @@ -114,7 +114,6 @@ def get_hyperparameter_search_space(
),

) -> ConfigurationSpace:

cs = ConfigurationSpace()

# The number of groups that will compose the resnet. That is,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ def get_hyperparameter_search_space( # type: ignore[override]
output_dim: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="output_dim",
value_range=(10, 1024),
default_value=200,
log=True
),
num_groups: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="num_groups",
value_range=(1, 15),
Expand All @@ -116,12 +117,15 @@ def get_hyperparameter_search_space( # type: ignore[override]
default_value=True,
),
multi_branch_choice: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="mb_choice",
value_range=('None', 'shake-shake', 'shake-drop'),
value_range=('None', 'shake-shake',
'shake-drop'),
default_value='shake-drop',
),
max_units: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="max_units",
value_range=(10, 1024),
default_value=200),
default_value=200,
log=True
),
activation: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="activation",
value_range=tuple(_activations.keys()),
default_value=list(_activations.keys())[0]),
Expand Down Expand Up @@ -154,6 +158,7 @@ def get_hyperparameter_search_space( # type: ignore[override]

use_dropout = get_hyperparameter(use_dropout, CategoricalHyperparameter)
max_dropout = get_hyperparameter(max_dropout, UniformFloatHyperparameter)
cs.add_hyperparameters([use_dropout, max_dropout])
cs.add_condition(CS.EqualsCondition(max_dropout, use_dropout, True))

use_sc = get_hyperparameter(use_skip_connection, CategoricalHyperparameter)
Expand Down
11 changes: 6 additions & 5 deletions autoPyTorch/pipeline/components/setup/optimizer/AdamOptimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,12 +93,13 @@ def get_hyperparameter_search_space(
value_range=(0.9, 0.9999),
default_value=0.9),
use_weight_decay: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="use_weight_decay",
value_range=(True, False),
default_value=True,
),
value_range=(True, False),
default_value=True,
),
weight_decay: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="weight_decay",
value_range=(0.0, 0.1),
default_value=0.0),
value_range=(1E-7, 0.1),
default_value=1E-4,
log=True),
) -> ConfigurationSpace:
cs = ConfigurationSpace()

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,8 +97,9 @@ def get_hyperparameter_search_space(
default_value=True,
),
weight_decay: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="weight_decay",
value_range=(0.0, 0.1),
default_value=0.0),
value_range=(1E-7, 0.1),
default_value=1E-4,
log=True),
) -> ConfigurationSpace:
cs = ConfigurationSpace()

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,8 +97,9 @@ def get_hyperparameter_search_space(
default_value=True,
),
weight_decay: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="weight_decay",
value_range=(0.0, 0.1),
default_value=0.0),
value_range=(1E-7, 0.1),
default_value=1E-4,
log=True),
momentum: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="momentum",
value_range=(0.0, 0.99),
default_value=0.0),
Expand All @@ -109,7 +110,6 @@ def get_hyperparameter_search_space(
add_hyperparameter(cs, lr, UniformFloatHyperparameter)
add_hyperparameter(cs, alpha, UniformFloatHyperparameter)
add_hyperparameter(cs, momentum, UniformFloatHyperparameter)

weight_decay = get_hyperparameter(weight_decay, UniformFloatHyperparameter)
use_weight_decay = get_hyperparameter(use_weight_decay, CategoricalHyperparameter)
cs.add_hyperparameters([use_weight_decay, weight_decay])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,8 +88,9 @@ def get_hyperparameter_search_space(
default_value=True,
),
weight_decay: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="weight_decay",
value_range=(0.0, 0.1),
default_value=0.0),
value_range=(1E-7, 0.1),
default_value=1E-4,
log=True),
momentum: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="momentum",
value_range=(0.0, 0.99),
default_value=0.0),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -264,10 +264,12 @@ def get_hyperparameter_search_space(
dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
batch_size: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="batch_size",
value_range=(32, 320),
default_value=64)
default_value=64,
log=True)
) -> ConfigurationSpace:
cs = ConfigurationSpace()
add_hyperparameter(cs, batch_size, UniformIntegerHyperparameter)

return cs

def __str__(self) -> str:
Expand Down
Loading

0 comments on commit f8640fb

Please sign in to comment.