refactor: BO and ifBO (#134)
eddiebergman authored Oct 8, 2024
1 parent f594f52 commit 5ed2bf3
Showing 128 changed files with 5,892 additions and 11,383 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -1,4 +1,4 @@
# Python
#False Python
__pycache__
dist

6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -27,7 +27,7 @@ repos:
files: '^src/.*\.py$'

- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.11.1
rev: v1.11.2
hooks:
- id: mypy
files: |
@@ -42,7 +42,7 @@ repos:
- "--show-traceback"

- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.29.1
rev: 0.29.2
hooks:
- id: check-github-workflows
files: '^github/workflows/.*\.ya?ml$'
@@ -51,7 +51,7 @@ repos:
files: '^\.github/dependabot\.ya?ml$'

- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.5.5
rev: v0.6.5
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix, --no-cache]
5 changes: 0 additions & 5 deletions docs/doc_yamls/customizing_neps_optimizer.yaml
@@ -19,8 +19,3 @@ searcher:
name: "my_bayesian" # optional; changing the searcher_name for better recognition
# Specific arguments depending on the searcher
initial_design_size: 7
surrogate_model: gp
acquisition: EI
acquisition_sampler: random
random_interleave_prob: 0.1

2 changes: 0 additions & 2 deletions docs/doc_yamls/loading_own_optimizer.yaml
@@ -19,5 +19,3 @@ searcher:
name: CustomOptimizer # class name within the file
# Specific arguments depending on your searcher
initial_design_size: 7
surrogate_model: gp
acquisition: EI
8 changes: 1 addition & 7 deletions docs/doc_yamls/set_up_optimizer.yaml
@@ -1,11 +1,5 @@
strategy: bayesian_optimization
# Specific arguments depending on the searcher
initial_design_size: 7
surrogate_model: gp
acquisition: EI
log_prior_weighted: false
acquisition_sampler: random
random_interleave_prob: 0.1
disable_priors: false
prior_confidence: high
use_priors: true
sample_default_first: false
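
The trimmed strategy file keeps only the arguments the refactored optimizer still accepts: initial_design_size, use_priors, prior_confidence, and sample_default_first. As a rough sketch of how such a strategy is selected from Python (the objective, the parameter name, and the max_evaluations_total and searcher keywords below are assumptions, not shown in this commit):

import neps

def evaluate(learning_rate: float) -> float:
    # Hypothetical pipeline: return the loss to minimize.
    return (learning_rate - 0.01) ** 2

neps.run(
    run_pipeline=evaluate,
    pipeline_space={
        "learning_rate": neps.FloatParameter(lower=1e-5, upper=1e-1, log=True),
    },
    root_directory="results/bo_example",
    max_evaluations_total=20,          # assumed keyword
    searcher="bayesian_optimization",  # assumed to accept a strategy name
)
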
2 changes: 0 additions & 2 deletions neps/__init__.py
@@ -8,8 +8,6 @@
FloatParameter,
FunctionParameter,
GraphGrammar,
GraphGrammarCell,
GraphGrammarRepetitive,
IntegerParameter,
)
from neps.status.status import get_summary_dict, status
8 changes: 1 addition & 7 deletions neps/api.py
@@ -1,7 +1,5 @@
"""API for the neps package."""



import inspect
import logging
import warnings
@@ -31,11 +29,7 @@ def run(
run_pipeline: Callable | None = Default(None),
root_directory: str | Path | None = Default(None),
pipeline_space: (
dict[str, Parameter | CS.ConfigurationSpace]
| str
| Path
| CS.ConfigurationSpace
| None
dict[str, Parameter] | str | Path | CS.ConfigurationSpace | None
) = Default(None),
run_args: str | Path | None = Default(None),
overwrite_working_directory: bool = Default(False),
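
With the narrowed annotation, a dict-valued pipeline_space may only map names to Parameter objects; a ConfigurationSpace is still accepted, but only as the whole space. A sketch of the remaining accepted forms, with illustrative parameter names:

import ConfigSpace as CS

import neps

# 1) A dict of NePS parameters (per-key ConfigurationSpace values are gone).
space_as_dict = {
    "epochs": neps.IntegerParameter(lower=1, upper=100),
    "learning_rate": neps.FloatParameter(lower=1e-5, upper=1e-1, log=True),
}

# 2) A str or pathlib.Path pointing to a YAML description of the space.
space_as_path = "pipeline_space.yaml"

# 3) A whole ConfigurationSpace object.
space_as_cs = CS.ConfigurationSpace()
space_as_cs.add_hyperparameter(
    CS.UniformFloatHyperparameter("learning_rate", 1e-5, 1e-1, log=True)
)
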
4 changes: 4 additions & 0 deletions neps/exceptions.py
@@ -52,3 +52,7 @@ class WorkerRaiseError(NePSError):
Includes additional information on how to recover
"""


class SurrogateFailedToFitError(NePSError):
"""Raised when a surrogate model fails to fit."""
14 changes: 5 additions & 9 deletions neps/optimizers/__init__.py
@@ -1,19 +1,17 @@


from collections.abc import Callable, Mapping
from functools import partial
from typing import Callable, Mapping
from typing import TYPE_CHECKING

from .base_optimizer import BaseOptimizer
from .bayesian_optimization.cost_cooling import CostCooling
from .bayesian_optimization.optimizer import BayesianOptimization
from .grid_search.optimizer import GridSearch
from .multi_fidelity.ifbo import IFBO
from .multi_fidelity.hyperband import (
MOBSTER,
AsynchronousHyperband,
Hyperband,
HyperbandCustomDefault,
)
from .multi_fidelity.ifbo import IFBO
from .multi_fidelity.successive_halving import (
AsynchronousSuccessiveHalving,
AsynchronousSuccessiveHalvingWithPriors,
@@ -27,11 +25,9 @@

# TODO: Rename Searcher to Optimizer...
SearcherMapping: Mapping[str, Callable[..., BaseOptimizer]] = {
"bayesian_optimization": BayesianOptimization,
"pibo": partial(BayesianOptimization, disable_priors=False),
"cost_cooling_bayesian_optimization": CostCooling,
"bayesian_optimization": partial(BayesianOptimization, use_priors=False),
"pibo": partial(BayesianOptimization, use_priors=True),
"random_search": RandomSearch,
"cost_cooling": CostCooling,
"regularized_evolution": RegularizedEvolution,
"assisted_regularized_evolution": partial(RegularizedEvolution, assisted=True),
"grid_search": GridSearch,
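
The mapping change makes plain bayesian_optimization explicitly prior-free, while pibo is the prior-using variant of the same class (previously the split was expressed via disable_priors). Since the mapping values are callables, resolving a strategy name is a plain lookup; a small sketch grounded in the keys and partials above:

from functools import partial

from neps.optimizers import SearcherMapping
from neps.optimizers.bayesian_optimization.optimizer import BayesianOptimization

bo = SearcherMapping["bayesian_optimization"]  # prior-free by default now
pibo = SearcherMapping["pibo"]                 # same class, priors enabled

assert isinstance(bo, partial) and bo.func is BayesianOptimization
assert bo.keywords == {"use_priors": False}
assert pibo.keywords == {"use_priors": True}
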
53 changes: 27 additions & 26 deletions neps/optimizers/base_optimizer.py
@@ -1,22 +1,25 @@

from __future__ import annotations

import logging
from abc import abstractmethod
from typing import Any, Mapping

from collections.abc import Mapping
from dataclasses import asdict, dataclass
from neps.state.optimizer import BudgetInfo
from neps.utils.types import ConfigResult, RawConfig, ERROR, ResultDict
from neps.search_spaces.search_space import SearchSpace
from typing import TYPE_CHECKING, Any

from neps.state.trial import Report, Trial
from neps.utils.data_loading import _get_cost, _get_learning_curve, _get_loss
from neps.state.trial import Trial
from neps.utils.types import ERROR, ConfigResult, RawConfig, ResultDict

if TYPE_CHECKING:
from neps.search_spaces.search_space import SearchSpace
from neps.state.optimizer import BudgetInfo


@dataclass
class SampledConfig:
id: Trial.ID
config: Mapping[str, Any]
previous_config_id: Trial.ID | None
previous_config_id: Trial.ID | None = None


class BaseOptimizer:
@@ -58,7 +61,7 @@ def load_optimization_state(

@abstractmethod
def get_config_and_ids(self) -> tuple[RawConfig, str, str | None]:
"""Sample a new configuration
"""Sample a new configuration.
Returns:
config: serializable object representing the configuration
@@ -73,8 +76,8 @@ def ask(
trials: Mapping[str, Trial],
budget_info: BudgetInfo | None,
optimizer_state: dict[str, Any],
) -> tuple[SampledConfig, dict[str, Any]]:
"""Sample a new configuration
) -> SampledConfig | tuple[SampledConfig, dict[str, Any]]:
"""Sample a new configuration.
!!! note
@@ -131,7 +134,7 @@ def ask(
config, config_id, previous_config_id = self.get_config_and_ids()
return SampledConfig(
id=config_id, config=config, previous_config_id=previous_config_id
), optimizer_state
)

def update_state_post_evaluation(
self, state: dict[str, Any], report: Trial.Report
@@ -141,16 +144,14 @@ def update_state_post_evaluation(
# state["key"] = "value"
return state

def get_loss(
self, result: ERROR | ResultDict | float | Trial.Report
) -> float | ERROR:
def get_loss(self, result: ERROR | ResultDict | float | Report) -> float | ERROR:
"""Calls result.utils.get_loss() and passes the error handling through.
Please use self.get_loss() instead of get_loss() in all optimizer classes."""

Please use self.get_loss() instead of get_loss() in all optimizer classes.
"""
# TODO(eddiebergman): This is a forward change for whenever we can have optimizers
# use `Trial` and `Report`, they already take care of this and save having to do this
# `_get_loss` at every call. We can also then just use `None` instead of the string `"error"`
if isinstance(result, Trial.Report):
if isinstance(result, Report):
return result.loss if result.loss is not None else "error"

return _get_loss(
@@ -159,15 +160,14 @@ def get_loss(
ignore_errors=self.ignore_errors,
)

def get_cost(
self, result: ERROR | ResultDict | float | Trial.Report
) -> float | ERROR:
def get_cost(self, result: ERROR | ResultDict | float | Report) -> float | ERROR:
"""Calls result.utils.get_cost() and passes the error handling through.
Please use self.get_cost() instead of get_cost() in all optimizer classes."""
Please use self.get_cost() instead of get_cost() in all optimizer classes.
"""
# TODO(eddiebergman): This is a forward change for whenever we can have optimizers
# use `Trial` and `Report`, they already take care of this and save having to do this
# `_get_loss` at every call
if isinstance(result, Trial.Report):
if isinstance(result, Report):
return result.loss if result.loss is not None else "error"

return _get_cost(
@@ -177,14 +177,15 @@
)

def get_learning_curve(
self, result: str | dict | float | Trial.Report
self, result: str | dict | float | Report
) -> list[float] | Any:
"""Calls result.utils.get_loss() and passes the error handling through.
Please use self.get_loss() instead of get_loss() in all optimizer classes."""
Please use self.get_loss() instead of get_loss() in all optimizer classes.
"""
# TODO(eddiebergman): This is a forward change for whenever we can have optimizers
# use `Trial` and `Report`, they already take care of this and save having to do this
# `_get_loss` at every call
if isinstance(result, Trial.Report):
if isinstance(result, Report):
return result.learning_curve

return _get_learning_curve(
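
For implementers, the net effect of this file's changes: ask may now return either a bare SampledConfig or the old (SampledConfig, state) tuple, and previous_config_id defaults to None. A minimal sketch under the new contract; the one-dimensional random space and the id scheme are illustrative, and BaseOptimizer's constructor arguments are not shown in this hunk:

from __future__ import annotations

import random
from collections.abc import Mapping
from typing import Any

from neps.optimizers.base_optimizer import BaseOptimizer, SampledConfig
from neps.state.optimizer import BudgetInfo
from neps.state.trial import Trial

class TinyRandomSearch(BaseOptimizer):
    """Illustrative subclass exercising the new ask() contract."""

    def ask(
        self,
        trials: Mapping[str, Trial],
        budget_info: BudgetInfo | None,
        optimizer_state: dict[str, Any],
    ) -> SampledConfig:
        # Returning a bare SampledConfig is now valid: no state tuple,
        # and previous_config_id can be omitted thanks to its new default.
        config = {"x": random.uniform(0.0, 1.0)}  # hypothetical 1-D space
        return SampledConfig(id=str(len(trials) + 1), config=config)
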
neps/optimizers/bayesian_optimization/acquisition_functions/__init__.py
@@ -1,19 +1,15 @@


from collections.abc import Callable
from functools import partial
from typing import Callable

from neps.optimizers.bayesian_optimization.acquisition_functions.ei import (
ComprehensiveExpectedImprovement,
)
from neps.optimizers.bayesian_optimization.acquisition_functions.mf_pi import MFPI_Random
from neps.optimizers.bayesian_optimization.acquisition_functions.ucb import (
UpperConfidenceBound,
)
from neps.optimizers.bayesian_optimization.acquisition_functions.prior_weighted import (
DecayingPriorWeightedAcquisition,
)

from neps.optimizers.bayesian_optimization.acquisition_functions.ucb import (
UpperConfidenceBound,
)

AcquisitionMapping: dict[str, Callable] = {
"EI": partial(
@@ -34,11 +30,6 @@
in_fill="posterior",
augmented_ei=True,
),
"MFPI-random": partial(
MFPI_Random,
threshold="random",
horizon="random",
),
"UCB": partial(
UpperConfidenceBound,
maximize=False,
@@ -50,5 +41,4 @@
"ComprehensiveExpectedImprovement",
"UpperConfidenceBound",
"DecayingPriorWeightedAcquisition",
"MFPI_Random",
]
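
AcquisitionMapping resolves names to callables the same way SearcherMapping does, with several entries preset via partial; MFPI-random is dropped from the mapping here. A small lookup sketch, assuming this file is the acquisition functions package __init__.py (as its imports suggest) and that the remaining constructor arguments have defaults:

from neps.optimizers.bayesian_optimization.acquisition_functions import (
    AcquisitionMapping,
)

ucb_factory = AcquisitionMapping["UCB"]  # UpperConfidenceBound, maximize=False
acquisition = ucb_factory()              # assumes other args have defaults
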
@@ -1,4 +1,6 @@
# from abc import ABC, abstractmethod
from __future__ import annotations

from itertools import product

import torch
@@ -1,3 +1,5 @@
from __future__ import annotations

from abc import ABC, abstractmethod

