
Commit

refactor: rename neps.XParameter to neps.X (#149)
gopaljigaur authored Oct 18, 2024
2 parents 58ee368 + efd2ec0 commit 26724bc
Showing 54 changed files with 619 additions and 328 deletions.
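
In short, the rename is mechanical across code and docs: `FloatParameter` → `Float`, `IntegerParameter` → `Integer`, `CategoricalParameter` → `Categorical`, `ConstantParameter` → `Constant`, `ArchitectureParameter` → `Architecture`, `FunctionParameter` → `Function`, and (in docstrings) `NumericalParameter` → `Numerical`. Representative hunks from the 54 changed files follow; removed lines are prefixed with `-` and added lines with `+`.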
9 changes: 4 additions & 5 deletions README.md
@@ -60,7 +60,7 @@ import logging

# 1. Define a function that accepts hyperparameters and computes the validation error
def run_pipeline(
-    hyperparameter_a: float, hyperparameter_b: int, architecture_parameter: str
+    hyperparameter_a: float, hyperparameter_b: int, architecture_parameter: str
) -> dict:
# Create your model
model = MyModel(architecture_parameter)
@@ -74,14 +74,13 @@ def run_pipeline(

# 2. Define a search space of parameters; use the same parameter names as in run_pipeline
pipeline_space = dict(
-    hyperparameter_a=neps.FloatParameter(
+    hyperparameter_a=neps.Float(
lower=0.001, upper=0.1, log=True # The search space is sampled in log space
),
-    hyperparameter_b=neps.IntegerParameter(lower=1, upper=42),
-    architecture_parameter=neps.CategoricalParameter(["option_a", "option_b"]),
+    hyperparameter_b=neps.Integer(lower=1, upper=42),
+    architecture_parameter=neps.Categorical(["option_a", "option_b"]),
)
-

# 3. Run the NePS optimization
logging.basicConfig(level=logging.INFO)
neps.run(
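
For reference, here is a minimal end-to-end sketch of the renamed API assembled from the README hunks above. The `run_pipeline` body and the `root_directory`/`max_evaluations_total` arguments are assumptions based on the surrounding docs, not part of this diff:

```python
import logging

import neps


def run_pipeline(
    hyperparameter_a: float, hyperparameter_b: int, architecture_parameter: str
) -> dict:
    # Stand-in objective; a real pipeline would train a model and
    # return its validation error here.
    validation_error = hyperparameter_a * hyperparameter_b
    return {"loss": validation_error}


pipeline_space = dict(
    hyperparameter_a=neps.Float(lower=0.001, upper=0.1, log=True),
    hyperparameter_b=neps.Integer(lower=1, upper=42),
    architecture_parameter=neps.Categorical(["option_a", "option_b"]),
)

logging.basicConfig(level=logging.INFO)
neps.run(
    run_pipeline=run_pipeline,
    pipeline_space=pipeline_space,
    root_directory="results",  # assumed argument name, as used elsewhere in the NePS docs
    max_evaluations_total=15,  # assumed argument name, as used elsewhere in the NePS docs
)
```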
6 changes: 3 additions & 3 deletions docs/doc_yamls/architecture_search_space.py
@@ -86,12 +86,12 @@ def set_recursive_attribute(op_name, predecessor_values):


pipeline_space = dict(
-    architecture=neps.ArchitectureParameter(
+    architecture=neps.Architecture(
set_recursive_attribute=set_recursive_attribute,
structure=structure,
primitives=primitives,
),
-    optimizer=neps.CategoricalParameter(choices=["sgd", "adam"]),
-    learning_rate=neps.FloatParameter(lower=10e-7, upper=10e-3, log=True),
+    optimizer=neps.Categorical(choices=["sgd", "adam"]),
+    learning_rate=neps.Float(lower=10e-7, upper=10e-3, log=True),
)

17 changes: 9 additions & 8 deletions docs/getting_started.md
@@ -35,10 +35,11 @@ In code, the usage pattern can look like this:
import neps
import logging

-def run_pipeline( # (1)!
-    hyperparameter_a: float,
-    hyperparameter_b: int,
-    architecture_parameter: str,
+
+def run_pipeline( # (1)!
+    hyperparameter_a: float,
+    hyperparameter_b: int,
+    architecture_parameter: str,
) -> dict:
# insert here your own model
model = MyModel(architecture_parameter)
@@ -49,7 +50,7 @@
)

return {
"loss": validation_error, #! (2)
"loss": validation_error, # ! (2)
"info_dict": {
"training_error": training_error
# + Other metrics
@@ -58,9 +59,9 @@ def run_pipeline( # (1)!


pipeline_space = { # (3)!
"hyperparameter_b":neps.IntegerParameter(1, 42, is_fidelity=True), #! (4)
"hyperparameter_a":neps.FloatParameter(1e-3, 1e-1, log=True) #! (5)
"architecture_parameter": neps.CategoricalParameter(["option_a", "option_b", "option_c"]),
"hyperparameter_b": neps.Integer(1, 42, is_fidelity=True), # ! (4)
"hyperparameter_a": neps.Float(1e-3, 1e-1, log=True) # ! (5)
"architecture_parameter": neps.Categorical(["option_a", "option_b", "option_c"]),
}

if __name__ == "__main__":
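
A side note on the `is_fidelity=True` flag appearing in the hunk above: it marks one parameter as the budget axis that multi-fidelity optimizers vary, so configurations can be evaluated cheaply at low fidelity before promising ones are promoted. A minimal sketch with illustrative parameter names:

```python
import neps

# "epochs" is the fidelity: an optimizer may evaluate a configuration at few
# epochs first and re-evaluate promising configurations at higher budgets.
pipeline_space = {
    "epochs": neps.Integer(1, 42, is_fidelity=True),
    "learning_rate": neps.Float(1e-3, 1e-1, log=True),
}
```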
9 changes: 4 additions & 5 deletions docs/index.md
@@ -66,7 +66,7 @@ import logging

# 1. Define a function that accepts hyperparameters and computes the validation error
def run_pipeline(
-    hyperparameter_a: float, hyperparameter_b: int, architecture_parameter: str
+    hyperparameter_a: float, hyperparameter_b: int, architecture_parameter: str
) -> dict:
# Create your model
model = MyModel(architecture_parameter)
@@ -80,14 +80,13 @@ def run_pipeline(

# 2. Define a search space of parameters; use the same parameter names as in run_pipeline
pipeline_space = dict(
-    hyperparameter_a=neps.FloatParameter(
+    hyperparameter_a=neps.Float(
lower=0.001, upper=0.1, log=True # The search space is sampled in log space
),
-    hyperparameter_b=neps.IntegerParameter(lower=1, upper=42),
-    architecture_parameter=neps.CategoricalParameter(["option_a", "option_b"]),
+    hyperparameter_b=neps.Integer(lower=1, upper=42),
+    architecture_parameter=neps.Categorical(["option_a", "option_b"]),
)
-

# 3. Run the NePS optimization
logging.basicConfig(level=logging.INFO)
neps.run(
42 changes: 21 additions & 21 deletions docs/reference/pipeline_space.md
@@ -12,10 +12,10 @@ effectively incorporate various parameter types, ensuring that NePS can utilize
## Parameters
NePS currently features 4 primary hyperparameter types:

-* [`CategoricalParameter`][neps.search_spaces.hyperparameters.categorical.CategoricalParameter]
-* [`FloatParameter`][neps.search_spaces.hyperparameters.float.FloatParameter]
-* [`IntegerParameter`][neps.search_spaces.hyperparameters.integer.IntegerParameter]
-* [`ConstantParameter`][neps.search_spaces.hyperparameters.constant.ConstantParameter]
+* [`Categorical`][neps.search_spaces.hyperparameters.categorical.Categorical]
+* [`Float`][neps.search_spaces.hyperparameters.float.Float]
+* [`Integer`][neps.search_spaces.hyperparameters.integer.Integer]
+* [`Constant`][neps.search_spaces.hyperparameters.constant.Constant]

Using these types, you can define the parameters that NePS will optimize during the search process.
The most basic way to pass these parameters is through a Python dictionary, where each key-value
@@ -25,32 +25,32 @@ for optimizing a deep learning model:

```python
pipeline_space = {
"learning_rate": neps.FloatParameter(0.00001, 0.1, log=True),
"num_epochs": neps.IntegerParameter(3, 30, is_fidelity=True),
"optimizer": neps.CategoricalParameter(["adam", "sgd", "rmsprop"]),
"dropout_rate": neps.ConstantParameter(0.5),
"learning_rate": neps.Float(0.00001, 0.1, log=True),
"num_epochs": neps.Integer(3, 30, is_fidelity=True),
"optimizer": neps.Categorical(["adam", "sgd", "rmsprop"]),
"dropout_rate": neps.Constant(0.5),
}

-neps.run(.., pipeline_space=pipeline_space)
+neps.run(.., pipeline_space = pipeline_space)
```

??? example "Quick Parameter Reference"

=== "`CategoricalParameter`"
=== "`Categorical`"

::: neps.search_spaces.hyperparameters.categorical.CategoricalParameter
::: neps.search_spaces.hyperparameters.categorical.Categorical

=== "`FloatParameter`"
=== "`Float`"

::: neps.search_spaces.hyperparameters.float.FloatParameter
::: neps.search_spaces.hyperparameters.float.Float

=== "`IntegerParameter`"
=== "`Integer`"

::: neps.search_spaces.hyperparameters.integer.IntegerParameter
::: neps.search_spaces.hyperparameters.integer.Integer

=== "`ConstantParameter`"
=== "`Constant`"

::: neps.search_spaces.hyperparameters.constant.ConstantParameter
::: neps.search_spaces.hyperparameters.constant.Constant


## Using your knowledge, providing a Prior
@@ -70,10 +70,10 @@ import neps
neps.run(
...,
pipeline_space={
"learning_rate": neps.FloatParameter(1e-4, 1e-1, log=True, default=1e-2, default_confidence="medium"),
"num_epochs": neps.IntegerParameter(3, 30, is_fidelity=True),
"optimizer": neps.CategoricalParameter(["adam", "sgd", "rmsprop"], default="adam", default_confidence="low"),
"dropout_rate": neps.ConstantParameter(0.5),
"learning_rate": neps.Float(1e-4, 1e-1, log=True, default=1e-2, default_confidence="medium"),
"num_epochs": neps.Integer(3, 30, is_fidelity=True),
"optimizer": neps.Categorical(["adam", "sgd", "rmsprop"], default="adam", default_confidence="low"),
"dropout_rate": neps.Constant(0.5),
}
)
```
13 changes: 7 additions & 6 deletions neps/__init__.py
@@ -2,28 +2,29 @@
from neps.plot.plot import plot
from neps.plot.tensorboard_eval import tblogger
from neps.search_spaces import (
+    Architecture,
    ArchitectureParameter,
+    Categorical,
    CategoricalParameter,
+    Constant,
    ConstantParameter,
+    Float,
    FloatParameter,
+    Function,
    FunctionParameter,
    GraphGrammar,
+    Integer,
    IntegerParameter,
)
from neps.status.status import get_summary_dict, status

-Integer = IntegerParameter
-Float = FloatParameter
-Categorical = CategoricalParameter
-Constant = ConstantParameter
-Architecture = ArchitectureParameter
-
__all__ = [
"Architecture",
"Integer",
"Float",
"Categorical",
"Constant",
"Function",
"ArchitectureParameter",
"CategoricalParameter",
"ConstantParameter",
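
If the intent is that both spellings keep working during a transition (an assumption suggested by the import list above, which re-exports both name sets from `neps.search_spaces`), a quick sanity check would be:

```python
import neps

# Assumption: the old *Parameter names remain aliases of the new short names
# after the refactor; this is not verified here.
assert neps.Float is neps.FloatParameter
assert neps.Categorical is neps.CategoricalParameter
```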
2 changes: 1 addition & 1 deletion neps/api.py
@@ -115,7 +115,7 @@ def run(
>>> validation_error = -some_parameter
>>> return validation_error
-    >>> pipeline_space = dict(some_parameter=neps.FloatParameter(lower=0, upper=1))
+    >>> pipeline_space = dict(some_parameter=neps.Float(lower=0, upper=1))
>>> logging.basicConfig(level=logging.INFO)
>>> neps.run(
22 changes: 11 additions & 11 deletions neps/optimizers/grid_search/optimizer.py
@@ -11,10 +11,10 @@
from neps.optimizers.base_optimizer import BaseOptimizer, SampledConfig
from neps.search_spaces.architecture.graph_grammar import GraphParameter
from neps.search_spaces.domain import UNIT_FLOAT_DOMAIN
-from neps.search_spaces.hyperparameters.categorical import CategoricalParameter
-from neps.search_spaces.hyperparameters.constant import ConstantParameter
-from neps.search_spaces.hyperparameters.float import FloatParameter
-from neps.search_spaces.hyperparameters.integer import IntegerParameter
+from neps.search_spaces.hyperparameters.categorical import Categorical
+from neps.search_spaces.hyperparameters.constant import Constant
+from neps.search_spaces.hyperparameters.float import Float
+from neps.search_spaces.hyperparameters.integer import Integer

if TYPE_CHECKING:
from neps.search_spaces.search_space import SearchSpace
@@ -29,16 +29,16 @@ def _make_grid(
) -> list[dict[str, Any]]:
"""Get a grid of configurations from the search space.
-    For [`NumericalParameter`][neps.search_spaces.NumericalParameter] hyperparameters,
+    For [`Numerical`][neps.search_spaces.Numerical] hyperparameters,
the parameter `size_per_numerical_hp=` is used to determine a grid. If there are
any duplicates, e.g. for an
-    [`IntegerParameter`][neps.search_spaces.IntegerParameter], then we will
+    [`Integer`][neps.search_spaces.Integer], then we will
remove duplicates.
-    For [`CategoricalParameter`][neps.search_spaces.CategoricalParameter]
+    For [`Categorical`][neps.search_spaces.Categorical]
hyperparameters, we include all the choices in the grid.
-    For [`ConstantParameter`][neps.search_spaces.ConstantParameter] hyperparameters,
+    For [`Constant`][neps.search_spaces.Constant] hyperparameters,
we include the constant value in the grid.
!!! note "TODO"
@@ -65,11 +65,11 @@
# If this is resolved, please update the docstring!
case GraphParameter():
raise ValueError("Trying to create a grid for graphs!")
-        case CategoricalParameter():
+        case Categorical():
            param_ranges[name] = list(hp.choices)
-        case ConstantParameter():
+        case Constant():
            param_ranges[name] = [hp.value]
-        case IntegerParameter() | FloatParameter():
+        case Integer() | Float():
if hp.is_fidelity:
param_ranges[name] = [hp.upper]
continue
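
Once the `match` statement above has filled `param_ranges` with a list of candidate values per hyperparameter, the grid is their Cartesian product. A standalone sketch of that cross-product step (an illustrative helper, not the NePS source):

```python
from itertools import product
from typing import Any


def cross_product(param_ranges: dict[str, list[Any]]) -> list[dict[str, Any]]:
    """Return every combination of the per-parameter value lists as configs."""
    keys = list(param_ranges)
    return [
        dict(zip(keys, combo))
        for combo in product(*(param_ranges[k] for k in keys))
    ]


# e.g. {"optimizer": ["sgd", "adam"], "dropout_rate": [0.5]} yields
# [{"optimizer": "sgd", "dropout_rate": 0.5}, {"optimizer": "adam", "dropout_rate": 0.5}]
```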
4 changes: 2 additions & 2 deletions neps/optimizers/multi_fidelity/ifbo.py
@@ -19,7 +19,7 @@
from neps.sampling.samplers import Sampler
from neps.search_spaces.domain import Domain
from neps.search_spaces.encoding import CategoricalToUnitNorm, ConfigEncoder
-from neps.search_spaces.search_space import FloatParameter, IntegerParameter, SearchSpace
+from neps.search_spaces.search_space import Float, Integer, SearchSpace

if TYPE_CHECKING:
from neps.state.optimizer import BudgetInfo
@@ -48,7 +48,7 @@ def _adjust_pipeline_space_to_match_stepsize(
fidelity = pipeline_space.fidelity
fidelity_name = pipeline_space.fidelity_name
assert fidelity_name is not None
-    assert isinstance(fidelity, FloatParameter | IntegerParameter)
+    assert isinstance(fidelity, Float | Integer)
if fidelity.log:
raise NotImplementedError("Log fidelity not yet supported")

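
One detail in the new assert above: passing a `Float | Integer` union to `isinstance` relies on PEP 604 and only works on Python 3.10 and later; the tuple form is the version-agnostic equivalent (a general Python note, not NePS-specific):

```python
# Python >= 3.10 accepts a PEP 604 union object in isinstance checks:
assert isinstance(3, int | float)

# Equivalent spelling that also works on older Python versions:
assert isinstance(3, (int, float))
```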
22 changes: 10 additions & 12 deletions neps/optimizers/multi_fidelity/successive_halving.py
@@ -20,10 +20,10 @@
RandomUniformPolicy,
)
from neps.search_spaces import (
-    CategoricalParameter,
-    ConstantParameter,
-    FloatParameter,
-    IntegerParameter,
+    Categorical,
+    Constant,
+    Float,
+    Integer,
SearchSpace,
)

@@ -34,12 +34,10 @@

logger = logging.getLogger(__name__)

-CUSTOM_FLOAT_CONFIDENCE_SCORES = dict(FloatParameter.DEFAULT_CONFIDENCE_SCORES)
+CUSTOM_FLOAT_CONFIDENCE_SCORES = dict(Float.DEFAULT_CONFIDENCE_SCORES)
CUSTOM_FLOAT_CONFIDENCE_SCORES.update({"ultra": 0.05})

-CUSTOM_CATEGORICAL_CONFIDENCE_SCORES = dict(
-    CategoricalParameter.DEFAULT_CONFIDENCE_SCORES
-)
+CUSTOM_CATEGORICAL_CONFIDENCE_SCORES = dict(Categorical.DEFAULT_CONFIDENCE_SCORES)
CUSTOM_CATEGORICAL_CONFIDENCE_SCORES.update({"ultra": 8})


@@ -187,7 +185,7 @@ def _get_rung_map(self, s: int = 0) -> dict:
for i in reversed(range(nrungs)):
rung_map[i + s] = (
int(_max_budget)
-            if isinstance(self.pipeline_space.fidelity, IntegerParameter)
+            if isinstance(self.pipeline_space.fidelity, Integer)
else _max_budget
)
_max_budget /= self.eta
@@ -465,15 +463,15 @@ def _enhance_priors(self, confidence_score: dict[str, float] | None = None) -> None:
return

for k, v in self.pipeline_space.items():
-        if v.is_fidelity or isinstance(v, ConstantParameter):
+        if v.is_fidelity or isinstance(v, Constant):
continue
-        if isinstance(v, FloatParameter | IntegerParameter):
+        if isinstance(v, Float | Integer):
if confidence_score is None:
confidence = CUSTOM_FLOAT_CONFIDENCE_SCORES[self.prior_confidence]
else:
confidence = confidence_score["numeric"]
self.pipeline_space[k].default_confidence_score = confidence
-        elif isinstance(v, CategoricalParameter):
+        elif isinstance(v, Categorical):
if confidence_score is None:
confidence = CUSTOM_CATEGORICAL_CONFIDENCE_SCORES[
self.prior_confidence
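
The `_get_rung_map` hunk above computes geometrically spaced budgets: starting from the maximum, each lower rung divides the budget by `eta`, with integer fidelities rounded down. A standalone sketch of that spacing (simplified; the real method also applies the `s` rung offset):

```python
def rung_budgets(max_budget: float, eta: int, nrungs: int, integer_fidelity: bool) -> dict:
    """Budget per rung, highest rung first, each a factor of eta below the last."""
    rung_map = {}
    budget = max_budget
    for i in reversed(range(nrungs)):
        rung_map[i] = int(budget) if integer_fidelity else budget
        budget /= eta
    return rung_map


# e.g. rung_budgets(27, eta=3, nrungs=4, integer_fidelity=True)
# -> {3: 27, 2: 9, 1: 3, 0: 1}
```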
(Diffs for the remaining 44 of the 54 changed files are not shown here.)
