Skip to content

Commit

Permalink
Add custom param support
Browse files Browse the repository at this point in the history
  • Loading branch information
evhub committed Jun 30, 2019
1 parent d14e8a0 commit e8e1580
Show file tree
Hide file tree
Showing 13 changed files with 152 additions and 123 deletions.
7 changes: 7 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,7 @@ Some examples of BBopt in action:
1. [`lognormvariate`](#lognormvariate)
1. [`rand`](#rand)
1. [`randn`](#randn)
1. [`param`](#param)
1. [Writing Your Own Backend](#writing-your-own-backend)

<!-- /MarkdownTOC -->
Expand Down Expand Up @@ -407,6 +408,12 @@ Create a new parameter modeled by [`numpy.random.randn(*shape)`](https://docs.sc

_Backends which support **randn**: `hyperopt`, `random`._

#### `param`

BlackBoxOptimizer.**param**(_name_, _func_, *_args_, **_kwargs_)

Create a new parameter modeled by the parameter definition function _func_ with the given arguments. This function is mostly useful if you want to use a custom backend that implements parameter definition functions not included in BBopt by default.

### Writing Your Own Backend

BBopt's backend system is built to be extremely extensible, allowing anyone to write and register their own BBopt backends. The basic template for writing a BBopt backend is as follows:
Expand Down
2 changes: 1 addition & 1 deletion bbopt-source/__init__.coco
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,4 @@ limitations under the License.
"""

from bbopt.backends import * # register backends
from bbopt.optimizer import BlackBoxOptimizer # make optimizer available
from bbopt.optimizer import * # make optimizer available
7 changes: 6 additions & 1 deletion bbopt-source/backends/util.coco
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def split_examples(
match {"values": values, "loss": loss, **_}:
pass
else:
raise ValueError("invalid example {}".format(example))
raise ValueError(f"invalid example {example}")

# extract features
features = make_features(values, params, fallback_func, converters, convert_fallback) |> list
Expand Down Expand Up @@ -222,3 +222,8 @@ class Backend:
"""Register an alias for this backend."""
assert cls.backend_name is not None, "Backend subclasses using Backend.register_alias must set backend_name on the class"
backend_registry.register_alias(cls.backend_name, alias)

@staticmethod
def register_param_func(func_name, handler, placeholder_generator):
    """Register a new parameter definition function. See bbopt.params for examples.

    func_name: the name the new parameter definition function is exposed under.
    handler: callable applied to the function's standardized positional args
        (stored in the processor's handler table).
    placeholder_generator: callable that produces a fallback value from the
        args when a parameter is missing from an example.
    """
    # Delegate to the shared module-level param_processor so the
    # registration is visible to all backends, not just this subclass.
    param_processor.register(func_name, handler, placeholder_generator)
2 changes: 1 addition & 1 deletion bbopt-source/constants.coco
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ Constants for use across all of BBopt.

# Installation constants:
name = "bbopt"
version = "1.1.5"
version = "1.1.6"
description = "The easiest hyperparameter optimization you'll ever do."
long_description = """
See BBopt's GitHub_ for more information.
Expand Down
91 changes: 46 additions & 45 deletions bbopt-source/optimizer.coco
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,19 @@ from bbopt.backends.serving import ServingBackend
from bbopt.backends.skopt import SkoptBackend


def array_param(func, name, shape, kwargs):
    """Create a new array parameter for the given name and shape with entries from func.

    func: scalar parameter definition method (e.g. self.random) called once
        per array cell.
    name: base name; each cell is registered under the derived name
        "name[i,j,...]".
    shape: tuple of dimension sizes for the resulting numpy array.
    kwargs: extra keyword arguments forwarded to func; presumably
        array-valued entries are indexed down to the current cell by
        param_processor.modify_kwargs — confirm against bbopt.params.
    """
    if not isinstance(name, Str):
        raise TypeError(f"name must be string, not {name}")
    # Fill a float array cell-by-cell, one scalar parameter per cell.
    arr = np.zeros(shape)
    for indices in itertools.product(*map(range, shape)):
        index_str = ",".join(map(str, indices))
        # Unique per-cell parameter name, e.g. "w[0,1]".
        cell_name = f"{name}[{index_str}]"
        # Coconut: pipe kwargs through modify_kwargs partially applied with
        # a lambda that selects this cell's entry from each kwarg value.
        proc_kwargs = kwargs |> param_processor.modify_kwargs$(-> _[indices])
        arr[indices] = func(cell_name, **proc_kwargs)
    return arr


class BlackBoxOptimizer:
"""Main bbopt optimizer object. See https://github.com/evhub/bbopt for documentation."""
backend = None
Expand Down Expand Up @@ -93,27 +106,6 @@ class BlackBoxOptimizer:
"""Whether we have seen a maximize/minimize call yet."""
"loss" in self._current_example or "gain" in self._current_example

def _param(self, name, func, *args, **kwargs):
    """Create a black box parameter and return its value.

    name: unique string name of the parameter.
    func: name of the parameter definition function modeling it
        (e.g. "randrange").
    args/kwargs: arguments to that function, forwarded to the backend
        after standardization.
    Raises ValueError after maximize/minimize or on a duplicate name,
    and TypeError when name is not a string.
    """
    # Once a reward is recorded the example is closed to new parameters.
    if self._got_reward:
        raise ValueError("all parameter definitions must come before maximize/minimize")
    if not isinstance(name, Str):
        raise TypeError(f"name must be a string, not {name}")
    if name in self._new_params:
        raise ValueError(f"parameter of name {name} already exists")

    # Normalize args/kwargs into their canonical serialized form.
    args = param_processor.standardize_args(func, args)
    kwargs = param_processor.standardize_kwargs(kwargs)

    # Coconut pattern match: if this name existed in previously saved data
    # with a different definition, warn but continue with the new one.
    match {=name: (old_func, old_args, old_kwargs), **_} in self._old_params:
        if (func, args) != (old_func, old_args):
            print(f"BBopt Warning: detected change in parameter {name} ({(func, args)} != {(old_func, old_args)}) (you may need to delete your old BBopt data)")

    # The active backend chooses the value; record it for this example.
    value = self.backend.param(name, func, *args, **kwargs)
    self._new_params[name] = (func, args, kwargs)
    self._current_example["values"][name] = value
    return value

def _set_reward(self, reward_type, value):
"""Set the gain or loss to the given value."""
if self._got_reward:
Expand Down Expand Up @@ -186,19 +178,28 @@ class BlackBoxOptimizer:
self._skopt_backend = SkoptBackend(*skopt_backend_args)
return self._skopt_backend

# External API:

def param(self, name, func, *args, **kwargs):
    """Create a black box parameter and return its value.

    name: unique string name of the parameter.
    func: name of the parameter definition function modeling it
        (e.g. "randrange"); must be known to the param_processor/backend.
    args/kwargs: arguments to that function, forwarded to the backend
        after standardization.
    Raises ValueError after maximize/minimize or on a duplicate name,
    and TypeError when name is not a string.
    """
    # Once a reward is recorded the example is closed to new parameters.
    if self._got_reward:
        raise ValueError("all parameter definitions must come before maximize/minimize")
    if not isinstance(name, Str):
        raise TypeError(f"name must be a string, not {name}")
    if name in self._new_params:
        raise ValueError(f"parameter of name {name} already exists")

    # Normalize args/kwargs into their canonical serialized form.
    args = param_processor.standardize_args(func, args)
    kwargs = param_processor.standardize_kwargs(kwargs)

    # Coconut pattern match: if this name existed in previously saved data
    # with a different definition, warn but continue with the new one.
    match {=name: (old_func, old_args, old_kwargs), **_} in self._old_params:
        if (func, args) != (old_func, old_args):
            print(f"BBopt Warning: detected change in parameter {name} ({(func, args)} != {(old_func, old_args)}) (you may need to delete your old BBopt data)")

    # The active backend chooses the value; record it for this example.
    value = self.backend.param(name, func, *args, **kwargs)
    self._new_params[name] = (func, args, kwargs)
    self._current_example["values"][name] = value
    return value

def reload(self):
"""Completely reload the optimizer."""
Expand Down Expand Up @@ -400,47 +401,47 @@ class BlackBoxOptimizer:

def randrange(self, name, *args, **kwargs):
    """Create a new parameter with the given name modeled by random.randrange(*args)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "randrange", *args, **kwargs)

def choice(self, name, seq, **kwargs):
    """Create a new parameter with the given name modeled by random.choice(seq)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "choice", seq, **kwargs)

def uniform(self, name, a, b, **kwargs):
    """Create a new parameter with the given name modeled by random.uniform(a, b)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "uniform", a, b, **kwargs)

def triangular(self, name, low, high, mode, **kwargs):
    """Create a new parameter with the given name modeled by random.triangular(low, high, mode)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "triangular", low, high, mode, **kwargs)

def betavariate(self, name, alpha, beta, **kwargs):
    """Create a new parameter with the given name modeled by random.betavariate(alpha, beta)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "betavariate", alpha, beta, **kwargs)

def expovariate(self, name, lambd, **kwargs):
    """Create a new parameter with the given name modeled by random.expovariate(lambd)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "expovariate", lambd, **kwargs)

def gammavariate(self, name, alpha, beta, **kwargs):
    """Create a new parameter with the given name modeled by random.gammavariate(alpha, beta)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "gammavariate", alpha, beta, **kwargs)

def normalvariate(self, name, mu, sigma, **kwargs):
    """Create a new parameter with the given name modeled by random.gauss(mu, sigma)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "normalvariate", mu, sigma, **kwargs)

def vonmisesvariate(self, name, kappa, **kwargs):
    """Create a new parameter with the given name modeled by random.vonmisesvariate(kappa)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "vonmisesvariate", kappa, **kwargs)

def paretovariate(self, name, alpha, **kwargs):
    """Create a new parameter with the given name modeled by random.paretovariate(alpha)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "paretovariate", alpha, **kwargs)

def weibullvariate(self, name, alpha, beta, **kwargs):
    """Create a new parameter with the given name modeled by random.weibullvariate(alpha, beta)."""
    # Delegate to the public param entry point (replaces removed _param).
    return self.param(name, "weibullvariate", alpha, beta, **kwargs)

# Derived random functions:

Expand Down Expand Up @@ -504,8 +505,8 @@ class BlackBoxOptimizer:

def rand(self, name, *shape, **kwargs):
    """Create a new array parameter for the given name and shape modeled by np.random.rand."""
    # Build the array via the module-level helper, one self.random scalar
    # parameter per cell (replaces the removed _array_param method).
    return array_param(self.random, name, shape, kwargs)

def randn(self, name, *shape, **kwargs):
    """Create a new array parameter for the given name and shape modeled by np.random.randn."""
    # Coconut partial: self.normalvariate$(?, 0, 1) fixes mu=0, sigma=1 and
    # leaves the per-cell name slot open for the array_param helper.
    return array_param(self.normalvariate$(?, 0, 1), name, shape, kwargs)
9 changes: 7 additions & 2 deletions bbopt-source/params.coco
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,11 @@ class ParamProcessor:
"weibullvariate": placeholder_weibullvariate,
}

def register(self, func_name, handler, placeholder_generator):
    """Register a new parameter definition function. See bbopt.params for examples.

    Bug fix: the original definition omitted ``self`` while its body used
    ``self``, so any call (e.g. ``param_processor.register(...)``) raised
    TypeError/NameError. This is an instance method of ParamProcessor.

    func_name: the name the new parameter definition function is exposed under.
    handler: callable applied to the function's standardized positional args
        (stored in self.handlers).
    placeholder_generator: callable that produces a fallback value from the
        args (stored in self.placeholder_funcs).
    """
    self.handlers[func_name] = handler
    self.placeholder_funcs[func_name] = placeholder_generator

def modify_kwargs(self, func, kwargs):
"""Apply func to all kwargs with values in the random function's domain."""
new_kwargs = {}
Expand All @@ -180,7 +185,7 @@ class ParamProcessor:

# detect invalid funcs
if func not in self.handlers:
raise ValueError(f"unknown parameter definition function {func}")
raise ValueError(f"unknown parameter definition function {func} (register with bbopt.params.param_processor.register)")

# run handler
result = self.handlers[func](args)
Expand All @@ -195,7 +200,7 @@ class ParamProcessor:
def choose_default_placeholder(self, name, func, *args, **kwargs):
    """Choose a default placeholder_when_missing value for the given parameter.

    Raises ValueError for parameter definition functions with no registered
    placeholder generator, pointing the user at the register hook.
    """
    if func not in self.placeholder_funcs:
        raise ValueError(f"unknown parameter definition function {func} (register with bbopt.params.param_processor.register)")
    # Placeholder generators take the standardized positional args.
    return self.placeholder_funcs[func](*args)


Expand Down
8 changes: 4 additions & 4 deletions bbopt-source/util.coco
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def json_serialize(obj):
for k, v in obj.items():
serialized_k = json_serialize(k)
if not isinstance(serialized_k, str):
raise TypeError("dict keys must be strings, not {}".format(k))
raise TypeError(f"dict keys must be strings, not {k}")
serialized_dict[k] = json_serialize(v)
return serialized_dict
if isinstance(obj, Iterable):
Expand All @@ -82,8 +82,8 @@ def json_serialize(obj):
serialized_list.append(json_serialize(x))
return serialized_list
if isnumpy(obj):
return denumpy(obj, fallback=(def -> raise TypeError("cannot JSON serialize numpy dtype {}".format(obj.dtype))))
raise TypeError("cannot JSON serialize {}".format(obj))
return denumpy(obj, fallback=(def -> raise TypeError(f"cannot JSON serialize numpy dtype {obj.dtype}")))
raise TypeError(f"cannot JSON serialize {obj}")


def sorted_items(params) =
Expand All @@ -93,7 +93,7 @@ def sorted_items(params) =

def sorted_examples(examples) =
    """Sort examples by their timestamp."""
    # Coconut implicit partial .["timestamp"] acts as the key function.
    sorted(examples, key=.["timestamp"])


def running_best(examples):
Expand Down
4 changes: 2 additions & 2 deletions bbopt/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# __coconut_hash__ = 0x75c86a86
# __coconut_hash__ = 0x754305b2

# Compiled with Coconut version 1.4.0-post_dev40 [Ernest Scribbler]

Expand Down Expand Up @@ -40,4 +40,4 @@


from bbopt.backends import * # register backends
from bbopt.optimizer import BlackBoxOptimizer # make optimizer available
from bbopt.optimizer import * # make optimizer available
9 changes: 7 additions & 2 deletions bbopt/backends/util.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# __coconut_hash__ = 0x189fad0
# __coconut_hash__ = 0x80ca3ec7

# Compiled with Coconut version 1.4.0-post_dev40 [Ernest Scribbler]

Expand Down Expand Up @@ -120,7 +120,7 @@ def split_examples(examples, params, fallback_func=param_processor.choose_defaul
if _coconut_case_check_0:
pass
if not _coconut_case_check_0:
raise ValueError("invalid example {}".format(example))
raise ValueError("invalid example {_coconut_format_0}".format(_coconut_format_0=(example)))

# extract features
features = (list)(make_features(values, params, fallback_func, converters, convert_fallback))
Expand Down Expand Up @@ -264,3 +264,8 @@ def register_alias(cls, alias):
"""Register an alias for this backend."""
assert cls.backend_name is not None, "Backend subclasses using Backend.register_alias must set backend_name on the class"
backend_registry.register_alias(cls.backend_name, alias)

@staticmethod
def register_param_func(func_name, handler, placeholder_generator):
    """Register a new parameter definition function. See bbopt.params for examples.

    Compiled (Coconut -> Python) counterpart of Backend.register_param_func
    in bbopt-source/backends/util.coco. Stores handler (argument
    validator/normalizer) and placeholder_generator (fallback-value
    producer) under func_name on the shared param_processor.
    """
    # Delegates to the module-level param_processor shared by all backends.
    param_processor.register(func_name, handler, placeholder_generator)
4 changes: 2 additions & 2 deletions bbopt/constants.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# __coconut_hash__ = 0x4564326b
# __coconut_hash__ = 0x30737416

# Compiled with Coconut version 1.4.0-post_dev40 [Ernest Scribbler]

Expand Down Expand Up @@ -29,7 +29,7 @@

# Installation constants:
name = "bbopt"
version = "1.1.5"
version = "1.1.6"
description = "The easiest hyperparameter optimization you'll ever do."
long_description = """
See BBopt's GitHub_ for more information.
Expand Down
Loading

0 comments on commit e8e1580

Please sign in to comment.