
Commit

refactor gp initialization
ronpandolfi committed Aug 2, 2023 · 1 parent 63e74f5 · commit 8724b13
Showing 2 changed files with 8 additions and 20 deletions.
14 changes: 1 addition & 13 deletions tsuchinoko/adaptive/fvgp_gpCAM_in_process.py
@@ -33,22 +33,10 @@ def init_optimizer(self):
         parameter_bounds = np.asarray([[self.parameters[('bounds', f'axis_{i}_{edge}')]
                                         for edge in ['min', 'max']]
                                        for i in range(self.dimensionality)])
-        hyperparameters = np.asarray([self.parameters[('hyperparameters', f'hyperparameter_{i}')]
-                                      for i in range(self.num_hyperparameters)])
-
         self.optimizer = fvGPOptimizer(self.dimensionality, self.output_dim, self.output_number, parameter_bounds)
 
-        if self.initial_x_data is not None and self.initial_y_data is not None:
-            variance_kwargs = {}
-            if self.initial_v_data is not None:
-                variance_kwargs['variances'] = self.initial_v_data
-            self.optimizer.tell(self.initial_x_data, self.initial_y_data, **variance_kwargs)
-
-        opts = self.gp_opts.copy()
-        # TODO: only fallback to numpy when packaged as an app
-        if sys.platform == 'darwin':
-            opts['compute_device'] = 'numpy'
 
+    def init_gp(self, hyperparameters, **opts):
         self.optimizer.init_fvgp(hyperparameters, **opts)
 
     def _set_hyperparameter(self, parameter, value):
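The fvGP half of the refactor is the override: init_optimizer keeps only the optimizer construction, and GP setup moves into an init_gp hook that calls init_fvgp (the matching base-class hook appears in gpCAM_in_process.py below). A minimal sketch of the dispatch pattern, using hypothetical stub classes (StubGPOptimizer, StubFvGPOptimizer, BaseEngine, FvgpEngine, and ensure_gp are illustrative names, not the real tsuchinoko/gpCAM API):

    # Hypothetical stand-ins; the real classes live in tsuchinoko/gpCAM.
    class StubGPOptimizer:
        def __init__(self):
            self.gp_initialized = False

        def init_gp(self, hyperparameters, **opts):
            self.gp_initialized = True


    class StubFvGPOptimizer:
        def __init__(self):
            self.gp_initialized = False

        def init_fvgp(self, hyperparameters, **opts):
            self.gp_initialized = True


    class BaseEngine:
        def __init__(self, optimizer):
            self.optimizer = optimizer

        def ensure_gp(self, hyperparameters, **opts):
            # The optimizer itself records whether its GP exists,
            # replacing the old hand-rolled _needs_init flag.
            if not self.optimizer.gp_initialized:
                self.init_gp(hyperparameters, **opts)

        def init_gp(self, hyperparameters, **opts):
            self.optimizer.init_gp(hyperparameters, **opts)


    class FvgpEngine(BaseEngine):
        def init_gp(self, hyperparameters, **opts):
            # fvGP spells GP construction differently; the base-class
            # call site (self.init_gp) stays the same.
            self.optimizer.init_fvgp(hyperparameters, **opts)


    engine = FvgpEngine(StubFvGPOptimizer())
    engine.ensure_gp([1.0, 1.0], compute_device='numpy')
    assert engine.optimizer.gp_initialized

Because the shared code path always calls self.init_gp, the multi-output engine only overrides that one method instead of duplicating the whole initialization branch.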
14 changes: 7 additions & 7 deletions tsuchinoko/adaptive/gpCAM_in_process.py
@@ -39,7 +39,6 @@ def __init__(self, dimensionality, parameter_bounds, hyperparameters, hyperparam
         self.gp_opts = gp_opts or {}
         self.ask_opts = ask_opts or {}
         self.num_hyperparameters = len(hyperparameters)
-        self._needs_init = True
         if acquisition_functions:
             prepend_update_acquisition_functions(acquisition_functions)
 
@@ -74,8 +73,6 @@ def init_optimizer(self):
 
         self.optimizer = GPOptimizer(self.dimensionality, parameter_bounds)
 
-        self._needs_init = True
-
     def reset(self):
         self._completed_training = {'global': set(),
                                     'local': set()}
@@ -127,15 +124,18 @@ def update_measurements(self, data: Data):
         scores = data.scores.copy()
         variances = data.variances.copy()
         self.optimizer.tell(np.asarray(positions), np.asarray(scores), np.asarray(variances))
-        if self._needs_init:
-            self._needs_init = False
+        if not self.optimizer.gp_initialized:
             hyperparameters = np.asarray([self.parameters[('hyperparameters', f'hyperparameter_{i}')]
                                           for i in range(self.num_hyperparameters)])
             opts = self.gp_opts.copy()
             # TODO: only fallback to numpy when packaged as an app
             if sys.platform == 'darwin':
                 opts['compute_device'] = 'numpy'
-            self.optimizer.init_gp(hyperparameters, **opts)
+            self.init_gp(hyperparameters, **opts)
+
+    def init_gp(self, hyperparameters, **opts):
+        self.optimizer.init_gp(hyperparameters, **opts)
+
 
     def update_metrics(self, data: Data):
         for graph in self.graphs:
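Hyperparameters are assembled from the engine's parameter tree, and on macOS the compute device is forced to 'numpy' before initialization (per the TODO, ideally only when packaged as an app). A small sketch of that option-preparation step, with a plain dict standing in for the parameter tree and prepare_init_args as an illustrative helper name:

    import sys
    import numpy as np

    def prepare_init_args(parameters, num_hyperparameters, gp_opts):
        # parameters is a plain dict keyed by ('hyperparameters', 'hyperparameter_i'),
        # standing in for tsuchinoko's parameter tree.
        hyperparameters = np.asarray([parameters[('hyperparameters', f'hyperparameter_{i}')]
                                      for i in range(num_hyperparameters)])
        opts = gp_opts.copy()
        if sys.platform == 'darwin':
            opts['compute_device'] = 'numpy'  # macOS fallback from the diff
        return hyperparameters, opts

    params = {('hyperparameters', 'hyperparameter_0'): 1.0,
              ('hyperparameters', 'hyperparameter_1'): 0.5}
    print(prepare_init_args(params, 2, {'compute_device': 'gpu'}))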
@@ -151,7 +151,7 @@ def request_targets(self, position):
         n = self.parameters['n']
 
         # If the GP is not initialized, generate random targets
-        if self._needs_init:
+        if not self.optimizer.gp_initialized:
             return [[np.random.uniform(bounds[i][0], bounds[i][1]) for i in range(self.dimensionality)] for j in range(n)]
         else:
             kwargs = {key: self.parameters[key] for key in ['acquisition_function', 'method', 'pop_size', 'tol']}
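Until the GP exists, request_targets now keys off optimizer.gp_initialized and falls back to uniform random sampling within the per-axis bounds, exactly the list comprehension in the hunk above. A self-contained sketch of that fallback (random_targets and the example bounds are illustrative):

    import numpy as np

    def random_targets(bounds, n):
        """Sample n candidate positions uniformly within per-axis (min, max) bounds."""
        dimensionality = len(bounds)
        return [[np.random.uniform(bounds[i][0], bounds[i][1])
                 for i in range(dimensionality)]
                for _ in range(n)]

    # Example: 3 candidate targets in a 2-D domain spanning [0, 10] x [-5, 5]
    print(random_targets([(0, 10), (-5, 5)], n=3))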
