Commit 34823af
Retire parameter normalisation and add output of the parameters in the Pareto front
ruicoelhopedro committed Apr 18, 2024
1 parent 1643d2a commit 34823af
Showing 14 changed files with 169 additions and 170 deletions.
2 changes: 1 addition & 1 deletion piglot/bin/piglot.py
@@ -63,7 +63,7 @@ def main(config_path: str = None):
         )
     # Re-run the best case
     if 'skip_last_run' not in config and best_params is not None:
-        objective(parameters.normalise(best_params))
+        objective(best_params)


 if __name__ == '__main__':
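As an aside (not part of the commit), the change above means objectives are now called with raw parameter values rather than normalised ones. A minimal sketch, where `objective` and `best_params` are hypothetical stand-ins for the names in the diff:

    import numpy as np

    best_params = np.array([1.25, 3.50])  # raw (denormalised) optimum

    def objective(values: np.ndarray) -> float:
        # Placeholder for piglot's Objective.__call__
        return float(np.sum(values ** 2))

    # Before this commit: objective(parameters.normalise(best_params))
    # After this commit: the raw values pass straight through.
    result = objective(best_params)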
3 changes: 2 additions & 1 deletion piglot/bin/piglot_plot.py
@@ -344,7 +344,8 @@ def plot_pareto(args):
     # Separate the dominated points
     dominated = []
     nondominated = []
-    pareto = pd.read_table(os.path.join(config["output"], 'pareto_front')).to_numpy()
+    pareto_data = pd.read_table(os.path.join(config["output"], 'pareto_front')).to_numpy()
+    pareto = pareto_data[:, :objective.num_objectives]
     for i, point in enumerate(total_points):
         if np.isclose(point, pareto).all(axis=1).any():
             nondominated.append((point, variances[i, :] if has_variance else None))
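The new slice is needed because the pareto_front file now stores objective columns followed by parameter columns (see the writer in bayes.py below). A minimal sketch, with the column count assumed:

    import pandas as pd

    num_objectives = 2  # in piglot this comes from objective.num_objectives
    pareto_data = pd.read_table('pareto_front').to_numpy()
    pareto = pareto_data[:, :num_objectives]   # objective values only
    params = pareto_data[:, num_objectives:]   # parameters of each Pareto point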
4 changes: 2 additions & 2 deletions piglot/objective.py
@@ -360,8 +360,8 @@ def __call__(self, values: np.ndarray, concurrent: bool = False) -> ObjectiveResult:
                     file.write(f'{value:>15.8e}\t{var:>15.8e}\t')
             else:
                 file.write(f'{objective_result.scalarise(self.composition):>15.8e}\t')
-            for i, param in enumerate(self.parameters):
-                file.write(f"{param.denormalise(values[i]):>15.6f}\t")
+            for val in values:
+                file.write(f"{val:>15.6f}\t")
             file.write(f'{self.parameters.hash(values)}\n')
         return objective_result

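To illustrate the resulting row format (the values and hash below are made up; the formatting mirrors the f-strings above):

    values = [1.25, 3.50]
    # Each value right-aligned to 15 characters with 6 decimals, tab-separated,
    # followed by the parameter hash.
    row = ''.join(f'{val:>15.6f}\t' for val in values) + 'a1b2c3d4'
    print(row)  # e.g. "       1.250000<TAB>       3.500000<TAB>a1b2c3d4"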
4 changes: 2 additions & 2 deletions piglot/objectives/analytical.py
@@ -22,7 +22,7 @@ def __init__(self, parameters: ParameterSet, expression: str, output_dir: str = None):
         )
         # Generate a dummy set of parameters (to ensure proper handling of output parameters)
         values = np.array([parameter.inital_value for parameter in parameters])
-        symbs = sympy.symbols(list(parameters.to_dict(values, input_normalised=False).keys()))
+        symbs = sympy.symbols(list(parameters.to_dict(values).keys()))
         self.parameters = parameters
         self.expression = sympy.lambdify(symbs, expression)

@@ -57,7 +57,7 @@ def _objective_denorm(self, values: np.ndarray) -> float:
         float
             Objective value.
         """
-        return self.expression(**self.parameters.to_dict(values, input_normalised=False))
+        return self.expression(**self.parameters.to_dict(values))

     def _plot_1d(self, values: np.ndarray, append_title: str) -> Figure:
         """Plot the objective in 1D.
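For context, a minimal sketch of the sympy pattern used here, with a hypothetical two-parameter dictionary standing in for parameters.to_dict(values):

    import sympy

    param_dict = {'a': 1.0, 'b': 2.0}            # stands in for parameters.to_dict(values)
    symbs = sympy.symbols(list(param_dict.keys()))
    func = sympy.lambdify(symbs, 'a ** 2 + b')   # expression given as a string
    print(func(**param_dict))                    # 3.0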
2 changes: 1 addition & 1 deletion piglot/objectives/synthetic.py
@@ -110,7 +110,7 @@ def _objective(self, values: np.ndarray, concurrent: bool = False) -> ObjectiveResult:
         ObjectiveResult
             Objective value.
         """
-        params = torch.tensor(self.parameters.denormalise(values))
+        params = torch.from_numpy(values)
         value = self.func.evaluate_true(params)
         if self.composition is not None:
             value -= self.func.optimal_value
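Worth noting (general PyTorch behaviour, not specific to this commit): torch.from_numpy shares memory with the source array and preserves its dtype, avoiding the copy that torch.tensor makes:

    import numpy as np
    import torch

    values = np.array([0.5, 1.5])          # float64 by default
    params = torch.from_numpy(values)      # zero-copy view of the same buffer
    assert params.dtype == torch.float64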
94 changes: 77 additions & 17 deletions piglot/optimiser.py
@@ -255,9 +255,8 @@ def optimise(
         self.iters_no_improv = 0
         # Build initial shot and bounds
         n_dim = len(self.parameters)
-        init_shot = np.array([par.normalise(par.inital_value) for par in self.parameters])
-        bounds = np.ones((n_dim, 2))
-        bounds[:, 0] = -1
+        init_shot = np.array([par.inital_value for par in self.parameters])
+        bounds = np.array([[par.lbound, par.ubound] for par in self.parameters])
         # Build best solution
         self.best_value = np.nan
         self.best_solution = None
@@ -278,10 +277,7 @@ def optimise(
         self.begin_time = time.perf_counter()
         self._optimise(n_dim, n_iter, bounds, init_shot)
         elapsed = time.perf_counter() - self.begin_time
-        # Denormalise best solution (if any)
-        new_solution = None
-        if self.best_solution is not None:
-            new_solution = np.array(self.parameters.denormalise(self.best_solution))
+        # Output progress
         if verbose:
             self.pbar.close()
             print(f'Completed {self.i_iter} iterations in {pretty_time(elapsed)}')
@@ -290,9 +286,9 @@ def optimise(
             print('Best parameters')
             max_width = max(len(par.name) for par in self.parameters)
             for i, par in enumerate(self.parameters):
-                print(f'- {par.name.rjust(max_width)}: {new_solution[i]:>12.6f}')
+                print(f'- {par.name.rjust(max_width)}: {self.best_solution[i]:>12.6f}')
         # Return the best value
-        return self.best_value, new_solution
+        return self.best_value, self.best_solution

     @abstractmethod
     def _optimise(
@@ -346,9 +342,6 @@ def __update_progress_files(
         """
         elapsed = time.perf_counter() - self.begin_time
         skip_pars = curr_solution is None
-        if not skip_pars:
-            denorm_curr = self.parameters.denormalise(curr_solution)
-            denorm_best = self.parameters.denormalise(self.best_solution)
         # Update progress file
         with open(os.path.join(self.output_dir, "progress"), 'w', encoding='utf8') as file:
             file.write(f'Iteration: {i_iter}\n')
@@ -359,7 +352,7 @@ def __update_progress_files(
             if not skip_pars:
                 file.write('Best parameters:\n')
                 for i, par in enumerate(self.parameters):
-                    file.write(f'\t{par.name}: {denorm_best[i]}\n')
+                    file.write(f'\t{par.name}: {self.best_solution[i]}\n')
             file.write(f'\nElapsed time: {pretty_time(elapsed)}\n')
         # Update history file
         with open(os.path.join(self.output_dir, "history"), 'a', encoding='utf8') as file:
@@ -368,7 +361,7 @@ def __update_progress_files(
             file.write(f'{self.best_value:>15.8e}\t')
             file.write(f'{curr_value:>15.8e}\t')
             for i, par in enumerate(self.parameters):
-                file.write('None\t'.rjust(16) if skip_pars else f'{denorm_curr[i]:>15.8f}\t')
+                file.write('None\t'.rjust(16) if skip_pars else f'{curr_solution[i]:>15.8f}\t')
             file.write(f"\t{'-' if extra_info is None else extra_info}")
             file.write('\n')

@@ -426,6 +419,10 @@ def _progress_check(
 class ScalarOptimiser(Optimiser):
     """Base class for scalar optimisers."""

+    def __init__(self, name: str, objective: Objective) -> None:
+        super().__init__(name, objective)
+        self.bounds = None
+
     def _validate_problem(self, objective: Objective) -> None:
         """Validate the combination of optimiser and objective.
@@ -479,6 +476,36 @@ def _scalar_optimise(
             Observed optimum of the objective.
         """

+    def _norm_params(self, params: np.ndarray) -> np.ndarray:
+        """Normalise the parameters.
+
+        Parameters
+        ----------
+        params : np.ndarray
+            Denormalised parameters.
+
+        Returns
+        -------
+        np.ndarray
+            Normalised parameters.
+        """
+        return 2.0 * (params - self.bounds[:, 0]) / (self.bounds[:, 1] - self.bounds[:, 0]) - 1.0
+
+    def _denorm_params(self, params: np.ndarray) -> np.ndarray:
+        """Denormalise the parameters.
+
+        Parameters
+        ----------
+        params : np.ndarray
+            Normalised parameters.
+
+        Returns
+        -------
+        np.ndarray
+            Denormalised parameters.
+        """
+        return self.bounds[:, 0] + (1.0 + params) * (self.bounds[:, 1] - self.bounds[:, 0]) / 2.0
+
     def _optimise(
         self,
         n_dim: int,
@@ -509,11 +536,44 @@ def _optimise(
         np.ndarray
             Observed optimum of the objective.
         """
+        self.bounds = bound
         # Optimise the scalarised objective
         return self._scalar_optimise(
-            lambda x, concurrent=False: self.objective(x, concurrent=concurrent).scalarise(),
+            lambda x, concurrent=False: self.objective(
+                self._denorm_params(x),
+                concurrent=concurrent
+            ).scalarise(),
             n_dim,
             n_iter,
-            bound,
-            init_shot,
+            np.array([[-1.0, 1.0]]).repeat(n_dim, axis=0),
+            self._norm_params(init_shot),
         )

+    def _progress_check(
+        self,
+        i_iter: int,
+        curr_value: float,
+        curr_solution: np.ndarray,
+        extra_info: str = None,
+    ) -> bool:
+        """Report the optimiser progress and check for termination (with parameter denormalisation).
+
+        Parameters
+        ----------
+        i_iter : int
+            Current iteration number.
+        curr_value : float
+            Current objective value.
+        curr_solution : np.ndarray
+            Current objective minimiser.
+        extra_info : str
+            Additional information to pass to the user.
+
+        Returns
+        -------
+        bool
+            Whether any of the stopping criteria is satisfied.
+        """
+        denorm_solution = None if curr_solution is None else self._denorm_params(curr_solution)
+        return super()._progress_check(i_iter, curr_value, denorm_solution, extra_info)
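To make the new normalisation scheme concrete, a standalone sketch of the affine map between the parameter bounds and [-1, 1] (the bounds below are illustrative):

    import numpy as np

    bounds = np.array([[0.0, 10.0], [-5.0, 5.0]])  # one [lbound, ubound] row per parameter

    def norm_params(params: np.ndarray) -> np.ndarray:
        return 2.0 * (params - bounds[:, 0]) / (bounds[:, 1] - bounds[:, 0]) - 1.0

    def denorm_params(params: np.ndarray) -> np.ndarray:
        return bounds[:, 0] + (1.0 + params) * (bounds[:, 1] - bounds[:, 0]) / 2.0

    x = np.array([2.5, 0.0])
    print(norm_params(x))                                  # [-0.5  0. ]
    assert np.allclose(denorm_params(norm_params(x)), x)   # exact round trip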
19 changes: 13 additions & 6 deletions piglot/optimisers/botorch/bayes.py
@@ -312,14 +312,21 @@ def _update_mo_data(self, dataset: BayesDataset) -> float:
         self.partitioning = FastNondominatedPartitioning(self.ref_point, Y=y_points)
         hypervolume = self.partitioning.compute_hypervolume().item()
         pareto = self.partitioning.pareto_Y
+        # Map each Pareto point to the original parameter space
+        param_indices = [
+            torch.argmin((y_points - pareto[i, :]).norm(dim=1)).item()
+            for i in range(pareto.shape[0])
+        ]
         # Dump the Pareto front to a file
         with open(os.path.join(self.output_dir, "pareto_front"), 'w', encoding='utf8') as file:
-            file.write('\t'.join(
-                [f'{"Objective_" + str(i + 1):>15}' for i in range(pareto.shape[1])]) + '\n'
-            )
-            for point in pareto:
-                file.write('\t'.join([f'{-x.item():>15.8f}' for x in point]) + '\n')
-            # TODO: after updating the parameter set, write the parameters and hash for each point
+            # Write header
+            num_obj = pareto.shape[1]
+            file.write('\t'.join([f'{"Objective_" + str(i + 1):>15}' for i in range(num_obj)]))
+            file.write('\t' + '\t'.join([f'{param.name:>15}' for param in self.parameters]) + '\n')
+            # Write each point
+            for i, idx in enumerate(param_indices):
+                file.write('\t'.join([f'{-x.item():>15.8f}' for x in pareto[i, :]]) + '\t')
+                file.write('\t'.join([f'{x.item():>15.8f}' for x in dataset.params[idx, :]]) + '\n')
         return -np.log(hypervolume)

     def _acq_func(
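A self-contained sketch of the nearest-point lookup introduced above (data is illustrative): each Pareto point is matched to the evaluated point with identical objective values, and that index recovers the corresponding parameters.

    import torch

    y_points = torch.tensor([[1.0, 4.0], [2.0, 3.0], [3.0, 1.0]])  # all evaluated objectives
    pareto = torch.tensor([[1.0, 4.0], [3.0, 1.0]])                # non-dominated subset
    param_indices = [
        torch.argmin((y_points - pareto[i, :]).norm(dim=1)).item()
        for i in range(pareto.shape[0])
    ]
    print(param_indices)  # [0, 2] -> rows of dataset.params for each Pareto point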
(Diffs for the remaining 7 changed files are not shown.)
