Framework for QoI optimisation #251

Draft
wants to merge 8 commits into base: main
188 changes: 188 additions & 0 deletions goalie/optimisation.py
@@ -0,0 +1,188 @@
"""
Module for handling PDE-constrained optimisation.
"""

import abc

import numpy as np

from .log import log
from .utility import AttrDict

__all__ = ["OptimisationProgress", "QoIOptimiser"]


class OptimisationProgress(AttrDict):
"""
Class for stashing progress of an optimisation routine.
"""

def __init__(self):
self["qoi"] = []
self["control"] = []
self["gradient"] = []
self["hessian"] = []


class QoIOptimiser_Base(abc.ABC):
"""
Base class for handling PDE-constrained optimisation.
"""

def __init__(self, forward_run, mesh, control, params):
"""
:arg forward_run: a Python function that implements the forward model and computes
the objective functional
:type forward_run: :class:`~.Callable`
:arg mesh: the initial mesh
:type mesh: :class:`firedrake.mesh.MeshGeometry`
:arg control: the initial control value
:type control: :class:`~.Control`
        :arg params: parameters for the optimisation routine
        :type params: :class:`~.OptimisationParameters`
"""
# TODO: Use Goalie Solver rather than forward_run
self.forward_run = forward_run
self.mesh = mesh
self.control = control
self.params = params

def line_search(self, P, J, dJ):
"""
Apply a backtracking line search method to update the step length (i.e., learning
rate).

:arg P: the current descent direction
:type P: :class:`firedrake.function.Function`
        :arg J: the current value of the objective function
        :type J: :class:`~.AdjFloat`
        :arg dJ: the current gradient value
        :type dJ: :class:`firedrake.function.Function`
        :returns: the step length accepted by the line search
        :rtype: :class:`float`
        """

lr = self.params.lr
if not self.params.line_search:
return lr
alpha = self.params.ls_rtol
tau = self.params.ls_frac
maxiter = self.params.ls_maxiter

# Compute initial slope
initial_slope = np.dot(dJ.dat.data, P.dat.data)
if np.isclose(initial_slope, 0.0):
return self.params.lr

# Perform line search
log(f" Applying line search with alpha = {alpha} and tau = {tau}")
ext = ""
for i in range(maxiter):
log(f" {i:3d}: lr = {lr:.4e}{ext}")
u_plus = self.control + lr * P
# TODO: Use Goalie Solver rather than forward_run
J_plus, u_plus = self.forward_run(self.mesh, u_plus)
ext = f" diff {J_plus - J:.4e}"

# Check Armijo rule:
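            # i.e. accept the step as soon as
            #     J(u + lr * P) <= J(u) + alpha * lr * <dJ, P>,
            # where <dJ, P> is the initial slope computed above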
if J_plus - J <= alpha * lr * initial_slope:
break
lr *= tau
if lr < self.params.lr_min:
lr = self.params.lr_min
break
else:
raise Exception("Line search did not converge")
        log(f" converged lr = {lr:.4e}")
        self.lr = lr
        return lr

@abc.abstractmethod
def step(self):
"""
Take a step with the chosen optimisation approach.

This method should be implemented in the subclass.
"""
pass

def minimise(self):
raise NotImplementedError # TODO: Upstream implementation from opt_adapt


class QoIOptimiser_GradientDescent(QoIOptimiser_Base):
"""
Class for handling PDE-constrained optimisation using the gradient descent approach.
"""

order = 1
method_type = "gradient-based"

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
raise NotImplementedError # TODO: Upstream gradient descent implementation


class QoIOptimiser_Adam(QoIOptimiser_Base):
"""
Class for handling PDE-constrained optimisation using the Adam approach.
"""

order = 1
method_type = "gradient-based"

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
raise NotImplementedError # TODO: Upstream Adam implementation


class QoIOptimiser_Newton(QoIOptimiser_Base):
"""
Class for handling PDE-constrained optimisation using the Newton approach.
"""

order = 2
method_type = "newton"

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
raise NotImplementedError # TODO: Upstream Newton implementation


class QoIOptimiser_BFGS(QoIOptimiser_Base):
"""
Class for handling PDE-constrained optimisation using the BFGS approach.
"""

order = 2
method_type = "quasi-newton"

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
raise NotImplementedError # TODO: Upstream BFGS implementation


class QoIOptimiser_LBFGS(QoIOptimiser_Base):
"""
Class for handling PDE-constrained optimisation using the L-BFGS approach.
"""

order = 2
method_type = "quasi-newton"

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
raise NotImplementedError # TODO: Upstream L-BFGS implementation


def QoIOptimiser(method="gradient_descent"):
"""
    Factory function for PDE-constrained optimisation handlers.

    :kwarg method: name of the optimisation method to use
    :type method: :class:`str`
    :returns: the :class:`~.QoIOptimiser_Base` subclass implementing that method
    """
try:
return {
"gradient_descent": QoIOptimiser_GradientDescent,
"adam": QoIOptimiser_Adam,
"newton": QoIOptimiser_Newton,
"bfgs": QoIOptimiser_BFGS,
"lbfgs": QoIOptimiser_LBFGS,
}[method]
except KeyError as ke:
raise ValueError(f"Method {method} not supported.") from ke
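
As a usage sketch for reviewers, the snippet below shows how these pieces are intended to fit together once the optimiser implementations are upstreamed. The forward_run, mesh and control objects are placeholders, the import paths simply follow this PR's file layout, and most subclasses currently raise NotImplementedError.

from goalie.optimisation import OptimisationProgress, QoIOptimiser
from goalie.options import OptimisationParameters

params = OptimisationParameters({"lr": 0.01, "maxiter": 20})
progress = OptimisationProgress()  # stashes qoi/control/gradient/hessian histories

# The factory returns the optimiser class for the requested method; it is then
# instantiated with the forward model, initial mesh, control and parameters.
GradientDescent = QoIOptimiser("gradient_descent")
optimiser = GradientDescent(forward_run, mesh, control, params)  # placeholder objects
optimiser.minimise()  # not yet implemented in this draft
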
105 changes: 84 additions & 21 deletions goalie/options.py
@@ -1,30 +1,26 @@
"""
Classes for holding parameters associated with adaptation and optimisation methods.
"""

import abc

from .utility import AttrDict

__all__ = [
"AdaptParameters",
"GoalOrientedAdaptParameters",
]
__all__ = ["AdaptParameters", "GoalOrientedAdaptParameters", "OptimisationParameters"]


class AdaptParameters(AttrDict):
class Parameters(AttrDict, metaclass=abc.ABCMeta):
"""
A class for holding parameters associated with adaptive mesh fixed point iteration
loops.
Base class for parameter holders.
"""

def __init__(self, parameters=None):
@abc.abstractmethod
def __init__(self, parameters):
"""
:arg parameters: parameters to set
:kwarg parameters: parameters to set
:type parameters: :class:`dict` with :class:`str` keys and values which may take
various types
"""
parameters = parameters or {}

self["miniter"] = 3 # Minimum iteration count
self["maxiter"] = 35 # Maximum iteration count
self["element_rtol"] = 0.001 # Relative tolerance for element count
self["drop_out_converged"] = False # Drop out converged subintervals?

if not isinstance(parameters, dict):
raise TypeError(
"Expected 'parameters' keyword argument to be a dictionary, not of"
@@ -36,10 +32,6 @@ def __init__(self, parameters=None):
f"{self.__class__.__name__} does not have '{key}' attribute."
)
super().__init__(parameters)
self._check_type("miniter", int)
self._check_type("maxiter", int)
self._check_type("element_rtol", (float, int))
self._check_type("drop_out_converged", bool)

def _check_type(self, key, expected):
"""
@@ -83,6 +75,32 @@ def __repr__(self):
return f"{type(self).__name__}({d})"


class AdaptParameters(Parameters):
"""
A class for holding parameters associated with adaptive mesh fixed point iteration
loops.
"""

def __init__(self, parameters=None):
"""
:kwarg parameters: parameters to set
:type parameters: :class:`dict` with :class:`str` keys and values which may take
various types
"""
parameters = parameters or {}

self["miniter"] = 3 # Minimum iteration count
self["maxiter"] = 35 # Maximum iteration count
self["element_rtol"] = 0.001 # Relative tolerance for element count
self["drop_out_converged"] = False # Drop out converged subintervals?

super().__init__(parameters)
self._check_type("miniter", int)
self._check_type("maxiter", int)
self._check_type("element_rtol", (float, int))
self._check_type("drop_out_converged", bool)


class GoalOrientedAdaptParameters(AdaptParameters):
"""
A class for holding parameters associated with
@@ -92,7 +110,7 @@ class GoalOrientedAdaptParameters(AdaptParameters):

def __init__(self, parameters=None):
"""
:arg parameters: parameters to set
:kwarg parameters: parameters to set
:type parameters: :class:`dict` with :class:`str` keys and values which may take
various types
"""
@@ -108,3 +126,48 @@ def __init__(self, parameters=None):
self._check_type("estimator_rtol", (float, int))
self._check_type("convergence_criteria", str)
self._check_value("convergence_criteria", ["all", "any"])


class OptimisationParameters(Parameters):
"""
A class for holding parameters associated with PDE-constrained optimisation.
"""

def __init__(self, parameters=None):
"""
:kwarg parameters: parameters to set
:type parameters: :class:`dict` with :class:`str` keys and values which may take
various types
"""
parameters = parameters or {}

self["R_space"] = False # Is the control variable defined in R-space?

# Parameters for step length and line search
self["lr"] = 0.001 # Learning rate / step length
# TODO: Create separate class for line search parameters
self["line_search"] = True # Toggle whether line search should be used
self["lr_min"] = 1.0e-08 # Minimum learning rate
self["ls_rtol"] = 0.1 # Relative tolerance for line search
self["ls_frac"] = 0.5 # Fraction to reduce the step by in line search
self["ls_maxiter"] = 100 # Maximum iteration count for line search

# Parameters for optimisation routine
self["maxiter"] = 35 # Maximum iteration count
self["gtol"] = 1.0e-05 # Relative tolerance for gradient
self["gtol_loose"] = 1.0e-05 # TODO: Explanation
self["dtol"] = 1.1 # Divergence tolerance

super().__init__(parameters=parameters)

self._check_type("R_space", bool)
self._check_type("lr", (float, int))
self._check_type("lr_min", (float, int))
self._check_type("line_search", bool)
self._check_type("ls_rtol", (float, int))
self._check_type("ls_frac", (float, int))
self._check_type("ls_maxiter", int)
self._check_type("maxiter", int)
self._check_type("gtol", (float, int))
self._check_type("gtol_loose", (float, int))
self._check_type("dtol", (float, int))