From 4b7b6d35682b13d18b7c2ae75e4c6ae2a8660690 Mon Sep 17 00:00:00 2001 From: Joe Wallwork Date: Mon, 9 Dec 2024 09:02:07 +0000 Subject: [PATCH 1/8] Add OptimisationParameters --- goalie/options.py | 55 +++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 49 insertions(+), 6 deletions(-) diff --git a/goalie/options.py b/goalie/options.py index e7e0b4e1..13581437 100644 --- a/goalie/options.py +++ b/goalie/options.py @@ -1,9 +1,6 @@ from .utility import AttrDict -__all__ = [ - "AdaptParameters", - "GoalOrientedAdaptParameters", -] +__all__ = ["AdaptParameters", "GoalOrientedAdaptParameters", "OptimisationParameters"] class AdaptParameters(AttrDict): @@ -14,7 +11,7 @@ class AdaptParameters(AttrDict): def __init__(self, parameters=None): """ - :arg parameters: parameters to set + :kwarg parameters: parameters to set :type parameters: :class:`dict` with :class:`str` keys and values which may take various types """ @@ -92,7 +89,7 @@ class GoalOrientedAdaptParameters(AdaptParameters): def __init__(self, parameters=None): """ - :arg parameters: parameters to set + :kwarg parameters: parameters to set :type parameters: :class:`dict` with :class:`str` keys and values which may take various types """ @@ -108,3 +105,49 @@ def __init__(self, parameters=None): self._check_type("estimator_rtol", (float, int)) self._check_type("convergence_criteria", str) self._check_value("convergence_criteria", ["all", "any"]) + + +class OptimisationParameters(AttrDict): + """ + A class for holding parameters associated with PDE-constrained optimisation. + """ + + def __init__(self, parameters=None): + """ + :kwarg parameters: parameters to set + :type parameters: :class:`dict` with :class:`str` keys and values which may take + various types + """ + parameters = parameters or {} + + self["R_space"] = False # Is the control variable defined in R-space? 
+        self["disp"] = 0  # Level of verbosity
+
+        # Parameters for step length and line search
+        self["lr"] = 0.001  # Learning rate / step length
+        self["lr_min"] = 1.0e-08  # Minimum learning rate
+        self["line_search"] = True  # Toggle whether line search should be used
+        self["ls_rtol"] = 0.1  # Relative tolerance for line search
+        self["ls_frac"] = 0.5  # Fraction to reduce the step by in line search
+        self["ls_maxiter"] = 100  # Maximum iteration count for line search
+
+        # Parameters for optimisation routine
+        self["maxiter"] = 35  # Maximum iteration count
+        self["gtol"] = 1.0e-05  # Relative tolerance for gradient
+        self["gtol_loose"] = 1.0e-05  # TODO: Explanation
+        self["dtol"] = 1.1  # Divergence tolerance
+
+        super().__init__(parameters=parameters)
+
+        self._check_type("Rspace", bool)
+        self._check_type("disp", int)
+        self._check_type("lr", (float, int))
+        self._check_type("lr_min", (float, int))
+        self._check_type("line_search", bool)
+        self._check_type("ls_rtol", (float, int))
+        self._check_type("ls_frac", (float, int))
+        self._check_type("ls_maxiter", int)
+        self._check_type("maxiter", int)
+        self._check_type("gtol", (float, int))
+        self._check_type("gtol_loose", (float, int))
+        self._check_type("dtol", (float, int))
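A quick note on the interface added in PATCH 1/8: the holder is an AttrDict, so entries double as attributes, and badly typed overrides fail fast via _check_type. The sketch below shows the intended usage with illustrative values; it assumes the whole series is applied, in particular the correction of the "Rspace" type-check key to "R_space" in PATCH 7/8, without which default construction appears to fail on the missing key.

    from goalie.options import OptimisationParameters

    # Override selected defaults; anything not supplied keeps its default
    params = OptimisationParameters({"lr": 0.01, "maxiter": 50})
    assert params.lr == 0.01  # AttrDict entries double as attributes
    assert params.gtol == 1.0e-05  # unset entries keep their defaults

    # Badly typed overrides are rejected up front by _check_type, e.g.
    # OptimisationParameters({"lr": "fast"})  # raises TypeError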
From 77206a2519760e312fa3af56210e13758d94595b Mon Sep 17 00:00:00 2001
From: Joe Wallwork
Date: Mon, 9 Dec 2024 08:48:03 +0000
Subject: [PATCH 2/8] Base optimisation module on opt_adapt

---
 goalie/optimisation.py | 123 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 123 insertions(+)
 create mode 100644 goalie/optimisation.py

diff --git a/goalie/optimisation.py b/goalie/optimisation.py
new file mode 100644
index 00000000..d560d56c
--- /dev/null
+++ b/goalie/optimisation.py
@@ -0,0 +1,123 @@
+"""
+Module for handling PDE-constrained optimisation.
+"""
+
+import abc
+
+import numpy as np
+
+from .utility import AttrDict
+
+__all__ = ["OptimisationProgress", "QoIOptimiser"]
+
+
+class OptimisationProgress(AttrDict):
+    """
+    Class for stashing progress of an optimisation routine.
+    """
+
+    def __init__(self):
+        self["qoi"] = []
+        self["control"] = []
+        self["gradient"] = []
+        self["hessian"] = []
+
+
+def dotproduct(f, g):
+    """
+    The dot-product of two variables in the same :class:`~.FunctionSpace`.
+    """
+    return np.dot(f.dat.data, g.dat.data)
+
+
+def line_search(forward_run, m, u, P, J, dJ, params):
+    """
+    Apply a backtracking line search method to compute the step length / learning rate
+    (lr).
+
+    :arg forward_run: a Python function that implements the forward model and computes
+        the objective functional
+    :type forward_run: :class:`~.Callable`
+    :arg m: the current mesh
+    :type m: :class:`firedrake.mesh.MeshGeometry`
+    :arg u: the current control value
+    :type u: :class:`~.Control`
+    :arg P: the current descent direction
+    :type P: :class:`firedrake.function.Function`
+    :arg J: the current value of objective function
+    :type J: :class:`~.AdjFloat`
+    :arg dJ: the current gradient value
+    :type dJ: :class:`firedrake.function.Function`
+    :kwarg params: Class holding parameters for optimisation routine
+    :type params: :class:`~.OptimisationParameters`
+    """
+
+    lr = params.lr
+    if not params.line_search:
+        return lr
+    alpha = params.ls_rtol
+    tau = params.ls_frac
+    maxiter = params.ls_maxiter
+    disp = params.disp
+
+    # Compute initial slope
+    initial_slope = dotproduct(dJ, P)
+    if np.isclose(initial_slope, 0.0):
+        return params.lr
+
+    # Perform line search
+    if disp > 1:
+        print(f" Applying line search with alpha = {alpha} and tau = {tau}")
+    ext = ""
+    for i in range(maxiter):
+        if disp > 1:
+            print(f" {i:3d}: lr = {lr:.4e}{ext}")
+        u_plus = u + lr * P
+        J_plus, u_plus = forward_run(m, u_plus)
+        ext = f" diff {J_plus - J:.4e}"
+
+        # Check Armijo rule:
+        if J_plus - J <= alpha * lr * initial_slope:
+            break
+        lr *= tau
+        if lr < params.lr_min:
+            lr = params.lr_min
+            break
+    else:
+        raise Exception("Line search did not converge")
+    if disp > 1:
+        print(f" converged lr = {lr:.4e}")
+    return lr
+
+
+class QoIOptimiser_Base(abc.ABC):
+    """
+    Base class for handling PDE-constrained optimisation.
+    """
+
+    @abc.abstractmethod
+    def __init__(self):
+        pass  # TODO
+
+
+class QoIOptimiser_GradientDescent(QoIOptimiser_Base):
+    """
+    Class for handling PDE-constrained optimisation using the gradient descent approach.
+    """
+
+    order = 1
+    method_type = "gradient-based"
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        raise NotImplementedError  # TODO
+
+
+def QoIOptimiser(method="gradient_descent"):
+    """
+    Factory method for constructing handlers for PDE-constrained optimisation.
+    """
+    try:
+        return {"gradient_descent": QoIOptimiser_GradientDescent}[method]
+    except KeyError as ke:
+        raise ValueError(f"Method {method} not supported.") from ke

From 50a3cd84d7ee00d1388d4397907e7cb8f286641a Mon Sep 17 00:00:00 2001
From: Joe Wallwork
Date: Tue, 10 Dec 2024 07:33:29 +0000
Subject: [PATCH 3/8] Inline dotproduct for now

---
 goalie/optimisation.py | 9 +--------
 1 file changed, 1 insertion(+), 8 deletions(-)

diff --git a/goalie/optimisation.py b/goalie/optimisation.py
index d560d56c..632d20af 100644
--- a/goalie/optimisation.py
+++ b/goalie/optimisation.py
@@ -23,13 +23,6 @@ def __init__(self):
         self["hessian"] = []
 
 
-def dotproduct(f, g):
-    """
-    The dot-product of two variables in the same :class:`~.FunctionSpace`.
- """ - return np.dot(f.dat.data, g.dat.data) - - def line_search(forward_run, m, u, P, J, dJ, params): """ Apply a backtracking line search method to compute the step length / learning rate @@ -61,7 +54,7 @@ def line_search(forward_run, m, u, P, J, dJ, params): disp = params.disp # Compute initial slope - initial_slope = dotproduct(dJ, P) + initial_slope = np.dot(dJ.dat.data, P.dat.data) if np.isclose(initial_slope, 0.0): return params.lr From 3a0a71e0805719e97d7164859fd31d3b89d8a3e6 Mon Sep 17 00:00:00 2001 From: Joe Wallwork Date: Tue, 10 Dec 2024 07:33:43 +0000 Subject: [PATCH 4/8] Use log rather than disp/print --- goalie/optimisation.py | 11 ++++------- goalie/options.py | 2 -- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/goalie/optimisation.py b/goalie/optimisation.py index 632d20af..ed9b35ac 100644 --- a/goalie/optimisation.py +++ b/goalie/optimisation.py @@ -6,6 +6,7 @@ import numpy as np +from .log import log from .utility import AttrDict __all__ = ["OptimisationProgress", "QoIOptimiser"] @@ -51,7 +52,6 @@ def line_search(forward_run, m, u, P, J, dJ, params): alpha = params.ls_rtol tau = params.ls_frac maxiter = params.ls_maxiter - disp = params.disp # Compute initial slope initial_slope = np.dot(dJ.dat.data, P.dat.data) @@ -59,12 +59,10 @@ def line_search(forward_run, m, u, P, J, dJ, params): return params.lr # Perform line search - if disp > 1: - print(f" Applying line search with alpha = {alpha} and tau = {tau}") + log(f" Applying line search with alpha = {alpha} and tau = {tau}") ext = "" for i in range(maxiter): - if disp > 1: - print(f" {i:3d}: lr = {lr:.4e}{ext}") + log(f" {i:3d}: lr = {lr:.4e}{ext}") u_plus = u + lr * P J_plus, u_plus = forward_run(m, u_plus) ext = f" diff {J_plus - J:.4e}" @@ -78,8 +76,7 @@ def line_search(forward_run, m, u, P, J, dJ, params): break else: raise Exception("Line search did not converge") - if disp > 1: - print(f" converged lr = {lr:.4e}") + log(f" converged lr = {lr:.4e}") return lr diff --git a/goalie/options.py b/goalie/options.py index 13581437..aced5e8d 100644 --- a/goalie/options.py +++ b/goalie/options.py @@ -121,7 +121,6 @@ def __init__(self, parameters=None): parameters = parameters or {} self["R_space"] = False # Is the control variable defined in R-space? 
- self["disp"] = 0 # Level of verbosity # Parameters for step length and line search self["lr"] = 0.001 # Learning rate / step length @@ -140,7 +139,6 @@ def __init__(self, parameters=None): super().__init__(parameters=parameters) self._check_type("Rspace", bool) - self._check_type("disp", int) self._check_type("lr", (float, int)) self._check_type("lr_min", (float, int)) self._check_type("line_search", bool) From 1b2b0db564e4547f3825e1ee3cccdc2599a87ca2 Mon Sep 17 00:00:00 2001 From: Joe Wallwork Date: Tue, 10 Dec 2024 07:42:58 +0000 Subject: [PATCH 5/8] TODOs for optimisation --- goalie/optimisation.py | 66 ++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 64 insertions(+), 2 deletions(-) diff --git a/goalie/optimisation.py b/goalie/optimisation.py index ed9b35ac..bfba49c9 100644 --- a/goalie/optimisation.py +++ b/goalie/optimisation.py @@ -64,6 +64,7 @@ def line_search(forward_run, m, u, P, J, dJ, params): for i in range(maxiter): log(f" {i:3d}: lr = {lr:.4e}{ext}") u_plus = u + lr * P + # TODO: Use Goalie Solver J_plus, u_plus = forward_run(m, u_plus) ext = f" diff {J_plus - J:.4e}" @@ -89,6 +90,9 @@ class QoIOptimiser_Base(abc.ABC): def __init__(self): pass # TODO + def minimise(self): + raise NotImplementedError # TODO: Upstream implementation from opt_adapt + class QoIOptimiser_GradientDescent(QoIOptimiser_Base): """ @@ -100,7 +104,59 @@ class QoIOptimiser_GradientDescent(QoIOptimiser_Base): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - raise NotImplementedError # TODO + raise NotImplementedError # TODO: Upstream gradient descent implementation + + +class QoIOptimiser_Adam(QoIOptimiser_Base): + """ + Class for handling PDE-constrained optimisation using the Adam approach. + """ + + order = 1 + method_type = "gradient-based" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + raise NotImplementedError # TODO: Upstream Adam implementation + + +class QoIOptimiser_Newton(QoIOptimiser_Base): + """ + Class for handling PDE-constrained optimisation using the Newton approach. + """ + + order = 2 + method_type = "newton" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + raise NotImplementedError # TODO: Upstream Newton implementation + + +class QoIOptimiser_BFGS(QoIOptimiser_Base): + """ + Class for handling PDE-constrained optimisation using the BFGS approach. + """ + + order = 2 + method_type = "quasi-newton" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + raise NotImplementedError # TODO: Upstream BFGS implementation + + +class QoIOptimiser_LBFGS(QoIOptimiser_Base): + """ + Class for handling PDE-constrained optimisation using the L-BFGS approach. + """ + + order = 2 + method_type = "quasi-newton" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + raise NotImplementedError # TODO: Upstream L-BFGS implementation def QoIOptimiser(method="gradient_descent"): @@ -108,6 +164,12 @@ def QoIOptimiser(method="gradient_descent"): Factory method for constructing handlers for PDE-constrained optimisation. 
""" try: - return {"gradient_descent": QoIOptimiser_GradientDescent}[method] + return { + "gradient_descent": QoIOptimiser_GradientDescent, + "adam": QoIOptimiser_Adam, + "newton": QoIOptimiser_Newton, + "bfgs": QoIOptimiser_BFGS, + "lbfgs": QoIOptimiser_LBFGS, + }[method] except KeyError as ke: raise ValueError(f"Method {method} not supported.") from ke From f8ea4123bed7b7c38ad26c24925960e9bc71f0b5 Mon Sep 17 00:00:00 2001 From: Joe Wallwork Date: Tue, 10 Dec 2024 07:56:40 +0000 Subject: [PATCH 6/8] Make line_search a method of QoIOptimiser --- goalie/optimisation.py | 131 ++++++++++++++++++++++------------------- 1 file changed, 72 insertions(+), 59 deletions(-) diff --git a/goalie/optimisation.py b/goalie/optimisation.py index bfba49c9..7deacc1e 100644 --- a/goalie/optimisation.py +++ b/goalie/optimisation.py @@ -24,71 +24,84 @@ def __init__(self): self["hessian"] = [] -def line_search(forward_run, m, u, P, J, dJ, params): - """ - Apply a backtracking line search method to compute the step length / learning rate - (lr). - - :arg forward_run: a Python function that implements the forward model and computes - the objective functional - :type forward_run: :class:`~.Callable` - :arg m: the current mesh - :type m: :class:`firedrake.mesh.MeshGeometry` - :arg u: the current control value - :type u: :class:`~.Control` - :arg P: the current descent direction - :type P: :class:`firedrake.function.Function` - :arg J: the current value of objective function - :type J: :class:`~.AdjFloat` - :arg dJ: the current gradient value - :type dJ: :class:`firedrake.function.Function` - :kwarg params: Class holding parameters for optimisation routine - :type params: :class:`~.OptimisationParameters` - """ - - lr = params.lr - if not params.line_search: - return lr - alpha = params.ls_rtol - tau = params.ls_frac - maxiter = params.ls_maxiter - - # Compute initial slope - initial_slope = np.dot(dJ.dat.data, P.dat.data) - if np.isclose(initial_slope, 0.0): - return params.lr - - # Perform line search - log(f" Applying line search with alpha = {alpha} and tau = {tau}") - ext = "" - for i in range(maxiter): - log(f" {i:3d}: lr = {lr:.4e}{ext}") - u_plus = u + lr * P - # TODO: Use Goalie Solver - J_plus, u_plus = forward_run(m, u_plus) - ext = f" diff {J_plus - J:.4e}" - - # Check Armijo rule: - if J_plus - J <= alpha * lr * initial_slope: - break - lr *= tau - if lr < params.lr_min: - lr = params.lr_min - break - else: - raise Exception("Line search did not converge") - log(f" converged lr = {lr:.4e}") - return lr - - class QoIOptimiser_Base(abc.ABC): """ Base class for handling PDE-constrained optimisation. """ + def __init__(self, forward_run, mesh, control, params): + """ + :arg forward_run: a Python function that implements the forward model and computes + the objective functional + :type forward_run: :class:`~.Callable` + :arg mesh: the initial mesh + :type mesh: :class:`firedrake.mesh.MeshGeometry` + :arg control: the initial control value + :type control: :class:`~.Control` + :kwarg params: Class holding parameters for optimisation routine + :type params: :class:`~.OptimisationParameters` + """ + # TODO: Use Goalie Solver rather than forward_run + self.forward_run = forward_run + self.mesh = mesh + self.control = control + self.params = params + + def line_search(self, P, J, dJ): + """ + Apply a backtracking line search method to update the step length (i.e., learning + rate). 
+ + :arg P: the current descent direction + :type P: :class:`firedrake.function.Function` + :arg J: the current value of objective function + :type J: :class:`~.AdjFloat` + :arg dJ: the current gradient value + :type dJ: :class:`firedrake.function.Function` + """ + + lr = self.params.lr + if not self.params.line_search: + return lr + alpha = self.params.ls_rtol + tau = self.params.ls_frac + maxiter = self.params.ls_maxiter + + # Compute initial slope + initial_slope = np.dot(dJ.dat.data, P.dat.data) + if np.isclose(initial_slope, 0.0): + return self.params.lr + + # Perform line search + log(f" Applying line search with alpha = {alpha} and tau = {tau}") + ext = "" + for i in range(maxiter): + log(f" {i:3d}: lr = {lr:.4e}{ext}") + u_plus = self.control + lr * P + # TODO: Use Goalie Solver rather than forward_run + J_plus, u_plus = self.forward_run(self.mesh, u_plus) + ext = f" diff {J_plus - J:.4e}" + + # Check Armijo rule: + if J_plus - J <= alpha * lr * initial_slope: + break + lr *= tau + if lr < self.params.lr_min: + lr = self.params.lr_min + break + else: + raise Exception("Line search did not converge") + log(f" converged lr = {lr:.4e}") + self.lr = lr + @abc.abstractmethod - def __init__(self): - pass # TODO + def step(self): + """ + Take a step with the chosen optimisation approach. + + This method should be implemented in the subclass. + """ + pass def minimise(self): raise NotImplementedError # TODO: Upstream implementation from opt_adapt From 011959e936ed483f8508f50993cc07852343eff9 Mon Sep 17 00:00:00 2001 From: Joe Wallwork Date: Tue, 10 Dec 2024 08:16:38 +0000 Subject: [PATCH 7/8] Create base class for Parameters --- goalie/options.py | 58 ++++++++++++++++++++++++++++++++--------------- 1 file changed, 40 insertions(+), 18 deletions(-) diff --git a/goalie/options.py b/goalie/options.py index aced5e8d..2d778169 100644 --- a/goalie/options.py +++ b/goalie/options.py @@ -1,27 +1,26 @@ +""" +Classes for holding parameters associated with adaptation and optimisation methods. +""" + +import abc + from .utility import AttrDict __all__ = ["AdaptParameters", "GoalOrientedAdaptParameters", "OptimisationParameters"] -class AdaptParameters(AttrDict): +class Parameters(AttrDict, metaclass=abc.ABCMeta): """ - A class for holding parameters associated with adaptive mesh fixed point iteration - loops. + Base class for parameter holders. """ - def __init__(self, parameters=None): + @abc.abstractmethod + def __init__(self, parameters): """ :kwarg parameters: parameters to set :type parameters: :class:`dict` with :class:`str` keys and values which may take various types """ - parameters = parameters or {} - - self["miniter"] = 3 # Minimum iteration count - self["maxiter"] = 35 # Maximum iteration count - self["element_rtol"] = 0.001 # Relative tolerance for element count - self["drop_out_converged"] = False # Drop out converged subintervals? - if not isinstance(parameters, dict): raise TypeError( "Expected 'parameters' keyword argument to be a dictionary, not of" @@ -33,10 +32,6 @@ def __init__(self, parameters=None): f"{self.__class__.__name__} does not have '{key}' attribute." 
                 )
         super().__init__(parameters)
 
     def _check_type(self, key, expected):
         """
@@ -80,6 +75,32 @@ def __repr__(self):
         return f"{type(self).__name__}({d})"
 
 
+class AdaptParameters(Parameters):
+    """
+    A class for holding parameters associated with adaptive mesh fixed point iteration
+    loops.
+    """
+
+    def __init__(self, parameters=None):
+        """
+        :kwarg parameters: parameters to set
+        :type parameters: :class:`dict` with :class:`str` keys and values which may take
+            various types
+        """
+        parameters = parameters or {}
+
+        self["miniter"] = 3  # Minimum iteration count
+        self["maxiter"] = 35  # Maximum iteration count
+        self["element_rtol"] = 0.001  # Relative tolerance for element count
+        self["drop_out_converged"] = False  # Drop out converged subintervals?
+
+        super().__init__(parameters)
+        self._check_type("miniter", int)
+        self._check_type("maxiter", int)
+        self._check_type("element_rtol", (float, int))
+        self._check_type("drop_out_converged", bool)
+
+
 class GoalOrientedAdaptParameters(AdaptParameters):
     """
     A class for holding parameters associated with
@@ -107,7 +128,7 @@ def __init__(self, parameters=None):
         self._check_value("convergence_criteria", ["all", "any"])
 
 
-class OptimisationParameters(AttrDict):
+class OptimisationParameters(Parameters):
     """
     A class for holding parameters associated with PDE-constrained optimisation.
     """
 
@@ -124,8 +145,9 @@ def __init__(self, parameters=None):
 
         # Parameters for step length and line search
        self["lr"] = 0.001  # Learning rate / step length
-        self["lr_min"] = 1.0e-08  # Minimum learning rate
+        # TODO: Create separate class for line search parameters
         self["line_search"] = True  # Toggle whether line search should be used
+        self["lr_min"] = 1.0e-08  # Minimum learning rate
         self["ls_rtol"] = 0.1  # Relative tolerance for line search
         self["ls_frac"] = 0.5  # Fraction to reduce the step by in line search
         self["ls_maxiter"] = 100  # Maximum iteration count for line search
@@ -138,7 +160,7 @@ def __init__(self, parameters=None):
 
         super().__init__(parameters=parameters)
 
-        self._check_type("Rspace", bool)
+        self._check_type("R_space", bool)
         self._check_type("lr", (float, int))
         self._check_type("lr_min", (float, int))
         self._check_type("line_search", bool)
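The value of this refactor is that Parameters now concentrates the dictionary validation, unknown-key rejection and type checking in one place, so a concrete holder only declares its defaults and checks. In the spirit of the TODO above about a separate class for line search parameters, a hypothetical subclass might look as follows. This is a sketch only, with illustrative names that are not part of this series:

    from goalie.options import Parameters

    class LineSearchParameters(Parameters):
        """Hypothetical holder for the line search parameters alone."""

        def __init__(self, parameters=None):
            parameters = parameters or {}
            self["ls_rtol"] = 0.1  # Relative tolerance for line search
            self["ls_frac"] = 0.5  # Fraction to reduce the step by
            super().__init__(parameters)
            self._check_type("ls_rtol", (float, int))
            self._check_type("ls_frac", (float, int))

    lsp = LineSearchParameters({"ls_frac": 0.8})  # overrides are checked as before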
+ """ + + def setUp(self): + self.defaults = { + "R_space": False, + "lr": 0.001, + "line_search": True, + "lr_min": 1.0e-08, + "ls_rtol": 0.1, + "ls_frac": 0.5, + "ls_maxiter": 100, + "maxiter": 35, + "gtol": 1.0e-05, + "gtol_loose": 1.0e-05, + "dtol": 1.1, + } + + def test_defaults(self): + ap = OptimisationParameters() + for key, value in self.defaults.items(): + self.assertEqual(ap[key], value) + + def test_repr(self): + ap = OptimisationParameters() + expected = ( + "OptimisationParameters(R_space=False, lr=0.001, line_search=True," + " lr_min=1e-08, ls_rtol=0.1, ls_frac=0.5, ls_maxiter=100, maxiter=35," + " gtol=1e-05, gtol_loose=1e-05, dtol=1.1)" + ) + print(repr(ap)) + self.assertEqual(repr(ap), expected) + + def test_R_space_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"R_space": 0}) + msg = "Expected attribute 'R_space' to be of type 'bool', not 'int'." + self.assertEqual(str(cm.exception), msg) + + def test_lr_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"lr": "0.001"}) + msg = "Expected attribute 'lr' to be of type 'float' or 'int', not 'str'." + self.assertEqual(str(cm.exception), msg) + + def test_line_search_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"line_search": 0}) + msg = "Expected attribute 'line_search' to be of type 'bool', not 'int'." + self.assertEqual(str(cm.exception), msg) + + def test_lr_min_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"lr_min": "1.0e-08"}) + msg = "Expected attribute 'lr_min' to be of type 'float' or 'int', not 'str'." + self.assertEqual(str(cm.exception), msg) + + def test_ls_rtol_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"ls_rtol": "0.1"}) + msg = "Expected attribute 'ls_rtol' to be of type 'float' or 'int', not 'str'." + self.assertEqual(str(cm.exception), msg) + + def test_ls_frac_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"ls_frac": "0.5"}) + msg = "Expected attribute 'ls_frac' to be of type 'float' or 'int', not 'str'." + self.assertEqual(str(cm.exception), msg) + + def test_ls_maxiter_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"ls_maxiter": 100.0}) + msg = "Expected attribute 'ls_maxiter' to be of type 'int', not 'float'." + self.assertEqual(str(cm.exception), msg) + + def test_maxiter_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"maxiter": 35.0}) + msg = "Expected attribute 'maxiter' to be of type 'int', not 'float'." + self.assertEqual(str(cm.exception), msg) + + def test_gtol_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"gtol": "1.0e-05"}) + msg = "Expected attribute 'gtol' to be of type 'float' or 'int', not 'str'." + self.assertEqual(str(cm.exception), msg) + + def test_gtol_loose_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"gtol_loose": "1.0e-05"}) + msg = ( + "Expected attribute 'gtol_loose' to be of type 'float' or 'int', not 'str'." + ) + self.assertEqual(str(cm.exception), msg) + + def test_dtol_type_error(self): + with self.assertRaises(TypeError) as cm: + OptimisationParameters({"dtol": "1.1"}) + msg = "Expected attribute 'dtol' to be of type 'float' or 'int', not 'str'." + self.assertEqual(str(cm.exception), msg) + + if __name__ == "__main__": unittest.main()