From 248e1e5a57696fdf17fb33e68b7cec7ffef4a768 Mon Sep 17 00:00:00 2001
From: Marcus
Date: Thu, 13 Jun 2024 13:03:15 -0700
Subject: [PATCH] minor

---
 fvgp/gp.py | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/fvgp/gp.py b/fvgp/gp.py
index e4647b5..7a7201b 100755
--- a/fvgp/gp.py
+++ b/fvgp/gp.py
@@ -485,8 +485,10 @@ def train(self,
                 Hessian function of the objective function have to be defined.")
         if method == 'mcmc': objective_function = self.marginal_density.log_likelihood
         if objective_function is None: objective_function = self.marginal_density.neg_log_likelihood
-        if objective_function_gradient is None: objective_function_gradient = self.marginal_density.neg_log_likelihood_gradient
-        if objective_function_hessian is None: objective_function_hessian = self.marginal_density.neg_log_likelihood_hessian
+        if objective_function_gradient is None:
+            objective_function_gradient = self.marginal_density.neg_log_likelihood_gradient
+        if objective_function_hessian is None:
+            objective_function_hessian = self.marginal_density.neg_log_likelihood_hessian

         logger.info("objective function: {}", objective_function)
         logger.info("method: {}", method)
@@ -587,8 +589,10 @@ def train_async(self,
                                                        high=hyperparameter_bounds[:, 1],
                                                        size=len(hyperparameter_bounds))
         if objective_function is None: objective_function = self.marginal_density.neg_log_likelihood
-        if objective_function_gradient is None: objective_function_gradient = self.marginal_density.neg_log_likelihood_gradient
-        if objective_function_hessian is None: objective_function_hessian = self.marginal_density.neg_log_likelihood_hessian
+        if objective_function_gradient is None: objective_function_gradient = (
+            self.marginal_density.neg_log_likelihood_gradient)
+        if objective_function_hessian is None: objective_function_hessian = (
+            self.marginal_density.neg_log_likelihood_hessian)

         opt_obj = self.trainer.train_async(
             objective_function=objective_function,
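
Reviewer note (not part of the patch): both hunks are purely cosmetic. They
reflow the fallback assignments of the objective function's gradient and
Hessian onto continuation lines; behavior is unchanged. For context, here is
a minimal usage sketch of the code path these defaults serve, assuming the
fvgp GP API of this period (the toy data, the bounds-free training call, and
the max_iter value are illustrative assumptions, not taken from the patch):

    import numpy as np
    from fvgp import GP  # assumes fvgp is installed

    # Toy data: 20 samples in 2 input dimensions (illustrative only).
    x_data = np.random.rand(20, 2)
    y_data = np.sin(5.0 * x_data[:, 0]) + np.cos(5.0 * x_data[:, 1])

    my_gp = GP(x_data, y_data)

    # objective_function, objective_function_gradient, and
    # objective_function_hessian are left at None, so train() falls back
    # to self.marginal_density.neg_log_likelihood and its gradient and
    # Hessian: exactly the branch reformatted in hunk 1 of the patch.
    my_gp.train(method='global', max_iter=120)

    # With method='mcmc', the patched code instead selects
    # self.marginal_density.log_likelihood as the objective.

The same fallback logic appears in train_async() (hunk 2), which passes the
chosen objective, gradient, and Hessian on to self.trainer.train_async() for
asynchronous optimization.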