From 0ef80b933eda0f23a964143474be1e6e733c3cd7 Mon Sep 17 00:00:00 2001 From: Brandon Paul Date: Wed, 6 Sep 2023 15:59:19 -0700 Subject: [PATCH 01/14] Brady's gradient generation script --- .../ML_AI_Plugin/generate_gradient_data.py | 77 +++++++++++++++++++ 1 file changed, 77 insertions(+) create mode 100644 examples/other_files/ML_AI_Plugin/generate_gradient_data.py diff --git a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py new file mode 100644 index 000000000..7286493f0 --- /dev/null +++ b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py @@ -0,0 +1,77 @@ +################################################################################# +# FOQUS Copyright (c) 2012 - 2023, by the software owners: Oak Ridge Institute +# for Science and Education (ORISE), TRIAD National Security, LLC., Lawrence +# Livermore National Security, LLC., The Regents of the University of +# California, through Lawrence Berkeley National Laboratory, Battelle Memorial +# Institute, Pacific Northwest Division through Pacific Northwest National +# Laboratory, Carnegie Mellon University, West Virginia University, Boston +# University, the Trustees of Princeton University, The University of Texas at +# Austin, URS Energy & Construction, Inc., et al. All rights reserved. +# +# Please see the file LICENSE.md for full copyright and license information, +# respectively. This file is also available online at the URL +# "https://github.com/CCSI-Toolset/FOQUS". +################################################################################# + +# Authors: Brayden Gess, Brandon Paul + +import numpy as np +import pandas as pd +from sklearn.neural_network import MLPRegressor +import pickle +from types import SimpleNamespace + +def finite_difference(m1, m2, y1, y2, n_x): + + def diff(y2, y1, x2, x1): + dv2_dv1 = (y2 - y1)/(x2 - x1) + + return dv2_dv1 + + mid_m = [None] * n_x # initialize dm vector, the midpoints of m1 and m2 + dy_dm = [None] * n_x # initialize dy vector, this is dy_dm(midpoints) + + for i in range(n_x): # for each input xi + dy_dm[i] = sum( + diff(y2, y1, m2[j], m1[j]) * # dy/dxj + diff(m2[j], m1[j], m2[i], m1[i]) # dxj/dxi + for j in range(n_x) + ) # for each input xj + + mid_m[i] = m2[i] - m1[i] + + return mid_m, dy_dm + +def generate_gradients(xy_data, n_x): + + # split data into inputs and outputs + x = xy_data[:, :n_x] # there are n_x input variables/columns + y = xy_data[:, n_x:] # the rest are output variables/columns + n_m = np.shape(y)[0] # save number of samples + + # estimate first-order gradients using finite difference approximation + + midpoints = np.empty((n_m-1, n_x)) + gradients_midpoints = np.empty((n_m-1, n_x)) + + # get midpoint gradients for one pair of samples at a time and save + for m in range(n_m-1): # we have (n_m - 1) adjacent sample pairs + print(m+1, " of ", n_m-1) + midpoints[m], gradients_midpoints[m] = finite_difference( + m1 = x[m,:], + m2 = x[m+1,:], + y1 = y[m][0], # each entry in y is an array somehow + y2 = y[m+1][0], # each entry in y is an array somehow + n_x = n_x + ) + + return midpoints, gradients_midpoints + +if __name__ == "__main__": + data = pd.read_csv(r"MEA_carbon_capture_dataset_mimo.csv") + data_array = np.array(data, ndmin=2) + data_array = data_array[:, :-1] # take only one output column + n_x = 6 + + midpoints, gradients = generate_gradients(xy_data=data_array, n_x=n_x) + print("Gradient generation complete.") \ No newline at end of file From 
44def8c1b849f7820fd83f2c1045c95f56639182 Mon Sep 17 00:00:00 2001 From: Brandon Paul Date: Wed, 6 Sep 2023 16:02:55 -0700 Subject: [PATCH 02/14] Adding helpful docstrings and comments --- .../ML_AI_Plugin/generate_gradient_data.py | 55 ++++++++++++++++++- 1 file changed, 54 insertions(+), 1 deletion(-) diff --git a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py index 7286493f0..1eb350e91 100644 --- a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py +++ b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py @@ -22,8 +22,34 @@ from types import SimpleNamespace def finite_difference(m1, m2, y1, y2, n_x): + """ + Calculate the first-order gradient between provided sample points m1 and + m2, where each point is assumed to be a vector with one or more input + variables x and exactly one output variable y. y1 is the value of y1 at m1, + and y2 is the value of y at m2. + + The total graident is calculated via chain rule assuming a multivariate + function y(x1, x2, x3, ...). In the notation below, D/D denotes a total + derivative and d/d denotes a partial derivative. Total derivatives are + functions of all (x1, x2, x3, ...) whereas partial derivatives are + functions of one input (e.g. x1) holding (x2, x3, ...) constant: + + Dy/Dx1 = (dy/dx1)(dx1/dx1) + (dy/dx2)(dx2/dx1) + (dy/dx3)(dx3/dx1) +... + + Note that (dx1/dx1) = 1. The partial derivatives dv2/dv1 are estimated + between sample points m1 and m2 as: + + dv2/dv1 at (m1+m2)/2 = [v2 at m2 - v2 at m1]/[v1 at m2 - v1 at m1] + + The method assumes that m1 is the first point and m2 is the second point, + and returns a vector dy_dm that is the same length as m1 and m2; m1 and m2 + must be the same length. y1 and y2 must be float or integer values. + """ def diff(y2, y1, x2, x1): + """ + Calculate derivative of y w.r.t. x. + """ dv2_dv1 = (y2 - y1)/(x2 - x1) return dv2_dv1 @@ -43,6 +69,30 @@ def diff(y2, y1, x2, x1): return mid_m, dy_dm def generate_gradients(xy_data, n_x): + """ + This method implements finite difference approximation and MLP regression + to estimate the first-order derivatives of a given dataset with columns + (x1, x2, ...., xN, y1, y2, ..., yM) where N is the number of input + variables and M is the number of output variables. The method takes an + array of size (m, n_x + n_y) where m is the number of samples, n_x is the + number of input variables, and n_y is the number of output variables. The + method returns an array of size (m, n_x, n_y) where the first dimension + spans samples, the second dimension spans gradients dy/dx for each x, and + the third dimension spans gradients dy/dx for each y. + + For example, passing an array with 100 samples, 8 inputs and 2 outputs will + return an array of size (100, 8, 2) where (:, :, 0) contains all dy1/dx and + (:, :, 1) contains all dy2/dx. + + The workflow of this method is as follows: + 1. Import xy data in array of size (m, n_x + n_y) and split into x, y + 2. Generate dy in n_y arrays of size (m-1, n_x) which correspond to + points between samples + 3. Normalize x, dy on [0, 1] and train MLP model dy(x) for each dy + 4. Predict dy(x) for m samples from xy data to generate n_y arrays of + size (m, n_x) which correspond to sample points + 5. 
Concatenate predicted gradients into array of size (m, n_x, n_y) + """ # split data into inputs and outputs x = xy_data[:, :n_x] # there are n_x input variables/columns @@ -50,7 +100,10 @@ def generate_gradients(xy_data, n_x): n_m = np.shape(y)[0] # save number of samples # estimate first-order gradients using finite difference approximation - + # this will account for all input variables, but will be for the midpoints + # between the sample points, i.e. len(y) - len(dy_midpoints) = 1. + # in both midpoints and gradients_midpoints, each column corresponds to an + # input variable xi and each row corresponds to a point between two samples midpoints = np.empty((n_m-1, n_x)) gradients_midpoints = np.empty((n_m-1, n_x)) From c0921c4b9697736d832d5d78bf90acae449e52cd Mon Sep 17 00:00:00 2001 From: Brandon Paul Date: Wed, 6 Sep 2023 16:07:45 -0700 Subject: [PATCH 03/14] Brady's working files for gradient model regression --- examples/other_files/ML_AI_Plugin/bg_KNN.py | 66 ++++++++ .../ML_AI_Plugin/bg_linear_regression.py | 37 +++++ .../other_files/ML_AI_Plugin/bg_regression.py | 149 ++++++++++++++++++ 3 files changed, 252 insertions(+) create mode 100644 examples/other_files/ML_AI_Plugin/bg_KNN.py create mode 100644 examples/other_files/ML_AI_Plugin/bg_linear_regression.py create mode 100644 examples/other_files/ML_AI_Plugin/bg_regression.py diff --git a/examples/other_files/ML_AI_Plugin/bg_KNN.py b/examples/other_files/ML_AI_Plugin/bg_KNN.py new file mode 100644 index 000000000..35d115fda --- /dev/null +++ b/examples/other_files/ML_AI_Plugin/bg_KNN.py @@ -0,0 +1,66 @@ +import pandas as pd +import numpy as np +from sklearn.model_selection import train_test_split +from sklearn.preprocessing import StandardScaler +from sklearn.neighbors import KNeighborsRegressor +from sklearn.metrics import mean_squared_error +from sklearn.model_selection import LeaveOneOut +from pyomo.common.fileutils import this_file_dir +import os + +def get_dataset(y): + data_file = os.path.join(this_file_dir(), 'cd_x_y.csv') + df = pd.read_csv(data_file, sep=';', header=None) + derivative_file = os.path.join(this_file_dir(), 'cd_dy.csv') + df_1 = pd.read_csv(derivative_file, sep=';', header=None) + features = df.iloc[:,:16] + labels = df_1.iloc[:,y-1:y] + return df, df_1, features, labels + +def main(nn,y): + df, df_1, features, labels = get_dataset(y) + + # Split the dataset into training and test sets + X_train, X_test, y_train, y_test = train_test_split(features, labels, test_size=0.3,shuffle=False) + + # Standardize the feature data + scaler = StandardScaler() + X_train_scaled = scaler.fit_transform(X_train) + X_test_scaled = scaler.transform(X_test) + + # Create the KNN regressor + n_neighbors = nn # Number of neighbors to consider + knn_regressor = KNeighborsRegressor(n_neighbors=n_neighbors) + + # Fit the KNN regressor to the training data + knn_regressor.fit(X_train_scaled, y_train) + + # Predict target values for the test set + y_pred = knn_regressor.predict(X_test_scaled) + + # Calculate the root mean squared error (RMSE) + RMSE = np.sqrt(mean_squared_error(y_test, y_pred)) + predictions = pd.DataFrame(y_pred) + knn_regressor = KNeighborsRegressor(n_neighbors=10) + knn_regressor.fit(X_train_scaled, y_train) + + # Calculate feature importance using LOOCV + + # Print or analyze the feature importance scores + return knn_regressor, RMSE,y_train,predictions + +# Assuming you have already created the KNN model and feature scaled data + +if __name__ == "__main__": + final_df = pd.DataFrame() + models=[] + for i in 
range(1,17): + print(i) + model,RMSE,y_train,predictions = main(nn=10,y=i) + models.append(model) + final_df[i-1] = pd.concat([y_train,predictions.rename(columns={0:i-1})],ignore_index=True) + print(i,RMSE) + final_df.to_csv('derivatives.csv',sep=';',index=False) + + + diff --git a/examples/other_files/ML_AI_Plugin/bg_linear_regression.py b/examples/other_files/ML_AI_Plugin/bg_linear_regression.py new file mode 100644 index 000000000..230eeb10b --- /dev/null +++ b/examples/other_files/ML_AI_Plugin/bg_linear_regression.py @@ -0,0 +1,37 @@ +import numpy as np +import matplotlib.pyplot as plt # To visualize +import pandas as pd # To read data +from sklearn.linear_model import LinearRegression +from pyomo.common.fileutils import this_file_dir +import os + +def get_data(file,y_values): + data = pd.read_csv(file, sep=';', header=None) # load data set + X = data.iloc[:, :data.shape[1]-y_values] + Y = data.iloc[:, data.shape[1]-y_values] + return X,Y + + +def main(file,y_values): + X,Y = get_data(file,y_values) + + from sklearn import linear_model + + regr = linear_model.LinearRegression() + regr.fit(X, Y) + print('Regression Coefficients') + print(regr.coef_) + with open('derivatives.csv', 'w') as f: + for index in range(X.shape[0]): + print(index) + for i in range(len(regr.coef_)): + f.write(str(regr.coef_[i])) + if i == len(regr.coef_) - 1: + f.write('\n') + else: + f.write(';') + f.close() + +file = os.path.join(this_file_dir(), 'cd_x_y.csv') +y_values = 1 +main(file,y_values) \ No newline at end of file diff --git a/examples/other_files/ML_AI_Plugin/bg_regression.py b/examples/other_files/ML_AI_Plugin/bg_regression.py new file mode 100644 index 000000000..0f5cf6426 --- /dev/null +++ b/examples/other_files/ML_AI_Plugin/bg_regression.py @@ -0,0 +1,149 @@ +import pandas as pd +import matplotlib.pyplot as plt +import numpy as np +import math + +### Promising things to check +### scaling the neural net input + +# Data preprocessing +from sklearn.preprocessing import MinMaxScaler +from sklearn.model_selection import train_test_split +from sklearn.preprocessing import StandardScaler +from sklearn.preprocessing import Normalizer + +# Neural Net modules +from keras.models import Sequential +from keras.layers import Dense, Dropout +from keras.callbacks import EarlyStopping +from pyomo.common.fileutils import this_file_dir +import os + +df = pd.read_csv(os.path.join(this_file_dir(), 'cd_x_y.csv'),sep=';',header=None) + +# drop any rows with missing values +print(df.head()) +# convert categorical variable into dummy variables +y = df[16] +X = df.drop(16, axis=1) +print(X.shape, y.shape) + +# convert to numpy array +X = np.array(X) +y = np.array(y) + +# split into X_train and X_test +# always split into X_train, X_test first THEN apply minmax scaler +X_train, X_test, y_train, y_test = train_test_split(X, y, + test_size=0.2, + random_state=123) +print(X_train.shape, X_test.shape, y_train.shape, y_test.shape) + +# use minMax scaler +min_max_scaler = MinMaxScaler() +X_train = min_max_scaler.fit_transform(X_train) +X_test = min_max_scaler.transform(X_test) + +def build_model(): + model = Sequential() + model.add(Dense(6, input_shape=(X_train.shape[1],), activation='relu')) # (features,) + model.add(Dense(6, activation='relu')) + model.add(Dense(1, activation='linear')) # output node + model.summary() # see what your model looks like + return model + +model = build_model() + +# compile the model +model.compile(optimizer='rmsprop', loss='mse', metrics=['mae']) + +# early stopping callback +es = 
EarlyStopping(monitor='val_loss', + mode='min', + patience=50, + restore_best_weights = True) + +# fit the model! +# attach it to a new variable called 'history' in case +# to look at the learning curves +history = model.fit(X_train, y_train, + validation_data = (X_test, y_test), + callbacks=[es], + epochs=100, + batch_size=50, + verbose=1) + +history_dict = history.history +loss_values = history_dict['loss'] # you can change this +val_loss_values = history_dict['val_loss'] # you can also change this +epochs = range(1, len(loss_values) + 1) # range of X (no. of epochs) +#plt.plot(epochs, loss_values, 'bo', label='Training loss') +#plt.plot(epochs, val_loss_values, 'orange', label='Validation loss') +#plt.title('Training and validation loss') +#plt.xlabel('Epochs') +#plt.ylabel('Loss') +#plt.legend() +#plt.show() + +pred = model.predict(X_test) +pred + +trainpreds = model.predict(X_train) + +from sklearn.metrics import mean_absolute_error +print(mean_absolute_error(y_train, trainpreds)) # train +print(mean_absolute_error(y_test, pred)) # test + + +def sigmoid_deriv(x): + ### Input integer or numpy array + ### Outputs the sigmoid derivative of the respective number(s) + sig = 1/(1+math.e**(-1*x)) + return sig*(1-sig) + + +def calculate_derivatives(row,y): + ### Manually calculates the derivatives + ### Currently set to linear Neural Network + ### Can be simplified to matrix multiplication + final = [] + for r in range(len(row)): + for i in range(16): + first = model.layers[0].get_weights()[0][i] + new = [] + for i in range(len(first)): + new.append(first[i]) + weights = model.layers[1].get_weights()[0] + second = [0]*len(model.layers[1].get_weights()[0][0]) + for a in range(len(new)): + for b in range(len(second)): + second[b] += (new[a]*weights[a][b]) + for b in range(len(second)): + second[b] = second[b] + x_final = 0 + for c in range(len(second)): + x_final += second[c]*model.layers[2].get_weights()[0][c] + final.append(x_final[0]) + return final + + +df = pd.read_csv(os.path.join(this_file_dir(), 'cd_x_y.csv'),sep=';',header=None) +y = df[16] +X1 = df.drop(16, axis=1) +### Normalization +scaler = MinMaxScaler() +X1 = scaler.fit_transform(X1) +print(len(X1)) + +with open('derivatives.csv','w') as f: + for index in range(len(X1)): + row = X1[index] + final = calculate_derivatives(row,y[index]) + print(index) + for i in range(len(final)): + f.write(str(final[i])) + if i == len(final)-1: + f.write('\n') + else: + f.write(';') +f.close() \ No newline at end of file From 4ebb2a226f9c01d461e2195cf01335c5b5c5af8d Mon Sep 17 00:00:00 2001 From: Brandon Paul Date: Thu, 7 Sep 2023 15:11:18 -0700 Subject: [PATCH 04/14] Integrate Brady's regression script into main file, clean up --- examples/other_files/ML_AI_Plugin/bg_KNN.py | 66 -------- .../ML_AI_Plugin/bg_linear_regression.py | 37 ----- .../other_files/ML_AI_Plugin/bg_regression.py | 149 ------------------ .../ML_AI_Plugin/generate_gradient_data.py | 105 +++++++++++- .../other_files/ML_AI_Plugin/gradients.csv | 103 ++++++++++++ 5 files changed, 200 insertions(+), 260 deletions(-) delete mode 100644 examples/other_files/ML_AI_Plugin/bg_KNN.py delete mode 100644 examples/other_files/ML_AI_Plugin/bg_linear_regression.py delete mode 100644 examples/other_files/ML_AI_Plugin/bg_regression.py create mode 100644 examples/other_files/ML_AI_Plugin/gradients.csv diff --git a/examples/other_files/ML_AI_Plugin/bg_KNN.py b/examples/other_files/ML_AI_Plugin/bg_KNN.py deleted file mode 100644 index 35d115fda..000000000 --- 
a/examples/other_files/ML_AI_Plugin/bg_KNN.py +++ /dev/null @@ -1,66 +0,0 @@ -import pandas as pd -import numpy as np -from sklearn.model_selection import train_test_split -from sklearn.preprocessing import StandardScaler -from sklearn.neighbors import KNeighborsRegressor -from sklearn.metrics import mean_squared_error -from sklearn.model_selection import LeaveOneOut -from pyomo.common.fileutils import this_file_dir -import os - -def get_dataset(y): - data_file = os.path.join(this_file_dir(), 'cd_x_y.csv') - df = pd.read_csv(data_file, sep=';', header=None) - derivative_file = os.path.join(this_file_dir(), 'cd_dy.csv') - df_1 = pd.read_csv(derivative_file, sep=';', header=None) - features = df.iloc[:,:16] - labels = df_1.iloc[:,y-1:y] - return df, df_1, features, labels - -def main(nn,y): - df, df_1, features, labels = get_dataset(y) - - # Split the dataset into training and test sets - X_train, X_test, y_train, y_test = train_test_split(features, labels, test_size=0.3,shuffle=False) - - # Standardize the feature data - scaler = StandardScaler() - X_train_scaled = scaler.fit_transform(X_train) - X_test_scaled = scaler.transform(X_test) - - # Create the KNN regressor - n_neighbors = nn # Number of neighbors to consider - knn_regressor = KNeighborsRegressor(n_neighbors=n_neighbors) - - # Fit the KNN regressor to the training data - knn_regressor.fit(X_train_scaled, y_train) - - # Predict target values for the test set - y_pred = knn_regressor.predict(X_test_scaled) - - # Calculate the root mean squared error (RMSE) - RMSE = np.sqrt(mean_squared_error(y_test, y_pred)) - predictions = pd.DataFrame(y_pred) - knn_regressor = KNeighborsRegressor(n_neighbors=10) - knn_regressor.fit(X_train_scaled, y_train) - - # Calculate feature importance using LOOCV - - # Print or analyze the feature importance scores - return knn_regressor, RMSE,y_train,predictions - -# Assuming you have already created the KNN model and feature scaled data - -if __name__ == "__main__": - final_df = pd.DataFrame() - models=[] - for i in range(1,17): - print(i) - model,RMSE,y_train,predictions = main(nn=10,y=i) - models.append(model) - final_df[i-1] = pd.concat([y_train,predictions.rename(columns={0:i-1})],ignore_index=True) - print(i,RMSE) - final_df.to_csv('derivatives.csv',sep=';',index=False) - - - diff --git a/examples/other_files/ML_AI_Plugin/bg_linear_regression.py b/examples/other_files/ML_AI_Plugin/bg_linear_regression.py deleted file mode 100644 index 230eeb10b..000000000 --- a/examples/other_files/ML_AI_Plugin/bg_linear_regression.py +++ /dev/null @@ -1,37 +0,0 @@ -import numpy as np -import matplotlib.pyplot as plt # To visualize -import pandas as pd # To read data -from sklearn.linear_model import LinearRegression -from pyomo.common.fileutils import this_file_dir -import os - -def get_data(file,y_values): - data = pd.read_csv(file, sep=';', header=None) # load data set - X = data.iloc[:, :data.shape[1]-y_values] - Y = data.iloc[:, data.shape[1]-y_values] - return X,Y - - -def main(file,y_values): - X,Y = get_data(file,y_values) - - from sklearn import linear_model - - regr = linear_model.LinearRegression() - regr.fit(X, Y) - print('Regression Coefficients') - print(regr.coef_) - with open('derivatives.csv', 'w') as f: - for index in range(X.shape[0]): - print(index) - for i in range(len(regr.coef_)): - f.write(str(regr.coef_[i])) - if i == len(regr.coef_) - 1: - f.write('\n') - else: - f.write(';') - f.close() - -file = os.path.join(this_file_dir(), 'cd_x_y.csv') -y_values = 1 -main(file,y_values) \ No newline at 
end of file diff --git a/examples/other_files/ML_AI_Plugin/bg_regression.py b/examples/other_files/ML_AI_Plugin/bg_regression.py deleted file mode 100644 index 0f5cf6426..000000000 --- a/examples/other_files/ML_AI_Plugin/bg_regression.py +++ /dev/null @@ -1,149 +0,0 @@ -import pandas as pd -import matplotlib.pyplot as plt -import numpy as np -import math - -### Promising things to check -### scaling the neural net input - -# Data preprocessing -from sklearn.preprocessing import MinMaxScaler -from sklearn.model_selection import train_test_split -from sklearn.preprocessing import StandardScaler -from sklearn.preprocessing import Normalizer - -# Neural Net modules -from keras.models import Sequential -from keras.layers import Dense, Dropout -from keras.callbacks import EarlyStopping -from pyomo.common.fileutils import this_file_dir -import os - -df = pd.read_csv(os.path.join(this_file_dir(), 'cd_x_y.csv'),sep=';',header=None) - -# drop any rows with missing values -print(df.head()) -# convert categorical variable into dummy variables -y = df[16] -X = df.drop(16, axis=1) -print(X.shape, y.shape) - -# convert to numpy array -X = np.array(X) -y = np.array(y) - -# split into X_train and X_test -# always split into X_train, X_test first THEN apply minmax scaler -X_train, X_test, y_train, y_test = train_test_split(X, y, - test_size=0.2, - random_state=123) -print(X_train.shape, X_test.shape, y_train.shape, y_test.shape) - -# use minMax scaler -min_max_scaler = MinMaxScaler() -X_train = min_max_scaler.fit_transform(X_train) -X_test = min_max_scaler.transform(X_test) - -def build_model(): - model = Sequential() - model.add(Dense(6, input_shape=(X_train.shape[1],), activation='relu')) # (features,) - model.add(Dense(6, activation='relu')) - model.add(Dense(1, activation='linear')) # output node - model.summary() # see what your model looks like - return model - -model = build_model() - -# compile the model -model.compile(optimizer='rmsprop', loss='mse', metrics=['mae']) - -# early stopping callback -es = EarlyStopping(monitor='val_loss', - mode='min', - patience=50, - restore_best_weights = True) - -# fit the model! -# attach it to a new variable called 'history' in case -# to look at the learning curves -history = model.fit(X_train, y_train, - validation_data = (X_test, y_test), - callbacks=[es], - epochs=100, - batch_size=50, - verbose=1) - -history_dict = history.history -loss_values = history_dict['loss'] # you can change this -val_loss_values = history_dict['val_loss'] # you can also change this -epochs = range(1, len(loss_values) + 1) # range of X (no. 
of epochs) -#plt.plot(epochs, loss_values, 'bo', label='Training loss') -#plt.plot(epochs, val_loss_values, 'orange', label='Validation loss') -#plt.title('Training and validation loss') -#plt.xlabel('Epochs') -#plt.ylabel('Loss') -#plt.legend() -#plt.show() - -pred = model.predict(X_test) -pred - -trainpreds = model.predict(X_train) - -from sklearn.metrics import mean_absolute_error -print(mean_absolute_error(y_train, trainpreds)) # train -print(mean_absolute_error(y_test, pred)) # test - - -def sigmoid_deriv(x): - ### Input integer or numpy array - ### Outputs the sigmoid derivative of the respective number(s) - sig = 1/(1+math.e**(-1*x)) - return sig*(1-sig) - - -def calculate_derivatives(row,y): - ### Manually calculates the derivatives - ### Currently set to linear Neural Network - ### Can be simplified to matrix multiplication - final = [] - for r in range(len(row)): - for i in range(16): - first = model.layers[0].get_weights()[0][i] - new = [] - for i in range(len(first)): - new.append(first[i]) - weights = model.layers[1].get_weights()[0] - second = [0]*len(model.layers[1].get_weights()[0][0]) - for a in range(len(new)): - for b in range(len(second)): - second[b] += (new[a]*weights[a][b]) - for b in range(len(second)): - second[b] = second[b] - x_final = 0 - for c in range(len(second)): - x_final += second[c]*model.layers[2].get_weights()[0][c] - final.append(x_final[0]) - return final - - -df = pd.read_csv(os.path.join(this_file_dir(), 'cd_x_y.csv'),sep=';',header=None) -y = df[16] -X1 = df.drop(16, axis=1) -### Normalization -scaler = MinMaxScaler() -X1 = scaler.fit_transform(X1) -print(len(X1)) - -with open('derivatives.csv','w') as f: - for index in range(len(X1)): - row = X1[index] - final = calculate_derivatives(row,y[index]) - print(index) - for i in range(len(final)): - f.write(str(final[i])) - if i == len(final)-1: - f.write('\n') - else: - f.write(';') -f.close() \ No newline at end of file diff --git a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py index 1eb350e91..2405dfe3f 100644 --- a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py +++ b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py @@ -17,9 +17,17 @@ import numpy as np import pandas as pd -from sklearn.neural_network import MLPRegressor -import pickle -from types import SimpleNamespace +import matplotlib.pyplot as plt + +# Data preprocessing +from sklearn.preprocessing import MinMaxScaler +from sklearn.model_selection import train_test_split + +# Neural Net modules +from keras import Input +from keras.models import Model +from keras.layers import Dense +from keras.callbacks import EarlyStopping def finite_difference(m1, m2, y1, y2, n_x): """ @@ -68,9 +76,77 @@ def diff(y2, y1, x2, x1): return mid_m, dy_dm +def predict_gradients(midpoints, gradients_midpoints, x, n_m, n_x): + """ + Train MLP regression model with data normalization on gradients at + midpoints to predict gradients at sample point. 
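# A minimal standalone sketch of the idea in this docstring: fit a regressor to
# (midpoint, gradient) pairs and then evaluate it at the original sample points.
# It uses sklearn's MLPRegressor (imported by the first version of this file)
# rather than the Keras model built below; the toy function y = 3*x1 - 2*x2 and
# all shapes and values are illustrative assumptions, not part of this patch.
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from sklearn.neural_network import MLPRegressor

rng = np.random.default_rng(0)
x_toy = rng.uniform(0.0, 1.0, size=(20, 2))           # 20 samples, 2 inputs
mid_toy = 0.5 * (x_toy[:-1] + x_toy[1:])              # 19 points between adjacent samples
dy_toy = np.tile([3.0, -2.0], (19, 1))                # exact dy/dx of y = 3*x1 - 2*x2 there

scaler = MinMaxScaler()
reg = MLPRegressor(hidden_layer_sizes=(6, 6), max_iter=2000, random_state=0)
reg.fit(scaler.fit_transform(mid_toy), dy_toy)        # learn dy/dx as a function of x
dy_at_samples = reg.predict(scaler.transform(x_toy))  # gradients at the original samples
print(dy_at_samples.shape)                            # (20, 2)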
+ """ + # split and normalize data + print("Splitting data into training and test sets...") + X_train, X_test, y_train, y_test = train_test_split(midpoints, + gradients_midpoints, + test_size=0.2, + random_state=123) + print(X_train.shape, X_test.shape, y_train.shape, y_test.shape) + + # use minMax scaler + print("Normalizing data...") + min_max_scaler = MinMaxScaler() + X_train = min_max_scaler.fit_transform(X_train) + X_test = min_max_scaler.transform(X_test) + + print("Training gradient prediction model...") + inputs = Input(shape=X_train.shape[1]) # input node, layer for x1, x2, ... + h1 = Dense(6, activation='relu')(inputs) + h2 = Dense(6, activation='relu')(h1) + outputs = Dense(n_x, activation='linear')(h2) # output node, layer for dy/dx1, dy/dx2, ... + model = Model(inputs=inputs, outputs=outputs) + model.summary() # see what your model looks like + + # compile the model + model.compile(optimizer='rmsprop', loss='mse', metrics=['mae']) + + # early stopping callback + es = EarlyStopping(monitor='val_loss', + mode='min', + patience=50, + restore_best_weights = True) + + # fit the model! + # attach it to a new variable called 'history' in case + # to look at the learning curves + history = model.fit(X_train, y_train, + validation_data = (X_test, y_test), + callbacks=[es], + epochs=100, + batch_size=50, + verbose=1) + if len(history.history['loss']) == 100: + print("Successfully completed, 100 epochs run.") + else: + print("Validation loss stopped improving after ", + len(history.history['loss']), + "epochs. Successfully completed after early stopping.") + + history_dict = history.history + loss_values = history_dict['loss'] # you can change this + val_loss_values = history_dict['val_loss'] # you can also change this + epochs = range(1, len(loss_values) + 1) # range of X (no. of epochs) + plt.plot(epochs, loss_values, 'bo', label='Training loss') + plt.plot(epochs, val_loss_values, 'orange', label='Validation loss') + plt.title('Training and validation loss') + plt.xlabel('Epochs') + plt.ylabel('Loss') + plt.legend() + plt.show() + + gradients = model.predict(x) # predict against original sample points + + return gradients + def generate_gradients(xy_data, n_x): """ - This method implements finite difference approximation and MLP regression + This method implements finite difference approximation and NN regression to estimate the first-order derivatives of a given dataset with columns (x1, x2, ...., xN, y1, y2, ..., yM) where N is the number of input variables and M is the number of output variables. 
The method takes an @@ -109,7 +185,7 @@ def generate_gradients(xy_data, n_x): # get midpoint gradients for one pair of samples at a time and save for m in range(n_m-1): # we have (n_m - 1) adjacent sample pairs - print(m+1, " of ", n_m-1) + print("Midpoint gradient ", m+1, " of ", n_m-1, " generated.") midpoints[m], gradients_midpoints[m] = finite_difference( m1 = x[m,:], m2 = x[m+1,:], @@ -117,8 +193,18 @@ def generate_gradients(xy_data, n_x): y2 = y[m+1][0], # each entry in y is an array somehow n_x = n_x ) + print("Midpoint gradient generation complete.") + print() - return midpoints, gradients_midpoints + # leverage NN regression to predict gradients at sample points + gradients = predict_gradients( + midpoints=midpoints, + gradients_midpoints=gradients_midpoints, + x=x, + n_m=n_m, + n_x=n_x,) + + return gradients if __name__ == "__main__": data = pd.read_csv(r"MEA_carbon_capture_dataset_mimo.csv") @@ -126,5 +212,8 @@ def generate_gradients(xy_data, n_x): data_array = data_array[:, :-1] # take only one output column n_x = 6 - midpoints, gradients = generate_gradients(xy_data=data_array, n_x=n_x) - print("Gradient generation complete.") \ No newline at end of file + gradients = generate_gradients(xy_data=data_array, n_x=n_x) + print("Gradient generation complete.") + + pd.DataFrame(gradients).to_csv("gradients.csv") + print("Gradients written to gradients.csv") diff --git a/examples/other_files/ML_AI_Plugin/gradients.csv b/examples/other_files/ML_AI_Plugin/gradients.csv new file mode 100644 index 000000000..103bd1b46 --- /dev/null +++ b/examples/other_files/ML_AI_Plugin/gradients.csv @@ -0,0 +1,103 @@ +,0,1,2,3,4,5 +0,770.8773,-874.4314,-261.05432,-228.90572,-131.92918,614.3173 +1,816.8847,-927.9707,-294.15442,-277.31323,-147.60168,657.40497 +2,827.8799,-939.08154,-280.26596,-245.64766,-141.64275,659.7095 +3,833.11194,-945.1262,-283.4617,-250.02519,-143.172,664.40106 +4,864.3997,-980.8662,-297.301,-265.7483,-149.97017,690.5239 +5,711.33923,-806.93256,-241.3391,-212.1152,-121.9398,567.0348 +6,981.5561,-1057.9387,-383.19305,-385.6799,-164.5594,859.60565 +7,1022.432,-1151.1028,-350.84558,-311.73416,-173.07173,825.39746 +8,853.33167,-968.849,-300.50433,-276.24915,-151.17082,684.25256 +9,731.3112,-831.28595,-270.09467,-261.66245,-135.14795,591.01447 +10,966.87897,-1097.6057,-338.46033,-308.97775,-170.38057,774.5575 +11,873.4325,-937.65247,-344.0018,-348.74942,-146.03064,769.9672 +12,809.8626,-917.3791,-270.5517,-233.02466,-136.51878,645.0028 +13,921.11847,-1009.1136,-342.58633,-330.12775,-154.58197,782.9936 +14,974.21533,-1073.5865,-356.90842,-339.13266,-164.02452,819.48364 +15,925.4884,-953.7595,-404.5619,-444.2761,-153.77304,872.97003 +16,928.70483,-1041.5311,-322.1009,-289.46432,-156.83658,755.25714 +17,891.9018,-1012.10126,-307.12964,-274.93567,-154.9058,712.6295 +18,879.6826,-932.2414,-359.12958,-374.92838,-146.97182,793.0847 +19,842.36316,-896.6071,-338.7574,-349.30917,-140.34013,753.2899 +20,824.1685,-924.2622,-284.52637,-254.27711,-138.62593,669.6963 +21,704.493,-799.5479,-243.94997,-219.8589,-122.96267,563.38635 +22,969.79425,-1100.8131,-338.15985,-307.28937,-170.3062,776.40857 +23,845.69434,-917.6736,-323.06036,-318.9685,-141.5686,731.3798 +24,826.1764,-937.977,-290.4005,-266.38602,-146.11975,662.2794 +25,903.4318,-1025.3086,-312.70447,-281.6716,-157.62242,722.4303 +26,1033.964,-1150.5782,-368.17706,-340.17972,-174.60318,854.0014 +27,1033.5028,-1144.2047,-373.73584,-350.68698,-174.31024,861.956 +28,921.33606,-1044.9642,-310.3186,-270.2303,-156.92493,733.60034 
+29,780.01984,-885.004,-266.7703,-236.81633,-134.66003,622.56366 +30,857.9081,-973.579,-296.08972,-265.7786,-149.29872,685.7126 +31,854.5099,-969.65607,-294.0529,-263.01236,-148.32277,682.67975 +32,814.2485,-925.1634,-295.62854,-281.22473,-148.20424,656.172 +33,1072.1599,-1216.4923,-367.22507,-326.5749,-185.33023,855.93115 +34,875.42737,-992.71826,-292.5405,-252.17424,-148.07547,696.1972 +35,799.54865,-907.7888,-281.58203,-258.8746,-141.65164,641.13245 +36,747.04114,-848.0717,-261.74936,-239.21652,-131.75316,598.5366 +37,758.27893,-860.5079,-261.5328,-234.5758,-131.88562,606.01733 +38,1017.1832,-1153.6498,-342.35306,-297.84744,-173.13806,809.8265 +39,994.6473,-1128.3412,-338.00345,-297.66586,-170.74274,793.07086 +40,880.5106,-946.0259,-346.37576,-350.84335,-147.38359,775.2563 +41,859.62866,-950.49524,-310.8982,-291.6358,-144.46931,718.16394 +42,747.4842,-848.07477,-255.4355,-226.52802,-128.95137,596.51965 +43,1057.84,-1199.8759,-357.5424,-312.7373,-180.72807,842.74725 +44,871.77704,-988.8051,-294.24744,-256.92758,-148.76105,694.3673 +45,873.4238,-979.2691,-302.5343,-271.42892,-147.22261,710.3907 +46,854.1584,-969.57367,-298.02753,-271.027,-150.08502,683.9006 +47,939.237,-1066.2069,-328.51822,-299.61783,-165.3916,752.3162 +48,934.04095,-1060.4451,-328.465,-301.45935,-165.26208,748.80115 +49,1001.73145,-1135.5392,-335.82556,-290.63864,-169.7054,797.5168 +50,771.3145,-875.57654,-269.605,-245.70067,-135.74469,617.7467 +51,915.6929,-1039.2498,-317.29022,-286.1719,-159.91362,732.3602 +52,946.7799,-1055.3053,-334.93204,-307.34387,-159.74005,779.29346 +53,814.38055,-924.21954,-281.51956,-253.19258,-141.9259,651.0876 +54,992.43054,-1126.1783,-341.8041,-306.03445,-172.38956,792.97314 +55,899.5594,-908.0989,-426.11224,-494.69345,-154.68785,881.82904 +56,909.1474,-943.45526,-391.30978,-425.08524,-151.41048,848.3816 +57,810.4825,-920.9363,-294.93042,-281.2514,-147.81674,653.3825 +58,768.2371,-871.60046,-262.2805,-232.32693,-132.42128,612.9908 +59,985.3747,-1117.521,-330.95038,-287.1521,-167.414,784.2467 +60,735.5241,-835.6134,-265.6822,-251.3324,-133.26941,592.2307 +61,788.066,-895.4426,-286.47293,-272.87775,-143.59511,635.201 +62,1045.0642,-1185.965,-360.727,-323.84082,-181.88559,835.32025 +63,828.0107,-938.9937,-277.25534,-239.62762,-140.30515,658.69354 +64,967.1813,-1097.5544,-333.46088,-298.94897,-168.16109,772.92773 +65,923.57886,-1048.2667,-320.8938,-290.36383,-161.67847,738.98676 +66,733.4438,-832.5819,-256.28323,-233.4708,-129.04282,587.38495 +67,969.6827,-1077.6132,-346.13895,-320.55624,-163.46964,802.7066 +68,981.06506,-1113.3193,-338.3784,-303.49918,-170.63303,784.071 +69,982.1723,-1114.3378,-335.67838,-297.72928,-169.45358,783.8259 +70,801.7218,-910.6546,-287.5044,-269.80615,-144.33241,644.766 +71,788.3306,-894.4517,-269.85464,-239.8192,-136.20238,629.28564 +72,861.33044,-976.79846,-288.67386,-249.78806,-146.06703,685.29584 +73,880.8932,-1000.2977,-312.24518,-289.20673,-156.95894,707.0993 +74,799.77277,-906.9619,-267.66684,-231.19232,-135.46144,636.181 +75,743.6595,-844.5492,-264.65826,-246.25298,-132.97926,597.3283 +76,908.638,-1030.674,-307.47946,-269.35776,-155.40237,724.01685 +77,978.5748,-1085.6659,-351.12253,-326.86816,-164.91104,812.6793 +78,1031.3623,-1169.8389,-348.52618,-304.77734,-176.17497,821.6288 +79,868.2088,-984.9996,-296.16827,-262.0743,-149.54347,692.67114 +80,901.3438,-995.099,-327.68073,-308.96854,-151.51082,755.2689 +81,1024.0276,-1161.6732,-348.035,-306.55176,-175.80695,816.51434 +82,915.89197,-988.7139,-355.48386,-355.95374,-153.37645,799.6498 
+83,711.84674,-808.3312,-252.1605,-233.38727,-126.7676,571.34406 +84,892.1614,-907.38635,-402.99414,-452.63257,-148.30994,859.1999 +85,844.5954,-958.8563,-296.46887,-271.51932,-149.19638,676.8957 +86,719.41486,-817.8793,-267.19485,-260.3692,-133.61473,581.948 +87,757.0916,-860.7594,-281.81586,-275.24847,-140.89104,612.65564 +88,1051.1176,-1192.5682,-359.37265,-318.8864,-181.40596,838.896 +89,903.8351,-1025.0792,-303.95334,-264.16415,-153.7348,719.49286 +90,886.2841,-1005.7414,-305.38568,-273.58188,-154.01501,708.2109 +91,1008.3877,-1116.9032,-363.90543,-340.74298,-169.98735,840.1803 +92,722.44135,-820.4375,-256.90015,-238.8174,-129.09338,580.20935 +93,811.33655,-920.8318,-281.33167,-253.9607,-141.78026,648.9708 +94,910.05286,-1031.0663,-304.20563,-262.23328,-153.56934,724.68134 +95,759.34845,-861.6255,-260.65912,-232.44218,-131.51845,606.4165 +96,865.4456,-982.513,-303.6142,-277.87738,-152.80197,693.5424 +97,993.0316,-1126.539,-337.8496,-297.96613,-170.64133,791.9275 +98,830.6486,-943.6551,-299.74927,-283.2517,-150.37271,668.71594 +99,829.3579,-941.405,-289.1392,-262.69226,-145.62303,663.9573 +100,863.44586,-926.97894,-339.7522,-344.14255,-144.25156,760.97687 +101,1041.4768,-1181.466,-353.94748,-311.73917,-178.79442,830.4211 From b0bf039fffbe6586578441a42d6a1dd314264cff Mon Sep 17 00:00:00 2001 From: Brandon Paul Date: Thu, 7 Sep 2023 15:23:49 -0700 Subject: [PATCH 05/14] fix formatting for black and pylint --- .../ML_AI_Plugin/generate_gradient_data.py | 166 ++++++++++-------- 1 file changed, 88 insertions(+), 78 deletions(-) diff --git a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py index 2405dfe3f..481e9229e 100644 --- a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py +++ b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py @@ -24,33 +24,34 @@ from sklearn.model_selection import train_test_split # Neural Net modules -from keras import Input -from keras.models import Model -from keras.layers import Dense -from keras.callbacks import EarlyStopping +from tensorflow.keras import Input +from tensorflow.keras.models import Model +from tensorflow.keras.layers import Dense +from tensorflow.keras.callbacks import EarlyStopping + def finite_difference(m1, m2, y1, y2, n_x): """ - Calculate the first-order gradient between provided sample points m1 and - m2, where each point is assumed to be a vector with one or more input + Calculate the first-order gradient between provided sample points m1 and + m2, where each point is assumed to be a vector with one or more input variables x and exactly one output variable y. y1 is the value of y1 at m1, and y2 is the value of y at m2. - - The total graident is calculated via chain rule assuming a multivariate - function y(x1, x2, x3, ...). In the notation below, D/D denotes a total - derivative and d/d denotes a partial derivative. Total derivatives are - functions of all (x1, x2, x3, ...) whereas partial derivatives are + + The total graident is calculated via chain rule assuming a multivariate + function y(x1, x2, x3, ...). In the notation below, D/D denotes a total + derivative and d/d denotes a partial derivative. Total derivatives are + functions of all (x1, x2, x3, ...) whereas partial derivatives are functions of one input (e.g. x1) holding (x2, x3, ...) constant: - + Dy/Dx1 = (dy/dx1)(dx1/dx1) + (dy/dx2)(dx2/dx1) + (dy/dx3)(dx3/dx1) +... - - Note that (dx1/dx1) = 1. The partial derivatives dv2/dv1 are estimated + + Note that (dx1/dx1) = 1. 
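    As a worked instance of the formula above (illustrative numbers only, not
    taken from the dataset): for y = 2*x1 + 5*x2 sampled along a path where
    x2 = 2*x1, the partials are dy/dx1 = 2 and dy/dx2 = 5, the path gives
    dx2/dx1 = 2, and so

    Dy/Dx1 = (2)(1) + (5)(2) = 12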
The partial derivatives dv2/dv1 are estimated between sample points m1 and m2 as: - + dv2/dv1 at (m1+m2)/2 = [v2 at m2 - v2 at m1]/[v1 at m2 - v1 at m1] - - The method assumes that m1 is the first point and m2 is the second point, - and returns a vector dy_dm that is the same length as m1 and m2; m1 and m2 + + The method assumes that m1 is the first point and m2 is the second point, + and returns a vector dy_dm that is the same length as m1 and m2; m1 and m2 must be the same length. y1 and y2 must be float or integer values. """ @@ -58,7 +59,7 @@ def diff(y2, y1, x2, x1): """ Calculate derivative of y w.r.t. x. """ - dv2_dv1 = (y2 - y1)/(x2 - x1) + dv2_dv1 = (y2 - y1) / (x2 - x1) return dv2_dv1 @@ -67,26 +68,26 @@ def diff(y2, y1, x2, x1): for i in range(n_x): # for each input xi dy_dm[i] = sum( - diff(y2, y1, m2[j], m1[j]) * # dy/dxj - diff(m2[j], m1[j], m2[i], m1[i]) # dxj/dxi + diff(y2, y1, m2[j], m1[j]) + * diff(m2[j], m1[j], m2[i], m1[i]) # dy/dxj # dxj/dxi for j in range(n_x) - ) # for each input xj + ) # for each input xj mid_m[i] = m2[i] - m1[i] return mid_m, dy_dm + def predict_gradients(midpoints, gradients_midpoints, x, n_m, n_x): """ - Train MLP regression model with data normalization on gradients at + Train MLP regression model with data normalization on gradients at midpoints to predict gradients at sample point. """ # split and normalize data print("Splitting data into training and test sets...") - X_train, X_test, y_train, y_test = train_test_split(midpoints, - gradients_midpoints, - test_size=0.2, - random_state=123) + X_train, X_test, y_train, y_test = train_test_split( + midpoints, gradients_midpoints, test_size=0.2, random_state=123 + ) print(X_train.shape, X_test.shape, y_train.shape, y_test.shape) # use minMax scaler @@ -95,48 +96,54 @@ def predict_gradients(midpoints, gradients_midpoints, x, n_m, n_x): X_train = min_max_scaler.fit_transform(X_train) X_test = min_max_scaler.transform(X_test) - print("Training gradient prediction model...") + print("Training gradient prediction model...") inputs = Input(shape=X_train.shape[1]) # input node, layer for x1, x2, ... - h1 = Dense(6, activation='relu')(inputs) - h2 = Dense(6, activation='relu')(h1) - outputs = Dense(n_x, activation='linear')(h2) # output node, layer for dy/dx1, dy/dx2, ... + h1 = Dense(6, activation="relu")(inputs) + h2 = Dense(6, activation="relu")(h1) + outputs = Dense(n_x, activation="linear")( + h2 + ) # output node, layer for dy/dx1, dy/dx2, ... model = Model(inputs=inputs, outputs=outputs) - model.summary() # see what your model looks like - + model.summary() # see what your model looks like + # compile the model - model.compile(optimizer='rmsprop', loss='mse', metrics=['mae']) + model.compile(optimizer="rmsprop", loss="mse", metrics=["mae"]) # early stopping callback - es = EarlyStopping(monitor='val_loss', - mode='min', - patience=50, - restore_best_weights = True) + es = EarlyStopping( + monitor="val_loss", mode="min", patience=50, restore_best_weights=True + ) # fit the model! 
# attach it to a new variable called 'history' in case # to look at the learning curves - history = model.fit(X_train, y_train, - validation_data = (X_test, y_test), - callbacks=[es], - epochs=100, - batch_size=50, - verbose=1) - if len(history.history['loss']) == 100: + history = model.fit( + X_train, + y_train, + validation_data=(X_test, y_test), + callbacks=[es], + epochs=100, + batch_size=50, + verbose=1, + ) + if len(history.history["loss"]) == 100: print("Successfully completed, 100 epochs run.") else: - print("Validation loss stopped improving after ", - len(history.history['loss']), - "epochs. Successfully completed after early stopping.") + print( + "Validation loss stopped improving after ", + len(history.history["loss"]), + "epochs. Successfully completed after early stopping.", + ) history_dict = history.history - loss_values = history_dict['loss'] # you can change this - val_loss_values = history_dict['val_loss'] # you can also change this - epochs = range(1, len(loss_values) + 1) # range of X (no. of epochs) - plt.plot(epochs, loss_values, 'bo', label='Training loss') - plt.plot(epochs, val_loss_values, 'orange', label='Validation loss') - plt.title('Training and validation loss') - plt.xlabel('Epochs') - plt.ylabel('Loss') + loss_values = history_dict["loss"] # you can change this + val_loss_values = history_dict["val_loss"] # you can also change this + epochs = range(1, len(loss_values) + 1) # range of X (no. of epochs) + plt.plot(epochs, loss_values, "bo", label="Training loss") + plt.plot(epochs, val_loss_values, "orange", label="Validation loss") + plt.title("Training and validation loss") + plt.xlabel("Epochs") + plt.ylabel("Loss") plt.legend() plt.show() @@ -144,28 +151,29 @@ def predict_gradients(midpoints, gradients_midpoints, x, n_m, n_x): return gradients + def generate_gradients(xy_data, n_x): """ - This method implements finite difference approximation and NN regression - to estimate the first-order derivatives of a given dataset with columns - (x1, x2, ...., xN, y1, y2, ..., yM) where N is the number of input + This method implements finite difference approximation and NN regression + to estimate the first-order derivatives of a given dataset with columns + (x1, x2, ...., xN, y1, y2, ..., yM) where N is the number of input variables and M is the number of output variables. The method takes an - array of size (m, n_x + n_y) where m is the number of samples, n_x is the - number of input variables, and n_y is the number of output variables. The - method returns an array of size (m, n_x, n_y) where the first dimension + array of size (m, n_x + n_y) where m is the number of samples, n_x is the + number of input variables, and n_y is the number of output variables. The + method returns an array of size (m, n_x, n_y) where the first dimension spans samples, the second dimension spans gradients dy/dx for each x, and the third dimension spans gradients dy/dx for each y. - + For example, passing an array with 100 samples, 8 inputs and 2 outputs will return an array of size (100, 8, 2) where (:, :, 0) contains all dy1/dx and (:, :, 1) contains all dy2/dx. - + The workflow of this method is as follows: 1. Import xy data in array of size (m, n_x + n_y) and split into x, y - 2. Generate dy in n_y arrays of size (m-1, n_x) which correspond to + 2. Generate dy in n_y arrays of size (m-1, n_x) which correspond to points between samples 3. Normalize x, dy on [0, 1] and train MLP model dy(x) for each dy - 4. Predict dy(x) for m samples from xy data to generate n_y arrays of + 4. 
Predict dy(x) for m samples from xy data to generate n_y arrays of size (m, n_x) which correspond to sample points 5. Concatenate predicted gradients into array of size (m, n_x, n_y) """ @@ -180,32 +188,34 @@ def generate_gradients(xy_data, n_x): # between the sample points, i.e. len(y) - len(dy_midpoints) = 1. # in both midpoints and gradients_midpoints, each column corresponds to an # input variable xi and each row corresponds to a point between two samples - midpoints = np.empty((n_m-1, n_x)) - gradients_midpoints = np.empty((n_m-1, n_x)) + midpoints = np.empty((n_m - 1, n_x)) + gradients_midpoints = np.empty((n_m - 1, n_x)) # get midpoint gradients for one pair of samples at a time and save - for m in range(n_m-1): # we have (n_m - 1) adjacent sample pairs - print("Midpoint gradient ", m+1, " of ", n_m-1, " generated.") + for m in range(n_m - 1): # we have (n_m - 1) adjacent sample pairs + print("Midpoint gradient ", m + 1, " of ", n_m - 1, " generated.") midpoints[m], gradients_midpoints[m] = finite_difference( - m1 = x[m,:], - m2 = x[m+1,:], - y1 = y[m][0], # each entry in y is an array somehow - y2 = y[m+1][0], # each entry in y is an array somehow - n_x = n_x - ) + m1=x[m, :], + m2=x[m + 1, :], + y1=y[m][0], # each entry in y is an array somehow + y2=y[m + 1][0], # each entry in y is an array somehow + n_x=n_x, + ) print("Midpoint gradient generation complete.") print() # leverage NN regression to predict gradients at sample points gradients = predict_gradients( - midpoints=midpoints, + midpoints=midpoints, gradients_midpoints=gradients_midpoints, x=x, n_m=n_m, - n_x=n_x,) + n_x=n_x, + ) return gradients + if __name__ == "__main__": data = pd.read_csv(r"MEA_carbon_capture_dataset_mimo.csv") data_array = np.array(data, ndmin=2) From 73c1428d4f574f5ded98cc531f252b82c2faecb4 Mon Sep 17 00:00:00 2001 From: Brandon Paul Date: Fri, 8 Sep 2023 10:53:42 -0700 Subject: [PATCH 06/14] Generalize for multiple outputs, add settings optimization loop --- .../ML_AI_Plugin/generate_gradient_data.py | 300 +++++++++++++----- .../other_files/ML_AI_Plugin/gradients.csv | 103 ------ .../ML_AI_Plugin/gradients_output0.csv | 103 ++++++ .../ML_AI_Plugin/gradients_output1.csv | 103 ++++++ 4 files changed, 418 insertions(+), 191 deletions(-) delete mode 100644 examples/other_files/ML_AI_Plugin/gradients.csv create mode 100644 examples/other_files/ML_AI_Plugin/gradients_output0.csv create mode 100644 examples/other_files/ML_AI_Plugin/gradients_output1.csv diff --git a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py index 481e9229e..a89c9685c 100644 --- a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py +++ b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py @@ -24,11 +24,24 @@ from sklearn.model_selection import train_test_split # Neural Net modules +import tensorflow as tf from tensorflow.keras import Input from tensorflow.keras.models import Model from tensorflow.keras.layers import Dense from tensorflow.keras.callbacks import EarlyStopping +import os +import random as rn + +# set seed values for reproducibility +os.environ["PYTHONHASHSEED"] = "0" +os.environ[ + "CUDA_VISIBLE_DEVICES" +] = "" # changing "" to "0" or "-1" may solve import issues +np.random.seed(46) +rn.seed(1342) +tf.random.set_seed(62) + def finite_difference(m1, m2, y1, y2, n_x): """ @@ -78,16 +91,37 @@ def diff(y2, y1, x2, x1): return mid_m, dy_dm -def predict_gradients(midpoints, gradients_midpoints, x, n_m, n_x): +def predict_gradients( + 
midpoints, + gradients_midpoints, + x, + n_m, + n_x, + show_plots=True, + optimize_training=False, +): """ Train MLP regression model with data normalization on gradients at midpoints to predict gradients at sample point. + + Setting random_state to an integer and shuffle to False, along with the + fixed seeds in the import section at the top of this file, will ensure + reproducible results each time the file is run. However, calling the model + training multiple times on the same data in the same file run will produce + different results due to randomness in the random_state instance that is + generated. Therefore, the training is performed for a preset list of model + settings and the best option is selected. """ - # split and normalize data - print("Splitting data into training and test sets...") + # split into X_train and X_test + # always split into X_train, X_test first THEN apply minmax scaler + print("Normalizing data...") X_train, X_test, y_train, y_test = train_test_split( - midpoints, gradients_midpoints, test_size=0.2, random_state=123 - ) + midpoints, + gradients_midpoints, + test_size=0.2, + random_state=42, # for reproducibility + shuffle=False, + ) # for reproducibility print(X_train.shape, X_test.shape, y_train.shape, y_test.shape) # use minMax scaler @@ -97,62 +131,135 @@ def predict_gradients(midpoints, gradients_midpoints, x, n_m, n_x): X_test = min_max_scaler.transform(X_test) print("Training gradient prediction model...") - inputs = Input(shape=X_train.shape[1]) # input node, layer for x1, x2, ... - h1 = Dense(6, activation="relu")(inputs) - h2 = Dense(6, activation="relu")(h1) - outputs = Dense(n_x, activation="linear")( - h2 - ) # output node, layer for dy/dx1, dy/dx2, ... - model = Model(inputs=inputs, outputs=outputs) - model.summary() # see what your model looks like - - # compile the model - model.compile(optimizer="rmsprop", loss="mse", metrics=["mae"]) - - # early stopping callback - es = EarlyStopping( - monitor="val_loss", mode="min", patience=50, restore_best_weights=True - ) - - # fit the model! - # attach it to a new variable called 'history' in case - # to look at the learning curves - history = model.fit( - X_train, - y_train, - validation_data=(X_test, y_test), - callbacks=[es], - epochs=100, - batch_size=50, - verbose=1, - ) - if len(history.history["loss"]) == 100: - print("Successfully completed, 100 epochs run.") + best_loss = 1e30 # insanely high value that will for sure be beaten + best_model = None + best_settings = None + progress = 0 + + if optimize_training: + optimizers = ["Adam", "rmsprop"] + activations = ["relu", "sigmoid"] + act_outs = ["linear", "relu"] + num_neurons = [6, 12] + num_hidden_layers = [2, 8] else: - print( - "Validation loss stopped improving after ", - len(history.history["loss"]), - "epochs. Successfully completed after early stopping.", - ) - - history_dict = history.history - loss_values = history_dict["loss"] # you can change this - val_loss_values = history_dict["val_loss"] # you can also change this - epochs = range(1, len(loss_values) + 1) # range of X (no. 
of epochs) - plt.plot(epochs, loss_values, "bo", label="Training loss") - plt.plot(epochs, val_loss_values, "orange", label="Validation loss") - plt.title("Training and validation loss") - plt.xlabel("Epochs") - plt.ylabel("Loss") - plt.legend() - plt.show() - - gradients = model.predict(x) # predict against original sample points + optimizers = [ + "Adam", + ] + activations = [ + "relu", + ] + act_outs = [ + "linear", + ] + num_neurons = [ + 6, + ] + num_hidden_layers = [ + 2, + ] + + for optimizer in optimizers: + for activation in activations: + for act_out in act_outs: + for neuron in num_neurons: + for num_hidden_layer in num_hidden_layers: + progress += 1 + if optimize_training: + print( + "Trying ", + optimizer, + "solver with ", + activation, + "on hidden nodes, ", + act_out, + "on output node with ", + neuron, + "neurons per node and ", + num_hidden_layer, + "hidden layers", + ) + inputs = Input( + shape=X_train.shape[1] + ) # input node, layer for x1, x2, ... + h = Dense(neuron, activation=activation)(inputs) + for num in range(num_hidden_layer): + h = Dense(neuron, activation=activation)(h) + outputs = Dense(n_x, activation=act_out)( + h + ) # output node, layer for dy/dx1, dy/dx2, ... + model = Model(inputs=inputs, outputs=outputs) + # model.summary() # see what your model looks like + + # compile the model + model.compile(optimizer=optimizer, loss="mse", metrics=["mae"]) + + # early stopping callback + es = EarlyStopping( + monitor="val_loss", + mode="min", + patience=50, + restore_best_weights=True, + ) + + # fit the model! + # attach it to a new variable called 'history' in case + # to look at the learning curves + history = model.fit( + X_train, + y_train, + validation_data=(X_test, y_test), + callbacks=[es], + epochs=100, + batch_size=50, + verbose=0, + ) + if len(history.history["loss"]) == 100: + print("Successfully completed, 100 epochs run.") + else: + print( + "Validation loss stopped improving after ", + len(history.history["loss"]), + "epochs. Successfully completed after early stopping.", + ) + print("Loss: ", sum(history.history["loss"])) + if optimize_training: + print("Progress: ", 100 * progress / 32, "%") + + if sum(history.history["loss"]) < best_loss: + best_loss = sum(history.history["loss"]) + best_model = model + best_history = history + best_settings = [ + optimizer, + activation, + act_out, + neuron, + num_hidden_layer, + ] + + if optimize_training: + print("The best settings are: ", best_settings) + + if show_plots: + history_dict = best_history.history + loss_values = history_dict["loss"] # you can change this + val_loss_values = history_dict["val_loss"] # you can also change this + epochs = range(1, len(loss_values) + 1) # range of X (no. 
of epochs) + plt.plot(epochs, loss_values, "bo", label="Training loss") + plt.plot(epochs, val_loss_values, "orange", label="Validation loss") + plt.title("Training and validation loss") + plt.xlabel("Epochs") + plt.ylabel("Loss") + plt.legend() + plt.show() + + gradients = best_model.predict(x) # predict against original sample points return gradients -def generate_gradients(xy_data, n_x): +def generate_gradients(xy_data, n_x, show_plots=True, optimize_training=False): """ This method implements finite difference approximation and NN regression to estimate the first-order derivatives of a given dataset with columns @@ -181,37 +288,46 @@ def generate_gradients(xy_data, n_x): # split data into inputs and outputs x = xy_data[:, :n_x] # there are n_x input variables/columns y = xy_data[:, n_x:] # the rest are output variables/columns + n_y = np.shape(y)[1] # save number of outputs n_m = np.shape(y)[0] # save number of samples - # estimate first-order gradients using finite difference approximation - # this will account for all input variables, but will be for the midpoints - # between the sample points, i.e. len(y) - len(dy_midpoints) = 1. - # in both midpoints and gradients_midpoints, each column corresponds to an - # input variable xi and each row corresponds to a point between two samples - midpoints = np.empty((n_m - 1, n_x)) - gradients_midpoints = np.empty((n_m - 1, n_x)) - - # get midpoint gradients for one pair of samples at a time and save - for m in range(n_m - 1): # we have (n_m - 1) adjacent sample pairs - print("Midpoint gradient ", m + 1, " of ", n_m - 1, " generated.") - midpoints[m], gradients_midpoints[m] = finite_difference( - m1=x[m, :], - m2=x[m + 1, :], - y1=y[m][0], # each entry in y is an array somehow - y2=y[m + 1][0], # each entry in y is an array somehow - n_x=n_x, + gradients = [] # empty list to hold gradient arrays for multiple outputs + + for output in range(n_y): + print("Generating gradients for output ", output, ":") + # estimate first-order gradients using finite difference approximation + # this will account for all input variables, but will be for the midpoints + # between the sample points, i.e. len(y) - len(dy_midpoints) = 1. 
+ # in both midpoints and gradients_midpoints, each column corresponds to an + # input variable xi and each row corresponds to a point between two samples + midpoints = np.empty((n_m - 1, n_x)) + gradients_midpoints = np.empty((n_m - 1, n_x)) + + # get midpoint gradients for one pair of samples at a time and save + for m in range(n_m - 1): # we have (n_m - 1) adjacent sample pairs + print("Midpoint gradient ", m + 1, " of ", n_m - 1, " generated.") + midpoints[m], gradients_midpoints[m] = finite_difference( + m1=x[m, :], + m2=x[m + 1, :], + y1=y[m][output], # each entry in y is an array somehow + y2=y[m + 1][output], # each entry in y is an array somehow + n_x=n_x, + ) + print("Midpoint gradient generation complete.") + print() + + # leverage NN regression to predict gradients at sample points + gradients.append( + predict_gradients( + midpoints=midpoints, + gradients_midpoints=gradients_midpoints, + x=x, + n_m=n_m, + n_x=n_x, + show_plots=show_plots, + optimize_training=optimize_training, + ) ) - print("Midpoint gradient generation complete.") - print() - - # leverage NN regression to predict gradients at sample points - gradients = predict_gradients( - midpoints=midpoints, - gradients_midpoints=gradients_midpoints, - x=x, - n_m=n_m, - n_x=n_x, - ) return gradients @@ -219,11 +335,19 @@ def generate_gradients(xy_data, n_x): if __name__ == "__main__": data = pd.read_csv(r"MEA_carbon_capture_dataset_mimo.csv") data_array = np.array(data, ndmin=2) - data_array = data_array[:, :-1] # take only one output column n_x = 6 - gradients = generate_gradients(xy_data=data_array, n_x=n_x) + gradients = generate_gradients( + xy_data=data_array, n_x=n_x, show_plots=False, optimize_training=True + ) print("Gradient generation complete.") - pd.DataFrame(gradients).to_csv("gradients.csv") - print("Gradients written to gradients.csv") + for output in range(len(gradients)): + pd.DataFrame(gradients[output]).to_csv( + "gradients_output" + str(output) + ".csv" + ) + print( + "Gradients for output ", + str(output), + " written to gradients_output" + str(output) + ".csv", + ) diff --git a/examples/other_files/ML_AI_Plugin/gradients.csv b/examples/other_files/ML_AI_Plugin/gradients.csv deleted file mode 100644 index 103bd1b46..000000000 --- a/examples/other_files/ML_AI_Plugin/gradients.csv +++ /dev/null @@ -1,103 +0,0 @@ -,0,1,2,3,4,5 -0,770.8773,-874.4314,-261.05432,-228.90572,-131.92918,614.3173 -1,816.8847,-927.9707,-294.15442,-277.31323,-147.60168,657.40497 -2,827.8799,-939.08154,-280.26596,-245.64766,-141.64275,659.7095 -3,833.11194,-945.1262,-283.4617,-250.02519,-143.172,664.40106 -4,864.3997,-980.8662,-297.301,-265.7483,-149.97017,690.5239 -5,711.33923,-806.93256,-241.3391,-212.1152,-121.9398,567.0348 -6,981.5561,-1057.9387,-383.19305,-385.6799,-164.5594,859.60565 -7,1022.432,-1151.1028,-350.84558,-311.73416,-173.07173,825.39746 -8,853.33167,-968.849,-300.50433,-276.24915,-151.17082,684.25256 -9,731.3112,-831.28595,-270.09467,-261.66245,-135.14795,591.01447 -10,966.87897,-1097.6057,-338.46033,-308.97775,-170.38057,774.5575 -11,873.4325,-937.65247,-344.0018,-348.74942,-146.03064,769.9672 -12,809.8626,-917.3791,-270.5517,-233.02466,-136.51878,645.0028 -13,921.11847,-1009.1136,-342.58633,-330.12775,-154.58197,782.9936 -14,974.21533,-1073.5865,-356.90842,-339.13266,-164.02452,819.48364 -15,925.4884,-953.7595,-404.5619,-444.2761,-153.77304,872.97003 -16,928.70483,-1041.5311,-322.1009,-289.46432,-156.83658,755.25714 -17,891.9018,-1012.10126,-307.12964,-274.93567,-154.9058,712.6295 
-18,879.6826,-932.2414,-359.12958,-374.92838,-146.97182,793.0847 -19,842.36316,-896.6071,-338.7574,-349.30917,-140.34013,753.2899 -20,824.1685,-924.2622,-284.52637,-254.27711,-138.62593,669.6963 -21,704.493,-799.5479,-243.94997,-219.8589,-122.96267,563.38635 -22,969.79425,-1100.8131,-338.15985,-307.28937,-170.3062,776.40857 -23,845.69434,-917.6736,-323.06036,-318.9685,-141.5686,731.3798 -24,826.1764,-937.977,-290.4005,-266.38602,-146.11975,662.2794 -25,903.4318,-1025.3086,-312.70447,-281.6716,-157.62242,722.4303 -26,1033.964,-1150.5782,-368.17706,-340.17972,-174.60318,854.0014 -27,1033.5028,-1144.2047,-373.73584,-350.68698,-174.31024,861.956 -28,921.33606,-1044.9642,-310.3186,-270.2303,-156.92493,733.60034 -29,780.01984,-885.004,-266.7703,-236.81633,-134.66003,622.56366 -30,857.9081,-973.579,-296.08972,-265.7786,-149.29872,685.7126 -31,854.5099,-969.65607,-294.0529,-263.01236,-148.32277,682.67975 -32,814.2485,-925.1634,-295.62854,-281.22473,-148.20424,656.172 -33,1072.1599,-1216.4923,-367.22507,-326.5749,-185.33023,855.93115 -34,875.42737,-992.71826,-292.5405,-252.17424,-148.07547,696.1972 -35,799.54865,-907.7888,-281.58203,-258.8746,-141.65164,641.13245 -36,747.04114,-848.0717,-261.74936,-239.21652,-131.75316,598.5366 -37,758.27893,-860.5079,-261.5328,-234.5758,-131.88562,606.01733 -38,1017.1832,-1153.6498,-342.35306,-297.84744,-173.13806,809.8265 -39,994.6473,-1128.3412,-338.00345,-297.66586,-170.74274,793.07086 -40,880.5106,-946.0259,-346.37576,-350.84335,-147.38359,775.2563 -41,859.62866,-950.49524,-310.8982,-291.6358,-144.46931,718.16394 -42,747.4842,-848.07477,-255.4355,-226.52802,-128.95137,596.51965 -43,1057.84,-1199.8759,-357.5424,-312.7373,-180.72807,842.74725 -44,871.77704,-988.8051,-294.24744,-256.92758,-148.76105,694.3673 -45,873.4238,-979.2691,-302.5343,-271.42892,-147.22261,710.3907 -46,854.1584,-969.57367,-298.02753,-271.027,-150.08502,683.9006 -47,939.237,-1066.2069,-328.51822,-299.61783,-165.3916,752.3162 -48,934.04095,-1060.4451,-328.465,-301.45935,-165.26208,748.80115 -49,1001.73145,-1135.5392,-335.82556,-290.63864,-169.7054,797.5168 -50,771.3145,-875.57654,-269.605,-245.70067,-135.74469,617.7467 -51,915.6929,-1039.2498,-317.29022,-286.1719,-159.91362,732.3602 -52,946.7799,-1055.3053,-334.93204,-307.34387,-159.74005,779.29346 -53,814.38055,-924.21954,-281.51956,-253.19258,-141.9259,651.0876 -54,992.43054,-1126.1783,-341.8041,-306.03445,-172.38956,792.97314 -55,899.5594,-908.0989,-426.11224,-494.69345,-154.68785,881.82904 -56,909.1474,-943.45526,-391.30978,-425.08524,-151.41048,848.3816 -57,810.4825,-920.9363,-294.93042,-281.2514,-147.81674,653.3825 -58,768.2371,-871.60046,-262.2805,-232.32693,-132.42128,612.9908 -59,985.3747,-1117.521,-330.95038,-287.1521,-167.414,784.2467 -60,735.5241,-835.6134,-265.6822,-251.3324,-133.26941,592.2307 -61,788.066,-895.4426,-286.47293,-272.87775,-143.59511,635.201 -62,1045.0642,-1185.965,-360.727,-323.84082,-181.88559,835.32025 -63,828.0107,-938.9937,-277.25534,-239.62762,-140.30515,658.69354 -64,967.1813,-1097.5544,-333.46088,-298.94897,-168.16109,772.92773 -65,923.57886,-1048.2667,-320.8938,-290.36383,-161.67847,738.98676 -66,733.4438,-832.5819,-256.28323,-233.4708,-129.04282,587.38495 -67,969.6827,-1077.6132,-346.13895,-320.55624,-163.46964,802.7066 -68,981.06506,-1113.3193,-338.3784,-303.49918,-170.63303,784.071 -69,982.1723,-1114.3378,-335.67838,-297.72928,-169.45358,783.8259 -70,801.7218,-910.6546,-287.5044,-269.80615,-144.33241,644.766 -71,788.3306,-894.4517,-269.85464,-239.8192,-136.20238,629.28564 
-72,861.33044,-976.79846,-288.67386,-249.78806,-146.06703,685.29584 -73,880.8932,-1000.2977,-312.24518,-289.20673,-156.95894,707.0993 -74,799.77277,-906.9619,-267.66684,-231.19232,-135.46144,636.181 -75,743.6595,-844.5492,-264.65826,-246.25298,-132.97926,597.3283 -76,908.638,-1030.674,-307.47946,-269.35776,-155.40237,724.01685 -77,978.5748,-1085.6659,-351.12253,-326.86816,-164.91104,812.6793 -78,1031.3623,-1169.8389,-348.52618,-304.77734,-176.17497,821.6288 -79,868.2088,-984.9996,-296.16827,-262.0743,-149.54347,692.67114 -80,901.3438,-995.099,-327.68073,-308.96854,-151.51082,755.2689 -81,1024.0276,-1161.6732,-348.035,-306.55176,-175.80695,816.51434 -82,915.89197,-988.7139,-355.48386,-355.95374,-153.37645,799.6498 -83,711.84674,-808.3312,-252.1605,-233.38727,-126.7676,571.34406 -84,892.1614,-907.38635,-402.99414,-452.63257,-148.30994,859.1999 -85,844.5954,-958.8563,-296.46887,-271.51932,-149.19638,676.8957 -86,719.41486,-817.8793,-267.19485,-260.3692,-133.61473,581.948 -87,757.0916,-860.7594,-281.81586,-275.24847,-140.89104,612.65564 -88,1051.1176,-1192.5682,-359.37265,-318.8864,-181.40596,838.896 -89,903.8351,-1025.0792,-303.95334,-264.16415,-153.7348,719.49286 -90,886.2841,-1005.7414,-305.38568,-273.58188,-154.01501,708.2109 -91,1008.3877,-1116.9032,-363.90543,-340.74298,-169.98735,840.1803 -92,722.44135,-820.4375,-256.90015,-238.8174,-129.09338,580.20935 -93,811.33655,-920.8318,-281.33167,-253.9607,-141.78026,648.9708 -94,910.05286,-1031.0663,-304.20563,-262.23328,-153.56934,724.68134 -95,759.34845,-861.6255,-260.65912,-232.44218,-131.51845,606.4165 -96,865.4456,-982.513,-303.6142,-277.87738,-152.80197,693.5424 -97,993.0316,-1126.539,-337.8496,-297.96613,-170.64133,791.9275 -98,830.6486,-943.6551,-299.74927,-283.2517,-150.37271,668.71594 -99,829.3579,-941.405,-289.1392,-262.69226,-145.62303,663.9573 -100,863.44586,-926.97894,-339.7522,-344.14255,-144.25156,760.97687 -101,1041.4768,-1181.466,-353.94748,-311.73917,-178.79442,830.4211 diff --git a/examples/other_files/ML_AI_Plugin/gradients_output0.csv b/examples/other_files/ML_AI_Plugin/gradients_output0.csv new file mode 100644 index 000000000..cb83a754e --- /dev/null +++ b/examples/other_files/ML_AI_Plugin/gradients_output0.csv @@ -0,0 +1,103 @@ +,0,1,2,3,4,5 +0,79.931435,-91.37865,278.3458,-66.22485,-144.19691,-120.84488 +1,94.582245,-110.10992,300.8567,-73.11867,-152.05132,-125.41921 +2,85.885605,-98.1704,299.2175,-71.19216,-155.06975,-129.99739 +3,87.259796,-99.91246,301.5615,-71.87913,-155.94972,-130.558 +4,92.54158,-106.33635,314.34235,-75.22922,-161.8603,-135.14842 +5,73.96548,-84.616325,256.793,-61.13261,-132.89908,-111.301575 +6,101.51534,-114.83136,375.2238,-88.30013,-196.59392,-166.01509 +7,104.4355,-118.784904,372.10513,-88.11146,-194.04385,-163.31714 +8,95.5073,-110.56375,312.69635,-75.474976,-159.38446,-132.2193 +9,85.89002,-100.33718,268.96695,-65.61988,-134.93253,-110.60035 +10,107.12147,-123.775406,353.9702,-85.26302,-180.93675,-150.3827 +11,89.97311,-101.7761,335.0388,-78.815155,-175.45778,-148.21397 +12,81.34667,-92.42115,291.44708,-68.91341,-152.12305,-128.09349 +13,94.21727,-106.66829,345.23126,-81.32502,-180.78975,-152.587 +14,100.05553,-113.37235,363.0728,-85.6386,-190.10966,-160.41222 +15,97.00606,-110.72163,371.44797,-88.047485,-192.93825,-163.63539 +16,94.619514,-107.48416,339.12753,-80.19324,-177.08513,-149.16327 +17,95.700294,-110.008865,324.45834,-77.68349,-166.98286,-139.37932 +18,91.54417,-103.73887,342.68762,-80.747574,-179.18858,-151.50475 +19,86.73082,-98.38803,326.0057,-76.86773,-170.24901,-143.97746 
+20,83.088455,-94.23336,300.0793,-70.82989,-156.9394,-132.31006 +21,76.19791,-87.76109,256.0701,-61.41514,-131.38304,-109.43264 +22,106.660065,-123.0925,354.5869,-85.29139,-181.55768,-151.06174 +23,86.590866,-97.94937,320.336,-75.36868,-167.795,-141.6703 +24,92.13782,-106.60332,302.52957,-72.97113,-154.32234,-128.08342 +25,97.92853,-112.7542,329.34653,-79.00371,-169.14671,-141.00336 +26,106.08274,-120.421,381.3794,-90.129234,-199.32547,-167.99362 +27,106.19588,-120.43985,383.36206,-90.51539,-200.56342,-169.1409 +28,94.74466,-108.09889,332.81857,-79.05112,-172.90164,-145.17569 +29,82.493935,-94.619385,282.7624,-67.52178,-145.90335,-121.97543 +30,92.40509,-106.29988,312.19022,-74.80261,-160.4998,-133.87491 +31,91.48199,-105.13964,310.51373,-74.3178,-159.82019,-133.40034 +32,94.66799,-110.33125,299.5993,-72.90135,-151.06844,-124.3675 +33,113.98216,-130.7881,389.71973,-93.13937,-201.07388,-168.11066 +34,88.5444,-100.747665,315.08737,-74.616066,-164.1874,-138.10957 +35,89.40093,-103.50187,292.67963,-70.63637,-149.13734,-123.68622 +36,82.723145,-95.62282,272.9512,-65.75266,-139.38461,-115.75739 +37,81.478325,-93.71561,275.56238,-66.00188,-141.6679,-118.15808 +38,104.53298,-119.23347,367.6137,-87.30109,-191.0671,-160.48166 +39,104.15131,-119.19312,360.6145,-85.93833,-186.67583,-156.39975 +40,90.9033,-102.78483,337.60165,-79.39711,-176.89305,-149.39786 +41,87.57932,-99.21862,318.65125,-75.12937,-166.82394,-140.7529 +42,78.86872,-90.44279,270.6591,-64.607605,-139.67178,-116.76642 +43,109.58537,-125.179756,382.77017,-91.0369,-198.5881,-166.60995 +44,89.959404,-102.71762,314.93475,-74.85476,-163.44153,-137.13853 +45,88.56739,-100.53083,318.60922,-75.274895,-166.49808,-140.30632 +46,93.97358,-108.47746,312.10815,-75.08324,-159.72963,-132.85193 +47,103.84993,-119.96836,343.58615,-82.733055,-175.66719,-146.01862 +48,104.34892,-120.74195,342.39325,-82.60815,-174.66557,-144.98047 +49,101.884895,-115.99141,361.49432,-85.67668,-188.30147,-158.37337 +50,85.029686,-98.21587,281.60574,-67.77905,-143.95435,-119.6326 +51,99.44558,-114.5432,333.8602,-80.11693,-171.37318,-142.80855 +52,96.76797,-109.824524,348.25452,-82.27827,-182.0416,-153.43805 +53,87.98135,-101.264496,296.48602,-71.08086,-152.31747,-126.99219 +54,106.57264,-122.51085,361.21567,-86.49449,-185.90804,-155.18419 +55,95.18846,-109.54394,373.46933,-89.14667,-192.65071,-164.01154 +56,95.32428,-108.55919,362.39514,-85.74988,-188.6346,-159.84265 +57,94.36733,-110.01563,298.23788,-72.5955,-150.27928,-123.645424 +58,80.96275,-92.81039,278.2817,-66.40884,-143.69016,-120.17596 +59,100.80213,-114.89454,355.72488,-84.40852,-185.03258,-155.48463 +60,85.09368,-99.09072,270.15057,-65.67492,-136.4739,-112.53742 +61,91.73927,-106.93907,290.1222,-70.610664,-146.22386,-120.33162 +62,112.75414,-129.70657,380.83517,-91.27057,-195.84537,-163.39954 +63,84.06326,-95.72013,298.14188,-70.65328,-155.2156,-130.48709 +64,104.05963,-119.66364,352.11176,-84.34567,-181.13647,-151.15488 +65,100.83582,-116.24165,337.11832,-80.979,-172.85855,-143.94942 +66,80.809845,-93.33055,267.7837,-64.44447,-136.91432,-113.79709 +67,99.174774,-112.49215,357.83688,-84.4939,-187.16711,-157.81778 +68,105.6971,-121.55783,357.4392,-85.63969,-183.87646,-153.4476 +69,103.929634,-119.17012,356.58594,-85.148674,-184.12587,-154.01427 +70,92.47953,-107.591736,295.0471,-71.649925,-149.30153,-123.27631 +71,83.498795,-95.801704,285.79437,-68.26643,-147.40321,-123.19326 +72,87.65054,-99.83635,310.37653,-73.58224,-161.53961,-135.7838 +73,99.82143,-115.783455,323.5758,-78.287155,-164.47331,-136.19893 
+74,81.10221,-92.33355,287.86932,-68.20526,-149.89023,-126.02049 +75,84.82247,-98.507744,273.2816,-66.2077,-138.63264,-114.646614 +76,94.25473,-107.71716,328.59326,-78.176254,-170.35378,-142.84769 +77,100.140976,-113.55463,361.80283,-85.4055,-189.30336,-159.65094 +78,106.80259,-121.995224,373.13907,-88.74069,-193.59967,-162.42856 +79,91.41787,-104.76248,314.63065,-75.06696,-162.54462,-135.99553 +80,91.99855,-104.2212,334.81345,-78.9388,-175.2956,-147.9078 +81,107.272934,-122.769585,371.3485,-88.50218,-192.231,-161.05492 +82,94.29153,-106.64442,349.22665,-82.157936,-182.95972,-154.49773 +83,80.45438,-93.30924,261.043,-63.13301,-132.67351,-109.84852 +84,93.797005,-107.47615,363.42218,-86.42586,-188.1302,-159.83447 +85,93.97434,-108.6794,309.23273,-74.5526,-157.85104,-131.0732 +86,84.74202,-99.0697,264.4535,-64.573044,-132.45355,-108.41818 +87,89.35248,-104.49542,278.45386,-68.018364,-139.35516,-113.987 +88,111.36116,-127.70374,381.85748,-91.20087,-197.16957,-164.92711 +89,92.622086,-105.62274,326.17862,-77.426605,-169.5413,-142.39651 +90,95.23853,-109.49817,322.5822,-77.25404,-165.9871,-138.53525 +91,103.42994,-117.28234,373.6911,-88.213684,-195.5346,-164.91391 +92,82.199936,-95.446045,265.12994,-64.20796,-134.5044,-111.22829 +93,88.1462,-101.556786,295.59354,-70.94446,-151.64618,-126.31735 +94,91.655136,-104.17953,327.67914,-77.52028,-170.95883,-143.91682 +95,80.88851,-92.89211,275.62125,-65.90586,-141.99658,-118.59313 +96,96.22409,-111.26226,316.88654,-76.38586,-161.80428,-134.38269 +97,104.21575,-119.313126,360.1738,-85.86917,-186.35872,-156.08759 +98,96.33667,-112.188194,306.03976,-74.404564,-154.56291,-127.41383 +99,91.008125,-105.0344,302.62543,-72.77232,-154.88487,-128.81706 +100,88.78199,-100.46858,331.0735,-77.90511,-173.30588,-146.4144 +101,109.10222,-124.86166,377.69528,-90.01473,-195.52164,-163.8152 diff --git a/examples/other_files/ML_AI_Plugin/gradients_output1.csv b/examples/other_files/ML_AI_Plugin/gradients_output1.csv new file mode 100644 index 000000000..ef77d02a7 --- /dev/null +++ b/examples/other_files/ML_AI_Plugin/gradients_output1.csv @@ -0,0 +1,103 @@ +,0,1,2,3,4,5 +0,39.069317,8.79817,4.92915,-7.4853544,-8.395348,19.75606 +1,42.996727,9.8077135,5.8568726,-7.9216003,-10.108579,21.688585 +2,42.03033,9.456588,4.950679,-8.164676,-8.510103,21.309946 +3,42.356087,9.59381,5.2045197,-8.065804,-9.007373,21.446346 +4,44.186485,10.144742,5.4463677,-8.191303,-9.6827545,22.38414 +5,36.02388,8.135324,4.8232055,-6.7867727,-8.185115,18.173038 +6,53.99135,12.499687,0.6233006,-11.576059,-3.6904032,28.462757 +7,52.769604,12.082337,3.0270822,-10.82522,-6.6390023,27.343578 +8,44.249977,10.149398,5.826031,-8.117586,-10.2154045,22.356815 +9,39.90996,9.376428,4.458464,-7.1839647,-8.513556,20.320137 +10,49.953213,11.551039,6.249258,-9.10547,-11.237158,25.29983 +11,48.043804,10.999258,0.9791989,-10.374518,-3.629604,25.235342 +12,40.885853,9.04533,4.0700073,-8.394886,-6.914519,20.835707 +13,49.143448,11.14489,2.1298347,-10.454416,-5.0446134,25.587364 +14,51.834152,11.888842,1.8800223,-10.918737,-5.0747924,27.070217 +15,54.05637,12.943797,-1.326961,-11.652617,-1.5783217,29.099876 +16,47.996677,10.889046,3.050812,-9.91996,-6.2547426,24.804693 +17,45.607685,10.488318,5.678906,-8.411667,-10.108815,23.09641 +18,49.528553,11.520241,-0.14379533,-10.7399235,-2.5164645,26.25208 +19,46.65241,10.591434,1.1169223,-10.169249,-3.5818243,24.464869 +20,42.068253,9.276815,3.931426,-8.757217,-6.688584,21.476854 +21,36.100704,8.185345,5.0787873,-6.6817408,-8.619909,18.176353 
+22,49.974148,11.569446,6.202605,-9.100825,-11.19619,25.319336 +23,45.681526,10.354372,1.6739062,-9.814797,-4.264869,23.843126 +24,42.78735,9.812289,5.636801,-7.8503227,-9.879754,21.61704 +25,46.319107,10.708403,5.7961755,-8.444504,-10.416288,23.457785 +26,54.35506,12.500773,2.2765284,-11.307072,-5.7989984,28.331184 +27,54.733475,12.599085,1.9960805,-11.45417,-5.4578533,28.586172 +28,46.719357,10.511131,4.6061616,-9.325666,-8.171637,23.833223 +29,39.709633,9.054628,5.107865,-7.4008303,-8.889133,20.075207 +30,43.890743,10.101926,5.649886,-8.029832,-10.010151,22.197788 +31,43.62636,10.020903,5.687294,-7.99397,-10.013002,22.050238 +32,43.238457,9.8817005,5.824423,-7.954534,-10.110538,21.824081 +33,54.809837,12.524115,6.147181,-10.428625,-11.019275,27.858728 +34,44.22387,9.820371,4.569786,-8.97665,-7.7875733,22.513083 +35,41.450253,9.443848,5.680409,-7.6434526,-9.76858,20.899986 +36,38.594795,8.794884,5.278029,-7.1159935,-9.083405,19.461876 +37,38.76463,8.859461,5.2066283,-7.1311054,-9.033946,19.564106 +38,51.726814,11.753441,4.2310443,-10.387661,-8.055258,26.553179 +39,50.65384,11.523455,5.082472,-9.883726,-9.229517,25.838678 +40,48.517284,11.175541,0.71405,-10.447537,-3.420193,25.542652 +41,45.157066,10.169799,2.6453102,-9.518875,-5.438174,23.373808 +42,37.987617,8.658164,5.11342,-7.022545,-8.822123,19.167187 +43,53.732006,12.167909,5.1273513,-10.647867,-9.303163,27.446033 +44,44.25091,9.916737,4.929216,-8.738545,-8.4774685,22.477983 +45,44.89916,10.056236,3.4545486,-9.3134,-6.4226065,23.077473 +46,44.0228,10.134875,5.6596403,-8.052505,-10.037324,22.267021 +47,48.492474,11.181142,6.1795635,-8.859195,-11.009447,24.53847 +48,48.400497,11.162828,6.1582556,-8.840654,-10.981818,24.49432 +49,51.033264,11.602614,3.647719,-10.390922,-7.2446475,26.298681 +50,39.785885,9.074857,5.4119873,-7.330218,-9.33744,20.067774 +51,46.980053,10.844123,5.9412365,-8.575586,-10.621507,23.78081 +52,49.456554,11.272518,2.6187482,-10.290987,-5.8298965,25.66428 +53,41.707638,9.590114,5.3992763,-7.636343,-9.539446,21.088066 +54,50.791744,11.682289,6.151969,-9.414431,-11.011988,25.749878 +55,54.796734,13.812137,-2.8580718,-11.484967,-0.3348681,30.205782 +56,52.772533,12.672955,-1.324554,-11.316986,-1.5829241,28.409422 +57,43.195316,9.909228,5.685209,-7.92343,-9.982032,21.828047 +58,39.07558,8.890673,5.0242147,-7.314111,-8.707218,19.753069 +59,49.943954,11.260029,4.4277563,-10.071655,-8.076758,25.564623 +60,38.843548,8.802714,5.4919534,-7.1924334,-9.312708,19.55595 +61,41.892437,9.590277,5.5831556,-7.69692,-9.741965,21.156197 +62,53.56912,12.353763,6.3863044,-9.905837,-11.529861,27.177832 +63,41.86905,9.288754,4.6133046,-8.432643,-7.7683764,21.266869 +64,49.508102,11.402799,6.0776405,-9.1283455,-10.8807335,25.087479 +65,47.471607,10.965285,5.9769473,-8.66054,-10.709924,24.034922 +66,37.824116,8.633366,5.120361,-6.9651556,-8.854047,19.082941 +67,50.8602,11.592879,2.5607302,-10.621451,-5.815653,26.418015 +68,50.27885,11.581372,5.947454,-9.331144,-10.730231,25.5149 +69,50.13993,11.416995,5.756238,-9.566345,-10.195355,25.459545 +70,42.076324,9.578743,5.7976317,-7.7638383,-9.950077,21.21139 +71,40.129673,9.162768,5.2591496,-7.4325457,-9.145871,20.272882 +72,43.57336,9.71034,4.544129,-8.777585,-7.7992535,22.178682 +73,45.88148,10.520498,6.0555034,-8.418805,-10.607863,23.178997 +74,40.42375,8.959602,4.514099,-8.138077,-7.570573,20.522152 +75,38.82707,8.840514,5.3384633,-7.1634645,-9.16829,19.574907 +76,46.17931,10.381673,5.1770535,-9.058301,-8.956369,23.4554 +77,51.457233,11.734335,2.4866838,-10.768042,-5.7523212,26.748535 
+78,52.38262,11.854738,5.044304,-10.379631,-9.120999,26.748623 +79,44.20178,10.0445595,5.4168916,-8.36944,-9.441274,22.386477 +80,47.532547,10.748239,2.5180042,-10.028093,-5.4431643,24.658165 +81,52.153557,11.88189,5.1025887,-10.185059,-9.348577,26.626593 +82,50.0525,11.472816,1.1241015,-10.756549,-3.9475865,26.271622 +83,37.045547,8.416919,5.1542416,-6.8459806,-8.800345,18.664843 +84,53.03639,12.932045,-1.8319895,-11.339063,-1.078959,28.803791 +85,43.703976,10.04579,5.675286,-8.00399,-10.017313,22.095602 +86,39.62158,9.390179,4.134128,-7.081501,-8.190656,20.229063 +87,41.8374,9.9542465,4.2303743,-7.4533157,-8.529897,21.386478 +88,53.687176,12.257087,5.854972,-10.277421,-10.535543,27.314135 +89,45.797924,10.256862,4.739912,-9.154937,-8.242218,23.321909 +90,45.353615,10.435729,5.5626698,-8.378717,-9.942873,22.982191 +91,53.265232,12.207612,2.2117052,-11.15428,-5.5765953,27.763102 +92,37.689518,8.5276375,5.3730955,-6.987081,-9.070712,18.9651 +93,41.634586,9.549301,5.4762397,-7.637904,-9.601296,21.035032 +94,45.961662,10.208635,4.405357,-9.42113,-7.6043468,23.453869 +95,38.7096,8.868873,5.073586,-7.1200304,-8.88235,19.558548 +96,44.77065,10.3091955,5.7500234,-8.187977,-10.204675,22.64694 +97,50.5999,11.519817,5.148919,-9.839472,-9.339327,25.800356 +98,43.78647,9.995866,5.937278,-8.062111,-10.270747,22.092314 +99,42.709816,9.767928,5.7205334,-7.8525343,-9.9436655,21.559265 +100,47.38733,10.796876,1.1965739,-10.249292,-3.7953076,24.84174 +101,53.04126,12.09163,5.129554,-10.363167,-9.4359455,27.090277 From 3786b7cd78906156ec173eb907034d9f4323110c Mon Sep 17 00:00:00 2001 From: Brandon Paul Date: Mon, 18 Sep 2023 08:44:01 -0700 Subject: [PATCH 07/14] Add documentation for gradient tool --- docs/source/chapt_surrogates/gradients.rst | 79 ++++++++++++++++++++++ docs/source/chapt_surrogates/index.rst | 1 + 2 files changed, 80 insertions(+) create mode 100644 docs/source/chapt_surrogates/gradients.rst diff --git a/docs/source/chapt_surrogates/gradients.rst b/docs/source/chapt_surrogates/gradients.rst new file mode 100644 index 000000000..eb42b2754 --- /dev/null +++ b/docs/source/chapt_surrogates/gradients.rst @@ -0,0 +1,79 @@ +Gradient Generation to Support Gradient-Enhanced Neural Networks +================================================================ + +Neural networks are useful in instances where multivariate process data +is available and the mathematical functions describing the variable +relationships are unknown. Training deep neural networks is most efficient +when samples of the variable derivatives, or gradients, are collected +simultaneously with process data. However, gradient data is often unavailable +unless the physics of the system are known and predetermined such as in +fluid dynamics with outputs of known physical properties. + +These gradients may be estimated numerically using solely the process data. The +gradient generation tool described below requires a Comma-Separated Value (CSV) file +containing process samples (rows), with inputs in the left columns and outputs in the rightmost +columns. Multiple outputs are supported, as long as they are the rightmost columns, and +the variable columns may have string (text) headings or data may start in row 1. The method +produces a CSV file for each output variable containing gradients with respect to each input +variable (columns), for each sample point (rows). 
After navigating to the FOQUS directory
+"examples/other_files/ML_AI_Plugin", the code below sets up and calls the gradient generation
+method on the example dataset "MEA_carbon_capture_dataset_mimo.csv":
+
+.. code:: python
+    # required imports
+    >>> import pandas as pd
+    >>> import numpy as np
+    >>> from generate_gradient_data import generate_gradients
+    >>>
+    >>> data = pd.read_csv(r"MEA_carbon_capture_dataset_mimo.csv")  # get dataset
+    >>> data_array = np.array(data, ndmin=2)  # convert to Numpy array
+    >>> n_x = 6  # we have 6 input variables, in the leftmost 6 columns
+
+    >>> gradients = generate_gradients(
+    >>>     xy_data=data_array,
+    >>>     n_x=n_x,
+    >>>     show_plots=False,  # flag to plot regression results during gradient training
+    >>>     optimize_training=True  # will try many regression settings and pick the best result
+    >>>     )
+    >>> print("Gradient generation complete.")
+
+    >>> for output in range(len(gradients)):  # save each gradient array to a CSV file
+    >>>     pd.DataFrame(gradients[output]).to_csv("gradients_output" + str(output) + ".csv")
+    >>>     print("Gradients for output ", str(output), " written to gradients_output" + str(output) + ".csv",)
+
+Internally, the gradient generation method automatically executes a series of actions on the dataset:
+
+1. Import process data of size *(m, n_x + n_y)*, where *m* is the number of sample rows,
+*n_x* is the number of input columns and *n_y* is the number of output columns. Given *n_x*,
+the data is split into an input array *X* and an output array *Y*.
+
+2. For each input *xi* and each output *yj*, estimate the gradient using a multivariate
+chain rule approximation. For example, the gradient of y1 with respect to x1 is
+calculated at each point as:
+
+:math:`\frac{Dy_1}{Dx_1} = \frac{dy_1}{dx_1} \frac{dx_1}{dx_1} + \frac{dy_1}{dx_2} \frac{dx_2}{dx_1} + \frac{dy_1}{dx_3} \frac{dx_3}{dx_1} + ...`
+
+where *D/D* represents the total derivative and *d/d* represents a partial derivative at each
+sample point. *y1*, *x1*, *x2*, *x3*, and so on are vectors with values at each of the *m* sample points, and
+this formula produces the gradients of each output with respect to each input at each sample point by iterating
+through the dataset. The partial derivatives are calculated by simple finite difference. For example:
+
+:math:`\frac{dy_1}{dx_1} (m_{1.5}) = \frac{y_1 (m_2) - y_1 (m_1)}{x_1 (m_2) - x_1 (m_1)}`
+
+where *m_1.5* is the midpoint between sample points *m_2* and *m_1*. As a result, this scheme
+calculates gradients at the points between the sample points, not the actual sample points.
+
+3. Train an MLP model on the calculated midpoint and midpoint-gradient values. After normalizing the data
+via linear scaling (see :ref:`chapt_surrogates.mlaiplugin.Data Normalization For Neural Network Models`),
+the algorithm leverages a small neural network model to generate gradient data for the actual
+sample points. Passing the argument *optimize_training=True* will train models using the optimizers
+*Adam* or *RMSProp*, with activation functions *ReLu* or *Sigmoid* on hidden layers, using a *Linear*
+or *ReLu* activation function on the output layer, building *2* or *8* hidden layers with *6* or *12*
+neurons per hidden layer. The algorithm employs cross-validation to check the mean-squared-error (MSE) loss
+on each model and uses the model with the smallest error to predict the sample gradients.
+
+4. Predict the gradients at each sample point from the regressed model.
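+As a rough sketch of this prediction step (illustrative only; ``best_model``, ``x_scaled``,
+``dy_min`` and ``dy_max`` are assumed names standing in for the trained MLP and the scaling
+bounds produced in step 3, not literal names from the packaged tool):
+
+.. code:: python
+
+    >>> # hedged sketch: evaluate the step-3 MLP at the scaled original sample points
+    >>> dy_dx_scaled = best_model.predict(x_scaled)        # Keras prediction, shape (m, n_x)
+    >>> dy_dx = dy_min + dy_dx_scaled * (dy_max - dy_min)  # undo the linear [0, 1] scaling
+    >>> # dy_dx holds one gradient row per sample and one column per input variable
+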
This produces *n_y* +arrays with each having size *(m, n_x)* - the same size as the original input array *X*. + +5. Concatenate the predicted gradients into a single array of size *(m, n_x, n_y)*. This is the +single object returned by the gradient generation method. \ No newline at end of file diff --git a/docs/source/chapt_surrogates/index.rst b/docs/source/chapt_surrogates/index.rst index 1e9bc9eb6..2768b8ff1 100644 --- a/docs/source/chapt_surrogates/index.rst +++ b/docs/source/chapt_surrogates/index.rst @@ -7,6 +7,7 @@ Contents .. toctree:: :maxdepth: 2 + gradients mlaiplugin reference tutorial/index From 200b8571cb6d886842749b6e55f3728e6c859b31 Mon Sep 17 00:00:00 2001 From: Brandon Paul Date: Mon, 18 Sep 2023 08:52:05 -0700 Subject: [PATCH 08/14] Manually run spellchecker on ML AI Plugin files --- docs/source/chapt_surrogates/mlaiplugin.rst | 2 +- examples/other_files/ML_AI_Plugin/generate_gradient_data.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/chapt_surrogates/mlaiplugin.rst b/docs/source/chapt_surrogates/mlaiplugin.rst index 6876d4d38..9632b2c9a 100644 --- a/docs/source/chapt_surrogates/mlaiplugin.rst +++ b/docs/source/chapt_surrogates/mlaiplugin.rst @@ -99,7 +99,7 @@ Currently, FOQUS supports the following custom attributes: bounds for each output variable (default: (0, 1E5)) - *normalized* – Boolean flag for whether the user is passing a normalized neural network model; to use this flag, users must train their models with - data normalized according to a specifc scaling form and add all input and + data normalized according to a specific scaling form and add all input and output bounds custom attributes. The section below details scaling options. - *normalization_form* - string flag required when *normalization* is *True* indicating a scaling option for FOQUS to automatically scale flowsheet-level diff --git a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py index a89c9685c..99e5eabd7 100644 --- a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py +++ b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py @@ -50,7 +50,7 @@ def finite_difference(m1, m2, y1, y2, n_x): variables x and exactly one output variable y. y1 is the value of y1 at m1, and y2 is the value of y at m2. - The total graident is calculated via chain rule assuming a multivariate + The total gradient is calculated via chain rule assuming a multivariate function y(x1, x2, x3, ...). In the notation below, D/D denotes a total derivative and d/d denotes a partial derivative. Total derivatives are functions of all (x1, x2, x3, ...) whereas partial derivatives are From 131f07d93cf99ad5234016a82c13cd3d788ec4fe Mon Sep 17 00:00:00 2001 From: Brandon Paul Date: Mon, 18 Sep 2023 09:20:01 -0700 Subject: [PATCH 09/14] Fix links and file names --- docs/source/chapt_surrogates/gradients.rst | 7 ++++--- docs/source/chapt_surrogates/mlaiplugin.rst | 4 ++++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/docs/source/chapt_surrogates/gradients.rst b/docs/source/chapt_surrogates/gradients.rst index eb42b2754..269af1df1 100644 --- a/docs/source/chapt_surrogates/gradients.rst +++ b/docs/source/chapt_surrogates/gradients.rst @@ -16,10 +16,11 @@ columns. Multiple outputs are supported, as long as they are the rightmost colum the variable columns may have string (text) headings or data may start in row 1. 
The method
 produces a CSV file for each output variable containing gradients with respect to each input
 variable (columns), for each sample point (rows). After navigating to the FOQUS directory
-"examples/other_files/ML_AI_Plugin", the code below sets up and calls the gradient generation
-method on the example dataset "MEA_carbon_capture_dataset_mimo.csv":
+*examples/other_files/ML_AI_Plugin*, the code below sets up and calls the gradient generation
+method on the example dataset *MEA_carbon_capture_dataset_mimo.csv*:
 
 .. code:: python
+
     # required imports
     >>> import pandas as pd
     >>> import numpy as np
@@ -64,7 +65,7 @@ where *m_1.5* is the midpoint between sample points *m_2* and *m_1*. As a result
 calculates gradients at the points between the sample points, not the actual sample points.
 
 3. Train an MLP model on the calculated midpoint and midpoint-gradient values. After normalizing the data
-via linear scaling (see :ref:`chapt_surrogates.mlaiplugin.Data Normalization For Neural Network Models`),
+via linear scaling (see :ref:`mlaiplugin.datanorm`),
 the algorithm leverages a small neural network model to generate gradient data for the actual
 sample points. Passing the argument *optimize_training=True* will train models using the optimizers
 *Adam* or *RMSProp*, with activation functions *ReLu* or *Sigmoid* on hidden layers, using a *Linear*
diff --git a/docs/source/chapt_surrogates/mlaiplugin.rst b/docs/source/chapt_surrogates/mlaiplugin.rst
index 9632b2c9a..eefeaa00f 100644
--- a/docs/source/chapt_surrogates/mlaiplugin.rst
+++ b/docs/source/chapt_surrogates/mlaiplugin.rst
@@ -1,3 +1,5 @@
+.. _mlaiplugin:
+
 Machine Learning & Artificial Intelligence Flowsheet Model Plugins
 ==================================================================
 
@@ -108,6 +110,8 @@ Currently, FOQUS supports the following custom attributes:
 - *normalization_function* - optional string argument that is required when a 'Custom' *normalization_form* is used. The section below details scaling options.
 
+..
_mlaiplugin.datanorm: + Data Normalization For Neural Network Models -------------------------------------------- From e1b10185da6317f8e4255470ef586fc98835d2fb Mon Sep 17 00:00:00 2001 From: Brandon Paul Date: Tue, 19 Sep 2023 12:19:05 -0700 Subject: [PATCH 10/14] Add option to use simple partial derivative in lieu of chain rule formula --- docs/source/chapt_surrogates/gradients.rst | 1 + .../ML_AI_Plugin/generate_gradient_data.py | 21 +- .../ML_AI_Plugin/gradients_output0.csv | 204 +++++++++--------- .../ML_AI_Plugin/gradients_output1.csv | 204 +++++++++--------- 4 files changed, 218 insertions(+), 212 deletions(-) diff --git a/docs/source/chapt_surrogates/gradients.rst b/docs/source/chapt_surrogates/gradients.rst index 269af1df1..59ead3d3c 100644 --- a/docs/source/chapt_surrogates/gradients.rst +++ b/docs/source/chapt_surrogates/gradients.rst @@ -35,6 +35,7 @@ method on the example dataset *MEA_carbon_capture_dataset_mimo.csv*: >>> n_x=n_x, >>> show_plots=False, # flag to plot regression results during gradient training >>> optimize_training=True # will try many regression settings and pick the best result + >>> use_simple_diff=True # flag to use simple partials instead of chain rule formula; defaults to False if not passed >>> ) >>> print("Gradient generation complete.") diff --git a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py index 99e5eabd7..bb145be96 100644 --- a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py +++ b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py @@ -43,7 +43,7 @@ tf.random.set_seed(62) -def finite_difference(m1, m2, y1, y2, n_x): +def finite_difference(m1, m2, y1, y2, n_x, use_simple_diff=False): """ Calculate the first-order gradient between provided sample points m1 and m2, where each point is assumed to be a vector with one or more input @@ -80,11 +80,14 @@ def diff(y2, y1, x2, x1): dy_dm = [None] * n_x # initialize dy vector, this is dy_dm(midpoints) for i in range(n_x): # for each input xi - dy_dm[i] = sum( - diff(y2, y1, m2[j], m1[j]) - * diff(m2[j], m1[j], m2[i], m1[i]) # dy/dxj # dxj/dxi - for j in range(n_x) - ) # for each input xj + if use_simple_diff: + dy_dm[i] = diff(y2, y1, m2[i], m1[i]) + else: # use chain rule + dy_dm[i] = sum( + diff(y2, y1, m2[j], m1[j]) + * diff(m2[j], m1[j], m2[i], m1[i]) # dy/dxj # dxj/dxi + for j in range(n_x) + ) # for each input xj mid_m[i] = m2[i] - m1[i] @@ -259,7 +262,8 @@ def predict_gradients( return gradients -def generate_gradients(xy_data, n_x, show_plots=True, optimize_training=False): +def generate_gradients(xy_data, n_x, show_plots=True, optimize_training=False, + use_simple_diff=False): """ This method implements finite difference approximation and NN regression to estimate the first-order derivatives of a given dataset with columns @@ -312,6 +316,7 @@ def generate_gradients(xy_data, n_x, show_plots=True, optimize_training=False): y1=y[m][output], # each entry in y is an array somehow y2=y[m + 1][output], # each entry in y is an array somehow n_x=n_x, + use_simple_diff=use_simple_diff, ) print("Midpoint gradient generation complete.") print() @@ -338,7 +343,7 @@ def generate_gradients(xy_data, n_x, show_plots=True, optimize_training=False): n_x = 6 gradients = generate_gradients( - xy_data=data_array, n_x=n_x, show_plots=False, optimize_training=True + xy_data=data_array, n_x=n_x, show_plots=False, optimize_training=True, use_simple_diff=True, ) print("Gradient generation complete.") diff --git 
a/examples/other_files/ML_AI_Plugin/gradients_output0.csv b/examples/other_files/ML_AI_Plugin/gradients_output0.csv index cb83a754e..6cbbf41ab 100644 --- a/examples/other_files/ML_AI_Plugin/gradients_output0.csv +++ b/examples/other_files/ML_AI_Plugin/gradients_output0.csv @@ -1,103 +1,103 @@ ,0,1,2,3,4,5 -0,79.931435,-91.37865,278.3458,-66.22485,-144.19691,-120.84488 -1,94.582245,-110.10992,300.8567,-73.11867,-152.05132,-125.41921 -2,85.885605,-98.1704,299.2175,-71.19216,-155.06975,-129.99739 -3,87.259796,-99.91246,301.5615,-71.87913,-155.94972,-130.558 -4,92.54158,-106.33635,314.34235,-75.22922,-161.8603,-135.14842 -5,73.96548,-84.616325,256.793,-61.13261,-132.89908,-111.301575 -6,101.51534,-114.83136,375.2238,-88.30013,-196.59392,-166.01509 -7,104.4355,-118.784904,372.10513,-88.11146,-194.04385,-163.31714 -8,95.5073,-110.56375,312.69635,-75.474976,-159.38446,-132.2193 -9,85.89002,-100.33718,268.96695,-65.61988,-134.93253,-110.60035 -10,107.12147,-123.775406,353.9702,-85.26302,-180.93675,-150.3827 -11,89.97311,-101.7761,335.0388,-78.815155,-175.45778,-148.21397 -12,81.34667,-92.42115,291.44708,-68.91341,-152.12305,-128.09349 -13,94.21727,-106.66829,345.23126,-81.32502,-180.78975,-152.587 -14,100.05553,-113.37235,363.0728,-85.6386,-190.10966,-160.41222 -15,97.00606,-110.72163,371.44797,-88.047485,-192.93825,-163.63539 -16,94.619514,-107.48416,339.12753,-80.19324,-177.08513,-149.16327 -17,95.700294,-110.008865,324.45834,-77.68349,-166.98286,-139.37932 -18,91.54417,-103.73887,342.68762,-80.747574,-179.18858,-151.50475 -19,86.73082,-98.38803,326.0057,-76.86773,-170.24901,-143.97746 -20,83.088455,-94.23336,300.0793,-70.82989,-156.9394,-132.31006 -21,76.19791,-87.76109,256.0701,-61.41514,-131.38304,-109.43264 -22,106.660065,-123.0925,354.5869,-85.29139,-181.55768,-151.06174 -23,86.590866,-97.94937,320.336,-75.36868,-167.795,-141.6703 -24,92.13782,-106.60332,302.52957,-72.97113,-154.32234,-128.08342 -25,97.92853,-112.7542,329.34653,-79.00371,-169.14671,-141.00336 -26,106.08274,-120.421,381.3794,-90.129234,-199.32547,-167.99362 -27,106.19588,-120.43985,383.36206,-90.51539,-200.56342,-169.1409 -28,94.74466,-108.09889,332.81857,-79.05112,-172.90164,-145.17569 -29,82.493935,-94.619385,282.7624,-67.52178,-145.90335,-121.97543 -30,92.40509,-106.29988,312.19022,-74.80261,-160.4998,-133.87491 -31,91.48199,-105.13964,310.51373,-74.3178,-159.82019,-133.40034 -32,94.66799,-110.33125,299.5993,-72.90135,-151.06844,-124.3675 -33,113.98216,-130.7881,389.71973,-93.13937,-201.07388,-168.11066 -34,88.5444,-100.747665,315.08737,-74.616066,-164.1874,-138.10957 -35,89.40093,-103.50187,292.67963,-70.63637,-149.13734,-123.68622 -36,82.723145,-95.62282,272.9512,-65.75266,-139.38461,-115.75739 -37,81.478325,-93.71561,275.56238,-66.00188,-141.6679,-118.15808 -38,104.53298,-119.23347,367.6137,-87.30109,-191.0671,-160.48166 -39,104.15131,-119.19312,360.6145,-85.93833,-186.67583,-156.39975 -40,90.9033,-102.78483,337.60165,-79.39711,-176.89305,-149.39786 -41,87.57932,-99.21862,318.65125,-75.12937,-166.82394,-140.7529 -42,78.86872,-90.44279,270.6591,-64.607605,-139.67178,-116.76642 -43,109.58537,-125.179756,382.77017,-91.0369,-198.5881,-166.60995 -44,89.959404,-102.71762,314.93475,-74.85476,-163.44153,-137.13853 -45,88.56739,-100.53083,318.60922,-75.274895,-166.49808,-140.30632 -46,93.97358,-108.47746,312.10815,-75.08324,-159.72963,-132.85193 -47,103.84993,-119.96836,343.58615,-82.733055,-175.66719,-146.01862 -48,104.34892,-120.74195,342.39325,-82.60815,-174.66557,-144.98047 
-49,101.884895,-115.99141,361.49432,-85.67668,-188.30147,-158.37337 -50,85.029686,-98.21587,281.60574,-67.77905,-143.95435,-119.6326 -51,99.44558,-114.5432,333.8602,-80.11693,-171.37318,-142.80855 -52,96.76797,-109.824524,348.25452,-82.27827,-182.0416,-153.43805 -53,87.98135,-101.264496,296.48602,-71.08086,-152.31747,-126.99219 -54,106.57264,-122.51085,361.21567,-86.49449,-185.90804,-155.18419 -55,95.18846,-109.54394,373.46933,-89.14667,-192.65071,-164.01154 -56,95.32428,-108.55919,362.39514,-85.74988,-188.6346,-159.84265 -57,94.36733,-110.01563,298.23788,-72.5955,-150.27928,-123.645424 -58,80.96275,-92.81039,278.2817,-66.40884,-143.69016,-120.17596 -59,100.80213,-114.89454,355.72488,-84.40852,-185.03258,-155.48463 -60,85.09368,-99.09072,270.15057,-65.67492,-136.4739,-112.53742 -61,91.73927,-106.93907,290.1222,-70.610664,-146.22386,-120.33162 -62,112.75414,-129.70657,380.83517,-91.27057,-195.84537,-163.39954 -63,84.06326,-95.72013,298.14188,-70.65328,-155.2156,-130.48709 -64,104.05963,-119.66364,352.11176,-84.34567,-181.13647,-151.15488 -65,100.83582,-116.24165,337.11832,-80.979,-172.85855,-143.94942 -66,80.809845,-93.33055,267.7837,-64.44447,-136.91432,-113.79709 -67,99.174774,-112.49215,357.83688,-84.4939,-187.16711,-157.81778 -68,105.6971,-121.55783,357.4392,-85.63969,-183.87646,-153.4476 -69,103.929634,-119.17012,356.58594,-85.148674,-184.12587,-154.01427 -70,92.47953,-107.591736,295.0471,-71.649925,-149.30153,-123.27631 -71,83.498795,-95.801704,285.79437,-68.26643,-147.40321,-123.19326 -72,87.65054,-99.83635,310.37653,-73.58224,-161.53961,-135.7838 -73,99.82143,-115.783455,323.5758,-78.287155,-164.47331,-136.19893 -74,81.10221,-92.33355,287.86932,-68.20526,-149.89023,-126.02049 -75,84.82247,-98.507744,273.2816,-66.2077,-138.63264,-114.646614 -76,94.25473,-107.71716,328.59326,-78.176254,-170.35378,-142.84769 -77,100.140976,-113.55463,361.80283,-85.4055,-189.30336,-159.65094 -78,106.80259,-121.995224,373.13907,-88.74069,-193.59967,-162.42856 -79,91.41787,-104.76248,314.63065,-75.06696,-162.54462,-135.99553 -80,91.99855,-104.2212,334.81345,-78.9388,-175.2956,-147.9078 -81,107.272934,-122.769585,371.3485,-88.50218,-192.231,-161.05492 -82,94.29153,-106.64442,349.22665,-82.157936,-182.95972,-154.49773 -83,80.45438,-93.30924,261.043,-63.13301,-132.67351,-109.84852 -84,93.797005,-107.47615,363.42218,-86.42586,-188.1302,-159.83447 -85,93.97434,-108.6794,309.23273,-74.5526,-157.85104,-131.0732 -86,84.74202,-99.0697,264.4535,-64.573044,-132.45355,-108.41818 -87,89.35248,-104.49542,278.45386,-68.018364,-139.35516,-113.987 -88,111.36116,-127.70374,381.85748,-91.20087,-197.16957,-164.92711 -89,92.622086,-105.62274,326.17862,-77.426605,-169.5413,-142.39651 -90,95.23853,-109.49817,322.5822,-77.25404,-165.9871,-138.53525 -91,103.42994,-117.28234,373.6911,-88.213684,-195.5346,-164.91391 -92,82.199936,-95.446045,265.12994,-64.20796,-134.5044,-111.22829 -93,88.1462,-101.556786,295.59354,-70.94446,-151.64618,-126.31735 -94,91.655136,-104.17953,327.67914,-77.52028,-170.95883,-143.91682 -95,80.88851,-92.89211,275.62125,-65.90586,-141.99658,-118.59313 -96,96.22409,-111.26226,316.88654,-76.38586,-161.80428,-134.38269 -97,104.21575,-119.313126,360.1738,-85.86917,-186.35872,-156.08759 -98,96.33667,-112.188194,306.03976,-74.404564,-154.56291,-127.41383 -99,91.008125,-105.0344,302.62543,-72.77232,-154.88487,-128.81706 -100,88.78199,-100.46858,331.0735,-77.90511,-173.30588,-146.4144 -101,109.10222,-124.86166,377.69528,-90.01473,-195.52164,-163.8152 +0,74.01828,132.16856,-37.867424,-87.99398,-4.1455474,-29.112787 
+1,80.10569,147.16621,-40.357662,-95.16894,-3.3632255,-31.500113 +2,79.45062,141.44975,-40.728428,-94.467354,-4.5550265,-31.185713 +3,80.09741,143.0444,-40.987965,-95.22719,-4.474018,-31.456068 +4,83.3853,149.3718,-42.61538,-99.136185,-4.526995,-32.69882 +5,68.366295,122.46076,-34.90481,-81.26287,-3.730377,-26.934528 +6,98.03055,171.53583,-50.67336,-116.58603,-6.4520106,-38.59053 +7,97.89941,172.60396,-50.445877,-116.42936,-6.072017,-38.41172 +8,83.01357,150.70213,-42.10721,-98.65618,-3.971518,-32.60649 +9,71.396675,136.51456,-34.725723,-84.223755,-2.0283308,-28.99291 +10,93.8227,169.31696,-47.76738,-111.52925,-4.75127,-36.766186 +11,87.57114,153.17346,-45.266582,-104.14352,-5.784079,-34.50622 +12,77.11264,136.21832,-39.681473,-91.69841,-4.7168374,-30.304417 +13,90.61951,159.05618,-46.771446,-107.76673,-5.829528,-35.66367 +14,95.282,167.36357,-49.171406,-113.3156,-6.089981,-37.4559 +15,95.61301,165.93967,-49.6347,-113.73363,-6.6623554,-37.62568 +16,89.286415,157.34029,-46.010975,-106.183365,-5.5629764,-35.063023 +17,86.075874,154.30821,-43.971436,-102.3324,-4.6421576,-33.758076 +18,88.97119,155.19272,-46.05375,-105.81409,-5.9943185,-35.06345 +19,85.13099,148.73717,-44.019444,-101.2385,-5.674104,-33.58378 +20,79.27477,139.77596,-40.826813,-94.26976,-4.9238853,-31.177721 +21,68.19138,123.46287,-34.609493,-81.03175,-3.3662899,-26.888262 +22,93.974236,169.21736,-47.90398,-111.716484,-4.85899,-36.815624 +23,83.95978,147.13693,-43.360966,-99.84655,-5.46732,-33.07123 +24,80.32151,145.7025,-40.758068,-95.45836,-3.8735087,-31.55181 +25,87.33303,156.8335,-44.577415,-103.825325,-4.6336923,-34.233963 +26,100.16098,176.2134,-51.658722,-119.11959,-6.3200517,-39.332443 +27,100.61913,176.86285,-51.913834,-119.66431,-6.394028,-39.529224 +28,88.05725,155.98763,-45.272488,-104.71868,-5.2560325,-34.51785 +29,75.13442,134.63,-38.37448,-89.31733,-4.077221,-29.527359 +30,82.87425,148.91374,-42.27488,-98.51663,-4.3797503,-32.53275 +31,82.475716,148.13205,-42.075085,-98.040596,-4.379695,-32.39845 +32,79.820496,148.22089,-39.87293,-94.75434,-2.972318,-31.737362 +33,103.239876,184.07578,-52.91633,-122.764824,-5.8291836,-40.402317 +34,83.44963,147.62373,-42.917164,-99.233864,-5.044079,-32.77074 +35,77.814964,141.65369,-39.39349,-92.4633,-3.6255062,-30.626474 +36,72.576965,131.80801,-36.788624,-86.244194,-3.465675,-28.566385 +37,73.26434,131.98471,-37.3039,-87.07939,-3.7878819,-28.821802 +38,96.98463,171.53969,-49.905228,-115.34031,-5.8591366,-38.00519 +39,95.46655,169.51312,-49.04027,-113.533226,-5.5802684,-37.35544 +40,88.204475,154.2815,-45.597893,-104.8987,-5.8239093,-34.742092 +41,83.82496,147.37799,-43.23154,-99.68545,-5.3224497,-32.97493 +42,72.0031,129.25752,-36.725876,-85.5848,-3.848767,-28.34243 +43,101.20366,179.36389,-52.027626,-120.35538,-6.0127025,-39.63646 +44,83.56118,148.32939,-42.912643,-99.366585,-4.9050317,-32.7624 +45,84.01179,148.07623,-43.28163,-99.90703,-5.2288985,-33.01481 +46,82.814964,149.50114,-42.142643,-98.43732,-4.186265,-32.498432 +47,91.13173,164.59996,-46.36689,-108.32369,-4.581602,-35.74409 +48,90.79159,164.3494,-46.141026,-107.91509,-4.4643555,-35.602654 +49,95.26023,168.18198,-49.054127,-113.28893,-5.843741,-37.365513 +50,74.87018,135.7826,-37.98163,-88.97286,-3.6262012,-29.462618 +51,88.55916,159.22435,-45.16972,-105.27757,-4.64986,-34.731316 +52,91.57796,161.18896,-47.217155,-108.909546,-5.7591033,-35.97436 +53,78.71591,141.59416,-40.12848,-93.570305,-4.1193304,-30.908072 +54,95.76671,171.49147,-48.960106,-113.860565,-5.212231,-37.52514 
+55,95.20761,164.35713,-49.598755,-113.285774,-6.8527017,-37.324512 +56,93.35032,162.17128,-48.433243,-111.038414,-6.4628286,-36.74515 +57,79.45699,147.86624,-39.623432,-94.30814,-2.8812304,-31.659342 +58,73.95901,132.46022,-37.781715,-87.920006,-4.031681,-29.072739 +59,93.9208,166.09866,-48.326027,-111.694046,-5.682809,-36.825455 +60,71.9617,133.69165,-35.912685,-85.41209,-2.6741002,-28.698986 +61,77.29709,143.4738,-38.63179,-91.76507,-2.8898616,-30.699736 +62,100.90558,180.6842,-51.59789,-119.97491,-5.4902635,-39.506794 +63,79.093956,140.097,-40.651505,-94.05265,-4.7312365,-31.055733 +64,93.36842,167.33827,-47.70987,-111.005775,-5.0444255,-36.594864 +65,89.400475,160.87416,-45.581074,-106.277145,-4.655254,-35.05068 +66,71.18247,129.02841,-36.122963,-84.59284,-3.4643352,-28.004879 +67,94.070724,165.49329,-48.511963,-111.87364,-5.940198,-36.96388 +68,94.69907,169.47949,-48.43937,-112.59784,-5.176377,-37.072872 +69,94.52092,168.54114,-48.43717,-112.392815,-5.337618,-37.020348 +70,78.516495,144.10529,-39.572163,-93.27994,-3.3379686,-30.896975 +71,75.965195,136.26534,-38.772453,-90.30055,-4.0844836,-29.867825 +72,82.22544,145.58038,-42.27432,-97.779045,-4.9341216,-32.272003 +73,85.88904,156.38759,-43.495705,-102.06654,-3.9824226,-33.73376 +74,76.41134,135.3663,-39.267925,-90.861694,-4.565704,-30.00838 +75,72.63777,132.85725,-36.678158,-86.302795,-3.212714,-28.58673 +76,87.165695,154.8638,-44.745518,-103.65183,-5.0786123,-34.166847 +77,95.09086,167.23694,-49.044453,-113.08687,-6.0191965,-37.36917 +78,98.678246,174.89987,-50.726715,-117.35156,-5.8597326,-38.650524 +79,83.53726,149.27426,-42.739464,-99.31751,-4.6406217,-32.79148 +80,88.01864,154.69762,-45.403282,-104.67356,-5.6031585,-34.621407 +81,98.24959,174.41043,-50.477947,-116.84416,-5.75449,-38.439507 +82,91.3545,159.92653,-47.20724,-108.64343,-5.9945984,-35.977535 +83,69.42779,126.83768,-35.07397,-82.48843,-3.11376,-27.343887 +84,93.19298,161.36255,-48.451252,-110.86878,-6.588386,-36.6198 +85,82.06257,148.62521,-41.683678,-97.53424,-4.0184665,-32.213467 +86,70.19333,136.28699,-33.510227,-81.94457,-2.0913234,-28.541834 +87,73.913826,143.66867,-35.24645,-86.2297,-2.203389,-30.034256 +88,101.149536,180.15692,-51.875717,-120.283035,-5.7623706,-39.57856 +89,86.4,153.08112,-44.41113,-102.74501,-5.1515007,-33.88524 +90,85.54218,153.2783,-43.715324,-101.70182,-4.630538,-33.53017 +91,98.13822,172.53021,-50.62748,-116.71244,-6.22962,-38.562004 +92,70.57257,129.37149,-35.57549,-83.83683,-3.0496752,-27.828009 +93,78.51507,141.5761,-39.9677,-93.32313,-4.01859,-30.85107 +94,86.626,153.00754,-44.583206,-103.012764,-5.301827,-34.02862 +95,73.235,131.46855,-37.367054,-87.05551,-3.9084888,-28.783335 +96,84.06279,152.10603,-42.725967,-99.91602,-4.152734,-32.981956 +97,95.3838,169.43991,-48.98864,-113.43475,-5.55435,-37.315998 +98,81.511986,149.84026,-41.05513,-96.839584,-3.3964128,-32.0437 +99,80.42077,145.53183,-40.852016,-95.576706,-3.978455,-31.623737 +100,86.57293,151.44499,-44.745354,-102.95487,-5.7145934,-34.12189 +101,99.90212,177.32112,-51.330788,-118.809875,-5.8574,-39.084644 diff --git a/examples/other_files/ML_AI_Plugin/gradients_output1.csv b/examples/other_files/ML_AI_Plugin/gradients_output1.csv index ef77d02a7..065accd59 100644 --- a/examples/other_files/ML_AI_Plugin/gradients_output1.csv +++ b/examples/other_files/ML_AI_Plugin/gradients_output1.csv @@ -1,103 +1,103 @@ ,0,1,2,3,4,5 -0,39.069317,8.79817,4.92915,-7.4853544,-8.395348,19.75606 -1,42.996727,9.8077135,5.8568726,-7.9216003,-10.108579,21.688585 
-2,42.03033,9.456588,4.950679,-8.164676,-8.510103,21.309946 -3,42.356087,9.59381,5.2045197,-8.065804,-9.007373,21.446346 -4,44.186485,10.144742,5.4463677,-8.191303,-9.6827545,22.38414 -5,36.02388,8.135324,4.8232055,-6.7867727,-8.185115,18.173038 -6,53.99135,12.499687,0.6233006,-11.576059,-3.6904032,28.462757 -7,52.769604,12.082337,3.0270822,-10.82522,-6.6390023,27.343578 -8,44.249977,10.149398,5.826031,-8.117586,-10.2154045,22.356815 -9,39.90996,9.376428,4.458464,-7.1839647,-8.513556,20.320137 -10,49.953213,11.551039,6.249258,-9.10547,-11.237158,25.29983 -11,48.043804,10.999258,0.9791989,-10.374518,-3.629604,25.235342 -12,40.885853,9.04533,4.0700073,-8.394886,-6.914519,20.835707 -13,49.143448,11.14489,2.1298347,-10.454416,-5.0446134,25.587364 -14,51.834152,11.888842,1.8800223,-10.918737,-5.0747924,27.070217 -15,54.05637,12.943797,-1.326961,-11.652617,-1.5783217,29.099876 -16,47.996677,10.889046,3.050812,-9.91996,-6.2547426,24.804693 -17,45.607685,10.488318,5.678906,-8.411667,-10.108815,23.09641 -18,49.528553,11.520241,-0.14379533,-10.7399235,-2.5164645,26.25208 -19,46.65241,10.591434,1.1169223,-10.169249,-3.5818243,24.464869 -20,42.068253,9.276815,3.931426,-8.757217,-6.688584,21.476854 -21,36.100704,8.185345,5.0787873,-6.6817408,-8.619909,18.176353 -22,49.974148,11.569446,6.202605,-9.100825,-11.19619,25.319336 -23,45.681526,10.354372,1.6739062,-9.814797,-4.264869,23.843126 -24,42.78735,9.812289,5.636801,-7.8503227,-9.879754,21.61704 -25,46.319107,10.708403,5.7961755,-8.444504,-10.416288,23.457785 -26,54.35506,12.500773,2.2765284,-11.307072,-5.7989984,28.331184 -27,54.733475,12.599085,1.9960805,-11.45417,-5.4578533,28.586172 -28,46.719357,10.511131,4.6061616,-9.325666,-8.171637,23.833223 -29,39.709633,9.054628,5.107865,-7.4008303,-8.889133,20.075207 -30,43.890743,10.101926,5.649886,-8.029832,-10.010151,22.197788 -31,43.62636,10.020903,5.687294,-7.99397,-10.013002,22.050238 -32,43.238457,9.8817005,5.824423,-7.954534,-10.110538,21.824081 -33,54.809837,12.524115,6.147181,-10.428625,-11.019275,27.858728 -34,44.22387,9.820371,4.569786,-8.97665,-7.7875733,22.513083 -35,41.450253,9.443848,5.680409,-7.6434526,-9.76858,20.899986 -36,38.594795,8.794884,5.278029,-7.1159935,-9.083405,19.461876 -37,38.76463,8.859461,5.2066283,-7.1311054,-9.033946,19.564106 -38,51.726814,11.753441,4.2310443,-10.387661,-8.055258,26.553179 -39,50.65384,11.523455,5.082472,-9.883726,-9.229517,25.838678 -40,48.517284,11.175541,0.71405,-10.447537,-3.420193,25.542652 -41,45.157066,10.169799,2.6453102,-9.518875,-5.438174,23.373808 -42,37.987617,8.658164,5.11342,-7.022545,-8.822123,19.167187 -43,53.732006,12.167909,5.1273513,-10.647867,-9.303163,27.446033 -44,44.25091,9.916737,4.929216,-8.738545,-8.4774685,22.477983 -45,44.89916,10.056236,3.4545486,-9.3134,-6.4226065,23.077473 -46,44.0228,10.134875,5.6596403,-8.052505,-10.037324,22.267021 -47,48.492474,11.181142,6.1795635,-8.859195,-11.009447,24.53847 -48,48.400497,11.162828,6.1582556,-8.840654,-10.981818,24.49432 -49,51.033264,11.602614,3.647719,-10.390922,-7.2446475,26.298681 -50,39.785885,9.074857,5.4119873,-7.330218,-9.33744,20.067774 -51,46.980053,10.844123,5.9412365,-8.575586,-10.621507,23.78081 -52,49.456554,11.272518,2.6187482,-10.290987,-5.8298965,25.66428 -53,41.707638,9.590114,5.3992763,-7.636343,-9.539446,21.088066 -54,50.791744,11.682289,6.151969,-9.414431,-11.011988,25.749878 -55,54.796734,13.812137,-2.8580718,-11.484967,-0.3348681,30.205782 -56,52.772533,12.672955,-1.324554,-11.316986,-1.5829241,28.409422 -57,43.195316,9.909228,5.685209,-7.92343,-9.982032,21.828047 
-58,39.07558,8.890673,5.0242147,-7.314111,-8.707218,19.753069 -59,49.943954,11.260029,4.4277563,-10.071655,-8.076758,25.564623 -60,38.843548,8.802714,5.4919534,-7.1924334,-9.312708,19.55595 -61,41.892437,9.590277,5.5831556,-7.69692,-9.741965,21.156197 -62,53.56912,12.353763,6.3863044,-9.905837,-11.529861,27.177832 -63,41.86905,9.288754,4.6133046,-8.432643,-7.7683764,21.266869 -64,49.508102,11.402799,6.0776405,-9.1283455,-10.8807335,25.087479 -65,47.471607,10.965285,5.9769473,-8.66054,-10.709924,24.034922 -66,37.824116,8.633366,5.120361,-6.9651556,-8.854047,19.082941 -67,50.8602,11.592879,2.5607302,-10.621451,-5.815653,26.418015 -68,50.27885,11.581372,5.947454,-9.331144,-10.730231,25.5149 -69,50.13993,11.416995,5.756238,-9.566345,-10.195355,25.459545 -70,42.076324,9.578743,5.7976317,-7.7638383,-9.950077,21.21139 -71,40.129673,9.162768,5.2591496,-7.4325457,-9.145871,20.272882 -72,43.57336,9.71034,4.544129,-8.777585,-7.7992535,22.178682 -73,45.88148,10.520498,6.0555034,-8.418805,-10.607863,23.178997 -74,40.42375,8.959602,4.514099,-8.138077,-7.570573,20.522152 -75,38.82707,8.840514,5.3384633,-7.1634645,-9.16829,19.574907 -76,46.17931,10.381673,5.1770535,-9.058301,-8.956369,23.4554 -77,51.457233,11.734335,2.4866838,-10.768042,-5.7523212,26.748535 -78,52.38262,11.854738,5.044304,-10.379631,-9.120999,26.748623 -79,44.20178,10.0445595,5.4168916,-8.36944,-9.441274,22.386477 -80,47.532547,10.748239,2.5180042,-10.028093,-5.4431643,24.658165 -81,52.153557,11.88189,5.1025887,-10.185059,-9.348577,26.626593 -82,50.0525,11.472816,1.1241015,-10.756549,-3.9475865,26.271622 -83,37.045547,8.416919,5.1542416,-6.8459806,-8.800345,18.664843 -84,53.03639,12.932045,-1.8319895,-11.339063,-1.078959,28.803791 -85,43.703976,10.04579,5.675286,-8.00399,-10.017313,22.095602 -86,39.62158,9.390179,4.134128,-7.081501,-8.190656,20.229063 -87,41.8374,9.9542465,4.2303743,-7.4533157,-8.529897,21.386478 -88,53.687176,12.257087,5.854972,-10.277421,-10.535543,27.314135 -89,45.797924,10.256862,4.739912,-9.154937,-8.242218,23.321909 -90,45.353615,10.435729,5.5626698,-8.378717,-9.942873,22.982191 -91,53.265232,12.207612,2.2117052,-11.15428,-5.5765953,27.763102 -92,37.689518,8.5276375,5.3730955,-6.987081,-9.070712,18.9651 -93,41.634586,9.549301,5.4762397,-7.637904,-9.601296,21.035032 -94,45.961662,10.208635,4.405357,-9.42113,-7.6043468,23.453869 -95,38.7096,8.868873,5.073586,-7.1200304,-8.88235,19.558548 -96,44.77065,10.3091955,5.7500234,-8.187977,-10.204675,22.64694 -97,50.5999,11.519817,5.148919,-9.839472,-9.339327,25.800356 -98,43.78647,9.995866,5.937278,-8.062111,-10.270747,22.092314 -99,42.709816,9.767928,5.7205334,-7.8525343,-9.9436655,21.559265 -100,47.38733,10.796876,1.1965739,-10.249292,-3.7953076,24.84174 -101,53.04126,12.09163,5.129554,-10.363167,-9.4359455,27.090277 +0,39.472305,9.040555,4.4753723,-6.9339933,-8.209688,19.979877 +1,43.45687,10.065209,5.3269405,-7.343949,-9.8375025,21.94748 +2,42.4612,9.715011,4.4662127,-7.5765567,-8.311502,21.548733 +3,42.791584,9.855425,4.714327,-7.470362,-8.806517,21.687979 +4,44.6401,10.417218,4.935413,-7.5705504,-9.473156,22.63581 +5,36.397438,8.360599,4.4018893,-6.27447,-8.013013,18.380919 +6,54.400146,12.731372,0.22052374,-10.894471,-3.5871472,28.646238 +7,53.186993,12.325948,2.6140978,-10.120545,-6.5384517,27.535337 +8,44.72174,10.41258,5.2846336,-7.5261583,-9.938572,22.621532 +9,40.581646,9.766127,3.6550992,-6.6480155,-8.009327,20.750727 +10,50.48267,11.845523,5.643082,-8.442704,-10.926988,25.59616 +11,48.411217,11.208746,0.61697316,-9.760799,-3.5378053,25.401096 
+12,41.30308,9.289722,3.6200376,-7.8344564,-6.7380757,21.063185 +13,49.527798,11.367269,1.7503519,-9.808548,-4.950979,25.76279 +14,52.236248,12.12045,1.4830489,-10.243815,-4.975933,27.253035 +15,54.451675,13.21458,-1.7091123,-10.949172,-1.5287027,29.313953 +16,48.378906,11.112977,2.6725047,-9.273896,-6.163404,24.980865 +17,46.076252,10.769871,5.151027,-7.770252,-9.892258,23.356447 +18,49.898212,11.727691,-0.50780636,-10.126056,-2.4215105,26.41674 +19,47.010834,10.796278,0.7635553,-9.570347,-3.492666,24.626923 +20,42.49559,9.526718,3.4709811,-8.183946,-6.5079846,21.709482 +21,36.488853,8.402436,4.6331377,-6.1944523,-8.392515,18.394592 +22,50.50336,11.863611,5.597086,-8.438517,-10.886368,25.615381 +23,46.036648,10.558985,1.3234552,-9.219156,-4.1778283,24.004683 +24,43.243565,10.066781,5.1132917,-7.278385,-9.612114,21.873014 +25,46.810127,10.981322,5.2346187,-7.829938,-10.12914,23.732462 +26,54.778755,12.745698,1.8579553,-10.594688,-5.6953225,28.52434 +27,55.157936,12.84361,1.5769585,-10.741527,-5.3534093,28.779161 +28,47.194675,10.789401,4.0933127,-8.68676,-7.970171,24.092224 +29,40.119278,9.301185,4.6460824,-6.8395753,-8.700076,20.30283 +30,44.357586,10.361733,5.1157117,-7.4451814,-9.737166,22.459202 +31,44.091103,10.279667,5.1555157,-7.411811,-9.741317,22.310564 +32,43.932293,10.295078,4.9572897,-7.3557,-9.564874,22.270784 +33,55.372597,12.8546715,5.538947,-9.669955,-10.78018,28.166338 +34,44.67639,10.085663,4.081658,-8.36841,-7.596128,22.760012 +35,41.894337,9.692111,5.1701455,-7.0861683,-9.507846,21.1496 +36,39.008205,9.025907,4.803199,-6.5972314,-8.840872,19.694183 +37,39.179035,9.090601,4.73176,-6.6115074,-8.791496,19.79658 +38,52.145424,12.001308,3.8159924,-9.676685,-7.95686,26.747736 +39,51.169132,11.825222,4.5261436,-9.190424,-9.010765,26.119528 +40,48.885994,11.384972,0.35058913,-9.832352,-3.327425,25.708477 +41,45.515488,10.379142,2.290867,-8.914262,-5.3522134,23.538649 +42,38.381416,8.895483,4.669039,-6.4826374,-8.639994,19.386353 +43,54.27679,12.486514,4.5397754,-9.915822,-9.072086,27.742592 +44,44.70476,10.185299,4.431462,-8.124614,-8.278638,22.727264 +45,45.26169,10.270279,3.0954933,-8.699065,-6.3372545,23.245663 +46,44.490883,10.3955345,5.123365,-7.466139,-9.763141,22.529284 +47,49.007565,11.467879,5.589601,-8.214153,-10.707681,24.826971 +48,48.914463,11.449015,5.5692134,-8.196939,-10.680449,24.782263 +49,51.44247,11.843476,3.2424102,-9.697767,-7.147543,26.487959 +50,40.211796,9.31276,4.923057,-6.795901,-9.087687,20.30701 +51,47.47869,11.121441,5.370778,-7.951353,-10.32982,24.059877 +52,49.846416,11.49945,2.2332566,-9.633739,-5.7357206,25.84303 +53,42.151524,9.83725,4.8910985,-7.0802836,-9.279788,21.336718 +54,51.31227,11.994683,5.56605,-8.702654,-10.771388,26.038465 +55,55.185787,14.074042,-3.233125,-10.79583,-0.28277907,30.413597 +56,53.157955,12.936906,-1.6972876,-10.630949,-1.5344597,28.618082 +57,43.886723,10.321132,4.8204412,-7.327031,-9.437515,22.273174 +58,39.478786,9.133364,4.569732,-6.7617426,-8.521194,19.977112 +59,50.40319,11.530055,3.948904,-9.388246,-7.917727,25.799711 +60,39.470055,9.17631,4.7091875,-6.6506906,-8.820755,19.959515 +61,42.563923,9.990268,4.743841,-7.117564,-9.213723,21.58846 +62,54.117134,12.682417,5.769613,-9.156787,-11.276438,27.481503 +63,42.299873,9.541887,4.1478777,-7.8527775,-7.5858192,21.502428 +64,50.01601,11.707785,5.5057044,-8.433456,-10.645937,25.369198 +65,47.97519,11.245336,5.4007373,-8.030154,-10.415238,24.316742 +66,38.228764,8.859347,4.655823,-6.4575067,-8.616787,19.310186 
+67,51.2602,11.825336,2.1653807,-9.947658,-5.718777,26.601175
+68,50.792892,11.889568,5.3689785,-8.628659,-10.492522,25.7997
+69,50.65201,11.723701,5.180649,-8.867682,-9.95888,25.743046
+70,42.527298,9.831198,5.2783356,-7.1976013,-9.684543,21.465185
+71,40.544342,9.412547,4.7915397,-6.86403,-8.954495,20.50343
+72,44.019356,9.97187,4.0628405,-8.177872,-7.610444,22.422098
+73,46.370758,10.793575,5.493628,-7.8053555,-10.320461,23.453663
+74,40.837055,9.207032,4.0501304,-7.575308,-7.380402,20.750893
+75,39.243137,9.073316,4.859652,-6.641093,-8.92359,19.808954
+76,46.65086,10.663804,4.64791,-8.415959,-8.739329,23.71628
+77,51.861187,11.968793,2.0875332,-10.088004,-5.654292,26.933317
+78,52.91414,12.1656885,4.4709535,-9.665283,-8.895572,27.038025
+79,44.65596,10.317351,4.9056816,-7.7484426,-9.231716,22.638445
+80,47.907692,10.966602,2.1472218,-9.396057,-5.3526545,24.830208
+81,52.682915,12.191681,4.5312095,-9.473099,-9.123813,26.914907
+82,50.435932,11.691713,0.7460609,-10.115618,-3.8520005,26.444775
+83,37.44313,8.63946,4.696719,-6.346684,-8.566634,18.888561
+84,53.42023,13.1933565,-2.2026727,-10.65715,-1.0296772,29.010656
+85,44.169193,10.30508,5.1417913,-7.421015,-9.744524,22.356443
+86,40.298096,9.780722,3.3313665,-6.554062,-7.6855173,20.662878
+87,42.549854,10.365376,3.384757,-6.898334,-7.997465,21.843248
+88,54.237057,12.579824,5.2608213,-9.53649,-10.301966,27.614477
+89,46.266056,10.531298,4.2347126,-8.525349,-8.04393,23.577332
+90,45.818863,10.715119,5.0386147,-7.742108,-9.72779,23.240261
+91,53.680496,12.447602,1.8015527,-10.456308,-5.4750175,27.952402
+92,38.095276,8.755097,4.905712,-6.4772305,-8.831982,19.1937
+93,42.078526,9.7967205,4.9676123,-7.081549,-9.341427,21.283915
+94,46.429058,10.482101,3.9015872,-8.7937,-7.4066167,23.708412
+95,39.11415,9.105655,4.6147113,-6.583228,-8.676915,19.784132
+96,45.246586,10.574317,5.2044415,-7.5917163,-9.9256525,22.913656
+97,51.115196,11.821705,4.59248,-9.1459675,-9.120545,26.081306
+98,44.2555,10.258384,5.396894,-7.4741893,-9.993968,22.356342
+99,43.16619,10.022589,5.1971354,-7.2803106,-9.676226,21.81539
+100,47.7516,11.005234,0.8373709,-9.640132,-3.704831,25.006502
+101,53.579086,12.406268,4.549125,-9.639955,-9.207587,27.383106

From 394c69cddf0fca6d524ecf06f8216c12208359d7 Mon Sep 17 00:00:00 2001
From: Brandon Paul
Date: Tue, 19 Sep 2023 12:19:40 -0700
Subject: [PATCH 11/14] Fix comment

---
 .../mea_column_model_training_customnormform_scikitlearn.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/other_files/ML_AI_Plugin/mea_column_model_training_customnormform_scikitlearn.py b/examples/other_files/ML_AI_Plugin/mea_column_model_training_customnormform_scikitlearn.py
index 7ff68de62..952374c71 100644
--- a/examples/other_files/ML_AI_Plugin/mea_column_model_training_customnormform_scikitlearn.py
+++ b/examples/other_files/ML_AI_Plugin/mea_column_model_training_customnormform_scikitlearn.py
@@ -79,7 +79,7 @@ def create_model(x_train, z_train):
 
 model_data = np.concatenate(
     (xdata, zdata), axis=1
-) # PyTorch requires a Numpy array as input
+) # SciKit Learn requires a Numpy array as input
 
 # define x and z data, not used but will add to variable dictionary
 xdata = model_data[:, :-2]

From 1334194f2554eb99c5372823d6f23373c2461d41 Mon Sep 17 00:00:00 2001
From: Brandon Paul
Date: Tue, 19 Sep 2023 12:20:43 -0700
Subject: [PATCH 12/14] run black again

---
 .../ML_AI_Plugin/generate_gradient_data.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py
index bb145be96..59ae6693b 100644
--- a/examples/other_files/ML_AI_Plugin/generate_gradient_data.py
+++ b/examples/other_files/ML_AI_Plugin/generate_gradient_data.py
@@ -262,8 +262,9 @@ def predict_gradients(
     return gradients
 
 
-def generate_gradients(xy_data, n_x, show_plots=True, optimize_training=False,
-    use_simple_diff=False):
+def generate_gradients(
+    xy_data, n_x, show_plots=True, optimize_training=False, use_simple_diff=False
+):
     """
     This method implements finite difference approximation and NN regression
     to estimate the first-order derivatives of a given dataset with columns
@@ -343,7 +344,11 @@ def generate_gradients(xy_data, n_x, show_plots=True, optimize_training=False,
     n_x = 6
 
     gradients = generate_gradients(
-        xy_data=data_array, n_x=n_x, show_plots=False, optimize_training=True, use_simple_diff=True,
+        xy_data=data_array,
+        n_x=n_x,
+        show_plots=False,
+        optimize_training=True,
+        use_simple_diff=True,
     )
     print("Gradient generation complete.")

From 7a22bab398ec294ad5d20dfda2c7c5a85b3a30d4 Mon Sep 17 00:00:00 2001
From: Brandon Paul
Date: Tue, 19 Sep 2023 12:23:06 -0700
Subject: [PATCH 13/14] minor typo

---
 docs/source/chapt_surrogates/gradients.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/chapt_surrogates/gradients.rst b/docs/source/chapt_surrogates/gradients.rst
index 59ead3d3c..33b3d9e81 100644
--- a/docs/source/chapt_surrogates/gradients.rst
+++ b/docs/source/chapt_surrogates/gradients.rst
@@ -34,7 +34,7 @@ method on the example dataset *MEA_carbon_capture_dataset_mimo.csv*:
 >>> xy_data=data_array,
 >>> n_x=n_x,
 >>> show_plots=False, # flag to plot regression results during gradient training
->>> optimize_training=True # will try many regression settings and pick the best result
+>>> optimize_training=True, # will try many regression settings and pick the best result
 >>> use_simple_diff=True # flag to use simple partials instead of chain rule formula; defaults to False if not passed
 >>> )
 >>> print("Gradient generation complete.")

From 53543db78d61ea38393c20979bfd62b6018c53ac Mon Sep 17 00:00:00 2001
From: Brandon Paul <86113916+bpaul4@users.noreply.github.com>
Date: Mon, 25 Sep 2023 10:32:56 -0700
Subject: [PATCH 14/14] Give docs page reference name

---
 docs/source/chapt_surrogates/gradients.rst | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/docs/source/chapt_surrogates/gradients.rst b/docs/source/chapt_surrogates/gradients.rst
index 33b3d9e81..5bc2d25f9 100644
--- a/docs/source/chapt_surrogates/gradients.rst
+++ b/docs/source/chapt_surrogates/gradients.rst
@@ -1,3 +1,5 @@
+.. _gengrad:
+
 Gradient Generation to Support Gradient-Enhanced Neural Networks
 ================================================================
 
@@ -78,4 +80,4 @@ on each model and uses the model with the smallest error to predict the sample
 g arrays with each having size *(m, n_x)* - the same size as the original input array *X*.
 
 5. Concatenate the predicted gradients into a single array of size *(m, n_x, n_y)*. This is the
-single object returned by the gradient generation method.
\ No newline at end of file
+single object returned by the gradient generation method.
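Editor's note: the concatenation described in step 5 of the documentation above can be pictured with a minimal NumPy sketch. The array names, shapes, and random values below are illustrative assumptions only; they are not objects defined in the patches, and the real method builds its per-output gradient arrays from the trained regression models rather than from random data.

    import numpy as np

    # hypothetical per-output gradient arrays from step 4, each of size (m, n_x)
    m, n_x = 100, 6
    grad_y1 = np.random.rand(m, n_x)  # stands in for predicted dy1/dx values
    grad_y2 = np.random.rand(m, n_x)  # stands in for predicted dy2/dx values

    # step 5: stack along a new last axis so gradients[:, :, k] holds dyk/dx
    # for every sample and every input variable
    gradients = np.stack([grad_y1, grad_y2], axis=2)
    print(gradients.shape)  # (100, 6, 2), i.e. (m, n_x, n_y)

np.stack is used here because the per-output arrays share a common shape; an equivalent result could be obtained with np.concatenate on reshaped (m, n_x, 1) arrays.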
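Editor's note: the comment corrected in PATCH 11 above refers to the training scripts passing a plain NumPy array to scikit-learn. A minimal sketch of that pattern follows; the frame contents, column split, and MLPRegressor settings are assumptions for illustration and are not taken from the patched script.

    import numpy as np
    import pandas as pd
    from sklearn.neural_network import MLPRegressor

    # assumed example frame: 6 input columns, 2 output columns
    df = pd.DataFrame(np.random.rand(100, 8))
    xdata = df.iloc[:, :6].to_numpy()  # scikit-learn estimators accept NumPy arrays
    zdata = df.iloc[:, 6:].to_numpy()

    # combined array, mirroring the np.concatenate call touched by PATCH 11
    model_data = np.concatenate((xdata, zdata), axis=1)

    # assumed regressor configuration; fit on the inputs/outputs split back out
    model = MLPRegressor(hidden_layer_sizes=(20, 20), max_iter=2000)
    model.fit(model_data[:, :6], model_data[:, 6:])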