Change Data_Base_Updated_Final_Rotated_Train.json to Database_Random.json
Ronakshoghi committed Jan 17, 2024
1 parent 828ba6e commit cb0c9ca
Showing 10 changed files with 34 additions and 31 deletions.
13 changes: 7 additions & 6 deletions examples/active_learning/QBC_SVC.py
@@ -165,7 +165,7 @@ def plot_variances(var_list):
plt.ylabel('Variance')
plt.title('Variance vs Iteration')
plt.grid()
- plt.savefig('variances_vs_iterations.png', dpi = 300)
+ plt.savefig('variances_vs_iterations_weight=999.png', dpi = 300)
plt.close()

def save_hard_test_cases(a , b, num_tests=5):
@@ -180,7 +180,7 @@ def save_hard_test_cases(a , b, num_tests=5):

nmembers=5 # Number of committee members
# Number of initial samples - can be chosen by the user
- nsamples_to_generate=70 # Number of iterations
+ nsamples_to_generate=30 # Number of iterations
sampling_scheme='max_disagreement' # max disagreement for yf-predictions, for classifiers generally possible: vote_entropy, consensus_entropy or maximum_disagreement, cf. https://modal-python.readthedocs.io/en/latest/content/query_strategies/Disagreement-sampling.html#disagreement-sampling
subset_percentage=0.8
subset_assignment='random'
@@ -194,11 +194,12 @@ def save_hard_test_cases(a , b, num_tests=5):
mat_h.elasticity(E = E, nu = nu)
mat_h.plasticity(sy = sy, hill = hill)
mat_h.calc_properties(eps = 0.0013, sigeps = True)
- c = 8
- d = 22
- N = c+d
+ c = 200
+ d = 99
+ N = 200
nsamples_init = N
- sunit= FE.load_cases(number_3d=c, number_6d=d)
+ # sunit= FE.load_cases(number_3d=c, number_6d=d)
+ sunit = creator_rnd(200,8)
np.savetxt('Test_Cases.txt', sunit)
# create set of unit stresses and
print('Created {0} unit stresses (6d Voigt tensor).'.format(N))
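Note on the hunk above: the committed script swaps pylabfea's FE.load_cases generator for a helper called creator_rnd, whose definition is not part of this diff. A minimal sketch of what such a random unit-stress generator could look like, assuming it draws nsamples uniformly distributed directions in 6D Voigt stress space and interpreting the second argument as a random seed (name, signature and behavior are assumptions, not the committed code):

import numpy as np

def creator_rnd(nsamples, seed):
    # Hypothetical sketch: draw nsamples random unit stresses in 6D Voigt space.
    rng = np.random.default_rng(seed)
    vec = rng.normal(size=(nsamples, 6))               # isotropic random directions
    vec /= np.linalg.norm(vec, axis=1, keepdims=True)  # normalize each row to unit length
    return vec

# Usage mirroring the script: sunit = creator_rnd(200, 8)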
16 changes: 9 additions & 7 deletions examples/active_learning/QBC_SVC_Batch.py
@@ -216,7 +216,7 @@ def plot_variances(var_list):
plt.ylabel('Variance')
plt.title('Variance vs Iteration')
plt.grid()
- plt.savefig('variances_vs_iterations.png', dpi = 300)
+ plt.savefig('variances_vs_iterations_weight=999.png', dpi = 300)
plt.close()

def save_hard_test_cases(a , b, num_tests=5):
@@ -231,7 +231,7 @@ def save_hard_test_cases(a , b, num_tests=5):

nmembers=5 # Number of committee members
# Number of initial samples - can be chosen by the user
- nsamples_to_generate=70 # Number of iterations
+ nsamples_to_generate=50 # Number of iterations
sampling_scheme='max_disagreement' # max disagreement for yf-predictions, for classifiers generally possible: vote_entropy, consensus_entropy or maximum_disagreement, cf. https://modal-python.readthedocs.io/en/latest/content/query_strategies/Disagreement-sampling.html#disagreement-sampling
subset_percentage=0.8
subset_assignment='random'
@@ -245,15 +245,16 @@ def save_hard_test_cases(a , b, num_tests=5):
mat_h.elasticity(E = E, nu = nu)
mat_h.plasticity(sy = sy, hill = hill)
mat_h.calc_properties(eps = 0.0013, sigeps = True)
- c = 8
- d = 22
+ c = 1
+ d = 99
N = c+d
nsamples_init = N
sunit_random= FE.load_cases(number_3d=c, number_6d=d)
sunit = apply_repulsion(sunit_random, k=5, iterations=60, learning_rate=0.01)
np.savetxt('Test_Cases.txt', sunit)
final_knn_distances = calculate_knn_distances(sunit, k =5)
average_distance = np.mean(final_knn_distances)
+ print('Average distance to 5th nearest neighbor: {0:.4f}'.format(average_distance))
# create set of unit stresses and
print('Created {0} unit stresses (6d Voigt tensor).'.format(N))
x1=fsolve(find_yloc, np.ones(N) * mat_h.sy, args = (sunit, mat_h), xtol = 1.e-5)
@@ -274,7 +275,7 @@ def save_hard_test_cases(a , b, num_tests=5):
else:
raise NotImplementedError('chosen subset assignment not implemented')
mat_ml=FE.Material(name = 'ML-Hill_{}'.format(j))
- mat_ml.train_SVC(C = C, gamma = gamma, sdata = sig[idx, :], gridsearch = True)
+ mat_ml.train_SVC(C = C, gamma = gamma, sdata = sig[idx, :], gridsearch = False)
committee.append(mat_ml)

# Search for next unit vector to query
@@ -285,7 +286,7 @@ def save_hard_test_cases(a , b, num_tests=5):
res=differential_evolution(
eval_max_disagreement,
bounds,
- args = (committee, sunit_new_list, average_distance, 999, 'input_comparison', mat_h, sig),
+ args = (committee, sunit_new_list, average_distance, 99, 'input_comparison', mat_h, sig),
popsize = 90,
polish = True,
updating = 'immediate'
@@ -303,6 +304,7 @@ def save_hard_test_cases(a , b, num_tests=5):
variance=res.fun
final_knn_distances=calculate_knn_distances(sunit, k = 5)
average_distance=np.mean(final_knn_distances)
+ print('Average distance to 5th nearest neighbor: {0:.4f}'.format(average_distance))

if i == nsamples_to_generate - 1:
np.savetxt('DATA_sig_iter_{}.txt'.format(i + 1), sig)
@@ -311,7 +313,7 @@ def save_hard_test_cases(a , b, num_tests=5):
C=2
gamma=2.5
mat_ml=FE.Material(name = 'ML-Hill') # define material
- mat_ml.train_SVC(C = C, gamma = gamma, sdata = sig, gridsearch = True)
+ mat_ml.train_SVC(C = C, gamma = gamma, sdata = sig, gridsearch = False)
# stress strain curves
print("Calculating properties of ML material, this might take a while ...")
mat_ml.elasticity(E = E, nu = nu)
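Note on the hunks above: QBC_SVC_Batch.py spreads the random unit stresses with apply_repulsion and then reports the average distance of each point to its 5th nearest neighbor via calculate_knn_distances; both helpers are defined elsewhere in the script. A minimal numpy-only sketch of the distance helper, assuming plain Euclidean distances in 6D Voigt space (the implementation in the repository may differ):

import numpy as np

def calculate_knn_distances(points, k=5):
    # Hypothetical sketch: distance from each point to its k-th nearest neighbor.
    diff = points[:, None, :] - points[None, :, :]   # pairwise difference vectors
    dist = np.linalg.norm(diff, axis=-1)             # full pairwise distance matrix
    dist_sorted = np.sort(dist, axis=1)              # column 0 holds the zero self-distance
    return dist_sorted[:, k]                         # k-th nearest neighbor, self excluded

# Usage mirroring the script:
# final_knn_distances = calculate_knn_distances(sunit, k=5)
# average_distance = np.mean(final_knn_distances)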
28 changes: 14 additions & 14 deletions examples/data_6d/Data_6D.py
@@ -19,7 +19,7 @@ def rgb_to_hex(rgb):


# Import Data
db = FE.Data("Data_Base_Updated_Goss.json", wh_data=True) #"Data_Base_Updated_Final_Rotated_Train.JSON"
db = FE.Data("Data_Base_Updated_Final_Rotated_Train.JSON", wh_data=True) #"Data_Base_Updated_Final_Rotated_Train.JSON"
#db = FE.Data("Data_Base_UpdatedE-07.json", Work_Hardening=False)
mat_ref = FE.Material(name="reference") # define reference material, J2 plasticity, linear w.h.
mat_ref.elasticity(E=db.mat_data['E_av'], nu=db.mat_data['nu_av'])
@@ -36,10 +36,10 @@ def rgb_to_hex(rgb):
print(f'Training successful.\nNumber of support vectors: {len(mat_ml.svm_yf.support_vectors_)}')

# # Testing
- # sig_tot, epl_tot, yf_ref = CTD.Create_Test_Sig(Json="Data_Base_Updated_Final_Rotated_Test.json")
- # yf_ml = mat_ml.calc_yf(sig_tot, epl_tot, pred=False)
- # Results = CTD.training_score(yf_ref, yf_ml)
- # print(Results)
+ sig_tot, epl_tot, yf_ref = CTD.Create_Test_Sig(Json="Data_Base_Updated_Final_Rotated_Train.JSON")
+ yf_ml = mat_ml.calc_yf(sig_tot, epl_tot, pred=False)
+ Results = CTD.training_score(yf_ref, yf_ml)
+ print(Results)
#Plot Hardening levels over a meshed space
#Plot initial and final hardening level of trained ML yield function together with data points
ngrid = 100
@@ -57,8 +57,8 @@ def rgb_to_hex(rgb):
Z2 = mat_ml.calc_yf(sig=Cart_hh_6D, epl=normalized_grad_hh * 0.005, pred=False)
Z3 = mat_ml.calc_yf(sig=Cart_hh_6D, epl=normalized_grad_hh * 0.01, pred=False)
Z4 = mat_ml.calc_yf(sig=Cart_hh_6D, epl=normalized_grad_hh * 0.015, pred=False)
- Z5 = mat_ml.calc_yf(sig=Cart_hh_6D, epl=normalized_grad_hh * 0.02, pred=False)
- Z6 = mat_ml.calc_yf(sig=Cart_hh_6D, epl=normalized_grad_hh * 0.025, pred=False)
+ Z5 = mat_ml.calc_yf(sig=Cart_hh_6D, epl=normalized_grad_hh * 0.018, pred=False)
+ # Z6 = mat_ml.calc_yf(sig=Cart_hh_6D, epl=normalized_grad_hh * 0.025, pred=False)

colors_hex = ['#550000', '#990000', '#bb0000', '#cc3333', '#ee3333', '#ff5050']
fig = plt.figure(figsize=(4.2, 4.2))
@@ -69,18 +69,18 @@ def rgb_to_hex(rgb):
line3 = mat_ml.plot_data(Z3, ax, xx, yy, c=colors_hex[2])
line4 = mat_ml.plot_data(Z4, ax, xx, yy, c=colors_hex[3])
line5 = mat_ml.plot_data(Z5, ax, xx, yy, c=colors_hex[4])
- line6 = mat_ml.plot_data(Z6, ax, xx, yy, c=colors_hex[5])
+ # line6 = mat_ml.plot_data(Z6, ax, xx, yy, c=colors_hex[5])
fig.savefig('Hardening_Levels.png', dpi=300)
handle1 = Line2D([], [], color=colors_hex[0], label='Equivalent Plastic Strain : 0 ')
handle2 = Line2D([], [], color=colors_hex[1], label='Equivalent Plastic Strain : 0.5% ')
handle3 = Line2D([], [], color=colors_hex[2], label='Equivalent Plastic Strain : 1% ')
handle4 = Line2D([], [], color=colors_hex[3], label='Equivalent Plastic Strain : 1.5% ')
handle5 = Line2D([], [], color=colors_hex[4], label='Equivalent Plastic Strain : 1.8% ')
- handle6 = Line2D([], [], color=colors_hex[5], label='Equivalent Plastic Strain : 2.5% ')
+ # handle6 = Line2D([], [], color=colors_hex[5], label='Equivalent Plastic Strain : 2.5% ')
fig_leg = plt.figure(figsize=(4, 4))
ax_leg = fig_leg.add_subplot(111)
ax_leg.axis('off')
- ax_leg.legend(handles=[handle1, handle2, handle3, handle4, handle5, handle6], loc="center")
+ ax_leg.legend(handles=[handle1, handle2, handle3, handle4, handle5], loc="center")
fig_leg.savefig('Legend.png', dpi=300)
plt.show()
#
@@ -104,7 +104,7 @@ def rgb_to_hex(rgb):

# Reconstruct Stress-Strain Curve
Keys = list(db.Data_Visualization.keys())
Key = "Us_A1B2C2D1E1F2_4092b_5e411_Tx_Rnd" # random.choice(Keys) # Select Data from database randomly
Key = "Us_A2B2C1D0E0F0_592bb_0abb1_Tx_Gs" # random.choice(Keys) # Us_A2B2C2D2E1F1_09ad0_0abb1_Tx_Gs
print("Selected Key is: {}".format(Key))
Stresses = db.Data_Visualization[Key]["Stress"]
Eq_Stresses = db.Data_Visualization[Key]["Eq_Stress"]
@@ -163,9 +163,9 @@ def rgb_to_hex(rgb):
# get stress data
peeq_dat = FE.eps_eq(db.mat_data['plastic_strain'])
# ind0 = np.nonzero(peeq_dat < 0.0002)[0]
- ind0 =np.nonzero(np.logical_and(peeq_dat > 0.00018, peeq_dat < 0.00022))[0]
+ ind0 =np.nonzero(np.logical_and(peeq_dat > 0.00019, peeq_dat < 0.00021))[0]
sig_d0 = FE.s_cyl(db.mat_data['flow_stress'][ind0, :], mat_ml)
- ind1 = np.nonzero(np.logical_and(peeq_dat > 0.0248, peeq_dat < 0.0252))[0]
+ ind1 = np.nonzero(np.logical_and(peeq_dat > 0.0149, peeq_dat < 0.0151))[0] #0.0248, 0.0252
sig_d1 = FE.s_cyl(db.mat_data['flow_stress'][ind1, :], mat_ml)
# calculate ML flow stresses
ngrid = 100
@@ -193,7 +193,7 @@ def rgb_to_hex(rgb):
fig.savefig('ML+ScatterData.png', dpi=300)
plt.show()
handle1 = mlines.Line2D([], [], color="#550000", label='Equivalent Plastic Strain : 0 ')
handle2 = mlines.Line2D([], [], color="#ff3333", label='Equivalent Plastic Strain : 2.5% ')
handle2 = mlines.Line2D([], [], color="#ff3333", label='Equivalent Plastic Strain : 1.5% ')
fig_leg = plt.figure(figsize=(4, 4))
ax_leg = fig_leg.add_subplot(111)
ax_leg.axis('off')
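Note on the testing block enabled above: CTD.training_score compares reference yield-function values with the ML predictions, but its implementation is not shown in this diff. A minimal sketch of a sign-agreement score of that kind, assuming it simply measures how often the ML yield function reproduces the elastic/plastic classification of the reference (function name, inputs and return format are assumptions):

import numpy as np

def training_score(yf_ref, yf_ml):
    # Hypothetical sketch: fraction of points where the ML yield function
    # predicts the same sign (elastic vs. plastic) as the reference values.
    agree = np.sign(np.asarray(yf_ref)) == np.sign(np.asarray(yf_ml))
    return {"accuracy": float(np.mean(agree)), "n_points": int(np.size(yf_ref))}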
2 changes: 1 addition & 1 deletion examples/data_6d/Data_6D_DOE.py
@@ -24,7 +24,7 @@ def rgb_to_hex(rgb):
return '#{:02x}{:02x}{:02x}'.format(int(rgb[0]*255), int(rgb[1]*255), int(rgb[2]*255))

#Training
db = FE.Data("Data_Base_Updated_Final_Rotated_Train.json", wh_data=True)
db = FE.Data("Database_Random.json", wh_data=True)
mat_ref = FE.Material(name="reference") # define reference material, J2 plasticity, linear w.h.
mat_ref.elasticity(E=db.mat_data['E_av'], nu=db.mat_data['nu_av']) # identic elastic properties as mat1
mat_ref.plasticity(sy=db.mat_data['sy_av'], khard= 4.5e3) # same yield strength as mat1 and mat2, high w.h. coefficient 4.5e3)
2 changes: 1 addition & 1 deletion examples/data_6d/Data_6D_new.py
@@ -25,7 +25,7 @@ def find_yloc(x, sunit, epl, mat):


# Import Data
db = FE.Data("Data_Base_Updated_Final_Rotated_Train.json", wh_data=True)
db = FE.Data("Database_Random.json", wh_data=True)
# db = FE.Data("Data_Base_UpdatedE-07.json", Work_Hardening=False)
# define reference material, J2 plasticity, linear w.h.
mat_ref = FE.Material(name="reference")
File renamed without changes.
Binary file modified examples/data_6d/Hardening_Levels.png
Binary file modified examples/data_6d/Initial_Yield_Locus.png
2 changes: 1 addition & 1 deletion examples/data_6d/Stress_Strain_Calculation.py
@@ -9,7 +9,7 @@
import pylabfea as FE

# Import Data
db = FE.Data("Data_Base_Updated_Final_Rotated_Train.json", wh_data=True)
db = FE.Data("Database_Random.json", wh_data=True)

# db.plot_yield_locus(db=db, mat_data=db.mat_data, active='flow_stress')
print(f'Successfully imported data for {db.mat_data["Nlc"]} load cases')
2 changes: 1 addition & 1 deletion src/pylabfea/data.py
@@ -89,7 +89,7 @@ def __init__(self, source, path_data='./',
name='Dataset', mat_name="Simulanium",
sdim=6,
epl_crit=2.e-3,
- epl_start=1.e-3, epl_max=0.02,
+ epl_start=1.e-3, epl_max=0.01,
plot=False,
wh_data=True):
if sdim!=3 and sdim!=6:

0 comments on commit cb0c9ca
