Skip to content

Commit

Permalink
Merge pull request #14 from jakekrell/main
Browse files Browse the repository at this point in the history
v3.1.0
  • Loading branch information
derek-slack authored Jan 31, 2024
2 parents 59f5173 + 53c31b3 commit f234499
Show file tree
Hide file tree
Showing 3 changed files with 513 additions and 442 deletions.
113 changes: 58 additions & 55 deletions Examples/GP Integrate/GP_integrate_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,79 +2,82 @@
from FoKL.GP_Integrate import GP_Integrate
import numpy as np
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore", category=UserWarning)


# ======================================================================================================================
def main():
    """Fit two FoKL GP models to two training datasets, then integrate the
    coupled system with FoKL.GP_Integrate and plot predictions vs. truth.

    Reads the training inputs/data and test inputs from files in the working
    directory ('traininputs.txt', 'traindata1.txt', 'traindata2.txt', 'y.txt',
    'utest.csv'); raises OSError if any are missing.
    """
    # Inputs:
    traininputs = np.loadtxt('traininputs.txt', dtype=float, delimiter=',')
    traindata1 = np.loadtxt('traindata1.txt', dtype=float, delimiter=',')
    traindata2 = np.loadtxt('traindata2.txt', dtype=float, delimiter=',')
    traindata = [traindata1, traindata2]
    y = np.loadtxt('y.txt', dtype=float, delimiter=',')
    utest = np.loadtxt('utest.csv', dtype=float, delimiter=',')

    # User-defined hyperparameters (to override default values):
    relats_in = [1, 1, 1, 1, 1, 1]
    a = 1000
    b = 1
    draws = 2000
    way3 = True
    threshav = 0
    threshstda = 0
    threshstdb = 100

    # Initializing FoKL model with constant hypers:
    model = FoKLRoutines.FoKL(relats_in=relats_in, a=a, b=b, draws=draws, way3=way3, threshav=threshav,
                              threshstda=threshstda, threshstdb=threshstdb)

    # User-defined hyperparameters (to iterate through for different data or to sweep the same data):
    btau = [0.6091, 1]

    # Iterating through datasets:
    betas = []
    mtx = []
    for ii in range(2):

        print("\nCurrently fitting model to dataset", ii + 1, "...")

        # Updating model with current iteration of variable hyperparameters:
        model.btau = btau[ii]

        # Running emulator routine for current model/data:
        betas_i, mtx_i, _ = model.fit(traininputs, traindata[ii])

        print("Done!")

        # Store values for post-processing (i.e., GP integration); drop the
        # first 1000 draws as burn-in:
        betas.append(betas_i[1000:])
        mtx.append(mtx_i)

        # Clear all attributes (except for hypers) so previous results do not influence the next iteration:
        model.clear()

    # Integrating with FoKL.GP_Integrate():

    phis = model.phis  # same for all models iterated through, so just grab value from most recent model

    # Normalization bounds come from the first (training) half of each row of y:
    _, m = np.shape(y)
    norms1 = [np.min(y[0, 0:int(m/2)]), np.max(y[0, 0:int(m/2)])]
    norms2 = [np.min(y[1, 0:int(m/2)]), np.max(y[1, 0:int(m/2)])]
    norms = np.transpose([norms1, norms2])

    # Integration settings (start/stop/stepsize in the same time units as y):
    start = 4
    stop = 3750*4
    stepsize = 4
    used_inputs = [[1, 1, 1], [1, 1, 1]]
    ic = y[:, int(m/2)-1]  # initial condition = last point of the training half

    T, Y = GP_Integrate([np.mean(betas[0], axis=0), np.mean(betas[1], axis=0)], [mtx[0], mtx[1]], utest, norms, phis,
                        start, stop, ic, stepsize, used_inputs)

    # Plot integrated predictions against the held-out (second) half of y:
    plt.figure()
    plt.plot(T, Y[0], T, y[0][3750:7500])
    plt.plot(T, Y[1], T, y[1][3750:7500])
    plt.show()


if __name__ == '__main__':
    main()

92 changes: 46 additions & 46 deletions Examples/Sigmoid/SigmoidTest.py
Original file line number Diff line number Diff line change
@@ -1,64 +1,64 @@
from FoKL import FoKLRoutines
import numpy as np
import warnings
warnings.filterwarnings("ignore", category=UserWarning)


# ======================================================================================================================
def main():
    """Fit a FoKL model to (optionally swept fractions of) a 2-D sigmoid
    dataset and report RMSE per training fraction.

    Reads grid files 'X.csv', 'Y.csv', and 'DATA_nois.csv' from the working
    directory; raises OSError if any are missing. Pops up one coverage3()
    figure per training fraction and pauses until it is closed.
    """
    # Inputs:
    X_grid = np.loadtxt('X.csv', dtype=float, delimiter=',')
    Y_grid = np.loadtxt('Y.csv', dtype=float, delimiter=',')

    # Data:
    Z_grid = np.loadtxt('DATA_nois.csv', dtype=float, delimiter=',')

    # Reshaping grid matrices into vectors via fortran index order:
    m, n = np.shape(X_grid)  # = np.shape(Y_grid) = np.shape(Z_grid) = dimensions of grid
    X = np.reshape(X_grid, (m*n, 1), order='F')
    Y = np.reshape(Y_grid, (m*n, 1), order='F')
    Z = np.reshape(Z_grid, (m*n, 1), order='F')

    # Initializing FoKL model with some user-defined hyperparameters (leaving others blank for default values):
    model = FoKLRoutines.FoKL(a=9, b=0.01, atau=3, btau=4000, aic=True)

    # Training FoKL model on a random selection of 100% (or 100%, 80%, 60%, etc.) of the dataset:
    train_all = [1]  # = [1, 0.8, 0.6] etc. if sweeping through the percentage of data to train on
    betas_all = []
    mtx_all = []
    evs_all = []
    meen_all = []
    bounds_all = []
    rmse_all = []
    for train in train_all:

        print("\nCurrently fitting model to", train * 100, "% of data ...")

        # Running emulator routine to fit model to training data as a function of the corresponding training inputs:
        betas, mtx, evs = model.fit([X, Y], Z, train=train)

        # Provide feedback to user before the figure from coverage3() pops up and pauses the code:
        print("\nDone! Please close the figure to continue.\n")

        # Evaluating and visualizing predicted values of data as a function of all inputs (train set plus test set):
        title = 'FoKL Model Trained on ' + str(train * 100) + '% of Data'
        meen, bounds, rmse = model.coverage3(plot='bounds', title=title, legend=1)

        # Store any values from iteration if performing additional post-processing or analysis:
        betas_all.append(betas)
        mtx_all.append(mtx)
        evs_all.append(evs)
        meen_all.append(meen)
        bounds_all.append(bounds)
        rmse_all.append(rmse)

        # Reset the model so that all attributes of the FoKL class are removed except for the hyperparameters:
        model.clear()

    # Post-processing:
    print("\nThe results are as follows:")
    for ii in range(len(train_all)):
        print("\n ", train_all[ii]*100, "% of Data:\n --> RMSE =", rmse_all[ii])


if __name__ == '__main__':
    main()

Loading

0 comments on commit f234499

Please sign in to comment.