1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859606162 |
- from keras import backend as K
- from keras.models import Sequential, Model
- from keras.layers import Dense, Dropout
- from keras.utils import np_utils, multi_gpu_model
- from keras.regularizers import l2
- from keras.wrappers.scikit_learn import KerasRegressor
- from keras.optimizers import Adam
- import numpy as np
- import matplotlib.pyplot as plt
def calc_mre(y_true, y_pred):
    """Evaluate test-set performance: mean relative error, in percent.

    Both arguments are numpy arrays of the same shape; y_true must be
    nonzero elementwise (it is used as the denominator).
    """
    relative = np.abs(y_true - y_pred) / y_true
    return np.mean(100 * relative)
#naive percentage loss
def relerr_loss(y_true, y_pred):
    """Relative-error training loss: sum of |y_true - y_pred| / y_true.

    Symbolic counterpart of calc_mre (unscaled, summed rather than
    averaged). Uses backend ops throughout: the original called np.abs
    on Keras tensors, which breaks graph construction on symbolic
    backends — K.abs keeps everything inside the compute graph.
    Assumes y_true is nonzero elementwise.
    """
    y_err = K.abs(y_true - y_pred) / y_true
    y_err_f = K.flatten(y_err)
    return K.sum(y_err_f)
def fullycon(in_size=8, out_size=250, N_hidden=3, N_neurons=250, N_gpus=1):
    """
    Returns a fully-connected model which will take a normalized size vector
    and return a spectrum.

    in_size:   length of the size vector
    out_size:  length of the spectrum vector
    N_hidden:  number of hidden layers
    N_neurons: number of neurons in each of the hidden layers
               (the original ignored this and hard-coded out_size; the
               default N_neurons=250 == out_size keeps behavior unchanged)
    N_gpus:    number of GPUs to replicate the model across (1 = no
               replication)
    """
    model = Sequential()
    # Input + first hidden layer.
    model.add(Dense(N_neurons, input_dim=in_size, kernel_initializer='normal',
                    activation='relu', name='first'))
    # Remaining hidden layers, named H0..H(N_hidden-1).
    for h in range(N_hidden):
        model.add(Dense(N_neurons, kernel_initializer='normal',
                        activation='relu', name="H" + str(h)))
    # Linear output layer producing the spectrum.
    model.add(Dense(out_size, kernel_initializer='normal', name='last'))

    if N_gpus > 1:
        # Replicate via the imported helper. The original passed a
        # keras-mxnet-only `context=` kwarg to compile(), which errors on
        # the standard TensorFlow backend and left multi_gpu_model unused.
        model = multi_gpu_model(model, gpus=N_gpus)

    # NOTE(review): 'accuracy' is an odd metric for a regression model —
    # kept from the original; confirm whether MRE was intended.
    model.compile(loss=relerr_loss, optimizer='adam', metrics=['accuracy'])
    return model
#staging area for new models
def plot_training_history(history, red_factor):
    """Plot training/validation loss curves on a log scale.

    history:    a Keras History object (needs history.history['loss'] and
                history.history['val_loss'])
    red_factor: divisor applied to both curves (e.g. to convert the summed
                relerr_loss into a mean percentage)

    Returns (fig, axs) so callers can save or further customize the plot;
    the original discarded the figure handle (returned None), so this
    addition is backward compatible.
    """
    loss = np.asarray(history.history['loss']) / red_factor
    val_loss = np.asarray(history.history['val_loss']) / red_factor
    epochs = np.arange(1, len(loss) + 1)

    fig, axs = plt.subplots(1, 1, figsize=(5, 5))
    axs.semilogy(epochs, loss, label='train error')
    axs.semilogy(epochs, val_loss, label='validation error')
    axs.set_xlabel('Epoch number')
    axs.set_ylabel('Mean Relative Error (MRE) (%)')
    axs.legend(loc="best")
    return fig, axs
|