scnets.py

from keras import backend as K
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout
from keras.utils import np_utils, multi_gpu_model
from keras.regularizers import l2
from keras.wrappers.scikit_learn import KerasRegressor
from keras.optimizers import Adam
import numpy as np
import matplotlib.pyplot as plt

# Function to test performance on the test set (NumPy arrays)
def calc_mre(y_true, y_pred):
    y_err = 100*np.abs(y_true - y_pred)/y_true
    return np.mean(y_err)
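
# e.g. (hypothetical values):
#   calc_mre(np.array([1.0, 2.0]), np.array([1.1, 1.8]))  # -> 10.0 (%)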

# Naive percentage loss; it operates on Keras tensors, so K.abs is needed
# rather than np.abs
def relerr_loss(y_true, y_pred):
    y_err = K.abs(y_true - y_pred)/y_true
    y_err_f = K.flatten(y_err)
    return K.sum(y_err_f)
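
# Quick sanity check (a sketch, evaluated eagerly via the backend):
#   y_t, y_p = K.constant([[1.0, 2.0]]), K.constant([[1.1, 1.8]])
#   K.eval(relerr_loss(y_t, y_p))  # ~0.2 (= 0.1/1.0 + 0.2/2.0)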

def fullycon(in_size=8, out_size=250, N_hidden=3, N_neurons=250, N_gpus=1):
    """
    Returns a fully-connected model which will take a normalized size vector
    and return a spectrum.
    in_size: length of the size vector
    out_size: length of the spectrum vector
    N_hidden: number of hidden layers
    N_neurons: number of neurons in each of the hidden layers
    N_gpus: number of GPUs to train on
    """
    model = Sequential()
    model.add(Dense(out_size, input_dim=in_size, kernel_initializer='normal',
                    activation='relu', name='first'))
    # Hidden layers, each with N_neurons units
    for h in range(N_hidden):
        lname = "H" + str(h)
        model.add(Dense(N_neurons, kernel_initializer='normal', activation='relu', name=lname))
    # Linear output layer producing the spectrum
    model.add(Dense(out_size, kernel_initializer='normal', name='last'))
    # Compile model; for multi-GPU training, replicate the model with
    # Keras's multi_gpu_model utility (imported above)
    if N_gpus == 1:
        model.compile(loss=relerr_loss, optimizer='adam', metrics=['accuracy'])
    else:
        model = multi_gpu_model(model, gpus=N_gpus)
        model.compile(loss=relerr_loss, optimizer='adam', metrics=['accuracy'])
    return model
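
# Example usage (a minimal sketch; X_train, Y_train, X_test, Y_test are
# hypothetical arrays of shape (N, in_size) and (N, out_size)):
#   model = fullycon(in_size=8, out_size=250, N_hidden=3, N_neurons=250)
#   history = model.fit(X_train, Y_train, epochs=100, batch_size=32,
#                       validation_split=0.2, verbose=0)
#   print(calc_mre(Y_test, model.predict(X_test)))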

# Staging area for new models
def plot_training_history(history, red_factor):
    """Plot train/validation loss curves, rescaled by red_factor."""
    loss, val_loss = history.history['loss'], history.history['val_loss']
    loss = np.asarray(loss)/red_factor
    val_loss = np.asarray(val_loss)/red_factor
    epochs = len(loss)
    fig, axs = plt.subplots(1, 1, figsize=(5, 5))
    axs.semilogy(np.arange(1, epochs + 1), loss, label='train error')
    axs.semilogy(np.arange(1, epochs + 1), val_loss, label='validation error')
    axs.set_xlabel('Epoch number')
    axs.set_ylabel('Mean Relative Error (MRE) (%)')
    axs.legend(loc="best")
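
# Example usage (a sketch): red_factor is assumed to rescale the summed
# relative-error loss so the y-axis reads as MRE in percent, e.g.
# red_factor = out_size/100 for relerr_loss over a 250-point spectrum:
#   plot_training_history(history, red_factor=250/100)
#   plt.show()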