{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "### Dataset Loading and testing" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "ExecuteTime": { "end_time": "2018-09-29T05:18:22.852441Z", "start_time": "2018-09-29T05:18:22.010399Z" } }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Dataset has been loaded\n", "x-train (60000, 8)\n", "x-test (40000, 8)\n", "y-train (60000, 256)\n", "y-test (40000, 256)\n" ] } ], "source": [ "%load_ext autoreload\n", "%autoreload 2\n", "\n", "import numpy as np\n", "import matplotlib.pyplot as plt\n", "import h5py\n", "from sklearn.model_selection import train_test_split\n", "#import jtplot submodule from jupyterthemes\n", "from jupyterthemes import jtplot\n", "#currently installed theme will be used to\n", "#set plot style if no arguments provided\n", "jtplot.style()\n", "\n", "#now load this dataset \n", "h5f = h5py.File('./datasets/s8_sio2tio2_v2.h5','r')\n", "X = h5f['sizes'][:]\n", "Y = h5f['spectrum'][:]\n", "\n", "\n", "\n", "\n", "#get the ranges of the loaded data\n", "num_layers = X.shape[1]\n", "num_lpoints = Y.shape[1]\n", "size_max = np.amax(X)\n", "size_min = np.amin(X)\n", "size_av = 0.5*(size_max + size_min)\n", "\n", "#this information is not given in the dataset\n", "lam_min = 300\n", "lam_max = 1200\n", "lams = np.linspace(lam_min, lam_max, num_lpoints)\n", "\n", "# X = np.expand_dims(X, 1)\n", "# #X = np.expand_dims(X, 3)\n", "# Y = np.expand_dims(Y, 1)\n", "# #Y = np.expand_dims(Y, 3)\n", "\n", "\n", "\n", "\n", "\n", "\n", "#create a train - test split of the dataset\n", "x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size=0.4, random_state=42)\n", "\n", "# normalize inputs \n", "x_train = (x_train - 50)/20 \n", "x_test = (x_test - 50)/20 \n", "\n", "print(\"Dataset has been loaded\")\n", "print(\"x-train\", x_train.shape)\n", "print(\"x-test \", x_test.shape)\n", "print(\"y-train\", y_train.shape)\n", "print(\"y-test \", 
y_test.shape)\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "#### Model Development" ] }, { "cell_type": "code", "execution_count": 71, "metadata": { "ExecuteTime": { "end_time": "2018-09-27T05:04:59.838628Z", "start_time": "2018-09-27T05:04:59.808085Z" } }, "outputs": [ { "ename": "ValueError", "evalue": "Your data is either a textual data of shape `(num_sample, step, feature)` or a grey scale image of shape `(num_sample, rows, cols)`. Case 1: If your data is time-series or a textual data(probably you are using Conv1D), then there is no need of channel conversion.Case 2: If your data is image(probably you are using Conv2D), then you need to reshape the tension dimensions as follows:`shape = x_input.shape``x_input = x_input.reshape(shape[0], 1, shape[1], shape[2])`Note: Do not use `to_channels_fir()` in above cases.", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mkeras\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mutils\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mto_channels_first\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mx_train\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mto_channels_first\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_train\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/keras/utils/np_utils.py\u001b[0m in \u001b[0;36mto_channels_first\u001b[0;34m(data)\u001b[0m\n\u001b[1;32m 114\u001b[0m \u001b[0mdata\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mto_channels_first_helper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 115\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 116\u001b[0;31m \u001b[0mdata\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mto_channels_first_helper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 117\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mdata\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/keras/utils/np_utils.py\u001b[0m in \u001b[0;36mto_channels_first_helper\u001b[0;34m(np_data)\u001b[0m\n\u001b[1;32m 92\u001b[0m \u001b[0;32melif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m3\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 93\u001b[0m raise ValueError(\n\u001b[0;32m---> 94\u001b[0;31m \u001b[0;34m'Your data is either a textual data of shape '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 95\u001b[0m \u001b[0;34m'`(num_sample, step, feature)` or a grey scale image of '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 96\u001b[0m \u001b[0;34m'shape `(num_sample, rows, cols)`. '\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mValueError\u001b[0m: Your data is either a textual data of shape `(num_sample, step, feature)` or a grey scale image of shape `(num_sample, rows, cols)`. 
# NOTE(review): the original notebook called keras.utils.to_channels_first(x_train)
# here, which raised ValueError for this 2-D input — the error message itself says
# Case 1 (num_sample, feature) data needs no channel conversion, so the call is
# removed rather than left as a permanently-failing cell.

from keras import backend as K
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Reshape, UpSampling1D, Conv1D, Flatten, Activation
from keras.utils import np_utils, multi_gpu_model
from keras.regularizers import l2
from keras.wrappers.scikit_learn import KerasRegressor
from keras.optimizers import Adam
from keras.layers.normalization import BatchNormalization
from keras.layers import PReLU

from sklearn.model_selection import cross_val_score, KFold
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline

num_gpus = 2
gpu_list = ["gpu(%d)" % i for i in range(num_gpus)]


def size_percent_loss(y_true, y_pred):
    """Summed relative error on de-normalized layer sizes (symbolic Keras loss)."""
    # Map normalized predictions back to physical sizes before taking the error.
    y_true_a = 0.5 * y_true * (size_max - size_min) + size_av
    y_pred_a = 0.5 * y_pred * (size_max - size_min) + size_av
    # FIX: use K.abs — np.abs on a symbolic backend tensor is not the Keras API.
    y_err = K.abs(y_true_a - y_pred_a) / y_true_a
    return K.sum(K.flatten(y_err))


def naive_percent_loss(y_true, y_pred):
    """Sum of per-point relative errors over the spectrum (symbolic Keras loss)."""
    y_err = K.abs(y_true - y_pred) / y_true  # FIX: K.abs, not np.abs
    return K.sum(K.flatten(y_err))


def calc_mre(y_true, y_pred):
    """Mean relative error in percent on numpy arrays (test-set evaluation)."""
    y_err = 100 * np.abs(y_true - y_pred) / y_true
    return np.mean(y_err)


def calc_mre_K(y_true, y_pred):
    """Mean relative error in percent as a Keras metric (symbolic tensors)."""
    y_err = 100 * K.abs(y_true - y_pred) / y_true  # FIX: K.abs for tensors
    return K.mean(y_err)


def naiveploss_mgpu_model():
    """Build the 4x250 dense model, then wrap it for multi-GPU training.

    FIX: the original wrapped an *empty* Sequential with multi_gpu_model and
    added layers to the wrapper afterwards; layers must be added to the base
    model before wrapping for the replicas to share them.
    """
    model = Sequential()
    model.add(Dense(250, input_dim=x_train.shape[1], kernel_initializer='normal',
                    activation='relu', name='first'))
    model.add(Dense(250, kernel_initializer='normal', activation='relu', name='second'))
    model.add(Dense(250, kernel_initializer='normal', activation='relu', name='third'))
    model.add(Dense(250, kernel_initializer='normal', activation='relu', name='fourth'))
    model.add(Dense(250, kernel_initializer='normal', name='last'))
    model = multi_gpu_model(model, gpus=num_gpus)
    # `context` is a keras-mxnet extension selecting the GPU devices.
    model.compile(loss=naive_percent_loss, optimizer='adam', context=gpu_list)
    return model


def naiveploss_model():
    """Single-device 4x256 dense baseline compiled with the naive percentage loss."""
    model = Sequential()
    model.add(Dense(256, input_dim=x_train.shape[1], kernel_initializer='normal',
                    activation='relu', name='first'))
    model.add(Dense(256, kernel_initializer='normal', activation='relu', name='second'))
    model.add(Dense(256, kernel_initializer='normal', activation='relu', name='third'))
    model.add(Dense(256, kernel_initializer='normal', activation='relu', name='fourth'))
    model.add(Dense(256, kernel_initializer='normal', name='last'))
    model.compile(loss=naive_percent_loss, optimizer='adam', metrics=['accuracy'])
    return model
import timeit

def net_performance(modelfunc, num_trials=3, batch_size=32, num_epochs=200, num_gpus=2):
    """Train `modelfunc()` `num_trials` times and collect per-trial figures.

    Returns (train_err, val_err, test_err, train_time) as numpy arrays.
    Train/val errors are rescaled from the summed-percentage loss into a
    mean relative error in %; test error comes from calc_mre on y_test.
    """
    models = []
    train_err = np.ones(num_trials)
    test_err = np.ones(num_trials)
    val_err = np.ones(num_trials)
    train_time = np.ones(num_trials)
    eff_batch = batch_size * num_gpus
    for trial in np.arange(num_trials):
        print("iteration: " + str(trial + 1))
        model = modelfunc()
        x_t, x_v, y_t, y_v = train_test_split(x_train, y_train, test_size=0.2, random_state=42)
        t0 = timeit.default_timer()
        history = model.fit(x_t, y_t,
                            batch_size=eff_batch,
                            epochs=num_epochs,
                            verbose=1,
                            validation_data=(x_v, y_v))
        train_time[trial] = timeit.default_timer() - t0
        models.append(model)
        # NOTE(review): dividing the epoch loss by the effective batch size
        # assumes the backend reports a batch-summed loss — confirm for the
        # keras-mxnet backend in use here.
        train_err[trial] = (100.0 / num_lpoints) * history.history['loss'][-1] / eff_batch
        val_err[trial] = (100.0 / num_lpoints) * history.history['val_loss'][-1] / eff_batch
        test_err[trial] = calc_mre(y_test, models[trial].predict(x_test))
    return train_err, val_err, test_err, train_time


def plot_training_history(history, factor):
    """Semilog plot of train/validation error (history losses / `factor`)
    against epoch number; saves the figure to foo2.pdf."""
    loss = np.asarray(history.history['loss']) / factor
    val_loss = np.asarray(history.history['val_loss']) / factor
    epochs = len(loss)
    epoch_axis = np.arange(1, epochs + 1)

    fig, axs = plt.subplots(1, 1, figsize=(5, 2.5))
    axs.semilogy(epoch_axis, loss, label='Train error')
    axs.semilogy(epoch_axis, val_loss, label='Test error')
    axs.set_xlabel('Epoch number')
    axs.set_xlim(left=1)
    plt.yticks(np.array([0.5, 0.75, 1.0, 1.5, 2]),
               ('0.5', '0.75', '1.0', '1.5', '2'))
    axs.set_ylabel('MRE (%)')
    axs.legend(loc="best")
    fig.savefig("foo2.pdf", bbox_inches='tight')


from keras.utils import to_channels_first

def conv1d_lkyrelu():
    """Dense(256) -> reshape (4, 64) -> upsample x2 -> two Conv1D stages with
    PReLU activations -> flatten back to a 256-point output."""
    model = Sequential()

    model.add(Dense(256, input_dim=8, kernel_initializer='normal', name='first'))
    model.add(PReLU(alpha_initializer='zeros', alpha_regularizer=None))

    model.add(Reshape((4, 64)))
    model.add(UpSampling1D(size=2))

    model.add(Conv1D(filters=64, kernel_size=3, strides=1, padding='same',
                     dilation_rate=1, kernel_initializer='normal'))
    model.add(PReLU(alpha_initializer='zeros', alpha_regularizer=None))

    model.add(Conv1D(filters=32, kernel_size=3, strides=1, padding='same',
                     dilation_rate=1, kernel_initializer='normal'))
    model.add(PReLU(alpha_initializer='zeros', alpha_regularizer=None))

    model.add(Flatten())
    model.compile(loss=naive_percent_loss, optimizer='adam', metrics=[calc_mre_K])
    return model
def conv1d_model_bnorm():
    """Dense(256) + ReLU -> reshape (4, 64) -> upsample x2 -> three Conv1D+ReLU
    stages (64, 32, 32 filters) -> flatten to the 256-point output."""
    model = Sequential()

    model.add(Dense(256, input_dim=8, kernel_initializer='normal', name='first'))
    model.add(Activation('relu'))

    model.add(Reshape((4, 64)))
    model.add(UpSampling1D(size=2))

    # Same layer sequence as the original: conv64/relu, conv32/relu, conv32/relu.
    for n_filters in (64, 32, 32):
        model.add(Conv1D(filters=n_filters, kernel_size=3, strides=1, padding='same',
                         dilation_rate=1, kernel_initializer='normal'))
        model.add(Activation('relu'))

    model.add(Flatten())
    model.compile(loss=naive_percent_loss, optimizer='adam', metrics=[calc_mre_K])
    return model


def resnetb():
    """Dense(256) + PReLU -> reshape (8, 32) -> flatten.

    Skeleton for a residual experiment: the conv/resnet-block layers were
    commented out in the original, so this currently has no conv layers.
    """
    model = Sequential()

    # First layer: expand the 8 sizes to 256 features, then fold to (8, 32).
    model.add(Dense(256, input_dim=8, kernel_initializer='normal', name='first'))
    model.add(PReLU(alpha_initializer='zeros', alpha_regularizer=None))
    model.add(Reshape((8, 32)))

    # Last layer: back to a flat 256-point vector.
    model.add(Flatten())

    model.compile(loss=naive_percent_loss, optimizer='adam', metrics=[calc_mre_K])
    return model
For performance improvement, please use this API`keras.utils.to_channels_first(x_input)`to transform `channels_last` data to `channels_first` format and also please change the `image_data_format` in `keras.json` to `channels_first`.Note: `x_input` is a Numpy tensor or a list of Numpy tensorRefer to: https://github.com/awslabs/keras-apache-mxnet/tree/master/docs/mxnet_backend/performance_guide.md\n", " train_symbol = func(*args, **kwargs)\n", "/home/hegder/anaconda3/envs/dp2/lib/python3.5/site-packages/keras/backend/mxnet_backend.py:92: UserWarning: MXNet Backend performs best with `channels_first` format. Using `channels_last` will significantly reduce performance due to the Transpose operations. For performance improvement, please use this API`keras.utils.to_channels_first(x_input)`to transform `channels_last` data to `channels_first` format and also please change the `image_data_format` in `keras.json` to `channels_first`.Note: `x_input` is a Numpy tensor or a list of Numpy tensorRefer to: https://github.com/awslabs/keras-apache-mxnet/tree/master/docs/mxnet_backend/performance_guide.md\n", " test_symbol = func(*args, **kwargs)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "__________________________________________________________________________________________________\n", "Layer (type) Output Shape Param # Connected to \n", "==================================================================================================\n", "input_1 (InputLayer) (None, 8) 0 \n", "__________________________________________________________________________________________________\n", "dense_1 (Dense) (None, 256) 2304 input_1[0][0] \n", "__________________________________________________________________________________________________\n", "p_re_lu_1 (PReLU) (None, 256) 256 dense_1[0][0] \n", "__________________________________________________________________________________________________\n", "reshape_1 (Reshape) (None, 8, 32) 0 p_re_lu_1[0][0] \n", 
"__________________________________________________________________________________________________\n", "conv1d_1 (Conv1D) (None, 8, 32) 3104 reshape_1[0][0] \n", "__________________________________________________________________________________________________\n", "activation_1 (Activation) (None, 8, 32) 0 conv1d_1[0][0] \n", "__________________________________________________________________________________________________\n", "conv1d_2 (Conv1D) (None, 8, 32) 3104 activation_1[0][0] \n", "__________________________________________________________________________________________________\n", "add_1 (Add) (None, 8, 32) 0 conv1d_2[0][0] \n", " reshape_1[0][0] \n", "__________________________________________________________________________________________________\n", "conv1d_3 (Conv1D) (None, 8, 32) 3104 add_1[0][0] \n", "__________________________________________________________________________________________________\n", "activation_2 (Activation) (None, 8, 32) 0 conv1d_3[0][0] \n", "__________________________________________________________________________________________________\n", "conv1d_4 (Conv1D) (None, 8, 32) 3104 activation_2[0][0] \n", "__________________________________________________________________________________________________\n", "add_2 (Add) (None, 8, 32) 0 conv1d_4[0][0] \n", " add_1[0][0] \n", "__________________________________________________________________________________________________\n", "flatten_1 (Flatten) (None, 256) 0 add_2[0][0] \n", "==================================================================================================\n", "Total params: 14,976\n", "Trainable params: 14,976\n", "Non-trainable params: 0\n", "__________________________________________________________________________________________________\n", "Train on 48000 samples, validate on 12000 samples\n", "Epoch 1/2000\n", " 1728/48000 [>.............................] 
- ETA: 1:29 - loss: 4395.1985 - calc_mre_K: 53.6523" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/home/hegder/anaconda3/envs/dp2/lib/python3.5/site-packages/mxnet/module/bucketing_module.py:408: UserWarning: Optimizer created manually outside Module but rescale_grad is not normalized to 1.0/batch_size/num_workers (1.0 vs. 0.03125). Is this intended?\n", " force_init=force_init)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 7s 144us/step - loss: 732.8766 - calc_mre_K: 8.9462 - val_loss: 323.5366 - val_calc_mre_K: 3.9494\n", "Epoch 2/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 274.1360 - calc_mre_K: 3.3464 - val_loss: 237.9160 - val_calc_mre_K: 2.9042\n", "Epoch 3/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 230.6788 - calc_mre_K: 2.8159 - val_loss: 212.8244 - val_calc_mre_K: 2.5980\n", "Epoch 4/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 209.2977 - calc_mre_K: 2.5549 - val_loss: 196.5989 - val_calc_mre_K: 2.3999\n", "Epoch 5/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 199.4081 - calc_mre_K: 2.4342 - val_loss: 185.4336 - val_calc_mre_K: 2.2636\n", "Epoch 6/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 190.9030 - calc_mre_K: 2.3304 - val_loss: 188.8548 - val_calc_mre_K: 2.3054\n", "Epoch 7/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 183.2308 - calc_mre_K: 2.2367 - val_loss: 168.8411 - val_calc_mre_K: 2.0610\n", "Epoch 8/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 180.3695 - calc_mre_K: 2.2018 - val_loss: 171.4976 - val_calc_mre_K: 2.0935\n", "Epoch 9/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 174.0975 - calc_mre_K: 2.1252 - val_loss: 175.3341 - val_calc_mre_K: 2.1403\n", "Epoch 10/2000\n", "48000/48000 
[==============================] - 4s 78us/step - loss: 169.4694 - calc_mre_K: 2.0687 - val_loss: 159.8901 - val_calc_mre_K: 1.9518\n", "Epoch 11/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 166.2924 - calc_mre_K: 2.0299 - val_loss: 152.8264 - val_calc_mre_K: 1.8656\n", "Epoch 12/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 158.9732 - calc_mre_K: 1.9406 - val_loss: 156.2998 - val_calc_mre_K: 1.9080\n", "Epoch 13/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 155.7723 - calc_mre_K: 1.9015 - val_loss: 158.0917 - val_calc_mre_K: 1.9298\n", "Epoch 14/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 153.4070 - calc_mre_K: 1.8726 - val_loss: 149.7118 - val_calc_mre_K: 1.8275\n", "Epoch 15/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 151.4225 - calc_mre_K: 1.8484 - val_loss: 169.0276 - val_calc_mre_K: 2.0633\n", "Epoch 16/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 147.2476 - calc_mre_K: 1.7975 - val_loss: 157.3640 - val_calc_mre_K: 1.9209\n", "Epoch 17/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 144.2164 - calc_mre_K: 1.7605 - val_loss: 136.3516 - val_calc_mre_K: 1.6644\n", "Epoch 18/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 142.8940 - calc_mre_K: 1.7443 - val_loss: 140.2765 - val_calc_mre_K: 1.7124\n", "Epoch 19/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 142.3986 - calc_mre_K: 1.7383 - val_loss: 141.5143 - val_calc_mre_K: 1.7275\n", "Epoch 20/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 138.4016 - calc_mre_K: 1.6895 - val_loss: 138.0885 - val_calc_mre_K: 1.6857\n", "Epoch 21/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 136.2504 - calc_mre_K: 1.6632 - val_loss: 134.2861 - val_calc_mre_K: 1.6392\n", "Epoch 
22/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 136.1247 - calc_mre_K: 1.6617 - val_loss: 130.8850 - val_calc_mre_K: 1.5977\n", "Epoch 23/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 132.7933 - calc_mre_K: 1.6210 - val_loss: 128.0653 - val_calc_mre_K: 1.5633\n", "Epoch 24/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 131.7705 - calc_mre_K: 1.6085 - val_loss: 132.3274 - val_calc_mre_K: 1.6153\n", "Epoch 25/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 130.3055 - calc_mre_K: 1.5906 - val_loss: 129.7788 - val_calc_mre_K: 1.5842\n", "Epoch 26/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 128.3387 - calc_mre_K: 1.5666 - val_loss: 124.9020 - val_calc_mre_K: 1.5247\n", "Epoch 27/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 127.1021 - calc_mre_K: 1.5515 - val_loss: 123.4192 - val_calc_mre_K: 1.5066\n", "Epoch 28/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 125.2261 - calc_mre_K: 1.5286 - val_loss: 132.4476 - val_calc_mre_K: 1.6168\n", "Epoch 29/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 124.1974 - calc_mre_K: 1.5161 - val_loss: 118.5198 - val_calc_mre_K: 1.4468\n", "Epoch 30/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 122.0177 - calc_mre_K: 1.4895 - val_loss: 128.3601 - val_calc_mre_K: 1.5669\n", "Epoch 31/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 122.0739 - calc_mre_K: 1.4902 - val_loss: 123.9514 - val_calc_mre_K: 1.5131\n", "Epoch 32/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 120.1092 - calc_mre_K: 1.4662 - val_loss: 119.5526 - val_calc_mre_K: 1.4594\n", "Epoch 33/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 118.8594 - calc_mre_K: 1.4509 - val_loss: 113.6323 - val_calc_mre_K: 
1.3871\n", "Epoch 34/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 118.1949 - calc_mre_K: 1.4428 - val_loss: 113.2363 - val_calc_mre_K: 1.3823\n", "Epoch 35/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 116.6611 - calc_mre_K: 1.4241 - val_loss: 121.4791 - val_calc_mre_K: 1.4829\n", "Epoch 36/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 115.7877 - calc_mre_K: 1.4134 - val_loss: 113.5722 - val_calc_mre_K: 1.3864\n", "Epoch 37/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 114.5518 - calc_mre_K: 1.3983 - val_loss: 124.0801 - val_calc_mre_K: 1.5146\n", "Epoch 38/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 114.3939 - calc_mre_K: 1.3964 - val_loss: 126.1863 - val_calc_mre_K: 1.5404\n", "Epoch 39/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 112.7800 - calc_mre_K: 1.3767 - val_loss: 123.8789 - val_calc_mre_K: 1.5122\n", "Epoch 40/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 111.8984 - calc_mre_K: 1.3659 - val_loss: 106.3512 - val_calc_mre_K: 1.2982\n", "Epoch 41/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 110.5522 - calc_mre_K: 1.3495 - val_loss: 113.0707 - val_calc_mre_K: 1.3803\n", "Epoch 42/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 109.9817 - calc_mre_K: 1.3426 - val_loss: 105.8024 - val_calc_mre_K: 1.2915\n", "Epoch 43/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 109.5629 - calc_mre_K: 1.3374 - val_loss: 112.1132 - val_calc_mre_K: 1.3686\n", "Epoch 44/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 108.4401 - calc_mre_K: 1.3237 - val_loss: 109.8048 - val_calc_mre_K: 1.3404\n", "Epoch 45/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 107.1162 - calc_mre_K: 1.3076 - val_loss: 107.9281 - 
val_calc_mre_K: 1.3175\n", "Epoch 46/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 107.2260 - calc_mre_K: 1.3089 - val_loss: 101.5096 - val_calc_mre_K: 1.2391\n", "Epoch 47/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 105.9399 - calc_mre_K: 1.2932 - val_loss: 108.1118 - val_calc_mre_K: 1.3197\n", "Epoch 48/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 105.2472 - calc_mre_K: 1.2848 - val_loss: 113.6236 - val_calc_mre_K: 1.3870\n", "Epoch 49/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 104.1817 - calc_mre_K: 1.2717 - val_loss: 104.3746 - val_calc_mre_K: 1.2741\n", "Epoch 50/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 103.4421 - calc_mre_K: 1.2627 - val_loss: 106.3601 - val_calc_mre_K: 1.2983\n", "Epoch 51/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 103.3363 - calc_mre_K: 1.2614 - val_loss: 101.3402 - val_calc_mre_K: 1.2371\n", "Epoch 52/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 102.5576 - calc_mre_K: 1.2519 - val_loss: 100.5841 - val_calc_mre_K: 1.2278\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 53/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 102.7360 - calc_mre_K: 1.2541 - val_loss: 111.2903 - val_calc_mre_K: 1.3585\n", "Epoch 54/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 101.2462 - calc_mre_K: 1.2359 - val_loss: 96.9858 - val_calc_mre_K: 1.1839\n", "Epoch 55/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 100.7244 - calc_mre_K: 1.2295 - val_loss: 109.6522 - val_calc_mre_K: 1.3385\n", "Epoch 56/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 100.6261 - calc_mre_K: 1.2283 - val_loss: 107.9017 - val_calc_mre_K: 1.3172\n", "Epoch 57/2000\n", "48000/48000 [==============================] - 
4s 76us/step - loss: 100.6158 - calc_mre_K: 1.2282 - val_loss: 108.7330 - val_calc_mre_K: 1.3273\n", "Epoch 58/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 99.7550 - calc_mre_K: 1.2177 - val_loss: 101.4416 - val_calc_mre_K: 1.2383\n", "Epoch 59/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 99.0314 - calc_mre_K: 1.2089 - val_loss: 95.1208 - val_calc_mre_K: 1.1611\n", "Epoch 60/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 99.0121 - calc_mre_K: 1.2086 - val_loss: 96.3037 - val_calc_mre_K: 1.1756\n", "Epoch 61/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 99.1496 - calc_mre_K: 1.2103 - val_loss: 94.6867 - val_calc_mre_K: 1.1558\n", "Epoch 62/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 98.0197 - calc_mre_K: 1.1965 - val_loss: 102.7163 - val_calc_mre_K: 1.2539\n", "Epoch 63/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 97.3518 - calc_mre_K: 1.1884 - val_loss: 94.4679 - val_calc_mre_K: 1.1532\n", "Epoch 64/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 97.7219 - calc_mre_K: 1.1929 - val_loss: 97.3026 - val_calc_mre_K: 1.1878\n", "Epoch 65/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 97.5975 - calc_mre_K: 1.1914 - val_loss: 93.3193 - val_calc_mre_K: 1.1392\n", "Epoch 66/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 96.1531 - calc_mre_K: 1.1737 - val_loss: 100.5098 - val_calc_mre_K: 1.2269\n", "Epoch 67/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 96.5027 - calc_mre_K: 1.1780 - val_loss: 98.0895 - val_calc_mre_K: 1.1974\n", "Epoch 68/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 95.9356 - calc_mre_K: 1.1711 - val_loss: 94.8141 - val_calc_mre_K: 1.1574\n", "Epoch 69/2000\n", "48000/48000 [==============================] - 4s 
77us/step - loss: 96.1591 - calc_mre_K: 1.1738 - val_loss: 119.7812 - val_calc_mre_K: 1.4622\n", "Epoch 70/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 95.3679 - calc_mre_K: 1.1642 - val_loss: 92.3566 - val_calc_mre_K: 1.1274\n", "Epoch 71/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 95.0694 - calc_mre_K: 1.1605 - val_loss: 91.6729 - val_calc_mre_K: 1.1191\n", "Epoch 72/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 94.8517 - calc_mre_K: 1.1579 - val_loss: 94.3612 - val_calc_mre_K: 1.1519\n", "Epoch 73/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 94.1733 - calc_mre_K: 1.1496 - val_loss: 104.0981 - val_calc_mre_K: 1.2707\n", "Epoch 74/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 94.0493 - calc_mre_K: 1.1481 - val_loss: 92.3050 - val_calc_mre_K: 1.1268\n", "Epoch 75/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 93.6244 - calc_mre_K: 1.1429 - val_loss: 91.1499 - val_calc_mre_K: 1.1127\n", "Epoch 76/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 92.9059 - calc_mre_K: 1.1341 - val_loss: 91.7989 - val_calc_mre_K: 1.1206\n", "Epoch 77/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 93.0215 - calc_mre_K: 1.1355 - val_loss: 89.7728 - val_calc_mre_K: 1.0959\n", "Epoch 78/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 92.3543 - calc_mre_K: 1.1274 - val_loss: 90.0162 - val_calc_mre_K: 1.0988\n", "Epoch 79/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 91.6862 - calc_mre_K: 1.1192 - val_loss: 89.9879 - val_calc_mre_K: 1.0985\n", "Epoch 80/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 91.3569 - calc_mre_K: 1.1152 - val_loss: 94.1212 - val_calc_mre_K: 1.1489\n", "Epoch 81/2000\n", "48000/48000 [==============================] - 4s 
80us/step - loss: 91.7370 - calc_mre_K: 1.1198 - val_loss: 91.7697 - val_calc_mre_K: 1.1202\n", "Epoch 82/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 91.1477 - calc_mre_K: 1.1126 - val_loss: 90.1250 - val_calc_mre_K: 1.1002\n", "Epoch 83/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 90.9358 - calc_mre_K: 1.1101 - val_loss: 98.0585 - val_calc_mre_K: 1.1970\n", "Epoch 84/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 90.6991 - calc_mre_K: 1.1072 - val_loss: 87.2999 - val_calc_mre_K: 1.0657\n", "Epoch 85/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 90.3701 - calc_mre_K: 1.1032 - val_loss: 86.9727 - val_calc_mre_K: 1.0617\n", "Epoch 86/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 90.0569 - calc_mre_K: 1.0993 - val_loss: 91.8821 - val_calc_mre_K: 1.1216\n", "Epoch 87/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 89.6987 - calc_mre_K: 1.0950 - val_loss: 90.8433 - val_calc_mre_K: 1.1089\n", "Epoch 88/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 89.8940 - calc_mre_K: 1.0973 - val_loss: 90.6701 - val_calc_mre_K: 1.1068\n", "Epoch 89/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 89.1512 - calc_mre_K: 1.0883 - val_loss: 98.6283 - val_calc_mre_K: 1.2040\n", "Epoch 90/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 89.2185 - calc_mre_K: 1.0891 - val_loss: 88.5849 - val_calc_mre_K: 1.0814\n", "Epoch 91/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 88.5525 - calc_mre_K: 1.0810 - val_loss: 93.3768 - val_calc_mre_K: 1.1399\n", "Epoch 92/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 89.0763 - calc_mre_K: 1.0874 - val_loss: 85.6267 - val_calc_mre_K: 1.0452\n", "Epoch 93/2000\n", "48000/48000 [==============================] - 4s 
79us/step - loss: 87.9966 - calc_mre_K: 1.0742 - val_loss: 92.2069 - val_calc_mre_K: 1.1256\n", "Epoch 94/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 88.1431 - calc_mre_K: 1.0760 - val_loss: 85.0972 - val_calc_mre_K: 1.0388\n", "Epoch 95/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 87.9264 - calc_mre_K: 1.0733 - val_loss: 86.5247 - val_calc_mre_K: 1.0562\n", "Epoch 96/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 87.4308 - calc_mre_K: 1.0673 - val_loss: 86.2758 - val_calc_mre_K: 1.0532\n", "Epoch 97/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 87.7195 - calc_mre_K: 1.0708 - val_loss: 83.8367 - val_calc_mre_K: 1.0234\n", "Epoch 98/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 87.4166 - calc_mre_K: 1.0671 - val_loss: 86.9079 - val_calc_mre_K: 1.0609\n", "Epoch 99/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 87.1092 - calc_mre_K: 1.0633 - val_loss: 83.9744 - val_calc_mre_K: 1.0251\n", "Epoch 100/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 86.7816 - calc_mre_K: 1.0593 - val_loss: 83.6215 - val_calc_mre_K: 1.0208\n", "Epoch 101/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 86.8193 - calc_mre_K: 1.0598 - val_loss: 83.6693 - val_calc_mre_K: 1.0214\n", "Epoch 102/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 86.6440 - calc_mre_K: 1.0577 - val_loss: 96.7265 - val_calc_mre_K: 1.1807\n", "Epoch 103/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 86.3549 - calc_mre_K: 1.0541 - val_loss: 88.7963 - val_calc_mre_K: 1.0839\n", "Epoch 104/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 86.2218 - calc_mre_K: 1.0525 - val_loss: 86.5614 - val_calc_mre_K: 1.0567\n", "Epoch 105/2000\n" ] }, { "name": "stdout", "output_type": "stream", 
"text": [ "48000/48000 [==============================] - 4s 76us/step - loss: 85.9449 - calc_mre_K: 1.0491 - val_loss: 86.0742 - val_calc_mre_K: 1.0507\n", "Epoch 106/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 85.8073 - calc_mre_K: 1.0475 - val_loss: 86.1585 - val_calc_mre_K: 1.0517\n", "Epoch 107/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 85.7511 - calc_mre_K: 1.0468 - val_loss: 89.7336 - val_calc_mre_K: 1.0954\n", "Epoch 108/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 85.7893 - calc_mre_K: 1.0472 - val_loss: 87.2188 - val_calc_mre_K: 1.0647\n", "Epoch 109/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 85.5647 - calc_mre_K: 1.0445 - val_loss: 87.1230 - val_calc_mre_K: 1.0635\n", "Epoch 110/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 85.6572 - calc_mre_K: 1.0456 - val_loss: 83.7465 - val_calc_mre_K: 1.0223\n", "Epoch 111/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 84.5912 - calc_mre_K: 1.0326 - val_loss: 84.8502 - val_calc_mre_K: 1.0358\n", "Epoch 112/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 85.2428 - calc_mre_K: 1.0406 - val_loss: 82.6232 - val_calc_mre_K: 1.0086\n", "Epoch 113/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 85.1892 - calc_mre_K: 1.0399 - val_loss: 83.5287 - val_calc_mre_K: 1.0196\n", "Epoch 114/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 84.5240 - calc_mre_K: 1.0318 - val_loss: 94.7223 - val_calc_mre_K: 1.1563\n", "Epoch 115/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 84.5430 - calc_mre_K: 1.0320 - val_loss: 82.0893 - val_calc_mre_K: 1.0021\n", "Epoch 116/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 84.1593 - calc_mre_K: 1.0273 - val_loss: 82.4388 - val_calc_mre_K: 1.0063\n", 
"Epoch 117/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 84.8822 - calc_mre_K: 1.0362 - val_loss: 83.2025 - val_calc_mre_K: 1.0157\n", "Epoch 118/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 83.8902 - calc_mre_K: 1.0241 - val_loss: 90.1445 - val_calc_mre_K: 1.1004\n", "Epoch 119/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 84.5420 - calc_mre_K: 1.0320 - val_loss: 84.6692 - val_calc_mre_K: 1.0336\n", "Epoch 120/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 83.9903 - calc_mre_K: 1.0253 - val_loss: 83.2905 - val_calc_mre_K: 1.0167\n", "Epoch 121/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 83.7249 - calc_mre_K: 1.0220 - val_loss: 88.5992 - val_calc_mre_K: 1.0815\n", "Epoch 122/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 84.3942 - calc_mre_K: 1.0302 - val_loss: 81.4300 - val_calc_mre_K: 0.9940\n", "Epoch 123/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 83.0444 - calc_mre_K: 1.0137 - val_loss: 81.5708 - val_calc_mre_K: 0.9957\n", "Epoch 124/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 83.2123 - calc_mre_K: 1.0158 - val_loss: 81.1891 - val_calc_mre_K: 0.9911\n", "Epoch 125/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 83.3994 - calc_mre_K: 1.0181 - val_loss: 80.9949 - val_calc_mre_K: 0.9887\n", "Epoch 126/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 83.1338 - calc_mre_K: 1.0148 - val_loss: 82.0805 - val_calc_mre_K: 1.0020\n", "Epoch 127/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 83.2638 - calc_mre_K: 1.0164 - val_loss: 82.4698 - val_calc_mre_K: 1.0067\n", "Epoch 128/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 82.7845 - calc_mre_K: 1.0106 - val_loss: 84.4064 - val_calc_mre_K: 
1.0304\n", "Epoch 129/2000\n", "48000/48000 [==============================] - 4s 83us/step - loss: 82.8501 - calc_mre_K: 1.0114 - val_loss: 83.7473 - val_calc_mre_K: 1.0223\n", "Epoch 130/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 82.5005 - calc_mre_K: 1.0071 - val_loss: 78.9815 - val_calc_mre_K: 0.9641\n", "Epoch 131/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 82.3589 - calc_mre_K: 1.0054 - val_loss: 80.1814 - val_calc_mre_K: 0.9788\n", "Epoch 132/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 82.4982 - calc_mre_K: 1.0071 - val_loss: 80.1964 - val_calc_mre_K: 0.9790\n", "Epoch 133/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 81.9254 - calc_mre_K: 1.0001 - val_loss: 80.3811 - val_calc_mre_K: 0.9812\n", "Epoch 134/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 82.4920 - calc_mre_K: 1.0070 - val_loss: 81.1564 - val_calc_mre_K: 0.9907\n", "Epoch 135/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 82.0136 - calc_mre_K: 1.0011 - val_loss: 80.3699 - val_calc_mre_K: 0.9811\n", "Epoch 136/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 82.2060 - calc_mre_K: 1.0035 - val_loss: 81.3707 - val_calc_mre_K: 0.9933\n", "Epoch 137/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 81.6297 - calc_mre_K: 0.9965 - val_loss: 80.6556 - val_calc_mre_K: 0.9846\n", "Epoch 138/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 81.3396 - calc_mre_K: 0.9929 - val_loss: 84.5167 - val_calc_mre_K: 1.0317\n", "Epoch 139/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 81.5746 - calc_mre_K: 0.9958 - val_loss: 86.2047 - val_calc_mre_K: 1.0523\n", "Epoch 140/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 81.5581 - calc_mre_K: 0.9956 - val_loss: 80.6257 - 
val_calc_mre_K: 0.9842\n", "Epoch 141/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 81.2049 - calc_mre_K: 0.9913 - val_loss: 79.1018 - val_calc_mre_K: 0.9656\n", "Epoch 142/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 81.5244 - calc_mre_K: 0.9952 - val_loss: 78.3962 - val_calc_mre_K: 0.9570\n", "Epoch 143/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 81.4058 - calc_mre_K: 0.9937 - val_loss: 80.0068 - val_calc_mre_K: 0.9766\n", "Epoch 144/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 81.5568 - calc_mre_K: 0.9956 - val_loss: 84.0140 - val_calc_mre_K: 1.0256\n", "Epoch 145/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 80.7245 - calc_mre_K: 0.9854 - val_loss: 79.8115 - val_calc_mre_K: 0.9743\n", "Epoch 146/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 80.9309 - calc_mre_K: 0.9879 - val_loss: 80.4980 - val_calc_mre_K: 0.9826\n", "Epoch 147/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 80.5407 - calc_mre_K: 0.9832 - val_loss: 81.7605 - val_calc_mre_K: 0.9981\n", "Epoch 148/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 80.8416 - calc_mre_K: 0.9868 - val_loss: 79.6586 - val_calc_mre_K: 0.9724\n", "Epoch 149/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 80.6472 - calc_mre_K: 0.9845 - val_loss: 79.3674 - val_calc_mre_K: 0.9688\n", "Epoch 150/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 80.2999 - calc_mre_K: 0.9802 - val_loss: 79.1652 - val_calc_mre_K: 0.9664\n", "Epoch 151/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 80.9459 - calc_mre_K: 0.9881 - val_loss: 78.9897 - val_calc_mre_K: 0.9642\n", "Epoch 152/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 80.0244 - calc_mre_K: 0.9769 - val_loss: 
78.6477 - val_calc_mre_K: 0.9601\n", "Epoch 153/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 80.5388 - calc_mre_K: 0.9831 - val_loss: 81.3144 - val_calc_mre_K: 0.9926\n", "Epoch 154/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 80.3217 - calc_mre_K: 0.9805 - val_loss: 82.5470 - val_calc_mre_K: 1.0077\n", "Epoch 155/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 80.0695 - calc_mre_K: 0.9774 - val_loss: 78.2237 - val_calc_mre_K: 0.9549\n", "Epoch 156/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 80.0186 - calc_mre_K: 0.9768 - val_loss: 80.6373 - val_calc_mre_K: 0.9843\n", "Epoch 157/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 78us/step - loss: 79.9486 - calc_mre_K: 0.9759 - val_loss: 77.8510 - val_calc_mre_K: 0.9503\n", "Epoch 158/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 79.9485 - calc_mre_K: 0.9759 - val_loss: 77.8446 - val_calc_mre_K: 0.9503\n", "Epoch 159/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 79.8962 - calc_mre_K: 0.9753 - val_loss: 80.6575 - val_calc_mre_K: 0.9846\n", "Epoch 160/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 80.0035 - calc_mre_K: 0.9766 - val_loss: 79.2542 - val_calc_mre_K: 0.9675\n", "Epoch 161/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 79.3305 - calc_mre_K: 0.9684 - val_loss: 82.2613 - val_calc_mre_K: 1.0042\n", "Epoch 162/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 79.5210 - calc_mre_K: 0.9707 - val_loss: 79.5520 - val_calc_mre_K: 0.9711\n", "Epoch 163/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 79.9052 - calc_mre_K: 0.9754 - val_loss: 77.7117 - val_calc_mre_K: 0.9486\n", "Epoch 164/2000\n", "48000/48000 [==============================] 
- 4s 78us/step - loss: 79.3250 - calc_mre_K: 0.9683 - val_loss: 80.1924 - val_calc_mre_K: 0.9789\n", "Epoch 165/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 79.5959 - calc_mre_K: 0.9716 - val_loss: 79.3057 - val_calc_mre_K: 0.9681\n", "Epoch 166/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 79.2363 - calc_mre_K: 0.9672 - val_loss: 77.1362 - val_calc_mre_K: 0.9416\n", "Epoch 167/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 79.1494 - calc_mre_K: 0.9662 - val_loss: 77.9412 - val_calc_mre_K: 0.9514\n", "Epoch 168/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 79.0455 - calc_mre_K: 0.9649 - val_loss: 78.5913 - val_calc_mre_K: 0.9594\n", "Epoch 169/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 79.0752 - calc_mre_K: 0.9653 - val_loss: 80.3298 - val_calc_mre_K: 0.9806\n", "Epoch 170/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 79.2569 - calc_mre_K: 0.9675 - val_loss: 77.7229 - val_calc_mre_K: 0.9488\n", "Epoch 171/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 78.8169 - calc_mre_K: 0.9621 - val_loss: 78.9695 - val_calc_mre_K: 0.9640\n", "Epoch 172/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 79.4041 - calc_mre_K: 0.9693 - val_loss: 76.9859 - val_calc_mre_K: 0.9398\n", "Epoch 173/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 78.7337 - calc_mre_K: 0.9611 - val_loss: 79.3306 - val_calc_mre_K: 0.9684\n", "Epoch 174/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 78.9319 - calc_mre_K: 0.9635 - val_loss: 80.2908 - val_calc_mre_K: 0.9801\n", "Epoch 175/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 78.5310 - calc_mre_K: 0.9586 - val_loss: 78.6967 - val_calc_mre_K: 0.9607\n", "Epoch 176/2000\n", "48000/48000 
[==============================] - 4s 79us/step - loss: 79.0607 - calc_mre_K: 0.9651 - val_loss: 77.7367 - val_calc_mre_K: 0.9489\n", "Epoch 177/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 78.5158 - calc_mre_K: 0.9584 - val_loss: 77.7643 - val_calc_mre_K: 0.9493\n", "Epoch 178/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 78.5020 - calc_mre_K: 0.9583 - val_loss: 76.8554 - val_calc_mre_K: 0.9382\n", "Epoch 179/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 78.6665 - calc_mre_K: 0.9603 - val_loss: 84.6595 - val_calc_mre_K: 1.0334\n", "Epoch 180/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 78.2248 - calc_mre_K: 0.9549 - val_loss: 75.7520 - val_calc_mre_K: 0.9247\n", "Epoch 181/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 78.3221 - calc_mre_K: 0.9561 - val_loss: 78.7158 - val_calc_mre_K: 0.9609\n", "Epoch 182/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 78.2020 - calc_mre_K: 0.9546 - val_loss: 76.6090 - val_calc_mre_K: 0.9352\n", "Epoch 183/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 78.1606 - calc_mre_K: 0.9541 - val_loss: 77.6845 - val_calc_mre_K: 0.9483\n", "Epoch 184/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 78.0476 - calc_mre_K: 0.9527 - val_loss: 82.8038 - val_calc_mre_K: 1.0108\n", "Epoch 185/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 78.1978 - calc_mre_K: 0.9546 - val_loss: 81.2140 - val_calc_mre_K: 0.9914\n", "Epoch 186/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 77.9875 - calc_mre_K: 0.9520 - val_loss: 78.2002 - val_calc_mre_K: 0.9546\n", "Epoch 187/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 77.6078 - calc_mre_K: 0.9474 - val_loss: 78.5930 - val_calc_mre_K: 0.9594\n", "Epoch 188/2000\n", 
"48000/48000 [==============================] - 4s 76us/step - loss: 78.2093 - calc_mre_K: 0.9547 - val_loss: 76.4659 - val_calc_mre_K: 0.9334\n", "Epoch 189/2000\n", "48000/48000 [==============================] - 4s 73us/step - loss: 77.7502 - calc_mre_K: 0.9491 - val_loss: 78.7471 - val_calc_mre_K: 0.9613\n", "Epoch 190/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 77.8605 - calc_mre_K: 0.9504 - val_loss: 78.2576 - val_calc_mre_K: 0.9553\n", "Epoch 191/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 77.8755 - calc_mre_K: 0.9506 - val_loss: 77.7398 - val_calc_mre_K: 0.9490\n", "Epoch 192/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 77.5414 - calc_mre_K: 0.9466 - val_loss: 84.9913 - val_calc_mre_K: 1.0375\n", "Epoch 193/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 77.5031 - calc_mre_K: 0.9461 - val_loss: 84.9813 - val_calc_mre_K: 1.0374\n", "Epoch 194/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 77.4007 - calc_mre_K: 0.9448 - val_loss: 74.8029 - val_calc_mre_K: 0.9131\n", "Epoch 195/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 77.4022 - calc_mre_K: 0.9449 - val_loss: 76.3104 - val_calc_mre_K: 0.9315\n", "Epoch 196/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 77.6730 - calc_mre_K: 0.9482 - val_loss: 85.5188 - val_calc_mre_K: 1.0439\n", "Epoch 197/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 77.5790 - calc_mre_K: 0.9470 - val_loss: 79.1473 - val_calc_mre_K: 0.9662\n", "Epoch 198/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 77.1311 - calc_mre_K: 0.9415 - val_loss: 77.0951 - val_calc_mre_K: 0.9411\n", "Epoch 199/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 77.3052 - calc_mre_K: 0.9437 - val_loss: 76.6761 - val_calc_mre_K: 0.9360\n", "Epoch 
200/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 77.3435 - calc_mre_K: 0.9441 - val_loss: 76.1256 - val_calc_mre_K: 0.9293\n", "Epoch 201/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 77.2841 - calc_mre_K: 0.9434 - val_loss: 75.2312 - val_calc_mre_K: 0.9183\n", "Epoch 202/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 77.1123 - calc_mre_K: 0.9413 - val_loss: 76.5323 - val_calc_mre_K: 0.9342\n", "Epoch 203/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 76.9781 - calc_mre_K: 0.9397 - val_loss: 76.8535 - val_calc_mre_K: 0.9382\n", "Epoch 204/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 77.0089 - calc_mre_K: 0.9400 - val_loss: 79.5975 - val_calc_mre_K: 0.9716\n", "Epoch 205/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 77.1757 - calc_mre_K: 0.9421 - val_loss: 77.0393 - val_calc_mre_K: 0.9404\n", "Epoch 206/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 77.1332 - calc_mre_K: 0.9416 - val_loss: 77.1731 - val_calc_mre_K: 0.9421\n", "Epoch 207/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 76.7549 - calc_mre_K: 0.9370 - val_loss: 77.7119 - val_calc_mre_K: 0.9486\n", "Epoch 208/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 77.0789 - calc_mre_K: 0.9409 - val_loss: 75.2196 - val_calc_mre_K: 0.9182\n", "Epoch 209/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 79us/step - loss: 76.8737 - calc_mre_K: 0.9384 - val_loss: 75.1035 - val_calc_mre_K: 0.9168\n", "Epoch 210/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 77.0310 - calc_mre_K: 0.9403 - val_loss: 75.8667 - val_calc_mre_K: 0.9261\n", "Epoch 211/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 76.7276 - calc_mre_K: 
0.9366 - val_loss: 74.9716 - val_calc_mre_K: 0.9152\n", "Epoch 212/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 76.9187 - calc_mre_K: 0.9389 - val_loss: 78.6129 - val_calc_mre_K: 0.9596\n", "Epoch 213/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 76.9713 - calc_mre_K: 0.9396 - val_loss: 79.8308 - val_calc_mre_K: 0.9745\n", "Epoch 214/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 76.5212 - calc_mre_K: 0.9341 - val_loss: 75.5402 - val_calc_mre_K: 0.9221\n", "Epoch 215/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 76.8250 - calc_mre_K: 0.9378 - val_loss: 75.1286 - val_calc_mre_K: 0.9171\n", "Epoch 216/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 76.6544 - calc_mre_K: 0.9357 - val_loss: 78.6293 - val_calc_mre_K: 0.9598\n", "Epoch 217/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 76.3845 - calc_mre_K: 0.9324 - val_loss: 75.5527 - val_calc_mre_K: 0.9223\n", "Epoch 218/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 76.2299 - calc_mre_K: 0.9305 - val_loss: 82.2997 - val_calc_mre_K: 1.0046\n", "Epoch 219/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 76.4807 - calc_mre_K: 0.9336 - val_loss: 74.7591 - val_calc_mre_K: 0.9126\n", "Epoch 220/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 76.2586 - calc_mre_K: 0.9309 - val_loss: 74.1859 - val_calc_mre_K: 0.9056\n", "Epoch 221/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 76.3333 - calc_mre_K: 0.9318 - val_loss: 76.6704 - val_calc_mre_K: 0.9359\n", "Epoch 222/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 76.3994 - calc_mre_K: 0.9326 - val_loss: 73.8212 - val_calc_mre_K: 0.9011\n", "Epoch 223/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 76.0381 - 
calc_mre_K: 0.9282 - val_loss: 77.4713 - val_calc_mre_K: 0.9457\n", "Epoch 224/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 76.3959 - calc_mre_K: 0.9326 - val_loss: 76.1142 - val_calc_mre_K: 0.9291\n", "Epoch 225/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 76.2844 - calc_mre_K: 0.9312 - val_loss: 74.1236 - val_calc_mre_K: 0.9048\n", "Epoch 226/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 76.4245 - calc_mre_K: 0.9329 - val_loss: 74.8903 - val_calc_mre_K: 0.9142\n", "Epoch 227/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 75.9470 - calc_mre_K: 0.9271 - val_loss: 78.1084 - val_calc_mre_K: 0.9535\n", "Epoch 228/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 75.9443 - calc_mre_K: 0.9271 - val_loss: 75.4296 - val_calc_mre_K: 0.9208\n", "Epoch 229/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 76.0898 - calc_mre_K: 0.9288 - val_loss: 76.2484 - val_calc_mre_K: 0.9308\n", "Epoch 230/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 76.0486 - calc_mre_K: 0.9283 - val_loss: 74.4569 - val_calc_mre_K: 0.9089\n", "Epoch 231/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 76.0281 - calc_mre_K: 0.9281 - val_loss: 72.9693 - val_calc_mre_K: 0.8907\n", "Epoch 232/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 75.8047 - calc_mre_K: 0.9254 - val_loss: 78.0248 - val_calc_mre_K: 0.9525\n", "Epoch 233/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 75.9890 - calc_mre_K: 0.9276 - val_loss: 75.9891 - val_calc_mre_K: 0.9276\n", "Epoch 234/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 75.7936 - calc_mre_K: 0.9252 - val_loss: 78.8040 - val_calc_mre_K: 0.9620\n", "Epoch 235/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 
75.5799 - calc_mre_K: 0.9226 - val_loss: 81.4213 - val_calc_mre_K: 0.9939\n", "Epoch 236/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 75.7818 - calc_mre_K: 0.9251 - val_loss: 75.0230 - val_calc_mre_K: 0.9158\n", "Epoch 237/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 75.6954 - calc_mre_K: 0.9240 - val_loss: 74.5701 - val_calc_mre_K: 0.9103\n", "Epoch 238/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 75.4213 - calc_mre_K: 0.9207 - val_loss: 75.2667 - val_calc_mre_K: 0.9188\n", "Epoch 239/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 75.6514 - calc_mre_K: 0.9235 - val_loss: 74.6669 - val_calc_mre_K: 0.9115\n", "Epoch 240/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 75.6228 - calc_mre_K: 0.9231 - val_loss: 76.1106 - val_calc_mre_K: 0.9291\n", "Epoch 241/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 75.4352 - calc_mre_K: 0.9208 - val_loss: 75.1599 - val_calc_mre_K: 0.9175\n", "Epoch 242/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 75.4572 - calc_mre_K: 0.9211 - val_loss: 76.2727 - val_calc_mre_K: 0.9311\n", "Epoch 243/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 75.6930 - calc_mre_K: 0.9240 - val_loss: 76.2746 - val_calc_mre_K: 0.9311\n", "Epoch 244/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 76.0059 - calc_mre_K: 0.9278 - val_loss: 74.3731 - val_calc_mre_K: 0.9079\n", "Epoch 245/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 75.1775 - calc_mre_K: 0.9177 - val_loss: 79.5644 - val_calc_mre_K: 0.9712\n", "Epoch 246/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 75.8407 - calc_mre_K: 0.9258 - val_loss: 77.0713 - val_calc_mre_K: 0.9408\n", "Epoch 247/2000\n", "48000/48000 [==============================] - 4s 77us/step - 
loss: 75.3353 - calc_mre_K: 0.9196 - val_loss: 74.5050 - val_calc_mre_K: 0.9095\n", "Epoch 248/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 75.3835 - calc_mre_K: 0.9202 - val_loss: 74.9059 - val_calc_mre_K: 0.9144\n", "Epoch 249/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 75.1874 - calc_mre_K: 0.9178 - val_loss: 73.1644 - val_calc_mre_K: 0.8931\n", "Epoch 250/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 75.5847 - calc_mre_K: 0.9227 - val_loss: 77.8862 - val_calc_mre_K: 0.9508\n", "Epoch 251/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 75.2576 - calc_mre_K: 0.9187 - val_loss: 75.9420 - val_calc_mre_K: 0.9270\n", "Epoch 252/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 75.4230 - calc_mre_K: 0.9207 - val_loss: 76.4914 - val_calc_mre_K: 0.9337\n", "Epoch 253/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 74.9878 - calc_mre_K: 0.9154 - val_loss: 77.8169 - val_calc_mre_K: 0.9499\n", "Epoch 254/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 75.1360 - calc_mre_K: 0.9172 - val_loss: 80.8935 - val_calc_mre_K: 0.9875\n", "Epoch 255/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 74.7107 - calc_mre_K: 0.9120 - val_loss: 74.5227 - val_calc_mre_K: 0.9097\n", "Epoch 256/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 75.3829 - calc_mre_K: 0.9202 - val_loss: 76.5062 - val_calc_mre_K: 0.9339\n", "Epoch 257/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 74.8236 - calc_mre_K: 0.9134 - val_loss: 75.7469 - val_calc_mre_K: 0.9246\n", "Epoch 258/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 75.0158 - calc_mre_K: 0.9157 - val_loss: 74.2059 - val_calc_mre_K: 0.9058\n", "Epoch 259/2000\n", "48000/48000 [==============================] - 4s 
75us/step - loss: 74.7856 - calc_mre_K: 0.9129 - val_loss: 77.6373 - val_calc_mre_K: 0.9477\n", "Epoch 260/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 75.2164 - calc_mre_K: 0.9182 - val_loss: 75.3711 - val_calc_mre_K: 0.9201\n", "Epoch 261/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 79us/step - loss: 75.1417 - calc_mre_K: 0.9173 - val_loss: 76.0219 - val_calc_mre_K: 0.9280\n", "Epoch 262/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 74.7523 - calc_mre_K: 0.9125 - val_loss: 73.2210 - val_calc_mre_K: 0.8938\n", "Epoch 263/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 74.6908 - calc_mre_K: 0.9118 - val_loss: 73.0797 - val_calc_mre_K: 0.8921\n", "Epoch 264/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 74.6772 - calc_mre_K: 0.9116 - val_loss: 75.4837 - val_calc_mre_K: 0.9214\n", "Epoch 265/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 75.0636 - calc_mre_K: 0.9163 - val_loss: 76.7141 - val_calc_mre_K: 0.9365\n", "Epoch 266/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 74.9139 - calc_mre_K: 0.9145 - val_loss: 75.4587 - val_calc_mre_K: 0.9211\n", "Epoch 267/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 74.9075 - calc_mre_K: 0.9144 - val_loss: 74.1778 - val_calc_mre_K: 0.9055\n", "Epoch 268/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 74.7892 - calc_mre_K: 0.9130 - val_loss: 75.1050 - val_calc_mre_K: 0.9168\n", "Epoch 269/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 74.8121 - calc_mre_K: 0.9132 - val_loss: 75.1522 - val_calc_mre_K: 0.9174\n", "Epoch 270/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 74.7014 - calc_mre_K: 0.9119 - val_loss: 77.3682 - val_calc_mre_K: 0.9444\n", "Epoch 
271/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 74.3781 - calc_mre_K: 0.9079 - val_loss: 72.8234 - val_calc_mre_K: 0.8890\n", "Epoch 272/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 74.5070 - calc_mre_K: 0.9095 - val_loss: 73.3800 - val_calc_mre_K: 0.8958\n", "Epoch 273/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 74.8713 - calc_mre_K: 0.9140 - val_loss: 74.1977 - val_calc_mre_K: 0.9057\n", "Epoch 274/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 74.7879 - calc_mre_K: 0.9129 - val_loss: 77.0483 - val_calc_mre_K: 0.9405\n", "Epoch 275/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 74.5181 - calc_mre_K: 0.9096 - val_loss: 80.5757 - val_calc_mre_K: 0.9836\n", "Epoch 276/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 74.9657 - calc_mre_K: 0.9151 - val_loss: 75.3511 - val_calc_mre_K: 0.9198\n", "Epoch 277/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 74.5141 - calc_mre_K: 0.9096 - val_loss: 74.7621 - val_calc_mre_K: 0.9126\n", "Epoch 278/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 74.4888 - calc_mre_K: 0.9093 - val_loss: 74.0132 - val_calc_mre_K: 0.9035\n", "Epoch 279/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 74.8241 - calc_mre_K: 0.9134 - val_loss: 72.5664 - val_calc_mre_K: 0.8858\n", "Epoch 280/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 73.8573 - calc_mre_K: 0.9016 - val_loss: 73.1464 - val_calc_mre_K: 0.8929\n", "Epoch 281/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 74.7547 - calc_mre_K: 0.9125 - val_loss: 73.1400 - val_calc_mre_K: 0.8928\n", "Epoch 282/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 74.3947 - calc_mre_K: 0.9081 - val_loss: 74.9926 - val_calc_mre_K: 0.9154\n", 
"Epoch 283/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 74.3623 - calc_mre_K: 0.9077 - val_loss: 72.4570 - val_calc_mre_K: 0.8845\n", "Epoch 284/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 74.3354 - calc_mre_K: 0.9074 - val_loss: 73.9193 - val_calc_mre_K: 0.9023\n", "Epoch 285/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 74.3315 - calc_mre_K: 0.9074 - val_loss: 72.1803 - val_calc_mre_K: 0.8811\n", "Epoch 286/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 74.3240 - calc_mre_K: 0.9073 - val_loss: 77.7146 - val_calc_mre_K: 0.9487\n", "Epoch 287/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 74.2919 - calc_mre_K: 0.9069 - val_loss: 74.3937 - val_calc_mre_K: 0.9081\n", "Epoch 288/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 74.2583 - calc_mre_K: 0.9065 - val_loss: 72.4885 - val_calc_mre_K: 0.8849\n", "Epoch 289/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 74.4617 - calc_mre_K: 0.9090 - val_loss: 73.9545 - val_calc_mre_K: 0.9028\n", "Epoch 290/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 73.9879 - calc_mre_K: 0.9032 - val_loss: 72.4915 - val_calc_mre_K: 0.8849\n", "Epoch 291/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 74.0904 - calc_mre_K: 0.9044 - val_loss: 75.4706 - val_calc_mre_K: 0.9213\n", "Epoch 292/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 74.0301 - calc_mre_K: 0.9037 - val_loss: 72.5541 - val_calc_mre_K: 0.8857\n", "Epoch 293/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 74.0615 - calc_mre_K: 0.9041 - val_loss: 73.8489 - val_calc_mre_K: 0.9015\n", "Epoch 294/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 74.4477 - calc_mre_K: 0.9088 - val_loss: 78.8999 - val_calc_mre_K: 
0.9631\n", "Epoch 295/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 73.9474 - calc_mre_K: 0.9027 - val_loss: 80.8181 - val_calc_mre_K: 0.9865\n", "Epoch 296/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 74.1226 - calc_mre_K: 0.9048 - val_loss: 74.3266 - val_calc_mre_K: 0.9073\n", "Epoch 297/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 74.1820 - calc_mre_K: 0.9055 - val_loss: 71.8260 - val_calc_mre_K: 0.8768\n", "Epoch 298/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 74.3008 - calc_mre_K: 0.9070 - val_loss: 72.7828 - val_calc_mre_K: 0.8885\n", "Epoch 299/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 73.9523 - calc_mre_K: 0.9027 - val_loss: 71.5698 - val_calc_mre_K: 0.8737\n", "Epoch 300/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 73.8850 - calc_mre_K: 0.9019 - val_loss: 73.4689 - val_calc_mre_K: 0.8968\n", "Epoch 301/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 73.9430 - calc_mre_K: 0.9026 - val_loss: 74.7000 - val_calc_mre_K: 0.9119\n", "Epoch 302/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 73.6253 - calc_mre_K: 0.8987 - val_loss: 72.9763 - val_calc_mre_K: 0.8908\n", "Epoch 303/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 74.0133 - calc_mre_K: 0.9035 - val_loss: 71.9151 - val_calc_mre_K: 0.8779\n", "Epoch 304/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 73.7480 - calc_mre_K: 0.9002 - val_loss: 71.8757 - val_calc_mre_K: 0.8774\n", "Epoch 305/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 73.9646 - calc_mre_K: 0.9029 - val_loss: 73.6971 - val_calc_mre_K: 0.8996\n", "Epoch 306/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.9326 - calc_mre_K: 0.9025 - val_loss: 74.2357 - 
val_calc_mre_K: 0.9062\n", "Epoch 307/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 73.5702 - calc_mre_K: 0.8981 - val_loss: 79.7041 - val_calc_mre_K: 0.9729\n", "Epoch 308/2000\n", "48000/48000 [==============================] - 4s 84us/step - loss: 73.7149 - calc_mre_K: 0.8998 - val_loss: 80.7282 - val_calc_mre_K: 0.9855\n", "Epoch 309/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.9082 - calc_mre_K: 0.9022 - val_loss: 71.8158 - val_calc_mre_K: 0.8767\n", "Epoch 310/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 73.9171 - calc_mre_K: 0.9023 - val_loss: 71.2641 - val_calc_mre_K: 0.8699\n", "Epoch 311/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 73.6304 - calc_mre_K: 0.8988 - val_loss: 72.1592 - val_calc_mre_K: 0.8808\n", "Epoch 312/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 73.6948 - calc_mre_K: 0.8996 - val_loss: 73.1508 - val_calc_mre_K: 0.8930\n", "Epoch 313/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 78us/step - loss: 73.7922 - calc_mre_K: 0.9008 - val_loss: 73.9676 - val_calc_mre_K: 0.9029\n", "Epoch 314/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 74.3367 - calc_mre_K: 0.9074 - val_loss: 73.9707 - val_calc_mre_K: 0.9030\n", "Epoch 315/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 73.8059 - calc_mre_K: 0.9010 - val_loss: 72.0849 - val_calc_mre_K: 0.8799\n", "Epoch 316/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.7798 - calc_mre_K: 0.9006 - val_loss: 75.3911 - val_calc_mre_K: 0.9203\n", "Epoch 317/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.6470 - calc_mre_K: 0.8990 - val_loss: 77.0586 - val_calc_mre_K: 0.9407\n", "Epoch 318/2000\n", "48000/48000 [==============================] - 4s 
76us/step - loss: 73.3933 - calc_mre_K: 0.8959 - val_loss: 75.9979 - val_calc_mre_K: 0.9277\n", "Epoch 319/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.3400 - calc_mre_K: 0.8953 - val_loss: 71.5275 - val_calc_mre_K: 0.8731\n", "Epoch 320/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 73.8976 - calc_mre_K: 0.9021 - val_loss: 71.2844 - val_calc_mre_K: 0.8702\n", "Epoch 321/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 73.3230 - calc_mre_K: 0.8951 - val_loss: 73.3206 - val_calc_mre_K: 0.8950\n", "Epoch 322/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 73.5924 - calc_mre_K: 0.8983 - val_loss: 72.7246 - val_calc_mre_K: 0.8878\n", "Epoch 323/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 73.6288 - calc_mre_K: 0.8988 - val_loss: 74.3858 - val_calc_mre_K: 0.9080\n", "Epoch 324/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 73.4681 - calc_mre_K: 0.8968 - val_loss: 74.5997 - val_calc_mre_K: 0.9106\n", "Epoch 325/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 73.5628 - calc_mre_K: 0.8980 - val_loss: 74.5016 - val_calc_mre_K: 0.9094\n", "Epoch 326/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 73.6300 - calc_mre_K: 0.8988 - val_loss: 75.0210 - val_calc_mre_K: 0.9158\n", "Epoch 327/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 73.4889 - calc_mre_K: 0.8971 - val_loss: 73.7277 - val_calc_mre_K: 0.9000\n", "Epoch 328/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 73.3927 - calc_mre_K: 0.8959 - val_loss: 74.6039 - val_calc_mre_K: 0.9107\n", "Epoch 329/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 73.1598 - calc_mre_K: 0.8931 - val_loss: 73.6057 - val_calc_mre_K: 0.8985\n", "Epoch 330/2000\n", "48000/48000 [==============================] 
- 4s 78us/step - loss: 73.2537 - calc_mre_K: 0.8942 - val_loss: 73.7009 - val_calc_mre_K: 0.8997\n", "Epoch 331/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 73.3419 - calc_mre_K: 0.8953 - val_loss: 71.3084 - val_calc_mre_K: 0.8705\n", "Epoch 332/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.4448 - calc_mre_K: 0.8965 - val_loss: 72.5356 - val_calc_mre_K: 0.8854\n", "Epoch 333/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.4198 - calc_mre_K: 0.8962 - val_loss: 74.1316 - val_calc_mre_K: 0.9049\n", "Epoch 334/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 73.3566 - calc_mre_K: 0.8955 - val_loss: 73.3062 - val_calc_mre_K: 0.8949\n", "Epoch 335/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 73.0813 - calc_mre_K: 0.8921 - val_loss: 73.5701 - val_calc_mre_K: 0.8981\n", "Epoch 336/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 73.3406 - calc_mre_K: 0.8953 - val_loss: 71.0041 - val_calc_mre_K: 0.8667\n", "Epoch 337/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.2111 - calc_mre_K: 0.8937 - val_loss: 73.1858 - val_calc_mre_K: 0.8934\n", "Epoch 338/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 73.4487 - calc_mre_K: 0.8966 - val_loss: 71.9185 - val_calc_mre_K: 0.8779\n", "Epoch 339/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.0926 - calc_mre_K: 0.8922 - val_loss: 73.6821 - val_calc_mre_K: 0.8994\n", "Epoch 340/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.2166 - calc_mre_K: 0.8938 - val_loss: 73.7431 - val_calc_mre_K: 0.9002\n", "Epoch 341/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 73.1306 - calc_mre_K: 0.8927 - val_loss: 73.2252 - val_calc_mre_K: 0.8939\n", "Epoch 342/2000\n", "48000/48000 
[==============================] - 4s 79us/step - loss: 73.2196 - calc_mre_K: 0.8938 - val_loss: 74.5583 - val_calc_mre_K: 0.9101\n", "Epoch 343/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 73.0001 - calc_mre_K: 0.8911 - val_loss: 73.5398 - val_calc_mre_K: 0.8977\n", "Epoch 344/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.0999 - calc_mre_K: 0.8923 - val_loss: 73.7507 - val_calc_mre_K: 0.9003\n", "Epoch 345/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.9565 - calc_mre_K: 0.8906 - val_loss: 70.9549 - val_calc_mre_K: 0.8661\n", "Epoch 346/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.7270 - calc_mre_K: 0.8878 - val_loss: 75.2258 - val_calc_mre_K: 0.9183\n", "Epoch 347/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 73.3044 - calc_mre_K: 0.8948 - val_loss: 72.6876 - val_calc_mre_K: 0.8873\n", "Epoch 348/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.8836 - calc_mre_K: 0.8897 - val_loss: 71.6055 - val_calc_mre_K: 0.8741\n", "Epoch 349/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.8257 - calc_mre_K: 0.8890 - val_loss: 74.6175 - val_calc_mre_K: 0.9109\n", "Epoch 350/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.7515 - calc_mre_K: 0.8881 - val_loss: 74.1572 - val_calc_mre_K: 0.9052\n", "Epoch 351/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.7607 - calc_mre_K: 0.8882 - val_loss: 72.5863 - val_calc_mre_K: 0.8861\n", "Epoch 352/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 73.1152 - calc_mre_K: 0.8925 - val_loss: 71.2192 - val_calc_mre_K: 0.8694\n", "Epoch 353/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.7793 - calc_mre_K: 0.8884 - val_loss: 71.9121 - val_calc_mre_K: 0.8778\n", "Epoch 354/2000\n", 
"48000/48000 [==============================] - 4s 79us/step - loss: 72.8419 - calc_mre_K: 0.8892 - val_loss: 73.4737 - val_calc_mre_K: 0.8969\n", "Epoch 355/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.6930 - calc_mre_K: 0.8874 - val_loss: 71.6541 - val_calc_mre_K: 0.8747\n", "Epoch 356/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.7230 - calc_mre_K: 0.8877 - val_loss: 74.7361 - val_calc_mre_K: 0.9123\n", "Epoch 357/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.8113 - calc_mre_K: 0.8888 - val_loss: 72.0477 - val_calc_mre_K: 0.8795\n", "Epoch 358/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.8900 - calc_mre_K: 0.8898 - val_loss: 77.7485 - val_calc_mre_K: 0.9491\n", "Epoch 359/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.8325 - calc_mre_K: 0.8891 - val_loss: 71.2782 - val_calc_mre_K: 0.8701\n", "Epoch 360/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.1749 - calc_mre_K: 0.8932 - val_loss: 74.1250 - val_calc_mre_K: 0.9048\n", "Epoch 361/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.9403 - calc_mre_K: 0.8904 - val_loss: 76.5479 - val_calc_mre_K: 0.9344\n", "Epoch 362/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 72.5704 - calc_mre_K: 0.8859 - val_loss: 70.8453 - val_calc_mre_K: 0.8648\n", "Epoch 363/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 73.0205 - calc_mre_K: 0.8914 - val_loss: 72.7900 - val_calc_mre_K: 0.8885\n", "Epoch 364/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.6867 - calc_mre_K: 0.8873 - val_loss: 71.0087 - val_calc_mre_K: 0.8668\n", "Epoch 365/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 77us/step - loss: 72.5572 - calc_mre_K: 0.8857 - 
val_loss: 74.4452 - val_calc_mre_K: 0.9088\n", "Epoch 366/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 72.7852 - calc_mre_K: 0.8885 - val_loss: 71.6715 - val_calc_mre_K: 0.8749\n", "Epoch 367/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 73.0503 - calc_mre_K: 0.8917 - val_loss: 72.8850 - val_calc_mre_K: 0.8897\n", "Epoch 368/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.6005 - calc_mre_K: 0.8862 - val_loss: 71.3107 - val_calc_mre_K: 0.8705\n", "Epoch 369/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.7364 - calc_mre_K: 0.8879 - val_loss: 73.0268 - val_calc_mre_K: 0.8914\n", "Epoch 370/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.7198 - calc_mre_K: 0.8877 - val_loss: 72.0802 - val_calc_mre_K: 0.8799\n", "Epoch 371/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.7595 - calc_mre_K: 0.8882 - val_loss: 73.0957 - val_calc_mre_K: 0.8923\n", "Epoch 372/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.4112 - calc_mre_K: 0.8839 - val_loss: 71.7797 - val_calc_mre_K: 0.8762\n", "Epoch 373/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.4279 - calc_mre_K: 0.8841 - val_loss: 70.8417 - val_calc_mre_K: 0.8648\n", "Epoch 374/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.7655 - calc_mre_K: 0.8883 - val_loss: 73.4288 - val_calc_mre_K: 0.8963\n", "Epoch 375/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.3693 - calc_mre_K: 0.8834 - val_loss: 72.1488 - val_calc_mre_K: 0.8807\n", "Epoch 376/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.7285 - calc_mre_K: 0.8878 - val_loss: 74.2551 - val_calc_mre_K: 0.9064\n", "Epoch 377/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.4513 - calc_mre_K: 
0.8844 - val_loss: 71.7500 - val_calc_mre_K: 0.8759\n", "Epoch 378/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.5151 - calc_mre_K: 0.8852 - val_loss: 72.8199 - val_calc_mre_K: 0.8889\n", "Epoch 379/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 72.3201 - calc_mre_K: 0.8828 - val_loss: 73.5393 - val_calc_mre_K: 0.8977\n", "Epoch 380/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 72.8486 - calc_mre_K: 0.8893 - val_loss: 70.9565 - val_calc_mre_K: 0.8662\n", "Epoch 381/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.3342 - calc_mre_K: 0.8830 - val_loss: 71.1047 - val_calc_mre_K: 0.8680\n", "Epoch 382/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.6757 - calc_mre_K: 0.8872 - val_loss: 72.7599 - val_calc_mre_K: 0.8882\n", "Epoch 383/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.5110 - calc_mre_K: 0.8851 - val_loss: 72.0314 - val_calc_mre_K: 0.8793\n", "Epoch 384/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.4898 - calc_mre_K: 0.8849 - val_loss: 71.4761 - val_calc_mre_K: 0.8725\n", "Epoch 385/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 72.3938 - calc_mre_K: 0.8837 - val_loss: 70.8614 - val_calc_mre_K: 0.8650\n", "Epoch 386/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.5224 - calc_mre_K: 0.8853 - val_loss: 71.4231 - val_calc_mre_K: 0.8719\n", "Epoch 387/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.2510 - calc_mre_K: 0.8820 - val_loss: 71.6252 - val_calc_mre_K: 0.8743\n", "Epoch 388/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.6181 - calc_mre_K: 0.8865 - val_loss: 70.0615 - val_calc_mre_K: 0.8552\n", "Epoch 389/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.2762 - 
calc_mre_K: 0.8823 - val_loss: 70.2992 - val_calc_mre_K: 0.8581\n", "Epoch 390/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.2249 - calc_mre_K: 0.8817 - val_loss: 71.0138 - val_calc_mre_K: 0.8669\n", "Epoch 391/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 72.4168 - calc_mre_K: 0.8840 - val_loss: 75.4897 - val_calc_mre_K: 0.9215\n", "Epoch 392/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.2266 - calc_mre_K: 0.8817 - val_loss: 73.3837 - val_calc_mre_K: 0.8958\n", "Epoch 393/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 72.5954 - calc_mre_K: 0.8862 - val_loss: 73.3500 - val_calc_mre_K: 0.8954\n", "Epoch 394/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.3468 - calc_mre_K: 0.8831 - val_loss: 74.6905 - val_calc_mre_K: 0.9117\n", "Epoch 395/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.3009 - calc_mre_K: 0.8826 - val_loss: 73.0116 - val_calc_mre_K: 0.8913\n", "Epoch 396/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 72.2416 - calc_mre_K: 0.8819 - val_loss: 74.1114 - val_calc_mre_K: 0.9047\n", "Epoch 397/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 72.0565 - calc_mre_K: 0.8796 - val_loss: 70.8562 - val_calc_mre_K: 0.8649\n", "Epoch 398/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 72.3405 - calc_mre_K: 0.8831 - val_loss: 71.1203 - val_calc_mre_K: 0.8682\n", "Epoch 399/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.1142 - calc_mre_K: 0.8803 - val_loss: 75.5182 - val_calc_mre_K: 0.9219\n", "Epoch 400/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.3105 - calc_mre_K: 0.8827 - val_loss: 70.8271 - val_calc_mre_K: 0.8646\n", "Epoch 401/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 
72.2701 - calc_mre_K: 0.8822 - val_loss: 71.2804 - val_calc_mre_K: 0.8701\n", "Epoch 402/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.5520 - calc_mre_K: 0.8856 - val_loss: 70.7779 - val_calc_mre_K: 0.8640\n", "Epoch 403/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.0965 - calc_mre_K: 0.8801 - val_loss: 71.7835 - val_calc_mre_K: 0.8763\n", "Epoch 404/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.2034 - calc_mre_K: 0.8814 - val_loss: 71.5401 - val_calc_mre_K: 0.8733\n", "Epoch 405/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 71.9405 - calc_mre_K: 0.8782 - val_loss: 70.9812 - val_calc_mre_K: 0.8665\n", "Epoch 406/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.1012 - calc_mre_K: 0.8801 - val_loss: 71.6906 - val_calc_mre_K: 0.8751\n", "Epoch 407/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.8051 - calc_mre_K: 0.8765 - val_loss: 74.2456 - val_calc_mre_K: 0.9063\n", "Epoch 408/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 72.1544 - calc_mre_K: 0.8808 - val_loss: 72.6816 - val_calc_mre_K: 0.8872\n", "Epoch 409/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.5097 - calc_mre_K: 0.8851 - val_loss: 75.0947 - val_calc_mre_K: 0.9167\n", "Epoch 410/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 72.1140 - calc_mre_K: 0.8803 - val_loss: 70.2937 - val_calc_mre_K: 0.8581\n", "Epoch 411/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.0580 - calc_mre_K: 0.8796 - val_loss: 71.4998 - val_calc_mre_K: 0.8728\n", "Epoch 412/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.2185 - calc_mre_K: 0.8816 - val_loss: 74.4943 - val_calc_mre_K: 0.9094\n", "Epoch 413/2000\n", "48000/48000 [==============================] - 4s 78us/step - 
loss: 72.2980 - calc_mre_K: 0.8825 - val_loss: 71.8491 - val_calc_mre_K: 0.8771\n", "Epoch 414/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.9398 - calc_mre_K: 0.8782 - val_loss: 71.4444 - val_calc_mre_K: 0.8721\n", "Epoch 415/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 72.1230 - calc_mre_K: 0.8804 - val_loss: 72.4848 - val_calc_mre_K: 0.8848\n", "Epoch 416/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.9612 - calc_mre_K: 0.8784 - val_loss: 70.9722 - val_calc_mre_K: 0.8664\n", "Epoch 417/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 80us/step - loss: 71.8702 - calc_mre_K: 0.8773 - val_loss: 70.8739 - val_calc_mre_K: 0.8652\n", "Epoch 418/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 72.0916 - calc_mre_K: 0.8800 - val_loss: 72.7284 - val_calc_mre_K: 0.8878\n", "Epoch 419/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.9594 - calc_mre_K: 0.8784 - val_loss: 73.6793 - val_calc_mre_K: 0.8994\n", "Epoch 420/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.8092 - calc_mre_K: 0.8766 - val_loss: 73.6372 - val_calc_mre_K: 0.8989\n", "Epoch 421/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.0592 - calc_mre_K: 0.8796 - val_loss: 71.4164 - val_calc_mre_K: 0.8718\n", "Epoch 422/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.7984 - calc_mre_K: 0.8764 - val_loss: 75.6888 - val_calc_mre_K: 0.9239\n", "Epoch 423/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.8892 - calc_mre_K: 0.8776 - val_loss: 74.0763 - val_calc_mre_K: 0.9043\n", "Epoch 424/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.9039 - calc_mre_K: 0.8777 - val_loss: 70.4618 - val_calc_mre_K: 0.8601\n", "Epoch 425/2000\n", 
"48000/48000 [==============================] - 4s 80us/step - loss: 72.0167 - calc_mre_K: 0.8791 - val_loss: 75.8559 - val_calc_mre_K: 0.9260\n", "Epoch 426/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.9016 - calc_mre_K: 0.8777 - val_loss: 71.0929 - val_calc_mre_K: 0.8678\n", "Epoch 427/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.7941 - calc_mre_K: 0.8764 - val_loss: 71.7515 - val_calc_mre_K: 0.8759\n", "Epoch 428/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 72.0155 - calc_mre_K: 0.8791 - val_loss: 70.7153 - val_calc_mre_K: 0.8632\n", "Epoch 429/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.8988 - calc_mre_K: 0.8777 - val_loss: 72.7993 - val_calc_mre_K: 0.8887\n", "Epoch 430/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.0668 - calc_mre_K: 0.8797 - val_loss: 72.0424 - val_calc_mre_K: 0.8794\n", "Epoch 431/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.6330 - calc_mre_K: 0.8744 - val_loss: 70.5409 - val_calc_mre_K: 0.8611\n", "Epoch 432/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.8471 - calc_mre_K: 0.8770 - val_loss: 71.2882 - val_calc_mre_K: 0.8702\n", "Epoch 433/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.5681 - calc_mre_K: 0.8736 - val_loss: 71.5813 - val_calc_mre_K: 0.8738\n", "Epoch 434/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 72.0195 - calc_mre_K: 0.8791 - val_loss: 73.3207 - val_calc_mre_K: 0.8950\n", "Epoch 435/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.6913 - calc_mre_K: 0.8751 - val_loss: 71.5140 - val_calc_mre_K: 0.8730\n", "Epoch 436/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.7827 - calc_mre_K: 0.8763 - val_loss: 71.7236 - val_calc_mre_K: 0.8755\n", "Epoch 
437/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.8618 - calc_mre_K: 0.8772 - val_loss: 72.6935 - val_calc_mre_K: 0.8874\n", "Epoch 438/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.7957 - calc_mre_K: 0.8764 - val_loss: 70.1790 - val_calc_mre_K: 0.8567\n", "Epoch 439/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.7724 - calc_mre_K: 0.8761 - val_loss: 81.3316 - val_calc_mre_K: 0.9928\n", "Epoch 440/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.5535 - calc_mre_K: 0.8735 - val_loss: 71.5521 - val_calc_mre_K: 0.8734\n", "Epoch 441/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.8816 - calc_mre_K: 0.8775 - val_loss: 70.6461 - val_calc_mre_K: 0.8624\n", "Epoch 442/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.7490 - calc_mre_K: 0.8758 - val_loss: 71.6697 - val_calc_mre_K: 0.8749\n", "Epoch 443/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 71.4002 - calc_mre_K: 0.8716 - val_loss: 75.4690 - val_calc_mre_K: 0.9213\n", "Epoch 444/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.7277 - calc_mre_K: 0.8756 - val_loss: 71.2514 - val_calc_mre_K: 0.8698\n", "Epoch 445/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.4960 - calc_mre_K: 0.8728 - val_loss: 72.5458 - val_calc_mre_K: 0.8856\n", "Epoch 446/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.7269 - calc_mre_K: 0.8756 - val_loss: 72.3932 - val_calc_mre_K: 0.8837\n", "Epoch 447/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.4812 - calc_mre_K: 0.8726 - val_loss: 72.3114 - val_calc_mre_K: 0.8827\n", "Epoch 448/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 71.5078 - calc_mre_K: 0.8729 - val_loss: 71.3593 - val_calc_mre_K: 0.8711\n", 
"Epoch 449/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.8626 - calc_mre_K: 0.8772 - val_loss: 72.9015 - val_calc_mre_K: 0.8899\n", "Epoch 450/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.4892 - calc_mre_K: 0.8727 - val_loss: 73.7358 - val_calc_mre_K: 0.9001\n", "Epoch 451/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 71.7074 - calc_mre_K: 0.8753 - val_loss: 72.5576 - val_calc_mre_K: 0.8857\n", "Epoch 452/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.6082 - calc_mre_K: 0.8741 - val_loss: 69.5133 - val_calc_mre_K: 0.8486\n", "Epoch 453/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.6130 - calc_mre_K: 0.8742 - val_loss: 69.9016 - val_calc_mre_K: 0.8533\n", "Epoch 454/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.3163 - calc_mre_K: 0.8706 - val_loss: 70.4835 - val_calc_mre_K: 0.8604\n", "Epoch 455/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.4786 - calc_mre_K: 0.8725 - val_loss: 71.7672 - val_calc_mre_K: 0.8761\n", "Epoch 456/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.7246 - calc_mre_K: 0.8755 - val_loss: 71.9389 - val_calc_mre_K: 0.8782\n", "Epoch 457/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.7219 - calc_mre_K: 0.8755 - val_loss: 71.8491 - val_calc_mre_K: 0.8771\n", "Epoch 458/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.2322 - calc_mre_K: 0.8695 - val_loss: 70.8990 - val_calc_mre_K: 0.8655\n", "Epoch 459/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.4215 - calc_mre_K: 0.8718 - val_loss: 71.1201 - val_calc_mre_K: 0.8682\n", "Epoch 460/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.4136 - calc_mre_K: 0.8717 - val_loss: 71.3727 - val_calc_mre_K: 
0.8712\n", "Epoch 461/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 71.4578 - calc_mre_K: 0.8723 - val_loss: 72.2516 - val_calc_mre_K: 0.8820\n", "Epoch 462/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.5696 - calc_mre_K: 0.8737 - val_loss: 75.6422 - val_calc_mre_K: 0.9234\n", "Epoch 463/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.5304 - calc_mre_K: 0.8732 - val_loss: 70.8578 - val_calc_mre_K: 0.8650\n", "Epoch 464/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.6104 - calc_mre_K: 0.8742 - val_loss: 70.8910 - val_calc_mre_K: 0.8654\n", "Epoch 465/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.6714 - calc_mre_K: 0.8749 - val_loss: 77.0689 - val_calc_mre_K: 0.9408\n", "Epoch 466/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.3452 - calc_mre_K: 0.8709 - val_loss: 70.1728 - val_calc_mre_K: 0.8566\n", "Epoch 467/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.2335 - calc_mre_K: 0.8696 - val_loss: 72.6443 - val_calc_mre_K: 0.8868\n", "Epoch 468/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.3775 - calc_mre_K: 0.8713 - val_loss: 72.1932 - val_calc_mre_K: 0.8813\n", "Epoch 469/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 76us/step - loss: 71.3568 - calc_mre_K: 0.8711 - val_loss: 69.7939 - val_calc_mre_K: 0.8520\n", "Epoch 470/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 71.4401 - calc_mre_K: 0.8721 - val_loss: 71.0194 - val_calc_mre_K: 0.8669\n", "Epoch 471/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 71.3167 - calc_mre_K: 0.8706 - val_loss: 70.0169 - val_calc_mre_K: 0.8547\n", "Epoch 472/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 
71.5568 - calc_mre_K: 0.8735 - val_loss: 69.5260 - val_calc_mre_K: 0.8487\n", "Epoch 473/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.5628 - calc_mre_K: 0.8736 - val_loss: 70.2824 - val_calc_mre_K: 0.8579\n", "Epoch 474/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.2862 - calc_mre_K: 0.8702 - val_loss: 69.7029 - val_calc_mre_K: 0.8509\n", "Epoch 475/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.4812 - calc_mre_K: 0.8726 - val_loss: 70.8526 - val_calc_mre_K: 0.8649\n", "Epoch 476/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.1777 - calc_mre_K: 0.8689 - val_loss: 73.2793 - val_calc_mre_K: 0.8945\n", "Epoch 477/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.1981 - calc_mre_K: 0.8691 - val_loss: 71.0292 - val_calc_mre_K: 0.8671\n", "Epoch 478/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 71.3535 - calc_mre_K: 0.8710 - val_loss: 70.7593 - val_calc_mre_K: 0.8638\n", "Epoch 479/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.3432 - calc_mre_K: 0.8709 - val_loss: 73.9201 - val_calc_mre_K: 0.9023\n", "Epoch 480/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.0846 - calc_mre_K: 0.8677 - val_loss: 72.1856 - val_calc_mre_K: 0.8812\n", "Epoch 481/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.4236 - calc_mre_K: 0.8719 - val_loss: 73.0379 - val_calc_mre_K: 0.8916\n", "Epoch 482/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.2213 - calc_mre_K: 0.8694 - val_loss: 69.7643 - val_calc_mre_K: 0.8516\n", "Epoch 483/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.5268 - calc_mre_K: 0.8731 - val_loss: 74.7056 - val_calc_mre_K: 0.9119\n", "Epoch 484/2000\n", "48000/48000 [==============================] - 4s 79us/step - 
loss: 71.1862 - calc_mre_K: 0.8690 - val_loss: 72.2438 - val_calc_mre_K: 0.8819\n", "Epoch 485/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.3408 - calc_mre_K: 0.8709 - val_loss: 71.4217 - val_calc_mre_K: 0.8718\n", "Epoch 486/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.3604 - calc_mre_K: 0.8711 - val_loss: 68.5082 - val_calc_mre_K: 0.8363\n", "Epoch 487/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.0570 - calc_mre_K: 0.8674 - val_loss: 70.2407 - val_calc_mre_K: 0.8574\n", "Epoch 488/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.0587 - calc_mre_K: 0.8674 - val_loss: 70.8775 - val_calc_mre_K: 0.8652\n", "Epoch 489/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.1644 - calc_mre_K: 0.8687 - val_loss: 68.8862 - val_calc_mre_K: 0.8409\n", "Epoch 490/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.2420 - calc_mre_K: 0.8697 - val_loss: 70.2268 - val_calc_mre_K: 0.8573\n", "Epoch 491/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.0990 - calc_mre_K: 0.8679 - val_loss: 70.5702 - val_calc_mre_K: 0.8615\n", "Epoch 492/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.1678 - calc_mre_K: 0.8687 - val_loss: 70.2618 - val_calc_mre_K: 0.8577\n", "Epoch 493/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.3013 - calc_mre_K: 0.8704 - val_loss: 71.5721 - val_calc_mre_K: 0.8737\n", "Epoch 494/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 71.3331 - calc_mre_K: 0.8708 - val_loss: 70.0454 - val_calc_mre_K: 0.8550\n", "Epoch 495/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.8881 - calc_mre_K: 0.8653 - val_loss: 71.7148 - val_calc_mre_K: 0.8754\n", "Epoch 496/2000\n", "48000/48000 [==============================] - 4s 
79us/step - loss: 71.5171 - calc_mre_K: 0.8730 - val_loss: 68.9094 - val_calc_mre_K: 0.8412\n", "Epoch 497/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.9838 - calc_mre_K: 0.8665 - val_loss: 70.0073 - val_calc_mre_K: 0.8546\n", "Epoch 498/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.2482 - calc_mre_K: 0.8697 - val_loss: 71.2839 - val_calc_mre_K: 0.8702\n", "Epoch 499/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.9890 - calc_mre_K: 0.8666 - val_loss: 71.6100 - val_calc_mre_K: 0.8741\n", "Epoch 500/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.1317 - calc_mre_K: 0.8683 - val_loss: 70.7015 - val_calc_mre_K: 0.8631\n", "Epoch 501/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 71.1336 - calc_mre_K: 0.8683 - val_loss: 68.9179 - val_calc_mre_K: 0.8413\n", "Epoch 502/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.0517 - calc_mre_K: 0.8673 - val_loss: 69.8425 - val_calc_mre_K: 0.8526\n", "Epoch 503/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.9920 - calc_mre_K: 0.8666 - val_loss: 70.9399 - val_calc_mre_K: 0.8660\n", "Epoch 504/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.2317 - calc_mre_K: 0.8695 - val_loss: 71.0605 - val_calc_mre_K: 0.8674\n", "Epoch 505/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.8865 - calc_mre_K: 0.8653 - val_loss: 70.9253 - val_calc_mre_K: 0.8658\n", "Epoch 506/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.1295 - calc_mre_K: 0.8683 - val_loss: 72.9847 - val_calc_mre_K: 0.8909\n", "Epoch 507/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.1227 - calc_mre_K: 0.8682 - val_loss: 71.3847 - val_calc_mre_K: 0.8714\n", "Epoch 508/2000\n", "48000/48000 [==============================] 
- 4s 78us/step - loss: 70.7267 - calc_mre_K: 0.8634 - val_loss: 71.3652 - val_calc_mre_K: 0.8712\n", "Epoch 509/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.2696 - calc_mre_K: 0.8700 - val_loss: 75.9211 - val_calc_mre_K: 0.9268\n", "Epoch 510/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.1017 - calc_mre_K: 0.8679 - val_loss: 71.0523 - val_calc_mre_K: 0.8673\n", "Epoch 511/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.0740 - calc_mre_K: 0.8676 - val_loss: 70.9989 - val_calc_mre_K: 0.8667\n", "Epoch 512/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.9656 - calc_mre_K: 0.8663 - val_loss: 70.2963 - val_calc_mre_K: 0.8581\n", "Epoch 513/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.7972 - calc_mre_K: 0.8642 - val_loss: 69.5460 - val_calc_mre_K: 0.8489\n", "Epoch 514/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.1317 - calc_mre_K: 0.8683 - val_loss: 68.0290 - val_calc_mre_K: 0.8304\n", "Epoch 515/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.9916 - calc_mre_K: 0.8666 - val_loss: 70.3159 - val_calc_mre_K: 0.8583\n", "Epoch 516/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 71.0003 - calc_mre_K: 0.8667 - val_loss: 70.3167 - val_calc_mre_K: 0.8584\n", "Epoch 517/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.7725 - calc_mre_K: 0.8639 - val_loss: 69.1843 - val_calc_mre_K: 0.8445\n", "Epoch 518/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.0793 - calc_mre_K: 0.8677 - val_loss: 72.4126 - val_calc_mre_K: 0.8839\n", "Epoch 519/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.6071 - calc_mre_K: 0.8619 - val_loss: 83.7517 - val_calc_mre_K: 1.0224\n", "Epoch 520/2000\n", "48000/48000 
[==============================] - 4s 79us/step - loss: 71.2109 - calc_mre_K: 0.8693 - val_loss: 72.3417 - val_calc_mre_K: 0.8831\n", "Epoch 521/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 77us/step - loss: 71.0675 - calc_mre_K: 0.8675 - val_loss: 71.0111 - val_calc_mre_K: 0.8668\n", "Epoch 522/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.8438 - calc_mre_K: 0.8648 - val_loss: 73.6335 - val_calc_mre_K: 0.8988\n", "Epoch 523/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.0938 - calc_mre_K: 0.8678 - val_loss: 76.8784 - val_calc_mre_K: 0.9385\n", "Epoch 524/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.6672 - calc_mre_K: 0.8626 - val_loss: 71.1695 - val_calc_mre_K: 0.8688\n", "Epoch 525/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 71.0190 - calc_mre_K: 0.8669 - val_loss: 70.2974 - val_calc_mre_K: 0.8581\n", "Epoch 526/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 70.6845 - calc_mre_K: 0.8628 - val_loss: 72.9573 - val_calc_mre_K: 0.8906\n", "Epoch 527/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.9979 - calc_mre_K: 0.8667 - val_loss: 79.3687 - val_calc_mre_K: 0.9689\n", "Epoch 528/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.7043 - calc_mre_K: 0.8631 - val_loss: 72.1821 - val_calc_mre_K: 0.8811\n", "Epoch 529/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.8270 - calc_mre_K: 0.8646 - val_loss: 73.0936 - val_calc_mre_K: 0.8923\n", "Epoch 530/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.8024 - calc_mre_K: 0.8643 - val_loss: 74.8270 - val_calc_mre_K: 0.9134\n", "Epoch 531/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.8391 - calc_mre_K: 0.8647 - val_loss: 
69.9579 - val_calc_mre_K: 0.8540\n", "Epoch 532/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.8307 - calc_mre_K: 0.8646 - val_loss: 73.3907 - val_calc_mre_K: 0.8959\n", "Epoch 533/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.8634 - calc_mre_K: 0.8650 - val_loss: 71.9255 - val_calc_mre_K: 0.8780\n", "Epoch 534/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.8161 - calc_mre_K: 0.8645 - val_loss: 68.5935 - val_calc_mre_K: 0.8373\n", "Epoch 535/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.5284 - calc_mre_K: 0.8609 - val_loss: 70.0673 - val_calc_mre_K: 0.8553\n", "Epoch 536/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.9979 - calc_mre_K: 0.8667 - val_loss: 73.4570 - val_calc_mre_K: 0.8967\n", "Epoch 537/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.7676 - calc_mre_K: 0.8639 - val_loss: 69.7197 - val_calc_mre_K: 0.8511\n", "Epoch 538/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.7251 - calc_mre_K: 0.8633 - val_loss: 69.2584 - val_calc_mre_K: 0.8454\n", "Epoch 539/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.6907 - calc_mre_K: 0.8629 - val_loss: 70.0556 - val_calc_mre_K: 0.8552\n", "Epoch 540/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.8896 - calc_mre_K: 0.8654 - val_loss: 72.1873 - val_calc_mre_K: 0.8812\n", "Epoch 541/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.8500 - calc_mre_K: 0.8649 - val_loss: 69.0256 - val_calc_mre_K: 0.8426\n", "Epoch 542/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 71.1246 - calc_mre_K: 0.8682 - val_loss: 71.0124 - val_calc_mre_K: 0.8669\n", "Epoch 543/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.7926 - calc_mre_K: 0.8642 - 
val_loss: 68.8985 - val_calc_mre_K: 0.8410\n", "Epoch 544/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.5029 - calc_mre_K: 0.8606 - val_loss: 68.1972 - val_calc_mre_K: 0.8325\n", "Epoch 545/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.9267 - calc_mre_K: 0.8658 - val_loss: 70.5439 - val_calc_mre_K: 0.8611\n", "Epoch 546/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.6910 - calc_mre_K: 0.8629 - val_loss: 70.4445 - val_calc_mre_K: 0.8599\n", "Epoch 547/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.7143 - calc_mre_K: 0.8632 - val_loss: 69.4809 - val_calc_mre_K: 0.8482\n", "Epoch 548/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.8686 - calc_mre_K: 0.8651 - val_loss: 70.0867 - val_calc_mre_K: 0.8556\n", "Epoch 549/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.6247 - calc_mre_K: 0.8621 - val_loss: 68.7204 - val_calc_mre_K: 0.8389\n", "Epoch 550/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.6510 - calc_mre_K: 0.8624 - val_loss: 73.0838 - val_calc_mre_K: 0.8921\n", "Epoch 551/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.6604 - calc_mre_K: 0.8626 - val_loss: 71.9346 - val_calc_mre_K: 0.8781\n", "Epoch 552/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.6013 - calc_mre_K: 0.8618 - val_loss: 71.5412 - val_calc_mre_K: 0.8733\n", "Epoch 553/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.3876 - calc_mre_K: 0.8592 - val_loss: 70.0227 - val_calc_mre_K: 0.8548\n", "Epoch 554/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.6376 - calc_mre_K: 0.8623 - val_loss: 71.4472 - val_calc_mre_K: 0.8722\n", "Epoch 555/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.7356 - calc_mre_K: 
0.8635 - val_loss: 74.1101 - val_calc_mre_K: 0.9047\n", "Epoch 556/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.8122 - calc_mre_K: 0.8644 - val_loss: 70.6777 - val_calc_mre_K: 0.8628\n", "Epoch 557/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.6156 - calc_mre_K: 0.8620 - val_loss: 73.3545 - val_calc_mre_K: 0.8954\n", "Epoch 558/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.8322 - calc_mre_K: 0.8647 - val_loss: 73.2241 - val_calc_mre_K: 0.8938\n", "Epoch 559/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.1915 - calc_mre_K: 0.8568 - val_loss: 69.4415 - val_calc_mre_K: 0.8477\n", "Epoch 560/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.6324 - calc_mre_K: 0.8622 - val_loss: 70.6190 - val_calc_mre_K: 0.8620\n", "Epoch 561/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.7428 - calc_mre_K: 0.8636 - val_loss: 71.0466 - val_calc_mre_K: 0.8673\n", "Epoch 562/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.4167 - calc_mre_K: 0.8596 - val_loss: 71.5171 - val_calc_mre_K: 0.8730\n", "Epoch 563/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.2924 - calc_mre_K: 0.8581 - val_loss: 70.1717 - val_calc_mre_K: 0.8566\n", "Epoch 564/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.6502 - calc_mre_K: 0.8624 - val_loss: 68.9206 - val_calc_mre_K: 0.8413\n", "Epoch 565/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.7544 - calc_mre_K: 0.8637 - val_loss: 76.1351 - val_calc_mre_K: 0.9294\n", "Epoch 566/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.4967 - calc_mre_K: 0.8606 - val_loss: 72.4798 - val_calc_mre_K: 0.8848\n", "Epoch 567/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.6947 - 
calc_mre_K: 0.8630 - val_loss: 74.2538 - val_calc_mre_K: 0.9064\n", "Epoch 568/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.3857 - calc_mre_K: 0.8592 - val_loss: 69.4919 - val_calc_mre_K: 0.8483\n", "Epoch 569/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.5537 - calc_mre_K: 0.8613 - val_loss: 72.2529 - val_calc_mre_K: 0.8820\n", "Epoch 570/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.4482 - calc_mre_K: 0.8600 - val_loss: 70.6500 - val_calc_mre_K: 0.8624\n", "Epoch 571/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.7603 - calc_mre_K: 0.8638 - val_loss: 69.4813 - val_calc_mre_K: 0.8482\n", "Epoch 572/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.3634 - calc_mre_K: 0.8589 - val_loss: 70.9787 - val_calc_mre_K: 0.8664\n", "Epoch 573/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 78us/step - loss: 70.4460 - calc_mre_K: 0.8599 - val_loss: 70.8114 - val_calc_mre_K: 0.8644\n", "Epoch 574/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.3442 - calc_mre_K: 0.8587 - val_loss: 68.7604 - val_calc_mre_K: 0.8394\n", "Epoch 575/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.6017 - calc_mre_K: 0.8618 - val_loss: 68.9969 - val_calc_mre_K: 0.8422\n", "Epoch 576/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.5513 - calc_mre_K: 0.8612 - val_loss: 70.7963 - val_calc_mre_K: 0.8642\n", "Epoch 577/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.3815 - calc_mre_K: 0.8591 - val_loss: 70.2563 - val_calc_mre_K: 0.8576\n", "Epoch 578/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.5073 - calc_mre_K: 0.8607 - val_loss: 69.2485 - val_calc_mre_K: 0.8453\n", "Epoch 579/2000\n", "48000/48000 
[==============================] - 4s 77us/step - loss: 70.6239 - calc_mre_K: 0.8621 - val_loss: 68.6029 - val_calc_mre_K: 0.8374\n", "Epoch 580/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.4574 - calc_mre_K: 0.8601 - val_loss: 69.9958 - val_calc_mre_K: 0.8544\n", "Epoch 581/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.1490 - calc_mre_K: 0.8563 - val_loss: 76.2456 - val_calc_mre_K: 0.9307\n", "Epoch 582/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.3736 - calc_mre_K: 0.8591 - val_loss: 69.6673 - val_calc_mre_K: 0.8504\n", "Epoch 583/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.2174 - calc_mre_K: 0.8571 - val_loss: 71.3862 - val_calc_mre_K: 0.8714\n", "Epoch 584/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.2619 - calc_mre_K: 0.8577 - val_loss: 70.8132 - val_calc_mre_K: 0.8644\n", "Epoch 585/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.5369 - calc_mre_K: 0.8610 - val_loss: 67.7267 - val_calc_mre_K: 0.8267\n", "Epoch 586/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.4944 - calc_mre_K: 0.8605 - val_loss: 70.8614 - val_calc_mre_K: 0.8650\n", "Epoch 587/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.3224 - calc_mre_K: 0.8584 - val_loss: 71.9959 - val_calc_mre_K: 0.8789\n", "Epoch 588/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.3576 - calc_mre_K: 0.8589 - val_loss: 71.5198 - val_calc_mre_K: 0.8730\n", "Epoch 589/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.1880 - calc_mre_K: 0.8568 - val_loss: 69.5494 - val_calc_mre_K: 0.8490\n", "Epoch 590/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.3841 - calc_mre_K: 0.8592 - val_loss: 68.3484 - val_calc_mre_K: 0.8343\n", "Epoch 591/2000\n", 
"48000/48000 [==============================] - 4s 75us/step - loss: 70.2397 - calc_mre_K: 0.8574 - val_loss: 73.9073 - val_calc_mre_K: 0.9022\n", "Epoch 592/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.4340 - calc_mre_K: 0.8598 - val_loss: 71.0886 - val_calc_mre_K: 0.8678\n", "Epoch 593/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.3556 - calc_mre_K: 0.8588 - val_loss: 71.0264 - val_calc_mre_K: 0.8670\n", "Epoch 594/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 70.2490 - calc_mre_K: 0.8575 - val_loss: 69.1267 - val_calc_mre_K: 0.8438\n", "Epoch 595/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 70.2159 - calc_mre_K: 0.8571 - val_loss: 70.2031 - val_calc_mre_K: 0.8570\n", "Epoch 596/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 70.3318 - calc_mre_K: 0.8585 - val_loss: 68.7837 - val_calc_mre_K: 0.8396\n", "Epoch 597/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 70.3598 - calc_mre_K: 0.8589 - val_loss: 68.8897 - val_calc_mre_K: 0.8409\n", "Epoch 598/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 70.3221 - calc_mre_K: 0.8584 - val_loss: 70.3499 - val_calc_mre_K: 0.8588\n", "Epoch 599/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.4013 - calc_mre_K: 0.8594 - val_loss: 69.4694 - val_calc_mre_K: 0.8480\n", "Epoch 600/2000\n", "48000/48000 [==============================] - 4s 83us/step - loss: 70.2029 - calc_mre_K: 0.8570 - val_loss: 69.3209 - val_calc_mre_K: 0.8462\n", "Epoch 601/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 70.3953 - calc_mre_K: 0.8593 - val_loss: 69.8124 - val_calc_mre_K: 0.8522\n", "Epoch 602/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 70.3767 - calc_mre_K: 0.8591 - val_loss: 71.0799 - val_calc_mre_K: 0.8677\n", "Epoch 
603/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.4239 - calc_mre_K: 0.8597 - val_loss: 69.4428 - val_calc_mre_K: 0.8477\n", "Epoch 604/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 70.5230 - calc_mre_K: 0.8609 - val_loss: 71.4586 - val_calc_mre_K: 0.8723\n", "Epoch 605/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.5018 - calc_mre_K: 0.8606 - val_loss: 71.0205 - val_calc_mre_K: 0.8669\n", "Epoch 606/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.1127 - calc_mre_K: 0.8559 - val_loss: 70.5070 - val_calc_mre_K: 0.8607\n", "Epoch 607/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.1819 - calc_mre_K: 0.8567 - val_loss: 70.3672 - val_calc_mre_K: 0.8590\n", "Epoch 608/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.1076 - calc_mre_K: 0.8558 - val_loss: 70.0926 - val_calc_mre_K: 0.8556\n", "Epoch 609/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.8065 - calc_mre_K: 0.8521 - val_loss: 70.7404 - val_calc_mre_K: 0.8635\n", "Epoch 610/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.2959 - calc_mre_K: 0.8581 - val_loss: 69.6684 - val_calc_mre_K: 0.8504\n", "Epoch 611/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.1652 - calc_mre_K: 0.8565 - val_loss: 71.1667 - val_calc_mre_K: 0.8687\n", "Epoch 612/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 70.2893 - calc_mre_K: 0.8580 - val_loss: 70.7365 - val_calc_mre_K: 0.8635\n", "Epoch 613/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.1270 - calc_mre_K: 0.8560 - val_loss: 67.8302 - val_calc_mre_K: 0.8280\n", "Epoch 614/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.2839 - calc_mre_K: 0.8580 - val_loss: 69.2116 - val_calc_mre_K: 0.8449\n", 
"Epoch 615/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.2978 - calc_mre_K: 0.8581 - val_loss: 69.4019 - val_calc_mre_K: 0.8472\n", "Epoch 616/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.2399 - calc_mre_K: 0.8574 - val_loss: 68.8672 - val_calc_mre_K: 0.8407\n", "Epoch 617/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.1083 - calc_mre_K: 0.8558 - val_loss: 70.1578 - val_calc_mre_K: 0.8564\n", "Epoch 618/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.4034 - calc_mre_K: 0.8594 - val_loss: 73.3787 - val_calc_mre_K: 0.8957\n", "Epoch 619/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.0854 - calc_mre_K: 0.8555 - val_loss: 70.5747 - val_calc_mre_K: 0.8615\n", "Epoch 620/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.0074 - calc_mre_K: 0.8546 - val_loss: 68.6352 - val_calc_mre_K: 0.8378\n", "Epoch 621/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.2138 - calc_mre_K: 0.8571 - val_loss: 71.8645 - val_calc_mre_K: 0.8773\n", "Epoch 622/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 70.3243 - calc_mre_K: 0.8585 - val_loss: 70.2174 - val_calc_mre_K: 0.8571\n", "Epoch 623/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 70.0047 - calc_mre_K: 0.8546 - val_loss: 68.7759 - val_calc_mre_K: 0.8395\n", "Epoch 624/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 69.8688 - calc_mre_K: 0.8529 - val_loss: 70.4431 - val_calc_mre_K: 0.8599\n", "Epoch 625/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 75us/step - loss: 70.0730 - calc_mre_K: 0.8554 - val_loss: 69.8090 - val_calc_mre_K: 0.8522\n", "Epoch 626/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.9742 - 
calc_mre_K: 0.8542 - val_loss: 70.2443 - val_calc_mre_K: 0.8575\n", "Epoch 627/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.9923 - calc_mre_K: 0.8544 - val_loss: 68.7785 - val_calc_mre_K: 0.8396\n", "Epoch 628/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 70.3250 - calc_mre_K: 0.8585 - val_loss: 68.4076 - val_calc_mre_K: 0.8351\n", "Epoch 629/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 70.0047 - calc_mre_K: 0.8546 - val_loss: 70.0301 - val_calc_mre_K: 0.8549\n", "Epoch 630/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 70.0964 - calc_mre_K: 0.8557 - val_loss: 69.4070 - val_calc_mre_K: 0.8473\n", "Epoch 631/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.0194 - calc_mre_K: 0.8547 - val_loss: 69.2311 - val_calc_mre_K: 0.8451\n", "Epoch 632/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.1036 - calc_mre_K: 0.8558 - val_loss: 68.8178 - val_calc_mre_K: 0.8401\n", "Epoch 633/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.8131 - calc_mre_K: 0.8522 - val_loss: 74.5473 - val_calc_mre_K: 0.9100\n", "Epoch 634/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 70.2244 - calc_mre_K: 0.8572 - val_loss: 71.0845 - val_calc_mre_K: 0.8677\n", "Epoch 635/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.8923 - calc_mre_K: 0.8532 - val_loss: 70.3786 - val_calc_mre_K: 0.8591\n", "Epoch 636/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.8122 - calc_mre_K: 0.8522 - val_loss: 69.0226 - val_calc_mre_K: 0.8426\n", "Epoch 637/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 70.1482 - calc_mre_K: 0.8563 - val_loss: 68.0626 - val_calc_mre_K: 0.8308\n", "Epoch 638/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 
69.7493 - calc_mre_K: 0.8514 - val_loss: 70.6639 - val_calc_mre_K: 0.8626\n", "Epoch 639/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.7636 - calc_mre_K: 0.8516 - val_loss: 69.0270 - val_calc_mre_K: 0.8426\n", "Epoch 640/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 70.1987 - calc_mre_K: 0.8569 - val_loss: 70.1003 - val_calc_mre_K: 0.8557\n", "Epoch 641/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.0711 - calc_mre_K: 0.8554 - val_loss: 72.3229 - val_calc_mre_K: 0.8828\n", "Epoch 642/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.8119 - calc_mre_K: 0.8522 - val_loss: 68.3696 - val_calc_mre_K: 0.8346\n", "Epoch 643/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 70.1438 - calc_mre_K: 0.8562 - val_loss: 71.1676 - val_calc_mre_K: 0.8687\n", "Epoch 644/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 69.8718 - calc_mre_K: 0.8529 - val_loss: 67.6489 - val_calc_mre_K: 0.8258\n", "Epoch 645/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 69.7820 - calc_mre_K: 0.8518 - val_loss: 78.4016 - val_calc_mre_K: 0.9571\n", "Epoch 646/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.9278 - calc_mre_K: 0.8536 - val_loss: 72.4437 - val_calc_mre_K: 0.8843\n", "Epoch 647/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 69.6033 - calc_mre_K: 0.8496 - val_loss: 71.3298 - val_calc_mre_K: 0.8707\n", "Epoch 648/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.9849 - calc_mre_K: 0.8543 - val_loss: 68.2391 - val_calc_mre_K: 0.8330\n", "Epoch 649/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.5879 - calc_mre_K: 0.8495 - val_loss: 70.1103 - val_calc_mre_K: 0.8558\n", "Epoch 650/2000\n", "48000/48000 [==============================] - 4s 79us/step - 
loss: 69.9180 - calc_mre_K: 0.8535 - val_loss: 68.7834 - val_calc_mre_K: 0.8396\n", "Epoch 651/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.6801 - calc_mre_K: 0.8506 - val_loss: 69.9046 - val_calc_mre_K: 0.8533\n", "Epoch 652/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 69.7405 - calc_mre_K: 0.8513 - val_loss: 70.0890 - val_calc_mre_K: 0.8556\n", "Epoch 653/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.6737 - calc_mre_K: 0.8505 - val_loss: 78.2063 - val_calc_mre_K: 0.9547\n", "Epoch 654/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.8293 - calc_mre_K: 0.8524 - val_loss: 71.2263 - val_calc_mre_K: 0.8695\n", "Epoch 655/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.7580 - calc_mre_K: 0.8515 - val_loss: 69.2221 - val_calc_mre_K: 0.8450\n", "Epoch 656/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.9003 - calc_mre_K: 0.8533 - val_loss: 67.6017 - val_calc_mre_K: 0.8252\n", "Epoch 657/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.7232 - calc_mre_K: 0.8511 - val_loss: 72.8851 - val_calc_mre_K: 0.8897\n", "Epoch 658/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.6986 - calc_mre_K: 0.8508 - val_loss: 74.5412 - val_calc_mre_K: 0.9099\n", "Epoch 659/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.9202 - calc_mre_K: 0.8535 - val_loss: 70.0884 - val_calc_mre_K: 0.8556\n", "Epoch 660/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.6307 - calc_mre_K: 0.8500 - val_loss: 69.6695 - val_calc_mre_K: 0.8505\n", "Epoch 661/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.5234 - calc_mre_K: 0.8487 - val_loss: 70.9558 - val_calc_mre_K: 0.8662\n", "Epoch 662/2000\n", "48000/48000 [==============================] - 4s 
80us/step - loss: 69.7177 - calc_mre_K: 0.8510 - val_loss: 69.4870 - val_calc_mre_K: 0.8482\n", "Epoch 663/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 69.7810 - calc_mre_K: 0.8518 - val_loss: 72.6494 - val_calc_mre_K: 0.8868\n", "Epoch 664/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.6844 - calc_mre_K: 0.8506 - val_loss: 69.0827 - val_calc_mre_K: 0.8433\n", "Epoch 665/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.6813 - calc_mre_K: 0.8506 - val_loss: 67.9366 - val_calc_mre_K: 0.8293\n", "Epoch 666/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 69.4351 - calc_mre_K: 0.8476 - val_loss: 73.2314 - val_calc_mre_K: 0.8939\n", "Epoch 667/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.8773 - calc_mre_K: 0.8530 - val_loss: 72.7999 - val_calc_mre_K: 0.8887\n", "Epoch 668/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.6842 - calc_mre_K: 0.8506 - val_loss: 68.8829 - val_calc_mre_K: 0.8409\n", "Epoch 669/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.7928 - calc_mre_K: 0.8520 - val_loss: 68.9392 - val_calc_mre_K: 0.8415\n", "Epoch 670/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.6134 - calc_mre_K: 0.8498 - val_loss: 72.2273 - val_calc_mre_K: 0.8817\n", "Epoch 671/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.3944 - calc_mre_K: 0.8471 - val_loss: 69.5759 - val_calc_mre_K: 0.8493\n", "Epoch 672/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.6366 - calc_mre_K: 0.8501 - val_loss: 69.2783 - val_calc_mre_K: 0.8457\n", "Epoch 673/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.8663 - calc_mre_K: 0.8529 - val_loss: 73.3524 - val_calc_mre_K: 0.8954\n", "Epoch 674/2000\n", "48000/48000 [==============================] 
- 4s 81us/step - loss: 69.3330 - calc_mre_K: 0.8463 - val_loss: 69.2134 - val_calc_mre_K: 0.8449\n", "Epoch 675/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.5755 - calc_mre_K: 0.8493 - val_loss: 69.6044 - val_calc_mre_K: 0.8497\n", "Epoch 676/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.6378 - calc_mre_K: 0.8501 - val_loss: 69.0723 - val_calc_mre_K: 0.8432\n", "Epoch 677/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 79us/step - loss: 69.2818 - calc_mre_K: 0.8457 - val_loss: 69.4179 - val_calc_mre_K: 0.8474\n", "Epoch 678/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.5839 - calc_mre_K: 0.8494 - val_loss: 69.3968 - val_calc_mre_K: 0.8471\n", "Epoch 679/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.6357 - calc_mre_K: 0.8500 - val_loss: 68.0484 - val_calc_mre_K: 0.8307\n", "Epoch 680/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.4607 - calc_mre_K: 0.8479 - val_loss: 69.1102 - val_calc_mre_K: 0.8436\n", "Epoch 681/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.4604 - calc_mre_K: 0.8479 - val_loss: 67.9453 - val_calc_mre_K: 0.8294\n", "Epoch 682/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.5922 - calc_mre_K: 0.8495 - val_loss: 72.2929 - val_calc_mre_K: 0.8825\n", "Epoch 683/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.5302 - calc_mre_K: 0.8488 - val_loss: 68.3712 - val_calc_mre_K: 0.8346\n", "Epoch 684/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.4440 - calc_mre_K: 0.8477 - val_loss: 73.0258 - val_calc_mre_K: 0.8914\n", "Epoch 685/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.9621 - calc_mre_K: 0.8540 - val_loss: 71.7226 - val_calc_mre_K: 0.8755\n", 
"Epoch 686/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.5323 - calc_mre_K: 0.8488 - val_loss: 70.7961 - val_calc_mre_K: 0.8642\n", "Epoch 687/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 69.4562 - calc_mre_K: 0.8479 - val_loss: 70.0002 - val_calc_mre_K: 0.8545\n", "Epoch 688/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.4991 - calc_mre_K: 0.8484 - val_loss: 72.4542 - val_calc_mre_K: 0.8845\n", "Epoch 689/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 69.3201 - calc_mre_K: 0.8462 - val_loss: 68.3360 - val_calc_mre_K: 0.8342\n", "Epoch 690/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.6653 - calc_mre_K: 0.8504 - val_loss: 68.0830 - val_calc_mre_K: 0.8311\n", "Epoch 691/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.2372 - calc_mre_K: 0.8452 - val_loss: 72.4232 - val_calc_mre_K: 0.8841\n", "Epoch 692/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.4786 - calc_mre_K: 0.8481 - val_loss: 68.8922 - val_calc_mre_K: 0.8410\n", "Epoch 693/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.4107 - calc_mre_K: 0.8473 - val_loss: 70.2751 - val_calc_mre_K: 0.8578\n", "Epoch 694/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 69.2278 - calc_mre_K: 0.8451 - val_loss: 69.0661 - val_calc_mre_K: 0.8431\n", "Epoch 695/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.3933 - calc_mre_K: 0.8471 - val_loss: 68.1019 - val_calc_mre_K: 0.8313\n", "Epoch 696/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 69.2384 - calc_mre_K: 0.8452 - val_loss: 68.8767 - val_calc_mre_K: 0.8408\n", "Epoch 697/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.3267 - calc_mre_K: 0.8463 - val_loss: 70.2662 - val_calc_mre_K: 
0.8577\n", "Epoch 698/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.5265 - calc_mre_K: 0.8487 - val_loss: 67.8403 - val_calc_mre_K: 0.8281\n", "Epoch 699/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.3141 - calc_mre_K: 0.8461 - val_loss: 68.1901 - val_calc_mre_K: 0.8324\n", "Epoch 700/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.2797 - calc_mre_K: 0.8457 - val_loss: 68.1284 - val_calc_mre_K: 0.8316\n", "Epoch 701/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.3609 - calc_mre_K: 0.8467 - val_loss: 70.6950 - val_calc_mre_K: 0.8630\n", "Epoch 702/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.4120 - calc_mre_K: 0.8473 - val_loss: 70.0694 - val_calc_mre_K: 0.8553\n", "Epoch 703/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.3583 - calc_mre_K: 0.8467 - val_loss: 67.6442 - val_calc_mre_K: 0.8257\n", "Epoch 704/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.2246 - calc_mre_K: 0.8450 - val_loss: 67.9296 - val_calc_mre_K: 0.8292\n", "Epoch 705/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.2431 - calc_mre_K: 0.8453 - val_loss: 67.6842 - val_calc_mre_K: 0.8262\n", "Epoch 706/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.3274 - calc_mre_K: 0.8463 - val_loss: 69.7733 - val_calc_mre_K: 0.8517\n", "Epoch 707/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.4541 - calc_mre_K: 0.8478 - val_loss: 67.7560 - val_calc_mre_K: 0.8271\n", "Epoch 708/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.2379 - calc_mre_K: 0.8452 - val_loss: 69.2229 - val_calc_mre_K: 0.8450\n", "Epoch 709/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.2891 - calc_mre_K: 0.8458 - val_loss: 67.8721 - 
val_calc_mre_K: 0.8285\n", "Epoch 710/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.3878 - calc_mre_K: 0.8470 - val_loss: 67.8545 - val_calc_mre_K: 0.8283\n", "Epoch 711/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.1503 - calc_mre_K: 0.8441 - val_loss: 69.9257 - val_calc_mre_K: 0.8536\n", "Epoch 712/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.3504 - calc_mre_K: 0.8466 - val_loss: 70.0039 - val_calc_mre_K: 0.8545\n", "Epoch 713/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.4744 - calc_mre_K: 0.8481 - val_loss: 70.3127 - val_calc_mre_K: 0.8583\n", "Epoch 714/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.0754 - calc_mre_K: 0.8432 - val_loss: 70.6918 - val_calc_mre_K: 0.8629\n", "Epoch 715/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.3665 - calc_mre_K: 0.8468 - val_loss: 67.8787 - val_calc_mre_K: 0.8286\n", "Epoch 716/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 69.3961 - calc_mre_K: 0.8471 - val_loss: 70.5999 - val_calc_mre_K: 0.8618\n", "Epoch 717/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.4270 - calc_mre_K: 0.8475 - val_loss: 68.6237 - val_calc_mre_K: 0.8377\n", "Epoch 718/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.2770 - calc_mre_K: 0.8457 - val_loss: 72.6423 - val_calc_mre_K: 0.8867\n", "Epoch 719/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.3505 - calc_mre_K: 0.8466 - val_loss: 69.5388 - val_calc_mre_K: 0.8489\n", "Epoch 720/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.0673 - calc_mre_K: 0.8431 - val_loss: 67.1381 - val_calc_mre_K: 0.8196\n", "Epoch 721/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.2099 - calc_mre_K: 0.8448 - val_loss: 
68.4075 - val_calc_mre_K: 0.8351\n", "Epoch 722/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.1636 - calc_mre_K: 0.8443 - val_loss: 71.3106 - val_calc_mre_K: 0.8705\n", "Epoch 723/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.2691 - calc_mre_K: 0.8456 - val_loss: 69.2220 - val_calc_mre_K: 0.8450\n", "Epoch 724/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.0648 - calc_mre_K: 0.8431 - val_loss: 68.8120 - val_calc_mre_K: 0.8400\n", "Epoch 725/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.1140 - calc_mre_K: 0.8437 - val_loss: 71.8202 - val_calc_mre_K: 0.8767\n", "Epoch 726/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.1376 - calc_mre_K: 0.8440 - val_loss: 68.2575 - val_calc_mre_K: 0.8332\n", "Epoch 727/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.0461 - calc_mre_K: 0.8428 - val_loss: 70.4148 - val_calc_mre_K: 0.8596\n", "Epoch 728/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.1061 - calc_mre_K: 0.8436 - val_loss: 69.6207 - val_calc_mre_K: 0.8499\n", "Epoch 729/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 80us/step - loss: 69.1756 - calc_mre_K: 0.8444 - val_loss: 70.7000 - val_calc_mre_K: 0.8630\n", "Epoch 730/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 69.0619 - calc_mre_K: 0.8430 - val_loss: 69.2779 - val_calc_mre_K: 0.8457\n", "Epoch 731/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.9835 - calc_mre_K: 0.8421 - val_loss: 68.4257 - val_calc_mre_K: 0.8353\n", "Epoch 732/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 69.2267 - calc_mre_K: 0.8451 - val_loss: 67.9296 - val_calc_mre_K: 0.8292\n", "Epoch 733/2000\n", "48000/48000 [==============================] 
- 4s 77us/step - loss: 68.9465 - calc_mre_K: 0.8416 - val_loss: 68.5082 - val_calc_mre_K: 0.8363\n", "Epoch 734/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 69.0907 - calc_mre_K: 0.8434 - val_loss: 69.0262 - val_calc_mre_K: 0.8426\n", "Epoch 735/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.2018 - calc_mre_K: 0.8447 - val_loss: 70.0411 - val_calc_mre_K: 0.8550\n", "Epoch 736/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.9686 - calc_mre_K: 0.8419 - val_loss: 71.1394 - val_calc_mre_K: 0.8684\n", "Epoch 737/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.1172 - calc_mre_K: 0.8437 - val_loss: 74.7799 - val_calc_mre_K: 0.9128\n", "Epoch 738/2000\n", "48000/48000 [==============================] - 4s 85us/step - loss: 69.1559 - calc_mre_K: 0.8442 - val_loss: 68.5011 - val_calc_mre_K: 0.8362\n", "Epoch 739/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.9933 - calc_mre_K: 0.8422 - val_loss: 71.2469 - val_calc_mre_K: 0.8697\n", "Epoch 740/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.1222 - calc_mre_K: 0.8438 - val_loss: 69.8653 - val_calc_mre_K: 0.8528\n", "Epoch 741/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.9705 - calc_mre_K: 0.8419 - val_loss: 69.0196 - val_calc_mre_K: 0.8425\n", "Epoch 742/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.8966 - calc_mre_K: 0.8410 - val_loss: 67.7050 - val_calc_mre_K: 0.8265\n", "Epoch 743/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.9181 - calc_mre_K: 0.8413 - val_loss: 68.3293 - val_calc_mre_K: 0.8341\n", "Epoch 744/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.0103 - calc_mre_K: 0.8424 - val_loss: 67.7903 - val_calc_mre_K: 0.8275\n", "Epoch 745/2000\n", "48000/48000 
[==============================] - 4s 79us/step - loss: 69.2181 - calc_mre_K: 0.8449 - val_loss: 67.7246 - val_calc_mre_K: 0.8267\n", "Epoch 746/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.0438 - calc_mre_K: 0.8428 - val_loss: 66.8368 - val_calc_mre_K: 0.8159\n", "Epoch 747/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.9325 - calc_mre_K: 0.8415 - val_loss: 67.5861 - val_calc_mre_K: 0.8250\n", "Epoch 748/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.2127 - calc_mre_K: 0.8449 - val_loss: 70.7625 - val_calc_mre_K: 0.8638\n", "Epoch 749/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 69.1926 - calc_mre_K: 0.8446 - val_loss: 69.5069 - val_calc_mre_K: 0.8485\n", "Epoch 750/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.9409 - calc_mre_K: 0.8416 - val_loss: 70.8891 - val_calc_mre_K: 0.8653\n", "Epoch 751/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.8819 - calc_mre_K: 0.8408 - val_loss: 68.6977 - val_calc_mre_K: 0.8386\n", "Epoch 752/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.0421 - calc_mre_K: 0.8428 - val_loss: 68.5416 - val_calc_mre_K: 0.8367\n", "Epoch 753/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.9247 - calc_mre_K: 0.8414 - val_loss: 69.5934 - val_calc_mre_K: 0.8495\n", "Epoch 754/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.7256 - calc_mre_K: 0.8389 - val_loss: 71.4210 - val_calc_mre_K: 0.8718\n", "Epoch 755/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 68.9008 - calc_mre_K: 0.8411 - val_loss: 68.1938 - val_calc_mre_K: 0.8324\n", "Epoch 756/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 69.0737 - calc_mre_K: 0.8432 - val_loss: 68.6847 - val_calc_mre_K: 0.8384\n", "Epoch 757/2000\n", 
"48000/48000 [==============================] - 4s 79us/step - loss: 69.0036 - calc_mre_K: 0.8423 - val_loss: 68.8016 - val_calc_mre_K: 0.8399\n", "Epoch 758/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.8299 - calc_mre_K: 0.8402 - val_loss: 68.9793 - val_calc_mre_K: 0.8420\n", "Epoch 759/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.8170 - calc_mre_K: 0.8401 - val_loss: 71.5030 - val_calc_mre_K: 0.8728\n", "Epoch 760/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.7948 - calc_mre_K: 0.8398 - val_loss: 68.5820 - val_calc_mre_K: 0.8372\n", "Epoch 761/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.9168 - calc_mre_K: 0.8413 - val_loss: 68.1370 - val_calc_mre_K: 0.8318\n", "Epoch 762/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.7208 - calc_mre_K: 0.8389 - val_loss: 68.3701 - val_calc_mre_K: 0.8346\n", "Epoch 763/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.9311 - calc_mre_K: 0.8414 - val_loss: 68.8343 - val_calc_mre_K: 0.8403\n", "Epoch 764/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.1299 - calc_mre_K: 0.8439 - val_loss: 67.1612 - val_calc_mre_K: 0.8198\n", "Epoch 765/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.0102 - calc_mre_K: 0.8424 - val_loss: 73.7914 - val_calc_mre_K: 0.9008\n", "Epoch 766/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.8678 - calc_mre_K: 0.8407 - val_loss: 68.5865 - val_calc_mre_K: 0.8372\n", "Epoch 767/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.7972 - calc_mre_K: 0.8398 - val_loss: 76.7060 - val_calc_mre_K: 0.9364\n", "Epoch 768/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.9634 - calc_mre_K: 0.8418 - val_loss: 68.3220 - val_calc_mre_K: 0.8340\n", "Epoch 
769/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.9155 - calc_mre_K: 0.8413 - val_loss: 69.2418 - val_calc_mre_K: 0.8452\n", "Epoch 770/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 69.0029 - calc_mre_K: 0.8423 - val_loss: 68.7127 - val_calc_mre_K: 0.8388\n", "Epoch 771/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.7106 - calc_mre_K: 0.8388 - val_loss: 68.5919 - val_calc_mre_K: 0.8373\n", "Epoch 772/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 68.8279 - calc_mre_K: 0.8402 - val_loss: 66.6861 - val_calc_mre_K: 0.8140\n", "Epoch 773/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.7920 - calc_mre_K: 0.8397 - val_loss: 67.3218 - val_calc_mre_K: 0.8218\n", "Epoch 774/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.7787 - calc_mre_K: 0.8396 - val_loss: 68.6517 - val_calc_mre_K: 0.8380\n", "Epoch 775/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.8109 - calc_mre_K: 0.8400 - val_loss: 69.2689 - val_calc_mre_K: 0.8456\n", "Epoch 776/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.8242 - calc_mre_K: 0.8401 - val_loss: 71.1534 - val_calc_mre_K: 0.8686\n", "Epoch 777/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.9064 - calc_mre_K: 0.8411 - val_loss: 68.0815 - val_calc_mre_K: 0.8311\n", "Epoch 778/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 69.0163 - calc_mre_K: 0.8425 - val_loss: 68.6864 - val_calc_mre_K: 0.8385\n", "Epoch 779/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.7570 - calc_mre_K: 0.8393 - val_loss: 69.5176 - val_calc_mre_K: 0.8486\n", "Epoch 780/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.5694 - calc_mre_K: 0.8370 - val_loss: 67.6373 - val_calc_mre_K: 0.8257\n", 
"Epoch 781/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 79us/step - loss: 68.9212 - calc_mre_K: 0.8413 - val_loss: 69.5418 - val_calc_mre_K: 0.8489\n", "Epoch 782/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.6100 - calc_mre_K: 0.8375 - val_loss: 69.3023 - val_calc_mre_K: 0.8460\n", "Epoch 783/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.7570 - calc_mre_K: 0.8393 - val_loss: 71.9419 - val_calc_mre_K: 0.8782\n", "Epoch 784/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.8150 - calc_mre_K: 0.8400 - val_loss: 69.4250 - val_calc_mre_K: 0.8475\n", "Epoch 785/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.8167 - calc_mre_K: 0.8400 - val_loss: 67.8022 - val_calc_mre_K: 0.8277\n", "Epoch 786/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.7118 - calc_mre_K: 0.8388 - val_loss: 67.8021 - val_calc_mre_K: 0.8277\n", "Epoch 787/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.4922 - calc_mre_K: 0.8361 - val_loss: 71.6764 - val_calc_mre_K: 0.8750\n", "Epoch 788/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.7292 - calc_mre_K: 0.8390 - val_loss: 70.0593 - val_calc_mre_K: 0.8552\n", "Epoch 789/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.7321 - calc_mre_K: 0.8390 - val_loss: 68.5029 - val_calc_mre_K: 0.8362\n", "Epoch 790/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.8233 - calc_mre_K: 0.8401 - val_loss: 69.7151 - val_calc_mre_K: 0.8510\n", "Epoch 791/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.6159 - calc_mre_K: 0.8376 - val_loss: 69.2040 - val_calc_mre_K: 0.8448\n", "Epoch 792/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.8581 - 
calc_mre_K: 0.8406 - val_loss: 68.4430 - val_calc_mre_K: 0.8355\n", "Epoch 793/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.5488 - calc_mre_K: 0.8368 - val_loss: 67.6099 - val_calc_mre_K: 0.8253\n", "Epoch 794/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.5246 - calc_mre_K: 0.8365 - val_loss: 68.4849 - val_calc_mre_K: 0.8360\n", "Epoch 795/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.7169 - calc_mre_K: 0.8388 - val_loss: 68.7268 - val_calc_mre_K: 0.8389\n", "Epoch 796/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.7867 - calc_mre_K: 0.8397 - val_loss: 69.0646 - val_calc_mre_K: 0.8431\n", "Epoch 797/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.5887 - calc_mre_K: 0.8373 - val_loss: 69.2649 - val_calc_mre_K: 0.8455\n", "Epoch 798/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.7431 - calc_mre_K: 0.8391 - val_loss: 68.6155 - val_calc_mre_K: 0.8376\n", "Epoch 799/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.6733 - calc_mre_K: 0.8383 - val_loss: 71.3394 - val_calc_mre_K: 0.8708\n", "Epoch 800/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.5133 - calc_mre_K: 0.8363 - val_loss: 67.9055 - val_calc_mre_K: 0.8289\n", "Epoch 801/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.4395 - calc_mre_K: 0.8354 - val_loss: 66.8839 - val_calc_mre_K: 0.8165\n", "Epoch 802/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.6155 - calc_mre_K: 0.8376 - val_loss: 69.0790 - val_calc_mre_K: 0.8432\n", "Epoch 803/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.8768 - calc_mre_K: 0.8408 - val_loss: 69.3233 - val_calc_mre_K: 0.8462\n", "Epoch 804/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 
68.4629 - calc_mre_K: 0.8357 - val_loss: 68.9061 - val_calc_mre_K: 0.8411\n", "Epoch 805/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 68.5194 - calc_mre_K: 0.8364 - val_loss: 68.3931 - val_calc_mre_K: 0.8349\n", "Epoch 806/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.6736 - calc_mre_K: 0.8383 - val_loss: 66.8958 - val_calc_mre_K: 0.8166\n", "Epoch 807/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 68.3933 - calc_mre_K: 0.8349 - val_loss: 69.4767 - val_calc_mre_K: 0.8481\n", "Epoch 808/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.8431 - calc_mre_K: 0.8404 - val_loss: 67.2638 - val_calc_mre_K: 0.8211\n", "Epoch 809/2000\n", "48000/48000 [==============================] - 4s 84us/step - loss: 68.6539 - calc_mre_K: 0.8381 - val_loss: 68.1106 - val_calc_mre_K: 0.8314\n", "Epoch 810/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.5867 - calc_mre_K: 0.8372 - val_loss: 69.9081 - val_calc_mre_K: 0.8534\n", "Epoch 811/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.7751 - calc_mre_K: 0.8395 - val_loss: 69.1331 - val_calc_mre_K: 0.8439\n", "Epoch 812/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.6936 - calc_mre_K: 0.8385 - val_loss: 73.8612 - val_calc_mre_K: 0.9016\n", "Epoch 813/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.6228 - calc_mre_K: 0.8377 - val_loss: 70.2296 - val_calc_mre_K: 0.8573\n", "Epoch 814/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.4381 - calc_mre_K: 0.8354 - val_loss: 70.8973 - val_calc_mre_K: 0.8654\n", "Epoch 815/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.4559 - calc_mre_K: 0.8356 - val_loss: 68.7059 - val_calc_mre_K: 0.8387\n", "Epoch 816/2000\n", "48000/48000 [==============================] - 4s 80us/step - 
loss: 68.6479 - calc_mre_K: 0.8380 - val_loss: 69.5440 - val_calc_mre_K: 0.8489\n", "Epoch 817/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.5340 - calc_mre_K: 0.8366 - val_loss: 69.8927 - val_calc_mre_K: 0.8532\n", "Epoch 818/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.6306 - calc_mre_K: 0.8378 - val_loss: 66.6731 - val_calc_mre_K: 0.8139\n", "Epoch 819/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.3543 - calc_mre_K: 0.8344 - val_loss: 66.0938 - val_calc_mre_K: 0.8068\n", "Epoch 820/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.6342 - calc_mre_K: 0.8378 - val_loss: 69.4982 - val_calc_mre_K: 0.8484\n", "Epoch 821/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.2048 - calc_mre_K: 0.8326 - val_loss: 67.4336 - val_calc_mre_K: 0.8232\n", "Epoch 822/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.4734 - calc_mre_K: 0.8359 - val_loss: 68.6486 - val_calc_mre_K: 0.8380\n", "Epoch 823/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.2736 - calc_mre_K: 0.8334 - val_loss: 68.5129 - val_calc_mre_K: 0.8363\n", "Epoch 824/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.3993 - calc_mre_K: 0.8350 - val_loss: 68.3882 - val_calc_mre_K: 0.8348\n", "Epoch 825/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.4992 - calc_mre_K: 0.8362 - val_loss: 68.1698 - val_calc_mre_K: 0.8322\n", "Epoch 826/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 68.3371 - calc_mre_K: 0.8342 - val_loss: 67.7058 - val_calc_mre_K: 0.8265\n", "Epoch 827/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.4223 - calc_mre_K: 0.8352 - val_loss: 68.5852 - val_calc_mre_K: 0.8372\n", "Epoch 828/2000\n", "48000/48000 [==============================] - 4s 
79us/step - loss: 68.3879 - calc_mre_K: 0.8348 - val_loss: 77.4614 - val_calc_mre_K: 0.9456\n", "Epoch 829/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.5528 - calc_mre_K: 0.8368 - val_loss: 67.5584 - val_calc_mre_K: 0.8247\n", "Epoch 830/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.4371 - calc_mre_K: 0.8354 - val_loss: 68.6295 - val_calc_mre_K: 0.8378\n", "Epoch 831/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.3759 - calc_mre_K: 0.8347 - val_loss: 69.9908 - val_calc_mre_K: 0.8544\n", "Epoch 832/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.2952 - calc_mre_K: 0.8337 - val_loss: 70.4606 - val_calc_mre_K: 0.8601\n", "Epoch 833/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 80us/step - loss: 68.5398 - calc_mre_K: 0.8367 - val_loss: 66.6366 - val_calc_mre_K: 0.8134\n", "Epoch 834/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 68.4766 - calc_mre_K: 0.8359 - val_loss: 68.7908 - val_calc_mre_K: 0.8397\n", "Epoch 835/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 68.3508 - calc_mre_K: 0.8344 - val_loss: 68.6467 - val_calc_mre_K: 0.8380\n", "Epoch 836/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.4523 - calc_mre_K: 0.8356 - val_loss: 68.0088 - val_calc_mre_K: 0.8302\n", "Epoch 837/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.4229 - calc_mre_K: 0.8352 - val_loss: 69.3048 - val_calc_mre_K: 0.8460\n", "Epoch 838/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.3646 - calc_mre_K: 0.8345 - val_loss: 67.1027 - val_calc_mre_K: 0.8191\n", "Epoch 839/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.4714 - calc_mre_K: 0.8358 - val_loss: 68.2559 - val_calc_mre_K: 0.8332\n", "Epoch 
840/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.2412 - calc_mre_K: 0.8330 - val_loss: 69.8319 - val_calc_mre_K: 0.8524\n", "Epoch 841/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 68.4044 - calc_mre_K: 0.8350 - val_loss: 68.4843 - val_calc_mre_K: 0.8360\n", "Epoch 842/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.2026 - calc_mre_K: 0.8326 - val_loss: 70.3405 - val_calc_mre_K: 0.8586\n", "Epoch 843/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.4064 - calc_mre_K: 0.8350 - val_loss: 67.8525 - val_calc_mre_K: 0.8283\n", "Epoch 844/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.4638 - calc_mre_K: 0.8357 - val_loss: 68.9357 - val_calc_mre_K: 0.8415\n", "Epoch 845/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.3168 - calc_mre_K: 0.8339 - val_loss: 68.7361 - val_calc_mre_K: 0.8391\n", "Epoch 846/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 68.0951 - calc_mre_K: 0.8312 - val_loss: 69.2471 - val_calc_mre_K: 0.8453\n", "Epoch 847/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 68.4191 - calc_mre_K: 0.8352 - val_loss: 67.2705 - val_calc_mre_K: 0.8212\n", "Epoch 848/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.1499 - calc_mre_K: 0.8319 - val_loss: 66.8058 - val_calc_mre_K: 0.8155\n", "Epoch 849/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.4239 - calc_mre_K: 0.8353 - val_loss: 68.0605 - val_calc_mre_K: 0.8308\n", "Epoch 850/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.4169 - calc_mre_K: 0.8352 - val_loss: 67.5252 - val_calc_mre_K: 0.8243\n", "Epoch 851/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.2858 - calc_mre_K: 0.8336 - val_loss: 67.9402 - val_calc_mre_K: 0.8293\n", 
"Epoch 852/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.6239 - calc_mre_K: 0.8377 - val_loss: 68.8023 - val_calc_mre_K: 0.8399\n", "Epoch 853/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.1857 - calc_mre_K: 0.8323 - val_loss: 68.9759 - val_calc_mre_K: 0.8420\n", "Epoch 854/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.3673 - calc_mre_K: 0.8346 - val_loss: 68.1198 - val_calc_mre_K: 0.8315\n", "Epoch 855/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.0846 - calc_mre_K: 0.8311 - val_loss: 71.8013 - val_calc_mre_K: 0.8765\n", "Epoch 856/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.1923 - calc_mre_K: 0.8324 - val_loss: 74.3052 - val_calc_mre_K: 0.9070\n", "Epoch 857/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 68.4464 - calc_mre_K: 0.8355 - val_loss: 68.1029 - val_calc_mre_K: 0.8313\n", "Epoch 858/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.1307 - calc_mre_K: 0.8317 - val_loss: 68.4922 - val_calc_mre_K: 0.8361\n", "Epoch 859/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.1380 - calc_mre_K: 0.8318 - val_loss: 66.7273 - val_calc_mre_K: 0.8145\n", "Epoch 860/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.1918 - calc_mre_K: 0.8324 - val_loss: 67.1910 - val_calc_mre_K: 0.8202\n", "Epoch 861/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.2350 - calc_mre_K: 0.8329 - val_loss: 68.9710 - val_calc_mre_K: 0.8419\n", "Epoch 862/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.3443 - calc_mre_K: 0.8343 - val_loss: 69.4112 - val_calc_mre_K: 0.8473\n", "Epoch 863/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.2634 - calc_mre_K: 0.8333 - val_loss: 69.5201 - val_calc_mre_K: 
0.8486\n", "Epoch 864/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.3265 - calc_mre_K: 0.8341 - val_loss: 68.5585 - val_calc_mre_K: 0.8369\n", "Epoch 865/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.1998 - calc_mre_K: 0.8325 - val_loss: 67.0747 - val_calc_mre_K: 0.8188\n", "Epoch 866/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.2510 - calc_mre_K: 0.8331 - val_loss: 69.8756 - val_calc_mre_K: 0.8530\n", "Epoch 867/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.4511 - calc_mre_K: 0.8356 - val_loss: 66.2117 - val_calc_mre_K: 0.8082\n", "Epoch 868/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.2175 - calc_mre_K: 0.8327 - val_loss: 71.9284 - val_calc_mre_K: 0.8780\n", "Epoch 869/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.3253 - calc_mre_K: 0.8340 - val_loss: 67.4170 - val_calc_mre_K: 0.8230\n", "Epoch 870/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.1639 - calc_mre_K: 0.8321 - val_loss: 67.7378 - val_calc_mre_K: 0.8269\n", "Epoch 871/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.3129 - calc_mre_K: 0.8339 - val_loss: 67.4871 - val_calc_mre_K: 0.8238\n", "Epoch 872/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.2234 - calc_mre_K: 0.8328 - val_loss: 67.1580 - val_calc_mre_K: 0.8198\n", "Epoch 873/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.1798 - calc_mre_K: 0.8323 - val_loss: 67.5953 - val_calc_mre_K: 0.8251\n", "Epoch 874/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.3908 - calc_mre_K: 0.8348 - val_loss: 70.1672 - val_calc_mre_K: 0.8565\n", "Epoch 875/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.2021 - calc_mre_K: 0.8325 - val_loss: 68.7908 - 
val_calc_mre_K: 0.8397\n", "Epoch 876/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.1299 - calc_mre_K: 0.8317 - val_loss: 70.1003 - val_calc_mre_K: 0.8557\n", "Epoch 877/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.3112 - calc_mre_K: 0.8339 - val_loss: 71.2577 - val_calc_mre_K: 0.8698\n", "Epoch 878/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.1528 - calc_mre_K: 0.8319 - val_loss: 69.1082 - val_calc_mre_K: 0.8436\n", "Epoch 879/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.1438 - calc_mre_K: 0.8318 - val_loss: 68.2988 - val_calc_mre_K: 0.8337\n", "Epoch 880/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.3000 - calc_mre_K: 0.8337 - val_loss: 69.9950 - val_calc_mre_K: 0.8544\n", "Epoch 881/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.9770 - calc_mre_K: 0.8298 - val_loss: 67.9169 - val_calc_mre_K: 0.8291\n", "Epoch 882/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.4965 - calc_mre_K: 0.8361 - val_loss: 67.6147 - val_calc_mre_K: 0.8254\n", "Epoch 883/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.2638 - calc_mre_K: 0.8333 - val_loss: 70.4720 - val_calc_mre_K: 0.8603\n", "Epoch 884/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.1279 - calc_mre_K: 0.8316 - val_loss: 67.8601 - val_calc_mre_K: 0.8284\n", "Epoch 885/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 77us/step - loss: 68.2687 - calc_mre_K: 0.8334 - val_loss: 67.8355 - val_calc_mre_K: 0.8281\n", "Epoch 886/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.0514 - calc_mre_K: 0.8307 - val_loss: 68.8392 - val_calc_mre_K: 0.8403\n", "Epoch 887/2000\n", "48000/48000 [==============================] - 4s 
75us/step - loss: 68.1054 - calc_mre_K: 0.8314 - val_loss: 69.6213 - val_calc_mre_K: 0.8499\n", "Epoch 888/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.2236 - calc_mre_K: 0.8328 - val_loss: 67.8345 - val_calc_mre_K: 0.8281\n", "Epoch 889/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.1612 - calc_mre_K: 0.8320 - val_loss: 67.1319 - val_calc_mre_K: 0.8195\n", "Epoch 890/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.2292 - calc_mre_K: 0.8329 - val_loss: 69.0708 - val_calc_mre_K: 0.8431\n", "Epoch 891/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.2077 - calc_mre_K: 0.8326 - val_loss: 74.1364 - val_calc_mre_K: 0.9050\n", "Epoch 892/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.4298 - calc_mre_K: 0.8353 - val_loss: 67.7942 - val_calc_mre_K: 0.8276\n", "Epoch 893/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9460 - calc_mre_K: 0.8294 - val_loss: 69.6949 - val_calc_mre_K: 0.8508\n", "Epoch 894/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.2078 - calc_mre_K: 0.8326 - val_loss: 67.4425 - val_calc_mre_K: 0.8233\n", "Epoch 895/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.8772 - calc_mre_K: 0.8286 - val_loss: 67.7242 - val_calc_mre_K: 0.8267\n", "Epoch 896/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.3836 - calc_mre_K: 0.8348 - val_loss: 66.2782 - val_calc_mre_K: 0.8091\n", "Epoch 897/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9549 - calc_mre_K: 0.8295 - val_loss: 70.7167 - val_calc_mre_K: 0.8632\n", "Epoch 898/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.0500 - calc_mre_K: 0.8307 - val_loss: 69.1171 - val_calc_mre_K: 0.8437\n", "Epoch 899/2000\n", "48000/48000 [==============================] 
- 4s 79us/step - loss: 67.9123 - calc_mre_K: 0.8290 - val_loss: 68.5501 - val_calc_mre_K: 0.8368\n", "Epoch 900/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.3194 - calc_mre_K: 0.8340 - val_loss: 66.3617 - val_calc_mre_K: 0.8101\n", "Epoch 901/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9869 - calc_mre_K: 0.8299 - val_loss: 67.9644 - val_calc_mre_K: 0.8296\n", "Epoch 902/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.1701 - calc_mre_K: 0.8322 - val_loss: 69.5949 - val_calc_mre_K: 0.8495\n", "Epoch 903/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.0749 - calc_mre_K: 0.8310 - val_loss: 68.2815 - val_calc_mre_K: 0.8335\n", "Epoch 904/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9815 - calc_mre_K: 0.8299 - val_loss: 67.8516 - val_calc_mre_K: 0.8283\n", "Epoch 905/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.1968 - calc_mre_K: 0.8325 - val_loss: 67.0266 - val_calc_mre_K: 0.8182\n", "Epoch 906/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 68.0736 - calc_mre_K: 0.8310 - val_loss: 68.4466 - val_calc_mre_K: 0.8355\n", "Epoch 907/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9781 - calc_mre_K: 0.8298 - val_loss: 65.6438 - val_calc_mre_K: 0.8013\n", "Epoch 908/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 68.0792 - calc_mre_K: 0.8310 - val_loss: 71.3924 - val_calc_mre_K: 0.8715\n", "Epoch 909/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.1403 - calc_mre_K: 0.8318 - val_loss: 67.7847 - val_calc_mre_K: 0.8274\n", "Epoch 910/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.0976 - calc_mre_K: 0.8313 - val_loss: 69.9777 - val_calc_mre_K: 0.8542\n", "Epoch 911/2000\n", "48000/48000 
[==============================] - 4s 76us/step - loss: 67.9035 - calc_mre_K: 0.8289 - val_loss: 66.3798 - val_calc_mre_K: 0.8103\n", "Epoch 912/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 67.8612 - calc_mre_K: 0.8284 - val_loss: 67.5149 - val_calc_mre_K: 0.8242\n", "Epoch 913/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.2535 - calc_mre_K: 0.8332 - val_loss: 67.6925 - val_calc_mre_K: 0.8263\n", "Epoch 914/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.0623 - calc_mre_K: 0.8308 - val_loss: 67.1734 - val_calc_mre_K: 0.8200\n", "Epoch 915/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 67.9159 - calc_mre_K: 0.8291 - val_loss: 69.7438 - val_calc_mre_K: 0.8514\n", "Epoch 916/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 67.9651 - calc_mre_K: 0.8297 - val_loss: 68.6269 - val_calc_mre_K: 0.8377\n", "Epoch 917/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.1347 - calc_mre_K: 0.8317 - val_loss: 68.4802 - val_calc_mre_K: 0.8359\n", "Epoch 918/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.0057 - calc_mre_K: 0.8301 - val_loss: 70.0680 - val_calc_mre_K: 0.8553\n", "Epoch 919/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.0942 - calc_mre_K: 0.8312 - val_loss: 68.7089 - val_calc_mre_K: 0.8387\n", "Epoch 920/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.3898 - calc_mre_K: 0.8348 - val_loss: 67.7226 - val_calc_mre_K: 0.8267\n", "Epoch 921/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.9239 - calc_mre_K: 0.8291 - val_loss: 67.7315 - val_calc_mre_K: 0.8268\n", "Epoch 922/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.0772 - calc_mre_K: 0.8310 - val_loss: 66.5846 - val_calc_mre_K: 0.8128\n", "Epoch 923/2000\n", 
"48000/48000 [==============================] - 4s 77us/step - loss: 67.9986 - calc_mre_K: 0.8301 - val_loss: 66.9149 - val_calc_mre_K: 0.8168\n", "Epoch 924/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.9169 - calc_mre_K: 0.8291 - val_loss: 66.3030 - val_calc_mre_K: 0.8094\n", "Epoch 925/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.0668 - calc_mre_K: 0.8309 - val_loss: 67.1639 - val_calc_mre_K: 0.8199\n", "Epoch 926/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 67.8999 - calc_mre_K: 0.8289 - val_loss: 67.3902 - val_calc_mre_K: 0.8226\n", "Epoch 927/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.0971 - calc_mre_K: 0.8313 - val_loss: 67.2550 - val_calc_mre_K: 0.8210\n", "Epoch 928/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.0309 - calc_mre_K: 0.8305 - val_loss: 67.2433 - val_calc_mre_K: 0.8208\n", "Epoch 929/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.9259 - calc_mre_K: 0.8292 - val_loss: 66.1477 - val_calc_mre_K: 0.8075\n", "Epoch 930/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.0206 - calc_mre_K: 0.8303 - val_loss: 68.3151 - val_calc_mre_K: 0.8339\n", "Epoch 931/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.0087 - calc_mre_K: 0.8302 - val_loss: 67.5882 - val_calc_mre_K: 0.8251\n", "Epoch 932/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.9815 - calc_mre_K: 0.8299 - val_loss: 67.7168 - val_calc_mre_K: 0.8266\n", "Epoch 933/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.0067 - calc_mre_K: 0.8302 - val_loss: 74.5786 - val_calc_mre_K: 0.9104\n", "Epoch 934/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 68.0691 - calc_mre_K: 0.8309 - val_loss: 69.4910 - val_calc_mre_K: 0.8483\n", "Epoch 
935/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.9634 - calc_mre_K: 0.8296 - val_loss: 67.9464 - val_calc_mre_K: 0.8294\n", "Epoch 936/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.7758 - calc_mre_K: 0.8273 - val_loss: 67.5622 - val_calc_mre_K: 0.8247\n", "Epoch 937/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 76us/step - loss: 67.7235 - calc_mre_K: 0.8267 - val_loss: 73.2562 - val_calc_mre_K: 0.8942\n", "Epoch 938/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.3821 - calc_mre_K: 0.8347 - val_loss: 68.2266 - val_calc_mre_K: 0.8328\n", "Epoch 939/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.7575 - calc_mre_K: 0.8271 - val_loss: 67.6874 - val_calc_mre_K: 0.8263\n", "Epoch 940/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.8212 - calc_mre_K: 0.8279 - val_loss: 71.4814 - val_calc_mre_K: 0.8726\n", "Epoch 941/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 67.9176 - calc_mre_K: 0.8291 - val_loss: 68.8167 - val_calc_mre_K: 0.8400\n", "Epoch 942/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 68.0088 - calc_mre_K: 0.8302 - val_loss: 68.0682 - val_calc_mre_K: 0.8309\n", "Epoch 943/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 67.9975 - calc_mre_K: 0.8300 - val_loss: 66.5679 - val_calc_mre_K: 0.8126\n", "Epoch 944/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.7820 - calc_mre_K: 0.8274 - val_loss: 66.9795 - val_calc_mre_K: 0.8176\n", "Epoch 945/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.0451 - calc_mre_K: 0.8306 - val_loss: 69.4126 - val_calc_mre_K: 0.8473\n", "Epoch 946/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 68.1943 - calc_mre_K: 
0.8325 - val_loss: 66.9556 - val_calc_mre_K: 0.8173\n", "Epoch 947/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9321 - calc_mre_K: 0.8292 - val_loss: 69.7556 - val_calc_mre_K: 0.8515\n", "Epoch 948/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.8298 - calc_mre_K: 0.8280 - val_loss: 66.0744 - val_calc_mre_K: 0.8066\n", "Epoch 949/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9006 - calc_mre_K: 0.8289 - val_loss: 67.7481 - val_calc_mre_K: 0.8270\n", "Epoch 950/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9724 - calc_mre_K: 0.8297 - val_loss: 70.2267 - val_calc_mre_K: 0.8573\n", "Epoch 951/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.7152 - calc_mre_K: 0.8266 - val_loss: 67.7343 - val_calc_mre_K: 0.8268\n", "Epoch 952/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.8857 - calc_mre_K: 0.8287 - val_loss: 66.2480 - val_calc_mre_K: 0.8087\n", "Epoch 953/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 67.9190 - calc_mre_K: 0.8291 - val_loss: 67.7751 - val_calc_mre_K: 0.8273\n", "Epoch 954/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.9596 - calc_mre_K: 0.8296 - val_loss: 66.1946 - val_calc_mre_K: 0.8080\n", "Epoch 955/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.8605 - calc_mre_K: 0.8284 - val_loss: 67.5340 - val_calc_mre_K: 0.8244\n", "Epoch 956/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 68.1696 - calc_mre_K: 0.8321 - val_loss: 68.2885 - val_calc_mre_K: 0.8336\n", "Epoch 957/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.5169 - calc_mre_K: 0.8242 - val_loss: 67.7486 - val_calc_mre_K: 0.8270\n", "Epoch 958/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.9395 - 
calc_mre_K: 0.8293 - val_loss: 67.1106 - val_calc_mre_K: 0.8192\n", "Epoch 959/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.7574 - calc_mre_K: 0.8271 - val_loss: 67.1456 - val_calc_mre_K: 0.8196\n", "Epoch 960/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.7893 - calc_mre_K: 0.8275 - val_loss: 65.8408 - val_calc_mre_K: 0.8037\n", "Epoch 961/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.8319 - calc_mre_K: 0.8280 - val_loss: 69.3742 - val_calc_mre_K: 0.8469\n", "Epoch 962/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.8373 - calc_mre_K: 0.8281 - val_loss: 67.9928 - val_calc_mre_K: 0.8300\n", "Epoch 963/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.8701 - calc_mre_K: 0.8285 - val_loss: 67.9147 - val_calc_mre_K: 0.8290\n", "Epoch 964/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.8389 - calc_mre_K: 0.8281 - val_loss: 68.1027 - val_calc_mre_K: 0.8313\n", "Epoch 965/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.7876 - calc_mre_K: 0.8275 - val_loss: 69.0729 - val_calc_mre_K: 0.8432\n", "Epoch 966/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 67.9486 - calc_mre_K: 0.8295 - val_loss: 72.4812 - val_calc_mre_K: 0.8848\n", "Epoch 967/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.5851 - calc_mre_K: 0.8250 - val_loss: 67.8244 - val_calc_mre_K: 0.8279\n", "Epoch 968/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.7436 - calc_mre_K: 0.8269 - val_loss: 66.8648 - val_calc_mre_K: 0.8162\n", "Epoch 969/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.6908 - calc_mre_K: 0.8263 - val_loss: 68.5925 - val_calc_mre_K: 0.8373\n", "Epoch 970/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 
67.7036 - calc_mre_K: 0.8265 - val_loss: 68.6407 - val_calc_mre_K: 0.8379\n", "Epoch 971/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.7673 - calc_mre_K: 0.8272 - val_loss: 68.3297 - val_calc_mre_K: 0.8341\n", "Epoch 972/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.6446 - calc_mre_K: 0.8257 - val_loss: 68.0294 - val_calc_mre_K: 0.8304\n", "Epoch 973/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 67.7515 - calc_mre_K: 0.8270 - val_loss: 68.1451 - val_calc_mre_K: 0.8318\n", "Epoch 974/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.6687 - calc_mre_K: 0.8260 - val_loss: 69.8613 - val_calc_mre_K: 0.8528\n", "Epoch 975/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9650 - calc_mre_K: 0.8297 - val_loss: 66.5351 - val_calc_mre_K: 0.8122\n", "Epoch 976/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.6142 - calc_mre_K: 0.8254 - val_loss: 67.1462 - val_calc_mre_K: 0.8197\n", "Epoch 977/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9246 - calc_mre_K: 0.8292 - val_loss: 66.7573 - val_calc_mre_K: 0.8149\n", "Epoch 978/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.5137 - calc_mre_K: 0.8241 - val_loss: 66.1574 - val_calc_mre_K: 0.8076\n", "Epoch 979/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.7195 - calc_mre_K: 0.8267 - val_loss: 66.5957 - val_calc_mre_K: 0.8129\n", "Epoch 980/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.7729 - calc_mre_K: 0.8273 - val_loss: 71.1284 - val_calc_mre_K: 0.8683\n", "Epoch 981/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.9187 - calc_mre_K: 0.8291 - val_loss: 67.1156 - val_calc_mre_K: 0.8193\n", "Epoch 982/2000\n", "48000/48000 [==============================] - 4s 76us/step - 
loss: 67.7819 - calc_mre_K: 0.8274 - val_loss: 67.4253 - val_calc_mre_K: 0.8231\n", "Epoch 983/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.5481 - calc_mre_K: 0.8246 - val_loss: 66.6526 - val_calc_mre_K: 0.8136\n", "Epoch 984/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.7608 - calc_mre_K: 0.8272 - val_loss: 68.0578 - val_calc_mre_K: 0.8308\n", "Epoch 985/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.8712 - calc_mre_K: 0.8285 - val_loss: 69.4957 - val_calc_mre_K: 0.8483\n", "Epoch 986/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.5847 - calc_mre_K: 0.8250 - val_loss: 68.7883 - val_calc_mre_K: 0.8397\n", "Epoch 987/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.7452 - calc_mre_K: 0.8270 - val_loss: 67.2788 - val_calc_mre_K: 0.8213\n", "Epoch 988/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.7752 - calc_mre_K: 0.8273 - val_loss: 72.0770 - val_calc_mre_K: 0.8798\n", "Epoch 989/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 [==============================] - 4s 80us/step - loss: 67.5965 - calc_mre_K: 0.8252 - val_loss: 66.2843 - val_calc_mre_K: 0.8091\n", "Epoch 990/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.5231 - calc_mre_K: 0.8243 - val_loss: 70.6849 - val_calc_mre_K: 0.8629\n", "Epoch 991/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.7525 - calc_mre_K: 0.8271 - val_loss: 66.9228 - val_calc_mre_K: 0.8169\n", "Epoch 992/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.3997 - calc_mre_K: 0.8228 - val_loss: 68.3498 - val_calc_mre_K: 0.8343\n", "Epoch 993/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.5936 - calc_mre_K: 0.8251 - val_loss: 66.9790 - val_calc_mre_K: 0.8176\n", "Epoch 994/2000\n", 
"48000/48000 [==============================] - 4s 81us/step - loss: 67.6498 - calc_mre_K: 0.8258 - val_loss: 67.6192 - val_calc_mre_K: 0.8254\n", "Epoch 995/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.7450 - calc_mre_K: 0.8270 - val_loss: 70.0318 - val_calc_mre_K: 0.8549\n", "Epoch 996/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.7506 - calc_mre_K: 0.8270 - val_loss: 67.8847 - val_calc_mre_K: 0.8287\n", "Epoch 997/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.3797 - calc_mre_K: 0.8225 - val_loss: 69.7433 - val_calc_mre_K: 0.8514\n", "Epoch 998/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.5022 - calc_mre_K: 0.8240 - val_loss: 68.1163 - val_calc_mre_K: 0.8315\n", "Epoch 999/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.4806 - calc_mre_K: 0.8237 - val_loss: 68.2471 - val_calc_mre_K: 0.8331\n", "Epoch 1000/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.6440 - calc_mre_K: 0.8257 - val_loss: 66.4451 - val_calc_mre_K: 0.8111\n", "Epoch 1001/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.6031 - calc_mre_K: 0.8252 - val_loss: 66.0076 - val_calc_mre_K: 0.8058\n", "Epoch 1002/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.5437 - calc_mre_K: 0.8245 - val_loss: 70.5696 - val_calc_mre_K: 0.8614\n", "Epoch 1003/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.6739 - calc_mre_K: 0.8261 - val_loss: 66.4447 - val_calc_mre_K: 0.8111\n", "Epoch 1004/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.4989 - calc_mre_K: 0.8240 - val_loss: 72.0980 - val_calc_mre_K: 0.8801\n", "Epoch 1005/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.7013 - calc_mre_K: 0.8264 - val_loss: 70.7337 - val_calc_mre_K: 0.8634\n", "Epoch 
1006/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.6498 - calc_mre_K: 0.8258 - val_loss: 68.6995 - val_calc_mre_K: 0.8386\n", "Epoch 1007/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.6643 - calc_mre_K: 0.8260 - val_loss: 67.2725 - val_calc_mre_K: 0.8212\n", "Epoch 1008/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.6260 - calc_mre_K: 0.8255 - val_loss: 71.1402 - val_calc_mre_K: 0.8684\n", "Epoch 1009/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.5259 - calc_mre_K: 0.8243 - val_loss: 68.4684 - val_calc_mre_K: 0.8358\n", "Epoch 1010/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.5872 - calc_mre_K: 0.8250 - val_loss: 69.3370 - val_calc_mre_K: 0.8464\n", "Epoch 1011/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.7464 - calc_mre_K: 0.8270 - val_loss: 68.5440 - val_calc_mre_K: 0.8367\n", "Epoch 1012/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.4668 - calc_mre_K: 0.8236 - val_loss: 68.3584 - val_calc_mre_K: 0.8345\n", "Epoch 1013/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.6733 - calc_mre_K: 0.8261 - val_loss: 68.4253 - val_calc_mre_K: 0.8353\n", "Epoch 1014/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3764 - calc_mre_K: 0.8225 - val_loss: 67.7302 - val_calc_mre_K: 0.8268\n", "Epoch 1015/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.7405 - calc_mre_K: 0.8269 - val_loss: 67.5651 - val_calc_mre_K: 0.8248\n", "Epoch 1016/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.6641 - calc_mre_K: 0.8260 - val_loss: 67.6285 - val_calc_mre_K: 0.8255\n", "Epoch 1017/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.5604 - calc_mre_K: 0.8247 - val_loss: 70.8526 - val_calc_mre_K: 
0.8649\n", "Epoch 1018/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.6876 - calc_mre_K: 0.8263 - val_loss: 68.3185 - val_calc_mre_K: 0.8340\n", "Epoch 1019/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3474 - calc_mre_K: 0.8221 - val_loss: 69.4011 - val_calc_mre_K: 0.8472\n", "Epoch 1020/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.9744 - calc_mre_K: 0.8298 - val_loss: 68.7794 - val_calc_mre_K: 0.8396\n", "Epoch 1021/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3182 - calc_mre_K: 0.8218 - val_loss: 69.1777 - val_calc_mre_K: 0.8445\n", "Epoch 1022/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.5963 - calc_mre_K: 0.8252 - val_loss: 69.4318 - val_calc_mre_K: 0.8476\n", "Epoch 1023/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 67.6690 - calc_mre_K: 0.8260 - val_loss: 71.2851 - val_calc_mre_K: 0.8702\n", "Epoch 1024/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.5742 - calc_mre_K: 0.8249 - val_loss: 70.1252 - val_calc_mre_K: 0.8560\n", "Epoch 1025/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.4627 - calc_mre_K: 0.8235 - val_loss: 66.5297 - val_calc_mre_K: 0.8121\n", "Epoch 1026/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.2842 - calc_mre_K: 0.8213 - val_loss: 67.5627 - val_calc_mre_K: 0.8247\n", "Epoch 1027/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 67.6127 - calc_mre_K: 0.8254 - val_loss: 66.9251 - val_calc_mre_K: 0.8170\n", "Epoch 1028/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.5853 - calc_mre_K: 0.8250 - val_loss: 66.6407 - val_calc_mre_K: 0.8135\n", "Epoch 1029/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.6733 - calc_mre_K: 0.8261 - val_loss: 68.7797 - 
val_calc_mre_K: 0.8396\n", "Epoch 1030/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.2432 - calc_mre_K: 0.8208 - val_loss: 68.2905 - val_calc_mre_K: 0.8336\n", "Epoch 1031/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.6047 - calc_mre_K: 0.8253 - val_loss: 67.1043 - val_calc_mre_K: 0.8191\n", "Epoch 1032/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.5872 - calc_mre_K: 0.8250 - val_loss: 66.5895 - val_calc_mre_K: 0.8129\n", "Epoch 1033/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.6015 - calc_mre_K: 0.8252 - val_loss: 67.0269 - val_calc_mre_K: 0.8182\n", "Epoch 1034/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.4020 - calc_mre_K: 0.8228 - val_loss: 68.7126 - val_calc_mre_K: 0.8388\n", "Epoch 1035/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.4490 - calc_mre_K: 0.8234 - val_loss: 68.1329 - val_calc_mre_K: 0.8317\n", "Epoch 1036/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.6669 - calc_mre_K: 0.8260 - val_loss: 67.4104 - val_calc_mre_K: 0.8229\n", "Epoch 1037/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.5223 - calc_mre_K: 0.8242 - val_loss: 68.9093 - val_calc_mre_K: 0.8412\n", "Epoch 1038/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.4242 - calc_mre_K: 0.8230 - val_loss: 67.5437 - val_calc_mre_K: 0.8245\n", "Epoch 1039/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3448 - calc_mre_K: 0.8221 - val_loss: 66.3807 - val_calc_mre_K: 0.8103\n", "Epoch 1040/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.5030 - calc_mre_K: 0.8240 - val_loss: 66.3087 - val_calc_mre_K: 0.8094\n", "Epoch 1041/2000\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "48000/48000 
[==============================] - 4s 76us/step - loss: 67.4032 - calc_mre_K: 0.8228 - val_loss: 68.3702 - val_calc_mre_K: 0.8346\n", "Epoch 1042/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.6313 - calc_mre_K: 0.8256 - val_loss: 66.1339 - val_calc_mre_K: 0.8073\n", "Epoch 1043/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.4623 - calc_mre_K: 0.8235 - val_loss: 67.6003 - val_calc_mre_K: 0.8252\n", "Epoch 1044/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.5595 - calc_mre_K: 0.8247 - val_loss: 65.6961 - val_calc_mre_K: 0.8020\n", "Epoch 1045/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.4620 - calc_mre_K: 0.8235 - val_loss: 65.6985 - val_calc_mre_K: 0.8020\n", "Epoch 1046/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.4367 - calc_mre_K: 0.8232 - val_loss: 66.0648 - val_calc_mre_K: 0.8065\n", "Epoch 1047/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.3994 - calc_mre_K: 0.8227 - val_loss: 68.3775 - val_calc_mre_K: 0.8347\n", "Epoch 1048/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3120 - calc_mre_K: 0.8217 - val_loss: 67.7150 - val_calc_mre_K: 0.8266\n", "Epoch 1049/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.5634 - calc_mre_K: 0.8247 - val_loss: 66.9150 - val_calc_mre_K: 0.8168\n", "Epoch 1050/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 67.1951 - calc_mre_K: 0.8203 - val_loss: 68.7130 - val_calc_mre_K: 0.8388\n", "Epoch 1051/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.5524 - calc_mre_K: 0.8246 - val_loss: 67.0152 - val_calc_mre_K: 0.8181\n", "Epoch 1052/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.2760 - calc_mre_K: 0.8212 - val_loss: 65.4211 - val_calc_mre_K: 0.7986\n", "Epoch 
1053/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 67.5767 - calc_mre_K: 0.8249 - val_loss: 70.1853 - val_calc_mre_K: 0.8568\n", "Epoch 1054/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.4368 - calc_mre_K: 0.8232 - val_loss: 65.9815 - val_calc_mre_K: 0.8054\n", "Epoch 1055/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.1551 - calc_mre_K: 0.8198 - val_loss: 65.5948 - val_calc_mre_K: 0.8007\n", "Epoch 1056/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.6877 - calc_mre_K: 0.8263 - val_loss: 66.8150 - val_calc_mre_K: 0.8156\n", "Epoch 1057/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.3256 - calc_mre_K: 0.8218 - val_loss: 66.8076 - val_calc_mre_K: 0.8155\n", "Epoch 1058/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.5627 - calc_mre_K: 0.8247 - val_loss: 68.0630 - val_calc_mre_K: 0.8308\n", "Epoch 1059/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.1376 - calc_mre_K: 0.8196 - val_loss: 66.5018 - val_calc_mre_K: 0.8118\n", "Epoch 1060/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.2438 - calc_mre_K: 0.8208 - val_loss: 67.6448 - val_calc_mre_K: 0.8257\n", "Epoch 1061/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.6687 - calc_mre_K: 0.8260 - val_loss: 67.4276 - val_calc_mre_K: 0.8231\n", "Epoch 1062/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.2260 - calc_mre_K: 0.8206 - val_loss: 68.8454 - val_calc_mre_K: 0.8404\n", "Epoch 1063/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.6198 - calc_mre_K: 0.8254 - val_loss: 67.6271 - val_calc_mre_K: 0.8255\n", "Epoch 1064/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.5276 - calc_mre_K: 0.8243 - val_loss: 67.1898 - val_calc_mre_K: 
0.8202\n", "Epoch 1065/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3391 - calc_mre_K: 0.8220 - val_loss: 69.2875 - val_calc_mre_K: 0.8458\n", "Epoch 1066/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.2189 - calc_mre_K: 0.8205 - val_loss: 75.1129 - val_calc_mre_K: 0.9169\n", "Epoch 1067/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.4038 - calc_mre_K: 0.8228 - val_loss: 66.6855 - val_calc_mre_K: 0.8140\n", "Epoch 1068/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.4373 - calc_mre_K: 0.8232 - val_loss: 67.8245 - val_calc_mre_K: 0.8279\n", "Epoch 1069/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.5622 - calc_mre_K: 0.8247 - val_loss: 65.6321 - val_calc_mre_K: 0.8012\n", "Epoch 1070/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.1323 - calc_mre_K: 0.8195 - val_loss: 65.7801 - val_calc_mre_K: 0.8030\n", "Epoch 1071/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.3278 - calc_mre_K: 0.8219 - val_loss: 69.9318 - val_calc_mre_K: 0.8537\n", "Epoch 1072/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.4832 - calc_mre_K: 0.8238 - val_loss: 68.5455 - val_calc_mre_K: 0.8367\n", "Epoch 1073/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3598 - calc_mre_K: 0.8223 - val_loss: 67.1900 - val_calc_mre_K: 0.8202\n", "Epoch 1074/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2220 - calc_mre_K: 0.8206 - val_loss: 67.9513 - val_calc_mre_K: 0.8295\n", "Epoch 1075/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.6801 - calc_mre_K: 0.8262 - val_loss: 66.9406 - val_calc_mre_K: 0.8171\n", "Epoch 1076/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3399 - calc_mre_K: 0.8220 - val_loss: 66.6149 - 
val_calc_mre_K: 0.8132\n", "Epoch 1077/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.5155 - calc_mre_K: 0.8242 - val_loss: 65.7000 - val_calc_mre_K: 0.8020\n", "Epoch 1078/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3202 - calc_mre_K: 0.8218 - val_loss: 65.5005 - val_calc_mre_K: 0.7996\n", "Epoch 1079/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2365 - calc_mre_K: 0.8208 - val_loss: 67.0750 - val_calc_mre_K: 0.8188\n", "Epoch 1080/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.3987 - calc_mre_K: 0.8227 - val_loss: 67.7141 - val_calc_mre_K: 0.8266\n", "Epoch 1081/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 67.5175 - calc_mre_K: 0.8242 - val_loss: 67.2694 - val_calc_mre_K: 0.8212\n", "Epoch 1082/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.1353 - calc_mre_K: 0.8195 - val_loss: 66.8004 - val_calc_mre_K: 0.8154\n", "Epoch 1083/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 67.2797 - calc_mre_K: 0.8213 - val_loss: 67.5083 - val_calc_mre_K: 0.8241\n", "Epoch 1084/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.2802 - calc_mre_K: 0.8213 - val_loss: 66.8981 - val_calc_mre_K: 0.8166\n", "Epoch 1085/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.4025 - calc_mre_K: 0.8228 - val_loss: 67.9638 - val_calc_mre_K: 0.8296\n", "Epoch 1086/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2515 - calc_mre_K: 0.8209 - val_loss: 66.4904 - val_calc_mre_K: 0.8117\n", "Epoch 1087/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.4598 - calc_mre_K: 0.8235 - val_loss: 68.5822 - val_calc_mre_K: 0.8372\n", "Epoch 1088/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.4480 - calc_mre_K: 0.8233 - 
val_loss: 67.8854 - val_calc_mre_K: 0.8287\n", "Epoch 1089/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.1436 - calc_mre_K: 0.8196 - val_loss: 66.5759 - val_calc_mre_K: 0.8127\n", "Epoch 1090/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.3318 - calc_mre_K: 0.8219 - val_loss: 67.1040 - val_calc_mre_K: 0.8191\n", "Epoch 1091/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.2326 - calc_mre_K: 0.8207 - val_loss: 66.8739 - val_calc_mre_K: 0.8163\n", "Epoch 1092/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3075 - calc_mre_K: 0.8216 - val_loss: 69.3338 - val_calc_mre_K: 0.8464\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1093/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.5301 - calc_mre_K: 0.8243 - val_loss: 67.7979 - val_calc_mre_K: 0.8276\n", "Epoch 1094/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.2745 - calc_mre_K: 0.8212 - val_loss: 68.1297 - val_calc_mre_K: 0.8317\n", "Epoch 1095/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.4628 - calc_mre_K: 0.8235 - val_loss: 67.2156 - val_calc_mre_K: 0.8205\n", "Epoch 1096/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.4427 - calc_mre_K: 0.8233 - val_loss: 67.8450 - val_calc_mre_K: 0.8282\n", "Epoch 1097/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.2937 - calc_mre_K: 0.8215 - val_loss: 66.1064 - val_calc_mre_K: 0.8070\n", "Epoch 1098/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.4178 - calc_mre_K: 0.8230 - val_loss: 67.9501 - val_calc_mre_K: 0.8295\n", "Epoch 1099/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.3209 - calc_mre_K: 0.8218 - val_loss: 68.1988 - val_calc_mre_K: 0.8325\n", "Epoch 1100/2000\n", "48000/48000 
[==============================] - 4s 80us/step - loss: 67.2682 - calc_mre_K: 0.8211 - val_loss: 66.5438 - val_calc_mre_K: 0.8123\n", "Epoch 1101/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.1975 - calc_mre_K: 0.8203 - val_loss: 68.0583 - val_calc_mre_K: 0.8308\n", "Epoch 1102/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.3811 - calc_mre_K: 0.8225 - val_loss: 69.4347 - val_calc_mre_K: 0.8476\n", "Epoch 1103/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.1944 - calc_mre_K: 0.8202 - val_loss: 67.3947 - val_calc_mre_K: 0.8227\n", "Epoch 1104/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.1097 - calc_mre_K: 0.8192 - val_loss: 67.3313 - val_calc_mre_K: 0.8219\n", "Epoch 1105/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.2477 - calc_mre_K: 0.8209 - val_loss: 66.5260 - val_calc_mre_K: 0.8121\n", "Epoch 1106/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.5524 - calc_mre_K: 0.8246 - val_loss: 67.1836 - val_calc_mre_K: 0.8201\n", "Epoch 1107/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.0610 - calc_mre_K: 0.8186 - val_loss: 67.3672 - val_calc_mre_K: 0.8224\n", "Epoch 1108/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.1551 - calc_mre_K: 0.8198 - val_loss: 66.4177 - val_calc_mre_K: 0.8108\n", "Epoch 1109/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3086 - calc_mre_K: 0.8216 - val_loss: 66.6825 - val_calc_mre_K: 0.8140\n", "Epoch 1110/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.3987 - calc_mre_K: 0.8227 - val_loss: 66.7798 - val_calc_mre_K: 0.8152\n", "Epoch 1111/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3507 - calc_mre_K: 0.8222 - val_loss: 65.7602 - val_calc_mre_K: 0.8027\n", "Epoch 
1112/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.2625 - calc_mre_K: 0.8211 - val_loss: 64.8811 - val_calc_mre_K: 0.7920\n", "Epoch 1113/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.1778 - calc_mre_K: 0.8200 - val_loss: 67.7610 - val_calc_mre_K: 0.8272\n", "Epoch 1114/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.4651 - calc_mre_K: 0.8235 - val_loss: 66.1524 - val_calc_mre_K: 0.8075\n", "Epoch 1115/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2547 - calc_mre_K: 0.8210 - val_loss: 67.1414 - val_calc_mre_K: 0.8196\n", "Epoch 1116/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.4332 - calc_mre_K: 0.8232 - val_loss: 66.6198 - val_calc_mre_K: 0.8132\n", "Epoch 1117/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.3174 - calc_mre_K: 0.8217 - val_loss: 66.1720 - val_calc_mre_K: 0.8078\n", "Epoch 1118/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.9865 - calc_mre_K: 0.8177 - val_loss: 68.1750 - val_calc_mre_K: 0.8322\n", "Epoch 1119/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.4952 - calc_mre_K: 0.8239 - val_loss: 68.2945 - val_calc_mre_K: 0.8337\n", "Epoch 1120/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.3326 - calc_mre_K: 0.8219 - val_loss: 66.5427 - val_calc_mre_K: 0.8123\n", "Epoch 1121/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.4338 - calc_mre_K: 0.8232 - val_loss: 66.1111 - val_calc_mre_K: 0.8070\n", "Epoch 1122/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.1041 - calc_mre_K: 0.8191 - val_loss: 67.7950 - val_calc_mre_K: 0.8276\n", "Epoch 1123/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.3606 - calc_mre_K: 0.8223 - val_loss: 67.9583 - val_calc_mre_K: 
0.8296\n", "Epoch 1124/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.6731 - calc_mre_K: 0.8261 - val_loss: 67.9758 - val_calc_mre_K: 0.8298\n", "Epoch 1125/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.2086 - calc_mre_K: 0.8204 - val_loss: 66.3216 - val_calc_mre_K: 0.8096\n", "Epoch 1126/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.3855 - calc_mre_K: 0.8226 - val_loss: 67.6461 - val_calc_mre_K: 0.8258\n", "Epoch 1127/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.0891 - calc_mre_K: 0.8190 - val_loss: 68.0385 - val_calc_mre_K: 0.8305\n", "Epoch 1128/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.2612 - calc_mre_K: 0.8211 - val_loss: 67.8438 - val_calc_mre_K: 0.8282\n", "Epoch 1129/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0959 - calc_mre_K: 0.8190 - val_loss: 68.6056 - val_calc_mre_K: 0.8375\n", "Epoch 1130/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.4558 - calc_mre_K: 0.8234 - val_loss: 67.2454 - val_calc_mre_K: 0.8209\n", "Epoch 1131/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2468 - calc_mre_K: 0.8209 - val_loss: 70.0558 - val_calc_mre_K: 0.8552\n", "Epoch 1132/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.4674 - calc_mre_K: 0.8236 - val_loss: 66.1286 - val_calc_mre_K: 0.8072\n", "Epoch 1133/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3190 - calc_mre_K: 0.8218 - val_loss: 67.3382 - val_calc_mre_K: 0.8220\n", "Epoch 1134/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2302 - calc_mre_K: 0.8207 - val_loss: 69.4075 - val_calc_mre_K: 0.8473\n", "Epoch 1135/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2471 - calc_mre_K: 0.8209 - val_loss: 68.4908 - 
val_calc_mre_K: 0.8361\n", "Epoch 1136/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.3336 - calc_mre_K: 0.8219 - val_loss: 67.3154 - val_calc_mre_K: 0.8217\n", "Epoch 1137/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.1604 - calc_mre_K: 0.8198 - val_loss: 68.2163 - val_calc_mre_K: 0.8327\n", "Epoch 1138/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0610 - calc_mre_K: 0.8186 - val_loss: 73.2506 - val_calc_mre_K: 0.8942\n", "Epoch 1139/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.1327 - calc_mre_K: 0.8195 - val_loss: 68.2917 - val_calc_mre_K: 0.8336\n", "Epoch 1140/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.3085 - calc_mre_K: 0.8216 - val_loss: 66.6469 - val_calc_mre_K: 0.8136\n", "Epoch 1141/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.2220 - calc_mre_K: 0.8206 - val_loss: 67.9232 - val_calc_mre_K: 0.8291\n", "Epoch 1142/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2056 - calc_mre_K: 0.8204 - val_loss: 67.5110 - val_calc_mre_K: 0.8241\n", "Epoch 1143/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.0114 - calc_mre_K: 0.8180 - val_loss: 67.6949 - val_calc_mre_K: 0.8264\n", "Epoch 1144/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.2108 - calc_mre_K: 0.8204 - val_loss: 67.9698 - val_calc_mre_K: 0.8297\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1145/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.1100 - calc_mre_K: 0.8192 - val_loss: 65.2904 - val_calc_mre_K: 0.7970\n", "Epoch 1146/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.2173 - calc_mre_K: 0.8205 - val_loss: 68.0711 - val_calc_mre_K: 0.8309\n", "Epoch 1147/2000\n", "48000/48000 
[==============================] - 4s 81us/step - loss: 67.0734 - calc_mre_K: 0.8188 - val_loss: 66.4757 - val_calc_mre_K: 0.8115\n", "Epoch 1148/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.2313 - calc_mre_K: 0.8207 - val_loss: 67.5360 - val_calc_mre_K: 0.8244\n", "Epoch 1149/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.0195 - calc_mre_K: 0.8181 - val_loss: 65.7226 - val_calc_mre_K: 0.8023\n", "Epoch 1150/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.1817 - calc_mre_K: 0.8201 - val_loss: 68.2495 - val_calc_mre_K: 0.8331\n", "Epoch 1151/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.3919 - calc_mre_K: 0.8227 - val_loss: 68.7487 - val_calc_mre_K: 0.8392\n", "Epoch 1152/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.1870 - calc_mre_K: 0.8202 - val_loss: 65.3313 - val_calc_mre_K: 0.7975\n", "Epoch 1153/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.9979 - calc_mre_K: 0.8178 - val_loss: 68.7506 - val_calc_mre_K: 0.8392\n", "Epoch 1154/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.1228 - calc_mre_K: 0.8194 - val_loss: 67.3699 - val_calc_mre_K: 0.8224\n", "Epoch 1155/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.0722 - calc_mre_K: 0.8188 - val_loss: 67.3831 - val_calc_mre_K: 0.8225\n", "Epoch 1156/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.0560 - calc_mre_K: 0.8186 - val_loss: 65.1259 - val_calc_mre_K: 0.7950\n", "Epoch 1157/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.2938 - calc_mre_K: 0.8215 - val_loss: 67.0632 - val_calc_mre_K: 0.8186\n", "Epoch 1158/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.0846 - calc_mre_K: 0.8189 - val_loss: 68.8679 - val_calc_mre_K: 0.8407\n", "Epoch 
1159/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.1696 - calc_mre_K: 0.8199 - val_loss: 66.8026 - val_calc_mre_K: 0.8155\n", "Epoch 1160/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.2755 - calc_mre_K: 0.8212 - val_loss: 68.4276 - val_calc_mre_K: 0.8353\n", "Epoch 1161/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2938 - calc_mre_K: 0.8215 - val_loss: 67.3498 - val_calc_mre_K: 0.8221\n", "Epoch 1162/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.3430 - calc_mre_K: 0.8221 - val_loss: 65.8928 - val_calc_mre_K: 0.8044\n", "Epoch 1163/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.3586 - calc_mre_K: 0.8222 - val_loss: 66.9295 - val_calc_mre_K: 0.8170\n", "Epoch 1164/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.0444 - calc_mre_K: 0.8184 - val_loss: 66.7872 - val_calc_mre_K: 0.8153\n", "Epoch 1165/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0542 - calc_mre_K: 0.8185 - val_loss: 67.3283 - val_calc_mre_K: 0.8219\n", "Epoch 1166/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.1292 - calc_mre_K: 0.8194 - val_loss: 67.8882 - val_calc_mre_K: 0.8287\n", "Epoch 1167/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.0412 - calc_mre_K: 0.8184 - val_loss: 67.5080 - val_calc_mre_K: 0.8241\n", "Epoch 1168/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.9209 - calc_mre_K: 0.8169 - val_loss: 65.8115 - val_calc_mre_K: 0.8034\n", "Epoch 1169/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.1426 - calc_mre_K: 0.8196 - val_loss: 67.4795 - val_calc_mre_K: 0.8237\n", "Epoch 1170/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.8840 - calc_mre_K: 0.8165 - val_loss: 66.6537 - val_calc_mre_K: 
0.8136\n", "Epoch 1171/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.3317 - calc_mre_K: 0.8219 - val_loss: 72.6071 - val_calc_mre_K: 0.8863\n", "Epoch 1172/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.3073 - calc_mre_K: 0.8216 - val_loss: 68.6909 - val_calc_mre_K: 0.8385\n", "Epoch 1173/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.1280 - calc_mre_K: 0.8194 - val_loss: 67.7336 - val_calc_mre_K: 0.8268\n", "Epoch 1174/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.0034 - calc_mre_K: 0.8179 - val_loss: 67.8430 - val_calc_mre_K: 0.8282\n", "Epoch 1175/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.9648 - calc_mre_K: 0.8174 - val_loss: 66.4012 - val_calc_mre_K: 0.8106\n", "Epoch 1176/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.1254 - calc_mre_K: 0.8194 - val_loss: 68.2420 - val_calc_mre_K: 0.8330\n", "Epoch 1177/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.2382 - calc_mre_K: 0.8208 - val_loss: 67.0507 - val_calc_mre_K: 0.8185\n", "Epoch 1178/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.9115 - calc_mre_K: 0.8168 - val_loss: 68.5280 - val_calc_mre_K: 0.8365\n", "Epoch 1179/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.2510 - calc_mre_K: 0.8209 - val_loss: 66.7187 - val_calc_mre_K: 0.8144\n", "Epoch 1180/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2097 - calc_mre_K: 0.8204 - val_loss: 67.0803 - val_calc_mre_K: 0.8189\n", "Epoch 1181/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.2200 - calc_mre_K: 0.8206 - val_loss: 66.8943 - val_calc_mre_K: 0.8166\n", "Epoch 1182/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.1381 - calc_mre_K: 0.8196 - val_loss: 66.3781 - 
val_calc_mre_K: 0.8103\n", "Epoch 1183/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.1277 - calc_mre_K: 0.8194 - val_loss: 64.9447 - val_calc_mre_K: 0.7928\n", "Epoch 1184/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0898 - calc_mre_K: 0.8190 - val_loss: 68.1301 - val_calc_mre_K: 0.8317\n", "Epoch 1185/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.9836 - calc_mre_K: 0.8177 - val_loss: 72.8976 - val_calc_mre_K: 0.8899\n", "Epoch 1186/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.0220 - calc_mre_K: 0.8181 - val_loss: 67.6917 - val_calc_mre_K: 0.8263\n", "Epoch 1187/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0687 - calc_mre_K: 0.8187 - val_loss: 66.1715 - val_calc_mre_K: 0.8078\n", "Epoch 1188/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0308 - calc_mre_K: 0.8182 - val_loss: 68.8821 - val_calc_mre_K: 0.8408\n", "Epoch 1189/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.0251 - calc_mre_K: 0.8182 - val_loss: 68.7913 - val_calc_mre_K: 0.8397\n", "Epoch 1190/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.8934 - calc_mre_K: 0.8166 - val_loss: 68.0468 - val_calc_mre_K: 0.8306\n", "Epoch 1191/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.0643 - calc_mre_K: 0.8187 - val_loss: 68.5316 - val_calc_mre_K: 0.8366\n", "Epoch 1192/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.1802 - calc_mre_K: 0.8201 - val_loss: 68.8312 - val_calc_mre_K: 0.8402\n", "Epoch 1193/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.2182 - calc_mre_K: 0.8205 - val_loss: 65.3207 - val_calc_mre_K: 0.7974\n", "Epoch 1194/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0549 - calc_mre_K: 0.8185 - 
val_loss: 67.2561 - val_calc_mre_K: 0.8210\n", "Epoch 1195/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.0604 - calc_mre_K: 0.8186 - val_loss: 67.3296 - val_calc_mre_K: 0.8219\n", "Epoch 1196/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.9579 - calc_mre_K: 0.8174 - val_loss: 70.8395 - val_calc_mre_K: 0.8647\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1197/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.2034 - calc_mre_K: 0.8204 - val_loss: 65.6337 - val_calc_mre_K: 0.8012\n", "Epoch 1198/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.9329 - calc_mre_K: 0.8171 - val_loss: 68.4546 - val_calc_mre_K: 0.8356\n", "Epoch 1199/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.1621 - calc_mre_K: 0.8198 - val_loss: 66.2932 - val_calc_mre_K: 0.8092\n", "Epoch 1200/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.0542 - calc_mre_K: 0.8185 - val_loss: 67.4831 - val_calc_mre_K: 0.8238\n", "Epoch 1201/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.8999 - calc_mre_K: 0.8166 - val_loss: 67.1780 - val_calc_mre_K: 0.8200\n", "Epoch 1202/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.1889 - calc_mre_K: 0.8202 - val_loss: 68.1849 - val_calc_mre_K: 0.8323\n", "Epoch 1203/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.1201 - calc_mre_K: 0.8193 - val_loss: 67.0685 - val_calc_mre_K: 0.8187\n", "Epoch 1204/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.3170 - calc_mre_K: 0.8217 - val_loss: 66.0005 - val_calc_mre_K: 0.8057\n", "Epoch 1205/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.9402 - calc_mre_K: 0.8171 - val_loss: 66.5202 - val_calc_mre_K: 0.8120\n", "Epoch 1206/2000\n", "48000/48000 
[==============================] - 4s 78us/step - loss: 67.1671 - calc_mre_K: 0.8199 - val_loss: 68.4496 - val_calc_mre_K: 0.8356\n", "Epoch 1207/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.8171 - calc_mre_K: 0.8156 - val_loss: 67.3340 - val_calc_mre_K: 0.8219\n", "Epoch 1208/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.1939 - calc_mre_K: 0.8202 - val_loss: 64.5746 - val_calc_mre_K: 0.7883\n", "Epoch 1209/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.0777 - calc_mre_K: 0.8188 - val_loss: 67.7080 - val_calc_mre_K: 0.8265\n", "Epoch 1210/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.9613 - calc_mre_K: 0.8174 - val_loss: 67.3595 - val_calc_mre_K: 0.8223\n", "Epoch 1211/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.0812 - calc_mre_K: 0.8189 - val_loss: 66.8624 - val_calc_mre_K: 0.8162\n", "Epoch 1212/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.0319 - calc_mre_K: 0.8183 - val_loss: 66.3546 - val_calc_mre_K: 0.8100\n", "Epoch 1213/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.9748 - calc_mre_K: 0.8176 - val_loss: 66.5033 - val_calc_mre_K: 0.8118\n", "Epoch 1214/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.9097 - calc_mre_K: 0.8168 - val_loss: 66.1186 - val_calc_mre_K: 0.8071\n", "Epoch 1215/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.0574 - calc_mre_K: 0.8186 - val_loss: 68.5486 - val_calc_mre_K: 0.8368\n", "Epoch 1216/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.0509 - calc_mre_K: 0.8185 - val_loss: 65.3462 - val_calc_mre_K: 0.7977\n", "Epoch 1217/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.9935 - calc_mre_K: 0.8178 - val_loss: 67.6239 - val_calc_mre_K: 0.8255\n", "Epoch 
1218/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.8333 - calc_mre_K: 0.8158 - val_loss: 66.9050 - val_calc_mre_K: 0.8167\n", "Epoch 1219/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.0370 - calc_mre_K: 0.8183 - val_loss: 68.1290 - val_calc_mre_K: 0.8317\n", "Epoch 1220/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.2233 - calc_mre_K: 0.8206 - val_loss: 65.8770 - val_calc_mre_K: 0.8042\n", "Epoch 1221/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.0893 - calc_mre_K: 0.8190 - val_loss: 67.8113 - val_calc_mre_K: 0.8278\n", "Epoch 1222/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.2276 - calc_mre_K: 0.8206 - val_loss: 66.5851 - val_calc_mre_K: 0.8128\n", "Epoch 1223/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 67.0024 - calc_mre_K: 0.8179 - val_loss: 65.8557 - val_calc_mre_K: 0.8039\n", "Epoch 1224/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.0728 - calc_mre_K: 0.8188 - val_loss: 66.5429 - val_calc_mre_K: 0.8123\n", "Epoch 1225/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.9693 - calc_mre_K: 0.8175 - val_loss: 68.6277 - val_calc_mre_K: 0.8377\n", "Epoch 1226/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.1008 - calc_mre_K: 0.8191 - val_loss: 68.6684 - val_calc_mre_K: 0.8382\n", "Epoch 1227/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.7936 - calc_mre_K: 0.8154 - val_loss: 66.5039 - val_calc_mre_K: 0.8118\n", "Epoch 1228/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.9754 - calc_mre_K: 0.8176 - val_loss: 68.6433 - val_calc_mre_K: 0.8379\n", "Epoch 1229/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.9366 - calc_mre_K: 0.8171 - val_loss: 72.2512 - val_calc_mre_K: 
0.8820\n", "Epoch 1230/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.9638 - calc_mre_K: 0.8174 - val_loss: 67.0177 - val_calc_mre_K: 0.8181\n", "Epoch 1231/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0282 - calc_mre_K: 0.8182 - val_loss: 66.4235 - val_calc_mre_K: 0.8108\n", "Epoch 1232/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0659 - calc_mre_K: 0.8187 - val_loss: 65.6812 - val_calc_mre_K: 0.8018\n", "Epoch 1233/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.9067 - calc_mre_K: 0.8167 - val_loss: 66.2427 - val_calc_mre_K: 0.8086\n", "Epoch 1234/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0749 - calc_mre_K: 0.8188 - val_loss: 66.4634 - val_calc_mre_K: 0.8113\n", "Epoch 1235/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.8880 - calc_mre_K: 0.8165 - val_loss: 65.9785 - val_calc_mre_K: 0.8054\n", "Epoch 1236/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0849 - calc_mre_K: 0.8189 - val_loss: 65.9830 - val_calc_mre_K: 0.8055\n", "Epoch 1237/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 67.0633 - calc_mre_K: 0.8186 - val_loss: 66.9359 - val_calc_mre_K: 0.8171\n", "Epoch 1238/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.8685 - calc_mre_K: 0.8163 - val_loss: 64.7992 - val_calc_mre_K: 0.7910\n", "Epoch 1239/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.9350 - calc_mre_K: 0.8171 - val_loss: 69.3186 - val_calc_mre_K: 0.8462\n", "Epoch 1240/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.1319 - calc_mre_K: 0.8195 - val_loss: 66.8671 - val_calc_mre_K: 0.8162\n", "Epoch 1241/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.9021 - calc_mre_K: 0.8167 - val_loss: 66.8799 - 
val_calc_mre_K: 0.8164\n", "Epoch 1242/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.9720 - calc_mre_K: 0.8175 - val_loss: 66.6797 - val_calc_mre_K: 0.8140\n", "Epoch 1243/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.8624 - calc_mre_K: 0.8162 - val_loss: 65.8367 - val_calc_mre_K: 0.8037\n", "Epoch 1244/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.9116 - calc_mre_K: 0.8168 - val_loss: 65.1227 - val_calc_mre_K: 0.7950\n", "Epoch 1245/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.8239 - calc_mre_K: 0.8157 - val_loss: 66.5039 - val_calc_mre_K: 0.8118\n", "Epoch 1246/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.9591 - calc_mre_K: 0.8174 - val_loss: 65.7928 - val_calc_mre_K: 0.8031\n", "Epoch 1247/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.9799 - calc_mre_K: 0.8176 - val_loss: 69.4394 - val_calc_mre_K: 0.8476\n", "Epoch 1248/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.0021 - calc_mre_K: 0.8179 - val_loss: 69.9463 - val_calc_mre_K: 0.8538\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1249/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.6394 - calc_mre_K: 0.8135 - val_loss: 66.1013 - val_calc_mre_K: 0.8069\n", "Epoch 1250/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.8997 - calc_mre_K: 0.8166 - val_loss: 68.2163 - val_calc_mre_K: 0.8327\n", "Epoch 1251/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.9640 - calc_mre_K: 0.8174 - val_loss: 67.1122 - val_calc_mre_K: 0.8192\n", "Epoch 1252/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.8539 - calc_mre_K: 0.8161 - val_loss: 66.5945 - val_calc_mre_K: 0.8129\n", "Epoch 1253/2000\n", "48000/48000 
[==============================] - 4s 78us/step - loss: 67.1452 - calc_mre_K: 0.8196 - val_loss: 67.7060 - val_calc_mre_K: 0.8265\n", "Epoch 1254/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.7950 - calc_mre_K: 0.8154 - val_loss: 65.4073 - val_calc_mre_K: 0.7984\n", "Epoch 1255/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.9543 - calc_mre_K: 0.8173 - val_loss: 66.9491 - val_calc_mre_K: 0.8172\n", "Epoch 1256/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.9628 - calc_mre_K: 0.8174 - val_loss: 65.7127 - val_calc_mre_K: 0.8022\n", "Epoch 1257/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.8583 - calc_mre_K: 0.8161 - val_loss: 66.5648 - val_calc_mre_K: 0.8126\n", "Epoch 1258/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.9595 - calc_mre_K: 0.8174 - val_loss: 67.8017 - val_calc_mre_K: 0.8277\n", "Epoch 1259/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.6699 - calc_mre_K: 0.8138 - val_loss: 65.1192 - val_calc_mre_K: 0.7949\n", "Epoch 1260/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.0149 - calc_mre_K: 0.8181 - val_loss: 68.3368 - val_calc_mre_K: 0.8342\n", "Epoch 1261/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.9471 - calc_mre_K: 0.8172 - val_loss: 65.5883 - val_calc_mre_K: 0.8006\n", "Epoch 1262/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.8542 - calc_mre_K: 0.8161 - val_loss: 70.4962 - val_calc_mre_K: 0.8605\n", "Epoch 1263/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.7842 - calc_mre_K: 0.8152 - val_loss: 71.2910 - val_calc_mre_K: 0.8703\n", "Epoch 1264/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.6557 - calc_mre_K: 0.8137 - val_loss: 65.8013 - val_calc_mre_K: 0.8032\n", "Epoch 
1265/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.9317 - calc_mre_K: 0.8170 - val_loss: 67.1018 - val_calc_mre_K: 0.8191\n", "Epoch 1266/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.9127 - calc_mre_K: 0.8168 - val_loss: 73.6391 - val_calc_mre_K: 0.8989\n", "Epoch 1267/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.7125 - calc_mre_K: 0.8144 - val_loss: 69.5096 - val_calc_mre_K: 0.8485\n", "Epoch 1268/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.8716 - calc_mre_K: 0.8163 - val_loss: 67.4332 - val_calc_mre_K: 0.8232\n", "Epoch 1269/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.7746 - calc_mre_K: 0.8151 - val_loss: 65.9196 - val_calc_mre_K: 0.8047\n", "Epoch 1270/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.7511 - calc_mre_K: 0.8148 - val_loss: 68.8058 - val_calc_mre_K: 0.8399\n", "Epoch 1271/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.0005 - calc_mre_K: 0.8179 - val_loss: 66.8817 - val_calc_mre_K: 0.8164\n", "Epoch 1272/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 67.0025 - calc_mre_K: 0.8179 - val_loss: 67.7800 - val_calc_mre_K: 0.8274\n", "Epoch 1273/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.8805 - calc_mre_K: 0.8164 - val_loss: 66.6674 - val_calc_mre_K: 0.8138\n", "Epoch 1274/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.7006 - calc_mre_K: 0.8142 - val_loss: 67.0935 - val_calc_mre_K: 0.8190\n", "Epoch 1275/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.7424 - calc_mre_K: 0.8147 - val_loss: 66.8621 - val_calc_mre_K: 0.8162\n", "Epoch 1276/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.8748 - calc_mre_K: 0.8163 - val_loss: 65.7524 - val_calc_mre_K: 
0.8026\n", "Epoch 1277/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.6278 - calc_mre_K: 0.8133 - val_loss: 70.5689 - val_calc_mre_K: 0.8614\n", "Epoch 1278/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.7505 - calc_mre_K: 0.8148 - val_loss: 72.4845 - val_calc_mre_K: 0.8848\n", "Epoch 1279/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.7876 - calc_mre_K: 0.8153 - val_loss: 67.3176 - val_calc_mre_K: 0.8217\n", "Epoch 1280/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.7596 - calc_mre_K: 0.8149 - val_loss: 67.8707 - val_calc_mre_K: 0.8285\n", "Epoch 1281/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.9595 - calc_mre_K: 0.8174 - val_loss: 67.6530 - val_calc_mre_K: 0.8258\n", "Epoch 1282/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.9276 - calc_mre_K: 0.8170 - val_loss: 69.5011 - val_calc_mre_K: 0.8484\n", "Epoch 1283/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.0192 - calc_mre_K: 0.8181 - val_loss: 67.1927 - val_calc_mre_K: 0.8202\n", "Epoch 1284/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.7682 - calc_mre_K: 0.8150 - val_loss: 65.8415 - val_calc_mre_K: 0.8037\n", "Epoch 1285/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.7604 - calc_mre_K: 0.8149 - val_loss: 64.8401 - val_calc_mre_K: 0.7915\n", "Epoch 1286/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 67.1399 - calc_mre_K: 0.8196 - val_loss: 66.4564 - val_calc_mre_K: 0.8112\n", "Epoch 1287/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.7304 - calc_mre_K: 0.8146 - val_loss: 67.0800 - val_calc_mre_K: 0.8188\n", "Epoch 1288/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.7167 - calc_mre_K: 0.8144 - val_loss: 65.4841 - 
val_calc_mre_K: 0.7994\n", "Epoch 1289/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 67.0393 - calc_mre_K: 0.8184 - val_loss: 66.2474 - val_calc_mre_K: 0.8087\n", "Epoch 1290/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.8456 - calc_mre_K: 0.8160 - val_loss: 66.7352 - val_calc_mre_K: 0.8146\n", "Epoch 1291/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.7636 - calc_mre_K: 0.8150 - val_loss: 65.9599 - val_calc_mre_K: 0.8052\n", "Epoch 1292/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 66.8491 - calc_mre_K: 0.8160 - val_loss: 66.7092 - val_calc_mre_K: 0.8143\n", "Epoch 1293/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6281 - calc_mre_K: 0.8133 - val_loss: 68.2102 - val_calc_mre_K: 0.8326\n", "Epoch 1294/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 66.9106 - calc_mre_K: 0.8168 - val_loss: 65.6170 - val_calc_mre_K: 0.8010\n", "Epoch 1295/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.6458 - calc_mre_K: 0.8135 - val_loss: 67.1676 - val_calc_mre_K: 0.8199\n", "Epoch 1296/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.8878 - calc_mre_K: 0.8165 - val_loss: 64.7068 - val_calc_mre_K: 0.7899\n", "Epoch 1297/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.5937 - calc_mre_K: 0.8129 - val_loss: 68.1126 - val_calc_mre_K: 0.8315\n", "Epoch 1298/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.6354 - calc_mre_K: 0.8134 - val_loss: 65.9188 - val_calc_mre_K: 0.8047\n", "Epoch 1299/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.8783 - calc_mre_K: 0.8164 - val_loss: 66.5052 - val_calc_mre_K: 0.8118\n", "Epoch 1300/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.6902 - calc_mre_K: 0.8141 - 
val_loss: 64.8742 - val_calc_mre_K: 0.7919\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1301/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.8051 - calc_mre_K: 0.8155 - val_loss: 67.8380 - val_calc_mre_K: 0.8281\n", "Epoch 1302/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.7742 - calc_mre_K: 0.8151 - val_loss: 67.1492 - val_calc_mre_K: 0.8197\n", "Epoch 1303/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6116 - calc_mre_K: 0.8131 - val_loss: 66.6561 - val_calc_mre_K: 0.8137\n", "Epoch 1304/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.7486 - calc_mre_K: 0.8148 - val_loss: 68.4838 - val_calc_mre_K: 0.8360\n", "Epoch 1305/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.5684 - calc_mre_K: 0.8126 - val_loss: 66.0277 - val_calc_mre_K: 0.8060\n", "Epoch 1306/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.7393 - calc_mre_K: 0.8147 - val_loss: 65.1306 - val_calc_mre_K: 0.7951\n", "Epoch 1307/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6311 - calc_mre_K: 0.8134 - val_loss: 66.5372 - val_calc_mre_K: 0.8122\n", "Epoch 1308/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 67.0055 - calc_mre_K: 0.8179 - val_loss: 66.9699 - val_calc_mre_K: 0.8175\n", "Epoch 1309/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.6657 - calc_mre_K: 0.8138 - val_loss: 68.0434 - val_calc_mre_K: 0.8306\n", "Epoch 1310/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.8747 - calc_mre_K: 0.8163 - val_loss: 68.0884 - val_calc_mre_K: 0.8312\n", "Epoch 1311/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.6959 - calc_mre_K: 0.8142 - val_loss: 68.0736 - val_calc_mre_K: 0.8310\n", "Epoch 1312/2000\n", "48000/48000 
[==============================] - 4s 82us/step - loss: 66.7183 - calc_mre_K: 0.8144 - val_loss: 66.4484 - val_calc_mre_K: 0.8111\n", "Epoch 1313/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6960 - calc_mre_K: 0.8142 - val_loss: 68.5008 - val_calc_mre_K: 0.8362\n", "Epoch 1314/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6879 - calc_mre_K: 0.8141 - val_loss: 66.2120 - val_calc_mre_K: 0.8083\n", "Epoch 1315/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.8927 - calc_mre_K: 0.8166 - val_loss: 66.9920 - val_calc_mre_K: 0.8178\n", "Epoch 1316/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6404 - calc_mre_K: 0.8135 - val_loss: 67.3888 - val_calc_mre_K: 0.8226\n", "Epoch 1317/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.4195 - calc_mre_K: 0.8108 - val_loss: 68.5678 - val_calc_mre_K: 0.8370\n", "Epoch 1318/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.8820 - calc_mre_K: 0.8164 - val_loss: 65.5167 - val_calc_mre_K: 0.7998\n", "Epoch 1319/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.8882 - calc_mre_K: 0.8165 - val_loss: 66.0377 - val_calc_mre_K: 0.8061\n", "Epoch 1320/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.8007 - calc_mre_K: 0.8154 - val_loss: 67.8108 - val_calc_mre_K: 0.8278\n", "Epoch 1321/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6121 - calc_mre_K: 0.8131 - val_loss: 67.7918 - val_calc_mre_K: 0.8275\n", "Epoch 1322/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6991 - calc_mre_K: 0.8142 - val_loss: 67.3422 - val_calc_mre_K: 0.8220\n", "Epoch 1323/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.8861 - calc_mre_K: 0.8165 - val_loss: 67.2428 - val_calc_mre_K: 0.8208\n", "Epoch 
1324/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.8437 - calc_mre_K: 0.8160 - val_loss: 67.0621 - val_calc_mre_K: 0.8186\n", "Epoch 1325/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.8728 - calc_mre_K: 0.8163 - val_loss: 65.7074 - val_calc_mre_K: 0.8021\n", "Epoch 1326/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.5818 - calc_mre_K: 0.8128 - val_loss: 69.3537 - val_calc_mre_K: 0.8466\n", "Epoch 1327/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.7257 - calc_mre_K: 0.8145 - val_loss: 66.2604 - val_calc_mre_K: 0.8088\n", "Epoch 1328/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.8462 - calc_mre_K: 0.8160 - val_loss: 66.5156 - val_calc_mre_K: 0.8120\n", "Epoch 1329/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.5041 - calc_mre_K: 0.8118 - val_loss: 78.6033 - val_calc_mre_K: 0.9595\n", "Epoch 1330/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.7126 - calc_mre_K: 0.8144 - val_loss: 65.8893 - val_calc_mre_K: 0.8043\n", "Epoch 1331/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.7545 - calc_mre_K: 0.8149 - val_loss: 66.8210 - val_calc_mre_K: 0.8157\n", "Epoch 1332/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.7490 - calc_mre_K: 0.8148 - val_loss: 68.5574 - val_calc_mre_K: 0.8369\n", "Epoch 1333/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.7302 - calc_mre_K: 0.8146 - val_loss: 66.5090 - val_calc_mre_K: 0.8119\n", "Epoch 1334/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6012 - calc_mre_K: 0.8130 - val_loss: 64.9395 - val_calc_mre_K: 0.7927\n", "Epoch 1335/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.6602 - calc_mre_K: 0.8137 - val_loss: 70.3978 - val_calc_mre_K: 
0.8593\n", "Epoch 1336/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.8355 - calc_mre_K: 0.8159 - val_loss: 65.3081 - val_calc_mre_K: 0.7972\n", "Epoch 1337/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 66.8764 - calc_mre_K: 0.8164 - val_loss: 66.4289 - val_calc_mre_K: 0.8109\n", "Epoch 1338/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6360 - calc_mre_K: 0.8134 - val_loss: 65.7571 - val_calc_mre_K: 0.8027\n", "Epoch 1339/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.7758 - calc_mre_K: 0.8151 - val_loss: 66.4945 - val_calc_mre_K: 0.8117\n", "Epoch 1340/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.5471 - calc_mre_K: 0.8123 - val_loss: 65.7821 - val_calc_mre_K: 0.8030\n", "Epoch 1341/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.7924 - calc_mre_K: 0.8153 - val_loss: 67.6182 - val_calc_mre_K: 0.8254\n", "Epoch 1342/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.5994 - calc_mre_K: 0.8130 - val_loss: 66.4805 - val_calc_mre_K: 0.8115\n", "Epoch 1343/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.5363 - calc_mre_K: 0.8122 - val_loss: 66.3970 - val_calc_mre_K: 0.8105\n", "Epoch 1344/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.6083 - calc_mre_K: 0.8131 - val_loss: 65.8249 - val_calc_mre_K: 0.8035\n", "Epoch 1345/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.7174 - calc_mre_K: 0.8144 - val_loss: 67.1246 - val_calc_mre_K: 0.8194\n", "Epoch 1346/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.6649 - calc_mre_K: 0.8138 - val_loss: 67.3678 - val_calc_mre_K: 0.8224\n", "Epoch 1347/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.5000 - calc_mre_K: 0.8118 - val_loss: 68.2923 - 
val_calc_mre_K: 0.8336\n", "Epoch 1348/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.6266 - calc_mre_K: 0.8133 - val_loss: 66.9277 - val_calc_mre_K: 0.8170\n", "Epoch 1349/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.8541 - calc_mre_K: 0.8161 - val_loss: 68.1382 - val_calc_mre_K: 0.8318\n", "Epoch 1350/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.7522 - calc_mre_K: 0.8148 - val_loss: 66.6739 - val_calc_mre_K: 0.8139\n", "Epoch 1351/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.7619 - calc_mre_K: 0.8150 - val_loss: 66.0662 - val_calc_mre_K: 0.8065\n", "Epoch 1352/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6585 - calc_mre_K: 0.8137 - val_loss: 72.0533 - val_calc_mre_K: 0.8796\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1353/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.4322 - calc_mre_K: 0.8109 - val_loss: 67.5517 - val_calc_mre_K: 0.8246\n", "Epoch 1354/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.6421 - calc_mre_K: 0.8135 - val_loss: 67.1829 - val_calc_mre_K: 0.8201\n", "Epoch 1355/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.6773 - calc_mre_K: 0.8139 - val_loss: 67.4489 - val_calc_mre_K: 0.8234\n", "Epoch 1356/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.5499 - calc_mre_K: 0.8124 - val_loss: 66.8729 - val_calc_mre_K: 0.8163\n", "Epoch 1357/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.7631 - calc_mre_K: 0.8150 - val_loss: 66.1495 - val_calc_mre_K: 0.8075\n", "Epoch 1358/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.5862 - calc_mre_K: 0.8128 - val_loss: 66.7658 - val_calc_mre_K: 0.8150\n", "Epoch 1359/2000\n", "48000/48000 
[==============================] - 4s 77us/step - loss: 66.8177 - calc_mre_K: 0.8156 - val_loss: 74.1742 - val_calc_mre_K: 0.9054\n", "Epoch 1360/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.6452 - calc_mre_K: 0.8135 - val_loss: 68.7566 - val_calc_mre_K: 0.8393\n", "Epoch 1361/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.7084 - calc_mre_K: 0.8143 - val_loss: 69.5116 - val_calc_mre_K: 0.8485\n", "Epoch 1362/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4605 - calc_mre_K: 0.8113 - val_loss: 65.3890 - val_calc_mre_K: 0.7982\n", "Epoch 1363/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.5510 - calc_mre_K: 0.8124 - val_loss: 68.1440 - val_calc_mre_K: 0.8318\n", "Epoch 1364/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4481 - calc_mre_K: 0.8111 - val_loss: 66.1429 - val_calc_mre_K: 0.8074\n", "Epoch 1365/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.6884 - calc_mre_K: 0.8141 - val_loss: 65.5591 - val_calc_mre_K: 0.8003\n", "Epoch 1366/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.6794 - calc_mre_K: 0.8140 - val_loss: 71.4250 - val_calc_mre_K: 0.8719\n", "Epoch 1367/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.7876 - calc_mre_K: 0.8153 - val_loss: 66.3702 - val_calc_mre_K: 0.8102\n", "Epoch 1368/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.5752 - calc_mre_K: 0.8127 - val_loss: 67.7411 - val_calc_mre_K: 0.8269\n", "Epoch 1369/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.5869 - calc_mre_K: 0.8128 - val_loss: 68.8481 - val_calc_mre_K: 0.8404\n", "Epoch 1370/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6966 - calc_mre_K: 0.8142 - val_loss: 67.3434 - val_calc_mre_K: 0.8221\n", "Epoch 
1371/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4800 - calc_mre_K: 0.8115 - val_loss: 67.0946 - val_calc_mre_K: 0.8190\n", "Epoch 1372/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4668 - calc_mre_K: 0.8114 - val_loss: 67.7968 - val_calc_mre_K: 0.8276\n", "Epoch 1373/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.8017 - calc_mre_K: 0.8155 - val_loss: 68.0369 - val_calc_mre_K: 0.8305\n", "Epoch 1374/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4162 - calc_mre_K: 0.8107 - val_loss: 68.3010 - val_calc_mre_K: 0.8338\n", "Epoch 1375/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.7758 - calc_mre_K: 0.8151 - val_loss: 65.7211 - val_calc_mre_K: 0.8023\n", "Epoch 1376/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.5848 - calc_mre_K: 0.8128 - val_loss: 66.3219 - val_calc_mre_K: 0.8096\n", "Epoch 1377/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.5163 - calc_mre_K: 0.8120 - val_loss: 66.0837 - val_calc_mre_K: 0.8067\n", "Epoch 1378/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.5846 - calc_mre_K: 0.8128 - val_loss: 67.4038 - val_calc_mre_K: 0.8228\n", "Epoch 1379/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.5046 - calc_mre_K: 0.8118 - val_loss: 66.3288 - val_calc_mre_K: 0.8097\n", "Epoch 1380/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.5992 - calc_mre_K: 0.8130 - val_loss: 65.8766 - val_calc_mre_K: 0.8042\n", "Epoch 1381/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.4990 - calc_mre_K: 0.8118 - val_loss: 68.8568 - val_calc_mre_K: 0.8405\n", "Epoch 1382/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.5716 - calc_mre_K: 0.8126 - val_loss: 66.7496 - val_calc_mre_K: 
0.8148\n", "Epoch 1383/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.5193 - calc_mre_K: 0.8120 - val_loss: 67.6358 - val_calc_mre_K: 0.8256\n", "Epoch 1384/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.6352 - calc_mre_K: 0.8134 - val_loss: 65.1427 - val_calc_mre_K: 0.7952\n", "Epoch 1385/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.6909 - calc_mre_K: 0.8141 - val_loss: 67.7302 - val_calc_mre_K: 0.8268\n", "Epoch 1386/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.4075 - calc_mre_K: 0.8106 - val_loss: 66.2106 - val_calc_mre_K: 0.8082\n", "Epoch 1387/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.4914 - calc_mre_K: 0.8117 - val_loss: 65.0554 - val_calc_mre_K: 0.7941\n", "Epoch 1388/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.7246 - calc_mre_K: 0.8145 - val_loss: 67.0972 - val_calc_mre_K: 0.8191\n", "Epoch 1389/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.5587 - calc_mre_K: 0.8125 - val_loss: 66.3552 - val_calc_mre_K: 0.8100\n", "Epoch 1390/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2253 - calc_mre_K: 0.8084 - val_loss: 67.2468 - val_calc_mre_K: 0.8209\n", "Epoch 1391/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.3848 - calc_mre_K: 0.8104 - val_loss: 68.5289 - val_calc_mre_K: 0.8365\n", "Epoch 1392/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.6303 - calc_mre_K: 0.8134 - val_loss: 66.0652 - val_calc_mre_K: 0.8065\n", "Epoch 1393/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.6131 - calc_mre_K: 0.8131 - val_loss: 65.6055 - val_calc_mre_K: 0.8008\n", "Epoch 1394/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.3918 - calc_mre_K: 0.8104 - val_loss: 68.0019 - 
val_calc_mre_K: 0.8301\n", "Epoch 1395/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.7194 - calc_mre_K: 0.8144 - val_loss: 66.5499 - val_calc_mre_K: 0.8124\n", "Epoch 1396/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.4144 - calc_mre_K: 0.8107 - val_loss: 66.7075 - val_calc_mre_K: 0.8143\n", "Epoch 1397/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.5205 - calc_mre_K: 0.8120 - val_loss: 65.5341 - val_calc_mre_K: 0.8000\n", "Epoch 1398/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.4995 - calc_mre_K: 0.8118 - val_loss: 67.5129 - val_calc_mre_K: 0.8241\n", "Epoch 1399/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.5915 - calc_mre_K: 0.8129 - val_loss: 64.9512 - val_calc_mre_K: 0.7929\n", "Epoch 1400/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.5076 - calc_mre_K: 0.8119 - val_loss: 66.8320 - val_calc_mre_K: 0.8158\n", "Epoch 1401/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.3684 - calc_mre_K: 0.8102 - val_loss: 67.4835 - val_calc_mre_K: 0.8238\n", "Epoch 1402/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.4317 - calc_mre_K: 0.8109 - val_loss: 66.1969 - val_calc_mre_K: 0.8081\n", "Epoch 1403/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.4096 - calc_mre_K: 0.8107 - val_loss: 68.4543 - val_calc_mre_K: 0.8356\n", "Epoch 1404/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.3983 - calc_mre_K: 0.8105 - val_loss: 65.8401 - val_calc_mre_K: 0.8037\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1405/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.3768 - calc_mre_K: 0.8103 - val_loss: 67.1074 - val_calc_mre_K: 0.8192\n", "Epoch 1406/2000\n", "48000/48000 
[==============================] - 4s 75us/step - loss: 66.4997 - calc_mre_K: 0.8118 - val_loss: 65.9640 - val_calc_mre_K: 0.8052\n", "Epoch 1407/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.5525 - calc_mre_K: 0.8124 - val_loss: 66.9651 - val_calc_mre_K: 0.8174\n", "Epoch 1408/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 66.4859 - calc_mre_K: 0.8116 - val_loss: 67.9303 - val_calc_mre_K: 0.8292\n", "Epoch 1409/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.5773 - calc_mre_K: 0.8127 - val_loss: 67.0743 - val_calc_mre_K: 0.8188\n", "Epoch 1410/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 66.2882 - calc_mre_K: 0.8092 - val_loss: 67.7836 - val_calc_mre_K: 0.8274\n", "Epoch 1411/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 66.4003 - calc_mre_K: 0.8106 - val_loss: 66.1127 - val_calc_mre_K: 0.8070\n", "Epoch 1412/2000\n", "48000/48000 [==============================] - 4s 73us/step - loss: 66.5353 - calc_mre_K: 0.8122 - val_loss: 67.0680 - val_calc_mre_K: 0.8187\n", "Epoch 1413/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 66.2982 - calc_mre_K: 0.8093 - val_loss: 65.9662 - val_calc_mre_K: 0.8053\n", "Epoch 1414/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 66.5481 - calc_mre_K: 0.8124 - val_loss: 66.8193 - val_calc_mre_K: 0.8157\n", "Epoch 1415/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 66.5370 - calc_mre_K: 0.8122 - val_loss: 65.3880 - val_calc_mre_K: 0.7982\n", "Epoch 1416/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 66.3742 - calc_mre_K: 0.8102 - val_loss: 67.0809 - val_calc_mre_K: 0.8189\n", "Epoch 1417/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.2498 - calc_mre_K: 0.8087 - val_loss: 66.1708 - val_calc_mre_K: 0.8077\n", "Epoch 
1418/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4653 - calc_mre_K: 0.8113 - val_loss: 65.6436 - val_calc_mre_K: 0.8013\n", "Epoch 1419/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4601 - calc_mre_K: 0.8113 - val_loss: 65.3757 - val_calc_mre_K: 0.7980\n", "Epoch 1420/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2788 - calc_mre_K: 0.8091 - val_loss: 66.5648 - val_calc_mre_K: 0.8126\n", "Epoch 1421/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4519 - calc_mre_K: 0.8112 - val_loss: 66.6283 - val_calc_mre_K: 0.8133\n", "Epoch 1422/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2595 - calc_mre_K: 0.8088 - val_loss: 66.7540 - val_calc_mre_K: 0.8149\n", "Epoch 1423/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4910 - calc_mre_K: 0.8117 - val_loss: 66.5472 - val_calc_mre_K: 0.8123\n", "Epoch 1424/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.3291 - calc_mre_K: 0.8097 - val_loss: 64.2405 - val_calc_mre_K: 0.7842\n", "Epoch 1425/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2275 - calc_mre_K: 0.8084 - val_loss: 68.5160 - val_calc_mre_K: 0.8364\n", "Epoch 1426/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4816 - calc_mre_K: 0.8115 - val_loss: 65.8689 - val_calc_mre_K: 0.8041\n", "Epoch 1427/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.5277 - calc_mre_K: 0.8121 - val_loss: 66.9894 - val_calc_mre_K: 0.8177\n", "Epoch 1428/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4857 - calc_mre_K: 0.8116 - val_loss: 67.1826 - val_calc_mre_K: 0.8201\n", "Epoch 1429/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.3915 - calc_mre_K: 0.8104 - val_loss: 66.7130 - val_calc_mre_K: 
0.8144\n", "Epoch 1430/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.1983 - calc_mre_K: 0.8081 - val_loss: 65.5044 - val_calc_mre_K: 0.7996\n", "Epoch 1431/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.4029 - calc_mre_K: 0.8106 - val_loss: 65.1119 - val_calc_mre_K: 0.7948\n", "Epoch 1432/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.2300 - calc_mre_K: 0.8085 - val_loss: 65.9991 - val_calc_mre_K: 0.8057\n", "Epoch 1433/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.5253 - calc_mre_K: 0.8121 - val_loss: 65.0567 - val_calc_mre_K: 0.7941\n", "Epoch 1434/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4116 - calc_mre_K: 0.8107 - val_loss: 66.8777 - val_calc_mre_K: 0.8164\n", "Epoch 1435/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.1004 - calc_mre_K: 0.8069 - val_loss: 68.3363 - val_calc_mre_K: 0.8342\n", "Epoch 1436/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.2807 - calc_mre_K: 0.8091 - val_loss: 66.2985 - val_calc_mre_K: 0.8093\n", "Epoch 1437/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.4452 - calc_mre_K: 0.8111 - val_loss: 65.1499 - val_calc_mre_K: 0.7953\n", "Epoch 1438/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2893 - calc_mre_K: 0.8092 - val_loss: 69.3953 - val_calc_mre_K: 0.8471\n", "Epoch 1439/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.3302 - calc_mre_K: 0.8097 - val_loss: 68.1510 - val_calc_mre_K: 0.8319\n", "Epoch 1440/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.4691 - calc_mre_K: 0.8114 - val_loss: 68.7709 - val_calc_mre_K: 0.8395\n", "Epoch 1441/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.1970 - calc_mre_K: 0.8081 - val_loss: 65.0276 - 
val_calc_mre_K: 0.7938\n", "Epoch 1442/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.5254 - calc_mre_K: 0.8121 - val_loss: 67.0146 - val_calc_mre_K: 0.8180\n", "Epoch 1443/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.2798 - calc_mre_K: 0.8091 - val_loss: 65.7691 - val_calc_mre_K: 0.8028\n", "Epoch 1444/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.4530 - calc_mre_K: 0.8112 - val_loss: 67.1200 - val_calc_mre_K: 0.8193\n", "Epoch 1445/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.0540 - calc_mre_K: 0.8063 - val_loss: 66.1303 - val_calc_mre_K: 0.8073\n", "Epoch 1446/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.4767 - calc_mre_K: 0.8115 - val_loss: 66.7381 - val_calc_mre_K: 0.8147\n", "Epoch 1447/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.4047 - calc_mre_K: 0.8106 - val_loss: 66.5672 - val_calc_mre_K: 0.8126\n", "Epoch 1448/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.1995 - calc_mre_K: 0.8081 - val_loss: 68.8960 - val_calc_mre_K: 0.8410\n", "Epoch 1449/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.3161 - calc_mre_K: 0.8095 - val_loss: 65.3539 - val_calc_mre_K: 0.7978\n", "Epoch 1450/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.1882 - calc_mre_K: 0.8080 - val_loss: 66.2587 - val_calc_mre_K: 0.8088\n", "Epoch 1451/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.3097 - calc_mre_K: 0.8094 - val_loss: 65.5482 - val_calc_mre_K: 0.8001\n", "Epoch 1452/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.2160 - calc_mre_K: 0.8083 - val_loss: 67.7136 - val_calc_mre_K: 0.8266\n", "Epoch 1453/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4730 - calc_mre_K: 0.8114 - 
val_loss: 64.8405 - val_calc_mre_K: 0.7915\n", "Epoch 1454/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.4426 - calc_mre_K: 0.8111 - val_loss: 67.2774 - val_calc_mre_K: 0.8213\n", "Epoch 1455/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.1365 - calc_mre_K: 0.8073 - val_loss: 65.7532 - val_calc_mre_K: 0.8027\n", "Epoch 1456/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.3985 - calc_mre_K: 0.8105 - val_loss: 67.4509 - val_calc_mre_K: 0.8234\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1457/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.2911 - calc_mre_K: 0.8092 - val_loss: 65.0784 - val_calc_mre_K: 0.7944\n", "Epoch 1458/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.3030 - calc_mre_K: 0.8094 - val_loss: 65.8812 - val_calc_mre_K: 0.8042\n", "Epoch 1459/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4011 - calc_mre_K: 0.8106 - val_loss: 65.3577 - val_calc_mre_K: 0.7978\n", "Epoch 1460/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.4467 - calc_mre_K: 0.8111 - val_loss: 66.0511 - val_calc_mre_K: 0.8063\n", "Epoch 1461/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.2864 - calc_mre_K: 0.8092 - val_loss: 66.4445 - val_calc_mre_K: 0.8111\n", "Epoch 1462/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.2359 - calc_mre_K: 0.8085 - val_loss: 65.7780 - val_calc_mre_K: 0.8030\n", "Epoch 1463/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.3463 - calc_mre_K: 0.8099 - val_loss: 69.2057 - val_calc_mre_K: 0.8448\n", "Epoch 1464/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.2311 - calc_mre_K: 0.8085 - val_loss: 66.5688 - val_calc_mre_K: 0.8126\n", "Epoch 1465/2000\n", "48000/48000 
[==============================] - 4s 78us/step - loss: 66.1261 - calc_mre_K: 0.8072 - val_loss: 67.3048 - val_calc_mre_K: 0.8216\n", "Epoch 1466/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.1867 - calc_mre_K: 0.8079 - val_loss: 66.7343 - val_calc_mre_K: 0.8146\n", "Epoch 1467/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.3268 - calc_mre_K: 0.8097 - val_loss: 64.0565 - val_calc_mre_K: 0.7819\n", "Epoch 1468/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.2135 - calc_mre_K: 0.8083 - val_loss: 67.2558 - val_calc_mre_K: 0.8210\n", "Epoch 1469/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.3149 - calc_mre_K: 0.8095 - val_loss: 65.2417 - val_calc_mre_K: 0.7964\n", "Epoch 1470/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.2154 - calc_mre_K: 0.8083 - val_loss: 67.2796 - val_calc_mre_K: 0.8213\n", "Epoch 1471/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.1444 - calc_mre_K: 0.8074 - val_loss: 69.1274 - val_calc_mre_K: 0.8438\n", "Epoch 1472/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.2798 - calc_mre_K: 0.8091 - val_loss: 65.5653 - val_calc_mre_K: 0.8004\n", "Epoch 1473/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.1738 - calc_mre_K: 0.8078 - val_loss: 66.7324 - val_calc_mre_K: 0.8146\n", "Epoch 1474/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.3018 - calc_mre_K: 0.8093 - val_loss: 65.2695 - val_calc_mre_K: 0.7967\n", "Epoch 1475/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.0675 - calc_mre_K: 0.8065 - val_loss: 66.4794 - val_calc_mre_K: 0.8115\n", "Epoch 1476/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2394 - calc_mre_K: 0.8086 - val_loss: 64.8757 - val_calc_mre_K: 0.7919\n", "Epoch 
1477/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2842 - calc_mre_K: 0.8091 - val_loss: 65.1069 - val_calc_mre_K: 0.7948\n", "Epoch 1478/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.0641 - calc_mre_K: 0.8064 - val_loss: 69.1327 - val_calc_mre_K: 0.8439\n", "Epoch 1479/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.2405 - calc_mre_K: 0.8086 - val_loss: 65.6287 - val_calc_mre_K: 0.8011\n", "Epoch 1480/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.1650 - calc_mre_K: 0.8077 - val_loss: 66.7319 - val_calc_mre_K: 0.8146\n", "Epoch 1481/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2333 - calc_mre_K: 0.8085 - val_loss: 66.9835 - val_calc_mre_K: 0.8177\n", "Epoch 1482/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.1479 - calc_mre_K: 0.8075 - val_loss: 65.9547 - val_calc_mre_K: 0.8051\n", "Epoch 1483/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.1236 - calc_mre_K: 0.8072 - val_loss: 65.8238 - val_calc_mre_K: 0.8035\n", "Epoch 1484/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.4369 - calc_mre_K: 0.8110 - val_loss: 66.9281 - val_calc_mre_K: 0.8170\n", "Epoch 1485/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.2529 - calc_mre_K: 0.8088 - val_loss: 66.9655 - val_calc_mre_K: 0.8175\n", "Epoch 1486/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.2966 - calc_mre_K: 0.8093 - val_loss: 66.8690 - val_calc_mre_K: 0.8163\n", "Epoch 1487/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.9651 - calc_mre_K: 0.8052 - val_loss: 65.1359 - val_calc_mre_K: 0.7951\n", "Epoch 1488/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.3312 - calc_mre_K: 0.8097 - val_loss: 67.6992 - val_calc_mre_K: 
0.8264\n", "Epoch 1489/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 66.3182 - calc_mre_K: 0.8095 - val_loss: 67.1273 - val_calc_mre_K: 0.8194\n", "Epoch 1490/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.0698 - calc_mre_K: 0.8065 - val_loss: 65.1038 - val_calc_mre_K: 0.7947\n", "Epoch 1491/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.3203 - calc_mre_K: 0.8096 - val_loss: 64.8135 - val_calc_mre_K: 0.7912\n", "Epoch 1492/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.1277 - calc_mre_K: 0.8072 - val_loss: 66.2881 - val_calc_mre_K: 0.8092\n", "Epoch 1493/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.2593 - calc_mre_K: 0.8088 - val_loss: 65.1731 - val_calc_mre_K: 0.7956\n", "Epoch 1494/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.3035 - calc_mre_K: 0.8094 - val_loss: 64.0335 - val_calc_mre_K: 0.7817\n", "Epoch 1495/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.2460 - calc_mre_K: 0.8087 - val_loss: 67.1093 - val_calc_mre_K: 0.8192\n", "Epoch 1496/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.2323 - calc_mre_K: 0.8085 - val_loss: 65.5782 - val_calc_mre_K: 0.8005\n", "Epoch 1497/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.0906 - calc_mre_K: 0.8068 - val_loss: 68.9801 - val_calc_mre_K: 0.8420\n", "Epoch 1498/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.5327 - calc_mre_K: 0.8122 - val_loss: 65.3360 - val_calc_mre_K: 0.7976\n", "Epoch 1499/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.9816 - calc_mre_K: 0.8054 - val_loss: 65.0186 - val_calc_mre_K: 0.7937\n", "Epoch 1500/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2953 - calc_mre_K: 0.8093 - val_loss: 68.6661 - 
val_calc_mre_K: 0.8382\n", "Epoch 1501/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.0673 - calc_mre_K: 0.8065 - val_loss: 64.8227 - val_calc_mre_K: 0.7913\n", "Epoch 1502/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.0429 - calc_mre_K: 0.8062 - val_loss: 67.4186 - val_calc_mre_K: 0.8230\n", "Epoch 1503/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.2086 - calc_mre_K: 0.8082 - val_loss: 65.3037 - val_calc_mre_K: 0.7972\n", "Epoch 1504/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.2929 - calc_mre_K: 0.8092 - val_loss: 65.5324 - val_calc_mre_K: 0.8000\n", "Epoch 1505/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.1801 - calc_mre_K: 0.8079 - val_loss: 64.8779 - val_calc_mre_K: 0.7920\n", "Epoch 1506/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 66.1838 - calc_mre_K: 0.8079 - val_loss: 65.4223 - val_calc_mre_K: 0.7986\n", "Epoch 1507/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.1464 - calc_mre_K: 0.8075 - val_loss: 65.9030 - val_calc_mre_K: 0.8045\n", "Epoch 1508/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.1325 - calc_mre_K: 0.8073 - val_loss: 65.5533 - val_calc_mre_K: 0.8002\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1509/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.1992 - calc_mre_K: 0.8081 - val_loss: 64.6479 - val_calc_mre_K: 0.7892\n", "Epoch 1510/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 66.1349 - calc_mre_K: 0.8073 - val_loss: 65.8966 - val_calc_mre_K: 0.8044\n", "Epoch 1511/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.2914 - calc_mre_K: 0.8092 - val_loss: 66.0286 - val_calc_mre_K: 0.8060\n", "Epoch 1512/2000\n", "48000/48000 
[==============================] - 4s 76us/step - loss: 66.1392 - calc_mre_K: 0.8074 - val_loss: 64.0638 - val_calc_mre_K: 0.7820\n", "Epoch 1513/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.3745 - calc_mre_K: 0.8102 - val_loss: 68.1223 - val_calc_mre_K: 0.8316\n", "Epoch 1514/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.0707 - calc_mre_K: 0.8065 - val_loss: 64.5943 - val_calc_mre_K: 0.7885\n", "Epoch 1515/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.2314 - calc_mre_K: 0.8085 - val_loss: 65.8018 - val_calc_mre_K: 0.8032\n", "Epoch 1516/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.1303 - calc_mre_K: 0.8073 - val_loss: 65.6345 - val_calc_mre_K: 0.8012\n", "Epoch 1517/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.2707 - calc_mre_K: 0.8090 - val_loss: 67.4136 - val_calc_mre_K: 0.8229\n", "Epoch 1518/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.1253 - calc_mre_K: 0.8072 - val_loss: 64.9834 - val_calc_mre_K: 0.7933\n", "Epoch 1519/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2103 - calc_mre_K: 0.8082 - val_loss: 66.9227 - val_calc_mre_K: 0.8169\n", "Epoch 1520/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.1680 - calc_mre_K: 0.8077 - val_loss: 69.3548 - val_calc_mre_K: 0.8466\n", "Epoch 1521/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.2345 - calc_mre_K: 0.8085 - val_loss: 65.4676 - val_calc_mre_K: 0.7992\n", "Epoch 1522/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.2854 - calc_mre_K: 0.8091 - val_loss: 66.9306 - val_calc_mre_K: 0.8170\n", "Epoch 1523/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.2811 - calc_mre_K: 0.8091 - val_loss: 64.7721 - val_calc_mre_K: 0.7907\n", "Epoch 
1524/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.2418 - calc_mre_K: 0.8086 - val_loss: 64.2810 - val_calc_mre_K: 0.7847\n", "Epoch 1525/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9769 - calc_mre_K: 0.8054 - val_loss: 65.2446 - val_calc_mre_K: 0.7964\n", "Epoch 1526/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.2424 - calc_mre_K: 0.8086 - val_loss: 65.9163 - val_calc_mre_K: 0.8046\n", "Epoch 1527/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.2939 - calc_mre_K: 0.8093 - val_loss: 65.8634 - val_calc_mre_K: 0.8040\n", "Epoch 1528/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.8922 - calc_mre_K: 0.8043 - val_loss: 66.6878 - val_calc_mre_K: 0.8141\n", "Epoch 1529/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.1580 - calc_mre_K: 0.8076 - val_loss: 64.9171 - val_calc_mre_K: 0.7924\n", "Epoch 1530/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.0703 - calc_mre_K: 0.8065 - val_loss: 67.9056 - val_calc_mre_K: 0.8289\n", "Epoch 1531/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.3021 - calc_mre_K: 0.8094 - val_loss: 66.5037 - val_calc_mre_K: 0.8118\n", "Epoch 1532/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.3668 - calc_mre_K: 0.8101 - val_loss: 66.5031 - val_calc_mre_K: 0.8118\n", "Epoch 1533/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.0053 - calc_mre_K: 0.8057 - val_loss: 71.2490 - val_calc_mre_K: 0.8697\n", "Epoch 1534/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.2186 - calc_mre_K: 0.8083 - val_loss: 67.9357 - val_calc_mre_K: 0.8293\n", "Epoch 1535/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.2415 - calc_mre_K: 0.8086 - val_loss: 71.9786 - val_calc_mre_K: 
0.8786\n", "Epoch 1536/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.2224 - calc_mre_K: 0.8084 - val_loss: 64.5756 - val_calc_mre_K: 0.7883\n", "Epoch 1537/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.2358 - calc_mre_K: 0.8085 - val_loss: 67.2346 - val_calc_mre_K: 0.8207\n", "Epoch 1538/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.0674 - calc_mre_K: 0.8065 - val_loss: 66.1827 - val_calc_mre_K: 0.8079\n", "Epoch 1539/2000\n", "48000/48000 [==============================] - 4s 83us/step - loss: 66.0236 - calc_mre_K: 0.8060 - val_loss: 66.7531 - val_calc_mre_K: 0.8149\n", "Epoch 1540/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.3394 - calc_mre_K: 0.8098 - val_loss: 66.5352 - val_calc_mre_K: 0.8122\n", "Epoch 1541/2000\n", "48000/48000 [==============================] - 4s 83us/step - loss: 65.9710 - calc_mre_K: 0.8053 - val_loss: 65.2694 - val_calc_mre_K: 0.7967\n", "Epoch 1542/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 66.2110 - calc_mre_K: 0.8082 - val_loss: 65.7041 - val_calc_mre_K: 0.8021\n", "Epoch 1543/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 65.9408 - calc_mre_K: 0.8049 - val_loss: 65.5760 - val_calc_mre_K: 0.8005\n", "Epoch 1544/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.3605 - calc_mre_K: 0.8101 - val_loss: 69.6056 - val_calc_mre_K: 0.8497\n", "Epoch 1545/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.2347 - calc_mre_K: 0.8085 - val_loss: 64.4380 - val_calc_mre_K: 0.7866\n", "Epoch 1546/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.1267 - calc_mre_K: 0.8072 - val_loss: 66.2012 - val_calc_mre_K: 0.8081\n", "Epoch 1547/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.2976 - calc_mre_K: 0.8093 - val_loss: 64.1364 - 
val_calc_mre_K: 0.7829\n", "Epoch 1548/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.2990 - calc_mre_K: 0.8093 - val_loss: 66.6063 - val_calc_mre_K: 0.8131\n", "Epoch 1549/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.0577 - calc_mre_K: 0.8064 - val_loss: 68.3723 - val_calc_mre_K: 0.8346\n", "Epoch 1550/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9890 - calc_mre_K: 0.8055 - val_loss: 65.4290 - val_calc_mre_K: 0.7987\n", "Epoch 1551/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.1406 - calc_mre_K: 0.8074 - val_loss: 68.2932 - val_calc_mre_K: 0.8337\n", "Epoch 1552/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 66.3091 - calc_mre_K: 0.8094 - val_loss: 66.2592 - val_calc_mre_K: 0.8088\n", "Epoch 1553/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9984 - calc_mre_K: 0.8056 - val_loss: 66.7100 - val_calc_mre_K: 0.8143\n", "Epoch 1554/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.8974 - calc_mre_K: 0.8044 - val_loss: 69.8623 - val_calc_mre_K: 0.8528\n", "Epoch 1555/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.1161 - calc_mre_K: 0.8071 - val_loss: 65.7168 - val_calc_mre_K: 0.8022\n", "Epoch 1556/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.0312 - calc_mre_K: 0.8060 - val_loss: 65.6137 - val_calc_mre_K: 0.8009\n", "Epoch 1557/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.8442 - calc_mre_K: 0.8038 - val_loss: 63.8432 - val_calc_mre_K: 0.7793\n", "Epoch 1558/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.2058 - calc_mre_K: 0.8082 - val_loss: 68.8761 - val_calc_mre_K: 0.8408\n", "Epoch 1559/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9814 - calc_mre_K: 0.8054 - 
val_loss: 66.7548 - val_calc_mre_K: 0.8149\n", "Epoch 1560/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.0444 - calc_mre_K: 0.8062 - val_loss: 65.4012 - val_calc_mre_K: 0.7984\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1561/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9791 - calc_mre_K: 0.8054 - val_loss: 67.1021 - val_calc_mre_K: 0.8191\n", "Epoch 1562/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.1306 - calc_mre_K: 0.8073 - val_loss: 67.1971 - val_calc_mre_K: 0.8203\n", "Epoch 1563/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.9667 - calc_mre_K: 0.8053 - val_loss: 66.0394 - val_calc_mre_K: 0.8061\n", "Epoch 1564/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.8852 - calc_mre_K: 0.8043 - val_loss: 64.1786 - val_calc_mre_K: 0.7834\n", "Epoch 1565/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.0668 - calc_mre_K: 0.8065 - val_loss: 64.6863 - val_calc_mre_K: 0.7896\n", "Epoch 1566/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9658 - calc_mre_K: 0.8052 - val_loss: 67.0145 - val_calc_mre_K: 0.8180\n", "Epoch 1567/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.1268 - calc_mre_K: 0.8072 - val_loss: 66.5065 - val_calc_mre_K: 0.8118\n", "Epoch 1568/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9545 - calc_mre_K: 0.8051 - val_loss: 68.2088 - val_calc_mre_K: 0.8326\n", "Epoch 1569/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.8931 - calc_mre_K: 0.8044 - val_loss: 65.5064 - val_calc_mre_K: 0.7996\n", "Epoch 1570/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9786 - calc_mre_K: 0.8054 - val_loss: 64.6146 - val_calc_mre_K: 0.7888\n", "Epoch 1571/2000\n", "48000/48000 
[==============================] - 4s 82us/step - loss: 65.8782 - calc_mre_K: 0.8042 - val_loss: 65.3655 - val_calc_mre_K: 0.7979\n", "Epoch 1572/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9830 - calc_mre_K: 0.8055 - val_loss: 65.9315 - val_calc_mre_K: 0.8048\n", "Epoch 1573/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9149 - calc_mre_K: 0.8046 - val_loss: 65.8181 - val_calc_mre_K: 0.8034\n", "Epoch 1574/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.2086 - calc_mre_K: 0.8082 - val_loss: 66.1239 - val_calc_mre_K: 0.8072\n", "Epoch 1575/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8810 - calc_mre_K: 0.8042 - val_loss: 66.0681 - val_calc_mre_K: 0.8065\n", "Epoch 1576/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.9434 - calc_mre_K: 0.8050 - val_loss: 67.3140 - val_calc_mre_K: 0.8217\n", "Epoch 1577/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.0546 - calc_mre_K: 0.8063 - val_loss: 66.4842 - val_calc_mre_K: 0.8116\n", "Epoch 1578/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.0407 - calc_mre_K: 0.8062 - val_loss: 65.0586 - val_calc_mre_K: 0.7942\n", "Epoch 1579/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9085 - calc_mre_K: 0.8045 - val_loss: 68.0542 - val_calc_mre_K: 0.8307\n", "Epoch 1580/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.1304 - calc_mre_K: 0.8073 - val_loss: 65.7060 - val_calc_mre_K: 0.8021\n", "Epoch 1581/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.0073 - calc_mre_K: 0.8058 - val_loss: 65.3867 - val_calc_mre_K: 0.7982\n", "Epoch 1582/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.7365 - calc_mre_K: 0.8024 - val_loss: 69.3807 - val_calc_mre_K: 0.8469\n", "Epoch 
1583/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8922 - calc_mre_K: 0.8043 - val_loss: 65.6847 - val_calc_mre_K: 0.8018\n", "Epoch 1584/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9398 - calc_mre_K: 0.8049 - val_loss: 66.7759 - val_calc_mre_K: 0.8151\n", "Epoch 1585/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.0852 - calc_mre_K: 0.8067 - val_loss: 64.8900 - val_calc_mre_K: 0.7921\n", "Epoch 1586/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.1020 - calc_mre_K: 0.8069 - val_loss: 70.7879 - val_calc_mre_K: 0.8641\n", "Epoch 1587/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.1292 - calc_mre_K: 0.8072 - val_loss: 64.2565 - val_calc_mre_K: 0.7844\n", "Epoch 1588/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.0364 - calc_mre_K: 0.8061 - val_loss: 67.5842 - val_calc_mre_K: 0.8250\n", "Epoch 1589/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.7584 - calc_mre_K: 0.8027 - val_loss: 63.5900 - val_calc_mre_K: 0.7762\n", "Epoch 1590/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.0634 - calc_mre_K: 0.8064 - val_loss: 65.3703 - val_calc_mre_K: 0.7980\n", "Epoch 1591/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9580 - calc_mre_K: 0.8052 - val_loss: 64.8170 - val_calc_mre_K: 0.7912\n", "Epoch 1592/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9568 - calc_mre_K: 0.8051 - val_loss: 67.9722 - val_calc_mre_K: 0.8297\n", "Epoch 1593/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 66.0389 - calc_mre_K: 0.8061 - val_loss: 66.7617 - val_calc_mre_K: 0.8150\n", "Epoch 1594/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9614 - calc_mre_K: 0.8052 - val_loss: 67.1688 - val_calc_mre_K: 
0.8199\n", "Epoch 1595/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8361 - calc_mre_K: 0.8037 - val_loss: 68.5535 - val_calc_mre_K: 0.8368\n", "Epoch 1596/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9356 - calc_mre_K: 0.8049 - val_loss: 70.3874 - val_calc_mre_K: 0.8592\n", "Epoch 1597/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.0276 - calc_mre_K: 0.8060 - val_loss: 67.8220 - val_calc_mre_K: 0.8279\n", "Epoch 1598/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.8135 - calc_mre_K: 0.8034 - val_loss: 64.6853 - val_calc_mre_K: 0.7896\n", "Epoch 1599/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8637 - calc_mre_K: 0.8040 - val_loss: 66.2102 - val_calc_mre_K: 0.8082\n", "Epoch 1600/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.1217 - calc_mre_K: 0.8071 - val_loss: 66.8077 - val_calc_mre_K: 0.8155\n", "Epoch 1601/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9679 - calc_mre_K: 0.8053 - val_loss: 66.1411 - val_calc_mre_K: 0.8074\n", "Epoch 1602/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.9284 - calc_mre_K: 0.8048 - val_loss: 65.6434 - val_calc_mre_K: 0.8013\n", "Epoch 1603/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.0895 - calc_mre_K: 0.8068 - val_loss: 67.1503 - val_calc_mre_K: 0.8197\n", "Epoch 1604/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.0051 - calc_mre_K: 0.8057 - val_loss: 67.5338 - val_calc_mre_K: 0.8244\n", "Epoch 1605/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 66.0651 - calc_mre_K: 0.8065 - val_loss: 65.0682 - val_calc_mre_K: 0.7943\n", "Epoch 1606/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9426 - calc_mre_K: 0.8050 - val_loss: 65.5254 - 
val_calc_mre_K: 0.7999\n", "Epoch 1607/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.8806 - calc_mre_K: 0.8042 - val_loss: 64.7229 - val_calc_mre_K: 0.7901\n", "Epoch 1608/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.9387 - calc_mre_K: 0.8049 - val_loss: 65.2791 - val_calc_mre_K: 0.7969\n", "Epoch 1609/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 66.1191 - calc_mre_K: 0.8071 - val_loss: 65.7157 - val_calc_mre_K: 0.8022\n", "Epoch 1610/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.8826 - calc_mre_K: 0.8042 - val_loss: 67.4078 - val_calc_mre_K: 0.8228\n", "Epoch 1611/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.8340 - calc_mre_K: 0.8036 - val_loss: 66.7799 - val_calc_mre_K: 0.8152\n", "Epoch 1612/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.9475 - calc_mre_K: 0.8050 - val_loss: 65.2814 - val_calc_mre_K: 0.7969\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1613/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.9270 - calc_mre_K: 0.8048 - val_loss: 65.5508 - val_calc_mre_K: 0.8002\n", "Epoch 1614/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.0787 - calc_mre_K: 0.8066 - val_loss: 67.6766 - val_calc_mre_K: 0.8261\n", "Epoch 1615/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.8324 - calc_mre_K: 0.8036 - val_loss: 66.4720 - val_calc_mre_K: 0.8114\n", "Epoch 1616/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.9836 - calc_mre_K: 0.8055 - val_loss: 65.8238 - val_calc_mre_K: 0.8035\n", "Epoch 1617/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.9388 - calc_mre_K: 0.8049 - val_loss: 67.8855 - val_calc_mre_K: 0.8287\n", "Epoch 1618/2000\n", "48000/48000 
[==============================] - 4s 75us/step - loss: 65.8704 - calc_mre_K: 0.8041 - val_loss: 64.5223 - val_calc_mre_K: 0.7876\n", "Epoch 1619/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.9897 - calc_mre_K: 0.8055 - val_loss: 67.2577 - val_calc_mre_K: 0.8210\n", "Epoch 1620/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.8630 - calc_mre_K: 0.8040 - val_loss: 65.8123 - val_calc_mre_K: 0.8034\n", "Epoch 1621/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.7190 - calc_mre_K: 0.8022 - val_loss: 64.8730 - val_calc_mre_K: 0.7919\n", "Epoch 1622/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 66.0177 - calc_mre_K: 0.8059 - val_loss: 66.5787 - val_calc_mre_K: 0.8127\n", "Epoch 1623/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.8333 - calc_mre_K: 0.8036 - val_loss: 66.0597 - val_calc_mre_K: 0.8064\n", "Epoch 1624/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.7268 - calc_mre_K: 0.8023 - val_loss: 65.6026 - val_calc_mre_K: 0.8008\n", "Epoch 1625/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.9723 - calc_mre_K: 0.8053 - val_loss: 64.6672 - val_calc_mre_K: 0.7894\n", "Epoch 1626/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.9959 - calc_mre_K: 0.8056 - val_loss: 71.4209 - val_calc_mre_K: 0.8718\n", "Epoch 1627/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.8347 - calc_mre_K: 0.8036 - val_loss: 68.5198 - val_calc_mre_K: 0.8364\n", "Epoch 1628/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.7674 - calc_mre_K: 0.8028 - val_loss: 65.3545 - val_calc_mre_K: 0.7978\n", "Epoch 1629/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 65.9329 - calc_mre_K: 0.8048 - val_loss: 66.3837 - val_calc_mre_K: 0.8103\n", "Epoch 
1630/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.9435 - calc_mre_K: 0.8050 - val_loss: 64.8982 - val_calc_mre_K: 0.7922\n", "Epoch 1631/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.7743 - calc_mre_K: 0.8029 - val_loss: 67.2597 - val_calc_mre_K: 0.8210\n", "Epoch 1632/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.8972 - calc_mre_K: 0.8044 - val_loss: 64.6227 - val_calc_mre_K: 0.7889\n", "Epoch 1633/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.8551 - calc_mre_K: 0.8039 - val_loss: 66.1713 - val_calc_mre_K: 0.8078\n", "Epoch 1634/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.9160 - calc_mre_K: 0.8046 - val_loss: 64.6405 - val_calc_mre_K: 0.7891\n", "Epoch 1635/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8956 - calc_mre_K: 0.8044 - val_loss: 64.2112 - val_calc_mre_K: 0.7838\n", "Epoch 1636/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.7686 - calc_mre_K: 0.8028 - val_loss: 67.8945 - val_calc_mre_K: 0.8288\n", "Epoch 1637/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.9710 - calc_mre_K: 0.8053 - val_loss: 65.2958 - val_calc_mre_K: 0.7971\n", "Epoch 1638/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.8135 - calc_mre_K: 0.8034 - val_loss: 65.5640 - val_calc_mre_K: 0.8003\n", "Epoch 1639/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.8024 - calc_mre_K: 0.8033 - val_loss: 65.0501 - val_calc_mre_K: 0.7941\n", "Epoch 1640/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.9280 - calc_mre_K: 0.8048 - val_loss: 66.8886 - val_calc_mre_K: 0.8165\n", "Epoch 1641/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 66.1642 - calc_mre_K: 0.8077 - val_loss: 65.8296 - val_calc_mre_K: 
0.8036\n", "Epoch 1642/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9410 - calc_mre_K: 0.8049 - val_loss: 65.0802 - val_calc_mre_K: 0.7944\n", "Epoch 1643/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.9022 - calc_mre_K: 0.8045 - val_loss: 65.5595 - val_calc_mre_K: 0.8003\n", "Epoch 1644/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.8451 - calc_mre_K: 0.8038 - val_loss: 68.0552 - val_calc_mre_K: 0.8308\n", "Epoch 1645/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.1098 - calc_mre_K: 0.8070 - val_loss: 64.6959 - val_calc_mre_K: 0.7897\n", "Epoch 1646/2000\n", "48000/48000 [==============================] - 4s 83us/step - loss: 65.8275 - calc_mre_K: 0.8036 - val_loss: 65.1250 - val_calc_mre_K: 0.7950\n", "Epoch 1647/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.9080 - calc_mre_K: 0.8045 - val_loss: 66.1965 - val_calc_mre_K: 0.8081\n", "Epoch 1648/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.8105 - calc_mre_K: 0.8034 - val_loss: 64.5541 - val_calc_mre_K: 0.7880\n", "Epoch 1649/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9345 - calc_mre_K: 0.8049 - val_loss: 65.5025 - val_calc_mre_K: 0.7996\n", "Epoch 1650/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7846 - calc_mre_K: 0.8030 - val_loss: 65.5076 - val_calc_mre_K: 0.7997\n", "Epoch 1651/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.8875 - calc_mre_K: 0.8043 - val_loss: 67.1000 - val_calc_mre_K: 0.8191\n", "Epoch 1652/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.7916 - calc_mre_K: 0.8031 - val_loss: 65.2950 - val_calc_mre_K: 0.7971\n", "Epoch 1653/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.8086 - calc_mre_K: 0.8033 - val_loss: 65.1452 - 
val_calc_mre_K: 0.7952\n", "Epoch 1654/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.0780 - calc_mre_K: 0.8066 - val_loss: 65.6391 - val_calc_mre_K: 0.8013\n", "Epoch 1655/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9781 - calc_mre_K: 0.8054 - val_loss: 64.2572 - val_calc_mre_K: 0.7844\n", "Epoch 1656/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.6245 - calc_mre_K: 0.8011 - val_loss: 66.2764 - val_calc_mre_K: 0.8090\n", "Epoch 1657/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.8468 - calc_mre_K: 0.8038 - val_loss: 64.9054 - val_calc_mre_K: 0.7923\n", "Epoch 1658/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9686 - calc_mre_K: 0.8053 - val_loss: 66.3509 - val_calc_mre_K: 0.8099\n", "Epoch 1659/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7500 - calc_mre_K: 0.8026 - val_loss: 63.5273 - val_calc_mre_K: 0.7755\n", "Epoch 1660/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.1115 - calc_mre_K: 0.8070 - val_loss: 66.9027 - val_calc_mre_K: 0.8167\n", "Epoch 1661/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.7917 - calc_mre_K: 0.8031 - val_loss: 65.8767 - val_calc_mre_K: 0.8042\n", "Epoch 1662/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.7271 - calc_mre_K: 0.8023 - val_loss: 66.7482 - val_calc_mre_K: 0.8148\n", "Epoch 1663/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.7060 - calc_mre_K: 0.8021 - val_loss: 66.6658 - val_calc_mre_K: 0.8138\n", "Epoch 1664/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 66.0885 - calc_mre_K: 0.8067 - val_loss: 66.0489 - val_calc_mre_K: 0.8063\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1665/2000\n", "48000/48000 
[==============================] - 4s 79us/step - loss: 65.9519 - calc_mre_K: 0.8051 - val_loss: 65.2364 - val_calc_mre_K: 0.7963\n", "Epoch 1666/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.8175 - calc_mre_K: 0.8034 - val_loss: 66.5258 - val_calc_mre_K: 0.8121\n", "Epoch 1667/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.7300 - calc_mre_K: 0.8024 - val_loss: 65.6858 - val_calc_mre_K: 0.8018\n", "Epoch 1668/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8588 - calc_mre_K: 0.8039 - val_loss: 65.0609 - val_calc_mre_K: 0.7942\n", "Epoch 1669/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.8671 - calc_mre_K: 0.8040 - val_loss: 64.6653 - val_calc_mre_K: 0.7894\n", "Epoch 1670/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.8269 - calc_mre_K: 0.8036 - val_loss: 67.1342 - val_calc_mre_K: 0.8195\n", "Epoch 1671/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.8165 - calc_mre_K: 0.8034 - val_loss: 64.7470 - val_calc_mre_K: 0.7904\n", "Epoch 1672/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7310 - calc_mre_K: 0.8024 - val_loss: 66.3753 - val_calc_mre_K: 0.8102\n", "Epoch 1673/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9187 - calc_mre_K: 0.8047 - val_loss: 66.9570 - val_calc_mre_K: 0.8173\n", "Epoch 1674/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.7357 - calc_mre_K: 0.8024 - val_loss: 65.7341 - val_calc_mre_K: 0.8024\n", "Epoch 1675/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7494 - calc_mre_K: 0.8026 - val_loss: 65.3141 - val_calc_mre_K: 0.7973\n", "Epoch 1676/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8281 - calc_mre_K: 0.8036 - val_loss: 66.4294 - val_calc_mre_K: 0.8109\n", "Epoch 
1677/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.8676 - calc_mre_K: 0.8040 - val_loss: 64.9072 - val_calc_mre_K: 0.7923\n", "Epoch 1678/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.7372 - calc_mre_K: 0.8025 - val_loss: 66.8352 - val_calc_mre_K: 0.8159\n", "Epoch 1679/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.8036 - calc_mre_K: 0.8033 - val_loss: 67.4644 - val_calc_mre_K: 0.8235\n", "Epoch 1680/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.9950 - calc_mre_K: 0.8056 - val_loss: 64.5786 - val_calc_mre_K: 0.7883\n", "Epoch 1681/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.7244 - calc_mre_K: 0.8023 - val_loss: 66.4145 - val_calc_mre_K: 0.8107\n", "Epoch 1682/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.9500 - calc_mre_K: 0.8051 - val_loss: 64.1370 - val_calc_mre_K: 0.7829\n", "Epoch 1683/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.7012 - calc_mre_K: 0.8020 - val_loss: 65.4807 - val_calc_mre_K: 0.7993\n", "Epoch 1684/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.7712 - calc_mre_K: 0.8029 - val_loss: 66.0355 - val_calc_mre_K: 0.8061\n", "Epoch 1685/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.8784 - calc_mre_K: 0.8042 - val_loss: 64.8508 - val_calc_mre_K: 0.7916\n", "Epoch 1686/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.8177 - calc_mre_K: 0.8034 - val_loss: 65.8260 - val_calc_mre_K: 0.8035\n", "Epoch 1687/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7695 - calc_mre_K: 0.8029 - val_loss: 66.9009 - val_calc_mre_K: 0.8167\n", "Epoch 1688/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.7255 - calc_mre_K: 0.8023 - val_loss: 67.2276 - val_calc_mre_K: 
0.8206\n", "Epoch 1689/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8332 - calc_mre_K: 0.8036 - val_loss: 68.2081 - val_calc_mre_K: 0.8326\n", "Epoch 1690/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8622 - calc_mre_K: 0.8040 - val_loss: 64.9533 - val_calc_mre_K: 0.7929\n", "Epoch 1691/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.7879 - calc_mre_K: 0.8031 - val_loss: 67.5198 - val_calc_mre_K: 0.8242\n", "Epoch 1692/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.9058 - calc_mre_K: 0.8045 - val_loss: 65.6965 - val_calc_mre_K: 0.8020\n", "Epoch 1693/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.6873 - calc_mre_K: 0.8018 - val_loss: 67.4330 - val_calc_mre_K: 0.8232\n", "Epoch 1694/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.7677 - calc_mre_K: 0.8028 - val_loss: 67.7296 - val_calc_mre_K: 0.8268\n", "Epoch 1695/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.7479 - calc_mre_K: 0.8026 - val_loss: 66.1246 - val_calc_mre_K: 0.8072\n", "Epoch 1696/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7379 - calc_mre_K: 0.8025 - val_loss: 66.8488 - val_calc_mre_K: 0.8160\n", "Epoch 1697/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.7541 - calc_mre_K: 0.8027 - val_loss: 68.6601 - val_calc_mre_K: 0.8381\n", "Epoch 1698/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5674 - calc_mre_K: 0.8004 - val_loss: 65.4718 - val_calc_mre_K: 0.7992\n", "Epoch 1699/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.7635 - calc_mre_K: 0.8028 - val_loss: 67.5725 - val_calc_mre_K: 0.8249\n", "Epoch 1700/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.5949 - calc_mre_K: 0.8007 - val_loss: 69.3339 - 
val_calc_mre_K: 0.8464\n", "Epoch 1701/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8071 - calc_mre_K: 0.8033 - val_loss: 68.8895 - val_calc_mre_K: 0.8409\n", "Epoch 1702/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6173 - calc_mre_K: 0.8010 - val_loss: 65.8437 - val_calc_mre_K: 0.8038\n", "Epoch 1703/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6700 - calc_mre_K: 0.8016 - val_loss: 66.3626 - val_calc_mre_K: 0.8101\n", "Epoch 1704/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.7400 - calc_mre_K: 0.8025 - val_loss: 63.7030 - val_calc_mre_K: 0.7776\n", "Epoch 1705/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7257 - calc_mre_K: 0.8023 - val_loss: 66.8412 - val_calc_mre_K: 0.8159\n", "Epoch 1706/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6976 - calc_mre_K: 0.8020 - val_loss: 65.7827 - val_calc_mre_K: 0.8030\n", "Epoch 1707/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8146 - calc_mre_K: 0.8034 - val_loss: 64.7129 - val_calc_mre_K: 0.7900\n", "Epoch 1708/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8094 - calc_mre_K: 0.8033 - val_loss: 66.4695 - val_calc_mre_K: 0.8114\n", "Epoch 1709/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6446 - calc_mre_K: 0.8013 - val_loss: 66.9053 - val_calc_mre_K: 0.8167\n", "Epoch 1710/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8081 - calc_mre_K: 0.8033 - val_loss: 65.9585 - val_calc_mre_K: 0.8052\n", "Epoch 1711/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5283 - calc_mre_K: 0.7999 - val_loss: 64.4170 - val_calc_mre_K: 0.7863\n", "Epoch 1712/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.9195 - calc_mre_K: 0.8047 - 
val_loss: 64.3205 - val_calc_mre_K: 0.7852\n", "Epoch 1713/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.7668 - calc_mre_K: 0.8028 - val_loss: 67.3384 - val_calc_mre_K: 0.8220\n", "Epoch 1714/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.7963 - calc_mre_K: 0.8032 - val_loss: 68.7539 - val_calc_mre_K: 0.8393\n", "Epoch 1715/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6915 - calc_mre_K: 0.8019 - val_loss: 65.4814 - val_calc_mre_K: 0.7993\n", "Epoch 1716/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.6897 - calc_mre_K: 0.8019 - val_loss: 65.6904 - val_calc_mre_K: 0.8019\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1717/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.7460 - calc_mre_K: 0.8026 - val_loss: 64.9009 - val_calc_mre_K: 0.7922\n", "Epoch 1718/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.8763 - calc_mre_K: 0.8042 - val_loss: 65.4179 - val_calc_mre_K: 0.7986\n", "Epoch 1719/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.9202 - calc_mre_K: 0.8047 - val_loss: 66.5496 - val_calc_mre_K: 0.8124\n", "Epoch 1720/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.6724 - calc_mre_K: 0.8017 - val_loss: 64.8026 - val_calc_mre_K: 0.7910\n", "Epoch 1721/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.8781 - calc_mre_K: 0.8042 - val_loss: 66.5334 - val_calc_mre_K: 0.8122\n", "Epoch 1722/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.7890 - calc_mre_K: 0.8031 - val_loss: 66.4043 - val_calc_mre_K: 0.8106\n", "Epoch 1723/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.6412 - calc_mre_K: 0.8013 - val_loss: 65.7740 - val_calc_mre_K: 0.8029\n", "Epoch 1724/2000\n", "48000/48000 
[==============================] - 4s 76us/step - loss: 65.7609 - calc_mre_K: 0.8027 - val_loss: 65.3468 - val_calc_mre_K: 0.7977\n", "Epoch 1725/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.6662 - calc_mre_K: 0.8016 - val_loss: 66.1688 - val_calc_mre_K: 0.8077\n", "Epoch 1726/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.9700 - calc_mre_K: 0.8053 - val_loss: 64.8993 - val_calc_mre_K: 0.7922\n", "Epoch 1727/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.7551 - calc_mre_K: 0.8027 - val_loss: 64.1980 - val_calc_mre_K: 0.7837\n", "Epoch 1728/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.8713 - calc_mre_K: 0.8041 - val_loss: 67.9634 - val_calc_mre_K: 0.8296\n", "Epoch 1729/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.8264 - calc_mre_K: 0.8035 - val_loss: 67.0377 - val_calc_mre_K: 0.8183\n", "Epoch 1730/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.6525 - calc_mre_K: 0.8014 - val_loss: 66.5156 - val_calc_mre_K: 0.8120\n", "Epoch 1731/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.6289 - calc_mre_K: 0.8011 - val_loss: 65.3819 - val_calc_mre_K: 0.7981\n", "Epoch 1732/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6772 - calc_mre_K: 0.8017 - val_loss: 66.5085 - val_calc_mre_K: 0.8119\n", "Epoch 1733/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5090 - calc_mre_K: 0.7997 - val_loss: 65.8052 - val_calc_mre_K: 0.8033\n", "Epoch 1734/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.8376 - calc_mre_K: 0.8037 - val_loss: 65.0888 - val_calc_mre_K: 0.7945\n", "Epoch 1735/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5581 - calc_mre_K: 0.8003 - val_loss: 65.0670 - val_calc_mre_K: 0.7943\n", "Epoch 
1736/2000\n", "48000/48000 [==============================] - 4s 83us/step - loss: 65.8352 - calc_mre_K: 0.8037 - val_loss: 64.9289 - val_calc_mre_K: 0.7926\n", "Epoch 1737/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 65.7730 - calc_mre_K: 0.8029 - val_loss: 65.8297 - val_calc_mre_K: 0.8036\n", "Epoch 1738/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 65.5659 - calc_mre_K: 0.8004 - val_loss: 67.2308 - val_calc_mre_K: 0.8207\n", "Epoch 1739/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 65.6854 - calc_mre_K: 0.8018 - val_loss: 65.4139 - val_calc_mre_K: 0.7985\n", "Epoch 1740/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.9394 - calc_mre_K: 0.8049 - val_loss: 66.0010 - val_calc_mre_K: 0.8057\n", "Epoch 1741/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.7059 - calc_mre_K: 0.8021 - val_loss: 68.3990 - val_calc_mre_K: 0.8349\n", "Epoch 1742/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.6630 - calc_mre_K: 0.8016 - val_loss: 64.3178 - val_calc_mre_K: 0.7851\n", "Epoch 1743/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.6966 - calc_mre_K: 0.8020 - val_loss: 66.4299 - val_calc_mre_K: 0.8109\n", "Epoch 1744/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.6885 - calc_mre_K: 0.8019 - val_loss: 65.3553 - val_calc_mre_K: 0.7978\n", "Epoch 1745/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5495 - calc_mre_K: 0.8002 - val_loss: 66.2982 - val_calc_mre_K: 0.8093\n", "Epoch 1746/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.7946 - calc_mre_K: 0.8032 - val_loss: 64.0959 - val_calc_mre_K: 0.7824\n", "Epoch 1747/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 65.6836 - calc_mre_K: 0.8018 - val_loss: 68.8650 - val_calc_mre_K: 
0.8406\n", "Epoch 1748/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.6929 - calc_mre_K: 0.8019 - val_loss: 63.9682 - val_calc_mre_K: 0.7809\n", "Epoch 1749/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.5680 - calc_mre_K: 0.8004 - val_loss: 64.9677 - val_calc_mre_K: 0.7931\n", "Epoch 1750/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.9869 - calc_mre_K: 0.8055 - val_loss: 65.3604 - val_calc_mre_K: 0.7979\n", "Epoch 1751/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.8010 - calc_mre_K: 0.8032 - val_loss: 67.4427 - val_calc_mre_K: 0.8233\n", "Epoch 1752/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.6227 - calc_mre_K: 0.8011 - val_loss: 64.8591 - val_calc_mre_K: 0.7917\n", "Epoch 1753/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6667 - calc_mre_K: 0.8016 - val_loss: 64.7187 - val_calc_mre_K: 0.7900\n", "Epoch 1754/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6935 - calc_mre_K: 0.8019 - val_loss: 64.0520 - val_calc_mre_K: 0.7819\n", "Epoch 1755/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7629 - calc_mre_K: 0.8028 - val_loss: 65.4479 - val_calc_mre_K: 0.7989\n", "Epoch 1756/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6165 - calc_mre_K: 0.8010 - val_loss: 63.9937 - val_calc_mre_K: 0.7812\n", "Epoch 1757/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.7649 - calc_mre_K: 0.8028 - val_loss: 65.7620 - val_calc_mre_K: 0.8028\n", "Epoch 1758/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.8545 - calc_mre_K: 0.8039 - val_loss: 63.5760 - val_calc_mre_K: 0.7761\n", "Epoch 1759/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6867 - calc_mre_K: 0.8018 - val_loss: 67.0372 - 
val_calc_mre_K: 0.8183\n", "Epoch 1760/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.7110 - calc_mre_K: 0.8021 - val_loss: 66.0206 - val_calc_mre_K: 0.8059\n", "Epoch 1761/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.5510 - calc_mre_K: 0.8002 - val_loss: 65.6657 - val_calc_mre_K: 0.8016\n", "Epoch 1762/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6671 - calc_mre_K: 0.8016 - val_loss: 64.8913 - val_calc_mre_K: 0.7921\n", "Epoch 1763/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5679 - calc_mre_K: 0.8004 - val_loss: 68.2123 - val_calc_mre_K: 0.8327\n", "Epoch 1764/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.7975 - calc_mre_K: 0.8032 - val_loss: 66.1258 - val_calc_mre_K: 0.8072\n", "Epoch 1765/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.6199 - calc_mre_K: 0.8010 - val_loss: 63.5278 - val_calc_mre_K: 0.7755\n", "Epoch 1766/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.6917 - calc_mre_K: 0.8019 - val_loss: 65.8317 - val_calc_mre_K: 0.8036\n", "Epoch 1767/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5274 - calc_mre_K: 0.7999 - val_loss: 68.2312 - val_calc_mre_K: 0.8329\n", "Epoch 1768/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.7624 - calc_mre_K: 0.8028 - val_loss: 64.5872 - val_calc_mre_K: 0.7884\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1769/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6615 - calc_mre_K: 0.8015 - val_loss: 65.0255 - val_calc_mre_K: 0.7938\n", "Epoch 1770/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5958 - calc_mre_K: 0.8007 - val_loss: 69.5694 - val_calc_mre_K: 0.8492\n", "Epoch 1771/2000\n", "48000/48000 
[==============================] - 4s 80us/step - loss: 65.6593 - calc_mre_K: 0.8015 - val_loss: 64.5599 - val_calc_mre_K: 0.7881\n", "Epoch 1772/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.7116 - calc_mre_K: 0.8021 - val_loss: 64.7574 - val_calc_mre_K: 0.7905\n", "Epoch 1773/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.3920 - calc_mre_K: 0.7982 - val_loss: 63.8304 - val_calc_mre_K: 0.7792\n", "Epoch 1774/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6339 - calc_mre_K: 0.8012 - val_loss: 64.9983 - val_calc_mre_K: 0.7934\n", "Epoch 1775/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5802 - calc_mre_K: 0.8005 - val_loss: 65.9173 - val_calc_mre_K: 0.8047\n", "Epoch 1776/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7114 - calc_mre_K: 0.8021 - val_loss: 63.7319 - val_calc_mre_K: 0.7780\n", "Epoch 1777/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6327 - calc_mre_K: 0.8012 - val_loss: 66.6804 - val_calc_mre_K: 0.8140\n", "Epoch 1778/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4969 - calc_mre_K: 0.7995 - val_loss: 65.2836 - val_calc_mre_K: 0.7969\n", "Epoch 1779/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5681 - calc_mre_K: 0.8004 - val_loss: 64.7039 - val_calc_mre_K: 0.7898\n", "Epoch 1780/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6368 - calc_mre_K: 0.8012 - val_loss: 66.1210 - val_calc_mre_K: 0.8071\n", "Epoch 1781/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6857 - calc_mre_K: 0.8018 - val_loss: 63.6766 - val_calc_mre_K: 0.7773\n", "Epoch 1782/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.4069 - calc_mre_K: 0.7984 - val_loss: 66.6742 - val_calc_mre_K: 0.8139\n", "Epoch 
1783/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5127 - calc_mre_K: 0.7997 - val_loss: 66.7139 - val_calc_mre_K: 0.8144\n", "Epoch 1784/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.8397 - calc_mre_K: 0.8037 - val_loss: 64.6742 - val_calc_mre_K: 0.7895\n", "Epoch 1785/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.7157 - calc_mre_K: 0.8022 - val_loss: 64.4910 - val_calc_mre_K: 0.7872\n", "Epoch 1786/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7032 - calc_mre_K: 0.8020 - val_loss: 65.3098 - val_calc_mre_K: 0.7972\n", "Epoch 1787/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4039 - calc_mre_K: 0.7984 - val_loss: 65.4114 - val_calc_mre_K: 0.7985\n", "Epoch 1788/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7186 - calc_mre_K: 0.8022 - val_loss: 65.3311 - val_calc_mre_K: 0.7975\n", "Epoch 1789/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6332 - calc_mre_K: 0.8012 - val_loss: 67.2536 - val_calc_mre_K: 0.8210\n", "Epoch 1790/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.4013 - calc_mre_K: 0.7984 - val_loss: 67.6987 - val_calc_mre_K: 0.8264\n", "Epoch 1791/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6429 - calc_mre_K: 0.8013 - val_loss: 63.8436 - val_calc_mre_K: 0.7793\n", "Epoch 1792/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6709 - calc_mre_K: 0.8016 - val_loss: 64.3814 - val_calc_mre_K: 0.7859\n", "Epoch 1793/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4925 - calc_mre_K: 0.7995 - val_loss: 70.4700 - val_calc_mre_K: 0.8602\n", "Epoch 1794/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.8383 - calc_mre_K: 0.8037 - val_loss: 65.1271 - val_calc_mre_K: 
0.7950\n", "Epoch 1795/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.5648 - calc_mre_K: 0.8004 - val_loss: 67.7283 - val_calc_mre_K: 0.8268\n", "Epoch 1796/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6633 - calc_mre_K: 0.8016 - val_loss: 66.8729 - val_calc_mre_K: 0.8163\n", "Epoch 1797/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.4217 - calc_mre_K: 0.7986 - val_loss: 64.1162 - val_calc_mre_K: 0.7827\n", "Epoch 1798/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6460 - calc_mre_K: 0.8013 - val_loss: 66.1194 - val_calc_mre_K: 0.8071\n", "Epoch 1799/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.4959 - calc_mre_K: 0.7995 - val_loss: 65.1821 - val_calc_mre_K: 0.7957\n", "Epoch 1800/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.6260 - calc_mre_K: 0.8011 - val_loss: 66.5295 - val_calc_mre_K: 0.8121\n", "Epoch 1801/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.6700 - calc_mre_K: 0.8016 - val_loss: 69.2444 - val_calc_mre_K: 0.8453\n", "Epoch 1802/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6381 - calc_mre_K: 0.8012 - val_loss: 65.8424 - val_calc_mre_K: 0.8037\n", "Epoch 1803/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 65.6897 - calc_mre_K: 0.8019 - val_loss: 65.2221 - val_calc_mre_K: 0.7962\n", "Epoch 1804/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.6849 - calc_mre_K: 0.8018 - val_loss: 64.5984 - val_calc_mre_K: 0.7886\n", "Epoch 1805/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6419 - calc_mre_K: 0.8013 - val_loss: 64.6147 - val_calc_mre_K: 0.7888\n", "Epoch 1806/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3948 - calc_mre_K: 0.7983 - val_loss: 65.8112 - 
val_calc_mre_K: 0.8034\n", "Epoch 1807/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.7223 - calc_mre_K: 0.8023 - val_loss: 66.5762 - val_calc_mre_K: 0.8127\n", "Epoch 1808/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.6203 - calc_mre_K: 0.8010 - val_loss: 65.0570 - val_calc_mre_K: 0.7942\n", "Epoch 1809/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6981 - calc_mre_K: 0.8020 - val_loss: 66.0296 - val_calc_mre_K: 0.8060\n", "Epoch 1810/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5218 - calc_mre_K: 0.7998 - val_loss: 65.2150 - val_calc_mre_K: 0.7961\n", "Epoch 1811/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4623 - calc_mre_K: 0.7991 - val_loss: 65.1207 - val_calc_mre_K: 0.7949\n", "Epoch 1812/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6186 - calc_mre_K: 0.8010 - val_loss: 69.7926 - val_calc_mre_K: 0.8520\n", "Epoch 1813/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.6795 - calc_mre_K: 0.8018 - val_loss: 65.6960 - val_calc_mre_K: 0.8020\n", "Epoch 1814/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.3657 - calc_mre_K: 0.7979 - val_loss: 65.8460 - val_calc_mre_K: 0.8038\n", "Epoch 1815/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6037 - calc_mre_K: 0.8008 - val_loss: 64.0891 - val_calc_mre_K: 0.7823\n", "Epoch 1816/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4807 - calc_mre_K: 0.7993 - val_loss: 67.5322 - val_calc_mre_K: 0.8244\n", "Epoch 1817/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5109 - calc_mre_K: 0.7997 - val_loss: 67.2310 - val_calc_mre_K: 0.8207\n", "Epoch 1818/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4480 - calc_mre_K: 0.7989 - 
val_loss: 65.2891 - val_calc_mre_K: 0.7970\n", "Epoch 1819/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4195 - calc_mre_K: 0.7986 - val_loss: 64.7453 - val_calc_mre_K: 0.7903\n", "Epoch 1820/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5146 - calc_mre_K: 0.7997 - val_loss: 64.6163 - val_calc_mre_K: 0.7888\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1821/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.7883 - calc_mre_K: 0.8031 - val_loss: 65.4129 - val_calc_mre_K: 0.7985\n", "Epoch 1822/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.4297 - calc_mre_K: 0.7987 - val_loss: 64.2512 - val_calc_mre_K: 0.7843\n", "Epoch 1823/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5611 - calc_mre_K: 0.8003 - val_loss: 65.8219 - val_calc_mre_K: 0.8035\n", "Epoch 1824/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.5643 - calc_mre_K: 0.8003 - val_loss: 69.6942 - val_calc_mre_K: 0.8508\n", "Epoch 1825/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.4061 - calc_mre_K: 0.7984 - val_loss: 67.5438 - val_calc_mre_K: 0.8245\n", "Epoch 1826/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.5164 - calc_mre_K: 0.7998 - val_loss: 63.8173 - val_calc_mre_K: 0.7790\n", "Epoch 1827/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6298 - calc_mre_K: 0.8011 - val_loss: 65.2761 - val_calc_mre_K: 0.7968\n", "Epoch 1828/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.5250 - calc_mre_K: 0.7999 - val_loss: 65.1468 - val_calc_mre_K: 0.7952\n", "Epoch 1829/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.6154 - calc_mre_K: 0.8010 - val_loss: 64.5354 - val_calc_mre_K: 0.7878\n", "Epoch 1830/2000\n", "48000/48000 
[==============================] - 4s 77us/step - loss: 65.4815 - calc_mre_K: 0.7993 - val_loss: 64.3228 - val_calc_mre_K: 0.7852\n", "Epoch 1831/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.5367 - calc_mre_K: 0.8000 - val_loss: 64.9801 - val_calc_mre_K: 0.7932\n", "Epoch 1832/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6952 - calc_mre_K: 0.8019 - val_loss: 67.1213 - val_calc_mre_K: 0.8194\n", "Epoch 1833/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5896 - calc_mre_K: 0.8007 - val_loss: 63.9339 - val_calc_mre_K: 0.7804\n", "Epoch 1834/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6006 - calc_mre_K: 0.8008 - val_loss: 64.7364 - val_calc_mre_K: 0.7902\n", "Epoch 1835/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.4552 - calc_mre_K: 0.7990 - val_loss: 65.5433 - val_calc_mre_K: 0.8001\n", "Epoch 1836/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.3062 - calc_mre_K: 0.7972 - val_loss: 66.4523 - val_calc_mre_K: 0.8112\n", "Epoch 1837/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7263 - calc_mre_K: 0.8023 - val_loss: 65.5033 - val_calc_mre_K: 0.7996\n", "Epoch 1838/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4966 - calc_mre_K: 0.7995 - val_loss: 64.3215 - val_calc_mre_K: 0.7852\n", "Epoch 1839/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.6216 - calc_mre_K: 0.8010 - val_loss: 65.6795 - val_calc_mre_K: 0.8018\n", "Epoch 1840/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.5192 - calc_mre_K: 0.7998 - val_loss: 65.5232 - val_calc_mre_K: 0.7998\n", "Epoch 1841/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.5753 - calc_mre_K: 0.8005 - val_loss: 64.9708 - val_calc_mre_K: 0.7931\n", "Epoch 
1842/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3211 - calc_mre_K: 0.7974 - val_loss: 65.2659 - val_calc_mre_K: 0.7967\n", "Epoch 1843/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.4529 - calc_mre_K: 0.7990 - val_loss: 67.6235 - val_calc_mre_K: 0.8255\n", "Epoch 1844/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.4840 - calc_mre_K: 0.7994 - val_loss: 67.2853 - val_calc_mre_K: 0.8214\n", "Epoch 1845/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6652 - calc_mre_K: 0.8016 - val_loss: 65.4046 - val_calc_mre_K: 0.7984\n", "Epoch 1846/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5572 - calc_mre_K: 0.8003 - val_loss: 66.4146 - val_calc_mre_K: 0.8107\n", "Epoch 1847/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.6123 - calc_mre_K: 0.8009 - val_loss: 64.0356 - val_calc_mre_K: 0.7817\n", "Epoch 1848/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.5216 - calc_mre_K: 0.7998 - val_loss: 65.9710 - val_calc_mre_K: 0.8053\n", "Epoch 1849/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.4844 - calc_mre_K: 0.7994 - val_loss: 64.6547 - val_calc_mre_K: 0.7892\n", "Epoch 1850/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.3886 - calc_mre_K: 0.7982 - val_loss: 66.2866 - val_calc_mre_K: 0.8092\n", "Epoch 1851/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6144 - calc_mre_K: 0.8010 - val_loss: 64.9356 - val_calc_mre_K: 0.7927\n", "Epoch 1852/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.7651 - calc_mre_K: 0.8028 - val_loss: 64.0734 - val_calc_mre_K: 0.7821\n", "Epoch 1853/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.3980 - calc_mre_K: 0.7983 - val_loss: 65.0808 - val_calc_mre_K: 
0.7944\n", "Epoch 1854/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 65.3371 - calc_mre_K: 0.7976 - val_loss: 66.0109 - val_calc_mre_K: 0.8058\n", "Epoch 1855/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5248 - calc_mre_K: 0.7999 - val_loss: 64.4640 - val_calc_mre_K: 0.7869\n", "Epoch 1856/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4268 - calc_mre_K: 0.7987 - val_loss: 64.9018 - val_calc_mre_K: 0.7923\n", "Epoch 1857/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4943 - calc_mre_K: 0.7995 - val_loss: 67.5332 - val_calc_mre_K: 0.8244\n", "Epoch 1858/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4978 - calc_mre_K: 0.7995 - val_loss: 66.2933 - val_calc_mre_K: 0.8092\n", "Epoch 1859/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.6395 - calc_mre_K: 0.8013 - val_loss: 65.0331 - val_calc_mre_K: 0.7939\n", "Epoch 1860/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4154 - calc_mre_K: 0.7985 - val_loss: 63.6592 - val_calc_mre_K: 0.7771\n", "Epoch 1861/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.4173 - calc_mre_K: 0.7986 - val_loss: 64.3087 - val_calc_mre_K: 0.7850\n", "Epoch 1862/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4383 - calc_mre_K: 0.7988 - val_loss: 65.4968 - val_calc_mre_K: 0.7995\n", "Epoch 1863/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4808 - calc_mre_K: 0.7993 - val_loss: 65.3649 - val_calc_mre_K: 0.7979\n", "Epoch 1864/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.2498 - calc_mre_K: 0.7965 - val_loss: 67.3485 - val_calc_mre_K: 0.8221\n", "Epoch 1865/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.5175 - calc_mre_K: 0.7998 - val_loss: 64.7995 - 
val_calc_mre_K: 0.7910\n", "Epoch 1866/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6618 - calc_mre_K: 0.8015 - val_loss: 63.5980 - val_calc_mre_K: 0.7763\n", "Epoch 1867/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3881 - calc_mre_K: 0.7982 - val_loss: 64.5179 - val_calc_mre_K: 0.7876\n", "Epoch 1868/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5266 - calc_mre_K: 0.7999 - val_loss: 65.2326 - val_calc_mre_K: 0.7963\n", "Epoch 1869/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4453 - calc_mre_K: 0.7989 - val_loss: 63.7747 - val_calc_mre_K: 0.7785\n", "Epoch 1870/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4932 - calc_mre_K: 0.7995 - val_loss: 66.9042 - val_calc_mre_K: 0.8167\n", "Epoch 1871/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.4904 - calc_mre_K: 0.7994 - val_loss: 66.0880 - val_calc_mre_K: 0.8067\n", "Epoch 1872/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5832 - calc_mre_K: 0.8006 - val_loss: 63.3782 - val_calc_mre_K: 0.7737\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1873/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5510 - calc_mre_K: 0.8002 - val_loss: 65.1799 - val_calc_mre_K: 0.7957\n", "Epoch 1874/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.6109 - calc_mre_K: 0.8009 - val_loss: 64.9158 - val_calc_mre_K: 0.7924\n", "Epoch 1875/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4410 - calc_mre_K: 0.7988 - val_loss: 66.6066 - val_calc_mre_K: 0.8131\n", "Epoch 1876/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.4090 - calc_mre_K: 0.7985 - val_loss: 64.4441 - val_calc_mre_K: 0.7867\n", "Epoch 1877/2000\n", "48000/48000 
[==============================] - 4s 76us/step - loss: 65.4520 - calc_mre_K: 0.7990 - val_loss: 65.5834 - val_calc_mre_K: 0.8006\n", "Epoch 1878/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.3786 - calc_mre_K: 0.7981 - val_loss: 65.2335 - val_calc_mre_K: 0.7963\n", "Epoch 1879/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3352 - calc_mre_K: 0.7975 - val_loss: 66.7639 - val_calc_mre_K: 0.8150\n", "Epoch 1880/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5059 - calc_mre_K: 0.7996 - val_loss: 65.1011 - val_calc_mre_K: 0.7947\n", "Epoch 1881/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.4614 - calc_mre_K: 0.7991 - val_loss: 63.7772 - val_calc_mre_K: 0.7785\n", "Epoch 1882/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.5754 - calc_mre_K: 0.8005 - val_loss: 67.0997 - val_calc_mre_K: 0.8191\n", "Epoch 1883/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.2980 - calc_mre_K: 0.7971 - val_loss: 65.3599 - val_calc_mre_K: 0.7979\n", "Epoch 1884/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.5283 - calc_mre_K: 0.7999 - val_loss: 66.3972 - val_calc_mre_K: 0.8105\n", "Epoch 1885/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.5976 - calc_mre_K: 0.8008 - val_loss: 64.5555 - val_calc_mre_K: 0.7880\n", "Epoch 1886/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.4750 - calc_mre_K: 0.7993 - val_loss: 68.4920 - val_calc_mre_K: 0.8361\n", "Epoch 1887/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.3584 - calc_mre_K: 0.7978 - val_loss: 64.2654 - val_calc_mre_K: 0.7845\n", "Epoch 1888/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.5152 - calc_mre_K: 0.7997 - val_loss: 66.3518 - val_calc_mre_K: 0.8100\n", "Epoch 
1889/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 65.3346 - calc_mre_K: 0.7975 - val_loss: 64.5870 - val_calc_mre_K: 0.7884\n", "Epoch 1890/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.3319 - calc_mre_K: 0.7975 - val_loss: 65.2874 - val_calc_mre_K: 0.7970\n", "Epoch 1891/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.3407 - calc_mre_K: 0.7976 - val_loss: 66.0824 - val_calc_mre_K: 0.8067\n", "Epoch 1892/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.5447 - calc_mre_K: 0.8001 - val_loss: 65.1710 - val_calc_mre_K: 0.7955\n", "Epoch 1893/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.5332 - calc_mre_K: 0.8000 - val_loss: 65.3423 - val_calc_mre_K: 0.7976\n", "Epoch 1894/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.3149 - calc_mre_K: 0.7973 - val_loss: 64.6286 - val_calc_mre_K: 0.7889\n", "Epoch 1895/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.5280 - calc_mre_K: 0.7999 - val_loss: 66.1729 - val_calc_mre_K: 0.8078\n", "Epoch 1896/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.5021 - calc_mre_K: 0.7996 - val_loss: 64.2162 - val_calc_mre_K: 0.7839\n", "Epoch 1897/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.4704 - calc_mre_K: 0.7992 - val_loss: 66.3957 - val_calc_mre_K: 0.8105\n", "Epoch 1898/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.4047 - calc_mre_K: 0.7984 - val_loss: 64.8384 - val_calc_mre_K: 0.7915\n", "Epoch 1899/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.4438 - calc_mre_K: 0.7989 - val_loss: 64.4030 - val_calc_mre_K: 0.7862\n", "Epoch 1900/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.4817 - calc_mre_K: 0.7993 - val_loss: 65.4928 - val_calc_mre_K: 
0.7995\n", "Epoch 1901/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.2625 - calc_mre_K: 0.7967 - val_loss: 64.4368 - val_calc_mre_K: 0.7866\n", "Epoch 1902/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.6857 - calc_mre_K: 0.8018 - val_loss: 63.6255 - val_calc_mre_K: 0.7767\n", "Epoch 1903/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.3983 - calc_mre_K: 0.7983 - val_loss: 66.5674 - val_calc_mre_K: 0.8126\n", "Epoch 1904/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 65.2736 - calc_mre_K: 0.7968 - val_loss: 67.0112 - val_calc_mre_K: 0.8180\n", "Epoch 1905/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 65.6999 - calc_mre_K: 0.8020 - val_loss: 65.8844 - val_calc_mre_K: 0.8043\n", "Epoch 1906/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.2438 - calc_mre_K: 0.7964 - val_loss: 63.8555 - val_calc_mre_K: 0.7795\n", "Epoch 1907/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.3315 - calc_mre_K: 0.7975 - val_loss: 64.7043 - val_calc_mre_K: 0.7898\n", "Epoch 1908/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.2686 - calc_mre_K: 0.7967 - val_loss: 65.3613 - val_calc_mre_K: 0.7979\n", "Epoch 1909/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.6827 - calc_mre_K: 0.8018 - val_loss: 66.7297 - val_calc_mre_K: 0.8146\n", "Epoch 1910/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.4267 - calc_mre_K: 0.7987 - val_loss: 65.8616 - val_calc_mre_K: 0.8040\n", "Epoch 1911/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 65.4173 - calc_mre_K: 0.7986 - val_loss: 67.2607 - val_calc_mre_K: 0.8211\n", "Epoch 1912/2000\n", "48000/48000 [==============================] - 4s 74us/step - loss: 65.4478 - calc_mre_K: 0.7989 - val_loss: 66.1415 - 
val_calc_mre_K: 0.8074\n", "Epoch 1913/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.4620 - calc_mre_K: 0.7991 - val_loss: 65.9603 - val_calc_mre_K: 0.8052\n", "Epoch 1914/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.3909 - calc_mre_K: 0.7982 - val_loss: 64.5386 - val_calc_mre_K: 0.7878\n", "Epoch 1915/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.3604 - calc_mre_K: 0.7979 - val_loss: 65.7556 - val_calc_mre_K: 0.8027\n", "Epoch 1916/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.3514 - calc_mre_K: 0.7977 - val_loss: 63.5344 - val_calc_mre_K: 0.7756\n", "Epoch 1917/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.5028 - calc_mre_K: 0.7996 - val_loss: 66.4508 - val_calc_mre_K: 0.8112\n", "Epoch 1918/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.3644 - calc_mre_K: 0.7979 - val_loss: 63.8927 - val_calc_mre_K: 0.7799\n", "Epoch 1919/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.4245 - calc_mre_K: 0.7986 - val_loss: 65.9967 - val_calc_mre_K: 0.8056\n", "Epoch 1920/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.1414 - calc_mre_K: 0.7952 - val_loss: 65.2918 - val_calc_mre_K: 0.7970\n", "Epoch 1921/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.5540 - calc_mre_K: 0.8002 - val_loss: 65.9599 - val_calc_mre_K: 0.8052\n", "Epoch 1922/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.2328 - calc_mre_K: 0.7963 - val_loss: 65.1975 - val_calc_mre_K: 0.7959\n", "Epoch 1923/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.4585 - calc_mre_K: 0.7991 - val_loss: 64.3222 - val_calc_mre_K: 0.7852\n", "Epoch 1924/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.5376 - calc_mre_K: 0.8000 - 
val_loss: 66.9293 - val_calc_mre_K: 0.8170\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1925/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.3418 - calc_mre_K: 0.7976 - val_loss: 64.8999 - val_calc_mre_K: 0.7922\n", "Epoch 1926/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3214 - calc_mre_K: 0.7974 - val_loss: 65.4472 - val_calc_mre_K: 0.7989\n", "Epoch 1927/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.4639 - calc_mre_K: 0.7991 - val_loss: 66.5222 - val_calc_mre_K: 0.8120\n", "Epoch 1928/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4281 - calc_mre_K: 0.7987 - val_loss: 64.7585 - val_calc_mre_K: 0.7905\n", "Epoch 1929/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4449 - calc_mre_K: 0.7989 - val_loss: 63.9033 - val_calc_mre_K: 0.7801\n", "Epoch 1930/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.4250 - calc_mre_K: 0.7986 - val_loss: 64.2604 - val_calc_mre_K: 0.7844\n", "Epoch 1931/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.2708 - calc_mre_K: 0.7968 - val_loss: 65.4081 - val_calc_mre_K: 0.7984\n", "Epoch 1932/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.3340 - calc_mre_K: 0.7975 - val_loss: 65.6786 - val_calc_mre_K: 0.8017\n", "Epoch 1933/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5074 - calc_mre_K: 0.7997 - val_loss: 63.4366 - val_calc_mre_K: 0.7744\n", "Epoch 1934/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.2227 - calc_mre_K: 0.7962 - val_loss: 66.9193 - val_calc_mre_K: 0.8169\n", "Epoch 1935/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5385 - calc_mre_K: 0.8000 - val_loss: 64.8564 - val_calc_mre_K: 0.7917\n", "Epoch 1936/2000\n", "48000/48000 
[==============================] - 4s 80us/step - loss: 65.4028 - calc_mre_K: 0.7984 - val_loss: 64.0250 - val_calc_mre_K: 0.7816\n", "Epoch 1937/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.3677 - calc_mre_K: 0.7979 - val_loss: 65.9979 - val_calc_mre_K: 0.8056\n", "Epoch 1938/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5732 - calc_mre_K: 0.8005 - val_loss: 64.3644 - val_calc_mre_K: 0.7857\n", "Epoch 1939/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.5292 - calc_mre_K: 0.7999 - val_loss: 64.7075 - val_calc_mre_K: 0.7899\n", "Epoch 1940/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.4277 - calc_mre_K: 0.7987 - val_loss: 64.3542 - val_calc_mre_K: 0.7856\n", "Epoch 1941/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.2633 - calc_mre_K: 0.7967 - val_loss: 64.3570 - val_calc_mre_K: 0.7856\n", "Epoch 1942/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5892 - calc_mre_K: 0.8006 - val_loss: 64.8313 - val_calc_mre_K: 0.7914\n", "Epoch 1943/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.3940 - calc_mre_K: 0.7983 - val_loss: 64.7001 - val_calc_mre_K: 0.7898\n", "Epoch 1944/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.5625 - calc_mre_K: 0.8003 - val_loss: 64.4629 - val_calc_mre_K: 0.7869\n", "Epoch 1945/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.1652 - calc_mre_K: 0.7955 - val_loss: 65.8889 - val_calc_mre_K: 0.8043\n", "Epoch 1946/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.3350 - calc_mre_K: 0.7975 - val_loss: 66.7169 - val_calc_mre_K: 0.8144\n", "Epoch 1947/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.6092 - calc_mre_K: 0.8009 - val_loss: 65.0661 - val_calc_mre_K: 0.7943\n", "Epoch 
1948/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.1092 - calc_mre_K: 0.7948 - val_loss: 66.1256 - val_calc_mre_K: 0.8072\n", "Epoch 1949/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.2106 - calc_mre_K: 0.7960 - val_loss: 66.5392 - val_calc_mre_K: 0.8122\n", "Epoch 1950/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.4406 - calc_mre_K: 0.7988 - val_loss: 63.7512 - val_calc_mre_K: 0.7782\n", "Epoch 1951/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.2250 - calc_mre_K: 0.7962 - val_loss: 65.2074 - val_calc_mre_K: 0.7960\n", "Epoch 1952/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.3576 - calc_mre_K: 0.7978 - val_loss: 65.6464 - val_calc_mre_K: 0.8013\n", "Epoch 1953/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.5736 - calc_mre_K: 0.8005 - val_loss: 63.1332 - val_calc_mre_K: 0.7707\n", "Epoch 1954/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.3310 - calc_mre_K: 0.7975 - val_loss: 65.1093 - val_calc_mre_K: 0.7948\n", "Epoch 1955/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.3425 - calc_mre_K: 0.7976 - val_loss: 66.4851 - val_calc_mre_K: 0.8116\n", "Epoch 1956/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.2892 - calc_mre_K: 0.7970 - val_loss: 64.3357 - val_calc_mre_K: 0.7853\n", "Epoch 1957/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.2290 - calc_mre_K: 0.7963 - val_loss: 65.1637 - val_calc_mre_K: 0.7955\n", "Epoch 1958/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3781 - calc_mre_K: 0.7981 - val_loss: 64.2444 - val_calc_mre_K: 0.7842\n", "Epoch 1959/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5061 - calc_mre_K: 0.7996 - val_loss: 66.8871 - val_calc_mre_K: 
0.8165\n", "Epoch 1960/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3745 - calc_mre_K: 0.7980 - val_loss: 65.9843 - val_calc_mre_K: 0.8055\n", "Epoch 1961/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3211 - calc_mre_K: 0.7974 - val_loss: 66.1565 - val_calc_mre_K: 0.8076\n", "Epoch 1962/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.2978 - calc_mre_K: 0.7971 - val_loss: 64.2018 - val_calc_mre_K: 0.7837\n", "Epoch 1963/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3697 - calc_mre_K: 0.7980 - val_loss: 66.3692 - val_calc_mre_K: 0.8102\n", "Epoch 1964/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.2573 - calc_mre_K: 0.7966 - val_loss: 65.1987 - val_calc_mre_K: 0.7959\n", "Epoch 1965/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.4248 - calc_mre_K: 0.7986 - val_loss: 65.4141 - val_calc_mre_K: 0.7985\n", "Epoch 1966/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.4756 - calc_mre_K: 0.7993 - val_loss: 64.6986 - val_calc_mre_K: 0.7898\n", "Epoch 1967/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.1986 - calc_mre_K: 0.7959 - val_loss: 64.1546 - val_calc_mre_K: 0.7831\n", "Epoch 1968/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3948 - calc_mre_K: 0.7983 - val_loss: 64.7214 - val_calc_mre_K: 0.7901\n", "Epoch 1969/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.4844 - calc_mre_K: 0.7994 - val_loss: 65.0482 - val_calc_mre_K: 0.7940\n", "Epoch 1970/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.1615 - calc_mre_K: 0.7954 - val_loss: 66.4466 - val_calc_mre_K: 0.8111\n", "Epoch 1971/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.3540 - calc_mre_K: 0.7978 - val_loss: 66.5159 - 
val_calc_mre_K: 0.8120\n", "Epoch 1972/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5238 - calc_mre_K: 0.7999 - val_loss: 64.4694 - val_calc_mre_K: 0.7870\n", "Epoch 1973/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.3552 - calc_mre_K: 0.7978 - val_loss: 64.3582 - val_calc_mre_K: 0.7856\n", "Epoch 1974/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.4367 - calc_mre_K: 0.7988 - val_loss: 65.6840 - val_calc_mre_K: 0.8018\n", "Epoch 1975/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.5182 - calc_mre_K: 0.7998 - val_loss: 65.9665 - val_calc_mre_K: 0.8053\n", "Epoch 1976/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.3069 - calc_mre_K: 0.7972 - val_loss: 66.4628 - val_calc_mre_K: 0.8113\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1977/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.4093 - calc_mre_K: 0.7985 - val_loss: 67.8520 - val_calc_mre_K: 0.8283\n", "Epoch 1978/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.3609 - calc_mre_K: 0.7979 - val_loss: 65.7115 - val_calc_mre_K: 0.8021\n", "Epoch 1979/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.2788 - calc_mre_K: 0.7969 - val_loss: 64.2767 - val_calc_mre_K: 0.7846\n", "Epoch 1980/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.2981 - calc_mre_K: 0.7971 - val_loss: 64.4582 - val_calc_mre_K: 0.7868\n", "Epoch 1981/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.2057 - calc_mre_K: 0.7960 - val_loss: 65.9522 - val_calc_mre_K: 0.8051\n", "Epoch 1982/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.2822 - calc_mre_K: 0.7969 - val_loss: 64.6802 - val_calc_mre_K: 0.7896\n", "Epoch 1983/2000\n", "48000/48000 
[==============================] - 4s 80us/step - loss: 65.2968 - calc_mre_K: 0.7971 - val_loss: 66.1568 - val_calc_mre_K: 0.8076\n", "Epoch 1984/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.5220 - calc_mre_K: 0.7998 - val_loss: 63.8623 - val_calc_mre_K: 0.7796\n", "Epoch 1985/2000\n", "48000/48000 [==============================] - 4s 81us/step - loss: 65.4607 - calc_mre_K: 0.7991 - val_loss: 66.8462 - val_calc_mre_K: 0.8160\n", "Epoch 1986/2000\n", "48000/48000 [==============================] - 4s 82us/step - loss: 65.4901 - calc_mre_K: 0.7994 - val_loss: 65.2006 - val_calc_mre_K: 0.7959\n", "Epoch 1987/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.3293 - calc_mre_K: 0.7975 - val_loss: 64.8487 - val_calc_mre_K: 0.7916\n", "Epoch 1988/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.2863 - calc_mre_K: 0.7970 - val_loss: 64.9859 - val_calc_mre_K: 0.7933\n", "Epoch 1989/2000\n", "48000/48000 [==============================] - 4s 80us/step - loss: 65.4982 - calc_mre_K: 0.7995 - val_loss: 64.1392 - val_calc_mre_K: 0.7829\n", "Epoch 1990/2000\n", "48000/48000 [==============================] - 4s 79us/step - loss: 65.3134 - calc_mre_K: 0.7973 - val_loss: 64.0530 - val_calc_mre_K: 0.7819\n", "Epoch 1991/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.3745 - calc_mre_K: 0.7980 - val_loss: 66.4116 - val_calc_mre_K: 0.8107\n", "Epoch 1992/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.1874 - calc_mre_K: 0.7957 - val_loss: 64.0051 - val_calc_mre_K: 0.7813\n", "Epoch 1993/2000\n", "48000/48000 [==============================] - 4s 78us/step - loss: 65.3059 - calc_mre_K: 0.7972 - val_loss: 66.5521 - val_calc_mre_K: 0.8124\n", "Epoch 1994/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.3643 - calc_mre_K: 0.7979 - val_loss: 64.2480 - val_calc_mre_K: 0.7843\n", "Epoch 
1995/2000\n", "48000/48000 [==============================] - 4s 75us/step - loss: 65.2585 - calc_mre_K: 0.7966 - val_loss: 66.8591 - val_calc_mre_K: 0.8162\n", "Epoch 1996/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.4006 - calc_mre_K: 0.7983 - val_loss: 63.2946 - val_calc_mre_K: 0.7726\n", "Epoch 1997/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.3939 - calc_mre_K: 0.7983 - val_loss: 65.5009 - val_calc_mre_K: 0.7996\n", "Epoch 1998/2000\n", "48000/48000 [==============================] - 4s 76us/step - loss: 65.2143 - calc_mre_K: 0.7961 - val_loss: 67.3997 - val_calc_mre_K: 0.8228\n", "Epoch 1999/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.4794 - calc_mre_K: 0.7993 - val_loss: 65.5703 - val_calc_mre_K: 0.8004\n", "Epoch 2000/2000\n", "48000/48000 [==============================] - 4s 77us/step - loss: 65.3623 - calc_mre_K: 0.7979 - val_loss: 66.3390 - val_calc_mre_K: 0.8098\n" ] } ], "source": [ "%autoreload\n", "# import warnings\n", "# warnings.filterwarnings('ignore')\n", "\n", "\n", "# model = resnetb()\n", "# #model = conv1d_lkyrelu()\n", "# #model = conv1d_model_bnorm()\n", "# #model = conv1d_model(1)\n", "\n", "# #model = naiveploss_model()\n", "# model.summary()\n", "\n", " \n", "# from IPython.display import SVG\n", "# from keras.utils.vis_utils import model_to_dot\n", "\n", "# #SVG(model_to_dot(model).create(prog='dot', format='svg'))\n", " \n", "\n", "\n", " \n", " \n", "import scnets as scn\n", "\n", "model = scn.resnet(in_size=8, \n", " out_size=256,\n", " num_units=2,\n", " red_dim=16,\n", " batch_size=32,\n", " ker_size=3)\n", "\n", "# model = scn.conv1dmodel(in_size=8, \n", "# out_size=256,\n", "# batch_size=64,\n", "# c1_nf=64,\n", "# clayers=4,\n", "# ker_size=5)\n", "\n", "# model = scn.convprel(in_size=8, \n", "# out_size=256,\n", "# batch_size=64,\n", "# c1_nf=64,\n", "# clayers=4,\n", "# ker_size=5)\n", "\n", "# model = 
scn.fullycon(in_size=8, \n", "# out_size=256, \n", "# batch_size=64,\n", "# N_hidden=3, \n", "# N_neurons=36, \n", "# N_gpus=1)\n", "\n", "\n", "\n", "\n", "\n", "# from keras import optimizers\n", "# sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)\n", "#model.compile(loss=naive_percent_loss, optimizer='nadam', metrics=[calc_mre_K])\n", "\n", "from IPython.display import SVG\n", "from keras.utils.vis_utils import model_to_dot\n", "from keras.utils.vis_utils import plot_model\n", "\n", "SVG(model_to_dot(model, show_shapes=False, show_layer_names=False).create(prog='dot', format='svg'))\n", "\n", "\n", "plot_model(model, show_shapes=False, show_layer_names=False, to_file='model_resnet.svg')\n", "\n", "\n", "\n", "model.summary() \n", " \n", " \n", "x_t, x_v, y_t, y_v = train_test_split(x_train, y_train, test_size=0.2, random_state=42)\n", "# model = naiveploss_mgpu_model()\n", "# model.summary() \n", "history = model.fit(x_t, y_t,\n", " batch_size=32,\n", " epochs=2000, \n", " verbose=1,\n", " validation_data=(x_v, y_v))\n", "\n", "\n", "\n", "\n", "\n", "# y_pred = model.predict(x_test)\n", "# print(calc_mre(y_test, y_pred))\n", "\n" ] }, { "cell_type": "code", "execution_count": 77, "metadata": { "ExecuteTime": { "end_time": "2018-09-29T08:34:11.043118Z", "start_time": "2018-09-29T08:34:11.003420Z" } }, "outputs": [], "source": [ "from keras.models import load_model\n", "\n", "#Creates a HDF5 file 'my_model.h5'\n", "model.save('res15k_model.h5')\n", "\n" ] }, { "cell_type": "code", "execution_count": 7, "metadata": { "ExecuteTime": { "end_time": "2018-09-29T07:25:18.329323Z", "start_time": "2018-09-29T07:25:15.692660Z" } }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "0.8106580427940897\n" ] }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAVsAAAC6CAYAAAAAoO5+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJzt3Xd4k9UXwPFvk3S30BZoy54CZRQcgCBT9l4igoACArKRociSPQUZimxUQGUIyN5DWbJHkZZdCrSstnSnzfj9kR8psSOd6Tqf5+lD886TS3Py5s2991jp9Xo9QgghMpUiqwMQQoi8QJKtEEJYgCRbIYSwAEm2QghhAaqsDsBSdDodoS/DsLO1xcrKKqvDEULkEnq9nhi1Gpf8+VAokr5+zTPJNvRlGBNnLMjqMIQQudS08SNxc3VJcn2eSbZ2trYATB47AidHhyyOJutotVpu3LiBl5cXSqUyq8PJUtIWBtIO8dLSFtExMUycscCYY5KSZ5Ltq1sH9na22NvbZXE0WUer1WJjY429vZ28sKQtAGmH16WnLczdnpQvyIQQwgIk2QohhAVIshVCCAvIM/dshcgO9Ho9Wq02q8MwodVq0ev1aDQa8vpUKYm1hUKhSLZLV0pJshXCQtRqNVZWVhnyws1ICoWCsmXLZru4skJibaHRaNDpdCiVSqytrdN8bEm2QliAXq/HysoKGxubrA4lAb1ej1KpRKVS5fkBP8m1hVqtNv4/poW8lQlhAVqtVq4cczilUpmuW0Dyvy+EEBYgyVYIISxAkq0QQliAJFshRKZ5/DiQFs3bEx0dk9WhZDnpjSCEAKBliw7G32NjY1FYKVBZG1KEt3cV5s6bkepjFilSmH37/8ywGHMySbZCCAD27ttu7NY0ftxkypUrS+8+PZPcXqPRolJlr4lrEospLXFqNVrjG01GkWQrRBbR6vQEh8dZ5FxuztYoFenrQ/vw4SN6dO/DV2NHsu6X34iIjGTHzs38/ttmdu7YQ3BwCC4u+enUuT1dPuxkss+efdtxcLBn+rQ52NvbERERyT9nzuHikp/hXwymVq0aiZ5Tr9ezbesOtm3bQXBwCGXKlGLEF0MoW7YMAEMGj6Rq1cr4+d3k3399GT/+S/z8bnHv7j1c3Vz56/gJmjZtzNDhA/nnn3OsXLGWwMeBeHp68Fn/3tSuXQuAVSt/4t7de7i4uvLX8b9p2qwxw4YPSld7/ZckWyGySHB4HK2nXrDIuXZPeptC+TNmQMXJk2dYvnKJcYCGp6cHCxbOwd29ENeuXefLMeMpXbok79R4O9H9Dx86xoyZ3zBx0li2bN7G7Fnz2fLHhkSnNNy2dQe7du5l5qwpFClSmD279/PVlxNZv2E1dnaGqVL37T3AzFlTqOhVgdjYOPz8bnH6zFnGfPkFI74YilarISDgIRPGT2HixLHUea82Z/85xzcTp7Ni1feUKlUSwLDPmBF8PrBvukaKJUWSrRBZxM3Zmt2TEk9ImXGujNKn7yc4OzsbHzdsVN/4u7d3FerWrcOlS1eSTLZ16tTizbeqA9CqdQuW/rCCZ8+e4+npkWDbbVt3MHBQP4oXLwZA23at2Pj7Fi5fusq7tWsC0LRZY7wqVQTA1tbwBlCuXFlatmwGgEql5PChY7z91pvUb1DXEMN771KrVg0OHjhMv/59jPu0aNmM6OhobM1MBJ4WkmyFyCJKhVWGXW1akqenu8njAwcOs2XTVgIDg9Dp9cSqY2nWvHGS+7sVcDP+/urqNCoqOtFtg4KeMH3aHKxeuwWiidPw/Pnz1+JJmKQ9PUxjfPbsGYWLeJosK1zEk2fPnie5T0aTZCuESKX4xBf4OIjZs75l3rczqVbNG5VKyfRpc8ioycM8PNwZMXII77zzVipDNL0/XahQIXx9/UyWBQUGUbxEsST3yWjSz1YIkWbR0YYrUheX/CiVCs6fu8Cpk2cy7PgdO7Vj9cqf8Pd/ABiugE+
f/oeXL8NSdZz3GzfgwoVL/P33KbRaLWdOn+XMmXM0afp+hsVqjlzZCiHSrEzZ0vTo8RFfjPgKnVZHrXffoX6D9zLs+J06t0elUvLNxGk8ffoce3s7KlepRKVKXqk6TokSxZk6bSKrVv7ErBnz8PT04JvJ4yhdulSGxWqOlT6PzBYcHR3D6IkzmTP5K5ycHLM6nCyj1Wrx8fGhSpUqUtzPgm2h0WgAUKmy3/WNXq8nOjoae3t7mWIxmbZI6v/wVW75dtq4ZIvJym0EIYSwAEm2QghhAZJshRDCAiTZCiGEBUiyFUIIC5BkK4QQFiDJVgghLECSrRBCWIAkWyGEsIDsN5xFCJElMqMszitTJs/E09ODAZ/3TXecOZUkWyEEkPqyOFkto0rgWKq8jyRbIUSKPXz4iO+XLOPGv37Y2trQslVzen3SHaVSSWhoKHNnf8e1az7odDo8PT2YMm0if/91kr+On8DKyopt23bg6eHOT7+sTHBsvV7Pls3b2PHnboJDQihXtgzDvxhCmTKlABj4+XDeeqsa/1735YavH5Mmfc21a9d5GPAIZ2cn/v77JC1bNmPQkAGcOX2WlSvXEhQYhGdhT/r370Otdw2ld5YvW53oPplNkq0QWUSn0xKpDrHIuRxtXVEo0nf1FhUVzRfDv6Rb9w+ZNn0SISGhfP3VRFxc8tOxUzs2rN+ISqVk8x8bsLa25v59f5ycHOnW/UNu3rxt9jbCls3b2L//ELPmTKVwYU927tjD2C8nsG7DamPlhL17DjJz9hQqVHiD2Ng4rl27zslTpxn79WhGjRmORqPB//4DJk6cyqRvxlG7di1On/6HiROmsmrNUkqUKA6QYB9LkGQrRBaJVIewaEcH8xtmgOHttuNsXzBdxzjx90kKFixAp87tAXB3L8SHXTuz48/ddOzUDpVKRWjoSx49ekyZMqWNRRlTatvWHQwfMZhixYoC0KFjWzb+voWrV3yoUdNQYqd5iyZUrFgeiC+BU6FCeZo1M1SGUCqVHDp0lJo13qZevToA1KtXh3feeYtDB4/Qp+8nie5jCZJshcgijrauDG+33WLnSq+goCfcunWH1q06GZfpdXpcXPID0LNXN37+aQOTJ80gLCyc9xs3oP+AvslOO2g8jl5P0JMnTJk800wJnISlaxIvgVPYZJmlS+AkRpKtEFlEoVCm+2rTkjw83KlcpRKLFs9LdL2DgwMDB/Vj4KB+BAU9YcL4KfyxZRs9enZDYaaMupWVFR7u7nw5diRvvlktuQ3NLitUqBB3794zWRYUGETZcmWS3McSpJ+tECJF6tarQ1BgEDv+3I1aHYtWq+VhwCMuXrgMwIm/T/Ew4BE6nQ4HB3tUSiUKhSHFuLq68ujR42SP37FTO1auWMuDBwEAREVFcerUGcLDw1MVZ+MmDTl79jynTp5Bq9Vy6uQZzp27SOMmjdLwrDNOiq9sAwIecuniFQIfB6KOjcXFJT8VKpSnWvWqxgqZQojcy9HRkfnfzWb5j6tZu3YdsbGxFClcmC4fdgQMOeL7JcsIffkSezs73qtbm84fGNa1bdeKaVNn07pVJzzc3Vnz07IEx/+gS0esra2ZNGGqsQROlaqVqVq1cqriLFWqJFOmTGD1qp+YPm0OhQt7MGXaBEqWLJH+RkgHs2VxDh08wubN27jpdwsXVxcKFSyAjY0NYWHhPH4ciI2NDU2aNuLjHh/hkQX3QVJKyuIYSFmceFIWx0DK4sTLzLI4yf7Pf95/KGp1LK1aN2fylPEULmxad12tjuXa1WscPHiUfn0HMXrMCOo3qJuqJyeEEHlBssm2S9fONG7cMMn1trY2vFPjbd6p8TZPnz4jKOhJRscnhBC5QrLJNrlE+1/u7oVwdy+U3niEECJXStMNJP/7D7h48TJ6vR7valUoV65sRsclRK6iUCgsNlJJZA6dToe1tXWa90911689u/czbOhoLl++yoXzFxk6eBRbNm9LcwBC5AUKhQKdTpfVYYh
00Gq16foC0eyVbWRkJI6O8d/eb9r0B6vWLKVQIUNn7EuXrjB1yiw+6NIxzUEIkRcolUrUanW26wWi1+vRarVoNBrpjZBIW+h0OrRarXF+hrQye2Xbp/dATp06Y3ysUqmIiIgwPg4PC8c6G3ZnESK7sba2xsbGJqvDSECn03Hnzh258ibxtrC2tsbe3t44QCOtzGbJ6dMnMXfOdxw5fJxhwwfy2WefMmzIKIoVK4pGoyUgIIAxX36RriCEyCusrKyyXV9bKysrY1zZ7arb0jKzLcz+r79RvhzLVixmw/rf6dd3MIOHDGDdhjX8e/0GOp0OL68KFChYIEODEkKI3CZFb7FKpZJen3xMvfrvMXfOdxw+fIwvRg7BxcUls+MTQohcIUU3IcLDw/Hzu4W7eyF+WPodXl4V6P/ZEA4fOprZ8QkhRK5g9sr26JHjzJ41HwdHB+Ji45j4zVg+6taFunXrMG/udxw+dIxRY4ZToICbJeIVQogcyeyV7Yrlaxg1Zjjbtv/O3G9nsHLFWgCKFS/KoiXf8k6Ntxg4YFimByqEEDmZ2SvbsPBwypcvB0CZMqWIiowyWd+pc3vqvPdu5kQnhBC5hNlkW69eHb6ZOJ269Wpz5YoPtevUSrCNp6dHpgQnhBC5hdlkO3rMCHb8uZsHDwJo0aIJLVs1t0RcQgiRq5hNtiqVylhNUwghRNok+wWZn9+tFB9IrY7F//6DdAckhBC5UbLJdvy4yUwYP4WzZ88nOW762bPnbFi/kR7de3PN53qmBCmEEDldsrcR1q1fza8bNjJj2lxiY2N5442yFCxUEBsba8LDwrl335+gwCdUr+7NxG++xtu7iqXiFkKIHCXZZGtvb0ffzz6hZ69u/PPPea5euUZgYBDh4eG4uLjQsWM7atZ6J8urVgohRHaXorkRbGxsqFevDvXq1cnseIQQIldK3wSNOZBGl2zldiGEyBR5LtmGRcVldQhCiDwozyXbJyHBWR2CECIPynPJNjQyJqtDEELkQXku2QZHRGd1CEKIPMhssv3r+AmTevcvXgSj18d/yaRWq9m08Y/MiS4ZsbGxzJv7Hd26fkKL5u35uFtvNm/aana/+09XWiA6IYQwZTbZTv5mBuHh8dV0e37cl8DAIOPjyMgolv24KnOiS4ZWq8PNzY1582eyZ+82Jk+dwK8bNnLk8LFk91PH+aLRqi0TpBBC/J/ZZPv6VWxij7PKqwEXxYoVRaFQ8MYbZXm3dk18fP41u+9p398sEKEQQsTLXjWV00Gj0XLt6nU+6t7F7LYxsRFotVoLRJX9vHreefX5v07awkDaIV5a2iKl2+aaZLtk8VIcnRxp3ryJ2W2fPnuCj4+PBaLKvm7cuJHVIWQb0hYG0g7xUtMWsbEp67ufomQbHBxskr1DgkOwsbEx/B4SmuKgMsvS75dz7ep1Fiycg7W1tdnt87k4UaVK3pw0R6vVcuPGDby8vFAqlVkdTpaStjCQdoiXlraIjo4BdpjdLkXJtl/fwcbf9Xo9Q4eMMnlsZWWVoqAyw5LFP3LxwmW+WzQHF5f8KdonPDoqz/9RKZXKPN8Gr0hbGEg7xEtNW6R0O7PJ9rtFc1N0oKyweNFSLl68zMJFc3FxcUnxftFq+YMSQliW2WRbvbq3JeJItaCgJ2z940+sbaz5qOsnxuXe3lWYO29GkvvFad2Jii1kiRCFEMIo3V+QBQU9Ye3qX/h6/JiMiCfFPD09OPbX/lTvZ62043lY3v5yTAhheSlKtv73H3Dx4mWsrVU0er8Bjo6OhIWF8fPaDezYuZvCnp6ZHWcGeoC1Urq4CCEsy2yy/efMOSaMn2Icsrtx4x+MHTuKCeOnULRoESZ9M466dWtneqAZ7WVkHPkdzfdcEEKIjGB2BNm6X36ldZsW7N67jQGf9+VhwCNmzfyWr8eP4ful31GvXp0s7Y2QWq3fGQ/A7vPrszgSIUReYvbK1t//AV+OHYmDgz0fdOnIiuVrGDS4PzVrvpP
hwWzbuoP9+w5y5+49KlXyYtHieUluO3zYGP69fgOlKr5nwW+//4yra/K9EsoWrgGX4W7QavT6T3PUG4UQIucym2wjIiJxdnY2bKxSYWtrS8lSmVPgsUABN7p/3BVf35tcv25+BEf/AX3o8mGnVJ1DpbAx/n7h9lbeKtsBhUK6ggkhMleaRpCFhoRia2trsk3BggXSHUz9BnUBePLkabqPlRL7Ln5HeMwLGlXtb5HzCSHyrgwbQXbk2N6Mj86Mdet+45efN+Dh6UGXLh1p3qKp2X20Oi11KvbmlO9aAIKCb+apCThk0pF40hYG0g7xsnQimuw6gqz/gD6UKlUCW1tbLly4xNTJM3FwcKBe/feS3e+m301s9MWMj6MiY4yT0pzwn0+xfDUo5Vo/U2PPDmTSkXjSFgbSDvGyZCKa7DqCrHJlL+PvtWrVoG271hw5ctxssi1foTy2diqO3TM8dnN1N05Ks9P3LjqrKNrUG5RpcWc1mXQknrSFgbRDvCyfiCYnSGmvAqVCiZ2Ng/HxnaALPAm9SZECXsbj5IU/OJl0JJ60hYG0Q7wsmYimyfutU3SgQ0d2p2i75Gg0WrRaw49ep0OtjkWhsEowbWJ4eATXff6l+pveWFtbc/nyVXbu2MPoMcNTfK42Ncay69xsYmJfsOZQP0Z22JXu+IUQIilmk61Wq8XT04OWrZrh7u6eqcGs++VXfv4pfrBB86ZtqVbdm0WL5/HlmPF4e1ehR89uaLUafv5pPVOnBACGeRIGDe5Pw0Ypv9davUwbdp2bbXy8YHubjHsiQgjxH2aT7bTpk9i1cy+//Pwrb7/9Jm3ataJOnVqZ8nGjd5+e9O7TM9F1r8/k5eLiwo/LF2f4+YUQIrOYHa5bt14dZs+dxq+/raWiV3kWL/yBDz/owaqVawl8HGRu92ytSslmCZaFRgZmQSRCiNzObLJ9xd3Dnd59erFx8zpGjR7OlSvX+Pjj3iZlznOaDu9OyuoQhBB5RIqT7StXr/pw5PAx/HxvUaVyJWMtspyqjGfNBMv0eh0abWwWRCOEyK1S1PUrNDSUfXsPsmvnXsLCw2nWrDErV/9AyZKZM0eCJXVvsIDpG+uaLJu1uRE6vZbBrTdhrbTFyT79Q5GFEHmb2WT7zaTpnDp1hkqVvPi0dw/qN6ib469m/8vD5Q2ehN4yPtbpDcPvdp6dyYNnlxnaZgsOti5Yq+yyKkQhRA5nNtn+dfwE7h7uWKtU7Nt7kH17Dya63bcLZmV4cJbSp8kKZm1plGD5g2eXAViy6wMqFK1Pl7ozLR2aECKXMJtsmzVvQm6f8lWptGZkh90s2J70AI4X4Q8sGJEQIrcxm2y/HjfaEnFkOQfb/Axvt51FOzokuv552H202jiUSimlI4RIvVT3RsjNnO0LMrj1piTXH7rygwWjEULkJtkq2W7buoPP+w+laZM2DB+WfGn0qKhopk+bQ6sWHWnf9kOWL1uNXq9PdwyuTkVoXC3xWb8Cnl/j0OXvmb8t4e2GiOgXRKlfsv7ocEIiHqU7DiFE7pKtku2rsjhdupgvdbNk8VJehr5k4+ZfWLZ8MceP/83WP/7MkDhqV+zOuxW6JVgeFOLHGb/fiY59yfUHh4zJPSjkJgt3tGf7mSncf3qBo1dXZEgcmW37mamERubsUYBC5BTZaorFlJbFiYmJ4dChoyz5fgHOzs44Ozvz0Udd+HP7Tjp/kPg911e0Om2KZlZvVPVzrFDg5lyC3ecT9rTYdnoy205PpmLRBlQt1QqAu0FnAYjTqtFqtcza0oBejX5EqVBhb5uf/A4eqOOisLV2MDmWVqdBYaXAyir9730vI4PQ6bW4OhVNdP3rM9H7+B8gv0Nh6lfuk+7z5kRSocBA2iFellZqyI4CAh6hidNQrlxZ47Ly5ctx774/Wq022UlybvrdxMYmZV9yFbSqAxHQsPRE/J7vJDD8coJtfB8dx/fRcZNl94LOc/jMbwD8c203fs93oVTYUrFgG64
//YNW5RcS8PIMN55tp2X5+ez0HUwR57d5HH6B90qOws2+TIriS8xOX0MJo7YVk7+//Gom+vuPruOiu4LCKu/OYyoVCgykHeJlSaWG7Cg6KgpbO1tUr5Uxd3JyQqc1zIHr4GCf5L7lK5THydExlWesQm2a8OPeboRGPja7tUYXw9mHywAIijoPgFan5vrTPwAoUjI//5w5jUYXQ8GiNuALj8MvAOBSyIYqpaqYHC9OE0NwxEM8XMqZPfdO3/9HXKVKoutfzURfsWJFdvrCo7DzlCpaOU1Xt5ExIew8N4NiBapQx6tnpiTsKPVL7G2cM+Sq/79yUoWCx8E32H5mMoNabczwY+ekdshsUqnhP+wdHFDHqNFotMaEGxERgUKpwNY2+dFtSkXaZ6Mf2OpXAG4++hsnuwL8fMR8+ZyXUQnvia462Nv4+9rD/UzW7T4/C402mhrlPzAu23fxBy7d3cEX7XfiaOea6Hmi1KHodPEfZ5RKJaGRQXy/6wNGd9yLnY2zcZ1eryMkMsD4ODz6qUmbPHpxnQLOJXkR7o+DrSuuTkUSPee9p+e498TwU7VUM9yciyfVDEn69fhI2tQYSz6HxOdKXrSzHQ2r9qdupV6pPnZK5YQKBTceHuFlVFCmxpkT2sFSsqRSQ3ZUvHhRVNYq7ty5S4UKbwBw69YdSpUsmbl/jApDc3kVN4w2m9D1BFHqUMKjn7Ny/6dYK+2I08ak+zz7Ly3kVuApmr05jGV7exiX770wn0ZV++PmXIwZmwwTpTvaupLf0ZPHwaYfe16EP2DZXsPcwJExITx87oOLUxFcHYtxL+QYu/z+MG7r+/AY7WqNN2679tAAShSqzoNnl7G3yc/IDrsSlB168OwKcZpo4+OA59dSlGzP3txEtdKtsbV2RK/XczfoLDcCjlGrwodJ7vP05R3j709Cb5tc4cdp1Gh1sSZvJgA3H52glPtb2Pzn/nhqvAh/wLZTk/ms+Zo0HyPD/KenjV6vIzz6BfkcCmVRQCK1slWyTWlZHDs7O5o0acTqVT8xcdLXREREsPH3LXTs1M7iMTvYuuBg68KErifQ6jScv/UHxQt6c/72Vq7eT3t597tBZ00SLRiSou/DYybLItUhRKpDEuz/457uxt+3nBzPs7B7qJS2jOl4gEdhF0y2jdVEc/nuLp6E3ubcrS1A/FDl6NiXzNhUjwldTwBw+MpSyhZ+l/VHh5kcY+fZmVy5t4cynjWxt8lHGc+axivimNgIYjXRONm5ceDSYnR6He9W+Mj4heJ/Ryjq9TpmbKpP7ybLDesxbOD/9BLrjg5lVMc92NvkA+DXv0YS8OyKMb5XNp0YC2BcrtGq0eq0Cb6cTM6/D44QFHrT7HY+/gdxsHWhjGcNAHQ6LS+jgrBRORAc8ZDiBauaPca205Nxti9E7Yrdk/z08rpzt7Zw4NLiBM9bZF/ZKtmmtCwOwNBhA1kwfwldu/REZa2idesWdOrcPqtCBwxXvrUqdAWgXYHxtKnxFaGRgZy9uYUWb3+Bj/8BihX05sq93QQG+3I78LRF4noWZiglrNGqmbWlQaLbvF4iKDHnb23FysqK076/ctr310S3efDssjFJg+G2i5tTcdYfG0ZQyE1qlTe0TWRMMD7+B9h+Zqohvpf3mb2lMR4u5WhafSi21oZ76g+fX///kQzJNjz6OQB/+awhJi6CN8u0JeDZFcBwX/vm45NULtHYJKZYTTQ2KnvWHR3GoxfXGdJmC7vPzcHJvgCujsX4y3cV5Srs5lHQNcp41kSljL8NpdfrjP8md894+5kpQHxiv3hnO/sufkfxQtUIeHaFMZ32G58TwIZjXxATG0bfZqv/f3w91x8cAuCM32/0a7YWD9c3kjzfk5BbHLhkqFRy89EJyhetm+S2aaHX65mxqR49Gy2hpPubCdZrtGpUSluTZbcfn8bdpZxcaSfDSp8RIwFygOjoGEZPnMmcyV/h5JTaL8gyT2RMCLGaaP48MxUXp6LodBo
8Xctz6e5OQiIeZnV42Uodrx6curE+2W0Ku1akS91ZLN7Z0bisbc3x7Dw7I5m9DBpVHcB7lQy3Xv44OQEbaweu3NtDt/rzKVu4FjGx4VhZKYjTxBin3bz9+DS//20YgPMq2R73Wc3f19fi6lTM+H/4wXszqFjM8Eb3akrPkR12ceXeHkq5v8Xqg58Z42hXawLepVqYxPbqk0P5ovUo5f6mMdm+ft6kRMaEYGfthFJpjV6vQ6/XoVDEX2dptVp8fHyoUqUKMXFhBIb48vtfY/B0LU+7muMJjQw0JvTAYD9WH+yLe/6y1K3Ui0r/f3ObvrEuRd0q0btp8n3MI2NCzF65v4wMQqW0xdHOlZ1nZ6LT62jzzlf4PjpO8YLViFaHJvtmlB6vt0VqviAbPXEm304bh7190jMDSrLNgSJjQrj/9AJexRoRHRtGRMwL8tm7Ex79jEh1KA62+TlwaRFvl+3APzc38ejF9SSP1ch7LEevJn9Vm9NodV4oFWnvxuTmVJzgiACTZR1rG/pVvzK83Xau3d/HkavLEu7vXJzg8IAEyxt5DyDg2dUEn2hKe9Tg3pNzJstavj2KyJgQnof7cyPgqPEqGyCfgwdhUU+Mjz1cyqHVaSji5kWlEu/zNPQu1cu04UWYP6GRgfz5zzQAiheqhptTUa7c2wOAjcqekR12M3vL+3g6VadlrcGsPdyPwq4VCQzxNYmnafWhlHR/k1UH4nutFHHzoneTFbyMCuL7XV1wti9Er/d/QKuLo2C+kib7R0S/4Mr9PRy9upwm1Ydw+e4uejdZTkTMCzQaNT8dGUTDKp9RrnBtftzbHXub/IzquDvBXNOvTOh6ItFPHNGxYVy6s4M6Xj0S3S8xEdEvjG+ekmwzQG5KtmkRERPMuVtbKF+4Hv73HhGh/BelUkXjaoO4EXCUh899iI4No3WNrwiNfIzCSsVxn9X4+O+n5htdeLdiN67c28Nxn1VUK92KsKhnCRKEs31B40d9kbfVfKMLtwJPJ/vp7PUr/8S87/15om9mAN6lWnL1/l7erdCNM36GPu21K3Y33uIa0e5PLt3dyXGfVbSvNYlHL3womK8UZQvXwu/hX9jZ5EOpUBGnVbPn/FwaVu3xMBkcAAARP0lEQVTPuVtbiIwJpnWFxXhXrSbJNq3yerJ9JbXv3Hq9PkFPhNfXaXWx6PV6lAqVyUdTMHxRpNHForBScifoHyoUrYder+fcrc0UL+hNfsfC+D48zsPn12j59iisrBQEhdxkz/l51HjjA3afn0PbmuPZdW42H9Wbi+/DYxTIV5JDl78HYHTHfQSF3jT5ss7W2gl1nKEuXu0KPTntt864rojbWzwOvpjgeZT1XIH/sxlotP7mG1DkerWLTaPhu/VzdrLVaLT8+MMKDhw4hE6vp0GDeoz4YnCilR9aNDf9sisuNo6SJUuw5ifDO92smd9y+NBRVNbxL/CFC+dS0atCoueWZGuQlo9JWSU6NszY6+B1ho/UViZvApExITwJvU0ZzxrodFp0eg0qpS1R6pcoFSpCIh7h6VoejTbW+CXYy4in3Lv9iKpVvY1todXGcfneLq7e30frd74iIuYFLyOD8HApR5ECXsTEhhMVG8bS3V0Z2OpXXB2LoNVpOOP3O8d9VgHwccOFFMpXmi0nJ6BS2hCricJaZY//04uU9niHepV7s/nEOKJjX1IwXymeh90HoHeT5bwIe8CO1+4vF3Hzoo5XD6LUoew5Pw+AbvW/xcPlDQ5cXkxZz1rEaWLYd3GBcZ+a5T/k8YsbPHxxLUP/P/KKxmW+p+ZbVXN2sv35p/X89ddJ5sydjkqlZNzXk6lc2YvBQwaY3bfPp5/zfuMGxt4Is2Z+i5OTI0OHDUzRuSXZGuSkZJvZLNkWOp2GqNgwnOzcUhhbHFZWVgk+LaTGk9DbWCvtcHMuZrL8wu3tVCrxPvY2+QiNDOJu4FkUEcV5zmneLNOGiJhgCuUvRUT0C+49Oc/D59fwdKtA1ZL
NyefgzvSNdalb6RPe8+oF6IlSh2KtsudJ6G0cbV1RKJTsv7iQp6G3GdByHdGxYTx6fh3v0i25/uAQ529vo0m1Qbg4FiEo5CZFC1Ri/dHhiXaze6tse/R6Pe9W/IiwqKf4Bhzjwp3tlChUnfe8enLyxjqCwwOIiHkBgL1NPkp71MDJvgBnb26iXOE63A48xTvlOnH+9laTY1cu0YQibl6AFUeu/ohWF0d+B0/ql5iY8+/ZfvhBDwYNGUDDhvUAOHv2PFMnz2LHrs0oFEl3rbnxry+DB3/Bps3rKVjQcCM7rcl25qTRaRium3vI0Mx40hYGuaUdkrvl9TqdTkOkOgRne9NualqdBq1Wy02/W6kervvV5Dlmk63F+tmGh0fw9Okz3njDdPKYiAjDck9PjyT33bN7P7Vq1TAm2lcO7D/Egf2HKFDAjZatmtPlw07JJm1I3UQ0uZlMOhJP2sIg77XDkyTX5OiJaKKjogBwfO2q0snJCTBMBJ6UmJgYjhw5xtfjTCcT79y5PQMHfYazszO+N/yYMnkmCoWCLh8mPxdu2iaiyT1yy1VMRpC2MJB2iJcrJqKxdzAMk4yMjMTFJT9gmDwGSHaWrqNH/8LW1o53a9cyWV6+Qnyn5spVKtH9467s33/IbLJNz0Q0uYlMOhJP2sJA2iFeZkxEY7FKDc7OTri7F+L2rfhJRW7dvIOTk2F5Unbv2kfzFk1MplNMjJUil5cAFkLkaBYti9O6TQvWr/ud589fEBoayk9r19GyVbMk77M+eBDAdZ9/adW6eYJ1R48cJzIyEr1ej6/vTX7dsIkGDTJ2jLgQQmQUi05E83GPbrx8Gcanvfqj1+to0LA+n/UzzO06/9tFAIwaPdy4/Z7d+/H2rkLx4sUSHGvb1h18++0itFothQoWpEOHNnzYtbNlnogQQqSSRZOtSqVk2PBBDBuecNLt15PsK58P/CzBslcWfz8/Q2MTQojMlK2q6wohRG4lyVYIISxAkq0QQliAJFshhLAASbZCCGEBkmyFEMICJNkKIYQFSLIVQggLyFalzFNTySG1lRqEECIrZatku2H9b1y+cpW1P68wVnJYuWJtkpUc2ndok+LJw4UQIitlq2S7e9c+Bg0ZYJwk/NPePZg6eRYDB/UzOym4Oa8KUkT+f17dvEqr0xIbG0dEZCRKRd6eTk/awkDaIV5a2iI6Rg3E55ikZJtkm5ZKDqmp1BCjNjTI1LlLMucJCCHytBi1Otm5ubNNsk1tJYfUVmpwyZ+PaeNHYmdrm6I6RUIIkRJ6vZ4YtRqX/AkrQb8u2yTb1FZySG2lBoVCgZurS0aHLYQQyV7RvpJtun6ltZLDK1KpQQiRnWWbZAupq+QglRqEEDmJld7cV2gWpNFoWfrDcg7sP2ys5DB8xGBsbW0SVHIYNmQUd+7eM1ZqaNW6OV0/+iDdvRaEECIzZKtkK4QQuZVcBgohhAVkm94ImSk1w4BzKnPDl821QU5uo21bd7B/30Hu3L1HpUpeLFo8z7guKiqaBfMXc+rkGaytrWnVujn9B/Qxdv9L7/rsJLl2GD5sDP9ev4FSFd9R/7fff8b1/z10clM7xMbGsmjhD1y8cJmQ0FAKuLnRoWNbY0+l9L4W0vpayRNXtq8PA97w6xru3/dn5Yq1WR1WhmvfoQ379v9p/Hl9nghzbZCT26hAATe6f9yVLl0SdvtbsngpL0NfsnHzLyxbvpjjx/9m6x9/Ztj67CS5dgDoP6CPyd+H62tdIXNTO2i1Otzc3Jg3fyZ79m5j8tQJ/LphI0cOHwPS/1pI82tFnwd06fyx/ujRv4yP//nnnL51y056rVabhVFlrJkz5ukXL1qa5HpzbZAb2mjTxj/0w4aONj6Ojo7WN2ncWn/jhp9x2Z/bd+n7fDogQ9ZnV/9tB71erx82dLR+08Y/Et0+t7bD62bP+la/aOEPer0+/a+FtL5Wcv2VrblhwLnJgf2HaNu6M5/26sfG37eg0+kA822QW9s
oIOARmjgN5cqZPq979/3RarXpXp/TrFv3G21bd+azvoPYv++gcXlubweNRsu1q9cpU7Z0ul8L6Xmt5Pp7tqkdBpxTJTd82VwbKP5/3y23tVF0VBS2draoXrtP6eTkhE6rQ62OTff6lIwayi76D+hDqVIlsLW15cKFS0ydPBMHBwfq1X8v17fDksVLcXRypHnzJoQEhwBpfy2k57WS669sXx8G/Epyw4BzqvIV3sDFxQWlUmkcvnzkyHHAfBvk1jayd3BAHaNGo4m/+oqIiEChVGBra5Pu9TlJ5cpeODo6olKpqFWrBm3btTb5+8it7bD0++Vcu3qdOXOnY21tne7XQnpeK7k+2aZ3GHBO9frwZXNtkFvbqHjxoqisVdy5c9e47NatO5QqWRKlUpnu9TnZ670Icms7LFn8I+fOXWTBwtnG+VbS+1pIz2sl1ydbSN0w4JzK3PBlc22Qk9tIo9GiVsei1WrR6wwfbePi4rCzs6NJk0asXvUT4eERBAYGsfH3LbRu0wIg3euzm6TaITw8gjOnzxITE4NWq+XChUvs3LHH+PeR29oBYPGipVy4cInvFs3BxcV0Aqr0vhbS+lrJEyPIkhsGnFuYG75srg1ychutXbOOn39ab7KsWnVvFi2eR1RUFAvmL+HUyTOorFW0bt3iP/1H07c+O0mqHaZMHc/XX03C3z8AAE9PDz7o0pFWrZsbt8tN7RAU9ISPPuyFtY21yZW3t3cV5s6bke7XQlpfK3ki2QohRFbL/p8RhRAiF5BkK4QQFiDJVgghLECSrRBCWIAkWyGEsABJtkIIYQGSbEWuExgYRMP6zbl61SerQ0mVS5eu0LB+8xw9+Y9IWq6fiEZYzqyZ35rMJvWKnb0d+/Znz7lPhbAUSbYiQ3l7V+GbKeNNluWEIb+5mV6vR6vVolLJyz0ryatAZCiVtYoCBdxMfl6vCDB82BjmzJ7P8mWrade2C61adGTunO9Qq9XGbTQaDcuXreaDTt1p8n5rPunZj0MHj5icJyoqmiWLf6RL549p2rgNXT/sxfp1v5ls8+L5C74eO4nmTdvRresnHNh/KNnY9+49wPuNWnLt2nX69R1M86btGNB/KH5+t4zbJPVR//1GLdm79wAQfxvj0MEjjBk1juZN29GzR18uX77Ks2fP+WrMBFo0a8cnPftx9cq1BHHcvnWHz/sPpWmTNnzaqx/nz10wWf/w4SMmTZhK61adaNOqM6NHfs3dO/cSPI9LFy/zWd9BNG3chrNnzyf73EXmk2QrLO74sROEhYWxZMl8Jkz8ilMnT7N82Wrj+pUr1rJ7116GDP2ctT8vp2mz95kxfS4XLlwCDFdqX4+dxKmTZxg2YjA/r1vJuHFjjDM7vbJi+RqaNWvM6rU/0rBRfWbPns/DgEfJxqbX6Vm5fA1Dh33OilXf4+zkxJRvZphML5hSa1b/QrsObVi1ZiklS5Zg2pRZzJo5jzZtW7Jy9VJKlizOtKmz0Wg0Jvv98P1yen36MatWLaVSJS/GfT2ZZ8+eAxAcHMLQIaNwcXVhyZL5LF22kOIlijFi+BhCQ0NNnseyZasZNKg/v6xfRaVKFVMdv8hYkmxFhrp8+Sotmrc3+fl67CSTbZzzOTNy1DBKlipBnffepe9nn7Jzxx6io2OIiYlh6x9/0qfvJzRsVJ/ixYvRo2c33qtbm/W/GK5cL168zJXLV/lmynjq1atDkSKFqVa9Km3atjI5T8dO7Wj0fgOKFStK388+xcbGhosXLycbv16vZ8jQz/GuVpWSJUvQu28vHj8O5PHjx6lui46d2lGvXh3Dc+jxES9eBFOrVg3q1X+P4sWL0bNXd549e86DBw9N9uv+cVfq1HmXkqVKMHL0cFxdXfhz+y4A/ty+C09PD0aOGkaZsqUpUaI4w4YPwsnJiYMH4q/+9Xo9gwf35623q1OkSOEEM18Jy5ObOCJDVfKqyNhxo02W2dnZmTz28ipvMhtTlaqViIuLMya0uLg4vKt
VNdmnWrWq/LphIwA3/W7h7OxExYrlk43l9TIuKpUSN1dXQkJCkt3HysqKsuXKGB8XKlgAgJDgUEqUKJ7svgnPH38cNzdXAMqWLZ1gWWhIqMl+lat4mcRd0asC/vcfAODne5Obfrdo0by9yT6x6lgePTR9Q6hQsQIi+5BkKzKUja0NxYoVTdU+ic07l+jMfa8tTMnUfq+XdTfsBDpd8pPcWSmsTCfE/v95dHpDPTeFlSJB0Ib5YxMeV/n6F1L/P05iy14dOymvT8yn0+t46+3qDB8xOMF2Tk7xpVpyQhWFvEZuIwiL871x06RQ4L/X/8Xa2poiRYpQtGgRrG2suXLZ9IujK1euUbpUScBQAigsLBxf35sWjRvAxdVwX/j5i2Djstu375KRM5X+e93X+LtGo8XP9yYlSxmuqitUKM/9e/4UKlSQYsWKmvzIrYLsTZKtyFCaOA0vXgQn+Hk9GYWFhbPwu+/xv/+A06f/YfXqX2jdpgX29nbY2dnRuXMH1qz+mWNH/+JhwCPWr/uNkydO83HPjwB4663qeHtXYerkmZz4+xSBj4O4du06u3btzfTnV7RoUTw8Pfhp7Tr8/R9w9aoPPyxZlqGTaP+6YSNnTp/F//4DvluwmOCQENq1bwMY7gPrdDomjJvC1SvXCAwM4upVH1atXIvPtesZFoPIeHIbQWSoq1d96NyxW4Ll23dsMvYWaNCwLg4ODgwdMpK4OA0NG9Xj84GfGbf9rN+nWFlZ8f2SZYSGvqRo0SKMn/Alb7/9JmC4hTB77jRWrljLgvlLCAsLo2DBArRt1zrTn59KpeSbyeNYuGAJ/foOpnjxogwfMZgRI77MsHMMHNSPNat/5t49f4oUKcyMmZON9a3c3Fz54ceFrFqxlokTphEVFYWbmytVvatQoECBDItBZDyp1CAsaviwMRQtWoQvv/oiq0MRwqLkNoIQQliAJFshhLAAuY0ghBAWIFe2QghhAZJshRDCAiTZCiGEBUiyFUIIC5BkK4QQFvA/apKcwplSJ4gAAAAASUVORK5CYII=\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "y_pred = model.predict(x_test)\n", "print(calc_mre(y_test, y_pred))\n", "#history60 = history\n", "plot_training_history(history, 32*2.56)" ] }, { "cell_type": "code", "execution_count": 10, "metadata": { "ExecuteTime": { "end_time": "2018-09-26T10:28:37.316134Z", "start_time": "2018-09-26T10:27:22.339834Z" } }, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/home/hegder/anaconda3/envs/dp2/lib/python3.5/site-packages/keras/backend/mxnet_backend.py:89: UserWarning: MXNet Backend performs best with `channels_first` format. Using `channels_last` will significantly reduce performance due to the Transpose operations. For performance improvement, please use this API`keras.utils.to_channels_first(x_input)`to transform `channels_last` data to `channels_first` format and also please change the `image_data_format` in `keras.json` to `channels_first`.Note: `x_input` is a Numpy tensor or a list of Numpy tensorRefer to: https://github.com/awslabs/keras-apache-mxnet/tree/master/docs/mxnet_backend/performance_guide.md\n", " train_symbol = func(*args, **kwargs)\n", "/home/hegder/anaconda3/envs/dp2/lib/python3.5/site-packages/keras/backend/mxnet_backend.py:92: UserWarning: MXNet Backend performs best with `channels_first` format. Using `channels_last` will significantly reduce performance due to the Transpose operations. 
For performance improvement, please use this API`keras.utils.to_channels_first(x_input)`to transform `channels_last` data to `channels_first` format and also please change the `image_data_format` in `keras.json` to `channels_first`.Note: `x_input` is a Numpy tensor or a list of Numpy tensorRefer to: https://github.com/awslabs/keras-apache-mxnet/tree/master/docs/mxnet_backend/performance_guide.md\n", " test_symbol = func(*args, **kwargs)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Train on 60000 samples, validate on 40000 samples\n", "Epoch 1/2000\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/home/hegder/anaconda3/envs/dp2/lib/python3.5/site-packages/mxnet/module/bucketing_module.py:408: UserWarning: Optimizer created manually outside Module but rescale_grad is not normalized to 1.0/batch_size/num_workers (1.0 vs. 0.015625). Is this intended?\n", " force_init=force_init)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "60000/60000 [==============================] - 24s 394us/step - loss: 4209.7176 - calc_mre_K: 25.6984 - val_loss: 2627.7965 - val_calc_mre_K: 16.0388\n", "Epoch 2/2000\n", "60000/60000 [==============================] - 23s 385us/step - loss: 2203.3902 - calc_mre_K: 13.4509 - val_loss: 1408.6241 - val_calc_mre_K: 8.5976\n", "Epoch 3/2000\n", "60000/60000 [==============================] - 23s 386us/step - loss: 935.5657 - calc_mre_K: 5.7116 - val_loss: 773.9411 - val_calc_mre_K: 4.7238\n", "Epoch 4/2000\n", "11584/60000 [====>.........................] 
- ETA: 16s - loss: 822.8546 - calc_mre_K: 5.0223" ] }, { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 140\u001b[0m \u001b[0mepochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m2000\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 141\u001b[0m \u001b[0mverbose\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 142\u001b[0;31m validation_data=(x_test, y_test))\n\u001b[0m\u001b[1;32m 143\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 144\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)\u001b[0m\n\u001b[1;32m 1703\u001b[0m \u001b[0minitial_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0minitial_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1704\u001b[0m \u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msteps_per_epoch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1705\u001b[0;31m validation_steps=validation_steps)\n\u001b[0m\u001b[1;32m 1706\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1707\u001b[0m def evaluate(self, x=None, y=None,\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/keras/engine/training.py\u001b[0m in \u001b[0;36m_fit_loop\u001b[0;34m(self, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch, 
steps_per_epoch, validation_steps)\u001b[0m\n\u001b[1;32m 1234\u001b[0m \u001b[0mins_batch\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mins_batch\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtoarray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1235\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1236\u001b[0;31m \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mf\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mins_batch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1237\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1238\u001b[0m \u001b[0mouts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mouts\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/keras/backend/mxnet_backend.py\u001b[0m in \u001b[0;36mtrain_function\u001b[0;34m(inputs)\u001b[0m\n\u001b[1;32m 4566\u001b[0m provide_data=data_shapes, provide_label=label_shapes)\n\u001b[1;32m 4567\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_module\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward_backward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 4568\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_module\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4569\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_update\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_train_updates\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4570\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_weights_dirty\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/mxnet/module/bucketing_module.py\u001b[0m in \u001b[0;36mupdate\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 474\u001b[0m \u001b[0;32massert\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbinded\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mparams_initialized\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moptimizer_initialized\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 475\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_params_dirty\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 476\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_curr_module\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 477\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 478\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mget_outputs\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmerge_multi_context\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/mxnet/module/module.py\u001b[0m in \u001b[0;36mupdate\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 669\u001b[0m 
\u001b[0mnum_device\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_context\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 670\u001b[0m \u001b[0mkvstore\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_kvstore\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 671\u001b[0;31m param_names=self._exec_group.param_names)\n\u001b[0m\u001b[1;32m 672\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 673\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mget_outputs\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmerge_multi_context\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/mxnet/model.py\u001b[0m in \u001b[0;36m_update_params\u001b[0;34m(param_arrays, grad_arrays, updater, num_device, kvstore, param_names)\u001b[0m\n\u001b[1;32m 174\u001b[0m \u001b[0;31m# use a better solution later\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 175\u001b[0m \u001b[0mw\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mg\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mp\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 176\u001b[0;31m \u001b[0mupdater\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mnum_device\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mg\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mw\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 178\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/mxnet/optimizer.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, index, grad, 
weight)\u001b[0m\n\u001b[1;32m 1515\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msync_state_context\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstates\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mweight\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcontext\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1516\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstates_synced\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1517\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moptimizer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate_multi_precision\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mweight\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgrad\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstates\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1518\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1519\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0msync_state_context\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstate\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/mxnet/optimizer.py\u001b[0m in \u001b[0;36mupdate_multi_precision\u001b[0;34m(self, index, weight, grad, state)\u001b[0m\n\u001b[1;32m 272\u001b[0m \u001b[0mcast\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mweight_master_copy\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0mdtype\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mweight\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdtype\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mout\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mweight\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 273\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 274\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mweight\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgrad\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstate\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 275\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 276\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mset_learning_rate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mlr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/mxnet/optimizer.py\u001b[0m in \u001b[0;36mupdate\u001b[0;34m(self, index, weight, grad, state)\u001b[0m\n\u001b[1;32m 1449\u001b[0m \u001b[0;32massert\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0misinstance\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgrad\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mNDArray\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1450\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_update_count\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1451\u001b[0;31m \u001b[0mlr\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_lr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1452\u001b[0m \u001b[0mwd\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_get_wd\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mindex\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1453\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/keras/backend/mxnet_backend.py\u001b[0m in \u001b[0;36m_get_lr\u001b[0;34m(self, _)\u001b[0m\n\u001b[1;32m 4724\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4725\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_get_lr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 4726\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtensor\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masscalar\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m/\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0;36m1.\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdecay\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtensor\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masscalar\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mnum_update\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 4727\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4728\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mget_config\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/mxnet/ndarray/ndarray.py\u001b[0m in \u001b[0;36masscalar\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1988\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m \u001b[0;34m!=\u001b[0m 
\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1989\u001b[0m \u001b[0;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"The current array is not a scalar\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1990\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masnumpy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1991\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1992\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mastype\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdtype\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcopy\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mTrue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/mxnet/ndarray/ndarray.py\u001b[0m in \u001b[0;36masnumpy\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 1969\u001b[0m check_call(_LIB.MXNDArraySyncCopyToCPU(\n\u001b[1;32m 1970\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1971\u001b[0;31m \u001b[0mdata\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mctypes\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdata_as\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mctypes\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mc_void_p\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1972\u001b[0m ctypes.c_size_t(data.size)))\n\u001b[1;32m 1973\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mdata\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;32m~/anaconda3/envs/dp2/lib/python3.5/site-packages/numpy/core/_internal.py\u001b[0m in 
\u001b[0;36m__init__\u001b[0;34m(self, array, ptr)\u001b[0m\n\u001b[1;32m 245\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 246\u001b[0m \u001b[0;32mclass\u001b[0m \u001b[0m_ctypes\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobject\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 247\u001b[0;31m \u001b[0;32mdef\u001b[0m \u001b[0m__init__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0marray\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mptr\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 248\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mctypes\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 249\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_ctypes\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mctypes\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", "\u001b[0;31mKeyboardInterrupt\u001b[0m: " ] } ], "source": [ "from keras.models import Model\n", "from keras.layers import Input, Add, AveragePooling1D, MaxPooling1D, Concatenate\n", "\n", "a = Input(shape=(8,))\n", "first = Dense(256, kernel_initializer='normal')(a)\n", "#first = Dense(128, kernel_initializer='normal')(first)\n", "#first = BatchNormalization()(first)\n", "first= Activation('relu')(first)\n", "\n", "first = Reshape((256,1))(first)\n", "#first = UpSampling1D(size = 2)(first)\n", "first = Conv1D(filters=32, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal', activation='relu')(first)\n", "#first = UpSampling1D(size = 2)(first)\n", "\n", "\n", "\n", "\n", "# first_1 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", "# kernel_initializer='normal')(first)\n", "# first_1 = Activation('relu')(first_1)\n", "# first_1 = Conv1D(filters=8, kernel_size=3, strides=1, padding='same', \n", "# kernel_initializer='normal')(first_1)\n", "first_2 = Conv1D(filters=4, 
kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "#first_2 = Activation('relu')(first_2)\n", "first_2 = Conv1D(filters=8, kernel_size=3, strides=1, padding='same', \n", " kernel_initializer='normal')(first_2)\n", "first_3 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "first_3 = Activation('relu')(first_3)\n", "first_3 = Conv1D(filters=8, kernel_size=5, strides=1, padding='same', \n", " kernel_initializer='normal')(first_3)\n", "first_4 = Conv1D(filters=16, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "list_of_tensors = [first_2, first_3, first_4]\n", "conc = Concatenate()(list_of_tensors)\n", "first = Add()([first, conc])\n", "first= Activation('relu')(first)\n", "\n", "\n", "\n", "# first_1 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", "# kernel_initializer='normal')(first)\n", "# first_1 = Activation('relu')(first_1)\n", "# first_1 = Conv1D(filters=8, kernel_size=3, strides=1, padding='same', \n", "# kernel_initializer='normal')(first_1)\n", "first_2 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "#first_2 = Activation('relu')(first_2)\n", "first_2 = Conv1D(filters=8, kernel_size=3, strides=1, padding='same', \n", " kernel_initializer='normal')(first_2)\n", "first_3 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "first_3 = Activation('relu')(first_3)\n", "first_3 = Conv1D(filters=8, kernel_size=5, strides=1, padding='same', \n", " kernel_initializer='normal')(first_3)\n", "first_4 = Conv1D(filters=16, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "list_of_tensors = [first_2, first_3, first_4]\n", "conc = Concatenate()(list_of_tensors)\n", "first = Add()([first, conc])\n", "first= Activation('relu')(first)\n", "\n", "\n", "\n", "\n", 
"# first_1 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", "# kernel_initializer='normal')(first)\n", "# first_1 = Activation('relu')(first_1)\n", "# first_1 = Conv1D(filters=8, kernel_size=3, strides=1, padding='same', \n", "# kernel_initializer='normal')(first_1)\n", "first_2 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "#first_2 = Activation('relu')(first_2)\n", "first_2 = Conv1D(filters=8, kernel_size=3, strides=1, padding='same', \n", " kernel_initializer='normal')(first_2)\n", "first_3 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "first_3 = Activation('relu')(first_3)\n", "first_3 = Conv1D(filters=8, kernel_size=5, strides=1, padding='same', \n", " kernel_initializer='normal')(first_3)\n", "first_4 = Conv1D(filters=16, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "list_of_tensors = [first_2, first_3, first_4]\n", "conc = Concatenate()(list_of_tensors)\n", "first = Add()([first, conc])\n", "first= Activation('relu')(first)\n", "\n", "# first_1 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", "# kernel_initializer='normal')(first)\n", "# first_1 = Activation('relu')(first_1)\n", "# first_1 = Conv1D(filters=8, kernel_size=3, strides=1, padding='same', \n", "# kernel_initializer='normal')(first_1)\n", "first_2 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "#first_2 = Activation('relu')(first_2)\n", "first_2 = Conv1D(filters=8, kernel_size=3, strides=1, padding='same', \n", " kernel_initializer='normal')(first_2)\n", "first_3 = Conv1D(filters=4, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "first_3 = Activation('relu')(first_3)\n", "first_3 = Conv1D(filters=8, kernel_size=5, strides=1, padding='same', \n", " kernel_initializer='normal')(first_3)\n", 
"first_4 = Conv1D(filters=16, kernel_size=1, strides=1, padding='same', \n", " kernel_initializer='normal')(first)\n", "list_of_tensors = [first_2, first_3, first_4]\n", "conc = Concatenate()(list_of_tensors)\n", "first = Add()([first, conc])\n", "first= Activation('relu')(first)\n", "\n", "\n", "\n", "\n", "\n", "first = Reshape((32,256))(first)\n", "first = MaxPooling1D(pool_size=32, strides=None, padding='same')(first)\n", "last = Flatten()(first)\n", "\n", "model = Model(inputs=a, outputs=last)\n", "model.compile(loss=naive_percent_loss, optimizer='nadam', metrics=[calc_mre_K], context=['gpu(0)'])\n", "\n", "#model.summary()\n", "\n", "from IPython.display import SVG\n", "from keras.utils.vis_utils import model_to_dot\n", "from keras.utils.vis_utils import plot_model\n", "\n", "#SVG(model_to_dot(model, show_shapes=True, show_layer_names=False).create(prog='dot', format='svg'))\n", "\n", "\n", "plot_model(model, show_shapes=False, show_layer_names=False, to_file='model.png')\n", "#plot_model(model, to_file='model.png', )\n", "\n", "x_t, x_v, y_t, y_v = train_test_split(x_train, y_train, test_size=0.2, random_state=42)\n", "# model = naiveploss_mgpu_model()\n", "# model.summary() \n", "history = model.fit(x_train, y_train,\n", " batch_size=64,\n", " epochs=2000, \n", " verbose=1,\n", " validation_data=(x_test, y_test))\n", "\n", "\n", "\n", "\n" ] }, { "cell_type": "code", "execution_count": 100, "metadata": { "ExecuteTime": { "end_time": "2018-09-29T08:48:50.180929Z", "start_time": "2018-09-29T08:48:49.923029Z" } }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[65 47 39 33 59 38 62 62]\n" ] }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAwkAAADRCAYAAACHHv5TAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJzs3XV81dX/wPHXje2ue6w3FiwkNrpU/CIC0imhhIAgCooCIqgY2IRSEgoGKCEhIClKSYPk6DFirLvu7m58fn8M9nOysbtxl5zn48HjsX3inPcZi8/7fE7IJEmSEARBEARBEARBuEte1QEIgiAIgiAIglC9iCRBEARBEARBEIQiRJIgCIIgCIIgCEIRIkkQBEEQBEEQBKEIkSQIgiAIgiAIglCESBIEQRAEQRAEQShCJAmCIAiCIAiCIBRRpiQhOzuHy5evotVqKyoeQRAEQRAEQRCqmNKYi/Ly8vhq9nx27foTmUzGyl+W4+npwexZc3F1dWXosMEVHacgCIIgCIIgCJXEqDcJy7/7kWtR15k7bybmKvPC4y1aNufA/oMVFpwgCIIgCIIgCJXPqCRh//6DvD7hVRqFN0SGrPB43bq+xMbFVVhwgiAIgiAIgiBUPqOShNTUNOrUcb3vuE6nQ6/XmzwoQRAEQRAEQRCqjlFJgq+fD+fPRd53/O8DhwgKDDB5UIIgCIIgCIIgVB2jJi4PGvwcc79agFqdh4TE6VNn2bxpK+vX/8YHH7xT0TEKgiAIgiAIglCJZJIkScZcuH37Ln78fiUJCYkA1KnjysiXhtOx49MVGqAgCIIgCIIgCJXL6CThnvT0DCRJwtHRoVwVHjp4hO+Xr+B2TAxWllY8N6APAwf1L1dZgiAIgiAIgiCYnlHDjQCuX7/B7Vu3USqV+Ph6lytJOHr0OHNmz2PqO5MJD2+ERpNHQkJSmcsRBEEQBEEQBKHilPomISrqOp9/OpuoqOvcu1QmkxESGsy0dybj4+MNQFZWFra2tg+sbMzo8XTp0pGevbqXGpjBYCA9IxMLlQqZTFbq9YIgCIIgVD5JksjTaHCwt0MuN2o9lCojni2E2s6UP48PTBLiYuMZ/dKruLq6MnBwf/zr+gEQHX2DVb/8SkpqKj/8uJQjh4+RnJzywJ2X1eo8unTuxctjR7Ft604yMzOp3+Axxr82Fje3Ovddn5qWznufzHmoxgmCIAiCUDlmvPMmTuUcilxZxLOF8Kgwxc/jA4cb/fjjSkJCgvli5scoFIrC4/WCg3i6w/94+633eGvyO9yIvsmMT6Y/sKKsrCwkSWLnjt18/uUMHB0dWDh/CdPfm8GSpfPvu95CpQJg+lvjsbayKk/bqiW9Qc+Vy1cIDglGIVeUfkMNItpW89TWdoFoW01UW9sFtbttObm5fPTl/MK/29XZvRg/eHsCNta159miNHq9nosXLxIWFlbkea42exTbDJCdk8sHn39tkp/HByYJJ47/w/QPphX7xVUoFAwZOojXX5vMpMkTaN265QMrsrr7oN+nX088PNwBGPnScHr1eI7k5BRcXJyLXH/vNaC1lRU2NtbGt6ia0+v1mJubYWNtXeu+aUXbap7a2i4QbauJamu7oHa37Z6aMHznXoyWFiosLS2qOJrKc+/7z9LSotZ+//3Xo9hmoHCTY1P8PD5wsFJGRibu7m4lnnf3cEcul9O1W+dSK7KxscbN3Q0Z/x90TfiFIgiCIAiCIAiPmgcmCfb2dsTHJ5R4Pj4uvkyrHPXo0ZX1634jMSERjSaf5ct+JCQ0+L63CIIgCIIgCIIgVJ0HJglNmzXhpx9+xmAw3HfOYDCw4qdVNGvexOjKBg3uT8uWzXhp1Dj69x1MclIyH814r+xRC4IgCIIgCIJQYR44J2HY8OcZ89I4Rr80jkGD+uNX1xeA6Os3Wb36VxITEln67UKjK5PL5YwZO4oxY0c9XNSCIAiPMIPBgCRJ6HQ6yrgfZrWm1+t
rZbugZrdNoVCI4cGC8Ah6YJLg6enBnK++4PPPZvHxjC8Kf0lIkkRAoD9zvv4SD0/3SglUEAThUafVatHr9chkMgIDA6v9mvRlJZfLa2W7oGa3TavVIkkSqhqwepEgCKZT6o7L9YKDWPb9YqKirnP7VgySJOHr50NgYEBlxCcIgiBQ0DljMBiwsLBAkiT0ej1KpbJW9fBKkoRCoah17YKa37b8/HwkSaqRsQuCUD6lJgn3BAYGiMRAEAShiuj1+kdqGT+hepHL5YWJqSAIj4aa995TEKo5g06HLldd1WEIgiAIgiCUm+gSEISHpNMbOLxmFxnHjkF6CnaZ8cgNemzDI/Du0gmnFs3EK3pBEARBEGoUkSQIJpWbL9W4lTvKo2B8uMTvJ5L4fvcdnkm6TET0mcLzBpmc7NOnuHT6FE5tWhP61kSRKAiCIAiCUGOIJEEwmbg0DVM3qBmTm8Cgdl5VHU6FyU/PIHLOfPble3DOswUfDAoiCEfUMU0wd3LCuq4fksHA9W27if11PbtSHfHI1eFgbVbVoQtChevcqWfhx/n5+chlcpRmBX9qGjVqwJczP6mq0ARBEIQyMCpJ+OLz2XTr3oX69cMqOh6hhtLpDXz40yUGG06ya1MCT9R3xtvFoqrDMrm0f05x+euFkJ1JY+soXn3/ecwsLQA7bAL8i1wbMrAPfh3bc3Z/Kq8susDScfWxsRB5uVC77di5qfDjd6Z9QFBQIC+OGFLi9TqdHqVSTMgWBEGoboyauBwXF8/4V9/kxWFj2LB+E1lZ2RUdl1DDfLsrhhaxB6l/aQ+jolbw7aI/MBhq17Aj7fGTXP74M8jORBYYSqu5M+8mCCWzcHLgtZ7+hPvb8sX8fVz47EsMWm0lRSwI1U9MzB2eerIT27ftZPDA4fTpPRCAfn2f5/Cho4XXnTjxDz269y/8XKfTsXzZjwweOJzuXfsyZfK7xMcnVHr8giAIjwqjkoSv585kxc/LaN2mJT+vXE2/PoP55OMvOXvmXEXHJ9QAeoPE3n2XeSz6YOGxKHkdzt3MqsKoTCv1yDHyft8GgG3PfrSe+REqVxej7pXJZLzZw4/WZ9aTduQoMes2VGSoglAjHDx4hCXfzufXdSuNun7pkuWcP3eBeQtms+G31dQLDmL6ux9hMBgqOFJBEIRHk9FjH7y8PBk9ZgQjRw3j0KGjbP19O2++8Taenh5079GFrt06Y2VlVZGxCtXU+ZtZdE3cCzotykYNaDzlLU7uSebolQzC/e2qOryHlnnhIpdnf41MkrDt1ptGIwaVuQwzMwURE14mesaH3F67HqcWzbER+44IJtBi4uFKq+vY7NYmK2vEyGHY2toada3BYGDTb7+zaPFcXFycAXhxxBDWrlnPrVu3qVvXz2RxCYIgCAXKPEBaLpejUMiRy+SF27SvWbOeH3/4mbenTuTxJ9pURJxCNXZ8z1nCUi4ht1Bh3rEDCksLWgbb8+2uGEZ38qnq8B7asdtatHJLLOrX47EXB5a7HM9mjbjR9mmkg39y+ev5NPl6FjKxOZbwkEz54F6Z3N3rGH1tWmoaGo2G8eMn3ncuKTFZJAmCIAgVwOgkIT4+gW1bd7B92y5ycnJp/3Q7Fi76mtDQYPR6PSt+WsXXXy8UScIjKO/o3wDU6fA02XYFbw48rx6m89+biW08HM/HW1ZleA9lz9kUZh3O56uPPkajSXroZUxbvj6SvWf+gVu3SNp3gDrtnzJJnIJQ8xT9WbK0tECdl1f4eUpySuHHDo4OmJubs2TpfLy9a+/KaYIgCNWJUXMSJk+cxuBBwzn492Gef2Eg6zb8zKTJEwgNDQZAoVDQvUeXIr/UhUfDzUQ116z8cGjaBNd2TxYelzIz8MhLJOrvE1UYXfnlxtzhwLINfLE+mrmjwggO8UAmf/gNyhUqFfWGFgxXilqxSkxiFoS7goOD2L3rL/Lz84mPT+DXXzcWnlMoFPTs1Y0F8xeTkJAIQFZWFnv37Een01d
VyIIgCLWaUU89Ts5OzF8wh2XfL6ZX7+7Fzj1wcnJk1ZofTR6gUL3tO5+Ke9uW1J/+Dtb/GmPv0DgCgLzImje5XZOSyqkp7yLf/DOfNc4gxNvapOX7dHgKvYs7eRmZZF+/YdKyBaGmGvXSi2RnZzPwuWF8MP1jOnd+psj50WNG8NhjYUx8422e7dSLkS+O5dChI4g9CgVBECqGUcONpk6bVOo1MpkMd3e3hw5IqFkOX0pn8FOe9x23qReEzMICq8wkNElJqFxdqyC6spMMBk5+NgeyM1EEhtDo2cdNXodMoaDxtIm8vTGR7Ax7upu8BkGoHj759IP7jnl7e7F3/877jnt4uDNvwWzUajWWlpbIZDL69e9deN7MzIyhwwYzdNjgigxZEARBuMuoNwnz5y5i3b9e/d6zft1vLFywxORBCTVDTkYOwUfXUU99+75zcqUSh4YNALh18GRlh1Zux79fh3T1ItjY0uS9yShUqgqpxzYwgJf712fpztvkacVwCUEQBEEQqhejkoR9+/8udrfl+vXD2Lf3gMmDEmqGszsO0iL9LIlrVxd73rFxOACxx09XZljl9s/GP8nb/CsAYRPGYe7oWKH1NfSzJczTgl3zVpF1+UqF1iUIgiAIglAWRiUJGRkZODg43Hfczt6OtPR0kwcl1AwpRwp2R3VuU/wSjPZ33yTooy4jSdV79+XTm/eQ88MiFBjw7tcHp+bNKqXeIarLOO/fyJUFizDodJVSpyAIgiAIQmmMShJcXV2IjLxw3/HI8xdxcXY2eVBC9WfIz8fmZiQAzq2KX+LU0scH1dPP8lfdTlCNd0W9cieHj4/IkDm74jt4IL4vVN6Y58f6d0Vt7UTerVvEbdlaafUKgiAIgiA8iFFJQqdOHVi4cCkH9h8kN1dNbq6a/fv+5ptvltKx09MVHaNQDcUe+QczvRarAH8s3IrfFEkmkxHx6gj+lvuTnV893yRExefy+rcXeXlAQ1otmIPPgP4PvRdCWShUKgLGvATAzV/WkJeYWGl1C4IgCIIglMSo1Y1eGDKI2DtxTH9vRuEDlCRJdHimPUOHPV+hAQrV0/W//kYBuLRu9cDrlAo5YT42nLuRRZuwih3jX1ZnV21ix56rvD5mKB3Cq+6NWEC7FkRuDMcp+gzRy34gbOpbVRaLIAiCIAgCGJkkKBQKpr4zmSHDBnPtahQA9YKD8PK6f+lLofYz6HQYIk+jAJzbPDhJMOh0PJV3gaQVu5A+mVCpvfQPcm71ZrJW/0RboKF1d6D4tyGVpdWkMUSOe43UI0fJvh6NTYB/lcYjCIIgCMKjzagk4R5vby+8vb0qKhahhjBoNJxxakQblzysvL0feK1MocD7+O+Qk01e/EAsPTwqKcqSXTp4mvRVPyEHAkaPwi40pKpDwtXbjaxGj+N0Zi+Ju//EZvSoqg5JEARBEIRHmNFJwunTZzlx/CSpqelIUtFJqFPenmjywITqKzZXzk7P9rw2vWmp18pkMuwbNCDj6BFSTp3Du4qThGvXk4j6eh4OSHj26oFH12erNJ5/e/LVwXz0qT2jn+1d+sWC8AiLi4tn0IBhbNm6HltbG2bPmouNtTVjxlZOcv3Zp7OwsbFm/GtjK6U+QRCEqmDUxOXVq37ljdffYv++g8THx5OYmFTkn/BoOXwpndYhDkYPHXKOaAjArcNVu6la1O10Dr4/E4f8DGyCAvGrxFWMjGHv5kzL/s+waEdMVYciCDXKxEmvG5UgxMXF89STncjKyq6EqARBEGo2o94kbFi/ifGvjaVP354VHY9QzeUlJpK4cyetO7Qx+h7HJhEAaC9FIun1yBSKigqvRFHxuWz+aDEtMqNQ2tgQPOkN5GZmlR5HaXq3cuOXfXGcPHmD+j5WWNSp2rkSglCRJEnCYDCgqILfCYIgCMKDGZUkZGfn0Lp18WvhC4+WhL+P0OTcFuxtU+Ap44aZWbi7I3d1Q5mUQNbVa5U+B+BSTDZvfHeJ14cOwP4PDf4
jhleLuRHFMVPKedU3kZwZn3OjTUtC3xJD+YSaZ8BzQ+nRowv79x/k9q0YgkOCmDJlIh6e7gx4bijduz/LwYNHuB4Vzdx5MwkJDea3jVvYuGEzKSkp+AfU5fUJ46hXLxAo+Bs0Z/Y8jh49joO9PQMH9S9S33+H/8TcvsM3C5cSGXkRg0FPREQ4Mz6ZztgxrwHQv1/Bqnzjx4+la7fOxMXGs2D+IiIjL2JmZkanzh0Y/uJQlMqC5OXAgUMsWfQdqalptH28NXq9vrK+lIIgCFXGqOFGjz/Rmn/+OV3RsQg1wJ2/C3ZZdmvdokz3uTZvUnD/4RMmj+lBTp+/wxtLL/BWX386P+FPg4/ex7quX6XGUFZtOxXs9px86DC5t25XcTSCUD5bNm9j6rRJbNy0Gl9fH6a/N6Nw5/Xt23bx9tsT2b7zNwIC/dm8eSubNv3OjE+ms3rtT3R+tiNTJr9Lbq4agPnzFpGRkcGq1T8wf+Ecdu7cXWK9ublq3nxjCv4Bfqxe+xMbfltNn34Fb8EXLZkHwK/rfmbHzk107dYZjUbDm29MoWGjBqxdt5LFS+Zx7NhJ1q/bCEBMzB0++vBTxr7yEpt/X0fz5k3Yt/dARX7pBEEQqgWj3iTUrx/Gsu9+JDr6BoFBAZgpi97W4Zn2FRKcUL1os7LQX7+CTCbHoXFEme51bt6UC8cukmOwI6yC4vuvYwciSZ47i3ebNqZNg+aVVOvDs6rjgrzlk8iO7OHGyl94bNqUqg5JqMYO9uxb4rnAV8bg3qkjAPE7dxH1zZISr227aX3hx6ffnExO1PUSzxujZ69u1L2bkL889iW6d+1LdPRNAHr17o5fXV8AzM3N2bh+EyNHDcfX1we1Wk3Xrp35dc0GTv1zmlatW/DXn3uZN38WdnZ2AAwZOogpk98ttt4jh4+iNFMy6qUXC+dNNW4cXmKchw8dxcrKsvDthLOLM4MG9eeXn9cyYGA//vpzH00aR9D28dYAdOzUgY0bt5TpayEIglATGZUkfP3VQgDWr/vtvnMymUwkCY+I5ENHkEsGVGENMLO1LdO9jk0aY3jJk/3XMuhYQfH928Eth8hbvgB7gwbbtHgMeXkoLC0roWbTaD56EIePHyTt6DHSz5zFIbxRVYckCGXi9q+d2K2sLLGztyM5ORkAd3e3ItfGxyfw6SczkSvkIEkgk6HT6khOTiYjIxOtVovbv+757/3/lpCQiJenp9ELK8THJ3Djxi26dulTeEwySFhZFfy+SE5Oxs296NygB9UvCIJQWxiVJOzZt6Oi4xCMIEkSBgkU8qrZkOzmnwWv2H2efqJc97cOcWDh1ltodQbMlEaNdCuXvT9uQb7hJyww4NymNfUmjEehUlVYfRVB5eyMXZceqLesI2rpMprMm1MlE76F6s/YHn73Th0L3yqUJmLOzIcJCSh4WL8nN1dNZkYmLi4uBQf+8wDv5ubGK+NG06JFM9RqNZaWloUP+Xq9HjMzMxLiE3ByKti1PT4+ocR63dzqcCc2FkmS7ksU5PL7f++4udUhOKQeixbPLbY8FxcXIs9fKHIs/l+xCIIglESrM/Da0ot8NSoUC/Oa9ze84p7UBJM7cS2TicsuVUnd2owMdFcuIsnlOLUq3yR2N0cVTa0zOLR8nYmj+38756/GbMMPKDDg2asHIZPfrHEJwj0RQ/uSa+NEXkwM8Tt2VnU4glAmWzZv4+bNW2g0+Sxdsgz/gLr4+xc/H6h3nx4s++5Hbt64hSRJqHPVHD1ynPT0dBQKBf9r/yTLl/1EVlYWaWnp/LxidYn1tmrdEm2+luXLfkKtzkOr1XLq1BkAHBzskcvl3Im5U+T6jPQMNqzfhEajwWAwcOdOLCdO/ANA+/bt+OfUaQ4dOoJOp+ePXX9x5fJVE36lBEGorW4kqjkZlcmfZ1OrOpRyMXozte3bdrJ+/SZi78SxbPkiPDzdWfXLWjw9PWj3VPl6loWyiU3N4+yNrGJ7yCpafmY2Ubb+1Pe
3L/NQo3t0uWq6Hf8OmU6LuvuTWHq4myw+SZJYu2Qb3rt/BSBg9Eg8unYxWflVQW5uTtDI4exZvgk3z6CqDkcQyqRL1058+slMbt28TXBwEB9+9G6Jv7d69uqGXC7ngw8+ISEhEStLS+rXDyMktB4A4197hTmz5jJwwDAc7O0ZMLAfZ8+eL7YsKytLZn/1Od8sWMqA/i8gSRKNm0TQuHE4KpWKYcOfZ+rb09Fqdbw6bjTPdunE7Dmfs3jxd6xYsYp8jQZ3D3f69u0FgLePF+++O4WF85fwcdoXtH28NU+2e7xivmiCINQq1+JycbRRsuloAl2buVZ1OGVmVJKwZfM2li5ZRv/n+rByxWokClaocHCwZ+PGzSJJqCSpWVqy8/TcSdHg7WJRqXVH6ezY1nAwQyeVf2y80soS5zatSd2/n+jfd/LYS8NMEpveIDFzQzRX0514Jage7k+2qfEJwj3+7dtySO7P3GPpzIuo/ORQEMrLr64fLwwZdN/xNWt/uu+YTCajR8+udO/R5b7hRgC2tja8/+E7Re7p0bNr4cdTp00qcs7X14fPv5xRbFzDhr/AsOEvFDnm4enOhx8VPxEaoN1TT4i/c4IglNnV2Fz6tXFn/eEEbiap8XOtOXMjwcjhRhvWb2LipNcZOuz5IpveBIfU48bd1SqEipeSpQXgYkzl7xa6/Z8kOjZ2QVbMmN6y8OrSCYCkv/Zg0OkeOq5cjZ63f7jIrSQ1cyc0p/HnM/Dq2eOhy61OBjzhTkqWlt2nkk3yNRMEQRAEoeJdi8shzMeGLk1d2Hw0sfQbqhmjnvju3IklNOz+DbAsLCzIyc01eVBC8VKztPi7WXI5JqdS6439+wjXDpyka1Pnhy7LNjQEubsnytwsEvYffKiybl69w/qx79Hq1Hq+GhmCjYWyWu6i/LCUCjmTW5uROPszLs5dWNXhCIIgCIJghGtxudTzsKJHCze2nUhCpzdUdUhlYlSS4OzsxJ07sfcdj4y8iGc13bm2NkrJyqdtmAOXKjFJkAwGrn27nOHRq7GIufbQ5clkMgL6F4z1vfz9SgxabbnKObpmB1FvTSIg5TKeiZfQxtx66NiqsxA/O3zUsaTv30/S/r+rOhxBeKA1a3/iiSfaVHUYgiAIVSYtW4tGa6COgzl13Szxdrbg7wvpVR1WmRiVJHTs1IFvFi7l9u0YZDIZGo2GI4ePsXTxMp7tUvZV7zUaDYMHDS+yLrVQutQsLW3CHLkUk124c2lFyzgfiSI9BeydsK//mEnKrPO/pzDz9MYsM5WrG343+j5Jksi6FsXOaV+i++VbVIZ8nFq2oMnCedgEBJgkturKytsb/5EvAnB5wSLyEmrea0tBEARBeFRci8slyMOqcH5Vz5Z12HysZv3tNipJGDpsMEFBgQwb8hJqtZqRL45l2tT3adGyOQMG9itzpcuX/YSra82b5V3VUrK01PO0QmUmJzZVUyl1XtvyBwBeHdubbJ1+mUJB0MihpIa2ZGWKJ3pD6QmPQafj5ITJnJ34FjaRR5GZmRH46suETn0LlcvDD4OqCby7dETZqCkyTR4X5y6stERREAShJhAdkEJ1UpAkWBd+/nS4M2eiM0nMqJznN1MwKklQKBRMnTaJn3/5nvc/fIf3pr/Nip+X8daUN8q82sqVy1c5euQYz78woFwBP6o0WgM6TT6WWjUh3jZcvlPxQ47yEhNRnziChAz3Dv8zadlOzZryzMcTSZbZ8OGqa+j09z/wGrRatBkZaHUGfj6QwGaNPzpLG+p07kTEnJm4d3zmkVrtRyaT0Xjiq+gsrMmNPE/cH39WdUiCIAjVhuiAFKqTa7E51PO0KvzcUqWgQ7gzW48nVWFUZWP0PglQsEych2f517bX6fTMmvk1r78xzuh79AY9er2+3HVWN/faUtY2JWdoGJK8g5MvfUPDPhO5cMuCdvUdKiLEQpdWrEVu0GPfpi1mrq6lxlzWtpnJYdbwerzz/QWWvL2Yll3bEOx
nT+aZs6SfPkNm5AXSgpqwyLI9QR6WvPb+UHwcRiA3Ny9TPaZQ3v83U1PY2hA8+kWuz1vA1W+/x7FpY8wdyv99UF3aVRFqW9v0ej1yuRxJkgrfItW2t0m1tV1Q89smSRIGg6HYjhm9oep/xu51QL4ybgwffvBpVYcjCFyNy6V3a7cix3q2dOOdlVcY1t4Lubz6d3KWmCSsXLGK/s/1QaVSsXLFqgcWUtxa2MVZu2YdAYH+NG4cXrgDZmmuXL6CuXntW7Hm4sWLZbo+Ol7DY2kXMQA2t09zxLwh589nVExwgCEtjewD+5AhQ9ssnPPni9+4qDhlaZskSTx3ZQVS9A00V3Zz7j/ns1NSGTNIjq+TlqzE61yo4uF8Zf1/qwiSsyNScAh7s9y5vD+algEP//NRHdpVUWpL2yRJIjAwsMgy1Hl5eVUYUcWpre2Cmts2vV5PVFRUsUlCfn75FqAwFdEBWbra1mlijKpss04vcSNBjZ+rCr1eX9g5EOxpgYWZnH+i0mkcYFchdZsyaS8xSfh9y3a69+iKSqXi9y3bSyxAJpMZlSTcuRPLbxu38O2yb8oUYHBIMDbW1qVfWEPo9XouXrxIWFhYkT/2pUm/eRQACy9P/Hv1Qbc9hgYNTDORuDjxcelsdW1N73BrQp96yqh7ytu2rNGjSNixk6zLV9Dl5aOsF4pNo3D82jSmlaNjOVtgWuVtW0WRPmuAa7ya8Usv0byRH43qlm8X7OrWLlOqbW3T6XTI5XKUSiWSJJGXl4eFhUWtGnJXW9sFNb9tOp2OsLAwlMr7Hxuycyp3We7/Eh2QxqstnSZlURVtjssw4CXPJOrcaQxpaWi27cD88TYoQ4Lxd9Ty17EozHIr5vvPlEl7iUnC6n/tirm6mB0yy+rs2fOkp2cwfNhoAHRaLbk5ufTuNZAZM96jQcP6xd6nkCtqxR/4/1IoytYuzZXLKAHHiAgU9hakZWsr9Ovyw6F07Hr0pX53vzLfW9a2OTwWhsNjYWWupyqUtW0VKcTblvcHBTHz26O884wD9Z9uWe6yqlO7TK22tO1eT9S/HzBlMlmNfOAsTWW3a8BzQxk3/uVyL9s6e9ZcbKytGTN2VKnX1tT/M5lMVuLTKdQKAAAgAElEQVTPkkJedT9fogPSOLWt08QYVdnm+DMp9E1dT968W9gGB6O+eQu0Wh7r2YOW6jRORmXSoEFghdRtyqS9THMSHkb79u1o0aJZ4eeR5y/wxeez+W7ZN9jZla8X9JFy/RIA9g3ro1RJaNIrbq3dqLhcDkSm8evbERVWh2AazVx0vHLzFxIXaDCzeofg1g2rOiRBYOOGzezc8QdR16N57LEw5s6bWeR8bq6aObPncejgEczMzOjStROjx4woV13bt+9i5hdfYa4yR4YMJydH+vTtSd9+vUzRlPt89uksbGysGf/a2MJjEye9XiF1CaUTHZBlU1s6TcqiKtocGxlFcNxFJHNzgl55mfPTPyAvNo6kXX9QL+Ip1h5MqLCYTJm0G5UkzJ+7CA9Pd/r1713k+Pp1vxEfn8Cr48aUWoZKpUKlUhV+bmtnCzIZzs5OZQz50WPIz8cy7gYA2oxMrowaSUeLeuTlt8PC3LTfZJIk8c/0jxkfGoaVvCGVmEcK5WDu4oJbq+Yk/rWHWzM/J2/UGBp1ebKqw6p29Go16WfOknHuPHkJiSh8/VF27EFGjpbMM2fhz83I1Dmg02FQmiGTQKHJBTMz5KMn4eTriYejivwTh9Bl52Dl7YXK1RULdzeTLQ1cmzg7OzH4+QFcunSFyMj7X/XPn/cNGekZrPn1J7Kzcpg48W1cXJzp07dnuerzD6jLsuWLADh/LpJJE6cSEFCXxk0erY6Oe5OL//vwodPpUSrL9n1annuqguiAFKojq4M7AHDr9AwqVxf8XxzGxU8+59aqtTRo3ZZbSXno9AaUCqMWGa0yRj0B7t//Nx/NeO++4/Xrh7Fm9TqjkoT/atw4nK3bNpT5vkdRfno
6aY5eOKjA7rFQDPn5hBmiSc7Q4O1qVXoBZXBo21E8Ey9jpk1EpjBuQrpQdWQyGYGvvowuJ5vUo8fJWjKXQydP0PSVYaicH439I0pyecly0o6d4GCeGnleLrJ/rSqz55YF5+KvYG+tpF52Om1irpdYzrozuSQdiSI+TcOIa+vxU9/5/5Nm5lgHBuDSvCkuTz6ORZ06FdmkGuPJdo8DkFDMpn95eXns3r2H+QvmYGtri62tLQMH9mfTb1vo07cn165GMfXt6Xw9bxb16gWSm5vLqJGv8NxzfenVu3updTdoWB+/un5cvny1MEnQ6XSsXLGaXbt2k5WZRWhYCG+++Vqxq/UlJCQy84uvuHo1Cr1eT2hoMBPeGIe3jxfr1/3G7j/+QiaTsXXrDpwcHfll9Q9F3i68M/V9AoMCGDFyWGGZC+YvJj09nTcnvoYkSfy2cQsbN2wmJTUVf/+6vD7hVerVK37oQWmxD3huKN27P8vBg0e4HhXN3Hkz2bhxCwq5nFx1LsePnWTI0MEMHNSfP3b9xcoVq0hOTsbX14dXx40p7Gn/7NNZxd5T3YkOSKG6ybl5C++486A0w6t3wRtNx+bNcIgIJ/30GRLWr8PNoRk3k/IIdDftM5ypGZUkpGdk4FDMMot29nakVeCwF6GARZ06/N50BGM6emHp44C5szOkpJAcdQtv11CT1ZOr1hK9eh3+gNszTyMvZoKaUP3IlUpC336L2C1bufHTSqQTBzn20lEChr2AV8/SH6pqOoNOR8y5q0T/+Teng57manwe1+JzGXj1MvWykwGQkIFvIDYREdQJrkvzBmGY350Ur8vxJ6dbIGa2NsjMzDDcnfSltLHGkK+lrcf/P0je3JpG8vlLZN+JQ5+chConnZxLl8i5dIlTx67iO3Ys9X1tkRt0AMjNHr2JkaW5ffsOOq2OoKD/fygODg4i+sZN9Ho9QfUCGf7iED768FOWfruQObPmERDgb1SCIEkS585FciP6Jl5DvAqPL/vuRy5cuMTcebNwdHRgxU+rePfdD/n2u4XI5UV78gwGA/3696Zxkwj0ej1zv1rAJx9/waIl8+jbrxdXrly7b7jRv3Xs1IFF33zLiyOGIpPJ0Ov1/PnnXqZOmwTAls3b2LTpdz7+9H28vDzZsf0Ppkx+l5W/LMfKyvK+8oyJffu2XXz62Yf4+Hqj0xV87+3evYePZrzH9PenodVqOXvmHLNnz+Wzzz6kYaOG7NzxB2/drdfJybHYe2oi0QEpVLUbq38FCp6jVHeTVZlMRt0Rwzk9YSLxO3YR0T6EqLjc2pEkuLq6EBl54b5el8jzF3F5xHsrK0tKphZnu4JVMaz8fMlPSSHzdiy0Ml2SsPXL7/HPjEZpY4N7504mK1eoeDK5HK+e3XFs2oSrP/xM5onjLL+i4uWMfFztzas6PJPKSUnj0u7DJJ27hD7mBjbp8SglPUrAw96H5u1aEuhuhSrOmuvR0YS1aonKzq7EB3altTX29Y1bKcyvayf8uv7/z4Y2M5O0c5Fc37WPVO9mrN4QTUJ6PoNsbhB6ajOuLZvh3KYNTs2bIpNXzGvlwMCKmfxWnKioqIcuQ52bi8pCVWQoi42NDQa9gXxNPsigX//enDx5inGvTCArK5vvlj94Umr09Rt07dKHfE0+Wq2WIUMH8/jjrQEKe+7nzZ+Fq6sLAMOGP8+aNeuIjr5BYGBAkbI8PNzx+FdiOHzEEAYNGIZanYelpUWp7WvTthWzZ83l7JnzhEc05MTxf1DI5TRt2hiNRsOGDZsYNWo4vr4+AHTt1pm1a9Zz6p/TtL0b8z3Gxt6rd3f86voCYH53H5nmLZrSqnULoKC3fefO3TzToX3h25Wu3TqzZfNW9u/7uzAB++89giCUTW5MDOmHD6OXyfHpW3SIvrWfL+6dOpL41x6C5Wlci8ulY+MqCtRIRiUJnTp1YOHCpahUKpo2awLAieMn+eabpfTo0aVCAxRAk5REelYeznYFDzmWHu6
kA7mx8Sar4/yuw3j8sxNkMoInTijMfoWaxcrbi/B33yInKYWLJ3N5Yc4Zxj7rS/OkE7g+0fahNl6rTPmpaWRfv05+aipZeRI3fJtw7mY21y/HMODgV8iBf482tnB3x6FxBE26hWPpUdArqrcLRqHNx9zBAXkFzRsws7OjTtvW1GnbmlbAMCA+TcPJrw6DOpekvftJ2rsfy+AQQl9/BStvb5PHYIoH98pkaWWFJk9TZMx7dnY2coUcc5U5+fn5APTs1Y2pU6YzYuQw7OwevJ74vTkJWq2WH39YyalTZ9Dr9SiVSjIyMlCr1Ux4fTL8a1Uhg95AUmLyfUlCenoGCxcs4fSpM+Tk5BbekpGRYVSSYGZmxv/at2Pnzt2ERzQseDh/pn1hr39CfAKffjIT+eezC+/RaXUkJyffV5axsbu7u913r7tb0WNJSclERDQqcszD04OkpKQS7xEEoWwMeRry3XxItvVAdTex/zff5wfi3b8PebFw5Fj133nZqCThhSGDiL0Tx/T3ZhQu3SZJEh2eac/QYc9XaICPOkmv58SYV5mul7CSrwSUWNzt5dIlmiZJ0OZrubV8OfaA7+CBODap5qmtUCprV2dGd3bmqYZOrPxuN67//MSNVb/S4N0p2FfT5WbVcfHE7tpNwt9HkBLjCo9nK635o4sPDeva8VTfhnCnLhbOjtiHh2MTGIC1f12U1WgpQ3dHFV0/eo3cmL5c2nmAjD92wJXLnBz3BhbhTQgZ1Ae70JCqDrPK+Ph4oTRTEhV1nZCQegBcvRpFXT+/wgm32VnZfP3VQrp378KaNeto374d3j5eDyoWKHhAHzFyGKNfGsdvG7fQr39v7O3tsbC04JvFc/Hz8y21jG+XLic3J4el3y3E0dGBuLh4Bg0YVrgErTG7pHbs1IG3Jk3jpdEvcvDvwyxeOq/wnJubG6+MG03Lls1LLcfo2ItbUvU/h1xdXYiLK/o3Iz4unvDwhiXeIwhC2dgEBXKkwziCXIp/c21mW9C9FSSpiYrLrczQysWoJEGhUDD1nckMGTaYa1cLeq3qBQfh5eVZocEJoElOAb0etZkNiruvfy3cC5IEWYppth/euPsaSpUNbo4qvP/zekyo2YK9rHl7ZDNOph1FHn2ZM9Omo328MxFdn8QppPKGqRRHpzdwMzGPyFvZJO/dQ8ihtYXn9EpzJG9/HLzc8PFx55l+QYXDhaQ5X9aIFYWsvL1oMnIgugHduPztD6Tt20v+6RN8q/fhmVGe5d4ArybQ6Qp2s9Xr9UgGAxpNPnK5DDMzMywsLOjQ4X8s++4H3ps+lezsbNasXkfvPj0K758182tCQ4OZOPl17B3s+ejDT1m46GvMjJjjIZfLeWHIIOZ+tYBu3Z/FwsKCXr26s3DBEt6c+Bru7m5kZWVz8uQp2rZtdV+ZuTm5qCwssLGxJisri2Xf/lDkvKOjI9HRN5AkqcT9DurXD8PJyYlPP/kSPz8f/P3rFiYZvXp3Z9l3P+Lu5oavnw9qdR7nzp4nJLTefXP/ZDJZmWJ/kGc6Ps3UKdPp0KE99Rs8xh9//MmNG7d44sm2RpchCELprsXl0rnpg/eY8rCRo0hNIFutw8ay+s7/LFNk3t5eeHuX3psjmE5eQgIAOVb/P/zHNrgemufGcCrVkq4PWX5CmoZvD2fz7Zef4KnS1oiHL6FsrLy9eHz2DK4v+4H4rdtQHNjGpQPbyHL0xKr/C6jcKm7ssTYjg8wLF8nLySMpR09y1G3UN29ilnCbk9ahnKr3NPV9bQgPCoAT5ji3bo17h/9h91hYiRPna9r3qNLGhvpvjEMzdBDxf+4l0L0Z7668QqC7FcPd4qnXOBirWvZ7dcVPv/DjDysLP+/0THfCIxoV7pcw/rWxzJk9nwH9h6A0U9K1a+fC5U+3b9vFpctX+G5ZwZKmw18cwulTZ1i6ZLnRK+k9+WRbln/3Ixs3bGbQ4OcY9dJwVq/6lUlvTiUlJRVbWxvCIxoVzlv4t+E
jhvD5Z7Pp3q0fzk5ODBk6mN279xSe79qtMx9+8Cndu/YrmEj887JiY+jY6WmWffcj48a/XOR4z17dUCgUvP/+xyQmJGFpaUH9+mGEhNYrtpyyxP4gERGNmPDGq8yeNZeUlBR8fLz54ssZYhUgQTCRzAsXQankelw2QR4lT0jOuXmLs5OmMFJpR1R8W8L9HzycsirJJOlf6wL+y8oVq+j/XB9UKhUrV6x6YCEvDDH9UplqdR6T3vuULz6Ygo1N9RlK8LD0ej3nz5+nQYMGRm2kEb/rD6IWLia2blP6z51WePzktQyW7LzN0lcblDsWSZKY/P1lQr2tGdXRp9zl3FPWttUktaVtaadOk3LoMMlHjqPPzOBYSGfWyxrg72ZFe65TV0rD0c8T91B/XIL8UFo8eAy2XqMh++o18lPTyNcZyFTrSXMLIi7fjNhUDXV/nYNTekyx91qHhhHxxccASAYDkl5v0tWAqvP/Wb7OwNYtp3H88QvkchkuvXoTOqjPA9t/b9UapVKJJEmo1WosLS1r5O69Jamt7YKa37Z/f//9V3Z2DlM++IJZM6YZNW+jKtXWZ4vSVOffhxWlstt8buq7ZF64yPqgfsyZXfJzsUGn49gLw9Gr1SSOnUHvzsYtnGEsU/48lvgm4fct2+neoysqlYrft2wvsQCZTFYhSYJQIC++4E2ChXvR9dedbM1IzXq4Jer2Hr9D6KHV9H+3fDudCjWPY+MIHBtH4D9qBPE7dtGic0eeiLyI3N6PxMVbMb9ykhwgCrgKZKvs0SnNSXL253xET8wUMqQ8NS2P/YJlXgY2uakoJEOROna3HIlZvRA8HFXYNW2KKtYeGxdH0GlR1amDtX9drP39sfxX77lMLq+w1X+qI3OlnO6dQrka1ZrUAwdIW7+WPX/to+HrY6nTWOyaLQiCUJPo1WqyLl9BkslQBj14zplcqcS+UQNSjx4n4/RZMHGSYEolJgmr1/5U7MdC5bqXJDj4Fp3/obhwkjaX95F11RbbekFlLjc7T8fxZWtom3SOW0uW0vCzj00Sr1AzKFQqvHp2R6/XY6aQ0aCuLel9nyY7yh/1nTuob8eQeycWO00GaMDZz4vAlnXQ6iXMs9NRbL+7oo5MhrlvXaw83VHc3Tly8qBGWPncXcWnw9AqamH1p7SyImzSBNI7/I8r3yyFhHiufvABF8Nb0eqNl1A51oyVqARBEB51GZEXkPR6Mp19aFLfo9TrHSIiSD16HOX1C5UQXfmVmCS0f+pZ1m9chaOjA198Ppvxr43Fyqp6b/pQG2nuzknwDCo6HCj/QiQtU0+RfrFxuZKEHzdG0irpOAB+w4Y8fKBCjefcqiXOrVoWfm7Q6chPTsaQn4/c3BwL94Kxy3qNDZnT38HcxQULtzooShmSJDyYQ0Q4zRZ8RcyG37j963qUZ46w6e0MnvjsPTycxFr1giAI1V3G2XMAnFF4M6Jh6fN8HBqHA+CWHIVBp6u2m9eW+H7fXGWOWq0GYOeO3YVrVwuVy/Ol0fzi1QOf+kXX8r63DGr6rdgyl3ntTib2v/+AQq/FuU3rR3o5RqFkcqUSC3d3rHx9C1fUgoK3EI5Nm2Dt5ysSBBORm5vjO/A5miz4GodmTcl/ujdDvzrLz/ti0ebrqjo8QRAE4QHuJQnZnvVwcyy9c8fSwwMLdzes9HncOnO5osMrtxJTl8fCQnnvnQ8JDQtBkiQWLliKSlX8zq2TJk+osAArw897Y3mygSM+LpZVHcp94i3dyKrXGPP/TLCy9Ch4nZUbG1fcbSWSJIn9ny0iJPsWZg4O+I980WSxCoLwcCw9PKj/3jTqA0/+T81na6NQ/vA1oRGBPDbqBQxKZbETRwWhohkMhjItuSoIj4r89Axyom+gVygJezzc6PscIsKJ37GLmJOR1G1avwIjLL8S/9q8PW0SP69cTUzMHWQyGXGxcSjN7r9cVsN3X/nrbApzt9wkV6PnpU4Pv8KPqUXHq/F3uz95ufcmQXt
3OJKx/lqxg5Dbx5AplYRNm4LKxdkkcQqCYFq+rpZ82cmSM39Ek/1XFIdOnKDBtElI/n7IZDL0ej06na5GrpRTEkmSamW7oGa3zWAwPHBfCEF4lOXFxaG0teW65Ez7Ju6l33CXV++e/GHbFEsPDx6vwPgeRolJQp06rrzx5ngA/teuMzM+eR/HWjaRLiFNw5fro3n5WR8OX0yvdklC5oWL6H7bToOAx4Cia2hbuLsVfJCahKTXG7V2fE6enr3Hb9PZXIX/iGHYhgRXQNSCIJiKbVAgjb+aSeTXC+BGNBfefgennr2pN7gvUVFRhIWF1arlDA0GQ61sF9TstpmZmYkEQRBKYBcWivL9r9i3/hJDnI0fgmvh7o53iJwjl9MrMLqHU2KSMGjAMBYvnY+9vR3Dhr9Q7dc+Lo9vtt+iX1s3Brfz4Ke/7pCdp8PGovq8ys+8eAm3CwcxON4/YVxhYUG+jSPm2WnkxtzB2s+31PKW/XEbZZv2NO/cHzN7+4oIWRAEE7P2r0uzWZ9x85c13NnwG+mbNnDwxGns+3dFqVTWuAfOB5HJZMhkslrXLqjdbROqt7PRWchlhtIvFMptz/l02jQre0dzPQ9rft4ba3Rnb2UrceJySkoqmrw8AH768WfU6rxKC6oyGAwShy+m8T/5Da59NIO++rOcisqs6rCKUN+db+BS17PY89qAMNLcgzAYMan8enwOvx9PYlw3X8wdHR+pNekFoaaTm5nhP+wFGsx4H7mDI+Z3rnP0t5Nk5IpJzYIgFE+rM7Bw603eXH6JD1ZdR28odu9c4SHo1Wq0ObnsPZdK+4ZlH77tGH+FvscXcW1p8Tu3V7USu80DA/358ouvCI9oiCRJrPt1A5aWxU/srYmbqV2KTuWl6J+JP1uwG2xjznP+UDBP1G9XxZEVkCSJtNNnAPBoVPzqQ94vjeHjtVF0K2UJVINOx4l3P2Z8eBOcbJqZPFZBECqHQ6OGNJv/FTfXbyRGG8ywr8/z4eAgmgSKN4OCIPy/m4lq3vv5Ks62Zqx9K4J3Vl7hUFQe4Y2qOrLaJenAQaIWLeEZj1bUdWta5vstba3x0CSTcvL0fwaVVw8lJgmTp7zJkkXfsWP7H8hkMv7Y9RfyYnqfa+qOy5c37cIzKwYzBwes/euSfuo07nvWII14vFq88sm5Ho02OZlclS32wcV/6wR7WROXpiE9R4uDdcmrThz45me8kq6gPJ6ALrszZra2FRW2IAgVzMzOFv+hz/Pc+fN0Vnjz8fdneDlpM81fH4XjY6FVHZ4gCFVs64kk5m6+wejOPvRt7YZMJuPlTt5M+f4iI7oZsKoGzzi1RcaZs2Aw4BlQ+gZqxbEJCkRrZgFJCeTeuo2Vb/WaG1tikhAQUJcvZhbswvu/dp1Z8u2CWjVx+VK6AlcXNwJGDsGxSWP+GTcB16QEog+dJOCJFlUdHilHjgKgbNC4xKFBSoWMcB8Lzuw7Q9snw1D+Z5lUgORzF5D/uQVkMkLefF0kCIJQi7QJc8DZJ4q0c9Gcn/YuTr368tiQ56pFR4cgCJUvNjWPrzbdYOmr9Qlw///5jA38bPBxkrPxSCLPP+VVhRHWHpLBQPq5gv0RGrQv33OjTKEgO7gJjpGHiN38O0HjxpoyxIdm1MD0VWt+xMGh9rzOVmv07NT50XjBXJxbtURhYUHwhPGc7PAyB7TV44cn4eARAOo/++QDr3v24jqUSz4j/fTp+87pNRrOzpqHHAmv3j1xiDB+/V5BEGqG0BEv4NGzO3JJIn3jOg5MmIYmKamqwxIEoQos2XGb59q642FIJ277Ti59OZtjw0ZyduIU+viksWJPHDl5+qoOs1ZIP3MWXUYmWea2hEQElH5DCep0644EJO7ZS35qmukCNIEHJglr16xHo8nH3b3gddWFC5fQarWF53Nz1cyb+02FB2lq/1zPJNTbBmtLs8JeevsG9enQszUbDydU+eQeSa/ntmdDMt0CcIlo8MBrncIKljHNvHD
pvnMXv/8FVXoSZp6e+A4aUCGxCoJQteRmZgSMGE79D6cjs7NHcesah8e+Qcyf+6o6NEEQKtHV2ByOXs5g8FMeRH2zhOuLl5Jy8BDa9HRyo6OxW/M9Xa1us/pA2TZhFe5n0Gq5fneycVb4E8UOxzdW89b1iHYJQ9LpiNu6zVQhmsQDW7V40Xfk5uYWfj7pzakkJSUXfp6Xl8dvG7dUXHQVICPyAndWrqSt2/2rgjzma4OdlZIjf/yDXq0uV/lR8bn8E5XxUDHmG2Qs00cQ/NGHyEvZ4TKgZcEspPQLF4scTzlylPTtW5FkMsImjEduXvxu2YIg1A4OEeE0X/AVdk2bYqZVc3PePM4duVj6jYIg1AqLtt9m2NNe2FgocWrVEue2bQh4eTQR877C+YnHIV9Lk0MrObj9OOk52tILFEp0Z8Nv5MXGkqRypvnI5x6qLJlMhnefngCknjiJJFWfVagemCT8N9DqFHh53dn8O17n9xKREVns+SHqg/DNp8Tv2FWu8nefTmbj4cRyx6dJTWXGmms0CbQj0P3+/RH+yyEsGL1ciTo6muSDhwqP70h1JNPSAe/Bg8SmaYLwiDCzt6fBe1MJfPVltE88y6Tt2fz41x0MYulDQajV/jl1E+d//qRP6zoAeHbrQuhbE/F4thPWfr4ETRiPeccOuDz1JMGtG/HTX3eqOOKaTaPRoZfJsX5uKHU9Hn6uZ7tnm7EpbCCaMdOq1caFj9Ri+ZrkFNKOHccgkxPS69lir2nYrjEAsX/uLVcdt5PzSMzQlPk+g1ZL0oGDHB0zHo9jW3mnv79R9yksLMh8sgcAl7+cTebFSxy5nM6KY5lEzJlF3ef6ljkWQRBqLplMhnvHZ3hq0ih+eL0RBy+m8dGnW4lcuBS9puy/mwRBqN60WVnEzvycp2L+ImlL8aM7ZDIZ5o+3IXD8q4zs6MO2E8mcOhcrfieUgyRJLNE35lTfaTzT73GTlCmXy3iq31P8sDfhoctKM+FbokcqSUjY9QcYDKjrhWPu5FjsNXVaNEWvVJF/+xZ5CWX/z7qToiEhvfTNze6RDAbid+7i2IjRXJk1B0V+Ho/XycfCzPjVSbq8Poik4IKZ9du+Wc9Hq6/x8Qv18PSsPatRCYJQdh5OKr4ZHUq7K1tI37WTw6++Sfa1qKoOSxAEE9FrNBydNgPHnEQsvLxw69D+gdfLZDJc7c2Z3t2V6I8/4uzHX2DQiqFHxpIkiZ/2xJKSqWXsINMuBtOxsQvxaRpOn4shJ/pGucq4FpfD+KX3z1Etr1KThEuXLnP+XCTnz0UiIXHlyrXCzy9dumyyQCqaXq0mbmfBEKLgft1KvE5uZoZNRAQAiYePlbmemOQ8kjLyjXq9n3X5CuemvUfUN0vQZ2aS4+iBz8gRNJzyZpl2RJbL5XT/5A1yIx7HztWB9VPCaRZUe1ajEgSh/JRmSppPfwu5mweypHhOTXqb67+sRdKLFU4EoSaTDAYuz/4a2a0oJHsnGnw4HTM7O6PujXBT4CLLJffsGS58MQuDTuzeXhpdbi6Hx0/mzKa/+HRIPcyVpu1nVypkjAzOI2P6JK7OnV+mIf45N2/x96TpzJj9Jy+29zRdTKVd8M7UD4oE+uH7nxQ5X9Fjp9JOnkTy9MTcwZ68xETy4hOw8vbGJiiwTGuB317zK7r0DFLsPGnT4sHZn0+71lw5cZToPYfw7dXd6Dqy1Dq0egNWKgVpOVqcbUueLKyOieHsW1MLPja3weWFobTp0b7cX0+FuTnPfPhGue4VBKF2swkKpMX82Vz7fgXJ27cTt2YNScdO0mjKBCw9yrcJkCAIVev26rWkHT2GRmlBy0/eR+XqYvS9Vj7eNPn0Q46/9R4Zx09wZfbXhEx6o8r3WEnP0fLeyqvMGx1Wrcbmp5+P5NzcxcgTY+nnK8PN4fkKqeeZbk3Zt9ocZfQN4nfswr1zxxK/DgatlpQjx4jdup3sixeRAS/X1dAgvD27NpomngcmCavW/Gi
aWh7Cta/nY24w3Hd8Vf3hKP2DCPaypo0hGk9bOc6tWqBQqe67Nj89g9jftzetswsAACAASURBVCEB9oOHl/qN59i0CZJCgfzGVbSZWZjZGTcpJSY5Dy9nC2RAYnp+iUmC3iCx+KQee4cwQhrVpd3oAWKTM0EQKpRCpSLk5VG4tWrOuVnz0EVf4+iUD3hi2UIUZqX2FwmCUI2kHD3G7TW/YkCG72uvY+3jXeYybAL8afTRe5x590NSDh0mepkjAaNHVkC0xjt+JYOjVzKITdXg5WxRpbEA5KenE7n4e3IP/40ckDnXIeKt18s02qMsLCxV5LftCPs2cn3xUlKPHCVgzCgsPf//7YA6Lo7E3X+R8OdfaNPSAdApVdRp347Awc+Rj+mSqwf+ZXB3dzNZReUlD2mAPDMdKSsThaMjShdX9IkJvPHqU8SkaYm8lc2l1b+Rk3WLi9Z2+PTphV/3zkWSBXMHe47872Wck6IZ9mzTUutUWltj36A+SecucfrwBZp3amlUrDEpeXg7W6DVSySk5xP2n921Jb2e3NR0lu7PR6lS8+mi97GzevASp4IgCKbkEBFO68VzOT9vMZvy/Fm37ArvDwrC1V4skywINYWFfwAJtl7YtGhF0P+xd9/hTVVvAMe/GW060r1buifQsofI3lNAkKEiQ4aICu6J44cDFRUQRcXBEnELisreIAJCKZvSQfeiO2nSJrm/PwqRCpQCLW3K+TwPD+05d5y3t03um3tG9xtb7RfArVkkwc+9QOpbc8j84088unet1xkR/z5ThEIOx86V1nuSkLDqe9LXrEVRrsOkUOJ/zwgC7rm7zqeU7/3wGD5Ir6BT8mYKY49weOaTNBl5N03uGYHcyor8A/+Q9uPPAOTZuOPevz+dxw5EaWcLQHmpptba0uA/Pmo7+2nUavsr1oX6QfdoV9LLe5KyaTvKtHNkrlxB0vc/I0W3xcHeGoaMZWvceQ4WO7HyhfE1Pm/EzEfJS63gvU1ZfN3bhFUN+p6ln6/MfMvKjVec4Sjj9z85vnoNDi3uYc701tiqRIIgCMKtp1SrafXi00QbJZZtSeOBD+J4xiOeFq0D8eh8Z303TxCEa1jxj5aE7tN4d3Kzmz5WcMcYsnv2x7jtT5K37SWmnpIESZLYH1/IwLYeHEspoX+bmnefuhkVRUWc//sABQcOEvjE4/wVr2FTbB7NNuwjtFyHXYuWRM2Yhq2P9y1pj52tFS/Om8iO/T05vXQlkVmxJG/YxonQrhxI0nLihBNd3Fpi3eFOxk7ojoNt3d1LNvgkoSb8hg/Fd9hdFBw4SPLq7yExEf7ZgQHYdt4R317dWDy9GXaqmve1U7m70cNN4td/Cli1I5OJvf2q3d6o15OaV0Z0gAOFmorLZjjS5eSQuGIV9hXljAoqqvUBL4IgCNdLqZAxpZ8/HZ00FL71E2c2SWS03UnUI9NQubnWd/MEQbiEoVRD3u495ER2ZO3fOax6qsVNrfR7qfbTx7HONYiPst35Um+8rvul2pKSq8NkgiHtPVi0LqXOziNJErqsLAoPHSZv7z6Kj5+AC2Nv35y9EeeIMPq2dqNt+L2o7VU4tYi55eMj5HIZPe8IokfH2fy94QDbj5yn8Hgx7cOdGNejLUGeXW5JmxpFkgCVA6hdO7THpX07Cg/HUno2ARtvb9pGhGHjfWPZn0wm48lBPrz0/i76tnK76qMvzbkU4p55Hj/PFvjNfBiVlZzEU4XmekmSiJv/CfKKcuzadUDeLOqG2iMIglAXottHkDZ1MknLVlL6zwEOzDxNi5efxzEqsr6bJggCUJaZxck33qIsLZ04z/08+9jD1U6Ocr0UNjYMfaA3cd8nMOfbs8wdH3HLb4z/PlNIxwgnmjZRczZTi77ChMqqdj9QLS8s5MhTz1Gel2cuM8jk5HuFY9u2PR8M64qn98Up8uu/y71MJuOOAR24Y0D9nL/RJAkXyWQyXNq0xqVN65s+li4ri/Qnn2ay3Ir3fwr
m/anNr/hHk7d7Dya9nrKyCrwoxhT3N4VF/w5IKD59hooTcUgqW6IensKZ9LSbbpsgCEJtkcnl+A8ZiOcdHTj89nyM8Sc58sLL+N5/PyF3D6n3GU8EobYdmPowKpUN2NggU9ki2diAyhaZfzB07Y9JkjCdz0X2w1IoLYYKPSYnVyQXD4zO7phc3LFtHoOLjzuuaiucVGBjY1UnA1qLjh/n1Nx5GEpKKFR7UtShH71auNX6eWQyGc+NDOGldzazdu5uhr94awcx7z9TRN9W7tiqFAR52nI6XUOLoJub1EWqMFAYewTn1q05klTM1iP5RBfpUCptKQ+IwKtzR6L7d0bloK6lKBqXRpck1CaVlxfWbq4Y09Kxj49l+zEfesZU/cOUJIm8XbsB2G8bQfdVSzHGHsExeAhQOeD50I8bsQZ8+/fB2sUZRJIgCEIDpHJ3o8Pbr3H286Xkrl9P1soVpCRm0+OZqQ1qOkJBuGm6MiRt5QDPS2ejT8kqZY8pBrlchr2uiMHJZ8x1iqICSEkw3zhtyZ7MWVUT8ksq6Jy4ka4FBym3skWytUepVmPn5oJXq+a4tIzBITzsupsomUzkbN1GwidLkAwG4h1DMY2ZytMDr/9YNSXXlzEibgUmXRnbfomk5921s6LwtRiMJv5JKObFUaEARAeqOXau5IaTBEmSyNu9F82XSzlZXMKKFtModfSkTys3wl5/k9BI3zqboagxqXGSEBsbx8ED/5CfX4gkVZ2S9Lnnn6r1hjUEMpkMn0EDSVzyBQOS/2D5SkeiXxpQZRYQTUIiusws5I5OaP3CcGvlTFHsEQJyT2EySVSUV2A4vB9rwLNnt/oLRhAEoQbkSiURD0/FrU0rziz9mlXlTVn7xSmeGxmCj+vlU0wLgiVq/cnH2MjAWKbFqC3DoK38P9rFmbExlYOBJaORou6vYe3sjNzaCl1OLvrsbHRZ2ehycnh2XFtsvCq7pJz95CDZ60FVUQYVZVCcR3lGMqlHDxP3syfxY54jOkBNuK89LiWZOAYHVvuEzlRRwaEZj6HPyQXgoFcHejw/nZYhdbtQqtLeHv9RIzi3chUlq5fxvWMTRvcOqtNzQuVsRk3cbHB1qByEGx3owJ4TBTd0LF1ODqfeX4jmVOXKw4UOXkzp6Un7Hi3Ehx3XqUZJwrerf+CzT7/E378J7h5ut9UP2Xtgf0riz5K7bTvjkr/njQ+seOP5vjjYVv7ocnfsBEAX1QY/dztcO7Qn8dMlRGqSOX++hF17zuKokGPr0wT74GBMV1jzQRAEoaFx69ieOzq0o51RYuX2DCZ+EMszxb/S/K5eePfuKT6FEyyaUm2PzVVmTrxIplDg3CLG/H3l+MaYK24b9vBDhE6bgkGjwVBSiqG0lLLMLM7HHUNh7USe2oqNsXmsWhPH1NiP0StUFHqGoHB1w9agRVWuRVlWivLhZ0nTKjiXU0ag0QFbKx1prfox+enRqG1uTecP32F3kbN9B26paZz+5ivmFU7kieHBKBW1f+9XXliIsayM/WcMdIj4NwGKDlTz2frrH7xcGHeU43PfA20pOpU9FZ27M+Dh8SitxWySN6JGv3E//7SWx2Y+zIiRw+q6PQ2OTC4n/LEZGLUa8v8+wMijy5n7vyKc2ncgqvA07uv+AGBlQROmj/JF5eaEOiKc0jPxnN15kCVxjnz4/iICrHW3VXIlCILlk8lkWCllPNinCZ3K48n/7BSJH50i5Y/NRM+ajn1QYH03URAaDJlCgZWjI1aOjgA4REbg2aOyB8EdF7YpPiFxOtMLsrPxyjwJmVWP8dOmszgFNiHI0xavGTMJCnJnkLvtLYwC5FZWRD33DEeefo7IvOP8c3A7zxToeWNcBPY2tTc+qaK4hCNPPEN5fj72HlG0fOBec12Auw1avYncovIar+GSsWUHiYsWIZMkiIymy4tPcupcMjKF+EDjRtUoSSgt1dCpU80WFGuMZAoFkc88RcInS8jdvoPBnQNIc7Yl93Q
hbiYTu73u5OmZfWgWUDnwxe2OjpSeief896vpPuoFIvwdAcf6DUIQBOEmRA3sQY6NnDNfLMWQeIbDjz+N+6BBhD8wFoXtrb2JEQRL5disKe2XLEafm0vRseMYSjVYOVUmFkpHBzo1aVLni3XVhJ1/E8JnPsLpd9+n3en1KJoEMvWjcuZNiqy1Rc4Sl3xOeX4+AEG5p7Dfvha6RwOVH1BEB6g5llJy2VjQKzmTrmHeXon7rGzxHdiPiAn3Ifpt3LwaJQldunbi0KFYBvvW0xxMDYDcyoqwx2bge9dg7IODKgu73ktxfAfaBgZia/3vj9K1YwfOrfgaF10+03rV/xRagiAIN0smk+HVqztuHdpxdvkq8jZu5Pzv68jeuZtmjzyER6cbX/VVEG43Kg8PPHv2qO9mVMu9852UDDtD9sbNjO3ghLvOk8mLjjHnvvAqXYNuRN6ev8jbtQe5SoV+/CwSN+1k9P0jqmwTHajm+LnSayYJ2+LOM/fHRGYNbU7nmR9j7XThQ1mj8abaKNQwSWjevClffrGcpKRkQsNCsFJW3a1P317XPEZ5eTkLF3zMoX9iKSgsxM3VleF338Wo0SOuuW9DIZPJ/k0QLnAMD71sO7smftClLxUF+aglPVB9v0dBEARLoVTbE/XINEr69eLEok/hXBKf/xDHAM9I2oTW7aBKQRBurcDx4/AZPBAbLy/GAGE+dsz+Op5xPX25r5vPDXWjLi8sIuHTJQAETRzPijIP/IbfjzrMFwCjTkfG2t9oHtWF5Xvyqz3WpmW/s+NAKgufv5+m/jc3XapwuRolCQvmfwzATz+uuaxOJpPVKEkwGk24uroy7/238PX1ISEhiWeffhE3N1d69e5xfa22AJ2fmV7fTRAEQagzDuFhdJj/Drm796J1iOR/qxMI87Fjkus5gts0xT4woL6bKAjCTZIrleYZnAA8Dqznw45ezDmYy+k0Dc+NDLnucQoyhRznljFUFBXjPaAff78dy9sT/124MX7Bh5z/62/cB5dwKq05BqN0xUHTa/44huParxloKsc7pwP4t7nxQIUrqlGSsG3H+ps+ka2tDZOnTDB/Hx4eyh2dOnDs2IlGmSQIgiA0djKFAs/uXekLdItx5+fN8eQuWUrhcgPOPXoRMfE+rJ2d67uZgiDUAm1qGqnf/wgmE0+GhrLHug+j3iliWn9/7urgiUJes6cKVg4ORD79JEa9noyCcrR6E2Hedub6JveM5PzfB8j780+axfiSmKUlwq9qj4yvt6RitepzVKZy3DrdgXMtLKArXK7eFlMzGIwcjTvO2PtGVbud0WTE2Ij6lV2MpTHFdJGIzfI01rhAxHarKeUw4k5vEhJ6kr95M0XbtrBv9258hg8jYOhAlPbX7nbZEOOqLY06NlPji0m4nK2vD6EPTyPlm2/RJiTQOiGB9hHN2PhnS37YFcqsYcF0iLjyhwKSJHF+71+oPD3NC8udyalgyYZU7ohyRn5JgqEOC8Vn0AAy1/3BkLQNHE1uYU4SJEnikz9T0fyxhjtL07B2dSV0xnQxe2QdqXGScPhQLCtXriY5KQWZDIKCAxn3wL20bt3yhk686MPF2Kvt6d+/T7XbnTl9ButGOL/tyZMn67sJdUbEZnkaa1wgYrvlut6BfVQopX9sQpFwlpwfvifjlzUo27XBfkCfGq2v0CDjqiWNMbby8or6boJwC8gUCrz79cWjW1cy1v5G2s9rMJw5QS9O0NPdi3f0k0ChxMlOidpWgYOtEmd7JeHaFDz++g3SknFoGkXxA0+xakcmaed1jO3mw/COl0/wEnD/veTt+Qun8ylkbN0Gne/HYDTxxveJaOPjGZa+C4DwWY9i5SjGItSVGiUJWzZv48033qVz507cd/9oJEniyJGjPPXk88ye/dx1dxda/NFnHI07zgcL3sHKqvoEICIyAnUNPoGyFEajkZMnT9K0aVMU1ay2aIlEbJanscYFIrZ617MnRUePkbD6Rzh1grPHstG
3dmN0Fy/zYpT/ZRFx3aDGHFupRlPfTRBuIYWNDf5jRuEzeBDZW7aQ+fufOERG8O2stqTm6SjOyEbzxYcYHN0xlJZgkxoPQInSnp/z/clZn8r9PX3p09IN5VXWMFDa2REyZRKn531A8OE/KMweyMtrsnDR5TPs0JcgSfgOHYJzqxv7oFqomRolCStXfMPkKRO4f9xYc9mo0SP4euVqVqz45rqShEUffsKhf2KZv/AdnJ2vPROGQq5odC+oAApF44wLRGyWqLHGBSK2+uTaqiWurVpSejYBr6IKVp0pZ9Q7cTwQUU6bzP2E3DsKO/8ml+3X0OO6GY0xNoW8ccUj1IxSbY/fsKH4DhmMsawMpVJOiLcdhdklHE9PQZmeghJQ2NnhN2I4vncNpitW2KnkNeoe5Nb5Tpw2bSXnZCovLIklICqQp4d3IvbIchyiIgl84P66D/I2V6MkIT09k+49ul5W3qNnN5YvX1Xjk324cDGHDsWyYOG7OIvBbIIgCLcFdVgoYcCrbSE1r4yDr86lKO04h/bswb5tO8LGjMAhMqK+mykIwg2QKRQo1Wrz9w4R4bR4dy667GyMZWW4depk7hJ0Pf1CZDIZYQ9PJeOpV+jQJoCJg0KQyWS0XfIJcmW9Dam9rdTop+zk7ERiQhJNmvhVKT97NhFnp5rNi52Vlc3PP63FytqKsWP+neWoRYto3p335nU0WRAEQbBU/u62eLz6KGdX/0TB9m1o/zlA3D8HUIRGEDZ6OJJKVd9NFCxMY1iHqTFR2NriEBlRK4m/jbc3/VYtqVImEoRbp0Y/6b59e/H+ex9SWFhEi5bRyJAReySOr75YwZC7arYKs7e3F9t3bripxgqCIAiWz8bTk+hZD6MfN4bUX38n888NkHCG03PfJadtT6KaNW90XXKEunO7rcMkCLdKjZKEyVMmYjKZWLToE4wGI5IkYWVlxciRw5j04IRrH0AQBEEQ/kPl5krYpAcIGnMPmRs3kbzmdzbJIlgy9wh3dfCkvzobD09HHCIjxBSHwlXdyDpMjW169WtpzFPwXs3tGDPU7pTENUoSlEoFD8+YyoOTx5OelgGAXxNfVOKxsCAIgnCTlHa2+A8fis+QQTgeP47aI5Q1f2dzdNkSPPT54BdI0JB+eHXrUqXvsyBcSU3WYWqs06tfS2OcgvdabreYa3NK4uvq2KVSqQgJDa61kwuCIAjCRRefFgR52fL4oCYkFXYjc8Mm5OnnSP7scxI//wrbVm0IGdQH51YtkV9jCm3h9lSTdZga2/Tq19KYp+C9mtsxZqjdKYmvmiS8N28BMx55CDs7W96bt6Dagzz9zOO11iBLZzKZMJlMKMXAGkEQhBumUKkIm/QAwfeN5vxff5Oyfgu6k8fRHzrAyUMHOD/iYbrd0x1ne5EoCP+q6TpMjXV69WtpjFPwXsvtFnNtTkl81TvZtLR0cz+u1NR0RHfQmvn2229JTk7mxRdfrO+mCIIgWDyFSoVnj2549uiGPjePrG07SNuznwOSL/PfOkzLIAeGpq/H21GBd7fOOLdsIZ4w3Kaudx0mQRCqd9UkYcHCeeavF34472qbCf8RHx9PcXFxfTdDEASh0VF5uBM4eiSBo0fSGdDojOw6koXiz78pMFZQsGMHko0dbh074NXtTpEw3EbEOkyCUPuuvB72fyxf9jU6ne6ycr1ez/JlX9d6oyxZWloapaWl9d0MQRCERs/eRsGAjn60X/geXqNGYfTwRabTkr9jOydff4s9904gcdtf9d1MoY5dXIcpPT2DsWMmMKD/MAb0H8azz7xU300TBItWo47zy5etYuiwIdjY2FQp1+n0LF+2igkTx9VJ4yxRWloaLi4u9d0MQRCE24adfxPCxo0lbNxYtKlpZOzYTebOvciz03nhzxKsD8fRtbkL7XMO4KQrxKVtaxybN0MhZuhrFMQ6TIJQN2qUJEiSdMUxCWlp6ajVt8/sANciSRJpaWlYW1vXd1MEQRBuS5cmDPr
z51nl7Epccgm7jueT/MdmPLXZZK77HUlphUPz5nh2ao9r+/ao3N3qu+mCIAgNSrVJwtjR45HJZMhkMh6a+hhy+b+9k0wmE/n5BXTv0bXOG2kp8vPz0Wq1oruRIAhCA6Byq7zxbxPqSJtQR4pCH+Xc7oOc/+cQ1tmplB6JpfRILImffo6i3zDaPDQOa6X8wgdjYrYOQRBub9UmCYOHDECSYOlXK+jdpye2tv92N7KyssLbx5suXe6s80ZairS0NDw9PUWSIAiC0AA5NW9Gi+bNgPGUFxSQt/8fUnb9TcXJY2zKtOX5Vw4QE+hAD/0JfI7vxL1lc1yim+HYrJl40iAIwm2n2iThgfH3AeDp6UGv3t1FN5prSEtLIzIykgMHDtR3UwRBEIRqWLu44Nu/D779+2AqL6ezXM6T5XAooZjC5b8iZaWTm5VO7oaNlTu4uOHSvClurVvi1adX/TZeEAThFqjRmISgoADOnk2kWbOoKuUnTpxCLpcTFRVRJ42zNGlpaURERLBnzx4MBoNYUE0QBMECyC98AOaohB4xrpjmPkXp2QRyY4+SdfgYxqSzKArOU7B7N8eOpXGuOIhmAQ4097WBzWtxCA9HHRFm7t4kCILQGNToLnbhwsWMGnX3ZUlCTk4u33/3E4s/qX5F5ttFamoqkZGRqNVqNBoNTk5iMRdBEARLI7eywrFpFI5Nowi9dxSS0Yg2NZXzR0+gqlBR4WrD7hMF/PrTGcaf+MW8n+TghGNEOC5R4ThEhOPQNErMoCQIgsWqUZKQnHSOyCs8LYiICONc8rlab5SlSktLo3fv3tjb21NaWiqSBEEQhEZAplBgHxSEfVAQAUC7C+W6bCeS1hVw/sQZjClJKEuKKPnnICX/HAQgadxLhEQHE+FnD6fjMCQno/f2xtbTUwyMFgShwatRkiCXy9BqtJeVl5SUYjJJtd4oS5WWlkaTJk1Qq9Vi8LIgCEIjZ+PlRdPJDwAgmUzosrIoPh1PRtxpipPOcd7amb92ZHI6XcND8cvwLcvi8NerkVQ2qJr44xwahH1gAI7NmqIOCa7naARBEKqqUZLQPLoZ3333E7Nffq5K+fff/Ujz5k3rpGGWxmQykZ6ebk4SSkpK6rtJgiAIwi0ik8ux9fXF1tcXr57dAeh+oU6SJI5/dpTM2GNYF+Sh1GkoT4gnJyEegOTIrhgGjCLE244mxnxMf23HPigAu4AA7AL8UdrZ1VNUgiDczmqUJEyeMpFZM59myuQZtGnTCplMxj//HCYtLZ0FC+fVdRstQk5ODo6Ojtja2prHJAiCIAiCTCaj6dQJGI8dIzo6GmNJKdqUFArOJpN7JgkX/2hOl1RwMD4Th+P7GHzujyr7S86u2AUE4BIaSMC9Y8Q4B0EQbokaJQmRkeF88ulCvln1Pfv/rpzeMyIynBdfepaQkKC6bJ/FSEtLw9/fH0B0NxIEQRCuytrZCWvnGJxbxHCxk9GgC/9rU9Rk/uVEXnwyZSmpKPIykRfmU1aYT+HR47yU24IAT1sCPGxp9sdHqEwV2Pt64+Dng62vNzY+Ptj6eGPt5obskgVQBUEQrleN5+gMDg7ipdnP1l1LLFx6ejp+fn6ASBIEQRCEG2MXEEBoQAChF76XjEZ0WdloU1LQ5BfxSsswUnN1pOSWIctMxWQopyQ9hZL/LM9T3nUgnqPH4Otqg6ool6Ijcdj6+mDj443K3R2ZQnHLYxMEwbJc10T+FRUVFBQUIklVByt7eXnWaqMsUUZGBr6+vkDdJQlpaWns3r2bsWPH1vqxBUEQhIZHplBg6+eLrZ8vF1dhiGqiRpIkDHd8hi4rm7LMLApTMihIyaAsMxPycjihsWPpT0mkn9fROi+WuzM3mI8pyRXg6o6NtzdO/r6ETp6A3MoKgIriYhR2dsjFOj+CcNur0atARkYm777zAXFxx+CS/ECSJGQyGVu3/1lX7bMYmZmZhIWFAXWXJGzdupX333+fu+++G5XokyoIgnDbksl
kWDk6YuXoiENEOP/9qK67JPHIhWlWMw9ak75FR1lGJqa8HJSlhZCXjT4vm5QTJ3kssyV+bjb4utrQY+P72BTlgoMTVu4e2Pt4Yu/ticrTA8eoKOyDg251qIIg1JMaJQnz3pmPplTL7NnP4e7hLuZ3voLMzEy6du0KVCYJOTk5tX6OEydOoNFo2LVrF3369Kn14wuCIAiNw6Xv0z7tWuHTrpX5e6Nejy4rG11GJhUaDSvatyD9vJ6MfD2yTTIkmQxZSREVJUUUJp2l8MJ+xyN7kd9hIN4uKnzzE1Bv+gEbD3ecOt9xi6MTBOFWqFGScPLUaRYvXkBIqJjH+WoyMzPx8fEBwN7evk6mQD1x4gTDhw/n999/F0mCIAiCcEMUKhX2gQHYBwaYyzydVLQOAZZ/gmQ0Up5fgC4nB112DsXpWRRn5tA0ogVZ7nZkFepJOnGO6OwMyrIz+CVLDS71F48gCHWjRkmCp4cHRpOprtti0TIyMsxJQl10NyovLychIYHFixczZMgQdDodNjY2tXoOQRAEQZApFKg83FF5uOPUvBleV9jG2PsedNld0efmEeboxJzPV97ydgqCULdqND/aQ9Mn88WSpRQXF9d1eyySVqtFp9Ph6uoK1E2SEB8fj7+/P02aNCEmJoYdO3bU6vEFQRAEoaYUtrbYBwXh2r4d9n6+9d0cQRDqQI2eJCz+eAnn8/MZMfxe3NzdsPrPrAdff/NVnTTOUmRmZuLt7W3uA+rg4FDrScKJEydo1qwZAAMHDmTjxo3079+/Vs8hCIIgCIIgCFDDJKFvv9513Q6LlpmZaZ7+FKiTFZePHz9O8+bNAYiJieGbb76p1eMLgiAIgiAIwkU1ShImThpX1+2oV2vXrqV9+/ZVbvSvx6WDlqFy4HJtP0k4fvy4+clBWFgYSUlJGAwGlGIua0EQBEEQBKGW3fZrtpeVlfHKK6+wc+fOGz7Gf5OE2h6TYDQaOX36tLm7ka2tLZ6enqSkKCy89AAAIABJREFUpNTaOQRBEARBEAThoqt+DD3uvgdZ/OkCHB0duf/eSdWujWDJYxL+/PNPNBoNiYmJN3yMzMxMWrRoYf7+4pOEi4vN3azk5GRcXFxwcnIyl0VERHDmzBlCQkJu+viCIAiCIAiCcKmrJgl9+vbC2tra/HVjXUDthx9+YNiwYSQlJV2xvqKigpdeeolXXnkFtVp9xW0yMzOrDCK2trZGqVSi1+trZZrS/fv3065duypl4eHhxMfHM2DAgJs+viAIgiAIgiBc6qpJwqXjECY9+MAtacytlpSUREJCAq+++iqPPPLIFbeJjY3lp59+ws/Pj1mzZl1xm/92N4J/nybURpKwd+9eevToUaUsIiKCTZs23fSxBUEQBEEQBOG/ajQm4YlZz1JScnkfe41GwxOznq31Rt0qP/74I3fffTchISFkZGRQXl5+2TY7duzgrrvuYsWKFeTl5V1WL0kSGRkZlw16VqvVlJSU8OOPP97UeAej0cjevXvp3LlzlfKLTxIEQRAEQRAEobbVKEmIjY3DYDBcVl5RUUFc3LFab9StsmXLFgYOHIi1tTU+Pj6kpqZets3OnTu5//77GT58OB999NFl9cXFxcjlchwcHKqUX1wrYdmyZaxZs+aG23jixAnc3d3x9vauUh4SEkJKSsoVExtBEARBEARBuBnVzp+ZnZ1j/jo3J7fKDanJaGLfvv24urrUXevqUHZ2Njk5OcTExACVN92JiYmEhoaat8nLyyMlJYVWrVoREhLCkCFDaNu2LXfddRf5+fns2bMHSZIu62oElU8S4uPjSUxMpKCg4IYHMe/du5c777zzsnKVSoWfnx9JSUlERkZe93EFQRAEQRAE4WqqTRLGjh6PTCZDJpMx/aGZl9XLZDKmTptUZ42rS3v27KFTp04oFArg3yThUrt27eLOO+/EysoKNzc3li1bxvjx4zl69Chr1qwhJiaG7Oxs2rRpc9nx1Wo169atY+DAgezdu5f
U1FQCAgJuqJ3jx4+/Yt3FLkc3kiQUFxczf/58pk2bdsUk53pIksSvv/5qnqJVEARBEARBsGzVJgkffvQ+kiQx67GnefOt13Bw/LdLjZVSiZe3Fy4uznXeyNpgNBqZO3cunp6eTJs2jd27d9OlSxdzfUhICIcPH66yz86dO+nWrZv5+8jISL788ks++ugjli5dal4B+Urs7e35/fffmT9/PgaDgf379193kqDT6YiNjeXjjz++Yn1ERARbtmyhV69e2NnZ1fi4FRUVPProo+h0OkaOHMmiRYto27btdbXt0jbOnj2b2NhYCgsLGTduHNHR0Td0LEEQBEEQBKFhqHZMQkxMc1q0iGb1d8u5o1MHYmKam/9FNY20mARBr9cza9Ysjh07xmeffUZqaip79uy5LEm49EnChg0b2LFjR5UkASA6OppPP/202gQBKp8kyGQyunbtSseOHdm3b991t3vt2rW0bdv2svEOF40ZM4by8nJ69OjBr7/+ai4vKSm5bAxJXl4egwYNYuzYsUydOhWVSsXq1at58803mT59OgcPHgQqE6PZs2cjSdI122cymZg+fTp6vZ5ff/2VlStX8vPPP7No0aLrjlUQBEEQBEFoOKp9knDRr2t/Z/KUCZeVl5ZqmPfufP43Z3aNTmYwGPnk4yVs3LgZkyTRvXtXHn/iEfN6DNfDZDIhl185x5EkCUmSkMvlmEwmnnjiCQCWL1/OkiVLeOSRR7Czs8Pf39+8T0hICElJSej1el577TX27dvH559/ftmsRTWlVqtp164djo6OdOzYkU8//bRK2zds2EDz5s0JDAy8YjwVFRV88sknvPfee1c9h4+PDx9//DEnTpxg6tSplJWV4ebmxnPPPUfv3r155513kMlkVFRU8Nhjj9GrVy/at2/PqVOnGDduHAqFgp49ezJ//nxmzJjBlClT+Pzzz1Gr1WzatIl+/fpVG+Onn36KXq/niy++QKlUEhERweuvv87rr7+OjY0NQ4YMAcDPz6/a8Rj5+flkZWXdUHcljUbD2rVrKSkpwc7Ojr59+5oHeUuSRF5eHklJSWRkZJCTk0NZWRmBgYEMGzasQa39YTKZ2LFjB+7u7kRHR99Q26533IskSWzbto2kpCRsbGyIiIhoUD8TQRAEQRDqT42ShA3rNxF35Cgvv/oCXl6eABw7epzXX38HB4crLzB2Jau+Xk3skTiWLl+CUqngxRde4/MlS3nk0Yeq3c9oNLJv3z5atGiBg4MDGzZs4KmnnsLKyoqwsDDeeOMNc7/8nJwcXnzxRQ4dOsSLL75IfHw8BQUFLFu2DJVKxZQpU/juu+/o1atXlXO4ublhMBgYNWoUgYGBrFu3Dnt7+xrH9l8dO3Y0j1UICQlBr9eTlJREeXk5c+bMQa/Xk5OTQ+/evcnIyODgwYM8/vjjTJs2DZlMxq+//oqvr+9li6hdSbNmzVi1ahXjxo1DLpezYMEC3nzzTb777jv69OnD3LlzUavVPPnkk8jlcrp3715l/y5dujB79mzeeecdli1bRn5+Pq+++io9evS4YgInSRLbt29n+fLlrFmzBqXy318jZ2dnli9fzowZM1i1ahV6vZ6AgAAmTZpEcHAwHh4eeHh4AJCVlcU333zDqlWrsLa2pmXLlkyYMAEPDw/kcjlFRUU0adLEvD1UDjiPi4sjOzubzMxMfvjhB9q1a4e/vz9nz57lgw8+oFWrVhQXF5OQkICVlRWBgYE0adIET09P7OzsWLJkCXv27OGNN95ApVJRVlZGfHw8p0+f5syZM7Rp04aBAwde9zWvqKigqKgId3f3KuV5eXmcO3cOlUqFjY0NNjY2eHp6Ym1tjSRJ7N69m3nz5iFJEhqNBoPBQPv27WnatCkajYbi4mLGjRtHcHBwleMajUbWr1/PmjVrOHbsGHK5nMcff5wRI0aYxxJdetOflJTEb7/9hrOzM3feeScffvghp06dMl8zX19fpk+fjr29PXl5eSQkJJCRkYG
HhwdOTk5kZGSQlZWFRqNBpVLRokULunfvbh78bzAYOHjwIFu2bMHR0ZG77rqLoKAgoDIJ2rRpk3ka4YtJfEhICBERETg7O1816b9Io9GwcOFCzpw5Q+vWrbGxseHcuXOUlZWhUqkYMGDAZeuJ1DeTyYRWq73qQowXSZJEfn4+GRkZyGQyfHx8cHV1rXHSptVq2bdvn/ln07p16xv68KUxKi4uJjMzk4KCAvM/rVaL0WjkzjvvrDYpNxgM5OTk4ObmhkqlMpdLkkR5eTkajQZJklCpVOTk5BAfH4+npyfNmzensLCQ48ePExYWhr+/P5IkUVpaSkVFhXks3KX0ej1arRYXl2tPBqLVatm6dStnzpwhMzPTPKmITCZDqVQSHh5O8+bNcXR0RKVSoVKpKC4uZuPGjZw9e5bOnTsTExNDWVkZzs7OREVFYTKZWLduHdu3b+fs2bO4ubnRr18/5HI5p0+fRqFQ4OnpSc+ePQkPDze3OS4ujn/+OXQjl0YQhAZOJtWgX0lBQSFvvfEup06d5qlnHic1JZXly1YxdNhgHp4xFSsrqxqdbPQ945jx6EP06NEVgP37DzLntbn8uu6Hy24QtNoynnllLo5WRg4fPoQkSRQXF9OjRw927trFgvnz8fHxYefOnSxcuJCx944lKzOLnTt3MmLkSLp26cLb77yNplTD0qVLcXb+t2vUqdOncVCr8fPzq3LOJ598kojISB56aBoyavcT1VdefYX16zfg4uJCj+7defKpJ9Fqtfz8888EBQURFhrGy6+8jLOzM9HR0fz222+8/PLLdGjfocbnKCgswNrKGnt7e5KTk3nwwQcxmUz069ePmTNnXvtGBckc98xZM7G2siY/Px+NVkvXLl3wa+JHakoqu/fsQa/T8cKLL3JHx47m/Y0mI2dOnyEiMgKFXGEu27p1K2vXriU3N4/cnBysVda4u7uTnpZOnz59mDRpEm7ubqz+ZjU7duygqKgIk8mE2kFNWmoaXbp2wd7Onr1/7UVTqqFZ82b4+frh7OxM3359CQsNM7ehtLSUfX/vw8Pdg8DAwCrX/aKysjLmvD6HrVu3YWWlxGg0EhgYSHh4OMHBwaz7bR0RkRG0adOGjPQM1A5qPD082bNnD/Hx8YSEhNChYwdU1iq0ZWVkZKSTmJjI0aNHkSSJsNAw2rVrR0FBAfHx8SQnJxMUFER5RTl6nR6dXke5vpwBAwcQH3+W3JwcZjwyg969ewOQkJDA0aNHOXv2LGr7ymv2ww8/0LdvXwoKCkhMTMTG1qYyIXFzZ8zYsbSIieH8+fMsXLiQI0eOYDKZUFpZ4eHhjrWVNTq9DkOFgf4D+lNUVMTePXvp3bs3s2bN4ty5c4SGhbJl8xZ++KHyb9HZ2Zng4GC8fbzJy8ujpKQEH28fPL08Uasd0Go1HI07yuYtm/H28sbXz5fdu3bj6+dLt27dKCgoYMvmLXh6edKjRw+2bNmClZUVUZFRAOYnXMnJySQmJqLRaHByciIiMgJ//wBKS0vQlelQO6ixsbFFJpOxZ89uWrduTc+ePTl29BjlFeUEBARgZ2tHqaaUb1d/i3+APzExMRgqDOj1enLzclGrHagoLycnN4ecnFzKyrTo9eXodTrkcjnBwcEEBgbi4uKCUqkkNzcXk2TCz88PWxtbCgsLzf/s7O3w82tCaWkJycnJaEo1GIwGwkLDiGkRg1ajJT0jnYz0DNLT00nPSEcmk+Hq6kpUZBQBAQEoFAr2799PWlqa+fczKzsLaytrvLy9AMjJzsFkMhEWFoajoyMmk4mc3BwyMzKRJKkyAZDJsLWxQaPRoNFqiIlpQXh4GEeOHCE+/iwOajUuLi44Ozvj4uJi/trZxQWlUoFWozXf4JaXl1NcXExaejrJyckUFhZSVqbFw90DXz9f8vLyyMnOITQsjMjICLQabeX0zwoFkslEfn4+JpOJ8IgIgoKCzO1KSEhAp9fh5elFmU5HSso5bG1sCQ0NRa1WYzK
ZsFfb4+joiCSBRlPK6VOnzUmk0kqJldIKhUJBhaECuUyOu7s7tna25OTkotVqsLOzx97ODls7W5TKyr/ngoIC0tLSOHnyFJmZGfh4+1TG7uyMo6Mjtna2mEwmdu3ahSRJ+Pr64ezkhExeeZPt7uZOeUU5mzdvAUmiqLgYKyslSoUSk2RCV6ZDrpBjZ1s5FkxfrsfV1ZWQ4BCyc7I5d64yzvCICJISE7GytqK4uBiTyURFhQEHtZqoplEEBQVRVFREakoqZxPOIpfL8fL0Iiw8DKVSiVwuRy6TV/4vl6PT6ygsLORo3FFiWsQQExODt5c3Khubyi6iF5KX+LPxnDl9Bm2ZlnJ9OXq9HisrK7p27Up4eDh/7/+bs2fPYm9nT2ZWJrY2thgMBpydnRk+fDghISFkZmWyY8cOFAoF4WHhSJJEZlYmW7ZsxdPTg/LyCtLSUgkODqZV69bkFhuYN+cF7Oxsa/yeVR8u3lu89vzjqO1rPpbP0hmNRk6ePEnTpk2vmKQ2RrdjzAClGi2vvb2gVv4ea5QkXLT44yX8+MMvyOVy/jdnNp27dKrxiUpKSrlr8EhWrV6Kn19lF57CwkKGDx3Dt9+vwPvCm+NF+QWFvPzmBzU+viAIgiAI9ef1l57EtYGPVRT3FsLtojb+HmvU3Qjgr7/+ZsP6zTRrFkVqajo//vALUVERuLm71Wj/Mq0WoEoXnoufbGu1ZZdt7+zkyOsvPYmNSiX6SQuCIAhCAyVJEjq9Hmcnx/puyjWJewuhsavNv8caJQmLPvyEtWvWMX7CfTww/j7y8s7z5hvvMPnBh3nuhafo1KnjNY9he2GKTo1Gg7OzE1DZNQS44uMQuVze4D+REARBEAThyu/jDZG4txBuB7X191j9SMELdu3cwwfz32b8hPuRyWR4eLgzf8G7jBg5jFdmv16jEzk4qPH09OBsfIK5LP5MAmp1ZbkgCIIgCIIgCA1DjZ4kfPHVYhwdqz62kMlkjJ9wP23btq7xyQYPGcDXK7+leXQzlEoFy5auZOCgftec1UQQBEEQBEEQhFvnugYuX0lxcfFlCcTVGAxGFn/8GRs3bEGSTHTv0Y1Zjz+CSiWm6hMEQRAEQRCEhqLaJGHQgLv55ttl5jEEL73wKk8+PQs3N1cA8vMLuGfEfWzd/uetaa0gCIIgCIIgCHWu2u5GZWVlXJpDHDp0BL1eX2WbG3kQMf+DRezdsw+NRoudnS3de3Rl+sNTsLKyuuaqzLW5anNd0ev1TJr4EEVFxfz+x8/AtdvdkOOa+9Z7bNm8DaXVv78uCxa8S1TTygXsLDm2i/bu2cfSr1aSmpaGna0do8eMYOy9oyw2tgH9h1X5vqK8gsDAAL5aVrnyt6XGdVFe3nk+XPAxsUeOgiQRE9OcWU88iqenh0XHlp6ewaKFn3D8+AmUSiUDB/VnytSJyOVyi4vrl59/ZcP6TSQkJtGsWVMWfjjPXKfVlvHB+x+yd88+rKysGDS4P9MeetA828zN1tdXXF99uZw9u/8iOTmFPn178cKLT1fZ93zeeebNW0Ds4SOo1WrGjL2HUaNH1Li+vmIrKCjk40WfcuTIUUpLNXh5ezJu3Fj69P13UdKGcM0a+73F1VQX982+fzd0je1+qyauFHNdXeebHgxwI3/gd48Yyoqvv+SP9b/wxVeLSUhIZPU33wNVV2Ve9c1XJCef4/MlS837Xqu+IfjqyxVVVgkGy49r2PAhrN+w1vzv4i8eWH5sf/99gA/e/5DpM6aw7vefWbnqCzp0bA9YbmyXXqv1G9YSGBhAr97/rrRtqXFdNP+DRRgMRr79bjnf//g1NrY2vDdvAWC5sRmNRl564VWCgwP56ZfVfLZkEX/v28+3q38ALC8uNzdX7rt/DKNGXX6Tu+jDxRQVFvHdDyv49LMP2bFjFz//tLbW6usrLh9fH6ZMm0SfPj2vuO8br7+Ds7MTP6/5jrlvz2HlytXs2f1Xjevr2tViKysrIyw8lI8Xz+e
P9b8wc+YM3ntvIcePnTBv0xCuWWO/t7ia6uKGm3v/buga4/3WtVwpZqib61wvI4aDggKxtbUxfy9DRnpaBgC/r1vPA+Pvw93dDWdnZyZOGseff2zEZDLVqL6+nTkdz9/79nP/uDFVyi09rupYemxffbmCB8bfS9u2rVEqFdjb2xMSEgRYfmwAJ0+cIvncOQYM7Gcus/S4MjOy6NmrO3Z2dtjY2NCnby8SE5MBy40tNTWNlNQ0Jk0ej7W1NZ5entwzegS/rv29Ru1uaHF1696Fbt274PKf6SZ1Oh2bN29j8tSJODg44OPrzdixo/jj9/W1Ul9fcQEMHNiPTp06ona4fHX7jIxMjhw5yrSHJmNnZ0t4RBhDhgw0t/ta9bfC1WLz9fVh7L2j8PTyRCaT0aZtK5pGRXL8+Emg4VyzxnxvUZ3q4r4WS477drzfulrM13KjMVebJMhkMv77oEBG7TwaXPX1dwzsP5zhQ8eQkJjE3SOHUVJSSk5OLuHhoebtIiLCKC2tLL9WfX0zGIy8N28Bs554FCsrK3O5pccFsHHDZu4aPJKJ46fy3bc/mn+xLD22sjIdZ07Ho9PpmfDAVO4eNobZL/2P7Owci4/toj9+30DHju1xv7DwYWOIa9SYEWzftoOSklK0Wi0bN2yh0x0dLDq2K3bdlCSysrLJzs6x2Lj+KzU1HUOFgbCwqm1NSj6H0Wi86fqGKiEhCTc3V1xdXcxlERFhJCQk1ai+ISkt1RAfn0BIaDBw89e0NjXGe4uauFLcF93o+3dD1pjvt67majFfVBfXudoxCZIk8eorb2KlrNysvLyct956F5W1CoAKg+H6IrzE/ePGcP+4MZxLTmHjxs24urpcc1Vm+YWMpaarNt9q33/3IyGhwbRu3ZLDh4+Yyy09rpEjh/HwjCk4ODhw6uRp/vfaW8jlckaNHmHxsZWUlCBJEhvWb+btd1/HxcWZjxd9xisvv87rr78MWG5sUPkJ3tat23nhxWfMZZZ+zQBiopvzx+/rGTrkHmQyGaGhIXyw4G2Ljs3f3x8/X1+++nIFkx58gPz8An78cQ2A+aMZS4zrv8q0WlQ2KpRKhblMrVZjMprQ68tvur6hLupVptVir7avUqZW26MtK6tRfUNhMBh56413adkymnbt2gA3f01r85o1xnuLmrhS3HBz798NWWO936rO1WKGurvO1T5J6D+gDz4+Xrh7uOHu4Ubffr3w8/M1f+/j40W//r1vKNiLAoMCCAsL5e2571VZlfmiS1dlvlZ9fUpPz2DNL7/x8Ixpl9VZclwAEZHhODs7o1AoaB7djPvuH8PWrTsAy4/N7kL7RtwzDB8fb2xsbJg8dSKnT51Brqh8Q7PU2AC2bduJSmXDHZesim7p18xkMvHUUy/QvHkz/tywhj/W/0Lbdq159pnZFh2bUqngrbf/R1JiMmNGPcCzz7xE3769Kp/oXlhLxhLj+i9bOzv0Oj0Gw7+fIJeWliJXyFGprG+6vqGytbNDU6qpUlZaqsHO1rZG9Q2BwWDkzTfeQafT8cprL5rLG+I1ayz3Ftfr0rjh5t6/G6rGfL91NdXFDHV3nat9kvD8C09XV11rTCYT6WkZVVZl9vPzBaquyiyXy6utr09xcccoLCxi4oTKC2ioqECr0XL38LG8/vrLFhvXlcjk/3Y5s+RrBpWf1Hl5e1XpRnfpYHxLjg0q+yH2H9Cnyid4ln7NiotLyM7KZsTIYdjYVPbDHTFiKN+u/gGj0WjRsfn7N+GdeW+Yv//l51+JiorAw8PdouO6lL+/H0orJQkJiURGhgMQH59AUGAgCoXipusbqtDQYM6fz6egoNDc5z8+/iwhIcE1qq9vBoOR1+fMpbiomLnvzEGlUpnrGuo1awz3FjfiYtxXcj3v3w3V7XS/ddG1Yo6OaV5l+9q6zrd84LJWW8aff2ygpKQUSZJITEhixfJvaN+hLfDvqsx5eecpLCy8bFXma9XXl169urP6u+V88eVivvh
yMc88+wR2drZ88eViIqMiLDYugG1bd6DRaJAkiVOnzvDNqu/p3r2Lud6SYwMYOnQwP/24hpzsHPT6cr76cjmRURG4u7tZdGwpKakcP3aCQYP7X1ZnyXE5Ozvh5+fLml9+Q68vR68v5+eff8XDwx1nZyeLji0hIRGttgyDwcjBA/+wcsVqJk+ZAFjeNTMYjOj15RiNRiRTZbeSioqKyoHmfXry5RfLKCkpJTMzi+++/ZHBQwYA3HR9fcVVWWdAry/HZDJhulBnuNAt19fXh5YtY/j8s68oK9MRH5/Aut/Wm9t9rfr6jM1gMDDntTcpLCxi7jtzzMn5RQ3hmjXWe4truVbcN/v+3RA15vutq7lWzHV1nW96xeXrVVamY/ZLr3HmdDwVBgMuzs50696FSQ8+gI2NzTVXZbaUVZsPHz7C7Jf+V2XeXkuNa+ajT5GQmITRaMTD3Z1Bg/szZuw95l8uS44NKj91+fyzr/jjj41IkomYmObMfPwRvLw8LTq2Tz/5gpMnTrFw0XuX1VlyXADJyef4eNFnnD59BkmSCAsLZcYj0wiPCLPo2L78Yjlr1/xGeXk5AQH+TJo8nk4XuopZWlxLv1rJ8mVfVylr2aoFCz+ch1ar5YP3K+d2V1opGTx4wH/m1L+5+vqKa+5b77Fh/aYqdf0H9DWvl3A+7zzz3l1AbOwR7O3tGXvvqMvXSaimvq5dLbZJDz7A4zOfwdraGrni35uKvn178dTTs4D6v2a3y73Ff10r7pt9/7YEjel+q6b+G3NdXedbniQIgiAIgiAIgtCwNdxnK4IgCIIgCIIg1AuRJAiCIAiCIAiCUIVIEgRBEARBEARBqEIkCYIgCIIgCIIgVCGSBEEQBEEQBEEQqhBJgiAIgiAIgiAIVYgkQRCEKzp8+Ag9uvUnJye3vptitvSrldx378Qb2lerLeOeEfdx6uTp2m3UFc9VuRLm2bMJdX4uQRCE2iJe929cY3zdF0mCINSD39etp3fPQWi12irlD06cftXyuW/Ou5VNrFc5Obn06Nafw4eP1NoxV3/zPZGR4UQ1jay1Y16NnZ0do0ePYPHHS+r8XIIgWAbxul898brf8IgkQRDqQdt2rTEajcTGxpnLCguLSE4+h5ub62XlSUnJtGnbqj6a2ijo9eWsXbuOu4YOvmXnHDCwH0dij5KYmHzLzikIQsMlXvdvLfG6f/NEkiAI9cDb2wtfPx8O/RNrLjt8KJbg4EA6d+l0WbkkSbRp2xqAzIwsXn5pDiPvvpf+fYcyacJDbNyw2bz9ut/+YPDAu9Hr9VXO+c2q77hnxH2YTCYA0tLSeWX2HAYPGsGQQSN5+skXSExIqrbd19rnzz830qvnQI4ePc7UyY/Qv+9QHpr2GKdPx1c5zsGDh5g04SH69hnCgxOnExsbR49u/dm4cQsAo+8ZB8ATs56lR7f+jBk9vsr+u3ft5YFxkxnQbyiPz3qG9PSMatu9f/9ByvV62rVvay7LzMyiR7f+bNu6gxeef4X+fYdy75gJVX6WAD269efnn9byv1ffZEC/oYy+Zxzbt++itFTDG3PeZmD/4dw7ZgI7tu+qsp+LizPR0c3YdCEmQRBub+J1X7zuWxqRJAhCPWnbpnWVN4VDh2Jp3aYVbdq0uqw8IMAfDw93AMrKymjTthXvvvcmXy37lLuGDuLtt9/n8KHKfXr07E5FhYHdu/ZWOd/GjVvo27cXcrmc/PwCHnv0KZxdnFm06H0Wf7oA/4AmPD7rGQoLC6/Y3pruI5kkPv/sKx6bOZ0lX3yEg1rN/159E4PBCEBubh4vvfAaTZtF8fkXH/PIYw+x+KPPqpzr8y8+BmDO6y/z0y+r+WzJh/+243w+a9euY/bLz/PR4vmUlmp49+0Pqv1ZH4mNIyw8DKVScVndks++ol+/3ny59BN69OzG22+/T1pqepWe4Os5AAAGgUlEQVRtVq5cTcc7OvDFV5/Q6c6OzH1zHnP+9xZt27fh8y8/5o5OHZj
71jyKioqr7Ne0WRSxtfjoXBAEyyZe98XrviURSYIg1JM2bVuRlJRMQUHlC+3hQ0do3bolLVvFkHzuXJXytu1am/cLCQ3m7hFDCQ0Nwc/PlxEjh9Hpjg5s3rwNALXans5dOrHhkk9GzpyOJznpHP0G9AFg7Zp1eHt78eRTMwkJDSYgwJ+Zs2agVqvZtHHrFdtb030kSeLRx6bTomUMgYEBTJo8noyMTDIyMi4c5zecXZx56umZBAUF0rZtayZPnVjlXE7OTgA4ODrg5uaKs7Ozua68ooKXZj9LZGQ4YWGh3HvfaOLijqHXl1/1Z52ZmYWHu9sV6+4eMZSevbrTpIkfk6dMxNramkOHYqts06tXdwYM7EuTJn5MevAB9Ho9fn6+DBzYjyZN/Hhw8nh0Oj3Hj5+osp+HhzsZGVlXbZcgCLcX8bovXvctibK+GyAIt6vWbSr7mh765zAxLaLJyMykZasWqNX2hIaEmMvT0tKZ/vAU8346nY7ly1bx1959nD+fT4XBQEV5Ba1btzRv079/H1588VXOn8/Hzc2VjRu3EBEZTnBwEACnT/2/nXsLirqK4wD+3RuwctkbG07JQuyyy8QAC6tSDyUkRj2UmY52IXuAxgadxKbbNDSDOY7T1BTqoKSkPjQ9dn2oFKIEdaaJZLlZGoGaAyK4G9Qay7LbA8Mf/uySCwLLDt/PE3v+53fO+fPwO3POmfO/iIu/XcKjhetFY3IPu3Htz8BHuMHGSCQSGE0pwu/xJO246YTBkIju7itISzNDJpvY3UlPvy/o/1u8TieaPPTxOvh8PjidTiQk3BUwxj3sRkx0dMBnJpNR+Fsul0Gr0cDhcEypM/E+arUaUpkURuO9QllsbCwUCgWcjr9EcRERERh2i4//iWjpYt5n3g8nXCQQhYharYLRlIKmpmaMjHhgTjUhJmYsoWVnZwrlUpkU1kkTQfWhGjSeOYfS0hdhSEpEVFQUDlcdwd//THwZY9VqG9QqNWpPfY+Nmzagru4HFBU9LTz3+rzIsVmxs2y737jGxzBVsDESqUQ0EUAiEeKnFE37+//IFVPS1nj7Xm+A2mNUahUGh4aCbA/wen2iIpncP1X6lUnE7wgAg4NDUKtU046LiJYW5n2/KkFh3g8NLhKIQignx4rTPzZi1OMRdpgAwJqThQOVhzDq8cBiMYuSsd3einUF+Xh4bR6AsSR59eo1aLQaoY5MJkNBQR5OfleLpCQDhgaHsLYgT3husZjx7TcnodfHIzIyMqixziYmkORkA2pP1WN0dFSYVNrbL4jqKBQK4d3mgtlswueffTUnbc1E1x9dsKSZF7xfIlq8mPeZ98MF7yQQhZDNlo3e3utoaDiLnEmTRVZWBvpu3EBDw1nYpnwCL9GwAo1nzuFCx6/o7r6M99/bj4GBAb+2Cx9bh87OLtTUnEBu7krRUe2Gp56A1+tF+Vu70WJvRU9PL1pa2lBz9DjaWtsDjnU2MYGsf/JxOBxOfPjBQVzuvoLzvzSj5ugJAIAEY7tDKlUclEolfv6pCQMDNzE0zW5QsHJzV6Gnpxd91/vuqJ2Z8Pl8sNtbcf8DqxesTyJa/Jj3mffDBRcJRCGUmZUBuVwOt9uNjMx0oTw6OhoWcypcLhdstmxRzI4d27A8IQG7yl7HK2VvQq/XYc2aB/3aNhpTYEo14vdLnXiksED0TKvVoOpwJVSqOLxdvgdbi0qwd8+76O3tg04X+KLXbGIC0evjsXdfBdraOlBSXIqDB6pRXPICACAiYmwnSSqVomzXdtTXn8bmTUUoKfY/6p6JpGQDrNmZwqf2FkLzeTtu3foX+fkPLVifRLT4Me8z74cLic/n892+GhHR/LE3t2Lny6/i2PFqpEy6GDaXWuyteGf3Pnzy6TFERUXNSx+TvfFaObKsGXj2uS3z3hcRUbhh3l/8ZBUVFRWhHgQRLS1ffvE1PCMejHq9aG/rwP7KKtyz4m48v/WZeeszYXkClEolli1TQqNR3z7
gDrhcLvT392Pzlo3iy3xEREsU83744UkCES24j6o/Rl1tPRwOJ7RaDWwrc7DtpWKoVHGhHhoREc0D5v3ww0UCERERERGJ8OIyERERERGJcJFAREREREQiXCQQEREREZEIFwlERERERCTCRQIREREREYlwkUBERERERCL/AVXpGJL5Y/R/AAAAAElFTkSuQmCC\n", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "from matplotlib import gridspec\n", "import snlay as snlay\n", "#here we test edge cases where failure happens\n", "#size = np.array([70, 60, 50, 40, 30, 20, 10, 10])\n", "#size = np.array([70, 70, 70, 70, 70, 70, 70, 70])\n", "#size = np.array([30, 30, 30, 30, 30, 30, 30, 30])\n", "\n", "#size = np.array([65, 65, 65, 65, 55, 65, 35, 65])\n", "#size = np.array([65, 35, 45, 35, 45, 35, 45, 35])\n", "\n", "size = np.random.randint(30, 71, 8)\n", "mats = np.array([3, 4, 3, 4, 3, 4, 3, 4])\n", "spec_ac = snlay.calc_spectrum(size, mats, lams)\n", "\n", "print(size)\n", "\n", "\n", "size = (size - 50.0)/20.0\n", "\n", "\n", "\n", "spec = model.predict(np.expand_dims(size, axis = 0))\n", "\n", "spec = np.ravel(spec)\n", "\n", "\n", "fig1 = plt.figure(figsize=(11,3))\n", "gs = gridspec.GridSpec(1, 2, width_ratios=[8, 3]) \n", "\n", "ax = plt.subplot(gs[0])\n", "#ax = fig1.add_subplot(1,2,1)\n", "#ax.set_title('silica coated gold')\n", "ax.set_xlabel('Wavelength (nm)')\n", "ax.set_ylabel('Extinction Efficiency Qe')\n", "ax.set_ylim((0, 6))\n", "ax.set_xlim((300, 1200))\n", "plt.plot(lams, spec_ac,'b', linewidth=1, label='True')\n", "plt.plot(lams, spec, 'r--', linewidth=2, label='predicted')\n", "plt.plot(lams, 10*np.abs(spec_ac - spec)/spec_ac,'k', linewidth=1, label='10x Relative error')\n", "ax.legend(loc='best')\n", "\n", "ax2 = plt.subplot(gs[1])\n", "#fig2 = plt.figure(figsize=(3,3))\n", "#ax2 = fig1.add_subplot(1,2,2)\n", "#ax.set_title('silica coated gold')\n", "ax2.set_xlabel('Wavelength (nm)')\n", "#ax.set_ylabel('Extinction Efficiency Qe')\n", "#ax2.set_ylim((2, 6))\n", "ax2.set_xlim((300, 450))\n", "plt.plot(lams, spec_ac,'b', linewidth=1, label='True')\n", "plt.plot(lams, spec, 'r--', linewidth=2, label='predicted')\n", "#plt.plot(lams, 10*np.abs(spec_ac - spec)/spec_ac,'b', linewidth=1, label='10x Relative error')\n", "#ax.legend(loc='best')\n", "\n", "\n", "\n", "plt.tight_layout()\n", 
"plt.savefig('grid_figure.pdf')\n", "\n", "\n", "\n", "\n" ] }, { "cell_type": "code", "execution_count": 49, "metadata": { "ExecuteTime": { "end_time": "2018-09-28T17:06:50.547581Z", "start_time": "2018-09-28T17:06:48.536649Z" } }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "1.984304666519165\n" ] } ], "source": [ "# loop for hundred runs \n", "import time\n", "\n", "mats = np.array([3, 4, 3, 4, 3, 4, 3, 4])\n", "#spec_ac = snlay.calc_spectrum(size, mats, lams)\n", "\n", "reps = 1000\n", "\n", "\n", "start = time.time()\n", "for ind in np.arange(reps):\n", "    size = np.random.randint(30, 71, 8)\n", "    #spec_ac = snlay.calc_spectrum(size, mats, lams)\n", "    size = (size - 50.0)/20.0\n", "    spec = model.predict(np.expand_dims(size, axis = 0))\n", "    \n", "    \n", "end = time.time()\n", "print(1000*(end - start)/reps) \n", "    \n", "\n", "\n", "\n", "\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Inverse scattering " ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2018-09-06T10:51:07.996778Z", "start_time": "2018-09-06T10:49:19.324Z" } }, "outputs": [], "source": [ "\n", "\n", "model_d = Sequential()\n", "model_d.add(Dense(8, input_dim=1, kernel_initializer='normal', activation='linear', \n", "            name='dummy', use_bias=False))\n", "\n", "for layer in model.layers:\n", "    model_d.add(layer)\n", "\n", "for layer in model_d.layers[1:]:\n", "    layer.trainable = False\n", "\n", "for ind in range(1,len(model_d.layers)):\n", "    model_d.layers[ind].set_weights(model.layers[ind-1].get_weights())\n", "\n", "model_d.compile(loss=naive_percent_loss, optimizer='adam') \n", "    \n", "    \n", "#model_d.summary()\n", "\n", "\n", "# # let us create a target spectrum first\n", "import snlay as snlay\n", "#size = np.array([60, 65, 65, 65, 35, 35, 35, 35])\n", "size = np.random.randint(30, 71, 8)\n", "mats = np.array([3, 4, 3, 4, 3, 4, 3, 4])\n", "target = snlay.calc_spectrum(size, mats, lams)\n", "\n", 
"print(size)\n", "\n", "# #do the training here\n", "xd_t = np.ones((1,1))\n", "yd_t = target.reshape(1,256)\n", "\n", "\n", "\n", "\n", "\n", "history = model_d.fit(xd_t, yd_t,\n", "          batch_size=1,\n", "          epochs=5000, \n", "          verbose=0)\n", "\n", "# #here is the final result\n", "size_out = model_d.get_layer('dummy')\n", "wts = size_out.get_weights()\n", "wts = np.array(wts).ravel()\n", "size_res= 0.5*wts*(size_max - size_min) + size_av\n", "size_res_rounded = np.round(size_res)\n", "\n", "spec_zer = model_d.predict(xd_t).ravel()\n", "achieved = snlay.calc_spectrum(size_res_rounded, mats, lams)\n", "\n", "\n", "fig1 = plt.figure(figsize=(22,5))\n", "ax = fig1.add_subplot(1,1,1)\n", "#plt.plot(lams, spec_zer, label='new model')\n", "plt.plot(lams, target, linewidth=2, label='target')\n", "plt.plot(lams, achieved, '--', linewidth=3, label='achieved')\n", "plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)\n", "\n", "print(size_res_rounded)\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Model shipment" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2018-09-06T10:51:07.997455Z", "start_time": "2018-09-06T10:49:19.326Z" } }, "outputs": [], "source": [ "\n", " \n", "from keras.models import load_model\n", "\n", "#Creates a HDF5 file 'my_model.h5'\n", "model.save('my_model.h5')\n", "\n", "# Deletes the existing model\n", "#del model \n", "\n", "# Returns a compiled model identical to the previous one\n", "#model = load_model('my_model.h5')\n", "\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "ExecuteTime": { "end_time": "2018-09-08T09:40:51.323101Z", "start_time": "2018-09-08T09:40:50.155098Z" } }, "outputs": [], "source": [ "\n", "from keras.models import Model\n", "from keras.layers import Input, Add, Lambda, Dense\n", "import numpy as np\n", "\n", "\n", "def dropper(x):\n", "    ms = 4\n", "    #print(x.shape)\n", "    \n", "    \n", "    \n", "    return x**2\n", "# msk = 
np.array([1,1,1,1,0,0,0,0])\n", "\n", "\n", "a = Input(shape=(1,))\n", "b = Dense(8, input_dim=1, kernel_initializer='normal', activation='linear', \n", " name='dummy', use_bias=False)(a)\n", "b = Lambda(dropper)(b)\n", "b = Dense(256)(b)\n", "# #b = Lambda()\n", "# a = Lambda(dropper)(a)\n", "# \n", "\n", "model = Model(inputs=a, outputs=b)\n", "\n", "# model = Sequential()\n", "# model.add(Dense(256, input_dim=8))\n", "# #model.add(Lambda(lambda x: x**2))\n", "# model.add(Lambda(dropper))\n", "# model.add(Dense(256))\n", "\n", "\n", "\n", "model.compile(loss=naive_percent_loss, optimizer='adam', metrics=[calc_mre_K])\n", "#model.summary()" ] }, { "cell_type": "code", "execution_count": 1, "metadata": { "ExecuteTime": { "end_time": "2018-09-28T10:32:16.469180Z", "start_time": "2018-09-28T10:32:16.452138Z" } }, "outputs": [ { "data": { "text/plain": [ "4" ] }, "execution_count": 1, "metadata": {}, "output_type": "execute_result" } ], "source": [ "2 +2 " ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "ExecuteTime": { "end_time": "2018-09-28T10:32:36.778144Z", "start_time": "2018-09-28T10:32:36.682162Z" } }, "outputs": [], "source": [ "import matplotlib.pyplot as plt" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.5.6" } }, "nbformat": 4, "nbformat_minor": 2 }