Add other optimizers

karllzy 2022-06-27 19:03:10 +08:00
parent 879b8f7d71
commit c2eadb9465
9 changed files with 43694 additions and 4098 deletions

0  01_preprocess.ipynb  (Normal file → Executable file)

File diff suppressed because one or more lines are too long

7256  02_model_training_nadam.ipynb  (Executable file)

File diff suppressed because one or more lines are too long

13253  02_model_training_sgd.ipynb  (Normal file)

File diff suppressed because one or more lines are too long

229  03_model_evaluating.ipynb  (Normal file → Executable file)

@@ -14,7 +14,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 16,
+"execution_count": 2,
 "metadata": {
 "collapsed": false,
 "pycharm": {
@@ -57,7 +57,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 17,
+"execution_count": 4,
 "metadata": {
 "collapsed": false,
 "pycharm": {
@@ -92,7 +92,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 18,
+"execution_count": 11,
 "metadata": {
 "collapsed": false,
 "pycharm": {
@@ -104,18 +104,22 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"plain 5 mse : 0.05133910188824081\n",
-"plain 5 Dry matter content error 0.7758644362065223\n",
-"plain 5 r^2 : 0.902928516828363\n",
-"plain 11 mse : 0.05200769624271875\n",
-"plain 11 Dry matter content error 0.7859685978067217\n",
-"plain 11 r^2 : 0.9003837097594369\n",
-"shortcut 5 mse : 0.051382735052895194\n",
-"shortcut 5 Dry matter content error 0.7765238443272209\n",
-"shortcut 5 r^2 : 0.9027634443691182\n",
-"shortcut11 mse : 0.05078784364469306\n",
-"shortcut11 Dry matter content error 0.7675335217455442\n",
-"shortcut11 r^2 : 0.9050019525259844\n"
+"plain 5 mse: 0.007162414257423199\n",
+"plain 5 rmse : 0.08463104783365971\n",
+"plain 5 Dry matter content error 1.278990434152287\n",
+"plain 5 r^2 : 0.7362122841198271\n",
+"plain 11 mse: 0.027140651722534437\n",
+"plain 11 rmse : 0.16474420087679698\n",
+"plain 11 Dry matter content error 2.4897039844954327\n",
+"plain 11 r^2 : 0.0004249589491984729\n",
+"shortcut 5 mse: 0.007429169596940547\n",
+"shortcut 5 rmse : 0.08619263075774254\n",
+"shortcut 5 Dry matter content error 1.3025899248021375\n",
+"shortcut 5 r^2 : 0.7263878339859644\n",
+"shortcut11 mse: 0.007825262774295792\n",
+"shortcut11 rmse : 0.08846051534043757\n",
+"shortcut11 Dry matter content error 1.3368634303450377\n",
+"shortcut11 r^2 : 0.7117999435379954\n"
 ]
 }
 ],
@ -128,23 +132,208 @@
"models = {'plain 5': plain_5, 'plain 11': plain_11, 'shortcut 5': shortcut5, 'shortcut11': shortcut11}\n", "models = {'plain 5': plain_5, 'plain 11': plain_11, 'shortcut 5': shortcut5, 'shortcut11': shortcut11}\n",
"results = {model_name: model.predict(x_test).reshape((-1, )) for model_name, model in models.items()}\n", "results = {model_name: model.predict(x_test).reshape((-1, )) for model_name, model in models.items()}\n",
"for model_name, model_result in results.items():\n", "for model_name, model_result in results.items():\n",
" rmse = np.sqrt(mean_squared_error(y_test, model_result))\n", " mse = mean_squared_error(y_test, model_result)\n",
" print(model_name, \"mse : \", rmse)\n", " rmse = np.sqrt(mse)\n",
" print(model_name, \"mse: \", mse)\n",
" print(model_name, \"rmse : \", rmse)\n",
" print(model_name, \"Dry matter content error\", retransform(rmse))\n", " print(model_name, \"Dry matter content error\", retransform(rmse))\n",
" print(model_name, \"r^2 :\", r2_score(y_test, model_result))" " print(model_name, \"r^2 :\", r2_score(y_test, model_result))"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 12,
"metadata": { "metadata": {
"collapsed": false, "collapsed": false,
"pycharm": { "pycharm": {
"name": "#%%\n" "name": "#%%\n"
} }
}, },
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"plain 5 nadam mse: 0.0025042022303828344\n",
"plain 5 nadam rmse : 0.0500420046599138\n",
"plain 5 nadam Dry matter content error 0.7562619972711523\n",
"plain 5 nadam r^2 : 0.9077716308058986\n",
"plain 11 nadam mse: 0.0027148425657891745\n",
"plain 11 nadam rmse : 0.05210415113778531\n",
"plain 11 nadam Dry matter content error 0.7874262766524306\n",
"plain 11 nadam r^2 : 0.900013864925283\n",
"shortcut 5 nadam mse: 0.0026949613632585197\n",
"shortcut 5 nadam rmse : 0.051913017281396\n",
"shortcut 5 nadam Dry matter content error 0.7845377578378575\n",
"shortcut 5 nadam r^2 : 0.9007460785080253\n",
"shortcut11 nadam mse: 0.002492666414101387\n",
"shortcut11 nadam rmse : 0.049926610280504595\n",
"shortcut11 nadam Dry matter content error 0.754518094634978\n",
"shortcut11 nadam r^2 : 0.9081964884751603\n"
]
}
],
"source": [
"plain_5, plain_11 = load_model('./checkpoints/plain5_nadam.hdf5'), load_model('./checkpoints/plain11_nadam.hdf5')\n",
"shortcut5, shortcut11 = load_model('./checkpoints/shortcut5_nadam.hdf5'), load_model('./checkpoints/shortcut11_nadam.hdf5')\n",
"models = {'plain 5 nadam': plain_5, 'plain 11 nadam': plain_11, 'shortcut 5 nadam': shortcut5, 'shortcut11 nadam': shortcut11}\n",
"results = {model_name: model.predict(x_test).reshape((-1, )) for model_name, model in models.items()}\n",
"for model_name, model_result in results.items():\n",
" mse = mean_squared_error(y_test, model_result)\n",
" rmse = np.sqrt(mse)\n",
" print(model_name, \"mse: \", mse)\n",
" print(model_name, \"rmse : \", rmse)\n",
" print(model_name, \"Dry matter content error\", retransform(rmse))\n",
" print(model_name, \"r^2 :\", r2_score(y_test, model_result))"
]
},
{
"cell_type": "code",
"execution_count": 13,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"plain 5 rmsprop mse: 0.0025762880890505646\n",
"plain 5 rmsprop rmse : 0.05075714815718634\n",
"plain 5 rmsprop Dry matter content error 0.7670696348399975\n",
"plain 5 rmsprop r^2 : 0.905116748901307\n",
"plain 11 rmsprop mse: 0.0026627965381971945\n",
"plain 11 rmsprop rmse : 0.051602291985891426\n",
"plain 11 rmsprop Dry matter content error 0.7798419081376315\n",
"plain 11 rmsprop r^2 : 0.9019306910464329\n",
"shortcut 5 rmsprop mse: 0.002591777512557128\n",
"shortcut 5 rmsprop rmse : 0.0509095031654909\n",
"shortcut 5 rmsprop Dry matter content error 0.7693721066066205\n",
"shortcut 5 rmsprop r^2 : 0.9045462820865926\n",
"shortcut11 rmsprop mse: 0.0025523285716201384\n",
"shortcut11 rmsprop rmse : 0.050520575725343214\n",
"shortcut11 rmsprop Dry matter content error 0.7634944235545812\n",
"shortcut11 rmsprop r^2 : 0.9059991645434926\n"
]
}
],
"source": [
"plain_5, plain_11 = load_model('./checkpoints/plain5_rmsprop.hdf5'), load_model('./checkpoints/plain11_rmsprop.hdf5')\n",
"shortcut5, shortcut11 = load_model('./checkpoints/shortcut5_rmsprop.hdf5'), load_model('./checkpoints/shortcut11_rmsprop.hdf5')\n",
"models = {'plain 5 rmsprop': plain_5, 'plain 11 rmsprop': plain_11, 'shortcut 5 rmsprop': shortcut5, 'shortcut11 rmsprop': shortcut11}\n",
"results = {model_name: model.predict(x_test).reshape((-1, )) for model_name, model in models.items()}\n",
"for model_name, model_result in results.items():\n",
" mse = mean_squared_error(y_test, model_result)\n",
" rmse = np.sqrt(mse)\n",
" print(model_name, \"mse: \", mse)\n",
" print(model_name, \"rmse : \", rmse)\n",
" print(model_name, \"Dry matter content error\", retransform(rmse))\n",
" print(model_name, \"r^2 :\", r2_score(y_test, model_result))"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "markdown",
"source": [],
"metadata": {
"collapsed": false
}
},
{
"cell_type": "code",
"execution_count": 9,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"plain 5 sgd mse : 0.11134143767715826\n",
"plain 5 sgd Dry matter content error 1.682652375919312\n",
"plain 5 sgd r^2 : 0.543428518577538\n",
"plain 11 sgd mse : 0.1281311162291065\n",
"plain 11 sgd Dry matter content error 1.936387131781486\n",
"plain 11 sgd r^2 : 0.3953495916351124\n",
"shortcut 5 sgd mse : 0.07824195777158978\n",
"shortcut 5 sgd Dry matter content error 1.1824350294692925\n",
"shortcut 5 sgd r^2 : 0.7745373801958391\n",
"shortcut11 sgd mse : 0.09167697720606416\n",
"shortcut11 sgd Dry matter content error 1.3854723518136416\n",
"shortcut11 sgd r^2 : 0.690460767243821\n"
]
}
],
"source": [
"plain_5, plain_11 = load_model('./checkpoints/plain5_sgd.hdf5'), load_model('./checkpoints/plain11_sgd.hdf5')\n",
"shortcut5, shortcut11 = load_model('./checkpoints/shortcut5_sgd.hdf5'), load_model('./checkpoints/shortcut11_sgd.hdf5')\n",
"models = {'plain 5 sgd': plain_5, 'plain 11 sgd': plain_11, 'shortcut 5 sgd': shortcut5, 'shortcut11 sgd': shortcut11}\n",
"results = {model_name: model.predict(x_test).reshape((-1, )) for model_name, model in models.items()}\n",
"for model_name, model_result in results.items():\n",
" rmse = np.sqrt(mean_squared_error(y_test, model_result))\n",
" print(model_name, \"mse : \", rmse)\n",
" print(model_name, \"Dry matter content error\", retransform(rmse))\n",
" print(model_name, \"r^2 :\", r2_score(y_test, model_result))"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": 10,
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"plain 5 adagrad mse : 0.09175815616893311\n",
"plain 5 adagrad Dry matter content error 1.3866991724618731\n",
"plain 5 adagrad r^2 : 0.6899123373115493\n",
"plain 11 adagrad mse : 0.10080308998502606\n",
"plain 11 adagrad Dry matter content error 1.5233911327346656\n",
"plain 11 adagrad r^2 : 0.6257663620335936\n",
"shortcut 5 adagrad mse : 0.07867384574842859\n",
"shortcut 5 adagrad Dry matter content error 1.18896195552234\n",
"shortcut 5 adagrad r^2 : 0.7720414471534031\n",
"shortcut11 adagrad mse : 0.08402036572248298\n",
"shortcut11 adagrad Dry matter content error 1.2697614738771157\n",
"shortcut11 adagrad r^2 : 0.7400054367232816\n"
]
}
],
"source": [
"plain_5, plain_11 = load_model('./checkpoints/plain5_adagrad.hdf5'), load_model('./checkpoints/plain11_adagrad.hdf5')\n",
"shortcut5, shortcut11 = load_model('./checkpoints/shortcut5_adagrad.hdf5'), load_model('./checkpoints/shortcut11_adagrad.hdf5')\n",
"models = {'plain 5 adagrad': plain_5, 'plain 11 adagrad': plain_11, 'shortcut 5 adagrad': shortcut5, 'shortcut11 adagrad': shortcut11}\n",
"results = {model_name: model.predict(x_test).reshape((-1, )) for model_name, model in models.items()}\n",
"for model_name, model_result in results.items():\n",
" rmse = np.sqrt(mean_squared_error(y_test, model_result))\n",
" print(model_name, \"mse : \", rmse)\n",
" print(model_name, \"Dry matter content error\", retransform(rmse))\n",
" print(model_name, \"r^2 :\", r2_score(y_test, model_result))"
],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
{
"cell_type": "code",
"execution_count": null,
"outputs": [], "outputs": [],
"source": [] "source": [],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
} }
], ],
"metadata": { "metadata": {
@@ -171,4 +360,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 0
 }
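
The evaluation cells above repeat the same load-and-score pattern once per optimizer. A minimal consolidated sketch of that loop follows; it assumes the checkpoint naming checkpoints/<architecture>_<method>.hdf5 introduced by this commit, and that x_test, y_test and retransform are already defined by the earlier cells of the notebook.

# Consolidated version of the per-optimizer evaluation cells (sketch, not part of the commit).
# Assumes checkpoints written as checkpoints/<architecture>_<method>.hdf5 by models.py,
# and that x_test, y_test and retransform() come from the earlier notebook cells.
import numpy as np
from keras.models import load_model
from sklearn.metrics import mean_squared_error, r2_score

architectures = ['plain5', 'plain11', 'shortcut5', 'shortcut11']
methods = ['nadam', 'rmsprop', 'sgd', 'adagrad']

for method in methods:
    for arch in architectures:
        model = load_model(f'./checkpoints/{arch}_{method}.hdf5')
        pred = model.predict(x_test).reshape((-1,))
        mse = mean_squared_error(y_test, pred)
        rmse = np.sqrt(mse)
        print(arch, method, "mse:", mse)
        print(arch, method, "rmse:", rmse)
        print(arch, method, "Dry matter content error", retransform(rmse))
        print(arch, method, "r^2:", r2_score(y_test, pred))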

File diff suppressed because it is too large

163  models.py  (Normal file → Executable file)

@@ -2,7 +2,8 @@ from tkinter import N
 import keras.callbacks
 import keras.layers as KL
 from keras import Model
-from keras.optimizers import adam_v2
+from keras.optimizers import adam_v2, nadam_v2, adagrad_v2
+from tensorflow.keras.optimizers import SGD, RMSprop


 class Plain5(object):
@@ -33,9 +34,22 @@ class Plain5(object):
         model = Model(input_layer, x)
         return model

-    def fit(self, x, y, x_val, y_val, epoch, batch_size):
-        self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
-        checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/plain5.hdf5', monitor='val_loss',
+    def fit(self, x, y, x_val, y_val, epoch, batch_size, method='adam'):
+        save = f'checkpoints/plain5_{method}.hdf5'
+        if method == "adam":
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'nadam':
+            optimizer = nadam_v2.Nadam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'adagrad':
+            optimizer = adagrad_v2.Adagrad(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'sgd':
+            optimizer = SGD(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'rmsprop':
+            optimizer = RMSprop(learning_rate=0.01 * (batch_size / 256))
+        else:
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        self.model.compile(loss='mse', optimizer=optimizer)
+        checkpoint = keras.callbacks.ModelCheckpoint(filepath=save, monitor='val_loss',
                                                      mode="min", save_best_only=True)
         early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0,
                                                    patience=1000, verbose=0, mode='auto')
@@ -77,12 +91,25 @@ class Residual5(object):
         model = Model(input_layer, x)
         return model

-    def fit(self, x, y, x_val, y_val, epoch, batch_size):
-        self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
-        checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/res5.hdf5', monitor='val_loss',
+    def fit(self, x, y, x_val, y_val, epoch, batch_size, method='adam'):
+        save = f'checkpoints/res5_{method}.hdf5'
+        if method == "adam":
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'nadam':
+            optimizer = nadam_v2.Nadam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'adagrad':
+            optimizer = adagrad_v2.Adagrad(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'sgd':
+            optimizer = SGD(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'rmsprop':
+            optimizer = RMSprop(learning_rate=0.01 * (batch_size / 256))
+        else:
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        self.model.compile(loss='mse', optimizer=optimizer)
+        checkpoint = keras.callbacks.ModelCheckpoint(filepath=save, monitor='val_loss',
                                                      mode="min", save_best_only=True)
-        early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0,
-                                                   patience=1000, verbose=0, mode='auto')
+        early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0, patience=1000,
+                                                   verbose=0, mode='auto')
         lr_decay = keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=25, min_delta=1e-6)
         callbacks = [checkpoint, early_stop, lr_decay]
         history = self.model.fit(x, y, validation_data=(x_val, y_val), epochs=epoch, verbose=1,
@@ -120,10 +147,22 @@ class ShortCut5(object):
         model = Model(input_layer, x)
         return model

-    def fit(self, x, y, x_val, y_val, epoch, batch_size):
-        self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
-        checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/shortcut5.hdf5', monitor='val_loss',
+    def fit(self, x, y, x_val, y_val, epoch, batch_size, method='adam'):
+        save = f'checkpoints/shortcut5_{method}.hdf5'
+        if method == "adam":
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'nadam':
+            optimizer = nadam_v2.Nadam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'adagrad':
+            optimizer = adagrad_v2.Adagrad(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'sgd':
+            optimizer = SGD(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'rmsprop':
+            optimizer = RMSprop(learning_rate=0.01 * (batch_size / 256))
+        else:
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        self.model.compile(loss='mse', optimizer=optimizer)
+        checkpoint = keras.callbacks.ModelCheckpoint(filepath=save, monitor='val_loss',
                                                      mode="min", save_best_only=True)
         early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0,
                                                    patience=1000, verbose=0, mode='auto')
@@ -183,8 +222,22 @@ class ShortCut11(object):
         model = Model(input_layer, x)
         return model

-    def fit(self, x, y, x_val, y_val, epoch, batch_size, save='checkpoints/shortcut11.hdf5', is_show=True):
-        self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
+    def fit(self, x, y, x_val, y_val, epoch, batch_size, is_show=True,
+            method='adam'):
+        save = f'checkpoints/shortcut11_{method}.hdf5'
+        if method == "adam":
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'nadam':
+            optimizer = nadam_v2.Nadam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'adagrad':
+            optimizer = adagrad_v2.Adagrad(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'sgd':
+            optimizer = SGD(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'rmsprop':
+            optimizer = RMSprop(learning_rate=0.01 * (batch_size / 256))
+        else:
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        self.model.compile(loss='mse', optimizer=optimizer)
         callbacks = []
         checkpoint = keras.callbacks.ModelCheckpoint(filepath=save, monitor='val_loss',
                                                      mode="min", save_best_only=True)
@@ -248,9 +301,23 @@ class Plain11(object):
         model = Model(input_layer, x)
         return model

-    def fit(self, x, y, x_val, y_val, epoch, batch_size):
-        self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
-        checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/plain11.hdf5', monitor='val_loss',
+    def fit(self, x, y, x_val, y_val, epoch, batch_size, method='adam'):
+        save = f'checkpoints/plain11_{method}.hdf5'
+        if method == "adam":
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'nadam':
+            optimizer = nadam_v2.Nadam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'adagrad':
+            optimizer = adagrad_v2.Adagrad(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'sgd':
+            optimizer = SGD(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'rmsprop':
+            optimizer = RMSprop(learning_rate=0.01 * (batch_size / 256))
+        else:
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        self.model.compile(loss='mse', optimizer=optimizer)
+        checkpoint = keras.callbacks.ModelCheckpoint(filepath=save, monitor='val_loss',
                                                      mode="min", save_best_only=True)
         early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=1e-6,
                                                    patience=200, verbose=0, mode='auto')
@@ -294,9 +361,21 @@ class SimpleCNN(object):
         model = Model(input_layer, x)
         return model

-    def fit(self, x, y, x_val, y_val, epoch, batch_size):
-        self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
-        checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/plain5.hdf5', monitor='val_loss',
+    def fit(self, x, y, x_val, y_val, epoch, batch_size, method='adam'):
+        if method == "adam":
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'nadam':
+            optimizer = nadam_v2.Nadam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'adagrad':
+            optimizer = adagrad_v2.Adagrad(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'sgd':
+            optimizer = SGD(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'rmsprop':
+            optimizer = RMSprop(learning_rate=0.01 * (batch_size / 256))
+        else:
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        self.model.compile(loss='mse', optimizer=optimizer)
+        checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/simplecnn.hdf5', monitor='val_loss',
                                                      mode="min", save_best_only=True)
         early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0,
                                                    patience=1000, verbose=0, mode='auto')
@@ -307,6 +386,48 @@
         return history


+class DNN(object):
+    def __init__(self, model_path=None, input_shape=None):
+        self.model = None
+        self.input_shape = input_shape
+        if model_path is not None:
+            pass
+        else:
+            self.model = self.build_model()
+
+    def build_model(self):
+        input_layer = KL.Input(self.input_shape, name='input')
+        x = KL.Dense(200, activation='relu', name="dense0")(input_layer)
+        x = KL.Dense(100, activation='relu', name='dense1')(x)
+        x = KL.Dense(1, activation='sigmoid', name='output')(x)
+        model = Model(input_layer, x)
+        return model
+
+    def fit(self, x, y, x_val, y_val, epoch, batch_size, method='adam'):
+        if method == "adam":
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'nadam':
+            optimizer = nadam_v2.Nadam(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'adagrad':
+            optimizer = adagrad_v2.Adagrad(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'sgd':
+            optimizer = SGD(learning_rate=0.01 * (batch_size / 256))
+        elif method == 'rmsprop':
+            optimizer = RMSprop(learning_rate=0.01 * (batch_size / 256))
+        else:
+            optimizer = adam_v2.Adam(learning_rate=0.01 * (batch_size / 256))
+        self.model.compile(loss='mse', optimizer=optimizer)
+        checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/dnn.hdf5', monitor='val_loss',
+                                                     mode="min", save_best_only=True)
+        early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=1e-6,
+                                                   patience=200, verbose=0, mode='auto')
+        lr_decay = keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.5,
+                                                     patience=25, min_delta=1e-6)
+        callbacks = [checkpoint, early_stop, lr_decay]
+        history = self.model.fit(x, y, validation_data=(x_val, y_val), epochs=epoch, verbose=1,
+                                 callbacks=callbacks, batch_size=batch_size)
+        return history
+
+
 if __name__ == '__main__':
     # plain5 = Plain5(model_path=None, input_shape=(1, 102))
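
Every fit() method in models.py now repeats the same if/elif chain to map the method name to an optimizer. As a sketch only (the helper name build_optimizer is not part of this commit), that selection could be factored into one function shared by Plain5, Residual5, ShortCut5, ShortCut11, Plain11, SimpleCNN and DNN:

# Hypothetical refactor (not in this commit): one helper for the optimizer-selection chain
# that each fit() method currently duplicates.
from keras.optimizers import adam_v2, nadam_v2, adagrad_v2
from tensorflow.keras.optimizers import SGD, RMSprop


def build_optimizer(method, batch_size):
    """Return the optimizer named by `method`, scaling the base LR with batch size as in models.py."""
    lr = 0.01 * (batch_size / 256)
    builders = {
        'adam': lambda: adam_v2.Adam(learning_rate=lr),
        'nadam': lambda: nadam_v2.Nadam(learning_rate=lr),
        'adagrad': lambda: adagrad_v2.Adagrad(learning_rate=lr),
        'sgd': lambda: SGD(learning_rate=lr),
        'rmsprop': lambda: RMSprop(learning_rate=lr),
    }
    # Unknown names fall back to Adam, matching the else branch in the commit.
    return builders.get(method, builders['adam'])()


# A fit() body would then reduce to:
# self.model.compile(loss='mse', optimizer=build_optimizer(method, batch_size))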