From 17e899207330081a35023f8d67e8825fbf19b055 Mon Sep 17 00:00:00 2001
From: karllzy
Date: Wed, 11 May 2022 11:00:55 +0800
Subject: [PATCH] First Commit
---
.gitignore | 219 +
README.md | 21 +
model_evaluating.ipynb | 155 +
model_training.ipynb | 7124 +++++++++++++++++++++++++++
models.py | 264 +
preprocess.ipynb | 127 +
preprocess/draw_pics_origin.m | 45 +
preprocess/draw_pics_preprocessed.m | 48 +
preprocess/pics/preprocessed.png | Bin 0 -> 91504 bytes
preprocess/pics/raw.png | Bin 0 -> 179507 bytes
preprocess/preprocess.m | 8 +
preprocess/preprocess_mango.m | 15 +
preprocess/train_test_split.m | 15 +
utils.py | 153 +
14 files changed, 8194 insertions(+)
create mode 100644 .gitignore
create mode 100644 README.md
create mode 100644 model_evaluating.ipynb
create mode 100644 model_training.ipynb
create mode 100644 models.py
create mode 100644 preprocess.ipynb
create mode 100755 preprocess/draw_pics_origin.m
create mode 100755 preprocess/draw_pics_preprocessed.m
create mode 100644 preprocess/pics/preprocessed.png
create mode 100644 preprocess/pics/raw.png
create mode 100755 preprocess/preprocess.m
create mode 100755 preprocess/preprocess_mango.m
create mode 100755 preprocess/train_test_split.m
create mode 100755 utils.py
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..187847f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,219 @@
+preprocess/dataset/*
+checkpoints/*
+.idea
+### JetBrains template
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/jarRepositories.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+### Python template
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+!/checkpoints/
+!/preprocess/dataset/
+!/preprocess/dataset/
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..0157bab
--- /dev/null
+++ b/README.md
@@ -0,0 +1,21 @@
+# SCNet: A deep learning network framework for analyzing near-infrared spectroscopy using short-cut
+## Pre-processing
+
+Since the method we propose is a regression model, the classification dataset (wheat kernel) is not used in this work.
+
+The other three datasets (corn, marzipan, soil) were preprocessed manually with Matlab and saved in subdirectories of the `./preprocess` dir. The original data of these three datasets are stored in `./preprocess/dataset/`.
+
+The mango dataset is not in Matlab .m file format, so we save them with the `process.py`.
+Meanwhile, we drop the useless part and only keep the data between 684 and 990 nm.
+
+> The data set used in this study comprises a total of 11,691 NIR spectra (684–990 nm in 3 nm sampling with a total 103 variables) and DM measurements performed on 4675 mango fruit across 4 harvest seasons 2015, 2016, 2017 and 2018 [24].
+
+The detailed preprocessing progress can be found in [./preprocess.ipynb](./preprocess.ipynb)
+
+## Network Training
+
+In order to show that our network can prevent the degradation problem, we conduct an experiment that compares the training loss curves of four models. The detailed information can be found in [model_training.ipynb](./model_training.ipynb).
+
+## Network evaluation
+After training our models on the training set, we evaluate them on the testing dataset that was set aside earlier. The evaluation is done with [model_evaluation.ipynb](model_evaluating.ipynb).
+
diff --git a/model_evaluating.ipynb b/model_evaluating.ipynb
new file mode 100644
index 0000000..2c5fd23
--- /dev/null
+++ b/model_evaluating.ipynb
@@ -0,0 +1,155 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "collapsed": true,
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "# Experiment 2: Model Evaluating"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 29,
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "from keras.models import load_model\n",
+ "from matplotlib import ticker\n",
+ "from scipy.io import loadmat\n",
+ "from sklearn.model_selection import train_test_split\n",
+ "from sklearn.metrics import mean_squared_error\n",
+ "import matplotlib.pyplot as plt\n",
+ "%matplotlib inline"
+ ],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ }
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "In this experiment, we load model weights from the experiment1 and evaluate them on test dataset."
+ ],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ }
+ },
+ {
+ "cell_type": "markdown",
+ "source": [],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ }
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 30,
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "shape of data:\n",
+ "x_train: (5728, 1, 102), y_train: (5728, 1),\n",
+ "x_val: (2455, 1, 102), y_val: (2455, 1)\n",
+ "x_test: (3508, 1, 102), y_test: (3508, 1)\n"
+ ]
+ }
+ ],
+ "source": [
+ "data = loadmat('./preprocess/dataset/mango/mango_dm_split.mat')\n",
+ "x_train, y_train, x_test, y_test = data['x_train'], data['y_train'], data['x_test'], data['y_test']\n",
+ "x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, test_size=0.3, random_state=12, shuffle=True)\n",
+ "x_train, x_val, x_test = x_train[:, np.newaxis, :], x_val[:, np.newaxis, :], x_test[:, np.newaxis, :]\n",
+ "print(f\"shape of data:\\n\"\n",
+ " f\"x_train: {x_train.shape}, y_train: {y_train.shape},\\n\"\n",
+ " f\"x_val: {x_val.shape}, y_val: {y_val.shape}\\n\"\n",
+ " f\"x_test: {x_test.shape}, y_test: {y_test.shape}\")"
+ ],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ }
+ },
+ {
+ "cell_type": "code",
+ "source": [
+ "## Build model and load weights\n",
+ "plain_5, plain_11 = load_model('./checkpoints/plain5.hdf5'), load_model('./checkpoints/plain11.hdf5')\n",
+ "shortcut5, shortcut11 = load_model('./checkpoints/shortcut5.hdf5'), load_model('./checkpoints/shortcut11.hdf5')\n",
+ "models = {'plain 5': plain_5, 'plain 11': plain_11, 'shortcut 5': shortcut5, 'shortcut11': shortcut11}\n",
+ "results = {model_name: model.predict(x_test).reshape((-1, )) for model_name, model in models.items()}\n",
+ "for model_name, model_result in results.items():\n",
+ " print(model_name, \" : \", mean_squared_error(y_test, model_result)*100, \"%\")"
+ ],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "execution_count": 31,
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "plain 5 : 0.2707851525589865 %\n",
+ "plain 11 : 0.26240810192725905 %\n",
+ "shortcut 5 : 0.28330442301217196 %\n",
+ "shortcut11 : 0.25743312483685266 %\n"
+ ]
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 31,
+ "outputs": [],
+ "source": [],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ }
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
\ No newline at end of file
diff --git a/model_training.ipynb b/model_training.ipynb
new file mode 100644
index 0000000..1e0aecd
--- /dev/null
+++ b/model_training.ipynb
@@ -0,0 +1,7124 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "# Model training Experiment"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+    "In order to provide the experimental evidence to back up the statement in section 2.3.2 that our shortcut block can prevent the degradation problem.\n",
+ "\n",
+    "Plain Networks. We first evaluate 5-layer and 11-layer plain nets. Compared to the 5-layer one, the 11-layer one has three times as many convolutional layers."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "import pickle\n",
+ "import time\n",
+ "\n",
+ "import numpy as np\n",
+ "from matplotlib import ticker\n",
+ "from scipy.io import loadmat\n",
+ "from models import Plain5, Plain11, ShortCut5, ShortCut11\n",
+ "from sklearn.model_selection import train_test_split\n",
+ "import matplotlib.pyplot as plt\n",
+ "%matplotlib notebook"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "## Load data\n",
+ "load data and split them into train, val, test"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "import random\n",
+ "from numpy.random import seed\n",
+ "import tensorflow\n",
+ "import time\n",
+ "seed(4750)\n",
+ "tensorflow.random.set_seed(4750)\n",
+ "time1 = time.time()\n",
+ "data = loadmat('./preprocess/dataset/mango/mango_dm_split.mat')\n",
+ "x_train, y_train, x_test, y_test = data['x_train'], data['y_train'], data['x_test'], data['y_test']\n",
+ "x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, test_size=0.3, random_state=12, shuffle=True)\n",
+ "x_train, x_val, x_test = x_train[:, np.newaxis, :], x_val[:, np.newaxis, :], x_test[:, np.newaxis, :]\n",
+ "print(f\"shape of data:\\n\"\n",
+ " f\"x_train: {x_train.shape}, y_train: {y_train.shape},\\n\"\n",
+ " f\"x_val: {x_val.shape}, y_val: {y_val.shape}\\n\"\n",
+ " f\"x_test: {x_test.shape}, y_test: {y_test.shape}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "## Build Plain networks and Training"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "plain_5, plain_11 = Plain5(input_shape=(1, 102)), Plain11(input_shape=(1, 102))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2022-05-10 17:09:33.906952: W tensorflow/core/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Epoch 1/1024\n",
+ "90/90 [==============================] - 1s 4ms/step - loss: 0.0236 - val_loss: 0.0272 - lr: 0.0025\n",
+ "Epoch 2/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0188 - val_loss: 0.0297 - lr: 0.0025\n",
+ "Epoch 3/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0127 - val_loss: 0.0301 - lr: 0.0025\n",
+ "Epoch 4/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0088 - val_loss: 0.0315 - lr: 0.0025\n",
+ "Epoch 5/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0081 - val_loss: 0.0813 - lr: 0.0025\n",
+ "Epoch 6/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0072 - val_loss: 0.0309 - lr: 0.0025\n",
+ "Epoch 7/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0069 - val_loss: 0.0930 - lr: 0.0025\n",
+ "Epoch 8/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0066 - val_loss: 0.0313 - lr: 0.0025\n",
+ "Epoch 9/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0058 - val_loss: 0.0597 - lr: 0.0025\n",
+ "Epoch 10/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0050 - val_loss: 0.0376 - lr: 0.0025\n",
+ "Epoch 11/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0048 - val_loss: 0.0731 - lr: 0.0025\n",
+ "Epoch 12/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0047 - val_loss: 0.3068 - lr: 0.0025\n",
+ "Epoch 13/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0046 - val_loss: 0.0852 - lr: 0.0025\n",
+ "Epoch 14/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0048 - val_loss: 0.2993 - lr: 0.0025\n",
+ "Epoch 15/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0046 - val_loss: 0.3298 - lr: 0.0025\n",
+ "Epoch 16/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0046 - val_loss: 0.0272 - lr: 0.0025\n",
+ "Epoch 17/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0045 - val_loss: 0.0434 - lr: 0.0025\n",
+ "Epoch 18/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0044 - val_loss: 0.0611 - lr: 0.0025\n",
+ "Epoch 19/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0043 - val_loss: 0.3292 - lr: 0.0025\n",
+ "Epoch 20/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0042 - val_loss: 0.0732 - lr: 0.0025\n",
+ "Epoch 21/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0041 - val_loss: 0.0785 - lr: 0.0025\n",
+ "Epoch 22/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0043 - val_loss: 0.0719 - lr: 0.0025\n",
+ "Epoch 23/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0042 - val_loss: 0.2986 - lr: 0.0025\n",
+ "Epoch 24/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0040 - val_loss: 0.3209 - lr: 0.0025\n",
+ "Epoch 25/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0041 - val_loss: 0.1807 - lr: 0.0025\n",
+ "Epoch 26/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0041 - val_loss: 0.0513 - lr: 0.0025\n",
+ "Epoch 27/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.0834 - lr: 0.0012\n",
+ "Epoch 28/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0038 - val_loss: 0.1304 - lr: 0.0012\n",
+ "Epoch 29/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0039 - val_loss: 0.2374 - lr: 0.0012\n",
+ "Epoch 30/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.1049 - lr: 0.0012\n",
+ "Epoch 31/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0041 - val_loss: 0.0522 - lr: 0.0012\n",
+ "Epoch 32/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0039 - val_loss: 0.2504 - lr: 0.0012\n",
+ "Epoch 33/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0040 - val_loss: 0.1557 - lr: 0.0012\n",
+ "Epoch 34/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0037 - val_loss: 0.0697 - lr: 0.0012\n",
+ "Epoch 35/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0039 - val_loss: 0.0616 - lr: 0.0012\n",
+ "Epoch 36/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0040 - val_loss: 0.3263 - lr: 0.0012\n",
+ "Epoch 37/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.1540 - lr: 0.0012\n",
+ "Epoch 38/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0037 - val_loss: 0.0840 - lr: 0.0012\n",
+ "Epoch 39/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0039 - val_loss: 0.3193 - lr: 0.0012\n",
+ "Epoch 40/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0037 - val_loss: 0.1026 - lr: 0.0012\n",
+ "Epoch 41/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0041 - val_loss: 0.2626 - lr: 0.0012\n",
+ "Epoch 42/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.1396 - lr: 0.0012\n",
+ "Epoch 43/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.0453 - lr: 0.0012\n",
+ "Epoch 44/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.0897 - lr: 0.0012\n",
+ "Epoch 45/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0038 - val_loss: 0.3225 - lr: 0.0012\n",
+ "Epoch 46/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.3101 - lr: 0.0012\n",
+ "Epoch 47/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0038 - val_loss: 0.3089 - lr: 0.0012\n",
+ "Epoch 48/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0037 - val_loss: 0.0290 - lr: 0.0012\n",
+ "Epoch 49/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.0591 - lr: 0.0012\n",
+ "Epoch 50/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0036 - val_loss: 0.1132 - lr: 0.0012\n",
+ "Epoch 51/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.3202 - lr: 0.0012\n",
+ "Epoch 52/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0035 - val_loss: 0.0190 - lr: 6.2500e-04\n",
+ "Epoch 53/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0036 - val_loss: 0.0558 - lr: 6.2500e-04\n",
+ "Epoch 54/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0035 - val_loss: 0.1351 - lr: 6.2500e-04\n",
+ "Epoch 55/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0035 - val_loss: 0.1235 - lr: 6.2500e-04\n",
+ "Epoch 56/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0036 - val_loss: 0.2803 - lr: 6.2500e-04\n",
+ "Epoch 57/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0035 - val_loss: 0.0929 - lr: 6.2500e-04\n",
+ "Epoch 58/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0037 - val_loss: 0.0510 - lr: 6.2500e-04\n",
+ "Epoch 59/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0035 - val_loss: 0.0590 - lr: 6.2500e-04\n",
+ "Epoch 60/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0036 - val_loss: 0.3023 - lr: 6.2500e-04\n",
+ "Epoch 61/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0035 - val_loss: 0.1735 - lr: 6.2500e-04\n",
+ "Epoch 62/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0036 - val_loss: 0.1487 - lr: 6.2500e-04\n",
+ "Epoch 63/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.1859 - lr: 6.2500e-04\n",
+ "Epoch 64/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0035 - val_loss: 0.3022 - lr: 6.2500e-04\n",
+ "Epoch 65/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.2015 - lr: 6.2500e-04\n",
+ "Epoch 66/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0036 - val_loss: 0.3198 - lr: 6.2500e-04\n",
+ "Epoch 67/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.2997 - lr: 6.2500e-04\n",
+ "Epoch 68/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.1298 - lr: 6.2500e-04\n",
+ "Epoch 69/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.1149 - lr: 6.2500e-04\n",
+ "Epoch 70/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.1068 - lr: 6.2500e-04\n",
+ "Epoch 71/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.0951 - lr: 6.2500e-04\n",
+ "Epoch 72/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0429 - lr: 6.2500e-04\n",
+ "Epoch 73/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.2853 - lr: 6.2500e-04\n",
+ "Epoch 74/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.0702 - lr: 6.2500e-04\n",
+ "Epoch 75/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.2109 - lr: 6.2500e-04\n",
+ "Epoch 76/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.1927 - lr: 6.2500e-04\n",
+ "Epoch 77/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0035 - val_loss: 0.0741 - lr: 6.2500e-04\n",
+ "Epoch 78/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0521 - lr: 3.1250e-04\n",
+ "Epoch 79/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.1454 - lr: 3.1250e-04\n",
+ "Epoch 80/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.0716 - lr: 3.1250e-04\n",
+ "Epoch 81/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.0251 - lr: 3.1250e-04\n",
+ "Epoch 82/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.1286 - lr: 3.1250e-04\n",
+ "Epoch 83/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0273 - lr: 3.1250e-04\n",
+ "Epoch 84/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0988 - lr: 3.1250e-04\n",
+ "Epoch 85/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0657 - lr: 3.1250e-04\n",
+ "Epoch 86/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0035 - val_loss: 0.0328 - lr: 3.1250e-04\n",
+ "Epoch 87/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0158 - lr: 3.1250e-04\n",
+ "Epoch 88/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.0611 - lr: 3.1250e-04\n",
+ "Epoch 89/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0537 - lr: 3.1250e-04\n",
+ "Epoch 90/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.1233 - lr: 3.1250e-04\n",
+ "Epoch 91/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.1027 - lr: 3.1250e-04\n",
+ "Epoch 92/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.0456 - lr: 3.1250e-04\n",
+ "Epoch 93/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0112 - lr: 3.1250e-04\n",
+ "Epoch 94/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0546 - lr: 3.1250e-04\n",
+ "Epoch 95/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0120 - lr: 3.1250e-04\n",
+ "Epoch 96/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0978 - lr: 3.1250e-04\n",
+ "Epoch 97/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0384 - lr: 3.1250e-04\n",
+ "Epoch 98/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0324 - lr: 3.1250e-04\n",
+ "Epoch 99/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.1121 - lr: 3.1250e-04\n",
+ "Epoch 100/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0442 - lr: 3.1250e-04\n",
+ "Epoch 101/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.1185 - lr: 3.1250e-04\n",
+ "Epoch 102/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.0731 - lr: 3.1250e-04\n",
+ "Epoch 103/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.1457 - lr: 3.1250e-04\n",
+ "Epoch 104/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.2414 - lr: 3.1250e-04\n",
+ "Epoch 105/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.0337 - lr: 3.1250e-04\n",
+ "Epoch 106/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.1618 - lr: 3.1250e-04\n",
+ "Epoch 107/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0374 - lr: 3.1250e-04\n",
+ "Epoch 108/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.0780 - lr: 3.1250e-04\n",
+ "Epoch 109/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0158 - lr: 3.1250e-04\n",
+ "Epoch 110/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.2529 - lr: 3.1250e-04\n",
+ "Epoch 111/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.2905 - lr: 3.1250e-04\n",
+ "Epoch 112/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.1713 - lr: 3.1250e-04\n",
+ "Epoch 113/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.1680 - lr: 3.1250e-04\n",
+ "Epoch 114/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0264 - lr: 3.1250e-04\n",
+ "Epoch 115/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.1308 - lr: 3.1250e-04\n",
+ "Epoch 116/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0473 - lr: 3.1250e-04\n",
+ "Epoch 117/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.1393 - lr: 3.1250e-04\n",
+ "Epoch 118/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0336 - lr: 3.1250e-04\n",
+ "Epoch 119/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0409 - lr: 1.5625e-04\n",
+ "Epoch 120/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0932 - lr: 1.5625e-04\n",
+ "Epoch 121/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0227 - lr: 1.5625e-04\n",
+ "Epoch 122/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0491 - lr: 1.5625e-04\n",
+ "Epoch 123/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0734 - lr: 1.5625e-04\n",
+ "Epoch 124/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.1225 - lr: 1.5625e-04\n",
+ "Epoch 125/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0082 - lr: 1.5625e-04\n",
+ "Epoch 126/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.1061 - lr: 1.5625e-04\n",
+ "Epoch 127/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0511 - lr: 1.5625e-04\n",
+ "Epoch 128/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0280 - lr: 1.5625e-04\n",
+ "Epoch 129/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0899 - lr: 1.5625e-04\n",
+ "Epoch 130/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0167 - lr: 1.5625e-04\n",
+ "Epoch 131/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0133 - lr: 1.5625e-04\n",
+ "Epoch 132/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0519 - lr: 1.5625e-04\n",
+ "Epoch 133/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.1900 - lr: 1.5625e-04\n",
+ "Epoch 134/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.0392 - lr: 1.5625e-04\n",
+ "Epoch 135/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0222 - lr: 1.5625e-04\n",
+ "Epoch 136/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0120 - lr: 1.5625e-04\n",
+ "Epoch 137/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0755 - lr: 1.5625e-04\n",
+ "Epoch 138/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0820 - lr: 1.5625e-04\n",
+ "Epoch 139/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0921 - lr: 1.5625e-04\n",
+ "Epoch 140/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0145 - lr: 1.5625e-04\n",
+ "Epoch 141/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0545 - lr: 1.5625e-04\n",
+ "Epoch 142/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.1179 - lr: 1.5625e-04\n",
+ "Epoch 143/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.1231 - lr: 1.5625e-04\n",
+ "Epoch 144/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0159 - lr: 1.5625e-04\n",
+ "Epoch 145/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0107 - lr: 1.5625e-04\n",
+ "Epoch 146/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0575 - lr: 1.5625e-04\n",
+ "Epoch 147/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.2076 - lr: 1.5625e-04\n",
+ "Epoch 148/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0351 - lr: 1.5625e-04\n",
+ "Epoch 149/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.1428 - lr: 1.5625e-04\n",
+ "Epoch 150/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.1004 - lr: 1.5625e-04\n",
+ "Epoch 151/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0082 - lr: 7.8125e-05\n",
+ "Epoch 152/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0117 - lr: 7.8125e-05\n",
+ "Epoch 153/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0295 - lr: 7.8125e-05\n",
+ "Epoch 154/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0128 - lr: 7.8125e-05\n",
+ "Epoch 155/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0414 - lr: 7.8125e-05\n",
+ "Epoch 156/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0187 - lr: 7.8125e-05\n",
+ "Epoch 157/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0387 - lr: 7.8125e-05\n",
+ "Epoch 158/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0032 - val_loss: 0.0540 - lr: 7.8125e-05\n",
+ "Epoch 159/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0196 - lr: 7.8125e-05\n",
+ "Epoch 160/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0126 - lr: 7.8125e-05\n",
+ "Epoch 161/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0174 - lr: 7.8125e-05\n",
+ "Epoch 162/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0150 - lr: 7.8125e-05\n",
+ "Epoch 163/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0521 - lr: 7.8125e-05\n",
+ "Epoch 164/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0038 - lr: 7.8125e-05\n",
+ "Epoch 165/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0056 - lr: 7.8125e-05\n",
+ "Epoch 166/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0418 - lr: 7.8125e-05\n",
+ "Epoch 167/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0794 - lr: 7.8125e-05\n",
+ "Epoch 168/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0140 - lr: 7.8125e-05\n",
+ "Epoch 169/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0390 - lr: 7.8125e-05\n",
+ "Epoch 170/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0498 - lr: 7.8125e-05\n",
+ "Epoch 171/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0114 - lr: 7.8125e-05\n",
+ "Epoch 172/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0130 - lr: 7.8125e-05\n",
+ "Epoch 173/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0497 - lr: 7.8125e-05\n",
+ "Epoch 174/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0415 - lr: 7.8125e-05\n",
+ "Epoch 175/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0331 - lr: 7.8125e-05\n",
+ "Epoch 176/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0074 - lr: 7.8125e-05\n",
+ "Epoch 177/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0507 - lr: 7.8125e-05\n",
+ "Epoch 178/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0291 - lr: 7.8125e-05\n",
+ "Epoch 179/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0252 - lr: 7.8125e-05\n",
+ "Epoch 180/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0259 - lr: 7.8125e-05\n",
+ "Epoch 181/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0340 - lr: 7.8125e-05\n",
+ "Epoch 182/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0094 - lr: 7.8125e-05\n",
+ "Epoch 183/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0213 - lr: 7.8125e-05\n",
+ "Epoch 184/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0055 - lr: 7.8125e-05\n",
+ "Epoch 185/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0140 - lr: 7.8125e-05\n",
+ "Epoch 186/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0448 - lr: 7.8125e-05\n",
+ "Epoch 187/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0061 - lr: 7.8125e-05\n",
+ "Epoch 188/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0218 - lr: 7.8125e-05\n",
+ "Epoch 189/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0128 - lr: 7.8125e-05\n",
+ "Epoch 190/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0624 - lr: 3.9062e-05\n",
+ "Epoch 191/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0200 - lr: 3.9062e-05\n",
+ "Epoch 192/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0434 - lr: 3.9062e-05\n",
+ "Epoch 193/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0101 - lr: 3.9062e-05\n",
+ "Epoch 194/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0087 - lr: 3.9062e-05\n",
+ "Epoch 195/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0212 - lr: 3.9062e-05\n",
+ "Epoch 196/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0185 - lr: 3.9062e-05\n",
+ "Epoch 197/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0654 - lr: 3.9062e-05\n",
+ "Epoch 198/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0122 - lr: 3.9062e-05\n",
+ "Epoch 199/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0077 - lr: 3.9062e-05\n",
+ "Epoch 200/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0086 - lr: 3.9062e-05\n",
+ "Epoch 201/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0058 - lr: 3.9062e-05\n",
+ "Epoch 202/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0134 - lr: 3.9062e-05\n",
+ "Epoch 203/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0553 - lr: 3.9062e-05\n",
+ "Epoch 204/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0033 - val_loss: 0.0058 - lr: 3.9062e-05\n",
+ "Epoch 205/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0239 - lr: 3.9062e-05\n",
+ "Epoch 206/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0095 - lr: 3.9062e-05\n",
+ "Epoch 207/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0299 - lr: 3.9062e-05\n",
+ "Epoch 208/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0111 - lr: 3.9062e-05\n",
+ "Epoch 209/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0288 - lr: 3.9062e-05\n",
+ "Epoch 210/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0236 - lr: 3.9062e-05\n",
+ "Epoch 211/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0035 - lr: 3.9062e-05\n",
+ "Epoch 212/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0315 - lr: 3.9062e-05\n",
+ "Epoch 213/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0076 - lr: 3.9062e-05\n",
+ "Epoch 214/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0043 - lr: 3.9062e-05\n",
+ "Epoch 215/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0141 - lr: 3.9062e-05\n",
+ "Epoch 216/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0274 - lr: 3.9062e-05\n",
+ "Epoch 217/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0216 - lr: 3.9062e-05\n",
+ "Epoch 218/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0063 - lr: 3.9062e-05\n",
+ "Epoch 219/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0076 - lr: 3.9062e-05\n",
+ "Epoch 220/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0061 - lr: 3.9062e-05\n",
+ "Epoch 221/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0179 - lr: 3.9062e-05\n",
+ "Epoch 222/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0463 - lr: 3.9062e-05\n",
+ "Epoch 223/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0545 - lr: 3.9062e-05\n",
+ "Epoch 224/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0094 - lr: 3.9062e-05\n",
+ "Epoch 225/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0534 - lr: 3.9062e-05\n",
+ "Epoch 226/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0254 - lr: 3.9062e-05\n",
+ "Epoch 227/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0067 - lr: 3.9062e-05\n",
+ "Epoch 228/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0080 - lr: 3.9062e-05\n",
+ "Epoch 229/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0116 - lr: 3.9062e-05\n",
+ "Epoch 230/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0381 - lr: 3.9062e-05\n",
+ "Epoch 231/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0471 - lr: 3.9062e-05\n",
+ "Epoch 232/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0042 - lr: 3.9062e-05\n",
+ "Epoch 233/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0097 - lr: 3.9062e-05\n",
+ "Epoch 234/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0033 - lr: 3.9062e-05\n",
+ "Epoch 235/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0030 - lr: 3.9062e-05\n",
+ "Epoch 236/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0184 - lr: 3.9062e-05\n",
+ "Epoch 237/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0214 - lr: 3.9062e-05\n",
+ "Epoch 238/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0520 - lr: 3.9062e-05\n",
+ "Epoch 239/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0182 - lr: 3.9062e-05\n",
+ "Epoch 240/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0103 - lr: 3.9062e-05\n",
+ "Epoch 241/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0041 - lr: 3.9062e-05\n",
+ "Epoch 242/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0509 - lr: 3.9062e-05\n",
+ "Epoch 243/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0191 - lr: 3.9062e-05\n",
+ "Epoch 244/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0159 - lr: 3.9062e-05\n",
+ "Epoch 245/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0174 - lr: 3.9062e-05\n",
+ "Epoch 246/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0035 - lr: 3.9062e-05\n",
+ "Epoch 247/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0113 - lr: 3.9062e-05\n",
+ "Epoch 248/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0075 - lr: 3.9062e-05\n",
+ "Epoch 249/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0139 - lr: 3.9062e-05\n",
+ "Epoch 250/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0072 - lr: 3.9062e-05\n",
+ "Epoch 251/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0197 - lr: 3.9062e-05\n",
+ "Epoch 252/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0061 - lr: 3.9062e-05\n",
+ "Epoch 253/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0181 - lr: 3.9062e-05\n",
+ "Epoch 254/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0148 - lr: 3.9062e-05\n",
+ "Epoch 255/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0328 - lr: 3.9062e-05\n",
+ "Epoch 256/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0056 - lr: 3.9062e-05\n",
+ "Epoch 257/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0374 - lr: 3.9062e-05\n",
+ "Epoch 258/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0133 - lr: 3.9062e-05\n",
+ "Epoch 259/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0173 - lr: 3.9062e-05\n",
+ "Epoch 260/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0105 - lr: 3.9062e-05\n",
+ "Epoch 261/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0116 - lr: 1.9531e-05\n",
+ "Epoch 262/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 263/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0093 - lr: 1.9531e-05\n",
+ "Epoch 264/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0160 - lr: 1.9531e-05\n",
+ "Epoch 265/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 1.9531e-05\n",
+ "Epoch 266/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0045 - lr: 1.9531e-05\n",
+ "Epoch 267/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0038 - lr: 1.9531e-05\n",
+ "Epoch 268/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 1.9531e-05\n",
+ "Epoch 269/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0036 - lr: 1.9531e-05\n",
+ "Epoch 270/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0131 - lr: 1.9531e-05\n",
+ "Epoch 271/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0034 - lr: 1.9531e-05\n",
+ "Epoch 272/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0053 - lr: 1.9531e-05\n",
+ "Epoch 273/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0034 - lr: 1.9531e-05\n",
+ "Epoch 274/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0035 - lr: 1.9531e-05\n",
+ "Epoch 275/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0062 - lr: 1.9531e-05\n",
+ "Epoch 276/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0040 - lr: 1.9531e-05\n",
+ "Epoch 277/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0064 - lr: 1.9531e-05\n",
+ "Epoch 278/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0050 - lr: 1.9531e-05\n",
+ "Epoch 279/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0121 - lr: 1.9531e-05\n",
+ "Epoch 280/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 281/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0051 - lr: 1.9531e-05\n",
+ "Epoch 282/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0028 - lr: 1.9531e-05\n",
+ "Epoch 283/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0075 - lr: 1.9531e-05\n",
+ "Epoch 284/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 285/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0041 - lr: 1.9531e-05\n",
+ "Epoch 286/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0052 - lr: 1.9531e-05\n",
+ "Epoch 287/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0064 - lr: 1.9531e-05\n",
+ "Epoch 288/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0029 - lr: 1.9531e-05\n",
+ "Epoch 289/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0057 - lr: 1.9531e-05\n",
+ "Epoch 290/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0056 - lr: 1.9531e-05\n",
+ "Epoch 291/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0096 - lr: 1.9531e-05\n",
+ "Epoch 292/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0036 - lr: 1.9531e-05\n",
+ "Epoch 293/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0075 - lr: 1.9531e-05\n",
+ "Epoch 294/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0066 - lr: 1.9531e-05\n",
+ "Epoch 295/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0039 - lr: 1.9531e-05\n",
+ "Epoch 296/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0050 - lr: 1.9531e-05\n",
+ "Epoch 297/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0056 - lr: 1.9531e-05\n",
+ "Epoch 298/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0056 - lr: 1.9531e-05\n",
+ "Epoch 299/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0140 - lr: 1.9531e-05\n",
+ "Epoch 300/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0034 - lr: 1.9531e-05\n",
+ "Epoch 301/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0038 - lr: 1.9531e-05\n",
+ "Epoch 302/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0045 - lr: 1.9531e-05\n",
+ "Epoch 303/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0078 - lr: 1.9531e-05\n",
+ "Epoch 304/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0113 - lr: 1.9531e-05\n",
+ "Epoch 305/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0028 - lr: 1.9531e-05\n",
+ "Epoch 306/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0074 - lr: 1.9531e-05\n",
+ "Epoch 307/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0069 - lr: 1.9531e-05\n",
+ "Epoch 308/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 1.9531e-05\n",
+ "Epoch 309/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0054 - lr: 1.9531e-05\n",
+ "Epoch 310/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0034 - lr: 1.9531e-05\n",
+ "Epoch 311/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0106 - lr: 1.9531e-05\n",
+ "Epoch 312/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0080 - lr: 1.9531e-05\n",
+ "Epoch 313/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0068 - lr: 1.9531e-05\n",
+ "Epoch 314/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0057 - lr: 1.9531e-05\n",
+ "Epoch 315/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0036 - lr: 1.9531e-05\n",
+ "Epoch 316/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0038 - lr: 1.9531e-05\n",
+ "Epoch 317/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0172 - lr: 1.9531e-05\n",
+ "Epoch 318/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0070 - lr: 1.9531e-05\n",
+ "Epoch 319/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0079 - lr: 1.9531e-05\n",
+ "Epoch 320/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0048 - lr: 1.9531e-05\n",
+ "Epoch 321/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0073 - lr: 1.9531e-05\n",
+ "Epoch 322/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0028 - lr: 1.9531e-05\n",
+ "Epoch 323/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0166 - lr: 1.9531e-05\n",
+ "Epoch 324/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0040 - lr: 1.9531e-05\n",
+ "Epoch 325/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0047 - lr: 1.9531e-05\n",
+ "Epoch 326/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0104 - lr: 1.9531e-05\n",
+ "Epoch 327/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0161 - lr: 1.9531e-05\n",
+ "Epoch 328/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0036 - lr: 1.9531e-05\n",
+ "Epoch 329/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0146 - lr: 1.9531e-05\n",
+ "Epoch 330/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0042 - lr: 1.9531e-05\n",
+ "Epoch 331/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0030 - lr: 9.7656e-06\n",
+ "Epoch 332/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0058 - lr: 9.7656e-06\n",
+ "Epoch 333/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0031 - lr: 9.7656e-06\n",
+ "Epoch 334/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0055 - lr: 9.7656e-06\n",
+ "Epoch 335/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0038 - lr: 9.7656e-06\n",
+ "Epoch 336/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0030 - lr: 9.7656e-06\n",
+ "Epoch 337/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 338/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0037 - lr: 9.7656e-06\n",
+ "Epoch 339/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 340/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0031 - lr: 9.7656e-06\n",
+ "Epoch 341/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0072 - lr: 9.7656e-06\n",
+ "Epoch 342/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0038 - lr: 9.7656e-06\n",
+ "Epoch 343/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0039 - lr: 9.7656e-06\n",
+ "Epoch 344/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0051 - lr: 9.7656e-06\n",
+ "Epoch 345/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0033 - lr: 9.7656e-06\n",
+ "Epoch 346/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0034 - lr: 9.7656e-06\n",
+ "Epoch 347/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0053 - lr: 9.7656e-06\n",
+ "Epoch 348/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0062 - lr: 9.7656e-06\n",
+ "Epoch 349/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0036 - lr: 9.7656e-06\n",
+ "Epoch 350/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0029 - lr: 9.7656e-06\n",
+ "Epoch 351/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0045 - lr: 9.7656e-06\n",
+ "Epoch 352/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0030 - lr: 9.7656e-06\n",
+ "Epoch 353/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 354/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0031 - lr: 9.7656e-06\n",
+ "Epoch 355/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 356/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0034 - lr: 9.7656e-06\n",
+ "Epoch 357/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0049 - lr: 9.7656e-06\n",
+ "Epoch 358/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0060 - lr: 9.7656e-06\n",
+ "Epoch 359/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0029 - lr: 9.7656e-06\n",
+ "Epoch 360/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0031 - lr: 9.7656e-06\n",
+ "Epoch 361/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0037 - lr: 9.7656e-06\n",
+ "Epoch 362/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0029 - lr: 9.7656e-06\n",
+ "Epoch 363/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0033 - lr: 9.7656e-06\n",
+ "Epoch 364/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 365/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 366/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0033 - lr: 9.7656e-06\n",
+ "Epoch 367/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 9.7656e-06\n",
+ "Epoch 368/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 369/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0031 - lr: 9.7656e-06\n",
+ "Epoch 370/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 9.7656e-06\n",
+ "Epoch 371/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0037 - lr: 9.7656e-06\n",
+ "Epoch 372/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 373/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0036 - lr: 9.7656e-06\n",
+ "Epoch 374/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 375/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 376/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 377/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 378/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0041 - lr: 9.7656e-06\n",
+ "Epoch 379/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0045 - lr: 9.7656e-06\n",
+ "Epoch 380/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0058 - lr: 9.7656e-06\n",
+ "Epoch 381/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0055 - lr: 9.7656e-06\n",
+ "Epoch 382/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 383/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0037 - lr: 9.7656e-06\n",
+ "Epoch 384/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0031 - val_loss: 0.0030 - lr: 9.7656e-06\n",
+ "Epoch 385/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0040 - lr: 9.7656e-06\n",
+ "Epoch 386/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0048 - lr: 9.7656e-06\n",
+ "Epoch 387/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0036 - lr: 9.7656e-06\n",
+ "Epoch 388/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0074 - lr: 9.7656e-06\n",
+ "Epoch 389/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0072 - lr: 9.7656e-06\n",
+ "Epoch 390/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0037 - lr: 4.8828e-06\n",
+ "Epoch 391/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 392/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0037 - lr: 4.8828e-06\n",
+ "Epoch 393/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 394/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0030 - lr: 4.8828e-06\n",
+ "Epoch 395/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0034 - lr: 4.8828e-06\n",
+ "Epoch 396/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 397/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 398/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 399/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 400/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0031 - lr: 4.8828e-06\n",
+ "Epoch 401/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 402/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 403/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 404/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0037 - lr: 4.8828e-06\n",
+ "Epoch 405/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 406/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0030 - lr: 4.8828e-06\n",
+ "Epoch 407/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 408/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 409/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 410/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 411/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 412/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 413/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 414/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 415/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0032 - lr: 4.8828e-06\n",
+ "Epoch 416/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0033 - lr: 4.8828e-06\n",
+ "Epoch 417/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 418/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0041 - lr: 4.8828e-06\n",
+ "Epoch 419/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 420/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 421/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 422/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 423/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 424/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 425/1024\n",
+ "90/90 [==============================] - 0s 4ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 426/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0033 - lr: 4.8828e-06\n",
+ "Epoch 427/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 428/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0030 - lr: 4.8828e-06\n",
+ "Epoch 429/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 430/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 431/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 432/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 433/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 434/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 435/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 436/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 437/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0028 - lr: 2.4414e-06\n",
+ "Epoch 438/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 2.4414e-06\n",
+ "Epoch 439/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 440/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 441/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 442/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 443/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0030 - lr: 2.4414e-06\n",
+ "Epoch 444/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 445/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 446/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 447/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 448/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 449/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 450/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 451/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0029 - lr: 2.4414e-06\n",
+ "Epoch 452/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 453/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 454/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 455/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 456/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 457/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 458/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 459/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 460/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 461/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 462/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 463/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 464/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 465/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 466/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 467/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 468/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 469/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 470/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 471/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 472/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 473/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 474/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 475/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 476/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 477/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 478/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 479/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 480/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 481/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 482/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 483/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 1.2207e-06\n",
+ "Epoch 484/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 485/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 486/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 487/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 488/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 489/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 490/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 491/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 492/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 493/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 494/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 6.1035e-07\n",
+ "Epoch 495/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 496/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 497/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 498/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 499/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 500/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 501/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 502/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 503/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 504/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 505/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0027 - lr: 6.1035e-07\n",
+ "Epoch 506/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 507/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 508/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 509/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 510/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 511/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 512/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 513/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 514/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 515/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 516/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 517/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 518/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 519/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 520/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 521/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 522/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 523/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 524/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 525/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 526/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 527/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 528/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 529/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 530/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 531/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 532/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 533/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 534/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 3.0518e-07\n",
+ "Epoch 535/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 536/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 537/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 538/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 539/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 540/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 541/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 542/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 543/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 544/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 545/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 546/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 547/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 548/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 549/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 550/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 551/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 552/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 553/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 554/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 555/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 556/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 557/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 558/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 559/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.5259e-07\n",
+ "Epoch 560/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 561/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 562/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 563/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 564/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 565/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 566/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 567/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 568/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 569/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 570/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 571/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 572/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 573/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 574/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 575/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 576/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 577/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 578/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 579/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 580/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 581/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 582/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 583/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 584/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 7.6294e-08\n",
+ "Epoch 585/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 586/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 587/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 588/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 589/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 590/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 591/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 592/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 593/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 594/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 595/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 596/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 597/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 598/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 599/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 600/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 601/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 602/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 603/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 604/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 605/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 606/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 607/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 608/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 609/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 3.8147e-08\n",
+ "Epoch 610/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 611/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 612/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 613/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 614/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 615/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 616/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 617/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 618/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 619/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 620/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 621/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 622/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 623/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 624/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 625/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 626/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 627/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 628/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 629/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 630/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 631/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 632/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 633/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 634/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 1.9073e-08\n",
+ "Epoch 635/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 636/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 637/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 638/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 639/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 640/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 641/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 642/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 643/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 644/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 645/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 646/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 647/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 648/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 649/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 650/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 651/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 652/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 653/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 654/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 655/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 656/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 657/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 658/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 659/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0026 - lr: 9.5367e-09\n",
+ "Epoch 1/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0162 - val_loss: 0.0337 - lr: 0.0025\n",
+ "Epoch 2/1024\n",
+ "90/90 [==============================] - 0s 889us/step - loss: 0.0086 - val_loss: 0.0453 - lr: 0.0025\n",
+ "Epoch 3/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0070 - val_loss: 0.1119 - lr: 0.0025\n",
+ "Epoch 4/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0061 - val_loss: 0.2422 - lr: 0.0025\n",
+ "Epoch 5/1024\n",
+ "90/90 [==============================] - 0s 831us/step - loss: 0.0055 - val_loss: 0.1867 - lr: 0.0025\n",
+ "Epoch 6/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0051 - val_loss: 0.0551 - lr: 0.0025\n",
+ "Epoch 7/1024\n",
+ "90/90 [==============================] - 0s 840us/step - loss: 0.0046 - val_loss: 0.1507 - lr: 0.0025\n",
+ "Epoch 8/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0043 - val_loss: 0.1627 - lr: 0.0025\n",
+ "Epoch 9/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0045 - val_loss: 0.2282 - lr: 0.0025\n",
+ "Epoch 10/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0042 - val_loss: 0.0914 - lr: 0.0025\n",
+ "Epoch 11/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0043 - val_loss: 0.3210 - lr: 0.0025\n",
+ "Epoch 12/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0040 - val_loss: 0.1090 - lr: 0.0025\n",
+ "Epoch 13/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0040 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 14/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0041 - val_loss: 0.0339 - lr: 0.0025\n",
+ "Epoch 15/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0042 - val_loss: 0.0163 - lr: 0.0025\n",
+ "Epoch 16/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0041 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 17/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0039 - val_loss: 0.0743 - lr: 0.0025\n",
+ "Epoch 18/1024\n",
+ "90/90 [==============================] - 0s 980us/step - loss: 0.0039 - val_loss: 0.2028 - lr: 0.0025\n",
+ "Epoch 19/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0037 - val_loss: 0.3298 - lr: 0.0025\n",
+ "Epoch 20/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0038 - val_loss: 0.2000 - lr: 0.0025\n",
+ "Epoch 21/1024\n",
+ "90/90 [==============================] - 0s 837us/step - loss: 0.0036 - val_loss: 0.0345 - lr: 0.0025\n",
+ "Epoch 22/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0037 - val_loss: 0.3248 - lr: 0.0025\n",
+ "Epoch 23/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0037 - val_loss: 0.3253 - lr: 0.0025\n",
+ "Epoch 24/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0035 - val_loss: 0.3070 - lr: 0.0025\n",
+ "Epoch 25/1024\n",
+ "90/90 [==============================] - 0s 833us/step - loss: 0.0036 - val_loss: 0.3283 - lr: 0.0025\n",
+ "Epoch 26/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0036 - val_loss: 0.0853 - lr: 0.0025\n",
+ "Epoch 27/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0036 - val_loss: 0.2262 - lr: 0.0025\n",
+ "Epoch 28/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0035 - val_loss: 0.3298 - lr: 0.0025\n",
+ "Epoch 29/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0038 - val_loss: 0.1514 - lr: 0.0025\n",
+ "Epoch 30/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0035 - val_loss: 0.0532 - lr: 0.0025\n",
+ "Epoch 31/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0039 - val_loss: 0.1852 - lr: 0.0025\n",
+ "Epoch 32/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0035 - val_loss: 0.1469 - lr: 0.0025\n",
+ "Epoch 33/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0036 - val_loss: 0.1926 - lr: 0.0025\n",
+ "Epoch 34/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0034 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 35/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0035 - val_loss: 0.2885 - lr: 0.0025\n",
+ "Epoch 36/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0037 - val_loss: 0.2894 - lr: 0.0025\n",
+ "Epoch 37/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0036 - val_loss: 0.2274 - lr: 0.0025\n",
+ "Epoch 38/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0035 - val_loss: 0.1992 - lr: 0.0025\n",
+ "Epoch 39/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0035 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 40/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0034 - val_loss: 0.3245 - lr: 0.0025\n",
+ "Epoch 41/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0035 - val_loss: 0.3290 - lr: 0.0012\n",
+ "Epoch 42/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0034 - val_loss: 0.1089 - lr: 0.0012\n",
+ "Epoch 43/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0033 - val_loss: 0.0756 - lr: 0.0012\n",
+ "Epoch 44/1024\n",
+ "90/90 [==============================] - 0s 842us/step - loss: 0.0034 - val_loss: 0.0401 - lr: 0.0012\n",
+ "Epoch 45/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0033 - val_loss: 0.3272 - lr: 0.0012\n",
+ "Epoch 46/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0033 - val_loss: 0.3168 - lr: 0.0012\n",
+ "Epoch 47/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0033 - val_loss: 0.3233 - lr: 0.0012\n",
+ "Epoch 48/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0031 - val_loss: 0.1908 - lr: 0.0012\n",
+ "Epoch 49/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0034 - val_loss: 0.2271 - lr: 0.0012\n",
+ "Epoch 50/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0032 - val_loss: 0.0582 - lr: 0.0012\n",
+ "Epoch 51/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0033 - val_loss: 0.2781 - lr: 0.0012\n",
+ "Epoch 52/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0031 - val_loss: 0.1596 - lr: 0.0012\n",
+ "Epoch 53/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0032 - val_loss: 0.0435 - lr: 0.0012\n",
+ "Epoch 54/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0033 - val_loss: 0.0385 - lr: 0.0012\n",
+ "Epoch 55/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0032 - val_loss: 0.0780 - lr: 0.0012\n",
+ "Epoch 56/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0033 - val_loss: 0.3291 - lr: 0.0012\n",
+ "Epoch 57/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0032 - val_loss: 0.1991 - lr: 0.0012\n",
+ "Epoch 58/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0033 - val_loss: 0.2066 - lr: 0.0012\n",
+ "Epoch 59/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0031 - val_loss: 0.2353 - lr: 0.0012\n",
+ "Epoch 60/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0033 - val_loss: 0.1137 - lr: 0.0012\n",
+ "Epoch 61/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0032 - val_loss: 0.3180 - lr: 0.0012\n",
+ "Epoch 62/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0033 - val_loss: 0.2062 - lr: 0.0012\n",
+ "Epoch 63/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0031 - val_loss: 0.2061 - lr: 0.0012\n",
+ "Epoch 64/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.3123 - lr: 0.0012\n",
+ "Epoch 65/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.3288 - lr: 0.0012\n",
+ "Epoch 66/1024\n",
+ "90/90 [==============================] - 0s 955us/step - loss: 0.0032 - val_loss: 0.3214 - lr: 6.2500e-04\n",
+ "Epoch 67/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0031 - val_loss: 0.1355 - lr: 6.2500e-04\n",
+ "Epoch 68/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0030 - val_loss: 0.0484 - lr: 6.2500e-04\n",
+ "Epoch 69/1024\n",
+ "90/90 [==============================] - 0s 838us/step - loss: 0.0031 - val_loss: 0.2270 - lr: 6.2500e-04\n",
+ "Epoch 70/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0030 - val_loss: 0.0456 - lr: 6.2500e-04\n",
+ "Epoch 71/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0031 - val_loss: 0.1166 - lr: 6.2500e-04\n",
+ "Epoch 72/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0031 - val_loss: 0.0350 - lr: 6.2500e-04\n",
+ "Epoch 73/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0030 - val_loss: 0.2658 - lr: 6.2500e-04\n",
+ "Epoch 74/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0030 - val_loss: 0.0387 - lr: 6.2500e-04\n",
+ "Epoch 75/1024\n",
+ "90/90 [==============================] - 0s 966us/step - loss: 0.0031 - val_loss: 0.2661 - lr: 6.2500e-04\n",
+ "Epoch 76/1024\n",
+ "90/90 [==============================] - 0s 837us/step - loss: 0.0031 - val_loss: 0.0306 - lr: 6.2500e-04\n",
+ "Epoch 77/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0031 - val_loss: 0.0508 - lr: 6.2500e-04\n",
+ "Epoch 78/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0030 - val_loss: 0.0662 - lr: 6.2500e-04\n",
+ "Epoch 79/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0032 - val_loss: 0.1038 - lr: 6.2500e-04\n",
+ "Epoch 80/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0031 - val_loss: 0.0455 - lr: 6.2500e-04\n",
+ "Epoch 81/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0032 - val_loss: 0.2104 - lr: 6.2500e-04\n",
+ "Epoch 82/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0030 - val_loss: 0.0288 - lr: 6.2500e-04\n",
+ "Epoch 83/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0030 - val_loss: 0.1063 - lr: 6.2500e-04\n",
+ "Epoch 84/1024\n",
+ "90/90 [==============================] - 0s 835us/step - loss: 0.0031 - val_loss: 0.3056 - lr: 6.2500e-04\n",
+ "Epoch 85/1024\n",
+ "90/90 [==============================] - 0s 838us/step - loss: 0.0030 - val_loss: 0.3213 - lr: 6.2500e-04\n",
+ "Epoch 86/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0032 - val_loss: 0.0759 - lr: 6.2500e-04\n",
+ "Epoch 87/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0029 - val_loss: 0.2254 - lr: 6.2500e-04\n",
+ "Epoch 88/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0031 - val_loss: 0.0319 - lr: 6.2500e-04\n",
+ "Epoch 89/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0031 - val_loss: 0.1777 - lr: 6.2500e-04\n",
+ "Epoch 90/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0031 - val_loss: 0.1783 - lr: 6.2500e-04\n",
+ "Epoch 91/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0056 - lr: 3.1250e-04\n",
+ "Epoch 92/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0030 - val_loss: 0.0405 - lr: 3.1250e-04\n",
+ "Epoch 93/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0030 - val_loss: 0.1284 - lr: 3.1250e-04\n",
+ "Epoch 94/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0030 - val_loss: 0.0306 - lr: 3.1250e-04\n",
+ "Epoch 95/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0030 - val_loss: 0.0071 - lr: 3.1250e-04\n",
+ "Epoch 96/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0030 - val_loss: 0.1139 - lr: 3.1250e-04\n",
+ "Epoch 97/1024\n",
+ "90/90 [==============================] - 0s 835us/step - loss: 0.0031 - val_loss: 0.0524 - lr: 3.1250e-04\n",
+ "Epoch 98/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0031 - val_loss: 0.0218 - lr: 3.1250e-04\n",
+ "Epoch 99/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0030 - val_loss: 0.2167 - lr: 3.1250e-04\n",
+ "Epoch 100/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0029 - val_loss: 0.0393 - lr: 3.1250e-04\n",
+ "Epoch 101/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0030 - val_loss: 0.0430 - lr: 3.1250e-04\n",
+ "Epoch 102/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0031 - val_loss: 0.0761 - lr: 3.1250e-04\n",
+ "Epoch 103/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0030 - val_loss: 0.0112 - lr: 3.1250e-04\n",
+ "Epoch 104/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0029 - val_loss: 0.1880 - lr: 3.1250e-04\n",
+ "Epoch 105/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0030 - val_loss: 0.0084 - lr: 3.1250e-04\n",
+ "Epoch 106/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0030 - val_loss: 0.0204 - lr: 3.1250e-04\n",
+ "Epoch 107/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0030 - val_loss: 0.0173 - lr: 3.1250e-04\n",
+ "Epoch 108/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2058 - lr: 3.1250e-04\n",
+ "Epoch 109/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0875 - lr: 3.1250e-04\n",
+ "Epoch 110/1024\n",
+ "90/90 [==============================] - 0s 964us/step - loss: 0.0029 - val_loss: 0.0100 - lr: 3.1250e-04\n",
+ "Epoch 111/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0029 - val_loss: 0.1672 - lr: 3.1250e-04\n",
+ "Epoch 112/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0030 - val_loss: 0.1869 - lr: 3.1250e-04\n",
+ "Epoch 113/1024\n",
+ "90/90 [==============================] - 0s 837us/step - loss: 0.0029 - val_loss: 0.0400 - lr: 3.1250e-04\n",
+ "Epoch 114/1024\n",
+ "90/90 [==============================] - 0s 835us/step - loss: 0.0029 - val_loss: 0.2014 - lr: 3.1250e-04\n",
+ "Epoch 115/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0030 - val_loss: 0.1869 - lr: 3.1250e-04\n",
+ "Epoch 116/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0030 - val_loss: 0.0204 - lr: 3.1250e-04\n",
+ "Epoch 117/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0048 - lr: 1.5625e-04\n",
+ "Epoch 118/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0038 - lr: 1.5625e-04\n",
+ "Epoch 119/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0644 - lr: 1.5625e-04\n",
+ "Epoch 120/1024\n",
+ "90/90 [==============================] - 0s 886us/step - loss: 0.0028 - val_loss: 0.0170 - lr: 1.5625e-04\n",
+ "Epoch 121/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0029 - val_loss: 0.0230 - lr: 1.5625e-04\n",
+ "Epoch 122/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0171 - lr: 1.5625e-04\n",
+ "Epoch 123/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0029 - val_loss: 0.0114 - lr: 1.5625e-04\n",
+ "Epoch 124/1024\n",
+ "90/90 [==============================] - 0s 886us/step - loss: 0.0030 - val_loss: 0.0643 - lr: 1.5625e-04\n",
+ "Epoch 125/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0198 - lr: 1.5625e-04\n",
+ "Epoch 126/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0527 - lr: 1.5625e-04\n",
+ "Epoch 127/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0031 - val_loss: 0.1066 - lr: 1.5625e-04\n",
+ "Epoch 128/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0029 - val_loss: 0.0126 - lr: 1.5625e-04\n",
+ "Epoch 129/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0029 - val_loss: 0.0094 - lr: 1.5625e-04\n",
+ "Epoch 130/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0237 - lr: 1.5625e-04\n",
+ "Epoch 131/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0030 - val_loss: 0.0102 - lr: 1.5625e-04\n",
+ "Epoch 132/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0029 - val_loss: 0.0119 - lr: 1.5625e-04\n",
+ "Epoch 133/1024\n",
+ "90/90 [==============================] - 0s 835us/step - loss: 0.0029 - val_loss: 0.0440 - lr: 1.5625e-04\n",
+ "Epoch 134/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0030 - val_loss: 0.1435 - lr: 1.5625e-04\n",
+ "Epoch 135/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0029 - val_loss: 0.0172 - lr: 1.5625e-04\n",
+ "Epoch 136/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0030 - val_loss: 0.1439 - lr: 1.5625e-04\n",
+ "Epoch 137/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0572 - lr: 1.5625e-04\n",
+ "Epoch 138/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0409 - lr: 1.5625e-04\n",
+ "Epoch 139/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0029 - val_loss: 0.0104 - lr: 1.5625e-04\n",
+ "Epoch 140/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0067 - lr: 1.5625e-04\n",
+ "Epoch 141/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0443 - lr: 1.5625e-04\n",
+ "Epoch 142/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.1467 - lr: 1.5625e-04\n",
+ "Epoch 143/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0030 - val_loss: 0.2135 - lr: 1.5625e-04\n",
+ "Epoch 144/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0078 - lr: 7.8125e-05\n",
+ "Epoch 145/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0030 - val_loss: 0.0148 - lr: 7.8125e-05\n",
+ "Epoch 146/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0030 - val_loss: 0.0695 - lr: 7.8125e-05\n",
+ "Epoch 147/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0029 - val_loss: 0.0059 - lr: 7.8125e-05\n",
+ "Epoch 148/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0314 - lr: 7.8125e-05\n",
+ "Epoch 149/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0029 - val_loss: 0.0089 - lr: 7.8125e-05\n",
+ "Epoch 150/1024\n",
+ "90/90 [==============================] - 0s 992us/step - loss: 0.0029 - val_loss: 0.0090 - lr: 7.8125e-05\n",
+ "Epoch 151/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0410 - lr: 7.8125e-05\n",
+ "Epoch 152/1024\n",
+ "90/90 [==============================] - 0s 978us/step - loss: 0.0029 - val_loss: 0.0308 - lr: 7.8125e-05\n",
+ "Epoch 153/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0029 - val_loss: 0.0352 - lr: 7.8125e-05\n",
+ "Epoch 154/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0195 - lr: 7.8125e-05\n",
+ "Epoch 155/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0083 - lr: 7.8125e-05\n",
+ "Epoch 156/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0035 - lr: 7.8125e-05\n",
+ "Epoch 157/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0030 - val_loss: 0.0118 - lr: 7.8125e-05\n",
+ "Epoch 158/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0365 - lr: 7.8125e-05\n",
+ "Epoch 159/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0038 - lr: 7.8125e-05\n",
+ "Epoch 160/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0029 - val_loss: 0.0081 - lr: 7.8125e-05\n",
+ "Epoch 161/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0292 - lr: 7.8125e-05\n",
+ "Epoch 162/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0029 - val_loss: 0.0325 - lr: 7.8125e-05\n",
+ "Epoch 163/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0128 - lr: 7.8125e-05\n",
+ "Epoch 164/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0098 - lr: 7.8125e-05\n",
+ "Epoch 165/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0058 - lr: 7.8125e-05\n",
+ "Epoch 166/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0028 - val_loss: 0.0283 - lr: 7.8125e-05\n",
+ "Epoch 167/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0385 - lr: 7.8125e-05\n",
+ "Epoch 168/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0030 - val_loss: 0.0039 - lr: 7.8125e-05\n",
+ "Epoch 169/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0028 - val_loss: 0.0042 - lr: 7.8125e-05\n",
+ "Epoch 170/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0095 - lr: 7.8125e-05\n",
+ "Epoch 171/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0312 - lr: 7.8125e-05\n",
+ "Epoch 172/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0030 - val_loss: 0.0207 - lr: 7.8125e-05\n",
+ "Epoch 173/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0326 - lr: 7.8125e-05\n",
+ "Epoch 174/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0028 - val_loss: 0.0063 - lr: 7.8125e-05\n",
+ "Epoch 175/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0125 - lr: 7.8125e-05\n",
+ "Epoch 176/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0036 - lr: 7.8125e-05\n",
+ "Epoch 177/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0028 - val_loss: 0.0107 - lr: 7.8125e-05\n",
+ "Epoch 178/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0442 - lr: 7.8125e-05\n",
+ "Epoch 179/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0030 - val_loss: 0.0142 - lr: 7.8125e-05\n",
+ "Epoch 180/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0029 - val_loss: 0.0171 - lr: 7.8125e-05\n",
+ "Epoch 181/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0116 - lr: 7.8125e-05\n",
+ "Epoch 182/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0040 - lr: 3.9062e-05\n",
+ "Epoch 183/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0030 - val_loss: 0.0042 - lr: 3.9062e-05\n",
+ "Epoch 184/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0028 - val_loss: 0.0112 - lr: 3.9062e-05\n",
+ "Epoch 185/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0047 - lr: 3.9062e-05\n",
+ "Epoch 186/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0156 - lr: 3.9062e-05\n",
+ "Epoch 187/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0030 - val_loss: 0.0150 - lr: 3.9062e-05\n",
+ "Epoch 188/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0028 - val_loss: 0.0223 - lr: 3.9062e-05\n",
+ "Epoch 189/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0086 - lr: 3.9062e-05\n",
+ "Epoch 190/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0028 - val_loss: 0.0118 - lr: 3.9062e-05\n",
+ "Epoch 191/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0030 - val_loss: 0.0062 - lr: 3.9062e-05\n",
+ "Epoch 192/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0065 - lr: 3.9062e-05\n",
+ "Epoch 193/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0084 - lr: 3.9062e-05\n",
+ "Epoch 194/1024\n",
+ "90/90 [==============================] - 0s 987us/step - loss: 0.0030 - val_loss: 0.0120 - lr: 3.9062e-05\n",
+ "Epoch 195/1024\n",
+ "90/90 [==============================] - 0s 832us/step - loss: 0.0030 - val_loss: 0.0056 - lr: 3.9062e-05\n",
+ "Epoch 196/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0029 - val_loss: 0.0063 - lr: 3.9062e-05\n",
+ "Epoch 197/1024\n",
+ "90/90 [==============================] - 0s 836us/step - loss: 0.0030 - val_loss: 0.0045 - lr: 3.9062e-05\n",
+ "Epoch 198/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0028 - val_loss: 0.0089 - lr: 3.9062e-05\n",
+ "Epoch 199/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0028 - val_loss: 0.0049 - lr: 3.9062e-05\n",
+ "Epoch 200/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0028 - val_loss: 0.0059 - lr: 3.9062e-05\n",
+ "Epoch 201/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0028 - val_loss: 0.0151 - lr: 3.9062e-05\n",
+ "Epoch 202/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0030 - lr: 3.9062e-05\n",
+ "Epoch 203/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0047 - lr: 3.9062e-05\n",
+ "Epoch 204/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0030 - val_loss: 0.0040 - lr: 3.9062e-05\n",
+ "Epoch 205/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0070 - lr: 3.9062e-05\n",
+ "Epoch 206/1024\n",
+ "90/90 [==============================] - 0s 840us/step - loss: 0.0029 - val_loss: 0.0060 - lr: 3.9062e-05\n",
+ "Epoch 207/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0029 - val_loss: 0.0052 - lr: 3.9062e-05\n",
+ "Epoch 208/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0029 - val_loss: 0.0135 - lr: 3.9062e-05\n",
+ "Epoch 209/1024\n",
+ "90/90 [==============================] - 0s 840us/step - loss: 0.0029 - val_loss: 0.0046 - lr: 3.9062e-05\n",
+ "Epoch 210/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0057 - lr: 3.9062e-05\n",
+ "Epoch 211/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0028 - lr: 3.9062e-05\n",
+ "Epoch 212/1024\n",
+ "90/90 [==============================] - 0s 836us/step - loss: 0.0029 - val_loss: 0.0089 - lr: 3.9062e-05\n",
+ "Epoch 213/1024\n",
+ "90/90 [==============================] - 0s 883us/step - loss: 0.0028 - val_loss: 0.0048 - lr: 3.9062e-05\n",
+ "Epoch 214/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0029 - val_loss: 0.0050 - lr: 3.9062e-05\n",
+ "Epoch 215/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0030 - val_loss: 0.0058 - lr: 3.9062e-05\n",
+ "Epoch 216/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0037 - lr: 3.9062e-05\n",
+ "Epoch 217/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0029 - val_loss: 0.0059 - lr: 3.9062e-05\n",
+ "Epoch 218/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0029 - val_loss: 0.0202 - lr: 3.9062e-05\n",
+ "Epoch 219/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0029 - val_loss: 0.0313 - lr: 3.9062e-05\n",
+ "Epoch 220/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0030 - val_loss: 0.0050 - lr: 3.9062e-05\n",
+ "Epoch 221/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0115 - lr: 3.9062e-05\n",
+ "Epoch 222/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0029 - val_loss: 0.0046 - lr: 3.9062e-05\n",
+ "Epoch 223/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0065 - lr: 3.9062e-05\n",
+ "Epoch 224/1024\n",
+ "90/90 [==============================] - 0s 898us/step - loss: 0.0029 - val_loss: 0.0044 - lr: 3.9062e-05\n",
+ "Epoch 225/1024\n",
+ "90/90 [==============================] - 0s 980us/step - loss: 0.0029 - val_loss: 0.0489 - lr: 3.9062e-05\n",
+ "Epoch 226/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0027 - val_loss: 0.0035 - lr: 3.9062e-05\n",
+ "Epoch 227/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0069 - lr: 3.9062e-05\n",
+ "Epoch 228/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0030 - val_loss: 0.0081 - lr: 3.9062e-05\n",
+ "Epoch 229/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0028 - lr: 3.9062e-05\n",
+ "Epoch 230/1024\n",
+ "90/90 [==============================] - 0s 893us/step - loss: 0.0029 - val_loss: 0.0077 - lr: 3.9062e-05\n",
+ "Epoch 231/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0028 - val_loss: 0.0028 - lr: 3.9062e-05\n",
+ "Epoch 232/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0028 - val_loss: 0.0095 - lr: 3.9062e-05\n",
+ "Epoch 233/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0103 - lr: 3.9062e-05\n",
+ "Epoch 234/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0034 - lr: 3.9062e-05\n",
+ "Epoch 235/1024\n",
+ "90/90 [==============================] - 0s 939us/step - loss: 0.0029 - val_loss: 0.0053 - lr: 3.9062e-05\n",
+ "Epoch 236/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0030 - val_loss: 0.0069 - lr: 3.9062e-05\n",
+ "Epoch 237/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0173 - lr: 3.9062e-05\n",
+ "Epoch 238/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0028 - val_loss: 0.0057 - lr: 3.9062e-05\n",
+ "Epoch 239/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0029 - val_loss: 0.0139 - lr: 3.9062e-05\n",
+ "Epoch 240/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0029 - val_loss: 0.0090 - lr: 3.9062e-05\n",
+ "Epoch 241/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0029 - val_loss: 0.0045 - lr: 3.9062e-05\n",
+ "Epoch 242/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0173 - lr: 3.9062e-05\n",
+ "Epoch 243/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0029 - val_loss: 0.0029 - lr: 3.9062e-05\n",
+ "Epoch 244/1024\n",
+ "90/90 [==============================] - 0s 902us/step - loss: 0.0029 - val_loss: 0.0130 - lr: 3.9062e-05\n",
+ "Epoch 245/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0190 - lr: 3.9062e-05\n",
+ "Epoch 246/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0029 - val_loss: 0.0061 - lr: 3.9062e-05\n",
+ "Epoch 247/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0029 - val_loss: 0.0034 - lr: 3.9062e-05\n",
+ "Epoch 248/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0028 - val_loss: 0.0163 - lr: 3.9062e-05\n",
+ "Epoch 249/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0027 - val_loss: 0.0129 - lr: 3.9062e-05\n",
+ "Epoch 250/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0031 - val_loss: 0.0044 - lr: 3.9062e-05\n",
+ "Epoch 251/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0029 - val_loss: 0.0320 - lr: 3.9062e-05\n",
+ "Epoch 252/1024\n",
+ "90/90 [==============================] - 0s 898us/step - loss: 0.0029 - val_loss: 0.0176 - lr: 3.9062e-05\n",
+ "Epoch 253/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0028 - val_loss: 0.0254 - lr: 3.9062e-05\n",
+ "Epoch 254/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0027 - val_loss: 0.0031 - lr: 3.9062e-05\n",
+ "Epoch 255/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0029 - val_loss: 0.0085 - lr: 1.9531e-05\n",
+ "Epoch 256/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0069 - lr: 1.9531e-05\n",
+ "Epoch 257/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0029 - lr: 1.9531e-05\n",
+ "Epoch 258/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0030 - lr: 1.9531e-05\n",
+ "Epoch 259/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0029 - val_loss: 0.0029 - lr: 1.9531e-05\n",
+ "Epoch 260/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0027 - lr: 1.9531e-05\n",
+ "Epoch 261/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0028 - lr: 1.9531e-05\n",
+ "Epoch 262/1024\n",
+ "90/90 [==============================] - 0s 828us/step - loss: 0.0029 - val_loss: 0.0027 - lr: 1.9531e-05\n",
+ "Epoch 263/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0029 - val_loss: 0.0035 - lr: 1.9531e-05\n",
+ "Epoch 264/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0029 - val_loss: 0.0036 - lr: 1.9531e-05\n",
+ "Epoch 265/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0028 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 266/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0033 - lr: 1.9531e-05\n",
+ "Epoch 267/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0041 - lr: 1.9531e-05\n",
+ "Epoch 268/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0029 - val_loss: 0.0028 - lr: 1.9531e-05\n",
+ "Epoch 269/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0028 - val_loss: 0.0028 - lr: 1.9531e-05\n",
+ "Epoch 270/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0027 - val_loss: 0.0050 - lr: 1.9531e-05\n",
+ "Epoch 271/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0029 - val_loss: 0.0029 - lr: 1.9531e-05\n",
+ "Epoch 272/1024\n",
+ "90/90 [==============================] - 0s 978us/step - loss: 0.0028 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 273/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0035 - lr: 1.9531e-05\n",
+ "Epoch 274/1024\n",
+ "90/90 [==============================] - 0s 978us/step - loss: 0.0029 - val_loss: 0.0039 - lr: 1.9531e-05\n",
+ "Epoch 275/1024\n",
+ "90/90 [==============================] - 0s 838us/step - loss: 0.0029 - val_loss: 0.0039 - lr: 1.9531e-05\n",
+ "Epoch 276/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0028 - val_loss: 0.0053 - lr: 1.9531e-05\n",
+ "Epoch 277/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0033 - lr: 1.9531e-05\n",
+ "Epoch 278/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0028 - val_loss: 0.0061 - lr: 1.9531e-05\n",
+ "Epoch 279/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0072 - lr: 1.9531e-05\n",
+ "Epoch 280/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0028 - val_loss: 0.0055 - lr: 1.9531e-05\n",
+ "Epoch 281/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0029 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 282/1024\n",
+ "90/90 [==============================] - 0s 909us/step - loss: 0.0029 - val_loss: 0.0040 - lr: 1.9531e-05\n",
+ "Epoch 283/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0028 - lr: 1.9531e-05\n",
+ "Epoch 284/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0028 - val_loss: 0.0052 - lr: 1.9531e-05\n",
+ "Epoch 285/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0030 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 286/1024\n",
+ "90/90 [==============================] - 0s 875us/step - loss: 0.0029 - val_loss: 0.0037 - lr: 9.7656e-06\n",
+ "Epoch 287/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.7656e-06\n",
+ "Epoch 288/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0030 - val_loss: 0.0055 - lr: 9.7656e-06\n",
+ "Epoch 289/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 290/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.7656e-06\n",
+ "Epoch 291/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 292/1024\n",
+ "90/90 [==============================] - 0s 892us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 293/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0028 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 294/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0038 - lr: 9.7656e-06\n",
+ "Epoch 295/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0031 - lr: 9.7656e-06\n",
+ "Epoch 296/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 297/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0027 - val_loss: 0.0033 - lr: 9.7656e-06\n",
+ "Epoch 298/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 299/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 300/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0029 - val_loss: 0.0035 - lr: 9.7656e-06\n",
+ "Epoch 301/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0030 - lr: 9.7656e-06\n",
+ "Epoch 302/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 303/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0029 - val_loss: 0.0031 - lr: 9.7656e-06\n",
+ "Epoch 304/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 305/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 306/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0028 - val_loss: 0.0052 - lr: 9.7656e-06\n",
+ "Epoch 307/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 308/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 309/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 310/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 311/1024\n",
+ "90/90 [==============================] - 0s 907us/step - loss: 0.0028 - val_loss: 0.0033 - lr: 9.7656e-06\n",
+ "Epoch 312/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0029 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 313/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 314/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 315/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0029 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 316/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 317/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 318/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 319/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 320/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 321/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 322/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 323/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 324/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 325/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 326/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0028 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 327/1024\n",
+ "90/90 [==============================] - 0s 889us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 328/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 329/1024\n",
+ "90/90 [==============================] - 0s 909us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 330/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 331/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 332/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 333/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 334/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 335/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0029 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 336/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0028 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 337/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 338/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 339/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 340/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 341/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 342/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 343/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0030 - val_loss: 0.0032 - lr: 4.8828e-06\n",
+ "Epoch 344/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0029 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 345/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 346/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 347/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0029 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 348/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0032 - lr: 4.8828e-06\n",
+ "Epoch 349/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 350/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 351/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 352/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 353/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 354/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 355/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 356/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 357/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 358/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 359/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 360/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 361/1024\n",
+ "90/90 [==============================] - 0s 828us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 362/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 363/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 364/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 365/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 366/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 367/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 368/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 369/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 370/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 371/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 372/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 373/1024\n",
+ "90/90 [==============================] - 0s 892us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 374/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 375/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 376/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 377/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 378/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 379/1024\n",
+ "90/90 [==============================] - 0s 889us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 380/1024\n",
+ "90/90 [==============================] - 0s 827us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 381/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 382/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 383/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 384/1024\n",
+ "90/90 [==============================] - 0s 882us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 385/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 386/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 387/1024\n",
+ "90/90 [==============================] - 0s 882us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 388/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 389/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 390/1024\n",
+ "90/90 [==============================] - 0s 840us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 391/1024\n",
+ "90/90 [==============================] - 0s 889us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 392/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 393/1024\n",
+ "90/90 [==============================] - 0s 899us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 394/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 395/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 396/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 397/1024\n",
+ "90/90 [==============================] - 0s 894us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 398/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 399/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 400/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 401/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 402/1024\n",
+ "90/90 [==============================] - 0s 886us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 403/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 404/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 405/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 406/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 407/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 408/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 409/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 410/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 411/1024\n",
+ "90/90 [==============================] - 0s 888us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 412/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 413/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 414/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 415/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 416/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 417/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 418/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 419/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 420/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 421/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 422/1024\n",
+ "90/90 [==============================] - 0s 836us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 423/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 424/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 425/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 426/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 427/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 428/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 429/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 430/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 431/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 432/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 433/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 434/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 435/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 436/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 437/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 438/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 439/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0031 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 440/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 441/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 442/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 443/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 444/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 445/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 446/1024\n",
+ "90/90 [==============================] - 0s 893us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 447/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 448/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 449/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 450/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 451/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 452/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 453/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 454/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 455/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 456/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 457/1024\n",
+ "90/90 [==============================] - 0s 952us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 458/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 459/1024\n",
+ "90/90 [==============================] - 0s 834us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 460/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 461/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 462/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 463/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 464/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 465/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 466/1024\n",
+ "90/90 [==============================] - 0s 837us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 467/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 468/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 469/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 470/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 471/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 472/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 473/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 474/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 475/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 476/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 477/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 478/1024\n",
+ "90/90 [==============================] - 0s 842us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 479/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 480/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 481/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 482/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 483/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 484/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 485/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 486/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 487/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 488/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 489/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 490/1024\n",
+ "90/90 [==============================] - 0s 907us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 491/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 492/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 493/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 494/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 495/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 496/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 497/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 498/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 499/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 500/1024\n",
+ "90/90 [==============================] - 0s 894us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 501/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 502/1024\n",
+ "90/90 [==============================] - 0s 891us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 503/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 504/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 505/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 506/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 507/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 508/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 509/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 510/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 511/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 512/1024\n",
+ "90/90 [==============================] - 0s 840us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 513/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 514/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 515/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 516/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 517/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 518/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 519/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 520/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 521/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 522/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 523/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 524/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 525/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 526/1024\n",
+ "90/90 [==============================] - 0s 838us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 527/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 528/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 529/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 530/1024\n",
+ "90/90 [==============================] - 0s 888us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 531/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 532/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 533/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 534/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 535/1024\n",
+ "90/90 [==============================] - 0s 889us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 536/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 537/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 538/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 539/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 540/1024\n",
+ "90/90 [==============================] - 0s 889us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 541/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 542/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 543/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 544/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 545/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 546/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 547/1024\n",
+ "90/90 [==============================] - 0s 834us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 548/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 549/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 550/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 551/1024\n",
+ "90/90 [==============================] - 0s 884us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 552/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 553/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 554/1024\n",
+ "90/90 [==============================] - 0s 884us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 555/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 556/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 557/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 558/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 559/1024\n",
+ "90/90 [==============================] - 0s 829us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 560/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 561/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 562/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 563/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 564/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 565/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 566/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 567/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 568/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 569/1024\n",
+ "90/90 [==============================] - 0s 842us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 570/1024\n",
+ "90/90 [==============================] - 0s 835us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 571/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 572/1024\n",
+ "90/90 [==============================] - 0s 833us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 573/1024\n",
+ "90/90 [==============================] - 0s 842us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 574/1024\n",
+ "90/90 [==============================] - 0s 840us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 575/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 576/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 577/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 578/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 579/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 580/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 581/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 582/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 583/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 584/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 585/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 586/1024\n",
+ "90/90 [==============================] - 0s 933us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 587/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 588/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 589/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 590/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 591/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 592/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 593/1024\n",
+ "90/90 [==============================] - 0s 842us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 594/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 595/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 596/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 597/1024\n",
+ "90/90 [==============================] - 0s 896us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 598/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 599/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 600/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 601/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 602/1024\n",
+ "90/90 [==============================] - 0s 897us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 603/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 604/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 605/1024\n",
+ "90/90 [==============================] - 0s 902us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 606/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 607/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 608/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 609/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 610/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 611/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 612/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 613/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 614/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 615/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 616/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 617/1024\n",
+ "90/90 [==============================] - 0s 951us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 618/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 619/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 620/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 621/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 622/1024\n",
+ "90/90 [==============================] - 0s 886us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 623/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 624/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 625/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 626/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 627/1024\n",
+ "90/90 [==============================] - 0s 893us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 628/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 629/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 630/1024\n",
+ "90/90 [==============================] - 0s 891us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 631/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 632/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 633/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 634/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 635/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 636/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 637/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 638/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 639/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 640/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 641/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 642/1024\n",
+ "90/90 [==============================] - 0s 840us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 643/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 644/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 645/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 646/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 647/1024\n",
+ "90/90 [==============================] - 0s 946us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 648/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 649/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 650/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 651/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 652/1024\n",
+ "90/90 [==============================] - 0s 883us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 653/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 654/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 655/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 656/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 657/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 658/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 659/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 660/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 661/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 662/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 663/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 664/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 665/1024\n",
+ "90/90 [==============================] - 0s 888us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 666/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 667/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 668/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 669/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 670/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 671/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 672/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 673/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 674/1024\n",
+ "90/90 [==============================] - 0s 982us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 675/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 676/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 677/1024\n",
+ "90/90 [==============================] - 0s 840us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 678/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 679/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 680/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 681/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 682/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 683/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 684/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 685/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 686/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 687/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 688/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 689/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 690/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 691/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 692/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 693/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 694/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 695/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 696/1024\n",
+ "90/90 [==============================] - 0s 882us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 697/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 698/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 699/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 700/1024\n",
+ "90/90 [==============================] - 0s 875us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 701/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 702/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 703/1024\n",
+ "90/90 [==============================] - 0s 834us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 704/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 705/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 706/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 707/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 708/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 709/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 710/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 711/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 712/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 713/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 714/1024\n",
+ "90/90 [==============================] - 0s 888us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 715/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 716/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 717/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 718/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 719/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 720/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 721/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 722/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 723/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 724/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 725/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 726/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 727/1024\n",
+ "90/90 [==============================] - 0s 889us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 728/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 729/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 730/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 731/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 732/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 733/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 734/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 735/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 736/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 737/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 738/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 739/1024\n",
+ "90/90 [==============================] - 0s 891us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 740/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 741/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 742/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 743/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 744/1024\n",
+ "90/90 [==============================] - 0s 951us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 745/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 746/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 747/1024\n",
+ "90/90 [==============================] - 0s 888us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 748/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 749/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 750/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 751/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 752/1024\n",
+ "90/90 [==============================] - 0s 842us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 753/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 754/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 755/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 756/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 757/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 758/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 759/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 760/1024\n",
+ "90/90 [==============================] - 0s 875us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 761/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 762/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 763/1024\n",
+ "90/90 [==============================] - 0s 957us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 764/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 765/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 766/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 767/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 768/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 769/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 770/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 771/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 772/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 773/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 774/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 775/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 776/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 777/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 778/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 779/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 780/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 781/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 782/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 783/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 784/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 785/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 786/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 787/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 788/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 789/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 790/1024\n",
+ "90/90 [==============================] - 0s 915us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 791/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 792/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 793/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 794/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 795/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 796/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 797/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 798/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 799/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 800/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 801/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 802/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 803/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 804/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 805/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 806/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 807/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 808/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 809/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 810/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 811/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 812/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 813/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 814/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 815/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 816/1024\n",
+ "90/90 [==============================] - 0s 996us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 817/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 818/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 819/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 820/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 821/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 822/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 823/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 824/1024\n",
+ "90/90 [==============================] - 0s 846us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 825/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 826/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 827/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 828/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 829/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 830/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 831/1024\n",
+ "90/90 [==============================] - 0s 888us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 832/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 833/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 834/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 835/1024\n",
+ "90/90 [==============================] - 0s 875us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 836/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 837/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 838/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 839/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 840/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 841/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 842/1024\n",
+ "90/90 [==============================] - 0s 970us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 843/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 844/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 845/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 846/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 847/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 848/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 849/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 850/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 851/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 852/1024\n",
+ "90/90 [==============================] - 0s 842us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 853/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 854/1024\n",
+ "90/90 [==============================] - 0s 875us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 855/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 856/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 857/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 858/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 859/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 860/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 861/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 862/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 863/1024\n",
+ "90/90 [==============================] - 0s 835us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 864/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 865/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 866/1024\n",
+ "90/90 [==============================] - 0s 913us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 867/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 868/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 869/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 870/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 871/1024\n",
+ "90/90 [==============================] - 0s 921us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 872/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 873/1024\n",
+ "90/90 [==============================] - 0s 894us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 874/1024\n",
+ "90/90 [==============================] - 0s 915us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 875/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 876/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 877/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 878/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 879/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 880/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 881/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 882/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 883/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 884/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 885/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 886/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 887/1024\n",
+ "90/90 [==============================] - 0s 889us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 888/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 889/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 890/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 891/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 892/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 893/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 894/1024\n",
+ "90/90 [==============================] - 0s 833us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 895/1024\n",
+ "90/90 [==============================] - 0s 965us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 896/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 897/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 898/1024\n",
+ "90/90 [==============================] - 0s 844us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 899/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 900/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 901/1024\n",
+ "90/90 [==============================] - 0s 875us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 902/1024\n",
+ "90/90 [==============================] - 0s 950us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 903/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 904/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 905/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 906/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 907/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 908/1024\n",
+ "90/90 [==============================] - 0s 898us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 909/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 910/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 911/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 912/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 913/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 914/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 915/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 916/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 917/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 918/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 919/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 920/1024\n",
+ "90/90 [==============================] - 0s 939us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 921/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 922/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 923/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 924/1024\n",
+ "90/90 [==============================] - 0s 832us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 925/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 926/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 927/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 928/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 929/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 930/1024\n",
+ "90/90 [==============================] - 0s 899us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 931/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 932/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 933/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 934/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 935/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 936/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 937/1024\n",
+ "90/90 [==============================] - 0s 829us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 938/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 939/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 940/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 941/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 942/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 943/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 944/1024\n",
+ "90/90 [==============================] - 0s 875us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 945/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 946/1024\n",
+ "90/90 [==============================] - 0s 962us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 947/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 948/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 949/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 950/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 951/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 952/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 953/1024\n",
+ "90/90 [==============================] - 0s 848us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 954/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 955/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 956/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 957/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 958/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 959/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 960/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 961/1024\n",
+ "90/90 [==============================] - 0s 891us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 962/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 963/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 964/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 965/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 966/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 967/1024\n",
+ "90/90 [==============================] - 0s 855us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 968/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 969/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 970/1024\n",
+ "90/90 [==============================] - 0s 835us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 971/1024\n",
+ "90/90 [==============================] - 0s 984us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 972/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 973/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 974/1024\n",
+ "90/90 [==============================] - 0s 832us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 975/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 976/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 977/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 978/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 979/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 980/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 981/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 982/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 983/1024\n",
+ "90/90 [==============================] - 0s 897us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 984/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 985/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 986/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 987/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 988/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 989/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 990/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 991/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 992/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 993/1024\n",
+ "90/90 [==============================] - 0s 849us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 994/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 995/1024\n",
+ "90/90 [==============================] - 0s 988us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 996/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 997/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 998/1024\n",
+ "90/90 [==============================] - 0s 857us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 999/1024\n",
+ "90/90 [==============================] - 0s 838us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1000/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1001/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1002/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1003/1024\n",
+ "90/90 [==============================] - 0s 886us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1004/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1005/1024\n",
+ "90/90 [==============================] - 0s 843us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1006/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0030 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1007/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1008/1024\n",
+ "90/90 [==============================] - 0s 883us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1009/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1010/1024\n",
+ "90/90 [==============================] - 0s 841us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1011/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1012/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1013/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1014/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1015/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1016/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1017/1024\n",
+ "90/90 [==============================] - 0s 853us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1018/1024\n",
+ "90/90 [==============================] - 0s 845us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1019/1024\n",
+ "90/90 [==============================] - 0s 840us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1020/1024\n",
+ "90/90 [==============================] - 0s 851us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1021/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1022/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1023/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.2760e-14\n",
+ "Epoch 1024/1024\n",
+ "90/90 [==============================] - 0s 847us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.2760e-14\n"
+ ]
+ }
+ ],
+ "source": [
+ "epoch, batch_size = 1024, 64\n",
+ "history_plain_11 = plain_11.fit(x_train, y_train, x_val, y_val, epoch=epoch, batch_size=batch_size)\n",
+ "history_plain_5 = plain_5.fit(x_train, y_train, x_val, y_val, epoch=epoch, batch_size=batch_size)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "Save the result to history.p"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# import pickle\n",
+ "# with open('history.p', 'wb') as f:\n",
+ "# pickle.dump({'history_11': history_plain_11, 'history_5':history_plain_5}, f)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ },
+ "source": [
+ "## Post analysis and drawing"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# with open(\"history.p\", 'rb') as f:\n",
+ "# data = pickle.load(f)\n",
+ "# history_plain_11, history_plain_5 = data['history_11'], data['history_5']"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "smoothing_windows = 16\n",
+ "def moving_average(x, w):\n",
+ " return np.convolve(x, np.ones(w), 'valid') / w"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": "",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEKCAYAAAAIO8L1AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAABMCElEQVR4nO2dd3hUZdbAfy8JhF4VEAImKKCARGI0Zo2Aay+rq2IXsSKsbe1i27XLqmtX9LNgxV6wd0AEkYAGCX0BJSjFBIRQAknO98eZYSaTmWQSZjIzyfk9z33mlvfeOffemXvuKe95nYhgGIZhGIE0ibUAhmEYRnxiCsIwDMMIiikIwzAMIyimIAzDMIygmIIwDMMwgmIKwjAMwwhKVBWEc+4o59xC59wS59wNQbaf4Jyb45z7yTmX55zL9du23Dn3s3dbNOU0DMMwquKi1Q/COZcELAIOBwqBmcAZIjLPr01rYJOIiHNuIPCGiOzl2bYcyBKRP6IioGEYhlEt0bQgDgCWiMhSEdkGvAac4N9ARErEp6FaAdZrzzAMI05IjuKxuwMr/JYLgezARs65E4F7gM7AsX6bBPjcOSfAUyLydLAvcc6NBEYC9O7de7+nn9ZmvXr1ok2bNuTn5wPQqVMn+vfvz5QpUwBITk4mNzeX2bNn02rCBNbtuy8DTjiB1atXs2KFit27d29SUlKYO3cuAJ07d6ZPnz5MnTqVL77oQs+e5Vx88QDy8vIoKSkBIDs7m8LCQlauXAlA3759SUpKYt48NZy6du1Keno606dPB6BFixZkZ2czY8YMtmzZAkBOTg7Lli1j1apVAPTr14/y8nIWLlyoF7Z7d1JTU5kxYwYArVu3Jisri+nTp1NaWgpAbm4uixYtYs2aNQAMGDCA0tJSFi9eDECPHj3o0qULeXnqvWvbti2ZmZlMnTqVsrIyAAYPHkxBQQFFRUUAZGRksHHjRpYuXQpAWloaHTt2ZPbs2QB06NCBjIwMJk+ejIjgnGPIkCHk5+ezbt06ADIzMykuLmb58uW1vk8bNmwAICsrq9r7tNenn9J17Ngqv5W5EybwR9euQe9T7tlnk+yZ/37CBNrvu2/M7tP69aX07t0dgJSUCv74Y3OV+/Tvfy/gttv2AuDUU4Wbb55b7X265pruvP12UwDuvvsXxozZPeb3yf//pOeaQk5Ojv2f6vn/NHToUEcoRCQqE3AK8Izf8nDg0WraDwa+9Fvu5vnsDOQDg2v6zv3220/qxMqVIo8/XqtdKipErruubl9nRJmHHxaBqtPChaH32WMPX7tFi+pP1iBs21ZZ7PHjq7YZP963/Zxzaj7m2Wf72r/4YuRlNhKakM/UaLqYCoEefsupwG+hGovIFGAP59wunuXfPJ9rgHdRl1V06NoVPG8G4eIc7LEHeBS1EU8kJQVf37Rp6H2aNfPNb9sWWXlqSXIypKT4ls89F7Zvr9zG82ILVBa9umN68bzQGkaNRFNBzAR6O+fSnXPNgNOBif4NnHN7OuecZz4TaAYUOedaOefaeNa3Ao4A5kZN0iZN9OWqlpx/PrzzThTkMXaOJiF+1tU9Sf2VR+DTuJ5xrqqO83hcduCvw/yVSShMQRh1IWoxCBEpc85dCnwGJAHPiUiBc26UZ/s44GTgHOfcdmALcJqIiHOuC/CuR3ckA6+KyKfRkrWuJCdDRUWspTCqUBcLIo4UBFQ1YrZsgQ4dgm83C8KIFtEMUiMiHwMfB6wb5zc/FqgSTRSRpUBGNGWLFOnp8PLLcPbZsZbE2EEoBZEgFgTAnnvCggW+5c2bK283F5NRH1hP6p3k/POhoKBOHiojWoRyMSVIDALg0UcrLwcqCHMxGfWBKQgvSUl1fnM84wy4++4Iy2PUnQZgQRx2WOVlT8bmDszFZNQHpiC89OwJv/5ap10HDoStWyMsj1F3QimI5Go8qnGmIAAGD/bNm4vJiAWmILzsuSf873913r1JE3ULFBRE
UCajbgRTBE2banpQKOJQQbRs6ZvfWReTv840BWGEiykIL3vssVMK4rLL1JJ48skIymTUjWAKoqbX7DiLQUBlBbFpU+UH+85YEOXlOy+b0TgwBeGla1coLKzz7rvsAkOGaCqiBaxjTCgLojri0IJo0cI3f9ppsOuuMG2aLlsMwqgPTEF48bofdvLpnp4O8+dHQB6j7tTFgohDBeFvQQCsXw9HH63zlsVk1AemIPzp3Xun3EwAZ54Jjzyi6a/z5tXc3ogCwYLUCWhBBCoIgA0bVDlYkNqoD0xB+PPXv8JXX+3UIZo3h3Hj4Jhj4LPPIiSXUTsaSAzC38Xkz7JllUWsSfeBKQijbpiC8KdnT1ixouZ2YTBsGOTlwf33g6easVFfNFAXk5eSksoimgVhRAtTEIE4F7Eo80svwT77wBdfRORwRrg0kCB1mzbB12/aVFlEsyCMaGEKIpDOncEzoMfO0qQJHHmk9oIN7AlrRJEGYkEMGhR8fWDKa3X9/4K1MQVhhIspiEB22w1+CzlsRZ0YORLGj4/oIY3qqEuQOg5jEPvvH3z95s21tyCso5xRF0xBBNKtG/z+e0QPueeeWsXD+kfUEw3EgmjZEi65pOp6czEZ9YUpiEB22y3iCgIgNxfuuEPn/VMUjSjQQGIQAI89pnkTI0f61u2si8l6UhvhYgoikCgpiGOPhbZtYcYMOPRQ+OWXiH+F4SXYEzNUzqiXOFUQAKmp0Lq1b9ksCKO+MAURSPPmUYsoX3ABPP+8xiOuvx5OPx1Wr47KVzVugimI9u2r38f/KTt5ckTFiQSBdZlMQRj1gSmIeqRNG+1Et+eeOgrdwQfDjz9qCahrrtGe159/HmspGwB1URD+MYrZs7XLchzRqpVvPlBBWBaTES2iOuSoEZrkZBgxAoYPh44d4aGH1Lr49lvIzNTif0YdCZbFVJOCKC6uvLxoEWRlRUyknSVQQfg/5M2CMKKFKYhg7Lqr+n66dInq17RurSPRlZerdXH55XDSSfDqqzpv1JFgr9Tt2lW/T8eOlZfjLJPAX0HUJc3VFIRRF8zFFIxDD93pmkzhsvfeMGCAbzk1VfvpXX65ZZvUmbq4mE49tfJySUnExIkE1bmYTEEY0cIURDD23jumNbtvu00D2BMmQEVFzMRIXOqiIDp29NXSBn0KxxGB/fhqm+ZqHeWMumAKIhjOVT88ZT3wl7/oH3nECFMStaYuCgJ0tCcvcaYgAi0AsyCM+sBiEKFo0kR9PMECnvXEuefqH3vhQjVqjDCpS5AaKnc2iDMXk78SMAVh1BdmQYSiXz/4+edYS0FuLnz3XaylSDDqEqSGqo7+OML/lLZvr32aq7/OtNiWES6mIEJx+OFx0Slh991h+fJYS5Fg1NXF5K8g4tiC2Latcl2vcIxcUxBGXTAFEYoOHWDjxlhLEetQSGLSJMjPOhwLIrCeRRzhr/P8O/o3bRreb8QUhFEXTEFUh3NxESHu0gVWrYq1FAlE4BMzORlSUmreL0FcTJs3++bDiT+AKQijbpiCqI6MDK2uF2MOOqjeumU0TPwf/OG2W78+KqLUFX9F4G9BhBN/CGxnQWojXExBVMeJJ8L778daCjIyNF6enx9rSRIUf9dRdfi7oT75JK6epNW5mMLBLAijLpiCqI4mTbSHUozdTElJcOed8Prr8MMPVbcXFmpKrEjMRY1PQg3uHMhf/+qbX78e5syJijh1IZQFYQrCiCamIGoiIwPy8mItBcnJGoe47bbKf/Dff4d//Qv69IHHH9eifz/9pAPM2Ah2HsK1INq3hwMP9C3HURwiVAwiXBeTKQijLpiCqInjj4d33om1FADcfz/cey888YRv3csvw9ixcMYZ+jxbswbefRdOO03HnDAlQXgZTF784xBbt0ZeljoS2FEu2PrqsBHljLpgCqImmjbV6q633hprSejYEfbZR8c0euABrQT7449aGjw9XRXCwIGawn/ooXDAAbq90VObqrz+I89FaeCo
uhDKUqiLiymOQitGnGMKIhz+8Q8YNEgd/XHARRfBEUfAmWfCiy9W3nbyyXDddTp/xBHw9tu+bX/8AQUFldc1Cjp3Dr9t8+a++Ti1IMJZH4i5mIy6EFUF4Zw7yjm30Dm3xDl3Q5DtJzjn5jjnfnLO5Tnncj3rezjnvnHOzXfOFTjnroimnDXSooVmNPXqFTfjFe+zD6SlVX2zTEnxvTC3bavz8+erq+mxx+DZZ+HRR+Ouo3B0qauCSAALwmIQRjSJmoJwziUBjwNHA/2AM5xz/QKafQVkiMi+wPnAM571ZcDVIrI3cCBwSZB965/evWHx4lhLUStOOQXOOQeeeUZLNPz3v6og/LN346DDeHSpq4vJLAijkRNNC+IAYImILBWRbcBrwAn+DUSkRGRHGLUVIJ71v4vIbM/8RmA+0D2KsobH4MHw1FOxlqJW7Lab9vXLz/cNd7DPPjBzJvz6q2bE5ObG3QBqO8+JJ/rmjz8+/P3i1MUUylJo2bL2+5uCMMIlmuW+uwMr/JYLgezARs65E4F7gM7AsUG2pwGDgKBdmp1zI4GRAN26dWPSpEkA9OrVizZt2pDv6V3WqVMn+vfvz5QpUwBITk4mNzeX2bNns8EzQH1WVharV69mxQoVu3fv3qSkpDB37lwAOnfuTN/u3Zn+wQeUtWlDSkoKOTk55OXlUeLx2WRnZ1NYWMjKlSsB6Nu3L0lJScybNw+Arl27kp6ezvTp0wFo0aIF2dnZzJgxgy0el0ZOTg7Lli1jlae+Rr9+/SgvL2fhwoV6Ybt3JzU1lRmeXt6tW7cmKyuL6dOnU+p50ufm5rJo0SLWrFkDwK23DmD79lImTVIL6PDD92D8+M5Mnrye444rZvDg7jz7bFvWr59KmSeKOXjwYAoKCigqKgIgIyODDRs2MnHiH+yzzwbS0tLo2LEjs2fPBqBDhw5kZGQwefJkRATnHEOGDCE/P59169YBkJmZSXFxMcs9FQijdZ/6PPggK9q1Y/2gQWxdvJicXXYJ6z5tXbuWnp7fVtnGjUz1/J7q6z4NGDCA0tJSFnss1R49etClSxe+/34WMIRASkuLqKjoUOU+bdy4kaVLlwKQlpZG69YdgbZ6XmUCuPi4T336MHXqVICE+j+Fuk95npT4tm3bkpmZydSp1f+fAu9TLP5PQ4cOrfK72oGIRGUCTgGe8VseDjxaTfvBwJcB61oDs4CTwvnO/fbbT6LO99+LvPZa9L+nHqmoENm8WeSKK6pv9957IkceKdK7t8iff1bf9o8/9JgJx623imjIRuRf/4q1NDuoqPCJ5T+demp4+5eV+fZxLrqyGglHyGdqNF1MhUAPv+VU4LdQjUVkCrCHc24XAOdcU+Bt4BURiY+OCKC5o99/HzfB6kjgnLre99yzalHArVvhyy+1j8Utt8DZZ8Orr2rW7223acZUsNFZx47VPhvBuOkm8Ly8xB9xGqR2LnhZ73D7APoXuPWqCsOoiWgqiJlAb+dcunOuGXA6MNG/gXNuT+e09KZzLhNoBhR51j0LzBeR/0ZRxtrjHJx0Enz2WawliTgHHqi6DzQ2ce65cN558MYb2hHv2WfhrLMgK0t7bOfm6gPqxRd9te0WLICiIq2WXlFRObZx2WWqZHr21P29rFoVRyVC4jRIDcHjEOEqCOcqKwmLQxjhELUYhIiUOecuBT4DkoDnRKTAOTfKs30ccDJwjnNuO7AFOE1ExJPuOhz42Tn3k+eQN4rIx9GSt1bk5MAdd8Bxx8Vakoiy776a3TR9uiqIK6/UoU6LivRBFFjS6NBDdRo3Th/8AwfCk09q4NSbSnv22VpD6sMPtSrt6afrvu+8o+uKi2HaNBg2DA47rN5PuSpxGqQGVRCByQThKgjv/tu26Xx5efgpskbjJao/Ec8D/eOAdeP85scCY4PsNxWI36FykpMb5CtYcrLqPRF9q99tN13v/QzFqFG6z7hxMGmS9r/w8vPPcPHFcNRRPuUAaoRdeKG6tR5/
XBVMTQpi82ZNIrvyyjqdXnjEaU9qCJ7SWhsFEdibOpwhMozGjb1D1JW+fTVXdP/9Yy1JxHGuZqUQbJ/Ro6uuHz4cTj1Vy4EEcsghcOyx+uBq3lzjEv7KJZBZs1ThfPqpKpyoEOcWRCB1VRAN8P3GiAJWaqOunHkmvPderKWIe9q0Ca4cQOMZ3qGiR4+G++4LfZzvvtMChP/5j1opURthz19B/PxzlL6kbkTSgjAFYYSDKYi6kpSkr81W+Swi7Lqrujz+/LPqtvJyjWNMnarK5p//VDfT3XfrSHt//FG37xQJUsm9Tx/f/IIFsGhR3Q4eBXbWgrDOckZtMQWxMwweDJ4OKMbOc+GFcPvtvgSxKVPgggtUGVxyiZbCAujaVVNtzz9fx7545ZXaf9fs2fDww3Dzzb7ALaBR+b59fcueTkzxQLxbEBUV8MsvkT+uERnKy3VwsYcfDt97agpiZzjkEFMQEaRrV61U+/bb8NtvmuX0yCPw4IOVn9mgxlvXrnD11Zpiu2ABLF8OK1YEO7KPrVvVcpgwQRXLNddoxtVTT2lISQQYMMC3Q4hCVcXFvvn66hITyRhENAzfTz7Re/X443U/xrZtjfsvtWCBeq9//z1yx7zvPo3fvfeeltvp21fvUzhYkHpnaNrUXEwRZq+9tPPdq6/CMcdUHr8nFNdfr+WWdt9d+3JccIF27FuxAj74ANat02f+GWfosXv21Pp9X32lIYeLLtL+HosWacrtFf75vJ4SDzt6ljnHv/+tt10E5s2D1FRVGA8+WLvCsYGUllafWRSOBfHnn6qw2rbV0XL9iaYFsWCBWn4PP6wFIV99VR904bJsmdbBzMtTo61/f3jtNU2WOOmkyMq6s6xerdfXP+FtZyko0CzAnj19SnbQIO1rtOuu4R9n40aN+y1frr+XP//U6ZNP9Df6ww8q9/Tpep2TkvR/EwonDahLZVZWluTV9/Cgr7yiVzzefsWNjLIy7Qh21VXQqZO+gXXvruvHjNGH13ffaYihZUtNqfUGz7/5Ro1B0DTfs2deQfoHj/gOfsUV2hswLY0fH/yGub+2Y/hw375Dh+r3vf56+Cm4BQWqoE4/Xd/sVqxQ6yYrS4PxwRg4sGrcfNEiLTIMWpDxhRdUyeTn67HGjPE9yNLSfC6gpUt1kKlIMG6cvvmPGuVTSrfcoteyJj7+WO/NXnvpvt578+ij+mCbOVMVRc+eNR8rWpSW6otFs2Yqx/Tp6u68/vrwhzuvjvnz1YK9/36flfjhh/pu0qyZvryEw8aNcOSR+pJz7bX6olBergon0PpcswbuuktfkMaOraZLQXV1OBJtqpdaTMG44YbYfK9RhQMPFPn1V5HSUl2uqPBt+/FHke3baz7GvJNvDl74COS9jFtDHuPKK0WuvVZk2rTQx378cZGrr9bpxx+1/Y8/inz9tW6/4YbKMvvz739XFem333Tbhg0il10mUl7uW54/X49/9tkq0667+vZ7+WXfcceM0WOPG1f5+2bMELnkkupran3/vcg991Rdf/PNIgsXht5PROTOO0UmTBBZsCD49u3bRVauFHn44eqPU1u+/Vbvw//+F177e+4RWbJE2xcWimzZIvL66yJPP1237y8rE5k1S+dnzxY588zg17iiQuSii0QKCkSKioIfq7BQp6VLRY45RuS++0TOP1/kscf0OwoKqpfF81sLXVOvuo2JNsVMQUyeLPL887H5bqMS8+dH4CBjx4ZUEOWHHBpyt9JS/cNddZXI8uWqqJ59VuTBB/UB/cADIl99Vf1Xv/66yA8/iPz+e9Vt69ZVFWnDBt02ZozIqlXBj1lRIXLNNSJduvj2u+IKkW++EfnkE5HnnhMpKdHTLi72ncsVV+j1fOopkUmTRNav9x1zzRp90N5wQ3Clu3SpnveYMSIff1y5TXm51rt86aXqr4WXSy4R+eADPX8RfViOHCmyaJEub90aWqkGynTPPaqcly0TufRSfdhXx/Llet+Ccd11eh1q
UoT+fPGFKvK771aFfM45Kn8o1q8XOflkkdtvr7rt3Xf1+t58s8iJJ/qOs2aNyKZN4csk1TxTzcUUKW69VVNwjMTnySd1mNlgnHqq+pKq4ddfdfS+Nm0gM1NdXh9/rMvXXlv9V2/frq6a/Hx1r7gA43/0aHXpeDn/fP0O0Eyv6ujXz1dY8eef1bW11146TK1zsGSJ9jNp3VrdXddeq26oK6/UTox/+YvGd0Q0vnD66erS8O86EogIfP65lnDZYw/tOPnss+r+Ovzw6uX15913NUaxapWeb69e6qJr0kRdQH/+qcUjvf1qvMybp4lpInD55RqjOvxwjUEtXqydLi+7zNf+6ae17+ugQbp8/fXqpgs8LqjX8YMPtO7YY49VjfmAxsJatdKyM2PHaiysXTu9brm5un/HjjWf/223aXp3u3a6vHix7nvVVbosUvW3UgvMxRR1brkl+PrCQpHFi+tXFmPnePLJkBaEjBxZp0Nu3Rp++fMlS0QeekgkP7/qtssuqyxOcbEasP5v96EYMMC3X7Bjh+LPP/Wt//rrRX7+Wa2h8ePD319EZONGdReNHq1ukLqydq2WnN+2TZenTNG38aIikRtvFJk5Uy2fb7/VNscfr9M116g7J5Abb9TPb75Rd9xHH4lcfrle10svFfnyy9CyVFSolThrlsiLL1bdXlwsMmiQyE8/qRUUaBXWphz+qlV67SZMELn/fpXVa+1FgJDPVMtiihTNm2vRoQED9JXkww/11eGDD/SVzKyLxGHTptDbNm+u0yFrU/dojz3UUrjuOnjgAc008b4hBgYbO3TQ7jjhUNeOct7yJ717q0x9++obbW1o3VqnRx7Zucq9u+yib/1eDj5YJ9AEhJkz9U3dmwn3wAOaCTVvns8q8KdTJ7WkJk/W7CvnNGvolFPUQujWLbQszkGPHjq98YZaN127+rZ/840e86qrtHqx/zaoXRZUly56jPnztX7Zxo1676ONuZgiRXk5LFyo6QgnnKB+gFWr9Nf6+eeaCRPMTjXij4kT9R4Go1OnunfdriXPPaeuqY4dta9H8+aaffTdd742tfn7ZmVpPjzogzQrq/YylZSosgt3LOxY8cMPmnEU+FAOZOtWTa0dOLByHbCaUo4DKSnR98IRIzSrbdMmVVKPPBJ8HI84I6SLyRREpCko0N5bI0boQ6ZlS81Xe+stVRJG/FNRUf2/+uabw8vh3Em8b/lNmmjJ9SVL1Ef+/PO+NrX5+2Zn64MTdNyP7CoDABs7w4wZ2n+hVStVMHffXbOCihNCKghzMUWa/v3ho48qP2C6d9eeK3PnVu6la8QnTZrApZdq5DEYd96pLsOdiAqGg/9PaJdddPrmm8gcz/p3Rp7sbF8nzKKi2nVwi1es1EY0CPb2+cADWt/BSAxq8qGUlNSPHAGMGuXrPX3zzbXb16q5Rp/ddtP3i4agHMAsiPqjSRONSq1dq36BnanJYESfmhTEmjWR6UZbSzp0UC/mvHm1H4HPqrkatcUsiPrk0ks1cvX3v2tZRSN+CVQQgRHLo4/WIk8xoGdPHTCptkOGmgVh1BZTEPVJ+/YawH7+eS02szP5fkZ0CVQQZ50FRxzhW168uHK0OAEwBWHUFlMQ9c2AAZqwvcceWvXLiE8Cu8U2bap+HX+WL683cSKBBamN2mIKIhY4p2mw99zjKydtxBeBFkSzZnD22ZXXrV9fb+JEAn+XVH2NYWEkNqYgYkVKiqZLfv99rCUxghGoIJo21Z7y/gQbHzWO8e95G6PwiZFgmIKIJQMG6EDLZWVamsOIH4JZEGlp8PXXvnUJZkF4x7+A4J3Bg1kVd96poRfzhjZOTEHEkuRkHanmxBM16FlDlVCjHglmQUDlcikNSEHcdZd2+r/wQl/v7GnTdOCfL77QrCn/YVaNxoEpiFhz/PE6lNSVV2qhnfPOsxSTeCAwSO1dTmAF4d95a+3ayttuvlkN2Wef9Y1a99Zbvu0bNsCXX0ZfRiO+MAURD/Tt
q59jxmiB/48+iq08RoO3INasCd3OWy01UCFYPkXjwxREvHHQQfDtt1VTKo36JVgMAiqX/NywIaGsPX8F8dFHvhh7YOxh0SLdNndu5fXWt7PxYQoi3mjSBI49VseRePzx2pXrNCJHKAsiKUlLfntZvbr+ZNpJ9tij8vJnn+ln4BAX69Zpcl3gT88siMaHKYh4ZOhQHeswPR2uuSbW0jROQlkQALvv7pv/5Zf6kScC7LabFhv24nUzBSqI4mINUAdiFkTjwxREPHPMMbDvvr6ooVF/BOtJ7SVBFQRoGTAv3qykYBZEsLRWsyAaH6Yg4p2//13dTUb9Eq4F4d8vIgHo2NE3H0pBFBdrxdhAfvvNyoc1NkxBxDtt2lQ/RrIRHULFIKCyn+bNN2HLlvqRKQKEoyBWrFBlAHra3vEntm2rt9FWjTjBFEQi0LOnppZAQgVFE5pAF1N6um/+zDN98+vXw6+/1otIkSAcBeFP796VDSZzMzUuTEEkAsOHa0bTiy/CySdXHc2suBjOOUfnRXS8Q2+KilE3+vf35YWecgpkZfm2tWwJubm+Ze/rdgJQWwUxaBCkpvqWly2LjlxGfBJVBeGcO8o5t9A5t8Q5d0OQ7Wc55+Z4pmnOuQy/bVc45+Y65wqcc/+MppxxT8uW8NBD8PvvMH48jB3r2/b115rplJEBl12mNRNeegkmTky4AGpc0bIl5Odrb7EJE6qOP92tm28+QRVEqCwmf66/vvIw6m+/HR25jPgkakOOOueSgMeBw4FCYKZzbqKI+PcAWwYMEZF1zrmjgaeBbOfcAOAi4ABgG/Cpc+4jEVkcLXnjHuf03wpw8MFwxRXqHJ43T/+127drHuKnn+pr3gMPaPmOhx/2uUt++UWtkOHDtfCcUT3dulVWBIHbvCSQgth9dy0BVlYG//ufGpuhFMQrr8A++8CwYfpzAiva19iIpgVxALBERJaKyDbgNeAE/wYiMk1EvIWHvwe8xuzewPcisllEyoDJwIlRlDWxOOIIuOMOLaAzZox2rktJ0Z5Qo0bBbbdB8+Y6fsGNN2oQtaICnnoK9ttP9/MvxvPcc/XbIW/KFF86zPr1WosqwcpWVFIQCWSptWqlPwEvX31VOQciK0sVwvPP+0It/jEIy5doXETNggC6Ayv8lguB7GraXwB84pmfC9zlnOsEbAGOAfKC7eScGwmMBOjWrRuTJk0CoFevXrRp04b8/HwAOnXqRP/+/ZkyZQoAycnJ5ObmMnv2bDZs2ABAVlYWq1evZsUKFbt3796kpKQw11NzoHPnzvTp04epU6cCkJKSQk5ODnl5eZR44gLZ2dkUFhay0hPN69u3L0lJSczzlM7o2rUr6enpTPe8irVo0YLs7GxmzJjBFk82TE5ODsuWLWPVqlUA9OvXj/LychYuXKgXtnt3UlNTmTFjBgCt8/LIyspi+vTplJaWApCbm8ui9u3Z3rkzux17LG07dWLroEH81LIlTYcNo/+YMbRs2ZLfi4tJ3rSJJjNmsJtzTD3jDMo8ymLw4MEUFBRQVFQEQEZGBhs3bmTp0qUApKWl0bFjR2bPng1Ahw4dyMjIYPKkSez54IMUHXQQA6+7jvz8fNatW0eHWbPYa+VKNrVoweaHHuLPjAx6rFyJnHcexVdeyS8jRiTMfepYVsZAz29w88SJ/DBsWM33qXXr4Pdp0SLWePw9AwYMoLS0lMWL1Vju0aMHXbp0IS9Pf/5t27YlMzOTqVOnUuYZFq629ykjYy9mzOgKwH33FdOv30Zgd8//ZgWjR/+PzMxMli8vZvny5ZSUJAEHA1BSUsakSVMT5j7V6f8UJ/dpx/9p8mREBOccQ4YM2fF/AsjMzKS4WO+T3r/aP/eGDh1KSEQkKhNwCvCM3/Jw4NEQbQ8B5gOd/NZdAMwGpgDjgAdr+s799ttPjBBs2CBSXu5b/uEHXedl5UqRN98UmT696r6zZoU+7rvvimzb5lv+6iuRK6/UY11zjW/9Z5+JPPaY
yOrVvnVXXqnrRUQeekhk0aJanVJEKCys236bN4u0bCmitpfIBx9EVq4osmiRT+z27UX23tu3PGFC1fbbtvm2JyWJVFTUv8xGVAn5TI2mi6kQ6OG3nApUcdY65wYCzwAniEiRd72IPCsimSIyGCgGGm/8IRK0aaOuKC/776/rvHTrpkX/P/7Yt+6jj+C99zQILgKzZun6yZNh61Ydw+Lll+G113T9//6n/QLOOUfLmA8YoMuFhepWuuQS6NzZd/z//lfdZaCusaeeqv4ctmyBu++OXG+tmTPhtNPUvVVcXLsR4lq0gFNP9S2/805kZKoHevXyxdzXr4f583W+ZUv429+qtm/a1DdcaXl5NcOV/vEHfPJJiI1GWBQXw+mn63VcvRo8FkysiKaLaSbQ2zmXDqwETgfO9G/gnOsJvAMMF5FFAds6i8gaT5uTgJwoymqABr333FMD4BkZGqeYPVvjGCNHal+M//xHh9488khf2w8+0If7tGn6AO/eXY83YoTGO2bO1JFnqiMlRWtQjR2rD+20NH0SrV+vAxkUFamyAk35veyynT/fjz+Gxx5Tp3uvXvr0u+Ya2Hvv8PY//HDNKoOEcs4nJenwo4EDAJ1wgsYogtGypRavBQ1qV+omIgJz5sDTT6vi3H//yqVja2L+fHj/fb2G2dmqaAYN0heMpKRanVvUmDZNX4hGjNBswvbt9VwjRXm5vmR8+aVmLD7wADzzDJxxhv4+Y0TUFISIlDnnLgU+A5KA50SkwDk3yrN9HHAr0Al4wukrTZmIeBPO3/bEILYDl4gvmG1Ek3PO0R/l55/Duef6rI7DDoN331Vl8M47+icePFi3VVToEydwzGaAk06CVasqWyuhOO446NNHlcxNN+noNaWlevyWLeHHHzXl9L334IUXNBurSR2N4B9+0KfcvvvCkCGqwP78UxXgHXfo07Omh5z/A2LbtrrJEWl+/VWndu1UyYa47rvsUlVBHHxw6MMGKogdw2KsWwdXXQUDB8Kjj2ru7Pjx4ReZvO8+/e1cdJEqigcf1L4+33yjVmckXgR2FhH97R9+ONx6q16Itm31NxPY474urFkD116riuDJJ/U3fddd+nnjjTuvID78UCtE5+Xp/6e0VC33E06oed/q/E/eCeiMZhFdApyPZig1CWff+pwsBtFA+O03kYcfFpk6VZf9YydePvpIZMwYkf/8R6SgQNdt2aKxgeooK9PPG2/0zftz//0io0aJXH55zXJ++KHPOT9okMj27TXvE4pffqkcE6oLy5eLXHyxxnOeeELkH/8QKSoK2jQnxye6d/r009CH7tXL127xYhFZu1av38UX67w/Y8aILF1as7zLlonce2/ldf737957RS65RH8PseS993yxMi+vv67xt51lyxb9va1ZE3z700+L/Phj3Y+/davIUUeJXHSRXsvly/W+XX+9yPffa8ywumd/tRs1ePwZ6i56GrgTuB+YCBQAtwFtqztGfU6mIBoZ8+eL/PSTyHnnicycKXLWWSIvv1y5TWGhyKZNvgD4tdeKvP++yE03BT9mRYU+pJ57LvQT89tvRf75T5EHH/Q9NXv1EnnrrbqdR16eyNVX68P2P/+p3b4ffqjXYflykSuu0AeCl/XrRUaPFikt1eX8fJGbbxYpLpbjjyuvpBy6USiLZ64L+TUDBvja5ueLyAkniNxyi8i8eVUbb9miSqI6li1Teb2yhaK4WOSee6pvE00qKkSuuqrq+u3bRW69VT9rOodgvPeeXqf33hOZNi10u+3bRS67TOTVV2s+5s8/+37X//63vkS98or+vjZu1N+If9vDDhMZN06kmmdqTS6mY4CLRKRKsRnnXDJwHNoRzvpXGvXPXnvp55NPqs/28cfVf7t4sRYN+vhjdYXttpt+Xn21xlneeEPdZ8FwTl1H550H//oX9OsHPfxyLVat0jImt9+u8Qsv3btrp4LNm+Gss6p3fZWVaW/3uXPVRdGunbpaQN1bJSUakwnl
vnjrLe3lvX49HHCAr6zKfffpfl7atdPOkqNGwXXXaULBkCFw++2c/EcfJjKaXVjLaJ6kOaWkPb8RBvxHj9u1q8afnnwSbrqJAypWsYQMttKCsuWFGoMaPTq4fM2bq1vQ21kz8Fr88ovep//8p2rNq0A6dNDrMHVq5fImFRWVj7tli36vc7Bxo+7TvLm6utLT4R//gFdf1fjS999ru/R0Tcyoju++C+57S05WV83jj2vp2yefDB4vWbVKr6U/8+bpcX/5ReefeCL09ycnwyOP6HkMG6bftXmzxmoee0z/A0uXqpuqfXuNC44Zo/d++nRt6+3Q4q26CBrfeeMNvb7VUZ32SLTJLAhDDj1U02nfeUffvrxum40b9U3Qa2KHk6u5bJn3DUvZskXkuut8bpvvvvO9Vh94oLpVvv22eivgo4/0GN9+q8uBcsyZo/JnZ/usgW3b9M3wp5/ULXDDDSKffOKTo6hIZQvF9u36Fu7nJnn/zNfkJu6QJxglbVkvmZki8vnnIscdp+61hQvVStq6VeSuu+S/vZ+QK3hQevCLLLj88ZpTkvPy1H3x8MNVt115Ze3euisqRM48U+TSS/XNd/58kZNOEhk/Xs/9559FzjhD7/nbb6tl8tRTajnecIPIs8+qdXb//SLDh+tb9VtvqeVZUlL9d197bXBXpIjej+ef1zTwe+6pei+nTNG39N9/1/u6bZvII4+IXHCB/h6zsz3mWBjMnav73X23WgePPaap1ZMn67Zff/W1XbxYr8uLL6plXTN1czFVaQwHAl8D3wEn1mbf+phMQRiycmVkj3fttfqgqajQB+YPP/i2zZzpUxCZmb71t92mD/Bp01QhePtaPPGEuiVq4o47RGbM0BjCzJnqJnrhBX3wXX993ftu+LF2rUhmy/myK6sF/MTatk3lfvTRSg/xo48WuYIH5V1OkPln3xn+F/33v3rwsWP1AX/nneqaqwslJbrvMceocv72W71GZ5+tcapbbtGHooj69UeN0tiOiC+O5a8QfvlF5L77gn/Xb7+pvOG4dkREJk1SZeHPddfpw/ucc/S8zz9f76fXdVnb2IJXUW3apL/FnYl5VSbkM9WJhC6x4JzrKiKr/JbfQIPUDpgmIvtUb5/UL1lZWeLtyWgYEeHrr7UvR79+aqKfd55v25w5mg4MarJ7R/4T0eyTNm0gJ0dTFwsL4cAD1d0TWPgvFO++CwsWaIZLcuQTDqdOVc9TixbqtarO2zBsmJb8akYpL09I5pTTa5F+Wl6uqaGtWql7Jzc3vKy2UPz6q5bAB02FnjZNXWf+bNqkLqiaUlFvvhnuvNO3fM01eq9Bjxk4kHd1XH21Zjrl5GifoFmz4MIL1dUW30MHh/xB1qQg3gNmAfeJyFbn3NNoyYsK4DwROSjCgu4UpiCMqFBerumW991XuaPAggW+PhN9+oCndAOgSsJfEXj/Z+EqhzhjxAit8+glLU0zoXv3jplIkeGBBzStdulS7Ytx+OEaN1iyBO69t/bHe+IJVU5Nm+pvJl76cVRP3RQEgHPub8AVwAtoMPpMoCUwQUTWVrdvfWMKwqhXli71vWGmpTXowRKeeUa7Kvhz1FENoOP0hg3aox80KSFBFfhOUncFATtKd/8DOBa4S0S+jZxskcMUhFGvFBb6Mpy6dWvQw61t3x484ai4uOZEGCPuCakgqu2G6pw73jk3FQ1Mz0XLZZzonJvgnKuFc84wGiD+T8x46UkdJZo21ezJQPxLdxkNj5rqFNwJHAmcDIwVkfUichVaIuOuaAtnGHFNI1IQAP/+t3ph/OPl995bv0OJGPVLTQriT9RqOB1Y410pIotF5PRoCmYYcU8jUxDNmqmS8FZ/Be3rt86qpDVYalIQJ6IB6TICKrEaRqOnkSkIL3vuqZOX33+PnSxGdKlJQWwVkUdFZJyIbAjWwDnXOth6w2jwJCX5sl4qKjQdtpHgregOpiAaMjUpiPedcw845wY753YkgDvnejnnLnDO
fQbUUMzEMBoozjVaK2K33XzzN90UOzmM6FKtghCRQ4GvgIuBAufcn865IuBloCswQkTeir6YhhGnNFIFsWM8CHRojdoMxmckDuH03/8E+FlEVkRbGMNIOOqqIAoLtcduZmbdBz2KIf37V14uKIC//CU2shjRo8ZfpmhPuveiL4phJCB1URArV2ppjv3312FWy8qiIlo0OTMgZWXePO2U/OyzWpIjxkMpGxEi3FeX751z+0dVEsNIROqiIL7+WscvAPj2Wy3ml2B07KhDYnj56Scd+uHCC7Vu08iRMRPNiCDhKohDgOnOuf855+Y45352zs2JpmCGkRD4D9Dz22/h7bNmTeXliRMjJ089cuCBvvkPPtCKsF5efFH7SBiJTbgK4mhgD+CvwN/QkeT+Fi2hDCNh2N/PsPaOClcTgQoiQeuHDRniq9r966/qYvJn2DCYOdN6WicyYSkIEfkFaI8qhb8B7T3rDKNx4+9Lef99He60Jlavrry8YEFCPkWbNYOjjw69feFCHRH1+uvrTyYjsoSlIJxzVwCvAJ0908vOucuiKZhhJASDB1dO6Vm6tOZ9Ai2IjRsTthLssGE1t/EfRyLROf986NxZx4fy8uab+hNo0qR6Y/Dccyu74RKBcF1MFwDZInKriNyKDj16UQ37GEbjYOBA33zgwz8YwdpceSVccglMmhQxseqDk0/WYPWgQZqx+8MP8MUXcNVVvjarV6sObAicey58+mnldQMGwDvv6LtCLIlGMly4CsIB/nUEyqmmhrhhNCo6d/bNB7qPAikpgR9/rLr+rbd0NLK//lVdVQlCkyZwyy0we7aOsLn//nDYYTpQ2157+dotWRI7GSPJ4MGaweXP3ntD3761O87tt+u1GjBAvZQiOkppZqavzeLFsN9+Oj9rlsZ89tsPjjzSV95k6FAd3XbIEHj44TqfVkjCVRDPATOcc/92zv0b+B54NvLiGEYC0qWLb/6LL0K3++03jepWVIRuIwJ33BE52WKIf0G/665rOFZEJLj0Ug3gz52rGc8ffqiDE7ZrpynDAM8/rxbL9u06eulbb6miOP/8yuVN1q+HyZN1SOxIU6OCcM41AWYA5wHFwDp0POqHIi+OYSQg/hbE55/D2hAj8f7f/1Ve9lcs/ixaFBm5Yox/APvLL3X86rffjp088cQ330B2Nuyzj3aLKSjQ9RdeqIqhvBxef107JC5cqIrk8MNh333hzju1I76X006Lnpzh9KSuAB4Qkdki8oiIPCwiQWxkw2ikpKZWXg7mQoKqzutx4/SpCdCihW/9xo2weXPk5IsRI0fqA8/L6tVw+un65twYOO88faAfc0zl9Vu3wj/+oRbBzz/rWN9bt+q2k0/Wcb4//FDdSZ06qVHZv79aFj/9pPt8/rnveK1aETXCdTF97pw72bnGOaK3YVTLoYdWXi4q0s/t2+G55+C22zS7yf+1r1Mn+Nvf9B//wQf66R3fGmqOZSQAycnw9NPw0ks+v31Zmfa0DpbV++efcMMNuv2mm2Ds2MTubPf883pbA4dl9SqDXXbRkJR/ZlPz5hpjGD1aFQxofGPtWpg+XZe3b/dZHNEmnGJ9AFcBrYAy59xWNEAtItI2apIZRqKQnAyjRqlFAFBcrJ/PPKOvigBvvFG5p/XKlTqeRMuWcNxxuq5LF1jhqYm5ahWkp9eP/FHEOTj7bMjJgYwM2LRJR6S78Ua45ho9/ZISyM+HM86AP/6ovP9NN8HFF2uWVFISnHVW5eom9c0ZZ2ii2R9/qOF4222q/C67TB/ixx6rVsNnn4U+Rvv2ajXssw+kpVXuawl6ju+8A0ccocvNmqkSufxyVaJlZfDPf1YtmBgVRKTaCbUyDqqpXTxM++23nxhGTLjxRhF9MRa5/XZdd+KJvnX+U7t2wY9x3HG+Nu+8U2+i1xcnnxz8ctRmysgQWbw41mcSXe67T+Tmm+v1K0M+U2u0IESkwjl3P5ATbWVlGAmLf+6j14II5QfwD2r7
062bb37ZssjIFUecfnr4QerhwzVIG1j/MD9fwzbDh2s67ddf66XatEkvX48e0Lat+uVbttSBjXJyfJd20yYNALdvr8deu1bfyJs21fXt2um0dauGheq7EvuJJ2q669df1+/3hiJcF9PnzrmTgXdEErAmgGFEm0AFsXVr6OR//wJ//vh3z736ao3ytm44I/oOGwavvgr33gtz/Ep9tmmjqZ4ZGVouPCND199zD/z3v+ptmzYNli/37fPSSzr5M2tWZOVt0kRva6dOOrVqpS6e0lJVJt5RZkN9iqhbzDs1aVJ52Ts5p/t4JxHtb+FvOyUn6+eWLdreOY1FlJXpp3e+adPKis3b1jvv/dy+Xc+jtLT69OPaxCBaAuUWgzCMIPgriBdf1Id9qP4OoayDQYMqLz//vDq3GxBnnKHT7NlqYB11FOy6qz78AlNgunfXDnde3ngjuimdgVRUaKwhMC4S76xfH7ljhasg2gFnAekicrtzriewWw37GEbjoXv3ysvXXeeb/8tf9BXYS9euwY8RqCDy8yMjWxySmVm513A4+ZGnnqrF/x58UI207ds1yHvIIepWKizU2P+mTb5p3jytj1RSokqodWt9u16/Xt/Kd91VDbrt23X9n3/q1Ly5L9uoMROugngcqEDLfd8ObATeBmwQIcMAfdoNGhS8D8TgwZqzefDBunzPPcGP0aoV3HWXr5vs/PnRkTWBSUuLTEmJ8nJVCMEUU0WFbtu+XRVRUZFaEZs2aeyiZUufu6hJk8rz/uuc0+/xdzsFm7yuKOcq7+tdBnUfOaeKC3Sfpk19U3KyTtu3axcabyDA303lv9y0qSrGUN5OLy6ckIJzbraIZDrnfhSRQZ51+SKSUcN+RwEPA0nAMyJyb8D2swBvMeASYLSI5Hu2tQeeAQYAApwvItOr+76srCzJS9Da+kYD4NdfYffdq65/8011wM+apU7kgw4K/cpcWOjrD9GunT6hEnDMaiOhCGm/hfvL2+6cS0If1DjndkUtitDfqO0fRwcb6gec4ZzrF9BsGTBERAYCdwBP+217GPhURPYCMgB7nTLim549tZypP7vtph3iQLvG5uZW70/p3l0joqC+jnDGlzCMKBGugngEeBfo7Jy7C5gK3F3DPgcAS0RkqYhsA14DTvBvICLTRGSdZ/F7IBXAOdcWGIynIKCIbBOR9WHKahixIytLO8c1b65lPidOrNmO98e5ymN5Tq/WaDaMqBJWDEJEXnHOzQIORc2Rv4tITW/03YEVfsuFQHY17S8APvHM9wLWAs875zKAWcAVIrIpcCfn3EhgJEC3bt2Y5Kmn36tXL9q0aUO+J9DXqVMn+vfvz5QpUwBITk4mNzeX2bNns8EzVmJWVharV69mhac3a+/evUlJSWGup79/586d6dOnD1OnTgUgJSWFnJwc8vLyKCkpASA7O5vCwkJWegaA6du3L0lJScybNw+Arl27kp6eznTPH79FixZkZ2czY8YMtngGss/JyWHZsmWsWrUKgH79+lFeXs7ChQv1wnbvTmpqKjNmzACgdevWZGVlMX36dEpLSwHIzc1l0aJFrPGMPTBgwABKS0tZ7Hkj7dGjB126dMHrkmvbti2ZmZlMnTqVMk9h+cGDB1NQUECRp3RERkYGGzduZKlnUJy0tDQ6duzI7NmzAejQoQMZGRlMnjwZEcE5x5AhQ8jPz2fdOn0PyMzMpLi4mOWenMUGeZ9OOQVOOcV3nzy/yXDvU7M998SbHlg8axbNhg2z+2T/p6jdp6FDhxKKsGIQdcE5dwpwpIhc6FkeDhwgIlXy9pxzhwBPALkiUuScy0ItioNEZIZz7mFgg4jcUt13WgzCaBA89pgvvXXUKHjyydjKYzR0djoGURcKAb/qY6QCvwU2cs4NRIPRJ4hIkd++hSIyw7P8FpAZuK9hNEh23dU3H84IdYYRJaKpIGYCvZ1z6c65ZsDpwET/Bp7+FO8Aw0VkRxF8EVkFrHDOecdpOhSYF0VZ
DSN+8C/FEWpsCcOoB8LtB1FrRKTMOXcp8Bma5vqciBQ450Z5to8DbgU6AU94KomXiUiW5xCXAa94lMtSdMAiw2j4mAVhxAlRi0HEAotBGA2C1asr97Y+/ngddPiSS4JnRIlomfCOHRtU7Saj3ohJDMIwjLrQqZN21/UycaIW7xsyRDva+bN2rabW7r671pvYZx8tCTp1avBReQyjFpiCMIx4IzlZy5gGWgszZqgymDdPLYbcXI1XeNIiEdEh2N57T8t6HHts9aU6DaMGzMVkGPHKxo06ZuXgwXU/Rk6ODnLcrl3ExDIaHOZiMoyEo00btQTuu6/6dl27arsxY7Tc6S67+LZNn67Lffro+Nh5eToIgGGEgVkQhpEIbNsGkyfD7bdrfAG0nMcrr8BJJ1Vt/+ijOohxMDp10jhFSoqv9vWPP+oY2Mcdp8Ov/fKLlhAtL9fv7tNHy5a3auX77qZNo3KqRr0T0oIwBWEYiYQIjBunlWEvvRT23Td02xdfhCuv9A2BGkmaNVPLpX17LUjYpYsqkvnzVeE0b65B87ZtNabiHSqtWzdt36KFtqlLpVrnKtfX9tbGDjV8Wm3m/eti15X6fqaG+r7A6+JfR9x/3WmnmYIwjEZJRYWO43njjfowsH4VRiAipiAMw0BLiH/9tY47IeIbzaZXL7VKvvxSU2n33lszpJKTdaScKVNUuWzbpiPShBpO1Ug8TEEYhhExRNRttWGDDre2apV27ktKgj331DEtSktVGW3Y4Bu+raJCx7coKVEltHVr3dwxFRWVh2gLNmya97O288HcVIG8+abGX7wuGu94H/68/DIMH177c6srgbL6XxP/yevq859ef90UhGEYRkRIS9NsMP9ssUBat1ZFGA28D/bIjTRoaa6GYRj1TkkJHHqojlm+zz7w/vu6/pZbKg+ufdNN8MgjOn/ffbD//jBwIPzrX7pu+XJ1+/3jH3qsFSuoD8yCMAzDqA3p6dChg7p1Lr4YRo6s2sZrQZSVacymbVv44w8dLXDxYk0jPukk7QVfUQG9e+twtbNmwVtvwVNPqZVw/PFw3XU6nG2vXjBtWuURByNDSAsiatVcDcMwGiTffafpumvWwOGHw157he7tLqIZZFOmqEto5UqN16SlaX+UH3/U5UGDdPnzz3UaNEj3LylRhdKzp9bbirxyqBZTEIZhGLWhWzf97NxZOxxOnuzrlDhqlE5eXnlFCyrOmqWB7bQ0Dc4DXHghjB+vQf7zz9d1Itoj/uKLK3/n8uW+Tor1iMUgDMMwwmXTJl8BxE2b9G1///21ZtZPP1VWDqCZXJ07q3L45ht1LXk58UT49FOYOROOPFLXHXmklkTxBrhXroxp3xWzIAzDMMJl9Wp9sIPGF848E446KnT7s87SNNisLO31vtdevm3NmsEhh2hv9KQkXXfEEdobPSdHl1u31pRZ7/Z6xoLUhmEYsaCiQjOS3nxTg9Sxw9JcDcMw4oZ587RT4aGHxlo5VIu5mAzDMOqbfv1g6dJYS1EjZkEYhmEYQTEFYRiGYQTFFIRhGIYRFFMQhmEYRlBMQRiGYRhBMQVhGIZhBMUUhGEYhhEUUxCGYRhGUExBGIZhGEExBWEYhmEExRSEYRiGERRTEIZhGEZQTEEYhmEYQTEFYRiGYQTFFIRhGIYRlKgqCOfcUc65hc65Jc65G4JsP8s5N8czTXPOZXjWN3fO/eCcy3fOFTjnboumnIZhGEZVojZgkHMuCXgcOBwoBGY65yaKyDy/ZsuAISKyzjl3NPA0kA2UAn8VkRLnXFNgqnPuExH5PlryGoZhGJWJpgVxALBERJaKyDbgNeAE/wYiMk1E1nkWvwdSPetFREo865t6poYzeLZhGEYCEM0hR7sDK/yWC1HrIBQXAJ94FzwWyCxgT+BxEZkRbCfn3EhgJEC3bt2YNGkSAL169aJNmzbk5+cD0KlTJ/r378+UKVMASE5OJjc3l9mzZ7NhwwYAsrKyWL16NStWqNi9
e/cmJSWFuXPnAtC5c2f69OnD1KlTAUhJSSEnJ4e8vDxKSlSfZWdnU1hYyMqVKwHo27cvSUlJzJunhlPXrl1JT09n+vTpALRo0YLs7GxmzJjBli1bAMjJyWHZsmWsWrUKgH79+lFeXs7ChQv1wnbvTmpqKjNm6CVp3bo1WVlZTJ8+ndLSUgByc3NZtGgRa9asAWDAgAGUlpayePFiAHr06EGXLl3Iy8sDoG3btmRmZjJ16lTKysoAGDx4MAUFBRQVFQGQkZHBxo0bWeoZKjEtLY2OHTsye/ZsADp06EBGRgaTJ09GRHDOMWTIEPLz81m3Tt8DMjMzKS4uZvny5Xaf7D7ZfYqD+zR06FBC4USi82LunDsFOFJELvQsDwcOEJHLgrQ9BHgCyBWRooBt7YF3gctEZG5135mVlSXeG2QYhmGEhQu1IZoupkKgh99yKvBbYCPn3EDgGeCEQOUAICLrgUnAUVGR0jAMwwhKNBXETKC3cy7dOdcMOB2Y6N/AOdcTeAcYLiKL/Nbv6rEccM61AA4DFkRRVsMwDCOAqMUgRKTMOXcp8BmQBDwnIgXOuVGe7eOAW4FOwBPOOYAyEckCdgNe8MQhmgBviMiH0ZLVMAzDqErUYhCxwGIQhmEYtSZkDCKaWUxxwfbt2yksLGTr1q2xFiUuad68OampqTRt2jTWohiGEWc0eAVRWFhImzZtSEtLw+PGMjyICEVFRRQWFpKenh5rcQzDiDMafC2mrVu30qlTJ1MOQXDO0alTJ7OuDMMISoNXEIAph2qwa2MYRigahYIwDMMwao8piBgxdOhQasq4uvDCC3eUFAjFlClTyMzMJDk5mbfeeqvStqOOOor27dtz3HHH7bS8hmE0PkxBxDHPPPMM/fr1q7ZNz549GT9+PGeeeWaVbddeey0vvfRStMQzDKOB06gUhHPRm0KxfPly9tprL0aMGMHAgQMZNmwYmzdvrtRm9OjRZGVl0b9/f/71r3/tWO9vZbRu3ZqbbrqJjIwMDjzwQFavXg1oga+BAwfSpEnVW3nooYfSpk2bCFw5wzAaI41KQcSKhQsXMnLkSObMmUPbtm154oknKm2/6667yMvLY86cOUyePJk5c+ZUOcamTZs48MADyc/PZ/Dgwfzf//1ffYlvGEYjxRREPdCjRw8OOuggAM4+++wd5Y29vPHGG2RmZjJo0CAKCgqCxh2aNWu2I5aw33777SjvaxiGES0afEc5f2JVVSQwldR/edmyZdx///3MnDmTDh06cO655wbtl9C0adMd+yUlJe2oMW8YhhEtzIKoB3799dcdA5pMmDCB3NzcHds2bNhAq1ataNeuHatXr+aTTz4JdRjDMIx6xRREPbD33nvzwgsvMHDgQIqLixk9evSObRkZGQwaNIj+/ftz/vnn73BFhcvMmTNJTU3lzTff5OKLL6Z///47th188MGccsopfPXVV6SmpvLZZ59F7JwMw2j4NPhqrvPnz2fvvfeOkUSaxXTcccftGGYxHon1NTIMI6bEZEQ5wzAMI4ExBRFl0tLS4tp6MAzDCIUpCMMwDCMopiAMwzCMoJiCMAzDMIJiCsIwDMMIiimIGBGpct/jx49n1113Zd9992XfffflmWeeiaSYhmE0YhpVqY1EI9yH/WmnncZjjz0WZWkMw2hsNC4LIgb1vqNd7tswDCNaNC4FESOiXe777bff3qF8VqxYEfXzMQyjcWAKoh6IZrnvv/3tbyxfvpw5c+Zw2GGHMWLEiOiejGEYjYbGpSBEojdVQzjlvr/66ivmzJnDscceW6ty3506dSIlJQWAiy66iFmzZu3UJTIMw/DSuBREjIhmue/ff/99x/zEiROt6J5hGBHDspjqAW+574svvpjevXszevRoPvjgA6Byue9evXrVutz3I488wsSJE0lOTqZjx46MHz8+CmdgGEZjxMp9Rxkr920YRpxj5b4NwzCM2mEKIspYuW/DMBKVRqEgGpIbLdLYtTEMIxQNXkE0
b96coqIiexAGQUQoKiqiefPmsRbFMIw4pMFnMaWmplJYWMjatWtjLUpc0rx5c1JTU2MthmEYcUiDVxBNmzYlPT091mIYhmEkHFF1MTnnjnLOLXTOLXHO3RBk+1nOuTmeaZpzLiPcfQ3DMIzoEjUF4ZxLAh4Hjgb6AWc45/oFNFsGDBGRgcAdwNO12NcwDMOIItG0IA4AlojIUhHZBrwGnODfQESmicg6z+L3QGq4+xqGYRjRJZoxiO6Af+3pQiC7mvYXAN5CRGHv65wbCYz0LJY65xpSp4NdgD9iLUQEsfOJb+x84ptonc+nInJUsA3RVBDBum8HzTV1zh2CKghvFbuw9xWRp/G5pvJEJKv2osYndj7xjZ1PfGPns/NEU0EUAj38llOB3wIbOecGAs8AR4tIUW32NQzDMKJHNGMQM4Hezrl051wz4HRgon8D51xP4B1guIgsqs2+hmEYRnSJmgUhImXOuUuBz4Ak4DkRKXDOjfJsHwfcCnQCnvAMhlMmIlmh9g3ja5+OxrnEEDuf+MbOJ76x89lJGlS5b8MwDCNyNPhaTIZhGEbdMAVhGIZhBKVBKIhELMvhnHvOObfGv9+Gc66jc+4L59xiz2cHv21jPOe30Dl3ZGykDo1zrodz7hvn3HznXIFz7grP+oQ8J+dcc+fcD865fM/53OZZn5DnA1qhwDn3o3PuQ89ywp4LgHNuuXPuZ+fcT865PM+6hD0n51x759xbzrkFnv9RTszPR0QSekKD2P8DegHNgHygX6zlCkPuwUAmMNdv3X+AGzzzNwBjPfP9POeVAqR7zjcp1ucQcD67AZme+TbAIo/cCXlOaF+c1p75psAM4MBEPR+PjFcBrwIfJvrvzSPncmCXgHUJe07AC8CFnvlmQPtYn09DsCASsiyHiEwBigNWn4D+SPB8/t1v/WsiUioiy4Al6HnHDSLyu4jM9sxvBOajPeIT8pxEKfEsNvVMQoKej3MuFTgW7XPkJSHPpQYS8pycc23Rl8ZnAURkm4isJ8bn0xAURLCyHN1jJMvO0kVEfgd94AKdPesT6hydc2nAIPStO2HPyeOS+QlYA3whIol8Pg8B1wEVfusS9Vy8CPC5c26Wp+QOJO459QLWAs973IDPOOdaEePzaQgKIuyyHAlMwpyjc6418DbwTxHZUF3TIOvi6pxEpFxE9kV78h/gnBtQTfO4PR/n3HHAGhGZFe4uQdbFxbkEcJCIZKJVny9xzg2upm28n1My6nJ+UkQGAZtQl1Io6uV8GoKCaEhlOVY753YD8Hyu8axPiHN0zjVFlcMrIvKOZ3VCnxOAx9SfBBxFYp7PQcDxzrnlqAv2r865l0nMc9mBiPzm+VwDvIu6WBL1nAqBQo+VCvAWqjBiej4NQUE0pLIcE4ERnvkRwPt+6093zqU459KB3sAPMZAvJE67wj8LzBeR//ptSshzcs7t6pxr75lvARwGLCABz0dExohIqoikof+Pr0XkbBLwXLw451o559p454EjgLkk6DmJyCpghXOur2fVocA8Yn0+sY7cRyj6fwyaNfM/4KZYyxOmzBOA34Ht6NvABWjZka+AxZ7Pjn7tb/Kc30K0sGHMzyHgfHJRE3cO8JNnOiZRzwkYCPzoOZ+5wK2e9Ql5Pn4yDsWXxZSw54L67PM9U4H3f5/g57QvkOf5zb0HdIj1+VipDcMwDCMoDcHFZBiGYUQBUxCGYRhGUExBGIZhGEExBWEYhmEExRSEYRiGERRTEIYRAZxz0zyfac65M2Mtj2FEAlMQhhEBROQvntk0oFYKwjmXFHGBDCMCmIIwjAjgnPNWfr0XONgzRsGVnoJ/9znnZjrn5jjnLva0H+p0/IxXgZ9jJrhhVENyrAUwjAbGDcA1InIcgKfK6J8isr9zLgX4zjn3uaftAcAA0XLNhhF3mIIwjOhyBDDQOTfMs9wOrZuzDfjBlIMRz5iCMIzo4oDLROSzSiudG4qWdDaMuMViEIYRWTaiQ656+QwY
7SmFjnOuj6f6qGHEPWZBGEZkmQOUOefygfHAw2hm02xPSfS1+IaNNIy4xqq5GoZhGEExF5NhGIYRFFMQhmEYRlBMQRiGYRhBMQVhGIZhBMUUhGEYhhEUUxCGYRhGUExBGIZhGEH5f3clUCTn/ErjAAAAAElFTkSuQmCC\n"
+ },
+ "metadata": {
+ "needs_background": "light"
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "%matplotlib inline\n",
+ "fig, ax = plt.subplots()\n",
+ "x = np.array(range(len(history_plain_11.history['loss']) - smoothing_windows + 1))\n",
+ "ax.plot(x, 100*moving_average(history_plain_11.history[\"val_loss\"], smoothing_windows), linewidth=3.0, label='plain11', c='b')\n",
+ "ax.plot(x, 100*moving_average(history_plain_11.history[\"loss\"], smoothing_windows), linewidth=.5, c='b')\n",
+ "x = np.array(range(len(history_plain_5.history['loss']) - smoothing_windows + 1))\n",
+ "ax.plot(x, 100*moving_average(history_plain_5.history[\"val_loss\"], smoothing_windows), linewidth=3.0, label='plain5', c='r')\n",
+ "ax.plot(x, 100*moving_average(history_plain_5.history[\"loss\"], smoothing_windows), linewidth=.5, c='r')\n",
+ "ax.set_xlabel(\"iter\")\n",
+ "ax.set_ylabel(\"error(%)\")\n",
+ "ax.set_xlim(0, min(len(history_plain_5.history['loss']), len(history_plain_11.history['loss']) - smoothing_windows + 1))\n",
+ "ax.set_ylim(0.20, 0.35)\n",
+ "ax.spines[\"top\"].set_visible(False)\n",
+ "ax.spines[\"right\"].set_visible(False)\n",
+ "ax.yaxis.set_major_locator(ticker.LinearLocator(numticks=6))\n",
+ "ax.grid(axis='y', linestyle='--')\n",
+ "ax.annotate(\"11-layer\", (540, 0.267), c='b')\n",
+ "ax.annotate(\"5-layer\", (540, 0.245), c='r')\n",
+ "plt.legend(loc=3)\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# plt.savefig(fname=\"fig1.png\", dpi=300)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "shortcut5, shortcut11 = ShortCut5(input_shape=(1, 102)), ShortCut11(input_shape=(1, 102))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Epoch 1/1024\n",
+ "90/90 [==============================] - 1s 3ms/step - loss: 0.0219 - val_loss: 0.0283 - lr: 0.0025\n",
+ "Epoch 2/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0196 - val_loss: 0.0270 - lr: 0.0025\n",
+ "Epoch 3/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0137 - val_loss: 0.0267 - lr: 0.0025\n",
+ "Epoch 4/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0089 - val_loss: 0.0280 - lr: 0.0025\n",
+ "Epoch 5/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0071 - val_loss: 0.0266 - lr: 0.0025\n",
+ "Epoch 6/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0059 - val_loss: 0.1138 - lr: 0.0025\n",
+ "Epoch 7/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0048 - val_loss: 0.1172 - lr: 0.0025\n",
+ "Epoch 8/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0047 - val_loss: 0.0186 - lr: 0.0025\n",
+ "Epoch 9/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0046 - val_loss: 0.2067 - lr: 0.0025\n",
+ "Epoch 10/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0044 - val_loss: 0.2280 - lr: 0.0025\n",
+ "Epoch 11/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0044 - val_loss: 0.2274 - lr: 0.0025\n",
+ "Epoch 12/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0043 - val_loss: 0.3251 - lr: 0.0025\n",
+ "Epoch 13/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0043 - val_loss: 0.2281 - lr: 0.0025\n",
+ "Epoch 14/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0043 - val_loss: 0.1210 - lr: 0.0025\n",
+ "Epoch 15/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0041 - val_loss: 0.0582 - lr: 0.0025\n",
+ "Epoch 16/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0043 - val_loss: 0.2277 - lr: 0.0025\n",
+ "Epoch 17/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0040 - val_loss: 0.2268 - lr: 0.0025\n",
+ "Epoch 18/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0044 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 19/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0040 - val_loss: 0.3142 - lr: 0.0025\n",
+ "Epoch 20/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0039 - val_loss: 0.0752 - lr: 0.0025\n",
+ "Epoch 21/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0038 - val_loss: 0.0617 - lr: 0.0025\n",
+ "Epoch 22/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0040 - val_loss: 0.0920 - lr: 0.0025\n",
+ "Epoch 23/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0040 - val_loss: 0.3005 - lr: 0.0025\n",
+ "Epoch 24/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0038 - val_loss: 0.2490 - lr: 0.0025\n",
+ "Epoch 25/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.0489 - lr: 0.0025\n",
+ "Epoch 26/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0039 - val_loss: 0.2725 - lr: 0.0025\n",
+ "Epoch 27/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0038 - val_loss: 0.2280 - lr: 0.0025\n",
+ "Epoch 28/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0038 - val_loss: 0.3246 - lr: 0.0025\n",
+ "Epoch 29/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0040 - val_loss: 0.2171 - lr: 0.0025\n",
+ "Epoch 30/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0037 - val_loss: 0.3298 - lr: 0.0025\n",
+ "Epoch 31/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0042 - val_loss: 0.2163 - lr: 0.0025\n",
+ "Epoch 32/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0038 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 33/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0039 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 34/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.2266 - lr: 0.0012\n",
+ "Epoch 35/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.1462 - lr: 0.0012\n",
+ "Epoch 36/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0037 - val_loss: 0.2875 - lr: 0.0012\n",
+ "Epoch 37/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.3281 - lr: 0.0012\n",
+ "Epoch 38/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.2047 - lr: 0.0012\n",
+ "Epoch 39/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0036 - val_loss: 0.2271 - lr: 0.0012\n",
+ "Epoch 40/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0597 - lr: 0.0012\n",
+ "Epoch 41/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0037 - val_loss: 0.1329 - lr: 0.0012\n",
+ "Epoch 42/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.1875 - lr: 0.0012\n",
+ "Epoch 43/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.2277 - lr: 0.0012\n",
+ "Epoch 44/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.1507 - lr: 0.0012\n",
+ "Epoch 45/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.2337 - lr: 0.0012\n",
+ "Epoch 46/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0036 - val_loss: 0.2683 - lr: 0.0012\n",
+ "Epoch 47/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.2474 - lr: 0.0012\n",
+ "Epoch 48/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.1367 - lr: 0.0012\n",
+ "Epoch 49/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0035 - val_loss: 0.2283 - lr: 0.0012\n",
+ "Epoch 50/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0999 - lr: 0.0012\n",
+ "Epoch 51/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.2481 - lr: 0.0012\n",
+ "Epoch 52/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0795 - lr: 0.0012\n",
+ "Epoch 53/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0034 - val_loss: 0.3165 - lr: 0.0012\n",
+ "Epoch 54/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0367 - lr: 0.0012\n",
+ "Epoch 55/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0336 - lr: 0.0012\n",
+ "Epoch 56/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.3071 - lr: 0.0012\n",
+ "Epoch 57/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0255 - lr: 0.0012\n",
+ "Epoch 58/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.2279 - lr: 0.0012\n",
+ "Epoch 59/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0216 - lr: 6.2500e-04\n",
+ "Epoch 60/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.2756 - lr: 6.2500e-04\n",
+ "Epoch 61/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2596 - lr: 6.2500e-04\n",
+ "Epoch 62/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.1604 - lr: 6.2500e-04\n",
+ "Epoch 63/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0691 - lr: 6.2500e-04\n",
+ "Epoch 64/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.2389 - lr: 6.2500e-04\n",
+ "Epoch 65/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2540 - lr: 6.2500e-04\n",
+ "Epoch 66/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.2570 - lr: 6.2500e-04\n",
+ "Epoch 67/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0667 - lr: 6.2500e-04\n",
+ "Epoch 68/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2504 - lr: 6.2500e-04\n",
+ "Epoch 69/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2053 - lr: 6.2500e-04\n",
+ "Epoch 70/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2035 - lr: 6.2500e-04\n",
+ "Epoch 71/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.1357 - lr: 6.2500e-04\n",
+ "Epoch 72/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.1904 - lr: 6.2500e-04\n",
+ "Epoch 73/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.3200 - lr: 6.2500e-04\n",
+ "Epoch 74/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.1841 - lr: 6.2500e-04\n",
+ "Epoch 75/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0314 - lr: 6.2500e-04\n",
+ "Epoch 76/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.1188 - lr: 6.2500e-04\n",
+ "Epoch 77/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.2098 - lr: 6.2500e-04\n",
+ "Epoch 78/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2265 - lr: 6.2500e-04\n",
+ "Epoch 79/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.1782 - lr: 6.2500e-04\n",
+ "Epoch 80/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.1006 - lr: 6.2500e-04\n",
+ "Epoch 81/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2163 - lr: 6.2500e-04\n",
+ "Epoch 82/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0857 - lr: 6.2500e-04\n",
+ "Epoch 83/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2199 - lr: 6.2500e-04\n",
+ "Epoch 84/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.2611 - lr: 3.1250e-04\n",
+ "Epoch 85/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0029 - val_loss: 0.0573 - lr: 3.1250e-04\n",
+ "Epoch 86/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.1257 - lr: 3.1250e-04\n",
+ "Epoch 87/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2239 - lr: 3.1250e-04\n",
+ "Epoch 88/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.1547 - lr: 3.1250e-04\n",
+ "Epoch 89/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2397 - lr: 3.1250e-04\n",
+ "Epoch 90/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0815 - lr: 3.1250e-04\n",
+ "Epoch 91/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0359 - lr: 3.1250e-04\n",
+ "Epoch 92/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.1996 - lr: 3.1250e-04\n",
+ "Epoch 93/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0720 - lr: 3.1250e-04\n",
+ "Epoch 94/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.1951 - lr: 3.1250e-04\n",
+ "Epoch 95/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.1836 - lr: 3.1250e-04\n",
+ "Epoch 96/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0294 - lr: 3.1250e-04\n",
+ "Epoch 97/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0181 - lr: 3.1250e-04\n",
+ "Epoch 98/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.1766 - lr: 3.1250e-04\n",
+ "Epoch 99/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0591 - lr: 3.1250e-04\n",
+ "Epoch 100/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1220 - lr: 3.1250e-04\n",
+ "Epoch 101/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2105 - lr: 3.1250e-04\n",
+ "Epoch 102/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0453 - lr: 3.1250e-04\n",
+ "Epoch 103/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.1782 - lr: 3.1250e-04\n",
+ "Epoch 104/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0238 - lr: 3.1250e-04\n",
+ "Epoch 105/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2880 - lr: 3.1250e-04\n",
+ "Epoch 106/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2095 - lr: 3.1250e-04\n",
+ "Epoch 107/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.1675 - lr: 3.1250e-04\n",
+ "Epoch 108/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0127 - lr: 3.1250e-04\n",
+ "Epoch 109/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0940 - lr: 3.1250e-04\n",
+ "Epoch 110/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2401 - lr: 3.1250e-04\n",
+ "Epoch 111/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1006 - lr: 3.1250e-04\n",
+ "Epoch 112/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2042 - lr: 3.1250e-04\n",
+ "Epoch 113/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.3052 - lr: 3.1250e-04\n",
+ "Epoch 114/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2821 - lr: 3.1250e-04\n",
+ "Epoch 115/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0383 - lr: 3.1250e-04\n",
+ "Epoch 116/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1079 - lr: 3.1250e-04\n",
+ "Epoch 117/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1174 - lr: 3.1250e-04\n",
+ "Epoch 118/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2065 - lr: 3.1250e-04\n",
+ "Epoch 119/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.2121 - lr: 3.1250e-04\n",
+ "Epoch 120/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2459 - lr: 3.1250e-04\n",
+ "Epoch 121/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1985 - lr: 3.1250e-04\n",
+ "Epoch 122/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0073 - lr: 3.1250e-04\n",
+ "Epoch 123/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.2513 - lr: 3.1250e-04\n",
+ "Epoch 124/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0388 - lr: 3.1250e-04\n",
+ "Epoch 125/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1103 - lr: 3.1250e-04\n",
+ "Epoch 126/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0519 - lr: 3.1250e-04\n",
+ "Epoch 127/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.0992 - lr: 3.1250e-04\n",
+ "Epoch 128/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.1512 - lr: 3.1250e-04\n",
+ "Epoch 129/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0933 - lr: 3.1250e-04\n",
+ "Epoch 130/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.2168 - lr: 3.1250e-04\n",
+ "Epoch 131/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.2016 - lr: 3.1250e-04\n",
+ "Epoch 132/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1200 - lr: 3.1250e-04\n",
+ "Epoch 133/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0228 - lr: 3.1250e-04\n",
+ "Epoch 134/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0439 - lr: 3.1250e-04\n",
+ "Epoch 135/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2147 - lr: 3.1250e-04\n",
+ "Epoch 136/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1953 - lr: 3.1250e-04\n",
+ "Epoch 137/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0456 - lr: 3.1250e-04\n",
+ "Epoch 138/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2198 - lr: 3.1250e-04\n",
+ "Epoch 139/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0155 - lr: 3.1250e-04\n",
+ "Epoch 140/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0063 - lr: 3.1250e-04\n",
+ "Epoch 141/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.2162 - lr: 3.1250e-04\n",
+ "Epoch 142/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1195 - lr: 3.1250e-04\n",
+ "Epoch 143/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0230 - lr: 3.1250e-04\n",
+ "Epoch 144/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2873 - lr: 3.1250e-04\n",
+ "Epoch 145/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.1387 - lr: 3.1250e-04\n",
+ "Epoch 146/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0527 - lr: 3.1250e-04\n",
+ "Epoch 147/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1454 - lr: 3.1250e-04\n",
+ "Epoch 148/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0317 - lr: 3.1250e-04\n",
+ "Epoch 149/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1015 - lr: 3.1250e-04\n",
+ "Epoch 150/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.1067 - lr: 3.1250e-04\n",
+ "Epoch 151/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2105 - lr: 3.1250e-04\n",
+ "Epoch 152/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.1155 - lr: 3.1250e-04\n",
+ "Epoch 153/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.2155 - lr: 3.1250e-04\n",
+ "Epoch 154/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1054 - lr: 3.1250e-04\n",
+ "Epoch 155/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0111 - lr: 3.1250e-04\n",
+ "Epoch 156/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2359 - lr: 3.1250e-04\n",
+ "Epoch 157/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2450 - lr: 3.1250e-04\n",
+ "Epoch 158/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1439 - lr: 3.1250e-04\n",
+ "Epoch 159/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0100 - lr: 3.1250e-04\n",
+ "Epoch 160/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0506 - lr: 3.1250e-04\n",
+ "Epoch 161/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.2382 - lr: 3.1250e-04\n",
+ "Epoch 162/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.1973 - lr: 3.1250e-04\n",
+ "Epoch 163/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2399 - lr: 3.1250e-04\n",
+ "Epoch 164/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2276 - lr: 3.1250e-04\n",
+ "Epoch 165/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2351 - lr: 3.1250e-04\n",
+ "Epoch 166/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0165 - lr: 1.5625e-04\n",
+ "Epoch 167/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0790 - lr: 1.5625e-04\n",
+ "Epoch 168/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0259 - lr: 1.5625e-04\n",
+ "Epoch 169/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0053 - lr: 1.5625e-04\n",
+ "Epoch 170/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.2109 - lr: 1.5625e-04\n",
+ "Epoch 171/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.1137 - lr: 1.5625e-04\n",
+ "Epoch 172/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0062 - lr: 1.5625e-04\n",
+ "Epoch 173/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.1020 - lr: 1.5625e-04\n",
+ "Epoch 174/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0978 - lr: 1.5625e-04\n",
+ "Epoch 175/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0829 - lr: 1.5625e-04\n",
+ "Epoch 176/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0343 - lr: 1.5625e-04\n",
+ "Epoch 177/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0330 - lr: 1.5625e-04\n",
+ "Epoch 178/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0645 - lr: 1.5625e-04\n",
+ "Epoch 179/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0073 - lr: 1.5625e-04\n",
+ "Epoch 180/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0157 - lr: 1.5625e-04\n",
+ "Epoch 181/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0310 - lr: 1.5625e-04\n",
+ "Epoch 182/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0674 - lr: 1.5625e-04\n",
+ "Epoch 183/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0094 - lr: 1.5625e-04\n",
+ "Epoch 184/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.1829 - lr: 1.5625e-04\n",
+ "Epoch 185/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0138 - lr: 1.5625e-04\n",
+ "Epoch 186/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.1026 - lr: 1.5625e-04\n",
+ "Epoch 187/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1747 - lr: 1.5625e-04\n",
+ "Epoch 188/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.1049 - lr: 1.5625e-04\n",
+ "Epoch 189/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.1235 - lr: 1.5625e-04\n",
+ "Epoch 190/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0160 - lr: 1.5625e-04\n",
+ "Epoch 191/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.1550 - lr: 1.5625e-04\n",
+ "Epoch 192/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.1186 - lr: 1.5625e-04\n",
+ "Epoch 193/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.1048 - lr: 1.5625e-04\n",
+ "Epoch 194/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.1281 - lr: 1.5625e-04\n",
+ "Epoch 195/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0403 - lr: 7.8125e-05\n",
+ "Epoch 196/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0055 - lr: 7.8125e-05\n",
+ "Epoch 197/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0401 - lr: 7.8125e-05\n",
+ "Epoch 198/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0092 - lr: 7.8125e-05\n",
+ "Epoch 199/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0259 - lr: 7.8125e-05\n",
+ "Epoch 200/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0162 - lr: 7.8125e-05\n",
+ "Epoch 201/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.2130 - lr: 7.8125e-05\n",
+ "Epoch 202/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0112 - lr: 7.8125e-05\n",
+ "Epoch 203/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0100 - lr: 7.8125e-05\n",
+ "Epoch 204/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0776 - lr: 7.8125e-05\n",
+ "Epoch 205/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.1450 - lr: 7.8125e-05\n",
+ "Epoch 206/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0288 - lr: 7.8125e-05\n",
+ "Epoch 207/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0676 - lr: 7.8125e-05\n",
+ "Epoch 208/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0220 - lr: 7.8125e-05\n",
+ "Epoch 209/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0327 - lr: 7.8125e-05\n",
+ "Epoch 210/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0070 - lr: 7.8125e-05\n",
+ "Epoch 211/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0160 - lr: 7.8125e-05\n",
+ "Epoch 212/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0820 - lr: 7.8125e-05\n",
+ "Epoch 213/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0207 - lr: 7.8125e-05\n",
+ "Epoch 214/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0066 - lr: 7.8125e-05\n",
+ "Epoch 215/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0233 - lr: 7.8125e-05\n",
+ "Epoch 216/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0210 - lr: 7.8125e-05\n",
+ "Epoch 217/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0495 - lr: 7.8125e-05\n",
+ "Epoch 218/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0075 - lr: 7.8125e-05\n",
+ "Epoch 219/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0057 - lr: 7.8125e-05\n",
+ "Epoch 220/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0293 - lr: 3.9062e-05\n",
+ "Epoch 221/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0143 - lr: 3.9062e-05\n",
+ "Epoch 222/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0068 - lr: 3.9062e-05\n",
+ "Epoch 223/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0158 - lr: 3.9062e-05\n",
+ "Epoch 224/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0050 - lr: 3.9062e-05\n",
+ "Epoch 225/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0089 - lr: 3.9062e-05\n",
+ "Epoch 226/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0126 - lr: 3.9062e-05\n",
+ "Epoch 227/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0092 - lr: 3.9062e-05\n",
+ "Epoch 228/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0032 - lr: 3.9062e-05\n",
+ "Epoch 229/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0037 - lr: 3.9062e-05\n",
+ "Epoch 230/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0038 - lr: 3.9062e-05\n",
+ "Epoch 231/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0033 - lr: 3.9062e-05\n",
+ "Epoch 232/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0077 - lr: 3.9062e-05\n",
+ "Epoch 233/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0274 - lr: 3.9062e-05\n",
+ "Epoch 234/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0078 - lr: 3.9062e-05\n",
+ "Epoch 235/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0660 - lr: 3.9062e-05\n",
+ "Epoch 236/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0066 - lr: 3.9062e-05\n",
+ "Epoch 237/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0219 - lr: 3.9062e-05\n",
+ "Epoch 238/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0559 - lr: 3.9062e-05\n",
+ "Epoch 239/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0174 - lr: 3.9062e-05\n",
+ "Epoch 240/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0114 - lr: 3.9062e-05\n",
+ "Epoch 241/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0109 - lr: 3.9062e-05\n",
+ "Epoch 242/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0045 - lr: 3.9062e-05\n",
+ "Epoch 243/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0084 - lr: 3.9062e-05\n",
+ "Epoch 244/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0225 - lr: 3.9062e-05\n",
+ "Epoch 245/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0151 - lr: 3.9062e-05\n",
+ "Epoch 246/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0138 - lr: 3.9062e-05\n",
+ "Epoch 247/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0136 - lr: 3.9062e-05\n",
+ "Epoch 248/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0480 - lr: 3.9062e-05\n",
+ "Epoch 249/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0104 - lr: 3.9062e-05\n",
+ "Epoch 250/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0088 - lr: 3.9062e-05\n",
+ "Epoch 251/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0052 - lr: 3.9062e-05\n",
+ "Epoch 252/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0063 - lr: 3.9062e-05\n",
+ "Epoch 253/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0412 - lr: 3.9062e-05\n",
+ "Epoch 254/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0184 - lr: 1.9531e-05\n",
+ "Epoch 255/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0332 - lr: 1.9531e-05\n",
+ "Epoch 256/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0105 - lr: 1.9531e-05\n",
+ "Epoch 257/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0033 - lr: 1.9531e-05\n",
+ "Epoch 258/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0062 - lr: 1.9531e-05\n",
+ "Epoch 259/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0085 - lr: 1.9531e-05\n",
+ "Epoch 260/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0028 - lr: 1.9531e-05\n",
+ "Epoch 261/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0042 - lr: 1.9531e-05\n",
+ "Epoch 262/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0057 - lr: 1.9531e-05\n",
+ "Epoch 263/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0080 - lr: 1.9531e-05\n",
+ "Epoch 264/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0110 - lr: 1.9531e-05\n",
+ "Epoch 265/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0040 - lr: 1.9531e-05\n",
+ "Epoch 266/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0086 - lr: 1.9531e-05\n",
+ "Epoch 267/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0040 - lr: 1.9531e-05\n",
+ "Epoch 268/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0049 - lr: 1.9531e-05\n",
+ "Epoch 269/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0076 - lr: 1.9531e-05\n",
+ "Epoch 270/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0071 - lr: 1.9531e-05\n",
+ "Epoch 271/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0062 - lr: 1.9531e-05\n",
+ "Epoch 272/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0118 - lr: 1.9531e-05\n",
+ "Epoch 273/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0044 - lr: 1.9531e-05\n",
+ "Epoch 274/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0045 - lr: 1.9531e-05\n",
+ "Epoch 275/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0030 - lr: 1.9531e-05\n",
+ "Epoch 276/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 277/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0033 - lr: 1.9531e-05\n",
+ "Epoch 278/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0068 - lr: 1.9531e-05\n",
+ "Epoch 279/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0040 - lr: 1.9531e-05\n",
+ "Epoch 280/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0058 - lr: 1.9531e-05\n",
+ "Epoch 281/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0038 - lr: 1.9531e-05\n",
+ "Epoch 282/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0206 - lr: 1.9531e-05\n",
+ "Epoch 283/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0037 - lr: 1.9531e-05\n",
+ "Epoch 284/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0148 - lr: 1.9531e-05\n",
+ "Epoch 285/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0068 - lr: 1.9531e-05\n",
+ "Epoch 286/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0029 - lr: 9.7656e-06\n",
+ "Epoch 287/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0029 - lr: 9.7656e-06\n",
+ "Epoch 288/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 289/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 290/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 9.7656e-06\n",
+ "Epoch 291/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0039 - lr: 9.7656e-06\n",
+ "Epoch 292/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0045 - lr: 9.7656e-06\n",
+ "Epoch 293/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 294/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 295/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 296/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 297/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0026 - lr: 9.7656e-06\n",
+ "Epoch 298/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0030 - lr: 9.7656e-06\n",
+ "Epoch 299/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0031 - lr: 9.7656e-06\n",
+ "Epoch 300/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0035 - lr: 9.7656e-06\n",
+ "Epoch 301/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0029 - lr: 9.7656e-06\n",
+ "Epoch 302/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 303/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 304/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0031 - lr: 9.7656e-06\n",
+ "Epoch 305/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0029 - lr: 9.7656e-06\n",
+ "Epoch 306/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 307/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0045 - lr: 9.7656e-06\n",
+ "Epoch 308/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0035 - lr: 9.7656e-06\n",
+ "Epoch 309/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 310/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0041 - lr: 9.7656e-06\n",
+ "Epoch 311/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0072 - lr: 9.7656e-06\n",
+ "Epoch 312/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 313/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 314/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0045 - lr: 9.7656e-06\n",
+ "Epoch 315/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 316/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 317/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 318/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 319/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 320/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 321/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 322/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 323/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 324/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0035 - lr: 4.8828e-06\n",
+ "Epoch 325/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 326/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0030 - lr: 4.8828e-06\n",
+ "Epoch 327/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 328/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 329/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 330/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0031 - lr: 4.8828e-06\n",
+ "Epoch 331/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 332/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 333/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 334/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0033 - lr: 4.8828e-06\n",
+ "Epoch 335/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 336/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 337/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0031 - lr: 4.8828e-06\n",
+ "Epoch 338/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 339/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0031 - lr: 4.8828e-06\n",
+ "Epoch 340/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 341/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 342/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 343/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0032 - lr: 4.8828e-06\n",
+ "Epoch 344/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 345/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 346/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 347/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 348/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0030 - lr: 4.8828e-06\n",
+ "Epoch 349/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0040 - lr: 4.8828e-06\n",
+ "Epoch 350/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 351/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 352/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 353/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 354/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 355/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 356/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0034 - lr: 4.8828e-06\n",
+ "Epoch 357/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 358/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0032 - lr: 4.8828e-06\n",
+ "Epoch 359/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 360/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 361/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 362/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 363/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 364/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 365/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 366/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 367/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 368/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 369/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 370/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 371/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 372/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0029 - lr: 2.4414e-06\n",
+ "Epoch 373/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 374/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0028 - lr: 2.4414e-06\n",
+ "Epoch 375/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 376/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 377/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 378/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 379/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 380/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 381/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 382/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 383/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 384/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 385/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 386/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 387/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 388/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 389/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 390/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 391/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 392/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 393/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 394/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 395/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 396/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 397/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 398/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 399/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 400/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 401/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 402/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 403/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 404/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 405/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 406/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 407/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 408/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 409/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 410/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 411/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 412/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 413/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 414/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 415/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 416/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 417/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 418/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 419/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 420/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 421/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 422/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 423/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 424/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 425/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 426/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 427/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 428/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 429/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 430/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 431/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 432/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 433/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 434/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 435/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 436/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 437/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 438/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 439/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 440/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 441/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 442/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 443/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 444/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 445/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 446/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 447/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 448/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 449/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 450/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 451/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 452/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 453/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 454/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 455/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 456/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 457/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 458/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 459/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 460/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 461/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 462/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 463/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 464/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 465/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 466/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 467/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 468/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 469/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 470/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 471/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 472/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 473/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 474/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 475/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 476/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 477/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 478/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 479/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 480/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 481/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 482/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 483/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 484/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 485/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 486/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 487/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 488/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 489/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 490/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 491/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 492/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 493/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 494/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 495/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 496/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 497/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 498/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 499/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 500/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 501/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 502/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 503/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 504/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 505/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 506/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 507/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 508/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 509/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 510/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 511/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 512/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 513/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 514/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 515/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 516/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 517/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 518/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 519/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 520/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 521/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 522/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 523/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 524/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 525/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 526/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 527/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 528/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 529/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 530/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 531/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 532/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 533/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 534/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 535/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 536/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 537/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 538/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 539/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 540/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 541/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 542/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 543/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 544/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 545/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 546/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 547/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 548/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 549/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 550/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 551/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 552/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 553/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 554/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 555/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 556/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 557/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 558/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 559/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 560/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 561/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 562/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 563/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 564/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 565/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 566/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 567/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 568/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 569/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 570/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 571/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 572/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 573/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 574/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 575/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 576/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 577/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 578/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 579/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 580/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 581/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 582/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 583/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 584/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 585/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 586/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 587/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 588/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 589/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 590/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 591/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 592/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 593/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 594/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 595/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 596/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 597/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 598/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 599/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 600/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 601/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 602/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 603/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 604/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 605/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 606/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 607/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 608/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 609/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 610/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 611/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 612/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 613/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 614/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 615/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 616/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 617/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 618/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 619/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 620/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 621/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 622/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 623/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 624/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 625/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 626/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 627/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 628/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 629/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 630/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 631/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 632/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 633/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 634/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 635/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 636/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 637/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 638/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 639/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 640/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 641/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 642/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 643/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 644/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 645/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 646/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 647/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 648/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 649/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 650/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 651/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 652/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 653/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 1/1024\n",
+ "90/90 [==============================] - 1s 2ms/step - loss: 0.0198 - val_loss: 0.0598 - lr: 0.0025\n",
+ "Epoch 2/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0112 - val_loss: 0.0525 - lr: 0.0025\n",
+ "Epoch 3/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0071 - val_loss: 0.0330 - lr: 0.0025\n",
+ "Epoch 4/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0060 - val_loss: 0.0307 - lr: 0.0025\n",
+ "Epoch 5/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0052 - val_loss: 0.0281 - lr: 0.0025\n",
+ "Epoch 6/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0046 - val_loss: 0.0257 - lr: 0.0025\n",
+ "Epoch 7/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0045 - val_loss: 0.0653 - lr: 0.0025\n",
+ "Epoch 8/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0044 - val_loss: 0.0471 - lr: 0.0025\n",
+ "Epoch 9/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0044 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 10/1024\n",
+ "90/90 [==============================] - 0s 869us/step - loss: 0.0042 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 11/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0042 - val_loss: 0.1118 - lr: 0.0025\n",
+ "Epoch 12/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0042 - val_loss: 0.1824 - lr: 0.0025\n",
+ "Epoch 13/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0042 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 14/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0042 - val_loss: 0.0377 - lr: 0.0025\n",
+ "Epoch 15/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0040 - val_loss: 0.1167 - lr: 0.0025\n",
+ "Epoch 16/1024\n",
+ "90/90 [==============================] - 0s 900us/step - loss: 0.0041 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 17/1024\n",
+ "90/90 [==============================] - 0s 886us/step - loss: 0.0040 - val_loss: 0.2175 - lr: 0.0025\n",
+ "Epoch 18/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0041 - val_loss: 0.2283 - lr: 0.0025\n",
+ "Epoch 19/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0039 - val_loss: 0.0520 - lr: 0.0025\n",
+ "Epoch 20/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0039 - val_loss: 0.1469 - lr: 0.0025\n",
+ "Epoch 21/1024\n",
+ "90/90 [==============================] - 0s 939us/step - loss: 0.0038 - val_loss: 0.0476 - lr: 0.0025\n",
+ "Epoch 22/1024\n",
+ "90/90 [==============================] - 0s 970us/step - loss: 0.0039 - val_loss: 0.1398 - lr: 0.0025\n",
+ "Epoch 23/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0040 - val_loss: 0.0864 - lr: 0.0025\n",
+ "Epoch 24/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0037 - val_loss: 0.2282 - lr: 0.0025\n",
+ "Epoch 25/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0038 - val_loss: 0.2278 - lr: 0.0025\n",
+ "Epoch 26/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0039 - val_loss: 0.1589 - lr: 0.0025\n",
+ "Epoch 27/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0037 - val_loss: 0.0401 - lr: 0.0025\n",
+ "Epoch 28/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0037 - val_loss: 0.0640 - lr: 0.0025\n",
+ "Epoch 29/1024\n",
+ "90/90 [==============================] - 0s 894us/step - loss: 0.0038 - val_loss: 0.3123 - lr: 0.0025\n",
+ "Epoch 30/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0037 - val_loss: 0.1451 - lr: 0.0025\n",
+ "Epoch 31/1024\n",
+ "90/90 [==============================] - 0s 979us/step - loss: 0.0041 - val_loss: 0.2279 - lr: 0.0025\n",
+ "Epoch 32/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0036 - val_loss: 0.0576 - lr: 0.0012\n",
+ "Epoch 33/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0036 - val_loss: 0.0103 - lr: 0.0012\n",
+ "Epoch 34/1024\n",
+ "90/90 [==============================] - 0s 898us/step - loss: 0.0034 - val_loss: 0.1927 - lr: 0.0012\n",
+ "Epoch 35/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.1350 - lr: 0.0012\n",
+ "Epoch 36/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0037 - val_loss: 0.0297 - lr: 0.0012\n",
+ "Epoch 37/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.0853 - lr: 0.0012\n",
+ "Epoch 38/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.2278 - lr: 0.0012\n",
+ "Epoch 39/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.2013 - lr: 0.0012\n",
+ "Epoch 40/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.2283 - lr: 0.0012\n",
+ "Epoch 41/1024\n",
+ "90/90 [==============================] - 0s 965us/step - loss: 0.0036 - val_loss: 0.0482 - lr: 0.0012\n",
+ "Epoch 42/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0035 - val_loss: 0.2170 - lr: 0.0012\n",
+ "Epoch 43/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.2271 - lr: 0.0012\n",
+ "Epoch 44/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.0587 - lr: 0.0012\n",
+ "Epoch 45/1024\n",
+ "90/90 [==============================] - 0s 938us/step - loss: 0.0034 - val_loss: 0.1143 - lr: 0.0012\n",
+ "Epoch 46/1024\n",
+ "90/90 [==============================] - 0s 909us/step - loss: 0.0034 - val_loss: 0.2379 - lr: 0.0012\n",
+ "Epoch 47/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0034 - val_loss: 0.1721 - lr: 0.0012\n",
+ "Epoch 48/1024\n",
+ "90/90 [==============================] - 0s 949us/step - loss: 0.0033 - val_loss: 0.1126 - lr: 0.0012\n",
+ "Epoch 49/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0035 - val_loss: 0.0890 - lr: 0.0012\n",
+ "Epoch 50/1024\n",
+ "90/90 [==============================] - 0s 925us/step - loss: 0.0033 - val_loss: 0.1434 - lr: 0.0012\n",
+ "Epoch 51/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.2669 - lr: 0.0012\n",
+ "Epoch 52/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0032 - val_loss: 0.0172 - lr: 0.0012\n",
+ "Epoch 53/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.2013 - lr: 0.0012\n",
+ "Epoch 54/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0034 - val_loss: 0.0816 - lr: 0.0012\n",
+ "Epoch 55/1024\n",
+ "90/90 [==============================] - 0s 952us/step - loss: 0.0034 - val_loss: 0.2064 - lr: 0.0012\n",
+ "Epoch 56/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0034 - val_loss: 0.2262 - lr: 0.0012\n",
+ "Epoch 57/1024\n",
+ "90/90 [==============================] - 0s 959us/step - loss: 0.0033 - val_loss: 0.0168 - lr: 0.0012\n",
+ "Epoch 58/1024\n",
+ "90/90 [==============================] - 0s 897us/step - loss: 0.0034 - val_loss: 0.2105 - lr: 0.0012\n",
+ "Epoch 59/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.1548 - lr: 6.2500e-04\n",
+ "Epoch 60/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0033 - val_loss: 0.0658 - lr: 6.2500e-04\n",
+ "Epoch 61/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0032 - val_loss: 0.0978 - lr: 6.2500e-04\n",
+ "Epoch 62/1024\n",
+ "90/90 [==============================] - 0s 960us/step - loss: 0.0033 - val_loss: 0.0730 - lr: 6.2500e-04\n",
+ "Epoch 63/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0031 - val_loss: 0.0170 - lr: 6.2500e-04\n",
+ "Epoch 64/1024\n",
+ "90/90 [==============================] - 0s 905us/step - loss: 0.0031 - val_loss: 0.2857 - lr: 6.2500e-04\n",
+ "Epoch 65/1024\n",
+ "90/90 [==============================] - 0s 941us/step - loss: 0.0030 - val_loss: 0.2169 - lr: 6.2500e-04\n",
+ "Epoch 66/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0032 - val_loss: 0.2722 - lr: 6.2500e-04\n",
+ "Epoch 67/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2282 - lr: 6.2500e-04\n",
+ "Epoch 68/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2139 - lr: 6.2500e-04\n",
+ "Epoch 69/1024\n",
+ "90/90 [==============================] - 0s 910us/step - loss: 0.0031 - val_loss: 0.0982 - lr: 6.2500e-04\n",
+ "Epoch 70/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2279 - lr: 6.2500e-04\n",
+ "Epoch 71/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0761 - lr: 6.2500e-04\n",
+ "Epoch 72/1024\n",
+ "90/90 [==============================] - 0s 978us/step - loss: 0.0031 - val_loss: 0.1284 - lr: 6.2500e-04\n",
+ "Epoch 73/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2601 - lr: 6.2500e-04\n",
+ "Epoch 74/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0350 - lr: 6.2500e-04\n",
+ "Epoch 75/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0031 - val_loss: 0.1648 - lr: 6.2500e-04\n",
+ "Epoch 76/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0031 - val_loss: 0.2015 - lr: 6.2500e-04\n",
+ "Epoch 77/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0031 - val_loss: 0.0225 - lr: 6.2500e-04\n",
+ "Epoch 78/1024\n",
+ "90/90 [==============================] - 0s 970us/step - loss: 0.0030 - val_loss: 0.2144 - lr: 6.2500e-04\n",
+ "Epoch 79/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0032 - val_loss: 0.0431 - lr: 6.2500e-04\n",
+ "Epoch 80/1024\n",
+ "90/90 [==============================] - 0s 883us/step - loss: 0.0031 - val_loss: 0.0944 - lr: 6.2500e-04\n",
+ "Epoch 81/1024\n",
+ "90/90 [==============================] - 0s 898us/step - loss: 0.0031 - val_loss: 0.1827 - lr: 6.2500e-04\n",
+ "Epoch 82/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0031 - val_loss: 0.1524 - lr: 6.2500e-04\n",
+ "Epoch 83/1024\n",
+ "90/90 [==============================] - 0s 905us/step - loss: 0.0030 - val_loss: 0.0833 - lr: 6.2500e-04\n",
+ "Epoch 84/1024\n",
+ "90/90 [==============================] - 0s 886us/step - loss: 0.0031 - val_loss: 0.0131 - lr: 3.1250e-04\n",
+ "Epoch 85/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0030 - val_loss: 0.0984 - lr: 3.1250e-04\n",
+ "Epoch 86/1024\n",
+ "90/90 [==============================] - 0s 898us/step - loss: 0.0031 - val_loss: 0.0298 - lr: 3.1250e-04\n",
+ "Epoch 87/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0029 - val_loss: 0.2283 - lr: 3.1250e-04\n",
+ "Epoch 88/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0030 - val_loss: 0.2270 - lr: 3.1250e-04\n",
+ "Epoch 89/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.1353 - lr: 3.1250e-04\n",
+ "Epoch 90/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.2282 - lr: 3.1250e-04\n",
+ "Epoch 91/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1818 - lr: 3.1250e-04\n",
+ "Epoch 92/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0031 - val_loss: 0.0185 - lr: 3.1250e-04\n",
+ "Epoch 93/1024\n",
+ "90/90 [==============================] - 0s 882us/step - loss: 0.0030 - val_loss: 0.0603 - lr: 3.1250e-04\n",
+ "Epoch 94/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0085 - lr: 3.1250e-04\n",
+ "Epoch 95/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0055 - lr: 3.1250e-04\n",
+ "Epoch 96/1024\n",
+ "90/90 [==============================] - 0s 968us/step - loss: 0.0029 - val_loss: 0.1777 - lr: 3.1250e-04\n",
+ "Epoch 97/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2107 - lr: 3.1250e-04\n",
+ "Epoch 98/1024\n",
+ "90/90 [==============================] - 0s 892us/step - loss: 0.0031 - val_loss: 0.2022 - lr: 3.1250e-04\n",
+ "Epoch 99/1024\n",
+ "90/90 [==============================] - 0s 952us/step - loss: 0.0030 - val_loss: 0.0119 - lr: 3.1250e-04\n",
+ "Epoch 100/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0247 - lr: 3.1250e-04\n",
+ "Epoch 101/1024\n",
+ "90/90 [==============================] - 0s 962us/step - loss: 0.0030 - val_loss: 0.1989 - lr: 3.1250e-04\n",
+ "Epoch 102/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0322 - lr: 3.1250e-04\n",
+ "Epoch 103/1024\n",
+ "90/90 [==============================] - 0s 972us/step - loss: 0.0030 - val_loss: 0.2276 - lr: 3.1250e-04\n",
+ "Epoch 104/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0326 - lr: 3.1250e-04\n",
+ "Epoch 105/1024\n",
+ "90/90 [==============================] - 0s 1000us/step - loss: 0.0031 - val_loss: 0.2399 - lr: 3.1250e-04\n",
+ "Epoch 106/1024\n",
+ "90/90 [==============================] - 0s 892us/step - loss: 0.0030 - val_loss: 0.0445 - lr: 3.1250e-04\n",
+ "Epoch 107/1024\n",
+ "90/90 [==============================] - 0s 896us/step - loss: 0.0030 - val_loss: 0.2216 - lr: 3.1250e-04\n",
+ "Epoch 108/1024\n",
+ "90/90 [==============================] - 0s 913us/step - loss: 0.0030 - val_loss: 0.0257 - lr: 3.1250e-04\n",
+ "Epoch 109/1024\n",
+ "90/90 [==============================] - 0s 927us/step - loss: 0.0029 - val_loss: 0.1788 - lr: 3.1250e-04\n",
+ "Epoch 110/1024\n",
+ "90/90 [==============================] - 0s 961us/step - loss: 0.0029 - val_loss: 0.0195 - lr: 3.1250e-04\n",
+ "Epoch 111/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0308 - lr: 3.1250e-04\n",
+ "Epoch 112/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0169 - lr: 3.1250e-04\n",
+ "Epoch 113/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2607 - lr: 3.1250e-04\n",
+ "Epoch 114/1024\n",
+ "90/90 [==============================] - 0s 965us/step - loss: 0.0029 - val_loss: 0.2670 - lr: 3.1250e-04\n",
+ "Epoch 115/1024\n",
+ "90/90 [==============================] - 0s 985us/step - loss: 0.0030 - val_loss: 0.0159 - lr: 3.1250e-04\n",
+ "Epoch 116/1024\n",
+ "90/90 [==============================] - 0s 932us/step - loss: 0.0030 - val_loss: 0.1088 - lr: 3.1250e-04\n",
+ "Epoch 117/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.2283 - lr: 3.1250e-04\n",
+ "Epoch 118/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.1138 - lr: 3.1250e-04\n",
+ "Epoch 119/1024\n",
+ "90/90 [==============================] - 0s 982us/step - loss: 0.0029 - val_loss: 0.0120 - lr: 3.1250e-04\n",
+ "Epoch 120/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0029 - val_loss: 0.1762 - lr: 3.1250e-04\n",
+ "Epoch 121/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0073 - lr: 1.5625e-04\n",
+ "Epoch 122/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0924 - lr: 1.5625e-04\n",
+ "Epoch 123/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0188 - lr: 1.5625e-04\n",
+ "Epoch 124/1024\n",
+ "90/90 [==============================] - 0s 972us/step - loss: 0.0030 - val_loss: 0.0138 - lr: 1.5625e-04\n",
+ "Epoch 125/1024\n",
+ "90/90 [==============================] - 0s 921us/step - loss: 0.0028 - val_loss: 0.0270 - lr: 1.5625e-04\n",
+ "Epoch 126/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0028 - val_loss: 0.0375 - lr: 1.5625e-04\n",
+ "Epoch 127/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0031 - val_loss: 0.0750 - lr: 1.5625e-04\n",
+ "Epoch 128/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0109 - lr: 1.5625e-04\n",
+ "Epoch 129/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0467 - lr: 1.5625e-04\n",
+ "Epoch 130/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1844 - lr: 1.5625e-04\n",
+ "Epoch 131/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0444 - lr: 1.5625e-04\n",
+ "Epoch 132/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0101 - lr: 1.5625e-04\n",
+ "Epoch 133/1024\n",
+ "90/90 [==============================] - 0s 963us/step - loss: 0.0029 - val_loss: 0.1559 - lr: 1.5625e-04\n",
+ "Epoch 134/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.2269 - lr: 1.5625e-04\n",
+ "Epoch 135/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.1741 - lr: 1.5625e-04\n",
+ "Epoch 136/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0029 - val_loss: 0.0634 - lr: 1.5625e-04\n",
+ "Epoch 137/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0028 - val_loss: 0.1282 - lr: 1.5625e-04\n",
+ "Epoch 138/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1617 - lr: 1.5625e-04\n",
+ "Epoch 139/1024\n",
+ "90/90 [==============================] - 0s 955us/step - loss: 0.0028 - val_loss: 0.1587 - lr: 1.5625e-04\n",
+ "Epoch 140/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0652 - lr: 1.5625e-04\n",
+ "Epoch 141/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.1194 - lr: 1.5625e-04\n",
+ "Epoch 142/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0069 - lr: 1.5625e-04\n",
+ "Epoch 143/1024\n",
+ "90/90 [==============================] - 0s 908us/step - loss: 0.0030 - val_loss: 0.2232 - lr: 1.5625e-04\n",
+ "Epoch 144/1024\n",
+ "90/90 [==============================] - 0s 964us/step - loss: 0.0028 - val_loss: 0.0199 - lr: 1.5625e-04\n",
+ "Epoch 145/1024\n",
+ "90/90 [==============================] - 0s 964us/step - loss: 0.0029 - val_loss: 0.0634 - lr: 1.5625e-04\n",
+ "Epoch 146/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0029 - val_loss: 0.0338 - lr: 7.8125e-05\n",
+ "Epoch 147/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0029 - val_loss: 0.0403 - lr: 7.8125e-05\n",
+ "Epoch 148/1024\n",
+ "90/90 [==============================] - 0s 931us/step - loss: 0.0028 - val_loss: 0.0079 - lr: 7.8125e-05\n",
+ "Epoch 149/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0402 - lr: 7.8125e-05\n",
+ "Epoch 150/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0441 - lr: 7.8125e-05\n",
+ "Epoch 151/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0071 - lr: 7.8125e-05\n",
+ "Epoch 152/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0029 - val_loss: 0.0945 - lr: 7.8125e-05\n",
+ "Epoch 153/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0028 - val_loss: 0.0241 - lr: 7.8125e-05\n",
+ "Epoch 154/1024\n",
+ "90/90 [==============================] - 0s 950us/step - loss: 0.0028 - val_loss: 0.0100 - lr: 7.8125e-05\n",
+ "Epoch 155/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0289 - lr: 7.8125e-05\n",
+ "Epoch 156/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0029 - val_loss: 0.0380 - lr: 7.8125e-05\n",
+ "Epoch 157/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0557 - lr: 7.8125e-05\n",
+ "Epoch 158/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0217 - lr: 7.8125e-05\n",
+ "Epoch 159/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0129 - lr: 7.8125e-05\n",
+ "Epoch 160/1024\n",
+ "90/90 [==============================] - 0s 936us/step - loss: 0.0028 - val_loss: 0.0220 - lr: 7.8125e-05\n",
+ "Epoch 161/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0950 - lr: 7.8125e-05\n",
+ "Epoch 162/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0053 - lr: 7.8125e-05\n",
+ "Epoch 163/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0113 - lr: 7.8125e-05\n",
+ "Epoch 164/1024\n",
+ "90/90 [==============================] - 0s 953us/step - loss: 0.0028 - val_loss: 0.0521 - lr: 7.8125e-05\n",
+ "Epoch 165/1024\n",
+ "90/90 [==============================] - 0s 993us/step - loss: 0.0029 - val_loss: 0.0089 - lr: 7.8125e-05\n",
+ "Epoch 166/1024\n",
+ "90/90 [==============================] - 0s 963us/step - loss: 0.0028 - val_loss: 0.0064 - lr: 7.8125e-05\n",
+ "Epoch 167/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0489 - lr: 7.8125e-05\n",
+ "Epoch 168/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0030 - val_loss: 0.0217 - lr: 7.8125e-05\n",
+ "Epoch 169/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0157 - lr: 7.8125e-05\n",
+ "Epoch 170/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0189 - lr: 7.8125e-05\n",
+ "Epoch 171/1024\n",
+ "90/90 [==============================] - 0s 951us/step - loss: 0.0028 - val_loss: 0.0087 - lr: 7.8125e-05\n",
+ "Epoch 172/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.1240 - lr: 7.8125e-05\n",
+ "Epoch 173/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0183 - lr: 7.8125e-05\n",
+ "Epoch 174/1024\n",
+ "90/90 [==============================] - 0s 976us/step - loss: 0.0028 - val_loss: 0.1380 - lr: 7.8125e-05\n",
+ "Epoch 175/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0104 - lr: 7.8125e-05\n",
+ "Epoch 176/1024\n",
+ "90/90 [==============================] - 0s 984us/step - loss: 0.0028 - val_loss: 0.0084 - lr: 7.8125e-05\n",
+ "Epoch 177/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0049 - lr: 7.8125e-05\n",
+ "Epoch 178/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0705 - lr: 7.8125e-05\n",
+ "Epoch 179/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0030 - val_loss: 0.0130 - lr: 7.8125e-05\n",
+ "Epoch 180/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0066 - lr: 7.8125e-05\n",
+ "Epoch 181/1024\n",
+ "90/90 [==============================] - 0s 902us/step - loss: 0.0028 - val_loss: 0.1058 - lr: 7.8125e-05\n",
+ "Epoch 182/1024\n",
+ "90/90 [==============================] - 0s 944us/step - loss: 0.0029 - val_loss: 0.0491 - lr: 7.8125e-05\n",
+ "Epoch 183/1024\n",
+ "90/90 [==============================] - 0s 908us/step - loss: 0.0029 - val_loss: 0.0252 - lr: 7.8125e-05\n",
+ "Epoch 184/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0028 - val_loss: 0.0125 - lr: 7.8125e-05\n",
+ "Epoch 185/1024\n",
+ "90/90 [==============================] - 0s 955us/step - loss: 0.0027 - val_loss: 0.0052 - lr: 7.8125e-05\n",
+ "Epoch 186/1024\n",
+ "90/90 [==============================] - 0s 908us/step - loss: 0.0028 - val_loss: 0.0645 - lr: 7.8125e-05\n",
+ "Epoch 187/1024\n",
+ "90/90 [==============================] - 0s 942us/step - loss: 0.0029 - val_loss: 0.0465 - lr: 7.8125e-05\n",
+ "Epoch 188/1024\n",
+ "90/90 [==============================] - 0s 968us/step - loss: 0.0027 - val_loss: 0.0429 - lr: 7.8125e-05\n",
+ "Epoch 189/1024\n",
+ "90/90 [==============================] - 0s 984us/step - loss: 0.0028 - val_loss: 0.0272 - lr: 7.8125e-05\n",
+ "Epoch 190/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0577 - lr: 7.8125e-05\n",
+ "Epoch 191/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0040 - lr: 7.8125e-05\n",
+ "Epoch 192/1024\n",
+ "90/90 [==============================] - 0s 926us/step - loss: 0.0028 - val_loss: 0.0437 - lr: 7.8125e-05\n",
+ "Epoch 193/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0036 - lr: 7.8125e-05\n",
+ "Epoch 194/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0055 - lr: 7.8125e-05\n",
+ "Epoch 195/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0131 - lr: 7.8125e-05\n",
+ "Epoch 196/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0290 - lr: 7.8125e-05\n",
+ "Epoch 197/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0913 - lr: 7.8125e-05\n",
+ "Epoch 198/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0460 - lr: 7.8125e-05\n",
+ "Epoch 199/1024\n",
+ "90/90 [==============================] - 0s 943us/step - loss: 0.0028 - val_loss: 0.1060 - lr: 7.8125e-05\n",
+ "Epoch 200/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0051 - lr: 7.8125e-05\n",
+ "Epoch 201/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0334 - lr: 7.8125e-05\n",
+ "Epoch 202/1024\n",
+ "90/90 [==============================] - 0s 990us/step - loss: 0.0028 - val_loss: 0.0753 - lr: 7.8125e-05\n",
+ "Epoch 203/1024\n",
+ "90/90 [==============================] - 0s 964us/step - loss: 0.0029 - val_loss: 0.0238 - lr: 7.8125e-05\n",
+ "Epoch 204/1024\n",
+ "90/90 [==============================] - 0s 953us/step - loss: 0.0029 - val_loss: 0.0136 - lr: 7.8125e-05\n",
+ "Epoch 205/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.2139 - lr: 7.8125e-05\n",
+ "Epoch 206/1024\n",
+ "90/90 [==============================] - 0s 968us/step - loss: 0.0028 - val_loss: 0.0143 - lr: 7.8125e-05\n",
+ "Epoch 207/1024\n",
+ "90/90 [==============================] - 0s 947us/step - loss: 0.0029 - val_loss: 0.1455 - lr: 7.8125e-05\n",
+ "Epoch 208/1024\n",
+ "90/90 [==============================] - 0s 964us/step - loss: 0.0028 - val_loss: 0.0175 - lr: 7.8125e-05\n",
+ "Epoch 209/1024\n",
+ "90/90 [==============================] - 0s 934us/step - loss: 0.0028 - val_loss: 0.0451 - lr: 7.8125e-05\n",
+ "Epoch 210/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.1155 - lr: 7.8125e-05\n",
+ "Epoch 211/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0095 - lr: 7.8125e-05\n",
+ "Epoch 212/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.1050 - lr: 7.8125e-05\n",
+ "Epoch 213/1024\n",
+ "90/90 [==============================] - 0s 973us/step - loss: 0.0028 - val_loss: 0.0601 - lr: 7.8125e-05\n",
+ "Epoch 214/1024\n",
+ "90/90 [==============================] - 0s 957us/step - loss: 0.0028 - val_loss: 0.0070 - lr: 7.8125e-05\n",
+ "Epoch 215/1024\n",
+ "90/90 [==============================] - 0s 925us/step - loss: 0.0028 - val_loss: 0.0444 - lr: 7.8125e-05\n",
+ "Epoch 216/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0029 - val_loss: 0.1996 - lr: 7.8125e-05\n",
+ "Epoch 217/1024\n",
+ "90/90 [==============================] - 0s 929us/step - loss: 0.0028 - val_loss: 0.0211 - lr: 7.8125e-05\n",
+ "Epoch 218/1024\n",
+ "90/90 [==============================] - 0s 953us/step - loss: 0.0029 - val_loss: 0.0245 - lr: 7.8125e-05\n",
+ "Epoch 219/1024\n",
+ "90/90 [==============================] - 0s 966us/step - loss: 0.0029 - val_loss: 0.0044 - lr: 3.9062e-05\n",
+ "Epoch 220/1024\n",
+ "90/90 [==============================] - 0s 975us/step - loss: 0.0029 - val_loss: 0.0474 - lr: 3.9062e-05\n",
+ "Epoch 221/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0457 - lr: 3.9062e-05\n",
+ "Epoch 222/1024\n",
+ "90/90 [==============================] - 0s 978us/step - loss: 0.0029 - val_loss: 0.0045 - lr: 3.9062e-05\n",
+ "Epoch 223/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0230 - lr: 3.9062e-05\n",
+ "Epoch 224/1024\n",
+ "90/90 [==============================] - 0s 965us/step - loss: 0.0028 - val_loss: 0.0180 - lr: 3.9062e-05\n",
+ "Epoch 225/1024\n",
+ "90/90 [==============================] - 0s 947us/step - loss: 0.0028 - val_loss: 0.0337 - lr: 3.9062e-05\n",
+ "Epoch 226/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0027 - val_loss: 0.0289 - lr: 3.9062e-05\n",
+ "Epoch 227/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0036 - lr: 3.9062e-05\n",
+ "Epoch 228/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0044 - lr: 3.9062e-05\n",
+ "Epoch 229/1024\n",
+ "90/90 [==============================] - 0s 934us/step - loss: 0.0028 - val_loss: 0.0188 - lr: 3.9062e-05\n",
+ "Epoch 230/1024\n",
+ "90/90 [==============================] - 0s 997us/step - loss: 0.0028 - val_loss: 0.0352 - lr: 3.9062e-05\n",
+ "Epoch 231/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0164 - lr: 3.9062e-05\n",
+ "Epoch 232/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0092 - lr: 3.9062e-05\n",
+ "Epoch 233/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0099 - lr: 3.9062e-05\n",
+ "Epoch 234/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0028 - val_loss: 0.0159 - lr: 3.9062e-05\n",
+ "Epoch 235/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0028 - val_loss: 0.0126 - lr: 3.9062e-05\n",
+ "Epoch 236/1024\n",
+ "90/90 [==============================] - 0s 888us/step - loss: 0.0029 - val_loss: 0.0117 - lr: 3.9062e-05\n",
+ "Epoch 237/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0290 - lr: 3.9062e-05\n",
+ "Epoch 238/1024\n",
+ "90/90 [==============================] - 0s 948us/step - loss: 0.0027 - val_loss: 0.0302 - lr: 3.9062e-05\n",
+ "Epoch 239/1024\n",
+ "90/90 [==============================] - 0s 987us/step - loss: 0.0028 - val_loss: 0.0401 - lr: 3.9062e-05\n",
+ "Epoch 240/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.1169 - lr: 3.9062e-05\n",
+ "Epoch 241/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0076 - lr: 3.9062e-05\n",
+ "Epoch 242/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0303 - lr: 3.9062e-05\n",
+ "Epoch 243/1024\n",
+ "90/90 [==============================] - 0s 987us/step - loss: 0.0028 - val_loss: 0.0102 - lr: 3.9062e-05\n",
+ "Epoch 244/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0028 - val_loss: 0.0545 - lr: 3.9062e-05\n",
+ "Epoch 245/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0034 - lr: 3.9062e-05\n",
+ "Epoch 246/1024\n",
+ "90/90 [==============================] - 0s 976us/step - loss: 0.0029 - val_loss: 0.0135 - lr: 3.9062e-05\n",
+ "Epoch 247/1024\n",
+ "90/90 [==============================] - 0s 971us/step - loss: 0.0028 - val_loss: 0.0045 - lr: 3.9062e-05\n",
+ "Epoch 248/1024\n",
+ "90/90 [==============================] - 0s 945us/step - loss: 0.0028 - val_loss: 0.0153 - lr: 3.9062e-05\n",
+ "Epoch 249/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0092 - lr: 3.9062e-05\n",
+ "Epoch 250/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0030 - val_loss: 0.0083 - lr: 3.9062e-05\n",
+ "Epoch 251/1024\n",
+ "90/90 [==============================] - 0s 955us/step - loss: 0.0028 - val_loss: 0.0221 - lr: 3.9062e-05\n",
+ "Epoch 252/1024\n",
+ "90/90 [==============================] - 0s 963us/step - loss: 0.0028 - val_loss: 0.0097 - lr: 3.9062e-05\n",
+ "Epoch 253/1024\n",
+ "90/90 [==============================] - 0s 910us/step - loss: 0.0028 - val_loss: 0.0062 - lr: 3.9062e-05\n",
+ "Epoch 254/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0032 - lr: 3.9062e-05\n",
+ "Epoch 255/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0028 - val_loss: 0.0411 - lr: 3.9062e-05\n",
+ "Epoch 256/1024\n",
+ "90/90 [==============================] - 0s 934us/step - loss: 0.0028 - val_loss: 0.0529 - lr: 3.9062e-05\n",
+ "Epoch 257/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0028 - val_loss: 0.0063 - lr: 3.9062e-05\n",
+ "Epoch 258/1024\n",
+ "90/90 [==============================] - 0s 973us/step - loss: 0.0028 - val_loss: 0.0036 - lr: 3.9062e-05\n",
+ "Epoch 259/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0308 - lr: 3.9062e-05\n",
+ "Epoch 260/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0085 - lr: 3.9062e-05\n",
+ "Epoch 261/1024\n",
+ "90/90 [==============================] - 0s 979us/step - loss: 0.0028 - val_loss: 0.0068 - lr: 3.9062e-05\n",
+ "Epoch 262/1024\n",
+ "90/90 [==============================] - 0s 982us/step - loss: 0.0028 - val_loss: 0.0074 - lr: 3.9062e-05\n",
+ "Epoch 263/1024\n",
+ "90/90 [==============================] - 0s 910us/step - loss: 0.0028 - val_loss: 0.0079 - lr: 3.9062e-05\n",
+ "Epoch 264/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0109 - lr: 3.9062e-05\n",
+ "Epoch 265/1024\n",
+ "90/90 [==============================] - 0s 963us/step - loss: 0.0027 - val_loss: 0.0214 - lr: 3.9062e-05\n",
+ "Epoch 266/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0027 - val_loss: 0.0143 - lr: 3.9062e-05\n",
+ "Epoch 267/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0102 - lr: 3.9062e-05\n",
+ "Epoch 268/1024\n",
+ "90/90 [==============================] - 0s 991us/step - loss: 0.0028 - val_loss: 0.0262 - lr: 3.9062e-05\n",
+ "Epoch 269/1024\n",
+ "90/90 [==============================] - 0s 909us/step - loss: 0.0028 - val_loss: 0.0217 - lr: 3.9062e-05\n",
+ "Epoch 270/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0027 - val_loss: 0.0221 - lr: 3.9062e-05\n",
+ "Epoch 271/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0056 - lr: 3.9062e-05\n",
+ "Epoch 272/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0146 - lr: 3.9062e-05\n",
+ "Epoch 273/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0307 - lr: 3.9062e-05\n",
+ "Epoch 274/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0028 - val_loss: 0.0197 - lr: 3.9062e-05\n",
+ "Epoch 275/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0028 - val_loss: 0.0057 - lr: 3.9062e-05\n",
+ "Epoch 276/1024\n",
+ "90/90 [==============================] - 0s 919us/step - loss: 0.0027 - val_loss: 0.0204 - lr: 3.9062e-05\n",
+ "Epoch 277/1024\n",
+ "90/90 [==============================] - 0s 897us/step - loss: 0.0028 - val_loss: 0.0130 - lr: 3.9062e-05\n",
+ "Epoch 278/1024\n",
+ "90/90 [==============================] - 0s 941us/step - loss: 0.0027 - val_loss: 0.0049 - lr: 3.9062e-05\n",
+ "Epoch 279/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0096 - lr: 3.9062e-05\n",
+ "Epoch 280/1024\n",
+ "90/90 [==============================] - 0s 894us/step - loss: 0.0027 - val_loss: 0.0226 - lr: 1.9531e-05\n",
+ "Epoch 281/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0028 - lr: 1.9531e-05\n",
+ "Epoch 282/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 283/1024\n",
+ "90/90 [==============================] - 0s 910us/step - loss: 0.0027 - val_loss: 0.0043 - lr: 1.9531e-05\n",
+ "Epoch 284/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0044 - lr: 1.9531e-05\n",
+ "Epoch 285/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0151 - lr: 1.9531e-05\n",
+ "Epoch 286/1024\n",
+ "90/90 [==============================] - 0s 900us/step - loss: 0.0028 - val_loss: 0.0076 - lr: 1.9531e-05\n",
+ "Epoch 287/1024\n",
+ "90/90 [==============================] - 0s 905us/step - loss: 0.0028 - val_loss: 0.0042 - lr: 1.9531e-05\n",
+ "Epoch 288/1024\n",
+ "90/90 [==============================] - 0s 888us/step - loss: 0.0029 - val_loss: 0.0032 - lr: 1.9531e-05\n",
+ "Epoch 289/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0028 - val_loss: 0.0045 - lr: 1.9531e-05\n",
+ "Epoch 290/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0027 - val_loss: 0.0042 - lr: 1.9531e-05\n",
+ "Epoch 291/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0027 - val_loss: 0.0037 - lr: 1.9531e-05\n",
+ "Epoch 292/1024\n",
+ "90/90 [==============================] - 0s 946us/step - loss: 0.0028 - val_loss: 0.0048 - lr: 1.9531e-05\n",
+ "Epoch 293/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0063 - lr: 1.9531e-05\n",
+ "Epoch 294/1024\n",
+ "90/90 [==============================] - 0s 946us/step - loss: 0.0028 - val_loss: 0.0030 - lr: 1.9531e-05\n",
+ "Epoch 295/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 1.9531e-05\n",
+ "Epoch 296/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0126 - lr: 1.9531e-05\n",
+ "Epoch 297/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0034 - lr: 1.9531e-05\n",
+ "Epoch 298/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0035 - lr: 1.9531e-05\n",
+ "Epoch 299/1024\n",
+ "90/90 [==============================] - 0s 910us/step - loss: 0.0027 - val_loss: 0.0056 - lr: 1.9531e-05\n",
+ "Epoch 300/1024\n",
+ "90/90 [==============================] - 0s 921us/step - loss: 0.0028 - val_loss: 0.0107 - lr: 1.9531e-05\n",
+ "Epoch 301/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0028 - val_loss: 0.0060 - lr: 1.9531e-05\n",
+ "Epoch 302/1024\n",
+ "90/90 [==============================] - 0s 892us/step - loss: 0.0028 - val_loss: 0.0133 - lr: 1.9531e-05\n",
+ "Epoch 303/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0028 - val_loss: 0.0037 - lr: 1.9531e-05\n",
+ "Epoch 304/1024\n",
+ "90/90 [==============================] - 0s 883us/step - loss: 0.0028 - val_loss: 0.0066 - lr: 1.9531e-05\n",
+ "Epoch 305/1024\n",
+ "90/90 [==============================] - 0s 896us/step - loss: 0.0027 - val_loss: 0.0032 - lr: 1.9531e-05\n",
+ "Epoch 306/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0027 - val_loss: 0.0032 - lr: 1.9531e-05\n",
+ "Epoch 307/1024\n",
+ "90/90 [==============================] - 0s 951us/step - loss: 0.0027 - val_loss: 0.0033 - lr: 1.9531e-05\n",
+ "Epoch 308/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0027 - val_loss: 0.0039 - lr: 1.9531e-05\n",
+ "Epoch 309/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0028 - val_loss: 0.0067 - lr: 1.9531e-05\n",
+ "Epoch 310/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0045 - lr: 1.9531e-05\n",
+ "Epoch 311/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0039 - lr: 1.9531e-05\n",
+ "Epoch 312/1024\n",
+ "90/90 [==============================] - 0s 978us/step - loss: 0.0028 - val_loss: 0.0043 - lr: 1.9531e-05\n",
+ "Epoch 313/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0028 - val_loss: 0.0040 - lr: 1.9531e-05\n",
+ "Epoch 314/1024\n",
+ "90/90 [==============================] - 0s 906us/step - loss: 0.0028 - val_loss: 0.0108 - lr: 1.9531e-05\n",
+ "Epoch 315/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 1.9531e-05\n",
+ "Epoch 316/1024\n",
+ "90/90 [==============================] - 0s 894us/step - loss: 0.0028 - val_loss: 0.0041 - lr: 1.9531e-05\n",
+ "Epoch 317/1024\n",
+ "90/90 [==============================] - 0s 902us/step - loss: 0.0028 - val_loss: 0.0030 - lr: 1.9531e-05\n",
+ "Epoch 318/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0052 - lr: 1.9531e-05\n",
+ "Epoch 319/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0112 - lr: 1.9531e-05\n",
+ "Epoch 320/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0205 - lr: 1.9531e-05\n",
+ "Epoch 321/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0028 - val_loss: 0.0102 - lr: 1.9531e-05\n",
+ "Epoch 322/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0028 - val_loss: 0.0042 - lr: 1.9531e-05\n",
+ "Epoch 323/1024\n",
+ "90/90 [==============================] - 0s 926us/step - loss: 0.0027 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 324/1024\n",
+ "90/90 [==============================] - 0s 947us/step - loss: 0.0028 - val_loss: 0.0031 - lr: 1.9531e-05\n",
+ "Epoch 325/1024\n",
+ "90/90 [==============================] - 0s 904us/step - loss: 0.0027 - val_loss: 0.0063 - lr: 1.9531e-05\n",
+ "Epoch 326/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0166 - lr: 1.9531e-05\n",
+ "Epoch 327/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0035 - lr: 1.9531e-05\n",
+ "Epoch 328/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0027 - val_loss: 0.0055 - lr: 1.9531e-05\n",
+ "Epoch 329/1024\n",
+ "90/90 [==============================] - 0s 909us/step - loss: 0.0028 - val_loss: 0.0091 - lr: 1.9531e-05\n",
+ "Epoch 330/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0027 - val_loss: 0.0143 - lr: 1.9531e-05\n",
+ "Epoch 331/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0056 - lr: 1.9531e-05\n",
+ "Epoch 332/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0045 - lr: 1.9531e-05\n",
+ "Epoch 333/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0048 - lr: 1.9531e-05\n",
+ "Epoch 334/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0027 - val_loss: 0.0057 - lr: 1.9531e-05\n",
+ "Epoch 335/1024\n",
+ "90/90 [==============================] - 0s 863us/step - loss: 0.0028 - val_loss: 0.0142 - lr: 1.9531e-05\n",
+ "Epoch 336/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0028 - val_loss: 0.0052 - lr: 1.9531e-05\n",
+ "Epoch 337/1024\n",
+ "90/90 [==============================] - 0s 883us/step - loss: 0.0027 - val_loss: 0.0027 - lr: 1.9531e-05\n",
+ "Epoch 338/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0027 - val_loss: 0.0054 - lr: 1.9531e-05\n",
+ "Epoch 339/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0032 - lr: 1.9531e-05\n",
+ "Epoch 340/1024\n",
+ "90/90 [==============================] - 0s 901us/step - loss: 0.0027 - val_loss: 0.0034 - lr: 1.9531e-05\n",
+ "Epoch 341/1024\n",
+ "90/90 [==============================] - 0s 850us/step - loss: 0.0028 - val_loss: 0.0037 - lr: 9.7656e-06\n",
+ "Epoch 342/1024\n",
+ "90/90 [==============================] - 0s 854us/step - loss: 0.0028 - val_loss: 0.0034 - lr: 9.7656e-06\n",
+ "Epoch 343/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0029 - val_loss: 0.0063 - lr: 9.7656e-06\n",
+ "Epoch 344/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0028 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 345/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0028 - val_loss: 0.0037 - lr: 9.7656e-06\n",
+ "Epoch 346/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0050 - lr: 9.7656e-06\n",
+ "Epoch 347/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0042 - lr: 9.7656e-06\n",
+ "Epoch 348/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 349/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0028 - val_loss: 0.0034 - lr: 9.7656e-06\n",
+ "Epoch 350/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0027 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 351/1024\n",
+ "90/90 [==============================] - 0s 883us/step - loss: 0.0028 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 352/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0051 - lr: 9.7656e-06\n",
+ "Epoch 353/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0027 - val_loss: 0.0028 - lr: 9.7656e-06\n",
+ "Epoch 354/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0027 - val_loss: 0.0040 - lr: 9.7656e-06\n",
+ "Epoch 355/1024\n",
+ "90/90 [==============================] - 0s 904us/step - loss: 0.0028 - val_loss: 0.0032 - lr: 9.7656e-06\n",
+ "Epoch 356/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0027 - val_loss: 0.0033 - lr: 9.7656e-06\n",
+ "Epoch 357/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0027 - val_loss: 0.0036 - lr: 9.7656e-06\n",
+ "Epoch 358/1024\n",
+ "90/90 [==============================] - 0s 893us/step - loss: 0.0028 - val_loss: 0.0048 - lr: 9.7656e-06\n",
+ "Epoch 359/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0027 - val_loss: 0.0062 - lr: 9.7656e-06\n",
+ "Epoch 360/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0027 - val_loss: 0.0030 - lr: 9.7656e-06\n",
+ "Epoch 361/1024\n",
+ "90/90 [==============================] - 0s 992us/step - loss: 0.0028 - val_loss: 0.0054 - lr: 9.7656e-06\n",
+ "Epoch 362/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0029 - val_loss: 0.0031 - lr: 9.7656e-06\n",
+ "Epoch 363/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0027 - val_loss: 0.0027 - lr: 9.7656e-06\n",
+ "Epoch 364/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0028 - val_loss: 0.0034 - lr: 9.7656e-06\n",
+ "Epoch 365/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0026 - val_loss: 0.0033 - lr: 9.7656e-06\n",
+ "Epoch 366/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0041 - lr: 4.8828e-06\n",
+ "Epoch 367/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 368/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0035 - lr: 4.8828e-06\n",
+ "Epoch 369/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 370/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 371/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 372/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0028 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 373/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0027 - val_loss: 0.0032 - lr: 4.8828e-06\n",
+ "Epoch 374/1024\n",
+ "90/90 [==============================] - 0s 982us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 375/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0032 - lr: 4.8828e-06\n",
+ "Epoch 376/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 377/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 378/1024\n",
+ "90/90 [==============================] - 0s 897us/step - loss: 0.0028 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 379/1024\n",
+ "90/90 [==============================] - 0s 886us/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 380/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0028 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 381/1024\n",
+ "90/90 [==============================] - 0s 893us/step - loss: 0.0027 - val_loss: 0.0041 - lr: 4.8828e-06\n",
+ "Epoch 382/1024\n",
+ "90/90 [==============================] - 0s 894us/step - loss: 0.0028 - val_loss: 0.0033 - lr: 4.8828e-06\n",
+ "Epoch 383/1024\n",
+ "90/90 [==============================] - 0s 914us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 384/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0027 - val_loss: 0.0031 - lr: 4.8828e-06\n",
+ "Epoch 385/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 386/1024\n",
+ "90/90 [==============================] - 0s 905us/step - loss: 0.0029 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 387/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0027 - val_loss: 0.0029 - lr: 4.8828e-06\n",
+ "Epoch 388/1024\n",
+ "90/90 [==============================] - 0s 936us/step - loss: 0.0028 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 389/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 390/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0031 - lr: 4.8828e-06\n",
+ "Epoch 391/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0038 - lr: 4.8828e-06\n",
+ "Epoch 392/1024\n",
+ "90/90 [==============================] - 0s 896us/step - loss: 0.0028 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 393/1024\n",
+ "90/90 [==============================] - 0s 907us/step - loss: 0.0027 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 394/1024\n",
+ "90/90 [==============================] - 0s 900us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.8828e-06\n",
+ "Epoch 395/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0027 - val_loss: 0.0030 - lr: 4.8828e-06\n",
+ "Epoch 396/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0027 - val_loss: 0.0033 - lr: 4.8828e-06\n",
+ "Epoch 397/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 398/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 399/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 400/1024\n",
+ "90/90 [==============================] - 0s 888us/step - loss: 0.0027 - val_loss: 0.0030 - lr: 4.8828e-06\n",
+ "Epoch 401/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 402/1024\n",
+ "90/90 [==============================] - 0s 914us/step - loss: 0.0029 - val_loss: 0.0027 - lr: 4.8828e-06\n",
+ "Epoch 403/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0029 - val_loss: 0.0030 - lr: 4.8828e-06\n",
+ "Epoch 404/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0029 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 405/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0027 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 406/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 407/1024\n",
+ "90/90 [==============================] - 0s 897us/step - loss: 0.0027 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 408/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0033 - lr: 4.8828e-06\n",
+ "Epoch 409/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0028 - lr: 4.8828e-06\n",
+ "Epoch 410/1024\n",
+ "90/90 [==============================] - 0s 970us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 4.8828e-06\n",
+ "Epoch 411/1024\n",
+ "90/90 [==============================] - 0s 861us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 412/1024\n",
+ "90/90 [==============================] - 0s 907us/step - loss: 0.0027 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 413/1024\n",
+ "90/90 [==============================] - 0s 899us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 414/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 415/1024\n",
+ "90/90 [==============================] - 0s 867us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 416/1024\n",
+ "90/90 [==============================] - 0s 882us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 417/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 418/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 419/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 420/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 421/1024\n",
+ "90/90 [==============================] - 0s 965us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 422/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0027 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 423/1024\n",
+ "90/90 [==============================] - 0s 938us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 424/1024\n",
+ "90/90 [==============================] - 0s 896us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 425/1024\n",
+ "90/90 [==============================] - 0s 981us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 426/1024\n",
+ "90/90 [==============================] - 0s 990us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 427/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 428/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 429/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 430/1024\n",
+ "90/90 [==============================] - 0s 986us/step - loss: 0.0027 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 431/1024\n",
+ "90/90 [==============================] - 0s 882us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 432/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 433/1024\n",
+ "90/90 [==============================] - 0s 982us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 434/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.4414e-06\n",
+ "Epoch 435/1024\n",
+ "90/90 [==============================] - 0s 919us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 436/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0027 - lr: 2.4414e-06\n",
+ "Epoch 437/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 438/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0031 - lr: 2.4414e-06\n",
+ "Epoch 439/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0030 - val_loss: 0.0028 - lr: 2.4414e-06\n",
+ "Epoch 440/1024\n",
+ "90/90 [==============================] - 0s 896us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 441/1024\n",
+ "90/90 [==============================] - 0s 948us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 442/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 2.4414e-06\n",
+ "Epoch 443/1024\n",
+ "90/90 [==============================] - 0s 902us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 444/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 445/1024\n",
+ "90/90 [==============================] - 0s 948us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 446/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 447/1024\n",
+ "90/90 [==============================] - 0s 963us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 448/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 449/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 450/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 451/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 452/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 453/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 454/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 455/1024\n",
+ "90/90 [==============================] - 0s 997us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 456/1024\n",
+ "90/90 [==============================] - 0s 940us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 457/1024\n",
+ "90/90 [==============================] - 0s 894us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 458/1024\n",
+ "90/90 [==============================] - 0s 896us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 459/1024\n",
+ "90/90 [==============================] - 0s 958us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 460/1024\n",
+ "90/90 [==============================] - 0s 913us/step - loss: 0.0027 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 461/1024\n",
+ "90/90 [==============================] - 0s 902us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 462/1024\n",
+ "90/90 [==============================] - 0s 900us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 463/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 464/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 465/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 1.2207e-06\n",
+ "Epoch 466/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 467/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.2207e-06\n",
+ "Epoch 468/1024\n",
+ "90/90 [==============================] - 0s 878us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 469/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 470/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 471/1024\n",
+ "90/90 [==============================] - 0s 892us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 472/1024\n",
+ "90/90 [==============================] - 0s 856us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 473/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 474/1024\n",
+ "90/90 [==============================] - 0s 898us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 475/1024\n",
+ "90/90 [==============================] - 0s 888us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 476/1024\n",
+ "90/90 [==============================] - 0s 884us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 477/1024\n",
+ "90/90 [==============================] - 0s 883us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 478/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 479/1024\n",
+ "90/90 [==============================] - 0s 870us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 480/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 481/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 482/1024\n",
+ "90/90 [==============================] - 0s 959us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 483/1024\n",
+ "90/90 [==============================] - 0s 864us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 484/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 485/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 486/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 487/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 488/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 489/1024\n",
+ "90/90 [==============================] - 0s 986us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 490/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0028 - val_loss: 0.0026 - lr: 6.1035e-07\n",
+ "Epoch 491/1024\n",
+ "90/90 [==============================] - 0s 945us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 492/1024\n",
+ "90/90 [==============================] - 0s 921us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 493/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 494/1024\n",
+ "90/90 [==============================] - 0s 875us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 495/1024\n",
+ "90/90 [==============================] - 0s 872us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 6.1035e-07\n",
+ "Epoch 496/1024\n",
+ "90/90 [==============================] - 0s 909us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 497/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 498/1024\n",
+ "90/90 [==============================] - 0s 962us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 499/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 500/1024\n",
+ "90/90 [==============================] - 0s 905us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 501/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 502/1024\n",
+ "90/90 [==============================] - 0s 984us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 503/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 504/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 505/1024\n",
+ "90/90 [==============================] - 0s 906us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 506/1024\n",
+ "90/90 [==============================] - 0s 935us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 507/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 508/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 509/1024\n",
+ "90/90 [==============================] - 0s 920us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 510/1024\n",
+ "90/90 [==============================] - 0s 913us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 511/1024\n",
+ "90/90 [==============================] - 0s 894us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 512/1024\n",
+ "90/90 [==============================] - 0s 933us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 513/1024\n",
+ "90/90 [==============================] - 0s 910us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 514/1024\n",
+ "90/90 [==============================] - 0s 913us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 515/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 516/1024\n",
+ "90/90 [==============================] - 0s 962us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 517/1024\n",
+ "90/90 [==============================] - 0s 929us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 518/1024\n",
+ "90/90 [==============================] - 0s 962us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 519/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 520/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.0518e-07\n",
+ "Epoch 521/1024\n",
+ "90/90 [==============================] - 0s 948us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 522/1024\n",
+ "90/90 [==============================] - 0s 914us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 523/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 524/1024\n",
+ "90/90 [==============================] - 0s 900us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 525/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 526/1024\n",
+ "90/90 [==============================] - 0s 997us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 527/1024\n",
+ "90/90 [==============================] - 0s 931us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 528/1024\n",
+ "90/90 [==============================] - 0s 992us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 529/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 530/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 531/1024\n",
+ "90/90 [==============================] - 0s 979us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 532/1024\n",
+ "90/90 [==============================] - 0s 908us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 533/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 534/1024\n",
+ "90/90 [==============================] - 0s 946us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 535/1024\n",
+ "90/90 [==============================] - 0s 899us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 536/1024\n",
+ "90/90 [==============================] - 0s 947us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 537/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 538/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 539/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 540/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 541/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 542/1024\n",
+ "90/90 [==============================] - 0s 936us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 543/1024\n",
+ "90/90 [==============================] - 0s 987us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 544/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 545/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.5259e-07\n",
+ "Epoch 546/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 547/1024\n",
+ "90/90 [==============================] - 0s 862us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 548/1024\n",
+ "90/90 [==============================] - 0s 883us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 549/1024\n",
+ "90/90 [==============================] - 0s 859us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 550/1024\n",
+ "90/90 [==============================] - 0s 902us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 551/1024\n",
+ "90/90 [==============================] - 0s 974us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 552/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 553/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 554/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 555/1024\n",
+ "90/90 [==============================] - 0s 968us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 556/1024\n",
+ "90/90 [==============================] - 0s 933us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 557/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 558/1024\n",
+ "90/90 [==============================] - 0s 893us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 559/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 560/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 561/1024\n",
+ "90/90 [==============================] - 0s 991us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 562/1024\n",
+ "90/90 [==============================] - 0s 931us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 563/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 564/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 565/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 566/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 567/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 568/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 569/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 570/1024\n",
+ "90/90 [==============================] - 0s 889us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.6294e-08\n",
+ "Epoch 571/1024\n",
+ "90/90 [==============================] - 0s 921us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 572/1024\n",
+ "90/90 [==============================] - 0s 962us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 573/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 574/1024\n",
+ "90/90 [==============================] - 0s 952us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 575/1024\n",
+ "90/90 [==============================] - 0s 984us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 576/1024\n",
+ "90/90 [==============================] - 0s 975us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 577/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 578/1024\n",
+ "90/90 [==============================] - 0s 921us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 579/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 580/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 581/1024\n",
+ "90/90 [==============================] - 0s 926us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 582/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 583/1024\n",
+ "90/90 [==============================] - 0s 981us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 584/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 585/1024\n",
+ "90/90 [==============================] - 0s 871us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 586/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 587/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 588/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 589/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 590/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 591/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 592/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 593/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 594/1024\n",
+ "90/90 [==============================] - 0s 960us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 595/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.8147e-08\n",
+ "Epoch 596/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 597/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 598/1024\n",
+ "90/90 [==============================] - 0s 955us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 599/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 600/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 601/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 602/1024\n",
+ "90/90 [==============================] - 0s 951us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 603/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 604/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 605/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 606/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 607/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 608/1024\n",
+ "90/90 [==============================] - 0s 994us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 609/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 610/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 611/1024\n",
+ "90/90 [==============================] - 0s 992us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 612/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 613/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 614/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 615/1024\n",
+ "90/90 [==============================] - 0s 966us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 616/1024\n",
+ "90/90 [==============================] - 0s 942us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 617/1024\n",
+ "90/90 [==============================] - 0s 966us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 618/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 619/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 620/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.9073e-08\n",
+ "Epoch 621/1024\n",
+ "90/90 [==============================] - 0s 953us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 622/1024\n",
+ "90/90 [==============================] - 0s 960us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 623/1024\n",
+ "90/90 [==============================] - 0s 989us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 624/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 625/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 626/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 627/1024\n",
+ "90/90 [==============================] - 0s 919us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 628/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 629/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 630/1024\n",
+ "90/90 [==============================] - 0s 993us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 631/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 632/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 633/1024\n",
+ "90/90 [==============================] - 0s 966us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 634/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 635/1024\n",
+ "90/90 [==============================] - 0s 966us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 636/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 637/1024\n",
+ "90/90 [==============================] - 0s 954us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 638/1024\n",
+ "90/90 [==============================] - 0s 957us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 639/1024\n",
+ "90/90 [==============================] - 0s 934us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 640/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 641/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 642/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 643/1024\n",
+ "90/90 [==============================] - 0s 904us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 644/1024\n",
+ "90/90 [==============================] - 0s 943us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 645/1024\n",
+ "90/90 [==============================] - 0s 944us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.5367e-09\n",
+ "Epoch 646/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 647/1024\n",
+ "90/90 [==============================] - 0s 955us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 648/1024\n",
+ "90/90 [==============================] - 0s 996us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 649/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 650/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 651/1024\n",
+ "90/90 [==============================] - 0s 987us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 652/1024\n",
+ "90/90 [==============================] - 0s 991us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 653/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 654/1024\n",
+ "90/90 [==============================] - 0s 927us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 655/1024\n",
+ "90/90 [==============================] - 0s 921us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 656/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 657/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 658/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 659/1024\n",
+ "90/90 [==============================] - 0s 874us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 660/1024\n",
+ "90/90 [==============================] - 0s 929us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 661/1024\n",
+ "90/90 [==============================] - 0s 960us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 662/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 663/1024\n",
+ "90/90 [==============================] - 0s 927us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 664/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 665/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 666/1024\n",
+ "90/90 [==============================] - 0s 995us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 667/1024\n",
+ "90/90 [==============================] - 0s 942us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 668/1024\n",
+ "90/90 [==============================] - 0s 944us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 669/1024\n",
+ "90/90 [==============================] - 0s 919us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 670/1024\n",
+ "90/90 [==============================] - 0s 941us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.7684e-09\n",
+ "Epoch 671/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 672/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 673/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 674/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 675/1024\n",
+ "90/90 [==============================] - 0s 919us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 676/1024\n",
+ "90/90 [==============================] - 0s 945us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 677/1024\n",
+ "90/90 [==============================] - 0s 940us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 678/1024\n",
+ "90/90 [==============================] - 0s 926us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 679/1024\n",
+ "90/90 [==============================] - 0s 926us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 680/1024\n",
+ "90/90 [==============================] - 0s 951us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 681/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 682/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 683/1024\n",
+ "90/90 [==============================] - 0s 966us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 684/1024\n",
+ "90/90 [==============================] - 0s 943us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 685/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 686/1024\n",
+ "90/90 [==============================] - 0s 938us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 687/1024\n",
+ "90/90 [==============================] - 0s 955us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 688/1024\n",
+ "90/90 [==============================] - 0s 932us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 689/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 690/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 691/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 692/1024\n",
+ "90/90 [==============================] - 0s 967us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 693/1024\n",
+ "90/90 [==============================] - 0s 948us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 694/1024\n",
+ "90/90 [==============================] - 0s 1000us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 695/1024\n",
+ "90/90 [==============================] - 0s 975us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3842e-09\n",
+ "Epoch 696/1024\n",
+ "90/90 [==============================] - 0s 947us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 697/1024\n",
+ "90/90 [==============================] - 0s 951us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 698/1024\n",
+ "90/90 [==============================] - 0s 976us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 699/1024\n",
+ "90/90 [==============================] - 0s 961us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 700/1024\n",
+ "90/90 [==============================] - 0s 952us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 701/1024\n",
+ "90/90 [==============================] - 0s 939us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 702/1024\n",
+ "90/90 [==============================] - 0s 944us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 703/1024\n",
+ "90/90 [==============================] - 0s 945us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 704/1024\n",
+ "90/90 [==============================] - 0s 905us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 705/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 706/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 707/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 708/1024\n",
+ "90/90 [==============================] - 0s 945us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 709/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 710/1024\n",
+ "90/90 [==============================] - 0s 958us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 711/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 712/1024\n",
+ "90/90 [==============================] - 0s 919us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 713/1024\n",
+ "90/90 [==============================] - 0s 985us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 714/1024\n",
+ "90/90 [==============================] - 0s 959us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 715/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 716/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 717/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 718/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 719/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 720/1024\n",
+ "90/90 [==============================] - 0s 949us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1921e-09\n",
+ "Epoch 721/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 722/1024\n",
+ "90/90 [==============================] - 0s 951us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 723/1024\n",
+ "90/90 [==============================] - 0s 929us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 724/1024\n",
+ "90/90 [==============================] - 0s 910us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 725/1024\n",
+ "90/90 [==============================] - 0s 972us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 726/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 727/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 728/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 729/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 730/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0026 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 731/1024\n",
+ "90/90 [==============================] - 0s 949us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 732/1024\n",
+ "90/90 [==============================] - 0s 957us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 733/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 734/1024\n",
+ "90/90 [==============================] - 0s 926us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 735/1024\n",
+ "90/90 [==============================] - 0s 941us/step - loss: 0.0026 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 736/1024\n",
+ "90/90 [==============================] - 0s 920us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 737/1024\n",
+ "90/90 [==============================] - 0s 949us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 738/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 739/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 740/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 741/1024\n",
+ "90/90 [==============================] - 0s 915us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 742/1024\n",
+ "90/90 [==============================] - 0s 926us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 743/1024\n",
+ "90/90 [==============================] - 0s 952us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 744/1024\n",
+ "90/90 [==============================] - 0s 940us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 745/1024\n",
+ "90/90 [==============================] - 0s 949us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.9605e-10\n",
+ "Epoch 746/1024\n",
+ "90/90 [==============================] - 0s 900us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 747/1024\n",
+ "90/90 [==============================] - 0s 953us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 748/1024\n",
+ "90/90 [==============================] - 0s 940us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 749/1024\n",
+ "90/90 [==============================] - 0s 921us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 750/1024\n",
+ "90/90 [==============================] - 0s 973us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 751/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 752/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 753/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 754/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 755/1024\n",
+ "90/90 [==============================] - 0s 4ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 756/1024\n",
+ "90/90 [==============================] - 0s 4ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 757/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 758/1024\n",
+ "90/90 [==============================] - 0s 4ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 759/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 760/1024\n",
+ "90/90 [==============================] - 0s 929us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 761/1024\n",
+ "90/90 [==============================] - 0s 945us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 762/1024\n",
+ "90/90 [==============================] - 0s 941us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 763/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 764/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0026 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 765/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 766/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 767/1024\n",
+ "90/90 [==============================] - 0s 948us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 768/1024\n",
+ "90/90 [==============================] - 0s 944us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 769/1024\n",
+ "90/90 [==============================] - 0s 948us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 770/1024\n",
+ "90/90 [==============================] - 0s 925us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9802e-10\n",
+ "Epoch 771/1024\n",
+ "90/90 [==============================] - 0s 986us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 772/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 773/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 774/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 775/1024\n",
+ "90/90 [==============================] - 0s 936us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 776/1024\n",
+ "90/90 [==============================] - 0s 941us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 777/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 778/1024\n",
+ "90/90 [==============================] - 0s 920us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 779/1024\n",
+ "90/90 [==============================] - 0s 956us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 780/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 781/1024\n",
+ "90/90 [==============================] - 0s 892us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 782/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 783/1024\n",
+ "90/90 [==============================] - 0s 945us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 784/1024\n",
+ "90/90 [==============================] - 0s 991us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 785/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 786/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 787/1024\n",
+ "90/90 [==============================] - 0s 925us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 788/1024\n",
+ "90/90 [==============================] - 0s 942us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 789/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 790/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 791/1024\n",
+ "90/90 [==============================] - 0s 952us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 792/1024\n",
+ "90/90 [==============================] - 0s 943us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 793/1024\n",
+ "90/90 [==============================] - 0s 934us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 794/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 795/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4901e-10\n",
+ "Epoch 796/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 797/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 798/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 799/1024\n",
+ "90/90 [==============================] - 0s 933us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 800/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 801/1024\n",
+ "90/90 [==============================] - 0s 954us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 802/1024\n",
+ "90/90 [==============================] - 0s 941us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 803/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 804/1024\n",
+ "90/90 [==============================] - 0s 927us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 805/1024\n",
+ "90/90 [==============================] - 0s 908us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 806/1024\n",
+ "90/90 [==============================] - 0s 942us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 807/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 808/1024\n",
+ "90/90 [==============================] - 0s 943us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 809/1024\n",
+ "90/90 [==============================] - 0s 903us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 810/1024\n",
+ "90/90 [==============================] - 0s 977us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 811/1024\n",
+ "90/90 [==============================] - 0s 956us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 812/1024\n",
+ "90/90 [==============================] - 0s 932us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 813/1024\n",
+ "90/90 [==============================] - 0s 925us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 814/1024\n",
+ "90/90 [==============================] - 0s 925us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 815/1024\n",
+ "90/90 [==============================] - 0s 956us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 816/1024\n",
+ "90/90 [==============================] - 0s 908us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 817/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 818/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 819/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 820/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 7.4506e-11\n",
+ "Epoch 821/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 822/1024\n",
+ "90/90 [==============================] - 0s 909us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 823/1024\n",
+ "90/90 [==============================] - 0s 914us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 824/1024\n",
+ "90/90 [==============================] - 0s 973us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 825/1024\n",
+ "90/90 [==============================] - 0s 939us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 826/1024\n",
+ "90/90 [==============================] - 0s 947us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 827/1024\n",
+ "90/90 [==============================] - 0s 938us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 828/1024\n",
+ "90/90 [==============================] - 0s 971us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 829/1024\n",
+ "90/90 [==============================] - 0s 934us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 830/1024\n",
+ "90/90 [==============================] - 0s 931us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 831/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 832/1024\n",
+ "90/90 [==============================] - 0s 967us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 833/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 834/1024\n",
+ "90/90 [==============================] - 0s 936us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 835/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 836/1024\n",
+ "90/90 [==============================] - 0s 950us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 837/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 838/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 839/1024\n",
+ "90/90 [==============================] - 0s 920us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 840/1024\n",
+ "90/90 [==============================] - 0s 926us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 841/1024\n",
+ "90/90 [==============================] - 0s 928us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 842/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 843/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 844/1024\n",
+ "90/90 [==============================] - 0s 920us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 845/1024\n",
+ "90/90 [==============================] - 0s 925us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 3.7253e-11\n",
+ "Epoch 846/1024\n",
+ "90/90 [==============================] - 0s 949us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 847/1024\n",
+ "90/90 [==============================] - 0s 938us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 848/1024\n",
+ "90/90 [==============================] - 0s 919us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 849/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 850/1024\n",
+ "90/90 [==============================] - 0s 949us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 851/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 852/1024\n",
+ "90/90 [==============================] - 0s 925us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 853/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 854/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 855/1024\n",
+ "90/90 [==============================] - 0s 938us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 856/1024\n",
+ "90/90 [==============================] - 0s 929us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 857/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 858/1024\n",
+ "90/90 [==============================] - 0s 945us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 859/1024\n",
+ "90/90 [==============================] - 0s 938us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 860/1024\n",
+ "90/90 [==============================] - 0s 922us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 861/1024\n",
+ "90/90 [==============================] - 0s 939us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 862/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 863/1024\n",
+ "90/90 [==============================] - 0s 969us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 864/1024\n",
+ "90/90 [==============================] - 0s 944us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 865/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 866/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 867/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 868/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 869/1024\n",
+ "90/90 [==============================] - 0s 908us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 870/1024\n",
+ "90/90 [==============================] - 0s 953us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.8626e-11\n",
+ "Epoch 871/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 872/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 873/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 874/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 875/1024\n",
+ "90/90 [==============================] - 0s 915us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 876/1024\n",
+ "90/90 [==============================] - 0s 943us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 877/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 878/1024\n",
+ "90/90 [==============================] - 0s 957us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 879/1024\n",
+ "90/90 [==============================] - 0s 912us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 880/1024\n",
+ "90/90 [==============================] - 0s 907us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 881/1024\n",
+ "90/90 [==============================] - 0s 935us/step - loss: 0.0026 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 882/1024\n",
+ "90/90 [==============================] - 0s 941us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 883/1024\n",
+ "90/90 [==============================] - 0s 931us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 884/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 885/1024\n",
+ "90/90 [==============================] - 0s 904us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 886/1024\n",
+ "90/90 [==============================] - 0s 921us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 887/1024\n",
+ "90/90 [==============================] - 0s 919us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 888/1024\n",
+ "90/90 [==============================] - 0s 968us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 889/1024\n",
+ "90/90 [==============================] - 0s 931us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 890/1024\n",
+ "90/90 [==============================] - 0s 940us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 891/1024\n",
+ "90/90 [==============================] - 0s 967us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 892/1024\n",
+ "90/90 [==============================] - 0s 949us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 893/1024\n",
+ "90/90 [==============================] - 0s 934us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 894/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 895/1024\n",
+ "90/90 [==============================] - 0s 924us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 9.3132e-12\n",
+ "Epoch 896/1024\n",
+ "90/90 [==============================] - 0s 915us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 897/1024\n",
+ "90/90 [==============================] - 0s 932us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 898/1024\n",
+ "90/90 [==============================] - 0s 955us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 899/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 900/1024\n",
+ "90/90 [==============================] - 0s 954us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 901/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 902/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 903/1024\n",
+ "90/90 [==============================] - 0s 3ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 904/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 905/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0026 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 906/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 907/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 908/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 909/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 910/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 911/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 912/1024\n",
+ "90/90 [==============================] - 0s 932us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 913/1024\n",
+ "90/90 [==============================] - 0s 919us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 914/1024\n",
+ "90/90 [==============================] - 0s 950us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 915/1024\n",
+ "90/90 [==============================] - 0s 936us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 916/1024\n",
+ "90/90 [==============================] - 0s 990us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 917/1024\n",
+ "90/90 [==============================] - 0s 921us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 918/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 919/1024\n",
+ "90/90 [==============================] - 0s 967us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 920/1024\n",
+ "90/90 [==============================] - 0s 925us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 4.6566e-12\n",
+ "Epoch 921/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 922/1024\n",
+ "90/90 [==============================] - 0s 939us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 923/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 924/1024\n",
+ "90/90 [==============================] - 0s 952us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 925/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 926/1024\n",
+ "90/90 [==============================] - 0s 906us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 927/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 928/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 929/1024\n",
+ "90/90 [==============================] - 0s 989us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 930/1024\n",
+ "90/90 [==============================] - 0s 900us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 931/1024\n",
+ "90/90 [==============================] - 0s 902us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 932/1024\n",
+ "90/90 [==============================] - 0s 932us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 933/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 934/1024\n",
+ "90/90 [==============================] - 0s 990us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 935/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 936/1024\n",
+ "90/90 [==============================] - 0s 987us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 937/1024\n",
+ "90/90 [==============================] - 0s 911us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 938/1024\n",
+ "90/90 [==============================] - 0s 889us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 939/1024\n",
+ "90/90 [==============================] - 0s 896us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 940/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 941/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 942/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 943/1024\n",
+ "90/90 [==============================] - 0s 899us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 944/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 945/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.3283e-12\n",
+ "Epoch 946/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 947/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 948/1024\n",
+ "90/90 [==============================] - 0s 880us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 949/1024\n",
+ "90/90 [==============================] - 0s 884us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 950/1024\n",
+ "90/90 [==============================] - 0s 915us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 951/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 952/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 953/1024\n",
+ "90/90 [==============================] - 0s 982us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 954/1024\n",
+ "90/90 [==============================] - 0s 941us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 955/1024\n",
+ "90/90 [==============================] - 0s 994us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 956/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 957/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 958/1024\n",
+ "90/90 [==============================] - 0s 973us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 959/1024\n",
+ "90/90 [==============================] - 0s 943us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 960/1024\n",
+ "90/90 [==============================] - 0s 942us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 961/1024\n",
+ "90/90 [==============================] - 0s 965us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 962/1024\n",
+ "90/90 [==============================] - 0s 957us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 963/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 964/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 965/1024\n",
+ "90/90 [==============================] - 0s 973us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 966/1024\n",
+ "90/90 [==============================] - 0s 917us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 967/1024\n",
+ "90/90 [==============================] - 0s 884us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 968/1024\n",
+ "90/90 [==============================] - 0s 881us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 969/1024\n",
+ "90/90 [==============================] - 0s 887us/step - loss: 0.0026 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 970/1024\n",
+ "90/90 [==============================] - 0s 918us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.1642e-12\n",
+ "Epoch 971/1024\n",
+ "90/90 [==============================] - 0s 858us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 972/1024\n",
+ "90/90 [==============================] - 0s 939us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 973/1024\n",
+ "90/90 [==============================] - 0s 884us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 974/1024\n",
+ "90/90 [==============================] - 0s 879us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 975/1024\n",
+ "90/90 [==============================] - 0s 852us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 976/1024\n",
+ "90/90 [==============================] - 0s 976us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 977/1024\n",
+ "90/90 [==============================] - 0s 868us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 978/1024\n",
+ "90/90 [==============================] - 0s 890us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 979/1024\n",
+ "90/90 [==============================] - 0s 860us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 980/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 981/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 982/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 983/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 984/1024\n",
+ "90/90 [==============================] - 0s 919us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 985/1024\n",
+ "90/90 [==============================] - 0s 909us/step - loss: 0.0026 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 986/1024\n",
+ "90/90 [==============================] - 0s 882us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 987/1024\n",
+ "90/90 [==============================] - 0s 873us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 988/1024\n",
+ "90/90 [==============================] - 0s 923us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 989/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0029 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 990/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 991/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 992/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 993/1024\n",
+ "90/90 [==============================] - 0s 952us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 994/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 995/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 5.8208e-13\n",
+ "Epoch 996/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 997/1024\n",
+ "90/90 [==============================] - 0s 899us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 998/1024\n",
+ "90/90 [==============================] - 0s 930us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 999/1024\n",
+ "90/90 [==============================] - 0s 998us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1000/1024\n",
+ "90/90 [==============================] - 0s 956us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1001/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1002/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1003/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1004/1024\n",
+ "90/90 [==============================] - 0s 914us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1005/1024\n",
+ "90/90 [==============================] - 0s 934us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1006/1024\n",
+ "90/90 [==============================] - 0s 932us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1007/1024\n",
+ "90/90 [==============================] - 0s 992us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1008/1024\n",
+ "90/90 [==============================] - 0s 942us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1009/1024\n",
+ "90/90 [==============================] - 0s 963us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1010/1024\n",
+ "90/90 [==============================] - 0s 916us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1011/1024\n",
+ "90/90 [==============================] - 0s 966us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1012/1024\n",
+ "90/90 [==============================] - 0s 908us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1013/1024\n",
+ "90/90 [==============================] - 0s 865us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1014/1024\n",
+ "90/90 [==============================] - 0s 876us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1015/1024\n",
+ "90/90 [==============================] - 0s 895us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1016/1024\n",
+ "90/90 [==============================] - 0s 885us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1017/1024\n",
+ "90/90 [==============================] - 0s 866us/step - loss: 0.0029 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1018/1024\n",
+ "90/90 [==============================] - 0s 937us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1019/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0028 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1020/1024\n",
+ "90/90 [==============================] - 0s 2ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 2.9104e-13\n",
+ "Epoch 1021/1024\n",
+ "90/90 [==============================] - 0s 1ms/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1022/1024\n",
+ "90/90 [==============================] - 0s 931us/step - loss: 0.0028 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1023/1024\n",
+ "90/90 [==============================] - 0s 901us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4552e-13\n",
+ "Epoch 1024/1024\n",
+ "90/90 [==============================] - 0s 877us/step - loss: 0.0027 - val_loss: 0.0025 - lr: 1.4552e-13\n"
+ ]
+ }
+ ],
+ "source": [
+ "epoch, batch_size = 1024, 64\n",
+ "history_shortcut_11 = shortcut11.fit(x_train, y_train, x_val, y_val, epoch=epoch, batch_size=batch_size)\n",
+ "history_shortcut_5 = shortcut5.fit(x_train, y_train, x_val, y_val, epoch=epoch, batch_size=batch_size)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": "",
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEKCAYAAAAIO8L1AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAABKtElEQVR4nO2dd3xUVfbAv5cACRAQCIQWhFCChEAwRGLWSNBVrKvi6qqwdsHeV8WCsmJZVyyouILY1oaKuPqzoK4rIIiRGIlSDF0IJUBCSSip9/fHmWEmyaRPSSbn+/m8z7xy33vnzpu5591zzznXWGtRFEVRlIq0CLQAiqIoSuNEFYSiKIriEVUQiqIoikdUQSiKoigeUQWhKIqieEQVhKIoiuIRnyoIY8zpxpgsY8w6Y8wkD8fPNcb8YoxZboxJN8akuB3bZIz51XnMl3IqiqIolTG+ioMwxoQAa4BTgWxgGXCJtXaVW5lw4IC11hpjhgHvW2uPcRzbBCRaa3f7REBFURSlWnzZgxgJrLPWbrDWFgFzgHPdC1hrC6xLQ7UDNGpPURSlkdDSh9fuBWxx284GkioWMsaMBR4HIoGz3A5Z4CtjjAVmWmtnebqJMWYiMBFg4MCBI2bNkmL9+vWjffv2ZGZmAhAREcGQIUNYtGgRAC1btiQlJYWMjAz2799Pj08/Jeqaa/hyVThZWXtITs5l4MCBhIaGsmLFCgAiIyOJiYlh8eLFAISGhpKcnEx6ejoFBQUAJCUlkZ2dzdatWwEYNGgQISEhrFolHafu3bsTHR3N0qVLAWjTpg1JSUmkpaVx6NAhAJKTk9m4cSM7duwAIDY2ltLSUrKysuSL7dWLqKgo0tLSAAgPDycxMZGlS5dSWFgIQEpKCmvWrGHnzp0AxMXFUVhYyNq1awHo3bs33bp1Iz1drHcdOnQgISGBxYsXU1JSAsCoUaNYuXIlubm5AMTHx5Ofn8+GDRsA6Nu3L507dyYjIwOATp06ER8fz8KFC7HWYowhNTWVzMxM9uzZA0BCQgJ5eXls2rSpXs8JIDExkZycHLZskZ+Xt59TXl4r/vznEwDo2LGIjz76vtxzuvXWAaSlRRz5DU6ZspLU1F1+eU4D586l14wZAGy+5BI2TJzYbJ8T6P/JG89p9OjRhqqw1vpkAS4EZrttXwo8X035UcB/3bZ7Oj4jgUxgVE33HDFihK03S5da++mndu9ea6dNq/9llKZPTo61IEuXLpWPjx7tOg7WzprlR+Geesp149tu8+ONlSCmyjbVlyambKC323YUsK2qwtbaRUB/Y0wXx/Y2x+dO4CPEZOU7hg6FX37hqKNg3z6f3klp5Bi39ylPQ3QHD5bfzsvzrTzlCA11rTvebhXFV/hSQSwDBhpjoo0xrYGLgU/cCxhjBhgjf0djTALQGsg1xrQzxrR37G8HjAFW+FBWaNcOdu6E4mKKiyE/36d3UxoxLdz+FbVREL/+6lt5yqEKQvEjPlMQ1toS4CbgS2A14qG00hhznTHmOkexPwMrjDHLgRnARdZaC3QDFhtjMoEfgc+stfN9JesRJk6EF1/kllvg2Wd9fjelkeLegygrq3y8ooL45hvPisQnqIJQ/IgvB6mx1n4OfF5h30tu608AT3g4bwMQ70vZPDJ4MLz1Fj26FFNY2ApryzcWSvOgriamHTvgt9/k5+NzVEEofkQjqStywgmwbBkxMeBwclCaGXU1MQH8+9++k6ccqiAUP6IKoiL9+8OmTZxzDrzwQqCFUQJBXU1MAM89Bw7PTN+iCkLxI6ogKnL00bB5Mx07QpcunhsIJbipzsRUXAwOt3YAOnWSz4MHYdUqfI8qCMWPqIKoSJs24AiwiYqC7OwAy6P4nepMTO7ebeHhcMoprm1VEEqwoQrCE45uw4ABsGZNgGVR/E51JqYffnCt9+sHsbGubVUQSrChCsITYWFw4AAjR8KcOYEWRvE31ZmY/vtf1/opp5T3XFIF
oQQbqiA8MXYsPPggbdtKA6CR1c2L6kxMX3/tWj/llPI9iM8+g++/961sqiAUf6IKwhOxsTB8OHz/PbGxsHp1oAVS/ElVJqa8PHDkmaNVKxg1CmJiyp87fbqPhVMFofgRVRBVMW4czJlDXBw4EiMqzYSqTEx797rWe/aU7Czu7TX4YcxKFYTiR1RBVEVICHTtSu9uRWzZUnNxJXioysRUWupab+mWg+AJt1wA3br5Ti5AFYTiV1RBVEf//rBxo6bbaGZUZWJyj38ICXGtH3eca/3wYd/JBYhty0lxsY9vpjR3VEFUx+DBsGwZ3bpJoleleVDVC0FVPYg2bVzrjhAa3+F+Y1UQio9RBVEdw4fDsmUMGeInF0alUVBRQTjNTO4Kwr0HERbmWvdrD6KkxI9pZJXmiCqI6jAGoqIY0nv/Ee8VpXngycxUlYLwaw+iRYvygyTuQimKl1EFURPHHEPknixycgItiOJPPHkyVTUG4dceBOg4hOI3VEHUxODB8Msv2pNvZnhSELUZg1AFoQQTqiBqYsAAWLWKzp3L+8ErwY0nV9fajEH43MQE5bWTe7dGUbyMKoja0Ls3Q7vv0gmEmhGexiCqMjFpD0IJVlRB1Ibjjye24EdVEM2ImkxM7gqiZUtXj6OkxA8v9aogFD+hCqI2JCTQY81CNmwItCCKv6jJxORu5THGz70IVRCKn1AFURtat6ZF1whCCjSta3OhLm6uEMBYCFUQig9RBVFbRo6k7+50/T82E+ri5gp+HqjWQWrFT6iCqC2JicQeWKZTkDYT6mJigvImpn/8w3dyAdqDUPyGKoja0r49XVrvY+PGQAui+IO6mph273atL1vmO7kAVRCK31AFUQc6RB1FdtaBQIuh+IG6eDEB3HWXa13HIJRgQRVEHegwrC/rF2zRqOpmgCcTU3VjEFdc4Vr3eVoWVRCKn1AFUQdCoo9mZI/N6u7aDKjJxFRxDKJrV9f67t0+zqGng9SKn1AFURd69ya23Wado7oZUFcTU6tW0LmzrJeVQW6uD4XTHoTiJ1RB1IUePehZlq0KohlQl1xMTiIjXes+nWBKFYTiJ1RB1IWWLQktO8T+vWU1l1WaNDXlYqpoYgLo0sW1rj0IJRhQBVFX4uPpkpulA9VBTl1NTAAdOrjW8/N9IxdQeVY5RfERqiDqyrHHciw/s25doAVRfEl9TEzuCmL/ft/IBei81IrfUAVRV2JiOMZkkZERaEEUX1KXdN9O2rd3rfutB6EKQvEhqiDqSkgIXdoX8ttqtTEFM+4KwNlzqM7NFfzYg1AFofgJVRD1oMWIY4nY9mugxVB8iLsC8KQgtAehNAd8qiCMMacbY7KMMeuMMZM8HD/XGPOLMWa5MSbdGJPi2N/bGPOtMWa1MWalMeZWX8pZZ1JSODp7iQ5UBzGeYtEazRiEDlIrfsJnCsIYEwLMAM4AYoFLjDGxFYp9A8Rba4cDVwGzHftLgDuttYOB44EbPZwbOHr1Isps08yuQYwnBdFoxiB0kFrxE77sQYwE1llrN1hri4A5wLnuBay1BdYeeQ9vB1jH/u3W2gzHej6wGujlQ1nrTM+ekJYWaCkUX+GuADz1IAI6BuGeW9ynARdKc8fDz9xr9AK2uG1nA0kVCxljxgKPA5HAWR6O9wWOBTw2x8aYicBEgJ49e7JgwQIA+vXrR/v27cnMzAQgIiKCIUOGsGjRIgBatmxJSkoKGRkZ7Hf8mxMTE8nJyWHLFhF74MCBhIaGsmLFCgAiIyOJiYlh8eLFdG9TwNIPl3HBBceRnp5OQUEBAElJSWRnZ7N161YABg0aREhICKtWrQKge/fuREdHs3TpUgDatGlDUlISaWlpHHLMNJOcnMzGjRvZsWMHALGxsZSWlpLlmBS7V69eREVFkebQUOHh4SQmJrJ06VIKCwsBSElJYc2aNex0hPTGxcVRWFjI2rVrAejduzfdunUjPT0dgA4dOpCQkMDixYspcbSIo0aNYuXKleQ6
GqH4+Hjy8/PZ4EhG1bdvXzp37kyGw6WrU6dOxMfHs3DhQqy1GGNITU0lMzOTPXv2AJCQkEBeXh6bNm3yy3MCCA0NJTk5uU7PqbT0eEBmAUpLS+fQoVJKS10/382bN1JY2LPccyotHQ50BOD33/NYuzbXN89p2LAjcuR99RUdp05tts9J/08Nf06jR4+mSqy1PlmAC4HZbtuXAs9XU34U8N8K+8KBn4Dza3PPESNGWL/x88/27bEf+O9+il9JTLRWIiCs/fFH2Tdpkmvfo49WPmfBAtfxE0/0oXAbN7pudNRRPryR0kyosk31pYkpG+jtth0FbKuqsLV2EdDfGNMFwBjTCvgQeNtaO8+HctaPoUPpd+BX3+bcUQJGTWMQnkxMnlxjfUKfPq71fftckXyK4mV8qSCWAQONMdHGmNbAxcAn7gWMMQOMkZAkY0wC0BrIdex7BVhtrX3ahzLWn5AQBvQr46uvAi2I4gtqGoPwNEjtyTXWJxijA9WKX/DZGIS1tsQYcxPwJRACvGqtXWmMuc5x/CXgz8Blxphi4BBwkbXWOtxdLwV+NcYsd1zyPmvt576Stz5ERLVh/a8HgbaBFkXxMvVxc/VbDwLE1dUpWHExtG7t4xsqzRFfDlLjaNA/r7DvJbf1J4AnPJy3GDAV9zc2TNJIer+5DEgNtCiKl6lPoJzfFYRjEFZ7EIqv0EjqhpCUxNHbftBYpSCkUY9BgEZTK35BFURDaN+eru0O4PAwU4KI+oxBqIJQgg1VEA3k6KMNs2erI0mw0dAxCJ/3KlVBKH5AFUQD6dStNaeOKuSuuwItieJNPI1B1GRi8psXE5QflC4q8vHNlOaKKoiGMmAAfwxbQvfusHcvOAIclSZOk/BicqI9CMVHqIJoKCefDG+/TVLcAS66CM4/H7Zsqfk0pXHjyVzkyLoAQGho9eeoglCCAVUQDaVrV7j3Xk6YP5kPPoA5c+Cdd1weiErTxFMP4uBB17527SqfowpCCTZUQXiDAQNo0TWCDq0O0a04m4gImFRp9gulKeFpPMFdQbT1EBupg9RKsKEKwlsMHQpffQXjx3NN/DKSkuCttwItlFJfaupBeFIQfh2kVgWh+AFVEN7iD3+ADRvggw9gxgzGrZ/K+l8OBFoqpZ7UR0H41cSkXkyKH1AF4S26dIHbb4fISHj1VTjlFE5f/ACOtPZKE8OTuahRKQjtQSh+QBWEL2jRApKTGTy6G0/euInnH9wVaImUOtLQMQhVEEowoArCh3R46HZu7/4uKfNu9+0cxYrXqWhi2rcPcnJc+3SQWmkOqILwJaGhdHziXqLOT+LjJ9cEWhqlDrgriP374Zhjyh93nxba0znag1CCAVUQfqDrfROI/vyFQIuh1AH33sDrr4NjOmNA2mb39tnTOaoglGBAFYQ/CAuDocNY89aPgZZEqSXuvYHs7PLHPJmXQIaenDgnjfYZ7l5MqiAUH6EKwk+MfPEKtv/zTcpKNe1rU8BTMj4nVSkI8GMvwr0HoW6uio9QBeEnWrVpSeez/8APL/8aaFGUWlCdgvA0/uDEbwPVamJS/IAqCD8y+PbTyf9gfqDFUGqBp2ytTqqb/jkgPQhVEIqPUAXhR1p27UT4oV3syVMzU2Onuh5EeHjtzlMFoTR1VEH4mSH3nsNPJ97GskkfcugQrF0baIkUT1SnINq3r/qY9iCUYEIVhJ/p+KcTOWXldAo+X8QLl/3Iu49v0ulKGyH17UH4TUG427kmT4azz4bNm314Q6U5ogoiQBx19QXc8seVXJ7/PNdNLGP6uDQ2bAi0VIqT6sYgPM0F4ek8nw5Sd+tWfvuzz+D66yuX270bTjsNzjwT8vJ8KJASjKiCCBAJt55I6HVX0ufSVB7ZcyMTzcu8/WAWZWWBlkyB6nsQd95Z9TG/9SCOPbbyvs8/r7zvllskDf0XX8Bjj/lQICUYUQURaM45h65z/0Wb117kqtKXefyMhWS+kh5oqZo9ERGV9yUmwptv
ymdV+G2Qetiw6rs5Tt5917X+xhu+k0cJSlRBNBZat6bXlWM4P/Qz9s14y8dhuEpNnHqqzCbrzpdfwl//Wv15futBtGkDZ51Vt3M6dfKNLErQogqiMTFmDIM/+SetzjuLn4Zewe8PzAq0RM2Wtm1hxIjy+zp3rvk8v+ZjGjeubuVrUwFFcUMVRCMk+cFTSVg2k9Vfb6Vo2+5Ai9NscZ9X/Npra3eOXxVEVFTVx6yFCy8sv097EEodUQXRSDFtwuj5z9tYfNpUcj9eHGhxmiWpqfDSSzJR4NSptTvHr3NC9OhReZ8zJiImBubOLX+suhBwRfFANb4aSqAZltqJgz88S9qp93PSuSmBFqdZUtuegxO/9iA8KYi9eyE/H9atq3zMfUo8RakF2oNo5LRtZ2gdFsK+PF+3Noo38OukQZ6yBqanw9atnsurglDqiCqIJkDU6XEsfnk1AHv2+KHhUeqNX3sQAFOmlN+eO7dqBXHggM/FUYILVRBNgKP/cjx7P/+emTPhySdh5sxAS6RUhd8VxEMPwUcfubZffRUuucRzWe1BKHVEFUQTwPTtw9jhG+nTu4xNmyAnB7ZskWN79si4pP73Gwd+HaR24imq2skVV7jWtQeh1BGfDlIbY04HpgMhwGxr7T8qHB8P3OPYLACut9ZmOo7dCkwADPCytfZZX8ra2Gl7Riqnf3gto6J6EXLBOJ5/L4a9e2HNGnlT7dkTkpJqDuRSfIvfexAg7q6tWnnO6tq3r2tdFYRSR3ymIIwxIcAM4FQgG1hmjPnEWrvKrdhGINVau8cYcwYwC0gyxsQhymEkUATMN8Z8Zq1tvsmxTz8dkpNp++KL8O+X+duTTwJQViYu7y1aiLUhP1/SUX/0ERx/vGdHF8V3+HWQ2klICPTrB1lZlY9FR7vWtZup1BFfmphGAuustRustUXAHOBc9wLW2u+ttXscmz8AzsifwcAP1tqD1toSYCEw1oeyNg2OOgruvRdGjoR586CsjBYtpH0wBg4dgjPOkHZi4UL4xz9g377yl0hLgxUrAiN+cyAgPQiAG2/0vD8mxjV3hNoilTriSxNTL2CL23Y2kFRN+auBLxzrK4BHjTERwCHgTMBjBjtjzERgIkDPnj1ZsGABAP369aN9+/ZkZmYCEBERwZAhQ1i0aBEALVu2JCUlhYyMDPbv3w9AYmIiOTk5bHEY+AcOHEhoaCgrHC1qZGQkMTExLF4sgWuhoaEkJyeTnp5OQUEBAElJSWRnZ7PV4UkyaNAgQkJCWLVKOk7du3cnOjqapUuXAtCmTRuSkpJIS0vj0KFDACQnJ7Nx40Z27NgBQGxsLKWlpWQ53hB7xcfT57vv2HLDDWy5+GLCw8NJTEwkNTWd2NgyLrlkIM8+2579+zdx1VVh3HzzOuLi4jh8uJBp08ooKmrBzTeXcvzxnUlPl6+1Q4cOJCQksHjxYkocxvNRo0axcuVKcnNzAYiPjyc/P58Njrzkffv2pXPnzmRkZADQqVMn4uPjWbhwIdZajDGkpqaSmZnJnj3yHpCQkEBeXh6bNm0KyufUunUHIAyAH37I4YQTOpCWlgZw5DktXbqUwsJCAFJSUlizZg07d+4EIC4ujsLCQtY6ZpLq3bs33bp1q/k5jR7NgbffpvcHH9DrP//ByZJt20jq2pWW27aJTP/5D22GDGn2z6nc/6lXL6KiovzznBrh/2n06NFUibXWJwtwITLu4Ny+FHi+irInAauBCLd9VwMZwCLgJeCZmu45YsQI26z4/HNrb7rJ2p9+svauu6z98ENrrbXLl1tbWipFpk619uGHrc3KsvaHH6ydO9faoiIprnifJ5+0Vox+1l5ySQAEePNNlwBgbVmZtSNHura/+y4AQimNnCrbVF/2ILKB3m7bUcC2ioWMMcOA2cAZ1tpc535r7SvAK44yjzmup7hzxhnQvz98+y2MHw8ffAA//kj8zTfD2gLo358bbmhJaalYpObPh/ffF4tD9+6wcyeEhUGH
DoGuSPBw8smu9Y8/ljl6/Joj7+ijy28bIx4MTrZv96MwSlPHl2MQy4CBxphoY0xr4GLgE/cCxpijgXnApdbaNRWORbqVOR94F6UyMTGSDyI+XvJTx8TA3XfD9Onwyit07iy7r71WBq6d5ug//1n23XADZKvq9RrHHguxsbJ+8CB8913dzi8thX/+U+LfHBaSunHiiXDeeTIv6ltvyT53TwWdBF2pC9V1Lxq6IGMHa4D1wP2OfdcB1znWZwN7gOWOJd3t3O+AVUAm8Mfa3K/ZmZiqYtUqa0tKrH3gAbErVcPBg9ZOmWLta6/5R7TmwPjxLovOG2/U7dx333Wd+8wzDRCiuNi1/thj5c1OzmX69AbcQAkiqmxTja3FxDSOt/kTgJ7IoPEKR2PeqCbITExMtM5BIgXxgb37brjuOhgwQPatWiU5fNzdH4H774dHHhGLhNIwrr0WZjmm8vjXv+Trry1dukBurmvbK/NGbdwIcXGePZjWrnX9NpTmSpX/+mpNTMaYk4wxXwKfAWcAPYBY4AHgV2PM340xasFurLRoIa3Tu++K3SIvD2bPhqeekqyfbqSkwCuvoHNie4F27VzrdfUqbVHhH1lY6AUlER1ddX4WfaFSqqGmMYgzgQnW2uOstROttQ9Ya/9mrT0HiAd+RgLhlMbKgAEwebJMT3nuuXDPPdJd+Ee5oHbOOK2M/v1FdygNo21b17ozeLmsTL7ye+8Fh3chII4DkybB77/LdkUFERYm0582WEn89a/wzTeVb5CX18ALK8FMtV5M1tq7qjlWAvzH2wIpPmLIEJlU2dl69esn3k8rVsio6j//yUlvvMHPHx0G+gZS0iaPu4I4eFAa9xtucL3Ed+4Md90Fu3fDmWfK8WXLPLffIPt//RWGDWugYCefLNopPNy1Tz0UlGqokxeTMeZ4Y8z/jDFLjDEa2dzUcG+5rrxSEjmdfTbs2AHPPw/vv8/otCe8F2ybnV27cOKsLPjb30SBBQEVFcT//lfewnP33fL53/+6egb/+598OuKyKuHsYTSYdu3EluhEFYRSDTWNQXSvsOsO4BzgdOBhXwml+IFWrWQ0NTpaYihiYuCWWwg9NpaVX1cIVykuds1QVpOtw/k6fP31MGMGPPNM5TLLlsk8njk5YoN58UV44gl5Va6qhWxCuI9BzJwJp5ziuZy7IgGZ3rQqi49X2/FevVzr2yqFJinKEWrqQbxkjJlsjAlzbO8FxgEXAfurPEtpskTfdQH7Z7zp2vHzz/Dyy9LD2LkTJk6UHONnnAE33QTPPQcTJkhPobQUPvwQPv0Unn4aHn9cPKbeeUeutW+fnPPbbzIO8q9/ia3lyislidGECfDCC4GpuBdxb/g96bujjpLPig4B119f9TW9qiA6dXKtb90qA9XqnaB4oKYxiPOMMX8CPjXGvAHchiiItsB5PpdO8Ttt+/egrFUoS5Nu4/jLYjDr18lk9127SqNeWipv/089Bb17yytyfj588gksWgSDB0taWacx/cYb4cEH4dFHoaBAIsC6dJFjFWdDGzhQyuzb52pFmyAVewYViYyUz/z82l9zy5aay9Qa9+/2t9/guOPExOfIEKwoR6guSMK5IPM53AzMB06szTmBWDRQznt8M2+v/fiW/9rsbNe+w4etnffWAcnv46SsTILyJk60dt06zxf77TdJBlUbNm+29umnqz5eXGztKadYu2JF7a4XAL780nNcmnPp18/axx+vvsxLL8lX6tw+4wwvCrh9e+Ub9urlxRsoTYz6BcoZY84B7gZKgSmIW+uDSDzEA9ba9b5VX3VDA+W8y1dfScfg7LNlQHXLFpnvYNYscc88/3wpN326mEdCQ71045dekoHzhx6S8Y9Nm2SMZPt2MWH17i0BXiEh0mNJTZVmrqZXdz+xeLFkvGgIn38u1XEm2jzxROmgeYXDh8X0V5EDBxrNd6j4lSoD5WpSEL8AyUAb4HNr7UjH/oHAVGvtxV4WtEGogvA+P/8M
c+dKu3H11fDFF7B+vViB2reXeDtjxFO24pQE1jYgMvvjj8WryVpxyd2xAzp2FHfd006TkeC1a2HJEjGTgIx51OeGBw9Kg+mlMPKMDBgxomHX+PFHsdIlJsr2scfKdb2Gp7r+8gsMHerFmyhNhCp/+DVlc92HJNlrA+x07rQys1ujUg6Kbzj22PJTHl95pXzm5kqPYuBAcYj6299g82aZ/fKTT6Q9//ZbGXeuyounWs49V5bqGDhQFhBFcdNN4uM/fDhcconsLywUN9rISFEwYQ5/i7IyaYELC+Gyy+Dii+GPf5RuUAPfotu3r7zvmGNceqw2dOlSfgbRuoxX1Jt167yjIF57TWamuvtuUe7BhnPaRn9QXOzKsBkAavJiGosMSJcgg9OKAkBEhLTD7drJGPajj4qz0z/+IQFgY8bAv/8tPY6KbNwoY9vundfcXCgqaoBAJ5wgPYjHHpN8Ux9+KCHKDzwgXjpTpohQO3eKjezhh0WQ5GSJNP/2W+mZnH66aLvaUFgog+og/qnTpsFLLxEdXbn9+OtfYdCgqi914YXltyMiysezOW/jUxyT8tSbFSvEXblDB/F6e+aZhoWAf/1143N7zs6WVMhLlkjP09M84N7iyy/lh7Fxo/yGG/QHqR81mZjCrbXV/jRrU8ZfqIkp8Dz/vKR/cr70vP++OECddJJsz5kjb9JxcaJILrtMHKPuvFN6Hw8+6AUhbr5ZpmUdP97lTWWt2MUeesg1QcP27ZKjqkMHiQcIC5OW/ZVX5JX/wAEJdfZkjsnJEaXTo4e8UR44IHk03n4bxo7lT3cM5NPPXe9fc+bIC/oDD3gWecaM8ia6sjK5rNPhKDy8jr2IRYtk3tlu3eCKK0SLu+OpTvfdJ5reE6Wl5edTrcizz8KuXXDVVTJHCUBmppgKa/tQS0vFZnnXXfL81qyRZ5aaKj+UxsDkySLfq69Kz3TlSnnbGTzYe/fYsUMUbZcu8rJy7bXy3Z9/vqTM8QbWyh8xNBT69avatlrdCDbwDfAUMApo57a/HzLj25fABdVdw5+LejE1PsrKrL3zTlnPzLT2hRdc+xcvtvbmm6294QZrc3Ksff55mQ2vwezaVd7TqqJARUXikrVhg+cyBw/KzGzvvy/CL1lSucxtt1mbny/ru3dbe+iQrO/bZ+1DD9nfz77BdmOHBWv79JHbrVrlchoKZ78dxGp7Ie/ZUA7Z//s/a++bVGqvDHvH/nPqYWutOIe5OxoVFdWy/osXSw73nTvF2+vaa60tLCxfJiGhsifTlVeKt9nvv5cvm55u7YUXWvvJJ1IJ9+/2gQfkIc6Z41mWBx6Qh1sT8+bJNIePPmptXp61n37qus/06db++9/yXALJoUPyvTopK5PnfcUV3pGtrEye2R13lPcILCuTKSKvvdba1aut3b+//ve46ipr77nH2rfesvayy6y9915r6+vFBGCMORMYj6T77oSYm7KQDK+vWGt3NFCXeQ3tQTRO/vUv6NsXPvtMPJ7cX0QXLJAB7shIeWu+80554XSP5Qoo+fkwdar0NEB6CtOnyyxtV1xR9Xm7d7PtkjtZnt+fESd1oFvLXArvnUJYuxB6kc1kpvITI/iVoYznbS6c0IluXcsoSxxJiwX/k8RLV19Nu3aujLA9e0pOpiMz1O3aJd0zgA0boKREotQPHoRrrnH1EtavlzdeZ+/gyy/li//1V3lb/ekn2R8dLWM3+/ZJ7vdWreQt+dVXJSBy5kyxdXXtKnUvLpZrVoxncee118Qr7bPPXPEvFdm0ScpUSCB5hOJieO896VH86U8yTuIcS6pIfr5E5J93nmyXlIjrHYgZ8NCh8pHkdeGbb+Tzj38sv3/RIvldDBokHnb1HTN45x3JqXL22Z7HgnbulFikdu0kENVT4i4nZWXi1ZCY6JpFcN06+a4TE+H//k96QvIbqZ8XE4AxxgBR1lpvhur4BFUQjZOSEjEnH3dc1W2Ek927JZi6ujbn99+hTx+vilg9
L7wgDVNkpGivceNEq9Xk13vokDSwBw6Ijej113n/2a1kMYjHuI/DuFxN9+2rMPXrPffA+PHMin+B23iWQ7TlVL7iqrF7uHh4lqR4nT5dPAAOHpQv2TkYP3VqZVnmz5fFOTXt6afLjHP9+onZBKROK1dKQ3TxxZCQII3euHHl84c4lUdpKRx/fPl5VitSViaVe+AB+R4rmrZeekm+n4kTax74ffddaeQOHpTv/oEHZNwjMlKuc8YZMg4UEyPli4qkcXz+eXjjDVEQLVrI+BPIdSZOFJkeeqjmeTFuvVWCCSua60pLxT5YUiImPU9mus2bRZ6q7mGtmJOmTavZm275cnl2xcUyRjN1qnj0/eEPIsuBA1LHvXvlO5o2TZ5DmzZVXb9+JibnAvxUm3KBXtTEFBy8+661H30k6+vWSdBYerrE0P3nP9aeeaa1990n61VZkmpLaalYi7Ztk+0dOzwU2rlTzAhjx1r7+ecNut9nHxfbuLjK1p1KvPGGtWPH2hh+s29wqX2DS+19PGKf73CfLfvPx9aOGycmrqeftnbRIjknP18qVBVlZdbOmiVmImsl6PDll11ChIa6zBclJdVfJzvb2o0ba1/x996Tme3cZ7qbM8fjlHt79shzuPVWlxWvEmvWWHv77WL6mjxZThg/3tq9e0W+TZuk3Ny5Ykr54gvZfvFFa6+7ztqLLxbzV0aGnONuOvLEDz9IHari0CFrDxyQ+735ZvljhYViR73xxso/2N9/F/muvNL1HB0UFVUde2rXrJFntHmztamp1j77rJiLrrtOTFHbt4tM990ndf/55+rMYPU3MQEYY2YAr1trl9VYOIBoDyJ4mDZNXoq6dJEXo/x8cbJZtkzGoE84Ad58E374Qcr36yfmqf37xRLhjB+oDmvFchQZKS9eo0fDHXeIY9O991ZxQjVvd4cPi5xOq091fPghXHBB+UuXo7gYcnIwvaP4A0tIJ5EiQgHL3LmGP/+55nvUmvh4iYEAMfPcc0+DLldSIlar445zs4JYC6tXi6kqPFy+R2NcpjsH+fniGr1jh3Q45syR63m0PmVlib2yrjPi7dsnJqr58+Gcc0SOl16SHtPIkfKmv3y5JJG89lrx5165Eh54ANuyFddfL45Mp1Y1E86778oXcOKJ4n20ezfcdpt4PqWlyQ/koYfELPjWWy5vu2ee4eBB+R1t3Qqvvy6duWnTpGNSJQ13ha2/iQnAGLMKiAF+Bw44LmittQ3NUO9VVEEEP7m5Es5Q0aFm1iwxtTv/K0884Tr2889irh8zpvw56elybMIECaE4eFA8M+fPl4Zq5UqxKDhN2NWxdq2cGxIibWBNVoLSUrHy/PCDOE395S+ey111lZjx3XFagqrDGchYnZn6CM6ZjJwsWlTnUPCsLKl3u3bSoB97rLSLt99eoWBZmZje2rWrpHC3bRPrzOTJIrvTqvWf/0hbeuONXozWr0hxsTTajz0Gt9wiFbj0UhEoL0/MeYg39MGDYsIPD5fq3HqrB5PnunXiwXX77fKwW7WSVj8jQx7gtGkSw7N//xF7am6uWDDbtpV40NRUGYubNk2sej6kwSamPp6W2pzrz0VNTM2b3Fz5vOsucUCyVhyVbrpJtq+5RnrlBw+KdWTKFLEKWGvtxx+7uvNlZWLmWrLE2r//vbJTj7XWbt1q7cyZ1r7yilhNJk8W68wnn1j74Ye1l9ndM6msTCwC7pabvDyxHrz5ZnmTVHWOQRkZ1l5+udQ3K6uyZaGwUCwQR8jPt7ZlS9fF77+/1vIfFocre9ttru/fyeOPu0x3NfHhh/JdO5+HO2Vlkt9q2rRai1U/nntOvKVefbX8zfPyjmzefbdY8Zy/o+xs+a4LCsTKtXdv/W//6KPiEFeRTz5xPcu6UKV5zkFenrUffGCtbaiJCcAYEw84Xyu+s9ZmNkBj+QTtQSggoQg5OfJG1q2bmAIGD5be/W+/SazT5s3iDfT449VfKytLuvru5awVc9YVV8j6
nDkSNnH55bL95puS8fzRR12B3lWxYYNME96zp7xxjxghTkbPPFPZanDCCfD997L+8cdiHXGyc6dYLlaulNQoDz4oWdnvuUfGVB98ELo7Znd58EEx4cTESGR8RATlehHrEv7CgJ/eq+lrBmQsu08fkbtiL2jfPgmPeOih6q9RXCwhGDUlk733XqlPx461Eq3ulJaKy93113uM+fj9d/neb7ml/P4ff5RjS5aIdeqppyqnunrnHekRVUwOsGWLXO/OO6Xjdt99nkUrKJCehCfnjaIi+X0WFkqHJDTU5YA2b55YvOLixDHuscfEavbrr3LONdfAhRfWP9UGAMaYW4EJwDzHrreMMbOstc/X5nxF8Sfjx3ven5QkQdb9+0vMUW0amkGDxLsoN1cUyuHD4ml4/vmuKUDj413ljZGYrjPPlD/j4497Nos8/LCYD3btEm/DffukAenaVZTKzJkyTenhw67MHyNGuBTEb79JA2+tmD1ycyXg9uijxUTTooU0/LNni0nE2fBcd52Y359+WjweJ02SjBgrdp+Ic4rIiN9/Yv16kTsqSpxkDh2SsZp160QRHnec7IuLqzr476ijpPHatk0UoCesFSejv/615mdxww0ib3x89XNnONm9W+rQvr00nLt3y/fjbjJ0eo8ZgyiFm26q8npvvun58HHHiZfeaaeJd+rDD5d/odi8WZypdu8WZzh3s98bb4hpbuxYUTBV4Ry2+fZb+Z2MHCkN/8aN8pzPOUdegsLCXJa7JUtEoTrTlm3cKL+5hx+W72P+/JpNobUdg/gFSLbWHnBstwOWWh2DUJoBO3ZIo+tswC+/XBrOmli6VHofI0fKm/3UqXKNpUtl7GPgwKoHOu++WxTAsmUyfmqMNCS33irHr7xSGo39++XN3Tm3dVV/eGvF6/GZZ0QhOT1KDx2Sul188k4Sz3KNhM469QOW9bmATp3Ek3bMGAlFiIsT5TVpkryFtmxZOSzAnXfflTCK++5zRdO78+CD4hxQnaesO4cOSZbhoqLK6UmcyvKoo0SZ3XKLtPmvv+6a2NAY+Q4//VS8Ym+4QZTITTdVn2Dx0CFR+J48iCvyzjvSa2vbVoYc8vKkNzl3rsizbJk8u9xc6T29/LJ4prp7EnvCmc0lJkbG21JSRCksWlS1gt2+XXozs2eL8nAqOGcqMgcNHqT+FTjOWnvYsR0GLLPWNqrUj6ogFF8yb5504515AGuDtWJGAvlzx8a6BiOrG0BetEgau9RU6WWcfbZ4WL30khzv0UMcZXr0qH99Kgk6fLjLmykykuLft1FKSKWYtKwsaXCNEUef2gzIP/SQNGLHHOPaP3Om9NCcKc3rwqOPirnm2WfF062wUHoHZWVitmndWpTS9u0yyH3sseIU8K9/yfc2YID0bCZMEJk8mbhycx3mN0Q5n3yyKMiaKCyUZ3XyyfLcnD3I9etFGTzyiDT2XbuKKdRp+gsgDVYQtwNXAB85dp2HuL0+23DZvIcqCKUxc/CgmIjqmt32kkvEvHXKKa45OIwRu3fv3l4UcPt2iaZ2Jshbv77KbKxFRdIYVxXQXJG8PEne6MxoGxLiituqD6Wl0vMZMkSCptu2lYb3rrtEoW7dKjF8VVFYKCajq6+W7/K118Q12hnA/Pnn4hn35JPS27MWLrqofrI2ARoUSd0COB44DKQ4LrbIWvuzNyX0BqoglGBkzRoxR1kr4yebNsn+adNkcNOrpKaWn5lo9mxpRb3Enj1ik4+JEbPNkbQhAWbPHnE3/tvfxES0Y4eMc9x1lyjB+iqyJkL9B6mttWXGmKestcmAN6csURSlFjgzRxgjdvU77pDtFSt8cLOKeYquuUYGILzUVenUyZVny9OkdoGiUyfpQBUUyHiRM1vG5MmuXFjNkdqE0QB8ZYz5syMvk6IoAcJ98ibncIFX8WSXcSapC3LGjZOxCPe06926idWtuVLbMYh8oB2SyfUwrkjqDtWe6GfUxKQEO7t3l0/lkZgoLpZe
iw3Ys6ey3WfCBDHIK8FKlS/+NfYgHGMQp1trW1hrW1trO1hr2zc25aAozYEuXcQzxkl6uphHnnrKSzfo1ElyZQwZ4tpX3UxzaWnS6+jXT4z3jW0GOKVB1KggrLVlQHAP0ShKE+K55yrv+9vfxGXSK4SEiD+mk127PJf7+mtxxk9Lkyis8eMl4d3evV4SRAk0OgahKE2M6GjxsqnIjz968SbuE3fs3l35uLUyWl5SUn7/qlWS5E6VRFBQWwVxB/A+UGiM2W+MyTfG7PehXIqiVEO3buW9UUHMTV6jJgWxcGHVblSffiqmqtmzvSiQEghqqyCOQgLlHnGMPQwBqsqGriiKHzjxRIlGdrLMm7O1uOdUz8+vPLbg7tl07bUSNXfbbeXLTJjgSh6lNElqqyBmIMFyziQD+cALPpFIUZRa456mIj3dw8RD9cWY8r2I3Nzyx9etc60ff7yUf/ppyT5XUUA1NzVZaqsgkqy1NyIurlhr9wCtqz8FjDGnG2OyjDHrjDGTPBwfb4z5xbF870gp7jzW0Rgz1xjzmzFmtTEmuZayKkqzYcAASU4HMpa8ebMXLx4Z6VqfP1+0z8svizKYM8d1rH9/+XSmsnW3dRUXSzIkpUlSWwVRbIwJASyAMaYrUFbdCY7yM4AzgFjgEmNMbIViG4FUR1bYqYC7s/V0YL619hggHlhdS1kVpdnQokX5LKR//7tYdSqOHdeLlBTX+tVXy80mTqxcruKUnyNGuLQWSNIopUlSWwXxHJKoL9IY8yiwGHishnNGAuustRustUXAHKDcdBnW2u8dvRGAH4AoAGNMB2AU8IqjXJG1dm8tZVWUZoX7/NuvvSYTCzlTRTSIquZBdad/f8/pSN3nyNy2zQvCKIGgVhMGWWvfNsb8BPwRibo7z1pb0xt9L2CL23Y2kFRN+auBLxzr/YBdwGsOs9NPwK3O+SjcMcZMBCYC9OzZkwULFsgF+vWjffv2ZGbKxHcREREMGTKERQ7Xj5YtW5KSkkJGRgb794tDVmJiIjk5OWzZImIPHDiQ0NBQVji8NSIjI4mJiWHx4sUAhIaGkpycTHp6OgUFBQAkJSWRnZ3NVkdw0aBBgwgJCWHVqlUAdO/enejoaJYuXQpAmzZtSEpKIi0tjUOHDgGQnJzMxo0b2eHwZYyNjaW0tJSsrCz5Ynv1IioqirS0NADCw8NJTExk6dKlFDoGE1NSUlizZg07d+4EIC4ujsLCQtauXQtA79696datG87I8w4dOpCQkMDixYspcbx+jho1ipUrV5LrsD/Hx8eTn5/PBkf+6r59+9K5c2cyMiRFV6dOnYiPj2fhwoUyXaExpKamkpmZyZ498h6QkJBAXl4emxwZ5/Q5Nfw5HX/8KCq+6735ZjGpqUsa9pz69CFszBi6f/UVFSns04fWF17IsiFDOLhwYaXn1CU3F2dm7N0rVrBiwYKAP6e+o0dT0rYtoW3bYkNC+N4xz7T7czrxjDP4+bvvmtX/aXR1+darm4+0IQtwITDbbftS4Pkqyp6EmJAiHNuJSFqPJMf2dGBqTffUOamV5khRkbXHHVd+zmooP991vSkrs/bOO+WC4eHWPvNM7c5bssQlSGKiFwTxAn36WLtrV/Vl2rXz3f3LymRC68ZHlW1qbU1M9SEbcE8BGQVU6msaY4YBs4FzrbW5budmW2vTHNtzgQQfyqooTZZWrcTrdMaM8vsdL9kNwxjJdV1YKPEQFV1Zq8J9JqPsbC8I4mcKCmSqvIQEmSTi449l/+TJ4Oh5ADL/rDO0/cknZf7RYcNcE3Fv2iTTvt1wg1xryxaaFNVpj4YsiPlqAxCNeDxlAkMqlDkaWAf8wcP53wGDHOtTgCdruqf2IJTmzmmnuV7cTzxRXloDQlGRvI07hVm3LkCCuNG3r7XHHmttQoK1M2d6LuPsQRQXW7tvn6zv2mVt//7yZW7cKNewVnoD/fpZu3u3tV9+ae2ECa5ewllnWbtwoZQ3xtqlS31du4ZQ
ZZtaqzGIeiqeEmPMTcCXQAjwqrV2pTHmOsfxl4AHgQjgRUcWjxJrrXPI7WbgbWNMa4eiudJXsipKsHDjjfDll7L+3XfyQvvtt645qP1Gq1Yy+dDnn8v2oEEyJ+jQoeINtXWrrE+eDH37+kemJUugZ0/YuVMmAz/mGBg1ynNZayX396JFLnlzckTWiAiZZi4nR/KvR0TIRNlffeXKx15QAGvXSp379Kl+ertGTK3SfTcVNN23okhblJbm2r77bnjiiQAI8uabEhdRE3FxMp/nhReKIvEHU6ZIpPiHH8r2ddfJEh4ujfvrr8MXX8Bbb4my69sXFiyQz/feE1/iHTvg8svhzDNlar+YGIkqd2fTJkm/65PZnbxG/dN9K4rStHjxRWjtFsb63HOujN1r1oDD4cf3/PWvcO65NZdbsUJ6EsccI+6xXgniqMCBA5IyxLn+1VfSvVq+XJbrritfft8+CRRs1Uq6YO6xHGPHSuDgsmVw2mmy77TT4NVXRbmAfOEOj6emjM9MTIqiBIaEBJnveeRI+OknOHxYYteOO07y6EHd5rPeuhV++w2SkuQFu9YYI6k3xo+Hzz6TiYiuvVbetDdsEFtYxRS0kydLd+eWW8T8c/iwmHK2bBFBtm0TBRIWBqGhkjOqRw8xV8XFSapbY2QBUQq7d4tmvOsu2V9WJo18YqJcPyyssuzjx8Of/iRlhg8X5eWkdWs46aTy+arGjIHVqyHZkfAhPFx6H87jTRQ1MSlKkPL119JuVcV335UPlgZ5Af72W3nBbtdOXozPOw/275cMsvPnS3tZZwoKPGuX33+Hjz4SpeAph7k/cCqa9u1l7KGszJXUylpRSC1bSrnWrWVMYs0aMTe5d9Xc29Kq1uuCu9eyE2Pk/k4lWNXirIPz01o5LySk8rJoUZUmJlUQihKkWAvnnOPqNVSkQwfpGTg9UpcvLz/ntSd69JA2vVUrr4oqSaQuvVS6PAcqxcMqvsRaVRCK0hzJyxMnm6oIDYV77xXryHvv1e6a48bB2297R75KlJZKj2LuXDEpdewoFTj6aIiKEi+ksDAxDR0+LOajzZtFsTiVi/ubd7t2Mol3u3byBl1UJOcdOiRLfr4kFGzOqIJQlObLgQNw/fXSLt54o5j977679uf36CHWFPfB7YyMmnsbTQJrRVHs2yfKwhgxu7hPntmypZiZCgtlqdhmupetzXpdcDcbuSs+d9ORp6VFC5cpqoXDF6msTBRwxSU1VRWEoihCQYEMOFcXaf2HP0iW7pwcCQRu0ULcZ51jyklJMobhdVOTEgjUzVVRFCE8XMYbFi2qnLD1hhtkgPqbb8QyExfneqGe5ZaMPy1NMscqwY32IBSlmXPggIQiDB8uYxLVMXGizBkE4k774YfQu3eT9+Zs7mgPQlEUz7RrJyajmpQDyIC2k4wMCTvo1csVkFwVCxZIWMCuXa59+/eLF9W+ffUSW/EDGiinKEqtiY4Wb9Q333Tty8mBCy4Ql9rQUDFLjRsnvYrJk2vn8XT//RJ79vPPojR6O/JA5+eLAuvWTeLsBg6UwfEW+mrrF9TEpChKnSgpgRdekF5DWpr/vUR79pRA6L59JbP2ySeLo5FSb9SLSVEU77N2rTTShw8HTobWrUVp9OkjsrRrJ96o7qEO+/e7lvx8aNsWOnWS2VJ79ZK0S506yVTanTvLNZzepCUl4g1a28/SUpHLUzC1+76QEFFsFZdWrSrvKymREI7CQvksKpJ9Ts9Vdw9Wa11B0u6erk5v2YrrEyeqglAUxUd88AFMmgRt2kgDtmeP9CqcjeYdd4ipqaAA3nlHGvKuXSE2VvLdffghLF4M69ZJvqhTTpFMFvv3SwqksDD45Re51rJlEhuneA9rVUEoihIArK1/jJgniookx9Tq1TJW8fXXEkit1J/qFIRa7hRF8RneVA4g5qSzzpLFSUEBbN8uQXw7d4pZKSxMljZtJAdfhw6uJTwcDh6UNCTbt8uMqLt3S89n3z7Z
f/CgK/6jZUuXOag2n+6B2J6CqZ1B0U7zlftSXOx5n3uuwNBQlxnKPf+ec90Yl7nJPeC6qvXqUAWhKEqTJjxcvJsGDqy6zFVXSdLCyEjX3D0ffCDzBq1eLcolMdHzuVdcIXP+XHCBtyVv/KizmKIoQc8VV0iqcnfi4mDevKpnHfUXvpgfyVuoglAUJegZNUq8k9wZPLjuM5w+/LAMpMfFSVS5tbB+vUSVO1m7ViZoAkkwm5oq26edJiYtgNGjZcrr1FSYPr3e1fI5qiAURVFqyU03iSfVihUy1vHpp9C/v7jHLl8uZV57TXosxcVw882Sufynn8TMdf/9rmvt3QsLF9Z+Zr9AoGMQiqIoteTbb+Gf/3QNcg8ZIjOTXnONKIann5Z5NX78EbKyRJGceqqcW1rqmpwJ4KKLAlOHuqAKQlEUxY0rr5SUHz17wuefu/YfPizZbtPTJRXIlCmuAME//xn+/neJ6h4xQuY42rZNFIj7PBrutGvn86o0GDUxKYqiuPHaa2IuclcO4FIGXbqIa+3cua5jYWEyxnD99aJgQMY3du1yKYjiYli50ufiexVVEIqiBD2XXALJyWL2iYqCV16RmU2joqQBP+ssaeCro2NHmDABhg6F886TwWp3xo+XGIQxY2S7dWtRIvfcA/Hxkk79++99UDkfopHUiqIoXmDaNAm0mzo10JLUGY2kVhRF8RVjx4q76//+F2hJvIsqCEVRlAby0UeBlsA36BiEoiiK4hFVEIqiKIpHVEEoiqIoHlEFoSiKonhEFYSiKIriEVUQiqIoikdUQSiKoige8amCMMacbozJMsasM8ZM8nB8vDHmF8fyvTEm3rE/zBjzozEm0xiz0hjzd1/KqSiKolTGZ4FyxpgQYAZwKpANLDPGfGKtXeVWbCOQaq3dY4w5A5gFJAGFwMnW2gJjTCtgsTHmC2vtD76SV1EURSmPL3sQI4F11toN1toiYA5wrnsBa+331to9js0fgCjHfmutLXDsb+VYgidplKIoShPAl6k2egFb3Lazkd5BVVwNfOHccPRAfgIGADOstWmeTjLGTAQmAvTs2ZMFCxYA0K9fP9q3b09mZiYAERERDBkyhEWLFgHQsmVLUlJSyMjIYP/+/QAkJiaSk5PDli0i9sCBAwkNDWWFY5bzyMhIYmJiWLx4MQChoaEkJyeTnp5OQYHos6SkJLKzs9m6dSsAgwYNIiQkhFWrpOPUvXt3oqOjWerIAdymTRuSkpJIS0vj0KFDACQnJ7Nx40Z27NgBQGxsLKWlpWRlZckX26sXUVFRpKXJVxIeHk5iYiJLly6lsLAQgJSUFNasWcPOnTsBiIuLo7CwkLVr1wLQu3dvunXrhjO5YYcOHUhISGDx4sWUOCbJHTVqFCtXriQ3NxeA+Ph48vPz2bBhAwB9+/alc+fOZGRkANCpUyfi4+NZuHAh1lqMMaSmppKZmcmePfIekJCQQF5eHps2bdLnpM9Jn1MjeE6jR4+mKnyWzdUYcyFwmrX2Gsf2pcBIa+3NHsqeBLwIpFhrcysc6wh8BNxsrV1R3T01m6uiKEqdqTKbqy9NTNlAb7ftKGBbxULGmGHAbODcisoBwFq7F1gAnO4TKRVFURSP+FJBLAMGGmOijTGtgYuBT9wLGGOOBuYBl1pr17jt7+roOWCMaQOcAvzmQ1kVRVGUCvhsDMJaW2KMuQn4EggBXrXWrjTGXOc4/hLwIBABvGiMASix1iYCPYA3HOMQLYD3rbWf+kpWRVEUpTI6o5yiKErzpvnOKFdcXEx2djaHnTOOK/UiLCyMqKgoWrVqFWhRFEXxE0GvILKzs2nfvj19+/bFYcZS6oi1ltzcXLKzs4mOjg60OIqi+Imgz8V0+PBhIiIiVDk0AGMMERER2gtTlGZG0CsIQJWDF9DvUFGaH81CQSiKoih1RxWEH3j00UcZMmQIw4YNY/jw4aSlpVFcXMykSZMYOHAgcXFxjBw5ki++kEwjBQUFXHvttfTv
358hQ4YwatSoI2kAjDHceeedR649bdo0pkyZAsCUKVNo27btkXQAIGkDnFx11VVERkYSFxdXTr4PPviAIUOG0KJFC9QLTFEUJ6ogfMzSpUv59NNPycjI4JdffuG///0vvXv3ZvLkyWzfvp0VK1awYsUK/u///o/8/HwArrnmGjp37szatWtZuXIlr7/+Ort37wYkX828efOObFekS5cuPPXUUx6PXXHFFcyfP7/S/ri4OObNm8eoUaO8VGtFUYKBZqUgjPHdUhXbt2+nS5cuhIaGAtKAd+zYkZdffpnnn3/+yP5u3brxl7/8hfXr15OWlsYjjzxCixbyePr168dZZ50FSLKtiRMn8swzz3i831VXXcV7771HXl5epWOjRo2ic+fOlfYPHjyYQYMG1em7VBQl+GlWCiIQjBkzhi1bthATE8MNN9zAwoULWbduHUcffTQdOnSoVH7lypUMHz6ckJCQKq9544038vbbb7Nv375Kx8LDw7nqqquYPn26V+uhKErzQxWEjwkPD+enn35i1qxZdO3alYsuuuhISvL60qFDBy677DKee+45j8dvueUW3njjjSPpfBVFUepD0AfKuROorCIhISGMHj2a0aNHM3ToUGbOnMnmzZvJz8+nffv25coOGTKEzMxMysrKjpiYPHHbbbeRkJDAlVdeWelYx44dGTduHC+++KLX66IoSvNBexA+Jisr68ikIgDLly9n0KBBXH311dxyyy0UFRUBMlbx1ltv0b9/fxITE3nooYdw5slau3YtH3/8cbnrdu7cmb/85S+88sorHu97xx13MHPmzCOTlSiKotQVVRA+pqCggMsvv5zY2FiGDRvGqlWrmDJlCo888ghdu3YlNjaWuLg4zjvvPLp27QrA7Nmz2bFjBwMGDGDo0KFMmDCBnj17Vrr2nXfeWa0309ixY4/MiAVwySWXkJycTFZWFlFRUUeUy0cffURUVBRLly7lrLPO4rTTTvPBN6EoSlMj6LO5rl69msGDBwdIouBCv0tFCUoCMqOcoiiK0oRRBaEoiqJ4RBWEoiiK4hFVEIqiKIpHVEEoiqIoHlEFoSiKonhEFYQfaCzpvvv27cvQoUMZPnw4iYmJfqi5oihNmWaVaiMQuKf7Dg0NZffu3RQVFZVL9x0aGkpOTg4LFy4EJN13dHQ0a9eupUWLFmzYsIHVq1cDrnTf9957L126dKl0P2e67yeeeMKjPN9++63H8xRFUSrSvHoQAcj33ZjSfSuKotSF5qUgAkBjSvdtjGHMmDGMGDGCWbNmNaxiiqIEPaogfExjSve9ZMkSMjIy+OKLL5gxYwaLFi1qkByKogQ3zWsMIkB5pxpLum9nwr/IyEjGjh3Ljz/+qNOMKopSJdqD8DGNJd33gQMHjsx5feDAAb766ivi4uK8Xl9FUYIHVRA+prGk+87JySElJYX4+HhGjhzJWWedxemnn+67iiuK0uTRdN9KrdHvUlGCEk33rSiKotQNVRCKoiiKR5qFgggmM1qg0O9QUZofQa8gwsLCyM3N1QauAVhryc3NJSwsLNCiKIriR4I+DiIqKors7Gx27doVaFGaNGFhYURFRQVaDEVR/EjQK4hWrVoRHR0daDEURVGaHD41MRljTjfGZBlj1hljJnk4Pt4Y84tj+d4YE1/bcxVFURTf4jMFYYwJAWYAZwCxwCXGmNgKxTYCqdbaYcBUYFYdzlUURVF8iC97ECOBddbaDdbaImAOcK57AWvt99baPY7NH4Co2p6rKIqi+BZfjkH0Ara4bWcDSdWUvxr4oq7nGmMmAhMdm4XGmBX1krZx0gXwnEuj6RJsddL6NH6CrU7ers98a63HvDu+VBCewrc9+poaY05CFERKXc+11s7CZZpKt9YGzVyawVYfCL46aX0aP8FWJ3/Wx5cKIhvo7bYdBWyrWMgYMwyYDZxhrc2ty7mKoiiK7/DlGMQyYKAxJtoY0xq4GPjEvYAx5mhgHnCptXZNXc5VFEVRfIvP
ehDW2hJjzE3Al0AI8Kq1dqUx5jrH8ZeAB4EI4EUj8zqXWGsTqzq3FrcNtnk0g60+EHx10vo0foKtTn6rT1Cl+1YURVG8R9DnYlIURVHqhyoIRVEUxSNBoSCaaloOY8yrxpid7rEbxpjOxpivjTFrHZ+d3I7d66hjljHmtMBIXTXGmN7GmG+NMauNMSuNMbc69jfJOhljwowxPxpjMh31+btjf5OsjxNjTIgx5mdjzKeO7aZen03GmF+NMcuNMemOfU29Th2NMXONMb85/k/JAamTtbZJL8gg9nqgH9AayARiAy1XLWUfBSQAK9z2/ROY5FifBDzhWI911C0UiHbUOSTQdahQnx5AgmO9PbDGIXeTrBMSjxPuWG8FpAHHN9X6uNXrDuAd4NOm/ptzyLkJ6FJhX1Ov0xvANY711kDHQNQpGHoQTTYth7V2EZBXYfe5yI8Dx+d5bvvnWGsLrbUbgXVI3RsN1trt1toMx3o+sBqJim+SdbJCgWOzlWOxNNH6ABhjooCzkNgjJ022PtXQZOtkjOmAvDy+AmCtLbLW7iUAdQoGBeEpLUevAMniDbpZa7eDNLhApGN/k6qnMaYvcCzy1t1k6+QwxywHdgJfW2ubdH2AZ4G7gTK3fU25PiBK+ytjzE+O1DvQtOvUD9gFvOYwBc42xrQjAHUKBgVR67QcTZwmU09jTDjwIXCbtXZ/dUU97GtUdbLWllprhyPR/CONMXHVFG/U9THGnA3stNb+VNtTPOxrNPVx4wRrbQKS/flGY8yoaso2hTq1REzP/7LWHgscQExKVeGzOgWDggi2tBw5xpgeAI7PnY79TaKexphWiHJ421o7z7G7SdcJwNHFXwCcTtOtzwnAOcaYTYgp9mRjzFs03foAYK3d5vjcCXyEmFeacp2ygWxHbxVgLqIw/F6nYFAQwZaW4xPgcsf65cDHbvsvNsaEGmOigYHAjwGQr0qMhMO/Aqy21j7tdqhJ1skY09UY09Gx3gY4BfiNJlofa+291tooa21f5H/yP2vtX2mi9QEwxrQzxrR3rgNjgBU04TpZa3cAW4wxgxy7/gisIhB1CvRovZdG/M9EPGbWA/cHWp46yP0usB0oRt4CrkZSj3wDrHV8dnYrf7+jjllIcsOA16FCfVKQru0vwHLHcmZTrRMwDPjZUZ8VwIOO/U2yPhXqNhqXF1OTrQ9ir890LCud//+mXCeHjMOBdMdv7z9Ap0DUSVNtKIqiKB4JBhOToiiK4gNUQSiKoigeUQWhKIqieEQVhKIoiuIRVRCKoiiKR1RBKIoXMMZ87/jsa4wZF2h5FMUbqIJQFC9grf2DY7UvUCcFYYwJ8bpAiuIFVEEoihcwxjizvv4DONExN8HtjmR/TxpjlhljfjHGXOsoP9rI3BnvAL8GTHBFqYaWgRZAUYKMScDfrLVnAziyi+6z1h5njAkFlhhjvnKUHQnEWUnRrCiNDlUQiuJbxgDDjDEXOLaPQnLlFAE/qnJQGjOqIBTFtxjgZmvtl+V2GjMaSeOsKI0WHYNQFO+Sj0y36uRL4HpHGnSMMTGOrKOK0ujRHoSieJdfgBJjTCbwOjAd8WzKcKRD34VrqkhFadRoNldFURTFI2piUhRFUTyiCkJRFEXxiCoIRVEUxSOqIBRFURSPqIJQFEVRPKIKQlEURfGIKghFURTFI/8Pc3C/1Zoq0TEAAAAASUVORK5CYII=\n"
+ },
+ "metadata": {
+ "needs_background": "light"
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fig, ax = plt.subplots()\n",
+ "x = np.array(range(len(history_shortcut_11.history['loss']) - smoothing_windows + 1))\n",
+ "ax.plot(x, 100*moving_average(history_shortcut_11.history[\"val_loss\"], smoothing_windows), linewidth=3.0, label='SCCNN11', c='b')\n",
+ "ax.plot(x, 100*moving_average(history_shortcut_11.history[\"loss\"], smoothing_windows), linewidth=.5, c='b')\n",
+ "x = np.array(range(len(history_shortcut_5.history['loss']) - smoothing_windows + 1))\n",
+ "ax.plot(x, 100*moving_average(history_shortcut_5.history[\"val_loss\"], smoothing_windows), linewidth=3.0, label='SCCNN5', c='r')\n",
+ "ax.plot(x, 100*moving_average(history_shortcut_5.history[\"loss\"], smoothing_windows), linewidth=.5, c='r')\n",
+ "ax.set_xlabel(\"iter\")\n",
+ "ax.set_ylabel(\"error(%)\")\n",
+ "ax.set_xlim(0, min(len(history_shortcut_5.history['loss']), len(history_shortcut_11.history['loss']) - smoothing_windows + 1))\n",
+ "ax.set_ylim(0.20, 0.35)\n",
+ "ax.spines[\"top\"].set_visible(False)\n",
+ "ax.spines[\"right\"].set_visible(False)\n",
+ "ax.yaxis.set_major_locator(ticker.LinearLocator(numticks=6))\n",
+ "ax.grid(axis='y', linestyle='--')\n",
+ "ax.annotate(\"11-layer\", (500, 0.24), c='b')\n",
+ "ax.annotate(\"5-layer\", (500, 0.255), c='r')\n",
+ "plt.legend(loc=3)\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# plt.savefig(fname=\"fig2.png\", dpi=300)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "import os\n",
+ "time_spent = (time.time() - time1) /60\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {
+ "pycharm": {
+ "name": "#%%\n"
+ }
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "interpreter": {
+ "hash": "7f619fc91ee8bdab81d49e7c14228037474662e3f2d607687ae505108922fa06"
+ },
+ "kernelspec": {
+ "display_name": "Python 3.9.7 64-bit ('base': conda)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.7"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
\ No newline at end of file
diff --git a/models.py b/models.py
new file mode 100644
index 0000000..2dae6dd
--- /dev/null
+++ b/models.py
@@ -0,0 +1,264 @@
+import keras.callbacks
+import keras.layers as KL
+from keras import Model
+from keras.optimizers import adam_v2
+
+
+class Plain5(object):
+ def __init__(self, model_path=None, input_shape=None):
+ self.model = None
+ self.input_shape = input_shape
+ if model_path is not None:
+ # TODO: loading from the file
+ pass
+ else:
+ self.model = self.build_model()
+
+ def build_model(self):
+ input_layer = KL.Input(self.input_shape, name='input')
+ x = KL.Conv1D(8, 3, padding='same', name='Conv1')(input_layer)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+
+ x = KL.Conv1D(8, 3, padding='same', name='Conv2')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+
+ x = KL.Conv1D(8, 3, padding='same', name='Conv3')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+
+ x = KL.Dense(20, activation='relu', name='dense')(x)
+ x = KL.Dense(1, activation='sigmoid', name='output')(x)
+ model = Model(input_layer, x)
+ return model
+
+ def fit(self, x, y, x_val, y_val, epoch, batch_size):
+ self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
+ checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/plain5.hdf5', monitor='val_loss',
+ mode="min", save_best_only=True)
+ early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0,
+ patience=1000, verbose=0, mode='auto')
+ lr_decay = keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=25, min_delta=1e-6)
+ callbacks = [checkpoint, early_stop, lr_decay]
+ history = self.model.fit(x, y, validation_data=(x_val, y_val), epochs=epoch, verbose=1,
+ callbacks=callbacks, batch_size=batch_size)
+ return history
+
+
+class Residual5(object):
+ def __init__(self, model_path=None, input_shape=None):
+ self.model = None
+ self.input_shape = input_shape
+ if model_path is not None:
+ # TODO: loading from the file
+ pass
+ else:
+ self.model = self.build_model()
+
+ def build_model(self):
+ input_layer = KL.Input(self.input_shape, name='input')
+ fx = KL.Conv1D(8, 3, padding='same', name='Conv1')(input_layer)
+ fx = KL.BatchNormalization()(fx)
+ x = KL.Activation('relu')(fx)
+
+ fx = KL.Conv1D(8, 3, padding='same', name='Conv2')(x)
+ fx = KL.BatchNormalization()(fx)
+ fx = KL.Activation('relu')(fx)
+ x = fx + x
+
+ fx = KL.Conv1D(8, 3, padding='same', name='Conv3')(x)
+ fx = KL.BatchNormalization()(fx)
+ fx = KL.Activation('relu')(fx)
+ x = fx + x
+
+ x = KL.Dense(20, activation='relu', name='dense')(x)
+ x = KL.Dense(1, activation='sigmoid', name='output')(x)
+ model = Model(input_layer, x)
+ return model
+
+ def fit(self, x, y, x_val, y_val, epoch, batch_size):
+ self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
+ checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/res5.hdf5', monitor='val_loss',
+ mode="min", save_best_only=True)
+ early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0,
+ patience=1000, verbose=0, mode='auto')
+ lr_decay = keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=25, min_delta=1e-6)
+ callbacks = [checkpoint, early_stop, lr_decay]
+ history = self.model.fit(x, y, validation_data=(x_val, y_val), epochs=epoch, verbose=1,
+ callbacks=callbacks, batch_size=batch_size)
+ return history
+
+
+class ShortCut5(object):
+ def __init__(self, model_path=None, input_shape=None):
+ self.model = None
+ self.input_shape = input_shape
+ if model_path is not None:
+ # TODO: loading from the file
+ pass
+ else:
+ self.model = self.build_model()
+
+ def build_model(self):
+ input_layer = KL.Input(self.input_shape, name='input')
+ x_raw = KL.Conv1D(8, 3, padding='same', name='Conv1')(input_layer)
+ fx1 = KL.BatchNormalization()(x_raw)
+ fx1 = KL.Activation('relu')(fx1)
+
+ fx2 = KL.Conv1D(8, 3, padding='same', name='Conv2')(fx1)
+ fx2 = KL.BatchNormalization()(fx2)
+ fx2 = KL.Activation('relu')(fx2)
+
+ fx3 = KL.Conv1D(8, 3, padding='same', name='Conv3')(fx2)
+ fx3 = KL.BatchNormalization()(fx3)
+ fx3 = KL.Activation('relu')(fx3)
+ x = KL.Concatenate(axis=2)([x_raw, fx1, fx2, fx3])
+
+ x = KL.Dense(20, activation='relu', name='dense')(x)
+ x = KL.Dense(1, activation='sigmoid', name='output')(x)
+ model = Model(input_layer, x)
+ return model
+
+ def fit(self, x, y, x_val, y_val, epoch, batch_size):
+ self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
+
+ checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/shortcut5.hdf5', monitor='val_loss',
+ mode="min", save_best_only=True)
+ early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0,
+ patience=1000, verbose=0, mode='auto')
+ lr_decay = keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=25, min_delta=1e-6)
+ callbacks = [checkpoint, early_stop, lr_decay]
+ history = self.model.fit(x, y, validation_data=(x_val, y_val), epochs=epoch, verbose=1,
+ callbacks=callbacks, batch_size=batch_size)
+ return history
+
+
+class ShortCut11(object):
+ def __init__(self, model_path=None, input_shape=None):
+ self.model = None
+ self.input_shape = input_shape
+ if model_path is not None:
+ # TODO: loading from the file
+ pass
+ else:
+ self.model = self.build_model()
+
+ def build_model(self):
+ input_layer = KL.Input(self.input_shape, name='input')
+ x_raw = KL.Conv1D(8, 3, padding='same', name='Conv1_1')(input_layer)
+ x = KL.BatchNormalization()(x_raw)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv1_2')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv1_3')(x)
+ x = KL.BatchNormalization()(x)
+ fx1 = KL.Activation('relu')(x)
+
+ x = KL.Conv1D(8, 3, padding='same', name='Conv2_1')(fx1)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv2_2')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv2_3')(x)
+ x = KL.BatchNormalization()(x)
+ fx2 = KL.Activation('relu')(x)
+
+ x = KL.Conv1D(8, 3, padding='same', name='Conv3_1')(fx2)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv3_2')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv3_3')(x)
+ x = KL.BatchNormalization()(x)
+ fx3 = KL.Activation('relu')(x)
+ x = KL.Concatenate(axis=2)([x_raw, fx1, fx2, fx3])
+
+ x = KL.Dense(200, activation='relu', name='dense1')(x)
+ x = KL.Dense(1, activation='sigmoid', name='output')(x)
+ model = Model(input_layer, x)
+ return model
+
+ def fit(self, x, y, x_val, y_val, epoch, batch_size):
+ self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
+ checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/shortcut11.hdf5', monitor='val_loss',
+ mode="min", save_best_only=True)
+ early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=1e-6,
+ patience=200, verbose=0, mode='auto')
+ lr_decay = keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.5,
+ patience=25, min_delta=1e-6)
+ callbacks = [checkpoint, early_stop, lr_decay]
+ history = self.model.fit(x, y, validation_data=(x_val, y_val), epochs=epoch, verbose=1,
+ callbacks=callbacks, batch_size=batch_size)
+ return history
+
+
+class Plain11(object):
+ def __init__(self, model_path=None, input_shape=None):
+ self.model = None
+ self.input_shape = input_shape
+ if model_path is not None:
+ # TODO: loading from the file
+ pass
+ else:
+ self.model = self.build_model()
+
+ def build_model(self):
+ input_layer = KL.Input(self.input_shape, name='input')
+ x = KL.Conv1D(8, 3, padding='same', name='Conv1_1')(input_layer)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv1_2')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv1_3')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+
+ x = KL.Conv1D(8, 3, padding='same', name='Conv2_1')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv2_2')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv2_3')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+
+ x = KL.Conv1D(8, 3, padding='same', name='Conv3_1')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv3_2')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+ x = KL.Conv1D(8, 3, padding='same', name='Conv3_3')(x)
+ x = KL.BatchNormalization()(x)
+ x = KL.Activation('relu')(x)
+
+ x = KL.Dense(200, activation='relu', name='dense1')(x)
+ x = KL.Dense(1, activation='sigmoid', name='output')(x)
+ model = Model(input_layer, x)
+ return model
+
+ def fit(self, x, y, x_val, y_val, epoch, batch_size):
+ self.model.compile(loss='mse', optimizer=adam_v2.Adam(learning_rate=0.01 * (batch_size / 256)))
+ checkpoint = keras.callbacks.ModelCheckpoint(filepath='checkpoints/plain11.hdf5', monitor='val_loss',
+ mode="min", save_best_only=True)
+ early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=1e-6,
+ patience=200, verbose=0, mode='auto')
+ lr_decay = keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.5,
+ patience=25, min_delta=1e-6)
+ callbacks = [checkpoint, early_stop, lr_decay]
+ history = self.model.fit(x, y, validation_data=(x_val, y_val), epochs=epoch, verbose=1,
+ callbacks=callbacks, batch_size=batch_size)
+ return history
+
+
+if __name__ == '__main__':
+ # plain5 = Plain5(model_path=None, input_shape=(1, 102))
+ # plain11 = Plain11(model_path=None, input_shape=(1, 102))
+ residual5 = Residual5(model_path=None, input_shape=(1, 102))
+ short5 = ShortCut5(model_path=None, input_shape=(1, 102))
diff --git a/preprocess.ipynb b/preprocess.ipynb
new file mode 100644
index 0000000..e87add3
--- /dev/null
+++ b/preprocess.ipynb
@@ -0,0 +1,127 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "dd2c8c55",
+ "metadata": {},
+ "source": [
+ "# Preprocessing"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "716880ac",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import pandas as pd\n",
+ "from sklearn.model_selection import train_test_split\n",
+ "from scipy.io import savemat, loadmat\n",
+ "import os"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "4d7dc4a0",
+ "metadata": {},
+ "source": [
+ "## Step 1: \n",
+ "Convert the dataset to mat format for Matlab."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "711356a2",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "dataset = pd.read_csv('preprocess/dataset/mango/NAnderson2020MendeleyMangoNIRData.csv')\n",
+ "y = dataset.DM\n",
+ "x = dataset.loc[:, '684': '990']\n",
+ "savemat('preprocess/dataset/mango/mango_origin.mat', {'x': x.values, 'y': y.values})"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3e41e8e6",
+ "metadata": {},
+ "source": []
+ },
+ {
+ "cell_type": "markdown",
+ "id": "ea5e54fd",
+ "metadata": {},
+ "source": [
+ "## Step3:\n",
+ "Data split with train test split."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "6eac026e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "data = loadmat('preprocess/dataset/mango/mango_preprocessed.mat')\n",
+ "x, y = data['x'], data['y']\n",
+ "x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=24)\n",
+ "if not os.path.exists('mango'):\n",
+ " os.makedirs('mango')\n",
+ "savemat('preprocess/dataset/mango/mango_dm_split.mat',{'x_train':x_train, 'y_train':y_train, 'x_test':x_test, 'y_test':y_test,\n",
+ " 'max_y': data['max_y'], 'min_y': data['min_y'],\n",
+ " 'min_x':data['min_x'], 'max_x':data['max_x']})"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b2977dae",
+ "metadata": {},
+ "source": [
+ "## Step 4:\n",
+ "Show data with pictures\n",
+ "use `draw_pics_origin` to draw original spectra\n",
+ ""
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "use `draw_pics_preprocessed.m` to draw proprecessed spectra\n",
+ ""
+ ],
+ "metadata": {
+ "collapsed": false,
+ "pycharm": {
+ "name": "#%% md\n"
+ }
+ }
+ }
+ ],
+ "metadata": {
+ "interpreter": {
+ "hash": "7f619fc91ee8bdab81d49e7c14228037474662e3f2d607687ae505108922fa06"
+ },
+ "kernelspec": {
+ "display_name": "Python 3.9.7 64-bit ('base': conda)",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.7"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/preprocess/draw_pics_origin.m b/preprocess/draw_pics_origin.m
new file mode 100755
index 0000000..0e65e53
--- /dev/null
+++ b/preprocess/draw_pics_origin.m
@@ -0,0 +1,45 @@
+% Plot the raw (unpreprocessed) spectra of the four datasets side by side:
+% corn, marzipan, soil, and mango. Each panel clears the workspace afterwards
+% so the datasets' variable names cannot collide.
+set(gca,'LooseInset',get(gca,'TightInset'))
+f = figure;
+f.Position(3:4) = [1331 331];
+%%% draw the pic of corn spectra
+load('dataset/corn.mat');
+x = m5spec.data;
+wave_length = m5spec.axisscale{2, 1};
+subplot(1, 4, 1)
+plot(wave_length, x');
+xlim([wave_length(1) wave_length(end)]);
+xlabel('Wavelength(nm)');
+ylabel('Absorbance');
+clear
+
+%%% draw the pic of Marzipan spectra
+load('dataset/marzipan.mat');
+x = NIRS1;
+wave_length = NIRS1_axis;
+subplot(1, 4, 2)
+plot(wave_length, x');
+xlim([wave_length(1) wave_length(end)]);
+xlabel('Wavelength(nm)');
+ylabel('Absorbance');
+clear
+
+%%% draw the pic of Soil spectra
+load('dataset/soil.mat');
+x = soil.data;
+wave_length = soil.axisscale{2, 1};
+subplot(1, 4, 3)
+plot(wave_length, x');
+xlim([wave_length(1) wave_length(end)]);
+xlabel('Wavelength(nm)');
+ylabel('Absorbance');
+clear
+
+% draw the pic of Mango spectra
+load('dataset/mango/mango_origin.mat');
+wave_length = 684: 3: 990;
+subplot(1, 4, 4)
+plot(wave_length, x');
+xlim([wave_length(1) wave_length(end)]);
+xlabel('Wavelength(nm)');
+ylabel('Signal intensity');
+clear
\ No newline at end of file
diff --git a/preprocess/draw_pics_preprocessed.m b/preprocess/draw_pics_preprocessed.m
new file mode 100755
index 0000000..271b9fb
--- /dev/null
+++ b/preprocess/draw_pics_preprocessed.m
@@ -0,0 +1,48 @@
+% Plot the preprocessed spectra of the four datasets side by side: corn,
+% marzipan, soil (each run through the shared `preprocess` script), and the
+% pre-saved preprocessed mango data. The last wavelength sample is dropped
+% for the first three panels because preprocessing shortens x by one column.
+set(gca,'LooseInset',get(gca,'TightInset'))
+f = figure;
+f.Position(3:4) = [1331 331];
+%%% draw the pic of corn spectra
+load('dataset/corn.mat');
+x = m5spec.data;
+wave_length = m5spec.axisscale{2, 1};
+preprocess;
+subplot(1, 4, 1)
+plot(wave_length(1, 1:end-1), x');
+xlim([wave_length(1) wave_length(end)]);
+xlabel('Wavelength(nm)');
+ylabel('Absorbance');
+clear
+
+%%% draw the pic of Marzipan spectra
+load('dataset/marzipan.mat');
+x = NIRS1;
+wave_length = NIRS1_axis;
+preprocess;
+subplot(1, 4, 2)
+plot(wave_length(1, 1:end-1), x');
+xlim([wave_length(1) wave_length(end)]);
+xlabel('Wavelength(nm)');
+ylabel('Absorbance');
+clear
+
+%%% draw the pic of Soil spectra
+load('dataset/soil.mat');
+x = soil.data;
+wave_length = soil.axisscale{2, 1};
+preprocess;
+subplot(1, 4, 3)
+plot(wave_length(1, 1:end-1), x');
+xlim([wave_length(1) wave_length(end)]);
+xlabel('Wavelength(nm)');
+ylabel('Absorbance');
+clear
+
+% draw the pic of Mango spectra
+load('dataset/mango/mango_preprocessed.mat');
+wave_length = 687: 3: 990;
+subplot(1, 4, 4)
+plot(wave_length, x');
+xlim([wave_length(1) wave_length(end)]);
+xlabel('Wavelength(nm)');
+ylabel('Signal intensity');
+clear
\ No newline at end of file
diff --git a/preprocess/pics/preprocessed.png b/preprocess/pics/preprocessed.png
new file mode 100644
index 0000000000000000000000000000000000000000..516150da7e705a97381114b26a62feda317ab3ed
GIT binary patch
literal 91504
zcmeEuWmJ@5*X}4Ff&vZ*NF&|dsWO0ccXxM5D~dEoHz?gmGjvFobT`b<-8J8X?|Rqz
zex0+{`F)-R%wh(3?%8qO*S+_(2~m`n#6TxR2Z2BsQXe78AkZ^85a>zi^QXWwT>CjO
zzyq4&M=ci+=mpN>=Mzv$+8f|WR97iEaa7n-d{kt<6Bvp)@D!D+xTdR$gSo4Rk+T^{
z?!Bt)`wvXys+MNtaz?I}8V=4jE==T7_9iSsnv0Krg5vS##hlHIT&*1J$yKfF%s?ER
z%o1kC%p7bS9OUfW0&H9Y+&ow+QSU$?a*!0{z3Qj5y+u#WSc^HHqZc1%IR9qhq7fIIk|qnZ7;aagjWHc3;G4m#=nfomf~qZZ$d8P=mYBMu>te
zrKpaE{5lv
zt*@<-dhfC4eFe4Ezl%VU^`vw8T!_Jq2~tVw?CpI%PGD%gv$K=J>!7cuhCBWqWT^YU
zZWa_$UXfi=QRQPa_$=>hn`wYsSlj2@!`9a7>bULKM_(6~cb&V3#s+KINldY2JLLMc7L*m3wZjdJ1bVA^7W(ZU7I7pCJ@RIGhHe
z(q<*wrW}8|?wFy?%Hoae0p}YfD;RCLJOP1Z?X&DD$H9hP$FVOZVHh0gK-}l$0l&{W
z=YA$xF?Y$-JVylHnv#YT!mM+>whsm=Ur1@4>
z)7}}@Acv)^y12E3M*83_cX6jOIR#s=+kp)&Us}Z;4+IU?b(;>pH0avXcqN%R7c|v3
zARwtv;s~C(K$-HsQuk7p+;gV5DHgGbxaiQ66qX;QeTzBel(G|0_~iC45)#eg@0;q}
z*hJhCpEz0>(yj*adV4~@PK8bt^S=kpi75}uA)@pd|M6A}9HIKR+HSsfn39$AzwR18
zi8G=oSZGjj`E+~4`h6(qo>%a0jGEb5s#*EhPras@n-Gc!?vbOae1
zTU{K}p~Kn#c{~#Ec)b5&-*{Iaci65jpGa}SmSBV&WiG1f5We)eTY`xUi-_JzQdycm
zLf4)|Bk3mQR+#L;?R!mC()J1)_}2&uXjvQ^mnvj~6ZnHIK?U=FT#=LXH=gPGlYl_o
zgU2a)(+PsMN^t#iEGep%v@VC%Dd&&T6@~E6^r0K2{6ystx8^9dtP08Z*7>#S^$;?z?r@PNXU6#@5bgF@l~H>ac{*2
zQ>X1nZ;O@(#`dX$mFNN}sdXHAa)tnpjuGJ0^!563>sLwB@hBCnC!
zxE>nGUBVlRD+_t3Va|(|?*M*+lIr=?0~^&=;k0G~&w*qnFCw6aFw5oxmXR81xxXu0
z-WaxLzYYKRp{er(P$XR?}v&R60@|99RC_?l{$VzD7)#U^LTb
z$$uKbQ!8#>=8U`AoD^E->PX{OEs7`FKhle67PsW@|8Df?4L@!l_Hi%}rCGtGEEWO)D9L)o>v*W!FM9r|HNT)%
z(ew2VPTWyWEKwLF8&>^p;B_1wU6nff9ZGlze26NqDliF}%A1Ru+uz0@=Tj6iQ6ZSZ
z;6FAJIe#Dmv#ZP81;;IraGw853YJ9Cmkm6B9y1KMlB~~e^_^oYLvvtZ$7%T(sFg1Y
zg=vlWRFZW)&SrxJu7BICs==$;g@-=eGdh$j)d)T#Q7PdR@#M}8^SvE(3`7;I%x!5|
z92u!%4M4Qd8{1g0kJBRKpaAze68>YCJ2fYervhrL8^?H_9Qk<1qxG%J=8e7@S8u6v
zGjPp)F%DxxnhNEfTv_;$xmx48wJ+ljt6E=Fphg&Ar@00PvD5TgVzXj}nMJs;RT?T8dvI
zcy27dOJm<|7melfMG0wy6U%1=wy5<(%{L=A$D-DP3m@+5NDz-F-^l~PV
z^o!z$9tP5nB$flwfg98J-Z{Hb*K)F%II^#S?WD~EO-NwO#?c(WFOV#g^Ve_NKvc46
zZC^H@*>9a_mgUf;AquX?X_;%9Uy!I{9-uo_FY&u>7iPFMAUP$=3Fx?uNTwRSnL&
zpB|PhIWMr=o%V>9!!L0&`IU#PMJK!@ge$_(rX(U0wEPfIoA!b-&?*KoT|uD_#3b9wnqjOz3b?JHA)A
zxEnORiTugyh4dOw=y&|%p9tvkpTykK^Uzj*{k!{9QWO4Hqp4HTG!_2V?`MTl*RpoA
z-R@d4a7uok6+iI>6NgnM;J@ABDIye165&sJbp+=F)&F{R_>5?r;4xdkUWJgw`+oq?
zC)s+KD}r>p=8UPQEuovTzi}k~&&|$Rb-IaYubY*W-lI4XoqL(IDRup*2@T${U?$JP
z9l#mb`Pxray;pC%WEYx^Hlizt7_D-Xk1Dbp`;_BbPC%8m>d5rZS1%Xs-P)*Ys*xHlie|m?(nltR38mL(4nvCZfPDk
zTFvVyDcu2!V2tkx{#etNqU$lClhhCe1c$(W8On(#j~Ubsf{k09KVZE7hedCkWQzG|
zo5w1&hU1Y7Jl?u)C$r4#lw{6xSjdGDyzJOvb;Yx3Y!k9TUbx#Z^xAAgJ>w(|-?|Ws
z;9#7kT8&u~TA6w8a%=JxU0*oPf`6;&OYq%Ux3+I(_j|#{_q#E8*YE8Xs*3LZfJX7X
z%-+RE24^dwhTRbes~HkLLX4iYo{75Bi?TW(^`_nGOKAmq{&5E`0M<@UM0KKH!zb+v
zU5|!rCmMzZ{QqWApfWhhmTyW1+eiz;1G^FDE>S_s>P+D;u{3WDq6|*Mf6L(o)f0*ObNuC`pQCdGmA~JcKYE{
zP~?!O`EbCz#?qE&Wkyo0_}P~^pN&uBchB~}#653~c~(~qHF%l0oZXQ2hMYk|$+FG~
z%hR;5q3VxF-N8$vfc6K#7Wa;5o*+=Y{DOLlby@o}Iv_WjpA`)QOHkb*JWbf;fW9B&
zwE1HHjt=cX7G|2jo70DZ4Snl%{LasIb_Cz{imu8ibXz7(MH744D6)xTAV`#RUt87BCr&_VzmwD8&fT>;EF
zN&d-rSpEKo6e!F#>f1IsfQ+Lx53BMBR9)pE?)OLN`z>y$C>{eC
zqd?$&i1*G(&D4BTwTdcYVQ+df$5z+9Kplo86y@d^#)`hR57LJp3@f{J_I;l3DDFfrW+Gckmw=9@ek696eZUOn&j47eGzt
zgds#`wM7!<)4_KU3V5RexB&O8{E&F}X|CUG$|Fw48r>+ViScZ_0pw}!Z_N`caQDSL$e}^NY
zL6%dV76pgw0v@1LR8TNeYc~%pLs&?N)NAY4%*;%#Oyb<(%~2x(&wYEr935o#+UMvk3}OzR1-CV}_LRk1w#-g0Ciy9x!v+imp36$}YQeJX$C+Qg0ErSV^?!MPL8DP<
z6W+kL^@2r%`Xj*Z&A;(FSPd7uNEf+D>uc0*ny2Wx10iSB*B(>wv*G3Rl70rtttoeGrh@Qs9(tb6a<)&o7%P&GQw2WE6o?MpmF4)7oAv|H2#lAwp
zmX|?^4mYZX${&EG9o|qNqNAhBS*}_STW(qOn?8*QUv6w}y1BVk%9;}
zuUpP4VPmLidM+x8MX|%p$C+++GTY+Ycz>I=O`aV@h=3IHVOEOMPRNF&60H;vO2f
zYrb3>!oFR|DgntR!MG#qoZkfQ4&a7JU|Qm<3tYim@kIC#=gpz>OR&;dt&!p72LX6g
zU)X8Jn_|HHLmQR)UKGc}&7>(iCLv%nZ``_ji^MW4R*QS{B@ZF@%0zx2#>d#CrlG;Z
z!*h3cPm#@25WVN3r!Q=6Mf65fHa9n;prBxhTp4O>r}UXJFftDG_Ztf^g=6RYZP@S{
z-u&+ZO5Myk&2(<$EOi-s|1~t~7{fn!*{h!F;OFW09?=;NjT%b7o_@Ee7VhtIERD&!
zQ)JE^+FpMEUw!pc|6WbUo_m2B>DoT=RRlj=S^eMT?p`ZPgM%JetWnOiV1~ajZF`5@s#N_-+SbJ4E?m*$M2~SuJXBt
zVj7C4r{~)!V+}c7`U8JxmYL2xN$uVaW;5dVaZBfEi{fTuGpx}T78Y(jXQ8E~wSYU!
zPD@cl8@}>dv)QBbcHe&pB30COt?i_CY83Sf+=j$X<~)3tj8JeyQiJR3YQ)?_1i)s(
z>_OXnj?9u{Q4bt&+(d_v(8*@6IvXgES#4T
z2&to{q3KP#^@h(BZlSuQjt;-i5>24$$2w~Ei_nUpMoOMl`9w;Wmzn;5D^(481n`)J
z?^V|>TZ~q{{TP4^%~54xVL`$PUzUQyK0S?U|I18}U&;qyY4>Vy_7n_Ws|hpx9FCtO
zC~U{8rNrHn?Mx1~5bCzNzZlO`
zeZ7EP4cOMlBjxq;mM@jgd*#Cy;kSr|P!uz&Vck@0?`1(6H}iWgp}
zCfFRW=%tn{EpWTfhl&L|@|@FWX`X?>!vw8&oVWw5!ZLf5htUfejd7!{q5<;yQ)jT+
z77;fL1zEXvJ9d56RE^}IEQ8ErR?n)OE2LqcMX3xXt%hCZjPNf8WK;Ktc?pU73u2xk
zWmqz?1Fe=Tp$!plwo}yql^$Z8cC{rnKcE)lPj|TD0cS+I0C79~kPnR!z)2XCM7$Sv
zOCb<;nUV`Q`t<nu0f{5`4Xu?`rAvXdGWt!MAt6WN8+|c|`g+@Sd<(aM#GDC|{i8Qv`YwN_
zzl-;v@xGOWcR-n?CH**^gTlF-Hf9E2;z=8_+2~v_agzKor7r))s8!9%uezk+C)v52
zFI0xO$*v4{B|ukTbhX7s(WVkX4LllmDX3%NT}iD{FOBna%yBLQdOU+>MTW#$RDtEh
zm{;Pgqd*+
zOD)+$;20@}#Qz?@g~^k~<7o}Pxt-tIEO+aop0Vw*G0GdUz@;LfjMkXx{m{TK;UvRJJqRQ5!i6tK!-4$gqShMI(o~3GVB*7ge4wZ&{G>|8Sjv7wT70t`;d0toUX`?&Iqb^y#^ABI
zQO7A2EM2&29YZcF1_A0*QQIGZI-%xy*RREP+5FF=Cbzj*?7$(12E{EIc6jLT#LV-y
z*R1U)8!37$INHl9a=Ep9bN>)-)#{|REJ-!F8h3>){1(?!nqT1;QB;)`176MPXc
zLA@SiqX2fipgmGWw#)r{vKSjzNhT#2&y+<+EzKaqTI&Kq>Pu^?NOaz6MXGjZBL>jR
zWq}RAPx8F4K0p#*7}!*n{XHJ>=CW+|DZ5s~R5#VB7qpdhv7a_rn?&y_%;jcqlCC}O
zOg`8zGNV^`li{v-n(C7<|G{l{4wJ9;*!GtII!$8x&l>y8nzKrnKGE8zl2DSOdLdRi
z>I;IxJq(tF6+sNrqV_;uEU9m=B7v7a1SL9E*$=F(;%Q!JB99YaX40m8Zgn^F|@zPBGbhoZv8UTNLXU7Th3=
z?)IJA!S;psomF0TJ0HNsK`jOV&x+Z82|D5i-s^=%VV@J`_qRDgZ^
zVEH&=L47Z#4@&-WBtH5n*AFo{LaZuDGvZz_w-Gk`sr3=xOL?8uCw;Gf$c{p^6cBNi
zKMy8v;}CpQm3(pWsZVp~550b;6bvtrE^e?->umQitMGCsDQIY*DR*c|66?bCn|VG7
z82)W8U6tFFppX{U7_YMY(x?XJ;50rxQeXW1*Yr4iZF2VY?3dHT-J8G{ez+2lWgiT2
zg9ilFo9UY`WBv^%@YY*9cEK;+k|tY%YbK*(mthwocp5zP5@Uw0G*hA&AU1X@q*+->
z&}W44K2z;h821j9zV0q}sJXy!6drM&Qlb^4&+?O;aUjN`zb5fx3&(H?;A9`-+vU8s
ze|S@XXB*@_(}@cmt&%&0V5Gvj@<;8>#khR*k
z@#I|wJIPjM?h2|btAPU6?tgf{hr8nOXls0lESYXoFPGkBfe$j-tf)oorBJ#^h4~0>psITCOndIom(jQap-|=~RZbra9_noTs`_}479&ytP{zb#|Zl`hu)9{6r)a@jhej=4YE
zxl{f?e)mPTBC6N$rK=5wE804=20Pk&A&tG&@mEciW;Uk1*E;v|!#%|Hr|)RvuL0i}
z+1AxK@iZH`_|2}CDi|!v*x{3>naJsGgT`&VJtxLw3hTpgq^%lt&Z~6FtOtUz+(}eC
ztxG?=lbn(d+ps>XaK!c^qpn)OL{N)J8MmCEppaH&4$rwWkpoqg^Tlc~(&!jQifB3=
zntoF4w{1%{mj4(Qzn#|l>R|-wvrcaoYsHo=WP_A9=$V^r&o#Itz3Xf;
z3zdeQAgW5`r9yZq1O?2klv<*$vlYqknL!ViaS@k4f0KxQJ8yl?{G+`a2XW<3WQmQy
ztjsKMlSVI4PfEiP3ay@$kw?b#Kqa)+y>#T4GkU
z)s;Vfg3^bQGTsN7h_L>xIY&B1nq0N#BHn}_^6xHS4j+Agxxjk>uNLguB{p(SMV
z*JF?LS3N?YUi?}xn_|iI?oYw%+OAJiCmCMLZ|SL&lKA5O+vyPFmGGd6aEjlioW8j6fkX
z1gH%!rAg5@JW-Ru&yG~u1kn4_pEh_r%kRgDWg(d?^1Tss
zA?(fSZLw#m*bl4s7YFNAuD|T4^OfNharwDi2x-*77`r6Vx+W$LhrhcSzk=Ifou!`m
z3kjE1Igra!zkAQ9B*lztA!#_}?gJ
zlAu0wqw2+CPFH)CQB}|4Uw7<0ZDEoI6~kzW(;_T>
z^RRv^hXb}!4Vh^irK?qwFWlFWLqgu(1K#6~#)w_W1enoxXH6jFi~q+>ama%ozmn1}
z(z6RlqyM0jddfFt9Kl~T7E(1!mI{6n%NDJ_5SNl!ZVZG}N_7<%hm;V=Yr?h3m33tq
z7T4)8|F;y@6Ru5UvXEM~h$!-U|sZ`;5WPaRpr{i;S)?`@yKedBU
z@6^3tDH$iRbON=jxATJ~bu@D=S4Z42TMBgmcD3G|EK
zuu87o7WsKO=u~-6>R~l#3oj15zPM1I?<3A!q!gYd(C(Ai#>BOfD;`nVH^Ia4iO1Dq9W%_|H9!Wk2N>Q69J=!&IkiIh%qu;QIQcswhNO>CY_qOr$l
zJH>^@;szYJSl7o!Q_}vJU$0I;^Lw6~i-G-q*b3FP4+LQSnw?O6;6jHjX-?wUq
zf9HVc@}JYu<$}^+9A1wEHy9LdZ%5bni!Y8P4l!2AYx^x%-?B1Z0fAZZ?Xg@XxDVxM
z___w&)0Q@uVjbb)BzX>cnBTN7&ZB2y%#Z)r>zY8u%qP1(X?F)~gaDsV{lP;%PAMCW
zV=y5Nv8s!|y5Q1rr6kP#nPY|#0pN;PL-fvI>*Nkfc>ZEwha^OlMlY0^w
z!3PCSI>43l_2U)E0xHf|UVW!?w5mn-AxW^Qma9+g+091blF!vy2d
z5+qOinHr*lsz<`+S%lyxL0f^=qHwFFwY5|mkzN!4t&x4%!2jcNB#a&a379YCAw#MJCstM!G7M$?N}isE96;u
zrb9M)mmg)*e!#s%p^U8hH_FkyEB|r{R~=Vhn`;z^rGuO_6T-D`Fzx^;^O9-jUxZ2~
z6kQz+uQc!u3-tpVn9P4vW72EI_&fCB+ppP3fQnHmifj<*^Ksmq#M^xqtGYhsIq>q5
z`2c~AlSX&Vmsb>_X-Z2rh6e08e=ODZWpWId>Rrw+mWM2GGcs@~*QaV`H1GG2d0IDo
zrD?JTCG7mF&+J#p1QwFLqkg`nsV%*;x4_VC!2nVLlZU%Cb#2jfin?o9ud(%(l{DV^!r*uj$ELw5&m|
z7VtWrT)dojim0YI?ivtXjitsLA1H1+h9_+)k5RrDnmn-admElA2C;V{P+I0&EZbfQ
zf9FXlj@3x;l5aq3B!=ngw$3JT`;b_gjNYojN~b*tGe0?thfg?m2<=*a3?D2kbNi+?
z+AA3CX({89u|PiRA{(G3O>vW1M8ZnbqNTCi!m;jZu{|)?h^_o^E-O>cM`uvsH%aPt
z{BrRw4RU!|mr!258gl5$&mb>CB5ab=#=ZzWH$HiIQ(R`#*3(6Ztz&`BHN;Wqt7!$B
zTFz`%{BbfNSvDJKF?;|nJvt%P&Tb}umCSQaOt;x%8VW6*z6%J>`t6D5gEAoMn;g~Y
zsUAQt9j#LABj`HoKqN}wxQc4kRCKu)rq}v?`!m}!n&pQYkmEpay_?8)#edoupeG;v
zk(PBlozX<#g)A$cFZ*M)^ig(nyrxxNCfbW=@)-j52%y*Poao0TcyXGCU5C9+2{G-3
zd5Acst|wPv-Je3M2W{+z4vL_|rNUsBW*Z#|%~bKAIxaXEoUdo+4jaP7>m?pC81&ls
zElrLCQ^{6sq+3L)
z?y!Ek??krT{#$7WEd5Sq3`fy)U^5+laqf*@J6=Up{In?EEcd23=7%KehQCT{s&(00
zidOE{`om1xK>wQ=W)>vDKb^c#DL;k^%d_LVS!U-R^9=g_FGC)x?n_RSqK`zKhYe8H
z0TLRklmH|-2kg}zj{4j?s!pI2rk&>ks9~?0d?ElgLY$|Se(%((Z17-jnF!ZCp>imz
zJgGRxvq?Pa(u0=8XQh4V_c<7ytl;)>%Br#nuG{&_vfX6AzKBK(m;<9$1|`{yq*~eGb-!}?SF
z#?yK}>J5$0$G_hkc^WMhC!(0WLaIkH4^A|MVpr-RcJYW9bytlXobA-$c!8&I^cS!i
zXFyE(qYl>l?V7}XSI`(|?P8yl$?6qB_;Je#AW{hEoU_*I3sMbejkE={9rgTs0{I&U
zj3hFu+|#ML&Z)cJEtP_Lku8b{qg1g-9@@9tGYQ`>9KZB(Wlzvv%XUgCiMnJaN1tE0
zSUR_3fzuk5j7O8Q=qj}Q?B3DwLJ(9Ymk&Q*=ee}cJl3~?lGsV(F8(7?eOMYv`wa9R
zQwi6vmhW;!zQTTNBoetrWD4~veWI$Pk)oW%
zXkHy_59h1Bu0A~28IkQsLhdvl{nhVt2#^+?@9?+c-H{EY2g>=M5MiO((+T>=cSgD7
zC2|vXacHMzkubpdJG(U(Ml2NQZ$9ygUh{g;roF_gNtcTg*BQ{~Dc9C*5W`b`b**Cd
z#pLE4;@FErMPsm7xyGL4w}>?Efg7P^NqB&~)70`f$!F
zDcR$F*v!k%@44C$IG7ih`b!Db`klR1Cdb6a
z0)MJ`cQ`C=b1SYR3(-p~P?P@J!%CZmM0@NM;lu?tvYHMR?&2DZ-yBFqs$jK(Uj3C+
z=NB~7E4~Q<7ekjt?%zW67II5LDt=Hw?NmV;!A6M-L&UFOSs&`LtUdrE3od)%M+`
za~3FFxGrIgV;QUfvUqA~>M&CeV8n=qzIe8XvRTF-`dPr}ZR>1LXT>ZZZJi4u^;ZK5
zA%)kkhtB@8@cy_S^XvZgi$f)hr*e{HIU#
znoJibCzZ1_$A8;hy9>##4-uve&)JZ;McNCI=?UG~MoPJF_a&2uPu49rHz8bw@X@^R
zRHjq&vYvk`kGY|VdWXzPUR^IH_z0fOba(^a_-agc*tGct9bhc)c&SbHzR3~lgNl21
zK&vb#jEb(zgRR8O7!qu{FbCx0s6hN&59J>jYS-VS>S%aa8<$l$!M)9X8)OgV`4)*>
zxFdA2Oa}b*11-i*GxsKlzP@7WKypRuq;!K6?G~D<4zwS@Phf
zT~anEpTu~MdU)u_5EDwws=8R?XP1<^oMN!hF^!uSxv~&3s!<|uH4WVE5KYf$(v3a+
zdH1s$T{da2W)V8NUOz!Px{u}42Ca;5-@d1aX40u;0@~-~J*F&Dl}hrd(SzNZ_WM9-$9*o!8Ye^^%Q4p
zsa;sLR*FGTr&G9~fJZI!1P9CWu@Ehl<>_~_R!E!PbA4L%!C>{1VRCbRc@L4(X(+!x^`enbN@@alKGqI
zCPq7-${;&($iq-zAPPoX!9ij)D0|95OYu+5($Ia6@ph>Q)$gQnop{?ZpwqhnzsY3=
zF{n_@3k4|fVMEG!#sP{)VQga@Zli;O-K8BfC6gq#sOE&Zf50n;coWW6!0IhX_wRHf
zw_-zon)ILTVMhR>nAQ%F!5S@BJ{{%d)fRnw7rA=Vrj$A60=tkw5+;#&bwKF4C86F(
zT)NlqC?CeSTO@Pso<=F|XV1ya9
zYVc0i$W)%8fi%T!wn;fSAr_r-5ZN%|J`}FO_gZM({Wpb|xk`(mCr@eumq&eRVjWD#
zZst7%rI1Ji^FUzh2u0@z5(-h|VMw@_H>dW!++CGmZ+UWou@TljdaIuQ~ty
zZxQC(8vjWlXWt~7g9RTOHk30)qLG}T6K1|F7_&3e_C?IIl*=Z9hj+hLKPSL-edY7?
zV%QM-t-Ado_`xEa5B9YV%Na!&yVKvH*JdVN6br9s$1)3Q@V!_P-IMq`b8P_$&Z}^g
z|C@5>%a<=*U0r~Lr?ayYpH+Y3_|t!6GGH(PC0mQ29H4d!#G6m_FK19DNRoRRJ35r;
z`K#y_&*IrXge#96`gm-QnjSeWByjHMP9Bh4a%;*jYeC<7ly
z5RKJ&$~hoNDokJ7W5i1jHLrnFF|jPZEwD41#UC_+t`)*|8orwC-j+q_~i%)fu_s!yf0=gxCw`RFBkENiPNH^-^TUz
zJM?*-9dgNiX5i=q_@L{n`dr0z-otds%i;m9_2a=Bc_~s1ZMj%ONRACjvT62ggNs=`
z!xZwDM-8BTUiV0DkI0t)0eIxH^p}0(<=}~&)t;t&NTS@~oL{Qn9fo)8fXE?23Fk<&UrvXw*6)IS+br9P(yIZZGN9ClzO~#b
zK$*VjT^0}|2H+R&F)2{7nxjTLKc6$%P1J%jribPL*>tfFci105
zQ^6Y$)%zq_)FNwZECa~YO-y3$9be)Eef`>*z^wCd)g!81s;;1*Kmp*&^fYVJ`IOH6
z{k?kec`?!$8+77`l4`>9UX+&r^VMRPA!&UVU=am9pwUzl8uPVhy)QdxKnN-Z8HW6E
zalqKWAd1QkN0p0hdmB(iC9A?ABKPiURS^bKnID?CBGW=Se!%y}7(Rp}^83_m8ah|Q
zs#0HOzDlfh(vS(pO#Gv^Tm!_&!E6XTOu{F9Lg-{4`ap%6&O0%wk;L&4-bl5bIlG^C
zZWR^Zv~Zon+gMjMU`Yl$*@?VgXk@>RsUU|J*0=8S-`viBT~cpKB)O|z1%dT;`s)*7
zf7iPPRg`tHjYPXi?93Na88CcY{Pd<%m(4xEg;zrK@~@)>_oz=^Rok03A^e5cyu7@(
zjWAwlW8*sQI=xsno}>5iNSEt3Lb4jCl#K>^yXDghqJ-f!C*fDJp1dlLddsVCPd~GB
zHV{T#wiYOPH)UoTIyj@~5$!nou0(Z)8I&*Xbdj748-+DQRNP)bY}5dnOZeZR;Z
zUkP0hfWH3#aS22j#Zae+K|2*(n8Fz~^!~6B?i@DkK4-E|nQ`c#2EPeAgG}tI*pb1;
zxLhb)P-WzE^DGj5R_PWeTvl?JrV2k=lfiac1k=>oYaB$Qj9A{d0L0_~R29N%Ebu%W
z<8BR@IAPOI>CT@w8}*-^=cvdz+-Bi*kBjJ0>Q*b1c{vVVG0(-EVgsb)-C+{@PB~N;
zUav6ehW*xibXC026bBK8m#{S352e-F~r8LGrI%dYSuapuKxtJSs3O`AFF8iBRGYP4Led8Vo($liY
zfe|crQ|1CwN4R-zhbl2XGiE9yUp!F47Rzod6RDV5j&2T5{0_{LA^@{H%!s=7w~f#N
zg79$Thy2L;6r#2$Ha|fZuu^zUQ>m0;E6JKANq%-XSq2loM6s~|O8Ao+tRP@`h3$gx
z+|?Jw;=X(f`0p5oT&XA&?T37V=SX`b6r3ykbih
zFdK83?Hxk58Z!#)+>5u7QP-?<9*wjw0V`uY4!HOT$_tO9fmXV8QQf8;fN8KdvL_~<
zPEMotpwDQflq~PbMqKLXg16`{yV_$IQ{BqFoq`xXtYh*!d^Av0_V2B^DLe1VA6vUP
z4C&__uq%@nSLVJP==KP%>#s;Q*o_1B#vKbxntq%*K(|%o
zF1h%bZENnw&su5@kKa168n3%!(N3G|#SYQ&=2@%iuR3-isyl}Vs#NFuDuzifKz`59
zprDoczNX!|l$JD!5O^j3IprYiNs=oIQMbpK^!V#R7aNid2R3p3@ZB%nKjihhVS+g+
z73Dafwm)wBcS<@1EL(GmK;Yn*rIEP#*EkHQ&Bj+~xzf2WlR}5oV5S%QDi9dv+%#$T
zZGT8X^6q=cTYZMikD|>>bZ7Q#6**5H)iD|5vL>T*dtrEjK`aAlgLCiT2^qn-m?+nU
z3M8(cLUaa3>5HcjEdnsLE{7Lub?7{c?Pbfld--ro2iLN`nPY(z^+(*gc#gXrVD>Wz
z@WXaSJz2y+
zl`L#-X>);feX7}5|2jYe3EXE_#@PDT@72S*g$Lne7V0U;*Otgh(ycrEC|Y3Wsgdtn
zt5u$MUyj1dH6Vk9UK-{0!#K|?L2agAfOP}qqJnYBt==S4
z1Bf75^|u=4WkA&f0Fir}xnk^`M>{x68s7w9_aLcpN88@AC8owvletq<eG?)vLp*G
zsiKd8GRIG4-`@Kn3~L-8ZNj~Z0F)fo6pd|+q;^Ovv|3!M`-C}cmf8bd&p;p*K=82QEBV@
zJvF*u{hj_5XiX6^J#IwPtk3y%o?_j5y)32Sg|H`}SS(=6SvF4bsBckor!I?n#`YFy
zkedi}yjkf}PI?@pTxJ<&Od=_!&Kim62%FqyDFlypn%!t8%%j@?2vUFVz1OL`|7Dos
zEBoJ4Gos7LzxLVlKNBANe)qP8`VQ5i@w0&rh%0L?ZzH`8W%*qVeC7?`(lV
zK+K%m)_zTyhKe{jZDUe2Fr3sLzGHor{M!-?^5X+=u0`C$;&H(K@BU4Zc1zM@td4xL
zecuJ+$Bvf#sy_ajm}yM5>>lcR!a)J-inJ7A?%%fg*u3!7Fgff=9{)jtE|-db(o8M|
z*FmVdtwF_`Mu(^cnD)WVb3~GH!l8d&G2ly!j;rJd?dbKu$a-YxdyeVGGD@CyzlyFe
zwZu`+N?oN+fzGDe`5gANrbDYVMa;Y|CwiKUo=KKlQD82-j8!}Cf*Tk&Nt8-pZ8;JE
zQ>$a$UNE3?8oyA^=*SWWU9J9;g-qkOKaSk`NM&R4NF3Qwq~G(5J4s}>>?Z=7dj)vT
z&J=hxTo?dZ`$k8s`<>_~P{dD5X?%Ff4}MC&k;gxl^G)o975`+1wsy^Vb*(lp+y@3*fWdZrd|c;GZI`EaXIC#5
z7bbk`GJ_=j$JKbtgYjT*We|<}v|Mu$ntLId_|HIc;h6U9H-5x|j
zN~yGt4gDd`gF?(V*u_j|s3|6P8X=j@nSv(}n9eTR_*
z3X+9^grr)Bf?kx%t%40-?gQ2weam*qoFTnX<{T$X$jB>JR}{--3#Of-?6&f*h5Tzg
zXFT>la+F48qXNVb)VlcD-@2<_;T{UJKTfP3Z(OTM{*B$-Il8;f$2-arn85gr%_P+6
zJ{K~x?{H=eC^Hw;XRz7MD#2kMCwe9n8Ya4_7ugica$?rQ0xcnb85PGFMH$2n7z4kF
zn7NA>kIScWyc^EXIIS`mu_UE^SV=~CGk%bPj_?op>nuz=jtTTW)EXI*lL)omna$4k
zL`8v;bOIjcsBmF^#4GE(y9i*mQ(i05nKw;gxxW`A`-S
z!_ob7(f}NK0d0PV1b4b&{bF_3rhVTUeUhintQv#1w8p#+SWEAaIn8vGj_(TF@`6!9|%da#Cnp
z4%PgpGs4cX;f(?(>=EqsJQfE-=rnLw3o%pOuIY!oo>L00u_Ts|6pVj++Y&X(vyD(b8#g8Pj
zvG*fqgIaZ@xvSFI->mava1h)?jbAk;{HhZB{2|LuK}wZe8nt|uQl2Xk{%~*i61H8&
zIPl*KoYmN!PmQdy*B64?x7`B3@ruU53ka(C#;LPYXoYOCj|#}nlrlX+
z{pu}`(jtEs4cQcBYX+0m-Je-3q&SkRix=P1t~f=S312kfMox&t;<$#T%MRPZkk*(j
z^WX|qZyj&lH8~FqHI5N|OeyU*mE$DDVR;psgPuYqtfJ6=Cm%W_Tg`RzEng@$j7kzd
zNZnv?lc@KRV*c}wF0L{Dr~`C#q2SBW+;yev>jhpgS!m-K^n0{VHk4rSfb2rO^-Yc8
zUG6HsgSk$9oR~+858fgc(f-&qDa0JS+y7P;>~Va9OpM~MY|1QfQr1haO>d`l{}s)Q
z-S9zHsyx;hVrNb+>o-_!UI8NpLAqKPDR6JM&d&xF$Qoni@&qsgaLR>Ol>1&!5k*N#
zrj9l}(A|pC7ph7!6yq8{|AxI(^7V=X5
zuEKP?w-TLVHntKGJv??*{2q}(>}kj8xwIi@u8eXXBfm*K!ma6GC1+R;hOcx-}s8T3y!9z1M!!_L9OBS
zc}kKaDJ;~)s0~#Q*1Atd=VH+bn~Um&cpx1@l{FC5Y}kx9)Nr$h?&FF)XapBvfd4S2aJpO
z^M7ioHZwcgEg3EtMmsrD=C6fsZ0xo13O#$;#yHsAgX5%jVEk;bH?8%*
zTFZ%IZ?YD=hJK8>vZ($OCG=d7$r+ou&|1)@>?hO=_(kk`dMcE+?tCsoFm`l$qMVK`#WNaQH~8n{+Y+Iqu5Mo5#xq?;t8eiZ+4_^bJV^%M7A%A+Vch7s
z|2kXIB4COjV!${)70N^&tG?d_p+wSj*MIdkpNb)+>s->*Qo{!pF;g_${$3cp@#}n(
zYuok4cM6FGpWFRN!3VBzv20@B)o{)QkKNKOeHj@U!Mo*`<5-4a1?vp{AJwA|KB7xz
zqol8frg*h8#Sp^Hu4xB}DsW_ue-PI27YfCebqrwDv~u){lV>;&Wtz{*c(i@gHAeOF~S3wo`1a
zzsi5WU$(B^7FS83^*Vk&j?${AOkn;)#D(-F#gc<2Cg0>dTTsCpHF6Zuuinvrm%PZye
zax9$edtC%qZ`p3zx+o*^_%f#Qb>6PQK=ZU!8>dqm=X)0LkA_?x}4-`WDZ}A+xc1Aii=D__Revtq$}6bBjZ7}R&Uy}$?3xI8nx-_a`
zo$lXX{r5se?ZBqecINMD%7XcE$3p5WPTwZ0l{nuKJO&}#-XbDnGp;qGYSGei6o`1O
z1qQ-_DfcN+w5lmBt!af}ziXCjF|BcxMuGx(KU$$=@XbYmQyPz-Ta6m(NE%@9bythp
z1@s_$kbCg%bmQtagXYw0uZNP&qDhNdjr%#rVM@*}F2ulQGeu?*^{Qf@O80|?%>v)e
zZ8Qp(Wd5Gm{;X;#tRbT?uP!#`)o50SN{;4O>QvCt)|mZm05f9)?9abOqkL`+q=Y>}
z!3A>N>$}feCT0BGFMmy#Fj~+Zi&bmZ-o+_0(0%7xulV+9fDRw-_tth5->Z4DEDkSF
zED4oW$n}QeWFkOC5>oeWn+yGX&`~ksdN;T~bBt=M)6e`Q0Mqns5J!H@xSdU0wG-Sq
za)FG)W4D4?;z-lqBp=%rACG%T7VUe8rg`Sg*zm)DYd5)2a`tl-NQK&`SvKAr4><)@
z^zQwu4ZU$qKX`8tckIYF!&7ZxrV^dgWj@7?$yH*Y7jUxyL8<-cqdk|aNp>Z=MZx82UqwvKH!o+`Xj18}2S!pYeawTfj
z^vzmuyv=7mFIES(Q?=F{Ajd{|y&BQitoViJh2pO#MK=K+j#3_{X14IQfGR{^&surp
z2R1p(UbP1bSU#-v?3H9s`d}}g-+Z6Ml_j*%L(p#cGV-f2=n0w}*dmZZYIoy=gGkd2
za$!>?>W*u@(XFkmpcnoA_Hue+A}k`}V7A`Y%nT7a^WYEaq+w$EtFOSW^zdH%4K9P`4f7er-nvXoap0_Lok&w{&htJd^1#
zkB~D7b&nu%KX!9^m+10El%Ja92)EK2WM9$wmI}_zEDbU2%`9RTqe&6**Mn~n01I4I~@a&lOn%Nr6XB;Q1B31zw&
z-4TBJETB{WS>Ew})gVSj`T@>W4QJ+TZstv{O@P+&nb?<=`%F#VyO#ui3sH_&dhw=nNchUrG+*GUcEfPZFgp_#>+o6NCWM&FV
zL+3z1vl+%s<_D8EK~ZsB_`i98ELUCS5552CpSKeapad4mHux7HF15#dhSFiGN6rBB
ze(9(86UiR+IsLv^Q&;D6vz=ewd?E?@$%QhR@xzrgSHR4^M<(BEsYb&vjC#72!iliB
zj*m3Z`$*Husn%d|Dwcsw;>2&sHy^IHXMktoOMY%R5)Fm)bTL|Q*GG*}3
zsrqNupx#pwu>#R1#MWYA(kk3$!y+e=MPNWv`$s9sZcYr&AQhJ_?cpMZSD%*wcRnpL
zgc9~o;E)+|yZtJjgimYBvW8Z_5%=x|k2H2B$%!#;RYD64J5;fW{9GD)lLrX#>5cUY
z!t>}p*T)o(9_T`N$lx?;{k3wjv9W2){#ZdASyqv)%`cEeOd{b*Z~ks~M<*Zg@{pbz
zkGn_lt%e=vXOoG{mjl{p)m+QNF;*{WVkUzzyP?iCTVcRM6Opd8
zQl44e4LwY~S
z#_#siEwQs>0^#618j|C%M}qpb=7YVnD3+1Gm?HE#3a~yw`wGMgePkiLE&A-25R$m=
zYQR6uJ!&Y;Sdy)HH8brw!AedhNb4P-5u
zfn6BAocDT>%u{%`L0&UL?C!@OQ5#(3`Z-?=M~H_ya?=}J{IvACUn^t$@4*qC9-O!J
zjoF*Hxm<1cwEm!wB9hL8P{hG_(&DL@VVQ+1wCVthgVxvj#${one!jTq{UYJdvK!<6
z#-5@>6Fvr*;dZLHW-xw|8haj*lQar10-FIOOoMIFNh_imhM+3G*f<<~eG8=Fdd1Z^
znXMXu+OZjvQ0CvlGo1o&m9Z(-kS^hKKm`H|Eu$e#=8V(pM>fR|ar}4GiO4f@w{yt@
zSYe)?^OXxWDAxN*91C)9VSaFPQ#NvCUTnLo3*f
z+~UkeoeVG%X{l?lG-n3W4s~OVqLAf^Pk{5rYBKo`A>OG`N@3*)A20lwq5V=v^!jdg
z@EuxcSIV&E>)jG!4j4aOQ9FtI>Mv<%p=t^%z1RWLD?A_b>@)^|OgI|~s>#F)S
z9V--Ag{T<`tL|?Q24hK!6POS|gc?f04y=7aB3PL8{C;RjYLrPzT&DHxw?DejmW8Qz
z4yPL$bTC2URAhYMS5vg4W+}@ZU*o248}T9MIfOBL(NsyKWY1WB>2Rq`OR_>A2l-jc
z^`X{|mv-TrBM9Ob(;6{UBmRVr2J^~C>B-tc%=k+0EARM&)LA|9BAF0V5~fRsOmkL6
z!SX$Avy+TN1_|VGBb&<`#b$=}nkb=Lbq+r>(BA}uo$mBrvLnlp!tO7*BC@db7aVUp
zv5@0uJ?Lexkldj+jP|_%ZqnUC-xKKUb*TZ4zQ9MNg{MiBu6%<07F
zL{A)ZZwmmUR=?*sDWsUe1%o!Q1BO{6)G~!2~{vC)d%Pw(~g
z^fR?p{rQQcYAJcTpB&Y?q@OLapr_DT3LHM(uFW1ul~WiVZAm&k!8_c+DXDsuP3F)-
z2U(@>z3$u=Cw-4TNW~w(idW$g)x-L!=I;%c!uv6ep~-&InDcTx0EAe!M+{BY%-6UH
zocYoCH`&q98?6LQj!alT08jv_!Z6ncLRXOYpY#4$Cp!7AI&jZQ_THRpm6r6?GFKXO
z)q9L5Ol?+j;R;d9UX@~JCTHos+3n$}6j4=@2xproeN&TUC*iHold)iHkHO#!)Be{E
z51W~LxGeMprp>E8vMC7X+psiS9-^QaSzPkpm6sZOngz3?Io&BCt2*ScQ2MLA9G{7P
zT3ako>4Q{Ym%Rr|W9<-HIZlR=$We@B-RpE1YGrABp``0zmL;rk*>i^R5`&=$&QK$0H%Sr>1Qo(?x;QkYX
zlL5fgbEtgUmO$y5A8GMd{P$UQ&-|wHaS(?gKd*K{wqCx$hBVrVbM{H^|Ja?5-Sd}P
zN0O3+mUkLYtV(@ky6m{67@{$n)~1jd4!qEH?-H+I;N$m$_hWgAIg+l3hztfeX@+ry
zq;tQFO@F@sN(yuDd809~>!D?4FK
zfouQ=qjF!#r&Fxj)4KcE$8ak6dz-dfD-qOzgzF6FRE6lBE+IzMqVos{s_Fe_P-Y67
z(WH~f+*8Oh-s`qZs*z}U=htkG?i(SijjekPnvAw2c11;sY!iMUodnp>b-_y&R$Mj!uR82)uq}dC07kF@k5~kT={<-_Zo*
zs$y3iOndUfc`)bGHJHiacKA1WBRc1q#cv5%EFK|P@S=Vo2Hg9zQtfJ{!DH=A$
zoNjn_>X2IJ+lTb$ai!-t_V&VCJA*+y{hEC$nbTE3q+!GBY
zJ;!*YWCkXzW)UD#nik^ZhHQw5VjSpckbN*aWz&0Ez;h4>&jUyk
zPt_K1-P0veIVaG@LmXL%^7k>|m(kE}lUm~Oi)L0%ro2)_*BV8ubNAu$`q31vB{bC6
zpQxSdDh#3IXHRT5+~m;$vA*6rngBVgtTfG6y;+VQCCk*-kBbq5j9CD+g+lkDXtz11
zi>3S_IOkGi?28Z!kHtwY@ACk;FZUl?U9)Fo=Fwb2RB+9>C&Q3E9c@ZC>iiiS+lu%$Zr2I3d?Yu&4(vmi$2
zt4n)Z|BzlJu8nG_<$;!B_LbV-vg8#QJ>yo97N%bP8RYuxJwQ*;>j6@}!7pW(YVgqx
zkN-TgyBlgQN(B?zF8hzXSzChv3(3-+%pyAzFodHoryl+Udg_v5`H2~XK7rceT~=5P%T@LOS~5ypUZ{RRMX6P8(afr?47RcI@bQHzit;$~3jG
zFh46>FIGadQgt`Gsd$#?);vt%H)~iwf9IEAorpt6$OAC{+sCx&b_mkPbDt-Fhna6F
zpYZU(?1Q;7DN5`QLTEhZ2|QbPFmvpC}|TBy1H#BB493YB%4x{?K;hm6lWoI@l1
zw5Hm$-`+B`=3QmU^;#;W5l()(``x%N9?}B%&}UGox24(vJS3}X6emwKs5x;nVVqjO
zts;M~wi(6bnS%wr@8=ZFSbt=m7Sm*TDy}n5a6|s5I2pkPa03QX6d_?q^9pEE+8xnF
z-9;s5bM^XgR6Rs+@o;)@6~uG55I-J$IF*C!QwiDP52M_?o?+H8B>=ilP%ls|Q*%?lk>r}Fr
zXhX?VG%^e)vpx_HT$P{+#YoR^6udS~0+ShdaKggo>UVRh?ZLqopTX%9S4Znc)l%?|rqC_~#L__sU>Cu7~UU
z9p)a*&7mwuY75GR`gE4y-K*;sB?Q;jH*w!SjtfghdS{e;v+lHzLq?r(WqnGkYtquv
z6wY@5-A+b2D@cT{rx2HamKR)@=`TW0h$dB4BQTjoP?m9r7gi~q-c3A~QNZWE@2C)2
z#wwDSjgk~2htcazWKfD#sut}aqCY@|VT2E6{$DSEpZ7;|0r@|~u8Q3a|G`y;LM9~A
z8mA>7J*Y0Zh+cisyLY3?x2*C;5klA10`frGYe?=E6JR21!nFl;clk4HZ;o+y>z}I&t
z_vw7+8AI%kOM>c$>gCEYg{+m*>8Gd&kP4_Art%YxWdL;z>FG^~e98}T{7@z;Ne*BK
z04j>>wa^Ij)J#7uNjk02A*QkBLCM!b(K0DbVwvBT9}pQn{1<0Sx!0vJw09ND3puAE
zUGYVVoWujrHA+gZ`GQ1IR5_$mnby1O0I;iE#f9423)Z7NdX(Bk+d}M;D1DUev!2N&
zzup<4O7LnYxgLpHc+L7A&_sduE{)mm&H5UazY-jPd32EiH)gB9!z1}lz<@Y$%%;wP
zlwn7N93VMizXSLox>CA#HR@(;-!S_o*jHVJ6DjO#34Re`3;i%fcI;m&=nd=4UWW8Qe_
zj#E$+2fUE`xxZQwN@l}%`TDJ+P^zcs!V+dU#eGNH@t#^!eVtVGN+ZafmYoG99cRMX
zr${96DO?ZQq;~CZR&5a`m^n=Up5AKe`t
zPY-=Ao1_vk70aT%SsuB6X`jG6%8c;?u@#g``9g}0I^x)yI}}e__gNkpy7+({Ur(Ja
zbHIf|_x3x30{t}~1lAP?rvA@-%V1u{g9y1|MlE-7#W>L8^#4P_M2GTQFMAVB0=0`M
zW{#8tv%sToBO+vW_u!aFr_uuwnS&M%vb*Yj#q!d4lv{*e(^B>t+1>V;AO9b4ULik4
zci8tKH(BsJ5$=l(`M$OisjDNeOCfZJKtmh!Yi>I4Qy|wE7BU5lWN~nDxm_JCzZATy
zXlwJGZ*&Gm&?!w@YisLpUnBcq#dv;vHG%i@xm81^SYX<-z+nYjIRUhX@f-2f0mCCR
z#R`ue=u$udli+nVi;{3=PFzn;H3`-grDm$z=kzVr`vLse20y9`x<+MUv~weZMKor9XUH4qPUAQ^lU36Ow
z*R*L^AO9riy;GR~a1P&Z$XglwR$=pJJ0#5wWq{7$KEeXFipt9HTL8N=1m
zk`uAd(X_^NtKY$CrwNbKp81&AuetZqP(Wth79B)?bQ{8icfiqR3pG<77J#mH
zLv8_$VG+yvKM*_fb!|*!xC^<{RCl$EV>tb*<)&C
zBRQFOredi=zE;XyAFBjyE1=^6FfN%$$=Vb93y|oNsc6-I0Z*ZVRDhtVj?IF)
z4tPUI@6D$JW~rNNhn?d+5hgzG*2vULko4vG@lMjiObLV3g<1%ciiX);Lq11uHM}4t
zIa-}!emXJ=U;yUid92uYY}grmx*so6%w$6pV%mkH7N
zLI=#TKkBb0Sbbb;5L>L}UO+mX*b-+-L>`&}AL=oMTdguvoRTO(vYw}9HJW7ep6bvt
z&)0l`(F05rq@T8}Wb}Bo1JFClaHz#}%Ikl+RduS`+gFa7g+25dRB{nUe79+&;pOF}
zquZTS)eYA%)}SZEsRVb>f)eNm)=AI#7xO>5Vo|5DdPY!v=kFLnyo2=mHpAhK*6xAA$`PbaCjSxnYl+d~3%Q=zOYa;k
zqq8^C5cGUg-8NLafp}@33--M{pgT33z6J#e-k*(MTwECTM7($&o2c;YbNg<2+XKnt
zG08i$2=@r_HO5gIg8Wqb<;TH>$A`sI+>?H*#wa?k!f!7ig`4Nf0mKkTz&R_=b@b=W
z!bxbW5K|lIp~+EH+0hgO0(*uRQ)WvPY+=qG#cHIk&(+@7fBU=2*cj+%ph#D&Mg2$S
z9}@~`4c4*`BO7mI&nbxA9(?J)e*Ll}dL?rbDMB{>>H+ridt2MX(w+L_9SXlSPR-
z9@{&PPl`;b50GCliQVrCN~Jp`$=Ko2aii5a-7_u!uPkHaA_WEvLOqBO`urbf-Cw(O
zKJN`NYpx>UFG?SOdEi1}+KJE`dRN7K
zH(EAHQ-XOi%tWpvvPK4nYegrUdXSu9p}cQ48ah3c=hg=@4-7WSl`oz=1FD;=;x=&{
z$aX2&SHOIc(oF(l;Ks&=II;U0=xO6z^g87|Y<4_n@e){EJ(jEICmueNdVhU=4Xy&{
zS;`$X3l9$mNn~dnh|HsAbG4SLF*-tg;J+THS5F(3n~)qQlJc-?GI5WhvHIv1p8luK
za++~x(Y+Bixf?TMweUkO4hP*SjNi^X?02WZ>ps}O?r(?=+Sk5g#zW7b;*(?u-SM`=
z{Y66usjZgtmc*e1WX7-tbjKo>R*_}4;dhR5yL6#cU$=$I+yvJk0q!+T~Sw->;Qcn-275b!H?HpWX`uu%-vTG8hK_D!|NUXi9ib0oZT}JlS`NOxJ
z^%n0nrz{^q#KGxjsTmpzWsZ@%}KwW2dl~fT2{qdIP{F
zZ*OmMa`M~D!}7vHJR+j`dfUa35jjIcLru-u_V)I!uC7esp#FZ$i!=&$lmLet?YXW1
zR+ORRhNISVl-fud6SV5CnBmZcNtYg-gam$pZ3oG^U9qtDDcN
z?zzLtxZPugworS0;4UeQ+5zucnf9^NE*Lw4qGe1vrXR?qM`h>a0dg^H?CkvDMo;t8
zZ*coGo%<#|Z&^$d^+v%dqG}d{kNwg1r*U6r#s@m`xMyvZ_{}L{P63615htdzh}kJS
zELrD;+?g&G1(_f6D#Ac5G{*NzIkxpDr`p(%rH(Hagf_+hhx9z}x@J)nSlEE;jcvTa
zag)Pl?)7|QgU`JOPnu1onhD|5|3{SqPdM)|z)(N$GW!*k;LhjKwe5CZsqL!>DAMpn
z`qeKv8Dyj?D10}5aSPyP6ZPCIc)Z;h&ki4BxYn|Gm5{GMF3+{u^d11kE=sf{5Al;?
z#L%Hkr#hXqXEP~^#EvzN6Eq5ebNW?G7Yy289Bu--a(y38GsMNj3YtKiIKB27#ZQoD!{6s@yB-|&xp)^t
z;ppgC_$r$~;BmL@VKj9Kc*w!yrZo=Fn(YV5UF2Zc$OCvrk4s6)~pgTwdt
zQ3ZOAP3;M3#mtap;XeexJRz3`5J2(4G0d8MRzxepB^@gY?oXC
zF-rE79TdTjx%u`~siw@^@z_rS*B~UEBM4r>d>>3d*W>sub3fUPAKRyuqwB{ak73M_
zx>~r5_7R+!B7tc^&s|3iMi-OGKd$JZLx3tihc{9}S}U^T?4fpb?RmBD45)0^1)Rf2
zn;`WQ^Yr%uOZDP!cO3&a#%(KoN=|cyUAypLMOm21XzVNKSX~fco-8%W_D?@7L7jNS
z#2nuT2NIclkGpYwFRC-No84IG=q#rc4U$&dYHe;yoi!VZZ@UH;gK;)jl#KC2B-PXV
zqLeN78Yu?8|Iu7?`kb%ONqIgl{^PjRsrZ|uQjUcr&gIuE<+uA7V)nKbz`{Vib}sSA
zP_~FDgs2TSJv1r}&dT*>hM?2d2|cph+|bQcWR+)b>F!)x>qy(
zlp*_^1t(5D%;Uxh)OVQgh)qMb<{14toVo%q5jL;1e!?nhA1%P?G$;1p!-?fN%>Pt=u5LIN^>9Na
z4W#@aNka61ug62=dCr6OzrmbNQrDygdJI?wzW>zK`T6a0+j)!W8c%f`@3r38B{~#>kUK^(4@b3?@#a$pz_yebinH?tEyerWq4zReLA(
z{Ue}-5Dqqlsuc^BKnc91+IkXCIcqpsP6oXpKUFu#J7*a6MZtTqd9}$qyLc6SN>`a?
zImLbT1znn5vu5LTO9kVPu}e2u
zUPz++Wa#Dq*-{?@W|Ss!xD!JPK9?~8XEpL}l=
zi6c2JbtnF6{q@}d2;JC5(VRy9c*+tpFhem74!W>QAT9pAd(J5-`1ikp#keLaU#OFE
z){~CC*#=ipWcp@eVTrpj9)f~IpFyaGX=PbfMKcC;#a^k-FQAKo*4fE*+P$P6_Ll+Z
zDae3sekUU>&3Cor-{b-)^->j2kY0(o?B2|O$PMqL*T8<-|06|*9y5rz|7K^W%XL4+ORDK07WKB4#t+#Dy?Z+6BA##pJV)7V>FIHjbR1eXO)H$5KERmxVr5^c|%x)Yj2
zu)lC!=fo5<)GK@4?`iwKpV$@FZ=8PgX2Sj;w@UmqSzXId&NWp`Ao^&nBuy91fI3pX
zDF|@X;?k`FD7DZsn>ckfJ3BiOw}a7aoi(s%ig(5aR^hj`K+a+$o%-Q2Er
zOxExUd$LzuK4iC>^%mWydJIc-?acVV{uLB}rC;YxXS@vc`A5zCTQDL^Wrta+p!`)K
zu0znefMD0Zn-NPR2pS~wkhPl@tWr%J^nIx@f-Spe{1|H7u#t3;$KXYizHzGEDT>%#
zn3J=1KCSzB0S0-T3^0k4MS~AD0?XsLS&c~eqvmd%YjHhNU^7ry0GkbnR1)rD0ywcO
zPA>I;blZREQv+K$)4f}z1sgH@+0v+xocj&5RPk*aN8YJjMTJ|wPnAcraNy$;BgJeq
zNys0Ls)->K9JH%tmKPHfGc0ghQeGa}SUkOZ@8he#98<noLodUK$3rK@r;;-@lpfz-guPy{(&nnE5eHkar3{zEBuJyikf%j+O;<{>weaW2s
z=Ocx_F#-wUtZ)7UY-W_|-%LOj&cMedBA}}Kb7JJS!4y8rt&ue
z{(-6mLP2GP<&M4RextB}mLYmbyF=lnnNQ%7!;%B{rP7K>F@SD0Suc3_xFM9v
zIogJ@q^PgE&?I;1DCoTYH#+fN4wQ@t;$qdAqnFT7Y;{4*Atn8vVX_
z=nGt+L#f>F?te{9O@Xg&1cdp@t2sYNaUNXV7XJq+UaPP+Xv-y&Fqf2U?>Mka3P!Cb
z8?lW!1*1XflHt?E49}OGn}B_j6W$W}o>nuUvXRs5*ckcy$I?&tYmK3|RTcw%i5lwI
z?5U5)&>ebyi})D{dwRh0Fm$2ur@9GD%-N0fy;}t|&eXqu{d&wkSDhq`x~+Ve!Jr%M
zEX#0bTsfB)J^&ML^UApamQ|u}7R@mXH<{dlbN|GGTP@GU@I0cs%I-JC7V|knd(5zN
z66NQWlewiooxqX~Qxd9w*YKJK&hA%33Ig_q@NpOr+B=O)WWuU!4sM}#%Go|OPr2Xa
z`pu^azhLLPKjqN*a7=?ubQ?Nh6OHie^f^db|w=SscB_Jrj`e-SF#0(D!
zv2$O22$&J!<*GTaKYy8K!1s~p4X`M%jVZ^WLV&4#?43=AM?EY+?t&mEWSI3>VW0~C
zJ0VXDsS++bLNc9Q&Z33PjN>y@loIe72d!MZ4YRVu5Hls7f`#M-T$bnudEV>HU*?q+*Hk)#
zIQjv9Rs_tQEY%+L{hmoxyyrg5Tw&=iK`Z#S$n%q_>(VxW39(err;ljl+=^_EGK@gw
zkF7sJO4Ee2V_wB)iIHE=NDp85HNE0u71}zDrchT@tOhgyMR;4SqU~A%sQKW&0Bw{J
zdakW6_ddhb&API%yERw76&L!cjR6&qqmh^{3
zdiA8dB8I!odMdfZd$F|^?2P+1A7mN-Y3p+R!x)egv9(jK*1I83BU>L-jW;|jEs*_H
zV|`qhcFk8kG|KnrD7^4$0#&rs$S@Ku<(EEA;-Bj?^0$jc=74RHh8-o2t9;Se(H>#y
z2Z#D(?y~Ouiptb>=iuqtB=+u5D9BO&P22-n){%3I$nx|Jt*%LFi)x*&I^l!yg|N<{
zdGf#K5Gzk%nqD{yyH5Eds6c9$KzUMobGzeZxc9@XmlM60l!7wibMpwkrMRn#CS5Th
z6yoWhza9^w4rk{T3Wuf$A|;H1y)vOz1E2)X84R@O$Sck$G6q@-TV%U`HRIW9B`{
zMTp3~3hApf;{;KnpI2rRT#Ro}S5ZIe9wytCdn&?C+)U{7_s&
z<0@VkQB=Px>-N+@IWQ+B8M;5{>(&)ia;zaqNfH9y&aI=@tuS@td^JrYg9neCJfJMM
zxOiao7?^MQ0fon|2=D?XyN_&Ve@I)KMx*y$hbO(c4kr{aml!%X`!!B!Aq%+pOAAy$
z9_VqDVT9D)uEoowVpi^VKDn7Kyp5qHbeqkX{n3uZxAtk%yCnFjy1`-q6p=rHBI~~u
z%X1Bm`}_NVi$ndX9W5&a2Njpo?`sLx)!d-?3>C>mZYt4|OMH9R<}I&z%utV$*!Y%V
zom;>)kf`#}=cjfx-C
z>w7!f1U{bxawk>AvzZdvn2$}IbEeGZ8
zvr&VtmZ$?noUquElUcRU6>IeG{hr}!uq3V#IISUkJ-w(G>
ztw*o8yBrrJ>$aAj;L-8*9jNh)ZZGx)ulqh(am)g7$C~4Nb=8V>FadjiflW@N^MA$$
zrAqhx=X5uU>N@pdV*ffo>j~8Uk;X18Rw5L7zh94S|F*p6FMRxjF)?FdWbZs#L`%H#
z5XBbsrXxo*K+ZwWo0096^JlZ){!*oQ*A+!v9<%xw??k2Oa*II&np&6gtckntk_%;d
zuBO7z+D734x?#;{^wQd?Uw|k2>dTK=%GxzY?Bj65QSO7Q^oDt5e(%0Lg3FSJBRdq0_-P}va>qAsuUYmK-
zld27-r?&N;x;b#8$S<2GLwjxqjodas%&+%GH5}Y3>>JH-hr)PR|Oyq3BKIIAR`n16lj41xQPna
z#+a0Ss@Qw7(H2umRk`K>H^e2^O7g`2v9bD!Sy{Yjs0&9*|IlSE!qlnnVO=flFI?+llp)e5KSD5hz*qm>rk_S1`JL^X}tCJ
zt75etRcue1&$d%zw}HgO)q2?h&?gVF&!j}9p%K}&kLEf5RY(I@dFJk+7@q>aPVZ+8
z8H8+UWTZ@c_Yv!lFpz;$->n}AoN$1*r%khNF&x{`R}|DmgX&0?Ygkx-l!5c~#(_ea
zkN0hl^*ZG*gF3-bf0j{DxlT+_;4sDS>;ad@NqIg&{x3wpm(5VK&JVdE#x
z!#U6!MKGyZZ|3|{nAk3YDp_=M$>p!d&(joL_4*of2!@j*8Y__(LBTcvGYzO^LQ;HG(&xm&&lr{^a*g679ATO3}Ns
z5|`@*31f<5M@#3Osr+Ho01Ah_Ar5nz;bU(6eXfJT64|jOknVzs?34xGRl*MSmyj#RYTGFy+T>h8C`#9uW&oWK6p3n02hx;~z|$D5oJ
zjUiNRVPQebzyL(i;yxpflRl~^n)qP}K~S%HCKz8=Yb6I*iA0@dqpBqI__{r!XoVj0
z_UkI<7;>ZM7||IA&OPq+8$1KlQ5DY{?r6#AUB^o&d>ADd?7NiXN}cK&U==!uKw-No
zGti>rt4T#wp@Wkl8*nGu%o}x&=%$JWdyT>ThGOLv`;ACX19`5DNY{
zZt2ID;3Mf&V1*M}zb+4%dP`2?&Og=TI+u!w(F|Z>O=B*cBQ4njlt_
zzlg2ewk3ZiW0bwLbaWIx!gtU<8-pO2_9GcgFLF7UYXChK@pP)`0^U4SR6&C#ylE^r
zMtvxNp4dhs&E*l5kn)w}_R&Fv>JW6FLG>3(jh*V~b`iSMJ&7Swl(#T?HLGeU`9bSt
z;V+7~J6_ZdAAj8O6pcA*)E+&do7ySw;rKS41P6^5R{|?%{ZCndC_F0}47@vHw1moz
zfUB*mPOq)G63Y*?Ct#~Qsp#InK+^6KHm*mNnqOB?#3dmXMyT^E@jnnqG1P6=ik<@
zL)Y}_A^OlEMtLsdv*2CEx6Oj(`L=q*4%LpX(Yld;V
zI>2YK7)mkaIiyTSI)Bw#9u|D7cPHF_b!)`ODRjl3vh%V
zZg!RUTLY+f%n%^Y0CN&0_7`E&82lOm4>GgX6D)z7=b-kI+1h+fo{l6_W`5#;=&Wt@
zw+Fa*Zlfd=n6g@yZ>bP
z`Z_OkN>Jc#LG-;t}pQQ$AJeXhm8+A1~S=9-lSWEebP
zLmAAfufGIp^8VJA9&0ixDyl;R-#kXHQdwW^PCw5Xyih}qK+@&ewo1XfwZYF!h&M$X
zi}%GO-wz5cD<0ui9&A$a60Qi_7;~$`6>Fup9)EBi$#wG$*KX|v673sO4hAEcEZ1n8
zVmdK*@qVMsxXk2^O8Ix4VcXlow@XgIkv4Zb{AaGF=|^6}IK9AKej1%E%DaSI7Nj^N
zQ=eNhNa=p73E*kNLZzaol#!cjJe{p#A2lhAHY|T
z0`(ihY-7egY6e8JJ6F`+?2gV|2pk^y7c+&xQ&o>z`}fzbQwcGO+S`s$_M4Q|0LOrf
z6oVh#DH1tZ01nz8O_6u*EeoK3Z+PEj(RI;V_ihDO5ZF`>eIIYP(P(qOzB8s%{F8DL
zhBmOLN#Jo4qXtX8T+rOjV=Yxsuj^hJJ{y1FhXbFwnsHbX`P>f~*K+H$ujqS(s`aPd
z6|8z>hy088-mOOULq|#$z_6r`1N$Bi&oGS_KN}lj990Lxr!PAK1dP@Z31b`Vh_x@I
zb`^VYI-NGX)^rCA;e@B0zsrgt2N1a*Uk&?Qm0uxzYQp;D==$De-Y_CTk31?@=y4(x
z#K=>?<2(s^K5cEbDIeET?yEM7{}>yDnALsA4O^5s(|M2*G5!%Wr#~ANV^Q~!UG=}5
zj#m1qMLLQ1N*?ElE8>rz)DOHpSW3<;e5i8@-6}D)UcPo23KS6yK^Lm-jbb36{;Z*)
zAwo8q@o+}PlZFO?1#>kt%%0uH2{!Q&ns4#E{BGMS5wo^mzX+gb%WdoZ75!Yobgrr$26Jf5}>=xS8VM9_TSkY46_d%W$ux}Cno
z)o2jN`$n_C3Avp!_omRj^ZkuIS4(WiJL_Hw*wdi(W*bMGqp&R
z(6nVL3hC(7BP(^~X6+e!Q!!y1zdCqhI^W&)ZTAN4ZuQ^>?q&lxa#4~`m2oKhudaTI
zkYx(s!9p3srqwgq+1c;Vc)f4Vz!z0-cZ$nT+?o-H-F{?ctqN
zfOI!VHwYpnEiIjbG}6)`UD73>(p}Qs-5?Dj-QV&a&pW;|_;LO@+-L2%=Dcby(sOsx
z)6@TxW!0_vrgyx9APJVVBejK2@jIx0N-WGqVol_At1K9v%LO%pFv%H%R!qF
zS1bF2njZLZj^{S*Atx@`B&_Ql5R{BYmWMlX>7h}n8Y157ggq;B^0(1IgXDTH=_4qh
za`5O-m_0_~7l9&nZiD0*$-CcgchuzP^x|gw{7WW$EWq~3io1Q5gwT1SL!nv8^xrO4
zePP&XP5&R&n!Ho`b8}DfPpc&5^8ymLSzXY*wS$1-y!j+M>Dz4Qce1~c(CcHa3oRaJ
z$)h}d49y$%Eljv}-MC5z4=1%N<8)PQY&_ltJ7eq;Pj@+}hi_~k~K
z-;6LMk&Rbq8gTi_e*
zh`Pn|BJH!GE%K!bWZ-z*@$5$eerf-^hB0%X#Jd6~4G*GH_zU#1!vh1Tp;AT!}g^?f)n!otLij*0oVd>m{CF7c*51+oq069SnX
zau64iEMG{BY~Co9f&`a?j>)BMPs;xdv;m`sry07=Hc8r@KpX&w7OZ!3L|!?rtyK7a
z9tII3KfJ33o#ajWcbF>N9s3iT!lP3VarIVb@%~g0a`ZHsDriRnFT}$n;g^uaph*^(
zZtu?GF-NH*Xj-cE5OA9fW<-V&-tBGr>+3|m^RyL#BkOtet6&MpyKzBUx>=(Z-M#}}
zfnl|U58)bgDRn7kK->25W;=OtagjG|baCm~85!k~z$4cInD{PX{edfImbO*<@@J&%
zjPcA#qy)t+hyI^
zi@N!lhVtdo1DT_Rg$3*K@{2dRW4H~(L1E$nC@8ezl40=3@bdD$eq@m;;$cf2XWr-I
zNLd+Yg4wh?nJy!D&8w9}`HFNfFC7+2Fvy$3grzOQzgQ5Y;mYKIkS>QVqnG6E%H!@>
zp1#~NjgF2U+40VUE%M-S?#E`sPF6%@q^*NP
z-QrQ8(X)x-xpKEX=LSS(qmgv}6VOF}k*8HDI!kW5-!97by#<(^hb<{3B^Crq)M2gN
zV$Y48GCFk)q^AB7B{DteVe@H#<&%R`R(#sV37Y7Gb?VB}XYob3hjKCp(7w9{=@ZiJ`6Ohv(bNy}o*!DMxYcPCZDVLh7rY}>eE~W|i
z2cbV7mO}3I_Ynl-Lg~f`e>wid7(Q=LA@q_rC3)R}{pt(a@wj&OzBQ3b88?0p>qs?*
zOXLs#?FNpDK(}ik@Vo+I+(+Qx_o*Z!B(&hcM6OL&F6=1MB*WzQd-R6dH&4-s*lcZW
z&sV(vz!mHT28=dXz+7@CW1SSha`YhI7=0T({JVZ^`ZPrndr$rt1ZbBWq
z5$hQ;ei=9{)?&Om{YsL(2N7;QQXTxJcfMF+9qlBNS=N++bogr;BJ6@nbBs2^aPp%1
zPrVqV&R_8dEtHP1mv;&B%BtuQwbw7pxZ(w4vF-TYAPUSQ5-a{1qYOhAiBeXGCT93j
z^_S%KWYh8D95cJXrOkVDM_b#Ga+Tvzk?o0B~SD(i%m0BCQ{AsYX()Y
zch(3RtF>HqT7C>55`7AG+0_pU!CWzr)ue-%^$pmUq#Lx3NEOfuw6l9I#$bg09i9m?
zJI!9!fkMJW#246mth?40+%wf-oKY#Y#?hRF-|bn@Rlya|Z}u07(tFKO!24V6yN&?x
z0nAlJMFr3_pNZy~s=?{+wh%eM2a`
zuB7g1d2qm5dH!)OrX5vX^}@9ft*s@K+izqFP!QrK19_V#18`JBh*BU(vQumAl2l02P*^HL)
zT}yM^lIkCQ;AJE=33b2gh3Q1AF>x=Xctl}^yg7Pa8O6zz<^8uGyyuO@#k&&hApS!{
zi{eA%jyi?Ln8$%9^RQw7qna6?-l?|Npw}jJyto_wRR7k|b^0HSwj0r^DM`y1U6|HZEF?e6}0%L+p1!H1t;n5tF7v$gbqg&k(a^^Wab3My*Vmw6l_J
zuHYZC-agK^ovhw(uLVaE-o2qnWW!1DjFMe^rPLUV?y%`-=j}(3_8I;D4aM}60UpUL
zK^V~A3Ec)VDulT`d(-*;%a0OQ5C2L__=<2cuEpJuWsB|V58zi+%5}8=l0uY<2q70a
zMY85z&X`E}&8MDQ&LH?8K?wVNJS}gZWe9B4B3LVJ!L`ZMd#-Y4OVYCrp~R(13yn}}
zn`lfvGo1-^^Cle~l-0*Gbv;~aIQz$C#?vXOd|Wpe<=rQaDNRx;R+Mu6N<;N(%!odR
zDbse-5(Dn27zOczQZ6>@pycFRGvjMpLInC4C+TpHjMO$lg`DGzn8Y-rkgcs`q;lgt|ZIuRzm9unW@jm)K@jt$Vw`vCaH#^%J&hG`mvkHpfOLjTH|jhh1k9RY(GCyggIVh)P52Q
z;~&U<3Oc!HVQm_I)_c^*)kC68JIOKBv=iln9!cUq-XmS_S*=cBO5=Di091XeHl4&K
z8H6W&ZdsTm2<7sw`s8&ShqP!r?}E#8K899{3FH&{!whj~@~cTxR1@@?d|=}UXZ_G3
z4*3hkU67PYTB<0Wln9cLkIUdYZ6U$Du}mt#K}76PhA-Rc{6kAaBgsO@&%=`;;_(-}
zi2FxJ|JpBPVj&4}nz*s5LjLvjb?#Hdlw3a7zov-n<2G@r)8F{r!qA%yN%^O%h>@I`
z%jh(=OG8=?OXX;+a+ub=-KST|hgc~#?RmIb7(+ejA4pji?ubz^!n|SH@avhoAUfRNSUsz$8dzCdwYZsezc4--UUs3C~dU
z8*3Eb%mofQ+f_)Rw9%u9??gN;EF|HVD&!XDe0Z>E(Rn8(9dNow766qMFo>Qt`mAS6H;_v<`8G5Oh9p
zNVuaF?Zl7Z6`}Evj73b;Tw);nv7x1gxWYVnEaEP`o5}JPds=rOZ=p2&o}{Ql;VX*0
zBn&ERgQSjp#HF8j+(6irF)`P@598Sumqgc>=lJ8BX3)dUYoYyF5BE>|}yfPjJ7%0OwkXk5KAFZGRWmy_$o_hAfpLGk0^>^SCiODh=yxtE0OLWApy~!f?
zla+_;r~93MnoIy_+L@korLIqn{lF)N#H}1VHB{H>Uvqq%Uq@}>W3$~u!!3Oiu0RC`
z8jIQ27jfE(cQf8D#RoBx~7-sM`i6)H0Myu#olXju)!J5(J;)4U@u#1p^Uw){(=Ye_3N3eHd_=};;=6EHVdr_FH_|OVFXy^Fzy#Foh#__
zd-tO1X<_Q4;cqDgcWJvloi31TG(7$Zw=`aU5FVmoRhQJXxTa1LIcP99_2#I|oIx&y
zHM)0aEMYXUTH~zC6_hb|Os2+`n@%L;iyurm>5;{cQjT%FkR^4Ia^7M1{f@GVD2J3M
zly@#OFQJ#1MaF@jXmOa%H~P1+R6ox1sAur@EDO(+-KRa=<*1>rd*$G${ghgcI-EoBdV1M}vBL(P59{S*@+M7Mo)c0h{*>${>?!
zam14`7k{vPZiS!zBlfdPGwCCmYb+=^P>|Xk`uHbEJWnmRlzGgK6Vw5E{KEYuE
zuxFWNnwZ-~iD#l9->_@^w|St^&w8xVoedp4MBXX-W(68$20(%D(7g&hV
z>PL2yN+B)`G%OICKp2=DcDE~@KK{+Sr>5oOr8A*#>+XR2#q(%9#hSq6bXLVM15trg
z-V{Poy!CyY3Q<1?wYSn}3t+Qsb6+)%!80FI{`w5t=a7D5V@j^IY`y1H&d<@cY8?AB
z%OFg&9}f3_KW~myqq=BVmZx)ImRb`}k|6|!K0+ow5qQ1eJ+pKu>R!JmlzSCQ#}T<;
zsfbIi-TR|cgC)b%F_mJWx(`)7!NvgMcrpx}!gWx@mzj`+@t4hm{1fcLtbXLFswxE_
z&`%T!D?Ke+*P6VFASNbGfPUA7t%J2S3a?(Z0K_y{(%8YFQlIUgn%S~?;e2?bHIGy|
z|JV;2Y)&54`qn&tUGj=K@=YqOe#valFbw1`n#axWOE_H`1;f&Vc1f7IJ+b4B=N(r`
zT^=5pz$Z0vq1{#N_H%Wf&f!@`kZ_MTPG#cWiK31D*wQchW%zZ?s1dD;V6ED>q&Si<
z_9AQV1WhxL{ACMYEYCR$ToX-<>*{M|%6x5jBlk>>WC{za?jsF~6*MV)Ty^phmoZ;TRjn>NplClA0lJ-6l5*k)Kl&P7%b3xPW0eFsn@jD@PyF;_
zO6TqsHZqv)-ffV{mwq8k(HJj7F9}YP&Y8Cj?)$_`jfuL5pjL{yfEwP?kvhxl%~K2u
zrR9AVSw_&vKy&JwcG?Rk>z#p2e#TL1zlS!xyU?T(qt^sQD+hXIL(
zWxNMZxs~~CIui_F}P;AkRwc9&}2rxlhFz@WY)KgIn$L!tF
za0%eHw-Y?S@ro;BJnyp^EStOx+K$PPvx&w;CwZm5N6Wd~DUw%2U)X{ryJ{QG$^HHZ
zXAKUYR*I)Ls}X1OkHsV4(+$yQa~H~l##UD=-;{uvYICEfmpxr&0!IAeAx4hSH)~kq
zxYQ_NSmgOGyYO*t$Q0}ll0`~sIs**=WIC;aJb$!*r4igusfR%#AnFaPB+6}(suuim
z7!kaUkJ3Ii-2~1y@g|zvK{O{k0RaJ9@&O>)G|HwOfRh!}j6zS@grI;=sizVKWh^c3
zyJj;(6UHURf55B~NS6eDNOgg1t8P}(O$OdqiO}o2S7S>Ir?9@YQ_c_V$R!_0?39mq
zwFQK%P&ImLxL$Tns|y2P&*4k)N!1>EdPv{Xg}u{?Oz!i!{RYOxEBdy?&E>Z@=M!u2
zCl!t@h!RnaAze}x#M5YJ_Ss|zTB1^wQFji|^nhiMp*op*Z$WPwJ4O*vNWj3{Yy=-8
z@}#b!$8C_{v)9G2g<=&5#N0%I)gTRey)<2M(ljyvXeI$J{mrnYv7+MmM=ymbS8ADd
zB}FAI5lBB4vS~v>gi7eL2nt6EnUK7u(&DA5F$Hz@xadAhP9rvrf4*dmKO!)?Sbok?
zZ2KuP#EqhJJXGyYF%b9dx;4a6r)x}GM)(g1ax(5&`e~y444AUwFR(RQKzui9e@;e9ML}x8qe6Z=tOM3-QCnPT5nHTk$p7)-XK~@DPI@W;3LlDbz
za*cME7y0UU!S~-(tYu_qYis-}sd3yU)+@=yxB*(`-y
zbH$fujHw&t`qpK>hW|ydj-XCMhPw7PuznZKnVr9SrTTvZEQL4yOFK=Oy_dYZy#=!-
zJycbxw_(^?DbI{Xzjg~3da_kz^!4=tZ;kth55wvFmCel}3=Cn?G`xJ#=~%Wr3Pm4&
zr4DD}mf}L9$gQOR`qo9;xJkZP^}GaEX`$Bd24ft(1D%49zou^*OTN`}hHo6aQ1K>V
z9J%v{!`$Ni@h5vz(9|yB|5u8W2W6cy5zXcMu@ktK4I84IoN;o_`1dtf7%?wAXvS{|xf0SW$Kj-tO
zhd6|%mv!d2Zb9pC?oYp!d&OT&Y*gAPe7#<^;EhP_B+kZs|D55F&&EgWXB^(4;qFW}
zYQcg6KU4jx22Ei4yd9cJ{dP-;=`X{+*oE;qQQaH#sjC(`wfA2J6a(?>tz8^!HFxxE
zzcr|}Y7{RCljHn-J#kIOS><#MOPLo%(%WK~^-lRMPNw5}!Y}R0jK^>c)5ZvU1>8Me
zAO(#y7p_c|4|zbAyR1BGu>(ypB!6&&ZO?@vl)AJ5;6F{X^O;}7caNjDhG>OSVjZs;
zzWlpPu7EcbfGv-=vu%pB@qp8c@83)^OcWrHpfr&D
zR~qW|k6&ba|1=*=0=WY1+s-J#ogl?@)3N^Zii6>>=&HNAx>67N$?RG7Gep1AA`VfW
z>d|<9u484*NL4)4OTy>+FHKJ|+W7Q4>;9pswQecBZ$>q>G7)^kV(J%PhDbl3e6HUz
z-qXyO-P`faKw`h|DSFwWMl34`@Pmw1!JCibh4a0n6MQI<-wd02Og3y#LW$?mo51=LTh6v
z`1d<2pTpA4@usTFlC-U>H-2x>*;Gp#iTadUl!d5Nvevhn!nPZiDT2vf}g!smc~M
zHa5Dt3t&s2iK(eUg_8^S)XEE3fBqVqIXkVZF7y|R#Bb0zc8BEENW#$djhosHc7(65
zGAQ#9{cp}l7%wPYt5t1YNrC%J2Qg`yI(
z+WpmnP@4V+=}+ORM)_<{!(AGVN*-%iN2l-@lUR`On%2##i{4-BOAiMMlgexDH9i@C
z;``!}bSChOeO!a{X1u5|Z-P`H)3n$Xb*X2iAMs4Rf9RG2frg*1Zkic3fUh^}(KLcu
zv&>hhZsQ13MC!#?`uXY)zK&1AN6Df%_33=_%*-={zXy%x2xChR7-l}wh#0=JCA4y-}BZv6wLuFK-gxD
zy5rB;DqpNf!|GCM$$6AgN}NsQmNkATl6UH2u998@xE)6a`I28wq;?Wr^lk(&8?YZ1
zKe01~Dp`Is>u|1%cI>-JAdWX^ofYojp;6N$uxvmNDyyWUku8Z79~AMpzgyQDS=C!2
z9J$QwWWFJ>tmbU7qglhO51cI(0wcaMlB%;3pq-4_V7qs+-L<+xvAXd4_2*_x>j!ffP^@;9Lkk^y
z>7|`(CV($@do{n-y5h)`EL_2Wc0RRGpojJvQF5N=9FPKtAFOb}>W(X2tU~QL>z%
zl_PWhIbjaUNhwBw1j-vFk$)Hn#y(FSf`pWC%h|0>IdwRM)GSWw}rjCore$zI=wiDIc^1SpRc+MMR
z(Tr68bWA0xnL)k-iM9=?XL>IaO=4BHx0k;4t~&w_i0;J;btKN{mi`~Vm1=hEge)Pn
z9q~ItwW{vjQdITa;^`$4O;#j-foMZX&9p@L>SU2pMbQ_nZ31Fl-5_Udc^4Kk+(f9G
zr!nC2p;H9NhXvlh7A0Y2#SmWN&-EwM=PWFd(lmf-Qa$(WuL1fJp?tSA3!3`|-c0gc
zShoEdr+z8My&)F1+S-row(r|C|0arp7YfOLkg(T$@MGF=8@8+RYXSzazhdTr@pT;-
z8z0i!yfv$LG{>{Rk-g*h>$k7EWG8R*Ks`za#192U=9Z}Gwk8TABeUPh_sihpEMYzD
zE>jmQ0h3>HN5sHFuCe|
z|M6TfR2^SYHA$egQm(Hr!Z4xe#jSY_x_$E@I$xxec^ZJ)Zv_ArSEL_oJ008xe!TsY
zlau3Pu;igGRhd4tD|IY$Ce@6(rlPD=NDupYzT1*Ck6Z_bV!!gLrb-?DBsb39E5DMR
zOzya()aUhF$HEI9vfOqEo*`z6CS(N@8v^-1qBi|4-4p@5g62B1QwY^0z5d}Nf!hk^1me#e|{%w058
za~UiVPYsVRy((T9dxfu4teY1lm@y~9$rbqA;#WS#Ig~5h
zI0nOaf#gGus@F<^c!rE8OKK}95NFX={`Jx5qi?^9-`m`91G6KZyaU&aW%Y-?gQx!6
z;Wc;N?{&q4w-=$6ZQKIW=AzABProZmu_$Ua2{e;kPIP)2j4@{ih9wUPpfLkY*Imc<
zX^!=24&(mOdI!I;F$Ez2){0^I(B&{i^Re?MoK$*d+f{RtuwG(&$?&V~Zepm2f_ZMQ
zCHylfnQ7r`vG_%no{wt(N9I%EqfS;pP)iEoe;yl7eMe;@s0(8tcs2~Ie|!!TVNUJbKY6}2d0fgvd^~@%l#9$WDC1v
zLS&e|AP@=;7B)krXmA^(vS(&yZqA*FkukcA7}??|pBsiq&5pp;*Zle|C*kQ3#hzCm
z_gypd1%~=0=kd_zlUbY1H?vEQcP~vWhZ#8he(0_0seeB!!!ful%wUW;9kjbt6x(mO
z7g4bt@Y6C(Y_4y0RV{d5a_AYbw{?1AH<0I)QP12HOV?5}P|0E-ox`~I8R2N+u@L!;
zPp}<>^%EJVNj2vVIycn^fUKhX~e#1`1ha1jVecuHUf@|IVxaR-A*dzRwQ-%}`)MC7~opF(Po&g`B2K
zn;%1N$d+8O3{3dL0w3Ep)K4uxcM|(&lP$DmQDupkLa~=q~3%09jYYr^YdU=$^IIh0GT}*Tysw5
ziyWp1p2I^yMemp7*Mu(?=M31b6?jsHwZPW`T?sW36J11&IdE1Dq}3QS7BJXoi*TqE
zaVsLN_SAb>Fh=AEa!
zz~~zvHS3^ZyheV&2PUA`xVt_9Mt})+LgiAHs65*aH8wNfAncJ+A4hVGq{x(7h;l%UJr#nVfuaaDJ|lLO(5G0t$qmr
zzcq@tJpJ3kU(ipMRrAfGQ+7agsao^raX&!Ii6`RIZM$7x^C)MDcrL3#@zna~AUF%
zF;S~9lsBvF$Ktmdl8lt*%x5|aj#BFfdw{fVZG^4Q;xc5^ko=&D)Bfh#
zt^Xln&7qrWSScaUv
zuMTtOQLL<%>Wd}_ya4}2=>)R;e3rbawYPmCWwHOLy}nqMI3h~8zDAq%b#=|d^_68>
zZp|X+61tTGaZ)f)Aw%y^F*T=u9Z{O@0|auyYeln{Ef)q7_kl;?4oDSHR8o5So80E=
z<~Ff+J~A?Le0)4NHOIeq0#?=C|RRKmbzUGe@6I0SaqNC
zYU(%T_WCzhW3D|61uv%OTkpnTvptw+@JQ-s4TVskrb|8Lalx4TsGGx+WV~>~zJ5
zJo`&zmr-;lzu2oQ)s%-O~-(^{FI)KH$R-aOA8$%TR9og!!sx-#YOn
zghIrgE2)0$1jyBp^$8ern7~h#nO}v#`IvleO$BE}De(h=^O`_=$%vEDij$KI531LB
z)wUw?&3loSd%MWlLDyFKiu&k1YihjUJ7rk=OzA0y)1TeibzD8fsoa*B0EYp2jR5v$
z)35&y))A$oqyz-OR?UmBgW_iWj#eM!r|3}ht%W44vTl`F;ha{dZ0t{
z9HmlZf2*IRvc^ih*u#f`i=hK;la4^>qAkwkPG@_23J(|~da%zgE;0q2Hh?zGsMY&s
zJWp=+n-2H*h5mbcC6Dgc&x<C2Tza&jC2w6hGnGg@tl=Ur
z1=QGWKN4aII)q)ZLij7Jw6s{SYC