Import all necessary libraries and files
import pandas as pd
import numpy as np
import tensorflow as tf
from tensorflow import keras
import matplotlib.pyplot as plt
# Load generation and weather-sensor CSVs for both solar plants.
# NOTE(review): paths are Colab-specific ("/content/drive/...") — the notebook
# assumes Google Drive is already mounted before this cell runs.
plant1_gen_dataset = pd.read_csv("/content/drive/My Drive/ML Project downloads/soloarpowergen/Plant_1_Generation_Data.csv")
plant1_weather_dataset = pd.read_csv("/content/drive/My Drive/ML Project downloads/soloarpowergen/Plant_1_Weather_Sensor_Data.csv")
plant2_gen_dataset = pd.read_csv("/content/drive/My Drive/ML Project downloads/soloarpowergen/Plant_2_Generation_Data.csv")
plant2_weather_dataset = pd.read_csv("/content/drive/My Drive/ML Project downloads/soloarpowergen/Plant_2_Weather_Sensor_Data.csv")
Let's get an overview of the data to get an idea of what we are working with.
# Peek at the first rows of plant 1's generation and weather frames.
plant1_gen_dataset.head(),plant1_weather_dataset.head()
( DATE_TIME PLANT_ID ... DAILY_YIELD TOTAL_YIELD 0 15-05-2020 00:00 4135001 ... 0.0 6259559.0 1 15-05-2020 00:00 4135001 ... 0.0 6183645.0 2 15-05-2020 00:00 4135001 ... 0.0 6987759.0 3 15-05-2020 00:00 4135001 ... 0.0 7602960.0 4 15-05-2020 00:00 4135001 ... 0.0 7158964.0 [5 rows x 7 columns], DATE_TIME PLANT_ID ... MODULE_TEMPERATURE IRRADIATION 0 2020-05-15 00:00:00 4135001 ... 22.857507 0.0 1 2020-05-15 00:15:00 4135001 ... 22.761668 0.0 2 2020-05-15 00:30:00 4135001 ... 22.592306 0.0 3 2020-05-15 00:45:00 4135001 ... 22.360852 0.0 4 2020-05-15 01:00:00 4135001 ... 22.165423 0.0 [5 rows x 6 columns])
# Dtypes and null counts. NOTE(review): this pairs plant 1 generation with
# plant 2 weather — presumably meant to cover both plants; confirm intent.
plant1_gen_dataset.info(),plant2_weather_dataset.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 68778 entries, 0 to 68777 Data columns (total 7 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 DATE_TIME 68778 non-null object 1 PLANT_ID 68778 non-null int64 2 SOURCE_KEY 68778 non-null object 3 DC_POWER 68778 non-null float64 4 AC_POWER 68778 non-null float64 5 DAILY_YIELD 68778 non-null float64 6 TOTAL_YIELD 68778 non-null float64 dtypes: float64(4), int64(1), object(2) memory usage: 3.7+ MB <class 'pandas.core.frame.DataFrame'> RangeIndex: 3259 entries, 0 to 3258 Data columns (total 6 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 DATE_TIME 3259 non-null object 1 PLANT_ID 3259 non-null int64 2 SOURCE_KEY 3259 non-null object 3 AMBIENT_TEMPERATURE 3259 non-null float64 4 MODULE_TEMPERATURE 3259 non-null float64 5 IRRADIATION 3259 non-null float64 dtypes: float64(3), int64(1), object(2) memory usage: 152.9+ KB
(None, None)
The DATE_TIME column is in a different format in each file. Let's standardise the formats and merge the files together.
from datetime import datetime
def standardise_datetime(date_str):
    """Parse a timestamp string in either of the two formats used by the CSVs.

    The weather files use 'YYYY-MM-DD HH:MM:SS' while the plant-1 generation
    file uses 'DD-MM-YYYY HH:MM'; try the former first, then fall back.

    Raises ValueError if the string matches neither format.
    """
    try:
        return datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S')
    except ValueError:
        # Was a bare `except:`, which silently swallowed every error (including
        # KeyboardInterrupt); only a format mismatch should trigger the fallback.
        return datetime.strptime(date_str, '%d-%m-%Y %H:%M')
# Standardise DATE_TIME on all four frames and order each by time.
# BUG FIX: the original `df = df.sort_values("DATE_TIME")` only rebound the
# loop variable, so the sorted result was discarded and the frames stayed
# unsorted; sorting in place actually mutates the original dataframes.
for df in [plant1_gen_dataset, plant1_weather_dataset, plant2_gen_dataset, plant2_weather_dataset]:
    df["DATE_TIME"] = df.DATE_TIME.apply(standardise_datetime)
    df.sort_values("DATE_TIME", inplace=True)
# Join each plant's generation rows to its weather readings on the shared
# timestamp, then stack the two plants into one frame. PLANT_ID is dropped
# from the weather side to avoid a duplicated column after the merge.
weather1 = plant1_weather_dataset.drop(["PLANT_ID"], axis=1)
weather2 = plant2_weather_dataset.drop(["PLANT_ID"], axis=1)
plant1_full = pd.merge(plant1_gen_dataset, weather1, how="inner", on="DATE_TIME")
plant2_full = pd.merge(plant2_gen_dataset, weather2, how="inner", on="DATE_TIME")
plant_all = pd.concat([plant1_full, plant2_full])
plant_all.head()
DATE_TIME | PLANT_ID | SOURCE_KEY_x | DC_POWER | AC_POWER | DAILY_YIELD | TOTAL_YIELD | SOURCE_KEY_y | AMBIENT_TEMPERATURE | MODULE_TEMPERATURE | IRRADIATION | |
---|---|---|---|---|---|---|---|---|---|---|---|
0 | 2020-05-15 | 4135001 | 1BY6WEcLGh8j5v7 | 0.0 | 0.0 | 0.0 | 6259559.0 | HmiyD2TTLFNqkNe | 25.184316 | 22.857507 | 0.0 |
1 | 2020-05-15 | 4135001 | 1IF53ai7Xc0U56Y | 0.0 | 0.0 | 0.0 | 6183645.0 | HmiyD2TTLFNqkNe | 25.184316 | 22.857507 | 0.0 |
2 | 2020-05-15 | 4135001 | 3PZuoBAID5Wc2HD | 0.0 | 0.0 | 0.0 | 6987759.0 | HmiyD2TTLFNqkNe | 25.184316 | 22.857507 | 0.0 |
3 | 2020-05-15 | 4135001 | 7JYdWkrLSPkdwr4 | 0.0 | 0.0 | 0.0 | 7602960.0 | HmiyD2TTLFNqkNe | 25.184316 | 22.857507 | 0.0 |
4 | 2020-05-15 | 4135001 | McdE0feGgRqW7Ca | 0.0 | 0.0 | 0.0 | 7158964.0 | HmiyD2TTLFNqkNe | 25.184316 | 22.857507 | 0.0 |
Next, we transform date and time in terms of sine and cosine to preserve its cyclical significance.
# Encode time-of-day and time-of-year as sine/cosine pairs so their cyclical
# structure is preserved (23:45 is close to 00:00, December close to January).
day = 24 * 60 * 60            # seconds per day
year = (365.2425) * day       # seconds per (mean Gregorian) year
seconds = plant_all.DATE_TIME.map(datetime.timestamp)
day_angle = seconds * (2 * np.pi / day)
year_angle = seconds * (2 * np.pi / year)
plant_all["DAY_COS"] = np.cos(day_angle)
plant_all["DAY_SIN"] = np.sin(day_angle)
plant_all["YEAR_COS"] = np.cos(year_angle)
plant_all["YEAR_SIN"] = np.sin(year_angle)
plant_all.describe().transpose()
count | mean | std | min | 25% | 50% | 75% | max | |
---|---|---|---|---|---|---|---|---|
PLANT_ID | 136472.0 | 4.135497e+06 | 4.999863e+02 | 4.135001e+06 | 4.135001e+06 | 4.135001e+06 | 4.136001e+06 | 4.136001e+06 |
DC_POWER | 136472.0 | 1.708374e+03 | 3.222079e+03 | 0.000000e+00 | 0.000000e+00 | 5.993333e+00 | 1.155595e+03 | 1.447112e+04 |
AC_POWER | 136472.0 | 2.747903e+02 | 3.801802e+02 | 0.000000e+00 | 0.000000e+00 | 3.493095e+00 | 5.325686e+02 | 1.410950e+03 |
DAILY_YIELD | 136472.0 | 3.295366e+03 | 3.035313e+03 | 0.000000e+00 | 2.828571e+01 | 2.834643e+03 | 5.992000e+03 | 9.873000e+03 |
TOTAL_YIELD | 136472.0 | 3.303916e+08 | 6.085769e+08 | 0.000000e+00 | 6.520020e+06 | 7.269333e+06 | 2.826096e+08 | 2.247916e+09 |
AMBIENT_TEMPERATURE | 136472.0 | 2.676307e+01 | 3.897340e+00 | 2.039850e+01 | 2.363760e+01 | 2.590812e+01 | 2.926658e+01 | 3.918164e+01 |
MODULE_TEMPERATURE | 136472.0 | 3.192074e+01 | 1.180367e+01 | 1.814042e+01 | 2.241170e+01 | 2.641376e+01 | 4.077858e+01 | 6.663595e+01 |
IRRADIATION | 136472.0 | 2.307669e-01 | 3.056523e-01 | 0.000000e+00 | 0.000000e+00 | 2.621286e-02 | 4.429609e-01 | 1.221652e+00 |
DAY_COS | 136472.0 | -7.166717e-03 | 7.076213e-01 | -1.000000e+00 | -7.071068e-01 | -7.235672e-12 | 7.071068e-01 | 1.000000e+00 |
DAY_SIN | 136472.0 | -3.279095e-03 | 7.065531e-01 | -1.000000e+00 | -7.071068e-01 | -2.187879e-12 | 7.071068e-01 | 1.000000e+00 |
YEAR_COS | 136472.0 | -8.543586e-01 | 8.547759e-02 | -9.721282e-01 | -9.302478e-01 | -8.700556e-01 | -7.855203e-01 | -6.812203e-01 |
YEAR_SIN | 136472.0 | 4.915960e-01 | 1.452533e-01 | 2.344501e-01 | 3.669318e-01 | 4.929536e-01 | 6.188359e-01 | 7.320785e-01 |
Let's plot DC_POWER over time to visualise how DC power generation and other features correlate with time.
# Pick one inverter (SOURCE_KEY) from each plant and plot its DC power series.
key1 = "xoJJ8DcxJEcupym"
key2 = "xMbIugepa2P7lBB"
df1 = plant_all[plant_all["SOURCE_KEY_x"] == key1]
df2 = plant_all[plant_all["SOURCE_KEY_x"] == key2]
no_of_days = 30
DAYS = no_of_days * 96  # 96 quarter-hour samples per day; not used by the plots below
df1.plot(x="DATE_TIME", y="DC_POWER", figsize=(20,4))
df2.plot(x="DATE_TIME", y="DC_POWER", figsize=(20,4))
<matplotlib.axes._subplots.AxesSubplot at 0x7f9fb12afb70>
no_of_days = 3
DAYS = no_of_days * 96  # quarter-hour samples in no_of_days days; unused below
# Plot temperature and irradiation for the same two inverters' plants.
df1.plot(x="DATE_TIME", y=["AMBIENT_TEMPERATURE", "MODULE_TEMPERATURE", "IRRADIATION"], figsize=(20,4))
df2.plot(x="DATE_TIME", y=["AMBIENT_TEMPERATURE", "MODULE_TEMPERATURE", "IRRADIATION"], figsize=(20,4))
# Earlier hand-rolled matplotlib version of the same plot, kept for reference:
# plt.figure(figsize=(8*no_of_days,4))
# plt.xticks(np.arange(0, DAYS, step=4), rotation=90)
# plt.plot(df1["DATE_TIME"][:DAYS], df1["AMBIENT_TEMPERATURE"][:DAYS], "r", label="AMBIENT_TEMPERATURE" )
# plt.plot(df1["DATE_TIME"][:DAYS], df1["MODULE_TEMPERATURE"][:DAYS], "b", label="MODULE_TEMPERATURE")
# plt.plot(df1["DATE_TIME"][:DAYS], df1["IRRADIATION"][:DAYS], "g", label="IRRADIATION")
# plt.plot(df2["DATE_TIME"][:DAYS], df2["AMBIENT_TEMPERATURE"][:DAYS], "r--", label="AMBIENT_TEMPERATURE" )
# plt.plot(df2["DATE_TIME"][:DAYS], df2["MODULE_TEMPERATURE"][:DAYS], "b--", label="MODULE_TEMPERATURE")
# plt.plot(df2["DATE_TIME"][:DAYS], df2["IRRADIATION"][:DAYS], "g--", label="IRRADIATION")
# plt.legend()
<matplotlib.axes._subplots.AxesSubplot at 0x7f9faf967940>
Let's drop the columns that we will not need.
# Preview the feature set for one inverter after dropping identifier columns
# and targets we will not feed to the model.
key = "1BY6WEcLGh8j5v7"
plant_all[plant_all["SOURCE_KEY_x"] == key].drop(["PLANT_ID", "SOURCE_KEY_x", "SOURCE_KEY_y", "IRRADIATION", "TOTAL_YIELD", "DAILY_YIELD"], axis=1)
DATE_TIME | DC_POWER | AC_POWER | AMBIENT_TEMPERATURE | MODULE_TEMPERATURE | DAY_COS | DAY_SIN | YEAR_COS | YEAR_SIN | |
---|---|---|---|---|---|---|---|---|---|
0 | 2020-05-15 00:00:00 | 0.0 | 0.0 | 25.184316 | 22.857507 | 1.000000 | -9.444237e-12 | -0.681220 | 0.732078 |
21 | 2020-05-15 00:15:00 | 0.0 | 0.0 | 25.084589 | 22.761668 | 0.997859 | 6.540313e-02 | -0.681351 | 0.731956 |
42 | 2020-05-15 00:30:00 | 0.0 | 0.0 | 24.935753 | 22.592306 | 0.991445 | 1.305262e-01 | -0.681483 | 0.731834 |
63 | 2020-05-15 00:45:00 | 0.0 | 0.0 | 24.846130 | 22.360852 | 0.980785 | 1.950903e-01 | -0.681614 | 0.731712 |
84 | 2020-05-15 01:00:00 | 0.0 | 0.0 | 24.621525 | 22.165423 | 0.965926 | 2.588190e-01 | -0.681745 | 0.731590 |
... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
68664 | 2020-06-17 22:45:00 | 0.0 | 0.0 | 22.150570 | 21.480377 | 0.946930 | -3.214395e-01 | -0.971960 | 0.235147 |
68686 | 2020-06-17 23:00:00 | 0.0 | 0.0 | 22.129816 | 21.389024 | 0.965926 | -2.588190e-01 | -0.972002 | 0.234973 |
68708 | 2020-06-17 23:15:00 | 0.0 | 0.0 | 22.008275 | 20.709211 | 0.980785 | -1.950903e-01 | -0.972044 | 0.234798 |
68730 | 2020-06-17 23:30:00 | 0.0 | 0.0 | 21.969495 | 20.734963 | 0.991445 | -1.305262e-01 | -0.972086 | 0.234624 |
68752 | 2020-06-17 23:45:00 | 0.0 | 0.0 | 21.909288 | 20.427972 | 0.997859 | -6.540313e-02 | -0.972128 | 0.234450 |
3154 rows × 9 columns
Let's cut the data into short sequences which we will use to train the model.
# Complete 15-minute timestamp grid from 15 May to 17 June 2020; used below to
# detect gaps in each inverter's series and pad them with placeholder rows.
date_list = np.array(pd.DataFrame(pd.date_range(start='15-05-2020', end='17-06-2020', freq="15min"), columns=["DATETIME"]).DATETIME.apply(lambda x: standardise_datetime(str(x))))
def get_seq(days=1):
    """Cut each inverter's time series into fixed-length training sequences.

    Walks the complete 15-minute grid (`date_list`); where the inverter has a
    reading at the grid timestamp the feature row is used, otherwise a
    placeholder row is inserted. Every `days * 96` rows one sequence is
    emitted. Returns a float32 array of shape (n_sequences, length, 8).
    """
    length = days * 96  # 96 quarter-hour readings per day
    dataset = []
    count = 0  # cursor into the current inverter's rows
    # Padding row for grid timestamps with no reading: all zeros. The
    # commented-out line below suggests forward-filling was tried instead.
    prev_value = np.array([0 for i in range(8)])
    for key in plant_all.SOURCE_KEY_x.unique():
        new_set = []
        # Rows for this inverter with DATE_TIME in column 0 for matching;
        # the remaining 8 columns are the model features.
        key_set = plant_all[plant_all["SOURCE_KEY_x"] == key].drop(["PLANT_ID", "SOURCE_KEY_x", "SOURCE_KEY_y", "IRRADIATION", "TOTAL_YIELD", "DAILY_YIELD"], axis=1).to_numpy()
        for d in date_list:
            # NOTE(review): assumes key_set is time-ordered like date_list and
            # that `count` never runs past the end of key_set — confirm; a
            # trailing gap in the data would raise IndexError here.
            if d == key_set[count][0]:
                new_set.append(key_set[count][1:])
                count+=1
            else:
                new_set.append(prev_value)
                # prev_value = key_set[count][1:]
            if len(new_set) >= length:
                # Sequence complete — emit it and start collecting the next.
                dataset.append(new_set)
                new_set = []
        count = 0  # reset the row cursor for the next inverter
    return np.array(dataset).astype(np.float32)
#Convert to 1hr interval
# Build sequences of ~2.4 days of 15-minute data, then keep every 4th step to
# downsample each sequence to a 1-hour interval (printed shape: (572, 58, 8)).
dc_dataset = get_seq(days = 2.4)
dc_dataset = dc_dataset[:,::4]
print(dc_dataset.shape)
plt.plot(dc_dataset[0,:,0], "b.-")
(572, 58, 8)
[<matplotlib.lines.Line2D at 0x7f9faf9214e0>]
We will shuffle the dataset and then split them into training and validation sets.
# Shuffle the sequences in place, then carve out 70% train / 20% validation,
# leaving the remaining ~10% as the test set.
np.random.shuffle(dc_dataset)
total = len(dc_dataset)
train_size = total * 70 // 100
valid_size = total * 20 // 100
cut = train_size + valid_size
train_set = dc_dataset[:train_size]
valid_set = dc_dataset[train_size:cut]
test_set = dc_dataset[cut:]
train_set.shape, valid_set.shape, test_set.shape
((400, 58, 8), (114, 58, 8), (58, 58, 8))
Normalising the data so that all features are on a similar scale.
#Normalising
# Scale with statistics computed on the training set only, so no information
# from the validation/test sets leaks into the preprocessing.
# NOTE(review): .mean()/.std() with no axis give ONE scalar shared across all
# 8 features — per-feature statistics would be the usual choice; confirm.
train_mean = train_set.mean()
train_std = train_set.std()
train_set = (train_set-train_mean)/train_std
valid_set = (valid_set-train_mean)/train_std
test_set = (test_set-train_mean)/train_std
plt.plot(test_set[5,:,0], "b.-")
[<matplotlib.lines.Line2D at 0x7f9faf9d5b38>]
The plot below shows a short sequence of DC power over time. The sequence is split into blue and red sections. The blue section is the sequence used as the model's input, and the red section is the sequence against which the model's output will be evaluated.
# Feed the model the first 34 hourly steps and predict the following 24 hours.
n_steps = 34
pred_steps = 24
# Hourly timestamp labels used as the x-axis of the plots below.
date_list_hr = np.array(
    pd.DataFrame(pd.date_range(start='15-05-2020', end='17-06-2020', freq="1h"), columns=["DATETIME"]).DATETIME.apply(lambda x: standardise_datetime(str(x)).strftime('%d-%m-%Y %H:%M'))
)
# X keeps all 8 features; y is DC_POWER only (feature column 0, kept 3-D).
X_train, y_train = train_set[:, :n_steps], train_set[:, n_steps: n_steps+pred_steps, :1]
X_valid, y_valid = valid_set[:, :n_steps], valid_set[:, n_steps: n_steps+pred_steps, :1]
X_test, y_test = test_set[:, :n_steps], test_set[:, n_steps: n_steps+pred_steps, :1]
print(X_train.shape, y_train.shape)
# Show one training window: input (blue) followed by the target span (red).
plt.figure(figsize=(20,4))
plt.xticks(np.arange(0, n_steps+pred_steps, step=2), rotation=90)
plt.plot(date_list_hr[:n_steps],X_train[0,:,0],"b.-")
plt.plot(date_list_hr[n_steps:n_steps+pred_steps],y_train[0,:,0],"r.-")
(400, 34, 8) (400, 24, 1)
[<matplotlib.lines.Line2D at 0x7f9fb132d7b8>]
We will tweak the dataset so that the model can make longer predictions at every time step
def pred_every_timestep(dataset, n_steps, pred_steps):
    """Build targets so the model forecasts `pred_steps` future DC_POWER
    values at every one of the first `n_steps` timesteps.

    Returns an array of shape (batch, n_steps, pred_steps) where
    Y[b, t, k] == dataset[b, t + k + 1, 0].
    """
    batch = dataset.shape[0]
    Y = np.empty((batch, n_steps, pred_steps))
    for offset in range(pred_steps):
        # Column `offset` is DC_POWER (feature 0) shifted offset+1 steps ahead.
        Y[..., offset] = dataset[..., offset + 1:offset + 1 + n_steps, 0]
    return Y
# Multi-horizon targets for the alternate sequence-to-sequence setup
# (a 24-step forecast at every one of the 34 input timesteps).
y_train_2 = pred_every_timestep(train_set, n_steps, pred_steps)
y_valid_2 = pred_every_timestep(valid_set, n_steps, pred_steps)
y_test_2 = pred_every_timestep(test_set, n_steps, pred_steps)
X_valid.shape, y_valid_2.shape
((114, 34, 8), (114, 34, 24))
# Fix seeds for reproducible weight init and shuffling.
np.random.seed(42)
tf.random.set_seed(42)
# Two stacked LSTMs; the final Dense emits all 24 future steps at once from
# the last hidden state (sequence-to-vector).
model = keras.models.Sequential([
    keras.layers.LSTM(20, return_sequences=True),
    keras.layers.LSTM(20),
    keras.layers.Dense(pred_steps),
    # keras.layers.Reshape([pred_steps, 5])
])
# Stop after 20 epochs with no val_loss improvement (restoring best weights);
# halve the learning rate when val_loss plateaus.
early_stopping_cb = keras.callbacks.EarlyStopping(patience=20, restore_best_weights=True)
reduce_lr_cb = keras.callbacks.ReduceLROnPlateau(factor=0.5)
model.compile(loss="mse", optimizer="adam")
# NOTE(review): y_train has shape (batch, 24, 1) while the model outputs
# (batch, 24) — Keras broadcasts these inside the MSE; confirm this is intended.
history = model.fit(X_train, y_train, epochs=200,
                    validation_data=(X_valid, y_valid), callbacks=[early_stopping_cb,reduce_lr_cb])
Epoch 1/200 13/13 [==============================] - 1s 81ms/step - loss: 7.5098 - val_loss: 6.8339 Epoch 2/200 13/13 [==============================] - 0s 20ms/step - loss: 7.1988 - val_loss: 6.4846 Epoch 3/200 13/13 [==============================] - 0s 22ms/step - loss: 6.7559 - val_loss: 6.0654 Epoch 4/200 13/13 [==============================] - 0s 21ms/step - loss: 6.1996 - val_loss: 5.4761 Epoch 5/200 13/13 [==============================] - 0s 21ms/step - loss: 5.6152 - val_loss: 4.9993 Epoch 6/200 13/13 [==============================] - 0s 20ms/step - loss: 5.1046 - val_loss: 4.5714 Epoch 7/200 13/13 [==============================] - 0s 22ms/step - loss: 4.6689 - val_loss: 4.2159 Epoch 8/200 13/13 [==============================] - 0s 21ms/step - loss: 4.3087 - val_loss: 3.9259 Epoch 9/200 13/13 [==============================] - 0s 21ms/step - loss: 4.0143 - val_loss: 3.6809 Epoch 10/200 13/13 [==============================] - 0s 22ms/step - loss: 3.7713 - val_loss: 3.4720 Epoch 11/200 13/13 [==============================] - 0s 22ms/step - loss: 3.5608 - val_loss: 3.2894 Epoch 12/200 13/13 [==============================] - 0s 21ms/step - loss: 3.3707 - val_loss: 3.1287 Epoch 13/200 13/13 [==============================] - 0s 21ms/step - loss: 3.2055 - val_loss: 2.9817 Epoch 14/200 13/13 [==============================] - 0s 21ms/step - loss: 3.0480 - val_loss: 2.8386 Epoch 15/200 13/13 [==============================] - 0s 22ms/step - loss: 2.9055 - val_loss: 2.7103 Epoch 16/200 13/13 [==============================] - 0s 21ms/step - loss: 2.7755 - val_loss: 2.5841 Epoch 17/200 13/13 [==============================] - 0s 21ms/step - loss: 2.6487 - val_loss: 2.4725 Epoch 18/200 13/13 [==============================] - 0s 22ms/step - loss: 2.5356 - val_loss: 2.3677 Epoch 19/200 13/13 [==============================] - 0s 21ms/step - loss: 2.4291 - val_loss: 2.2671 Epoch 20/200 13/13 [==============================] - 0s 21ms/step - loss: 2.3277 - 
val_loss: 2.1694 Epoch 21/200 13/13 [==============================] - 0s 21ms/step - loss: 2.2279 - val_loss: 2.0815 Epoch 22/200 13/13 [==============================] - 0s 21ms/step - loss: 2.1344 - val_loss: 1.9958 Epoch 23/200 13/13 [==============================] - 0s 21ms/step - loss: 2.0468 - val_loss: 1.9114 Epoch 24/200 13/13 [==============================] - 0s 21ms/step - loss: 1.9583 - val_loss: 1.8339 Epoch 25/200 13/13 [==============================] - 0s 20ms/step - loss: 1.8834 - val_loss: 1.7633 Epoch 26/200 13/13 [==============================] - 0s 20ms/step - loss: 1.8112 - val_loss: 1.6936 Epoch 27/200 13/13 [==============================] - 0s 22ms/step - loss: 1.7419 - val_loss: 1.6274 Epoch 28/200 13/13 [==============================] - 0s 22ms/step - loss: 1.6679 - val_loss: 1.5560 Epoch 29/200 13/13 [==============================] - 0s 21ms/step - loss: 1.6039 - val_loss: 1.5005 Epoch 30/200 13/13 [==============================] - 0s 22ms/step - loss: 1.5417 - val_loss: 1.4452 Epoch 31/200 13/13 [==============================] - 0s 21ms/step - loss: 1.4841 - val_loss: 1.3938 Epoch 32/200 13/13 [==============================] - 0s 21ms/step - loss: 1.4329 - val_loss: 1.3472 Epoch 33/200 13/13 [==============================] - 0s 21ms/step - loss: 1.3836 - val_loss: 1.3002 Epoch 34/200 13/13 [==============================] - 0s 21ms/step - loss: 1.3383 - val_loss: 1.2706 Epoch 35/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2992 - val_loss: 1.2186 Epoch 36/200 13/13 [==============================] - 0s 21ms/step - loss: 1.2589 - val_loss: 1.1931 Epoch 37/200 13/13 [==============================] - 0s 21ms/step - loss: 1.2269 - val_loss: 1.1577 Epoch 38/200 13/13 [==============================] - 0s 20ms/step - loss: 1.1932 - val_loss: 1.1192 Epoch 39/200 13/13 [==============================] - 0s 21ms/step - loss: 1.1488 - val_loss: 1.0826 Epoch 40/200 13/13 [==============================] - 0s 
22ms/step - loss: 1.1117 - val_loss: 1.0494 Epoch 41/200 13/13 [==============================] - 0s 21ms/step - loss: 1.0806 - val_loss: 1.0460 Epoch 42/200 13/13 [==============================] - 0s 21ms/step - loss: 1.0598 - val_loss: 0.9967 Epoch 43/200 13/13 [==============================] - 0s 21ms/step - loss: 1.0221 - val_loss: 0.9628 Epoch 44/200 13/13 [==============================] - 0s 22ms/step - loss: 0.9921 - val_loss: 0.9402 Epoch 45/200 13/13 [==============================] - 0s 21ms/step - loss: 0.9681 - val_loss: 0.9144 Epoch 46/200 13/13 [==============================] - 0s 21ms/step - loss: 0.9449 - val_loss: 0.8868 Epoch 47/200 13/13 [==============================] - 0s 22ms/step - loss: 0.9225 - val_loss: 0.8636 Epoch 48/200 13/13 [==============================] - 0s 21ms/step - loss: 0.8961 - val_loss: 0.8439 Epoch 49/200 13/13 [==============================] - 0s 21ms/step - loss: 0.8736 - val_loss: 0.8218 Epoch 50/200 13/13 [==============================] - 0s 21ms/step - loss: 0.8530 - val_loss: 0.8030 Epoch 51/200 13/13 [==============================] - 0s 22ms/step - loss: 0.8334 - val_loss: 0.7832 Epoch 52/200 13/13 [==============================] - 0s 21ms/step - loss: 0.8146 - val_loss: 0.7644 Epoch 53/200 13/13 [==============================] - 0s 21ms/step - loss: 0.7960 - val_loss: 0.7471 Epoch 54/200 13/13 [==============================] - 0s 22ms/step - loss: 0.7794 - val_loss: 0.7319 Epoch 55/200 13/13 [==============================] - 0s 21ms/step - loss: 0.7636 - val_loss: 0.7152 Epoch 56/200 13/13 [==============================] - 0s 21ms/step - loss: 0.7461 - val_loss: 0.6977 Epoch 57/200 13/13 [==============================] - 0s 22ms/step - loss: 0.7308 - val_loss: 0.6817 Epoch 58/200 13/13 [==============================] - 0s 21ms/step - loss: 0.7146 - val_loss: 0.6695 Epoch 59/200 13/13 [==============================] - 0s 22ms/step - loss: 0.6994 - val_loss: 0.6533 Epoch 60/200 13/13 
[==============================] - 0s 21ms/step - loss: 0.6846 - val_loss: 0.6394 Epoch 61/200 13/13 [==============================] - 0s 21ms/step - loss: 0.6718 - val_loss: 0.6265 Epoch 62/200 13/13 [==============================] - 0s 21ms/step - loss: 0.6709 - val_loss: 0.6453 Epoch 63/200 13/13 [==============================] - 0s 22ms/step - loss: 0.6669 - val_loss: 0.6207 Epoch 64/200 13/13 [==============================] - 0s 22ms/step - loss: 0.6454 - val_loss: 0.6032 Epoch 65/200 13/13 [==============================] - 0s 22ms/step - loss: 0.6255 - val_loss: 0.5779 Epoch 66/200 13/13 [==============================] - 0s 21ms/step - loss: 0.6089 - val_loss: 0.5653 Epoch 67/200 13/13 [==============================] - 0s 22ms/step - loss: 0.5941 - val_loss: 0.5516 Epoch 68/200 13/13 [==============================] - 0s 22ms/step - loss: 0.5826 - val_loss: 0.5437 Epoch 69/200 13/13 [==============================] - 0s 22ms/step - loss: 0.5723 - val_loss: 0.5292 Epoch 70/200 13/13 [==============================] - 0s 21ms/step - loss: 0.5612 - val_loss: 0.5218 Epoch 71/200 13/13 [==============================] - 0s 22ms/step - loss: 0.5512 - val_loss: 0.5105 Epoch 72/200 13/13 [==============================] - 0s 21ms/step - loss: 0.5411 - val_loss: 0.4980 Epoch 73/200 13/13 [==============================] - 0s 22ms/step - loss: 0.5303 - val_loss: 0.4903 Epoch 74/200 13/13 [==============================] - 0s 22ms/step - loss: 0.5214 - val_loss: 0.4836 Epoch 75/200 13/13 [==============================] - 0s 21ms/step - loss: 0.5113 - val_loss: 0.4730 Epoch 76/200 13/13 [==============================] - 0s 21ms/step - loss: 0.5035 - val_loss: 0.4624 Epoch 77/200 13/13 [==============================] - 0s 22ms/step - loss: 0.4941 - val_loss: 0.4591 Epoch 78/200 13/13 [==============================] - 0s 21ms/step - loss: 0.4884 - val_loss: 0.4475 Epoch 79/200 13/13 [==============================] - 0s 21ms/step - loss: 0.4790 - val_loss: 
0.4390 Epoch 80/200 13/13 [==============================] - 0s 21ms/step - loss: 0.4717 - val_loss: 0.4318 Epoch 81/200 13/13 [==============================] - 0s 22ms/step - loss: 0.4665 - val_loss: 0.4304 Epoch 82/200 13/13 [==============================] - 0s 22ms/step - loss: 0.4596 - val_loss: 0.4203 Epoch 83/200 13/13 [==============================] - 0s 21ms/step - loss: 0.4524 - val_loss: 0.4153 Epoch 84/200 13/13 [==============================] - 0s 22ms/step - loss: 0.4450 - val_loss: 0.4109 Epoch 85/200 13/13 [==============================] - 0s 22ms/step - loss: 0.4403 - val_loss: 0.4033 Epoch 86/200 13/13 [==============================] - 0s 22ms/step - loss: 0.4331 - val_loss: 0.3981 Epoch 87/200 13/13 [==============================] - 0s 22ms/step - loss: 0.4300 - val_loss: 0.3897 Epoch 88/200 13/13 [==============================] - 0s 22ms/step - loss: 0.4226 - val_loss: 0.3849 Epoch 89/200 13/13 [==============================] - 0s 21ms/step - loss: 0.4224 - val_loss: 0.3878 Epoch 90/200 13/13 [==============================] - 0s 22ms/step - loss: 0.4142 - val_loss: 0.3783 Epoch 91/200 13/13 [==============================] - 0s 21ms/step - loss: 0.4082 - val_loss: 0.3726 Epoch 92/200 13/13 [==============================] - 0s 22ms/step - loss: 0.4021 - val_loss: 0.3662 Epoch 93/200 13/13 [==============================] - 0s 21ms/step - loss: 0.3977 - val_loss: 0.3651 Epoch 94/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3950 - val_loss: 0.3584 Epoch 95/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3912 - val_loss: 0.3625 Epoch 96/200 13/13 [==============================] - 0s 21ms/step - loss: 0.3902 - val_loss: 0.3566 Epoch 97/200 13/13 [==============================] - 0s 23ms/step - loss: 0.3851 - val_loss: 0.3482 Epoch 98/200 13/13 [==============================] - 0s 21ms/step - loss: 0.3807 - val_loss: 0.3426 Epoch 99/200 13/13 [==============================] - 0s 21ms/step - 
loss: 0.3753 - val_loss: 0.3404 Epoch 100/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3713 - val_loss: 0.3372 Epoch 101/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3684 - val_loss: 0.3354 Epoch 102/200 13/13 [==============================] - 0s 21ms/step - loss: 0.3654 - val_loss: 0.3359 Epoch 103/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3635 - val_loss: 0.3283 Epoch 104/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3591 - val_loss: 0.3273 Epoch 105/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3555 - val_loss: 0.3252 Epoch 106/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3534 - val_loss: 0.3231 Epoch 107/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3546 - val_loss: 0.3178 Epoch 108/200 13/13 [==============================] - 0s 21ms/step - loss: 0.3509 - val_loss: 0.3215 Epoch 109/200 13/13 [==============================] - 0s 21ms/step - loss: 0.3491 - val_loss: 0.3164 Epoch 110/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3488 - val_loss: 0.3162 Epoch 111/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3447 - val_loss: 0.3146 Epoch 112/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3416 - val_loss: 0.3125 Epoch 113/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3387 - val_loss: 0.3090 Epoch 114/200 13/13 [==============================] - 0s 21ms/step - loss: 0.3370 - val_loss: 0.3074 Epoch 115/200 13/13 [==============================] - 0s 21ms/step - loss: 0.3392 - val_loss: 0.3055 Epoch 116/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3345 - val_loss: 0.3010 Epoch 117/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3313 - val_loss: 0.2982 Epoch 118/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3261 - val_loss: 0.2977 Epoch 119/200 13/13 
[==============================] - 0s 21ms/step - loss: 0.3272 - val_loss: 0.2946 Epoch 120/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3283 - val_loss: 0.3069 Epoch 121/200 13/13 [==============================] - 0s 23ms/step - loss: 0.3288 - val_loss: 0.2938 Epoch 122/200 13/13 [==============================] - 0s 21ms/step - loss: 0.3276 - val_loss: 0.2988 Epoch 123/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3400 - val_loss: 0.3082 Epoch 124/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3473 - val_loss: 0.2992 Epoch 125/200 13/13 [==============================] - 0s 23ms/step - loss: 0.3329 - val_loss: 0.2986 Epoch 126/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3312 - val_loss: 0.3060 Epoch 127/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3244 - val_loss: 0.2939 Epoch 128/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3148 - val_loss: 0.2846 Epoch 129/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3164 - val_loss: 0.2820 Epoch 130/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3091 - val_loss: 0.2803 Epoch 131/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3068 - val_loss: 0.2817 Epoch 132/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3044 - val_loss: 0.2757 Epoch 133/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3013 - val_loss: 0.2814 Epoch 134/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3005 - val_loss: 0.2779 Epoch 135/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3011 - val_loss: 0.2725 Epoch 136/200 13/13 [==============================] - 0s 22ms/step - loss: 0.3007 - val_loss: 0.2902 Epoch 137/200 13/13 [==============================] - 0s 23ms/step - loss: 0.3004 - val_loss: 0.2746 Epoch 138/200 13/13 [==============================] - 0s 22ms/step - loss: 
0.2950 - val_loss: 0.2819 Epoch 139/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2981 - val_loss: 0.2773 Epoch 140/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2958 - val_loss: 0.2661 Epoch 141/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2924 - val_loss: 0.2684 Epoch 142/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2886 - val_loss: 0.2646 Epoch 143/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2925 - val_loss: 0.2685 Epoch 144/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2895 - val_loss: 0.2642 Epoch 145/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2895 - val_loss: 0.2633 Epoch 146/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2846 - val_loss: 0.2630 Epoch 147/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2855 - val_loss: 0.2621 Epoch 148/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2837 - val_loss: 0.2663 Epoch 149/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2841 - val_loss: 0.2590 Epoch 150/200 13/13 [==============================] - 0s 24ms/step - loss: 0.2799 - val_loss: 0.2627 Epoch 151/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2801 - val_loss: 0.2555 Epoch 152/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2800 - val_loss: 0.2576 Epoch 153/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2755 - val_loss: 0.2527 Epoch 154/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2756 - val_loss: 0.2580 Epoch 155/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2755 - val_loss: 0.2583 Epoch 156/200 13/13 [==============================] - 0s 23ms/step - loss: 0.2795 - val_loss: 0.2635 Epoch 157/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2753 - val_loss: 0.2565 Epoch 158/200 13/13 
[==============================] - 0s 22ms/step - loss: 0.2745 - val_loss: 0.2496 Epoch 159/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2727 - val_loss: 0.2477 Epoch 160/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2706 - val_loss: 0.2486 Epoch 161/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2687 - val_loss: 0.2473 Epoch 162/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2662 - val_loss: 0.2492 Epoch 163/200 13/13 [==============================] - 0s 23ms/step - loss: 0.2690 - val_loss: 0.2437 Epoch 164/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2654 - val_loss: 0.2465 Epoch 165/200 13/13 [==============================] - 0s 23ms/step - loss: 0.2631 - val_loss: 0.2470 Epoch 166/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2611 - val_loss: 0.2503 Epoch 167/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2626 - val_loss: 0.2407 Epoch 168/200 13/13 [==============================] - 0s 23ms/step - loss: 0.2587 - val_loss: 0.2403 Epoch 169/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2622 - val_loss: 0.2422 Epoch 170/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2614 - val_loss: 0.2423 Epoch 171/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2564 - val_loss: 0.2374 Epoch 172/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2555 - val_loss: 0.2367 Epoch 173/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2556 - val_loss: 0.2382 Epoch 174/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2529 - val_loss: 0.2345 Epoch 175/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2507 - val_loss: 0.2344 Epoch 176/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2515 - val_loss: 0.2365 Epoch 177/200 13/13 [==============================] - 0s 22ms/step - loss: 
0.2518 - val_loss: 0.2325 Epoch 178/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2506 - val_loss: 0.2358 Epoch 179/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2502 - val_loss: 0.2320 Epoch 180/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2495 - val_loss: 0.2326 Epoch 181/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2481 - val_loss: 0.2307 Epoch 182/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2494 - val_loss: 0.2369 Epoch 183/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2538 - val_loss: 0.2380 Epoch 184/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2536 - val_loss: 0.2437 Epoch 185/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2480 - val_loss: 0.2321 Epoch 186/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2449 - val_loss: 0.2300 Epoch 187/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2446 - val_loss: 0.2235 Epoch 188/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2443 - val_loss: 0.2266 Epoch 189/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2460 - val_loss: 0.2253 Epoch 190/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2422 - val_loss: 0.2264 Epoch 191/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2425 - val_loss: 0.2252 Epoch 192/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2405 - val_loss: 0.2254 Epoch 193/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2428 - val_loss: 0.2235 Epoch 194/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2366 - val_loss: 0.2194 Epoch 195/200 13/13 [==============================] - 0s 21ms/step - loss: 0.2347 - val_loss: 0.2191 Epoch 196/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2374 - val_loss: 0.2205 Epoch 197/200 13/13 
[==============================] - 0s 21ms/step - loss: 0.2398 - val_loss: 0.2220 Epoch 198/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2368 - val_loss: 0.2193 Epoch 199/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2395 - val_loss: 0.2234 Epoch 200/200 13/13 [==============================] - 0s 22ms/step - loss: 0.2342 - val_loss: 0.2161
Let's plot the validation loss against the training loss to determine whether the model is underfitting or overfitting.
# Overlay validation and training loss per epoch on one axes.
# Order matters: "val" is drawn first so the default color cycle matches.
for key, lbl in (("val_loss", "val"), ("loss", "train")):
    plt.plot(history.history[key], label=lbl)
plt.legend()
<matplotlib.legend.Legend at 0x7f9fae89b358>
After training the model, we can feed it test instances and compare its predictions against the actual results.
# Visualize the first 5 test windows: blue = observed history,
# red = ground-truth future, green = model forecast.
y_pred = model.predict(X_test)

plt.figure(figsize=(10, 8))
for idx in range(5):
    plt.subplot(5, 1, idx + 1)
    plt.xticks([])  # hide per-subplot tick labels to reduce clutter
    past_x = date_list_hr[:n_steps]
    future_x = date_list_hr[n_steps: n_steps + pred_steps]
    plt.plot(past_x, X_test[idx, :, 0], "b.-")
    plt.plot(future_x, y_test[idx, :, 0], "r.-", label="real")
    plt.plot(future_x, y_pred[idx, :], "g.-", label="pred")
    plt.legend()
# NOTE(review): this runs after the loop, so it only re-enables ticks on the
# last subplot's axes — presumably intentional, to label the shared x-axis once.
plt.xticks(np.arange(0, n_steps + pred_steps, step=12), rotation=90)
plt.show()
# Fix both NumPy's and TensorFlow's RNG seeds so weight initialization and
# data shuffling are reproducible across runs.
np.random.seed(42)
tf.random.set_seed(42)
def last_time_step_mse(Y_true, Y_pred):
    """Per-sample MSE evaluated only at the final time step of each sequence.

    Useful as a Keras metric when the model is trained on every time step
    but we only care about how well the last prediction matches.
    """
    final_true = Y_true[:, -1]
    final_pred = Y_pred[:, -1]
    return keras.metrics.mean_squared_error(final_true, final_pred)
# Conv1D front-end (kernel 4, stride 2) downsamples the input sequence before
# two stacked LSTMs; a Dense head emits pred_steps values per remaining step.
model_conv = keras.models.Sequential()
model_conv.add(keras.layers.Conv1D(filters=256, kernel_size=4, strides=2,
                                   padding="valid", activation='relu'))
model_conv.add(keras.layers.LSTM(20, return_sequences=True))
model_conv.add(keras.layers.LSTM(20, return_sequences=True))
model_conv.add(keras.layers.Dense(pred_steps))
# Train on MSE; track the custom last-step MSE metric alongside the loss.
model_conv.compile(loss="mse", optimizer="adam", metrics=[last_time_step_mse])
# Target slicing 3::2: the conv layer's first output covers input steps 0-3
# (kernel_size=4) and then advances 2 input steps at a time (stride=2), so the
# labels must skip the first 3 steps and keep every 2nd one to stay aligned.
history = model_conv.fit(X_train, y_train_2[:, 3::2], epochs=200,
validation_data=(X_valid, y_valid_2[:, 3::2]))
Epoch 1/200 13/13 [==============================] - 1s 68ms/step - loss: 7.5627 - last_time_step_mse: 7.2081 - val_loss: 6.8268 - val_last_time_step_mse: 6.3753 Epoch 2/200 13/13 [==============================] - 0s 17ms/step - loss: 7.0849 - last_time_step_mse: 6.7136 - val_loss: 6.4985 - val_last_time_step_mse: 6.1146 Epoch 3/200 13/13 [==============================] - 0s 16ms/step - loss: 6.7129 - last_time_step_mse: 6.3394 - val_loss: 6.0371 - val_last_time_step_mse: 5.5281 Epoch 4/200 13/13 [==============================] - 0s 15ms/step - loss: 6.2240 - last_time_step_mse: 5.7208 - val_loss: 5.5996 - val_last_time_step_mse: 5.0934 Epoch 5/200 13/13 [==============================] - 0s 17ms/step - loss: 5.8011 - last_time_step_mse: 5.3325 - val_loss: 5.2449 - val_last_time_step_mse: 4.7877 Epoch 6/200 13/13 [==============================] - 0s 16ms/step - loss: 5.4644 - last_time_step_mse: 5.0261 - val_loss: 4.9699 - val_last_time_step_mse: 4.5468 Epoch 7/200 13/13 [==============================] - 0s 16ms/step - loss: 5.2056 - last_time_step_mse: 4.7987 - val_loss: 4.7591 - val_last_time_step_mse: 4.3613 Epoch 8/200 13/13 [==============================] - 0s 15ms/step - loss: 4.9945 - last_time_step_mse: 4.6071 - val_loss: 4.5762 - val_last_time_step_mse: 4.1948 Epoch 9/200 13/13 [==============================] - 0s 16ms/step - loss: 4.8074 - last_time_step_mse: 4.4086 - val_loss: 4.4006 - val_last_time_step_mse: 4.0067 Epoch 10/200 13/13 [==============================] - 0s 16ms/step - loss: 4.6156 - last_time_step_mse: 4.2215 - val_loss: 4.2249 - val_last_time_step_mse: 3.8100 Epoch 11/200 13/13 [==============================] - 0s 16ms/step - loss: 4.4118 - last_time_step_mse: 4.0204 - val_loss: 4.0376 - val_last_time_step_mse: 3.6344 Epoch 12/200 13/13 [==============================] - 0s 17ms/step - loss: 4.2242 - last_time_step_mse: 3.8224 - val_loss: 3.8669 - val_last_time_step_mse: 3.4715 Epoch 13/200 13/13 [==============================] 
- 0s 16ms/step - loss: 4.0429 - last_time_step_mse: 3.6356 - val_loss: 3.6997 - val_last_time_step_mse: 3.3251 Epoch 14/200 13/13 [==============================] - 0s 16ms/step - loss: 3.8605 - last_time_step_mse: 3.4461 - val_loss: 3.5391 - val_last_time_step_mse: 3.1747 Epoch 15/200 13/13 [==============================] - 0s 15ms/step - loss: 3.6856 - last_time_step_mse: 3.2706 - val_loss: 3.3749 - val_last_time_step_mse: 2.9904 Epoch 16/200 13/13 [==============================] - 0s 16ms/step - loss: 3.5254 - last_time_step_mse: 3.0905 - val_loss: 3.2274 - val_last_time_step_mse: 2.8249 Epoch 17/200 13/13 [==============================] - 0s 15ms/step - loss: 3.3706 - last_time_step_mse: 2.9261 - val_loss: 3.0832 - val_last_time_step_mse: 2.6892 Epoch 18/200 13/13 [==============================] - 0s 16ms/step - loss: 3.2248 - last_time_step_mse: 2.7742 - val_loss: 2.9522 - val_last_time_step_mse: 2.5382 Epoch 19/200 13/13 [==============================] - 0s 15ms/step - loss: 3.0847 - last_time_step_mse: 2.6133 - val_loss: 2.8294 - val_last_time_step_mse: 2.3727 Epoch 20/200 13/13 [==============================] - 0s 16ms/step - loss: 2.9780 - last_time_step_mse: 2.4798 - val_loss: 2.7122 - val_last_time_step_mse: 2.2517 Epoch 21/200 13/13 [==============================] - 0s 17ms/step - loss: 2.8294 - last_time_step_mse: 2.3475 - val_loss: 2.5942 - val_last_time_step_mse: 2.1634 Epoch 22/200 13/13 [==============================] - 0s 15ms/step - loss: 2.7134 - last_time_step_mse: 2.2276 - val_loss: 2.4928 - val_last_time_step_mse: 2.0575 Epoch 23/200 13/13 [==============================] - 0s 17ms/step - loss: 2.6103 - last_time_step_mse: 2.1084 - val_loss: 2.3871 - val_last_time_step_mse: 1.9398 Epoch 24/200 13/13 [==============================] - 0s 16ms/step - loss: 2.5013 - last_time_step_mse: 1.9942 - val_loss: 2.2939 - val_last_time_step_mse: 1.8435 Epoch 25/200 13/13 [==============================] - 0s 16ms/step - loss: 2.4086 - 
last_time_step_mse: 1.9042 - val_loss: 2.2163 - val_last_time_step_mse: 1.7756 Epoch 26/200 13/13 [==============================] - 0s 16ms/step - loss: 2.3450 - last_time_step_mse: 1.8223 - val_loss: 2.1412 - val_last_time_step_mse: 1.6826 Epoch 27/200 13/13 [==============================] - 0s 16ms/step - loss: 2.2693 - last_time_step_mse: 1.7501 - val_loss: 2.0680 - val_last_time_step_mse: 1.6267 Epoch 28/200 13/13 [==============================] - 0s 16ms/step - loss: 2.1804 - last_time_step_mse: 1.6788 - val_loss: 1.9995 - val_last_time_step_mse: 1.5758 Epoch 29/200 13/13 [==============================] - 0s 16ms/step - loss: 2.1183 - last_time_step_mse: 1.6163 - val_loss: 1.9442 - val_last_time_step_mse: 1.5247 Epoch 30/200 13/13 [==============================] - 0s 16ms/step - loss: 2.0607 - last_time_step_mse: 1.5688 - val_loss: 1.9156 - val_last_time_step_mse: 1.4910 Epoch 31/200 13/13 [==============================] - 0s 17ms/step - loss: 2.0593 - last_time_step_mse: 1.5345 - val_loss: 1.8576 - val_last_time_step_mse: 1.4276 Epoch 32/200 13/13 [==============================] - 0s 17ms/step - loss: 1.9745 - last_time_step_mse: 1.4910 - val_loss: 1.8163 - val_last_time_step_mse: 1.4093 Epoch 33/200 13/13 [==============================] - 0s 16ms/step - loss: 1.9251 - last_time_step_mse: 1.4392 - val_loss: 1.7661 - val_last_time_step_mse: 1.3883 Epoch 34/200 13/13 [==============================] - 0s 16ms/step - loss: 1.8889 - last_time_step_mse: 1.4059 - val_loss: 1.7271 - val_last_time_step_mse: 1.3395 Epoch 35/200 13/13 [==============================] - 0s 16ms/step - loss: 1.8574 - last_time_step_mse: 1.3784 - val_loss: 1.6995 - val_last_time_step_mse: 1.2990 Epoch 36/200 13/13 [==============================] - 0s 16ms/step - loss: 1.8173 - last_time_step_mse: 1.3530 - val_loss: 1.6754 - val_last_time_step_mse: 1.2960 Epoch 37/200 13/13 [==============================] - 0s 16ms/step - loss: 1.7956 - last_time_step_mse: 1.3247 - val_loss: 
1.6493 - val_last_time_step_mse: 1.2636 Epoch 38/200 13/13 [==============================] - 0s 17ms/step - loss: 1.7608 - last_time_step_mse: 1.2994 - val_loss: 1.6192 - val_last_time_step_mse: 1.2394 Epoch 39/200 13/13 [==============================] - 0s 16ms/step - loss: 1.7317 - last_time_step_mse: 1.2777 - val_loss: 1.5996 - val_last_time_step_mse: 1.2098 Epoch 40/200 13/13 [==============================] - 0s 16ms/step - loss: 1.7111 - last_time_step_mse: 1.2590 - val_loss: 1.5689 - val_last_time_step_mse: 1.1935 Epoch 41/200 13/13 [==============================] - 0s 16ms/step - loss: 1.7179 - last_time_step_mse: 1.2387 - val_loss: 1.5512 - val_last_time_step_mse: 1.1938 Epoch 42/200 13/13 [==============================] - 0s 16ms/step - loss: 1.6890 - last_time_step_mse: 1.2250 - val_loss: 1.5377 - val_last_time_step_mse: 1.1717 Epoch 43/200 13/13 [==============================] - 0s 16ms/step - loss: 1.6626 - last_time_step_mse: 1.2103 - val_loss: 1.5739 - val_last_time_step_mse: 1.1515 Epoch 44/200 13/13 [==============================] - 0s 17ms/step - loss: 1.6435 - last_time_step_mse: 1.1907 - val_loss: 1.4829 - val_last_time_step_mse: 1.1366 Epoch 45/200 13/13 [==============================] - 0s 16ms/step - loss: 1.6034 - last_time_step_mse: 1.1655 - val_loss: 1.4672 - val_last_time_step_mse: 1.1245 Epoch 46/200 13/13 [==============================] - 0s 16ms/step - loss: 1.5902 - last_time_step_mse: 1.1584 - val_loss: 1.4521 - val_last_time_step_mse: 1.0994 Epoch 47/200 13/13 [==============================] - 0s 17ms/step - loss: 1.5672 - last_time_step_mse: 1.1303 - val_loss: 1.4419 - val_last_time_step_mse: 1.0831 Epoch 48/200 13/13 [==============================] - 0s 16ms/step - loss: 1.5495 - last_time_step_mse: 1.1144 - val_loss: 1.4122 - val_last_time_step_mse: 1.0639 Epoch 49/200 13/13 [==============================] - 0s 16ms/step - loss: 1.5226 - last_time_step_mse: 1.0956 - val_loss: 1.3909 - val_last_time_step_mse: 1.0510 
Epoch 50/200 13/13 [==============================] - 0s 16ms/step - loss: 1.5087 - last_time_step_mse: 1.0822 - val_loss: 1.3815 - val_last_time_step_mse: 1.0281 Epoch 51/200 13/13 [==============================] - 0s 17ms/step - loss: 1.5026 - last_time_step_mse: 1.0659 - val_loss: 1.4001 - val_last_time_step_mse: 1.0121 Epoch 52/200 13/13 [==============================] - 0s 16ms/step - loss: 1.4759 - last_time_step_mse: 1.0578 - val_loss: 1.3413 - val_last_time_step_mse: 1.0079 Epoch 53/200 13/13 [==============================] - 0s 16ms/step - loss: 1.4710 - last_time_step_mse: 1.0387 - val_loss: 1.3589 - val_last_time_step_mse: 0.9841 Epoch 54/200 13/13 [==============================] - 0s 16ms/step - loss: 1.4411 - last_time_step_mse: 1.0229 - val_loss: 1.3082 - val_last_time_step_mse: 0.9696 Epoch 55/200 13/13 [==============================] - 0s 16ms/step - loss: 1.4320 - last_time_step_mse: 1.0154 - val_loss: 1.2975 - val_last_time_step_mse: 0.9629 Epoch 56/200 13/13 [==============================] - 0s 16ms/step - loss: 1.4070 - last_time_step_mse: 0.9961 - val_loss: 1.2993 - val_last_time_step_mse: 0.9461 Epoch 57/200 13/13 [==============================] - 0s 16ms/step - loss: 1.3929 - last_time_step_mse: 0.9825 - val_loss: 1.2649 - val_last_time_step_mse: 0.9371 Epoch 58/200 13/13 [==============================] - 0s 16ms/step - loss: 1.3805 - last_time_step_mse: 0.9694 - val_loss: 1.2497 - val_last_time_step_mse: 0.9222 Epoch 59/200 13/13 [==============================] - 0s 16ms/step - loss: 1.3604 - last_time_step_mse: 0.9563 - val_loss: 1.2406 - val_last_time_step_mse: 0.9129 Epoch 60/200 13/13 [==============================] - 0s 16ms/step - loss: 1.3447 - last_time_step_mse: 0.9412 - val_loss: 1.2188 - val_last_time_step_mse: 0.8917 Epoch 61/200 13/13 [==============================] - 0s 16ms/step - loss: 1.3173 - last_time_step_mse: 0.9261 - val_loss: 1.2003 - val_last_time_step_mse: 0.8751 Epoch 62/200 13/13 
[==============================] - 0s 15ms/step - loss: 1.3032 - last_time_step_mse: 0.9125 - val_loss: 1.1842 - val_last_time_step_mse: 0.8684 Epoch 63/200 13/13 [==============================] - 0s 16ms/step - loss: 1.2964 - last_time_step_mse: 0.8981 - val_loss: 1.2029 - val_last_time_step_mse: 0.8598 Epoch 64/200 13/13 [==============================] - 0s 16ms/step - loss: 1.2868 - last_time_step_mse: 0.8935 - val_loss: 1.1618 - val_last_time_step_mse: 0.8626 Epoch 65/200 13/13 [==============================] - 0s 16ms/step - loss: 1.2704 - last_time_step_mse: 0.8818 - val_loss: 1.1474 - val_last_time_step_mse: 0.8343 Epoch 66/200 13/13 [==============================] - 0s 16ms/step - loss: 1.2528 - last_time_step_mse: 0.8710 - val_loss: 1.1403 - val_last_time_step_mse: 0.8264 Epoch 67/200 13/13 [==============================] - 0s 16ms/step - loss: 1.2372 - last_time_step_mse: 0.8602 - val_loss: 1.1443 - val_last_time_step_mse: 0.8254 Epoch 68/200 13/13 [==============================] - 0s 17ms/step - loss: 1.2295 - last_time_step_mse: 0.8501 - val_loss: 1.1191 - val_last_time_step_mse: 0.8316 Epoch 69/200 13/13 [==============================] - 0s 16ms/step - loss: 1.2135 - last_time_step_mse: 0.8459 - val_loss: 1.1055 - val_last_time_step_mse: 0.7991 Epoch 70/200 13/13 [==============================] - 0s 15ms/step - loss: 1.2004 - last_time_step_mse: 0.8267 - val_loss: 1.0870 - val_last_time_step_mse: 0.7845 Epoch 71/200 13/13 [==============================] - 0s 16ms/step - loss: 1.1904 - last_time_step_mse: 0.8177 - val_loss: 1.0790 - val_last_time_step_mse: 0.7912 Epoch 72/200 13/13 [==============================] - 0s 17ms/step - loss: 1.1744 - last_time_step_mse: 0.8112 - val_loss: 1.0881 - val_last_time_step_mse: 0.7829 Epoch 73/200 13/13 [==============================] - 0s 16ms/step - loss: 1.1660 - last_time_step_mse: 0.8013 - val_loss: 1.0638 - val_last_time_step_mse: 0.7720 Epoch 74/200 13/13 [==============================] - 0s 
16ms/step - loss: 1.1594 - last_time_step_mse: 0.7973 - val_loss: 1.0463 - val_last_time_step_mse: 0.7711 Epoch 75/200 13/13 [==============================] - 0s 16ms/step - loss: 1.1447 - last_time_step_mse: 0.7893 - val_loss: 1.0368 - val_last_time_step_mse: 0.7619 Epoch 76/200 13/13 [==============================] - 0s 15ms/step - loss: 1.1369 - last_time_step_mse: 0.7831 - val_loss: 1.0295 - val_last_time_step_mse: 0.7447 Epoch 77/200 13/13 [==============================] - 0s 16ms/step - loss: 1.1254 - last_time_step_mse: 0.7766 - val_loss: 1.0201 - val_last_time_step_mse: 0.7420 Epoch 78/200 13/13 [==============================] - 0s 16ms/step - loss: 1.1093 - last_time_step_mse: 0.7748 - val_loss: 1.0159 - val_last_time_step_mse: 0.7370 Epoch 79/200 13/13 [==============================] - 0s 17ms/step - loss: 1.1238 - last_time_step_mse: 0.7687 - val_loss: 1.0114 - val_last_time_step_mse: 0.7277 Epoch 80/200 13/13 [==============================] - 0s 17ms/step - loss: 1.1142 - last_time_step_mse: 0.7701 - val_loss: 1.0034 - val_last_time_step_mse: 0.7218 Epoch 81/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0882 - last_time_step_mse: 0.7592 - val_loss: 0.9900 - val_last_time_step_mse: 0.7236 Epoch 82/200 13/13 [==============================] - 0s 17ms/step - loss: 1.0816 - last_time_step_mse: 0.7528 - val_loss: 0.9763 - val_last_time_step_mse: 0.7215 Epoch 83/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0809 - last_time_step_mse: 0.7468 - val_loss: 0.9715 - val_last_time_step_mse: 0.7189 Epoch 84/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0720 - last_time_step_mse: 0.7499 - val_loss: 0.9835 - val_last_time_step_mse: 0.7022 Epoch 85/200 13/13 [==============================] - 0s 17ms/step - loss: 1.0643 - last_time_step_mse: 0.7396 - val_loss: 0.9607 - val_last_time_step_mse: 0.7014 Epoch 86/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0553 - 
last_time_step_mse: 0.7398 - val_loss: 0.9553 - val_last_time_step_mse: 0.7007 Epoch 87/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0451 - last_time_step_mse: 0.7285 - val_loss: 0.9448 - val_last_time_step_mse: 0.6943 Epoch 88/200 13/13 [==============================] - 0s 17ms/step - loss: 1.0497 - last_time_step_mse: 0.7242 - val_loss: 0.9463 - val_last_time_step_mse: 0.6726 Epoch 89/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0349 - last_time_step_mse: 0.7206 - val_loss: 0.9366 - val_last_time_step_mse: 0.6883 Epoch 90/200 13/13 [==============================] - 0s 17ms/step - loss: 1.0317 - last_time_step_mse: 0.7189 - val_loss: 0.9490 - val_last_time_step_mse: 0.6995 Epoch 91/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0284 - last_time_step_mse: 0.7075 - val_loss: 0.9305 - val_last_time_step_mse: 0.6994 Epoch 92/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0238 - last_time_step_mse: 0.7080 - val_loss: 0.9224 - val_last_time_step_mse: 0.6885 Epoch 93/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0226 - last_time_step_mse: 0.7097 - val_loss: 0.9162 - val_last_time_step_mse: 0.6603 Epoch 94/200 13/13 [==============================] - 0s 17ms/step - loss: 1.0131 - last_time_step_mse: 0.7047 - val_loss: 0.9167 - val_last_time_step_mse: 0.6522 Epoch 95/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0033 - last_time_step_mse: 0.6909 - val_loss: 0.9182 - val_last_time_step_mse: 0.6591 Epoch 96/200 13/13 [==============================] - 0s 17ms/step - loss: 1.0128 - last_time_step_mse: 0.6986 - val_loss: 0.9091 - val_last_time_step_mse: 0.6444 Epoch 97/200 13/13 [==============================] - 0s 16ms/step - loss: 1.0021 - last_time_step_mse: 0.6919 - val_loss: 0.9006 - val_last_time_step_mse: 0.6524 Epoch 98/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9988 - last_time_step_mse: 0.6954 - val_loss: 
0.9004 - val_last_time_step_mse: 0.6461 Epoch 99/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9932 - last_time_step_mse: 0.6922 - val_loss: 0.8988 - val_last_time_step_mse: 0.6411 Epoch 100/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9874 - last_time_step_mse: 0.6831 - val_loss: 0.8932 - val_last_time_step_mse: 0.6405 Epoch 101/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9853 - last_time_step_mse: 0.6834 - val_loss: 0.8902 - val_last_time_step_mse: 0.6348 Epoch 102/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9921 - last_time_step_mse: 0.6827 - val_loss: 0.8931 - val_last_time_step_mse: 0.6427 Epoch 103/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9818 - last_time_step_mse: 0.6812 - val_loss: 0.8890 - val_last_time_step_mse: 0.6395 Epoch 104/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9850 - last_time_step_mse: 0.6753 - val_loss: 0.8796 - val_last_time_step_mse: 0.6484 Epoch 105/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9751 - last_time_step_mse: 0.6752 - val_loss: 0.8916 - val_last_time_step_mse: 0.6293 Epoch 106/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9721 - last_time_step_mse: 0.6734 - val_loss: 0.8768 - val_last_time_step_mse: 0.6460 Epoch 107/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9691 - last_time_step_mse: 0.6717 - val_loss: 0.8795 - val_last_time_step_mse: 0.6420 Epoch 108/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9749 - last_time_step_mse: 0.6742 - val_loss: 0.8795 - val_last_time_step_mse: 0.6277 Epoch 109/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9736 - last_time_step_mse: 0.6679 - val_loss: 0.8765 - val_last_time_step_mse: 0.6676 Epoch 110/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9731 - last_time_step_mse: 0.6894 - val_loss: 0.8805 - val_last_time_step_mse: 
0.6784 Epoch 111/200 13/13 [==============================] - 0s 18ms/step - loss: 0.9712 - last_time_step_mse: 0.6864 - val_loss: 0.8697 - val_last_time_step_mse: 0.6355 Epoch 112/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9627 - last_time_step_mse: 0.6736 - val_loss: 0.8640 - val_last_time_step_mse: 0.6317 Epoch 113/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9552 - last_time_step_mse: 0.6624 - val_loss: 0.8558 - val_last_time_step_mse: 0.6288 Epoch 114/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9533 - last_time_step_mse: 0.6596 - val_loss: 0.8599 - val_last_time_step_mse: 0.6378 Epoch 115/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9550 - last_time_step_mse: 0.6692 - val_loss: 0.8651 - val_last_time_step_mse: 0.6206 Epoch 116/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9493 - last_time_step_mse: 0.6578 - val_loss: 0.8547 - val_last_time_step_mse: 0.6347 Epoch 117/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9501 - last_time_step_mse: 0.6607 - val_loss: 0.8692 - val_last_time_step_mse: 0.6232 Epoch 118/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9520 - last_time_step_mse: 0.6563 - val_loss: 0.8546 - val_last_time_step_mse: 0.6330 Epoch 119/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9515 - last_time_step_mse: 0.6638 - val_loss: 0.8498 - val_last_time_step_mse: 0.6317 Epoch 120/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9415 - last_time_step_mse: 0.6529 - val_loss: 0.8454 - val_last_time_step_mse: 0.6182 Epoch 121/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9404 - last_time_step_mse: 0.6407 - val_loss: 0.8381 - val_last_time_step_mse: 0.6198 Epoch 122/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9322 - last_time_step_mse: 0.6390 - val_loss: 0.8412 - val_last_time_step_mse: 0.6179 Epoch 123/200 13/13 
[==============================] - 0s 16ms/step - loss: 0.9325 - last_time_step_mse: 0.6435 - val_loss: 0.8340 - val_last_time_step_mse: 0.6298 Epoch 124/200 13/13 [==============================] - 0s 18ms/step - loss: 0.9302 - last_time_step_mse: 0.6366 - val_loss: 0.8309 - val_last_time_step_mse: 0.6223 Epoch 125/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9306 - last_time_step_mse: 0.6489 - val_loss: 0.8527 - val_last_time_step_mse: 0.6097 Epoch 126/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9261 - last_time_step_mse: 0.6418 - val_loss: 0.8286 - val_last_time_step_mse: 0.6272 Epoch 127/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9255 - last_time_step_mse: 0.6408 - val_loss: 0.8306 - val_last_time_step_mse: 0.6287 Epoch 128/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9228 - last_time_step_mse: 0.6406 - val_loss: 0.8308 - val_last_time_step_mse: 0.6201 Epoch 129/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9231 - last_time_step_mse: 0.6446 - val_loss: 0.8288 - val_last_time_step_mse: 0.6122 Epoch 130/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9143 - last_time_step_mse: 0.6289 - val_loss: 0.8239 - val_last_time_step_mse: 0.5989 Epoch 131/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9273 - last_time_step_mse: 0.6381 - val_loss: 0.8317 - val_last_time_step_mse: 0.5968 Epoch 132/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9156 - last_time_step_mse: 0.6365 - val_loss: 0.8314 - val_last_time_step_mse: 0.6113 Epoch 133/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9116 - last_time_step_mse: 0.6301 - val_loss: 0.8220 - val_last_time_step_mse: 0.5889 Epoch 134/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9066 - last_time_step_mse: 0.6209 - val_loss: 0.8255 - val_last_time_step_mse: 0.6017 Epoch 135/200 13/13 
[==============================] - 0s 17ms/step - loss: 0.9110 - last_time_step_mse: 0.6272 - val_loss: 0.8216 - val_last_time_step_mse: 0.5910 Epoch 136/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9060 - last_time_step_mse: 0.6339 - val_loss: 0.8150 - val_last_time_step_mse: 0.6109 Epoch 137/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9065 - last_time_step_mse: 0.6169 - val_loss: 0.8102 - val_last_time_step_mse: 0.5828 Epoch 138/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8978 - last_time_step_mse: 0.6156 - val_loss: 0.8094 - val_last_time_step_mse: 0.6105 Epoch 139/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8945 - last_time_step_mse: 0.6078 - val_loss: 0.8064 - val_last_time_step_mse: 0.5912 Epoch 140/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9094 - last_time_step_mse: 0.6148 - val_loss: 0.8057 - val_last_time_step_mse: 0.6106 Epoch 141/200 13/13 [==============================] - 0s 17ms/step - loss: 0.9065 - last_time_step_mse: 0.6200 - val_loss: 0.8136 - val_last_time_step_mse: 0.6097 Epoch 142/200 13/13 [==============================] - 0s 16ms/step - loss: 0.9005 - last_time_step_mse: 0.6143 - val_loss: 0.8057 - val_last_time_step_mse: 0.5790 Epoch 143/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8939 - last_time_step_mse: 0.6040 - val_loss: 0.7964 - val_last_time_step_mse: 0.5776 Epoch 144/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8844 - last_time_step_mse: 0.5971 - val_loss: 0.7991 - val_last_time_step_mse: 0.5678 Epoch 145/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8934 - last_time_step_mse: 0.6031 - val_loss: 0.7961 - val_last_time_step_mse: 0.5733 Epoch 146/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8984 - last_time_step_mse: 0.6174 - val_loss: 0.7967 - val_last_time_step_mse: 0.5635 Epoch 147/200 13/13 
[==============================] - 0s 16ms/step - loss: 0.8863 - last_time_step_mse: 0.5958 - val_loss: 0.8126 - val_last_time_step_mse: 0.6126 Epoch 148/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8900 - last_time_step_mse: 0.6030 - val_loss: 0.7948 - val_last_time_step_mse: 0.5888 Epoch 149/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8897 - last_time_step_mse: 0.6060 - val_loss: 0.8103 - val_last_time_step_mse: 0.5695 Epoch 150/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8840 - last_time_step_mse: 0.6005 - val_loss: 0.7931 - val_last_time_step_mse: 0.5829 Epoch 151/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8781 - last_time_step_mse: 0.5898 - val_loss: 0.7850 - val_last_time_step_mse: 0.5571 Epoch 152/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8783 - last_time_step_mse: 0.5871 - val_loss: 0.7866 - val_last_time_step_mse: 0.5549 Epoch 153/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8790 - last_time_step_mse: 0.5841 - val_loss: 0.7828 - val_last_time_step_mse: 0.5515 Epoch 154/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8748 - last_time_step_mse: 0.5888 - val_loss: 0.7803 - val_last_time_step_mse: 0.5526 Epoch 155/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8729 - last_time_step_mse: 0.5850 - val_loss: 0.7878 - val_last_time_step_mse: 0.5718 Epoch 156/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8691 - last_time_step_mse: 0.5843 - val_loss: 0.7795 - val_last_time_step_mse: 0.5523 Epoch 157/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8736 - last_time_step_mse: 0.5876 - val_loss: 0.7990 - val_last_time_step_mse: 0.5951 Epoch 158/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8737 - last_time_step_mse: 0.5855 - val_loss: 0.7795 - val_last_time_step_mse: 0.5605 Epoch 159/200 13/13 
[==============================] - 0s 16ms/step - loss: 0.8716 - last_time_step_mse: 0.5822 - val_loss: 0.7758 - val_last_time_step_mse: 0.5470 Epoch 160/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8767 - last_time_step_mse: 0.5897 - val_loss: 0.7974 - val_last_time_step_mse: 0.5584 Epoch 161/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8790 - last_time_step_mse: 0.5969 - val_loss: 0.7767 - val_last_time_step_mse: 0.5453 Epoch 162/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8638 - last_time_step_mse: 0.5776 - val_loss: 0.7716 - val_last_time_step_mse: 0.5475 Epoch 163/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8629 - last_time_step_mse: 0.5748 - val_loss: 0.7736 - val_last_time_step_mse: 0.5408 Epoch 164/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8591 - last_time_step_mse: 0.5756 - val_loss: 0.7703 - val_last_time_step_mse: 0.5409 Epoch 165/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8612 - last_time_step_mse: 0.5791 - val_loss: 0.7725 - val_last_time_step_mse: 0.5518 Epoch 166/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8674 - last_time_step_mse: 0.5883 - val_loss: 0.7693 - val_last_time_step_mse: 0.5589 Epoch 167/200 13/13 [==============================] - 0s 18ms/step - loss: 0.8598 - last_time_step_mse: 0.5784 - val_loss: 0.7697 - val_last_time_step_mse: 0.5415 Epoch 168/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8567 - last_time_step_mse: 0.5701 - val_loss: 0.7648 - val_last_time_step_mse: 0.5442 Epoch 169/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8527 - last_time_step_mse: 0.5760 - val_loss: 0.7766 - val_last_time_step_mse: 0.5587 Epoch 170/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8578 - last_time_step_mse: 0.5789 - val_loss: 0.7610 - val_last_time_step_mse: 0.5401 Epoch 171/200 13/13 
[==============================] - 0s 18ms/step - loss: 0.8508 - last_time_step_mse: 0.5701 - val_loss: 0.7590 - val_last_time_step_mse: 0.5365 Epoch 172/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8605 - last_time_step_mse: 0.5735 - val_loss: 0.7661 - val_last_time_step_mse: 0.5404 Epoch 173/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8528 - last_time_step_mse: 0.5722 - val_loss: 0.7604 - val_last_time_step_mse: 0.5318 Epoch 174/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8532 - last_time_step_mse: 0.5663 - val_loss: 0.7682 - val_last_time_step_mse: 0.5297 Epoch 175/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8550 - last_time_step_mse: 0.5826 - val_loss: 0.7601 - val_last_time_step_mse: 0.5315 Epoch 176/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8449 - last_time_step_mse: 0.5631 - val_loss: 0.7544 - val_last_time_step_mse: 0.5365 Epoch 177/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8461 - last_time_step_mse: 0.5648 - val_loss: 0.7528 - val_last_time_step_mse: 0.5327 Epoch 178/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8524 - last_time_step_mse: 0.5675 - val_loss: 0.7596 - val_last_time_step_mse: 0.5581 Epoch 179/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8435 - last_time_step_mse: 0.5641 - val_loss: 0.7548 - val_last_time_step_mse: 0.5378 Epoch 180/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8436 - last_time_step_mse: 0.5699 - val_loss: 0.7573 - val_last_time_step_mse: 0.5512 Epoch 181/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8439 - last_time_step_mse: 0.5678 - val_loss: 0.7516 - val_last_time_step_mse: 0.5319 Epoch 182/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8390 - last_time_step_mse: 0.5609 - val_loss: 0.7529 - val_last_time_step_mse: 0.5306 Epoch 183/200 13/13 
[==============================] - 0s 17ms/step - loss: 0.8415 - last_time_step_mse: 0.5686 - val_loss: 0.7565 - val_last_time_step_mse: 0.5299 Epoch 184/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8382 - last_time_step_mse: 0.5598 - val_loss: 0.7479 - val_last_time_step_mse: 0.5306 Epoch 185/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8360 - last_time_step_mse: 0.5594 - val_loss: 0.7459 - val_last_time_step_mse: 0.5283 Epoch 186/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8369 - last_time_step_mse: 0.5640 - val_loss: 0.7541 - val_last_time_step_mse: 0.5380 Epoch 187/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8431 - last_time_step_mse: 0.5672 - val_loss: 0.7530 - val_last_time_step_mse: 0.5393 Epoch 188/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8374 - last_time_step_mse: 0.5633 - val_loss: 0.7498 - val_last_time_step_mse: 0.5333 Epoch 189/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8413 - last_time_step_mse: 0.5688 - val_loss: 0.7443 - val_last_time_step_mse: 0.5320 Epoch 190/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8399 - last_time_step_mse: 0.5655 - val_loss: 0.7453 - val_last_time_step_mse: 0.5412 Epoch 191/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8363 - last_time_step_mse: 0.5644 - val_loss: 0.7520 - val_last_time_step_mse: 0.5402 Epoch 192/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8402 - last_time_step_mse: 0.5646 - val_loss: 0.7462 - val_last_time_step_mse: 0.5263 Epoch 193/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8379 - last_time_step_mse: 0.5622 - val_loss: 0.7505 - val_last_time_step_mse: 0.5454 Epoch 194/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8361 - last_time_step_mse: 0.5630 - val_loss: 0.7408 - val_last_time_step_mse: 0.5358 Epoch 195/200 13/13 
[==============================] - 0s 17ms/step - loss: 0.8341 - last_time_step_mse: 0.5601 - val_loss: 0.7507 - val_last_time_step_mse: 0.5242 Epoch 196/200 13/13 [==============================] - 0s 18ms/step - loss: 0.8396 - last_time_step_mse: 0.5590 - val_loss: 0.7447 - val_last_time_step_mse: 0.5233 Epoch 197/200 13/13 [==============================] - 0s 16ms/step - loss: 0.8327 - last_time_step_mse: 0.5611 - val_loss: 0.7464 - val_last_time_step_mse: 0.5376 Epoch 198/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8349 - last_time_step_mse: 0.5624 - val_loss: 0.7518 - val_last_time_step_mse: 0.5483 Epoch 199/200 13/13 [==============================] - 0s 18ms/step - loss: 0.8353 - last_time_step_mse: 0.5690 - val_loss: 0.7413 - val_last_time_step_mse: 0.5325 Epoch 200/200 13/13 [==============================] - 0s 17ms/step - loss: 0.8309 - last_time_step_mse: 0.5593 - val_loss: 0.7477 - val_last_time_step_mse: 0.5277
# Learning curves for the last fitted model: validation vs. training
# last-time-step MSE, one point per epoch (val plotted first, as before).
for metric_key, curve_label in [("val_last_time_step_mse", "val"),
                                ("last_time_step_mse", "train")]:
    plt.plot(history.history[metric_key], label=curve_label)
plt.legend()
<matplotlib.legend.Legend at 0x7f9fa6346e80>
We will plot the new model's predictions together with those of the previous model
# Forecast with both models. The conv model predicts at every time step,
# so keep only its final step and add a trailing axis to match the
# (pred_steps, 1)-shaped targets; the LSTM output is used as-is.
y_pred_conv = model_conv.predict(X_test)[:, -1][..., np.newaxis]
y_pred_lstm = model.predict(X_test)

plt.figure(figsize=(10, 10))
for row in range(5):
    # Offset by 10 so these subplots show different test windows than earlier plots.
    sample = row + 10
    plt.subplot(5, 1, row + 1)
    plt.xticks([])
    # Observed input window (blue), then true future vs. each model's forecast.
    plt.plot(date_list_hr[:n_steps], X_test[sample, :, 0], "b.-")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps], y_test[sample, :, 0], "r.-", label="real")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps], y_pred_lstm[sample, :], "g.-", label="pred_lstm")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps], y_pred_conv[sample, :], "y.-", label="pred_conv")
    plt.legend()
    plt.xticks(np.arange(0, n_steps + pred_steps, step=12), rotation=90)
plt.show()
# Evaluate the conv model on the test set. The target slice [:, 3::2]
# (every other step from index 3) matches the strided outputs the conv
# model was trained against.
conv_targets = y_test_2[:, 3::2]
model_conv.evaluate(X_test, conv_targets)
2/2 [==============================] - 0s 3ms/step - loss: 1.0258 - last_time_step_mse: 0.7111
[1.0257858037948608, 0.7111430168151855]
# WaveNet-style model: two stacks of causal dilated Conv1D layers
# (dilation rates 1, 2, 4, 8, repeated twice) followed by a 1x1 conv
# that emits pred_steps forecasts at every time step.
wave_layers = [keras.layers.InputLayer(input_shape=[None, X_train.shape[2]])]
wave_layers.extend(
    keras.layers.Conv1D(filters=20, kernel_size=2, padding="causal",
                        activation="relu", dilation_rate=rate)
    for rate in (1, 2, 4, 8) * 2
)
wave_layers.append(keras.layers.Conv1D(filters=pred_steps, kernel_size=1))
model_wave = keras.models.Sequential(wave_layers)

model_wave.compile(loss="mse", optimizer="adam", metrics=[last_time_step_mse])
history = model_wave.fit(X_train, y_train_2, epochs=100,
                         validation_data=(X_valid, y_valid_2))
Epoch 1/100 13/13 [==============================] - 0s 27ms/step - loss: 7.7907 - last_time_step_mse: 7.5465 - val_loss: 7.2737 - val_last_time_step_mse: 6.9450 Epoch 2/100 13/13 [==============================] - 0s 9ms/step - loss: 7.6705 - last_time_step_mse: 7.3916 - val_loss: 7.0546 - val_last_time_step_mse: 6.6534 Epoch 3/100 13/13 [==============================] - 0s 9ms/step - loss: 7.1775 - last_time_step_mse: 6.7178 - val_loss: 6.1906 - val_last_time_step_mse: 5.5383 Epoch 4/100 13/13 [==============================] - 0s 9ms/step - loss: 6.0575 - last_time_step_mse: 5.3740 - val_loss: 5.0731 - val_last_time_step_mse: 4.2244 Epoch 5/100 13/13 [==============================] - 0s 10ms/step - loss: 4.9438 - last_time_step_mse: 4.0597 - val_loss: 4.1301 - val_last_time_step_mse: 3.1622 Epoch 6/100 13/13 [==============================] - 0s 10ms/step - loss: 4.0814 - last_time_step_mse: 3.0613 - val_loss: 3.4273 - val_last_time_step_mse: 2.4869 Epoch 7/100 13/13 [==============================] - 0s 11ms/step - loss: 3.4527 - last_time_step_mse: 2.4389 - val_loss: 2.9476 - val_last_time_step_mse: 2.1002 Epoch 8/100 13/13 [==============================] - 0s 9ms/step - loss: 2.9940 - last_time_step_mse: 2.0444 - val_loss: 2.5831 - val_last_time_step_mse: 1.8420 Epoch 9/100 13/13 [==============================] - 0s 9ms/step - loss: 2.6524 - last_time_step_mse: 1.7892 - val_loss: 2.3048 - val_last_time_step_mse: 1.6527 Epoch 10/100 13/13 [==============================] - 0s 9ms/step - loss: 2.4097 - last_time_step_mse: 1.5896 - val_loss: 2.1091 - val_last_time_step_mse: 1.4514 Epoch 11/100 13/13 [==============================] - 0s 9ms/step - loss: 2.2178 - last_time_step_mse: 1.4428 - val_loss: 1.9669 - val_last_time_step_mse: 1.2675 Epoch 12/100 13/13 [==============================] - 0s 9ms/step - loss: 2.0921 - last_time_step_mse: 1.2885 - val_loss: 1.8541 - val_last_time_step_mse: 1.1997 Epoch 13/100 13/13 [==============================] - 0s 
9ms/step - loss: 1.9826 - last_time_step_mse: 1.1885 - val_loss: 1.7753 - val_last_time_step_mse: 1.1562 Epoch 14/100 13/13 [==============================] - 0s 9ms/step - loss: 1.8923 - last_time_step_mse: 1.1229 - val_loss: 1.6961 - val_last_time_step_mse: 1.1096 Epoch 15/100 13/13 [==============================] - 0s 9ms/step - loss: 1.8187 - last_time_step_mse: 1.0683 - val_loss: 1.6423 - val_last_time_step_mse: 1.0343 Epoch 16/100 13/13 [==============================] - 0s 9ms/step - loss: 1.7590 - last_time_step_mse: 1.0177 - val_loss: 1.5915 - val_last_time_step_mse: 0.9944 Epoch 17/100 13/13 [==============================] - 0s 9ms/step - loss: 1.7045 - last_time_step_mse: 0.9872 - val_loss: 1.5342 - val_last_time_step_mse: 0.9639 Epoch 18/100 13/13 [==============================] - 0s 9ms/step - loss: 1.6559 - last_time_step_mse: 0.9714 - val_loss: 1.5025 - val_last_time_step_mse: 0.9780 Epoch 19/100 13/13 [==============================] - 0s 9ms/step - loss: 1.6286 - last_time_step_mse: 0.9453 - val_loss: 1.4604 - val_last_time_step_mse: 0.9348 Epoch 20/100 13/13 [==============================] - 0s 10ms/step - loss: 1.5854 - last_time_step_mse: 0.9228 - val_loss: 1.4641 - val_last_time_step_mse: 0.9566 Epoch 21/100 13/13 [==============================] - 0s 9ms/step - loss: 1.5625 - last_time_step_mse: 0.9273 - val_loss: 1.3979 - val_last_time_step_mse: 0.9426 Epoch 22/100 13/13 [==============================] - 0s 10ms/step - loss: 1.5204 - last_time_step_mse: 0.9087 - val_loss: 1.3946 - val_last_time_step_mse: 0.9361 Epoch 23/100 13/13 [==============================] - 0s 10ms/step - loss: 1.4907 - last_time_step_mse: 0.8909 - val_loss: 1.3499 - val_last_time_step_mse: 0.8787 Epoch 24/100 13/13 [==============================] - 0s 9ms/step - loss: 1.4634 - last_time_step_mse: 0.8710 - val_loss: 1.3390 - val_last_time_step_mse: 0.8545 Epoch 25/100 13/13 [==============================] - 0s 9ms/step - loss: 1.4505 - last_time_step_mse: 0.8608 
- val_loss: 1.3216 - val_last_time_step_mse: 0.8806 Epoch 26/100 13/13 [==============================] - 0s 9ms/step - loss: 1.4236 - last_time_step_mse: 0.8525 - val_loss: 1.2952 - val_last_time_step_mse: 0.8825 Epoch 27/100 13/13 [==============================] - 0s 9ms/step - loss: 1.4075 - last_time_step_mse: 0.8479 - val_loss: 1.2943 - val_last_time_step_mse: 0.8396 Epoch 28/100 13/13 [==============================] - 0s 10ms/step - loss: 1.4014 - last_time_step_mse: 0.8444 - val_loss: 1.2585 - val_last_time_step_mse: 0.8336 Epoch 29/100 13/13 [==============================] - 0s 10ms/step - loss: 1.3768 - last_time_step_mse: 0.8422 - val_loss: 1.2543 - val_last_time_step_mse: 0.8249 Epoch 30/100 13/13 [==============================] - 0s 10ms/step - loss: 1.3577 - last_time_step_mse: 0.8065 - val_loss: 1.2350 - val_last_time_step_mse: 0.7989 Epoch 31/100 13/13 [==============================] - 0s 9ms/step - loss: 1.3524 - last_time_step_mse: 0.8283 - val_loss: 1.2311 - val_last_time_step_mse: 0.8004 Epoch 32/100 13/13 [==============================] - 0s 9ms/step - loss: 1.3321 - last_time_step_mse: 0.7934 - val_loss: 1.2425 - val_last_time_step_mse: 0.8368 Epoch 33/100 13/13 [==============================] - 0s 9ms/step - loss: 1.3200 - last_time_step_mse: 0.7874 - val_loss: 1.1925 - val_last_time_step_mse: 0.7593 Epoch 34/100 13/13 [==============================] - 0s 9ms/step - loss: 1.3034 - last_time_step_mse: 0.7589 - val_loss: 1.2025 - val_last_time_step_mse: 0.7515 Epoch 35/100 13/13 [==============================] - 0s 9ms/step - loss: 1.3084 - last_time_step_mse: 0.7679 - val_loss: 1.2052 - val_last_time_step_mse: 0.7579 Epoch 36/100 13/13 [==============================] - 0s 10ms/step - loss: 1.3015 - last_time_step_mse: 0.7702 - val_loss: 1.1754 - val_last_time_step_mse: 0.7842 Epoch 37/100 13/13 [==============================] - 0s 10ms/step - loss: 1.2804 - last_time_step_mse: 0.7545 - val_loss: 1.1708 - val_last_time_step_mse: 
0.7231 Epoch 38/100 13/13 [==============================] - 0s 9ms/step - loss: 1.2671 - last_time_step_mse: 0.7314 - val_loss: 1.1672 - val_last_time_step_mse: 0.7036 Epoch 39/100 13/13 [==============================] - 0s 9ms/step - loss: 1.2621 - last_time_step_mse: 0.7291 - val_loss: 1.1483 - val_last_time_step_mse: 0.7483 Epoch 40/100 13/13 [==============================] - 0s 9ms/step - loss: 1.2492 - last_time_step_mse: 0.7346 - val_loss: 1.1466 - val_last_time_step_mse: 0.7411 Epoch 41/100 13/13 [==============================] - 0s 10ms/step - loss: 1.2532 - last_time_step_mse: 0.7345 - val_loss: 1.1533 - val_last_time_step_mse: 0.7657 Epoch 42/100 13/13 [==============================] - 0s 10ms/step - loss: 1.2413 - last_time_step_mse: 0.7369 - val_loss: 1.1237 - val_last_time_step_mse: 0.7050 Epoch 43/100 13/13 [==============================] - 0s 10ms/step - loss: 1.2412 - last_time_step_mse: 0.7341 - val_loss: 1.1385 - val_last_time_step_mse: 0.7040 Epoch 44/100 13/13 [==============================] - 0s 9ms/step - loss: 1.2439 - last_time_step_mse: 0.7161 - val_loss: 1.1189 - val_last_time_step_mse: 0.7100 Epoch 45/100 13/13 [==============================] - 0s 10ms/step - loss: 1.2225 - last_time_step_mse: 0.7132 - val_loss: 1.1211 - val_last_time_step_mse: 0.7132 Epoch 46/100 13/13 [==============================] - 0s 9ms/step - loss: 1.2152 - last_time_step_mse: 0.7149 - val_loss: 1.1174 - val_last_time_step_mse: 0.6942 Epoch 47/100 13/13 [==============================] - 0s 9ms/step - loss: 1.2140 - last_time_step_mse: 0.7153 - val_loss: 1.1166 - val_last_time_step_mse: 0.6772 Epoch 48/100 13/13 [==============================] - 0s 9ms/step - loss: 1.2123 - last_time_step_mse: 0.7023 - val_loss: 1.1025 - val_last_time_step_mse: 0.6939 Epoch 49/100 13/13 [==============================] - 0s 10ms/step - loss: 1.2026 - last_time_step_mse: 0.7018 - val_loss: 1.1027 - val_last_time_step_mse: 0.6711 Epoch 50/100 13/13 
[==============================] - 0s 10ms/step - loss: 1.2023 - last_time_step_mse: 0.6976 - val_loss: 1.0977 - val_last_time_step_mse: 0.6907 Epoch 51/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1963 - last_time_step_mse: 0.6986 - val_loss: 1.1052 - val_last_time_step_mse: 0.6828 Epoch 52/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1994 - last_time_step_mse: 0.6992 - val_loss: 1.1118 - val_last_time_step_mse: 0.6852 Epoch 53/100 13/13 [==============================] - 0s 10ms/step - loss: 1.2107 - last_time_step_mse: 0.7141 - val_loss: 1.1078 - val_last_time_step_mse: 0.7148 Epoch 54/100 13/13 [==============================] - 0s 10ms/step - loss: 1.2057 - last_time_step_mse: 0.6974 - val_loss: 1.0839 - val_last_time_step_mse: 0.6627 Epoch 55/100 13/13 [==============================] - 0s 10ms/step - loss: 1.2261 - last_time_step_mse: 0.7307 - val_loss: 1.1342 - val_last_time_step_mse: 0.7682 Epoch 56/100 13/13 [==============================] - 0s 10ms/step - loss: 1.2042 - last_time_step_mse: 0.6974 - val_loss: 1.0987 - val_last_time_step_mse: 0.7035 Epoch 57/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1934 - last_time_step_mse: 0.7125 - val_loss: 1.0970 - val_last_time_step_mse: 0.6466 Epoch 58/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1866 - last_time_step_mse: 0.7012 - val_loss: 1.0967 - val_last_time_step_mse: 0.6689 Epoch 59/100 13/13 [==============================] - 0s 12ms/step - loss: 1.1770 - last_time_step_mse: 0.6703 - val_loss: 1.0695 - val_last_time_step_mse: 0.6633 Epoch 60/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1761 - last_time_step_mse: 0.6837 - val_loss: 1.1135 - val_last_time_step_mse: 0.7483 Epoch 61/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1893 - last_time_step_mse: 0.7102 - val_loss: 1.0714 - val_last_time_step_mse: 0.6699 Epoch 62/100 13/13 [==============================] - 0s 
10ms/step - loss: 1.1705 - last_time_step_mse: 0.6796 - val_loss: 1.0639 - val_last_time_step_mse: 0.6578 Epoch 63/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1763 - last_time_step_mse: 0.6756 - val_loss: 1.0734 - val_last_time_step_mse: 0.6460 Epoch 64/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1660 - last_time_step_mse: 0.6793 - val_loss: 1.0624 - val_last_time_step_mse: 0.6434 Epoch 65/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1547 - last_time_step_mse: 0.6641 - val_loss: 1.0667 - val_last_time_step_mse: 0.6726 Epoch 66/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1681 - last_time_step_mse: 0.6738 - val_loss: 1.0587 - val_last_time_step_mse: 0.6570 Epoch 67/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1547 - last_time_step_mse: 0.6815 - val_loss: 1.0565 - val_last_time_step_mse: 0.6412 Epoch 68/100 13/13 [==============================] - 0s 9ms/step - loss: 1.1496 - last_time_step_mse: 0.6592 - val_loss: 1.0474 - val_last_time_step_mse: 0.6374 Epoch 69/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1479 - last_time_step_mse: 0.6682 - val_loss: 1.0551 - val_last_time_step_mse: 0.6287 Epoch 70/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1507 - last_time_step_mse: 0.6593 - val_loss: 1.0473 - val_last_time_step_mse: 0.6370 Epoch 71/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1402 - last_time_step_mse: 0.6504 - val_loss: 1.0497 - val_last_time_step_mse: 0.6624 Epoch 72/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1418 - last_time_step_mse: 0.6553 - val_loss: 1.0596 - val_last_time_step_mse: 0.6508 Epoch 73/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1438 - last_time_step_mse: 0.6692 - val_loss: 1.0440 - val_last_time_step_mse: 0.6251 Epoch 74/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1522 - 
last_time_step_mse: 0.6557 - val_loss: 1.0314 - val_last_time_step_mse: 0.6518 Epoch 75/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1381 - last_time_step_mse: 0.6577 - val_loss: 1.0277 - val_last_time_step_mse: 0.6524 Epoch 76/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1298 - last_time_step_mse: 0.6574 - val_loss: 1.0284 - val_last_time_step_mse: 0.6540 Epoch 77/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1310 - last_time_step_mse: 0.6538 - val_loss: 1.0280 - val_last_time_step_mse: 0.6267 Epoch 78/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1214 - last_time_step_mse: 0.6425 - val_loss: 1.0236 - val_last_time_step_mse: 0.6351 Epoch 79/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1300 - last_time_step_mse: 0.6497 - val_loss: 1.0271 - val_last_time_step_mse: 0.6413 Epoch 80/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1213 - last_time_step_mse: 0.6431 - val_loss: 1.0190 - val_last_time_step_mse: 0.6284 Epoch 81/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1231 - last_time_step_mse: 0.6503 - val_loss: 1.0217 - val_last_time_step_mse: 0.6597 Epoch 82/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1149 - last_time_step_mse: 0.6410 - val_loss: 1.0162 - val_last_time_step_mse: 0.6573 Epoch 83/100 13/13 [==============================] - 0s 9ms/step - loss: 1.1174 - last_time_step_mse: 0.6500 - val_loss: 1.0148 - val_last_time_step_mse: 0.6449 Epoch 84/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1209 - last_time_step_mse: 0.6476 - val_loss: 1.0217 - val_last_time_step_mse: 0.6157 Epoch 85/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1133 - last_time_step_mse: 0.6412 - val_loss: 1.0391 - val_last_time_step_mse: 0.6236 Epoch 86/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1252 - last_time_step_mse: 0.6486 - val_loss: 
1.0111 - val_last_time_step_mse: 0.6580 Epoch 87/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1139 - last_time_step_mse: 0.6413 - val_loss: 1.0059 - val_last_time_step_mse: 0.6342 Epoch 88/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1199 - last_time_step_mse: 0.6481 - val_loss: 1.0308 - val_last_time_step_mse: 0.6190 Epoch 89/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1198 - last_time_step_mse: 0.6360 - val_loss: 1.0154 - val_last_time_step_mse: 0.6809 Epoch 90/100 13/13 [==============================] - 0s 11ms/step - loss: 1.1079 - last_time_step_mse: 0.6506 - val_loss: 1.0092 - val_last_time_step_mse: 0.6193 Epoch 91/100 13/13 [==============================] - 0s 10ms/step - loss: 1.1023 - last_time_step_mse: 0.6231 - val_loss: 1.0040 - val_last_time_step_mse: 0.6134 Epoch 92/100 13/13 [==============================] - 0s 11ms/step - loss: 1.0947 - last_time_step_mse: 0.6238 - val_loss: 1.0006 - val_last_time_step_mse: 0.6254 Epoch 93/100 13/13 [==============================] - 0s 11ms/step - loss: 1.0934 - last_time_step_mse: 0.6291 - val_loss: 0.9930 - val_last_time_step_mse: 0.6191 Epoch 94/100 13/13 [==============================] - 0s 10ms/step - loss: 1.0940 - last_time_step_mse: 0.6315 - val_loss: 0.9950 - val_last_time_step_mse: 0.5956 Epoch 95/100 13/13 [==============================] - 0s 10ms/step - loss: 1.0943 - last_time_step_mse: 0.6219 - val_loss: 0.9959 - val_last_time_step_mse: 0.6072 Epoch 96/100 13/13 [==============================] - 0s 10ms/step - loss: 1.0975 - last_time_step_mse: 0.6285 - val_loss: 0.9890 - val_last_time_step_mse: 0.6138 Epoch 97/100 13/13 [==============================] - 0s 10ms/step - loss: 1.0921 - last_time_step_mse: 0.6229 - val_loss: 1.0002 - val_last_time_step_mse: 0.6262 Epoch 98/100 13/13 [==============================] - 0s 10ms/step - loss: 1.0964 - last_time_step_mse: 0.6344 - val_loss: 0.9932 - val_last_time_step_mse: 0.5942 
Epoch 99/100 13/13 [==============================] - 0s 10ms/step - loss: 1.0869 - last_time_step_mse: 0.6190 - val_loss: 0.9872 - val_last_time_step_mse: 0.6121 Epoch 100/100 13/13 [==============================] - 0s 10ms/step - loss: 1.0810 - last_time_step_mse: 0.6182 - val_loss: 0.9839 - val_last_time_step_mse: 0.6165
# Learning curves for the wave model: validation vs. training
# last-time-step MSE per epoch (val plotted first, matching earlier cells).
for metric_key, curve_label in [("val_last_time_step_mse", "val"),
                                ("last_time_step_mse", "train")]:
    plt.plot(history.history[metric_key], label=curve_label)
plt.legend()
<matplotlib.legend.Legend at 0x7f9fa0bd72b0>
# Forecast with all three models. conv and wave predict at every time
# step, so keep only the final step of each and add a trailing axis;
# the LSTM output is used directly.
y_pred_conv = model_conv.predict(X_test)[:, -1][..., np.newaxis]
y_pred_lstm = model.predict(X_test)
y_pred_wave = model_wave.predict(X_test)[:, -1][..., np.newaxis]

plt.figure(figsize=(20, 10))
for idx in range(10):
    plt.subplot(5, 2, idx + 1)
    plt.xticks([])
    # Observed input window (blue), then true future vs. each model's forecast.
    plt.plot(date_list_hr[:n_steps], X_test[idx, :, 0], "b.-")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps], y_test[idx, :, 0], "r.-", label="real")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps], y_pred_lstm[idx, :], "g.-", label="pred_lstm")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps], y_pred_conv[idx, :], "y.-", label="pred_conv")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps], y_pred_wave[idx, :, 0], "k.-", label="pred_wave")
    plt.legend()
    plt.xticks(np.arange(0, n_steps + pred_steps, step=12), rotation=90)
plt.show()
# Fix both NumPy and TensorFlow RNG seeds for reproducible weight init.
np.random.seed(42)
tf.random.set_seed(42)

# Two stacked LSTM layers returning full sequences, topped by a Dense
# head that emits pred_steps forecasts at every time step.
model_lstm = keras.models.Sequential()
model_lstm.add(keras.layers.LSTM(20, return_sequences=True))
model_lstm.add(keras.layers.LSTM(20, return_sequences=True))
model_lstm.add(keras.layers.Dense(pred_steps))

# Stop once validation loss has not improved for 20 epochs (restoring the
# best weights) and halve the learning rate whenever it plateaus.
early_stopping_cb = keras.callbacks.EarlyStopping(patience=20, restore_best_weights=True)
reduce_lr_cb = keras.callbacks.ReduceLROnPlateau(factor=0.5, verbose=1)

model_lstm.compile(loss="mse", optimizer="adam", metrics=[last_time_step_mse])
history = model_lstm.fit(X_train, y_train_2, epochs=200,
                         validation_data=(X_valid, y_valid_2),
                         callbacks=[early_stopping_cb, reduce_lr_cb])
Epoch 1/200 13/13 [==============================] - 1s 73ms/step - loss: 7.7427 - last_time_step_mse: 7.4987 - val_loss: 7.1405 - val_last_time_step_mse: 6.8124 Epoch 2/200 13/13 [==============================] - 0s 21ms/step - loss: 7.4541 - last_time_step_mse: 7.1593 - val_loss: 6.8041 - val_last_time_step_mse: 6.4262 Epoch 3/200 13/13 [==============================] - 0s 21ms/step - loss: 7.0513 - last_time_step_mse: 6.6863 - val_loss: 6.3921 - val_last_time_step_mse: 5.9926 Epoch 4/200 13/13 [==============================] - 0s 21ms/step - loss: 6.5809 - last_time_step_mse: 6.1337 - val_loss: 5.9110 - val_last_time_step_mse: 5.3889 Epoch 5/200 13/13 [==============================] - 0s 23ms/step - loss: 6.0800 - last_time_step_mse: 5.5114 - val_loss: 5.4537 - val_last_time_step_mse: 4.8864 Epoch 6/200 13/13 [==============================] - 0s 22ms/step - loss: 5.6277 - last_time_step_mse: 5.0076 - val_loss: 5.0586 - val_last_time_step_mse: 4.4787 Epoch 7/200 13/13 [==============================] - 0s 22ms/step - loss: 5.2426 - last_time_step_mse: 4.5876 - val_loss: 4.7250 - val_last_time_step_mse: 4.1497 Epoch 8/200 13/13 [==============================] - 0s 22ms/step - loss: 4.9119 - last_time_step_mse: 4.2516 - val_loss: 4.4475 - val_last_time_step_mse: 3.8498 Epoch 9/200 13/13 [==============================] - 0s 22ms/step - loss: 4.6360 - last_time_step_mse: 3.9393 - val_loss: 4.2105 - val_last_time_step_mse: 3.6102 Epoch 10/200 13/13 [==============================] - 0s 22ms/step - loss: 4.3945 - last_time_step_mse: 3.6953 - val_loss: 3.9914 - val_last_time_step_mse: 3.3814 Epoch 11/200 13/13 [==============================] - 0s 22ms/step - loss: 4.1754 - last_time_step_mse: 3.4738 - val_loss: 3.8026 - val_last_time_step_mse: 3.1879 Epoch 12/200 13/13 [==============================] - 0s 22ms/step - loss: 3.9819 - last_time_step_mse: 3.2632 - val_loss: 3.6411 - val_last_time_step_mse: 3.0327 Epoch 13/200 13/13 [==============================] 
- 0s 22ms/step - loss: 3.8167 - last_time_step_mse: 3.0928 - val_loss: 3.4798 - val_last_time_step_mse: 2.8655 Epoch 14/200 13/13 [==============================] - 0s 21ms/step - loss: 3.6509 - last_time_step_mse: 2.9161 - val_loss: 3.3469 - val_last_time_step_mse: 2.7340 Epoch 15/200 13/13 [==============================] - 0s 21ms/step - loss: 3.5029 - last_time_step_mse: 2.7693 - val_loss: 3.2069 - val_last_time_step_mse: 2.5820 Epoch 16/200 13/13 [==============================] - 0s 22ms/step - loss: 3.3629 - last_time_step_mse: 2.6203 - val_loss: 3.0754 - val_last_time_step_mse: 2.4636 Epoch 17/200 13/13 [==============================] - 0s 21ms/step - loss: 3.2296 - last_time_step_mse: 2.4862 - val_loss: 2.9554 - val_last_time_step_mse: 2.3384 Epoch 18/200 13/13 [==============================] - 0s 22ms/step - loss: 3.1084 - last_time_step_mse: 2.3561 - val_loss: 2.8472 - val_last_time_step_mse: 2.2151 Epoch 19/200 13/13 [==============================] - 0s 22ms/step - loss: 2.9998 - last_time_step_mse: 2.2485 - val_loss: 2.7665 - val_last_time_step_mse: 2.1189 Epoch 20/200 13/13 [==============================] - 0s 22ms/step - loss: 2.9000 - last_time_step_mse: 2.1406 - val_loss: 2.6482 - val_last_time_step_mse: 2.0149 Epoch 21/200 13/13 [==============================] - 0s 23ms/step - loss: 2.7951 - last_time_step_mse: 2.0425 - val_loss: 2.5656 - val_last_time_step_mse: 1.9490 Epoch 22/200 13/13 [==============================] - 0s 22ms/step - loss: 2.7105 - last_time_step_mse: 1.9524 - val_loss: 2.4836 - val_last_time_step_mse: 1.8540 Epoch 23/200 13/13 [==============================] - 0s 22ms/step - loss: 2.6355 - last_time_step_mse: 1.8785 - val_loss: 2.4168 - val_last_time_step_mse: 1.7846 Epoch 24/200 13/13 [==============================] - 0s 22ms/step - loss: 2.5600 - last_time_step_mse: 1.8003 - val_loss: 2.3516 - val_last_time_step_mse: 1.7177 Epoch 25/200 13/13 [==============================] - 0s 22ms/step - loss: 2.4967 - 
last_time_step_mse: 1.7477 - val_loss: 2.3052 - val_last_time_step_mse: 1.7204 Epoch 26/200 13/13 [==============================] - 0s 22ms/step - loss: 2.4473 - last_time_step_mse: 1.6982 - val_loss: 2.2427 - val_last_time_step_mse: 1.6164 Epoch 27/200 13/13 [==============================] - 0s 24ms/step - loss: 2.3958 - last_time_step_mse: 1.6437 - val_loss: 2.1956 - val_last_time_step_mse: 1.5814 Epoch 28/200 13/13 [==============================] - 0s 22ms/step - loss: 2.3478 - last_time_step_mse: 1.5953 - val_loss: 2.1510 - val_last_time_step_mse: 1.5348 Epoch 29/200 13/13 [==============================] - 0s 22ms/step - loss: 2.3008 - last_time_step_mse: 1.5494 - val_loss: 2.1137 - val_last_time_step_mse: 1.4858 Epoch 30/200 13/13 [==============================] - 0s 22ms/step - loss: 2.2610 - last_time_step_mse: 1.5250 - val_loss: 2.0876 - val_last_time_step_mse: 1.4865 Epoch 31/200 13/13 [==============================] - 0s 22ms/step - loss: 2.2355 - last_time_step_mse: 1.4966 - val_loss: 2.0479 - val_last_time_step_mse: 1.4376 Epoch 32/200 13/13 [==============================] - 0s 22ms/step - loss: 2.1991 - last_time_step_mse: 1.4568 - val_loss: 2.0213 - val_last_time_step_mse: 1.4155 Epoch 33/200 13/13 [==============================] - 0s 22ms/step - loss: 2.1638 - last_time_step_mse: 1.4284 - val_loss: 1.9860 - val_last_time_step_mse: 1.3891 Epoch 34/200 13/13 [==============================] - 0s 23ms/step - loss: 2.1256 - last_time_step_mse: 1.3900 - val_loss: 1.9553 - val_last_time_step_mse: 1.3428 Epoch 35/200 13/13 [==============================] - 0s 22ms/step - loss: 2.1026 - last_time_step_mse: 1.3707 - val_loss: 1.9268 - val_last_time_step_mse: 1.3274 Epoch 36/200 13/13 [==============================] - 0s 23ms/step - loss: 2.0716 - last_time_step_mse: 1.3462 - val_loss: 1.8973 - val_last_time_step_mse: 1.2991 Epoch 37/200 13/13 [==============================] - 0s 22ms/step - loss: 2.0390 - last_time_step_mse: 1.3201 - val_loss: 
1.8797 - val_last_time_step_mse: 1.2736 Epoch 38/200 13/13 [==============================] - 0s 22ms/step - loss: 2.0146 - last_time_step_mse: 1.2960 - val_loss: 1.8533 - val_last_time_step_mse: 1.2575 Epoch 39/200 13/13 [==============================] - 0s 22ms/step - loss: 1.9921 - last_time_step_mse: 1.2761 - val_loss: 1.8461 - val_last_time_step_mse: 1.2572 Epoch 40/200 13/13 [==============================] - 0s 23ms/step - loss: 1.9666 - last_time_step_mse: 1.2554 - val_loss: 1.8260 - val_last_time_step_mse: 1.2389 Epoch 41/200 13/13 [==============================] - 0s 22ms/step - loss: 1.9626 - last_time_step_mse: 1.2645 - val_loss: 1.8061 - val_last_time_step_mse: 1.2089 Epoch 42/200 13/13 [==============================] - 0s 22ms/step - loss: 1.9286 - last_time_step_mse: 1.2273 - val_loss: 1.7649 - val_last_time_step_mse: 1.1714 Epoch 43/200 13/13 [==============================] - 0s 22ms/step - loss: 1.9088 - last_time_step_mse: 1.1956 - val_loss: 1.7710 - val_last_time_step_mse: 1.1810 Epoch 44/200 13/13 [==============================] - 0s 22ms/step - loss: 1.8883 - last_time_step_mse: 1.1814 - val_loss: 1.7311 - val_last_time_step_mse: 1.1462 Epoch 45/200 13/13 [==============================] - 0s 22ms/step - loss: 1.8635 - last_time_step_mse: 1.1641 - val_loss: 1.7095 - val_last_time_step_mse: 1.1313 Epoch 46/200 13/13 [==============================] - 0s 22ms/step - loss: 1.8456 - last_time_step_mse: 1.1452 - val_loss: 1.7023 - val_last_time_step_mse: 1.1401 Epoch 47/200 13/13 [==============================] - 0s 23ms/step - loss: 1.8296 - last_time_step_mse: 1.1334 - val_loss: 1.6781 - val_last_time_step_mse: 1.1025 Epoch 48/200 13/13 [==============================] - 0s 22ms/step - loss: 1.8140 - last_time_step_mse: 1.1153 - val_loss: 1.6664 - val_last_time_step_mse: 1.0918 Epoch 49/200 13/13 [==============================] - 0s 22ms/step - loss: 1.7897 - last_time_step_mse: 1.1005 - val_loss: 1.6427 - val_last_time_step_mse: 1.0676 
Epoch 50/200 13/13 [==============================] - 0s 22ms/step - loss: 1.7781 - last_time_step_mse: 1.0836 - val_loss: 1.6338 - val_last_time_step_mse: 1.0709 Epoch 51/200 13/13 [==============================] - 0s 23ms/step - loss: 1.7627 - last_time_step_mse: 1.0756 - val_loss: 1.6294 - val_last_time_step_mse: 1.0953 Epoch 52/200 13/13 [==============================] - 0s 22ms/step - loss: 1.7537 - last_time_step_mse: 1.0732 - val_loss: 1.5998 - val_last_time_step_mse: 1.0287 Epoch 53/200 13/13 [==============================] - 0s 22ms/step - loss: 1.7592 - last_time_step_mse: 1.0595 - val_loss: 1.5949 - val_last_time_step_mse: 1.0485 Epoch 54/200 13/13 [==============================] - 0s 22ms/step - loss: 1.7264 - last_time_step_mse: 1.0510 - val_loss: 1.5880 - val_last_time_step_mse: 1.0253 Epoch 55/200 13/13 [==============================] - 0s 23ms/step - loss: 1.7165 - last_time_step_mse: 1.0391 - val_loss: 1.5707 - val_last_time_step_mse: 0.9921 Epoch 56/200 13/13 [==============================] - 0s 23ms/step - loss: 1.7009 - last_time_step_mse: 1.0160 - val_loss: 1.5729 - val_last_time_step_mse: 1.0143 Epoch 57/200 13/13 [==============================] - 0s 22ms/step - loss: 1.6878 - last_time_step_mse: 1.0013 - val_loss: 1.5542 - val_last_time_step_mse: 0.9782 Epoch 58/200 13/13 [==============================] - 0s 21ms/step - loss: 1.6821 - last_time_step_mse: 1.0020 - val_loss: 1.5273 - val_last_time_step_mse: 0.9631 Epoch 59/200 13/13 [==============================] - 0s 23ms/step - loss: 1.6585 - last_time_step_mse: 0.9789 - val_loss: 1.5100 - val_last_time_step_mse: 0.9577 Epoch 60/200 13/13 [==============================] - 0s 23ms/step - loss: 1.6484 - last_time_step_mse: 0.9713 - val_loss: 1.4975 - val_last_time_step_mse: 0.9492 Epoch 61/200 13/13 [==============================] - 0s 22ms/step - loss: 1.6281 - last_time_step_mse: 0.9572 - val_loss: 1.4865 - val_last_time_step_mse: 0.9429 Epoch 62/200 13/13 
[==============================] - 0s 22ms/step - loss: 1.6177 - last_time_step_mse: 0.9548 - val_loss: 1.4760 - val_last_time_step_mse: 0.9292 Epoch 63/200 13/13 [==============================] - 0s 22ms/step - loss: 1.6085 - last_time_step_mse: 0.9388 - val_loss: 1.4742 - val_last_time_step_mse: 0.9291 Epoch 64/200 13/13 [==============================] - 0s 22ms/step - loss: 1.5918 - last_time_step_mse: 0.9329 - val_loss: 1.4662 - val_last_time_step_mse: 0.9167 Epoch 65/200 13/13 [==============================] - 0s 21ms/step - loss: 1.5835 - last_time_step_mse: 0.9252 - val_loss: 1.4625 - val_last_time_step_mse: 0.9123 Epoch 66/200 13/13 [==============================] - 0s 22ms/step - loss: 1.5883 - last_time_step_mse: 0.9436 - val_loss: 1.4388 - val_last_time_step_mse: 0.8995 Epoch 67/200 13/13 [==============================] - 0s 22ms/step - loss: 1.5601 - last_time_step_mse: 0.9075 - val_loss: 1.4368 - val_last_time_step_mse: 0.9007 Epoch 68/200 13/13 [==============================] - 0s 22ms/step - loss: 1.5496 - last_time_step_mse: 0.9004 - val_loss: 1.4258 - val_last_time_step_mse: 0.9084 Epoch 69/200 13/13 [==============================] - 0s 22ms/step - loss: 1.5386 - last_time_step_mse: 0.8986 - val_loss: 1.4206 - val_last_time_step_mse: 0.9070 Epoch 70/200 13/13 [==============================] - 0s 22ms/step - loss: 1.5306 - last_time_step_mse: 0.8878 - val_loss: 1.4038 - val_last_time_step_mse: 0.8623 Epoch 71/200 13/13 [==============================] - 0s 23ms/step - loss: 1.5203 - last_time_step_mse: 0.8737 - val_loss: 1.3956 - val_last_time_step_mse: 0.8615 Epoch 72/200 13/13 [==============================] - 0s 22ms/step - loss: 1.5138 - last_time_step_mse: 0.8628 - val_loss: 1.3937 - val_last_time_step_mse: 0.8699 Epoch 73/200 13/13 [==============================] - 0s 22ms/step - loss: 1.5010 - last_time_step_mse: 0.8670 - val_loss: 1.3779 - val_last_time_step_mse: 0.8454 Epoch 74/200 13/13 [==============================] - 0s 
22ms/step - loss: 1.4932 - last_time_step_mse: 0.8508 - val_loss: 1.3631 - val_last_time_step_mse: 0.8386 Epoch 75/200 13/13 [==============================] - 0s 22ms/step - loss: 1.4822 - last_time_step_mse: 0.8376 - val_loss: 1.3509 - val_last_time_step_mse: 0.8365 Epoch 76/200 13/13 [==============================] - 0s 21ms/step - loss: 1.4745 - last_time_step_mse: 0.8395 - val_loss: 1.3425 - val_last_time_step_mse: 0.8299 Epoch 77/200 13/13 [==============================] - 0s 22ms/step - loss: 1.4618 - last_time_step_mse: 0.8287 - val_loss: 1.3336 - val_last_time_step_mse: 0.8282 Epoch 78/200 13/13 [==============================] - 0s 22ms/step - loss: 1.4548 - last_time_step_mse: 0.8259 - val_loss: 1.3289 - val_last_time_step_mse: 0.8247 Epoch 79/200 13/13 [==============================] - 0s 22ms/step - loss: 1.4513 - last_time_step_mse: 0.8179 - val_loss: 1.3192 - val_last_time_step_mse: 0.8155 Epoch 80/200 13/13 [==============================] - 0s 24ms/step - loss: 1.4426 - last_time_step_mse: 0.8106 - val_loss: 1.3149 - val_last_time_step_mse: 0.8181 Epoch 81/200 13/13 [==============================] - 0s 22ms/step - loss: 1.4340 - last_time_step_mse: 0.8120 - val_loss: 1.3069 - val_last_time_step_mse: 0.8108 Epoch 82/200 13/13 [==============================] - 0s 22ms/step - loss: 1.4275 - last_time_step_mse: 0.8019 - val_loss: 1.2988 - val_last_time_step_mse: 0.7876 Epoch 83/200 13/13 [==============================] - 0s 23ms/step - loss: 1.4241 - last_time_step_mse: 0.7994 - val_loss: 1.2921 - val_last_time_step_mse: 0.7845 Epoch 84/200 13/13 [==============================] - 0s 22ms/step - loss: 1.4150 - last_time_step_mse: 0.7986 - val_loss: 1.2965 - val_last_time_step_mse: 0.8035 Epoch 85/200 13/13 [==============================] - 0s 23ms/step - loss: 1.4063 - last_time_step_mse: 0.7817 - val_loss: 1.2903 - val_last_time_step_mse: 0.8013 Epoch 86/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3990 - 
last_time_step_mse: 0.7814 - val_loss: 1.2734 - val_last_time_step_mse: 0.7801 Epoch 87/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3936 - last_time_step_mse: 0.7804 - val_loss: 1.2754 - val_last_time_step_mse: 0.7896 Epoch 88/200 13/13 [==============================] - 0s 21ms/step - loss: 1.3921 - last_time_step_mse: 0.7854 - val_loss: 1.2656 - val_last_time_step_mse: 0.7774 Epoch 89/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3838 - last_time_step_mse: 0.7663 - val_loss: 1.2748 - val_last_time_step_mse: 0.7904 Epoch 90/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3816 - last_time_step_mse: 0.7720 - val_loss: 1.2622 - val_last_time_step_mse: 0.7708 Epoch 91/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3741 - last_time_step_mse: 0.7580 - val_loss: 1.2501 - val_last_time_step_mse: 0.7484 Epoch 92/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3649 - last_time_step_mse: 0.7496 - val_loss: 1.2468 - val_last_time_step_mse: 0.7468 Epoch 93/200 13/13 [==============================] - 0s 24ms/step - loss: 1.3599 - last_time_step_mse: 0.7485 - val_loss: 1.2379 - val_last_time_step_mse: 0.7408 Epoch 94/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3565 - last_time_step_mse: 0.7452 - val_loss: 1.2366 - val_last_time_step_mse: 0.7544 Epoch 95/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3626 - last_time_step_mse: 0.7507 - val_loss: 1.2306 - val_last_time_step_mse: 0.7340 Epoch 96/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3489 - last_time_step_mse: 0.7383 - val_loss: 1.2255 - val_last_time_step_mse: 0.7463 Epoch 97/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3449 - last_time_step_mse: 0.7339 - val_loss: 1.2266 - val_last_time_step_mse: 0.7284 Epoch 98/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3414 - last_time_step_mse: 0.7313 - val_loss: 
1.2261 - val_last_time_step_mse: 0.7341 Epoch 99/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3387 - last_time_step_mse: 0.7312 - val_loss: 1.2227 - val_last_time_step_mse: 0.7349 Epoch 100/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3333 - last_time_step_mse: 0.7329 - val_loss: 1.2152 - val_last_time_step_mse: 0.7282 Epoch 101/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3314 - last_time_step_mse: 0.7277 - val_loss: 1.2113 - val_last_time_step_mse: 0.7250 Epoch 102/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3297 - last_time_step_mse: 0.7242 - val_loss: 1.2117 - val_last_time_step_mse: 0.7310 Epoch 103/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3286 - last_time_step_mse: 0.7229 - val_loss: 1.2122 - val_last_time_step_mse: 0.7359 Epoch 104/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3328 - last_time_step_mse: 0.7414 - val_loss: 1.2128 - val_last_time_step_mse: 0.7278 Epoch 105/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3291 - last_time_step_mse: 0.7292 - val_loss: 1.2057 - val_last_time_step_mse: 0.7289 Epoch 106/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3233 - last_time_step_mse: 0.7194 - val_loss: 1.1985 - val_last_time_step_mse: 0.6987 Epoch 107/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3210 - last_time_step_mse: 0.7206 - val_loss: 1.2044 - val_last_time_step_mse: 0.7266 Epoch 108/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3183 - last_time_step_mse: 0.7149 - val_loss: 1.1986 - val_last_time_step_mse: 0.7136 Epoch 109/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3162 - last_time_step_mse: 0.7154 - val_loss: 1.2058 - val_last_time_step_mse: 0.7309 Epoch 110/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3216 - last_time_step_mse: 0.7386 - val_loss: 1.2174 - val_last_time_step_mse: 
0.7206 Epoch 111/200 13/13 [==============================] - 0s 24ms/step - loss: 1.3163 - last_time_step_mse: 0.7238 - val_loss: 1.1990 - val_last_time_step_mse: 0.7239 Epoch 112/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3113 - last_time_step_mse: 0.7132 - val_loss: 1.1955 - val_last_time_step_mse: 0.7058 Epoch 113/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3037 - last_time_step_mse: 0.7076 - val_loss: 1.1835 - val_last_time_step_mse: 0.7060 Epoch 114/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3053 - last_time_step_mse: 0.7101 - val_loss: 1.1816 - val_last_time_step_mse: 0.7022 Epoch 115/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3012 - last_time_step_mse: 0.7139 - val_loss: 1.1916 - val_last_time_step_mse: 0.7171 Epoch 116/200 13/13 [==============================] - 0s 22ms/step - loss: 1.3034 - last_time_step_mse: 0.7088 - val_loss: 1.1839 - val_last_time_step_mse: 0.7016 Epoch 117/200 13/13 [==============================] - 0s 24ms/step - loss: 1.3084 - last_time_step_mse: 0.7082 - val_loss: 1.1849 - val_last_time_step_mse: 0.7095 Epoch 118/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3073 - last_time_step_mse: 0.7160 - val_loss: 1.1814 - val_last_time_step_mse: 0.6886 Epoch 119/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3008 - last_time_step_mse: 0.7057 - val_loss: 1.1747 - val_last_time_step_mse: 0.7042 Epoch 120/200 13/13 [==============================] - 0s 23ms/step - loss: 1.3026 - last_time_step_mse: 0.7118 - val_loss: 1.1784 - val_last_time_step_mse: 0.6929 Epoch 121/200 13/13 [==============================] - 0s 21ms/step - loss: 1.2927 - last_time_step_mse: 0.6947 - val_loss: 1.1754 - val_last_time_step_mse: 0.7034 Epoch 122/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2949 - last_time_step_mse: 0.7076 - val_loss: 1.1765 - val_last_time_step_mse: 0.7007 Epoch 123/200 13/13 
[==============================] - 0s 23ms/step - loss: 1.2954 - last_time_step_mse: 0.7091 - val_loss: 1.1818 - val_last_time_step_mse: 0.6948 Epoch 124/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2924 - last_time_step_mse: 0.7024 - val_loss: 1.1771 - val_last_time_step_mse: 0.7034 Epoch 125/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2949 - last_time_step_mse: 0.7077 - val_loss: 1.1719 - val_last_time_step_mse: 0.6972 Epoch 126/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2947 - last_time_step_mse: 0.7118 - val_loss: 1.1849 - val_last_time_step_mse: 0.7096 Epoch 127/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2942 - last_time_step_mse: 0.7102 - val_loss: 1.1670 - val_last_time_step_mse: 0.6955 Epoch 128/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2906 - last_time_step_mse: 0.7116 - val_loss: 1.1805 - val_last_time_step_mse: 0.7319 Epoch 129/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2923 - last_time_step_mse: 0.7134 - val_loss: 1.1738 - val_last_time_step_mse: 0.7121 Epoch 130/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2851 - last_time_step_mse: 0.7021 - val_loss: 1.1648 - val_last_time_step_mse: 0.6904 Epoch 131/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2976 - last_time_step_mse: 0.7182 - val_loss: 1.1787 - val_last_time_step_mse: 0.7340 Epoch 132/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2881 - last_time_step_mse: 0.7072 - val_loss: 1.1634 - val_last_time_step_mse: 0.6989 Epoch 133/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2799 - last_time_step_mse: 0.6953 - val_loss: 1.1634 - val_last_time_step_mse: 0.7045 Epoch 134/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2778 - last_time_step_mse: 0.6992 - val_loss: 1.1664 - val_last_time_step_mse: 0.7016 Epoch 135/200 13/13 
[==============================] - 0s 23ms/step - loss: 1.2784 - last_time_step_mse: 0.6962 - val_loss: 1.1730 - val_last_time_step_mse: 0.7294 Epoch 136/200 13/13 [==============================] - 0s 24ms/step - loss: 1.2760 - last_time_step_mse: 0.7051 - val_loss: 1.1589 - val_last_time_step_mse: 0.6889 Epoch 137/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2752 - last_time_step_mse: 0.7009 - val_loss: 1.1561 - val_last_time_step_mse: 0.6952 Epoch 138/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2740 - last_time_step_mse: 0.6910 - val_loss: 1.1563 - val_last_time_step_mse: 0.6862 Epoch 139/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2724 - last_time_step_mse: 0.6930 - val_loss: 1.1567 - val_last_time_step_mse: 0.6937 Epoch 140/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2827 - last_time_step_mse: 0.7059 - val_loss: 1.1536 - val_last_time_step_mse: 0.6789 Epoch 141/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2843 - last_time_step_mse: 0.7174 - val_loss: 1.1627 - val_last_time_step_mse: 0.6914 Epoch 142/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2726 - last_time_step_mse: 0.6953 - val_loss: 1.1545 - val_last_time_step_mse: 0.6938 Epoch 143/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2693 - last_time_step_mse: 0.6900 - val_loss: 1.1517 - val_last_time_step_mse: 0.6861 Epoch 144/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2652 - last_time_step_mse: 0.6940 - val_loss: 1.1503 - val_last_time_step_mse: 0.6789 Epoch 145/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2747 - last_time_step_mse: 0.6972 - val_loss: 1.1622 - val_last_time_step_mse: 0.7158 Epoch 146/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2738 - last_time_step_mse: 0.6976 - val_loss: 1.1501 - val_last_time_step_mse: 0.6908 Epoch 147/200 13/13 
[==============================] - 0s 23ms/step - loss: 1.2684 - last_time_step_mse: 0.7000 - val_loss: 1.1502 - val_last_time_step_mse: 0.6833 Epoch 148/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2634 - last_time_step_mse: 0.6945 - val_loss: 1.1495 - val_last_time_step_mse: 0.6793 Epoch 149/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2726 - last_time_step_mse: 0.6981 - val_loss: 1.1578 - val_last_time_step_mse: 0.7100 Epoch 150/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2773 - last_time_step_mse: 0.7083 - val_loss: 1.1493 - val_last_time_step_mse: 0.6768 Epoch 151/200 13/13 [==============================] - 0s 24ms/step - loss: 1.2635 - last_time_step_mse: 0.7020 - val_loss: 1.1443 - val_last_time_step_mse: 0.6857 Epoch 152/200 13/13 [==============================] - 0s 24ms/step - loss: 1.2610 - last_time_step_mse: 0.6889 - val_loss: 1.1446 - val_last_time_step_mse: 0.6877 Epoch 153/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2618 - last_time_step_mse: 0.6877 - val_loss: 1.1481 - val_last_time_step_mse: 0.6928 Epoch 154/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2596 - last_time_step_mse: 0.6932 - val_loss: 1.1395 - val_last_time_step_mse: 0.6776 Epoch 155/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2573 - last_time_step_mse: 0.6873 - val_loss: 1.1416 - val_last_time_step_mse: 0.6829 Epoch 156/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2526 - last_time_step_mse: 0.6873 - val_loss: 1.1381 - val_last_time_step_mse: 0.6715 Epoch 157/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2594 - last_time_step_mse: 0.6834 - val_loss: 1.1454 - val_last_time_step_mse: 0.6835 Epoch 158/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2537 - last_time_step_mse: 0.6930 - val_loss: 1.1440 - val_last_time_step_mse: 0.6749 Epoch 159/200 13/13 
[==============================] - 0s 22ms/step - loss: 1.2554 - last_time_step_mse: 0.6801 - val_loss: 1.1372 - val_last_time_step_mse: 0.6785 Epoch 160/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2594 - last_time_step_mse: 0.6914 - val_loss: 1.1435 - val_last_time_step_mse: 0.6910 Epoch 161/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2571 - last_time_step_mse: 0.6873 - val_loss: 1.1371 - val_last_time_step_mse: 0.6828 Epoch 162/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2550 - last_time_step_mse: 0.7047 - val_loss: 1.1334 - val_last_time_step_mse: 0.6832 Epoch 163/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2562 - last_time_step_mse: 0.6890 - val_loss: 1.1455 - val_last_time_step_mse: 0.7059 Epoch 164/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2545 - last_time_step_mse: 0.6935 - val_loss: 1.1377 - val_last_time_step_mse: 0.6821 Epoch 165/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2536 - last_time_step_mse: 0.6903 - val_loss: 1.1380 - val_last_time_step_mse: 0.6845 Epoch 166/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2527 - last_time_step_mse: 0.6944 - val_loss: 1.1325 - val_last_time_step_mse: 0.6808 Epoch 167/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2459 - last_time_step_mse: 0.6857 - val_loss: 1.1350 - val_last_time_step_mse: 0.6840 Epoch 168/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2436 - last_time_step_mse: 0.6827 - val_loss: 1.1290 - val_last_time_step_mse: 0.6655 Epoch 169/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2418 - last_time_step_mse: 0.6782 - val_loss: 1.1335 - val_last_time_step_mse: 0.6807 Epoch 170/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2412 - last_time_step_mse: 0.6817 - val_loss: 1.1283 - val_last_time_step_mse: 0.6669 Epoch 171/200 13/13 
[==============================] - 0s 23ms/step - loss: 1.2379 - last_time_step_mse: 0.6771 - val_loss: 1.1276 - val_last_time_step_mse: 0.6786 Epoch 172/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2474 - last_time_step_mse: 0.6895 - val_loss: 1.1400 - val_last_time_step_mse: 0.7091 Epoch 173/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2457 - last_time_step_mse: 0.6876 - val_loss: 1.1298 - val_last_time_step_mse: 0.6853 Epoch 174/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2395 - last_time_step_mse: 0.6818 - val_loss: 1.1327 - val_last_time_step_mse: 0.6751 Epoch 175/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2388 - last_time_step_mse: 0.6759 - val_loss: 1.1264 - val_last_time_step_mse: 0.6734 Epoch 176/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2353 - last_time_step_mse: 0.6768 - val_loss: 1.1213 - val_last_time_step_mse: 0.6639 Epoch 177/200 13/13 [==============================] - 0s 24ms/step - loss: 1.2379 - last_time_step_mse: 0.6778 - val_loss: 1.1241 - val_last_time_step_mse: 0.6755 Epoch 178/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2381 - last_time_step_mse: 0.6800 - val_loss: 1.1366 - val_last_time_step_mse: 0.6830 Epoch 179/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2393 - last_time_step_mse: 0.6817 - val_loss: 1.1278 - val_last_time_step_mse: 0.6746 Epoch 180/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2337 - last_time_step_mse: 0.6803 - val_loss: 1.1241 - val_last_time_step_mse: 0.6764 Epoch 181/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2320 - last_time_step_mse: 0.6723 - val_loss: 1.1184 - val_last_time_step_mse: 0.6644 Epoch 182/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2304 - last_time_step_mse: 0.6758 - val_loss: 1.1213 - val_last_time_step_mse: 0.6720 Epoch 183/200 13/13 
[==============================] - 0s 24ms/step - loss: 1.2344 - last_time_step_mse: 0.6749 - val_loss: 1.1243 - val_last_time_step_mse: 0.6723 Epoch 184/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2324 - last_time_step_mse: 0.6763 - val_loss: 1.1205 - val_last_time_step_mse: 0.6844 Epoch 185/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2297 - last_time_step_mse: 0.6791 - val_loss: 1.1147 - val_last_time_step_mse: 0.6674 Epoch 186/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2298 - last_time_step_mse: 0.6790 - val_loss: 1.1176 - val_last_time_step_mse: 0.6701 Epoch 187/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2246 - last_time_step_mse: 0.6711 - val_loss: 1.1122 - val_last_time_step_mse: 0.6592 Epoch 188/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2251 - last_time_step_mse: 0.6675 - val_loss: 1.1156 - val_last_time_step_mse: 0.6711 Epoch 189/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2269 - last_time_step_mse: 0.6738 - val_loss: 1.1190 - val_last_time_step_mse: 0.6579 Epoch 190/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2242 - last_time_step_mse: 0.6752 - val_loss: 1.1124 - val_last_time_step_mse: 0.6727 Epoch 191/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2230 - last_time_step_mse: 0.6670 - val_loss: 1.1100 - val_last_time_step_mse: 0.6620 Epoch 192/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2220 - last_time_step_mse: 0.6695 - val_loss: 1.1116 - val_last_time_step_mse: 0.6577 Epoch 193/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2228 - last_time_step_mse: 0.6675 - val_loss: 1.1123 - val_last_time_step_mse: 0.6582 Epoch 194/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2219 - last_time_step_mse: 0.6704 - val_loss: 1.1066 - val_last_time_step_mse: 0.6569 Epoch 195/200 13/13 
[==============================] - 0s 24ms/step - loss: 1.2218 - last_time_step_mse: 0.6652 - val_loss: 1.1118 - val_last_time_step_mse: 0.6638 Epoch 196/200 13/13 [==============================] - 0s 22ms/step - loss: 1.2267 - last_time_step_mse: 0.6715 - val_loss: 1.1125 - val_last_time_step_mse: 0.6649 Epoch 197/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2219 - last_time_step_mse: 0.6669 - val_loss: 1.1095 - val_last_time_step_mse: 0.6671 Epoch 198/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2194 - last_time_step_mse: 0.6700 - val_loss: 1.1278 - val_last_time_step_mse: 0.7204 Epoch 199/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2334 - last_time_step_mse: 0.6956 - val_loss: 1.1118 - val_last_time_step_mse: 0.6698 Epoch 200/200 13/13 [==============================] - 0s 23ms/step - loss: 1.2227 - last_time_step_mse: 0.6741 - val_loss: 1.1141 - val_last_time_step_mse: 0.6741
# Learning curves: last-time-step MSE on validation and training data.
# (val plotted first, then train, so the default color cycle is unchanged)
hist = history.history
for metric_key, curve_label in (("val_last_time_step_mse", "val"),
                                ("last_time_step_mse", "train")):
    plt.plot(hist[metric_key], label=curve_label)
plt.legend()
<matplotlib.legend.Legend at 0x7f9fa7290b38>
# Map normalized model outputs back to the original scale.
# NOTE(review): assumes inputs were z-scored with train_mean/train_std — confirm upstream.
def _denorm(arr):
    return arr * train_std + train_mean

# The conv / wavenet / second LSTM models emit a prediction per time step;
# keep only the final step and restore a trailing feature axis.
y_pred_conv = _denorm(model_conv.predict(X_test)[:, -1][..., np.newaxis])
y_pred_lstm = _denorm(model.predict(X_test))
y_pred_wave = _denorm(model_wave.predict(X_test)[:, -1][..., np.newaxis])
y_pred_lstm_2 = _denorm(model_lstm.predict(X_test)[:, -1][..., np.newaxis])
# Baseline "persistence" forecast: just copy the previous day's values.
from sklearn.metrics import mean_squared_error

# Persistence baseline; only consumed by the commented-out "copy" plot below.
y_pred = X_test[:, :, 0]

# For the first 10 test windows, plot the history window (blue), the true
# future (red), and each model's forecast, all de-normalised back to the
# original power units.
# FIX: the notebook export dropped the loop-body indentation, leaving invalid
# Python — the per-subplot statements are re-indented under the for loop.
plt.figure(figsize=(25, 14))
for i in range(10):
    plt.subplot(5, 2, i + 1)
    plt.xticks([])
    plt.plot(date_list_hr[:n_steps],
             X_test[i, :, 0] * train_std + train_mean, "b.-")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps],
             y_test[i, :, 0] * train_std + train_mean, "r.-", label="real")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps],
             y_pred_lstm[i, :], "g.-", label="pred_lstm")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps],
             y_pred_conv[i, :], "y.-", label="pred_conv")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps],
             y_pred_wave[i, :, 0], "k.-", label="pred_wave")
    plt.plot(date_list_hr[n_steps: n_steps + pred_steps],
             y_pred_lstm_2[i, :, 0], "c.-", label="pred_lstm_2")
    # plt.plot(date_list_hr[n_steps: n_steps+n_steps],y_pred[i,:], "c.-", label="copy")
    plt.legend()
    # Label every 12th step with a rotated tick.
    # NOTE(review): assumed inside the loop so every subplot gets dated ticks
    # (overriding the plt.xticks([]) above) — confirm against the original notebook.
    plt.xticks(np.arange(0, n_steps + pred_steps, step=12), rotation=90)
plt.show()
# Exploratory scratch code (kept commented out, as in the original):
# y_pred_test = model_conv.predict(X_test[:,:12])[:, -1][..., np.newaxis]
# i=11
# plt.xticks(np.arange(0, n_steps+pred_steps, step=12), rotation=90)
# plt.plot(date_list_hr[:12],X_test[i,:12,0], "b.-")
# plt.plot(date_list_hr[12: 12+3],X_test[i,12:,0], "r.-", label="real")
# plt.plot(date_list_hr[12: 12+pred_steps], y_pred_test[i,:], "g.-", label="pred")
After evaluating four different kinds of models, we can see that the multi-step LSTM performs best, based on the mean squared error at the last time step.
# Evaluate every model on the held-out test set and print the scores
# (loss, plus last_time_step_mse where the model tracks it).
# Evaluation order matches the original: LSTM, LSTM_2, CONV, WAVENET.
results = {
    "LSTM": model.evaluate(X_test, y_test),
    "LSTM_2": model_lstm.evaluate(X_test, y_test_2),
    "CONV": model_conv.evaluate(X_test, y_test_2[:, 3::2]),
    "WAVENET": model_wave.evaluate(X_test, y_test_2),
}
print("".join(f"{model_name}: {score}\n" for model_name, score in results.items()))
2/2 [==============================] - 0s 4ms/step - loss: 0.3325 2/2 [==============================] - 0s 4ms/step - loss: 1.4379 - last_time_step_mse: 0.8068 2/2 [==============================] - 0s 4ms/step - loss: 1.0258 - last_time_step_mse: 0.7111 2/2 [==============================] - 0s 3ms/step - loss: 1.3090 - last_time_step_mse: 0.7952 LSTM: 0.33249232172966003 LSTM_2: [1.4379059076309204, 0.8067888021469116] CONV: [1.0257858037948608, 0.7111430168151855] WAVENET: [1.3089678287506104, 0.795218825340271]