# havegc-v1
###### tags: `Code`
```python
# This Python 3 environment comes with many helpful analytics libraries installed
# It is defined by the kaggle/python Docker image: https://github.com/kaggle/docker-python
# For example, here's several helpful packages to load
import numpy as np  # linear algebra
import pandas as pd  # data processing, CSV file I/O (e.g. pd.read_csv)

# Input data files are available in the read-only "../input/" directory.
# Walk the competition input tree and print every file found under it.
import os

for dirname, _, filenames in os.walk('/kaggle/input'):
    for filename in filenames:
        print(os.path.join(dirname, filename))

# You can write up to 20GB to the current directory (/kaggle/working/) that gets preserved as output when you create a version using "Save & Run All"
# You can also write temporary files to /kaggle/temp/, but they won't be saved outside of the current session
```
/kaggle/input/g-research-crypto-forecasting/example_sample_submission.csv
/kaggle/input/g-research-crypto-forecasting/asset_details.csv
/kaggle/input/g-research-crypto-forecasting/example_test.csv
/kaggle/input/g-research-crypto-forecasting/train.csv
/kaggle/input/g-research-crypto-forecasting/supplemental_train.csv
/kaggle/input/g-research-crypto-forecasting/gresearch_crypto/competition.cpython-37m-x86_64-linux-gnu.so
/kaggle/input/g-research-crypto-forecasting/gresearch_crypto/__init__.py
```python
import pandas as pd
import numpy as np
from datetime import datetime
```
# Read data
```python
crypto_df = pd.read_csv('../input/g-research-crypto-forecasting/train.csv')
```
# Gap filling and interpolation
```python
def _resample_asset(df, asset_id):
    """Select one asset's rows and reindex them onto a gap-free 60-second grid.

    The raw train data has missing minutes for some assets; ``reindex`` with
    ``method="nearest"`` fills each missing timestamp with the closest
    observed row, so every series becomes a regular 1-minute time series
    indexed by timestamp.
    """
    asset = df[df["Asset_ID"] == asset_id].set_index("timestamp")
    full_grid = range(asset.index[0], asset.index[-1] + 60, 60)
    return asset.reindex(full_grid, method="nearest")


# One regular 1-minute frame per asset. The individual variable names are
# kept because later cells refer to them directly.
btc_cash = _resample_asset(crypto_df, 2)
bina = _resample_asset(crypto_df, 0)
btc = _resample_asset(crypto_df, 1)
eos = _resample_asset(crypto_df, 5)
eth_c = _resample_asset(crypto_df, 7)
eth = _resample_asset(crypto_df, 6)
lit = _resample_asset(crypto_df, 9)
mon = _resample_asset(crypto_df, 11)
tron = _resample_asset(crypto_df, 13)
ste = _resample_asset(crypto_df, 12)
car = _resample_asset(crypto_df, 3)
iota = _resample_asset(crypto_df, 8)
maker = _resample_asset(crypto_df, 10)
doge = _resample_asset(crypto_df, 4)
# fill
```
```python
# Keep only the most recent 1000 rows of each series.
# NOTE(review): the original comment said "a half of year", but 1000 rows of
# 1-minute data is ~16.7 hours — corrected here. `.iloc` makes the slicing
# explicitly positional rather than relying on []'s slice fallback.
btc_cash = btc_cash.iloc[-1000:]
bina = bina.iloc[-1000:]
btc = btc.iloc[-1000:]
eos = eos.iloc[-1000:]
eth_c = eth_c.iloc[-1000:]
eth = eth.iloc[-1000:]
lit = lit.iloc[-1000:]
mon = mon.iloc[-1000:]
tron = tron.iloc[-1000:]
ste = ste.iloc[-1000:]
car = car.iloc[-1000:]
iota = iota.iloc[-1000:]
maker = maker.iloc[-1000:]
doge = doge.iloc[-1000:]
```
```python
# Linearly interpolate any remaining NaN values, mutating each frame in
# place (inplace=True means the loop variable's mutation sticks).
for _frame in (btc_cash, bina, btc, eos, eth_c, eth, lit,
               mon, tron, ste, car, iota, maker, doge):
    _frame.interpolate(inplace=True)
```
```python
my_coin = [bina,btc,btc_cash,car,doge,eos,eth,eth_c,iota,lit,maker,mon,ste,tron]
```
```python
my_coin[3]
```
<div>
<style scoped>
.dataframe tbody tr th:only-of-type {
vertical-align: middle;
}
.dataframe tbody tr th {
vertical-align: top;
}
.dataframe thead th {
text-align: right;
}
</style>
<table border="1" class="dataframe">
<thead>
<tr style="text-align: right;">
<th></th>
<th>Asset_ID</th>
<th>Count</th>
<th>Open</th>
<th>High</th>
<th>Low</th>
<th>Close</th>
<th>Volume</th>
<th>VWAP</th>
<th>Target</th>
</tr>
<tr>
<th>timestamp</th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
<th></th>
</tr>
</thead>
<tbody>
<tr>
<th>1632122460</th>
<td>3</td>
<td>2706.0</td>
<td>2.145315</td>
<td>2.146367</td>
<td>2.139000</td>
<td>2.140881</td>
<td>697339.120607</td>
<td>2.142188</td>
<td>0.002877</td>
</tr>
<tr>
<th>1632122520</th>
<td>3</td>
<td>1235.0</td>
<td>2.142272</td>
<td>2.151000</td>
<td>2.140418</td>
<td>2.146135</td>
<td>967062.249315</td>
<td>2.146337</td>
<td>0.004043</td>
</tr>
<tr>
<th>1632122580</th>
<td>3</td>
<td>937.0</td>
<td>2.147650</td>
<td>2.154680</td>
<td>2.146000</td>
<td>2.151757</td>
<td>396960.943084</td>
<td>2.151083</td>
<td>0.003939</td>
</tr>
<tr>
<th>1632122640</th>
<td>3</td>
<td>522.0</td>
<td>2.152788</td>
<td>2.154500</td>
<td>2.150000</td>
<td>2.152481</td>
<td>218392.326888</td>
<td>2.152677</td>
<td>0.005361</td>
</tr>
<tr>
<th>1632122700</th>
<td>3</td>
<td>648.0</td>
<td>2.152414</td>
<td>2.153100</td>
<td>2.142900</td>
<td>2.144981</td>
<td>447728.929293</td>
<td>2.147516</td>
<td>0.004724</td>
</tr>
<tr>
<th>...</th>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
</tr>
<tr>
<th>1632182160</th>
<td>3</td>
<td>1388.0</td>
<td>2.083244</td>
<td>2.084400</td>
<td>2.063800</td>
<td>2.067367</td>
<td>718089.514189</td>
<td>2.072282</td>
<td>-0.001147</td>
</tr>
<tr>
<th>1632182220</th>
<td>3</td>
<td>758.0</td>
<td>2.067776</td>
<td>2.071188</td>
<td>2.064000</td>
<td>2.066106</td>
<td>380634.009697</td>
<td>2.067684</td>
<td>-0.001147</td>
</tr>
<tr>
<th>1632182280</th>
<td>3</td>
<td>855.0</td>
<td>2.065735</td>
<td>2.068002</td>
<td>2.061000</td>
<td>2.064901</td>
<td>392212.248603</td>
<td>2.065272</td>
<td>-0.001147</td>
</tr>
<tr>
<th>1632182340</th>
<td>3</td>
<td>1176.0</td>
<td>2.065554</td>
<td>2.082600</td>
<td>2.064000</td>
<td>2.080337</td>
<td>721884.158667</td>
<td>2.075315</td>
<td>-0.001147</td>
</tr>
<tr>
<th>1632182400</th>
<td>3</td>
<td>677.0</td>
<td>2.081135</td>
<td>2.083000</td>
<td>2.078000</td>
<td>2.079269</td>
<td>165056.022472</td>
<td>2.080944</td>
<td>-0.001147</td>
</tr>
</tbody>
</table>
<p>1000 rows × 9 columns</p>
</div>
# Normalization
```python
from sklearn.preprocessing import MinMaxScaler, OrdinalEncoder

# Numeric feature columns, in DataFrame column order. This order must match
# the positional scaler index used by the transform cell below.
FEATURE_COLS = ("Count", "Open", "High", "Low", "Close", "Volume", "VWAP")

# scaler_ar[coin][feature] holds a MinMaxScaler fitted on that coin/feature
# pair; a loop over FEATURE_COLS replaces the original 7 copy-pasted lines.
scaler_ar = [[0 for _ in range(7)] for _ in range(14)]
my_coin_normalization = []
for coin_num in range(14):
    for feat_num, col in enumerate(FEATURE_COLS):
        values = my_coin[coin_num][col].values.reshape(-1, 1)
        scaler_ar[coin_num][feat_num] = MinMaxScaler(feature_range=(0, 1)).fit(values)
    # Working copy with "timestamp" restored as a regular column.
    my_coin_normalization.append(my_coin[coin_num].reset_index())
```
```python
# Apply each fitted scaler to its matching column. The scalers in
# scaler_ar[coin_num] were fitted in the order Count, Open, High, Low,
# Close, Volume, VWAP — exactly the column order once timestamp, Asset_ID
# and Target are skipped, so a running counter lines them up.
for coin_num in range(14):
    frame = my_coin_normalization[coin_num]
    scaler_idx = 0
    for col in frame.columns:
        if col in ("timestamp", "Asset_ID", "Target"):
            continue
        scaled = scaler_ar[coin_num][scaler_idx].transform(frame[col].values.reshape(-1, 1))
        frame[col] = pd.Series(scaled.flatten())
        scaler_idx += 1
```
```python
my_coin_normalization[1]
```
<div>
<style scoped>
.dataframe tbody tr th:only-of-type {
vertical-align: middle;
}
.dataframe tbody tr th {
vertical-align: top;
}
.dataframe thead th {
text-align: right;
}
</style>
<table border="1" class="dataframe">
<thead>
<tr style="text-align: right;">
<th></th>
<th>timestamp</th>
<th>Asset_ID</th>
<th>Count</th>
<th>Open</th>
<th>High</th>
<th>Low</th>
<th>Close</th>
<th>Volume</th>
<th>VWAP</th>
<th>Target</th>
</tr>
</thead>
<tbody>
<tr>
<th>0</th>
<td>1632122460</td>
<td>1</td>
<td>0.196105</td>
<td>0.982898</td>
<td>0.981950</td>
<td>0.952856</td>
<td>0.942632</td>
<td>0.199357</td>
<td>0.962261</td>
<td>-0.005089</td>
</tr>
<tr>
<th>1</th>
<td>1632122520</td>
<td>1</td>
<td>0.066416</td>
<td>0.947137</td>
<td>0.970599</td>
<td>0.956042</td>
<td>0.973294</td>
<td>0.074019</td>
<td>0.964865</td>
<td>-0.004977</td>
</tr>
<tr>
<th>2</th>
<td>1632122580</td>
<td>1</td>
<td>0.059720</td>
<td>0.982790</td>
<td>1.000000</td>
<td>0.989917</td>
<td>1.000000</td>
<td>0.044503</td>
<td>1.000000</td>
<td>-0.003969</td>
</tr>
<tr>
<th>3</th>
<td>1632122640</td>
<td>1</td>
<td>0.015955</td>
<td>1.000000</td>
<td>0.994304</td>
<td>1.000000</td>
<td>0.991382</td>
<td>0.012080</td>
<td>0.994945</td>
<td>-0.003805</td>
</tr>
<tr>
<th>4</th>
<td>1632122700</td>
<td>1</td>
<td>0.070789</td>
<td>0.993706</td>
<td>0.983872</td>
<td>0.973049</td>
<td>0.961335</td>
<td>0.157436</td>
<td>0.979123</td>
<td>-0.003800</td>
</tr>
<tr>
<th>...</th>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
<td>...</td>
</tr>
<tr>
<th>995</th>
<td>1632182160</td>
<td>1</td>
<td>0.041168</td>
<td>0.137480</td>
<td>0.120737</td>
<td>0.139811</td>
<td>0.108727</td>
<td>0.024286</td>
<td>0.119639</td>
<td>0.002726</td>
</tr>
<tr>
<th>996</th>
<td>1632182220</td>
<td>1</td>
<td>0.044107</td>
<td>0.110536</td>
<td>0.096768</td>
<td>0.127257</td>
<td>0.095556</td>
<td>0.037983</td>
<td>0.100660</td>
<td>0.002726</td>
</tr>
<tr>
<th>997</th>
<td>1632182280</td>
<td>1</td>
<td>0.042740</td>
<td>0.095364</td>
<td>0.081497</td>
<td>0.113101</td>
<td>0.084419</td>
<td>0.029294</td>
<td>0.088616</td>
<td>0.002726</td>
</tr>
<tr>
<th>998</th>
<td>1632182340</td>
<td>1</td>
<td>0.113153</td>
<td>0.088466</td>
<td>0.134569</td>
<td>0.119988</td>
<td>0.148659</td>
<td>0.070517</td>
<td>0.119731</td>
<td>0.002726</td>
</tr>
<tr>
<th>999</th>
<td>1632182400</td>
<td>1</td>
<td>0.067065</td>
<td>0.146344</td>
<td>0.136748</td>
<td>0.167287</td>
<td>0.143671</td>
<td>0.064771</td>
<td>0.145409</td>
<td>0.002726</td>
</tr>
</tbody>
</table>
<p>1000 rows × 10 columns</p>
</div>
```python
# Split the label out of each frame: coin_label[i] holds the "Target"
# values for my_coin_normalization[i], which afterwards keeps only features.
# DataFrame.pop removes the column and returns it in one step.
coin_label = []
for frame in my_coin_normalization:
    coin_label.append(frame.pop("Target").values)
```
```python
# Sanity check: index 13 is the TRON frame; "Target" is gone, 9 columns remain.
print(my_coin_normalization[13])
# print(coin_label[13])
```
timestamp Asset_ID Count Open High Low Close \
0 1632122460 13 0.133915 0.648601 0.612721 0.691061 0.609594
1 1632122520 13 0.058229 0.629885 0.609719 0.699441 0.619055
2 1632122580 13 0.050138 0.643549 0.628304 0.706145 0.645244
3 1632122640 13 0.039174 0.662342 0.628304 0.719553 0.646493
4 1632122700 13 0.059719 0.661721 0.627843 0.700894 0.625780
.. ... ... ... ... ... ... ...
995 1632182160 13 0.042687 0.184422 0.147755 0.234637 0.151890
996 1632182220 13 0.096232 0.149415 0.112548 0.199106 0.111352
997 1632182280 13 0.068554 0.117510 0.083112 0.192179 0.110928
998 1632182340 13 0.064935 0.127597 0.141983 0.205587 0.163668
999 1632182400 13 0.032361 0.179286 0.150872 0.250279 0.166284
Volume VWAP
0 0.110543 0.653364
1 0.039387 0.662393
2 0.038228 0.678953
3 0.043761 0.682443
4 0.043714 0.672249
.. ... ...
995 0.045506 0.185419
996 0.090510 0.144249
997 0.066049 0.126235
998 0.057388 0.168112
999 0.037557 0.190998
[1000 rows x 9 columns]
# Time Series
```python
# Build one (n_rows, 7) feature matrix per coin by stacking every column
# except "timestamp" and "Asset_ID" ("Target" was already removed above).
my_coin_gen = []
for coin_num in range(14):
    features = []
    for col in my_coin_normalization[coin_num].columns:
        if col in ("timestamp", "Asset_ID"):
            continue
        feature_values = my_coin_normalization[coin_num][col].values
        features.append(feature_values.reshape(-1, 1))
    # np.column_stack generalizes the original hard-coded 7-way np.hstack
    # (which would break if the feature set ever changed); the unused
    # features_num counter was dropped.
    my_coin_gen.append(np.column_stack(features))
```
```python
print(my_coin_gen[13])        # TRON feature matrix
print(my_coin_gen[13].shape)  # expect (1000, 7)
```
[[0.13391527 0.64860088 0.61272077 ... 0.609594 0.11054304 0.65336369]
[0.05822866 0.62988527 0.6097195 ... 0.61905462 0.03938736 0.66239312]
[0.05013839 0.6435487 0.62830428 ... 0.64524369 0.03822792 0.67895314]
...
[0.0685544 0.11750965 0.08311209 ... 0.11092775 0.06604855 0.1262354 ]
[0.06493506 0.12759701 0.14198315 ... 0.1636676 0.0573879 0.16811239]
[0.03236108 0.17928609 0.15087152 ... 0.16628425 0.03755723 0.19099832]]
(1000, 7)
```python
# Reshape each label vector to a (n, 1) column, the 2-D target layout the
# time-series generators below expect. (The "lable" misspelling is kept
# because later cells refer to this name.)
my_coin_lable_gen = [labels.reshape(-1, 1) for labels in coin_label]
```
```python
from keras.preprocessing.sequence import TimeseriesGenerator

history_input = 15  # look-back window length, in 1-minute steps
stride_num = 1      # offset between consecutive windows

# One generator per coin, each yielding (batch, 15, 7) feature windows with
# the matching label for the step after the window.
coin_TimeGenertors = [
    TimeseriesGenerator(
        my_coin_gen[coin_num],
        my_coin_lable_gen[coin_num],
        stride=stride_num,
        length=history_input,
        batch_size=256,
    )
    for coin_num in range(14)
]
```
# Create model
```python
from keras.models import Sequential
from keras.layers import LSTM, Dense, Dropout


def Multi_Step_LSTM_model():
    """Build a small two-layer LSTM regressor for one asset.

    Input shape is (history_input, 7): a window of the 7 scaled features.
    Both LSTM layers use return_sequences=True, so the final linear Dense
    layer produces one prediction per time step of the window.
    """
    model = Sequential()
    # First LSTM layer; dropout for regularisation.
    model.add(LSTM(units=8, activation='relu', return_sequences=True,
                   input_shape=(history_input, 7)))
    model.add(Dropout(0.2))
    # Second LSTM layer, also returning the full sequence.
    model.add(LSTM(units=8, activation='relu', return_sequences=True))
    model.add(Dropout(0.2))
    # Linear output head for the regression target.
    model.add(Dense(units=1, activation="linear"))
    return model
```
```python
# One independently-initialised model per asset.
models = [Multi_Step_LSTM_model() for _ in range(14)]
```
2022-01-09 14:34:36.997466: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
2022-01-09 14:34:37.103055: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
2022-01-09 14:34:37.104427: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
2022-01-09 14:34:37.106452: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 AVX512F FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2022-01-09 14:34:37.107315: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
2022-01-09 14:34:37.108368: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
2022-01-09 14:34:37.109390: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
2022-01-09 14:34:38.779257: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
2022-01-09 14:34:38.780025: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
2022-01-09 14:34:38.780712: I tensorflow/stream_executor/cuda/cuda_gpu_executor.cc:937] successful NUMA node read from SysFS had negative value (-1), but there must be at least one NUMA node, so returning NUMA node zero
2022-01-09 14:34:38.781297: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1510] Created device /job:localhost/replica:0/task:0/device:GPU:0 with 15403 MB memory: -> device: 0, name: Tesla P100-PCIE-16GB, pci bus id: 0000:00:04.0, compute capability: 6.0
```python
models[0].summary()
```
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
lstm (LSTM) (None, 15, 8) 512
_________________________________________________________________
dropout (Dropout) (None, 15, 8) 0
_________________________________________________________________
lstm_1 (LSTM) (None, 15, 8) 544
_________________________________________________________________
dropout_1 (Dropout) (None, 15, 8) 0
_________________________________________________________________
dense (Dense) (None, 15, 1) 9
=================================================================
Total params: 1,065
Trainable params: 1,065
Non-trainable params: 0
_________________________________________________________________
```python
# Compile every model with MSE loss for the regression target. Note the
# accuracy metric is not informative for a continuous target (the training
# log reports 0.0 throughout).
for model in models:
    model.compile(optimizer='adam', loss='mean_squared_error', metrics=['accuracy'])
```
```python
# Train each model on its own generator. Model.fit_generator is deprecated
# (the original run emitted a UserWarning); Model.fit accepts generators
# directly, and steps_per_epoch defaults to len(generator) for a Sequence,
# so the explicit argument is unnecessary.
for coin_num in range(14):
    models[coin_num].fit(coin_TimeGenertors[coin_num], epochs=20, verbose=2)
```
/opt/conda/lib/python3.7/site-packages/keras/engine/training.py:1972: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.
warnings.warn('`Model.fit_generator` is deprecated and '
2022-01-09 14:34:41.176043: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:185] None of the MLIR Optimization Passes are enabled (registered 2)
Epoch 1/20
4/4 - 3s - loss: 9.6645e-05 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 9.1382e-05 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 7.2489e-05 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 6.2265e-05 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 6.2114e-05 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 5.6115e-05 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 5.2822e-05 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 5.3044e-05 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 5.2327e-05 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 5.1832e-05 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 5.1590e-05 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 5.1863e-05 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 5.1913e-05 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 5.1400e-05 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 5.1660e-05 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 5.1334e-05 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 5.1436e-05 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 5.1298e-05 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 5.1412e-05 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 5.1155e-05 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 7.6615e-04 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 5.2659e-04 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 3.7112e-04 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 3.2806e-04 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 2.4283e-04 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 1.9969e-04 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 1.6253e-04 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 1.2658e-04 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 1.1240e-04 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 9.5016e-05 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 8.2912e-05 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 7.3119e-05 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 6.5718e-05 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 5.5561e-05 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 5.2642e-05 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 4.9017e-05 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 4.1369e-05 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 3.9274e-05 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 3.5713e-05 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 3.3672e-05 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 0.0544 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 0.0360 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 0.0280 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 0.0210 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 0.0145 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 0.0118 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 0.0083 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 0.0066 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 0.0055 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 0.0044 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 0.0043 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 0.0038 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 0.0035 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 0.0030 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 0.0029 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 0.0028 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 0.0026 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 0.0025 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 0.0025 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 0.0025 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 0.0277 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 0.0162 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 0.0115 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 0.0068 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 0.0043 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 0.0031 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 0.0022 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 0.0017 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 0.0016 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 0.0015 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 9.9078e-04 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 8.9496e-04 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 8.2581e-04 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 7.5958e-04 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 7.1926e-04 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 6.6356e-04 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 0.0298 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 0.0163 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 0.0096 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 0.0047 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 0.0023 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 0.0025 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 0.0024 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 0.0019 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 0.0015 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 0.0010 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 9.6796e-04 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 8.9257e-04 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 8.7007e-04 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 8.0279e-04 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 7.7600e-04 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 7.3271e-04 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 7.1703e-04 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 6.0801e-04 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 3.1252e-04 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 2.0697e-04 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 1.7094e-04 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 1.5408e-04 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 1.2951e-04 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 1.0615e-04 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 8.5775e-05 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 7.2408e-05 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 6.5711e-05 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 5.7493e-05 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 5.5058e-05 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 4.6053e-05 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 4.2896e-05 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 3.8940e-05 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 3.4962e-05 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 3.0754e-05 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 2.8841e-05 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 2.5566e-05 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 2.4236e-05 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 2.9999e-04 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 1.4931e-04 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 1.0390e-04 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 7.6023e-05 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 5.3063e-05 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 4.3499e-05 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 3.3154e-05 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 2.6799e-05 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 2.1969e-05 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 1.9047e-05 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 1.7068e-05 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 1.4458e-05 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 1.2832e-05 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 1.2165e-05 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 1.1053e-05 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 1.0835e-05 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 1.0309e-05 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 9.2550e-06 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 8.7850e-06 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 8.6509e-06 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 0.0028 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 0.0015 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 9.0509e-04 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 4.8791e-04 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 4.2567e-04 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 4.4432e-04 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 4.0930e-04 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 3.5255e-04 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 2.8094e-04 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 2.2270e-04 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 1.9135e-04 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 1.6882e-04 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 1.5760e-04 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 1.4090e-04 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 1.1915e-04 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 1.0343e-04 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 9.5516e-05 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 7.9004e-05 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 7.3961e-05 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 6.3367e-05 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 0.0296 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 0.0159 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 0.0098 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 0.0041 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 0.0027 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 0.0019 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 0.0015 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 0.0015 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 0.0014 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 0.0010 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 9.5718e-04 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 9.4786e-04 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 8.9199e-04 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 8.5520e-04 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 8.7562e-04 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 6.1693e-04 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 1.5592e-04 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 1.3052e-04 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 1.4514e-04 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 1.4862e-04 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 1.3109e-04 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 9.9758e-05 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 7.2905e-05 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 6.2499e-05 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 5.5029e-05 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 5.1103e-05 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 4.5855e-05 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 4.1874e-05 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 3.9078e-05 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 3.4593e-05 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 3.3013e-05 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 3.0751e-05 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 2.9921e-05 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 2.8166e-05 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 0.0216 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 0.0137 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 0.0078 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 0.0052 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 0.0044 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 0.0040 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 0.0039 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 0.0035 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 0.0030 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 0.0026 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 0.0023 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 0.0020 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 0.0019 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 0.0016 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 0.0015 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 0.0012 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 0.0060 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 0.0031 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 0.0020 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 0.0020 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 0.0018 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 0.0014 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 0.0011 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 9.8826e-04 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 8.4238e-04 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 8.2500e-04 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 7.3410e-04 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 6.9664e-04 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 6.2974e-04 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 5.6441e-04 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 5.4568e-04 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 5.0780e-04 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 4.5471e-04 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 4.3799e-04 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 4.0894e-04 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 0.0013 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 4.3843e-04 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 1.1715e-04 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 1.0963e-04 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 1.4022e-04 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 1.4037e-04 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 1.1221e-04 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 8.2682e-05 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 6.3215e-05 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 5.7308e-05 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 5.0844e-05 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 4.3160e-05 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 3.5902e-05 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 3.0075e-05 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 2.6361e-05 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 2.3408e-05 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 2.1033e-05 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 1.8757e-05 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 1.7221e-05 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 1.5817e-05 - accuracy: 0.0000e+00
Epoch 1/20
4/4 - 2s - loss: 5.2030e-04 - accuracy: 0.0000e+00
Epoch 2/20
4/4 - 0s - loss: 2.1093e-04 - accuracy: 0.0000e+00
Epoch 3/20
4/4 - 0s - loss: 1.6922e-04 - accuracy: 0.0000e+00
Epoch 4/20
4/4 - 0s - loss: 1.6950e-04 - accuracy: 0.0000e+00
Epoch 5/20
4/4 - 0s - loss: 1.7261e-04 - accuracy: 0.0000e+00
Epoch 6/20
4/4 - 0s - loss: 1.5618e-04 - accuracy: 0.0000e+00
Epoch 7/20
4/4 - 0s - loss: 1.4116e-04 - accuracy: 0.0000e+00
Epoch 8/20
4/4 - 0s - loss: 1.2925e-04 - accuracy: 0.0000e+00
Epoch 9/20
4/4 - 0s - loss: 1.2336e-04 - accuracy: 0.0000e+00
Epoch 10/20
4/4 - 0s - loss: 1.1743e-04 - accuracy: 0.0000e+00
Epoch 11/20
4/4 - 0s - loss: 1.1287e-04 - accuracy: 0.0000e+00
Epoch 12/20
4/4 - 0s - loss: 1.0642e-04 - accuracy: 0.0000e+00
Epoch 13/20
4/4 - 0s - loss: 1.0331e-04 - accuracy: 0.0000e+00
Epoch 14/20
4/4 - 0s - loss: 1.0104e-04 - accuracy: 0.0000e+00
Epoch 15/20
4/4 - 0s - loss: 1.0121e-04 - accuracy: 0.0000e+00
Epoch 16/20
4/4 - 0s - loss: 1.0054e-04 - accuracy: 0.0000e+00
Epoch 17/20
4/4 - 0s - loss: 9.6288e-05 - accuracy: 0.0000e+00
Epoch 18/20
4/4 - 0s - loss: 9.4999e-05 - accuracy: 0.0000e+00
Epoch 19/20
4/4 - 0s - loss: 9.5346e-05 - accuracy: 0.0000e+00
Epoch 20/20
4/4 - 0s - loss: 9.3039e-05 - accuracy: 0.0000e+00
# Predict
```python
# Competition time-series API: gresearch_crypto is the compiled module shipped
# in the input directory (competition.cpython-37m-x86_64-linux-gnu.so).
import gresearch_crypto
# NOTE: make_env() may only be called once per kernel session.
env = gresearch_crypto.make_env()
# Yields (test_df, sample_prediction_df) pairs, one batch per timestamp.
iter_test = env.iter_test()
```
```python
# Seed each asset's rolling window with its most recent 15 normalized rows.
# (14 assets, ids 0..13; my_coin_normalization is built earlier in the notebook.)
coin_last_15days = [my_coin_normalization[asset_id][-15:] for asset_id in range(14)]
```
```python
# Display-only cell: inspect the seeded 15-row window for asset 0.
coin_last_15days[0]
```
<div>
<style scoped>
.dataframe tbody tr th:only-of-type {
vertical-align: middle;
}
.dataframe tbody tr th {
vertical-align: top;
}
.dataframe thead th {
text-align: right;
}
</style>
<table border="1" class="dataframe">
<thead>
<tr style="text-align: right;">
<th></th>
<th>timestamp</th>
<th>Asset_ID</th>
<th>Count</th>
<th>Open</th>
<th>High</th>
<th>Low</th>
<th>Close</th>
<th>Volume</th>
<th>VWAP</th>
</tr>
</thead>
<tbody>
<tr>
<th>985</th>
<td>1632181560</td>
<td>0</td>
<td>0.040508</td>
<td>0.130033</td>
<td>0.139683</td>
<td>0.204591</td>
<td>0.173342</td>
<td>0.034688</td>
<td>0.172549</td>
</tr>
<tr>
<th>986</th>
<td>1632181620</td>
<td>0</td>
<td>0.032269</td>
<td>0.174634</td>
<td>0.142857</td>
<td>0.238680</td>
<td>0.168077</td>
<td>0.028421</td>
<td>0.188933</td>
</tr>
<tr>
<th>987</th>
<td>1632181680</td>
<td>0</td>
<td>0.036560</td>
<td>0.167781</td>
<td>0.139683</td>
<td>0.221635</td>
<td>0.151429</td>
<td>0.038662</td>
<td>0.177352</td>
</tr>
<tr>
<th>988</th>
<td>1632181740</td>
<td>0</td>
<td>0.063680</td>
<td>0.149327</td>
<td>0.161905</td>
<td>0.223908</td>
<td>0.196062</td>
<td>0.041953</td>
<td>0.190003</td>
</tr>
<tr>
<th>989</th>
<td>1632181800</td>
<td>0</td>
<td>0.111054</td>
<td>0.192432</td>
<td>0.180952</td>
<td>0.255724</td>
<td>0.216727</td>
<td>0.070558</td>
<td>0.218481</td>
</tr>
<tr>
<th>990</th>
<td>1632181860</td>
<td>0</td>
<td>0.032956</td>
<td>0.215359</td>
<td>0.181714</td>
<td>0.278251</td>
<td>0.209514</td>
<td>0.061007</td>
<td>0.226600</td>
</tr>
<tr>
<th>991</th>
<td>1632181920</td>
<td>0</td>
<td>0.033814</td>
<td>0.210886</td>
<td>0.188857</td>
<td>0.275609</td>
<td>0.219923</td>
<td>0.042996</td>
<td>0.232338</td>
</tr>
<tr>
<th>992</th>
<td>1632181980</td>
<td>0</td>
<td>0.060590</td>
<td>0.221846</td>
<td>0.204984</td>
<td>0.286972</td>
<td>0.240451</td>
<td>0.075502</td>
<td>0.241243</td>
</tr>
<tr>
<th>993</th>
<td>1632182040</td>
<td>0</td>
<td>0.023344</td>
<td>0.240330</td>
<td>0.209524</td>
<td>0.298335</td>
<td>0.235962</td>
<td>0.029558</td>
<td>0.251019</td>
</tr>
<tr>
<th>994</th>
<td>1632182100</td>
<td>0</td>
<td>0.022142</td>
<td>0.235583</td>
<td>0.203175</td>
<td>0.298335</td>
<td>0.235810</td>
<td>0.018117</td>
<td>0.247489</td>
</tr>
<tr>
<th>995</th>
<td>1632182160</td>
<td>0</td>
<td>0.021456</td>
<td>0.235461</td>
<td>0.200000</td>
<td>0.264246</td>
<td>0.199501</td>
<td>0.034315</td>
<td>0.227997</td>
</tr>
<tr>
<th>996</th>
<td>1632182220</td>
<td>0</td>
<td>0.024717</td>
<td>0.199286</td>
<td>0.168254</td>
<td>0.247202</td>
<td>0.177649</td>
<td>0.025164</td>
<td>0.203653</td>
</tr>
<tr>
<th>997</th>
<td>1632182280</td>
<td>0</td>
<td>0.028665</td>
<td>0.178054</td>
<td>0.142857</td>
<td>0.235839</td>
<td>0.167240</td>
<td>0.036785</td>
<td>0.182657</td>
</tr>
<tr>
<th>998</th>
<td>1632182340</td>
<td>0</td>
<td>0.074837</td>
<td>0.171368</td>
<td>0.203175</td>
<td>0.238680</td>
<td>0.235916</td>
<td>0.059925</td>
<td>0.228204</td>
</tr>
<tr>
<th>999</th>
<td>1632182400</td>
<td>0</td>
<td>0.047545</td>
<td>0.235568</td>
<td>0.209524</td>
<td>0.292654</td>
<td>0.225051</td>
<td>0.046088</td>
<td>0.249292</td>
</tr>
</tbody>
</table>
</div>
```python
# Keep the last raw (un-normalized) row per asset; the prediction loop uses it
# to interpolate NaNs in incoming test rows. Target is dropped because test
# rows do not carry it.
coin_ori_data_1day = [
    my_coin[asset_id][-1:].drop(["Target"], axis=1) for asset_id in range(14)
]
```
```python
# Release the full training DataFrame before the prediction loop to keep the
# kernel under the memory limit; gc is used to force collection per batch.
del crypto_df
import gc
```
```python
# Main submission loop: for every batch delivered by the competition API,
# interpolate + normalize each asset row, append it to that asset's 15-row
# rolling window, and predict Target with the per-asset model trained above.
first_flag = [True for _ in range(14)]  # NOTE(review): set but never read below
for (test_df, sample_prediction_df) in iter_test:
    ans_list = []
    if test_df.empty == False:
        test_df = test_df.reset_index()
    else:
        # test_df is empty: nothing to predict for this timestamp.
        continue
    while test_df.empty == False:
        # Pop the first row and keep it as a one-row DataFrame.
        cur_coin = test_df.iloc[0]
        cur_coin_num = int(cur_coin["Asset_ID"])
        cur_coin = cur_coin.to_frame().T
        test_df = test_df.iloc[1:]
        test_df.reset_index(drop=True, inplace=True)
        if cur_coin_num < 0 or cur_coin_num > 13:
            # Unknown asset id: emit a small constant fallback prediction.
            ans_list.append(-0.003)
            continue
        # Fill NaNs in the incoming row by interpolating against the last raw
        # row kept for this asset, then advance that kept row.
        temp_data = coin_ori_data_1day[cur_coin_num]
        temp_data = pd.concat([temp_data, cur_coin], axis=0)
        temp_data.interpolate(inplace=True)
        cur_coin = temp_data.iloc[-1]
        cur_coin = cur_coin.to_frame().T
        coin_ori_data_1day[cur_coin_num] = cur_coin
        # Normalize each feature column with the scaler fitted at train time.
        feature_num = 0
        for col in cur_coin.columns:
            if col == "timestamp" or col == "Asset_ID" or col == "row_id" or col == "group_num":
                continue
            else:
                s_output = scaler_ar[cur_coin_num][feature_num].transform(cur_coin[col].values.reshape(-1, 1))
                cur_coin[col] = pd.Series(s_output.flatten())
                # BUG FIX: the original incremented an undefined name `num`,
                # raising NameError on the first feature column; the counter
                # indexing scaler_ar[...] is feature_num.
                feature_num += 1
        # Append the normalized row to the 15-row window in training column order.
        cur_coin = cur_coin[["timestamp", "Asset_ID", "Count", "Open", "High", "Low", "Close", "Volume", "VWAP"]]
        test = pd.concat([coin_last_15days[cur_coin_num], cur_coin], axis=0)
        # Slide the window forward by one row (drop the oldest).
        coin_last_15days[cur_coin_num] = test.iloc[1:]
        # Build the (timesteps, 1) column arrays for the generator, skipping id columns.
        features = []
        features_num = 0
        for col in test.columns:
            if col == "timestamp" or col == "Asset_ID":
                continue
            else:
                feature_values = test[col].values
                feature_reshape = feature_values.reshape(len(feature_values), 1)
                features.append(feature_reshape)
                features_num += 1
        test_gen = np.hstack((features[0], features[1], features[2], features[3], features[4], features[5], features[6]))
        test_gen_new = TimeseriesGenerator(test_gen, np.zeros(len(test_gen)), stride=1, length=15, batch_size=1)
        # Predict with this asset's model.
        ans = models[cur_coin_num].predict(test_gen_new)
        # NOTE(review): indexing column 1 assumes the model outputs >= 2 values
        # per sample — confirm against the model definition above.
        ans = ans[:, 1]
        ans_list.append(ans)
        # Drop per-iteration temporaries and collect to keep peak memory flat.
        del temp_data
        del feature_num
        del features_num
        del cur_coin
        del cur_coin_num
        del test
        del features
        del feature_values
        del feature_reshape
        del test_gen
        del ans
        gc.collect()
    sample_prediction_df['Target'] = ans_list
    env.predict(sample_prediction_df)
    del ans_list
    gc.collect()
```
This version of the API is not optimized and should not be used to estimate the runtime of your code on the hidden test set.