# Using the pandas, numpy and sklearn libraries
```
import numpy as np
import pandas as pd

from keras.utils import to_categorical
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
class Data:
    """Load the train/test TSV files and prepare arrays for a 1D-CNN model."""

    def __init__(self):
        # The scaler is fitted on the training set only (see processing()).
        self.scaler = StandardScaler()
        # NOTE(review): assumes tab-separated files with 60 numeric feature
        # columns followed by a label column -- confirm against the data files.
        self.train = pd.read_csv("data_train.txt", sep="\t")
        self.test = pd.read_csv("data_test.txt", sep="\t")

    def processing(self):
        """Return (X_train, X_test, y_train_onehot, y_test_onehot).

        Features are standardized (fit on train, transform on test, avoiding
        test-set leakage) and reshaped to (n_samples, 60, 1) for Conv1D input;
        labels are one-hot encoded via pd.get_dummies.
        """
        x_train = np.asarray(self.train.iloc[:, :60])
        labels_train = self.train.iloc[:, -1].values.tolist()
        x_test = np.asarray(self.test.iloc[:, :60])
        labels_test = self.test.iloc[:, -1].values.tolist()
        # Fit the scaler on training data only, then reuse it on the test set.
        x_train = self.scaler.fit_transform(x_train)
        x_test = self.scaler.transform(x_test)
        # One-hot encode the class labels.
        # NOTE(review): dummies are built independently on train and test;
        # if a class is absent from one split the column layouts differ.
        y_train = np.asarray(pd.get_dummies(pd.DataFrame({"choice": labels_train})))
        y_test = np.asarray(pd.get_dummies(pd.DataFrame({"choice": labels_test})))
        # Conv1D expects a trailing channel axis: (samples, timesteps, 1).
        x_train = x_train.reshape((x_train.shape[0], x_train.shape[1], 1))
        x_test = x_test.reshape((x_test.shape[0], x_test.shape[1], 1))
        return x_train, x_test, y_train, y_test

    def transform(self, x):
        """Apply the scaler fitted in processing() to new data."""
        return self.scaler.transform(x)

    # Backward-compatible alias: the original method name had a typo.
    def transfrom(self, x):
        return self.transform(x)
```
# Using Neural Network frameworks: TensorFlow, Keras
```
from keras.models import Sequential
from keras.layers import Input, Conv1D, MaxPooling1D, Flatten, Activation, Dense
import tensorflow as tf
import keras
from tensorflow.keras import initializers
# make model
class Model:
    """1D-CNN classifier: two Conv1D + MaxPooling stages, then a dense head.

    Input is expected as (n_samples, shape, 1); the final Dense layer has
    6 units, so the model assumes 6 target classes.
    """

    def __init__(self, shape=60, activation='softmax', kernel_size_con1=2, kernel_size_con2=9,
                 activation_conv=None, kernel_initializer='glorot_uniform', optimizer='adam',
                 loss='categorical_crossentropy', monitor='loss', min_delta=0,
                 learning_rate=0.01):
        # Early-stopping settings, consumed later by fit().
        self.min_delta = min_delta
        self.monitor = monitor
        self.number_epoch = 0
        # Seeded initializers for reproducibility. Any other string is passed
        # through to Keras as-is; the original code left self.kernel_initializer
        # unset in that case and crashed with AttributeError at the first Conv1D.
        if kernel_initializer == "glorot_uniform":
            self.kernel_initializer = initializers.glorot_uniform(seed=0)
        elif kernel_initializer == "random_normal":
            self.kernel_initializer = initializers.random_normal(stddev=0.1, seed=0)
        else:
            self.kernel_initializer = kernel_initializer
        # Build the network: input is `shape` timesteps with 1 channel.
        self.model = Sequential()
        self.model.add(Input(shape=(shape, 1)))
        # `activation_conv` was previously accepted but never used; the default
        # None (linear) preserves the original behavior.
        self.model.add(Conv1D(filters=6, kernel_size=kernel_size_con1,
                              activation=activation_conv,
                              kernel_initializer=self.kernel_initializer))
        self.model.add(MaxPooling1D(pool_size=2))
        self.model.add(Conv1D(filters=12, kernel_size=kernel_size_con2,
                              activation=activation_conv,
                              kernel_initializer=self.kernel_initializer))
        self.model.add(MaxPooling1D(pool_size=2))
        self.model.add(Flatten())
        # NOTE(review): 6 output units -- assumes 6 classes; confirm with the data.
        self.model.add(Dense(units=6, activation=activation))
        # Honor the `optimizer` argument (previously ignored); the default
        # 'adam' keeps the original behavior of Adam with `learning_rate`.
        if optimizer == 'adam':
            opt = keras.optimizers.Adam(learning_rate=learning_rate)
        else:
            opt = optimizer
        # Use the `loss` argument instead of a hard-coded string (was ignored).
        self.model.compile(optimizer=opt, loss=loss, metrics=["accuracy"])

    def fit(self, X, y):
        """Train for up to 100 epochs with early stopping (patience 7).

        Records the stopping epoch in self.number_epoch; note that Keras'
        EarlyStopping reports 0 when training was never stopped early.
        """
        callback = tf.keras.callbacks.EarlyStopping(monitor=self.monitor, patience=7,
                                                    min_delta=self.min_delta)
        self.model.fit(X, y, batch_size=30, epochs=100, validation_split=0.1,
                       callbacks=[callback])
        self.number_epoch = callback.stopped_epoch
```