from keras import backend as K
from keras.callbacks import EarlyStopping
from keras.layers import LSTM, SimpleRNN, GRU
from keras.layers import Dense, Dropout, Activation, Embedding
from keras.models import Sequential
## DNN MODEL ##
# Fully-connected classifier: 23 input features -> 60 -> 40 -> 20 -> 3-class softmax.
model = Sequential()
model.add(Dense(60, input_dim=23, activation='relu', kernel_initializer='uniform'))
model.add(Dense(40, activation='relu'))
model.add(Dense(20, activation='relu'))
model.add(Dense(3, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# Override the optimizer's learning rate AFTER compile: the original called
# K.set_value before `model` was even created, which raises NameError (and the
# optimizer only exists once compile() has run).
K.set_value(model.optimizer.learning_rate, 0.0001)
# Stop training when val_loss stops improving; pass `callbacks=[es]` to fit().
es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=32)
## GRU MODEL ##
# Recurrent classifier: single 100-unit GRU feeding a 3-class softmax head.
model = Sequential()
# NOTE(review): `input_dim` on a recurrent layer is legacy Keras API; modern code
# would pass input_shape=(timesteps, features). X's shape isn't visible here, so
# the original call is kept -- confirm X is 3-D (samples, timesteps, features).
model.add(GRU(100, input_dim=X.shape[1], kernel_initializer='uniform'))
model.add(Dense(3))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='Nadam', metrics=['accuracy'])
# Set the learning rate AFTER compile: the original called K.set_value before
# `model` existed (NameError) and before an optimizer was attached.
K.set_value(model.optimizer.learning_rate, 0.0001)
# Stop training when val_loss stops improving; pass `callbacks=[es]` to fit().
es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=20)
## LSTM MODEL ##
# Recurrent classifier: single 100-unit LSTM feeding a 3-class output head.
# TODO: consider increasing the LSTM unit count (translated from the original
# Turkish note "LSTM DUGUM SAYISINI ARTIR").
model = Sequential()
# NOTE(review): `input_dim` on a recurrent layer is legacy Keras API; confirm X
# is 3-D (samples, timesteps, features) or switch to input_shape=(...).
model.add(LSTM(100, input_dim=X.shape[1], kernel_initializer='uniform'))
model.add(Dense(3))
# Fixed: the original used 'sigmoid' here, but categorical_crossentropy over 3
# mutually-exclusive classes requires outputs that form a probability
# distribution -- every sibling model in this file correctly uses softmax.
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# Set the learning rate AFTER compile: the original called K.set_value before
# `model` existed (NameError) and before an optimizer was attached.
K.set_value(model.optimizer.learning_rate, 0.0001)
# Stop training when val_loss stops improving; pass `callbacks=[es]` to fit().
es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=20)
## RNN MODEL ##
# Recurrent classifier: single 100-unit SimpleRNN feeding a 3-class softmax head.
model = Sequential()
# NOTE(review): `input_dim` on a recurrent layer is legacy Keras API; confirm X
# is 3-D (samples, timesteps, features) or switch to input_shape=(...).
model.add(SimpleRNN(100, input_dim=X.shape[1], kernel_initializer='uniform'))
model.add(Dense(3))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='Adam', metrics=['accuracy'])
# Set the learning rate AFTER compile: the original called K.set_value before
# `model` existed (NameError) and before an optimizer was attached.
K.set_value(model.optimizer.learning_rate, 0.0008)
# Stop training when val_loss stops improving; pass `callbacks=[es]` to fit().
es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=40)