-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtrain_CFG.py
More file actions
73 lines (60 loc) · 2.65 KB
/
train_CFG.py
File metadata and controls
73 lines (60 loc) · 2.65 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, LSTM, SimpleRNN, GRU, Masking
from keras.optimizers import RMSprop, SGD, Adam, Nadam
from keras.callbacks import ModelCheckpoint
from data import getReadyData
import numpy as np
# Load the train/validation/test split once so rnn_from_cfg can be called
# repeatedly (e.g. by an outer SMAC3 optimization loop) without re-reading
# the data.  NOTE(review): this is a module-level side effect — merely
# importing this module triggers the (potentially slow) data load.
X_train, X_val, X_test, y_train, y_val, y_test = getReadyData()
def rnn_from_cfg(cfg):
    """Build, train, and evaluate a recurrent model from a config dict.

    Parameters
    ----------
    cfg : mapping with keys 'cell_size', 'n_cell' (1 or 2), 'nn_type'
        ('LSTM', 'RNN', or anything else for GRU), 'dropout', 'activation',
        'optimizer' ('adam', 'sgd', 'nadam', 'RMSprop'), 'optimizer_lr',
        'learning_decay_rate', and 'epochs'.

    Returns
    -------
    float
        Test-set loss of the trained model (first element of
        ``model.evaluate``, which returns [loss, metric]).

    Raises
    ------
    ValueError
        If ``cfg['optimizer']`` is not one of the supported names.
    """
    # Encode every hyperparameter in the checkpoint filename so each
    # configuration gets its own best-model file.
    saved_model_path = './SMAC3out/models/{}_{}_{}_{}_{}_{}_{}_{}.hdf5'.format(
        cfg['cell_size'], cfg['n_cell'], cfg['nn_type'], cfg['dropout'],
        cfg['activation'], cfg['optimizer'], cfg['optimizer_lr'],
        cfg['learning_decay_rate'])

    model = Sequential()
    # Skip timesteps whose features all equal 10.0 — presumably the padding
    # value emitted by getReadyData (TODO confirm).  Inputs are sequences of
    # 20 timesteps with 2 features each.
    model.add(Masking(10.0, input_shape=(20, 2)))

    # 'nn_type' selects the recurrent cell; any unrecognized value falls
    # through to GRU, matching the original if/elif/else behavior.
    cell_cls = {'LSTM': LSTM, 'RNN': SimpleRNN}.get(cfg['nn_type'], GRU)
    if cfg['n_cell'] == 2:
        # Stacked variant: the lower layer must return full sequences.
        model.add(cell_cls(cfg['cell_size'], return_sequences=True))
    model.add(cell_cls(cfg['cell_size']))

    model.add(Dropout(cfg['dropout']))
    model.add(Dense(2))
    model.add(Activation(cfg['activation']))

    lr = cfg['optimizer_lr']
    decay = cfg['learning_decay_rate']
    if cfg['optimizer'] == 'adam':
        opt = Adam(lr=lr, decay=decay)
    elif cfg['optimizer'] == 'sgd':
        opt = SGD(lr=lr, decay=decay)
    elif cfg['optimizer'] == 'nadam':
        # Nadam takes schedule_decay rather than the plain decay argument.
        opt = Nadam(lr=lr, schedule_decay=decay)
    elif cfg['optimizer'] == 'RMSprop':
        opt = RMSprop(lr=lr, decay=decay)
    else:
        # The original fell through silently and later crashed with a
        # NameError on `opt`; fail fast with a clear message instead.
        raise ValueError('unsupported optimizer: {!r}'.format(cfg['optimizer']))

    model.compile(optimizer=opt, loss='categorical_crossentropy',
                  metrics=['categorical_accuracy'])
    model.summary()
    # Checkpoint only the best model (by validation loss) under the
    # config-specific path; validation_data is a tuple per the Keras API.
    model.fit(X_train, y_train, batch_size=1024, epochs=cfg['epochs'],
              validation_data=(X_val, y_val),
              callbacks=[ModelCheckpoint(saved_model_path, monitor='val_loss',
                                         verbose=2, save_best_only=True)])
    return model.evaluate(X_test, y_test)[0]
def main():
    """Load the best SMAC3 configuration, retrain it for 200 epochs, and report the test loss."""
    # NOTE(review): hard-coded absolute Windows path — consider making it a
    # CLI argument or environment variable.
    # best.cfg is not a .npy file, so np.load falls back to unpickling it;
    # modern numpy (>= 1.16.3) requires allow_pickle=True for that fallback
    # (older versions allowed it by default).
    best_cfg = np.load("C:\\NNwork\\HFSF\\SMAC3out\\best.cfg",
                       allow_pickle=True)
    best_cfg['epochs'] = 200
    results = rnn_from_cfg(best_cfg)
    # The original discarded the returned test loss; surface it.
    print('test loss: {}'.format(results))

if __name__ == '__main__':
    main()