author | nunzip <np.scarh@gmail.com> | 2019-03-06 13:13:25 +0000 |
---|---|---|
committer | nunzip <np.scarh@gmail.com> | 2019-03-06 13:13:25 +0000 |
commit | f2d09edb7fb511364347ae9df1915a6655f45a0a (patch) | |
tree | 6b719c2bdbe0047d7be4f746b23a2d8640a447b9 | |
parent | 566e8c4c6fb643b3450365384331e9b4df863fdc (diff) | |
Add an option to keep training the network with different splits
-rw-r--r-- | lenet.py | 6 |
1 file changed, 5 insertions(+), 1 deletion(-)
```diff
@@ -101,7 +101,7 @@ def plot_history(history, metric = None):
     plt.ylabel('Loss')
     plt.xlabel('Epoch')
 
-def train_classifier(x_train, y_train, x_val, y_val, batch_size=128, epochs=100, metrics=[categorical_accuracy], optimizer = None):
+def train_classifier(x_train, y_train, x_val, y_val, batch_size=128, epochs=100, metrics=[categorical_accuracy], optimizer = None, keep_training = False):
 
     shape = (32, 32, 1)
     # Pad data to 32x32 (MNIST is 28x28)
@@ -114,8 +114,12 @@ def train_classifier(x_train, y_train, x_val, y_val, batch_size=128, epochs=100,
         optimizer = optimizers.SGD(lr=0.001, decay=1e-6, momentum=0.9, nesterov=True)
 
     model.compile(loss='categorical_crossentropy', metrics=metrics, optimizer=optimizer)
+
+    if keep_training:
+        model.load_weights('./model_gan.h5', by_name=False)
 
     history = model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, validation_data = (x_val, y_val))
+    model.save_weights('./model_gan.h5')
 
     plot_history(history, 'categorical_accuracy')
     plot_history(history)
     return model
```
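A minimal usage sketch (not part of this commit) of how the new `keep_training` flag could chain training runs across different splits: the first call trains from scratch and saves `./model_gan.h5`, and later calls reload those weights before fitting on a new split. The `x_all`/`y_all` arrays and the use of scikit-learn's `train_test_split` are assumptions for illustration only.

```python
# Illustrative sketch only: resume training across several train/validation
# splits using the keep_training flag added in this commit.
# Assumes lenet.train_classifier (as modified above) and that x_all / y_all
# hold the full, preprocessed MNIST-style dataset (hypothetical names).
from sklearn.model_selection import train_test_split
from lenet import train_classifier

for seed in (0, 1, 2):
    # Draw a different train/validation split each round.
    x_train, x_val, y_train, y_val = train_test_split(
        x_all, y_all, test_size=0.2, random_state=seed)

    # First round trains from scratch; later rounds reload ./model_gan.h5
    # (saved at the end of each call) and keep training on the new split.
    model = train_classifier(x_train, y_train, x_val, y_val,
                             epochs=20, keep_training=(seed > 0))
```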