-rw-r--r--  lenet.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/lenet.py b/lenet.py
--- a/lenet.py
+++ b/lenet.py
@@ -101,7 +101,7 @@ def plot_history(history, metric = None):
   plt.ylabel('Loss')
   plt.xlabel('Epoch')
 
-def train_classifier(x_train, y_train, x_val, y_val, batch_size=128, epochs=100, metrics=[categorical_accuracy], optimizer = None):
+def train_classifier(x_train, y_train, x_val, y_val, batch_size=128, epochs=100, metrics=[categorical_accuracy], optimizer = None, keep_training = False):
   shape = (32, 32, 1)
 
   # Pad data to 32x32 (MNIST is 28x28)
@@ -114,10 +114,12 @@ def train_classifier(x_train, y_train, x_val, y_val, batch_size=128, epochs=100,
       optimizer = optimizers.SGD(lr=0.001, decay=1e-6, momentum=0.9, nesterov=True)
   model.compile(loss='categorical_crossentropy', metrics=metrics, optimizer=optimizer)
-
+  if keep_training:
+    model.load_weights('./weights.h5')
   history = model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, validation_data = (x_val, y_val))
   plot_history(history, 'categorical_accuracy')
   plot_history(history)
+  model.save_weights('./weights.h5')
 
   return model
 
 def test_classifier(model, x_test, y_true):
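
For context, a minimal sketch of how the changed function might be called to resume training from the saved weights file. The import path, MNIST loading, preprocessing, and validation split below are illustrative assumptions, not part of this change:

# Hypothetical usage sketch -- not part of this commit. Assumes train_classifier
# is importable from lenet.py and that MNIST is loaded via keras.datasets; the
# scaling and validation split are illustrative assumptions.
from keras.datasets import mnist
from keras.utils import to_categorical
from lenet import train_classifier

(x_train, y_train), _ = mnist.load_data()
x_train = x_train.reshape(-1, 28, 28, 1).astype('float32') / 255.0  # assumed scaling
y_train = to_categorical(y_train, 10)

# Assumed split: hold out the last 5000 samples for validation.
x_val, y_val = x_train[-5000:], y_train[-5000:]
x_train, y_train = x_train[:-5000], y_train[:-5000]

# First run: trains from scratch and writes ./weights.h5 when fitting finishes.
model = train_classifier(x_train, y_train, x_val, y_val, epochs=10)

# Later run: keep_training=True loads ./weights.h5 before fitting, so training
# resumes from the saved state instead of starting over.
model = train_classifier(x_train, y_train, x_val, y_val, epochs=10, keep_training=True)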
