From 5d779afb5a9511323e3402537af172d68930d85c Mon Sep 17 00:00:00 2001
From: Vasil Zlatanov
Date: Wed, 6 Mar 2019 23:49:46 +0000
Subject: Replace softmax with relu as we apply it in the function anyway

---
 lenet.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/lenet.py b/lenet.py
index 5ed6705..97479ed 100644
--- a/lenet.py
+++ b/lenet.py
@@ -64,7 +64,8 @@ def get_lenet(shape):
     model.add(Dense(units=120, activation='relu'))
     model.add(Dense(units=84, activation='relu'))
-    model.add(Dense(units=10, activation = 'softmax'))
+    #model.add(Dense(units=10, activation = 'softmax'))
+    model.add(Dense(units=10, activation = 'relu'))
 
     return model
 
 def plot_history(history, metric = None):
-- 
cgit v1.2.3-54-g00ecf
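
For context, here is a minimal, runnable sketch of how the changed final layer fits into a Keras LeNet and how a caller can apply softmax to the raw outputs afterwards, which is presumably what the commit message means by "we apply it in the function anyway". Only the three Dense lines come from the patch; the convolutional layers, the names get_lenet_sketch and softmax, and the input shape are assumptions for illustration, not taken from the repository.

    # A minimal sketch, assuming Keras 2.x.
    import numpy as np
    from keras.models import Sequential
    from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense

    def get_lenet_sketch(shape):
        # Hypothetical reconstruction of a classic LeNet-5 layout;
        # only the last three Dense layers appear in the patch above.
        model = Sequential()
        model.add(Conv2D(6, kernel_size=5, activation='relu', input_shape=shape))
        model.add(MaxPooling2D(pool_size=2))
        model.add(Conv2D(16, kernel_size=5, activation='relu'))
        model.add(MaxPooling2D(pool_size=2))
        model.add(Flatten())
        model.add(Dense(units=120, activation='relu'))
        model.add(Dense(units=84, activation='relu'))
        # Final layer as changed by this commit: 'relu' instead of 'softmax'.
        model.add(Dense(units=10, activation='relu'))
        return model

    def softmax(x, axis=-1):
        # Numerically stable softmax, applied by the caller; the patch
        # implies an equivalent step happens downstream of the model.
        e = np.exp(x - np.max(x, axis=axis, keepdims=True))
        return e / np.sum(e, axis=axis, keepdims=True)

    model = get_lenet_sketch((32, 32, 1))
    raw = model.predict(np.zeros((1, 32, 32, 1)))
    probs = softmax(raw)  # rows now sum to 1, usable as class probabilities

One caveat worth flagging: with the softmax deferred like this, the usual convention is a linear (no-activation) output layer so the downstream softmax sees raw logits; the patch instead keeps 'relu' on the last Dense layer, which zeroes out negative pre-activations before the softmax is applied.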