I took this advice, turned my output data into a categorical vector, removed all forms of normalisation, and achieved 97% accuracy on the training data; however, the test-set accuracy converges towards ~90%. My code is as follows:
import numpy as np
import sys, os

from keras.models import Sequential
from keras.layers.core import Activation, Dense, Dropout, Flatten, Reshape
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.layers.advanced_activations import PReLU
from keras.regularizers import l2
from keras.optimizers import SGD
from keras.utils.np_utils import to_categorical
trX = np.load(base + "trX.npy") # training data, shape (120000, 3, 128, 128)
trY = to_categorical(np.load(base + "trY.npy")).astype("float32") # training set outputs
teX = np.load(base + "teX.npy")
teY = to_categorical(np.load(base + "teY.npy")).astype("float32")
print trX.shape
print trY.shape
print teX.shape
print teY.shape
print "compiling model."
if params in os.listdir(base):
model = VGG_like_SL(weights_path=base+params)
print "Model parameters loaded."
else:
model = VGG_like_SL()
print "Model parameters initialised."
print model.layers[0].get_weights()[0].shape
decay = 1e-4
sgd = SGD(lr=0.01, momentum=0.9, decay=decay)
model.compile(loss="categorical_crossentropy", optimizer=sgd)
for layer in model.layers:
print layer.output_shape
val_loss, val_acc = model.evaluate(teX, teY, show_accuracy=True)
print "Initial (loss, acc): " + str((val_loss, val_acc))
print "Running gradient descent"
for i in xrange(1000):
lr = model.optimizer.lr.get_value()
it = model.optimizer.iterations.get_value()
rate = lr / (1. + decay*it)
print "Epoch %02d, lr=%1.4f:" % (i, rate)
print "Best validation loss so far: %1.4f" % val_loss
model.fit(trX, trY, batch_size=128, nb_epoch=1,
validation_data=(teX,teY), show_accuracy=True)
new_loss, new_acc = model.evaluate(teX, teY, show_accuracy=True)
if new_acc > val_acc:
model.save_weights(base + params, overwrite=True)
print "Model checkpoint saved."
val_loss = new_loss
val_acc = new_acc
--
You received this message because you are subscribed to the Google Groups "Keras-users" group.
To unsubscribe from this group and stop receiving emails from it, send an email to keras-users...@googlegroups.com.
To view this discussion on the web, visit https://groups.google.com/d/msgid/keras-users/79ce375f-d657-4cfe-9b45-4ca01edcf669%40googlegroups.com.