diff --git a/python/w2v_cnn_gen_hdf5.py b/python/w2v_cnn_gen_hdf5.py
index 78ee36eadce83735e26d88866bf8d580dc1ff0a9..bf810569a9a3a7ae5131134bfde2fb2c4619d6d0 100644
--- a/python/w2v_cnn_gen_hdf5.py
+++ b/python/w2v_cnn_gen_hdf5.py
@@ -28,7 +28,7 @@ num_rows = 4.8E6
 batchSize = 2048
 steps = num_rows/batchSize
 #early stop
-earlystop = keras.callbacks.EarlyStopping(monitor='sparse_categorical_accuracy',patience=10,verbose=False,restore_best_weights=True)
+earlystop = keras.callbacks.EarlyStopping(monitor='val_sparse_categorical_accuracy',patience=5,verbose=False,restore_best_weights=True)
 cbList = [earlystop]
 
 trainData = hdf5Generator(path + "w2vCNN.hdf5", batchSize, "Train")
diff --git a/python/w2v_sentence_mean gen.py b/python/w2v_sentence_mean gen.py
index 74d8c226d92d229677a1a551bdb5f34dd0031e81..aed03f4f254362bb4afe9a3c216571028c72b761 100644
--- a/python/w2v_sentence_mean gen.py
+++ b/python/w2v_sentence_mean gen.py
@@ -72,7 +72,7 @@ modelNN.compile(optimizer='adam',loss='categorical_crossentropy',metrics=["spars
 
 # %% fit
 #early stop
-earlystop = keras.callbacks.EarlyStopping(monitor='sparse_categorical_accuracy',patience=10,verbose=False,restore_best_weights=True)
+earlystop = keras.callbacks.EarlyStopping(monitor='val_sparse_categorical_accuracy',patience=10,verbose=False,restore_best_weights=True)
 cbList = [earlystop]
 
 train = generate_arrays_from_file('./train.json',batchSize)
diff --git a/python/w2v_sentence_mean.py b/python/w2v_sentence_mean.py
index bf9fa952c9812a0641aa42730eb058e4d7021a8b..1085823dfb36c3ef44001c544795eb41d80581b4 100644
--- a/python/w2v_sentence_mean.py
+++ b/python/w2v_sentence_mean.py
@@ -64,7 +64,7 @@ modelNN.compile(optimizer='adam',loss='sparse_categorical_crossentropy',metrics=
 
 # %% fit
 #early stop
-earlystop = keras.callbacks.EarlyStopping(monitor='sparse_categorical_accuracy',patience=5,verbose=False,restore_best_weights=True)
+earlystop = keras.callbacks.EarlyStopping(monitor='val_sparse_categorical_accuracy',patience=5,verbose=False,restore_best_weights=True)
 cbList = [earlystop]
 count = np.unique(Y_train,return_counts=True)[1]
 cWeight = 1/(count/Y_train.size)
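
The common change across all three hunks is switching the `EarlyStopping` monitor from the training metric to the validation metric (`val_sparse_categorical_accuracy`). Below is a minimal, self-contained sketch of how the updated callback plugs into `fit`; the toy data and two-layer model are placeholders for illustration, not the repository's actual generators or architectures. The key point is that a `val_`-prefixed monitor only exists if `fit` receives validation data (e.g. `validation_split` or `validation_data`); otherwise Keras logs a warning that the metric is unavailable and early stopping never triggers.

```python
import numpy as np
from tensorflow import keras

# Hypothetical toy data: 1000 samples, 20 features, 3 classes.
X = np.random.rand(1000, 20).astype("float32")
y = np.random.randint(0, 3, size=(1000,))

# Placeholder model, compiled with the same metric the scripts track.
model = keras.Sequential([
    keras.Input(shape=(20,)),
    keras.layers.Dense(64, activation="relu"),
    keras.layers.Dense(3, activation="softmax"),
])
model.compile(optimizer="adam",
              loss="sparse_categorical_crossentropy",
              metrics=["sparse_categorical_accuracy"])

# Early stopping on the validation metric, as in the diff.
earlystop = keras.callbacks.EarlyStopping(
    monitor="val_sparse_categorical_accuracy",
    patience=5,
    verbose=False,
    restore_best_weights=True,
)

# validation_split provides the 'val_' metrics the callback monitors.
model.fit(X, y,
          batch_size=128,
          epochs=50,
          validation_split=0.1,
          callbacks=[earlystop])
```

In the repository's generator-based scripts the equivalent requirement is passing `validation_data=` to `fit`, since `validation_split` only works with in-memory arrays.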