train_words.py
"""Train a 9-class CNN on 100x100 grayscale images loaded from pickled features/labels and save the best weights to HTML.h5."""
import pickle
import numpy as np
from keras.callbacks import ModelCheckpoint
from keras.layers import Dense, Flatten, Conv2D
from keras.layers import MaxPooling2D, Dropout
from keras.models import Sequential
from keras.utils import print_summary, np_utils
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle

def keras_model(image_x, image_y):
    """Build and compile the 9-class CNN classifier."""
    num_of_classes = 9
    model = Sequential()
    model.add(Conv2D(32, (5, 5), input_shape=(image_x, image_y, 1), activation='relu', padding='same'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='same'))
    model.add(Conv2D(64, (5, 5), activation='relu', padding='same'))
    model.add(MaxPooling2D(pool_size=(5, 5), strides=(5, 5), padding='same'))
    model.add(Flatten())
    model.add(Dense(1024, activation='relu'))
    model.add(Dropout(0.6))
    model.add(Dense(num_of_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

    # Checkpoint the best weights (by validation accuracy) seen during training.
    filepath = "HTML.h5"
    checkpoint1 = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True, mode='max')
    callbacks_list = [checkpoint1]

    return model, callbacks_list

def loadFromPickle():
    """Load the pickled feature and label arrays produced by the dataset-creation step."""
    with open("features", "rb") as f:
        features = np.array(pickle.load(f))
    with open("labels", "rb") as f:
        labels = np.array(pickle.load(f))
    return features, labels
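
# Hedged note (not part of the original script): the "features" and "labels"
# pickles are assumed to hold a (num_samples, 100, 100) grayscale image array
# and a matching (num_samples,) integer-class array, since main() below
# reshapes the features to 100x100x1 and one-hot encodes the labels. A minimal
# sketch of writing such files, with `images` and `classes` as hypothetical
# NumPy arrays:
#
#     with open("features", "wb") as f:
#         pickle.dump(images, f)
#     with open("labels", "wb") as f:
#         pickle.dump(classes, f)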

def augmentData(features, labels):
    """Double the dataset by appending a horizontally flipped copy of every image."""
    features = np.append(features, features[:, :, ::-1], axis=0)
    # The flipped copies keep their original class labels.
    labels = np.append(labels, labels, axis=0)
    return features, labels

def main():
    features, labels = loadFromPickle()
    # features, labels = augmentData(features, labels)
    features, labels = shuffle(features, labels)

    # Scale pixel values to [0, 1] and reshape to (samples, 100, 100, 1) for the CNN.
    features = features / 255.
    train_x, test_x, train_y, test_y = train_test_split(features, labels, random_state=0,
                                                        test_size=0.1)
    train_x = train_x.reshape(train_x.shape[0], 100, 100, 1)
    test_x = test_x.reshape(test_x.shape[0], 100, 100, 1)

    # One-hot encode the integer class labels.
    train_y = np_utils.to_categorical(train_y)
    test_y = np_utils.to_categorical(test_y)

    model, callbacks_list = keras_model(image_x=100, image_y=100)
    print_summary(model)
    model.fit(train_x, train_y, validation_data=(test_x, test_y), epochs=5, batch_size=64,
              callbacks=callbacks_list)
    scores = model.evaluate(test_x, test_y, verbose=0)
    print("CNN Error: %.2f%%" % (100 - scores[1] * 100))
    model.save('HTML.h5')


if __name__ == '__main__':
    main()
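
# Hedged usage sketch (not part of the original script): once training
# finishes, the saved HTML.h5 checkpoint could be reloaded for inference on a
# single preprocessed 100x100 grayscale image roughly as follows;
# `sample_image` is a hypothetical NumPy array you would supply yourself.
#
#     from keras.models import load_model
#
#     model = load_model("HTML.h5")
#     batch = sample_image.reshape(1, 100, 100, 1) / 255.
#     probabilities = model.predict(batch)[0]
#     predicted_class = int(np.argmax(probabilities))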