# image_classifier.py
# CNN image classifier (Keras): four conv/pool stages plus a dense head,
# trained with fit_generator on directory-organized images (6 classes).
from keras import applications, optimizers, regularizers
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential, Model
from keras.layers import (Dropout, Flatten, Dense, Conv2D,
Activation, MaxPooling2D, BatchNormalization)
from keras_tqdm import TQDMCallback
# Image dimensions expected by the network's input layer (H, W, channels).
img_width, img_height, img_depth = 128, 128, 3

# Image folder directories: one subdirectory per class, as required by
# ImageDataGenerator.flow_from_directory.
train_data_dir = 'images'
validation_data_dir = 'test_images'

# --- Model definition ----------------------------------------------------
# Four Conv -> ReLU -> BatchNorm -> MaxPool -> Dropout stages, followed by
# a small dense head.  Dropout(0.5) after every pooling stage regularizes
# this compact network.
model = Sequential()

# Stage 1 (input): 32 filters, 3x3 pooling.
model.add(Conv2D(32, (3, 3), input_shape=(img_width, img_height, img_depth)))
model.add(Activation("relu"))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(3, 3)))
model.add(Dropout(0.5))

# Stage 2: 64 filters.
model.add(Conv2D(64, (3, 3)))
model.add(Activation("relu"))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.5))

# Stage 3: 64 filters.
model.add(Conv2D(64, (3, 3)))
model.add(Activation("relu"))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.5))

# Stage 4: 96 filters.
model.add(Conv2D(96, (3, 3)))
model.add(Activation("relu"))
model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.5))

# Dense head.  NOTE: units=img_width (128) reuses the image-width constant
# as the hidden-layer size — the two are unrelated quantities that merely
# share the value 128 here.
model.add(Flatten())
model.add(Dense(activation='relu', units=img_width))
model.add(BatchNormalization())

# Output: 6-way softmax (one probability per class).
model.add(Dense(6, activation='softmax'))

# Adadelta with Keras defaults; pairs with the categorical (one-hot) labels
# produced by class_mode='categorical' below.
model.compile(loss='categorical_crossentropy',
              optimizer='adadelta',
              metrics=['accuracy'])
# Training-set augmentation: rescale to [0, 1], then random shear/zoom and
# horizontal flips to enlarge the effective dataset.
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    shear_range=0.3,
    zoom_range=0.2,
    horizontal_flip=True)
# Validation images are only rescaled — never augmented.
test_datagen = ImageDataGenerator(rescale=1. / 255)

epochs = 40
batch_size = 32

train_generator = train_datagen.flow_from_directory(
    train_data_dir,
    target_size=(img_width, img_height),
    batch_size=batch_size,
    class_mode='categorical', shuffle=True)
validation_generator = test_datagen.flow_from_directory(
    validation_data_dir,
    target_size=(img_width, img_height),
    batch_size=batch_size,
    class_mode='categorical')

# Use each generator's own sample count instead of hard-coded totals
# (previously 2111 and 230) so the script adapts if the datasets change.
H = model.fit_generator(
    train_generator,
    steps_per_epoch=train_generator.samples // batch_size,
    epochs=epochs,
    verbose=1,
    validation_data=validation_generator,
    validation_steps=validation_generator.samples // batch_size)
import matplotlib.pyplot as plt
import numpy as np

# Plot the training curves: loss on the top axes, accuracy on the bottom.
# The original code opened a stray empty figure via plt.figure() and applied
# xlabel/ylabel/legend only to the last-touched axes; label each axes
# explicitly instead.
# NOTE(review): history keys 'acc'/'val_acc' match multi-backend Keras —
# newer tf.keras renames them to 'accuracy'/'val_accuracy'.
xs = np.arange(0, epochs)
f, axarr = plt.subplots(2, sharex=True)
axarr[0].plot(xs, H.history["loss"], label="train_loss")
axarr[0].plot(xs, H.history["val_loss"], label="val_loss")
axarr[0].set_ylabel("Loss")
axarr[0].legend(loc="upper left")
axarr[1].plot(xs, H.history["acc"], label="train_acc")
axarr[1].plot(xs, H.history["val_acc"], label="val_acc")
axarr[1].set_xlabel("Epoch #")
axarr[1].set_ylabel("Accuracy")
axarr[1].legend(loc="upper left")
plt.tight_layout()
plt.show()
# Persist the trained network: architecture as YAML, weights as HDF5.
with open("model.yaml", "w") as arch_file:
    arch_file.write(model.to_yaml())
model.save_weights("model.h5")
print("Saved model to disk")