-
Notifications
You must be signed in to change notification settings - Fork 0
/
train.py
110 lines (86 loc) · 3.84 KB
/
train.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
import numpy as np
import keras
from keras.preprocessing.image import ImageDataGenerator, load_img, img_to_array, array_to_img
from keras.layers import Conv2D, Flatten, MaxPooling2D, Dense
from keras.models import Sequential
from keras.models import load_model
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import glob, os, random
# --- Dataset discovery and augmentation pipelines ---
base_path = './dataset-resized'

# Sanity check: count images found under <base_path>/<class>/<name>.jpg.
img_list = glob.glob(os.path.join(base_path, '*/*.jpg'))
print(len(img_list))

batch_size_value = 16

# Training-time augmentation.
# BUG FIX: the original used `rescale=1. / 225` (typo) which scaled train
# pixels inconsistently with the validation pipeline's 1./255; corrected
# so both generators normalize pixel values into [0, 1].
train_datagen = ImageDataGenerator(
    rescale=1. / 255, shear_range=0.1, zoom_range=0.1,
    width_shift_range=0.1, height_shift_range=0.1, horizontal_flip=True,
    vertical_flip=True, validation_split=0.1)

# Validation pipeline: rescaling only, same 10% split as training.
test_datagen = ImageDataGenerator(
    rescale=1. / 255, validation_split=0.1)

# Both generators read from the same directory tree; the fixed seed keeps
# the training/validation split consistent between the two generators.
train_generator = train_datagen.flow_from_directory(
    base_path, target_size=(300, 300), batch_size=batch_size_value,
    class_mode='categorical', subset='training', seed=0)
validation_generator = test_datagen.flow_from_directory(
    base_path, target_size=(300, 300), batch_size=batch_size_value,
    class_mode='categorical', subset='validation', seed=0)

# Invert {class_name: index} to {index: class_name} for readable output.
labels = (train_generator.class_indices)
labels = dict((v, k) for k, v in labels.items())
print(labels)
# --- CNN architecture: four conv/pool stages, then a small dense head ---
# Input is a 300x300 RGB image; output is a 4-way softmax over classes.
model = Sequential()
model.add(Conv2D(filters=32, kernel_size=3, padding='same',
                 activation='relu', input_shape=(300, 300, 3)))
model.add(MaxPooling2D(pool_size=2))
model.add(Conv2D(filters=64, kernel_size=3, padding='same', activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Conv2D(filters=32, kernel_size=3, padding='same', activation='relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Conv2D(filters=32, kernel_size=3, padding='same', activation='relu'))
model.add(MaxPooling2D(pool_size=2))
# Flatten the final feature maps and classify.
model.add(Flatten())
model.add(Dense(64, activation='relu'))
model.add(Dense(4, activation='softmax'))
class LossHistory(keras.callbacks.Callback):
    """Record loss/accuracy per batch and per epoch during training.

    Collected values can be plotted with :meth:`loss_plot`, which writes
    the figure to ``./results/learning_rate.png``.
    """

    def on_train_begin(self, logs=None):
        # Create containers for loss/accuracy history when training starts.
        # (Use logs=None instead of a mutable {} default argument.)
        self.losses = {'batch': [], 'epoch': []}
        self.accuracy = {'batch': [], 'epoch': []}
        self.val_loss = {'batch': [], 'epoch': []}
        self.val_acc = {'batch': [], 'epoch': []}

    def on_batch_end(self, batch, logs=None):
        # Append loss/accuracy after each batch. Newer Keras reports the
        # metric under 'accuracy' rather than 'acc', so fall back to it.
        logs = logs or {}
        self.losses['batch'].append(logs.get('loss'))
        self.accuracy['batch'].append(logs.get('acc', logs.get('accuracy')))
        # NOTE: validation metrics are generally not provided per batch,
        # so these entries are usually None; kept for parity with 'epoch'.
        self.val_loss['batch'].append(logs.get('val_loss'))
        self.val_acc['batch'].append(
            logs.get('val_acc', logs.get('val_accuracy')))

    def on_epoch_end(self, epoch, logs=None):
        # Append loss/accuracy after each epoch (training and validation).
        logs = logs or {}
        self.losses['epoch'].append(logs.get('loss'))
        self.accuracy['epoch'].append(logs.get('acc', logs.get('accuracy')))
        self.val_loss['epoch'].append(logs.get('val_loss'))
        self.val_acc['epoch'].append(
            logs.get('val_acc', logs.get('val_accuracy')))

    def loss_plot(self, loss_type):
        """Plot recorded curves; *loss_type* is ``'batch'`` or ``'epoch'``.

        Validation curves are only drawn for ``'epoch'`` since they are
        not recorded per batch.
        """
        iters = range(len(self.losses[loss_type]))
        plt.figure()
        # Training accuracy (red) and loss (green).
        plt.plot(iters, self.accuracy[loss_type], 'r', label='train acc')
        plt.plot(iters, self.losses[loss_type], 'g', label='train loss')
        if loss_type == 'epoch':
            # Validation accuracy (blue) and loss (black).
            plt.plot(iters, self.val_acc[loss_type], 'b', label='val acc')
            plt.plot(iters, self.val_loss[loss_type], 'k', label='val loss')
        plt.grid(True)
        plt.xlabel(loss_type)
        plt.ylabel('acc-loss')
        plt.legend(loc="upper right")
        # Ensure the output directory exists; savefig would otherwise fail.
        os.makedirs('./results', exist_ok=True)
        plt.savefig('./results/learning_rate.png')
# --- Compile, train, plot history, and persist the model ---
history = LossHistory()
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['acc'])

# BUG FIX: step counts were hard-coded as 1789 // 32 and 198 // 32, which
# assumed a batch size of 32 although batch_size_value is 16 — roughly half
# of each epoch's data was silently skipped. Derive the counts from the
# generators' actual sample counts and batch size instead.
model.fit_generator(
    train_generator, epochs=100,
    steps_per_epoch=max(1, train_generator.samples // batch_size_value),
    validation_data=validation_generator,
    validation_steps=max(1, validation_generator.samples // batch_size_value),
    callbacks=[history])

history.loss_plot('epoch')
# Ensure the output directory exists before saving the model.
os.makedirs('./results', exist_ok=True)
model.save('./results/my_model.h5')