forked from bnsreenu/python_for_microscopists
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path129_130_131-tips_tricks_callbacks_continuing_training.py
185 lines (144 loc) · 6.7 KB
/
129_130_131-tips_tricks_callbacks_continuing_training.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
#!/usr/bin/env python
__author__ = "Sreenivas Bhattiprolu"
__license__ = "Feel free to copy, I appreciate if you acknowledge Python for Microscopists"
# Video 129: https://youtu.be/wkwtIeq9Ljo
# Video 130: https://youtu.be/MSjW3qyw7H0
# Video 131: https://youtu.be/4umFSRPx-94
"""
@author: Sreenivas Bhattiprolu
Dataset from: https://lhncbc.nlm.nih.gov/publication/pub9932
"""
from matplotlib import pyplot as plt
from keras.preprocessing.image import ImageDataGenerator, img_to_array, load_img

# Augmentation pipeline for the single-image demo below.
# fill_mode controls how pixels exposed by a transform are filled in;
# other options worth trying: 'nearest', 'reflect', 'wrap'.
datagen = ImageDataGenerator(
        rotation_range=40,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True,
        fill_mode='constant')

############################
# Demo: augment a single image and write the variants to disk.
img = load_img('cell_images/Parasitized/C33P1thinF_IMG_20150619_114756a_cell_179.png')
# load_img returns a PIL image; convert it to a (height, width, channels) array
x = img_to_array(img)
# .flow() expects a leading batch axis: (1, height, width, channels)
x = x.reshape((1,) + x.shape)

# .flow() yields batches of randomly transformed images forever and saves
# each one into the 'augmented/' directory; stop after 21 batches.
for i, batch in enumerate(datagen.flow(x, batch_size=1,
                                       save_to_dir='augmented',
                                       save_prefix='aug',
                                       save_format='png')):
    if i >= 20:
        break  # otherwise the generator would loop indefinitely
# End demo of single-image augmentation
#End Demo of single image
##########################################################
#############################################################
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense

SIZE = 150                       # images are resized to SIZE x SIZE by the generators
INPUT_SHAPE = (SIZE, SIZE, 3)    # channels-last RGB input

# Small CNN for binary classification: three conv/ReLU/pool stages followed
# by a dropout-regularised dense head ending in a single sigmoid unit.
model = Sequential([
    Conv2D(32, (3, 3), input_shape=INPUT_SHAPE),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),

    Conv2D(32, (3, 3)),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),

    Conv2D(64, (3, 3)),
    Activation('relu'),
    MaxPooling2D(pool_size=(2, 2)),

    Flatten(),
    Dense(64),
    Activation('relu'),
    Dropout(0.5),
    Dense(1),
    Activation('sigmoid'),
])

# One sigmoid output + binary labels -> binary cross-entropy loss;
# accuracy is tracked so the callbacks below can monitor it.
model.compile(loss='binary_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])
print(model.summary())
###############################################################
batch_size = 16

# Data pipeline: .flow_from_directory() yields batches of images and labels
# straight from the class subfolders on disk.

# Training images get random augmentation on top of the 0-1 rescaling.
train_datagen = ImageDataGenerator(
        rescale=1./255,
        rotation_range=45,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True)

# Validation images are only rescaled - no augmentation
# (you can experiment with other operations here).
validation_datagen = ImageDataGenerator(rescale=1./255)

# Reads pictures found in the subfolders of 'cell_images' and indefinitely
# yields augmented batches; images are resized to 150x150 to match INPUT_SHAPE.
train_generator = train_datagen.flow_from_directory(
        'cell_images',
        target_size=(150, 150),
        batch_size=batch_size,
        class_mode='binary')  # binary labels to match the binary_crossentropy loss

# Same kind of generator, for the validation data.
validation_generator = validation_datagen.flow_from_directory(
        'cell_validation',
        target_size=(150, 150),
        batch_size=batch_size,
        class_mode='binary')
#Add Callbacks, e.g. ModelCheckpoints, earlystopping, csvlogger.
# Callbacks: model checkpointing, early stopping and CSV logging.
from keras.callbacks import ModelCheckpoint, EarlyStopping, CSVLogger

# ModelCheckpoint saves the model whenever the monitored metric improves.
# NOTE: the model is compiled with metrics=['accuracy'], and since Keras 2.3 /
# tf.keras the logged key for that metric is 'val_accuracy' (older standalone
# Keras used 'val_acc'). With the old name the checkpoint never fires and the
# filename template raises a KeyError. If you are on Keras < 2.3, change both
# occurrences back to 'val_acc'.
filepath = "saved_models/weights-improvement-{epoch:02d}-{val_accuracy:.2f}.hdf5"  # name includes epoch and validation accuracy
# mode='max' because higher accuracy is better (use mode='min' when monitoring a loss).
checkpoint = ModelCheckpoint(filepath, monitor='val_accuracy', verbose=1,
                             save_best_only=True, mode='max')

# Stop training when validation loss shows no improvement for 3 consecutive epochs.
# https://www.tensorflow.org/api_docs/python/tf/keras/callbacks/EarlyStopping
early_stop = EarlyStopping(monitor='val_loss', patience=3, verbose=1)

# CSVLogger writes epoch, accuracy, loss, val_accuracy and val_loss to a CSV file.
log_csv = CSVLogger('my_logs.csv', separator=',', append=False)

callbacks_list = [checkpoint, early_stop, log_csv]
#We can now use these generators to train our model.
#Give this a name so we can call it later for plotting loss, accuracy etc. as a function of epochs.
# Train with the generators; keep the History object so loss/accuracy can be
# plotted as a function of epoch afterwards.
fit_kwargs = dict(
    steps_per_epoch=2000 // batch_size,   # // floor-divides to an integer step count
    epochs=5,
    validation_data=validation_generator,
    validation_steps=800 // batch_size,
    callbacks=callbacks_list,
)
history = model.fit_generator(train_generator, **fit_kwargs)

# Always persist the model after (or during) training.
model.save('malaria_augmented_model.h5')

# Evaluate on 16 batches from each generator.
train_loss, train_acc = model.evaluate_generator(train_generator, steps=16)
validation_loss, test_acc = model.evaluate_generator(validation_generator, steps=16)
print('Train: %.3f, Test: %.3f' % (train_acc, test_acc))

# Plot the training history.
print("Values stored in history are ... \n", history.history)
for key, curve_label in (('loss', 'train'), ('val_loss', 'test')):
    plt.plot(history.history[key], label=curve_label)
plt.legend()
plt.show()
#####################################################
# The block below is intentionally disabled (kept inside a module-level string
# literal). It shows how to resume training later: reload the saved .h5 model
# with load_model() and call fit_generator() again with the same generators
# and callbacks, then save under a new name.
"""
#To continue training, by modifying weights to existing model.
#The saved model can be reinstated.
from keras.models import load_model
new_model = load_model('malaria_augmented_model.h5')
results = new_model.evaluate_generator(validation_generator, steps=16)
print(" validation loss and accuracy are", results)
new_model.fit_generator(
        train_generator,
        steps_per_epoch=2000 // batch_size, #The 2 slashes division return rounded integer
        epochs=5,
        validation_data=validation_generator,
        validation_steps=800 // batch_size,
        callbacks=callbacks_list)
model.save('malaria_augmented_model_updated.h5')
"""