-
Notifications
You must be signed in to change notification settings - Fork 41
/
evaluate_model.py
200 lines (142 loc) · 7.05 KB
/
evaluate_model.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
import math
import os
import cv2
import numpy as np
from keras.models import load_model
from keras.preprocessing import image
from matplotlib import pyplot as plt
from setup.naive_approach import extract_dataset, generate_from_paths_and_labels
"""
This module contains functions used to evaluate our models, both grahically on example images and on test sets.
A function also allows to extract examples classified with a low confidence by the network.
"""
def graphically_evaluate_model(model_path, classes_names, test_image_dir, preprocess_input, image_size=(224, 224)):
    """
    Loads a model, performs prediction on each image in test_image_dir and displays the image with the most
    probable class name written on top of it. All class probabilities are printed to stdout, best first.
    :param model_path: path to the saved Keras model.
    :param classes_names: names of the classes (same order that was used to one-hot encode the labels).
    :param test_image_dir: path to the test image directory.
    :param preprocess_input: preprocessing function for the network.
    :param image_size: size of the input image for the network.
    """
    nbr_classes = len(classes_names)
    model = load_model(model_path)
    # for each image
    for test_image_path in os.listdir(test_image_dir):
        image_path = os.path.join(test_image_dir, test_image_path)
        # load image using keras
        img = image.load_img(image_path, target_size=image_size)
        # processed image to feed the network
        processed_img = image.img_to_array(img)
        processed_img = np.expand_dims(processed_img, axis=0)
        processed_img = preprocess_input(processed_img)
        # get prediction using the network
        predictions = model.predict(processed_img)[0]
        # transform [0,1] values into percentages and associate each with its class name (classes_names order was
        # used to one-hot encode the classes), then sort by decreasing probability
        result = [(classes_names[i], float(predictions[i]) * 100.0) for i in range(nbr_classes)]
        result.sort(reverse=True, key=lambda x: x[1])
        # load image for displaying; cv2.imread returns None on failure instead of raising, so guard explicitly
        img = cv2.imread(image_path)
        if img is None:
            print("Could not read image for display: %s" % image_path)
            continue
        # transform into RGB (OpenCV loads BGR)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        font = cv2.FONT_HERSHEY_COMPLEX
        # compute the text position once, for the top prediction only (the original recomputed it for every
        # class but only ever used the value from the first iteration)
        (top_class, _) = result[0]
        textsize = cv2.getTextSize(top_class, font, 1, 2)[0]
        textX = (img.shape[1] - textsize[0]) / 2
        textY = (img.shape[0] + textsize[1]) / 2
        # print max probability prediction on top of the image
        cv2.putText(img, top_class, (int(textX) - 100, int(textY)), font, 5, (255, 255, 255), 6, cv2.LINE_AA)
        # print every class probability, best first
        for (class_name, prob) in result:
            print("Class name: %s" % class_name)
            print("Probability: %.2f%%" % prob)
        plt.imshow(img)
        plt.show()
def evaluate_model(model_path, classes, preprocessing, dataset_path):
    """
    Loads a model and evaluates it (metrics) on the images found in a dataset folder.
    :param model_path: path to the saved Keras model.
    :param classes: names of the classes.
    :param preprocessing: preprocessing function for the network.
    :param dataset_path: path to the test dataset.
    :return: a string of "metric_name : value | " entries, one per model metric.
    """
    # extract_dataset is called with a 0 train proportion so every image lands in the validation split,
    # which is then used as the evaluation set
    (train_samples, train_labels), (val_samples, val_labels) = extract_dataset(dataset_path, classes, 0)
    batch_size = 16
    nbr_val_samples = len(val_samples)
    # create a generator from the naive approach (so we don't perform data augmentation on these test images)
    validation_sample_generator = generate_from_paths_and_labels(val_samples,
                                                                 val_labels,
                                                                 batch_size,
                                                                 preprocessing,
                                                                 image_size=(224, 224, 3))
    model = load_model(model_path)
    # steps is derived from batch_size (previously a hard-coded 16, which could silently diverge from the
    # batch_size actually used by the generator)
    metrics = model.evaluate_generator(validation_sample_generator,
                                       steps=math.ceil(nbr_val_samples / batch_size),
                                       max_queue_size=10,
                                       workers=1,
                                       use_multiprocessing=True,
                                       verbose=1)
    # format the metrics as "name : value | name : value | ..."
    out = ""
    for name, value in zip(model.metrics_names, metrics):
        out += name
        out += " : "
        out += str(float(value))
        out += " | "
    return out
def extract_hard_samples(model_path, preprocess_input, dataset_path, threshold, image_size=(224, 224)):
    """
    Extracts samples which are hard to classify for the network. Takes a dataset and a model as input, prediction is
    performed by the model on the samples from the dataset specified by dataset_path and samples with a classification
    confidence for the correct class lower than threshold are saved to a list.
    :param model_path: path to the saved Keras model.
    :param preprocess_input: preprocessing function for the network.
    :param dataset_path: path to the dataset (with or without a trailing separator).
    :param threshold: confidence threshold for the prediction, in [0, 1].
    :param image_size: size of the image fed to the network.
    :return: a list of nbr_classes lists, each holding the file names of that class's hard samples.
    """
    classes = ['fire', 'no_fire', 'start_fire']
    nbr_classes = 3
    model = load_model(model_path)
    # paths to hard examples are saved, one sub-list per class
    hard_examples = [[] for _ in range(nbr_classes)]
    # for each class
    for i in range(nbr_classes):
        class_name = classes[i]
        # os.path.join works whether or not dataset_path ends with a separator (the previous string
        # concatenation silently required a trailing "/")
        class_dir = os.path.join(dataset_path, class_name)
        # for each sample of that class
        for sample_path in os.listdir(class_dir):
            img = image.load_img(os.path.join(class_dir, sample_path), target_size=image_size)
            # processed image to feed the network
            processed_img = image.img_to_array(img)
            processed_img = np.expand_dims(processed_img, axis=0)
            processed_img = preprocess_input(processed_img)
            # get prediction using the network
            predictions = model.predict(processed_img)[0]
            # confidence for the true class below threshold -> hard example
            if float(predictions[i]) < threshold:
                hard_examples[i].append(sample_path)
    return hard_examples
def display_hard_samples(hard_examples, dataset_path):
    """
    Displays samples that are hard to classify from the dataset specified by dataset_path, hard_examples should be a
    1x3 list containing paths of hard to classify samples.
    :param hard_examples: the list of paths to difficult examples, one sub-list per class.
    :param dataset_path: path to the dataset.
    """
    classes = ['fire', 'no_fire', 'start_fire']
    # walk the classes in order, showing every hard example of each
    for class_index, class_name in enumerate(classes):
        print("========== CLASS : " + class_name + " ==========")
        for sample_path in hard_examples[class_index]:
            # load the image from disk, then convert from OpenCV's BGR layout to RGB for matplotlib
            bgr_image = cv2.imread(dataset_path + "/" + class_name + "/" + sample_path)
            rgb_image = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2RGB)
            plt.imshow(rgb_image)
            plt.show()