test.py (forked from kenshohara/3D-ResNets-PyTorch)
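"""Evaluate a trained model on the test split.

test() runs the model over every batch of clips in the test DataLoader and
buffers the per-clip scores for each video; calculate_video_results()
averages them and keeps the top-10 classes per video. The accumulated
results are written to '{opt.test_subset}.json' inside opt.result_path.
"""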
import torch
# from torch.autograd import Variable
import torch.nn.functional as F
import time
import os
import sys
import json

from utils import AverageMeter


def calculate_video_results(output_buffer, video_id, test_results, class_names):
    # Average the buffered clip-level scores for one video and keep the
    # top-10 classes.
    video_outputs = torch.stack(output_buffer)
    average_scores = torch.mean(video_outputs, dim=0)
    sorted_scores, locs = torch.topk(average_scores, k=10)

    video_results = []
    for i in range(sorted_scores.size(0)):
        video_results.append({
            'label': class_names[locs[i]],
            # .item() converts the 0-dim tensor to a Python float so the
            # results can be serialized with json.dump below.
            'score': sorted_scores[i].item()
        })

    test_results['results'][video_id] = video_results
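
# Illustrative usage (placeholders, not part of the original pipeline): the
# clip scores, video id, and class names below are made up; in practice they
# come from the model outputs buffered in test() and the dataset's class list.
#
#   clip_scores = [torch.rand(400) for _ in range(5)]   # 5 clips, 400 classes
#   class_names = ['class_{}'.format(n) for n in range(400)]
#   results = {'results': {}}
#   calculate_video_results(clip_scores, 'video_0001', results, class_names)
#   # results['results']['video_0001'] now holds ten {'label', 'score'} dicts,
#   # sorted by the averaged score.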


def test(data_loader, model, opt, class_names):
    print('test')

    model.eval()

    batch_time = AverageMeter()
    data_time = AverageMeter()

    end_time = time.time()
    output_buffer = []
    previous_video_id = ''
    test_results = {'results': {}}
    for i, (inputs, targets) in enumerate(data_loader):
        data_time.update(time.time() - end_time)

        # torch.no_grad() replaces the pre-0.4 Variable(inputs, volatile=True)
        # idiom: run the forward pass without tracking gradients.
        with torch.no_grad():
            outputs = model(inputs)
        if not opt.no_softmax_in_test:
            outputs = F.softmax(outputs, dim=1)

        # In test mode the targets are video ids; buffer clip scores until a
        # new video starts, then aggregate the finished one.
        for j in range(outputs.size(0)):
            if not (i == 0 and j == 0) and targets[j] != previous_video_id:
                calculate_video_results(output_buffer, previous_video_id,
                                        test_results, class_names)
                output_buffer = []
            output_buffer.append(outputs[j].data.cpu())
            previous_video_id = targets[j]

        # Periodically checkpoint the accumulated results to disk.
        if (i % 100) == 0:
            with open(
                    os.path.join(opt.result_path,
                                 '{}.json'.format(opt.test_subset)),
                    'w') as f:
                json.dump(test_results, f)

        batch_time.update(time.time() - end_time)
        end_time = time.time()

        print('[{}/{}]\t'
              'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
              'Data {data_time.val:.3f} ({data_time.avg:.3f})'.format(
                  i + 1,
                  len(data_loader),
                  batch_time=batch_time,
                  data_time=data_time))

    # Flush the last video's buffered scores; the loop above only writes a
    # video's results when the following video begins.
    if output_buffer:
        calculate_video_results(output_buffer, previous_video_id,
                                test_results, class_names)

    with open(
            os.path.join(opt.result_path, '{}.json'.format(opt.test_subset)),
            'w') as f:
        json.dump(test_results, f)
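
# A minimal sketch (an assumption, not code from this file) of how test() is
# typically driven from an evaluation script. The get_test_set helper, the
# transform objects, and the opt fields below mirror the upstream
# 3D-ResNets-PyTorch main.py but should be treated as placeholders here.
#
#   test_data = get_test_set(opt, spatial_transform, temporal_transform,
#                            target_transform)
#   test_loader = torch.utils.data.DataLoader(test_data,
#                                             batch_size=opt.batch_size,
#                                             shuffle=False,
#                                             num_workers=opt.n_threads,
#                                             pin_memory=True)
#   test(test_loader, model, opt, test_data.class_names)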