-
Notifications
You must be signed in to change notification settings - Fork 1
/
runs.py
147 lines (121 loc) · 5.22 KB
/
runs.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
import sys
import sql.runs
import sql.regressions
import sql.compiler
import sql.auth
import test_results
import io
import log_files
import tarfile
import csv
import regressions
def _process_runs(runs, db):
    """Convert raw run rows into display-ready dicts.

    Each row gains a 'runid' key plus 'summary' and 'regressions' fields.
    The summary and regression count are only looked up in the database
    for runs whose test run completed with status 'OK'; otherwise the
    status string and a '--' placeholder are used.
    """
    processed = []
    if not runs:
        return processed
    # Column names come from the first row; all rows share the same layout.
    columns = runs[0].keys()
    for row in runs:
        entry = dict(zip(columns, row))
        run_id = row['id']
        entry.setdefault('runid', run_id)
        entry.setdefault('summary', entry['tests_run_status'])
        entry.setdefault('regressions', '--')
        if entry['tests_run_status'] == 'OK':
            entry['summary'] = test_results.summary(db, run_id)
            entry['regressions'] = sql.regressions.counts(db, run_id)
            # Show an explicit 'none' instead of a bare zero count
            if entry['regressions'] == 0:
                entry['regressions'] = 'none'
        processed.append(entry)
    return processed
def details(db, runid):
    """Return full details for one run: its processed row and its compilers."""
    run_rows = sql.runs.get(db, runid=runid)
    return {
        'run': _process_runs(run_rows, db)[0],
        'compilers': sql.compiler.by_run(db, runid),
    }
def most_recent(db):
    """Return the 20 most recent runs, ordered by run date."""
    recent = sql.runs.get(db, limit=20, order_by='run_date')
    return _process_runs(recent, db)
def by_commit(db, commit):
    """Return all runs recorded for the given commit."""
    return _process_runs(sql.runs.get_by_commit(db, commit), db)
def by_branch(db, branch):
    """Return all runs recorded for the given branch."""
    return _process_runs(sql.runs.get_by_branch(db, branch), db)
def by_hostname(db, name):
    """Return all runs recorded for the given hostname."""
    return _process_runs(sql.runs.get_by_hostname(db, name), db)
def search(db, filters):
    """Return all runs matching the given search filters."""
    return _process_runs(sql.runs.search(db, filters), db)
def upload(db, user_file, token):
    """Ingest an uploaded test-run tarball and record it in the database.

    Saves the upload under logs/<uuid>.tar.gz, parses build.log and the
    git/compiler logs inside the tarball, records the run, bulk-inserts
    the test results (if everything built and ran), and computes
    regressions.

    Parameters:
        db: database handle passed through to the sql.* helpers.
        user_file: uploaded file object; must provide .save(), .file,
            and .filename (e.g. a web framework upload wrapper).
        token: authentication token checked via sql.auth.is_valid.

    Raises:
        RuntimeError: on a missing/invalid file, failed authentication,
            a missing build log, or a database error while creating the
            run or inserting test results.
    """
    if user_file is None:
        raise RuntimeError("Uploaded file is not valid")
    if not sql.auth.is_valid(db, token):
        raise RuntimeError("Authentication failed")
    # Save the uploaded file
    # NB: This needs to be done _before_ it is read from
    from uuid import uuid4
    file_name = str(uuid4()) + '.tar.gz'
    user_file.save('logs/' + file_name)
    try:
        with tarfile.open(fileobj=user_file.file, mode="r:gz") as tar:
            files = [m.name for m in tar.getmembers()]
            if "build.log" not in files:
                raise RuntimeError("No build log found")
            logfile = tar.extractfile("build.log")
            results = log_files.read_properties(io.TextIOWrapper(logfile, encoding='utf-8'))
            # Split the architecture and vendor names
            arch, vendor = results['arch'].split('/', 1)
            results['arch'] = arch
            # setdefault: a 'vendor' already present in the log wins over
            # the one parsed out of the 'arch' field
            results.setdefault('vendor', vendor)
            # Save the log file name
            results['user_file'] = file_name
            # Determine the status of the Dyninst build
            if "{0:s}/dyninst/Build.FAILED".format(results['root_dir']) in files:
                results['dyninst_build_status'] = 'FAILED'
            else:
                results['dyninst_build_status'] = 'OK'
            # Determine the status of the Testsuite build
            if "{0:s}/testsuite/Build.FAILED".format(results['root_dir']) in files:
                results['tests_build_status'] = 'FAILED'
            elif "{0:s}/testsuite/build/build.out".format(results['root_dir']) not in files:
                results['tests_build_status'] = 'not built'
            else:
                results['tests_build_status'] = 'OK'
            # Determine the status of the Testsuite run
            results_log_filename = "{0:s}/testsuite/tests/results.log".format(results['root_dir'])
            if "{0:s}/Tests.FAILED".format(results['root_dir']) in files:
                results['tests_run_status'] = 'FAILED'
            elif results_log_filename not in files:
                results['tests_run_status'] = 'not run'
            else:
                results['tests_run_status'] = 'OK'
            # Read the git branches and commits
            results.update(log_files.read_git_logs(tar, results['root_dir'], files))
            # There may be a trailing period in the UTC date
            # Sqlite doesn't like that, so remove it
            results['date'] = results['date'].replace('.', '')
            # Gather compiler information
            results.update(log_files.read_compiler_logs(tar, results['root_dir'], files))
            # Save the run information
            try:
                runid = sql.runs.create(db, results)
            except Exception as e:
                # Was a bare 'except:' with sys.exc_info()[0] (the exception
                # *class*), and '{0:s}'.format(<class>) itself raised
                # TypeError.  Report the actual message and chain the cause.
                raise RuntimeError("Error creating run: {0:s}".format(str(e))) from e
            # Load the results into the database
            if results['dyninst_build_status'] == 'OK' and \
               results['tests_build_status'] == 'OK' and \
               results['tests_run_status'] == 'OK':
                try:
                    logfile = tar.extractfile(results_log_filename)
                    reader = csv.reader(io.TextIOWrapper(logfile, encoding='utf-8'))
                    # NOTE(review): 'sql.test_results' is not explicitly
                    # imported at the top of this file; presumably the sql
                    # package exposes it -- verify.
                    sql.test_results.bulk_insert(db, runid, reader)
                except Exception as e:
                    raise RuntimeError("Error inserting test_results: {0:s}".format(str(e))) from e
            # Create regressions, if any
            regressions.create(db, runid)
    except tarfile.ReadError as e:
        # Not a gzipped tarball: remove the file we just saved, then report
        from os import unlink
        unlink('logs/' + file_name)
        raise RuntimeError("'{0:s}' is not a valid tarfile".format(user_file.filename)) from e