Skip to content

Commit 1115ab6

Browse files
a-wainuclearcat
authored and committed
config: runtime: add new coverage report template
This new template is meant to be executed as a post-processing job, once every test job for a given `kbuild` node are complete. It gathers the raw coverage data from each of those test jobs and processes it as follows: * create a single JSON tracefile using `gcovr` * extract lines/functions coverage percentages for each job * create `test` child nodes for each job, reporting those percentages * merge all tracefiles in a single results file and generate both an HTML report and `lcov`-compatible tracefile; the HTML report gives a quick overview of the code coverage, while the tracefile can then be downloaded by developers for more targeted processing * create `test` child nodes for the `kbuild` job, reporting global lines/functions coverage percentages for this run Signed-off-by: Arnaud Ferraris <arnaud.ferraris@collabora.com>
1 parent c48cf49 commit 1115ab6

1 file changed

Lines changed: 226 additions & 0 deletions

File tree

Lines changed: 226 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,226 @@
1+
{# -*- mode: Python -*- -#}
2+
{# SPDX-License-Identifier: LGPL-2.1-or-later -#}
3+
4+
{%- extends 'base/python.jinja2' %}
5+
6+
{%- block python_imports %}
7+
{{ super() }}
8+
import gzip
9+
import json
10+
import shutil
11+
import subprocess
12+
{%- endblock %}
13+
14+
{%- block python_local_imports %}
15+
{{ super() }}
16+
import kernelci.api.helper
17+
{%- endblock %}
18+
19+
{%- block python_globals %}
20+
{{ super() }}
21+
{% endblock %}
22+
23+
{% block python_job -%}
24+
class Job(BaseJob):
    """Coverage report post-processing job.

    Runs once all test jobs for a given `kbuild` node are complete: it
    downloads the raw coverage data uploaded by each sibling test job,
    processes it with ``gcovr``, reports per-job and global line/function
    coverage percentages as `test` child nodes, and publishes a merged HTML
    report plus an lcov-compatible tracefile.
    """

    def _upload_artifacts(self):
        """Upload every file listed in ``self._artifacts`` to storage.

        Returns a dict mapping artifact names to their uploaded URLs.
        Empty when no storage is configured or the node is missing.
        """
        artifacts = {}
        storage = self._get_storage()
        if storage and self._node:
            root_path = '-'.join([JOB_NAME, self._node['id']])
            print(f"Uploading artifacts to {root_path}")
            for name, file_path in self._artifacts.items():
                # Skip artifacts whose file was never produced
                if os.path.exists(file_path):
                    file_url = storage.upload_single(
                        (file_path, os.path.basename(file_path)), root_path
                    )
                    print(file_url)
                    artifacts[name] = file_url
        return artifacts

    def _extract_coverage(self, summary_file, node=None):
        """Build coverage `test` child nodes from a gcovr JSON summary.

        Reads *summary_file* (gcovr ``--json-summary`` output) and creates a
        ``coverage.functions`` and/or ``coverage.lines`` child node for
        *node* (defaults to this job's own node), each carrying the coverage
        percentage as a ``misc.measurement`` value.

        Returns a results dict suitable for ``APIHelper.submit_results()``.
        """
        if node is None:
            node = self._node

        with open(summary_file, encoding='utf-8') as summary_json:
            summary = json.load(summary_json)

        node_data = node['data']
        child_nodes = []

        # Both coverage metrics produce an identically-shaped child node,
        # only the node name and the summary key differ.
        for child_name, summary_key in (
            ('coverage.functions', 'function_percent'),
            ('coverage.lines', 'line_percent'),
        ):
            percent = summary.get(summary_key)
            if percent is None:
                # Metric absent from the summary: don't report it
                continue
            data = node_data.copy()
            data['misc'] = {'measurement': percent}
            child_nodes.append({
                'node': {
                    'kind': 'test',
                    'name': child_name,
                    'result': 'pass',
                    'state': 'done',
                    'data': data,
                },
                'child_nodes': [],
            })

        return {
            'node': {
                # Our own node passes if we got this far; sibling nodes keep
                # whatever result they already have.
                'result': 'pass' if node['id'] == self._node['id'] else node['result'],
                'artifacts': {},
            },
            'child_nodes': child_nodes,
        }

    def _run(self, src_path):
        """Generate per-job and merged coverage reports.

        Downloads the kernel source tarball and each sibling test job's raw
        coverage data, processes them with ``gcovr``, submits per-job
        coverage child nodes, then merges all tracefiles into an HTML report
        and a gzipped lcov tracefile.

        Returns the results dict for this node ('fail' when the final merge
        step does not succeed).
        """
        self._artifacts = {}
        api_helper = kernelci.api.helper.APIHelper(self._api)
        child_nodes = self._api.node.findfast({'parent': self._node['parent']})

        # Job log, uploaded as an artifact at the end of the run
        log_path = os.path.join(self._workspace, "log.txt")
        log_file = open(log_path, mode='w', encoding='utf-8')

        log_file.write("Getting coverage source...\n")
        tarball_url = self._get_artifact_url(self._node, 'coverage_source_tar_xz')
        self._get_source(tarball_url)
        # Not getting src_path from _get_source() as it doesn't work in our case
        # We do know that the top-level folder is named 'linux' however, so let's
        # just use that
        src_path = os.path.join(self._workspace, 'linux')
        log_file.write(f"Coverage source downloaded from {tarball_url}\n")

        base_cmd = ['gcovr', '--root', src_path]
        tracefiles = []

        # Download and process coverage data for all child nodes
        for cnode in child_nodes:
            if cnode['id'] == self._node['id']:
                log_file.write(f"Skipping self ({cnode['id']})\n")
                continue

            coverage_dir = os.path.join(self._workspace, f"coverage-{cnode['id']}")
            json_summary = coverage_dir + '.summary.json'
            try:
                data_url = self._get_artifact_url(cnode, 'coverage_data')
                tracefile = coverage_dir + '.json'
                self._get_source(data_url, path=coverage_dir)
                log_file.write(f"Downloaded coverage data from {data_url}\n")
            except Exception:
                # Best-effort: a job without usable coverage data is skipped,
                # the others are still processed.
                log_file.write(f"WARNING: Unable to download coverage data for {cnode['id']}\n")
                continue

            # We now have raw coverage data available, process it
            log_file.write(f"--- Processing coverage data for {cnode['id']} ---\n")
            cmd = subprocess.run(base_cmd + [
                '--gcov-ignore-parse-errors',
                '--object-directory', coverage_dir,
                '--json', tracefile,
                '--json-summary', json_summary,
            ], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
            log_file.write(cmd.stdout)

            try:
                cmd.check_returncode()
            except subprocess.CalledProcessError:
                log_file.write(f"WARNING: Unable to process coverage data for {cnode['id']}\n")
                continue

            tracefiles += [tracefile]
            results = self._extract_coverage(json_summary, node=cnode)
            # We only want to create child nodes reporting coverage percentages,
            # not actually update the test node
            if len(results['child_nodes']) > 0:
                api_helper.submit_results(results, cnode)

        # Coverage data has been processed for all child nodes, we can now
        # merge the tracefiles. Copy base_cmd so we don't mutate it in place.
        args = list(base_cmd)
        for trace in tracefiles:
            args += ['--add-tracefile', trace]

        output_base = os.path.join(self._workspace, f"coverage-{self._node['parent']}")
        json_summary = output_base + '.summary.json'
        html_report = output_base + '.html'
        lcov_tracefile = output_base + '.info'
        args += [
            '--json-summary', json_summary,
            '--html', html_report,
            '--lcov', lcov_tracefile,
        ]

        log_file.write("--- Merging tracefiles ---\n")
        cmd = subprocess.run(args,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             text=True)
        log_file.write(cmd.stdout)

        # Ensure job completed successfully or report failure
        try:
            cmd.check_returncode()
        except subprocess.CalledProcessError:
            log_file.write("ERROR: Unable to generate coverage report\n")
            log_file.close()

            self._artifacts = {'log': log_path}
            return {
                'node': {
                    'result': 'fail',
                    'artifacts': {},
                },
                'child_nodes': [],
            }

        # Compress the lcov tracefile so developers can download it for
        # more targeted processing
        log_file.write("--- Compressing artifacts ---\n")
        compressed_lcov = lcov_tracefile + '.gz'
        with open(lcov_tracefile, 'rb') as f_in:
            with gzip.open(compressed_lcov, 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)

        # Finish writing the job log and upload it along with other artifacts
        log_file.write("--- Job successful ---\n")
        log_file.close()

        self._artifacts = {
            'coverage_report': html_report,
            'tracefile': compressed_lcov,
            'log': log_path,
        }

        # Global coverage percentages for this run, reported as child nodes
        # of our own node
        return self._extract_coverage(json_summary)

    def _submit(self, result):
        """Upload artifacts and submit *result* for this node to the API."""
        # Ensure top-level name is kept the same
        result = result.copy()
        # Update node from API, as we might have new fields
        # such as k8s_context
        node_id = self._node['id']
        self._node = self._api.node.get(node_id)
        # Upload artifacts and update node accordingly
        artifacts = self._upload_artifacts()
        result['node']['name'] = self._node['name']
        result['node']['state'] = 'done'
        result['node']['artifacts'] = artifacts
        # Actually submit the results
        api_helper = kernelci.api.helper.APIHelper(self._api)
        api_helper.submit_results(result, self._node)
225+
226+
{% endblock %}

0 commit comments

Comments
 (0)