Lava: Clear the bash cache manually
[lttng-ci.git] / scripts / lttng-baremetal-tests / lava-submit.py
1 #!/usr/bin/python
2 # Copyright (C) 2016 - Francis Deslauriers <francis.deslauriers@efficios.com>
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 import argparse
18 import base64
19 import json
20 import os
21 import sys
22 import time
23 import xmlrpclib
24 from collections import OrderedDict
25 from enum import Enum
26
27 USERNAME = 'frdeso'
28 HOSTNAME = 'lava-master.internal.efficios.com'
29 SCP_PATH = 'scp://jenkins-lava@storage.internal.efficios.com'
30
class TestType(Enum):
    """Kind of Lava job this script can submit."""
    benchmarks = 1
    tests = 2
34
def get_job_bundle_content(server, job):
    """Fetch and deserialize the result bundle attached to a Lava job.

    Looks up the job's bundle sha1 through the scheduler API, downloads
    the bundle from the dashboard and returns its JSON content as a dict.
    """
    status = server.scheduler.job_status(str(job))
    bundle = server.dashboard.get(status['bundle_sha1'])
    return json.loads(bundle['content'])
40
# Parse the results bundle to see if the run-tests testcases
# of the lttng-kernel-tests passed successfully
def check_job_all_test_cases_state_count(server, job):
    """Count passed and failed test cases in the job's result bundle.

    :param server: xmlrpclib proxy to the Lava master
    :param job: Lava job id
    :returns: (passed_tests, failed_tests) tuple of counters
    """
    content = get_job_bundle_content(server, job)

    passed_tests = 0
    failed_tests = 0
    for run in content['test_runs']:
        for result in run['test_results']:
            if 'test_case_id' in result:
                # Use equality: the original `x in 'pass'` was a substring
                # test and would also count results like 'a' or 'as'.
                if result['result'] == 'pass':
                    passed_tests += 1
                elif result['test_case_id'] == 'wait_for_test_image_prompt':
                    # FIXME:This test is part of the boot action and fails
                    # randomly but doesn't affect the behaviour of the tests.
                    # No reply on the Lava IRC channel yet. We should update
                    # our Lava installation and try to reproduce it. This error
                    # was encountered ont the KVM trusty image only. Not seen
                    # on Xenial at this point.
                    pass
                else:
                    failed_tests += 1
    return (passed_tests, failed_tests)
64
# Parse the attachment of the testcase to fetch the stdout of the test suite
def print_test_output(server, job):
    """Print the run-tests section of the job's captured stdout log.

    Fetches the result bundle, locates the 'stdout.log' attachment of the
    'lttng-kernel-test' run, and echoes every line found between the
    LAVA_SIGNAL_STARTTC/ENDTC run-tests markers.
    """
    content = get_job_bundle_content(server, job)
    found = False

    for run in content['test_runs']:
        # Equality instead of the original substring test (`in`).
        if run['test_id'] == 'lttng-kernel-test':
            for attachment in run['attachments']:
                if attachment['pathname'] == 'stdout.log':

                    # b64decode returns bytes on Python 3; decode to text
                    # before splitting so this works on Python 2 and 3 alike.
                    testoutput = base64.b64decode(attachment['content']).decode('utf-8').split('\n')

                    # Use an explicit iterator so the position is shared
                    # between the marker scan and the inner printing loop.
                    testoutput_iter = iter(testoutput)
                    for line in testoutput_iter:

                        # Find the header of the test case and start printing
                        # from there
                        if 'LAVA_SIGNAL_STARTTC run-tests' in line:
                            found = True
                            print('---- TEST SUITE OUTPUT BEGIN ----')
                            for line in testoutput_iter:
                                if 'LAVA_SIGNAL_ENDTC run-tests' not in line:
                                    print(line)
                                else:
                                    # Print until we reach the end of the
                                    # section
                                    break

                        if found is True:
                            print('----- TEST SUITE OUTPUT END -----')
                            break
100
def create_new_job(name, build_device):
    """Return the skeleton of a Lava job definition.

    :param name: job name shown in the Lava scheduler
    :param build_device: target device type, 'x86' or 'kvm'
    :returns: OrderedDict with an empty 'actions' list to be filled in
    """
    job = OrderedDict({
        'health_check': False,
        'job_name': name,
        'device_type': build_device,
        'tags': [ ],
        'timeout': 18000,
        'actions': []
    })
    # Use equality: `build_device in 'x86'` was a substring test and would
    # also match e.g. 'x8' or '86'.
    if build_device == 'x86':
        job['tags'].append('dev-sda1')

    return job
114
def get_boot_cmd():
    """Return the Lava action that boots the deployed kernel image."""
    return OrderedDict(command='boot_image')
120
def get_config_cmd(build_device):
    """Return a lava_command_run action that prepares the target system.

    Fixes DNS, creates the 'tracing' group, (x86 only) mounts and wipes the
    scratch disk, then installs the build/test dependency packages.

    :param build_device: target device type, 'x86' or 'kvm'
    """
    packages = ['bsdtar', 'psmisc', 'wget', 'python3', 'python3-pip', \
                'libglib2.0-dev', 'libffi-dev', 'elfutils', 'libdw-dev', \
                'libelf-dev', 'libmount-dev', 'libxml2']
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'cat /etc/resolv.conf',
                'echo nameserver 172.18.0.12 > /etc/resolv.conf',
                'groupadd tracing'
            ]
        }
    })
    # Use equality: `build_device in 'x86'` was a substring test and would
    # also match e.g. 'x8' or '86'.
    if build_device == 'x86':
        command['parameters']['commands'].extend([
            'mount /dev/sda1 /tmp',
            'rm -rf /tmp/*'])

    command['parameters']['commands'].extend([
        'depmod -a',
        'locale-gen en_US.UTF-8',
        'apt-get update',
        'apt-get upgrade',
        'apt-get install -y {}'.format(' '.join(packages))
    ])
    return command
148
def get_benchmarks_cmd():
    """Return the lava_test_shell action running the benchmark testdefs."""
    testdef_paths = [
        'lava/baremetal-tests/failing-close.yml',
        'lava/baremetal-tests/failing-open-efault.yml',
        'lava/baremetal-tests/failing-open-enoent.yml',
        'lava/baremetal-tests/perf-tests.yml',
    ]
    # Every testdef comes from the same repository at the same revision.
    repos = [
        {
            'git-repo': 'https://github.com/lttng/lttng-ci.git',
            'revision': 'master',
            'testdef': path
        }
        for path in testdef_paths
    ]
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': repos,
            'timeout': 18000
        }
    })
179
def get_tests_cmd():
    """Return the lava_test_shell action running the kernel test suite."""
    kernel_tests_repo = {
        'git-repo': 'https://github.com/lttng/lttng-ci.git',
        'revision': 'master',
        'testdef': 'lava/baremetal-tests/kernel-tests.yml'
    }
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [kernel_tests_repo],
            'timeout': 18000
        }
    })
195
def get_results_cmd(stream_name):
    """Return the submit_results action.

    :param stream_name: anonymous dashboard stream to publish the bundle to
    """
    return OrderedDict({
        'command': 'submit_results',
        'parameters': {
            'server': 'http://lava-master.internal.efficios.com/RPC2/',
            'stream': '/anonymous/' + stream_name + '/'
        }
    })
205
def get_deploy_cmd_kvm(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path):
    """Return the deploy_kernel action for the KVM target.

    The kernel and both module archives are fetched over SCP; the module
    archives are extracted into the rootfs of the Xenial image.
    """
    # Both module archives are unpacked at the root of the rootfs.
    customize = {
        SCP_PATH + linux_modules_path: ['rootfs:/', 'archive'],
        SCP_PATH + lttng_modules_path: ['rootfs:/', 'archive'],
    }
    return OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {'jenkins_jobname': jenkins_job},
        'parameters': {
            'customize': customize,
            'kernel': str(SCP_PATH + kernel_path),
            'target_type': 'ubuntu',
            'rootfs': 'file:///var/lib/lava-server/default/media/images/xenial.img.gz',
            'login_prompt': 'kvm02 login:',
            'username': 'root'
        }
    })
226
def get_deploy_cmd_x86(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path, nb_iter=None):
    """Return the deploy_kernel action for the x86 baremetal target.

    :param nb_iter: optional benchmark iteration count recorded in the
        job metadata when provided
    """
    # The module tarballs are applied on top of the NFS rootfs.
    overlays = [
        str(SCP_PATH + linux_modules_path),
        str(SCP_PATH + lttng_modules_path),
    ]
    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {'jenkins_jobname': jenkins_job},
        'parameters': {
            'overlays': overlays,
            'kernel': str(SCP_PATH + kernel_path),
            'nfsrootfs': str(SCP_PATH + '/storage/jenkins-lava/rootfs/rootfs_amd64_trusty_2016-02-23-1134.tar.gz'),
            'target_type': 'ubuntu'
        }
    })
    if nb_iter is not None:
        command['metadata']['nb_iterations'] = nb_iter

    return command
247
248
def get_env_setup_cmd(build_device, lttng_tools_commit, lttng_ust_commit=None):
    """Return the lava_command_run action that builds the LTTng environment.

    Installs vlttng on the target, builds lttng-tools (and optionally
    lttng-ust) at the requested commits into a virtual environment, and
    symlinks it to /root/lttngvenv.

    :param build_device: target device type, 'kvm' or 'x86'
    :param lttng_tools_commit: lttng-tools commit id to check out
    :param lttng_ust_commit: optional lttng-ust commit id to check out
    """
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'pip3 install --upgrade pip',
                'hash -r',
                'git clone https://github.com/frdeso/syscall-bench-it.git bm',
                'pip3 install vlttng',
            ],
            'timeout': 18000
        }
    })

    vlttng_cmd = 'vlttng --jobs=$(nproc) --profile urcu-master' \
                 ' --profile babeltrace-stable-1.4 ' \
                 ' --profile lttng-tools-master' \
                 ' --override projects.lttng-tools.checkout='+lttng_tools_commit + \
                 ' --profile lttng-tools-no-man-pages'

    if lttng_ust_commit is not None:
        vlttng_cmd += ' --profile lttng-ust-master ' \
                      ' --override projects.lttng-ust.checkout='+lttng_ust_commit+ \
                      ' --profile lttng-ust-no-man-pages'

    # Use equality: `build_device in 'kvm'` was a substring test and would
    # also match e.g. 'k' or 'vm'.
    virtenv_path = None
    if build_device == 'kvm':
        virtenv_path = '/root/virtenv'
    else:
        virtenv_path = '/tmp/virtenv'

    vlttng_cmd += ' '+virtenv_path

    command['parameters']['commands'].append(vlttng_cmd)
    command['parameters']['commands'].append('ln -s '+virtenv_path+' /root/lttngvenv')
    command['parameters']['commands'].append('sync')

    return command
287
def main():
    """Build, submit and monitor a Lava job from the command line.

    Returns 0 when the job completes with no failed test cases, -1 on bad
    arguments, incomplete job status, or any failed test case.
    """
    test_type = None
    parser = argparse.ArgumentParser(description='Launch baremetal test using Lava')
    parser.add_argument('-t', '--type', required=True)
    parser.add_argument('-j', '--jobname', required=True)
    parser.add_argument('-k', '--kernel', required=True)
    parser.add_argument('-km', '--kmodule', required=True)
    parser.add_argument('-lm', '--lmodule', required=True)
    parser.add_argument('-l', '--lava-key', required=True)
    parser.add_argument('-tc', '--tools-commit', required=True)
    parser.add_argument('-uc', '--ust-commit', required=False)
    args = parser.parse_args()

    # Use equality: the original `args.type in 'benchmarks'` was a substring
    # test, so e.g. `-t e` would have been accepted as 'benchmarks'.
    if args.type == 'benchmarks':
        test_type = TestType.benchmarks
    elif args.type == 'tests':
        test_type = TestType.tests
    else:
        print('argument -t/--type {} unrecognized. Exiting...'.format(args.type))
        return -1

    if test_type is TestType.benchmarks:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.tests:
        j = create_new_job(args.jobname, build_device='kvm')
        j['actions'].append(get_deploy_cmd_kvm(args.jobname, args.kernel, args.kmodule, args.lmodule))

    j['actions'].append(get_boot_cmd())

    if test_type is TestType.benchmarks:
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit))
        j['actions'].append(get_benchmarks_cmd())
        j['actions'].append(get_results_cmd(stream_name='benchmark-kernel'))
    elif test_type is TestType.tests:
        if args.ust_commit is None:
            print('Tests runs need -uc/--ust-commit options. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('kvm'))
        j['actions'].append(get_env_setup_cmd('kvm', args.tools_commit, args.ust_commit))
        j['actions'].append(get_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    else:
        assert False, 'Unknown test type'

    server = xmlrpclib.ServerProxy('http://%s:%s@%s/RPC2' % (USERNAME, args.lava_key, HOSTNAME))

    jobid = server.scheduler.submit_job(json.dumps(j))

    print('Lava jobid:{}'.format(jobid))

    # Poll the status of the job every 30 seconds until it leaves the
    # scheduler queue.
    jobstatus = server.scheduler.job_status(jobid)['job_status']
    while jobstatus in ('Submitted', 'Running'):
        time.sleep(30)
        jobstatus = server.scheduler.job_status(jobid)['job_status']

    passed, failed = check_job_all_test_cases_state_count(server, jobid)

    if test_type is TestType.tests:
        print_test_output(server, jobid)

    print('Job ended with {} status.'.format(jobstatus))
    if jobstatus != 'Complete':
        return -1
    else:
        print('With {} passed and {} failed Lava test cases.'.format(passed, failed))

    if failed == 0:
        return 0
    else:
        return -1

if __name__ == "__main__":
    sys.exit(main())
This page took 0.039339 seconds and 5 git commands to generate.