Lava: Upgrade rootfs to Xenial for kernel tests
[lttng-ci.git] / scripts / lttng-baremetal-tests / lava-submit.py
1 #!/usr/bin/python
2 # Copyright (C) 2016 - Francis Deslauriers <francis.deslauriers@efficios.com>
3 #
4 # This program is free software: you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation, either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 import argparse
18 import base64
19 import json
20 import os
21 import sys
22 import time
23 import xmlrpclib
24 from collections import OrderedDict
25 from enum import Enum
26
# Credentials and endpoints used to submit LAVA jobs and fetch results.
USERNAME = 'frdeso'
HOSTNAME = 'lava-master.internal.efficios.com'
# Base scp:// URL under which Jenkins publishes kernel/module artefacts.
SCP_PATH = 'scp://jenkins-lava@storage.internal.efficios.com'
30
class TestType(Enum):
    """Kinds of LAVA jobs this script knows how to submit."""
    benchmarks = 1
    tests = 2
34
def get_job_bundle_content(server, job):
    """Fetch and decode the JSON result bundle attached to a LAVA job."""
    status = server.scheduler.job_status(str(job))
    bundle = server.dashboard.get(status['bundle_sha1'])
    return json.loads(bundle['content'])
40
# Parse the results bundle to see if the run-tests testcases
# of the lttng-kernel-tests passed successfully.
def check_job_all_test_cases_state_count(server, job):
    """Return a (passed, failed) tuple of test-case counts for `job`.

    The known-flaky 'wait_for_test_image_prompt' boot test case is
    ignored entirely (counted neither as passed nor failed).
    """
    content = get_job_bundle_content(server, job)

    passed_tests = 0
    failed_tests = 0
    for run in content['test_runs']:
        for result in run['test_results']:
            if 'test_case_id' in result:
                # Use equality, not substring matching: the original
                # `result['result'] in 'pass'` was also true for any
                # substring of 'pass' (e.g. '' or 'a').
                if result['result'] == 'pass':
                    passed_tests += 1
                elif result['test_case_id'] == 'wait_for_test_image_prompt':
                    # FIXME:This test is part of the boot action and fails
                    # randomly but doesn't affect the behaviour of the tests.
                    # No reply on the Lava IRC channel yet. We should update
                    # our Lava installation and try to reproduce it. This error
                    # was encountered ont the KVM trusty image only. Not seen
                    # on Xenial at this point.
                    pass
                else:
                    failed_tests += 1
    return (passed_tests, failed_tests)
64
# Parse the attachment of the testcase to fetch the stdout of the test suite
def print_test_output(server, job):
    """Print the test-suite output embedded in the job's result bundle.

    Looks for the base64-encoded 'stdout.log' attachment of the
    'lttng-kernel-test' run and prints everything between the
    'LAVA_SIGNAL_STARTTC run-tests' and 'LAVA_SIGNAL_ENDTC run-tests'
    markers.
    """
    content = get_job_bundle_content(server, job)
    found = False

    for run in content['test_runs']:
        # Equality, not substring matching: `run['test_id'] in '...'`
        # also matched any substring of the literal (e.g. '').
        if run['test_id'] == 'lttng-kernel-test':
            for attachment in run['attachments']:
                if attachment['pathname'] == 'stdout.log':

                    # Decode the base64 file and split on newlines to iterate
                    # on list
                    testoutput = base64.b64decode(attachment['content']).split('\n')

                    # Create a generator to iterate on the lines and keeping
                    # the state of the iterator across the two loops.
                    testoutput_iter = iter(testoutput)
                    for line in testoutput_iter:

                        # Find the header of the test case and start printing
                        # from there
                        if 'LAVA_SIGNAL_STARTTC run-tests' in line:
                            found = True
                            print('---- TEST SUITE OUTPUT BEGIN ----')
                            for line in testoutput_iter:
                                if 'LAVA_SIGNAL_ENDTC run-tests' not in line:
                                    print(line)
                                else:
                                    # Print until we reach the end of the
                                    # section
                                    break

                    if found is True:
                        print('----- TEST SUITE OUTPUT END -----')
                        break
100
def create_new_job(name, build_device):
    """Return the skeleton of a LAVA job dict for `build_device`.

    `build_device` is 'x86' (baremetal) or 'kvm'.  Baremetal x86 jobs
    are tagged 'dev-sda1' so the scheduler picks a machine that has the
    scratch disk the config step mounts on /tmp.
    """
    job = OrderedDict({
        'health_check': False,
        'job_name': name,
        'device_type': build_device,
        'tags': [ ],
        'timeout': 18000,
        'actions': []
    })
    # Equality, not `in`: `build_device in 'x86'` also matched any
    # substring of 'x86' (e.g. 'x' or ''), not just the device type.
    if build_device == 'x86':
        job['tags'].append('dev-sda1')

    return job
114
def get_boot_cmd():
    """Return the LAVA action that boots the deployed image."""
    return OrderedDict(command='boot_image')
120
def get_config_cmd(build_device):
    """Return the lava_command_run action that configures the rootfs.

    Brings networking up, sets the nameserver, creates the 'tracing'
    group and installs the build/test dependencies.  On baremetal x86
    it also mounts the /dev/sda1 scratch disk on /tmp and wipes it.
    """
    packages = ['bsdtar', 'psmisc', 'wget', 'python3', 'python3-pip', \
            'libglib2.0-dev', 'libffi-dev', 'elfutils', 'libdw-dev', \
            'libelf-dev', 'libmount-dev', 'libxml2']
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'ifup eth0',
                'route -n',
                'cat /etc/resolv.conf',
                'echo nameserver 172.18.0.12 > /etc/resolv.conf',
                'groupadd tracing'
            ]
        }
    })
    # Equality, not `in`: `build_device in 'x86'` also matched any
    # substring of 'x86' (e.g. 'x' or ''), not just the device type.
    if build_device == 'x86':
        command['parameters']['commands'].extend([
            'mount /dev/sda1 /tmp',
            'rm -rf /tmp/*'])

    command['parameters']['commands'].extend([
        'depmod -a',
        'locale-gen en_US.UTF-8',
        'apt-get update',
        # NOTE(review): 'apt-get upgrade' has no '-y'; if apt ever
        # prompts here the run would abort/hang -- confirm intended.
        'apt-get upgrade',
        'apt-get install -y {}'.format(' '.join(packages))
    ])
    return command
150
def get_benchmarks_cmd():
    """Return the lava_test_shell action that runs the benchmark testdefs."""
    testdef_files = [
        'lava/baremetal-tests/failing-close.yml',
        'lava/baremetal-tests/failing-open-efault.yml',
        'lava/baremetal-tests/failing-open-enoent.yml',
        'lava/baremetal-tests/perf-tests.yml',
    ]
    # Every testdef comes from the master branch of the lttng-ci repo.
    testdef_repos = [
        {
            'git-repo': 'https://github.com/lttng/lttng-ci.git',
            'revision': 'master',
            'testdef': testdef
        }
        for testdef in testdef_files
    ]
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': testdef_repos,
            'timeout': 18000
        }
    })
181
def get_tests_cmd():
    """Return the lava_test_shell action that runs the kernel test suite."""
    kernel_testdef = {
        'git-repo': 'https://github.com/lttng/lttng-ci.git',
        'revision': 'master',
        'testdef': 'lava/baremetal-tests/kernel-tests.yml'
    }
    return OrderedDict({
        'command': 'lava_test_shell',
        'parameters': {
            'testdef_repos': [kernel_testdef],
            'timeout': 18000
        }
    })
197
def get_results_cmd(stream_name):
    """Return the submit_results action posting to /anonymous/<stream>/."""
    parameters = {
        'server': 'http://lava-master.internal.efficios.com/RPC2/',
        'stream': '/anonymous/' + stream_name + '/',
    }
    return OrderedDict([
        ('command', 'submit_results'),
        ('parameters', parameters),
    ])
207
def get_deploy_cmd_kvm(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path):
    """Return the deploy_kernel action for the KVM target.

    The kernel is fetched over scp; both module archives are unpacked
    into the root of the Xenial rootfs image.
    """
    customize = {
        SCP_PATH + linux_modules_path: ['rootfs:/', 'archive'],
        SCP_PATH + lttng_modules_path: ['rootfs:/', 'archive'],
    }
    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {'jenkins_jobname': jenkins_job},
        'parameters': {
            'customize': customize,
            'kernel': str(SCP_PATH + kernel_path),
            'target_type': 'ubuntu',
            'rootfs': 'file:///var/lib/lava-server/default/media/images/xenial.img.gz',
            'login_prompt': 'kvm02 login:',
            'username': 'root'
        }
    })

    return command
228
def get_deploy_cmd_x86(jenkins_job, kernel_path, linux_modules_path, lttng_modules_path, nb_iter=None):
    """Return the deploy_kernel action for the baremetal x86 target.

    The kernel boots over NFS from a trusty rootfs; both module
    archives are applied as overlays.  `nb_iter`, when given, is
    recorded in the job metadata as the benchmark iteration count.
    """
    command = OrderedDict({
        'command': 'deploy_kernel',
        'metadata': {'jenkins_jobname': jenkins_job},
        'parameters': {
            'overlays': [
                str(SCP_PATH + linux_modules_path),
                str(SCP_PATH + lttng_modules_path),
            ],
            'kernel': str(SCP_PATH + kernel_path),
            'nfsrootfs': str(SCP_PATH + '/storage/jenkins-lava/rootfs/rootfs_amd64_trusty_2016-02-23-1134.tar.gz'),
            'target_type': 'ubuntu'
        }
    })

    if nb_iter is not None:
        command['metadata']['nb_iterations'] = nb_iter

    return command
249
250
def get_env_setup_cmd(build_device, lttng_tools_commit, lttng_ust_commit=None):
    """Return the lava_command_run action that sets up the test env.

    Clones the benchmark suite and uses vlttng to build lttng-tools
    (and, when `lttng_ust_commit` is given, lttng-ust) at the requested
    commits into a virtualenv, symlinked at /root/lttngvenv for the
    test definitions.
    """
    command = OrderedDict({
        'command': 'lava_command_run',
        'parameters': {
            'commands': [
                'pip3 install --upgrade pip',
                'git clone https://github.com/frdeso/syscall-bench-it.git bm',
                'pip3 install vlttng',
            ],
            'timeout': 18000
        }
    })

    vlttng_cmd = 'vlttng --jobs=16 --profile urcu-master' \
                    ' --profile babeltrace-stable-1.4 ' \
                    ' --profile lttng-tools-master' \
                    ' --override projects.lttng-tools.checkout='+lttng_tools_commit + \
                    ' --profile lttng-tools-no-man-pages'

    if lttng_ust_commit is not None:
        vlttng_cmd += ' --profile lttng-ust-master ' \
                        ' --override projects.lttng-ust.checkout='+lttng_ust_commit+ \
                        ' --profile lttng-ust-no-man-pages'

    # Equality, not `in`: `build_device in 'kvm'` also matched any
    # substring of 'kvm' (e.g. 'k' or ''), not just the device type.
    if build_device == 'kvm':
        virtenv_path = '/root/virtenv'
    else:
        virtenv_path = '/tmp/virtenv'

    vlttng_cmd += ' '+virtenv_path

    command['parameters']['commands'].append(vlttng_cmd)
    command['parameters']['commands'].append('ln -s '+virtenv_path+' /root/lttngvenv')
    command['parameters']['commands'].append('sync')

    return command
288
def main():
    """Build, submit and monitor a LAVA job; return a shell exit code.

    Returns 0 when the job completed with no failed test case, -1 on
    bad arguments, an incomplete job, or test failures.
    """
    test_type = None
    parser = argparse.ArgumentParser(description='Launch baremetal test using Lava')
    parser.add_argument('-t', '--type', required=True)
    parser.add_argument('-j', '--jobname', required=True)
    parser.add_argument('-k', '--kernel', required=True)
    parser.add_argument('-km', '--kmodule', required=True)
    parser.add_argument('-lm', '--lmodule', required=True)
    parser.add_argument('-l', '--lava-key', required=True)
    parser.add_argument('-tc', '--tools-commit', required=True)
    parser.add_argument('-uc', '--ust-commit', required=False)
    args = parser.parse_args()

    # Equality, not substring matching: `args.type in 'benchmarks'`
    # also accepted any substring (e.g. 'bench' or '').
    if args.type == 'benchmarks':
        test_type = TestType.benchmarks
    elif args.type == 'tests':
        test_type = TestType.tests
    else:
        print('argument -t/--type {} unrecognized. Exiting...'.format(args.type))
        return -1

    # Benchmarks run on baremetal x86; the kernel test suite runs in KVM.
    if test_type is TestType.benchmarks:
        j = create_new_job(args.jobname, build_device='x86')
        j['actions'].append(get_deploy_cmd_x86(args.jobname, args.kernel, args.kmodule, args.lmodule))
    elif test_type is TestType.tests:
        j = create_new_job(args.jobname, build_device='kvm')
        j['actions'].append(get_deploy_cmd_kvm(args.jobname, args.kernel, args.kmodule, args.lmodule))

    j['actions'].append(get_boot_cmd())

    if test_type is TestType.benchmarks:
        j['actions'].append(get_config_cmd('x86'))
        j['actions'].append(get_env_setup_cmd('x86', args.tools_commit))
        j['actions'].append(get_benchmarks_cmd())
        j['actions'].append(get_results_cmd(stream_name='benchmark-kernel'))
    elif test_type is TestType.tests:
        if args.ust_commit is None:
            print('Tests runs need -uc/--ust-commit options. Exiting...')
            return -1
        j['actions'].append(get_config_cmd('kvm'))
        j['actions'].append(get_env_setup_cmd('kvm', args.tools_commit, args.ust_commit))
        j['actions'].append(get_tests_cmd())
        j['actions'].append(get_results_cmd(stream_name='tests-kernel'))
    else:
        assert False, 'Unknown test type'

    server = xmlrpclib.ServerProxy('http://%s:%s@%s/RPC2' % (USERNAME, args.lava_key, HOSTNAME))

    jobid = server.scheduler.submit_job(json.dumps(j))

    print('Lava jobid:{}'.format(jobid))

    # Poll the status of the job every 30 seconds until it leaves the
    # queued/running states.  Exact-match against the known status
    # strings; the old `jobstatus in 'Submitted'` substring test would
    # also have matched e.g. 'Sub'.
    jobstatus = server.scheduler.job_status(jobid)['job_status']
    while jobstatus in ('Submitted', 'Running'):
        time.sleep(30)
        jobstatus = server.scheduler.job_status(jobid)['job_status']

    passed, failed = check_job_all_test_cases_state_count(server, jobid)

    # Only the kernel test suite produces a stdout.log worth printing.
    if test_type is TestType.tests:
        print_test_output(server, jobid)

    print('Job ended with {} status.'.format(jobstatus))
    if jobstatus != 'Complete':
        return -1
    else:
        print('With {} passed and {} failed Lava test cases.'.format(passed, failed))

    if failed == 0:
        return 0
    else:
        return -1
362
# Script entry point: propagate main()'s return value as the exit code.
if __name__ == "__main__":
    sys.exit(main())
This page took 0.037771 seconds and 5 git commands to generate.