Rename "tsc" to "timestamp"
[lttng-modules.git] / src / lttng-bytecode-specialize.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-specialize.c
4 *
5 * LTTng modules bytecode code specializer.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <linux/slab.h>
11 #include <wrapper/compiler_attributes.h>
12
13 #include <lttng/lttng-bytecode.h>
14 #include <lttng/align.h>
15 #include <lttng/events-internal.h>
16
/*
 * Reserve 'len' bytes aligned on 'align' in the runtime data area,
 * growing the backing allocation as needed.
 *
 * Returns the offset of the reserved region within runtime->data on
 * success, -EINVAL if the capped maximum data length would be exceeded,
 * or -ENOMEM on allocation failure.
 */
static ssize_t bytecode_reserve_data(struct bytecode_runtime *runtime,
		size_t align, size_t len)
{
	ssize_t ret;
	size_t padding = offset_align(runtime->data_len, align);
	size_t new_len = runtime->data_len + padding + len;
	size_t new_alloc_len = new_len;
	size_t old_alloc_len = runtime->data_alloc_len;

	if (new_len > INTERPRETER_MAX_DATA_LEN)
		return -EINVAL;

	if (new_alloc_len > old_alloc_len) {
		char *newptr;

		/*
		 * Grow geometrically: at least the next power of two above
		 * the requested length, and at least double the current
		 * allocation, to amortize repeated reservations.
		 */
		new_alloc_len =
			max_t(size_t, 1U << get_count_order(new_alloc_len), old_alloc_len << 1);
		newptr = krealloc(runtime->data, new_alloc_len, GFP_KERNEL);
		if (!newptr)
			return -ENOMEM;
		runtime->data = newptr;
		/* We zero directly the memory from start of allocation. */
		memset(&runtime->data[old_alloc_len], 0, new_alloc_len - old_alloc_len);
		runtime->data_alloc_len = new_alloc_len;
	}
	/* Account for alignment padding, then hand out the aligned offset. */
	runtime->data_len += padding;
	ret = runtime->data_len;
	runtime->data_len += len;
	return ret;
}
47
48 static ssize_t bytecode_push_data(struct bytecode_runtime *runtime,
49 const void *p, size_t align, size_t len)
50 {
51 ssize_t offset;
52
53 offset = bytecode_reserve_data(runtime, align, len);
54 if (offset < 0)
55 return -ENOMEM;
56 memcpy(&runtime->data[offset], p, len);
57 return offset;
58 }
59
60 static int specialize_load_field(struct vstack_entry *stack_top,
61 struct load_op *insn)
62 {
63 int ret;
64
65 switch (stack_top->load.type) {
66 case LOAD_OBJECT:
67 break;
68 case LOAD_ROOT_CONTEXT:
69 case LOAD_ROOT_APP_CONTEXT:
70 case LOAD_ROOT_PAYLOAD:
71 default:
72 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
73 ret = -EINVAL;
74 goto end;
75 }
76 switch (stack_top->load.object_type) {
77 case OBJECT_TYPE_S8:
78 dbg_printk("op load field s8\n");
79 stack_top->type = REG_S64;
80 if (!stack_top->load.rev_bo)
81 insn->op = BYTECODE_OP_LOAD_FIELD_S8;
82 break;
83 case OBJECT_TYPE_S16:
84 dbg_printk("op load field s16\n");
85 stack_top->type = REG_S64;
86 if (!stack_top->load.rev_bo)
87 insn->op = BYTECODE_OP_LOAD_FIELD_S16;
88 break;
89 case OBJECT_TYPE_S32:
90 dbg_printk("op load field s32\n");
91 stack_top->type = REG_S64;
92 if (!stack_top->load.rev_bo)
93 insn->op = BYTECODE_OP_LOAD_FIELD_S32;
94 break;
95 case OBJECT_TYPE_S64:
96 dbg_printk("op load field s64\n");
97 stack_top->type = REG_S64;
98 if (!stack_top->load.rev_bo)
99 insn->op = BYTECODE_OP_LOAD_FIELD_S64;
100 break;
101 case OBJECT_TYPE_SIGNED_ENUM:
102 dbg_printk("op load field signed enumeration\n");
103 stack_top->type = REG_PTR;
104 break;
105 case OBJECT_TYPE_U8:
106 dbg_printk("op load field u8\n");
107 stack_top->type = REG_S64;
108 insn->op = BYTECODE_OP_LOAD_FIELD_U8;
109 break;
110 case OBJECT_TYPE_U16:
111 dbg_printk("op load field u16\n");
112 stack_top->type = REG_S64;
113 if (!stack_top->load.rev_bo)
114 insn->op = BYTECODE_OP_LOAD_FIELD_U16;
115 break;
116 case OBJECT_TYPE_U32:
117 dbg_printk("op load field u32\n");
118 stack_top->type = REG_S64;
119 if (!stack_top->load.rev_bo)
120 insn->op = BYTECODE_OP_LOAD_FIELD_U32;
121 break;
122 case OBJECT_TYPE_U64:
123 dbg_printk("op load field u64\n");
124 stack_top->type = REG_S64;
125 if (!stack_top->load.rev_bo)
126 insn->op = BYTECODE_OP_LOAD_FIELD_U64;
127 break;
128 case OBJECT_TYPE_UNSIGNED_ENUM:
129 dbg_printk("op load field unsigned enumeration\n");
130 stack_top->type = REG_PTR;
131 break;
132 case OBJECT_TYPE_DOUBLE:
133 printk(KERN_WARNING "LTTng: bytecode: Double type unsupported\n\n");
134 ret = -EINVAL;
135 goto end;
136 case OBJECT_TYPE_STRING:
137 dbg_printk("op load field string\n");
138 stack_top->type = REG_STRING;
139 insn->op = BYTECODE_OP_LOAD_FIELD_STRING;
140 break;
141 case OBJECT_TYPE_STRING_SEQUENCE:
142 dbg_printk("op load field string sequence\n");
143 stack_top->type = REG_STRING;
144 insn->op = BYTECODE_OP_LOAD_FIELD_SEQUENCE;
145 break;
146 case OBJECT_TYPE_DYNAMIC:
147 ret = -EINVAL;
148 goto end;
149 case OBJECT_TYPE_SEQUENCE:
150 case OBJECT_TYPE_ARRAY:
151 case OBJECT_TYPE_STRUCT:
152 case OBJECT_TYPE_VARIANT:
153 printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
154 ret = -EINVAL;
155 goto end;
156 }
157 return 0;
158
159 end:
160 return ret;
161 }
162
163 static int specialize_get_index_object_type(enum object_type *otype,
164 int signedness, uint32_t elem_len)
165 {
166 switch (elem_len) {
167 case 8:
168 if (signedness)
169 *otype = OBJECT_TYPE_S8;
170 else
171 *otype = OBJECT_TYPE_U8;
172 break;
173 case 16:
174 if (signedness)
175 *otype = OBJECT_TYPE_S16;
176 else
177 *otype = OBJECT_TYPE_U16;
178 break;
179 case 32:
180 if (signedness)
181 *otype = OBJECT_TYPE_S32;
182 else
183 *otype = OBJECT_TYPE_U32;
184 break;
185 case 64:
186 if (signedness)
187 *otype = OBJECT_TYPE_S64;
188 else
189 *otype = OBJECT_TYPE_U64;
190 break;
191 default:
192 return -EINVAL;
193 }
194 return 0;
195 }
196
/*
 * Specialize a get_index instruction applied to the object at the
 * virtual stack top. Builds a struct bytecode_get_index_data descriptor
 * (element type, byte offset, byte order), pushes it into the runtime
 * data area, and patches the instruction's index operand to reference
 * the descriptor. 'idx_len' is the size in bytes of the instruction's
 * index operand (2 for get_index_u16, 8 for get_index_u64).
 *
 * Returns 0 on success, negative error code on failure.
 */
static int specialize_get_index(struct bytecode_runtime *runtime,
		struct load_op *insn, uint64_t index,
		struct vstack_entry *stack_top,
		int idx_len)
{
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	memset(&gid, 0, sizeof(gid));
	switch (stack_top->load.type) {
	case LOAD_OBJECT:
		switch (stack_top->load.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const struct lttng_kernel_event_field *field;
			const struct lttng_kernel_type_array *array_type;
			const struct lttng_kernel_type_integer *integer_type;
			uint32_t elem_len, num_elems;
			int signedness;

			field = stack_top->load.field;
			array_type = lttng_kernel_get_type_array(field->type);
			/* Only arrays of bytewise integers can be indexed. */
			if (!lttng_kernel_type_is_bytewise_integer(array_type->elem_type)) {
				ret = -EINVAL;
				goto end;
			}
			integer_type = lttng_kernel_get_type_integer(array_type->elem_type);
			num_elems = array_type->length;
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			/* Static bounds check: array length is known here. */
			if (index >= num_elems) {
				ret = -EINVAL;
				goto end;
			}
			/* Narrow the stack-top object type to the element type. */
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			/* elem_len is in bits; offsets are in bytes. */
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.array_len = num_elems * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const struct lttng_kernel_event_field *field;
			const struct lttng_kernel_type_sequence *sequence_type;
			const struct lttng_kernel_type_integer *integer_type;
			uint32_t elem_len;
			int signedness;

			field = stack_top->load.field;
			sequence_type = lttng_kernel_get_type_sequence(field->type);
			/* Only sequences of bytewise integers can be indexed. */
			if (!lttng_kernel_type_is_bytewise_integer(sequence_type->elem_type)) {
				ret = -EINVAL;
				goto end;
			}
			integer_type = lttng_kernel_get_type_integer(sequence_type->elem_type);
			elem_len = integer_type->size;
			signedness = integer_type->signedness;
			ret = specialize_get_index_object_type(&stack_top->load.object_type,
					signedness, elem_len);
			if (ret)
				goto end;
			/*
			 * No static bounds check here: sequence length is only
			 * known at runtime (checked by the interpreter).
			 */
			gid.offset = index * (elem_len / CHAR_BIT);
			gid.elem.type = stack_top->load.object_type;
			gid.elem.len = elem_len;
			if (integer_type->reverse_byte_order)
				gid.elem.rev_bo = true;
			stack_top->load.rev_bo = gid.elem.rev_bo;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			/* Only generated by the specialize phase. */
		case OBJECT_TYPE_VARIANT:
			lttng_fallthrough;
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d",
				(int) stack_top->load.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
		printk(KERN_WARNING "LTTng: bytecode: Index lookup for root field not implemented yet.\n");
		ret = -EINVAL;
		goto end;
	}
	/* Store the descriptor and patch the instruction to point at it. */
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	switch (idx_len) {
	case 2:
		((struct get_index_u16 *) insn->data)->index = data_offset;
		break;
	case 8:
		((struct get_index_u64 *) insn->data)->index = data_offset;
		break;
	default:
		ret = -EINVAL;
		goto end;
	}

	return 0;

end:
	return ret;
}
315
316 static int specialize_context_lookup_name(struct lttng_kernel_ctx *ctx,
317 struct bytecode_runtime *bytecode,
318 struct load_op *insn)
319 {
320 uint16_t offset;
321 const char *name;
322
323 offset = ((struct get_symbol *) insn->data)->offset;
324 name = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + offset;
325 return lttng_kernel_get_context_index(ctx, name);
326 }
327
/*
 * Derive the vstack load object type (and byte-order state) from an
 * event field's type description. 'is_context' selects the context
 * representation for arrays/sequences (always string-like in contexts).
 *
 * Returns 0 on success, -EINVAL for types that cannot be loaded.
 */
static int specialize_load_object(const struct lttng_kernel_event_field *field,
		struct vstack_load *load, bool is_context)
{
	load->type = LOAD_OBJECT;

	switch (field->type->type) {
	case lttng_kernel_type_integer:
		/* All integers are widened to 64-bit registers. */
		if (lttng_kernel_get_type_integer(field->type)->signedness)
			load->object_type = OBJECT_TYPE_S64;
		else
			load->object_type = OBJECT_TYPE_U64;
		load->rev_bo = false;
		break;
	case lttng_kernel_type_enum:
	{
		/* Enum signedness comes from its integer container type. */
		const struct lttng_kernel_type_enum *enum_type = lttng_kernel_get_type_enum(field->type);
		const struct lttng_kernel_type_integer *integer_type = lttng_kernel_get_type_integer(enum_type->container_type);

		if (integer_type->signedness)
			load->object_type = OBJECT_TYPE_SIGNED_ENUM;
		else
			load->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
		load->rev_bo = false;
		break;
	}
	case lttng_kernel_type_array:
	{
		const struct lttng_kernel_type_array *array_type = lttng_kernel_get_type_array(field->type);

		if (!lttng_kernel_type_is_bytewise_integer(array_type->elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (is_context) {
			/* Context arrays are always treated as strings. */
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			/* Encoding distinguishes raw arrays from text. */
			if (array_type->encoding == lttng_kernel_string_encoding_none) {
				load->object_type = OBJECT_TYPE_ARRAY;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	}
	case lttng_kernel_type_sequence:
	{
		const struct lttng_kernel_type_sequence *sequence_type = lttng_kernel_get_type_sequence(field->type);

		if (!lttng_kernel_type_is_bytewise_integer(sequence_type->elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (is_context) {
			/* Context sequences are always treated as strings. */
			load->object_type = OBJECT_TYPE_STRING;
		} else {
			if (sequence_type->encoding == lttng_kernel_string_encoding_none) {
				load->object_type = OBJECT_TYPE_SEQUENCE;
				load->field = field;
			} else {
				load->object_type = OBJECT_TYPE_STRING_SEQUENCE;
			}
		}
		break;
	}
	case lttng_kernel_type_string:
		load->object_type = OBJECT_TYPE_STRING;
		break;
	case lttng_kernel_type_struct:
		printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
		return -EINVAL;
	case lttng_kernel_type_variant:
		printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d", (int) field->type->type);
		return -EINVAL;
	}
	return 0;
}
408
409 static int specialize_context_lookup(struct lttng_kernel_ctx *ctx,
410 struct bytecode_runtime *runtime,
411 struct load_op *insn,
412 struct vstack_load *load)
413 {
414 int idx, ret;
415 const struct lttng_kernel_ctx_field *ctx_field;
416 const struct lttng_kernel_event_field *field;
417 struct bytecode_get_index_data gid;
418 ssize_t data_offset;
419
420 idx = specialize_context_lookup_name(ctx, runtime, insn);
421 if (idx < 0) {
422 return -ENOENT;
423 }
424 ctx_field = &lttng_static_ctx->fields[idx];
425 field = ctx_field->event_field;
426 ret = specialize_load_object(field, load, true);
427 if (ret)
428 return ret;
429 /* Specialize each get_symbol into a get_index. */
430 insn->op = BYTECODE_OP_GET_INDEX_U16;
431 memset(&gid, 0, sizeof(gid));
432 gid.ctx_index = idx;
433 gid.elem.type = load->object_type;
434 gid.elem.rev_bo = load->rev_bo;
435 gid.field = field;
436 data_offset = bytecode_push_data(runtime, &gid,
437 __alignof__(gid), sizeof(gid));
438 if (data_offset < 0) {
439 return -EINVAL;
440 }
441 ((struct get_index_u16 *) insn->data)->index = data_offset;
442 return 0;
443 }
444
/*
 * Specialize a get_symbol lookup against the event payload: find the
 * field by name, compute its byte offset within the interpreter's
 * on-stack payload layout, turn the instruction into a get_index_u16,
 * and store the get-index descriptor in the runtime data area.
 *
 * Returns 0 on success, negative error code on failure (field not
 * found, unsupported field type, or data push failure).
 */
static int specialize_payload_lookup(const struct lttng_kernel_event_desc *event_desc,
		struct bytecode_runtime *runtime,
		struct load_op *insn,
		struct vstack_load *load)
{
	const char *name;
	uint16_t offset;
	unsigned int i, nr_fields;
	bool found = false;
	uint32_t field_offset = 0;
	const struct lttng_kernel_event_field *field;
	int ret;
	struct bytecode_get_index_data gid;
	ssize_t data_offset;

	nr_fields = event_desc->tp_class->nr_fields;
	offset = ((struct get_symbol *) insn->data)->offset;
	name = runtime->p.bc->bc.data + runtime->p.bc->bc.reloc_offset + offset;
	for (i = 0; i < nr_fields; i++) {
		field = event_desc->tp_class->fields[i];
		/* Fields excluded from filtering don't occupy stack slots. */
		if (field->nofilter) {
			continue;
		}
		if (!strcmp(field->name, name)) {
			found = true;
			break;
		}
		/* compute field offset on stack */
		switch (field->type->type) {
		case lttng_kernel_type_integer:
		case lttng_kernel_type_enum:
			field_offset += sizeof(int64_t);
			break;
		case lttng_kernel_type_array:
		case lttng_kernel_type_sequence:
			/* Arrays/sequences occupy a length plus a pointer. */
			field_offset += sizeof(unsigned long);
			field_offset += sizeof(void *);
			break;
		case lttng_kernel_type_string:
			field_offset += sizeof(void *);
			break;
		default:
			ret = -EINVAL;
			goto end;
		}
	}
	if (!found) {
		ret = -EINVAL;
		goto end;
	}

	ret = specialize_load_object(field, load, false);
	if (ret)
		goto end;

	/* Specialize each get_symbol into a get_index. */
	insn->op = BYTECODE_OP_GET_INDEX_U16;
	memset(&gid, 0, sizeof(gid));
	gid.offset = field_offset;
	gid.elem.type = load->object_type;
	gid.elem.rev_bo = load->rev_bo;
	gid.field = field;
	data_offset = bytecode_push_data(runtime, &gid,
			__alignof__(gid), sizeof(gid));
	if (data_offset < 0) {
		ret = -EINVAL;
		goto end;
	}
	((struct get_index_u16 *) insn->data)->index = data_offset;
	ret = 0;
end:
	return ret;
}
518
519 int lttng_bytecode_specialize(const struct lttng_kernel_event_desc *event_desc,
520 struct bytecode_runtime *bytecode)
521 {
522 void *pc, *next_pc, *start_pc;
523 int ret = -EINVAL;
524 struct vstack _stack;
525 struct vstack *stack = &_stack;
526 struct lttng_kernel_ctx *ctx = bytecode->p.ctx;
527
528 vstack_init(stack);
529
530 start_pc = &bytecode->code[0];
531 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
532 pc = next_pc) {
533 switch (*(bytecode_opcode_t *) pc) {
534 case BYTECODE_OP_UNKNOWN:
535 default:
536 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
537 (unsigned int) *(bytecode_opcode_t *) pc);
538 ret = -EINVAL;
539 goto end;
540
541 case BYTECODE_OP_RETURN:
542 case BYTECODE_OP_RETURN_S64:
543 ret = 0;
544 goto end;
545
546 /* binary */
547 case BYTECODE_OP_MUL:
548 case BYTECODE_OP_DIV:
549 case BYTECODE_OP_MOD:
550 case BYTECODE_OP_PLUS:
551 case BYTECODE_OP_MINUS:
552 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
553 (unsigned int) *(bytecode_opcode_t *) pc);
554 ret = -EINVAL;
555 goto end;
556
557 case BYTECODE_OP_EQ:
558 {
559 struct binary_op *insn = (struct binary_op *) pc;
560
561 switch(vstack_ax(stack)->type) {
562 default:
563 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
564 ret = -EINVAL;
565 goto end;
566
567 case REG_STRING:
568 if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
569 insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
570 else
571 insn->op = BYTECODE_OP_EQ_STRING;
572 break;
573 case REG_STAR_GLOB_STRING:
574 insn->op = BYTECODE_OP_EQ_STAR_GLOB_STRING;
575 break;
576 case REG_S64:
577 if (vstack_bx(stack)->type == REG_S64)
578 insn->op = BYTECODE_OP_EQ_S64;
579 else
580 insn->op = BYTECODE_OP_EQ_DOUBLE_S64;
581 break;
582 case REG_DOUBLE:
583 if (vstack_bx(stack)->type == REG_S64)
584 insn->op = BYTECODE_OP_EQ_S64_DOUBLE;
585 else
586 insn->op = BYTECODE_OP_EQ_DOUBLE;
587 break;
588 }
589 /* Pop 2, push 1 */
590 if (vstack_pop(stack)) {
591 ret = -EINVAL;
592 goto end;
593 }
594 vstack_ax(stack)->type = REG_S64;
595 next_pc += sizeof(struct binary_op);
596 break;
597 }
598
599 case BYTECODE_OP_NE:
600 {
601 struct binary_op *insn = (struct binary_op *) pc;
602
603 switch(vstack_ax(stack)->type) {
604 default:
605 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
606 ret = -EINVAL;
607 goto end;
608
609 case REG_STRING:
610 if (vstack_bx(stack)->type == REG_STAR_GLOB_STRING)
611 insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
612 else
613 insn->op = BYTECODE_OP_NE_STRING;
614 break;
615 case REG_STAR_GLOB_STRING:
616 insn->op = BYTECODE_OP_NE_STAR_GLOB_STRING;
617 break;
618 case REG_S64:
619 if (vstack_bx(stack)->type == REG_S64)
620 insn->op = BYTECODE_OP_NE_S64;
621 else
622 insn->op = BYTECODE_OP_NE_DOUBLE_S64;
623 break;
624 case REG_DOUBLE:
625 if (vstack_bx(stack)->type == REG_S64)
626 insn->op = BYTECODE_OP_NE_S64_DOUBLE;
627 else
628 insn->op = BYTECODE_OP_NE_DOUBLE;
629 break;
630 }
631 /* Pop 2, push 1 */
632 if (vstack_pop(stack)) {
633 ret = -EINVAL;
634 goto end;
635 }
636 vstack_ax(stack)->type = REG_S64;
637 next_pc += sizeof(struct binary_op);
638 break;
639 }
640
641 case BYTECODE_OP_GT:
642 {
643 struct binary_op *insn = (struct binary_op *) pc;
644
645 switch(vstack_ax(stack)->type) {
646 default:
647 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
648 ret = -EINVAL;
649 goto end;
650
651 case REG_STAR_GLOB_STRING:
652 printk(KERN_WARNING "LTTng: bytecode: invalid register type for '>' binary operator\n");
653 ret = -EINVAL;
654 goto end;
655 case REG_STRING:
656 insn->op = BYTECODE_OP_GT_STRING;
657 break;
658 case REG_S64:
659 if (vstack_bx(stack)->type == REG_S64)
660 insn->op = BYTECODE_OP_GT_S64;
661 else
662 insn->op = BYTECODE_OP_GT_DOUBLE_S64;
663 break;
664 case REG_DOUBLE:
665 if (vstack_bx(stack)->type == REG_S64)
666 insn->op = BYTECODE_OP_GT_S64_DOUBLE;
667 else
668 insn->op = BYTECODE_OP_GT_DOUBLE;
669 break;
670 }
671 /* Pop 2, push 1 */
672 if (vstack_pop(stack)) {
673 ret = -EINVAL;
674 goto end;
675 }
676 vstack_ax(stack)->type = REG_S64;
677 next_pc += sizeof(struct binary_op);
678 break;
679 }
680
681 case BYTECODE_OP_LT:
682 {
683 struct binary_op *insn = (struct binary_op *) pc;
684
685 switch(vstack_ax(stack)->type) {
686 default:
687 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
688 ret = -EINVAL;
689 goto end;
690
691 case REG_STAR_GLOB_STRING:
692 printk(KERN_WARNING "LTTng: bytecode: invalid register type for '<' binary operator\n");
693 ret = -EINVAL;
694 goto end;
695 case REG_STRING:
696 insn->op = BYTECODE_OP_LT_STRING;
697 break;
698 case REG_S64:
699 if (vstack_bx(stack)->type == REG_S64)
700 insn->op = BYTECODE_OP_LT_S64;
701 else
702 insn->op = BYTECODE_OP_LT_DOUBLE_S64;
703 break;
704 case REG_DOUBLE:
705 if (vstack_bx(stack)->type == REG_S64)
706 insn->op = BYTECODE_OP_LT_S64_DOUBLE;
707 else
708 insn->op = BYTECODE_OP_LT_DOUBLE;
709 break;
710 }
711 /* Pop 2, push 1 */
712 if (vstack_pop(stack)) {
713 ret = -EINVAL;
714 goto end;
715 }
716 vstack_ax(stack)->type = REG_S64;
717 next_pc += sizeof(struct binary_op);
718 break;
719 }
720
721 case BYTECODE_OP_GE:
722 {
723 struct binary_op *insn = (struct binary_op *) pc;
724
725 switch(vstack_ax(stack)->type) {
726 default:
727 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
728 ret = -EINVAL;
729 goto end;
730
731 case REG_STAR_GLOB_STRING:
732 printk(KERN_WARNING "LTTng: bytecode: invalid register type for '>=' binary operator\n");
733 ret = -EINVAL;
734 goto end;
735 case REG_STRING:
736 insn->op = BYTECODE_OP_GE_STRING;
737 break;
738 case REG_S64:
739 if (vstack_bx(stack)->type == REG_S64)
740 insn->op = BYTECODE_OP_GE_S64;
741 else
742 insn->op = BYTECODE_OP_GE_DOUBLE_S64;
743 break;
744 case REG_DOUBLE:
745 if (vstack_bx(stack)->type == REG_S64)
746 insn->op = BYTECODE_OP_GE_S64_DOUBLE;
747 else
748 insn->op = BYTECODE_OP_GE_DOUBLE;
749 break;
750 }
751 /* Pop 2, push 1 */
752 if (vstack_pop(stack)) {
753 ret = -EINVAL;
754 goto end;
755 }
756 vstack_ax(stack)->type = REG_S64;
757 next_pc += sizeof(struct binary_op);
758 break;
759 }
760 case BYTECODE_OP_LE:
761 {
762 struct binary_op *insn = (struct binary_op *) pc;
763
764 switch(vstack_ax(stack)->type) {
765 default:
766 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
767 ret = -EINVAL;
768 goto end;
769
770 case REG_STAR_GLOB_STRING:
771 printk(KERN_WARNING "LTTng: bytecode: invalid register type for '<=' binary operator\n");
772 ret = -EINVAL;
773 goto end;
774 case REG_STRING:
775 insn->op = BYTECODE_OP_LE_STRING;
776 break;
777 case REG_S64:
778 if (vstack_bx(stack)->type == REG_S64)
779 insn->op = BYTECODE_OP_LE_S64;
780 else
781 insn->op = BYTECODE_OP_LE_DOUBLE_S64;
782 break;
783 case REG_DOUBLE:
784 if (vstack_bx(stack)->type == REG_S64)
785 insn->op = BYTECODE_OP_LE_S64_DOUBLE;
786 else
787 insn->op = BYTECODE_OP_LE_DOUBLE;
788 break;
789 }
790 vstack_ax(stack)->type = REG_S64;
791 next_pc += sizeof(struct binary_op);
792 break;
793 }
794
795 case BYTECODE_OP_EQ_STRING:
796 case BYTECODE_OP_NE_STRING:
797 case BYTECODE_OP_GT_STRING:
798 case BYTECODE_OP_LT_STRING:
799 case BYTECODE_OP_GE_STRING:
800 case BYTECODE_OP_LE_STRING:
801 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
802 case BYTECODE_OP_NE_STAR_GLOB_STRING:
803 case BYTECODE_OP_EQ_S64:
804 case BYTECODE_OP_NE_S64:
805 case BYTECODE_OP_GT_S64:
806 case BYTECODE_OP_LT_S64:
807 case BYTECODE_OP_GE_S64:
808 case BYTECODE_OP_LE_S64:
809 case BYTECODE_OP_EQ_DOUBLE:
810 case BYTECODE_OP_NE_DOUBLE:
811 case BYTECODE_OP_GT_DOUBLE:
812 case BYTECODE_OP_LT_DOUBLE:
813 case BYTECODE_OP_GE_DOUBLE:
814 case BYTECODE_OP_LE_DOUBLE:
815 case BYTECODE_OP_EQ_DOUBLE_S64:
816 case BYTECODE_OP_NE_DOUBLE_S64:
817 case BYTECODE_OP_GT_DOUBLE_S64:
818 case BYTECODE_OP_LT_DOUBLE_S64:
819 case BYTECODE_OP_GE_DOUBLE_S64:
820 case BYTECODE_OP_LE_DOUBLE_S64:
821 case BYTECODE_OP_EQ_S64_DOUBLE:
822 case BYTECODE_OP_NE_S64_DOUBLE:
823 case BYTECODE_OP_GT_S64_DOUBLE:
824 case BYTECODE_OP_LT_S64_DOUBLE:
825 case BYTECODE_OP_GE_S64_DOUBLE:
826 case BYTECODE_OP_LE_S64_DOUBLE:
827 case BYTECODE_OP_BIT_RSHIFT:
828 case BYTECODE_OP_BIT_LSHIFT:
829 case BYTECODE_OP_BIT_AND:
830 case BYTECODE_OP_BIT_OR:
831 case BYTECODE_OP_BIT_XOR:
832 {
833 /* Pop 2, push 1 */
834 if (vstack_pop(stack)) {
835 ret = -EINVAL;
836 goto end;
837 }
838 vstack_ax(stack)->type = REG_S64;
839 next_pc += sizeof(struct binary_op);
840 break;
841 }
842
843 /* unary */
844 case BYTECODE_OP_UNARY_PLUS:
845 {
846 struct unary_op *insn = (struct unary_op *) pc;
847
848 switch(vstack_ax(stack)->type) {
849 default:
850 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
851 ret = -EINVAL;
852 goto end;
853
854 case REG_S64:
855 insn->op = BYTECODE_OP_UNARY_PLUS_S64;
856 break;
857 case REG_DOUBLE:
858 insn->op = BYTECODE_OP_UNARY_PLUS_DOUBLE;
859 break;
860 }
861 /* Pop 1, push 1 */
862 next_pc += sizeof(struct unary_op);
863 break;
864 }
865
866 case BYTECODE_OP_UNARY_MINUS:
867 {
868 struct unary_op *insn = (struct unary_op *) pc;
869
870 switch(vstack_ax(stack)->type) {
871 default:
872 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
873 ret = -EINVAL;
874 goto end;
875
876 case REG_S64:
877 insn->op = BYTECODE_OP_UNARY_MINUS_S64;
878 break;
879 case REG_DOUBLE:
880 insn->op = BYTECODE_OP_UNARY_MINUS_DOUBLE;
881 break;
882 }
883 /* Pop 1, push 1 */
884 next_pc += sizeof(struct unary_op);
885 break;
886 }
887
888 case BYTECODE_OP_UNARY_NOT:
889 {
890 struct unary_op *insn = (struct unary_op *) pc;
891
892 switch(vstack_ax(stack)->type) {
893 default:
894 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
895 ret = -EINVAL;
896 goto end;
897
898 case REG_S64:
899 insn->op = BYTECODE_OP_UNARY_NOT_S64;
900 break;
901 case REG_DOUBLE:
902 insn->op = BYTECODE_OP_UNARY_NOT_DOUBLE;
903 break;
904 }
905 /* Pop 1, push 1 */
906 next_pc += sizeof(struct unary_op);
907 break;
908 }
909
910 case BYTECODE_OP_UNARY_BIT_NOT:
911 {
912 /* Pop 1, push 1 */
913 next_pc += sizeof(struct unary_op);
914 break;
915 }
916
917 case BYTECODE_OP_UNARY_PLUS_S64:
918 case BYTECODE_OP_UNARY_MINUS_S64:
919 case BYTECODE_OP_UNARY_NOT_S64:
920 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
921 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
922 case BYTECODE_OP_UNARY_NOT_DOUBLE:
923 {
924 /* Pop 1, push 1 */
925 next_pc += sizeof(struct unary_op);
926 break;
927 }
928
929 /* logical */
930 case BYTECODE_OP_AND:
931 case BYTECODE_OP_OR:
932 {
933 /* Continue to next instruction */
934 /* Pop 1 when jump not taken */
935 if (vstack_pop(stack)) {
936 ret = -EINVAL;
937 goto end;
938 }
939 next_pc += sizeof(struct logical_op);
940 break;
941 }
942
943 /* load field ref */
944 case BYTECODE_OP_LOAD_FIELD_REF:
945 {
946 printk(KERN_WARNING "LTTng: bytecode: Unknown field ref type\n");
947 ret = -EINVAL;
948 goto end;
949 }
950 /* get context ref */
951 case BYTECODE_OP_GET_CONTEXT_REF:
952 {
953 printk(KERN_WARNING "LTTng: bytecode: Unknown get context ref type\n");
954 ret = -EINVAL;
955 goto end;
956 }
957 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
958 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
959 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
960 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING:
961 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE:
962 {
963 if (vstack_push(stack)) {
964 ret = -EINVAL;
965 goto end;
966 }
967 vstack_ax(stack)->type = REG_STRING;
968 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
969 break;
970 }
971 case BYTECODE_OP_LOAD_FIELD_REF_S64:
972 case BYTECODE_OP_GET_CONTEXT_REF_S64:
973 {
974 if (vstack_push(stack)) {
975 ret = -EINVAL;
976 goto end;
977 }
978 vstack_ax(stack)->type = REG_S64;
979 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
980 break;
981 }
982 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
983 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
984 {
985 if (vstack_push(stack)) {
986 ret = -EINVAL;
987 goto end;
988 }
989 vstack_ax(stack)->type = REG_DOUBLE;
990 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
991 break;
992 }
993
994 /* load from immediate operand */
995 case BYTECODE_OP_LOAD_STRING:
996 {
997 struct load_op *insn = (struct load_op *) pc;
998
999 if (vstack_push(stack)) {
1000 ret = -EINVAL;
1001 goto end;
1002 }
1003 vstack_ax(stack)->type = REG_STRING;
1004 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1005 break;
1006 }
1007
1008 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
1009 {
1010 struct load_op *insn = (struct load_op *) pc;
1011
1012 if (vstack_push(stack)) {
1013 ret = -EINVAL;
1014 goto end;
1015 }
1016 vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
1017 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1018 break;
1019 }
1020
1021 case BYTECODE_OP_LOAD_S64:
1022 {
1023 if (vstack_push(stack)) {
1024 ret = -EINVAL;
1025 goto end;
1026 }
1027 vstack_ax(stack)->type = REG_S64;
1028 next_pc += sizeof(struct load_op)
1029 + sizeof(struct literal_numeric);
1030 break;
1031 }
1032
1033 case BYTECODE_OP_LOAD_DOUBLE:
1034 {
1035 if (vstack_push(stack)) {
1036 ret = -EINVAL;
1037 goto end;
1038 }
1039 vstack_ax(stack)->type = REG_DOUBLE;
1040 next_pc += sizeof(struct load_op)
1041 + sizeof(struct literal_double);
1042 break;
1043 }
1044
1045 /* cast */
1046 case BYTECODE_OP_CAST_TO_S64:
1047 {
1048 struct cast_op *insn = (struct cast_op *) pc;
1049
1050 switch (vstack_ax(stack)->type) {
1051 default:
1052 printk(KERN_WARNING "LTTng: bytecode: unknown register type\n");
1053 ret = -EINVAL;
1054 goto end;
1055
1056 case REG_STRING:
1057 case REG_STAR_GLOB_STRING:
1058 printk(KERN_WARNING "LTTng: bytecode: Cast op can only be applied to numeric or floating point registers\n");
1059 ret = -EINVAL;
1060 goto end;
1061 case REG_S64:
1062 insn->op = BYTECODE_OP_CAST_NOP;
1063 break;
1064 case REG_DOUBLE:
1065 insn->op = BYTECODE_OP_CAST_DOUBLE_TO_S64;
1066 break;
1067 }
1068 /* Pop 1, push 1 */
1069 vstack_ax(stack)->type = REG_S64;
1070 next_pc += sizeof(struct cast_op);
1071 break;
1072 }
1073 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
1074 {
1075 /* Pop 1, push 1 */
1076 vstack_ax(stack)->type = REG_S64;
1077 next_pc += sizeof(struct cast_op);
1078 break;
1079 }
1080 case BYTECODE_OP_CAST_NOP:
1081 {
1082 next_pc += sizeof(struct cast_op);
1083 break;
1084 }
1085
1086 /*
1087 * Instructions for recursive traversal through composed types.
1088 */
1089 case BYTECODE_OP_GET_CONTEXT_ROOT:
1090 {
1091 if (vstack_push(stack)) {
1092 ret = -EINVAL;
1093 goto end;
1094 }
1095 vstack_ax(stack)->type = REG_PTR;
1096 vstack_ax(stack)->load.type = LOAD_ROOT_CONTEXT;
1097 next_pc += sizeof(struct load_op);
1098 break;
1099 }
1100 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
1101 {
1102 if (vstack_push(stack)) {
1103 ret = -EINVAL;
1104 goto end;
1105 }
1106 vstack_ax(stack)->type = REG_PTR;
1107 vstack_ax(stack)->load.type = LOAD_ROOT_APP_CONTEXT;
1108 next_pc += sizeof(struct load_op);
1109 break;
1110 }
1111 case BYTECODE_OP_GET_PAYLOAD_ROOT:
1112 {
1113 if (vstack_push(stack)) {
1114 ret = -EINVAL;
1115 goto end;
1116 }
1117 vstack_ax(stack)->type = REG_PTR;
1118 vstack_ax(stack)->load.type = LOAD_ROOT_PAYLOAD;
1119 next_pc += sizeof(struct load_op);
1120 break;
1121 }
1122
1123 case BYTECODE_OP_LOAD_FIELD:
1124 {
1125 struct load_op *insn = (struct load_op *) pc;
1126
1127 WARN_ON_ONCE(vstack_ax(stack)->type != REG_PTR);
1128 /* Pop 1, push 1 */
1129 ret = specialize_load_field(vstack_ax(stack), insn);
1130 if (ret)
1131 goto end;
1132
1133 next_pc += sizeof(struct load_op);
1134 break;
1135 }
1136
1137 case BYTECODE_OP_LOAD_FIELD_S8:
1138 case BYTECODE_OP_LOAD_FIELD_S16:
1139 case BYTECODE_OP_LOAD_FIELD_S32:
1140 case BYTECODE_OP_LOAD_FIELD_S64:
1141 case BYTECODE_OP_LOAD_FIELD_U8:
1142 case BYTECODE_OP_LOAD_FIELD_U16:
1143 case BYTECODE_OP_LOAD_FIELD_U32:
1144 case BYTECODE_OP_LOAD_FIELD_U64:
1145 {
1146 /* Pop 1, push 1 */
1147 vstack_ax(stack)->type = REG_S64;
1148 next_pc += sizeof(struct load_op);
1149 break;
1150 }
1151
1152 case BYTECODE_OP_LOAD_FIELD_STRING:
1153 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
1154 {
1155 /* Pop 1, push 1 */
1156 vstack_ax(stack)->type = REG_STRING;
1157 next_pc += sizeof(struct load_op);
1158 break;
1159 }
1160
1161 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
1162 {
1163 /* Pop 1, push 1 */
1164 vstack_ax(stack)->type = REG_DOUBLE;
1165 next_pc += sizeof(struct load_op);
1166 break;
1167 }
1168
1169 case BYTECODE_OP_GET_SYMBOL:
1170 {
1171 struct load_op *insn = (struct load_op *) pc;
1172
1173 dbg_printk("op get symbol\n");
1174 switch (vstack_ax(stack)->load.type) {
1175 case LOAD_OBJECT:
1176 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1177 ret = -EINVAL;
1178 goto end;
1179 case LOAD_ROOT_CONTEXT:
1180 /* Lookup context field. */
1181 ret = specialize_context_lookup(ctx, bytecode, insn,
1182 &vstack_ax(stack)->load);
1183 if (ret)
1184 goto end;
1185 break;
1186 case LOAD_ROOT_APP_CONTEXT:
1187 ret = -EINVAL;
1188 goto end;
1189 case LOAD_ROOT_PAYLOAD:
1190 /* Lookup event payload field. */
1191 ret = specialize_payload_lookup(event_desc,
1192 bytecode, insn,
1193 &vstack_ax(stack)->load);
1194 if (ret)
1195 goto end;
1196 break;
1197 }
1198 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1199 break;
1200 }
1201
1202 case BYTECODE_OP_GET_SYMBOL_FIELD:
1203 {
1204 /* Always generated by specialize phase. */
1205 ret = -EINVAL;
1206 goto end;
1207 }
1208
1209 case BYTECODE_OP_GET_INDEX_U16:
1210 {
1211 struct load_op *insn = (struct load_op *) pc;
1212 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1213
1214 dbg_printk("op get index u16\n");
1215 /* Pop 1, push 1 */
1216 ret = specialize_get_index(bytecode, insn, index->index,
1217 vstack_ax(stack), sizeof(*index));
1218 if (ret)
1219 goto end;
1220 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1221 break;
1222 }
1223
1224 case BYTECODE_OP_GET_INDEX_U64:
1225 {
1226 struct load_op *insn = (struct load_op *) pc;
1227 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1228
1229 dbg_printk("op get index u64\n");
1230 /* Pop 1, push 1 */
1231 ret = specialize_get_index(bytecode, insn, index->index,
1232 vstack_ax(stack), sizeof(*index));
1233 if (ret)
1234 goto end;
1235 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1236 break;
1237 }
1238
1239 }
1240 }
1241 end:
1242 return ret;
1243 }
This page took 0.084148 seconds and 4 git commands to generate.