/* SPDX-License-Identifier: MIT
 *
 * lttng-bytecode-specialize.c
 *
 * LTTng modules bytecode code specializer.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 */
10 #include <linux/slab.h>
11 #include <wrapper/compiler_attributes.h>
13 #include <lttng/lttng-bytecode.h>
14 #include <lttng/align.h>
15 #include <lttng/events-internal.h>
17 static ssize_t
bytecode_reserve_data(struct bytecode_runtime
*runtime
,
18 size_t align
, size_t len
)
21 size_t padding
= offset_align(runtime
->data_len
, align
);
22 size_t new_len
= runtime
->data_len
+ padding
+ len
;
23 size_t new_alloc_len
= new_len
;
24 size_t old_alloc_len
= runtime
->data_alloc_len
;
26 if (new_len
> INTERPRETER_MAX_DATA_LEN
)
29 if (new_alloc_len
> old_alloc_len
) {
33 max_t(size_t, 1U << get_count_order(new_alloc_len
), old_alloc_len
<< 1);
34 newptr
= krealloc(runtime
->data
, new_alloc_len
, GFP_KERNEL
);
37 runtime
->data
= newptr
;
38 /* We zero directly the memory from start of allocation. */
39 memset(&runtime
->data
[old_alloc_len
], 0, new_alloc_len
- old_alloc_len
);
40 runtime
->data_alloc_len
= new_alloc_len
;
42 runtime
->data_len
+= padding
;
43 ret
= runtime
->data_len
;
44 runtime
->data_len
+= len
;
48 static ssize_t
bytecode_push_data(struct bytecode_runtime
*runtime
,
49 const void *p
, size_t align
, size_t len
)
53 offset
= bytecode_reserve_data(runtime
, align
, len
);
56 memcpy(&runtime
->data
[offset
], p
, len
);
60 static int specialize_load_field(struct vstack_entry
*stack_top
,
65 switch (stack_top
->load
.type
) {
68 case LOAD_ROOT_CONTEXT
:
69 case LOAD_ROOT_APP_CONTEXT
:
70 case LOAD_ROOT_PAYLOAD
:
72 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
76 switch (stack_top
->load
.object_type
) {
78 dbg_printk("op load field s8\n");
79 stack_top
->type
= REG_S64
;
80 if (!stack_top
->load
.rev_bo
)
81 insn
->op
= BYTECODE_OP_LOAD_FIELD_S8
;
84 dbg_printk("op load field s16\n");
85 stack_top
->type
= REG_S64
;
86 if (!stack_top
->load
.rev_bo
)
87 insn
->op
= BYTECODE_OP_LOAD_FIELD_S16
;
90 dbg_printk("op load field s32\n");
91 stack_top
->type
= REG_S64
;
92 if (!stack_top
->load
.rev_bo
)
93 insn
->op
= BYTECODE_OP_LOAD_FIELD_S32
;
96 dbg_printk("op load field s64\n");
97 stack_top
->type
= REG_S64
;
98 if (!stack_top
->load
.rev_bo
)
99 insn
->op
= BYTECODE_OP_LOAD_FIELD_S64
;
101 case OBJECT_TYPE_SIGNED_ENUM
:
102 dbg_printk("op load field signed enumeration\n");
103 stack_top
->type
= REG_PTR
;
106 dbg_printk("op load field u8\n");
107 stack_top
->type
= REG_S64
;
108 insn
->op
= BYTECODE_OP_LOAD_FIELD_U8
;
110 case OBJECT_TYPE_U16
:
111 dbg_printk("op load field u16\n");
112 stack_top
->type
= REG_S64
;
113 if (!stack_top
->load
.rev_bo
)
114 insn
->op
= BYTECODE_OP_LOAD_FIELD_U16
;
116 case OBJECT_TYPE_U32
:
117 dbg_printk("op load field u32\n");
118 stack_top
->type
= REG_S64
;
119 if (!stack_top
->load
.rev_bo
)
120 insn
->op
= BYTECODE_OP_LOAD_FIELD_U32
;
122 case OBJECT_TYPE_U64
:
123 dbg_printk("op load field u64\n");
124 stack_top
->type
= REG_S64
;
125 if (!stack_top
->load
.rev_bo
)
126 insn
->op
= BYTECODE_OP_LOAD_FIELD_U64
;
128 case OBJECT_TYPE_UNSIGNED_ENUM
:
129 dbg_printk("op load field unsigned enumeration\n");
130 stack_top
->type
= REG_PTR
;
132 case OBJECT_TYPE_DOUBLE
:
133 printk(KERN_WARNING
"LTTng: bytecode: Double type unsupported\n\n");
136 case OBJECT_TYPE_STRING
:
137 dbg_printk("op load field string\n");
138 stack_top
->type
= REG_STRING
;
139 insn
->op
= BYTECODE_OP_LOAD_FIELD_STRING
;
141 case OBJECT_TYPE_STRING_SEQUENCE
:
142 dbg_printk("op load field string sequence\n");
143 stack_top
->type
= REG_STRING
;
144 insn
->op
= BYTECODE_OP_LOAD_FIELD_SEQUENCE
;
146 case OBJECT_TYPE_DYNAMIC
:
149 case OBJECT_TYPE_SEQUENCE
:
150 case OBJECT_TYPE_ARRAY
:
151 case OBJECT_TYPE_STRUCT
:
152 case OBJECT_TYPE_VARIANT
:
153 printk(KERN_WARNING
"LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
163 static int specialize_get_index_object_type(enum object_type
*otype
,
164 int signedness
, uint32_t elem_len
)
169 *otype
= OBJECT_TYPE_S8
;
171 *otype
= OBJECT_TYPE_U8
;
175 *otype
= OBJECT_TYPE_S16
;
177 *otype
= OBJECT_TYPE_U16
;
181 *otype
= OBJECT_TYPE_S32
;
183 *otype
= OBJECT_TYPE_U32
;
187 *otype
= OBJECT_TYPE_S64
;
189 *otype
= OBJECT_TYPE_U64
;
197 static int specialize_get_index(struct bytecode_runtime
*runtime
,
198 struct load_op
*insn
, uint64_t index
,
199 struct vstack_entry
*stack_top
,
203 struct bytecode_get_index_data gid
;
206 memset(&gid
, 0, sizeof(gid
));
207 switch (stack_top
->load
.type
) {
209 switch (stack_top
->load
.object_type
) {
210 case OBJECT_TYPE_ARRAY
:
212 const struct lttng_kernel_event_field
*field
;
213 const struct lttng_kernel_type_array
*array_type
;
214 const struct lttng_kernel_type_integer
*integer_type
;
215 uint32_t elem_len
, num_elems
;
218 field
= stack_top
->load
.field
;
219 array_type
= lttng_kernel_get_type_array(field
->type
);
220 if (!lttng_kernel_type_is_bytewise_integer(array_type
->elem_type
)) {
224 integer_type
= lttng_kernel_get_type_integer(array_type
->elem_type
);
225 num_elems
= array_type
->length
;
226 elem_len
= integer_type
->size
;
227 signedness
= integer_type
->signedness
;
228 if (index
>= num_elems
) {
232 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
233 signedness
, elem_len
);
236 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
237 gid
.array_len
= num_elems
* (elem_len
/ CHAR_BIT
);
238 gid
.elem
.type
= stack_top
->load
.object_type
;
239 gid
.elem
.len
= elem_len
;
240 if (integer_type
->reverse_byte_order
)
241 gid
.elem
.rev_bo
= true;
242 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
245 case OBJECT_TYPE_SEQUENCE
:
247 const struct lttng_kernel_event_field
*field
;
248 const struct lttng_kernel_type_sequence
*sequence_type
;
249 const struct lttng_kernel_type_integer
*integer_type
;
253 field
= stack_top
->load
.field
;
254 sequence_type
= lttng_kernel_get_type_sequence(field
->type
);
255 if (!lttng_kernel_type_is_bytewise_integer(sequence_type
->elem_type
)) {
259 integer_type
= lttng_kernel_get_type_integer(sequence_type
->elem_type
);
260 elem_len
= integer_type
->size
;
261 signedness
= integer_type
->signedness
;
262 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
263 signedness
, elem_len
);
266 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
267 gid
.elem
.type
= stack_top
->load
.object_type
;
268 gid
.elem
.len
= elem_len
;
269 if (integer_type
->reverse_byte_order
)
270 gid
.elem
.rev_bo
= true;
271 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
274 case OBJECT_TYPE_STRUCT
:
275 /* Only generated by the specialize phase. */
276 case OBJECT_TYPE_VARIANT
:
279 printk(KERN_WARNING
"LTTng: bytecode: Unexpected get index type %d",
280 (int) stack_top
->load
.object_type
);
285 case LOAD_ROOT_CONTEXT
:
286 case LOAD_ROOT_APP_CONTEXT
:
287 case LOAD_ROOT_PAYLOAD
:
288 printk(KERN_WARNING
"LTTng: bytecode: Index lookup for root field not implemented yet.\n");
292 data_offset
= bytecode_push_data(runtime
, &gid
,
293 __alignof__(gid
), sizeof(gid
));
294 if (data_offset
< 0) {
300 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
303 ((struct get_index_u64
*) insn
->data
)->index
= data_offset
;
316 static int specialize_context_lookup_name(struct lttng_kernel_ctx
*ctx
,
317 struct bytecode_runtime
*bytecode
,
318 struct load_op
*insn
)
323 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
324 name
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.reloc_offset
+ offset
;
325 return lttng_kernel_get_context_index(ctx
, name
);
328 static int specialize_load_object(const struct lttng_kernel_event_field
*field
,
329 struct vstack_load
*load
, bool is_context
)
331 load
->type
= LOAD_OBJECT
;
333 switch (field
->type
->type
) {
334 case lttng_kernel_type_integer
:
335 if (lttng_kernel_get_type_integer(field
->type
)->signedness
)
336 load
->object_type
= OBJECT_TYPE_S64
;
338 load
->object_type
= OBJECT_TYPE_U64
;
339 load
->rev_bo
= false;
341 case lttng_kernel_type_enum
:
343 const struct lttng_kernel_type_enum
*enum_type
= lttng_kernel_get_type_enum(field
->type
);
344 const struct lttng_kernel_type_integer
*integer_type
= lttng_kernel_get_type_integer(enum_type
->container_type
);
346 if (integer_type
->signedness
)
347 load
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
349 load
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
350 load
->rev_bo
= false;
353 case lttng_kernel_type_array
:
355 const struct lttng_kernel_type_array
*array_type
= lttng_kernel_get_type_array(field
->type
);
357 if (!lttng_kernel_type_is_bytewise_integer(array_type
->elem_type
)) {
358 printk(KERN_WARNING
"LTTng: bytecode: Array nesting only supports integer types.\n");
362 load
->object_type
= OBJECT_TYPE_STRING
;
364 if (array_type
->encoding
== lttng_kernel_string_encoding_none
) {
365 load
->object_type
= OBJECT_TYPE_ARRAY
;
368 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
373 case lttng_kernel_type_sequence
:
375 const struct lttng_kernel_type_sequence
*sequence_type
= lttng_kernel_get_type_sequence(field
->type
);
377 if (!lttng_kernel_type_is_bytewise_integer(sequence_type
->elem_type
)) {
378 printk(KERN_WARNING
"LTTng: bytecode: Sequence nesting only supports integer types.\n");
382 load
->object_type
= OBJECT_TYPE_STRING
;
384 if (sequence_type
->encoding
== lttng_kernel_string_encoding_none
) {
385 load
->object_type
= OBJECT_TYPE_SEQUENCE
;
388 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
393 case lttng_kernel_type_string
:
394 load
->object_type
= OBJECT_TYPE_STRING
;
396 case lttng_kernel_type_struct
:
397 printk(KERN_WARNING
"LTTng: bytecode: Structure type cannot be loaded.\n");
399 case lttng_kernel_type_variant
:
400 printk(KERN_WARNING
"LTTng: bytecode: Variant type cannot be loaded.\n");
403 printk(KERN_WARNING
"LTTng: bytecode: Unknown type: %d", (int) field
->type
->type
);
409 static int specialize_context_lookup(struct lttng_kernel_ctx
*ctx
,
410 struct bytecode_runtime
*runtime
,
411 struct load_op
*insn
,
412 struct vstack_load
*load
)
415 const struct lttng_kernel_ctx_field
*ctx_field
;
416 const struct lttng_kernel_event_field
*field
;
417 struct bytecode_get_index_data gid
;
420 idx
= specialize_context_lookup_name(ctx
, runtime
, insn
);
424 ctx_field
= <tng_static_ctx
->fields
[idx
];
425 field
= ctx_field
->event_field
;
426 ret
= specialize_load_object(field
, load
, true);
429 /* Specialize each get_symbol into a get_index. */
430 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
431 memset(&gid
, 0, sizeof(gid
));
433 gid
.elem
.type
= load
->object_type
;
434 gid
.elem
.rev_bo
= load
->rev_bo
;
436 data_offset
= bytecode_push_data(runtime
, &gid
,
437 __alignof__(gid
), sizeof(gid
));
438 if (data_offset
< 0) {
441 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
445 static int specialize_payload_lookup(const struct lttng_kernel_event_desc
*event_desc
,
446 struct bytecode_runtime
*runtime
,
447 struct load_op
*insn
,
448 struct vstack_load
*load
)
452 unsigned int i
, nr_fields
;
454 uint32_t field_offset
= 0;
455 const struct lttng_kernel_event_field
*field
;
457 struct bytecode_get_index_data gid
;
460 nr_fields
= event_desc
->tp_class
->nr_fields
;
461 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
462 name
= runtime
->p
.bc
->bc
.data
+ runtime
->p
.bc
->bc
.reloc_offset
+ offset
;
463 for (i
= 0; i
< nr_fields
; i
++) {
464 field
= event_desc
->tp_class
->fields
[i
];
465 if (field
->nofilter
) {
468 if (!strcmp(field
->name
, name
)) {
472 /* compute field offset on stack */
473 switch (field
->type
->type
) {
474 case lttng_kernel_type_integer
:
475 case lttng_kernel_type_enum
:
476 field_offset
+= sizeof(int64_t);
478 case lttng_kernel_type_array
:
479 case lttng_kernel_type_sequence
:
480 field_offset
+= sizeof(unsigned long);
481 field_offset
+= sizeof(void *);
483 case lttng_kernel_type_string
:
484 field_offset
+= sizeof(void *);
496 ret
= specialize_load_object(field
, load
, false);
500 /* Specialize each get_symbol into a get_index. */
501 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
502 memset(&gid
, 0, sizeof(gid
));
503 gid
.offset
= field_offset
;
504 gid
.elem
.type
= load
->object_type
;
505 gid
.elem
.rev_bo
= load
->rev_bo
;
507 data_offset
= bytecode_push_data(runtime
, &gid
,
508 __alignof__(gid
), sizeof(gid
));
509 if (data_offset
< 0) {
513 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
519 int lttng_bytecode_specialize(const struct lttng_kernel_event_desc
*event_desc
,
520 struct bytecode_runtime
*bytecode
)
522 void *pc
, *next_pc
, *start_pc
;
524 struct vstack _stack
;
525 struct vstack
*stack
= &_stack
;
526 struct lttng_kernel_ctx
*ctx
= bytecode
->p
.ctx
;
530 start_pc
= &bytecode
->code
[0];
531 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
533 switch (*(bytecode_opcode_t
*) pc
) {
534 case BYTECODE_OP_UNKNOWN
:
536 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
537 (unsigned int) *(bytecode_opcode_t
*) pc
);
541 case BYTECODE_OP_RETURN
:
542 case BYTECODE_OP_RETURN_S64
:
547 case BYTECODE_OP_MUL
:
548 case BYTECODE_OP_DIV
:
549 case BYTECODE_OP_MOD
:
550 case BYTECODE_OP_PLUS
:
551 case BYTECODE_OP_MINUS
:
552 printk(KERN_WARNING
"LTTng: bytecode: unknown bytecode op %u\n",
553 (unsigned int) *(bytecode_opcode_t
*) pc
);
559 struct binary_op
*insn
= (struct binary_op
*) pc
;
561 switch(vstack_ax(stack
)->type
) {
563 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
568 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
569 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
571 insn
->op
= BYTECODE_OP_EQ_STRING
;
573 case REG_STAR_GLOB_STRING
:
574 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
577 if (vstack_bx(stack
)->type
== REG_S64
)
578 insn
->op
= BYTECODE_OP_EQ_S64
;
580 insn
->op
= BYTECODE_OP_EQ_DOUBLE_S64
;
583 if (vstack_bx(stack
)->type
== REG_S64
)
584 insn
->op
= BYTECODE_OP_EQ_S64_DOUBLE
;
586 insn
->op
= BYTECODE_OP_EQ_DOUBLE
;
590 if (vstack_pop(stack
)) {
594 vstack_ax(stack
)->type
= REG_S64
;
595 next_pc
+= sizeof(struct binary_op
);
601 struct binary_op
*insn
= (struct binary_op
*) pc
;
603 switch(vstack_ax(stack
)->type
) {
605 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
610 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
611 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
613 insn
->op
= BYTECODE_OP_NE_STRING
;
615 case REG_STAR_GLOB_STRING
:
616 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
619 if (vstack_bx(stack
)->type
== REG_S64
)
620 insn
->op
= BYTECODE_OP_NE_S64
;
622 insn
->op
= BYTECODE_OP_NE_DOUBLE_S64
;
625 if (vstack_bx(stack
)->type
== REG_S64
)
626 insn
->op
= BYTECODE_OP_NE_S64_DOUBLE
;
628 insn
->op
= BYTECODE_OP_NE_DOUBLE
;
632 if (vstack_pop(stack
)) {
636 vstack_ax(stack
)->type
= REG_S64
;
637 next_pc
+= sizeof(struct binary_op
);
643 struct binary_op
*insn
= (struct binary_op
*) pc
;
645 switch(vstack_ax(stack
)->type
) {
647 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
651 case REG_STAR_GLOB_STRING
:
652 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '>' binary operator\n");
656 insn
->op
= BYTECODE_OP_GT_STRING
;
659 if (vstack_bx(stack
)->type
== REG_S64
)
660 insn
->op
= BYTECODE_OP_GT_S64
;
662 insn
->op
= BYTECODE_OP_GT_DOUBLE_S64
;
665 if (vstack_bx(stack
)->type
== REG_S64
)
666 insn
->op
= BYTECODE_OP_GT_S64_DOUBLE
;
668 insn
->op
= BYTECODE_OP_GT_DOUBLE
;
672 if (vstack_pop(stack
)) {
676 vstack_ax(stack
)->type
= REG_S64
;
677 next_pc
+= sizeof(struct binary_op
);
683 struct binary_op
*insn
= (struct binary_op
*) pc
;
685 switch(vstack_ax(stack
)->type
) {
687 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
691 case REG_STAR_GLOB_STRING
:
692 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '<' binary operator\n");
696 insn
->op
= BYTECODE_OP_LT_STRING
;
699 if (vstack_bx(stack
)->type
== REG_S64
)
700 insn
->op
= BYTECODE_OP_LT_S64
;
702 insn
->op
= BYTECODE_OP_LT_DOUBLE_S64
;
705 if (vstack_bx(stack
)->type
== REG_S64
)
706 insn
->op
= BYTECODE_OP_LT_S64_DOUBLE
;
708 insn
->op
= BYTECODE_OP_LT_DOUBLE
;
712 if (vstack_pop(stack
)) {
716 vstack_ax(stack
)->type
= REG_S64
;
717 next_pc
+= sizeof(struct binary_op
);
723 struct binary_op
*insn
= (struct binary_op
*) pc
;
725 switch(vstack_ax(stack
)->type
) {
727 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
731 case REG_STAR_GLOB_STRING
:
732 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '>=' binary operator\n");
736 insn
->op
= BYTECODE_OP_GE_STRING
;
739 if (vstack_bx(stack
)->type
== REG_S64
)
740 insn
->op
= BYTECODE_OP_GE_S64
;
742 insn
->op
= BYTECODE_OP_GE_DOUBLE_S64
;
745 if (vstack_bx(stack
)->type
== REG_S64
)
746 insn
->op
= BYTECODE_OP_GE_S64_DOUBLE
;
748 insn
->op
= BYTECODE_OP_GE_DOUBLE
;
752 if (vstack_pop(stack
)) {
756 vstack_ax(stack
)->type
= REG_S64
;
757 next_pc
+= sizeof(struct binary_op
);
762 struct binary_op
*insn
= (struct binary_op
*) pc
;
764 switch(vstack_ax(stack
)->type
) {
766 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
770 case REG_STAR_GLOB_STRING
:
771 printk(KERN_WARNING
"LTTng: bytecode: invalid register type for '<=' binary operator\n");
775 insn
->op
= BYTECODE_OP_LE_STRING
;
778 if (vstack_bx(stack
)->type
== REG_S64
)
779 insn
->op
= BYTECODE_OP_LE_S64
;
781 insn
->op
= BYTECODE_OP_LE_DOUBLE_S64
;
784 if (vstack_bx(stack
)->type
== REG_S64
)
785 insn
->op
= BYTECODE_OP_LE_S64_DOUBLE
;
787 insn
->op
= BYTECODE_OP_LE_DOUBLE
;
790 vstack_ax(stack
)->type
= REG_S64
;
791 next_pc
+= sizeof(struct binary_op
);
795 case BYTECODE_OP_EQ_STRING
:
796 case BYTECODE_OP_NE_STRING
:
797 case BYTECODE_OP_GT_STRING
:
798 case BYTECODE_OP_LT_STRING
:
799 case BYTECODE_OP_GE_STRING
:
800 case BYTECODE_OP_LE_STRING
:
801 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
802 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
803 case BYTECODE_OP_EQ_S64
:
804 case BYTECODE_OP_NE_S64
:
805 case BYTECODE_OP_GT_S64
:
806 case BYTECODE_OP_LT_S64
:
807 case BYTECODE_OP_GE_S64
:
808 case BYTECODE_OP_LE_S64
:
809 case BYTECODE_OP_EQ_DOUBLE
:
810 case BYTECODE_OP_NE_DOUBLE
:
811 case BYTECODE_OP_GT_DOUBLE
:
812 case BYTECODE_OP_LT_DOUBLE
:
813 case BYTECODE_OP_GE_DOUBLE
:
814 case BYTECODE_OP_LE_DOUBLE
:
815 case BYTECODE_OP_EQ_DOUBLE_S64
:
816 case BYTECODE_OP_NE_DOUBLE_S64
:
817 case BYTECODE_OP_GT_DOUBLE_S64
:
818 case BYTECODE_OP_LT_DOUBLE_S64
:
819 case BYTECODE_OP_GE_DOUBLE_S64
:
820 case BYTECODE_OP_LE_DOUBLE_S64
:
821 case BYTECODE_OP_EQ_S64_DOUBLE
:
822 case BYTECODE_OP_NE_S64_DOUBLE
:
823 case BYTECODE_OP_GT_S64_DOUBLE
:
824 case BYTECODE_OP_LT_S64_DOUBLE
:
825 case BYTECODE_OP_GE_S64_DOUBLE
:
826 case BYTECODE_OP_LE_S64_DOUBLE
:
827 case BYTECODE_OP_BIT_RSHIFT
:
828 case BYTECODE_OP_BIT_LSHIFT
:
829 case BYTECODE_OP_BIT_AND
:
830 case BYTECODE_OP_BIT_OR
:
831 case BYTECODE_OP_BIT_XOR
:
834 if (vstack_pop(stack
)) {
838 vstack_ax(stack
)->type
= REG_S64
;
839 next_pc
+= sizeof(struct binary_op
);
844 case BYTECODE_OP_UNARY_PLUS
:
846 struct unary_op
*insn
= (struct unary_op
*) pc
;
848 switch(vstack_ax(stack
)->type
) {
850 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
855 insn
->op
= BYTECODE_OP_UNARY_PLUS_S64
;
858 insn
->op
= BYTECODE_OP_UNARY_PLUS_DOUBLE
;
862 next_pc
+= sizeof(struct unary_op
);
866 case BYTECODE_OP_UNARY_MINUS
:
868 struct unary_op
*insn
= (struct unary_op
*) pc
;
870 switch(vstack_ax(stack
)->type
) {
872 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
877 insn
->op
= BYTECODE_OP_UNARY_MINUS_S64
;
880 insn
->op
= BYTECODE_OP_UNARY_MINUS_DOUBLE
;
884 next_pc
+= sizeof(struct unary_op
);
888 case BYTECODE_OP_UNARY_NOT
:
890 struct unary_op
*insn
= (struct unary_op
*) pc
;
892 switch(vstack_ax(stack
)->type
) {
894 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
899 insn
->op
= BYTECODE_OP_UNARY_NOT_S64
;
902 insn
->op
= BYTECODE_OP_UNARY_NOT_DOUBLE
;
906 next_pc
+= sizeof(struct unary_op
);
910 case BYTECODE_OP_UNARY_BIT_NOT
:
913 next_pc
+= sizeof(struct unary_op
);
917 case BYTECODE_OP_UNARY_PLUS_S64
:
918 case BYTECODE_OP_UNARY_MINUS_S64
:
919 case BYTECODE_OP_UNARY_NOT_S64
:
920 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
921 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
922 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
925 next_pc
+= sizeof(struct unary_op
);
930 case BYTECODE_OP_AND
:
933 /* Continue to next instruction */
934 /* Pop 1 when jump not taken */
935 if (vstack_pop(stack
)) {
939 next_pc
+= sizeof(struct logical_op
);
944 case BYTECODE_OP_LOAD_FIELD_REF
:
946 printk(KERN_WARNING
"LTTng: bytecode: Unknown field ref type\n");
950 /* get context ref */
951 case BYTECODE_OP_GET_CONTEXT_REF
:
953 printk(KERN_WARNING
"LTTng: bytecode: Unknown get context ref type\n");
957 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
958 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
959 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
960 case BYTECODE_OP_LOAD_FIELD_REF_USER_STRING
:
961 case BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE
:
963 if (vstack_push(stack
)) {
967 vstack_ax(stack
)->type
= REG_STRING
;
968 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
971 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
972 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
974 if (vstack_push(stack
)) {
978 vstack_ax(stack
)->type
= REG_S64
;
979 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
982 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
983 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
985 if (vstack_push(stack
)) {
989 vstack_ax(stack
)->type
= REG_DOUBLE
;
990 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
994 /* load from immediate operand */
995 case BYTECODE_OP_LOAD_STRING
:
997 struct load_op
*insn
= (struct load_op
*) pc
;
999 if (vstack_push(stack
)) {
1003 vstack_ax(stack
)->type
= REG_STRING
;
1004 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1008 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1010 struct load_op
*insn
= (struct load_op
*) pc
;
1012 if (vstack_push(stack
)) {
1016 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
1017 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1021 case BYTECODE_OP_LOAD_S64
:
1023 if (vstack_push(stack
)) {
1027 vstack_ax(stack
)->type
= REG_S64
;
1028 next_pc
+= sizeof(struct load_op
)
1029 + sizeof(struct literal_numeric
);
1033 case BYTECODE_OP_LOAD_DOUBLE
:
1035 if (vstack_push(stack
)) {
1039 vstack_ax(stack
)->type
= REG_DOUBLE
;
1040 next_pc
+= sizeof(struct load_op
)
1041 + sizeof(struct literal_double
);
1046 case BYTECODE_OP_CAST_TO_S64
:
1048 struct cast_op
*insn
= (struct cast_op
*) pc
;
1050 switch (vstack_ax(stack
)->type
) {
1052 printk(KERN_WARNING
"LTTng: bytecode: unknown register type\n");
1057 case REG_STAR_GLOB_STRING
:
1058 printk(KERN_WARNING
"LTTng: bytecode: Cast op can only be applied to numeric or floating point registers\n");
1062 insn
->op
= BYTECODE_OP_CAST_NOP
;
1065 insn
->op
= BYTECODE_OP_CAST_DOUBLE_TO_S64
;
1069 vstack_ax(stack
)->type
= REG_S64
;
1070 next_pc
+= sizeof(struct cast_op
);
1073 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1076 vstack_ax(stack
)->type
= REG_S64
;
1077 next_pc
+= sizeof(struct cast_op
);
1080 case BYTECODE_OP_CAST_NOP
:
1082 next_pc
+= sizeof(struct cast_op
);
1087 * Instructions for recursive traversal through composed types.
1089 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1091 if (vstack_push(stack
)) {
1095 vstack_ax(stack
)->type
= REG_PTR
;
1096 vstack_ax(stack
)->load
.type
= LOAD_ROOT_CONTEXT
;
1097 next_pc
+= sizeof(struct load_op
);
1100 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1102 if (vstack_push(stack
)) {
1106 vstack_ax(stack
)->type
= REG_PTR
;
1107 vstack_ax(stack
)->load
.type
= LOAD_ROOT_APP_CONTEXT
;
1108 next_pc
+= sizeof(struct load_op
);
1111 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1113 if (vstack_push(stack
)) {
1117 vstack_ax(stack
)->type
= REG_PTR
;
1118 vstack_ax(stack
)->load
.type
= LOAD_ROOT_PAYLOAD
;
1119 next_pc
+= sizeof(struct load_op
);
1123 case BYTECODE_OP_LOAD_FIELD
:
1125 struct load_op
*insn
= (struct load_op
*) pc
;
1127 WARN_ON_ONCE(vstack_ax(stack
)->type
!= REG_PTR
);
1129 ret
= specialize_load_field(vstack_ax(stack
), insn
);
1133 next_pc
+= sizeof(struct load_op
);
1137 case BYTECODE_OP_LOAD_FIELD_S8
:
1138 case BYTECODE_OP_LOAD_FIELD_S16
:
1139 case BYTECODE_OP_LOAD_FIELD_S32
:
1140 case BYTECODE_OP_LOAD_FIELD_S64
:
1141 case BYTECODE_OP_LOAD_FIELD_U8
:
1142 case BYTECODE_OP_LOAD_FIELD_U16
:
1143 case BYTECODE_OP_LOAD_FIELD_U32
:
1144 case BYTECODE_OP_LOAD_FIELD_U64
:
1147 vstack_ax(stack
)->type
= REG_S64
;
1148 next_pc
+= sizeof(struct load_op
);
1152 case BYTECODE_OP_LOAD_FIELD_STRING
:
1153 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1156 vstack_ax(stack
)->type
= REG_STRING
;
1157 next_pc
+= sizeof(struct load_op
);
1161 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1164 vstack_ax(stack
)->type
= REG_DOUBLE
;
1165 next_pc
+= sizeof(struct load_op
);
1169 case BYTECODE_OP_GET_SYMBOL
:
1171 struct load_op
*insn
= (struct load_op
*) pc
;
1173 dbg_printk("op get symbol\n");
1174 switch (vstack_ax(stack
)->load
.type
) {
1176 printk(KERN_WARNING
"LTTng: bytecode: Nested fields not implemented yet.\n");
1179 case LOAD_ROOT_CONTEXT
:
1180 /* Lookup context field. */
1181 ret
= specialize_context_lookup(ctx
, bytecode
, insn
,
1182 &vstack_ax(stack
)->load
);
1186 case LOAD_ROOT_APP_CONTEXT
:
1189 case LOAD_ROOT_PAYLOAD
:
1190 /* Lookup event payload field. */
1191 ret
= specialize_payload_lookup(event_desc
,
1193 &vstack_ax(stack
)->load
);
1198 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1202 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1204 /* Always generated by specialize phase. */
1209 case BYTECODE_OP_GET_INDEX_U16
:
1211 struct load_op
*insn
= (struct load_op
*) pc
;
1212 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1214 dbg_printk("op get index u16\n");
1216 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1217 vstack_ax(stack
), sizeof(*index
));
1220 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1224 case BYTECODE_OP_GET_INDEX_U64
:
1226 struct load_op
*insn
= (struct load_op
*) pc
;
1227 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1229 dbg_printk("op get index u64\n");
1231 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1232 vstack_ax(stack
), sizeof(*index
));
1235 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);