4b100ab861761abb68993a93ad09f0a50004588d
[lttng-modules.git] / src / lttng-bytecode-interpreter.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-interpreter.c
4 *
5 * LTTng modules bytecode interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/compiler_attributes.h>
11 #include <wrapper/uaccess.h>
12 #include <wrapper/objtool.h>
13 #include <wrapper/types.h>
14 #include <linux/swab.h>
15
16 #include <lttng/lttng-bytecode.h>
17 #include <lttng/string-utils.h>
18 #include <lttng/events-internal.h>
19
20 /*
21 * get_char should be called with page fault handler disabled if it is expected
22 * to handle user-space read.
23 */
24 static
25 char get_char(const struct estack_entry *reg, size_t offset)
26 {
27 if (unlikely(offset >= reg->u.s.seq_len))
28 return '\0';
29 if (reg->u.s.user) {
30 char c;
31
32 /* Handle invalid access as end of string. */
33 if (unlikely(!lttng_access_ok(VERIFY_READ,
34 reg->u.s.user_str + offset,
35 sizeof(c))))
36 return '\0';
37 /* Handle fault (nonzero return value) as end of string. */
38 if (unlikely(__copy_from_user_inatomic(&c,
39 reg->u.s.user_str + offset,
40 sizeof(c))))
41 return '\0';
42 return c;
43 } else {
44 return reg->u.s.str[offset];
45 }
46 }
47
48 /*
49 * -1: wildcard found.
50 * -2: unknown escape char.
51 * 0: normal char.
52 */
53 static
54 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
55 {
56 switch (*c) {
57 case '\\':
58 (*offset)++;
59 *c = get_char(reg, *offset);
60 switch (*c) {
61 case '\\':
62 case '*':
63 return 0;
64 default:
65 return -2;
66 }
67 case '*':
68 return -1;
69 default:
70 return 0;
71 }
72 }
73
74 static
75 char get_char_at_cb(size_t at, void *data)
76 {
77 return get_char(data, at);
78 }
79
80 static
81 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
82 {
83 bool has_user = false;
84 int result;
85 struct estack_entry *pattern_reg;
86 struct estack_entry *candidate_reg;
87
88 /* Disable the page fault handler when reading from userspace. */
89 if (estack_bx(stack, top)->u.s.user
90 || estack_ax(stack, top)->u.s.user) {
91 has_user = true;
92 pagefault_disable();
93 }
94
95 /* Find out which side is the pattern vs. the candidate. */
96 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
97 pattern_reg = estack_ax(stack, top);
98 candidate_reg = estack_bx(stack, top);
99 } else {
100 pattern_reg = estack_bx(stack, top);
101 candidate_reg = estack_ax(stack, top);
102 }
103
104 /* Perform the match operation. */
105 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
106 pattern_reg, get_char_at_cb, candidate_reg);
107 if (has_user)
108 pagefault_enable();
109
110 return result;
111 }
112
/*
 * Compare the two string registers at the top of the interpreter stack
 * (bx: left operand, ax: right operand), strcmp-style: returns a value
 * < 0, == 0 or > 0.
 *
 * Plain string literals may contain '*' wildcards and backslash escape
 * sequences, handled through parse_char(); an unescaped wildcard makes
 * the comparison succeed (diff = 0) at that point.  @cmp_type only
 * names the operator for the caller and does not affect the result.
 *
 * Page faults are disabled while any operand lives in user-space
 * memory, so get_char() can use inatomic user accesses.
 */
static
int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
{
	size_t offset_bx = 0, offset_ax = 0;
	int diff, has_user = 0;

	if (estack_bx(stack, top)->u.s.user
			|| estack_ax(stack, top)->u.s.user) {
		has_user = 1;
		pagefault_disable();
	}

	for (;;) {
		int ret;
		int escaped_r0 = 0;	/* Set when bx hit an unknown escape (ret == -2). */
		char char_bx, char_ax;

		char_bx = get_char(estack_bx(stack, top), offset_bx);
		char_ax = get_char(estack_ax(stack, top), offset_ax);

		if (unlikely(char_bx == '\0')) {
			if (char_ax == '\0') {
				/* Both strings end here: equal. */
				diff = 0;
				break;
			} else {
				if (estack_ax(stack, top)->u.s.literal_type ==
						ESTACK_STRING_LITERAL_TYPE_PLAIN) {
					/* A trailing '*' in ax matches the empty suffix. */
					ret = parse_char(estack_ax(stack, top),
						&char_ax, &offset_ax);
					if (ret == -1) {
						diff = 0;
						break;
					}
				}
				diff = -1;
				break;
			}
		}
		if (unlikely(char_ax == '\0')) {
			if (estack_bx(stack, top)->u.s.literal_type ==
					ESTACK_STRING_LITERAL_TYPE_PLAIN) {
				/* A trailing '*' in bx matches the empty suffix. */
				ret = parse_char(estack_bx(stack, top),
					&char_bx, &offset_bx);
				if (ret == -1) {
					diff = 0;
					break;
				}
			}
			diff = 1;
			break;
		}
		if (estack_bx(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_bx(stack, top),
				&char_bx, &offset_bx);
			if (ret == -1) {
				/* Wildcard in bx: strings compare equal. */
				diff = 0;
				break;
			} else if (ret == -2) {
				escaped_r0 = 1;
			}
			/* else compare both char */
		}
		if (estack_ax(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_ax(stack, top),
				&char_ax, &offset_ax);
			if (ret == -1) {
				/* Wildcard in ax: strings compare equal. */
				diff = 0;
				break;
			} else if (ret == -2) {
				/* Unknown escape in ax but not in bx: bx orders first. */
				if (!escaped_r0) {
					diff = -1;
					break;
				}
			} else {
				/* Unknown escape in bx but not in ax: ax orders first. */
				if (escaped_r0) {
					diff = 1;
					break;
				}
			}
		} else {
			if (escaped_r0) {
				diff = 1;
				break;
			}
		}
		diff = char_bx - char_ax;
		if (diff != 0)
			break;
		offset_bx++;
		offset_ax++;
	}
	if (has_user)
		pagefault_enable();

	return diff;
}
211
/*
 * Stub interpreter installed when no bytecode can be executed: always
 * reports an interpreter error, regardless of the arguments.
 */
int lttng_bytecode_interpret_error(
		struct lttng_kernel_bytecode_runtime *bytecode_runtime __attribute__((unused)),
		const char *stack_data __attribute__((unused)),
		struct lttng_kernel_probe_ctx *probe_ctx __attribute__((unused)),
		void *ctx __attribute__((unused)))
{
	return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
}
220
#ifdef INTERPRETER_USE_SWITCH

/*
 * Fallback for compilers that do not support taking address of labels:
 * an explicit for () loop over the bytecode, dispatching each opcode
 * through a switch.
 *
 * Fix: the program counter must walk the bytecode buffer
 * (bytecode->code), as in the dispatch-table variant below.  The data
 * area (bytecode->data / runtime->data) holds per-instruction payloads
 * indexed by the opcodes (see dynamic_get_index()), not the opcodes
 * themselves.
 */

#define START_OP \
	start_pc = &bytecode->code[0]; \
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
			pc = next_pc) { \
		dbg_printk("LTTng: Executing op %s (%u)\n", \
			lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
			(unsigned int) *(bytecode_opcode_t *) pc); \
		switch (*(bytecode_opcode_t *) pc) {

#define OP(name) case name

#define PO break

#define END_OP } \
	}

#else

/*
 * Dispatch-table based interpreter: each opcode handler jumps directly
 * to the next handler through the dispatch[] table (computed goto).
 */

#define START_OP \
	start_pc = &bytecode->code[0]; \
	pc = next_pc = start_pc; \
	if (unlikely(pc - start_pc >= bytecode->len)) \
		goto end; \
	goto *dispatch[*(bytecode_opcode_t *) pc];

#define OP(name) \
LABEL_##name

#define PO \
	pc = next_pc; \
	goto *dispatch[*(bytecode_opcode_t *) pc];

#define END_OP

#endif
266
/*
 * True when @reg_type is one of the integer register types (signed or
 * unsigned 64-bit).  The argument is parenthesized for macro hygiene,
 * so expressions with low-precedence operators expand correctly.
 */
#define IS_INTEGER_REGISTER(reg_type) \
	((reg_type) == REG_S64 || (reg_type) == REG_U64)
269
/*
 * Populate @ptr with the value of context field @idx, read through the
 * field's get_value() callback.
 *
 * Integer and enumeration values are copied into @ptr->u and
 * @ptr->ptr is pointed at that copy; string fields (and arrays /
 * sequences with a string encoding) make @ptr->ptr point directly at
 * the context-provided string.
 *
 * Returns 0 on success, -EINVAL for field types that cannot be loaded
 * from a context.
 */
static int context_get_index(struct lttng_kernel_probe_ctx *lttng_probe_ctx,
		struct load_ptr *ptr,
		uint32_t idx)
{

	struct lttng_kernel_ctx_field *ctx_field;
	const struct lttng_kernel_event_field *field;
	struct lttng_ctx_value v;

	ctx_field = &lttng_static_ctx->fields[idx];
	field = ctx_field->event_field;
	ptr->type = LOAD_OBJECT;
	/* field is only used for types nested within variants. */
	ptr->field = NULL;

	switch (field->type->type) {
	case lttng_kernel_type_integer:
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		if (lttng_kernel_get_type_integer(field->type)->signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.u.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.u.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		ptr->rev_bo = lttng_kernel_get_type_integer(field->type)->reverse_byte_order;
		break;
	case lttng_kernel_type_enum:
	{
		const struct lttng_kernel_type_enum *enum_type = lttng_kernel_get_type_enum(field->type);
		const struct lttng_kernel_type_integer *integer_type = lttng_kernel_get_type_integer(enum_type->container_type);

		/* Enumerations are loaded through their integer container type. */
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		if (integer_type->signedness) {
			ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
			ptr->u.s64 = v.u.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
			ptr->u.u64 = v.u.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		ptr->rev_bo = integer_type->reverse_byte_order;
		break;
	}
	case lttng_kernel_type_array:
	{
		const struct lttng_kernel_type_array *array_type = lttng_kernel_get_type_array(field->type);

		if (!lttng_kernel_type_is_bytewise_integer(array_type->elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (array_type->encoding == lttng_kernel_string_encoding_none) {
			printk(KERN_WARNING "LTTng: bytecode: Only string arrays are supported for contexts.\n");
			return -EINVAL;
		}
		/* String-encoded array: expose it as a string object. */
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		ptr->ptr = v.u.str;
		break;
	}
	case lttng_kernel_type_sequence:
	{
		const struct lttng_kernel_type_sequence *sequence_type = lttng_kernel_get_type_sequence(field->type);

		if (!lttng_kernel_type_is_bytewise_integer(sequence_type->elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (sequence_type->encoding == lttng_kernel_string_encoding_none) {
			printk(KERN_WARNING "LTTng: bytecode: Only string sequences are supported for contexts.\n");
			return -EINVAL;
		}
		/* String-encoded sequence: expose it as a string object. */
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		ptr->ptr = v.u.str;
		break;
	}
	case lttng_kernel_type_string:
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		ptr->ptr = v.u.str;
		break;
	case lttng_kernel_type_struct:
		printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
		return -EINVAL;
	case lttng_kernel_type_variant:
		printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d", (int) field->type->type);
		return -EINVAL;
	}
	return 0;
}
368
/*
 * Apply a "get index" bytecode operation to the pointer register at
 * @stack_top, using the pre-validated bytecode_get_index_data stored
 * at @index in the runtime data area.
 *
 * For array/sequence objects this advances the pointer to the selected
 * element; for context roots it resolves the context field through
 * context_get_index(); for the payload root it offsets into the event
 * payload.  On success the register type becomes REG_PTR.
 *
 * Returns 0 on success, negative error value on error.
 */
static int dynamic_get_index(struct lttng_kernel_probe_ctx *lttng_probe_ctx,
		struct bytecode_runtime *runtime,
		uint64_t index, struct estack_entry *stack_top)
{
	int ret;
	const struct bytecode_get_index_data *gid;

	gid = (const struct bytecode_get_index_data *) &runtime->data[index];
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		switch (stack_top->u.ptr.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const char *ptr;

			/* Offset was validated at link time against the array length. */
			WARN_ON_ONCE(gid->offset >= gid->array_len);
			/* Skip count (unsigned long) */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			BUG_ON(stack_top->u.ptr.field->type->type != lttng_kernel_type_array);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const char *ptr;
			size_t ptr_seq_len;

			/* Sequence layout: element count, then pointer to the data. */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
			/* Runtime bound check against the actual sequence length. */
			if (gid->offset >= gid->elem.len * ptr_seq_len) {
				ret = -EINVAL;
				goto end;
			}
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			BUG_ON(stack_top->u.ptr.field->type->type != lttng_kernel_type_sequence);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			printk(KERN_WARNING "LTTng: bytecode: Nested structures are not supported yet.\n");
			ret = -EINVAL;
			goto end;
		case OBJECT_TYPE_VARIANT:
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d",
				(int) stack_top->u.ptr.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
		lttng_fallthrough;
	case LOAD_ROOT_APP_CONTEXT:
	{
		ret = context_get_index(lttng_probe_ctx,
				&stack_top->u.ptr,
				gid->ctx_index);
		if (ret) {
			goto end;
		}
		break;
	}
	case LOAD_ROOT_PAYLOAD:
		stack_top->u.ptr.ptr += gid->offset;
		/* Strings are stored as a pointer within the payload. */
		if (gid->elem.type == OBJECT_TYPE_STRING)
			stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
		stack_top->u.ptr.object_type = gid->elem.type;
		stack_top->u.ptr.type = LOAD_OBJECT;
		stack_top->u.ptr.field = gid->field;
		stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
		break;
	}

	stack_top->type = REG_PTR;

	return 0;

end:
	return ret;
}
456
/*
 * Dereference the pointer register at @stack_top, replacing it in
 * place with the loaded value: integers and enumerations become
 * REG_S64/REG_U64 (byte-swapped first when the field has reverse byte
 * order), strings and string sequences become REG_STRING.
 *
 * Returns 0 on success, -EINVAL for pointer/object types that cannot
 * be loaded.
 */
static int dynamic_load_field(struct estack_entry *stack_top)
{
	int ret;

	/* Only concrete objects can be dereferenced, not root pointers. */
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->u.ptr.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printk("op load field s8\n");
		stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_S64;
		break;
	case OBJECT_TYPE_S16:
	{
		int16_t tmp;

		dbg_printk("op load field s16\n");
		tmp = *(int16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S32:
	{
		int32_t tmp;

		dbg_printk("op load field s32\n");
		tmp = *(int32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S64:
	{
		int64_t tmp;

		dbg_printk("op load field s64\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_SIGNED_ENUM:
	{
		int64_t tmp;

		dbg_printk("op load field signed enumeration\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_U8:
		dbg_printk("op load field u8\n");
		stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_U64;
		break;
	case OBJECT_TYPE_U16:
	{
		uint16_t tmp;

		dbg_printk("op load field u16\n");
		tmp = *(uint16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U32:
	{
		uint32_t tmp;

		dbg_printk("op load field u32\n");
		tmp = *(uint32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U64:
	{
		uint64_t tmp;

		dbg_printk("op load field u64\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_UNSIGNED_ENUM:
	{
		uint64_t tmp;

		dbg_printk("op load field unsigned enumeration\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_STRING:
	{
		const char *str;

		dbg_printk("op load field string\n");
		str = (const char *) stack_top->u.ptr.ptr;
		stack_top->u.s.str = str;
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Bytecode warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/* Plain strings are NUL-terminated: no explicit length bound. */
		stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_STRING_SEQUENCE:
	{
		const char *ptr;

		dbg_printk("op load field string sequence\n");
		/* Sequence layout: length (unsigned long), then data pointer. */
		ptr = stack_top->u.ptr.ptr;
		stack_top->u.s.seq_len = *(unsigned long *) ptr;
		stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Bytecode warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_DYNAMIC:
		/*
		 * Dynamic types in context are looked up
		 * by context get index.
		 */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_DOUBLE:
		/* No floating point support in the kernel interpreter. */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
638
/*
 * Convert the content of register @ax into the capture interpreter
 * output @output.  Pointer registers to scalar objects are first
 * materialized through dynamic_load_field() and then re-dispatched
 * through the "again" label.
 *
 * Returns 0 on success, -EINVAL for register/object types that cannot
 * be captured.
 */
static
int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
		struct lttng_interpreter_output *output)
{
	int ret;

again:
	switch (ax->type) {
	case REG_S64:
		output->type = LTTNG_INTERPRETER_TYPE_S64;
		output->u.s = ax->u.v;
		break;
	case REG_U64:
		output->type = LTTNG_INTERPRETER_TYPE_U64;
		output->u.u = (uint64_t) ax->u.v;
		break;
	case REG_STRING:
		output->type = LTTNG_INTERPRETER_TYPE_STRING;
		output->u.str.str = ax->u.s.str;
		output->u.str.len = ax->u.s.seq_len;
		break;
	case REG_PTR:
		switch (ax->u.ptr.object_type) {
		case OBJECT_TYPE_S8:
		case OBJECT_TYPE_S16:
		case OBJECT_TYPE_S32:
		case OBJECT_TYPE_S64:
		case OBJECT_TYPE_U8:
		case OBJECT_TYPE_U16:
		case OBJECT_TYPE_U32:
		case OBJECT_TYPE_U64:
		case OBJECT_TYPE_DOUBLE:
		case OBJECT_TYPE_STRING:
		case OBJECT_TYPE_STRING_SEQUENCE:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			/* Retry after loading ptr into stack top. */
			goto again;
		case OBJECT_TYPE_SEQUENCE:
			/* Sequence layout: element count, then data pointer. */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
			output->u.sequence.nested_type = lttng_kernel_get_type_sequence(ax->u.ptr.field->type)->elem_type;
			break;
		case OBJECT_TYPE_ARRAY:
			/* Skip count (unsigned long) */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = lttng_kernel_get_type_array(ax->u.ptr.field->type)->length;
			output->u.sequence.nested_type = lttng_kernel_get_type_array(ax->u.ptr.field->type)->elem_type;
			break;
		case OBJECT_TYPE_SIGNED_ENUM:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
			output->u.s = ax->u.v;
			break;
		case OBJECT_TYPE_UNSIGNED_ENUM:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
			output->u.u = ax->u.v;
			break;
		case OBJECT_TYPE_STRUCT:
		case OBJECT_TYPE_VARIANT:
		default:
			return -EINVAL;
		}

		break;
	case REG_STAR_GLOB_STRING:
	case REG_TYPE_UNKNOWN:
	default:
		return -EINVAL;
	}

	return 0;
}
720
#ifdef DEBUG

/* Maximum number of user-string bytes copied for debug printing. */
#define DBG_USER_STR_CUTOFF 32

/*
 * In debug mode, print user string (truncated, if necessary).
 * Page faults are disabled around the copy since get_char() performs
 * inatomic user-space accesses for user string registers.
 */
static inline
void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
{
	size_t pos = 0;
	char last_char;
	char user_str[DBG_USER_STR_CUTOFF];

	pagefault_disable();
	do {
		last_char = get_char(user_str_reg, pos);
		user_str[pos] = last_char;
		pos++;
	} while (last_char != '\0' && pos < sizeof(user_str));
	pagefault_enable();

	/* Force termination in case the string was truncated. */
	user_str[sizeof(user_str) - 1] = '\0';
	dbg_printk("load field ref user string: '%s%s'\n", user_str,
		last_char != '\0' ? "[...]" : "");
}
#else
/* No-op stub when debugging is disabled. */
static inline
void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
{
}
#endif
753
754 /*
755 * Return LTTNG_KERNEL_BYTECODE_INTERPRETER_OK on success.
756 * Return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR on error.
757 *
758 * For FILTER bytecode: expect a struct lttng_kernel_bytecode_filter_ctx *
759 * as @ctx argument.
760 * For CAPTURE bytecode: expect a struct lttng_interpreter_output *
761 * as @ctx argument.
762 */
763 int lttng_bytecode_interpret(struct lttng_kernel_bytecode_runtime *kernel_bytecode,
764 const char *interpreter_stack_data,
765 struct lttng_kernel_probe_ctx *lttng_probe_ctx,
766 void *caller_ctx)
767 {
768 struct bytecode_runtime *bytecode = container_of(kernel_bytecode, struct bytecode_runtime, p);
769 void *pc, *next_pc, *start_pc;
770 int ret = -EINVAL;
771 uint64_t retval = 0;
772 struct estack _stack;
773 struct estack *stack = &_stack;
774 register int64_t ax = 0, bx = 0;
775 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
776 register int top = INTERPRETER_STACK_EMPTY;
777 #ifndef INTERPRETER_USE_SWITCH
778 static void *dispatch[NR_BYTECODE_OPS] = {
779 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
780
781 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
782
783 /* binary */
784 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
785 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
786 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
787 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
788 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
789 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
790 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
791 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
792 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
793 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
794
795 /* binary comparators */
796 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
797 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
798 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
799 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
800 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
801 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
802
803 /* string binary comparator */
804 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
805 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
806 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
807 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
808 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
809 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
810
811 /* globbing pattern binary comparator */
812 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
813 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
814
815 /* s64 binary comparator */
816 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
817 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
818 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
819 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
820 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
821 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
822
823 /* double binary comparator */
824 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
825 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
826 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
827 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
828 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
829 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
830
831 /* Mixed S64-double binary comparators */
832 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
833 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
834 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
835 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
836 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
837 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
838
839 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
840 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
841 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
842 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
843 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
844 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
845
846 /* unary */
847 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
848 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
849 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
850 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
851 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
852 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
853 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
854 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
855 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
856
857 /* logical */
858 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
859 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
860
861 /* load field ref */
862 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
863 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
864 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
865 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
866 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
867
868 /* load from immediate operand */
869 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
870 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
871 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
872 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
873
874 /* cast */
875 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
876 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
877 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
878
879 /* get context ref */
880 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
881 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
882 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
883 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
884
885 /* load userspace field ref */
886 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING,
887 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE,
888
889 /* Instructions for recursive traversal through composed types. */
890 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
891 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
892 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
893
894 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
895 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
896 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
897 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
898
899 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
900 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
901 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
902 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
903 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
904 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
905 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
906 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
907 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
908 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
909 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
910 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
911
912 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
913
914 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
915 };
916 #endif /* #ifndef INTERPRETER_USE_SWITCH */
917
918 START_OP
919
920 OP(BYTECODE_OP_UNKNOWN):
921 OP(BYTECODE_OP_LOAD_FIELD_REF):
922 OP(BYTECODE_OP_GET_CONTEXT_REF):
923 #ifdef INTERPRETER_USE_SWITCH
924 default:
925 #endif /* INTERPRETER_USE_SWITCH */
926 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
927 (unsigned int) *(bytecode_opcode_t *) pc);
928 ret = -EINVAL;
929 goto end;
930
931 OP(BYTECODE_OP_RETURN):
932 /* LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR or LTTNG_KERNEL_BYTECODE_INTERPRETER_OK */
933 switch (estack_ax_t) {
934 case REG_S64:
935 case REG_U64:
936 retval = !!estack_ax_v;
937 break;
938 case REG_DOUBLE:
939 case REG_STRING:
940 case REG_PTR:
941 if (kernel_bytecode->type != LTTNG_KERNEL_BYTECODE_TYPE_CAPTURE) {
942 ret = -EINVAL;
943 goto end;
944 }
945 retval = 0;
946 break;
947 case REG_STAR_GLOB_STRING:
948 case REG_TYPE_UNKNOWN:
949 ret = -EINVAL;
950 goto end;
951 }
952 ret = 0;
953 goto end;
954
955 OP(BYTECODE_OP_RETURN_S64):
956 /* LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR or LTTNG_KERNEL_BYTECODE_INTERPRETER_OK */
957 retval = !!estack_ax_v;
958 ret = 0;
959 goto end;
960
961 /* binary */
962 OP(BYTECODE_OP_MUL):
963 OP(BYTECODE_OP_DIV):
964 OP(BYTECODE_OP_MOD):
965 OP(BYTECODE_OP_PLUS):
966 OP(BYTECODE_OP_MINUS):
967 printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
968 (unsigned int) *(bytecode_opcode_t *) pc);
969 ret = -EINVAL;
970 goto end;
971
972 OP(BYTECODE_OP_EQ):
973 OP(BYTECODE_OP_NE):
974 OP(BYTECODE_OP_GT):
975 OP(BYTECODE_OP_LT):
976 OP(BYTECODE_OP_GE):
977 OP(BYTECODE_OP_LE):
978 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
979 (unsigned int) *(bytecode_opcode_t *) pc);
980 ret = -EINVAL;
981 goto end;
982
983 OP(BYTECODE_OP_EQ_STRING):
984 {
985 int res;
986
987 res = (stack_strcmp(stack, top, "==") == 0);
988 estack_pop(stack, top, ax, bx, ax_t, bx_t);
989 estack_ax_v = res;
990 estack_ax_t = REG_S64;
991 next_pc += sizeof(struct binary_op);
992 PO;
993 }
994 OP(BYTECODE_OP_NE_STRING):
995 {
996 int res;
997
998 res = (stack_strcmp(stack, top, "!=") != 0);
999 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1000 estack_ax_v = res;
1001 estack_ax_t = REG_S64;
1002 next_pc += sizeof(struct binary_op);
1003 PO;
1004 }
1005 OP(BYTECODE_OP_GT_STRING):
1006 {
1007 int res;
1008
1009 res = (stack_strcmp(stack, top, ">") > 0);
1010 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1011 estack_ax_v = res;
1012 estack_ax_t = REG_S64;
1013 next_pc += sizeof(struct binary_op);
1014 PO;
1015 }
1016 OP(BYTECODE_OP_LT_STRING):
1017 {
1018 int res;
1019
1020 res = (stack_strcmp(stack, top, "<") < 0);
1021 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1022 estack_ax_v = res;
1023 estack_ax_t = REG_S64;
1024 next_pc += sizeof(struct binary_op);
1025 PO;
1026 }
1027 OP(BYTECODE_OP_GE_STRING):
1028 {
1029 int res;
1030
1031 res = (stack_strcmp(stack, top, ">=") >= 0);
1032 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1033 estack_ax_v = res;
1034 estack_ax_t = REG_S64;
1035 next_pc += sizeof(struct binary_op);
1036 PO;
1037 }
1038 OP(BYTECODE_OP_LE_STRING):
1039 {
1040 int res;
1041
1042 res = (stack_strcmp(stack, top, "<=") <= 0);
1043 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1044 estack_ax_v = res;
1045 estack_ax_t = REG_S64;
1046 next_pc += sizeof(struct binary_op);
1047 PO;
1048 }
1049
1050 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
1051 {
1052 int res;
1053
1054 res = (stack_star_glob_match(stack, top, "==") == 0);
1055 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1056 estack_ax_v = res;
1057 estack_ax_t = REG_S64;
1058 next_pc += sizeof(struct binary_op);
1059 PO;
1060 }
1061 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1062 {
1063 int res;
1064
1065 res = (stack_star_glob_match(stack, top, "!=") != 0);
1066 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1067 estack_ax_v = res;
1068 estack_ax_t = REG_S64;
1069 next_pc += sizeof(struct binary_op);
1070 PO;
1071 }
1072
1073 OP(BYTECODE_OP_EQ_S64):
1074 {
1075 int res;
1076
1077 res = (estack_bx_v == estack_ax_v);
1078 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1079 estack_ax_v = res;
1080 estack_ax_t = REG_S64;
1081 next_pc += sizeof(struct binary_op);
1082 PO;
1083 }
1084 OP(BYTECODE_OP_NE_S64):
1085 {
1086 int res;
1087
1088 res = (estack_bx_v != estack_ax_v);
1089 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1090 estack_ax_v = res;
1091 estack_ax_t = REG_S64;
1092 next_pc += sizeof(struct binary_op);
1093 PO;
1094 }
1095 OP(BYTECODE_OP_GT_S64):
1096 {
1097 int res;
1098
1099 res = (estack_bx_v > estack_ax_v);
1100 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1101 estack_ax_v = res;
1102 estack_ax_t = REG_S64;
1103 next_pc += sizeof(struct binary_op);
1104 PO;
1105 }
1106 OP(BYTECODE_OP_LT_S64):
1107 {
1108 int res;
1109
1110 res = (estack_bx_v < estack_ax_v);
1111 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1112 estack_ax_v = res;
1113 estack_ax_t = REG_S64;
1114 next_pc += sizeof(struct binary_op);
1115 PO;
1116 }
1117 OP(BYTECODE_OP_GE_S64):
1118 {
1119 int res;
1120
1121 res = (estack_bx_v >= estack_ax_v);
1122 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1123 estack_ax_v = res;
1124 estack_ax_t = REG_S64;
1125 next_pc += sizeof(struct binary_op);
1126 PO;
1127 }
1128 OP(BYTECODE_OP_LE_S64):
1129 {
1130 int res;
1131
1132 res = (estack_bx_v <= estack_ax_v);
1133 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1134 estack_ax_v = res;
1135 estack_ax_t = REG_S64;
1136 next_pc += sizeof(struct binary_op);
1137 PO;
1138 }
1139
1140 OP(BYTECODE_OP_EQ_DOUBLE):
1141 OP(BYTECODE_OP_NE_DOUBLE):
1142 OP(BYTECODE_OP_GT_DOUBLE):
1143 OP(BYTECODE_OP_LT_DOUBLE):
1144 OP(BYTECODE_OP_GE_DOUBLE):
1145 OP(BYTECODE_OP_LE_DOUBLE):
1146 {
1147 BUG_ON(1);
1148 PO;
1149 }
1150
1151 /* Mixed S64-double binary comparators */
1152 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1153 OP(BYTECODE_OP_NE_DOUBLE_S64):
1154 OP(BYTECODE_OP_GT_DOUBLE_S64):
1155 OP(BYTECODE_OP_LT_DOUBLE_S64):
1156 OP(BYTECODE_OP_GE_DOUBLE_S64):
1157 OP(BYTECODE_OP_LE_DOUBLE_S64):
1158 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1159 OP(BYTECODE_OP_NE_S64_DOUBLE):
1160 OP(BYTECODE_OP_GT_S64_DOUBLE):
1161 OP(BYTECODE_OP_LT_S64_DOUBLE):
1162 OP(BYTECODE_OP_GE_S64_DOUBLE):
1163 OP(BYTECODE_OP_LE_S64_DOUBLE):
1164 {
1165 BUG_ON(1);
1166 PO;
1167 }
1168 OP(BYTECODE_OP_BIT_RSHIFT):
1169 {
1170 int64_t res;
1171
1172 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1173 ret = -EINVAL;
1174 goto end;
1175 }
1176
1177 /* Catch undefined behavior. */
1178 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1179 ret = -EINVAL;
1180 goto end;
1181 }
1182 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1183 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1184 estack_ax_v = res;
1185 estack_ax_t = REG_U64;
1186 next_pc += sizeof(struct binary_op);
1187 PO;
1188 }
1189 OP(BYTECODE_OP_BIT_LSHIFT):
1190 {
1191 int64_t res;
1192
1193 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1194 ret = -EINVAL;
1195 goto end;
1196 }
1197
1198 /* Catch undefined behavior. */
1199 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1200 ret = -EINVAL;
1201 goto end;
1202 }
1203 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1204 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1205 estack_ax_v = res;
1206 estack_ax_t = REG_U64;
1207 next_pc += sizeof(struct binary_op);
1208 PO;
1209 }
1210 OP(BYTECODE_OP_BIT_AND):
1211 {
1212 int64_t res;
1213
1214 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1215 ret = -EINVAL;
1216 goto end;
1217 }
1218
1219 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1220 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1221 estack_ax_v = res;
1222 estack_ax_t = REG_U64;
1223 next_pc += sizeof(struct binary_op);
1224 PO;
1225 }
1226 OP(BYTECODE_OP_BIT_OR):
1227 {
1228 int64_t res;
1229
1230 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1231 ret = -EINVAL;
1232 goto end;
1233 }
1234
1235 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1236 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1237 estack_ax_v = res;
1238 estack_ax_t = REG_U64;
1239 next_pc += sizeof(struct binary_op);
1240 PO;
1241 }
1242 OP(BYTECODE_OP_BIT_XOR):
1243 {
1244 int64_t res;
1245
1246 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1247 ret = -EINVAL;
1248 goto end;
1249 }
1250
1251 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1252 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1253 estack_ax_v = res;
1254 estack_ax_t = REG_U64;
1255 next_pc += sizeof(struct binary_op);
1256 PO;
1257 }
1258
1259 /* unary */
1260 OP(BYTECODE_OP_UNARY_PLUS):
1261 OP(BYTECODE_OP_UNARY_MINUS):
1262 OP(BYTECODE_OP_UNARY_NOT):
1263 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1264 (unsigned int) *(bytecode_opcode_t *) pc);
1265 ret = -EINVAL;
1266 goto end;
1267
1268
1269 OP(BYTECODE_OP_UNARY_BIT_NOT):
1270 {
1271 estack_ax_v = ~(uint64_t) estack_ax_v;
1272 estack_ax_t = REG_S64;
1273 next_pc += sizeof(struct unary_op);
1274 PO;
1275 }
1276
1277 OP(BYTECODE_OP_UNARY_PLUS_S64):
1278 {
1279 next_pc += sizeof(struct unary_op);
1280 PO;
1281 }
1282 OP(BYTECODE_OP_UNARY_MINUS_S64):
1283 {
1284 estack_ax_v = -estack_ax_v;
1285 estack_ax_t = REG_S64;
1286 next_pc += sizeof(struct unary_op);
1287 PO;
1288 }
1289 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1290 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1291 {
1292 BUG_ON(1);
1293 PO;
1294 }
1295 OP(BYTECODE_OP_UNARY_NOT_S64):
1296 {
1297 estack_ax_v = !estack_ax_v;
1298 estack_ax_t = REG_S64;
1299 next_pc += sizeof(struct unary_op);
1300 PO;
1301 }
1302 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1303 {
1304 BUG_ON(1);
1305 PO;
1306 }
1307
1308 /* logical */
1309 OP(BYTECODE_OP_AND):
1310 {
1311 struct logical_op *insn = (struct logical_op *) pc;
1312
1313 /* If AX is 0, skip and evaluate to 0 */
1314 if (unlikely(estack_ax_v == 0)) {
1315 dbg_printk("Jumping to bytecode offset %u\n",
1316 (unsigned int) insn->skip_offset);
1317 next_pc = start_pc + insn->skip_offset;
1318 } else {
1319 /* Pop 1 when jump not taken */
1320 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1321 next_pc += sizeof(struct logical_op);
1322 }
1323 PO;
1324 }
1325 OP(BYTECODE_OP_OR):
1326 {
1327 struct logical_op *insn = (struct logical_op *) pc;
1328
1329 /* If AX is nonzero, skip and evaluate to 1 */
1330
1331 if (unlikely(estack_ax_v != 0)) {
1332 estack_ax_v = 1;
1333 dbg_printk("Jumping to bytecode offset %u\n",
1334 (unsigned int) insn->skip_offset);
1335 next_pc = start_pc + insn->skip_offset;
1336 } else {
1337 /* Pop 1 when jump not taken */
1338 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1339 next_pc += sizeof(struct logical_op);
1340 }
1341 PO;
1342 }
1343
1344
1345 /* load field ref */
1346 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1347 {
1348 struct load_op *insn = (struct load_op *) pc;
1349 struct field_ref *ref = (struct field_ref *) insn->data;
1350
1351 dbg_printk("load field ref offset %u type string\n",
1352 ref->offset);
1353 estack_push(stack, top, ax, bx, ax_t, bx_t);
1354 estack_ax(stack, top)->u.s.str =
1355 *(const char * const *) &interpreter_stack_data[ref->offset];
1356 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1357 dbg_printk("Bytecode warning: loading a NULL string.\n");
1358 ret = -EINVAL;
1359 goto end;
1360 }
1361 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1362 estack_ax(stack, top)->u.s.literal_type =
1363 ESTACK_STRING_LITERAL_TYPE_NONE;
1364 estack_ax(stack, top)->u.s.user = 0;
1365 estack_ax(stack, top)->type = REG_STRING;
1366 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1367 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1368 PO;
1369 }
1370
1371 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1372 {
1373 struct load_op *insn = (struct load_op *) pc;
1374 struct field_ref *ref = (struct field_ref *) insn->data;
1375
1376 dbg_printk("load field ref offset %u type sequence\n",
1377 ref->offset);
1378 estack_push(stack, top, ax, bx, ax_t, bx_t);
1379 estack_ax(stack, top)->u.s.seq_len =
1380 *(unsigned long *) &interpreter_stack_data[ref->offset];
1381 estack_ax(stack, top)->u.s.str =
1382 *(const char **) (&interpreter_stack_data[ref->offset
1383 + sizeof(unsigned long)]);
1384 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1385 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1386 ret = -EINVAL;
1387 goto end;
1388 }
1389 estack_ax(stack, top)->u.s.literal_type =
1390 ESTACK_STRING_LITERAL_TYPE_NONE;
1391 estack_ax(stack, top)->u.s.user = 0;
1392 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1393 PO;
1394 }
1395
1396 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
1397 {
1398 struct load_op *insn = (struct load_op *) pc;
1399 struct field_ref *ref = (struct field_ref *) insn->data;
1400
1401 dbg_printk("load field ref offset %u type s64\n",
1402 ref->offset);
1403 estack_push(stack, top, ax, bx, ax_t, bx_t);
1404 estack_ax_v =
1405 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
1406 estack_ax_t = REG_S64;
1407 dbg_printk("ref load s64 %lld\n",
1408 (long long) estack_ax_v);
1409 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1410 PO;
1411 }
1412
1413 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
1414 {
1415 BUG_ON(1);
1416 PO;
1417 }
1418
1419 /* load from immediate operand */
1420 OP(BYTECODE_OP_LOAD_STRING):
1421 {
1422 struct load_op *insn = (struct load_op *) pc;
1423
1424 dbg_printk("load string %s\n", insn->data);
1425 estack_push(stack, top, ax, bx, ax_t, bx_t);
1426 estack_ax(stack, top)->u.s.str = insn->data;
1427 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1428 estack_ax(stack, top)->u.s.literal_type =
1429 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1430 estack_ax(stack, top)->u.s.user = 0;
1431 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1432 PO;
1433 }
1434
1435 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
1436 {
1437 struct load_op *insn = (struct load_op *) pc;
1438
1439 dbg_printk("load globbing pattern %s\n", insn->data);
1440 estack_push(stack, top, ax, bx, ax_t, bx_t);
1441 estack_ax(stack, top)->u.s.str = insn->data;
1442 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1443 estack_ax(stack, top)->u.s.literal_type =
1444 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1445 estack_ax(stack, top)->u.s.user = 0;
1446 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1447 PO;
1448 }
1449
1450 OP(BYTECODE_OP_LOAD_S64):
1451 {
1452 struct load_op *insn = (struct load_op *) pc;
1453
1454 estack_push(stack, top, ax, bx, ax_t, bx_t);
1455 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1456 estack_ax_t = REG_S64;
1457 dbg_printk("load s64 %lld\n",
1458 (long long) estack_ax_v);
1459 next_pc += sizeof(struct load_op)
1460 + sizeof(struct literal_numeric);
1461 PO;
1462 }
1463
1464 OP(BYTECODE_OP_LOAD_DOUBLE):
1465 {
1466 BUG_ON(1);
1467 PO;
1468 }
1469
1470 /* cast */
1471 OP(BYTECODE_OP_CAST_TO_S64):
1472 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1473 (unsigned int) *(bytecode_opcode_t *) pc);
1474 ret = -EINVAL;
1475 goto end;
1476
1477 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
1478 {
1479 BUG_ON(1);
1480 PO;
1481 }
1482
1483 OP(BYTECODE_OP_CAST_NOP):
1484 {
1485 next_pc += sizeof(struct cast_op);
1486 PO;
1487 }
1488
1489 /* get context ref */
1490 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
1491 {
1492 struct load_op *insn = (struct load_op *) pc;
1493 struct field_ref *ref = (struct field_ref *) insn->data;
1494 struct lttng_kernel_ctx_field *ctx_field;
1495 struct lttng_ctx_value v;
1496
1497 dbg_printk("get context ref offset %u type string\n",
1498 ref->offset);
1499 ctx_field = &lttng_static_ctx->fields[ref->offset];
1500 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
1501 estack_push(stack, top, ax, bx, ax_t, bx_t);
1502 estack_ax(stack, top)->u.s.str = v.u.str;
1503 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1504 dbg_printk("Bytecode warning: loading a NULL string.\n");
1505 ret = -EINVAL;
1506 goto end;
1507 }
1508 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1509 estack_ax(stack, top)->u.s.literal_type =
1510 ESTACK_STRING_LITERAL_TYPE_NONE;
1511 estack_ax(stack, top)->u.s.user = 0;
1512 estack_ax(stack, top)->type = REG_STRING;
1513 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1514 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1515 PO;
1516 }
1517
1518 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
1519 {
1520 struct load_op *insn = (struct load_op *) pc;
1521 struct field_ref *ref = (struct field_ref *) insn->data;
1522 struct lttng_kernel_ctx_field *ctx_field;
1523 struct lttng_ctx_value v;
1524
1525 dbg_printk("get context ref offset %u type s64\n",
1526 ref->offset);
1527 ctx_field = &lttng_static_ctx->fields[ref->offset];
1528 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
1529 estack_push(stack, top, ax, bx, ax_t, bx_t);
1530 estack_ax_v = v.u.s64;
1531 estack_ax_t = REG_S64;
1532 dbg_printk("ref get context s64 %lld\n",
1533 (long long) estack_ax_v);
1534 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1535 PO;
1536 }
1537
1538 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
1539 {
1540 BUG_ON(1);
1541 PO;
1542 }
1543
1544 /* load userspace field ref */
1545 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING):
1546 {
1547 struct load_op *insn = (struct load_op *) pc;
1548 struct field_ref *ref = (struct field_ref *) insn->data;
1549
1550 dbg_printk("load field ref offset %u type user string\n",
1551 ref->offset);
1552 estack_push(stack, top, ax, bx, ax_t, bx_t);
1553 estack_ax(stack, top)->u.s.user_str =
1554 *(const char * const *) &interpreter_stack_data[ref->offset];
1555 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1556 dbg_printk("Bytecode warning: loading a NULL string.\n");
1557 ret = -EINVAL;
1558 goto end;
1559 }
1560 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1561 estack_ax(stack, top)->u.s.literal_type =
1562 ESTACK_STRING_LITERAL_TYPE_NONE;
1563 estack_ax(stack, top)->u.s.user = 1;
1564 estack_ax(stack, top)->type = REG_STRING;
1565 dbg_load_ref_user_str_printk(estack_ax(stack, top));
1566 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1567 PO;
1568 }
1569
1570 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1571 {
1572 struct load_op *insn = (struct load_op *) pc;
1573 struct field_ref *ref = (struct field_ref *) insn->data;
1574
1575 dbg_printk("load field ref offset %u type user sequence\n",
1576 ref->offset);
1577 estack_push(stack, top, ax, bx, ax_t, bx_t);
1578 estack_ax(stack, top)->u.s.seq_len =
1579 *(unsigned long *) &interpreter_stack_data[ref->offset];
1580 estack_ax(stack, top)->u.s.user_str =
1581 *(const char **) (&interpreter_stack_data[ref->offset
1582 + sizeof(unsigned long)]);
1583 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1584 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1585 ret = -EINVAL;
1586 goto end;
1587 }
1588 estack_ax(stack, top)->u.s.literal_type =
1589 ESTACK_STRING_LITERAL_TYPE_NONE;
1590 estack_ax(stack, top)->u.s.user = 1;
1591 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1592 PO;
1593 }
1594
1595 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
1596 {
1597 dbg_printk("op get context root\n");
1598 estack_push(stack, top, ax, bx, ax_t, bx_t);
1599 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1600 /* "field" only needed for variants. */
1601 estack_ax(stack, top)->u.ptr.field = NULL;
1602 estack_ax(stack, top)->type = REG_PTR;
1603 next_pc += sizeof(struct load_op);
1604 PO;
1605 }
1606
1607 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
1608 {
1609 BUG_ON(1);
1610 PO;
1611 }
1612
1613 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
1614 {
1615 dbg_printk("op get app payload root\n");
1616 estack_push(stack, top, ax, bx, ax_t, bx_t);
1617 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1618 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
1619 /* "field" only needed for variants. */
1620 estack_ax(stack, top)->u.ptr.field = NULL;
1621 estack_ax(stack, top)->type = REG_PTR;
1622 next_pc += sizeof(struct load_op);
1623 PO;
1624 }
1625
1626 OP(BYTECODE_OP_GET_SYMBOL):
1627 {
1628 dbg_printk("op get symbol\n");
1629 switch (estack_ax(stack, top)->u.ptr.type) {
1630 case LOAD_OBJECT:
1631 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1632 ret = -EINVAL;
1633 goto end;
1634 case LOAD_ROOT_CONTEXT:
1635 case LOAD_ROOT_APP_CONTEXT:
1636 case LOAD_ROOT_PAYLOAD:
1637 /*
1638 * symbol lookup is performed by
1639 * specialization.
1640 */
1641 ret = -EINVAL;
1642 goto end;
1643 }
1644 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1645 PO;
1646 }
1647
1648 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
1649 {
1650 /*
1651 * Used for first variant encountered in a
1652 * traversal. Variants are not implemented yet.
1653 */
1654 ret = -EINVAL;
1655 goto end;
1656 }
1657
1658 OP(BYTECODE_OP_GET_INDEX_U16):
1659 {
1660 struct load_op *insn = (struct load_op *) pc;
1661 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1662
1663 dbg_printk("op get index u16\n");
1664 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1665 if (ret)
1666 goto end;
1667 estack_ax_v = estack_ax(stack, top)->u.v;
1668 estack_ax_t = estack_ax(stack, top)->type;
1669 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1670 PO;
1671 }
1672
1673 OP(BYTECODE_OP_GET_INDEX_U64):
1674 {
1675 struct load_op *insn = (struct load_op *) pc;
1676 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1677
1678 dbg_printk("op get index u64\n");
1679 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1680 if (ret)
1681 goto end;
1682 estack_ax_v = estack_ax(stack, top)->u.v;
1683 estack_ax_t = estack_ax(stack, top)->type;
1684 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1685 PO;
1686 }
1687
1688 OP(BYTECODE_OP_LOAD_FIELD):
1689 {
1690 dbg_printk("op load field\n");
1691 ret = dynamic_load_field(estack_ax(stack, top));
1692 if (ret)
1693 goto end;
1694 estack_ax_v = estack_ax(stack, top)->u.v;
1695 estack_ax_t = estack_ax(stack, top)->type;
1696 next_pc += sizeof(struct load_op);
1697 PO;
1698 }
1699
1700 OP(BYTECODE_OP_LOAD_FIELD_S8):
1701 {
1702 dbg_printk("op load field s8\n");
1703
1704 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1705 estack_ax_t = REG_S64;
1706 next_pc += sizeof(struct load_op);
1707 PO;
1708 }
1709 OP(BYTECODE_OP_LOAD_FIELD_S16):
1710 {
1711 dbg_printk("op load field s16\n");
1712
1713 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1714 estack_ax_t = REG_S64;
1715 next_pc += sizeof(struct load_op);
1716 PO;
1717 }
1718 OP(BYTECODE_OP_LOAD_FIELD_S32):
1719 {
1720 dbg_printk("op load field s32\n");
1721
1722 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1723 estack_ax_t = REG_S64;
1724 next_pc += sizeof(struct load_op);
1725 PO;
1726 }
1727 OP(BYTECODE_OP_LOAD_FIELD_S64):
1728 {
1729 dbg_printk("op load field s64\n");
1730
1731 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1732 estack_ax_t = REG_S64;
1733 next_pc += sizeof(struct load_op);
1734 PO;
1735 }
1736 OP(BYTECODE_OP_LOAD_FIELD_U8):
1737 {
1738 dbg_printk("op load field u8\n");
1739
1740 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1741 estack_ax_t = REG_S64;
1742 next_pc += sizeof(struct load_op);
1743 PO;
1744 }
1745 OP(BYTECODE_OP_LOAD_FIELD_U16):
1746 {
1747 dbg_printk("op load field u16\n");
1748
1749 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1750 estack_ax_t = REG_S64;
1751 next_pc += sizeof(struct load_op);
1752 PO;
1753 }
1754 OP(BYTECODE_OP_LOAD_FIELD_U32):
1755 {
1756 dbg_printk("op load field u32\n");
1757
1758 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1759 estack_ax_t = REG_S64;
1760 next_pc += sizeof(struct load_op);
1761 PO;
1762 }
1763 OP(BYTECODE_OP_LOAD_FIELD_U64):
1764 {
1765 dbg_printk("op load field u64\n");
1766
1767 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1768 estack_ax_t = REG_S64;
1769 next_pc += sizeof(struct load_op);
1770 PO;
1771 }
1772 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
1773 {
1774 ret = -EINVAL;
1775 goto end;
1776 }
1777
1778 OP(BYTECODE_OP_LOAD_FIELD_STRING):
1779 {
1780 const char *str;
1781
1782 dbg_printk("op load field string\n");
1783 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1784 estack_ax(stack, top)->u.s.str = str;
1785 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1786 dbg_printk("Bytecode warning: loading a NULL string.\n");
1787 ret = -EINVAL;
1788 goto end;
1789 }
1790 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1791 estack_ax(stack, top)->u.s.literal_type =
1792 ESTACK_STRING_LITERAL_TYPE_NONE;
1793 estack_ax(stack, top)->type = REG_STRING;
1794 next_pc += sizeof(struct load_op);
1795 PO;
1796 }
1797
1798 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
1799 {
1800 const char *ptr;
1801
1802 dbg_printk("op load field string sequence\n");
1803 ptr = estack_ax(stack, top)->u.ptr.ptr;
1804 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1805 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1806 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1807 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1808 ret = -EINVAL;
1809 goto end;
1810 }
1811 estack_ax(stack, top)->u.s.literal_type =
1812 ESTACK_STRING_LITERAL_TYPE_NONE;
1813 estack_ax(stack, top)->type = REG_STRING;
1814 next_pc += sizeof(struct load_op);
1815 PO;
1816 }
1817
1818 END_OP
1819 end:
1820 /* No need to prepare output if an error occurred. */
1821 if (ret)
1822 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
1823
1824 /* Prepare output. */
1825 switch (kernel_bytecode->type) {
1826 case LTTNG_KERNEL_BYTECODE_TYPE_FILTER:
1827 {
1828 struct lttng_kernel_bytecode_filter_ctx *filter_ctx =
1829 (struct lttng_kernel_bytecode_filter_ctx *) caller_ctx;
1830 if (retval)
1831 filter_ctx->result = LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT;
1832 else
1833 filter_ctx->result = LTTNG_KERNEL_BYTECODE_FILTER_REJECT;
1834 break;
1835 }
1836 case LTTNG_KERNEL_BYTECODE_TYPE_CAPTURE:
1837 ret = lttng_bytecode_interpret_format_output(estack_ax(stack, top),
1838 (struct lttng_interpreter_output *) caller_ctx);
1839 break;
1840 default:
1841 ret = -EINVAL;
1842 break;
1843 }
1844 if (ret)
1845 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
1846 else
1847 return LTTNG_KERNEL_BYTECODE_INTERPRETER_OK;
1848 }
/*
 * Exempt the interpreter from objtool stack-frame validation (see
 * wrapper/objtool.h); presumably needed because the OP()/PO dispatch
 * above uses computed gotos, which objtool cannot follow — confirm
 * against the wrapper definition.
 */
LTTNG_STACK_FRAME_NON_STANDARD(lttng_bytecode_interpret);
1850
1851 /*
1852 * Return LTTNG_KERNEL_EVENT_FILTER_ACCEPT or LTTNG_KERNEL_EVENT_FILTER_REJECT.
1853 */
1854 int lttng_kernel_interpret_event_filter(const struct lttng_kernel_event_common *event,
1855 const char *interpreter_stack_data,
1856 struct lttng_kernel_probe_ctx *probe_ctx,
1857 void *event_filter_ctx __attribute__((unused)))
1858 {
1859 struct lttng_kernel_bytecode_runtime *filter_bc_runtime;
1860 struct list_head *filter_bytecode_runtime_head = &event->priv->filter_bytecode_runtime_head;
1861 struct lttng_kernel_bytecode_filter_ctx bytecode_filter_ctx;
1862 bool filter_record = false;
1863
1864 list_for_each_entry_rcu(filter_bc_runtime, filter_bytecode_runtime_head, node) {
1865 if (likely(filter_bc_runtime->interpreter_func(filter_bc_runtime,
1866 interpreter_stack_data, probe_ctx, &bytecode_filter_ctx) == LTTNG_KERNEL_BYTECODE_INTERPRETER_OK)) {
1867 if (unlikely(bytecode_filter_ctx.result == LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT)) {
1868 filter_record = true;
1869 break;
1870 }
1871 }
1872 }
1873 if (filter_record)
1874 return LTTNG_KERNEL_EVENT_FILTER_ACCEPT;
1875 else
1876 return LTTNG_KERNEL_EVENT_FILTER_REJECT;
1877 }
1878
/*
 * The dispatch macros are local to this interpreter; undefine them so
 * they cannot leak into subsequently included/compiled code.
 */
#undef START_OP
#undef OP
#undef PO
#undef END_OP