Rename "tsc" to "timestamp"
[lttng-modules.git] / src / lttng-bytecode-interpreter.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-interpreter.c
4 *
5 * LTTng modules bytecode interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/compiler_attributes.h>
11 #include <wrapper/uaccess.h>
12 #include <wrapper/objtool.h>
13 #include <wrapper/types.h>
14 #include <linux/swab.h>
15
16 #include <lttng/lttng-bytecode.h>
17 #include <lttng/string-utils.h>
18 #include <lttng/events-internal.h>
19
20 /*
21 * get_char should be called with page fault handler disabled if it is expected
22 * to handle user-space read.
23 */
24 static
25 char get_char(const struct estack_entry *reg, size_t offset)
26 {
27 if (unlikely(offset >= reg->u.s.seq_len))
28 return '\0';
29 if (reg->u.s.user) {
30 char c;
31
32 /* Handle invalid access as end of string. */
33 if (unlikely(!lttng_access_ok(VERIFY_READ,
34 reg->u.s.user_str + offset,
35 sizeof(c))))
36 return '\0';
37 /* Handle fault (nonzero return value) as end of string. */
38 if (unlikely(__copy_from_user_inatomic(&c,
39 reg->u.s.user_str + offset,
40 sizeof(c))))
41 return '\0';
42 return c;
43 } else {
44 return reg->u.s.str[offset];
45 }
46 }
47
48 /*
49 * -1: wildcard found.
50 * -2: unknown escape char.
51 * 0: normal char.
52 */
53 static
54 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
55 {
56 switch (*c) {
57 case '\\':
58 (*offset)++;
59 *c = get_char(reg, *offset);
60 switch (*c) {
61 case '\\':
62 case '*':
63 return 0;
64 default:
65 return -2;
66 }
67 case '*':
68 return -1;
69 default:
70 return 0;
71 }
72 }
73
74 static
75 char get_char_at_cb(size_t at, void *data)
76 {
77 return get_char(data, at);
78 }
79
80 static
81 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
82 {
83 bool has_user = false;
84 int result;
85 struct estack_entry *pattern_reg;
86 struct estack_entry *candidate_reg;
87
88 /* Disable the page fault handler when reading from userspace. */
89 if (estack_bx(stack, top)->u.s.user
90 || estack_ax(stack, top)->u.s.user) {
91 has_user = true;
92 pagefault_disable();
93 }
94
95 /* Find out which side is the pattern vs. the candidate. */
96 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
97 pattern_reg = estack_ax(stack, top);
98 candidate_reg = estack_bx(stack, top);
99 } else {
100 pattern_reg = estack_bx(stack, top);
101 candidate_reg = estack_ax(stack, top);
102 }
103
104 /* Perform the match operation. */
105 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
106 pattern_reg, get_char_at_cb, candidate_reg);
107 if (has_user)
108 pagefault_enable();
109
110 return result;
111 }
112
/*
 * strcmp-like comparison of the two string registers at the top of the
 * stack (bx vs ax), returning <0, 0 or >0. Plain string literals may
 * contain '\' escapes and '*' wildcards: a wildcard on either side
 * matches the remainder, yielding equality. cmp_type is unused by the
 * comparison itself.
 */
static
int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
{
	size_t offset_bx = 0, offset_ax = 0;
	int diff, has_user = 0;

	/* Disable the page fault handler when reading from userspace. */
	if (estack_bx(stack, top)->u.s.user
			|| estack_ax(stack, top)->u.s.user) {
		has_user = 1;
		pagefault_disable();
	}

	for (;;) {
		int ret;
		/* Set when bx consumed an unknown escape this iteration. */
		int escaped_r0 = 0;
		char char_bx, char_ax;

		char_bx = get_char(estack_bx(stack, top), offset_bx);
		char_ax = get_char(estack_ax(stack, top), offset_ax);

		if (unlikely(char_bx == '\0')) {
			if (char_ax == '\0') {
				/* Both strings ended together: equal. */
				diff = 0;
				break;
			} else {
				if (estack_ax(stack, top)->u.s.literal_type ==
						ESTACK_STRING_LITERAL_TYPE_PLAIN) {
					ret = parse_char(estack_ax(stack, top),
						&char_ax, &offset_ax);
					/* Wildcard in ax matches the end of bx. */
					if (ret == -1) {
						diff = 0;
						break;
					}
				}
				/* bx is a strict prefix of ax. */
				diff = -1;
				break;
			}
		}
		if (unlikely(char_ax == '\0')) {
			if (estack_bx(stack, top)->u.s.literal_type ==
					ESTACK_STRING_LITERAL_TYPE_PLAIN) {
				ret = parse_char(estack_bx(stack, top),
					&char_bx, &offset_bx);
				/* Wildcard in bx matches the end of ax. */
				if (ret == -1) {
					diff = 0;
					break;
				}
			}
			/* ax is a strict prefix of bx. */
			diff = 1;
			break;
		}
		if (estack_bx(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_bx(stack, top),
				&char_bx, &offset_bx);
			if (ret == -1) {
				diff = 0;
				break;
			} else if (ret == -2) {
				escaped_r0 = 1;
			}
			/* else compare both char */
		}
		if (estack_ax(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_ax(stack, top),
				&char_ax, &offset_ax);
			if (ret == -1) {
				diff = 0;
				break;
			} else if (ret == -2) {
				/* Unknown escape on ax only: bx sorts lower. */
				if (!escaped_r0) {
					diff = -1;
					break;
				}
			} else {
				/* Unknown escape on bx only: bx sorts higher. */
				if (escaped_r0) {
					diff = 1;
					break;
				}
			}
		} else {
			if (escaped_r0) {
				diff = 1;
				break;
			}
		}
		diff = char_bx - char_ax;
		if (diff != 0)
			break;
		offset_bx++;
		offset_ax++;
	}
	if (has_user)
		pagefault_enable();

	return diff;
}
211
/*
 * Interpreter stub: unconditionally returns
 * LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR, ignoring all arguments.
 */
int lttng_bytecode_interpret_error(
		struct lttng_kernel_bytecode_runtime *bytecode_runtime __attribute__((unused)),
		const char *stack_data __attribute__((unused)),
		struct lttng_kernel_probe_ctx *probe_ctx __attribute__((unused)),
		void *ctx __attribute__((unused)))
{
	return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
}
220
#ifdef INTERPRETER_USE_SWITCH

/*
 * Fallback for compilers that do not support taking address of labels:
 * dispatch opcodes with a switch inside a fetch loop.
 *
 * Fix: start the instruction stream at &bytecode->code[0], matching the
 * dispatch-table variant below. The data[] area holds auxiliary runtime
 * data (see runtime->data[index] in dynamic_get_index()), not opcodes.
 */

#define START_OP							\
		start_pc = &bytecode->code[0];				\
		for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
				pc = next_pc) {				\
			dbg_printk("LTTng: Executing op %s (%u)\n",	\
				lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
				(unsigned int) *(bytecode_opcode_t *) pc); \
			switch (*(bytecode_opcode_t *) pc)	{

#define OP(name)	case name

#define PO		break

#define END_OP		}					\
	}

#else

/*
 * Dispatch-table based interpreter: each opcode handler jumps directly
 * to the label of the next instruction (computed gotos).
 */

#define START_OP						\
		start_pc = &bytecode->code[0];			\
		pc = next_pc = start_pc;			\
		if (unlikely(pc - start_pc >= bytecode->len))	\
			goto end;				\
		goto *dispatch[*(bytecode_opcode_t *) pc];

#define OP(name)						\
LABEL_##name

#define PO							\
		pc = next_pc;					\
		goto *dispatch[*(bytecode_opcode_t *) pc];

#define END_OP

#endif

/* True when the register type is one of the 64-bit integer types. */
#define IS_INTEGER_REGISTER(reg_type) \
	(reg_type == REG_S64 || reg_type == REG_U64)
269
/*
 * Load the context field at index @idx of the static context table into
 * @ptr as a LOAD_OBJECT. Supported context field types: integer, enum,
 * string, and string-encoded arrays/sequences of bytewise integers.
 * Returns 0 on success, -EINVAL for unsupported types.
 */
static int context_get_index(struct lttng_kernel_probe_ctx *lttng_probe_ctx,
		struct load_ptr *ptr,
		uint32_t idx)
{

	struct lttng_kernel_ctx_field *ctx_field;
	const struct lttng_kernel_event_field *field;
	struct lttng_ctx_value v;

	ctx_field = &lttng_static_ctx->fields[idx];
	field = ctx_field->event_field;
	ptr->type = LOAD_OBJECT;
	/* field is only used for types nested within variants. */
	ptr->field = NULL;

	switch (field->type->type) {
	case lttng_kernel_type_integer:
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		if (lttng_kernel_get_type_integer(field->type)->signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.u.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.u.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	case lttng_kernel_type_enum:
	{
		const struct lttng_kernel_type_enum *enum_type = lttng_kernel_get_type_enum(field->type);
		const struct lttng_kernel_type_integer *integer_type = lttng_kernel_get_type_integer(enum_type->container_type);

		/* Enum signedness follows its integer container type. */
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		if (integer_type->signedness) {
			ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
			ptr->u.s64 = v.u.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
			ptr->u.u64 = v.u.s64; /* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		break;
	}
	case lttng_kernel_type_array:
	{
		const struct lttng_kernel_type_array *array_type = lttng_kernel_get_type_array(field->type);

		if (!lttng_kernel_type_is_bytewise_integer(array_type->elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (array_type->encoding == lttng_kernel_string_encoding_none) {
			printk(KERN_WARNING "LTTng: bytecode: Only string arrays are supported for contexts.\n");
			return -EINVAL;
		}
		/* String-encoded array: load as a string object. */
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		ptr->ptr = v.u.str;
		break;
	}
	case lttng_kernel_type_sequence:
	{
		const struct lttng_kernel_type_sequence *sequence_type = lttng_kernel_get_type_sequence(field->type);

		if (!lttng_kernel_type_is_bytewise_integer(sequence_type->elem_type)) {
			printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (sequence_type->encoding == lttng_kernel_string_encoding_none) {
			printk(KERN_WARNING "LTTng: bytecode: Only string sequences are supported for contexts.\n");
			return -EINVAL;
		}
		/* String-encoded sequence: load as a string object. */
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		ptr->ptr = v.u.str;
		break;
	}
	case lttng_kernel_type_string:
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
		ptr->ptr = v.u.str;
		break;
	case lttng_kernel_type_struct:
		printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
		return -EINVAL;
	case lttng_kernel_type_variant:
		printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d", (int) field->type->type);
		return -EINVAL;
	}
	return 0;
}
366
/*
 * Apply a "get index" bytecode instruction to the pointer at the top of
 * the stack, descending one level into an array, sequence, context, or
 * event payload. Index metadata (offsets, element type/byte order) is
 * read from the runtime data area at @index. On success the stack top
 * becomes a REG_PTR and 0 is returned; negative error otherwise.
 */
static int dynamic_get_index(struct lttng_kernel_probe_ctx *lttng_probe_ctx,
		struct bytecode_runtime *runtime,
		uint64_t index, struct estack_entry *stack_top)
{
	int ret;
	const struct bytecode_get_index_data *gid;

	gid = (const struct bytecode_get_index_data *) &runtime->data[index];
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		switch (stack_top->u.ptr.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const char *ptr;

			WARN_ON_ONCE(gid->offset >= gid->array_len);
			/* Skip count (unsigned long) */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			BUG_ON(stack_top->u.ptr.field->type->type != lttng_kernel_type_array);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const char *ptr;
			size_t ptr_seq_len;

			/* Sequence layout: length word, then data pointer. */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
			/* Bounds-check the offset against the runtime length. */
			if (gid->offset >= gid->elem.len * ptr_seq_len) {
				ret = -EINVAL;
				goto end;
			}
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			BUG_ON(stack_top->u.ptr.field->type->type != lttng_kernel_type_sequence);
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			printk(KERN_WARNING "LTTng: bytecode: Nested structures are not supported yet.\n");
			ret = -EINVAL;
			goto end;
		case OBJECT_TYPE_VARIANT:
		default:
			printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d",
				(int) stack_top->u.ptr.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
		lttng_fallthrough;
	case LOAD_ROOT_APP_CONTEXT:
	{
		/* Context roots resolve through the static context table. */
		ret = context_get_index(lttng_probe_ctx,
				&stack_top->u.ptr,
				gid->ctx_index);
		if (ret) {
			goto end;
		}
		break;
	}
	case LOAD_ROOT_PAYLOAD:
		stack_top->u.ptr.ptr += gid->offset;
		/* Payload strings are stored by reference: follow it. */
		if (gid->elem.type == OBJECT_TYPE_STRING)
			stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
		stack_top->u.ptr.object_type = gid->elem.type;
		stack_top->u.ptr.type = LOAD_OBJECT;
		stack_top->u.ptr.field = gid->field;
		stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
		break;
	}

	stack_top->type = REG_PTR;

	return 0;

end:
	return ret;
}
454
/*
 * Dereference the LOAD_OBJECT pointer at the top of the stack,
 * converting it into a concrete integer (REG_S64/REG_U64) or string
 * (REG_STRING) register. Multi-byte integers are byte-swapped when the
 * object is tagged with reverse byte order. Returns 0 on success,
 * -EINVAL for root pointers and non-loadable object types.
 */
static int dynamic_load_field(struct estack_entry *stack_top)
{
	int ret;

	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		break;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
	default:
		dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
		ret = -EINVAL;
		goto end;
	}
	switch (stack_top->u.ptr.object_type) {
	case OBJECT_TYPE_S8:
		dbg_printk("op load field s8\n");
		/* Single byte: no byte-order handling needed. */
		stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_S64;
		break;
	case OBJECT_TYPE_S16:
	{
		int16_t tmp;

		dbg_printk("op load field s16\n");
		tmp = *(int16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S32:
	{
		int32_t tmp;

		dbg_printk("op load field s32\n");
		tmp = *(int32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_S64:
	{
		int64_t tmp;

		dbg_printk("op load field s64\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_SIGNED_ENUM:
	{
		int64_t tmp;

		dbg_printk("op load field signed enumeration\n");
		tmp = *(int64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_S64;
		break;
	}
	case OBJECT_TYPE_U8:
		dbg_printk("op load field u8\n");
		/* Single byte: no byte-order handling needed. */
		stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
		stack_top->type = REG_U64;
		break;
	case OBJECT_TYPE_U16:
	{
		uint16_t tmp;

		dbg_printk("op load field u16\n");
		tmp = *(uint16_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab16s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U32:
	{
		uint32_t tmp;

		dbg_printk("op load field u32\n");
		tmp = *(uint32_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab32s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_U64:
	{
		uint64_t tmp;

		dbg_printk("op load field u64\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_UNSIGNED_ENUM:
	{
		uint64_t tmp;

		dbg_printk("op load field unsigned enumeration\n");
		tmp = *(uint64_t *) stack_top->u.ptr.ptr;
		if (stack_top->u.ptr.rev_bo)
			__swab64s(&tmp);
		stack_top->u.v = tmp;
		stack_top->type = REG_U64;
		break;
	}
	case OBJECT_TYPE_STRING:
	{
		const char *str;

		dbg_printk("op load field string\n");
		str = (const char *) stack_top->u.ptr.ptr;
		stack_top->u.s.str = str;
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Bytecode warning: loading a NULL string.\n");
			ret = -EINVAL;
			goto end;
		}
		/* NUL-terminated string: no explicit length bound. */
		stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_STRING_SEQUENCE:
	{
		const char *ptr;

		dbg_printk("op load field string sequence\n");
		/* Sequence layout: length word, then data pointer. */
		ptr = stack_top->u.ptr.ptr;
		stack_top->u.s.seq_len = *(unsigned long *) ptr;
		stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
		if (unlikely(!stack_top->u.s.str)) {
			dbg_printk("Bytecode warning: loading a NULL sequence.\n");
			ret = -EINVAL;
			goto end;
		}
		stack_top->u.s.literal_type =
			ESTACK_STRING_LITERAL_TYPE_NONE;
		stack_top->type = REG_STRING;
		break;
	}
	case OBJECT_TYPE_DYNAMIC:
		/*
		 * Dynamic types in context are looked up
		 * by context get index.
		 */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_DOUBLE:
		/* Floating point is not supported in the kernel interpreter. */
		ret = -EINVAL;
		goto end;
	case OBJECT_TYPE_SEQUENCE:
	case OBJECT_TYPE_ARRAY:
	case OBJECT_TYPE_STRUCT:
	case OBJECT_TYPE_VARIANT:
		printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
		ret = -EINVAL;
		goto end;
	}
	return 0;

end:
	return ret;
}
636
/*
 * Convert the ax register into a struct lttng_interpreter_output
 * (capture bytecode result). REG_PTR objects are first materialized
 * via dynamic_load_field(), retrying until a concrete type is reached.
 * Returns 0 on success, -EINVAL for types that cannot be output.
 */
static
int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
		struct lttng_interpreter_output *output)
{
	int ret;

again:
	switch (ax->type) {
	case REG_S64:
		output->type = LTTNG_INTERPRETER_TYPE_S64;
		output->u.s = ax->u.v;
		break;
	case REG_U64:
		output->type = LTTNG_INTERPRETER_TYPE_U64;
		output->u.u = (uint64_t) ax->u.v;
		break;
	case REG_STRING:
		output->type = LTTNG_INTERPRETER_TYPE_STRING;
		output->u.str.str = ax->u.s.str;
		output->u.str.len = ax->u.s.seq_len;
		break;
	case REG_PTR:
		switch (ax->u.ptr.object_type) {
		case OBJECT_TYPE_S8:
		case OBJECT_TYPE_S16:
		case OBJECT_TYPE_S32:
		case OBJECT_TYPE_S64:
		case OBJECT_TYPE_U8:
		case OBJECT_TYPE_U16:
		case OBJECT_TYPE_U32:
		case OBJECT_TYPE_U64:
		case OBJECT_TYPE_DOUBLE:
		case OBJECT_TYPE_STRING:
		case OBJECT_TYPE_STRING_SEQUENCE:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			/* Retry after loading ptr into stack top. */
			goto again;
		case OBJECT_TYPE_SEQUENCE:
			/* Sequence layout: length word, then data pointer. */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
			output->u.sequence.nested_type = lttng_kernel_get_type_sequence(ax->u.ptr.field->type)->elem_type;
			break;
		case OBJECT_TYPE_ARRAY:
			/* Skip count (unsigned long) */
			output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
			output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
			output->u.sequence.nr_elem = lttng_kernel_get_type_array(ax->u.ptr.field->type)->length;
			output->u.sequence.nested_type = lttng_kernel_get_type_array(ax->u.ptr.field->type)->elem_type;
			break;
		case OBJECT_TYPE_SIGNED_ENUM:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
			output->u.s = ax->u.v;
			break;
		case OBJECT_TYPE_UNSIGNED_ENUM:
			ret = dynamic_load_field(ax);
			if (ret)
				return ret;
			output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
			output->u.u = ax->u.v;
			break;
		case OBJECT_TYPE_STRUCT:
		case OBJECT_TYPE_VARIANT:
		default:
			return -EINVAL;
		}

		break;
	case REG_STAR_GLOB_STRING:
	case REG_TYPE_UNKNOWN:
	default:
		return -EINVAL;
	}

	return 0;
}
718
#ifdef DEBUG

#define DBG_USER_STR_CUTOFF 32

/*
 * In debug mode, print a user-space string register, truncated to
 * DBG_USER_STR_CUTOFF bytes with a "[...]" marker when cut short.
 */
static inline
void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
{
	char buf[DBG_USER_STR_CUTOFF];
	size_t i = 0;
	char c;

	/* get_char() may touch user memory: disable page faults. */
	pagefault_disable();
	do {
		c = get_char(user_str_reg, i);
		buf[i] = c;
		i++;
	} while (c != '\0' && i < sizeof(buf));
	pagefault_enable();

	buf[sizeof(buf) - 1] = '\0';
	dbg_printk("load field ref user string: '%s%s'\n", buf,
		c != '\0' ? "[...]" : "");
}
#else
static inline
void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
{
}
#endif
751
752 /*
753 * Return LTTNG_KERNEL_BYTECODE_INTERPRETER_OK on success.
754 * Return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR on error.
755 *
756 * For FILTER bytecode: expect a struct lttng_kernel_bytecode_filter_ctx *
757 * as @ctx argument.
758 * For CAPTURE bytecode: expect a struct lttng_interpreter_output *
759 * as @ctx argument.
760 */
761 int lttng_bytecode_interpret(struct lttng_kernel_bytecode_runtime *kernel_bytecode,
762 const char *interpreter_stack_data,
763 struct lttng_kernel_probe_ctx *lttng_probe_ctx,
764 void *caller_ctx)
765 {
766 struct bytecode_runtime *bytecode = container_of(kernel_bytecode, struct bytecode_runtime, p);
767 void *pc, *next_pc, *start_pc;
768 int ret = -EINVAL;
769 uint64_t retval = 0;
770 struct estack _stack;
771 struct estack *stack = &_stack;
772 register int64_t ax = 0, bx = 0;
773 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
774 register int top = INTERPRETER_STACK_EMPTY;
775 #ifndef INTERPRETER_USE_SWITCH
776 static void *dispatch[NR_BYTECODE_OPS] = {
777 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
778
779 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
780
781 /* binary */
782 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
783 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
784 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
785 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
786 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
787 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
788 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
789 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
790 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
791 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
792
793 /* binary comparators */
794 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
795 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
796 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
797 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
798 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
799 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
800
801 /* string binary comparator */
802 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
803 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
804 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
805 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
806 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
807 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
808
809 /* globbing pattern binary comparator */
810 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
811 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
812
813 /* s64 binary comparator */
814 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
815 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
816 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
817 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
818 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
819 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
820
821 /* double binary comparator */
822 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
823 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
824 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
825 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
826 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
827 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
828
829 /* Mixed S64-double binary comparators */
830 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
831 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
832 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
833 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
834 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
835 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
836
837 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
838 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
839 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
840 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
841 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
842 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
843
844 /* unary */
845 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
846 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
847 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
848 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
849 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
850 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
851 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
852 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
853 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
854
855 /* logical */
856 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
857 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
858
859 /* load field ref */
860 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
861 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
862 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
863 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
864 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
865
866 /* load from immediate operand */
867 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
868 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
869 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
870 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
871
872 /* cast */
873 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
874 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
875 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
876
877 /* get context ref */
878 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
879 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
880 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
881 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
882
883 /* load userspace field ref */
884 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING,
885 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE,
886
887 /* Instructions for recursive traversal through composed types. */
888 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
889 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
890 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
891
892 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
893 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
894 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
895 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
896
897 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
898 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
899 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
900 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
901 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
902 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
903 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
904 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
905 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
906 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
907 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
908 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
909
910 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
911
912 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
913 };
914 #endif /* #ifndef INTERPRETER_USE_SWITCH */
915
916 START_OP
917
918 OP(BYTECODE_OP_UNKNOWN):
919 OP(BYTECODE_OP_LOAD_FIELD_REF):
920 OP(BYTECODE_OP_GET_CONTEXT_REF):
921 #ifdef INTERPRETER_USE_SWITCH
922 default:
923 #endif /* INTERPRETER_USE_SWITCH */
924 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
925 (unsigned int) *(bytecode_opcode_t *) pc);
926 ret = -EINVAL;
927 goto end;
928
929 OP(BYTECODE_OP_RETURN):
930 /* LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR or LTTNG_KERNEL_BYTECODE_INTERPRETER_OK */
931 switch (estack_ax_t) {
932 case REG_S64:
933 case REG_U64:
934 retval = !!estack_ax_v;
935 break;
936 case REG_DOUBLE:
937 case REG_STRING:
938 case REG_PTR:
939 if (kernel_bytecode->type != LTTNG_KERNEL_BYTECODE_TYPE_CAPTURE) {
940 ret = -EINVAL;
941 goto end;
942 }
943 retval = 0;
944 break;
945 case REG_STAR_GLOB_STRING:
946 case REG_TYPE_UNKNOWN:
947 ret = -EINVAL;
948 goto end;
949 }
950 ret = 0;
951 goto end;
952
953 OP(BYTECODE_OP_RETURN_S64):
954 /* LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR or LTTNG_KERNEL_BYTECODE_INTERPRETER_OK */
955 retval = !!estack_ax_v;
956 ret = 0;
957 goto end;
958
959 /* binary */
960 OP(BYTECODE_OP_MUL):
961 OP(BYTECODE_OP_DIV):
962 OP(BYTECODE_OP_MOD):
963 OP(BYTECODE_OP_PLUS):
964 OP(BYTECODE_OP_MINUS):
965 printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
966 (unsigned int) *(bytecode_opcode_t *) pc);
967 ret = -EINVAL;
968 goto end;
969
970 OP(BYTECODE_OP_EQ):
971 OP(BYTECODE_OP_NE):
972 OP(BYTECODE_OP_GT):
973 OP(BYTECODE_OP_LT):
974 OP(BYTECODE_OP_GE):
975 OP(BYTECODE_OP_LE):
976 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
977 (unsigned int) *(bytecode_opcode_t *) pc);
978 ret = -EINVAL;
979 goto end;
980
981 OP(BYTECODE_OP_EQ_STRING):
982 {
983 int res;
984
985 res = (stack_strcmp(stack, top, "==") == 0);
986 estack_pop(stack, top, ax, bx, ax_t, bx_t);
987 estack_ax_v = res;
988 estack_ax_t = REG_S64;
989 next_pc += sizeof(struct binary_op);
990 PO;
991 }
992 OP(BYTECODE_OP_NE_STRING):
993 {
994 int res;
995
996 res = (stack_strcmp(stack, top, "!=") != 0);
997 estack_pop(stack, top, ax, bx, ax_t, bx_t);
998 estack_ax_v = res;
999 estack_ax_t = REG_S64;
1000 next_pc += sizeof(struct binary_op);
1001 PO;
1002 }
1003 OP(BYTECODE_OP_GT_STRING):
1004 {
1005 int res;
1006
1007 res = (stack_strcmp(stack, top, ">") > 0);
1008 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1009 estack_ax_v = res;
1010 estack_ax_t = REG_S64;
1011 next_pc += sizeof(struct binary_op);
1012 PO;
1013 }
1014 OP(BYTECODE_OP_LT_STRING):
1015 {
1016 int res;
1017
1018 res = (stack_strcmp(stack, top, "<") < 0);
1019 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1020 estack_ax_v = res;
1021 estack_ax_t = REG_S64;
1022 next_pc += sizeof(struct binary_op);
1023 PO;
1024 }
1025 OP(BYTECODE_OP_GE_STRING):
1026 {
1027 int res;
1028
1029 res = (stack_strcmp(stack, top, ">=") >= 0);
1030 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1031 estack_ax_v = res;
1032 estack_ax_t = REG_S64;
1033 next_pc += sizeof(struct binary_op);
1034 PO;
1035 }
1036 OP(BYTECODE_OP_LE_STRING):
1037 {
1038 int res;
1039
1040 res = (stack_strcmp(stack, top, "<=") <= 0);
1041 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1042 estack_ax_v = res;
1043 estack_ax_t = REG_S64;
1044 next_pc += sizeof(struct binary_op);
1045 PO;
1046 }
1047
1048 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
1049 {
1050 int res;
1051
1052 res = (stack_star_glob_match(stack, top, "==") == 0);
1053 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1054 estack_ax_v = res;
1055 estack_ax_t = REG_S64;
1056 next_pc += sizeof(struct binary_op);
1057 PO;
1058 }
1059 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1060 {
1061 int res;
1062
1063 res = (stack_star_glob_match(stack, top, "!=") != 0);
1064 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1065 estack_ax_v = res;
1066 estack_ax_t = REG_S64;
1067 next_pc += sizeof(struct binary_op);
1068 PO;
1069 }
1070
1071 OP(BYTECODE_OP_EQ_S64):
1072 {
1073 int res;
1074
1075 res = (estack_bx_v == estack_ax_v);
1076 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1077 estack_ax_v = res;
1078 estack_ax_t = REG_S64;
1079 next_pc += sizeof(struct binary_op);
1080 PO;
1081 }
1082 OP(BYTECODE_OP_NE_S64):
1083 {
1084 int res;
1085
1086 res = (estack_bx_v != estack_ax_v);
1087 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1088 estack_ax_v = res;
1089 estack_ax_t = REG_S64;
1090 next_pc += sizeof(struct binary_op);
1091 PO;
1092 }
1093 OP(BYTECODE_OP_GT_S64):
1094 {
1095 int res;
1096
1097 res = (estack_bx_v > estack_ax_v);
1098 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1099 estack_ax_v = res;
1100 estack_ax_t = REG_S64;
1101 next_pc += sizeof(struct binary_op);
1102 PO;
1103 }
1104 OP(BYTECODE_OP_LT_S64):
1105 {
1106 int res;
1107
1108 res = (estack_bx_v < estack_ax_v);
1109 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1110 estack_ax_v = res;
1111 estack_ax_t = REG_S64;
1112 next_pc += sizeof(struct binary_op);
1113 PO;
1114 }
1115 OP(BYTECODE_OP_GE_S64):
1116 {
1117 int res;
1118
1119 res = (estack_bx_v >= estack_ax_v);
1120 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1121 estack_ax_v = res;
1122 estack_ax_t = REG_S64;
1123 next_pc += sizeof(struct binary_op);
1124 PO;
1125 }
1126 OP(BYTECODE_OP_LE_S64):
1127 {
1128 int res;
1129
1130 res = (estack_bx_v <= estack_ax_v);
1131 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1132 estack_ax_v = res;
1133 estack_ax_t = REG_S64;
1134 next_pc += sizeof(struct binary_op);
1135 PO;
1136 }
1137
1138 OP(BYTECODE_OP_EQ_DOUBLE):
1139 OP(BYTECODE_OP_NE_DOUBLE):
1140 OP(BYTECODE_OP_GT_DOUBLE):
1141 OP(BYTECODE_OP_LT_DOUBLE):
1142 OP(BYTECODE_OP_GE_DOUBLE):
1143 OP(BYTECODE_OP_LE_DOUBLE):
1144 {
1145 BUG_ON(1);
1146 PO;
1147 }
1148
1149 /* Mixed S64-double binary comparators */
1150 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1151 OP(BYTECODE_OP_NE_DOUBLE_S64):
1152 OP(BYTECODE_OP_GT_DOUBLE_S64):
1153 OP(BYTECODE_OP_LT_DOUBLE_S64):
1154 OP(BYTECODE_OP_GE_DOUBLE_S64):
1155 OP(BYTECODE_OP_LE_DOUBLE_S64):
1156 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1157 OP(BYTECODE_OP_NE_S64_DOUBLE):
1158 OP(BYTECODE_OP_GT_S64_DOUBLE):
1159 OP(BYTECODE_OP_LT_S64_DOUBLE):
1160 OP(BYTECODE_OP_GE_S64_DOUBLE):
1161 OP(BYTECODE_OP_LE_S64_DOUBLE):
1162 {
1163 BUG_ON(1);
1164 PO;
1165 }
1166 OP(BYTECODE_OP_BIT_RSHIFT):
1167 {
1168 int64_t res;
1169
1170 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1171 ret = -EINVAL;
1172 goto end;
1173 }
1174
1175 /* Catch undefined behavior. */
1176 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1177 ret = -EINVAL;
1178 goto end;
1179 }
1180 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1181 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1182 estack_ax_v = res;
1183 estack_ax_t = REG_U64;
1184 next_pc += sizeof(struct binary_op);
1185 PO;
1186 }
1187 OP(BYTECODE_OP_BIT_LSHIFT):
1188 {
1189 int64_t res;
1190
1191 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1192 ret = -EINVAL;
1193 goto end;
1194 }
1195
1196 /* Catch undefined behavior. */
1197 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1198 ret = -EINVAL;
1199 goto end;
1200 }
1201 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1202 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1203 estack_ax_v = res;
1204 estack_ax_t = REG_U64;
1205 next_pc += sizeof(struct binary_op);
1206 PO;
1207 }
1208 OP(BYTECODE_OP_BIT_AND):
1209 {
1210 int64_t res;
1211
1212 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1213 ret = -EINVAL;
1214 goto end;
1215 }
1216
1217 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1218 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1219 estack_ax_v = res;
1220 estack_ax_t = REG_U64;
1221 next_pc += sizeof(struct binary_op);
1222 PO;
1223 }
1224 OP(BYTECODE_OP_BIT_OR):
1225 {
1226 int64_t res;
1227
1228 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1229 ret = -EINVAL;
1230 goto end;
1231 }
1232
1233 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1234 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1235 estack_ax_v = res;
1236 estack_ax_t = REG_U64;
1237 next_pc += sizeof(struct binary_op);
1238 PO;
1239 }
1240 OP(BYTECODE_OP_BIT_XOR):
1241 {
1242 int64_t res;
1243
1244 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1245 ret = -EINVAL;
1246 goto end;
1247 }
1248
1249 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1250 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1251 estack_ax_v = res;
1252 estack_ax_t = REG_U64;
1253 next_pc += sizeof(struct binary_op);
1254 PO;
1255 }
1256
1257 /* unary */
1258 OP(BYTECODE_OP_UNARY_PLUS):
1259 OP(BYTECODE_OP_UNARY_MINUS):
1260 OP(BYTECODE_OP_UNARY_NOT):
1261 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1262 (unsigned int) *(bytecode_opcode_t *) pc);
1263 ret = -EINVAL;
1264 goto end;
1265
1266
1267 OP(BYTECODE_OP_UNARY_BIT_NOT):
1268 {
1269 estack_ax_v = ~(uint64_t) estack_ax_v;
1270 estack_ax_t = REG_S64;
1271 next_pc += sizeof(struct unary_op);
1272 PO;
1273 }
1274
1275 OP(BYTECODE_OP_UNARY_PLUS_S64):
1276 {
1277 next_pc += sizeof(struct unary_op);
1278 PO;
1279 }
1280 OP(BYTECODE_OP_UNARY_MINUS_S64):
1281 {
1282 estack_ax_v = -estack_ax_v;
1283 estack_ax_t = REG_S64;
1284 next_pc += sizeof(struct unary_op);
1285 PO;
1286 }
1287 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1288 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1289 {
1290 BUG_ON(1);
1291 PO;
1292 }
1293 OP(BYTECODE_OP_UNARY_NOT_S64):
1294 {
1295 estack_ax_v = !estack_ax_v;
1296 estack_ax_t = REG_S64;
1297 next_pc += sizeof(struct unary_op);
1298 PO;
1299 }
1300 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1301 {
1302 BUG_ON(1);
1303 PO;
1304 }
1305
1306 /* logical */
1307 OP(BYTECODE_OP_AND):
1308 {
1309 struct logical_op *insn = (struct logical_op *) pc;
1310
1311 /* If AX is 0, skip and evaluate to 0 */
1312 if (unlikely(estack_ax_v == 0)) {
1313 dbg_printk("Jumping to bytecode offset %u\n",
1314 (unsigned int) insn->skip_offset);
1315 next_pc = start_pc + insn->skip_offset;
1316 } else {
1317 /* Pop 1 when jump not taken */
1318 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1319 next_pc += sizeof(struct logical_op);
1320 }
1321 PO;
1322 }
1323 OP(BYTECODE_OP_OR):
1324 {
1325 struct logical_op *insn = (struct logical_op *) pc;
1326
1327 /* If AX is nonzero, skip and evaluate to 1 */
1328
1329 if (unlikely(estack_ax_v != 0)) {
1330 estack_ax_v = 1;
1331 dbg_printk("Jumping to bytecode offset %u\n",
1332 (unsigned int) insn->skip_offset);
1333 next_pc = start_pc + insn->skip_offset;
1334 } else {
1335 /* Pop 1 when jump not taken */
1336 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1337 next_pc += sizeof(struct logical_op);
1338 }
1339 PO;
1340 }
1341
1342
1343 /* load field ref */
1344 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1345 {
1346 struct load_op *insn = (struct load_op *) pc;
1347 struct field_ref *ref = (struct field_ref *) insn->data;
1348
1349 dbg_printk("load field ref offset %u type string\n",
1350 ref->offset);
1351 estack_push(stack, top, ax, bx, ax_t, bx_t);
1352 estack_ax(stack, top)->u.s.str =
1353 *(const char * const *) &interpreter_stack_data[ref->offset];
1354 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1355 dbg_printk("Bytecode warning: loading a NULL string.\n");
1356 ret = -EINVAL;
1357 goto end;
1358 }
1359 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1360 estack_ax(stack, top)->u.s.literal_type =
1361 ESTACK_STRING_LITERAL_TYPE_NONE;
1362 estack_ax(stack, top)->u.s.user = 0;
1363 estack_ax(stack, top)->type = REG_STRING;
1364 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1365 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1366 PO;
1367 }
1368
1369 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1370 {
1371 struct load_op *insn = (struct load_op *) pc;
1372 struct field_ref *ref = (struct field_ref *) insn->data;
1373
1374 dbg_printk("load field ref offset %u type sequence\n",
1375 ref->offset);
1376 estack_push(stack, top, ax, bx, ax_t, bx_t);
1377 estack_ax(stack, top)->u.s.seq_len =
1378 *(unsigned long *) &interpreter_stack_data[ref->offset];
1379 estack_ax(stack, top)->u.s.str =
1380 *(const char **) (&interpreter_stack_data[ref->offset
1381 + sizeof(unsigned long)]);
1382 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1383 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1384 ret = -EINVAL;
1385 goto end;
1386 }
1387 estack_ax(stack, top)->u.s.literal_type =
1388 ESTACK_STRING_LITERAL_TYPE_NONE;
1389 estack_ax(stack, top)->u.s.user = 0;
1390 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1391 PO;
1392 }
1393
1394 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
1395 {
1396 struct load_op *insn = (struct load_op *) pc;
1397 struct field_ref *ref = (struct field_ref *) insn->data;
1398
1399 dbg_printk("load field ref offset %u type s64\n",
1400 ref->offset);
1401 estack_push(stack, top, ax, bx, ax_t, bx_t);
1402 estack_ax_v =
1403 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
1404 estack_ax_t = REG_S64;
1405 dbg_printk("ref load s64 %lld\n",
1406 (long long) estack_ax_v);
1407 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1408 PO;
1409 }
1410
1411 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
1412 {
1413 BUG_ON(1);
1414 PO;
1415 }
1416
1417 /* load from immediate operand */
1418 OP(BYTECODE_OP_LOAD_STRING):
1419 {
1420 struct load_op *insn = (struct load_op *) pc;
1421
1422 dbg_printk("load string %s\n", insn->data);
1423 estack_push(stack, top, ax, bx, ax_t, bx_t);
1424 estack_ax(stack, top)->u.s.str = insn->data;
1425 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1426 estack_ax(stack, top)->u.s.literal_type =
1427 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1428 estack_ax(stack, top)->u.s.user = 0;
1429 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1430 PO;
1431 }
1432
1433 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
1434 {
1435 struct load_op *insn = (struct load_op *) pc;
1436
1437 dbg_printk("load globbing pattern %s\n", insn->data);
1438 estack_push(stack, top, ax, bx, ax_t, bx_t);
1439 estack_ax(stack, top)->u.s.str = insn->data;
1440 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1441 estack_ax(stack, top)->u.s.literal_type =
1442 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1443 estack_ax(stack, top)->u.s.user = 0;
1444 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1445 PO;
1446 }
1447
1448 OP(BYTECODE_OP_LOAD_S64):
1449 {
1450 struct load_op *insn = (struct load_op *) pc;
1451
1452 estack_push(stack, top, ax, bx, ax_t, bx_t);
1453 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1454 estack_ax_t = REG_S64;
1455 dbg_printk("load s64 %lld\n",
1456 (long long) estack_ax_v);
1457 next_pc += sizeof(struct load_op)
1458 + sizeof(struct literal_numeric);
1459 PO;
1460 }
1461
1462 OP(BYTECODE_OP_LOAD_DOUBLE):
1463 {
1464 BUG_ON(1);
1465 PO;
1466 }
1467
1468 /* cast */
1469 OP(BYTECODE_OP_CAST_TO_S64):
1470 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1471 (unsigned int) *(bytecode_opcode_t *) pc);
1472 ret = -EINVAL;
1473 goto end;
1474
1475 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
1476 {
1477 BUG_ON(1);
1478 PO;
1479 }
1480
1481 OP(BYTECODE_OP_CAST_NOP):
1482 {
1483 next_pc += sizeof(struct cast_op);
1484 PO;
1485 }
1486
1487 /* get context ref */
1488 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
1489 {
1490 struct load_op *insn = (struct load_op *) pc;
1491 struct field_ref *ref = (struct field_ref *) insn->data;
1492 struct lttng_kernel_ctx_field *ctx_field;
1493 struct lttng_ctx_value v;
1494
1495 dbg_printk("get context ref offset %u type string\n",
1496 ref->offset);
1497 ctx_field = &lttng_static_ctx->fields[ref->offset];
1498 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
1499 estack_push(stack, top, ax, bx, ax_t, bx_t);
1500 estack_ax(stack, top)->u.s.str = v.u.str;
1501 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1502 dbg_printk("Bytecode warning: loading a NULL string.\n");
1503 ret = -EINVAL;
1504 goto end;
1505 }
1506 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1507 estack_ax(stack, top)->u.s.literal_type =
1508 ESTACK_STRING_LITERAL_TYPE_NONE;
1509 estack_ax(stack, top)->u.s.user = 0;
1510 estack_ax(stack, top)->type = REG_STRING;
1511 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1512 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1513 PO;
1514 }
1515
1516 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
1517 {
1518 struct load_op *insn = (struct load_op *) pc;
1519 struct field_ref *ref = (struct field_ref *) insn->data;
1520 struct lttng_kernel_ctx_field *ctx_field;
1521 struct lttng_ctx_value v;
1522
1523 dbg_printk("get context ref offset %u type s64\n",
1524 ref->offset);
1525 ctx_field = &lttng_static_ctx->fields[ref->offset];
1526 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
1527 estack_push(stack, top, ax, bx, ax_t, bx_t);
1528 estack_ax_v = v.u.s64;
1529 estack_ax_t = REG_S64;
1530 dbg_printk("ref get context s64 %lld\n",
1531 (long long) estack_ax_v);
1532 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1533 PO;
1534 }
1535
1536 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
1537 {
1538 BUG_ON(1);
1539 PO;
1540 }
1541
1542 /* load userspace field ref */
1543 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING):
1544 {
1545 struct load_op *insn = (struct load_op *) pc;
1546 struct field_ref *ref = (struct field_ref *) insn->data;
1547
1548 dbg_printk("load field ref offset %u type user string\n",
1549 ref->offset);
1550 estack_push(stack, top, ax, bx, ax_t, bx_t);
1551 estack_ax(stack, top)->u.s.user_str =
1552 *(const char * const *) &interpreter_stack_data[ref->offset];
1553 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1554 dbg_printk("Bytecode warning: loading a NULL string.\n");
1555 ret = -EINVAL;
1556 goto end;
1557 }
1558 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1559 estack_ax(stack, top)->u.s.literal_type =
1560 ESTACK_STRING_LITERAL_TYPE_NONE;
1561 estack_ax(stack, top)->u.s.user = 1;
1562 estack_ax(stack, top)->type = REG_STRING;
1563 dbg_load_ref_user_str_printk(estack_ax(stack, top));
1564 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1565 PO;
1566 }
1567
1568 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1569 {
1570 struct load_op *insn = (struct load_op *) pc;
1571 struct field_ref *ref = (struct field_ref *) insn->data;
1572
1573 dbg_printk("load field ref offset %u type user sequence\n",
1574 ref->offset);
1575 estack_push(stack, top, ax, bx, ax_t, bx_t);
1576 estack_ax(stack, top)->u.s.seq_len =
1577 *(unsigned long *) &interpreter_stack_data[ref->offset];
1578 estack_ax(stack, top)->u.s.user_str =
1579 *(const char **) (&interpreter_stack_data[ref->offset
1580 + sizeof(unsigned long)]);
1581 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1582 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1583 ret = -EINVAL;
1584 goto end;
1585 }
1586 estack_ax(stack, top)->u.s.literal_type =
1587 ESTACK_STRING_LITERAL_TYPE_NONE;
1588 estack_ax(stack, top)->u.s.user = 1;
1589 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1590 PO;
1591 }
1592
1593 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
1594 {
1595 dbg_printk("op get context root\n");
1596 estack_push(stack, top, ax, bx, ax_t, bx_t);
1597 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1598 /* "field" only needed for variants. */
1599 estack_ax(stack, top)->u.ptr.field = NULL;
1600 estack_ax(stack, top)->type = REG_PTR;
1601 next_pc += sizeof(struct load_op);
1602 PO;
1603 }
1604
1605 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
1606 {
1607 BUG_ON(1);
1608 PO;
1609 }
1610
1611 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
1612 {
1613 dbg_printk("op get app payload root\n");
1614 estack_push(stack, top, ax, bx, ax_t, bx_t);
1615 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1616 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
1617 /* "field" only needed for variants. */
1618 estack_ax(stack, top)->u.ptr.field = NULL;
1619 estack_ax(stack, top)->type = REG_PTR;
1620 next_pc += sizeof(struct load_op);
1621 PO;
1622 }
1623
1624 OP(BYTECODE_OP_GET_SYMBOL):
1625 {
1626 dbg_printk("op get symbol\n");
1627 switch (estack_ax(stack, top)->u.ptr.type) {
1628 case LOAD_OBJECT:
1629 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1630 ret = -EINVAL;
1631 goto end;
1632 case LOAD_ROOT_CONTEXT:
1633 case LOAD_ROOT_APP_CONTEXT:
1634 case LOAD_ROOT_PAYLOAD:
1635 /*
1636 * symbol lookup is performed by
1637 * specialization.
1638 */
1639 ret = -EINVAL;
1640 goto end;
1641 }
1642 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1643 PO;
1644 }
1645
1646 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
1647 {
1648 /*
1649 * Used for first variant encountered in a
1650 * traversal. Variants are not implemented yet.
1651 */
1652 ret = -EINVAL;
1653 goto end;
1654 }
1655
1656 OP(BYTECODE_OP_GET_INDEX_U16):
1657 {
1658 struct load_op *insn = (struct load_op *) pc;
1659 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1660
1661 dbg_printk("op get index u16\n");
1662 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1663 if (ret)
1664 goto end;
1665 estack_ax_v = estack_ax(stack, top)->u.v;
1666 estack_ax_t = estack_ax(stack, top)->type;
1667 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1668 PO;
1669 }
1670
1671 OP(BYTECODE_OP_GET_INDEX_U64):
1672 {
1673 struct load_op *insn = (struct load_op *) pc;
1674 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1675
1676 dbg_printk("op get index u64\n");
1677 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1678 if (ret)
1679 goto end;
1680 estack_ax_v = estack_ax(stack, top)->u.v;
1681 estack_ax_t = estack_ax(stack, top)->type;
1682 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1683 PO;
1684 }
1685
1686 OP(BYTECODE_OP_LOAD_FIELD):
1687 {
1688 dbg_printk("op load field\n");
1689 ret = dynamic_load_field(estack_ax(stack, top));
1690 if (ret)
1691 goto end;
1692 estack_ax_v = estack_ax(stack, top)->u.v;
1693 estack_ax_t = estack_ax(stack, top)->type;
1694 next_pc += sizeof(struct load_op);
1695 PO;
1696 }
1697
1698 OP(BYTECODE_OP_LOAD_FIELD_S8):
1699 {
1700 dbg_printk("op load field s8\n");
1701
1702 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1703 estack_ax_t = REG_S64;
1704 next_pc += sizeof(struct load_op);
1705 PO;
1706 }
1707 OP(BYTECODE_OP_LOAD_FIELD_S16):
1708 {
1709 dbg_printk("op load field s16\n");
1710
1711 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1712 estack_ax_t = REG_S64;
1713 next_pc += sizeof(struct load_op);
1714 PO;
1715 }
1716 OP(BYTECODE_OP_LOAD_FIELD_S32):
1717 {
1718 dbg_printk("op load field s32\n");
1719
1720 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1721 estack_ax_t = REG_S64;
1722 next_pc += sizeof(struct load_op);
1723 PO;
1724 }
1725 OP(BYTECODE_OP_LOAD_FIELD_S64):
1726 {
1727 dbg_printk("op load field s64\n");
1728
1729 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1730 estack_ax_t = REG_S64;
1731 next_pc += sizeof(struct load_op);
1732 PO;
1733 }
1734 OP(BYTECODE_OP_LOAD_FIELD_U8):
1735 {
1736 dbg_printk("op load field u8\n");
1737
1738 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1739 estack_ax_t = REG_S64;
1740 next_pc += sizeof(struct load_op);
1741 PO;
1742 }
1743 OP(BYTECODE_OP_LOAD_FIELD_U16):
1744 {
1745 dbg_printk("op load field u16\n");
1746
1747 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1748 estack_ax_t = REG_S64;
1749 next_pc += sizeof(struct load_op);
1750 PO;
1751 }
1752 OP(BYTECODE_OP_LOAD_FIELD_U32):
1753 {
1754 dbg_printk("op load field u32\n");
1755
1756 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1757 estack_ax_t = REG_S64;
1758 next_pc += sizeof(struct load_op);
1759 PO;
1760 }
1761 OP(BYTECODE_OP_LOAD_FIELD_U64):
1762 {
1763 dbg_printk("op load field u64\n");
1764
1765 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1766 estack_ax_t = REG_S64;
1767 next_pc += sizeof(struct load_op);
1768 PO;
1769 }
1770 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
1771 {
1772 ret = -EINVAL;
1773 goto end;
1774 }
1775
1776 OP(BYTECODE_OP_LOAD_FIELD_STRING):
1777 {
1778 const char *str;
1779
1780 dbg_printk("op load field string\n");
1781 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1782 estack_ax(stack, top)->u.s.str = str;
1783 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1784 dbg_printk("Bytecode warning: loading a NULL string.\n");
1785 ret = -EINVAL;
1786 goto end;
1787 }
1788 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1789 estack_ax(stack, top)->u.s.literal_type =
1790 ESTACK_STRING_LITERAL_TYPE_NONE;
1791 estack_ax(stack, top)->type = REG_STRING;
1792 next_pc += sizeof(struct load_op);
1793 PO;
1794 }
1795
1796 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
1797 {
1798 const char *ptr;
1799
1800 dbg_printk("op load field string sequence\n");
1801 ptr = estack_ax(stack, top)->u.ptr.ptr;
1802 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1803 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1804 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1805 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1806 ret = -EINVAL;
1807 goto end;
1808 }
1809 estack_ax(stack, top)->u.s.literal_type =
1810 ESTACK_STRING_LITERAL_TYPE_NONE;
1811 estack_ax(stack, top)->type = REG_STRING;
1812 next_pc += sizeof(struct load_op);
1813 PO;
1814 }
1815
1816 END_OP
1817 end:
1818 /* No need to prepare output if an error occurred. */
1819 if (ret)
1820 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
1821
1822 /* Prepare output. */
1823 switch (kernel_bytecode->type) {
1824 case LTTNG_KERNEL_BYTECODE_TYPE_FILTER:
1825 {
1826 struct lttng_kernel_bytecode_filter_ctx *filter_ctx =
1827 (struct lttng_kernel_bytecode_filter_ctx *) caller_ctx;
1828 if (retval)
1829 filter_ctx->result = LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT;
1830 else
1831 filter_ctx->result = LTTNG_KERNEL_BYTECODE_FILTER_REJECT;
1832 break;
1833 }
1834 case LTTNG_KERNEL_BYTECODE_TYPE_CAPTURE:
1835 ret = lttng_bytecode_interpret_format_output(estack_ax(stack, top),
1836 (struct lttng_interpreter_output *) caller_ctx);
1837 break;
1838 default:
1839 ret = -EINVAL;
1840 break;
1841 }
1842 if (ret)
1843 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
1844 else
1845 return LTTNG_KERNEL_BYTECODE_INTERPRETER_OK;
1846 }
1847 LTTNG_STACK_FRAME_NON_STANDARD(lttng_bytecode_interpret);
1848
1849 /*
1850 * Return LTTNG_KERNEL_EVENT_FILTER_ACCEPT or LTTNG_KERNEL_EVENT_FILTER_REJECT.
1851 */
1852 int lttng_kernel_interpret_event_filter(const struct lttng_kernel_event_common *event,
1853 const char *interpreter_stack_data,
1854 struct lttng_kernel_probe_ctx *probe_ctx,
1855 void *event_filter_ctx __attribute__((unused)))
1856 {
1857 struct lttng_kernel_bytecode_runtime *filter_bc_runtime;
1858 struct list_head *filter_bytecode_runtime_head = &event->priv->filter_bytecode_runtime_head;
1859 struct lttng_kernel_bytecode_filter_ctx bytecode_filter_ctx;
1860 bool filter_record = false;
1861
1862 list_for_each_entry_rcu(filter_bc_runtime, filter_bytecode_runtime_head, node) {
1863 if (likely(filter_bc_runtime->interpreter_func(filter_bc_runtime,
1864 interpreter_stack_data, probe_ctx, &bytecode_filter_ctx) == LTTNG_KERNEL_BYTECODE_INTERPRETER_OK)) {
1865 if (unlikely(bytecode_filter_ctx.result == LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT)) {
1866 filter_record = true;
1867 break;
1868 }
1869 }
1870 }
1871 if (filter_record)
1872 return LTTNG_KERNEL_EVENT_FILTER_ACCEPT;
1873 else
1874 return LTTNG_KERNEL_EVENT_FILTER_REJECT;
1875 }
1876
1877 #undef START_OP
1878 #undef OP
1879 #undef PO
1880 #undef END_OP