1 /* SPDX-License-Identifier: MIT
3 * lttng-filter-interpreter.c
5 * LTTng modules filter interpreter.
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
10 #include <linux/uaccess.h>
11 #include <linux/frame.h>
12 #include <linux/limits.h>
13 #include <linux/swab.h>
15 #include <lttng-filter.h>
16 #include <lttng-string-utils.h>
/*
 * NOTE(review): the interpreter uses a computed-goto dispatch table (see the
 * &&LABEL_* initializers below), which objtool cannot validate; mark the
 * function as non-standard to silence stack-frame validation warnings.
 */
STACK_FRAME_NON_STANDARD(lttng_filter_interpret_bytecode);
21 * get_char should be called with page fault handler disabled if it is expected
22 * to handle user-space read.
25 char get_char(struct estack_entry
*reg
, size_t offset
)
27 if (unlikely(offset
>= reg
->u
.s
.seq_len
))
32 /* Handle invalid access as end of string. */
33 if (unlikely(!access_ok(reg
->u
.s
.user_str
+ offset
,
36 /* Handle fault (nonzero return value) as end of string. */
37 if (unlikely(__copy_from_user_inatomic(&c
,
38 reg
->u
.s
.user_str
+ offset
,
43 return reg
->u
.s
.str
[offset
];
49 * -2: unknown escape char.
53 int parse_char(struct estack_entry
*reg
, char *c
, size_t *offset
)
58 *c
= get_char(reg
, *offset
);
74 char get_char_at_cb(size_t at
, void *data
)
76 return get_char(data
, at
);
80 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
82 bool has_user
= false;
85 struct estack_entry
*pattern_reg
;
86 struct estack_entry
*candidate_reg
;
88 if (estack_bx(stack
, top
)->u
.s
.user
89 || estack_ax(stack
, top
)->u
.s
.user
) {
96 /* Find out which side is the pattern vs. the candidate. */
97 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
98 pattern_reg
= estack_ax(stack
, top
);
99 candidate_reg
= estack_bx(stack
, top
);
101 pattern_reg
= estack_bx(stack
, top
);
102 candidate_reg
= estack_ax(stack
, top
);
105 /* Perform the match operation. */
106 result
= !strutils_star_glob_match_char_cb(get_char_at_cb
,
107 pattern_reg
, get_char_at_cb
, candidate_reg
);
117 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
119 size_t offset_bx
= 0, offset_ax
= 0;
120 int diff
, has_user
= 0;
123 if (estack_bx(stack
, top
)->u
.s
.user
124 || estack_ax(stack
, top
)->u
.s
.user
) {
134 char char_bx
, char_ax
;
136 char_bx
= get_char(estack_bx(stack
, top
), offset_bx
);
137 char_ax
= get_char(estack_ax(stack
, top
), offset_ax
);
139 if (unlikely(char_bx
== '\0')) {
140 if (char_ax
== '\0') {
144 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
145 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
146 ret
= parse_char(estack_ax(stack
, top
),
147 &char_ax
, &offset_ax
);
157 if (unlikely(char_ax
== '\0')) {
158 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
159 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
160 ret
= parse_char(estack_bx(stack
, top
),
161 &char_bx
, &offset_bx
);
170 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
171 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
172 ret
= parse_char(estack_bx(stack
, top
),
173 &char_bx
, &offset_bx
);
177 } else if (ret
== -2) {
180 /* else compare both char */
182 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
183 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
184 ret
= parse_char(estack_ax(stack
, top
),
185 &char_ax
, &offset_ax
);
189 } else if (ret
== -2) {
206 diff
= char_bx
- char_ax
;
219 uint64_t lttng_filter_false(void *filter_data
,
220 struct lttng_probe_ctx
*lttng_probe_ctx
,
221 const char *filter_stack_data
)
226 #ifdef INTERPRETER_USE_SWITCH
229 * Fallback for compilers that do not support taking address of labels.
233 start_pc = &bytecode->data[0]; \
234 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
236 dbg_printk("Executing op %s (%u)\n", \
237 lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
238 (unsigned int) *(filter_opcode_t *) pc); \
239 switch (*(filter_opcode_t *) pc) {
241 #define OP(name) case name
251 * Dispatch-table based interpreter.
255 start_pc = &bytecode->code[0]; \
256 pc = next_pc = start_pc; \
257 if (unlikely(pc - start_pc >= bytecode->len)) \
259 goto *dispatch[*(filter_opcode_t *) pc];
266 goto *dispatch[*(filter_opcode_t *) pc];
272 static int context_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
273 struct load_ptr
*ptr
,
277 struct lttng_ctx_field
*ctx_field
;
278 struct lttng_event_field
*field
;
279 union lttng_ctx_value v
;
281 ctx_field
= <tng_static_ctx
->fields
[idx
];
282 field
= &ctx_field
->event_field
;
283 ptr
->type
= LOAD_OBJECT
;
284 /* field is only used for types nested within variants. */
287 switch (field
->type
.atype
) {
289 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
290 if (field
->type
.u
.integer
.signedness
) {
291 ptr
->object_type
= OBJECT_TYPE_S64
;
293 ptr
->ptr
= &ptr
->u
.s64
;
295 ptr
->object_type
= OBJECT_TYPE_U64
;
296 ptr
->u
.u64
= v
.s64
; /* Cast. */
297 ptr
->ptr
= &ptr
->u
.u64
;
300 case atype_enum_nestable
:
302 const struct lttng_integer_type
*itype
=
303 &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
305 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
306 if (itype
->signedness
) {
307 ptr
->object_type
= OBJECT_TYPE_S64
;
309 ptr
->ptr
= &ptr
->u
.s64
;
311 ptr
->object_type
= OBJECT_TYPE_U64
;
312 ptr
->u
.u64
= v
.s64
; /* Cast. */
313 ptr
->ptr
= &ptr
->u
.u64
;
317 case atype_array_nestable
:
318 if (!lttng_is_bytewise_integer(field
->type
.u
.array_nestable
.elem_type
)) {
319 printk(KERN_WARNING
"Array nesting only supports integer types.\n");
322 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
323 printk(KERN_WARNING
"Only string arrays are supported for contexts.\n");
326 ptr
->object_type
= OBJECT_TYPE_STRING
;
327 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
330 case atype_sequence_nestable
:
331 if (!lttng_is_bytewise_integer(field
->type
.u
.sequence_nestable
.elem_type
)) {
332 printk(KERN_WARNING
"Sequence nesting only supports integer types.\n");
335 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
336 printk(KERN_WARNING
"Only string sequences are supported for contexts.\n");
339 ptr
->object_type
= OBJECT_TYPE_STRING
;
340 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
344 ptr
->object_type
= OBJECT_TYPE_STRING
;
345 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
348 case atype_struct_nestable
:
349 printk(KERN_WARNING
"Structure type cannot be loaded.\n");
351 case atype_variant_nestable
:
352 printk(KERN_WARNING
"Variant type cannot be loaded.\n");
355 printk(KERN_WARNING
"Unknown type: %d", (int) field
->type
.atype
);
361 static int dynamic_get_index(struct lttng_probe_ctx
*lttng_probe_ctx
,
362 struct bytecode_runtime
*runtime
,
363 uint64_t index
, struct estack_entry
*stack_top
)
366 const struct filter_get_index_data
*gid
;
369 * Types nested within variants need to perform dynamic lookup
370 * based on the field descriptions. LTTng-UST does not implement
373 if (stack_top
->u
.ptr
.field
)
375 gid
= (const struct filter_get_index_data
*) &runtime
->data
[index
];
376 switch (stack_top
->u
.ptr
.type
) {
378 switch (stack_top
->u
.ptr
.object_type
) {
379 case OBJECT_TYPE_ARRAY
:
383 WARN_ON_ONCE(gid
->offset
>= gid
->array_len
);
384 /* Skip count (unsigned long) */
385 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
386 ptr
= ptr
+ gid
->offset
;
387 stack_top
->u
.ptr
.ptr
= ptr
;
388 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
389 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
390 /* field is only used for types nested within variants. */
391 stack_top
->u
.ptr
.field
= NULL
;
394 case OBJECT_TYPE_SEQUENCE
:
399 ptr
= *(const char **) (stack_top
->u
.ptr
.ptr
+ sizeof(unsigned long));
400 ptr_seq_len
= *(unsigned long *) stack_top
->u
.ptr
.ptr
;
401 if (gid
->offset
>= gid
->elem
.len
* ptr_seq_len
) {
405 ptr
= ptr
+ gid
->offset
;
406 stack_top
->u
.ptr
.ptr
= ptr
;
407 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
408 stack_top
->u
.ptr
.rev_bo
= gid
->elem
.rev_bo
;
409 /* field is only used for types nested within variants. */
410 stack_top
->u
.ptr
.field
= NULL
;
413 case OBJECT_TYPE_STRUCT
:
414 printk(KERN_WARNING
"Nested structures are not supported yet.\n");
417 case OBJECT_TYPE_VARIANT
:
419 printk(KERN_WARNING
"Unexpected get index type %d",
420 (int) stack_top
->u
.ptr
.object_type
);
425 case LOAD_ROOT_CONTEXT
:
426 case LOAD_ROOT_APP_CONTEXT
: /* Fall-through */
428 ret
= context_get_index(lttng_probe_ctx
,
436 case LOAD_ROOT_PAYLOAD
:
437 stack_top
->u
.ptr
.ptr
+= gid
->offset
;
438 if (gid
->elem
.type
== OBJECT_TYPE_STRING
)
439 stack_top
->u
.ptr
.ptr
= *(const char * const *) stack_top
->u
.ptr
.ptr
;
440 stack_top
->u
.ptr
.object_type
= gid
->elem
.type
;
441 stack_top
->u
.ptr
.type
= LOAD_OBJECT
;
442 /* field is only used for types nested within variants. */
443 stack_top
->u
.ptr
.field
= NULL
;
452 static int dynamic_load_field(struct estack_entry
*stack_top
)
456 switch (stack_top
->u
.ptr
.type
) {
459 case LOAD_ROOT_CONTEXT
:
460 case LOAD_ROOT_APP_CONTEXT
:
461 case LOAD_ROOT_PAYLOAD
:
463 dbg_printk("Filter warning: cannot load root, missing field name.\n");
467 switch (stack_top
->u
.ptr
.object_type
) {
469 dbg_printk("op load field s8\n");
470 stack_top
->u
.v
= *(int8_t *) stack_top
->u
.ptr
.ptr
;
472 case OBJECT_TYPE_S16
:
476 dbg_printk("op load field s16\n");
477 tmp
= *(int16_t *) stack_top
->u
.ptr
.ptr
;
478 if (stack_top
->u
.ptr
.rev_bo
)
480 stack_top
->u
.v
= tmp
;
483 case OBJECT_TYPE_S32
:
487 dbg_printk("op load field s32\n");
488 tmp
= *(int32_t *) stack_top
->u
.ptr
.ptr
;
489 if (stack_top
->u
.ptr
.rev_bo
)
491 stack_top
->u
.v
= tmp
;
494 case OBJECT_TYPE_S64
:
498 dbg_printk("op load field s64\n");
499 tmp
= *(int64_t *) stack_top
->u
.ptr
.ptr
;
500 if (stack_top
->u
.ptr
.rev_bo
)
502 stack_top
->u
.v
= tmp
;
506 dbg_printk("op load field u8\n");
507 stack_top
->u
.v
= *(uint8_t *) stack_top
->u
.ptr
.ptr
;
509 case OBJECT_TYPE_U16
:
513 dbg_printk("op load field s16\n");
514 tmp
= *(uint16_t *) stack_top
->u
.ptr
.ptr
;
515 if (stack_top
->u
.ptr
.rev_bo
)
517 stack_top
->u
.v
= tmp
;
520 case OBJECT_TYPE_U32
:
524 dbg_printk("op load field u32\n");
525 tmp
= *(uint32_t *) stack_top
->u
.ptr
.ptr
;
526 if (stack_top
->u
.ptr
.rev_bo
)
528 stack_top
->u
.v
= tmp
;
531 case OBJECT_TYPE_U64
:
535 dbg_printk("op load field u64\n");
536 tmp
= *(uint64_t *) stack_top
->u
.ptr
.ptr
;
537 if (stack_top
->u
.ptr
.rev_bo
)
539 stack_top
->u
.v
= tmp
;
542 case OBJECT_TYPE_STRING
:
546 dbg_printk("op load field string\n");
547 str
= (const char *) stack_top
->u
.ptr
.ptr
;
548 stack_top
->u
.s
.str
= str
;
549 if (unlikely(!stack_top
->u
.s
.str
)) {
550 dbg_printk("Filter warning: loading a NULL string.\n");
554 stack_top
->u
.s
.seq_len
= SIZE_MAX
;
555 stack_top
->u
.s
.literal_type
=
556 ESTACK_STRING_LITERAL_TYPE_NONE
;
559 case OBJECT_TYPE_STRING_SEQUENCE
:
563 dbg_printk("op load field string sequence\n");
564 ptr
= stack_top
->u
.ptr
.ptr
;
565 stack_top
->u
.s
.seq_len
= *(unsigned long *) ptr
;
566 stack_top
->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
567 if (unlikely(!stack_top
->u
.s
.str
)) {
568 dbg_printk("Filter warning: loading a NULL sequence.\n");
572 stack_top
->u
.s
.literal_type
=
573 ESTACK_STRING_LITERAL_TYPE_NONE
;
576 case OBJECT_TYPE_DYNAMIC
:
578 * Dynamic types in context are looked up
579 * by context get index.
583 case OBJECT_TYPE_DOUBLE
:
586 case OBJECT_TYPE_SEQUENCE
:
587 case OBJECT_TYPE_ARRAY
:
588 case OBJECT_TYPE_STRUCT
:
589 case OBJECT_TYPE_VARIANT
:
590 printk(KERN_WARNING
"Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
601 * Return 0 (discard), or raise the 0x1 flag (log event).
602 * Currently, other flags are kept for future extensions and have no
605 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
606 struct lttng_probe_ctx
*lttng_probe_ctx
,
607 const char *filter_stack_data
)
609 struct bytecode_runtime
*bytecode
= filter_data
;
610 void *pc
, *next_pc
, *start_pc
;
613 struct estack _stack
;
614 struct estack
*stack
= &_stack
;
615 register int64_t ax
= 0, bx
= 0;
616 register int top
= FILTER_STACK_EMPTY
;
617 #ifndef INTERPRETER_USE_SWITCH
618 static void *dispatch
[NR_FILTER_OPS
] = {
619 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
621 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
624 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
625 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
626 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
627 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
628 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
629 [ FILTER_OP_BIT_RSHIFT
] = &&LABEL_FILTER_OP_BIT_RSHIFT
,
630 [ FILTER_OP_BIT_LSHIFT
] = &&LABEL_FILTER_OP_BIT_LSHIFT
,
631 [ FILTER_OP_BIT_AND
] = &&LABEL_FILTER_OP_BIT_AND
,
632 [ FILTER_OP_BIT_OR
] = &&LABEL_FILTER_OP_BIT_OR
,
633 [ FILTER_OP_BIT_XOR
] = &&LABEL_FILTER_OP_BIT_XOR
,
635 /* binary comparators */
636 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
637 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
638 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
639 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
640 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
641 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
643 /* string binary comparator */
644 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
645 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
646 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
647 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
648 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
649 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
651 /* globbing pattern binary comparator */
652 [ FILTER_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING
,
653 [ FILTER_OP_NE_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING
,
655 /* s64 binary comparator */
656 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
657 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
658 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
659 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
660 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
661 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
663 /* double binary comparator */
664 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
665 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
666 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
667 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
668 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
669 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
671 /* Mixed S64-double binary comparators */
672 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
673 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
674 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
675 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
676 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
677 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
679 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
680 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
681 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
682 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
683 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
684 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
687 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
688 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
689 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
690 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
691 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
692 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
693 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
694 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
695 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
698 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
699 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
702 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
703 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
704 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
705 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
706 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
708 /* load from immediate operand */
709 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
710 [ FILTER_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING
,
711 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
712 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
715 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
716 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
717 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
719 /* get context ref */
720 [ FILTER_OP_GET_CONTEXT_REF
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF
,
721 [ FILTER_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING
,
722 [ FILTER_OP_GET_CONTEXT_REF_S64
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64
,
723 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE
,
725 /* load userspace field ref */
726 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING
,
727 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
,
729 /* Instructions for recursive traversal through composed types. */
730 [ FILTER_OP_GET_CONTEXT_ROOT
] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT
,
731 [ FILTER_OP_GET_APP_CONTEXT_ROOT
] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT
,
732 [ FILTER_OP_GET_PAYLOAD_ROOT
] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT
,
734 [ FILTER_OP_GET_SYMBOL
] = &&LABEL_FILTER_OP_GET_SYMBOL
,
735 [ FILTER_OP_GET_SYMBOL_FIELD
] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD
,
736 [ FILTER_OP_GET_INDEX_U16
] = &&LABEL_FILTER_OP_GET_INDEX_U16
,
737 [ FILTER_OP_GET_INDEX_U64
] = &&LABEL_FILTER_OP_GET_INDEX_U64
,
739 [ FILTER_OP_LOAD_FIELD
] = &&LABEL_FILTER_OP_LOAD_FIELD
,
740 [ FILTER_OP_LOAD_FIELD_S8
] = &&LABEL_FILTER_OP_LOAD_FIELD_S8
,
741 [ FILTER_OP_LOAD_FIELD_S16
] = &&LABEL_FILTER_OP_LOAD_FIELD_S16
,
742 [ FILTER_OP_LOAD_FIELD_S32
] = &&LABEL_FILTER_OP_LOAD_FIELD_S32
,
743 [ FILTER_OP_LOAD_FIELD_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_S64
,
744 [ FILTER_OP_LOAD_FIELD_U8
] = &&LABEL_FILTER_OP_LOAD_FIELD_U8
,
745 [ FILTER_OP_LOAD_FIELD_U16
] = &&LABEL_FILTER_OP_LOAD_FIELD_U16
,
746 [ FILTER_OP_LOAD_FIELD_U32
] = &&LABEL_FILTER_OP_LOAD_FIELD_U32
,
747 [ FILTER_OP_LOAD_FIELD_U64
] = &&LABEL_FILTER_OP_LOAD_FIELD_U64
,
748 [ FILTER_OP_LOAD_FIELD_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING
,
749 [ FILTER_OP_LOAD_FIELD_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE
,
750 [ FILTER_OP_LOAD_FIELD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE
,
752 [ FILTER_OP_UNARY_BIT_NOT
] = &&LABEL_FILTER_OP_UNARY_BIT_NOT
,
754 [ FILTER_OP_RETURN_S64
] = &&LABEL_FILTER_OP_RETURN_S64
,
756 #endif /* #ifndef INTERPRETER_USE_SWITCH */
760 OP(FILTER_OP_UNKNOWN
):
761 OP(FILTER_OP_LOAD_FIELD_REF
):
762 OP(FILTER_OP_GET_CONTEXT_REF
):
763 #ifdef INTERPRETER_USE_SWITCH
765 #endif /* INTERPRETER_USE_SWITCH */
766 printk(KERN_WARNING
"unknown bytecode op %u\n",
767 (unsigned int) *(filter_opcode_t
*) pc
);
771 OP(FILTER_OP_RETURN
):
772 OP(FILTER_OP_RETURN_S64
):
773 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
774 retval
= !!estack_ax_v
;
784 printk(KERN_WARNING
"unsupported bytecode op %u\n",
785 (unsigned int) *(filter_opcode_t
*) pc
);
795 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
796 (unsigned int) *(filter_opcode_t
*) pc
);
800 OP(FILTER_OP_EQ_STRING
):
804 res
= (stack_strcmp(stack
, top
, "==") == 0);
805 estack_pop(stack
, top
, ax
, bx
);
807 next_pc
+= sizeof(struct binary_op
);
810 OP(FILTER_OP_NE_STRING
):
814 res
= (stack_strcmp(stack
, top
, "!=") != 0);
815 estack_pop(stack
, top
, ax
, bx
);
817 next_pc
+= sizeof(struct binary_op
);
820 OP(FILTER_OP_GT_STRING
):
824 res
= (stack_strcmp(stack
, top
, ">") > 0);
825 estack_pop(stack
, top
, ax
, bx
);
827 next_pc
+= sizeof(struct binary_op
);
830 OP(FILTER_OP_LT_STRING
):
834 res
= (stack_strcmp(stack
, top
, "<") < 0);
835 estack_pop(stack
, top
, ax
, bx
);
837 next_pc
+= sizeof(struct binary_op
);
840 OP(FILTER_OP_GE_STRING
):
844 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
845 estack_pop(stack
, top
, ax
, bx
);
847 next_pc
+= sizeof(struct binary_op
);
850 OP(FILTER_OP_LE_STRING
):
854 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
855 estack_pop(stack
, top
, ax
, bx
);
857 next_pc
+= sizeof(struct binary_op
);
861 OP(FILTER_OP_EQ_STAR_GLOB_STRING
):
865 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
866 estack_pop(stack
, top
, ax
, bx
);
868 next_pc
+= sizeof(struct binary_op
);
871 OP(FILTER_OP_NE_STAR_GLOB_STRING
):
875 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
876 estack_pop(stack
, top
, ax
, bx
);
878 next_pc
+= sizeof(struct binary_op
);
882 OP(FILTER_OP_EQ_S64
):
886 res
= (estack_bx_v
== estack_ax_v
);
887 estack_pop(stack
, top
, ax
, bx
);
889 next_pc
+= sizeof(struct binary_op
);
892 OP(FILTER_OP_NE_S64
):
896 res
= (estack_bx_v
!= estack_ax_v
);
897 estack_pop(stack
, top
, ax
, bx
);
899 next_pc
+= sizeof(struct binary_op
);
902 OP(FILTER_OP_GT_S64
):
906 res
= (estack_bx_v
> estack_ax_v
);
907 estack_pop(stack
, top
, ax
, bx
);
909 next_pc
+= sizeof(struct binary_op
);
912 OP(FILTER_OP_LT_S64
):
916 res
= (estack_bx_v
< estack_ax_v
);
917 estack_pop(stack
, top
, ax
, bx
);
919 next_pc
+= sizeof(struct binary_op
);
922 OP(FILTER_OP_GE_S64
):
926 res
= (estack_bx_v
>= estack_ax_v
);
927 estack_pop(stack
, top
, ax
, bx
);
929 next_pc
+= sizeof(struct binary_op
);
932 OP(FILTER_OP_LE_S64
):
936 res
= (estack_bx_v
<= estack_ax_v
);
937 estack_pop(stack
, top
, ax
, bx
);
939 next_pc
+= sizeof(struct binary_op
);
943 OP(FILTER_OP_EQ_DOUBLE
):
944 OP(FILTER_OP_NE_DOUBLE
):
945 OP(FILTER_OP_GT_DOUBLE
):
946 OP(FILTER_OP_LT_DOUBLE
):
947 OP(FILTER_OP_GE_DOUBLE
):
948 OP(FILTER_OP_LE_DOUBLE
):
954 /* Mixed S64-double binary comparators */
955 OP(FILTER_OP_EQ_DOUBLE_S64
):
956 OP(FILTER_OP_NE_DOUBLE_S64
):
957 OP(FILTER_OP_GT_DOUBLE_S64
):
958 OP(FILTER_OP_LT_DOUBLE_S64
):
959 OP(FILTER_OP_GE_DOUBLE_S64
):
960 OP(FILTER_OP_LE_DOUBLE_S64
):
961 OP(FILTER_OP_EQ_S64_DOUBLE
):
962 OP(FILTER_OP_NE_S64_DOUBLE
):
963 OP(FILTER_OP_GT_S64_DOUBLE
):
964 OP(FILTER_OP_LT_S64_DOUBLE
):
965 OP(FILTER_OP_GE_S64_DOUBLE
):
966 OP(FILTER_OP_LE_S64_DOUBLE
):
971 OP(FILTER_OP_BIT_RSHIFT
):
975 /* Catch undefined behavior. */
976 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
980 res
= ((uint64_t) estack_bx_v
>> (uint32_t) estack_ax_v
);
981 estack_pop(stack
, top
, ax
, bx
);
983 next_pc
+= sizeof(struct binary_op
);
986 OP(FILTER_OP_BIT_LSHIFT
):
990 /* Catch undefined behavior. */
991 if (unlikely(estack_ax_v
< 0 || estack_ax_v
>= 64)) {
995 res
= ((uint64_t) estack_bx_v
<< (uint32_t) estack_ax_v
);
996 estack_pop(stack
, top
, ax
, bx
);
998 next_pc
+= sizeof(struct binary_op
);
1001 OP(FILTER_OP_BIT_AND
):
1005 res
= ((uint64_t) estack_bx_v
& (uint64_t) estack_ax_v
);
1006 estack_pop(stack
, top
, ax
, bx
);
1008 next_pc
+= sizeof(struct binary_op
);
1011 OP(FILTER_OP_BIT_OR
):
1015 res
= ((uint64_t) estack_bx_v
| (uint64_t) estack_ax_v
);
1016 estack_pop(stack
, top
, ax
, bx
);
1018 next_pc
+= sizeof(struct binary_op
);
1021 OP(FILTER_OP_BIT_XOR
):
1025 res
= ((uint64_t) estack_bx_v
^ (uint64_t) estack_ax_v
);
1026 estack_pop(stack
, top
, ax
, bx
);
1028 next_pc
+= sizeof(struct binary_op
);
1033 OP(FILTER_OP_UNARY_PLUS
):
1034 OP(FILTER_OP_UNARY_MINUS
):
1035 OP(FILTER_OP_UNARY_NOT
):
1036 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
1037 (unsigned int) *(filter_opcode_t
*) pc
);
1042 OP(FILTER_OP_UNARY_BIT_NOT
):
1044 estack_ax_v
= ~(uint64_t) estack_ax_v
;
1045 next_pc
+= sizeof(struct unary_op
);
1049 OP(FILTER_OP_UNARY_PLUS_S64
):
1051 next_pc
+= sizeof(struct unary_op
);
1054 OP(FILTER_OP_UNARY_MINUS_S64
):
1056 estack_ax_v
= -estack_ax_v
;
1057 next_pc
+= sizeof(struct unary_op
);
1060 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
1061 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
1066 OP(FILTER_OP_UNARY_NOT_S64
):
1068 estack_ax_v
= !estack_ax_v
;
1069 next_pc
+= sizeof(struct unary_op
);
1072 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
1081 struct logical_op
*insn
= (struct logical_op
*) pc
;
1083 /* If AX is 0, skip and evaluate to 0 */
1084 if (unlikely(estack_ax_v
== 0)) {
1085 dbg_printk("Jumping to bytecode offset %u\n",
1086 (unsigned int) insn
->skip_offset
);
1087 next_pc
= start_pc
+ insn
->skip_offset
;
1089 /* Pop 1 when jump not taken */
1090 estack_pop(stack
, top
, ax
, bx
);
1091 next_pc
+= sizeof(struct logical_op
);
1097 struct logical_op
*insn
= (struct logical_op
*) pc
;
1099 /* If AX is nonzero, skip and evaluate to 1 */
1101 if (unlikely(estack_ax_v
!= 0)) {
1103 dbg_printk("Jumping to bytecode offset %u\n",
1104 (unsigned int) insn
->skip_offset
);
1105 next_pc
= start_pc
+ insn
->skip_offset
;
1107 /* Pop 1 when jump not taken */
1108 estack_pop(stack
, top
, ax
, bx
);
1109 next_pc
+= sizeof(struct logical_op
);
1115 /* load field ref */
1116 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
1118 struct load_op
*insn
= (struct load_op
*) pc
;
1119 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1121 dbg_printk("load field ref offset %u type string\n",
1123 estack_push(stack
, top
, ax
, bx
);
1124 estack_ax(stack
, top
)->u
.s
.str
=
1125 *(const char * const *) &filter_stack_data
[ref
->offset
];
1126 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1127 dbg_printk("Filter warning: loading a NULL string.\n");
1131 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
1132 estack_ax(stack
, top
)->u
.s
.literal_type
=
1133 ESTACK_STRING_LITERAL_TYPE_NONE
;
1134 estack_ax(stack
, top
)->u
.s
.user
= 0;
1135 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1136 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1140 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
1142 struct load_op
*insn
= (struct load_op
*) pc
;
1143 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1145 dbg_printk("load field ref offset %u type sequence\n",
1147 estack_push(stack
, top
, ax
, bx
);
1148 estack_ax(stack
, top
)->u
.s
.seq_len
=
1149 *(unsigned long *) &filter_stack_data
[ref
->offset
];
1150 estack_ax(stack
, top
)->u
.s
.str
=
1151 *(const char **) (&filter_stack_data
[ref
->offset
1152 + sizeof(unsigned long)]);
1153 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1154 dbg_printk("Filter warning: loading a NULL sequence.\n");
1158 estack_ax(stack
, top
)->u
.s
.literal_type
=
1159 ESTACK_STRING_LITERAL_TYPE_NONE
;
1160 estack_ax(stack
, top
)->u
.s
.user
= 0;
1161 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1165 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
1167 struct load_op
*insn
= (struct load_op
*) pc
;
1168 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1170 dbg_printk("load field ref offset %u type s64\n",
1172 estack_push(stack
, top
, ax
, bx
);
1174 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
1175 dbg_printk("ref load s64 %lld\n",
1176 (long long) estack_ax_v
);
1177 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1181 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
1187 /* load from immediate operand */
1188 OP(FILTER_OP_LOAD_STRING
):
1190 struct load_op
*insn
= (struct load_op
*) pc
;
1192 dbg_printk("load string %s\n", insn
->data
);
1193 estack_push(stack
, top
, ax
, bx
);
1194 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1195 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
1196 estack_ax(stack
, top
)->u
.s
.literal_type
=
1197 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
1198 estack_ax(stack
, top
)->u
.s
.user
= 0;
1199 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1203 OP(FILTER_OP_LOAD_STAR_GLOB_STRING
):
1205 struct load_op
*insn
= (struct load_op
*) pc
;
1207 dbg_printk("load globbing pattern %s\n", insn
->data
);
1208 estack_push(stack
, top
, ax
, bx
);
1209 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
1210 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
1211 estack_ax(stack
, top
)->u
.s
.literal_type
=
1212 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
1213 estack_ax(stack
, top
)->u
.s
.user
= 0;
1214 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1218 OP(FILTER_OP_LOAD_S64
):
1220 struct load_op
*insn
= (struct load_op
*) pc
;
1222 estack_push(stack
, top
, ax
, bx
);
1223 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
1224 dbg_printk("load s64 %lld\n",
1225 (long long) estack_ax_v
);
1226 next_pc
+= sizeof(struct load_op
)
1227 + sizeof(struct literal_numeric
);
1231 OP(FILTER_OP_LOAD_DOUBLE
):
1238 OP(FILTER_OP_CAST_TO_S64
):
1239 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
1240 (unsigned int) *(filter_opcode_t
*) pc
);
1244 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
1250 OP(FILTER_OP_CAST_NOP
):
1252 next_pc
+= sizeof(struct cast_op
);
1256 /* get context ref */
1257 OP(FILTER_OP_GET_CONTEXT_REF_STRING
):
1259 struct load_op
*insn
= (struct load_op
*) pc
;
1260 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1261 struct lttng_ctx_field
*ctx_field
;
1262 union lttng_ctx_value v
;
1264 dbg_printk("get context ref offset %u type string\n",
1266 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1267 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1268 estack_push(stack
, top
, ax
, bx
);
1269 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
1270 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1271 dbg_printk("Filter warning: loading a NULL string.\n");
1275 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
1276 estack_ax(stack
, top
)->u
.s
.literal_type
=
1277 ESTACK_STRING_LITERAL_TYPE_NONE
;
1278 estack_ax(stack
, top
)->u
.s
.user
= 0;
1279 dbg_printk("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1280 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1284 OP(FILTER_OP_GET_CONTEXT_REF_S64
):
1286 struct load_op
*insn
= (struct load_op
*) pc
;
1287 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1288 struct lttng_ctx_field
*ctx_field
;
1289 union lttng_ctx_value v
;
1291 dbg_printk("get context ref offset %u type s64\n",
1293 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
1294 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
1295 estack_push(stack
, top
, ax
, bx
);
1296 estack_ax_v
= v
.s64
;
1297 dbg_printk("ref get context s64 %lld\n",
1298 (long long) estack_ax_v
);
1299 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1303 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE
):
1309 /* load userspace field ref */
1310 OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING
):
1312 struct load_op
*insn
= (struct load_op
*) pc
;
1313 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1315 dbg_printk("load field ref offset %u type user string\n",
1317 estack_push(stack
, top
, ax
, bx
);
1318 estack_ax(stack
, top
)->u
.s
.user_str
=
1319 *(const char * const *) &filter_stack_data
[ref
->offset
];
1320 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1321 dbg_printk("Filter warning: loading a NULL string.\n");
1325 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
1326 estack_ax(stack
, top
)->u
.s
.literal_type
=
1327 ESTACK_STRING_LITERAL_TYPE_NONE
;
1328 estack_ax(stack
, top
)->u
.s
.user
= 1;
1329 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
1330 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1334 OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
):
1336 struct load_op
*insn
= (struct load_op
*) pc
;
1337 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1339 dbg_printk("load field ref offset %u type user sequence\n",
1341 estack_push(stack
, top
, ax
, bx
);
1342 estack_ax(stack
, top
)->u
.s
.seq_len
=
1343 *(unsigned long *) &filter_stack_data
[ref
->offset
];
1344 estack_ax(stack
, top
)->u
.s
.user_str
=
1345 *(const char **) (&filter_stack_data
[ref
->offset
1346 + sizeof(unsigned long)]);
1347 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1348 dbg_printk("Filter warning: loading a NULL sequence.\n");
1352 estack_ax(stack
, top
)->u
.s
.literal_type
=
1353 ESTACK_STRING_LITERAL_TYPE_NONE
;
1354 estack_ax(stack
, top
)->u
.s
.user
= 1;
1355 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1359 OP(FILTER_OP_GET_CONTEXT_ROOT
):
1361 dbg_printk("op get context root\n");
1362 estack_push(stack
, top
, ax
, bx
);
1363 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_CONTEXT
;
1364 /* "field" only needed for variants. */
1365 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1366 next_pc
+= sizeof(struct load_op
);
1370 OP(FILTER_OP_GET_APP_CONTEXT_ROOT
):
1376 OP(FILTER_OP_GET_PAYLOAD_ROOT
):
1378 dbg_printk("op get app payload root\n");
1379 estack_push(stack
, top
, ax
, bx
);
1380 estack_ax(stack
, top
)->u
.ptr
.type
= LOAD_ROOT_PAYLOAD
;
1381 estack_ax(stack
, top
)->u
.ptr
.ptr
= filter_stack_data
;
1382 /* "field" only needed for variants. */
1383 estack_ax(stack
, top
)->u
.ptr
.field
= NULL
;
1384 next_pc
+= sizeof(struct load_op
);
1388 OP(FILTER_OP_GET_SYMBOL
):
1390 dbg_printk("op get symbol\n");
1391 switch (estack_ax(stack
, top
)->u
.ptr
.type
) {
1393 printk(KERN_WARNING
"Nested fields not implemented yet.\n");
1396 case LOAD_ROOT_CONTEXT
:
1397 case LOAD_ROOT_APP_CONTEXT
:
1398 case LOAD_ROOT_PAYLOAD
:
1400 * symbol lookup is performed by
1406 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1410 OP(FILTER_OP_GET_SYMBOL_FIELD
):
1413 * Used for first variant encountered in a
1414 * traversal. Variants are not implemented yet.
1420 OP(FILTER_OP_GET_INDEX_U16
):
1422 struct load_op
*insn
= (struct load_op
*) pc
;
1423 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1425 dbg_printk("op get index u16\n");
1426 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1429 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1430 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1434 OP(FILTER_OP_GET_INDEX_U64
):
1436 struct load_op
*insn
= (struct load_op
*) pc
;
1437 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1439 dbg_printk("op get index u64\n");
1440 ret
= dynamic_get_index(lttng_probe_ctx
, bytecode
, index
->index
, estack_ax(stack
, top
));
1443 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1444 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1448 OP(FILTER_OP_LOAD_FIELD
):
1450 dbg_printk("op load field\n");
1451 ret
= dynamic_load_field(estack_ax(stack
, top
));
1454 estack_ax_v
= estack_ax(stack
, top
)->u
.v
;
1455 next_pc
+= sizeof(struct load_op
);
1459 OP(FILTER_OP_LOAD_FIELD_S8
):
1461 dbg_printk("op load field s8\n");
1463 estack_ax_v
= *(int8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1464 next_pc
+= sizeof(struct load_op
);
1467 OP(FILTER_OP_LOAD_FIELD_S16
):
1469 dbg_printk("op load field s16\n");
1471 estack_ax_v
= *(int16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1472 next_pc
+= sizeof(struct load_op
);
1475 OP(FILTER_OP_LOAD_FIELD_S32
):
1477 dbg_printk("op load field s32\n");
1479 estack_ax_v
= *(int32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1480 next_pc
+= sizeof(struct load_op
);
1483 OP(FILTER_OP_LOAD_FIELD_S64
):
1485 dbg_printk("op load field s64\n");
1487 estack_ax_v
= *(int64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1488 next_pc
+= sizeof(struct load_op
);
1491 OP(FILTER_OP_LOAD_FIELD_U8
):
1493 dbg_printk("op load field u8\n");
1495 estack_ax_v
= *(uint8_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1496 next_pc
+= sizeof(struct load_op
);
1499 OP(FILTER_OP_LOAD_FIELD_U16
):
1501 dbg_printk("op load field u16\n");
1503 estack_ax_v
= *(uint16_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1504 next_pc
+= sizeof(struct load_op
);
1507 OP(FILTER_OP_LOAD_FIELD_U32
):
1509 dbg_printk("op load field u32\n");
1511 estack_ax_v
= *(uint32_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1512 next_pc
+= sizeof(struct load_op
);
1515 OP(FILTER_OP_LOAD_FIELD_U64
):
1517 dbg_printk("op load field u64\n");
1519 estack_ax_v
= *(uint64_t *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1520 next_pc
+= sizeof(struct load_op
);
1523 OP(FILTER_OP_LOAD_FIELD_DOUBLE
):
1529 OP(FILTER_OP_LOAD_FIELD_STRING
):
1533 dbg_printk("op load field string\n");
1534 str
= (const char *) estack_ax(stack
, top
)->u
.ptr
.ptr
;
1535 estack_ax(stack
, top
)->u
.s
.str
= str
;
1536 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1537 dbg_printk("Filter warning: loading a NULL string.\n");
1541 estack_ax(stack
, top
)->u
.s
.seq_len
= SIZE_MAX
;
1542 estack_ax(stack
, top
)->u
.s
.literal_type
=
1543 ESTACK_STRING_LITERAL_TYPE_NONE
;
1544 next_pc
+= sizeof(struct load_op
);
1548 OP(FILTER_OP_LOAD_FIELD_SEQUENCE
):
1552 dbg_printk("op load field string sequence\n");
1553 ptr
= estack_ax(stack
, top
)->u
.ptr
.ptr
;
1554 estack_ax(stack
, top
)->u
.s
.seq_len
= *(unsigned long *) ptr
;
1555 estack_ax(stack
, top
)->u
.s
.str
= *(const char **) (ptr
+ sizeof(unsigned long));
1556 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
1557 dbg_printk("Filter warning: loading a NULL sequence.\n");
1561 estack_ax(stack
, top
)->u
.s
.literal_type
=
1562 ESTACK_STRING_LITERAL_TYPE_NONE
;
1563 next_pc
+= sizeof(struct load_op
);
1569 /* return 0 (discard) on error */