/*
 * lttng-filter-interpreter.c
 *
 * LTTng modules filter interpreter.
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
27 #include <wrapper/uaccess.h>
28 #include <wrapper/frame.h>
29 #include <wrapper/types.h>
31 #include <lttng-filter.h>
32 #include <lttng-string-utils.h>
34 LTTNG_STACK_FRAME_NON_STANDARD(lttng_filter_interpret_bytecode
);
37 * get_char should be called with page fault handler disabled if it is expected
38 * to handle user-space read.
41 char get_char(struct estack_entry
*reg
, size_t offset
)
43 if (unlikely(offset
>= reg
->u
.s
.seq_len
))
48 /* Handle invalid access as end of string. */
49 if (unlikely(!lttng_access_ok(VERIFY_READ
,
50 reg
->u
.s
.user_str
+ offset
,
53 /* Handle fault (nonzero return value) as end of string. */
54 if (unlikely(__copy_from_user_inatomic(&c
,
55 reg
->u
.s
.user_str
+ offset
,
60 return reg
->u
.s
.str
[offset
];
/*
 * Consume one character of a plain string literal, resolving escape
 * sequences.  Return values:
 *  0: normal character (compare *c as-is).
 * -1: wildcard found.
 * -2: unknown escape char.
 *
 * NOTE(review): interior of this function was partially lost in the source
 * dump; reconstructed from the visible get_char() call — confirm against
 * upstream.
 */
static
int parse_char(struct estack_entry *reg, char *c, size_t *offset)
{
	switch (*c) {
	case '\\':
		/* Escape: look at the next character to decide. */
		(*offset)++;
		*c = get_char(reg, *offset);
		switch (*c) {
		case '\\':
		case '*':
			return 0;	/* Escaped literal '\' or '*'. */
		default:
			return -2;	/* Unknown escape char. */
		}
	case '*':
		return -1;		/* Wildcard. */
	default:
		return 0;
	}
}
/*
 * Adapter matching the strutils character-callback signature: "data" is
 * the estack_entry holding the string, "at" the character index.
 */
static
char get_char_at_cb(size_t at, void *data)
{
	return get_char(data, at);
}
97 int stack_star_glob_match(struct estack
*stack
, int top
, const char *cmp_type
)
99 bool has_user
= false;
102 struct estack_entry
*pattern_reg
;
103 struct estack_entry
*candidate_reg
;
105 if (estack_bx(stack
, top
)->u
.s
.user
106 || estack_ax(stack
, top
)->u
.s
.user
) {
113 /* Find out which side is the pattern vs. the candidate. */
114 if (estack_ax(stack
, top
)->u
.s
.literal_type
== ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
) {
115 pattern_reg
= estack_ax(stack
, top
);
116 candidate_reg
= estack_bx(stack
, top
);
118 pattern_reg
= estack_bx(stack
, top
);
119 candidate_reg
= estack_ax(stack
, top
);
122 /* Perform the match operation. */
123 result
= !strutils_star_glob_match_char_cb(get_char_at_cb
,
124 pattern_reg
, get_char_at_cb
, candidate_reg
);
134 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
136 size_t offset_bx
= 0, offset_ax
= 0;
137 int diff
, has_user
= 0;
140 if (estack_bx(stack
, top
)->u
.s
.user
141 || estack_ax(stack
, top
)->u
.s
.user
) {
151 char char_bx
, char_ax
;
153 char_bx
= get_char(estack_bx(stack
, top
), offset_bx
);
154 char_ax
= get_char(estack_ax(stack
, top
), offset_ax
);
156 if (unlikely(char_bx
== '\0')) {
157 if (char_ax
== '\0') {
161 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
162 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
163 ret
= parse_char(estack_ax(stack
, top
),
164 &char_ax
, &offset_ax
);
174 if (unlikely(char_ax
== '\0')) {
175 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
176 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
177 ret
= parse_char(estack_bx(stack
, top
),
178 &char_bx
, &offset_bx
);
187 if (estack_bx(stack
, top
)->u
.s
.literal_type
==
188 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
189 ret
= parse_char(estack_bx(stack
, top
),
190 &char_bx
, &offset_bx
);
194 } else if (ret
== -2) {
197 /* else compare both char */
199 if (estack_ax(stack
, top
)->u
.s
.literal_type
==
200 ESTACK_STRING_LITERAL_TYPE_PLAIN
) {
201 ret
= parse_char(estack_ax(stack
, top
),
202 &char_ax
, &offset_ax
);
206 } else if (ret
== -2) {
223 diff
= char_bx
- char_ax
;
236 uint64_t lttng_filter_false(void *filter_data
,
237 struct lttng_probe_ctx
*lttng_probe_ctx
,
238 const char *filter_stack_data
)
243 #ifdef INTERPRETER_USE_SWITCH
246 * Fallback for compilers that do not support taking address of labels.
250 start_pc = &bytecode->data[0]; \
251 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
253 dbg_printk("Executing op %s (%u)\n", \
254 lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
255 (unsigned int) *(filter_opcode_t *) pc); \
256 switch (*(filter_opcode_t *) pc) {
258 #define OP(name) case name
268 * Dispatch-table based interpreter.
272 start_pc = &bytecode->data[0]; \
273 pc = next_pc = start_pc; \
274 if (unlikely(pc - start_pc >= bytecode->len)) \
276 goto *dispatch[*(filter_opcode_t *) pc];
283 goto *dispatch[*(filter_opcode_t *) pc];
290 * Return 0 (discard), or raise the 0x1 flag (log event).
291 * Currently, other flags are kept for future extensions and have no
294 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
295 struct lttng_probe_ctx
*lttng_probe_ctx
,
296 const char *filter_stack_data
)
298 struct bytecode_runtime
*bytecode
= filter_data
;
299 void *pc
, *next_pc
, *start_pc
;
302 struct estack _stack
;
303 struct estack
*stack
= &_stack
;
304 register int64_t ax
= 0, bx
= 0;
305 register int top
= FILTER_STACK_EMPTY
;
306 #ifndef INTERPRETER_USE_SWITCH
307 static void *dispatch
[NR_FILTER_OPS
] = {
308 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
310 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
313 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
314 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
315 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
316 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
317 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
318 [ FILTER_OP_RSHIFT
] = &&LABEL_FILTER_OP_RSHIFT
,
319 [ FILTER_OP_LSHIFT
] = &&LABEL_FILTER_OP_LSHIFT
,
320 [ FILTER_OP_BIN_AND
] = &&LABEL_FILTER_OP_BIN_AND
,
321 [ FILTER_OP_BIN_OR
] = &&LABEL_FILTER_OP_BIN_OR
,
322 [ FILTER_OP_BIN_XOR
] = &&LABEL_FILTER_OP_BIN_XOR
,
324 /* binary comparators */
325 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
326 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
327 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
328 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
329 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
330 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
332 /* string binary comparator */
333 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
334 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
335 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
336 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
337 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
338 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
340 /* globbing pattern binary comparator */
341 [ FILTER_OP_EQ_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING
,
342 [ FILTER_OP_NE_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING
,
344 /* s64 binary comparator */
345 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
346 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
347 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
348 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
349 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
350 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
352 /* double binary comparator */
353 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
354 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
355 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
356 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
357 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
358 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
360 /* Mixed S64-double binary comparators */
361 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
362 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
363 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
364 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
365 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
366 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
368 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
369 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
370 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
371 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
372 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
373 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
376 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
377 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
378 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
379 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
380 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
381 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
382 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
383 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
384 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
387 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
388 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
391 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
392 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
393 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
394 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
395 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
397 /* load from immediate operand */
398 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
399 [ FILTER_OP_LOAD_STAR_GLOB_STRING
] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING
,
400 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
401 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
404 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
405 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
406 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
408 /* get context ref */
409 [ FILTER_OP_GET_CONTEXT_REF
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF
,
410 [ FILTER_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING
,
411 [ FILTER_OP_GET_CONTEXT_REF_S64
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64
,
412 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE
,
414 /* load userspace field ref */
415 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING
,
416 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
,
418 #endif /* #ifndef INTERPRETER_USE_SWITCH */
422 OP(FILTER_OP_UNKNOWN
):
423 OP(FILTER_OP_LOAD_FIELD_REF
):
424 OP(FILTER_OP_GET_CONTEXT_REF
):
425 #ifdef INTERPRETER_USE_SWITCH
427 #endif /* INTERPRETER_USE_SWITCH */
428 printk(KERN_WARNING
"unknown bytecode op %u\n",
429 (unsigned int) *(filter_opcode_t
*) pc
);
433 OP(FILTER_OP_RETURN
):
434 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
435 retval
= !!estack_ax_v
;
445 OP(FILTER_OP_RSHIFT
):
446 OP(FILTER_OP_LSHIFT
):
447 OP(FILTER_OP_BIN_AND
):
448 OP(FILTER_OP_BIN_OR
):
449 OP(FILTER_OP_BIN_XOR
):
450 printk(KERN_WARNING
"unsupported bytecode op %u\n",
451 (unsigned int) *(filter_opcode_t
*) pc
);
461 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
462 (unsigned int) *(filter_opcode_t
*) pc
);
466 OP(FILTER_OP_EQ_STRING
):
470 res
= (stack_strcmp(stack
, top
, "==") == 0);
471 estack_pop(stack
, top
, ax
, bx
);
473 next_pc
+= sizeof(struct binary_op
);
476 OP(FILTER_OP_NE_STRING
):
480 res
= (stack_strcmp(stack
, top
, "!=") != 0);
481 estack_pop(stack
, top
, ax
, bx
);
483 next_pc
+= sizeof(struct binary_op
);
486 OP(FILTER_OP_GT_STRING
):
490 res
= (stack_strcmp(stack
, top
, ">") > 0);
491 estack_pop(stack
, top
, ax
, bx
);
493 next_pc
+= sizeof(struct binary_op
);
496 OP(FILTER_OP_LT_STRING
):
500 res
= (stack_strcmp(stack
, top
, "<") < 0);
501 estack_pop(stack
, top
, ax
, bx
);
503 next_pc
+= sizeof(struct binary_op
);
506 OP(FILTER_OP_GE_STRING
):
510 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
511 estack_pop(stack
, top
, ax
, bx
);
513 next_pc
+= sizeof(struct binary_op
);
516 OP(FILTER_OP_LE_STRING
):
520 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
521 estack_pop(stack
, top
, ax
, bx
);
523 next_pc
+= sizeof(struct binary_op
);
527 OP(FILTER_OP_EQ_STAR_GLOB_STRING
):
531 res
= (stack_star_glob_match(stack
, top
, "==") == 0);
532 estack_pop(stack
, top
, ax
, bx
);
534 next_pc
+= sizeof(struct binary_op
);
537 OP(FILTER_OP_NE_STAR_GLOB_STRING
):
541 res
= (stack_star_glob_match(stack
, top
, "!=") != 0);
542 estack_pop(stack
, top
, ax
, bx
);
544 next_pc
+= sizeof(struct binary_op
);
548 OP(FILTER_OP_EQ_S64
):
552 res
= (estack_bx_v
== estack_ax_v
);
553 estack_pop(stack
, top
, ax
, bx
);
555 next_pc
+= sizeof(struct binary_op
);
558 OP(FILTER_OP_NE_S64
):
562 res
= (estack_bx_v
!= estack_ax_v
);
563 estack_pop(stack
, top
, ax
, bx
);
565 next_pc
+= sizeof(struct binary_op
);
568 OP(FILTER_OP_GT_S64
):
572 res
= (estack_bx_v
> estack_ax_v
);
573 estack_pop(stack
, top
, ax
, bx
);
575 next_pc
+= sizeof(struct binary_op
);
578 OP(FILTER_OP_LT_S64
):
582 res
= (estack_bx_v
< estack_ax_v
);
583 estack_pop(stack
, top
, ax
, bx
);
585 next_pc
+= sizeof(struct binary_op
);
588 OP(FILTER_OP_GE_S64
):
592 res
= (estack_bx_v
>= estack_ax_v
);
593 estack_pop(stack
, top
, ax
, bx
);
595 next_pc
+= sizeof(struct binary_op
);
598 OP(FILTER_OP_LE_S64
):
602 res
= (estack_bx_v
<= estack_ax_v
);
603 estack_pop(stack
, top
, ax
, bx
);
605 next_pc
+= sizeof(struct binary_op
);
609 OP(FILTER_OP_EQ_DOUBLE
):
610 OP(FILTER_OP_NE_DOUBLE
):
611 OP(FILTER_OP_GT_DOUBLE
):
612 OP(FILTER_OP_LT_DOUBLE
):
613 OP(FILTER_OP_GE_DOUBLE
):
614 OP(FILTER_OP_LE_DOUBLE
):
620 /* Mixed S64-double binary comparators */
621 OP(FILTER_OP_EQ_DOUBLE_S64
):
622 OP(FILTER_OP_NE_DOUBLE_S64
):
623 OP(FILTER_OP_GT_DOUBLE_S64
):
624 OP(FILTER_OP_LT_DOUBLE_S64
):
625 OP(FILTER_OP_GE_DOUBLE_S64
):
626 OP(FILTER_OP_LE_DOUBLE_S64
):
627 OP(FILTER_OP_EQ_S64_DOUBLE
):
628 OP(FILTER_OP_NE_S64_DOUBLE
):
629 OP(FILTER_OP_GT_S64_DOUBLE
):
630 OP(FILTER_OP_LT_S64_DOUBLE
):
631 OP(FILTER_OP_GE_S64_DOUBLE
):
632 OP(FILTER_OP_LE_S64_DOUBLE
):
639 OP(FILTER_OP_UNARY_PLUS
):
640 OP(FILTER_OP_UNARY_MINUS
):
641 OP(FILTER_OP_UNARY_NOT
):
642 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
643 (unsigned int) *(filter_opcode_t
*) pc
);
648 OP(FILTER_OP_UNARY_PLUS_S64
):
650 next_pc
+= sizeof(struct unary_op
);
653 OP(FILTER_OP_UNARY_MINUS_S64
):
655 estack_ax_v
= -estack_ax_v
;
656 next_pc
+= sizeof(struct unary_op
);
659 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
660 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
665 OP(FILTER_OP_UNARY_NOT_S64
):
667 estack_ax_v
= !estack_ax_v
;
668 next_pc
+= sizeof(struct unary_op
);
671 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
680 struct logical_op
*insn
= (struct logical_op
*) pc
;
682 /* If AX is 0, skip and evaluate to 0 */
683 if (unlikely(estack_ax_v
== 0)) {
684 dbg_printk("Jumping to bytecode offset %u\n",
685 (unsigned int) insn
->skip_offset
);
686 next_pc
= start_pc
+ insn
->skip_offset
;
688 /* Pop 1 when jump not taken */
689 estack_pop(stack
, top
, ax
, bx
);
690 next_pc
+= sizeof(struct logical_op
);
696 struct logical_op
*insn
= (struct logical_op
*) pc
;
698 /* If AX is nonzero, skip and evaluate to 1 */
700 if (unlikely(estack_ax_v
!= 0)) {
702 dbg_printk("Jumping to bytecode offset %u\n",
703 (unsigned int) insn
->skip_offset
);
704 next_pc
= start_pc
+ insn
->skip_offset
;
706 /* Pop 1 when jump not taken */
707 estack_pop(stack
, top
, ax
, bx
);
708 next_pc
+= sizeof(struct logical_op
);
715 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
717 struct load_op
*insn
= (struct load_op
*) pc
;
718 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
720 dbg_printk("load field ref offset %u type string\n",
722 estack_push(stack
, top
, ax
, bx
);
723 estack_ax(stack
, top
)->u
.s
.str
=
724 *(const char * const *) &filter_stack_data
[ref
->offset
];
725 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
726 dbg_printk("Filter warning: loading a NULL string.\n");
730 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
731 estack_ax(stack
, top
)->u
.s
.literal_type
=
732 ESTACK_STRING_LITERAL_TYPE_NONE
;
733 estack_ax(stack
, top
)->u
.s
.user
= 0;
734 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
735 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
739 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
741 struct load_op
*insn
= (struct load_op
*) pc
;
742 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
744 dbg_printk("load field ref offset %u type sequence\n",
746 estack_push(stack
, top
, ax
, bx
);
747 estack_ax(stack
, top
)->u
.s
.seq_len
=
748 *(unsigned long *) &filter_stack_data
[ref
->offset
];
749 estack_ax(stack
, top
)->u
.s
.str
=
750 *(const char **) (&filter_stack_data
[ref
->offset
751 + sizeof(unsigned long)]);
752 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
753 dbg_printk("Filter warning: loading a NULL sequence.\n");
757 estack_ax(stack
, top
)->u
.s
.literal_type
=
758 ESTACK_STRING_LITERAL_TYPE_NONE
;
759 estack_ax(stack
, top
)->u
.s
.user
= 0;
760 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
764 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
766 struct load_op
*insn
= (struct load_op
*) pc
;
767 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
769 dbg_printk("load field ref offset %u type s64\n",
771 estack_push(stack
, top
, ax
, bx
);
773 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
774 dbg_printk("ref load s64 %lld\n",
775 (long long) estack_ax_v
);
776 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
780 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
786 /* load from immediate operand */
787 OP(FILTER_OP_LOAD_STRING
):
789 struct load_op
*insn
= (struct load_op
*) pc
;
791 dbg_printk("load string %s\n", insn
->data
);
792 estack_push(stack
, top
, ax
, bx
);
793 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
794 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
795 estack_ax(stack
, top
)->u
.s
.literal_type
=
796 ESTACK_STRING_LITERAL_TYPE_PLAIN
;
797 estack_ax(stack
, top
)->u
.s
.user
= 0;
798 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
802 OP(FILTER_OP_LOAD_STAR_GLOB_STRING
):
804 struct load_op
*insn
= (struct load_op
*) pc
;
806 dbg_printk("load globbing pattern %s\n", insn
->data
);
807 estack_push(stack
, top
, ax
, bx
);
808 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
809 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
810 estack_ax(stack
, top
)->u
.s
.literal_type
=
811 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB
;
812 estack_ax(stack
, top
)->u
.s
.user
= 0;
813 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
817 OP(FILTER_OP_LOAD_S64
):
819 struct load_op
*insn
= (struct load_op
*) pc
;
821 estack_push(stack
, top
, ax
, bx
);
822 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
823 dbg_printk("load s64 %lld\n",
824 (long long) estack_ax_v
);
825 next_pc
+= sizeof(struct load_op
)
826 + sizeof(struct literal_numeric
);
830 OP(FILTER_OP_LOAD_DOUBLE
):
837 OP(FILTER_OP_CAST_TO_S64
):
838 printk(KERN_WARNING
"unsupported non-specialized bytecode op %u\n",
839 (unsigned int) *(filter_opcode_t
*) pc
);
843 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
849 OP(FILTER_OP_CAST_NOP
):
851 next_pc
+= sizeof(struct cast_op
);
855 /* get context ref */
856 OP(FILTER_OP_GET_CONTEXT_REF_STRING
):
858 struct load_op
*insn
= (struct load_op
*) pc
;
859 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
860 struct lttng_ctx_field
*ctx_field
;
861 union lttng_ctx_value v
;
863 dbg_printk("get context ref offset %u type string\n",
865 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
866 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
867 estack_push(stack
, top
, ax
, bx
);
868 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
869 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
870 dbg_printk("Filter warning: loading a NULL string.\n");
874 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
875 estack_ax(stack
, top
)->u
.s
.literal_type
=
876 ESTACK_STRING_LITERAL_TYPE_NONE
;
877 estack_ax(stack
, top
)->u
.s
.user
= 0;
878 dbg_printk("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
879 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
883 OP(FILTER_OP_GET_CONTEXT_REF_S64
):
885 struct load_op
*insn
= (struct load_op
*) pc
;
886 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
887 struct lttng_ctx_field
*ctx_field
;
888 union lttng_ctx_value v
;
890 dbg_printk("get context ref offset %u type s64\n",
892 ctx_field
= <tng_static_ctx
->fields
[ref
->offset
];
893 ctx_field
->get_value(ctx_field
, lttng_probe_ctx
, &v
);
894 estack_push(stack
, top
, ax
, bx
);
896 dbg_printk("ref get context s64 %lld\n",
897 (long long) estack_ax_v
);
898 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
902 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE
):
908 /* load userspace field ref */
909 OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING
):
911 struct load_op
*insn
= (struct load_op
*) pc
;
912 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
914 dbg_printk("load field ref offset %u type user string\n",
916 estack_push(stack
, top
, ax
, bx
);
917 estack_ax(stack
, top
)->u
.s
.user_str
=
918 *(const char * const *) &filter_stack_data
[ref
->offset
];
919 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
920 dbg_printk("Filter warning: loading a NULL string.\n");
924 estack_ax(stack
, top
)->u
.s
.seq_len
= LTTNG_SIZE_MAX
;
925 estack_ax(stack
, top
)->u
.s
.literal_type
=
926 ESTACK_STRING_LITERAL_TYPE_NONE
;
927 estack_ax(stack
, top
)->u
.s
.user
= 1;
928 dbg_printk("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
929 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
933 OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE
):
935 struct load_op
*insn
= (struct load_op
*) pc
;
936 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
938 dbg_printk("load field ref offset %u type user sequence\n",
940 estack_push(stack
, top
, ax
, bx
);
941 estack_ax(stack
, top
)->u
.s
.seq_len
=
942 *(unsigned long *) &filter_stack_data
[ref
->offset
];
943 estack_ax(stack
, top
)->u
.s
.user_str
=
944 *(const char **) (&filter_stack_data
[ref
->offset
945 + sizeof(unsigned long)]);
946 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
947 dbg_printk("Filter warning: loading a NULL sequence.\n");
951 estack_ax(stack
, top
)->u
.s
.literal_type
=
952 ESTACK_STRING_LITERAL_TYPE_NONE
;
953 estack_ax(stack
, top
)->u
.s
.user
= 1;
954 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
960 /* return 0 (discard) on error */