/*
 * lttng-filter-interpreter.c
 *
 * LTTng UST filter interpreter.
 *
 * Copyright (C) 2010-2012 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; only
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
23 #include "lttng-filter.h"
27 * -2: unknown escape char.
32 int parse_char(const char **p
)
52 int stack_strcmp(struct estack
*stack
, int top
, const char *cmp_type
)
54 const char *p
= estack_bx(stack
, top
)->u
.s
.str
, *q
= estack_ax(stack
, top
)->u
.s
.str
;
61 if (unlikely(p
- estack_bx(stack
, top
)->u
.s
.str
> estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0')) {
62 if (q
- estack_ax(stack
, top
)->u
.s
.str
> estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0') {
65 if (estack_ax(stack
, top
)->u
.s
.literal
) {
73 if (unlikely(q
- estack_ax(stack
, top
)->u
.s
.str
> estack_ax(stack
, top
)->u
.s
.seq_len
|| *q
== '\0')) {
74 if (p
- estack_bx(stack
, top
)->u
.s
.str
> estack_bx(stack
, top
)->u
.s
.seq_len
|| *p
== '\0') {
77 if (estack_bx(stack
, top
)->u
.s
.literal
) {
85 if (estack_bx(stack
, top
)->u
.s
.literal
) {
89 } else if (ret
== -2) {
92 /* else compare both char */
94 if (estack_ax(stack
, top
)->u
.s
.literal
) {
98 } else if (ret
== -2) {
118 uint64_t lttng_filter_false(void *filter_data
,
119 const char *filter_stack_data
)
124 #ifdef INTERPRETER_USE_SWITCH
127 * Fallback for compilers that do not support taking address of labels.
131 start_pc = &bytecode->data[0]; \
132 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
134 dbg_printf("Executing op %s (%u)\n", \
135 print_op((unsigned int) *(filter_opcode_t *) pc), \
136 (unsigned int) *(filter_opcode_t *) pc); \
137 switch (*(filter_opcode_t *) pc) {
139 #define OP(name) case name
149 * Dispatch-table based interpreter.
153 start_pc = &bytecode->data[0]; \
154 pc = next_pc = start_pc; \
155 if (unlikely(pc - start_pc >= bytecode->len)) \
157 goto *dispatch[*(filter_opcode_t *) pc];
164 goto *dispatch[*(filter_opcode_t *) pc];
 * Return 0 (discard), or raise the 0x1 flag (log event).
 * Currently, other flags are kept for future extensions and have no
 * effect.
175 uint64_t lttng_filter_interpret_bytecode(void *filter_data
,
176 const char *filter_stack_data
)
178 struct bytecode_runtime
*bytecode
= filter_data
;
179 struct lttng_ctx
*ctx
= bytecode
->p
.bc
->enabler
->chan
->ctx
;
180 void *pc
, *next_pc
, *start_pc
;
183 struct estack _stack
;
184 struct estack
*stack
= &_stack
;
185 register int64_t ax
= 0, bx
= 0;
186 register int top
= FILTER_STACK_EMPTY
;
187 #ifndef INTERPRETER_USE_SWITCH
188 static void *dispatch
[NR_FILTER_OPS
] = {
189 [ FILTER_OP_UNKNOWN
] = &&LABEL_FILTER_OP_UNKNOWN
,
191 [ FILTER_OP_RETURN
] = &&LABEL_FILTER_OP_RETURN
,
194 [ FILTER_OP_MUL
] = &&LABEL_FILTER_OP_MUL
,
195 [ FILTER_OP_DIV
] = &&LABEL_FILTER_OP_DIV
,
196 [ FILTER_OP_MOD
] = &&LABEL_FILTER_OP_MOD
,
197 [ FILTER_OP_PLUS
] = &&LABEL_FILTER_OP_PLUS
,
198 [ FILTER_OP_MINUS
] = &&LABEL_FILTER_OP_MINUS
,
199 [ FILTER_OP_RSHIFT
] = &&LABEL_FILTER_OP_RSHIFT
,
200 [ FILTER_OP_LSHIFT
] = &&LABEL_FILTER_OP_LSHIFT
,
201 [ FILTER_OP_BIN_AND
] = &&LABEL_FILTER_OP_BIN_AND
,
202 [ FILTER_OP_BIN_OR
] = &&LABEL_FILTER_OP_BIN_OR
,
203 [ FILTER_OP_BIN_XOR
] = &&LABEL_FILTER_OP_BIN_XOR
,
205 /* binary comparators */
206 [ FILTER_OP_EQ
] = &&LABEL_FILTER_OP_EQ
,
207 [ FILTER_OP_NE
] = &&LABEL_FILTER_OP_NE
,
208 [ FILTER_OP_GT
] = &&LABEL_FILTER_OP_GT
,
209 [ FILTER_OP_LT
] = &&LABEL_FILTER_OP_LT
,
210 [ FILTER_OP_GE
] = &&LABEL_FILTER_OP_GE
,
211 [ FILTER_OP_LE
] = &&LABEL_FILTER_OP_LE
,
213 /* string binary comparator */
214 [ FILTER_OP_EQ_STRING
] = &&LABEL_FILTER_OP_EQ_STRING
,
215 [ FILTER_OP_NE_STRING
] = &&LABEL_FILTER_OP_NE_STRING
,
216 [ FILTER_OP_GT_STRING
] = &&LABEL_FILTER_OP_GT_STRING
,
217 [ FILTER_OP_LT_STRING
] = &&LABEL_FILTER_OP_LT_STRING
,
218 [ FILTER_OP_GE_STRING
] = &&LABEL_FILTER_OP_GE_STRING
,
219 [ FILTER_OP_LE_STRING
] = &&LABEL_FILTER_OP_LE_STRING
,
221 /* s64 binary comparator */
222 [ FILTER_OP_EQ_S64
] = &&LABEL_FILTER_OP_EQ_S64
,
223 [ FILTER_OP_NE_S64
] = &&LABEL_FILTER_OP_NE_S64
,
224 [ FILTER_OP_GT_S64
] = &&LABEL_FILTER_OP_GT_S64
,
225 [ FILTER_OP_LT_S64
] = &&LABEL_FILTER_OP_LT_S64
,
226 [ FILTER_OP_GE_S64
] = &&LABEL_FILTER_OP_GE_S64
,
227 [ FILTER_OP_LE_S64
] = &&LABEL_FILTER_OP_LE_S64
,
229 /* double binary comparator */
230 [ FILTER_OP_EQ_DOUBLE
] = &&LABEL_FILTER_OP_EQ_DOUBLE
,
231 [ FILTER_OP_NE_DOUBLE
] = &&LABEL_FILTER_OP_NE_DOUBLE
,
232 [ FILTER_OP_GT_DOUBLE
] = &&LABEL_FILTER_OP_GT_DOUBLE
,
233 [ FILTER_OP_LT_DOUBLE
] = &&LABEL_FILTER_OP_LT_DOUBLE
,
234 [ FILTER_OP_GE_DOUBLE
] = &&LABEL_FILTER_OP_GE_DOUBLE
,
235 [ FILTER_OP_LE_DOUBLE
] = &&LABEL_FILTER_OP_LE_DOUBLE
,
237 /* Mixed S64-double binary comparators */
238 [ FILTER_OP_EQ_DOUBLE_S64
] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64
,
239 [ FILTER_OP_NE_DOUBLE_S64
] = &&LABEL_FILTER_OP_NE_DOUBLE_S64
,
240 [ FILTER_OP_GT_DOUBLE_S64
] = &&LABEL_FILTER_OP_GT_DOUBLE_S64
,
241 [ FILTER_OP_LT_DOUBLE_S64
] = &&LABEL_FILTER_OP_LT_DOUBLE_S64
,
242 [ FILTER_OP_GE_DOUBLE_S64
] = &&LABEL_FILTER_OP_GE_DOUBLE_S64
,
243 [ FILTER_OP_LE_DOUBLE_S64
] = &&LABEL_FILTER_OP_LE_DOUBLE_S64
,
245 [ FILTER_OP_EQ_S64_DOUBLE
] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE
,
246 [ FILTER_OP_NE_S64_DOUBLE
] = &&LABEL_FILTER_OP_NE_S64_DOUBLE
,
247 [ FILTER_OP_GT_S64_DOUBLE
] = &&LABEL_FILTER_OP_GT_S64_DOUBLE
,
248 [ FILTER_OP_LT_S64_DOUBLE
] = &&LABEL_FILTER_OP_LT_S64_DOUBLE
,
249 [ FILTER_OP_GE_S64_DOUBLE
] = &&LABEL_FILTER_OP_GE_S64_DOUBLE
,
250 [ FILTER_OP_LE_S64_DOUBLE
] = &&LABEL_FILTER_OP_LE_S64_DOUBLE
,
253 [ FILTER_OP_UNARY_PLUS
] = &&LABEL_FILTER_OP_UNARY_PLUS
,
254 [ FILTER_OP_UNARY_MINUS
] = &&LABEL_FILTER_OP_UNARY_MINUS
,
255 [ FILTER_OP_UNARY_NOT
] = &&LABEL_FILTER_OP_UNARY_NOT
,
256 [ FILTER_OP_UNARY_PLUS_S64
] = &&LABEL_FILTER_OP_UNARY_PLUS_S64
,
257 [ FILTER_OP_UNARY_MINUS_S64
] = &&LABEL_FILTER_OP_UNARY_MINUS_S64
,
258 [ FILTER_OP_UNARY_NOT_S64
] = &&LABEL_FILTER_OP_UNARY_NOT_S64
,
259 [ FILTER_OP_UNARY_PLUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE
,
260 [ FILTER_OP_UNARY_MINUS_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE
,
261 [ FILTER_OP_UNARY_NOT_DOUBLE
] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE
,
264 [ FILTER_OP_AND
] = &&LABEL_FILTER_OP_AND
,
265 [ FILTER_OP_OR
] = &&LABEL_FILTER_OP_OR
,
268 [ FILTER_OP_LOAD_FIELD_REF
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF
,
269 [ FILTER_OP_LOAD_FIELD_REF_STRING
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING
,
270 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE
,
271 [ FILTER_OP_LOAD_FIELD_REF_S64
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64
,
272 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE
,
274 /* load from immediate operand */
275 [ FILTER_OP_LOAD_STRING
] = &&LABEL_FILTER_OP_LOAD_STRING
,
276 [ FILTER_OP_LOAD_S64
] = &&LABEL_FILTER_OP_LOAD_S64
,
277 [ FILTER_OP_LOAD_DOUBLE
] = &&LABEL_FILTER_OP_LOAD_DOUBLE
,
280 [ FILTER_OP_CAST_TO_S64
] = &&LABEL_FILTER_OP_CAST_TO_S64
,
281 [ FILTER_OP_CAST_DOUBLE_TO_S64
] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64
,
282 [ FILTER_OP_CAST_NOP
] = &&LABEL_FILTER_OP_CAST_NOP
,
284 /* get context ref */
285 [ FILTER_OP_GET_CONTEXT_REF
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF
,
286 [ FILTER_OP_GET_CONTEXT_REF_STRING
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING
,
287 [ FILTER_OP_GET_CONTEXT_REF_S64
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64
,
288 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE
] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE
,
290 #endif /* #ifndef INTERPRETER_USE_SWITCH */
294 OP(FILTER_OP_UNKNOWN
):
295 OP(FILTER_OP_LOAD_FIELD_REF
):
296 OP(FILTER_OP_GET_CONTEXT_REF
):
297 #ifdef INTERPRETER_USE_SWITCH
299 #endif /* INTERPRETER_USE_SWITCH */
300 ERR("unknown bytecode op %u\n",
301 (unsigned int) *(filter_opcode_t
*) pc
);
305 OP(FILTER_OP_RETURN
):
306 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
307 retval
= !!estack_ax_v
;
317 OP(FILTER_OP_RSHIFT
):
318 OP(FILTER_OP_LSHIFT
):
319 OP(FILTER_OP_BIN_AND
):
320 OP(FILTER_OP_BIN_OR
):
321 OP(FILTER_OP_BIN_XOR
):
322 ERR("unsupported bytecode op %u\n",
323 (unsigned int) *(filter_opcode_t
*) pc
);
333 ERR("unsupported non-specialized bytecode op %u\n",
334 (unsigned int) *(filter_opcode_t
*) pc
);
338 OP(FILTER_OP_EQ_STRING
):
342 res
= (stack_strcmp(stack
, top
, "==") == 0);
343 estack_pop(stack
, top
, ax
, bx
);
345 next_pc
+= sizeof(struct binary_op
);
348 OP(FILTER_OP_NE_STRING
):
352 res
= (stack_strcmp(stack
, top
, "!=") != 0);
353 estack_pop(stack
, top
, ax
, bx
);
355 next_pc
+= sizeof(struct binary_op
);
358 OP(FILTER_OP_GT_STRING
):
362 res
= (stack_strcmp(stack
, top
, ">") > 0);
363 estack_pop(stack
, top
, ax
, bx
);
365 next_pc
+= sizeof(struct binary_op
);
368 OP(FILTER_OP_LT_STRING
):
372 res
= (stack_strcmp(stack
, top
, "<") < 0);
373 estack_pop(stack
, top
, ax
, bx
);
375 next_pc
+= sizeof(struct binary_op
);
378 OP(FILTER_OP_GE_STRING
):
382 res
= (stack_strcmp(stack
, top
, ">=") >= 0);
383 estack_pop(stack
, top
, ax
, bx
);
385 next_pc
+= sizeof(struct binary_op
);
388 OP(FILTER_OP_LE_STRING
):
392 res
= (stack_strcmp(stack
, top
, "<=") <= 0);
393 estack_pop(stack
, top
, ax
, bx
);
395 next_pc
+= sizeof(struct binary_op
);
399 OP(FILTER_OP_EQ_S64
):
403 res
= (estack_bx_v
== estack_ax_v
);
404 estack_pop(stack
, top
, ax
, bx
);
406 next_pc
+= sizeof(struct binary_op
);
409 OP(FILTER_OP_NE_S64
):
413 res
= (estack_bx_v
!= estack_ax_v
);
414 estack_pop(stack
, top
, ax
, bx
);
416 next_pc
+= sizeof(struct binary_op
);
419 OP(FILTER_OP_GT_S64
):
423 res
= (estack_bx_v
> estack_ax_v
);
424 estack_pop(stack
, top
, ax
, bx
);
426 next_pc
+= sizeof(struct binary_op
);
429 OP(FILTER_OP_LT_S64
):
433 res
= (estack_bx_v
< estack_ax_v
);
434 estack_pop(stack
, top
, ax
, bx
);
436 next_pc
+= sizeof(struct binary_op
);
439 OP(FILTER_OP_GE_S64
):
443 res
= (estack_bx_v
>= estack_ax_v
);
444 estack_pop(stack
, top
, ax
, bx
);
446 next_pc
+= sizeof(struct binary_op
);
449 OP(FILTER_OP_LE_S64
):
453 res
= (estack_bx_v
<= estack_ax_v
);
454 estack_pop(stack
, top
, ax
, bx
);
456 next_pc
+= sizeof(struct binary_op
);
460 OP(FILTER_OP_EQ_DOUBLE
):
464 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax(stack
, top
)->u
.d
);
465 estack_pop(stack
, top
, ax
, bx
);
467 next_pc
+= sizeof(struct binary_op
);
470 OP(FILTER_OP_NE_DOUBLE
):
474 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax(stack
, top
)->u
.d
);
475 estack_pop(stack
, top
, ax
, bx
);
477 next_pc
+= sizeof(struct binary_op
);
480 OP(FILTER_OP_GT_DOUBLE
):
484 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax(stack
, top
)->u
.d
);
485 estack_pop(stack
, top
, ax
, bx
);
487 next_pc
+= sizeof(struct binary_op
);
490 OP(FILTER_OP_LT_DOUBLE
):
494 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax(stack
, top
)->u
.d
);
495 estack_pop(stack
, top
, ax
, bx
);
497 next_pc
+= sizeof(struct binary_op
);
500 OP(FILTER_OP_GE_DOUBLE
):
504 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax(stack
, top
)->u
.d
);
505 estack_pop(stack
, top
, ax
, bx
);
507 next_pc
+= sizeof(struct binary_op
);
510 OP(FILTER_OP_LE_DOUBLE
):
514 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax(stack
, top
)->u
.d
);
515 estack_pop(stack
, top
, ax
, bx
);
517 next_pc
+= sizeof(struct binary_op
);
521 /* Mixed S64-double binary comparators */
522 OP(FILTER_OP_EQ_DOUBLE_S64
):
526 res
= (estack_bx(stack
, top
)->u
.d
== estack_ax_v
);
527 estack_pop(stack
, top
, ax
, bx
);
529 next_pc
+= sizeof(struct binary_op
);
532 OP(FILTER_OP_NE_DOUBLE_S64
):
536 res
= (estack_bx(stack
, top
)->u
.d
!= estack_ax_v
);
537 estack_pop(stack
, top
, ax
, bx
);
539 next_pc
+= sizeof(struct binary_op
);
542 OP(FILTER_OP_GT_DOUBLE_S64
):
546 res
= (estack_bx(stack
, top
)->u
.d
> estack_ax_v
);
547 estack_pop(stack
, top
, ax
, bx
);
549 next_pc
+= sizeof(struct binary_op
);
552 OP(FILTER_OP_LT_DOUBLE_S64
):
556 res
= (estack_bx(stack
, top
)->u
.d
< estack_ax_v
);
557 estack_pop(stack
, top
, ax
, bx
);
559 next_pc
+= sizeof(struct binary_op
);
562 OP(FILTER_OP_GE_DOUBLE_S64
):
566 res
= (estack_bx(stack
, top
)->u
.d
>= estack_ax_v
);
567 estack_pop(stack
, top
, ax
, bx
);
569 next_pc
+= sizeof(struct binary_op
);
572 OP(FILTER_OP_LE_DOUBLE_S64
):
576 res
= (estack_bx(stack
, top
)->u
.d
<= estack_ax_v
);
577 estack_pop(stack
, top
, ax
, bx
);
579 next_pc
+= sizeof(struct binary_op
);
583 OP(FILTER_OP_EQ_S64_DOUBLE
):
587 res
= (estack_bx_v
== estack_ax(stack
, top
)->u
.d
);
588 estack_pop(stack
, top
, ax
, bx
);
590 next_pc
+= sizeof(struct binary_op
);
593 OP(FILTER_OP_NE_S64_DOUBLE
):
597 res
= (estack_bx_v
!= estack_ax(stack
, top
)->u
.d
);
598 estack_pop(stack
, top
, ax
, bx
);
600 next_pc
+= sizeof(struct binary_op
);
603 OP(FILTER_OP_GT_S64_DOUBLE
):
607 res
= (estack_bx_v
> estack_ax(stack
, top
)->u
.d
);
608 estack_pop(stack
, top
, ax
, bx
);
610 next_pc
+= sizeof(struct binary_op
);
613 OP(FILTER_OP_LT_S64_DOUBLE
):
617 res
= (estack_bx_v
< estack_ax(stack
, top
)->u
.d
);
618 estack_pop(stack
, top
, ax
, bx
);
620 next_pc
+= sizeof(struct binary_op
);
623 OP(FILTER_OP_GE_S64_DOUBLE
):
627 res
= (estack_bx_v
>= estack_ax(stack
, top
)->u
.d
);
628 estack_pop(stack
, top
, ax
, bx
);
630 next_pc
+= sizeof(struct binary_op
);
633 OP(FILTER_OP_LE_S64_DOUBLE
):
637 res
= (estack_bx_v
<= estack_ax(stack
, top
)->u
.d
);
638 estack_pop(stack
, top
, ax
, bx
);
640 next_pc
+= sizeof(struct binary_op
);
645 OP(FILTER_OP_UNARY_PLUS
):
646 OP(FILTER_OP_UNARY_MINUS
):
647 OP(FILTER_OP_UNARY_NOT
):
648 ERR("unsupported non-specialized bytecode op %u\n",
649 (unsigned int) *(filter_opcode_t
*) pc
);
654 OP(FILTER_OP_UNARY_PLUS_S64
):
655 OP(FILTER_OP_UNARY_PLUS_DOUBLE
):
657 next_pc
+= sizeof(struct unary_op
);
660 OP(FILTER_OP_UNARY_MINUS_S64
):
662 estack_ax_v
= -estack_ax_v
;
663 next_pc
+= sizeof(struct unary_op
);
666 OP(FILTER_OP_UNARY_MINUS_DOUBLE
):
668 estack_ax(stack
, top
)->u
.d
= -estack_ax(stack
, top
)->u
.d
;
669 next_pc
+= sizeof(struct unary_op
);
672 OP(FILTER_OP_UNARY_NOT_S64
):
674 estack_ax_v
= !estack_ax_v
;
675 next_pc
+= sizeof(struct unary_op
);
678 OP(FILTER_OP_UNARY_NOT_DOUBLE
):
680 estack_ax(stack
, top
)->u
.d
= !estack_ax(stack
, top
)->u
.d
;
681 next_pc
+= sizeof(struct unary_op
);
688 struct logical_op
*insn
= (struct logical_op
*) pc
;
690 /* If AX is 0, skip and evaluate to 0 */
691 if (unlikely(estack_ax_v
== 0)) {
692 dbg_printf("Jumping to bytecode offset %u\n",
693 (unsigned int) insn
->skip_offset
);
694 next_pc
= start_pc
+ insn
->skip_offset
;
696 /* Pop 1 when jump not taken */
697 estack_pop(stack
, top
, ax
, bx
);
698 next_pc
+= sizeof(struct logical_op
);
704 struct logical_op
*insn
= (struct logical_op
*) pc
;
706 /* If AX is nonzero, skip and evaluate to 1 */
708 if (unlikely(estack_ax_v
!= 0)) {
710 dbg_printf("Jumping to bytecode offset %u\n",
711 (unsigned int) insn
->skip_offset
);
712 next_pc
= start_pc
+ insn
->skip_offset
;
714 /* Pop 1 when jump not taken */
715 estack_pop(stack
, top
, ax
, bx
);
716 next_pc
+= sizeof(struct logical_op
);
723 OP(FILTER_OP_LOAD_FIELD_REF_STRING
):
725 struct load_op
*insn
= (struct load_op
*) pc
;
726 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
728 dbg_printf("load field ref offset %u type string\n",
730 estack_push(stack
, top
, ax
, bx
);
731 estack_ax(stack
, top
)->u
.s
.str
=
732 *(const char * const *) &filter_stack_data
[ref
->offset
];
733 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
734 dbg_printf("Filter warning: loading a NULL string.\n");
738 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
739 estack_ax(stack
, top
)->u
.s
.literal
= 0;
740 dbg_printf("ref load string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
741 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
745 OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE
):
747 struct load_op
*insn
= (struct load_op
*) pc
;
748 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
750 dbg_printf("load field ref offset %u type sequence\n",
752 estack_push(stack
, top
, ax
, bx
);
753 estack_ax(stack
, top
)->u
.s
.seq_len
=
754 *(unsigned long *) &filter_stack_data
[ref
->offset
];
755 estack_ax(stack
, top
)->u
.s
.str
=
756 *(const char **) (&filter_stack_data
[ref
->offset
757 + sizeof(unsigned long)]);
758 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
759 dbg_printf("Filter warning: loading a NULL sequence.\n");
763 estack_ax(stack
, top
)->u
.s
.literal
= 0;
764 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
768 OP(FILTER_OP_LOAD_FIELD_REF_S64
):
770 struct load_op
*insn
= (struct load_op
*) pc
;
771 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
773 dbg_printf("load field ref offset %u type s64\n",
775 estack_push(stack
, top
, ax
, bx
);
777 ((struct literal_numeric
*) &filter_stack_data
[ref
->offset
])->v
;
778 dbg_printf("ref load s64 %" PRIi64
"\n", estack_ax_v
);
779 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
783 OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE
):
785 struct load_op
*insn
= (struct load_op
*) pc
;
786 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
788 dbg_printf("load field ref offset %u type double\n",
790 estack_push(stack
, top
, ax
, bx
);
791 memcpy(&estack_ax(stack
, top
)->u
.d
, &filter_stack_data
[ref
->offset
],
792 sizeof(struct literal_double
));
793 dbg_printf("ref load double %g\n", estack_ax(stack
, top
)->u
.d
);
794 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
798 /* load from immediate operand */
799 OP(FILTER_OP_LOAD_STRING
):
801 struct load_op
*insn
= (struct load_op
*) pc
;
803 dbg_printf("load string %s\n", insn
->data
);
804 estack_push(stack
, top
, ax
, bx
);
805 estack_ax(stack
, top
)->u
.s
.str
= insn
->data
;
806 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
807 estack_ax(stack
, top
)->u
.s
.literal
= 1;
808 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
812 OP(FILTER_OP_LOAD_S64
):
814 struct load_op
*insn
= (struct load_op
*) pc
;
816 estack_push(stack
, top
, ax
, bx
);
817 estack_ax_v
= ((struct literal_numeric
*) insn
->data
)->v
;
818 dbg_printf("load s64 %" PRIi64
"\n", estack_ax_v
);
819 next_pc
+= sizeof(struct load_op
)
820 + sizeof(struct literal_numeric
);
824 OP(FILTER_OP_LOAD_DOUBLE
):
826 struct load_op
*insn
= (struct load_op
*) pc
;
828 estack_push(stack
, top
, ax
, bx
);
829 memcpy(&estack_ax(stack
, top
)->u
.d
, insn
->data
,
830 sizeof(struct literal_double
));
831 dbg_printf("load s64 %g\n", estack_ax(stack
, top
)->u
.d
);
832 next_pc
+= sizeof(struct load_op
)
833 + sizeof(struct literal_double
);
838 OP(FILTER_OP_CAST_TO_S64
):
839 ERR("unsupported non-specialized bytecode op %u\n",
840 (unsigned int) *(filter_opcode_t
*) pc
);
844 OP(FILTER_OP_CAST_DOUBLE_TO_S64
):
846 estack_ax_v
= (int64_t) estack_ax(stack
, top
)->u
.d
;
847 next_pc
+= sizeof(struct cast_op
);
851 OP(FILTER_OP_CAST_NOP
):
853 next_pc
+= sizeof(struct cast_op
);
857 /* get context ref */
858 OP(FILTER_OP_GET_CONTEXT_REF_STRING
):
860 struct load_op
*insn
= (struct load_op
*) pc
;
861 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
862 struct lttng_ctx_field
*ctx_field
;
863 union lttng_ctx_value v
;
865 dbg_printf("get context ref offset %u type string\n",
867 ctx_field
= &ctx
->fields
[ref
->offset
];
868 ctx_field
->get_value(ctx_field
, &v
);
869 estack_push(stack
, top
, ax
, bx
);
870 estack_ax(stack
, top
)->u
.s
.str
= v
.str
;
871 if (unlikely(!estack_ax(stack
, top
)->u
.s
.str
)) {
872 dbg_printf("Filter warning: loading a NULL string.\n");
876 estack_ax(stack
, top
)->u
.s
.seq_len
= UINT_MAX
;
877 estack_ax(stack
, top
)->u
.s
.literal
= 0;
878 dbg_printf("ref get context string %s\n", estack_ax(stack
, top
)->u
.s
.str
);
879 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
883 OP(FILTER_OP_GET_CONTEXT_REF_S64
):
885 struct load_op
*insn
= (struct load_op
*) pc
;
886 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
887 struct lttng_ctx_field
*ctx_field
;
888 union lttng_ctx_value v
;
890 dbg_printf("get context ref offset %u type s64\n",
892 ctx_field
= &ctx
->fields
[ref
->offset
];
893 ctx_field
->get_value(ctx_field
, &v
);
894 estack_push(stack
, top
, ax
, bx
);
896 dbg_printf("ref get context s64 %" PRIi64
"\n", estack_ax_v
);
897 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
901 OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE
):
903 struct load_op
*insn
= (struct load_op
*) pc
;
904 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
905 struct lttng_ctx_field
*ctx_field
;
906 union lttng_ctx_value v
;
908 dbg_printf("get context ref offset %u type double\n",
910 ctx_field
= &ctx
->fields
[ref
->offset
];
911 ctx_field
->get_value(ctx_field
, &v
);
912 estack_push(stack
, top
, ax
, bx
);
913 memcpy(&estack_ax(stack
, top
)->u
.d
, &v
.d
, sizeof(struct literal_double
));
914 dbg_printf("ref get context double %g\n", estack_ax(stack
, top
)->u
.d
);
915 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
921 /* return 0 (discard) on error */