bytecode: propagate `rev_bo` of element
[lttng-modules.git] / lttng-filter-interpreter.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-filter-interpreter.c
4 *
5 * LTTng modules filter interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/compiler_attributes.h>
11 #include <wrapper/uaccess.h>
12 #include <wrapper/objtool.h>
13 #include <wrapper/types.h>
14 #include <linux/swab.h>
15
16 #include <lttng-filter.h>
17 #include <lttng-string-utils.h>
18
19 LTTNG_STACK_FRAME_NON_STANDARD(lttng_filter_interpret_bytecode);
20
21 /*
22 * get_char should be called with page fault handler disabled if it is expected
23 * to handle user-space read.
24 */
25 static
26 char get_char(const struct estack_entry *reg, size_t offset)
27 {
28 if (unlikely(offset >= reg->u.s.seq_len))
29 return '\0';
30 if (reg->u.s.user) {
31 char c;
32
33 /* Handle invalid access as end of string. */
34 if (unlikely(!lttng_access_ok(VERIFY_READ,
35 reg->u.s.user_str + offset,
36 sizeof(c))))
37 return '\0';
38 /* Handle fault (nonzero return value) as end of string. */
39 if (unlikely(__copy_from_user_inatomic(&c,
40 reg->u.s.user_str + offset,
41 sizeof(c))))
42 return '\0';
43 return c;
44 } else {
45 return reg->u.s.str[offset];
46 }
47 }
48
49 /*
50 * -1: wildcard found.
51 * -2: unknown escape char.
52 * 0: normal char.
53 */
54 static
55 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
56 {
57 switch (*c) {
58 case '\\':
59 (*offset)++;
60 *c = get_char(reg, *offset);
61 switch (*c) {
62 case '\\':
63 case '*':
64 return 0;
65 default:
66 return -2;
67 }
68 case '*':
69 return -1;
70 default:
71 return 0;
72 }
73 }
74
75 static
76 char get_char_at_cb(size_t at, void *data)
77 {
78 return get_char(data, at);
79 }
80
81 static
82 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
83 {
84 bool has_user = false;
85 int result;
86 struct estack_entry *pattern_reg;
87 struct estack_entry *candidate_reg;
88
89 /* Disable the page fault handler when reading from userspace. */
90 if (estack_bx(stack, top)->u.s.user
91 || estack_ax(stack, top)->u.s.user) {
92 has_user = true;
93 pagefault_disable();
94 }
95
96 /* Find out which side is the pattern vs. the candidate. */
97 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
98 pattern_reg = estack_ax(stack, top);
99 candidate_reg = estack_bx(stack, top);
100 } else {
101 pattern_reg = estack_bx(stack, top);
102 candidate_reg = estack_ax(stack, top);
103 }
104
105 /* Perform the match operation. */
106 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
107 pattern_reg, get_char_at_cb, candidate_reg);
108 if (has_user)
109 pagefault_enable();
110
111 return result;
112 }
113
/*
 * strcmp-like comparison of the two string registers at the top of the
 * stack (bx vs. ax).  Returns <0, 0 or >0.  Plain string literals get
 * special treatment: a '*' wildcard makes the strings compare equal from
 * that point on, and backslash escapes are decoded via parse_char().
 * @cmp_type is not used here.
 */
static
int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
{
	size_t offset_bx = 0, offset_ax = 0;
	int diff, has_user = 0;

	/* Disable the page fault handler when reading from userspace. */
	if (estack_bx(stack, top)->u.s.user
			|| estack_ax(stack, top)->u.s.user) {
		has_user = 1;
		pagefault_disable();
	}

	for (;;) {
		int ret;
		/* Set when bx holds an unknown (-2) escape at this position. */
		int escaped_r0 = 0;
		char char_bx, char_ax;

		char_bx = get_char(estack_bx(stack, top), offset_bx);
		char_ax = get_char(estack_ax(stack, top), offset_ax);

		if (unlikely(char_bx == '\0')) {
			if (char_ax == '\0') {
				/* Both strings ended: equal. */
				diff = 0;
				break;
			} else {
				/*
				 * bx ended first; a '*' wildcard in a
				 * plain literal on the ax side still
				 * matches the empty remainder.
				 */
				if (estack_ax(stack, top)->u.s.literal_type ==
						ESTACK_STRING_LITERAL_TYPE_PLAIN) {
					ret = parse_char(estack_ax(stack, top),
						&char_ax, &offset_ax);
					if (ret == -1) {
						diff = 0;
						break;
					}
				}
				diff = -1;
				break;
			}
		}
		if (unlikely(char_ax == '\0')) {
			/* ax ended first; symmetric to the case above. */
			if (estack_bx(stack, top)->u.s.literal_type ==
					ESTACK_STRING_LITERAL_TYPE_PLAIN) {
				ret = parse_char(estack_bx(stack, top),
					&char_bx, &offset_bx);
				if (ret == -1) {
					diff = 0;
					break;
				}
			}
			diff = 1;
			break;
		}
		if (estack_bx(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_bx(stack, top),
				&char_bx, &offset_bx);
			if (ret == -1) {
				/* Wildcard on bx: equal. */
				diff = 0;
				break;
			} else if (ret == -2) {
				escaped_r0 = 1;
			}
			/* else compare both char */
		}
		if (estack_ax(stack, top)->u.s.literal_type ==
				ESTACK_STRING_LITERAL_TYPE_PLAIN) {
			ret = parse_char(estack_ax(stack, top),
				&char_ax, &offset_ax);
			if (ret == -1) {
				/* Wildcard on ax: equal. */
				diff = 0;
				break;
			} else if (ret == -2) {
				/*
				 * Unknown escape on ax only: order bx
				 * before ax.  If both sides carry an
				 * unknown escape, fall through and
				 * compare the raw characters.
				 */
				if (!escaped_r0) {
					diff = -1;
					break;
				}
			} else {
				/* Unknown escape on bx only: bx after ax. */
				if (escaped_r0) {
					diff = 1;
					break;
				}
			}
		} else {
			if (escaped_r0) {
				diff = 1;
				break;
			}
		}
		diff = char_bx - char_ax;
		if (diff != 0)
			break;
		offset_bx++;
		offset_ax++;
	}
	if (has_user)
		pagefault_enable();

	return diff;
}
212
/*
 * "False" filter stub: unconditionally evaluates to 0 (discard event).
 * Used in place of a real interpreter function; all parameters are
 * ignored.
 */
uint64_t lttng_filter_false(void *filter_data,
		struct lttng_probe_ctx *lttng_probe_ctx,
		const char *filter_stack_data)
{
	return 0;
}
219
220 #ifdef INTERPRETER_USE_SWITCH
221
222 /*
223 * Fallback for compilers that do not support taking address of labels.
224 */
225
226 #define START_OP \
227 start_pc = &bytecode->data[0]; \
228 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
229 pc = next_pc) { \
230 dbg_printk("Executing op %s (%u)\n", \
231 lttng_filter_print_op((unsigned int) *(filter_opcode_t *) pc), \
232 (unsigned int) *(filter_opcode_t *) pc); \
233 switch (*(filter_opcode_t *) pc) {
234
235 #define OP(name) case name
236
237 #define PO break
238
239 #define END_OP } \
240 }
241
#else

/*
 * Dispatch-table based interpreter.
 */

/*
 * Uses the GCC/Clang "labels as values" extension: each opcode handler
 * is a LABEL_* label, and dispatch[] (defined in the interpreter
 * function) maps opcode values to those label addresses.
 */
#define START_OP						\
	start_pc = &bytecode->code[0];				\
	pc = next_pc = start_pc;				\
	if (unlikely(pc - start_pc >= bytecode->len))		\
		goto end;					\
	goto *dispatch[*(filter_opcode_t *) pc];

/* One opcode handler == one computed-goto label. */
#define OP(name)						\
LABEL_##name

/* End of an opcode handler: jump straight to the next opcode. */
#define PO							\
		pc = next_pc;					\
		goto *dispatch[*(filter_opcode_t *) pc];

#define END_OP

#endif
265
/*
 * Load the context field at position @idx of the static context into
 * @ptr as a LOAD_OBJECT, fetching its current value through the
 * field's get_value() callback.
 *
 * Integer and enum fields are materialized into ptr->u.{s64,u64} with
 * ptr->ptr aliasing that storage, and the field's reverse-byte-order
 * flag is propagated to ptr->rev_bo.  Array/sequence fields are only
 * accepted when they are string-encoded integer elements; they and
 * string fields are exposed as OBJECT_TYPE_STRING.
 *
 * Returns 0 on success, -EINVAL for unsupported field types.
 */
static int context_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct load_ptr *ptr,
		uint32_t idx)
{

	struct lttng_ctx_field *ctx_field;
	struct lttng_event_field *field;
	union lttng_ctx_value v;

	ctx_field = &lttng_static_ctx->fields[idx];
	field = &ctx_field->event_field;
	ptr->type = LOAD_OBJECT;
	/* field is only used for types nested within variants. */
	ptr->field = NULL;

	switch (field->type.atype) {
	case atype_integer:
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		if (field->type.u.basic.integer.signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.s64;	/* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		ptr->rev_bo = field->type.u.basic.integer.reverse_byte_order;
		break;
	case atype_enum:
	{
		/* Enums are loaded through their integer container type. */
		const struct lttng_integer_type *itype =
			&field->type.u.basic.enumeration.container_type;

		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		if (itype->signedness) {
			ptr->object_type = OBJECT_TYPE_S64;
			ptr->u.s64 = v.s64;
			ptr->ptr = &ptr->u.s64;
		} else {
			ptr->object_type = OBJECT_TYPE_U64;
			ptr->u.u64 = v.s64;	/* Cast. */
			ptr->ptr = &ptr->u.u64;
		}
		ptr->rev_bo = itype->reverse_byte_order;
		break;
	}
	case atype_array:
		/* Only string-encoded integer arrays are supported. */
		if (field->type.u.array.elem_type.atype != atype_integer) {
			printk(KERN_WARNING "Array nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (field->type.u.array.elem_type.u.basic.integer.encoding == lttng_encode_none) {
			printk(KERN_WARNING "Only string arrays are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_sequence:
		/* Only string-encoded integer sequences are supported. */
		if (field->type.u.sequence.elem_type.atype != atype_integer) {
			printk(KERN_WARNING "Sequence nesting only supports integer types.\n");
			return -EINVAL;
		}
		if (field->type.u.sequence.elem_type.u.basic.integer.encoding == lttng_encode_none) {
			printk(KERN_WARNING "Only string sequences are supported for contexts.\n");
			return -EINVAL;
		}
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_array_bitfield:
		printk(KERN_WARNING "Bitfield array type is not supported.\n");
		return -EINVAL;
	case atype_sequence_bitfield:
		printk(KERN_WARNING "Bitfield sequence type is not supported.\n");
		return -EINVAL;
	case atype_string:
		ptr->object_type = OBJECT_TYPE_STRING;
		ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
		ptr->ptr = v.str;
		break;
	case atype_struct:
		printk(KERN_WARNING "Structure type cannot be loaded.\n");
		return -EINVAL;
	default:
		printk(KERN_WARNING "Unknown type: %d", (int) field->type.atype);
		return -EINVAL;
	}
	return 0;
}
359
/*
 * Apply a "get index" bytecode operation to the object currently on
 * @stack_top, using the filter_get_index_data record stored at
 * runtime->data[@index].
 *
 * For array/sequence objects the stack top is advanced to the selected
 * element (object type and rev_bo taken from the record's elem
 * description).  For context roots the lookup is delegated to
 * context_get_index().  For the payload root the record's offset is
 * applied to the payload pointer.
 *
 * Returns 0 on success, negative error value on error.
 */
static int dynamic_get_index(struct lttng_probe_ctx *lttng_probe_ctx,
		struct bytecode_runtime *runtime,
		uint64_t index, struct estack_entry *stack_top)
{
	int ret;
	const struct filter_get_index_data *gid;

	/*
	 * Types nested within variants need to perform dynamic lookup
	 * based on the field descriptions. LTTng-UST does not implement
	 * variants for now.
	 */
	if (stack_top->u.ptr.field)
		return -EINVAL;
	gid = (const struct filter_get_index_data *) &runtime->data[index];
	switch (stack_top->u.ptr.type) {
	case LOAD_OBJECT:
		switch (stack_top->u.ptr.object_type) {
		case OBJECT_TYPE_ARRAY:
		{
			const char *ptr;

			/* Array bound was validated at link time. */
			WARN_ON_ONCE(gid->offset >= gid->array_len);
			/* Skip count (unsigned long) */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			/* field is only used for types nested within variants. */
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_SEQUENCE:
		{
			const char *ptr;
			size_t ptr_seq_len;

			/* Sequence layout: element count, then data pointer. */
			ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
			ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
			/* Runtime bound check against the actual length. */
			if (gid->offset >= gid->elem.len * ptr_seq_len) {
				ret = -EINVAL;
				goto end;
			}
			ptr = ptr + gid->offset;
			stack_top->u.ptr.ptr = ptr;
			stack_top->u.ptr.object_type = gid->elem.type;
			stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
			/* field is only used for types nested within variants. */
			stack_top->u.ptr.field = NULL;
			break;
		}
		case OBJECT_TYPE_STRUCT:
			printk(KERN_WARNING "Nested structures are not supported yet.\n");
			ret = -EINVAL;
			goto end;
		case OBJECT_TYPE_VARIANT:
		default:
			printk(KERN_WARNING "Unexpected get index type %d",
				(int) stack_top->u.ptr.object_type);
			ret = -EINVAL;
			goto end;
		}
		break;
	case LOAD_ROOT_CONTEXT:
		lttng_fallthrough;
	case LOAD_ROOT_APP_CONTEXT:
	{
		ret = context_get_index(lttng_probe_ctx,
				&stack_top->u.ptr,
				gid->ctx_index);
		if (ret) {
			goto end;
		}
		break;
	}
	case LOAD_ROOT_PAYLOAD:
		stack_top->u.ptr.ptr += gid->offset;
		/* String payload fields hold a pointer, not inline data. */
		if (gid->elem.type == OBJECT_TYPE_STRING)
			stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
		stack_top->u.ptr.object_type = gid->elem.type;
		stack_top->u.ptr.type = LOAD_OBJECT;
		/* field is only used for types nested within variants. */
		stack_top->u.ptr.field = NULL;
		stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
		break;
	}
	return 0;

end:
	return ret;
}
452
453 static int dynamic_load_field(struct estack_entry *stack_top)
454 {
455 int ret;
456
457 switch (stack_top->u.ptr.type) {
458 case LOAD_OBJECT:
459 break;
460 case LOAD_ROOT_CONTEXT:
461 case LOAD_ROOT_APP_CONTEXT:
462 case LOAD_ROOT_PAYLOAD:
463 default:
464 dbg_printk("Filter warning: cannot load root, missing field name.\n");
465 ret = -EINVAL;
466 goto end;
467 }
468 switch (stack_top->u.ptr.object_type) {
469 case OBJECT_TYPE_S8:
470 dbg_printk("op load field s8\n");
471 stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
472 break;
473 case OBJECT_TYPE_S16:
474 {
475 int16_t tmp;
476
477 dbg_printk("op load field s16\n");
478 tmp = *(int16_t *) stack_top->u.ptr.ptr;
479 if (stack_top->u.ptr.rev_bo)
480 __swab16s(&tmp);
481 stack_top->u.v = tmp;
482 break;
483 }
484 case OBJECT_TYPE_S32:
485 {
486 int32_t tmp;
487
488 dbg_printk("op load field s32\n");
489 tmp = *(int32_t *) stack_top->u.ptr.ptr;
490 if (stack_top->u.ptr.rev_bo)
491 __swab32s(&tmp);
492 stack_top->u.v = tmp;
493 break;
494 }
495 case OBJECT_TYPE_S64:
496 {
497 int64_t tmp;
498
499 dbg_printk("op load field s64\n");
500 tmp = *(int64_t *) stack_top->u.ptr.ptr;
501 if (stack_top->u.ptr.rev_bo)
502 __swab64s(&tmp);
503 stack_top->u.v = tmp;
504 break;
505 }
506 case OBJECT_TYPE_U8:
507 dbg_printk("op load field u8\n");
508 stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
509 break;
510 case OBJECT_TYPE_U16:
511 {
512 uint16_t tmp;
513
514 dbg_printk("op load field s16\n");
515 tmp = *(uint16_t *) stack_top->u.ptr.ptr;
516 if (stack_top->u.ptr.rev_bo)
517 __swab16s(&tmp);
518 stack_top->u.v = tmp;
519 break;
520 }
521 case OBJECT_TYPE_U32:
522 {
523 uint32_t tmp;
524
525 dbg_printk("op load field u32\n");
526 tmp = *(uint32_t *) stack_top->u.ptr.ptr;
527 if (stack_top->u.ptr.rev_bo)
528 __swab32s(&tmp);
529 stack_top->u.v = tmp;
530 break;
531 }
532 case OBJECT_TYPE_U64:
533 {
534 uint64_t tmp;
535
536 dbg_printk("op load field u64\n");
537 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
538 if (stack_top->u.ptr.rev_bo)
539 __swab64s(&tmp);
540 stack_top->u.v = tmp;
541 break;
542 }
543 case OBJECT_TYPE_STRING:
544 {
545 const char *str;
546
547 dbg_printk("op load field string\n");
548 str = (const char *) stack_top->u.ptr.ptr;
549 stack_top->u.s.str = str;
550 if (unlikely(!stack_top->u.s.str)) {
551 dbg_printk("Filter warning: loading a NULL string.\n");
552 ret = -EINVAL;
553 goto end;
554 }
555 stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
556 stack_top->u.s.literal_type =
557 ESTACK_STRING_LITERAL_TYPE_NONE;
558 break;
559 }
560 case OBJECT_TYPE_STRING_SEQUENCE:
561 {
562 const char *ptr;
563
564 dbg_printk("op load field string sequence\n");
565 ptr = stack_top->u.ptr.ptr;
566 stack_top->u.s.seq_len = *(unsigned long *) ptr;
567 stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
568 if (unlikely(!stack_top->u.s.str)) {
569 dbg_printk("Filter warning: loading a NULL sequence.\n");
570 ret = -EINVAL;
571 goto end;
572 }
573 stack_top->u.s.literal_type =
574 ESTACK_STRING_LITERAL_TYPE_NONE;
575 break;
576 }
577 case OBJECT_TYPE_DYNAMIC:
578 /*
579 * Dynamic types in context are looked up
580 * by context get index.
581 */
582 ret = -EINVAL;
583 goto end;
584 case OBJECT_TYPE_DOUBLE:
585 ret = -EINVAL;
586 goto end;
587 case OBJECT_TYPE_SEQUENCE:
588 case OBJECT_TYPE_ARRAY:
589 case OBJECT_TYPE_STRUCT:
590 case OBJECT_TYPE_VARIANT:
591 printk(KERN_WARNING "Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
592 ret = -EINVAL;
593 goto end;
594 }
595 return 0;
596
597 end:
598 return ret;
599 }
600
#ifdef DEBUG

#define DBG_USER_STR_CUTOFF 32

/*
 * In debug mode, print user string (truncated to DBG_USER_STR_CUTOFF
 * bytes if necessary, with a "[...]" marker appended when truncated).
 */
static inline
void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
{
	char buf[DBG_USER_STR_CUTOFF];
	char c = '\0';
	size_t i;

	pagefault_disable();
	for (i = 0; i < sizeof(buf); i++) {
		c = get_char(user_str_reg, i);
		buf[i] = c;
		if (c == '\0')
			break;
	}
	pagefault_enable();

	/* Force termination in case the string filled the whole buffer. */
	buf[sizeof(buf) - 1] = '\0';
	dbg_printk("load field ref user string: '%s%s'\n", buf,
		c != '\0' ? "[...]" : "");
}
#else
static inline
void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
{
}
#endif
633
634 /*
635 * Return 0 (discard), or raise the 0x1 flag (log event).
636 * Currently, other flags are kept for future extensions and have no
637 * effect.
638 */
639 uint64_t lttng_filter_interpret_bytecode(void *filter_data,
640 struct lttng_probe_ctx *lttng_probe_ctx,
641 const char *filter_stack_data)
642 {
643 struct bytecode_runtime *bytecode = filter_data;
644 void *pc, *next_pc, *start_pc;
645 int ret = -EINVAL;
646 uint64_t retval = 0;
647 struct estack _stack;
648 struct estack *stack = &_stack;
649 register int64_t ax = 0, bx = 0;
650 register int top = FILTER_STACK_EMPTY;
651 #ifndef INTERPRETER_USE_SWITCH
652 static void *dispatch[NR_FILTER_OPS] = {
653 [ FILTER_OP_UNKNOWN ] = &&LABEL_FILTER_OP_UNKNOWN,
654
655 [ FILTER_OP_RETURN ] = &&LABEL_FILTER_OP_RETURN,
656
657 /* binary */
658 [ FILTER_OP_MUL ] = &&LABEL_FILTER_OP_MUL,
659 [ FILTER_OP_DIV ] = &&LABEL_FILTER_OP_DIV,
660 [ FILTER_OP_MOD ] = &&LABEL_FILTER_OP_MOD,
661 [ FILTER_OP_PLUS ] = &&LABEL_FILTER_OP_PLUS,
662 [ FILTER_OP_MINUS ] = &&LABEL_FILTER_OP_MINUS,
663 [ FILTER_OP_BIT_RSHIFT ] = &&LABEL_FILTER_OP_BIT_RSHIFT,
664 [ FILTER_OP_BIT_LSHIFT ] = &&LABEL_FILTER_OP_BIT_LSHIFT,
665 [ FILTER_OP_BIT_AND ] = &&LABEL_FILTER_OP_BIT_AND,
666 [ FILTER_OP_BIT_OR ] = &&LABEL_FILTER_OP_BIT_OR,
667 [ FILTER_OP_BIT_XOR ] = &&LABEL_FILTER_OP_BIT_XOR,
668
669 /* binary comparators */
670 [ FILTER_OP_EQ ] = &&LABEL_FILTER_OP_EQ,
671 [ FILTER_OP_NE ] = &&LABEL_FILTER_OP_NE,
672 [ FILTER_OP_GT ] = &&LABEL_FILTER_OP_GT,
673 [ FILTER_OP_LT ] = &&LABEL_FILTER_OP_LT,
674 [ FILTER_OP_GE ] = &&LABEL_FILTER_OP_GE,
675 [ FILTER_OP_LE ] = &&LABEL_FILTER_OP_LE,
676
677 /* string binary comparator */
678 [ FILTER_OP_EQ_STRING ] = &&LABEL_FILTER_OP_EQ_STRING,
679 [ FILTER_OP_NE_STRING ] = &&LABEL_FILTER_OP_NE_STRING,
680 [ FILTER_OP_GT_STRING ] = &&LABEL_FILTER_OP_GT_STRING,
681 [ FILTER_OP_LT_STRING ] = &&LABEL_FILTER_OP_LT_STRING,
682 [ FILTER_OP_GE_STRING ] = &&LABEL_FILTER_OP_GE_STRING,
683 [ FILTER_OP_LE_STRING ] = &&LABEL_FILTER_OP_LE_STRING,
684
685 /* globbing pattern binary comparator */
686 [ FILTER_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_EQ_STAR_GLOB_STRING,
687 [ FILTER_OP_NE_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_NE_STAR_GLOB_STRING,
688
689 /* s64 binary comparator */
690 [ FILTER_OP_EQ_S64 ] = &&LABEL_FILTER_OP_EQ_S64,
691 [ FILTER_OP_NE_S64 ] = &&LABEL_FILTER_OP_NE_S64,
692 [ FILTER_OP_GT_S64 ] = &&LABEL_FILTER_OP_GT_S64,
693 [ FILTER_OP_LT_S64 ] = &&LABEL_FILTER_OP_LT_S64,
694 [ FILTER_OP_GE_S64 ] = &&LABEL_FILTER_OP_GE_S64,
695 [ FILTER_OP_LE_S64 ] = &&LABEL_FILTER_OP_LE_S64,
696
697 /* double binary comparator */
698 [ FILTER_OP_EQ_DOUBLE ] = &&LABEL_FILTER_OP_EQ_DOUBLE,
699 [ FILTER_OP_NE_DOUBLE ] = &&LABEL_FILTER_OP_NE_DOUBLE,
700 [ FILTER_OP_GT_DOUBLE ] = &&LABEL_FILTER_OP_GT_DOUBLE,
701 [ FILTER_OP_LT_DOUBLE ] = &&LABEL_FILTER_OP_LT_DOUBLE,
702 [ FILTER_OP_GE_DOUBLE ] = &&LABEL_FILTER_OP_GE_DOUBLE,
703 [ FILTER_OP_LE_DOUBLE ] = &&LABEL_FILTER_OP_LE_DOUBLE,
704
705 /* Mixed S64-double binary comparators */
706 [ FILTER_OP_EQ_DOUBLE_S64 ] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64,
707 [ FILTER_OP_NE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_NE_DOUBLE_S64,
708 [ FILTER_OP_GT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GT_DOUBLE_S64,
709 [ FILTER_OP_LT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LT_DOUBLE_S64,
710 [ FILTER_OP_GE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GE_DOUBLE_S64,
711 [ FILTER_OP_LE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LE_DOUBLE_S64,
712
713 [ FILTER_OP_EQ_S64_DOUBLE ] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE,
714 [ FILTER_OP_NE_S64_DOUBLE ] = &&LABEL_FILTER_OP_NE_S64_DOUBLE,
715 [ FILTER_OP_GT_S64_DOUBLE ] = &&LABEL_FILTER_OP_GT_S64_DOUBLE,
716 [ FILTER_OP_LT_S64_DOUBLE ] = &&LABEL_FILTER_OP_LT_S64_DOUBLE,
717 [ FILTER_OP_GE_S64_DOUBLE ] = &&LABEL_FILTER_OP_GE_S64_DOUBLE,
718 [ FILTER_OP_LE_S64_DOUBLE ] = &&LABEL_FILTER_OP_LE_S64_DOUBLE,
719
720 /* unary */
721 [ FILTER_OP_UNARY_PLUS ] = &&LABEL_FILTER_OP_UNARY_PLUS,
722 [ FILTER_OP_UNARY_MINUS ] = &&LABEL_FILTER_OP_UNARY_MINUS,
723 [ FILTER_OP_UNARY_NOT ] = &&LABEL_FILTER_OP_UNARY_NOT,
724 [ FILTER_OP_UNARY_PLUS_S64 ] = &&LABEL_FILTER_OP_UNARY_PLUS_S64,
725 [ FILTER_OP_UNARY_MINUS_S64 ] = &&LABEL_FILTER_OP_UNARY_MINUS_S64,
726 [ FILTER_OP_UNARY_NOT_S64 ] = &&LABEL_FILTER_OP_UNARY_NOT_S64,
727 [ FILTER_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE,
728 [ FILTER_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE,
729 [ FILTER_OP_UNARY_NOT_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE,
730
731 /* logical */
732 [ FILTER_OP_AND ] = &&LABEL_FILTER_OP_AND,
733 [ FILTER_OP_OR ] = &&LABEL_FILTER_OP_OR,
734
735 /* load field ref */
736 [ FILTER_OP_LOAD_FIELD_REF ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF,
737 [ FILTER_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING,
738 [ FILTER_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE,
739 [ FILTER_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64,
740 [ FILTER_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE,
741
742 /* load from immediate operand */
743 [ FILTER_OP_LOAD_STRING ] = &&LABEL_FILTER_OP_LOAD_STRING,
744 [ FILTER_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_FILTER_OP_LOAD_STAR_GLOB_STRING,
745 [ FILTER_OP_LOAD_S64 ] = &&LABEL_FILTER_OP_LOAD_S64,
746 [ FILTER_OP_LOAD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_DOUBLE,
747
748 /* cast */
749 [ FILTER_OP_CAST_TO_S64 ] = &&LABEL_FILTER_OP_CAST_TO_S64,
750 [ FILTER_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64,
751 [ FILTER_OP_CAST_NOP ] = &&LABEL_FILTER_OP_CAST_NOP,
752
753 /* get context ref */
754 [ FILTER_OP_GET_CONTEXT_REF ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF,
755 [ FILTER_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING,
756 [ FILTER_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64,
757 [ FILTER_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE,
758
759 /* load userspace field ref */
760 [ FILTER_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_STRING,
761 [ FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE,
762
763 /* Instructions for recursive traversal through composed types. */
764 [ FILTER_OP_GET_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_CONTEXT_ROOT,
765 [ FILTER_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_FILTER_OP_GET_APP_CONTEXT_ROOT,
766 [ FILTER_OP_GET_PAYLOAD_ROOT ] = &&LABEL_FILTER_OP_GET_PAYLOAD_ROOT,
767
768 [ FILTER_OP_GET_SYMBOL ] = &&LABEL_FILTER_OP_GET_SYMBOL,
769 [ FILTER_OP_GET_SYMBOL_FIELD ] = &&LABEL_FILTER_OP_GET_SYMBOL_FIELD,
770 [ FILTER_OP_GET_INDEX_U16 ] = &&LABEL_FILTER_OP_GET_INDEX_U16,
771 [ FILTER_OP_GET_INDEX_U64 ] = &&LABEL_FILTER_OP_GET_INDEX_U64,
772
773 [ FILTER_OP_LOAD_FIELD ] = &&LABEL_FILTER_OP_LOAD_FIELD,
774 [ FILTER_OP_LOAD_FIELD_S8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S8,
775 [ FILTER_OP_LOAD_FIELD_S16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S16,
776 [ FILTER_OP_LOAD_FIELD_S32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S32,
777 [ FILTER_OP_LOAD_FIELD_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_S64,
778 [ FILTER_OP_LOAD_FIELD_U8 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U8,
779 [ FILTER_OP_LOAD_FIELD_U16 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U16,
780 [ FILTER_OP_LOAD_FIELD_U32 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U32,
781 [ FILTER_OP_LOAD_FIELD_U64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_U64,
782 [ FILTER_OP_LOAD_FIELD_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_STRING,
783 [ FILTER_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_SEQUENCE,
784 [ FILTER_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_DOUBLE,
785
786 [ FILTER_OP_UNARY_BIT_NOT ] = &&LABEL_FILTER_OP_UNARY_BIT_NOT,
787
788 [ FILTER_OP_RETURN_S64 ] = &&LABEL_FILTER_OP_RETURN_S64,
789 };
790 #endif /* #ifndef INTERPRETER_USE_SWITCH */
791
792 START_OP
793
794 OP(FILTER_OP_UNKNOWN):
795 OP(FILTER_OP_LOAD_FIELD_REF):
796 OP(FILTER_OP_GET_CONTEXT_REF):
797 #ifdef INTERPRETER_USE_SWITCH
798 default:
799 #endif /* INTERPRETER_USE_SWITCH */
800 printk(KERN_WARNING "unknown bytecode op %u\n",
801 (unsigned int) *(filter_opcode_t *) pc);
802 ret = -EINVAL;
803 goto end;
804
805 OP(FILTER_OP_RETURN):
806 OP(FILTER_OP_RETURN_S64):
807 /* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
808 retval = !!estack_ax_v;
809 ret = 0;
810 goto end;
811
812 /* binary */
813 OP(FILTER_OP_MUL):
814 OP(FILTER_OP_DIV):
815 OP(FILTER_OP_MOD):
816 OP(FILTER_OP_PLUS):
817 OP(FILTER_OP_MINUS):
818 printk(KERN_WARNING "unsupported bytecode op %u\n",
819 (unsigned int) *(filter_opcode_t *) pc);
820 ret = -EINVAL;
821 goto end;
822
823 OP(FILTER_OP_EQ):
824 OP(FILTER_OP_NE):
825 OP(FILTER_OP_GT):
826 OP(FILTER_OP_LT):
827 OP(FILTER_OP_GE):
828 OP(FILTER_OP_LE):
829 printk(KERN_WARNING "unsupported non-specialized bytecode op %u\n",
830 (unsigned int) *(filter_opcode_t *) pc);
831 ret = -EINVAL;
832 goto end;
833
834 OP(FILTER_OP_EQ_STRING):
835 {
836 int res;
837
838 res = (stack_strcmp(stack, top, "==") == 0);
839 estack_pop(stack, top, ax, bx);
840 estack_ax_v = res;
841 next_pc += sizeof(struct binary_op);
842 PO;
843 }
844 OP(FILTER_OP_NE_STRING):
845 {
846 int res;
847
848 res = (stack_strcmp(stack, top, "!=") != 0);
849 estack_pop(stack, top, ax, bx);
850 estack_ax_v = res;
851 next_pc += sizeof(struct binary_op);
852 PO;
853 }
854 OP(FILTER_OP_GT_STRING):
855 {
856 int res;
857
858 res = (stack_strcmp(stack, top, ">") > 0);
859 estack_pop(stack, top, ax, bx);
860 estack_ax_v = res;
861 next_pc += sizeof(struct binary_op);
862 PO;
863 }
864 OP(FILTER_OP_LT_STRING):
865 {
866 int res;
867
868 res = (stack_strcmp(stack, top, "<") < 0);
869 estack_pop(stack, top, ax, bx);
870 estack_ax_v = res;
871 next_pc += sizeof(struct binary_op);
872 PO;
873 }
874 OP(FILTER_OP_GE_STRING):
875 {
876 int res;
877
878 res = (stack_strcmp(stack, top, ">=") >= 0);
879 estack_pop(stack, top, ax, bx);
880 estack_ax_v = res;
881 next_pc += sizeof(struct binary_op);
882 PO;
883 }
884 OP(FILTER_OP_LE_STRING):
885 {
886 int res;
887
888 res = (stack_strcmp(stack, top, "<=") <= 0);
889 estack_pop(stack, top, ax, bx);
890 estack_ax_v = res;
891 next_pc += sizeof(struct binary_op);
892 PO;
893 }
894
895 OP(FILTER_OP_EQ_STAR_GLOB_STRING):
896 {
897 int res;
898
899 res = (stack_star_glob_match(stack, top, "==") == 0);
900 estack_pop(stack, top, ax, bx);
901 estack_ax_v = res;
902 next_pc += sizeof(struct binary_op);
903 PO;
904 }
905 OP(FILTER_OP_NE_STAR_GLOB_STRING):
906 {
907 int res;
908
909 res = (stack_star_glob_match(stack, top, "!=") != 0);
910 estack_pop(stack, top, ax, bx);
911 estack_ax_v = res;
912 next_pc += sizeof(struct binary_op);
913 PO;
914 }
915
916 OP(FILTER_OP_EQ_S64):
917 {
918 int res;
919
920 res = (estack_bx_v == estack_ax_v);
921 estack_pop(stack, top, ax, bx);
922 estack_ax_v = res;
923 next_pc += sizeof(struct binary_op);
924 PO;
925 }
926 OP(FILTER_OP_NE_S64):
927 {
928 int res;
929
930 res = (estack_bx_v != estack_ax_v);
931 estack_pop(stack, top, ax, bx);
932 estack_ax_v = res;
933 next_pc += sizeof(struct binary_op);
934 PO;
935 }
936 OP(FILTER_OP_GT_S64):
937 {
938 int res;
939
940 res = (estack_bx_v > estack_ax_v);
941 estack_pop(stack, top, ax, bx);
942 estack_ax_v = res;
943 next_pc += sizeof(struct binary_op);
944 PO;
945 }
946 OP(FILTER_OP_LT_S64):
947 {
948 int res;
949
950 res = (estack_bx_v < estack_ax_v);
951 estack_pop(stack, top, ax, bx);
952 estack_ax_v = res;
953 next_pc += sizeof(struct binary_op);
954 PO;
955 }
956 OP(FILTER_OP_GE_S64):
957 {
958 int res;
959
960 res = (estack_bx_v >= estack_ax_v);
961 estack_pop(stack, top, ax, bx);
962 estack_ax_v = res;
963 next_pc += sizeof(struct binary_op);
964 PO;
965 }
966 OP(FILTER_OP_LE_S64):
967 {
968 int res;
969
970 res = (estack_bx_v <= estack_ax_v);
971 estack_pop(stack, top, ax, bx);
972 estack_ax_v = res;
973 next_pc += sizeof(struct binary_op);
974 PO;
975 }
976
977 OP(FILTER_OP_EQ_DOUBLE):
978 OP(FILTER_OP_NE_DOUBLE):
979 OP(FILTER_OP_GT_DOUBLE):
980 OP(FILTER_OP_LT_DOUBLE):
981 OP(FILTER_OP_GE_DOUBLE):
982 OP(FILTER_OP_LE_DOUBLE):
983 {
984 BUG_ON(1);
985 PO;
986 }
987
988 /* Mixed S64-double binary comparators */
989 OP(FILTER_OP_EQ_DOUBLE_S64):
990 OP(FILTER_OP_NE_DOUBLE_S64):
991 OP(FILTER_OP_GT_DOUBLE_S64):
992 OP(FILTER_OP_LT_DOUBLE_S64):
993 OP(FILTER_OP_GE_DOUBLE_S64):
994 OP(FILTER_OP_LE_DOUBLE_S64):
995 OP(FILTER_OP_EQ_S64_DOUBLE):
996 OP(FILTER_OP_NE_S64_DOUBLE):
997 OP(FILTER_OP_GT_S64_DOUBLE):
998 OP(FILTER_OP_LT_S64_DOUBLE):
999 OP(FILTER_OP_GE_S64_DOUBLE):
1000 OP(FILTER_OP_LE_S64_DOUBLE):
1001 {
1002 BUG_ON(1);
1003 PO;
1004 }
1005 OP(FILTER_OP_BIT_RSHIFT):
1006 {
1007 int64_t res;
1008
1009 /* Catch undefined behavior. */
1010 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1011 ret = -EINVAL;
1012 goto end;
1013 }
1014 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1015 estack_pop(stack, top, ax, bx);
1016 estack_ax_v = res;
1017 next_pc += sizeof(struct binary_op);
1018 PO;
1019 }
1020 OP(FILTER_OP_BIT_LSHIFT):
1021 {
1022 int64_t res;
1023
1024 /* Catch undefined behavior. */
1025 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1026 ret = -EINVAL;
1027 goto end;
1028 }
1029 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1030 estack_pop(stack, top, ax, bx);
1031 estack_ax_v = res;
1032 next_pc += sizeof(struct binary_op);
1033 PO;
1034 }
1035 OP(FILTER_OP_BIT_AND):
1036 {
1037 int64_t res;
1038
1039 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1040 estack_pop(stack, top, ax, bx);
1041 estack_ax_v = res;
1042 next_pc += sizeof(struct binary_op);
1043 PO;
1044 }
1045 OP(FILTER_OP_BIT_OR):
1046 {
1047 int64_t res;
1048
1049 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1050 estack_pop(stack, top, ax, bx);
1051 estack_ax_v = res;
1052 next_pc += sizeof(struct binary_op);
1053 PO;
1054 }
1055 OP(FILTER_OP_BIT_XOR):
1056 {
1057 int64_t res;
1058
1059 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1060 estack_pop(stack, top, ax, bx);
1061 estack_ax_v = res;
1062 next_pc += sizeof(struct binary_op);
1063 PO;
1064 }
1065
1066 /* unary */
1067 OP(FILTER_OP_UNARY_PLUS):
1068 OP(FILTER_OP_UNARY_MINUS):
1069 OP(FILTER_OP_UNARY_NOT):
1070 printk(KERN_WARNING "unsupported non-specialized bytecode op %u\n",
1071 (unsigned int) *(filter_opcode_t *) pc);
1072 ret = -EINVAL;
1073 goto end;
1074
1075
1076 OP(FILTER_OP_UNARY_BIT_NOT):
1077 {
1078 estack_ax_v = ~(uint64_t) estack_ax_v;
1079 next_pc += sizeof(struct unary_op);
1080 PO;
1081 }
1082
1083 OP(FILTER_OP_UNARY_PLUS_S64):
1084 {
1085 next_pc += sizeof(struct unary_op);
1086 PO;
1087 }
1088 OP(FILTER_OP_UNARY_MINUS_S64):
1089 {
1090 estack_ax_v = -estack_ax_v;
1091 next_pc += sizeof(struct unary_op);
1092 PO;
1093 }
1094 OP(FILTER_OP_UNARY_PLUS_DOUBLE):
1095 OP(FILTER_OP_UNARY_MINUS_DOUBLE):
1096 {
1097 BUG_ON(1);
1098 PO;
1099 }
1100 OP(FILTER_OP_UNARY_NOT_S64):
1101 {
1102 estack_ax_v = !estack_ax_v;
1103 next_pc += sizeof(struct unary_op);
1104 PO;
1105 }
1106 OP(FILTER_OP_UNARY_NOT_DOUBLE):
1107 {
1108 BUG_ON(1);
1109 PO;
1110 }
1111
/* logical */

/*
 * Short-circuit logical operators. The jump target (skip_offset) was
 * computed at bytecode generation time. When the jump is taken, ax is
 * left holding the short-circuit result and the second operand is never
 * evaluated; when it is not taken, the current value is popped so the
 * next expression's result lands in ax.
 */
OP(FILTER_OP_AND):
{
	struct logical_op *insn = (struct logical_op *) pc;

	/* If AX is 0, skip and evaluate to 0 */
	if (unlikely(estack_ax_v == 0)) {
		dbg_printk("Jumping to bytecode offset %u\n",
			(unsigned int) insn->skip_offset);
		next_pc = start_pc + insn->skip_offset;
	} else {
		/* Pop 1 when jump not taken */
		estack_pop(stack, top, ax, bx);
		next_pc += sizeof(struct logical_op);
	}
	PO;
}
OP(FILTER_OP_OR):
{
	struct logical_op *insn = (struct logical_op *) pc;

	/* If AX is nonzero, skip and evaluate to 1 */

	if (unlikely(estack_ax_v != 0)) {
		/* Normalize any nonzero value to boolean 1 before skipping. */
		estack_ax_v = 1;
		dbg_printk("Jumping to bytecode offset %u\n",
			(unsigned int) insn->skip_offset);
		next_pc = start_pc + insn->skip_offset;
	} else {
		/* Pop 1 when jump not taken */
		estack_pop(stack, top, ax, bx);
		next_pc += sizeof(struct logical_op);
	}
	PO;
}
1147
1148
/* load field ref */

/*
 * Field-ref loads read event payload data out of filter_stack_data at a
 * byte offset (ref->offset) resolved when the bytecode was linked to the
 * event. The loaded value is pushed onto the estack.
 */
OP(FILTER_OP_LOAD_FIELD_REF_STRING):
{
	struct load_op *insn = (struct load_op *) pc;
	struct field_ref *ref = (struct field_ref *) insn->data;

	dbg_printk("load field ref offset %u type string\n",
		ref->offset);
	estack_push(stack, top, ax, bx);
	/* The payload slot holds a pointer to a NUL-terminated string. */
	estack_ax(stack, top)->u.s.str =
		*(const char * const *) &filter_stack_data[ref->offset];
	if (unlikely(!estack_ax(stack, top)->u.s.str)) {
		dbg_printk("Filter warning: loading a NULL string.\n");
		ret = -EINVAL;
		goto end;
	}
	/* LTTNG_SIZE_MAX: length unknown, rely on NUL terminator. */
	estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
	estack_ax(stack, top)->u.s.literal_type =
		ESTACK_STRING_LITERAL_TYPE_NONE;
	/* user = 0: string lives in kernel memory. */
	estack_ax(stack, top)->u.s.user = 0;
	dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
	next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
	PO;
}

OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE):
{
	struct load_op *insn = (struct load_op *) pc;
	struct field_ref *ref = (struct field_ref *) insn->data;

	dbg_printk("load field ref offset %u type sequence\n",
		ref->offset);
	estack_push(stack, top, ax, bx);
	/* Sequence layout in the stack data: length word, then pointer. */
	estack_ax(stack, top)->u.s.seq_len =
		*(unsigned long *) &filter_stack_data[ref->offset];
	estack_ax(stack, top)->u.s.str =
		*(const char **) (&filter_stack_data[ref->offset
						+ sizeof(unsigned long)]);
	if (unlikely(!estack_ax(stack, top)->u.s.str)) {
		dbg_printk("Filter warning: loading a NULL sequence.\n");
		ret = -EINVAL;
		goto end;
	}
	estack_ax(stack, top)->u.s.literal_type =
		ESTACK_STRING_LITERAL_TYPE_NONE;
	estack_ax(stack, top)->u.s.user = 0;
	next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
	PO;
}

OP(FILTER_OP_LOAD_FIELD_REF_S64):
{
	struct load_op *insn = (struct load_op *) pc;
	struct field_ref *ref = (struct field_ref *) insn->data;

	dbg_printk("load field ref offset %u type s64\n",
		ref->offset);
	estack_push(stack, top, ax, bx);
	estack_ax_v =
		((struct literal_numeric *) &filter_stack_data[ref->offset])->v;
	dbg_printk("ref load s64 %lld\n",
		(long long) estack_ax_v);
	next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
	PO;
}

/* Floating point unsupported in the kernel interpreter. */
OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE):
{
	BUG_ON(1);
	PO;
}
1220
/* load from immediate operand */

/*
 * Immediate loads push constants embedded directly in the bytecode
 * stream (insn->data) onto the estack. String immediates are stored
 * inline and NUL-terminated, hence the strlen()+1 program-counter
 * advance.
 */
OP(FILTER_OP_LOAD_STRING):
{
	struct load_op *insn = (struct load_op *) pc;

	dbg_printk("load string %s\n", insn->data);
	estack_push(stack, top, ax, bx);
	estack_ax(stack, top)->u.s.str = insn->data;
	estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
	/* PLAIN literal: compared byte-for-byte, no glob expansion. */
	estack_ax(stack, top)->u.s.literal_type =
		ESTACK_STRING_LITERAL_TYPE_PLAIN;
	estack_ax(stack, top)->u.s.user = 0;
	next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
	PO;
}

OP(FILTER_OP_LOAD_STAR_GLOB_STRING):
{
	struct load_op *insn = (struct load_op *) pc;

	dbg_printk("load globbing pattern %s\n", insn->data);
	estack_push(stack, top, ax, bx);
	estack_ax(stack, top)->u.s.str = insn->data;
	estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
	/* STAR_GLOB literal: '*' wildcards are honored on comparison. */
	estack_ax(stack, top)->u.s.literal_type =
		ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
	estack_ax(stack, top)->u.s.user = 0;
	next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
	PO;
}

OP(FILTER_OP_LOAD_S64):
{
	struct load_op *insn = (struct load_op *) pc;

	estack_push(stack, top, ax, bx);
	estack_ax_v = ((struct literal_numeric *) insn->data)->v;
	dbg_printk("load s64 %lld\n",
		(long long) estack_ax_v);
	next_pc += sizeof(struct load_op)
			+ sizeof(struct literal_numeric);
	PO;
}

/* Floating point unsupported in the kernel interpreter. */
OP(FILTER_OP_LOAD_DOUBLE):
{
	BUG_ON(1);
	PO;
}
1270
/* cast */

/*
 * The generic cast must have been specialized away (to CAST_NOP or a
 * typed cast) before interpretation; hitting it here is a bytecode error.
 */
OP(FILTER_OP_CAST_TO_S64):
	printk(KERN_WARNING "unsupported non-specialized bytecode op %u\n",
		(unsigned int) *(filter_opcode_t *) pc);
	ret = -EINVAL;
	goto end;

/* Floating point unsupported in the kernel interpreter. */
OP(FILTER_OP_CAST_DOUBLE_TO_S64):
{
	BUG_ON(1);
	PO;
}

OP(FILTER_OP_CAST_NOP):
{
	/* Cast that requires no representation change; just advance. */
	next_pc += sizeof(struct cast_op);
	PO;
}
1289
/* get context ref */

/*
 * Context refs fetch a value from a statically-registered context field
 * (lttng_static_ctx) via its get_value() callback, evaluated at filter
 * run time against the current probe context, then push it.
 */
OP(FILTER_OP_GET_CONTEXT_REF_STRING):
{
	struct load_op *insn = (struct load_op *) pc;
	struct field_ref *ref = (struct field_ref *) insn->data;
	struct lttng_ctx_field *ctx_field;
	union lttng_ctx_value v;

	dbg_printk("get context ref offset %u type string\n",
		ref->offset);
	/* ref->offset indexes the static context field array here. */
	ctx_field = &lttng_static_ctx->fields[ref->offset];
	ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
	estack_push(stack, top, ax, bx);
	estack_ax(stack, top)->u.s.str = v.str;
	if (unlikely(!estack_ax(stack, top)->u.s.str)) {
		dbg_printk("Filter warning: loading a NULL string.\n");
		ret = -EINVAL;
		goto end;
	}
	estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
	estack_ax(stack, top)->u.s.literal_type =
		ESTACK_STRING_LITERAL_TYPE_NONE;
	/* Context strings live in kernel memory. */
	estack_ax(stack, top)->u.s.user = 0;
	dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
	next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
	PO;
}

OP(FILTER_OP_GET_CONTEXT_REF_S64):
{
	struct load_op *insn = (struct load_op *) pc;
	struct field_ref *ref = (struct field_ref *) insn->data;
	struct lttng_ctx_field *ctx_field;
	union lttng_ctx_value v;

	dbg_printk("get context ref offset %u type s64\n",
		ref->offset);
	ctx_field = &lttng_static_ctx->fields[ref->offset];
	ctx_field->get_value(ctx_field, lttng_probe_ctx, &v);
	estack_push(stack, top, ax, bx);
	estack_ax_v = v.s64;
	dbg_printk("ref get context s64 %lld\n",
		(long long) estack_ax_v);
	next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
	PO;
}

/* Floating point unsupported in the kernel interpreter. */
OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE):
{
	BUG_ON(1);
	PO;
}
1342
/* load userspace field ref */

/*
 * Same layout as the kernel field refs above, but the pointed-to bytes
 * live in user memory: u.s.user is set to 1 so string comparison goes
 * through get_char()'s guarded __copy_from_user_inatomic() path (with
 * page faults disabled) instead of direct dereference.
 */
OP(FILTER_OP_LOAD_FIELD_REF_USER_STRING):
{
	struct load_op *insn = (struct load_op *) pc;
	struct field_ref *ref = (struct field_ref *) insn->data;

	dbg_printk("load field ref offset %u type user string\n",
		ref->offset);
	estack_push(stack, top, ax, bx);
	estack_ax(stack, top)->u.s.user_str =
		*(const char * const *) &filter_stack_data[ref->offset];
	if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
		dbg_printk("Filter warning: loading a NULL string.\n");
		ret = -EINVAL;
		goto end;
	}
	estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
	estack_ax(stack, top)->u.s.literal_type =
		ESTACK_STRING_LITERAL_TYPE_NONE;
	estack_ax(stack, top)->u.s.user = 1;
	dbg_load_ref_user_str_printk(estack_ax(stack, top));
	next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
	PO;
}

OP(FILTER_OP_LOAD_FIELD_REF_USER_SEQUENCE):
{
	struct load_op *insn = (struct load_op *) pc;
	struct field_ref *ref = (struct field_ref *) insn->data;

	dbg_printk("load field ref offset %u type user sequence\n",
		ref->offset);
	estack_push(stack, top, ax, bx);
	/* Length word first, then the user-space pointer. */
	estack_ax(stack, top)->u.s.seq_len =
		*(unsigned long *) &filter_stack_data[ref->offset];
	estack_ax(stack, top)->u.s.user_str =
		*(const char **) (&filter_stack_data[ref->offset
						+ sizeof(unsigned long)]);
	if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
		dbg_printk("Filter warning: loading a NULL sequence.\n");
		ret = -EINVAL;
		goto end;
	}
	estack_ax(stack, top)->u.s.literal_type =
		ESTACK_STRING_LITERAL_TYPE_NONE;
	estack_ax(stack, top)->u.s.user = 1;
	next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
	PO;
}
1392
/*
 * Root pointers start a field-traversal chain (followed by GET_SYMBOL /
 * GET_INDEX / LOAD_FIELD ops).
 */
OP(FILTER_OP_GET_CONTEXT_ROOT):
{
	dbg_printk("op get context root\n");
	estack_push(stack, top, ax, bx);
	estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
	/* "field" only needed for variants. */
	estack_ax(stack, top)->u.ptr.field = NULL;
	/*
	 * NOTE(review): unlike PAYLOAD_ROOT below, u.ptr.ptr is not set
	 * here — presumably resolved during specialization before any
	 * dereference; verify against the specializer.
	 */
	next_pc += sizeof(struct load_op);
	PO;
}

/* App contexts are a user-space tracer concept; not supported here. */
OP(FILTER_OP_GET_APP_CONTEXT_ROOT):
{
	BUG_ON(1);
	PO;
}

OP(FILTER_OP_GET_PAYLOAD_ROOT):
{
	dbg_printk("op get app payload root\n");
	estack_push(stack, top, ax, bx);
	estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
	/* Payload traversal starts at the captured event stack data. */
	estack_ax(stack, top)->u.ptr.ptr = filter_stack_data;
	/* "field" only needed for variants. */
	estack_ax(stack, top)->u.ptr.field = NULL;
	next_pc += sizeof(struct load_op);
	PO;
}
1421
/*
 * Symbol (by-name) lookups are resolved at specialization time into
 * index-based accesses, so reaching either op at interpretation time is
 * an error for every pointer type.
 */
OP(FILTER_OP_GET_SYMBOL):
{
	dbg_printk("op get symbol\n");
	switch (estack_ax(stack, top)->u.ptr.type) {
	case LOAD_OBJECT:
		printk(KERN_WARNING "Nested fields not implemented yet.\n");
		ret = -EINVAL;
		goto end;
	case LOAD_ROOT_CONTEXT:
	case LOAD_ROOT_APP_CONTEXT:
	case LOAD_ROOT_PAYLOAD:
		/*
		 * symbol lookup is performed by
		 * specialization.
		 */
		ret = -EINVAL;
		goto end;
	}
	/* Unreachable at runtime given the switch above always exits. */
	next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
	PO;
}

OP(FILTER_OP_GET_SYMBOL_FIELD):
{
	/*
	 * Used for first variant encountered in a
	 * traversal. Variants are not implemented yet.
	 */
	ret = -EINVAL;
	goto end;
}
1453
/*
 * Index-based traversal steps. dynamic_get_index() advances the
 * object pointer in estack_ax by the given element index;
 * dynamic_load_field() dereferences it according to the runtime field
 * type. Both update estack_ax in place; u.v is mirrored into the
 * estack_ax_v accumulator afterwards.
 */
OP(FILTER_OP_GET_INDEX_U16):
{
	struct load_op *insn = (struct load_op *) pc;
	struct get_index_u16 *index = (struct get_index_u16 *) insn->data;

	dbg_printk("op get index u16\n");
	ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
	if (ret)
		goto end;
	estack_ax_v = estack_ax(stack, top)->u.v;
	next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
	PO;
}

OP(FILTER_OP_GET_INDEX_U64):
{
	struct load_op *insn = (struct load_op *) pc;
	struct get_index_u64 *index = (struct get_index_u64 *) insn->data;

	dbg_printk("op get index u64\n");
	ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
	if (ret)
		goto end;
	estack_ax_v = estack_ax(stack, top)->u.v;
	next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
	PO;
}

/* Generic (runtime-typed) field load, for types unknown at specialization. */
OP(FILTER_OP_LOAD_FIELD):
{
	dbg_printk("op load field\n");
	ret = dynamic_load_field(estack_ax(stack, top));
	if (ret)
		goto end;
	estack_ax_v = estack_ax(stack, top)->u.v;
	next_pc += sizeof(struct load_op);
	PO;
}
1492
1493 OP(FILTER_OP_LOAD_FIELD_S8):
1494 {
1495 dbg_printk("op load field s8\n");
1496
1497 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1498 next_pc += sizeof(struct load_op);
1499 PO;
1500 }
1501 OP(FILTER_OP_LOAD_FIELD_S16):
1502 {
1503 dbg_printk("op load field s16\n");
1504
1505 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1506 next_pc += sizeof(struct load_op);
1507 PO;
1508 }
1509 OP(FILTER_OP_LOAD_FIELD_S32):
1510 {
1511 dbg_printk("op load field s32\n");
1512
1513 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1514 next_pc += sizeof(struct load_op);
1515 PO;
1516 }
1517 OP(FILTER_OP_LOAD_FIELD_S64):
1518 {
1519 dbg_printk("op load field s64\n");
1520
1521 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1522 next_pc += sizeof(struct load_op);
1523 PO;
1524 }
1525 OP(FILTER_OP_LOAD_FIELD_U8):
1526 {
1527 dbg_printk("op load field u8\n");
1528
1529 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1530 next_pc += sizeof(struct load_op);
1531 PO;
1532 }
1533 OP(FILTER_OP_LOAD_FIELD_U16):
1534 {
1535 dbg_printk("op load field u16\n");
1536
1537 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1538 next_pc += sizeof(struct load_op);
1539 PO;
1540 }
1541 OP(FILTER_OP_LOAD_FIELD_U32):
1542 {
1543 dbg_printk("op load field u32\n");
1544
1545 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1546 next_pc += sizeof(struct load_op);
1547 PO;
1548 }
1549 OP(FILTER_OP_LOAD_FIELD_U64):
1550 {
1551 dbg_printk("op load field u64\n");
1552
1553 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1554 next_pc += sizeof(struct load_op);
1555 PO;
1556 }
1557 OP(FILTER_OP_LOAD_FIELD_DOUBLE):
1558 {
1559 ret = -EINVAL;
1560 goto end;
1561 }
1562
/*
 * String/sequence loads at the end of a payload traversal: convert the
 * object pointer in u.ptr into the estack string representation used by
 * the comparison ops.
 *
 * NOTE(review): unlike the field-ref loaders above, u.s.user is not
 * (re)initialized here — presumably the traversal path guarantees a
 * kernel-space pointer or sets the flag elsewhere; verify.
 */
OP(FILTER_OP_LOAD_FIELD_STRING):
{
	const char *str;

	dbg_printk("op load field string\n");
	str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
	estack_ax(stack, top)->u.s.str = str;
	if (unlikely(!estack_ax(stack, top)->u.s.str)) {
		dbg_printk("Filter warning: loading a NULL string.\n");
		ret = -EINVAL;
		goto end;
	}
	/* Length unknown: rely on NUL terminator. */
	estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
	estack_ax(stack, top)->u.s.literal_type =
		ESTACK_STRING_LITERAL_TYPE_NONE;
	next_pc += sizeof(struct load_op);
	PO;
}

OP(FILTER_OP_LOAD_FIELD_SEQUENCE):
{
	const char *ptr;

	dbg_printk("op load field string sequence\n");
	ptr = estack_ax(stack, top)->u.ptr.ptr;
	/* Sequence object layout: length word followed by data pointer. */
	estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
	estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
	if (unlikely(!estack_ax(stack, top)->u.s.str)) {
		dbg_printk("Filter warning: loading a NULL sequence.\n");
		ret = -EINVAL;
		goto end;
	}
	estack_ax(stack, top)->u.s.literal_type =
		ESTACK_STRING_LITERAL_TYPE_NONE;
	next_pc += sizeof(struct load_op);
	PO;
}
1600
1601 END_OP
1602 end:
1603 /* return 0 (discard) on error */
1604 if (ret)
1605 return 0;
1606 return retval;
1607 }
1608
1609 #undef START_OP
1610 #undef OP
1611 #undef PO
1612 #undef END_OP
/* This page took 0.110168 seconds and 4 git commands to generate.
 * (cgit footer residue from web extraction — not part of the source file.) */