Rename "tsc" to "timestamp"
[lttng-modules.git] / src / lttng-bytecode-interpreter.c
1 /* SPDX-License-Identifier: MIT
2 *
3 * lttng-bytecode-interpreter.c
4 *
5 * LTTng modules bytecode interpreter.
6 *
7 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
8 */
9
10 #include <wrapper/compiler_attributes.h>
11 #include <wrapper/uaccess.h>
12 #include <wrapper/objtool.h>
13 #include <wrapper/types.h>
14 #include <linux/swab.h>
15
16 #include <lttng/lttng-bytecode.h>
17 #include <lttng/string-utils.h>
18 #include <lttng/events-internal.h>
19 #include <lttng/probe-user.h>
20
21 /*
22 * get_char() should be called with the page fault handler disabled if it is
23 * expected to handle a user-space read.
24 */
25 static
26 char get_char(const struct estack_entry *reg, size_t offset)
27 {
28 if (unlikely(offset >= reg->u.s.seq_len))
29 return '\0';
30 if (reg->u.s.user) {
31 char c;
32
33 /* Handle invalid access as end of string. */
34 if (unlikely(!lttng_access_ok(VERIFY_READ,
35 reg->u.s.user_str + offset,
36 sizeof(c))))
37 return '\0';
38 /* Handle fault (nonzero return value) as end of string. */
39 if (unlikely(__copy_from_user_inatomic(&c,
40 reg->u.s.user_str + offset,
41 sizeof(c))))
42 return '\0';
43 return c;
44 } else {
45 return reg->u.s.str[offset];
46 }
47 }
48
49 /*
50 * -1: wildcard found.
51 * -2: unknown escape char.
52 * 0: normal char.
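*
* Example: in a plain string literal such as "a\*b", the escaped '*'
* parses as a normal char (0), whereas a bare '*' parses as a wildcard
* (-1); any other escaped character yields -2.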
53 */
54 static
55 int parse_char(struct estack_entry *reg, char *c, size_t *offset)
56 {
57 switch (*c) {
58 case '\\':
59 (*offset)++;
60 *c = get_char(reg, *offset);
61 switch (*c) {
62 case '\\':
63 case '*':
64 return 0;
65 default:
66 return -2;
67 }
68 case '*':
69 return -1;
70 default:
71 return 0;
72 }
73 }
74
75 static
76 char get_char_at_cb(size_t at, void *data)
77 {
78 return get_char(data, at);
79 }
80
81 static
82 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
83 {
84 bool has_user = false;
85 int result;
86 struct estack_entry *pattern_reg;
87 struct estack_entry *candidate_reg;
88
89 /* Disable the page fault handler when reading from userspace. */
90 if (estack_bx(stack, top)->u.s.user
91 || estack_ax(stack, top)->u.s.user) {
92 has_user = true;
93 pagefault_disable();
94 }
95
96 /* Find out which side is the pattern vs. the candidate. */
97 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
98 pattern_reg = estack_ax(stack, top);
99 candidate_reg = estack_bx(stack, top);
100 } else {
101 pattern_reg = estack_bx(stack, top);
102 candidate_reg = estack_ax(stack, top);
103 }
104
105 /* Perform the match operation. */
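/*
* The boolean result of strutils_star_glob_match_char_cb() is negated so
* that, as with strcmp(), a return value of 0 from stack_star_glob_match()
* means "match"; the EQ/NE star-glob operators below test the result
* against 0 accordingly.
*/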
106 result = !strutils_star_glob_match_char_cb(get_char_at_cb,
107 pattern_reg, get_char_at_cb, candidate_reg);
108 if (has_user)
109 pagefault_enable();
110
111 return result;
112 }
113
114 static
115 int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
116 {
117 size_t offset_bx = 0, offset_ax = 0;
118 int diff, has_user = 0;
119
120 if (estack_bx(stack, top)->u.s.user
121 || estack_ax(stack, top)->u.s.user) {
122 has_user = 1;
123 pagefault_disable();
124 }
125
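/*
* Compare one character per iteration from each operand. escaped_r0
* records that the bx-side character came from an unknown escape
* sequence (parse_char() returned -2); the ax-side handling below uses
* it to order strings containing such sequences consistently.
*/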
126 for (;;) {
127 int ret;
128 int escaped_r0 = 0;
129 char char_bx, char_ax;
130
131 char_bx = get_char(estack_bx(stack, top), offset_bx);
132 char_ax = get_char(estack_ax(stack, top), offset_ax);
133
134 if (unlikely(char_bx == '\0')) {
135 if (char_ax == '\0') {
136 diff = 0;
137 break;
138 } else {
139 if (estack_ax(stack, top)->u.s.literal_type ==
140 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
141 ret = parse_char(estack_ax(stack, top),
142 &char_ax, &offset_ax);
143 if (ret == -1) {
144 diff = 0;
145 break;
146 }
147 }
148 diff = -1;
149 break;
150 }
151 }
152 if (unlikely(char_ax == '\0')) {
153 if (estack_bx(stack, top)->u.s.literal_type ==
154 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
155 ret = parse_char(estack_bx(stack, top),
156 &char_bx, &offset_bx);
157 if (ret == -1) {
158 diff = 0;
159 break;
160 }
161 }
162 diff = 1;
163 break;
164 }
165 if (estack_bx(stack, top)->u.s.literal_type ==
166 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
167 ret = parse_char(estack_bx(stack, top),
168 &char_bx, &offset_bx);
169 if (ret == -1) {
170 diff = 0;
171 break;
172 } else if (ret == -2) {
173 escaped_r0 = 1;
174 }
175 /* else compare both chars. */
176 }
177 if (estack_ax(stack, top)->u.s.literal_type ==
178 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
179 ret = parse_char(estack_ax(stack, top),
180 &char_ax, &offset_ax);
181 if (ret == -1) {
182 diff = 0;
183 break;
184 } else if (ret == -2) {
185 if (!escaped_r0) {
186 diff = -1;
187 break;
188 }
189 } else {
190 if (escaped_r0) {
191 diff = 1;
192 break;
193 }
194 }
195 } else {
196 if (escaped_r0) {
197 diff = 1;
198 break;
199 }
200 }
201 diff = char_bx - char_ax;
202 if (diff != 0)
203 break;
204 offset_bx++;
205 offset_ax++;
206 }
207 if (has_user)
208 pagefault_enable();
209
210 return diff;
211 }
212
213 int lttng_bytecode_interpret_error(
214 struct lttng_kernel_bytecode_runtime *bytecode_runtime __attribute__((unused)),
215 const char *stack_data __attribute__((unused)),
216 struct lttng_kernel_probe_ctx *probe_ctx __attribute__((unused)),
217 void *ctx __attribute__((unused)))
218 {
219 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
220 }
221
222 #ifdef INTERPRETER_USE_SWITCH
223
224 /*
225 * Fallback for compilers that do not support taking address of labels.
226 */
227
228 #define START_OP \
229 start_pc = &bytecode->data[0]; \
230 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
231 pc = next_pc) { \
232 dbg_printk("LTTng: Executing op %s (%u)\n", \
233 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
234 (unsigned int) *(bytecode_opcode_t *) pc); \
235 switch (*(bytecode_opcode_t *) pc) {
236
237 #define OP(name) case name
238
239 #define PO break
240
241 #define END_OP } \
242 }
243
244 #else
245
246 /*
247 * Dispatch-table based interpreter.
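*
* Each opcode handler ends with PO, which reloads the program counter
* and jumps straight to the next opcode's label through the dispatch[]
* table (computed goto), avoiding a central switch on the hot path.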
248 */
249
250 #define START_OP \
251 start_pc = &bytecode->code[0]; \
252 pc = next_pc = start_pc; \
253 if (unlikely(pc - start_pc >= bytecode->len)) \
254 goto end; \
255 goto *dispatch[*(bytecode_opcode_t *) pc];
256
257 #define OP(name) \
258 LABEL_##name
259
260 #define PO \
261 pc = next_pc; \
262 goto *dispatch[*(bytecode_opcode_t *) pc];
263
264 #define END_OP
265
266 #endif
267
268 #define IS_INTEGER_REGISTER(reg_type) \
269 (reg_type == REG_S64 || reg_type == REG_U64)
270
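/*
* context_get_index - populate a load_ptr from a static context field.
*
* Looks up lttng_static_ctx->fields[idx], evaluates it through its
* get_value() callback and fills @ptr with a matching object type
* (integer, enum, or string/character array/sequence). Returns 0 on
* success, -EINVAL for unsupported field types.
*/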
271 static int context_get_index(struct lttng_kernel_probe_ctx *lttng_probe_ctx,
272 struct load_ptr *ptr,
273 uint32_t idx)
274 {
275
276 struct lttng_kernel_ctx_field *ctx_field;
277 const struct lttng_kernel_event_field *field;
278 struct lttng_ctx_value v;
279
280 ctx_field = &lttng_static_ctx->fields[idx];
281 field = ctx_field->event_field;
282 ptr->type = LOAD_OBJECT;
283 /* field is only used for types nested within variants. */
284 ptr->field = NULL;
285
286 switch (field->type->type) {
287 case lttng_kernel_type_integer:
288 {
289 const struct lttng_kernel_type_integer *integer_type = lttng_kernel_get_type_integer(field->type);
290
291 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
292 if (lttng_kernel_get_type_integer(field->type)->signedness) {
293 ptr->object_type = OBJECT_TYPE_S64;
294 ptr->u.s64 = v.u.s64;
295 ptr->ptr = &ptr->u.s64;
296 } else {
297 ptr->object_type = OBJECT_TYPE_U64;
298 ptr->u.u64 = v.u.s64; /* Cast. */
299 ptr->ptr = &ptr->u.u64;
300 }
301 ptr->rev_bo = integer_type->reverse_byte_order;
302 ptr->user = integer_type->user;
303 break;
304 }
305 case lttng_kernel_type_enum:
306 {
307 const struct lttng_kernel_type_enum *enum_type = lttng_kernel_get_type_enum(field->type);
308 const struct lttng_kernel_type_integer *integer_type = lttng_kernel_get_type_integer(enum_type->container_type);
309
310 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
311 if (integer_type->signedness) {
312 ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
313 ptr->u.s64 = v.u.s64;
314 ptr->ptr = &ptr->u.s64;
315 } else {
316 ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
317 ptr->u.u64 = v.u.s64; /* Cast. */
318 ptr->ptr = &ptr->u.u64;
319 }
320 ptr->rev_bo = integer_type->reverse_byte_order;
321 ptr->user = integer_type->user;
322 break;
323 }
324 case lttng_kernel_type_array:
325 {
326 const struct lttng_kernel_type_array *array_type = lttng_kernel_get_type_array(field->type);
327 const struct lttng_kernel_type_integer *integer_type;
328
329 if (!lttng_kernel_type_is_bytewise_integer(array_type->elem_type)) {
330 printk(KERN_WARNING "LTTng: bytecode: Array nesting only supports integer types.\n");
331 return -EINVAL;
332 }
333 if (array_type->encoding == lttng_kernel_string_encoding_none) {
334 printk(KERN_WARNING "LTTng: bytecode: Only string arrays are supported for contexts.\n");
335 return -EINVAL;
336 }
337 integer_type = lttng_kernel_get_type_integer(array_type->elem_type);
338 ptr->object_type = OBJECT_TYPE_STRING;
339 ptr->user = integer_type->user;
340 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
341 ptr->ptr = v.u.str;
342 break;
343 }
344 case lttng_kernel_type_sequence:
345 {
346 const struct lttng_kernel_type_sequence *sequence_type = lttng_kernel_get_type_sequence(field->type);
347 const struct lttng_kernel_type_integer *integer_type;
348
349 if (!lttng_kernel_type_is_bytewise_integer(sequence_type->elem_type)) {
350 printk(KERN_WARNING "LTTng: bytecode: Sequence nesting only supports integer types.\n");
351 return -EINVAL;
352 }
353 if (sequence_type->encoding == lttng_kernel_string_encoding_none) {
354 printk(KERN_WARNING "LTTng: bytecode: Only string sequences are supported for contexts.\n");
355 return -EINVAL;
356 }
357 integer_type = lttng_kernel_get_type_integer(sequence_type->elem_type);
358 ptr->object_type = OBJECT_TYPE_STRING;
359 ptr->user = integer_type->user;
360 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
361 ptr->ptr = v.u.str;
362 break;
363 }
364 case lttng_kernel_type_string:
365 {
366 const struct lttng_kernel_type_string *string_type = lttng_kernel_get_type_string(field->type);
367
368 ptr->object_type = OBJECT_TYPE_STRING;
369 ptr->user = string_type->user;
370 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
371 ptr->ptr = v.u.str;
372 break;
373 }
374 case lttng_kernel_type_struct:
375 printk(KERN_WARNING "LTTng: bytecode: Structure type cannot be loaded.\n");
376 return -EINVAL;
377 case lttng_kernel_type_variant:
378 printk(KERN_WARNING "LTTng: bytecode: Variant type cannot be loaded.\n");
379 return -EINVAL;
380 default:
381 printk(KERN_WARNING "LTTng: bytecode: Unknown type: %d", (int) field->type->type);
382 return -EINVAL;
383 }
384 return 0;
385 }
386
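/*
* dynamic_get_index - apply a "get index" operation to the pointer on
* top of the stack. For LOAD_OBJECT arrays and sequences, the offset
* precomputed in the runtime data is added to the element pointer; for
* the context roots, the context field is resolved through
* context_get_index(); for the payload root, the offset is applied to
* the interpreter stack data. Returns 0 on success, negative error
* value on error.
*/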
387 static int dynamic_get_index(struct lttng_kernel_probe_ctx *lttng_probe_ctx,
388 struct bytecode_runtime *runtime,
389 uint64_t index, struct estack_entry *stack_top)
390 {
391 int ret;
392 const struct bytecode_get_index_data *gid;
393
394 gid = (const struct bytecode_get_index_data *) &runtime->data[index];
395 switch (stack_top->u.ptr.type) {
396 case LOAD_OBJECT:
397 switch (stack_top->u.ptr.object_type) {
398 case OBJECT_TYPE_ARRAY:
399 {
400 const char *ptr;
401
402 WARN_ON_ONCE(gid->offset >= gid->array_len);
403 /* Skip count (unsigned long) */
404 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
405 ptr = ptr + gid->offset;
406 stack_top->u.ptr.ptr = ptr;
407 stack_top->u.ptr.object_type = gid->elem.type;
408 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
409 stack_top->u.ptr.user = gid->elem.user;
410 BUG_ON(stack_top->u.ptr.field->type->type != lttng_kernel_type_array);
411 stack_top->u.ptr.field = NULL;
412 break;
413 }
414 case OBJECT_TYPE_SEQUENCE:
415 {
416 const char *ptr;
417 size_t ptr_seq_len;
418
419 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
420 ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
421 if (gid->offset >= gid->elem.len * ptr_seq_len) {
422 ret = -EINVAL;
423 goto end;
424 }
425 ptr = ptr + gid->offset;
426 stack_top->u.ptr.ptr = ptr;
427 stack_top->u.ptr.object_type = gid->elem.type;
428 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
429 stack_top->u.ptr.user = gid->elem.user;
430 BUG_ON(stack_top->u.ptr.field->type->type != lttng_kernel_type_sequence);
431 stack_top->u.ptr.field = NULL;
432 break;
433 }
434 case OBJECT_TYPE_STRUCT:
435 printk(KERN_WARNING "LTTng: bytecode: Nested structures are not supported yet.\n");
436 ret = -EINVAL;
437 goto end;
438 case OBJECT_TYPE_VARIANT:
439 default:
440 printk(KERN_WARNING "LTTng: bytecode: Unexpected get index type %d",
441 (int) stack_top->u.ptr.object_type);
442 ret = -EINVAL;
443 goto end;
444 }
445 break;
446 case LOAD_ROOT_CONTEXT:
447 lttng_fallthrough;
448 case LOAD_ROOT_APP_CONTEXT:
449 {
450 ret = context_get_index(lttng_probe_ctx,
451 &stack_top->u.ptr,
452 gid->ctx_index);
453 if (ret) {
454 goto end;
455 }
456 break;
457 }
458 case LOAD_ROOT_PAYLOAD:
459 stack_top->u.ptr.ptr += gid->offset;
460 if (gid->elem.type == OBJECT_TYPE_STRING)
461 stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
462 stack_top->u.ptr.object_type = gid->elem.type;
463 stack_top->u.ptr.type = LOAD_OBJECT;
464 stack_top->u.ptr.field = gid->field;
465 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
466 stack_top->u.ptr.user = gid->elem.user;
467 break;
468 }
469
470 stack_top->type = REG_PTR;
471
472 return 0;
473
474 end:
475 return ret;
476 }
477
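/*
* dynamic_load_field - dereference the object pointed to by the stack
* top and turn it into a typed register (integer, enum, string or
* string sequence), byte-swapping and copying from user-space as
* needed. Faulting user-space integer reads yield 0; user-space enums
* are rejected with -EINVAL.
*/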
478 static int dynamic_load_field(struct estack_entry *stack_top)
479 {
480 int ret;
481
482 switch (stack_top->u.ptr.type) {
483 case LOAD_OBJECT:
484 break;
485 case LOAD_ROOT_CONTEXT:
486 case LOAD_ROOT_APP_CONTEXT:
487 case LOAD_ROOT_PAYLOAD:
488 default:
489 dbg_printk("Bytecode warning: cannot load root, missing field name.\n");
490 ret = -EINVAL;
491 goto end;
492 }
493 switch (stack_top->u.ptr.object_type) {
494 case OBJECT_TYPE_S8:
495 dbg_printk("op load field s8\n");
496 if (stack_top->u.ptr.user) {
497 if (lttng_copy_from_user_check_nofault(&stack_top->u.v, (int8_t __user *) stack_top->u.ptr.ptr, sizeof(int8_t)))
498 stack_top->u.v = 0;
499 } else {
500 stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
501 }
502 stack_top->type = REG_S64;
503 break;
504 case OBJECT_TYPE_S16:
505 {
506 int16_t tmp;
507
508 dbg_printk("op load field s16\n");
509 if (stack_top->u.ptr.user) {
510 if (lttng_copy_from_user_check_nofault(&tmp, (int16_t __user *) stack_top->u.ptr.ptr, sizeof(int16_t)))
511 tmp = 0;
512 } else {
513 tmp = *(int16_t *) stack_top->u.ptr.ptr;
514 }
515 if (stack_top->u.ptr.rev_bo)
516 __swab16s(&tmp);
517 stack_top->u.v = tmp;
518 stack_top->type = REG_S64;
519 break;
520 }
521 case OBJECT_TYPE_S32:
522 {
523 int32_t tmp;
524
525 dbg_printk("op load field s32\n");
526 if (stack_top->u.ptr.user) {
527 if (lttng_copy_from_user_check_nofault(&tmp, (int32_t __user *) stack_top->u.ptr.ptr, sizeof(int32_t)))
528 tmp = 0;
529 } else {
530 tmp = *(int32_t *) stack_top->u.ptr.ptr;
531 }
532 if (stack_top->u.ptr.rev_bo)
533 __swab32s(&tmp);
534 stack_top->u.v = tmp;
535 stack_top->type = REG_S64;
536 break;
537 }
538 case OBJECT_TYPE_S64:
539 {
540 int64_t tmp;
541
542 dbg_printk("op load field s64\n");
543 if (stack_top->u.ptr.user) {
544 if (lttng_copy_from_user_check_nofault(&tmp, (int64_t __user *) stack_top->u.ptr.ptr, sizeof(int64_t)))
545 tmp = 0;
546 } else {
547 tmp = *(int64_t *) stack_top->u.ptr.ptr;
548 }
549 if (stack_top->u.ptr.rev_bo)
550 __swab64s(&tmp);
551 stack_top->u.v = tmp;
552 stack_top->type = REG_S64;
553 break;
554 }
555 case OBJECT_TYPE_SIGNED_ENUM:
556 {
557 int64_t tmp;
558
559 dbg_printk("op load field signed enumeration\n");
560 if (stack_top->u.ptr.user) {
561 dbg_printk("Bytecode warning: user enum unsupported.\n");
562 ret = -EINVAL;
563 goto end;
564 }
565 tmp = *(int64_t *) stack_top->u.ptr.ptr;
566 if (stack_top->u.ptr.rev_bo)
567 __swab64s(&tmp);
568 stack_top->u.v = tmp;
569 stack_top->type = REG_S64;
570 break;
571 }
572 case OBJECT_TYPE_U8:
573 dbg_printk("op load field u8\n");
574 if (stack_top->u.ptr.user) {
575 if (lttng_copy_from_user_check_nofault(&stack_top->u.v, (uint8_t __user *) stack_top->u.ptr.ptr, sizeof(uint8_t)))
576 stack_top->u.v = 0;
577 } else {
578 stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
579 }
580 stack_top->type = REG_U64;
581 break;
582 case OBJECT_TYPE_U16:
583 {
584 uint16_t tmp;
585
586 dbg_printk("op load field u16\n");
587 if (stack_top->u.ptr.user) {
588 if (lttng_copy_from_user_check_nofault(&tmp, (uint16_t __user *) stack_top->u.ptr.ptr, sizeof(uint16_t)))
589 tmp = 0;
590 } else {
591 tmp = *(uint16_t *) stack_top->u.ptr.ptr;
592 }
593 if (stack_top->u.ptr.rev_bo)
594 __swab16s(&tmp);
595 stack_top->u.v = tmp;
596 stack_top->type = REG_U64;
597 break;
598 }
599 case OBJECT_TYPE_U32:
600 {
601 uint32_t tmp;
602
603 dbg_printk("op load field u32\n");
604 if (stack_top->u.ptr.user) {
605 if (lttng_copy_from_user_check_nofault(&tmp, (uint32_t __user *) stack_top->u.ptr.ptr, sizeof(uint32_t)))
606 tmp = 0;
607 } else {
608 tmp = *(uint32_t *) stack_top->u.ptr.ptr;
609 }
610 if (stack_top->u.ptr.rev_bo)
611 __swab32s(&tmp);
612 stack_top->u.v = tmp;
613 stack_top->type = REG_U64;
614 break;
615 }
616 case OBJECT_TYPE_U64:
617 {
618 uint64_t tmp;
619
620 dbg_printk("op load field u64\n");
621 if (stack_top->u.ptr.user) {
622 if (lttng_copy_from_user_check_nofault(&tmp, (uint64_t __user *) stack_top->u.ptr.ptr, sizeof(uint64_t)))
623 tmp = 0;
624 } else {
625 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
626 }
627 if (stack_top->u.ptr.rev_bo)
628 __swab64s(&tmp);
629 stack_top->u.v = tmp;
630 stack_top->type = REG_U64;
631 break;
632 }
633 case OBJECT_TYPE_UNSIGNED_ENUM:
634 {
635 uint64_t tmp;
636
637 dbg_printk("op load field unsigned enumeration\n");
638 if (stack_top->u.ptr.user) {
639 dbg_printk("Bytecode warning: user enum unsupported.\n");
640 ret = -EINVAL;
641 goto end;
642 }
643 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
644 if (stack_top->u.ptr.rev_bo)
645 __swab64s(&tmp);
646 stack_top->u.v = tmp;
647 stack_top->type = REG_U64;
648 break;
649 }
650 case OBJECT_TYPE_STRING:
651 {
652 dbg_printk("op load field string: user=%d\n", stack_top->u.ptr.user);
653 if (stack_top->u.ptr.user) {
654 const char __user *user_str = (const char __user *) stack_top->u.ptr.ptr;
655
656 stack_top->u.s.user_str = user_str;
657 if (unlikely(!stack_top->u.s.user_str)) {
658 dbg_printk("Bytecode warning: loading a NULL user string.\n");
659 ret = -EINVAL;
660 goto end;
661 }
662 stack_top->u.s.user = 1;
663 } else {
664 const char *str = (const char *) stack_top->u.ptr.ptr;
665
666 stack_top->u.s.str = str;
667 if (unlikely(!stack_top->u.s.str)) {
668 dbg_printk("Bytecode warning: loading a NULL string.\n");
669 ret = -EINVAL;
670 goto end;
671 }
672 stack_top->u.s.user = 0;
673 }
674 stack_top->u.s.seq_len = LTTNG_SIZE_MAX;
675 stack_top->u.s.literal_type = ESTACK_STRING_LITERAL_TYPE_NONE;
676 stack_top->type = REG_STRING;
677 break;
678 }
679 case OBJECT_TYPE_STRING_SEQUENCE:
680 {
681 const char *ptr;
682
683 dbg_printk("op load field string sequence: user=%d\n", stack_top->u.ptr.user);
684 ptr = stack_top->u.ptr.ptr;
685 stack_top->u.s.seq_len = *(unsigned long *) ptr;
686 if (stack_top->u.ptr.user) {
687 stack_top->u.s.user_str = *(const char __user **) (ptr + sizeof(unsigned long));
688 if (unlikely(!stack_top->u.s.user_str)) {
689 dbg_printk("Bytecode warning: loading a NULL user sequence.\n");
690 ret = -EINVAL;
691 goto end;
692 }
693 stack_top->u.s.user = 1;
694 } else {
695 stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
696 if (unlikely(!stack_top->u.s.str)) {
697 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
698 ret = -EINVAL;
699 goto end;
700 }
701 stack_top->u.s.user = 0;
702 }
703 stack_top->u.s.literal_type = ESTACK_STRING_LITERAL_TYPE_NONE;
704 stack_top->type = REG_STRING;
705 break;
706 }
707 case OBJECT_TYPE_DYNAMIC:
708 /*
709 * Dynamic types in context are looked up
710 * by context get index.
711 */
712 ret = -EINVAL;
713 goto end;
714 case OBJECT_TYPE_DOUBLE:
715 ret = -EINVAL;
716 goto end;
717 case OBJECT_TYPE_SEQUENCE:
718 case OBJECT_TYPE_ARRAY:
719 case OBJECT_TYPE_STRUCT:
720 case OBJECT_TYPE_VARIANT:
721 printk(KERN_WARNING "LTTng: bytecode: Sequences, arrays, struct and variant cannot be loaded (nested types).\n");
722 ret = -EINVAL;
723 goto end;
724 }
725 return 0;
726
727 end:
728 return ret;
729 }
730
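/*
* lttng_bytecode_interpret_format_output - convert the register left on
* top of the stack by a capture bytecode into a struct
* lttng_interpreter_output. REG_PTR objects are loaded (looping via the
* "again" label) until they resolve to a scalar, string, sequence or
* enumeration.
*/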
731 static
732 int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
733 struct lttng_interpreter_output *output)
734 {
735 int ret;
736
737 again:
738 switch (ax->type) {
739 case REG_S64:
740 output->type = LTTNG_INTERPRETER_TYPE_S64;
741 output->u.s = ax->u.v;
742 break;
743 case REG_U64:
744 output->type = LTTNG_INTERPRETER_TYPE_U64;
745 output->u.u = (uint64_t) ax->u.v;
746 break;
747 case REG_STRING:
748 output->type = LTTNG_INTERPRETER_TYPE_STRING;
749 output->u.str.len = ax->u.s.seq_len;
750 output->u.str.user = ax->u.s.user;
751 if (ax->u.s.user) {
752 output->u.str.user_str = ax->u.s.user_str;
753 } else {
754 output->u.str.str = ax->u.s.str;
755 }
756 break;
757 case REG_PTR:
758 switch (ax->u.ptr.object_type) {
759 case OBJECT_TYPE_S8:
760 case OBJECT_TYPE_S16:
761 case OBJECT_TYPE_S32:
762 case OBJECT_TYPE_S64:
763 case OBJECT_TYPE_U8:
764 case OBJECT_TYPE_U16:
765 case OBJECT_TYPE_U32:
766 case OBJECT_TYPE_U64:
767 case OBJECT_TYPE_DOUBLE:
768 case OBJECT_TYPE_STRING:
769 case OBJECT_TYPE_STRING_SEQUENCE:
770 ret = dynamic_load_field(ax);
771 if (ret)
772 return ret;
773 /* Retry after loading ptr into stack top. */
774 goto again;
775 case OBJECT_TYPE_SEQUENCE:
776 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
777 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
778 output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
779 output->u.sequence.nested_type = lttng_kernel_get_type_sequence(ax->u.ptr.field->type)->elem_type;
780 break;
781 case OBJECT_TYPE_ARRAY:
782 /* Skip count (unsigned long) */
783 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
784 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
785 output->u.sequence.nr_elem = lttng_kernel_get_type_array(ax->u.ptr.field->type)->length;
786 output->u.sequence.nested_type = lttng_kernel_get_type_array(ax->u.ptr.field->type)->elem_type;
787 break;
788 case OBJECT_TYPE_SIGNED_ENUM:
789 ret = dynamic_load_field(ax);
790 if (ret)
791 return ret;
792 output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
793 output->u.s = ax->u.v;
794 break;
795 case OBJECT_TYPE_UNSIGNED_ENUM:
796 ret = dynamic_load_field(ax);
797 if (ret)
798 return ret;
799 output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
800 output->u.u = ax->u.v;
801 break;
802 case OBJECT_TYPE_STRUCT:
803 case OBJECT_TYPE_VARIANT:
804 default:
805 return -EINVAL;
806 }
807
808 break;
809 case REG_STAR_GLOB_STRING:
810 case REG_TYPE_UNKNOWN:
811 default:
812 return -EINVAL;
813 }
814
815 return 0;
816 }
817
818 #ifdef DEBUG
819
820 #define DBG_USER_STR_CUTOFF 32
821
822 /*
823 * In debug mode, print user string (truncated, if necessary).
824 */
825 static inline
826 void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
827 {
828 size_t pos = 0;
829 char last_char;
830 char user_str[DBG_USER_STR_CUTOFF];
831
832 pagefault_disable();
833 do {
834 last_char = get_char(user_str_reg, pos);
835 user_str[pos] = last_char;
836 pos++;
837 } while (last_char != '\0' && pos < sizeof(user_str));
838 pagefault_enable();
839
840 user_str[sizeof(user_str) - 1] = '\0';
841 dbg_printk("load field ref user string: '%s%s'\n", user_str,
842 last_char != '\0' ? "[...]" : "");
843 }
844 #else
845 static inline
846 void dbg_load_ref_user_str_printk(const struct estack_entry *user_str_reg)
847 {
848 }
849 #endif
850
851 /*
852 * Return LTTNG_KERNEL_BYTECODE_INTERPRETER_OK on success.
853 * Return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR on error.
854 *
855 * For FILTER bytecode: expect a struct lttng_kernel_bytecode_filter_ctx *
856 * as @ctx argument.
857 * For CAPTURE bytecode: expect a struct lttng_interpreter_output *
858 * as @ctx argument.
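*
* The interpreter never dereferences the @ctx argument itself; it is
* only consulted in the output-preparation step at the end, according
* to the bytecode type. Illustrative filter-mode call (bc_runtime,
* stack_data, probe_ctx and record_event() are placeholder names),
* mirroring lttng_kernel_interpret_event_filter() below:
*
*	struct lttng_kernel_bytecode_filter_ctx filter_ctx;
*
*	if (lttng_bytecode_interpret(bc_runtime, stack_data, probe_ctx,
*			&filter_ctx) == LTTNG_KERNEL_BYTECODE_INTERPRETER_OK
*			&& filter_ctx.result == LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT)
*		record_event();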
859 */
860 int lttng_bytecode_interpret(struct lttng_kernel_bytecode_runtime *kernel_bytecode,
861 const char *interpreter_stack_data,
862 struct lttng_kernel_probe_ctx *lttng_probe_ctx,
863 void *caller_ctx)
864 {
865 struct bytecode_runtime *bytecode = container_of(kernel_bytecode, struct bytecode_runtime, p);
866 void *pc, *next_pc, *start_pc;
867 int ret = -EINVAL;
868 uint64_t retval = 0;
869 struct estack _stack;
870 struct estack *stack = &_stack;
871 register int64_t ax = 0, bx = 0;
872 register enum entry_type ax_t = REG_TYPE_UNKNOWN, bx_t = REG_TYPE_UNKNOWN;
873 register int top = INTERPRETER_STACK_EMPTY;
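/*
* The value and type of the top two stack entries are cached in the
* ax/bx register variables; the estack_push()/estack_pop() macros are
* expected to keep these cached copies in sync with the in-memory
* stack.
*/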
874 #ifndef INTERPRETER_USE_SWITCH
875 static void *dispatch[NR_BYTECODE_OPS] = {
876 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
877
878 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
879
880 /* binary */
881 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
882 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
883 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
884 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
885 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
886 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
887 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
888 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
889 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
890 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
891
892 /* binary comparators */
893 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
894 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
895 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
896 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
897 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
898 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
899
900 /* string binary comparator */
901 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
902 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
903 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
904 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
905 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
906 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
907
908 /* globbing pattern binary comparator */
909 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
910 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
911
912 /* s64 binary comparator */
913 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
914 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
915 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
916 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
917 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
918 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
919
920 /* double binary comparator */
921 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
922 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
923 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
924 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
925 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
926 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
927
928 /* Mixed S64-double binary comparators */
929 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
930 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
931 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
932 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
933 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
934 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
935
936 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
937 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
938 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
939 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
940 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
941 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
942
943 /* unary */
944 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
945 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
946 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
947 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
948 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
949 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
950 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
951 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
952 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
953
954 /* logical */
955 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
956 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
957
958 /* load field ref */
959 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
960 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
961 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
962 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
963 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
964
965 /* load from immediate operand */
966 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
967 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
968 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
969 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
970
971 /* cast */
972 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
973 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
974 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
975
976 /* get context ref */
977 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
978 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
979 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
980 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
981
982 /* load userspace field ref */
983 [ BYTECODE_OP_LOAD_FIELD_REF_USER_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_STRING,
984 [ BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE,
985
986 /* Instructions for recursive traversal through composed types. */
987 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
988 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
989 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
990
991 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
992 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
993 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
994 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
995
996 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
997 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
998 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
999 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
1000 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
1001 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
1002 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
1003 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
1004 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
1005 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
1006 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
1007 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
1008
1009 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
1010
1011 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
1012 };
1013 #endif /* #ifndef INTERPRETER_USE_SWITCH */
1014
1015 START_OP
1016
1017 OP(BYTECODE_OP_UNKNOWN):
1018 OP(BYTECODE_OP_LOAD_FIELD_REF):
1019 OP(BYTECODE_OP_GET_CONTEXT_REF):
1020 #ifdef INTERPRETER_USE_SWITCH
1021 default:
1022 #endif /* INTERPRETER_USE_SWITCH */
1023 printk(KERN_WARNING "LTTng: bytecode: unknown bytecode op %u\n",
1024 (unsigned int) *(bytecode_opcode_t *) pc);
1025 ret = -EINVAL;
1026 goto end;
1027
1028 OP(BYTECODE_OP_RETURN):
1029 /* LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR or LTTNG_KERNEL_BYTECODE_INTERPRETER_OK */
1030 switch (estack_ax_t) {
1031 case REG_S64:
1032 case REG_U64:
1033 retval = !!estack_ax_v;
1034 break;
1035 case REG_DOUBLE:
1036 case REG_STRING:
1037 case REG_PTR:
1038 if (kernel_bytecode->type != LTTNG_KERNEL_BYTECODE_TYPE_CAPTURE) {
1039 ret = -EINVAL;
1040 goto end;
1041 }
1042 retval = 0;
1043 break;
1044 case REG_STAR_GLOB_STRING:
1045 case REG_TYPE_UNKNOWN:
1046 ret = -EINVAL;
1047 goto end;
1048 }
1049 ret = 0;
1050 goto end;
1051
1052 OP(BYTECODE_OP_RETURN_S64):
1053 /* LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR or LTTNG_KERNEL_BYTECODE_INTERPRETER_OK */
1054 retval = !!estack_ax_v;
1055 ret = 0;
1056 goto end;
1057
1058 /* binary */
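/*
* Arithmetic opcodes are not supported by the kernel interpreter, and
* generic comparison opcodes are expected to have been specialized into
* typed variants beforehand; both are therefore treated as errors here.
*/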
1059 OP(BYTECODE_OP_MUL):
1060 OP(BYTECODE_OP_DIV):
1061 OP(BYTECODE_OP_MOD):
1062 OP(BYTECODE_OP_PLUS):
1063 OP(BYTECODE_OP_MINUS):
1064 printk(KERN_WARNING "LTTng: bytecode: unsupported bytecode op %u\n",
1065 (unsigned int) *(bytecode_opcode_t *) pc);
1066 ret = -EINVAL;
1067 goto end;
1068
1069 OP(BYTECODE_OP_EQ):
1070 OP(BYTECODE_OP_NE):
1071 OP(BYTECODE_OP_GT):
1072 OP(BYTECODE_OP_LT):
1073 OP(BYTECODE_OP_GE):
1074 OP(BYTECODE_OP_LE):
1075 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1076 (unsigned int) *(bytecode_opcode_t *) pc);
1077 ret = -EINVAL;
1078 goto end;
1079
1080 OP(BYTECODE_OP_EQ_STRING):
1081 {
1082 int res;
1083
1084 res = (stack_strcmp(stack, top, "==") == 0);
1085 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1086 estack_ax_v = res;
1087 estack_ax_t = REG_S64;
1088 next_pc += sizeof(struct binary_op);
1089 PO;
1090 }
1091 OP(BYTECODE_OP_NE_STRING):
1092 {
1093 int res;
1094
1095 res = (stack_strcmp(stack, top, "!=") != 0);
1096 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1097 estack_ax_v = res;
1098 estack_ax_t = REG_S64;
1099 next_pc += sizeof(struct binary_op);
1100 PO;
1101 }
1102 OP(BYTECODE_OP_GT_STRING):
1103 {
1104 int res;
1105
1106 res = (stack_strcmp(stack, top, ">") > 0);
1107 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1108 estack_ax_v = res;
1109 estack_ax_t = REG_S64;
1110 next_pc += sizeof(struct binary_op);
1111 PO;
1112 }
1113 OP(BYTECODE_OP_LT_STRING):
1114 {
1115 int res;
1116
1117 res = (stack_strcmp(stack, top, "<") < 0);
1118 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1119 estack_ax_v = res;
1120 estack_ax_t = REG_S64;
1121 next_pc += sizeof(struct binary_op);
1122 PO;
1123 }
1124 OP(BYTECODE_OP_GE_STRING):
1125 {
1126 int res;
1127
1128 res = (stack_strcmp(stack, top, ">=") >= 0);
1129 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1130 estack_ax_v = res;
1131 estack_ax_t = REG_S64;
1132 next_pc += sizeof(struct binary_op);
1133 PO;
1134 }
1135 OP(BYTECODE_OP_LE_STRING):
1136 {
1137 int res;
1138
1139 res = (stack_strcmp(stack, top, "<=") <= 0);
1140 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1141 estack_ax_v = res;
1142 estack_ax_t = REG_S64;
1143 next_pc += sizeof(struct binary_op);
1144 PO;
1145 }
1146
1147 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
1148 {
1149 int res;
1150
1151 res = (stack_star_glob_match(stack, top, "==") == 0);
1152 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1153 estack_ax_v = res;
1154 estack_ax_t = REG_S64;
1155 next_pc += sizeof(struct binary_op);
1156 PO;
1157 }
1158 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1159 {
1160 int res;
1161
1162 res = (stack_star_glob_match(stack, top, "!=") != 0);
1163 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1164 estack_ax_v = res;
1165 estack_ax_t = REG_S64;
1166 next_pc += sizeof(struct binary_op);
1167 PO;
1168 }
1169
1170 OP(BYTECODE_OP_EQ_S64):
1171 {
1172 int res;
1173
1174 res = (estack_bx_v == estack_ax_v);
1175 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1176 estack_ax_v = res;
1177 estack_ax_t = REG_S64;
1178 next_pc += sizeof(struct binary_op);
1179 PO;
1180 }
1181 OP(BYTECODE_OP_NE_S64):
1182 {
1183 int res;
1184
1185 res = (estack_bx_v != estack_ax_v);
1186 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1187 estack_ax_v = res;
1188 estack_ax_t = REG_S64;
1189 next_pc += sizeof(struct binary_op);
1190 PO;
1191 }
1192 OP(BYTECODE_OP_GT_S64):
1193 {
1194 int res;
1195
1196 res = (estack_bx_v > estack_ax_v);
1197 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1198 estack_ax_v = res;
1199 estack_ax_t = REG_S64;
1200 next_pc += sizeof(struct binary_op);
1201 PO;
1202 }
1203 OP(BYTECODE_OP_LT_S64):
1204 {
1205 int res;
1206
1207 res = (estack_bx_v < estack_ax_v);
1208 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1209 estack_ax_v = res;
1210 estack_ax_t = REG_S64;
1211 next_pc += sizeof(struct binary_op);
1212 PO;
1213 }
1214 OP(BYTECODE_OP_GE_S64):
1215 {
1216 int res;
1217
1218 res = (estack_bx_v >= estack_ax_v);
1219 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1220 estack_ax_v = res;
1221 estack_ax_t = REG_S64;
1222 next_pc += sizeof(struct binary_op);
1223 PO;
1224 }
1225 OP(BYTECODE_OP_LE_S64):
1226 {
1227 int res;
1228
1229 res = (estack_bx_v <= estack_ax_v);
1230 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1231 estack_ax_v = res;
1232 estack_ax_t = REG_S64;
1233 next_pc += sizeof(struct binary_op);
1234 PO;
1235 }
1236
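/*
* Floating-point (double) opcodes are not expected to reach the kernel
* interpreter; hitting one of the handlers below is treated as a bug.
*/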
1237 OP(BYTECODE_OP_EQ_DOUBLE):
1238 OP(BYTECODE_OP_NE_DOUBLE):
1239 OP(BYTECODE_OP_GT_DOUBLE):
1240 OP(BYTECODE_OP_LT_DOUBLE):
1241 OP(BYTECODE_OP_GE_DOUBLE):
1242 OP(BYTECODE_OP_LE_DOUBLE):
1243 {
1244 BUG_ON(1);
1245 PO;
1246 }
1247
1248 /* Mixed S64-double binary comparators */
1249 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1250 OP(BYTECODE_OP_NE_DOUBLE_S64):
1251 OP(BYTECODE_OP_GT_DOUBLE_S64):
1252 OP(BYTECODE_OP_LT_DOUBLE_S64):
1253 OP(BYTECODE_OP_GE_DOUBLE_S64):
1254 OP(BYTECODE_OP_LE_DOUBLE_S64):
1255 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1256 OP(BYTECODE_OP_NE_S64_DOUBLE):
1257 OP(BYTECODE_OP_GT_S64_DOUBLE):
1258 OP(BYTECODE_OP_LT_S64_DOUBLE):
1259 OP(BYTECODE_OP_GE_S64_DOUBLE):
1260 OP(BYTECODE_OP_LE_S64_DOUBLE):
1261 {
1262 BUG_ON(1);
1263 PO;
1264 }
1265 OP(BYTECODE_OP_BIT_RSHIFT):
1266 {
1267 int64_t res;
1268
1269 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1270 ret = -EINVAL;
1271 goto end;
1272 }
1273
1274 /* Catch undefined behavior. */
1275 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1276 ret = -EINVAL;
1277 goto end;
1278 }
1279 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1280 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1281 estack_ax_v = res;
1282 estack_ax_t = REG_U64;
1283 next_pc += sizeof(struct binary_op);
1284 PO;
1285 }
1286 OP(BYTECODE_OP_BIT_LSHIFT):
1287 {
1288 int64_t res;
1289
1290 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1291 ret = -EINVAL;
1292 goto end;
1293 }
1294
1295 /* Catch undefined behavior. */
1296 if (unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1297 ret = -EINVAL;
1298 goto end;
1299 }
1300 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1301 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1302 estack_ax_v = res;
1303 estack_ax_t = REG_U64;
1304 next_pc += sizeof(struct binary_op);
1305 PO;
1306 }
1307 OP(BYTECODE_OP_BIT_AND):
1308 {
1309 int64_t res;
1310
1311 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1312 ret = -EINVAL;
1313 goto end;
1314 }
1315
1316 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1317 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1318 estack_ax_v = res;
1319 estack_ax_t = REG_U64;
1320 next_pc += sizeof(struct binary_op);
1321 PO;
1322 }
1323 OP(BYTECODE_OP_BIT_OR):
1324 {
1325 int64_t res;
1326
1327 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1328 ret = -EINVAL;
1329 goto end;
1330 }
1331
1332 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1333 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1334 estack_ax_v = res;
1335 estack_ax_t = REG_U64;
1336 next_pc += sizeof(struct binary_op);
1337 PO;
1338 }
1339 OP(BYTECODE_OP_BIT_XOR):
1340 {
1341 int64_t res;
1342
1343 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1344 ret = -EINVAL;
1345 goto end;
1346 }
1347
1348 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1349 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1350 estack_ax_v = res;
1351 estack_ax_t = REG_U64;
1352 next_pc += sizeof(struct binary_op);
1353 PO;
1354 }
1355
1356 /* unary */
1357 OP(BYTECODE_OP_UNARY_PLUS):
1358 OP(BYTECODE_OP_UNARY_MINUS):
1359 OP(BYTECODE_OP_UNARY_NOT):
1360 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1361 (unsigned int) *(bytecode_opcode_t *) pc);
1362 ret = -EINVAL;
1363 goto end;
1364
1365
1366 OP(BYTECODE_OP_UNARY_BIT_NOT):
1367 {
1368 estack_ax_v = ~(uint64_t) estack_ax_v;
1369 estack_ax_t = REG_S64;
1370 next_pc += sizeof(struct unary_op);
1371 PO;
1372 }
1373
1374 OP(BYTECODE_OP_UNARY_PLUS_S64):
1375 {
1376 next_pc += sizeof(struct unary_op);
1377 PO;
1378 }
1379 OP(BYTECODE_OP_UNARY_MINUS_S64):
1380 {
1381 estack_ax_v = -estack_ax_v;
1382 estack_ax_t = REG_S64;
1383 next_pc += sizeof(struct unary_op);
1384 PO;
1385 }
1386 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1387 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1388 {
1389 BUG_ON(1);
1390 PO;
1391 }
1392 OP(BYTECODE_OP_UNARY_NOT_S64):
1393 {
1394 estack_ax_v = !estack_ax_v;
1395 estack_ax_t = REG_S64;
1396 next_pc += sizeof(struct unary_op);
1397 PO;
1398 }
1399 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1400 {
1401 BUG_ON(1);
1402 PO;
1403 }
1404
1405 /* logical */
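/*
* The logical AND/OR opcodes implement short-circuit evaluation: the
* skip_offset encoded in the logical_op instruction jumps over the
* right-hand operand once the result is already known from AX.
*/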
1406 OP(BYTECODE_OP_AND):
1407 {
1408 struct logical_op *insn = (struct logical_op *) pc;
1409
1410 /* If AX is 0, skip and evaluate to 0 */
1411 if (unlikely(estack_ax_v == 0)) {
1412 dbg_printk("Jumping to bytecode offset %u\n",
1413 (unsigned int) insn->skip_offset);
1414 next_pc = start_pc + insn->skip_offset;
1415 } else {
1416 /* Pop 1 when jump not taken */
1417 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1418 next_pc += sizeof(struct logical_op);
1419 }
1420 PO;
1421 }
1422 OP(BYTECODE_OP_OR):
1423 {
1424 struct logical_op *insn = (struct logical_op *) pc;
1425
1426 /* If AX is nonzero, skip and evaluate to 1 */
1427
1428 if (unlikely(estack_ax_v != 0)) {
1429 estack_ax_v = 1;
1430 dbg_printk("Jumping to bytecode offset %u\n",
1431 (unsigned int) insn->skip_offset);
1432 next_pc = start_pc + insn->skip_offset;
1433 } else {
1434 /* Pop 1 when jump not taken */
1435 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1436 next_pc += sizeof(struct logical_op);
1437 }
1438 PO;
1439 }
1440
1441
1442 /* load field ref */
1443 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1444 {
1445 struct load_op *insn = (struct load_op *) pc;
1446 struct field_ref *ref = (struct field_ref *) insn->data;
1447
1448 dbg_printk("load field ref offset %u type string\n",
1449 ref->offset);
1450 estack_push(stack, top, ax, bx, ax_t, bx_t);
1451 estack_ax(stack, top)->u.s.str =
1452 *(const char * const *) &interpreter_stack_data[ref->offset];
1453 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1454 dbg_printk("Bytecode warning: loading a NULL string.\n");
1455 ret = -EINVAL;
1456 goto end;
1457 }
1458 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1459 estack_ax(stack, top)->u.s.literal_type =
1460 ESTACK_STRING_LITERAL_TYPE_NONE;
1461 estack_ax(stack, top)->u.s.user = 0;
1462 estack_ax(stack, top)->type = REG_STRING;
1463 dbg_printk("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1464 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1465 PO;
1466 }
1467
1468 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1469 {
1470 struct load_op *insn = (struct load_op *) pc;
1471 struct field_ref *ref = (struct field_ref *) insn->data;
1472
1473 dbg_printk("load field ref offset %u type sequence\n",
1474 ref->offset);
1475 estack_push(stack, top, ax, bx, ax_t, bx_t);
1476 estack_ax(stack, top)->u.s.seq_len =
1477 *(unsigned long *) &interpreter_stack_data[ref->offset];
1478 estack_ax(stack, top)->u.s.str =
1479 *(const char **) (&interpreter_stack_data[ref->offset
1480 + sizeof(unsigned long)]);
1481 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1482 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1483 ret = -EINVAL;
1484 goto end;
1485 }
1486 estack_ax(stack, top)->u.s.literal_type =
1487 ESTACK_STRING_LITERAL_TYPE_NONE;
1488 estack_ax(stack, top)->u.s.user = 0;
1489 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1490 PO;
1491 }
1492
1493 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
1494 {
1495 struct load_op *insn = (struct load_op *) pc;
1496 struct field_ref *ref = (struct field_ref *) insn->data;
1497
1498 dbg_printk("load field ref offset %u type s64\n",
1499 ref->offset);
1500 estack_push(stack, top, ax, bx, ax_t, bx_t);
1501 estack_ax_v =
1502 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
1503 estack_ax_t = REG_S64;
1504 dbg_printk("ref load s64 %lld\n",
1505 (long long) estack_ax_v);
1506 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1507 PO;
1508 }
1509
1510 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
1511 {
1512 BUG_ON(1);
1513 PO;
1514 }
1515
1516 /* load from immediate operand */
1517 OP(BYTECODE_OP_LOAD_STRING):
1518 {
1519 struct load_op *insn = (struct load_op *) pc;
1520
1521 dbg_printk("load string %s\n", insn->data);
1522 estack_push(stack, top, ax, bx, ax_t, bx_t);
1523 estack_ax(stack, top)->u.s.str = insn->data;
1524 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1525 estack_ax(stack, top)->u.s.literal_type =
1526 ESTACK_STRING_LITERAL_TYPE_PLAIN;
1527 estack_ax(stack, top)->u.s.user = 0;
1528 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1529 PO;
1530 }
1531
1532 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
1533 {
1534 struct load_op *insn = (struct load_op *) pc;
1535
1536 dbg_printk("load globbing pattern %s\n", insn->data);
1537 estack_push(stack, top, ax, bx, ax_t, bx_t);
1538 estack_ax(stack, top)->u.s.str = insn->data;
1539 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1540 estack_ax(stack, top)->u.s.literal_type =
1541 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
1542 estack_ax(stack, top)->u.s.user = 0;
1543 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1544 PO;
1545 }
1546
1547 OP(BYTECODE_OP_LOAD_S64):
1548 {
1549 struct load_op *insn = (struct load_op *) pc;
1550
1551 estack_push(stack, top, ax, bx, ax_t, bx_t);
1552 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
1553 estack_ax_t = REG_S64;
1554 dbg_printk("load s64 %lld\n",
1555 (long long) estack_ax_v);
1556 next_pc += sizeof(struct load_op)
1557 + sizeof(struct literal_numeric);
1558 PO;
1559 }
1560
1561 OP(BYTECODE_OP_LOAD_DOUBLE):
1562 {
1563 BUG_ON(1);
1564 PO;
1565 }
1566
1567 /* cast */
1568 OP(BYTECODE_OP_CAST_TO_S64):
1569 printk(KERN_WARNING "LTTng: bytecode: unsupported non-specialized bytecode op %u\n",
1570 (unsigned int) *(bytecode_opcode_t *) pc);
1571 ret = -EINVAL;
1572 goto end;
1573
1574 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
1575 {
1576 BUG_ON(1);
1577 PO;
1578 }
1579
1580 OP(BYTECODE_OP_CAST_NOP):
1581 {
1582 next_pc += sizeof(struct cast_op);
1583 PO;
1584 }
1585
1586 /* get context ref */
1587 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
1588 {
1589 struct load_op *insn = (struct load_op *) pc;
1590 struct field_ref *ref = (struct field_ref *) insn->data;
1591 struct lttng_kernel_ctx_field *ctx_field;
1592 struct lttng_ctx_value v;
1593
1594 dbg_printk("get context ref offset %u type string\n",
1595 ref->offset);
1596 ctx_field = &lttng_static_ctx->fields[ref->offset];
1597 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
1598 estack_push(stack, top, ax, bx, ax_t, bx_t);
1599 estack_ax(stack, top)->u.s.str = v.u.str;
1600 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1601 dbg_printk("Bytecode warning: loading a NULL string.\n");
1602 ret = -EINVAL;
1603 goto end;
1604 }
1605 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1606 estack_ax(stack, top)->u.s.literal_type =
1607 ESTACK_STRING_LITERAL_TYPE_NONE;
1608 estack_ax(stack, top)->u.s.user = 0;
1609 estack_ax(stack, top)->type = REG_STRING;
1610 dbg_printk("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
1611 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1612 PO;
1613 }
1614
1615 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
1616 {
1617 struct load_op *insn = (struct load_op *) pc;
1618 struct field_ref *ref = (struct field_ref *) insn->data;
1619 struct lttng_kernel_ctx_field *ctx_field;
1620 struct lttng_ctx_value v;
1621
1622 dbg_printk("get context ref offset %u type s64\n",
1623 ref->offset);
1624 ctx_field = &lttng_static_ctx->fields[ref->offset];
1625 ctx_field->get_value(ctx_field->priv, lttng_probe_ctx, &v);
1626 estack_push(stack, top, ax, bx, ax_t, bx_t);
1627 estack_ax_v = v.u.s64;
1628 estack_ax_t = REG_S64;
1629 dbg_printk("ref get context s64 %lld\n",
1630 (long long) estack_ax_v);
1631 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1632 PO;
1633 }
1634
1635 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
1636 {
1637 BUG_ON(1);
1638 PO;
1639 }
1640
1641 /* load userspace field ref */
1642 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_STRING):
1643 {
1644 struct load_op *insn = (struct load_op *) pc;
1645 struct field_ref *ref = (struct field_ref *) insn->data;
1646
1647 dbg_printk("load field ref offset %u type user string\n",
1648 ref->offset);
1649 estack_push(stack, top, ax, bx, ax_t, bx_t);
1650 estack_ax(stack, top)->u.s.user_str =
1651 *(const char * const *) &interpreter_stack_data[ref->offset];
1652 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1653 dbg_printk("Bytecode warning: loading a NULL string.\n");
1654 ret = -EINVAL;
1655 goto end;
1656 }
1657 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1658 estack_ax(stack, top)->u.s.literal_type =
1659 ESTACK_STRING_LITERAL_TYPE_NONE;
1660 estack_ax(stack, top)->u.s.user = 1;
1661 estack_ax(stack, top)->type = REG_STRING;
1662 dbg_load_ref_user_str_printk(estack_ax(stack, top));
1663 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1664 PO;
1665 }
1666
1667 OP(BYTECODE_OP_LOAD_FIELD_REF_USER_SEQUENCE):
1668 {
1669 struct load_op *insn = (struct load_op *) pc;
1670 struct field_ref *ref = (struct field_ref *) insn->data;
1671
1672 dbg_printk("load field ref offset %u type user sequence\n",
1673 ref->offset);
1674 estack_push(stack, top, ax, bx, ax_t, bx_t);
1675 estack_ax(stack, top)->u.s.seq_len =
1676 *(unsigned long *) &interpreter_stack_data[ref->offset];
1677 estack_ax(stack, top)->u.s.user_str =
1678 *(const char **) (&interpreter_stack_data[ref->offset
1679 + sizeof(unsigned long)]);
1680 if (unlikely(!estack_ax(stack, top)->u.s.user_str)) {
1681 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1682 ret = -EINVAL;
1683 goto end;
1684 }
1685 estack_ax(stack, top)->u.s.literal_type =
1686 ESTACK_STRING_LITERAL_TYPE_NONE;
1687 estack_ax(stack, top)->u.s.user = 1;
1688 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1689 PO;
1690 }
1691
1692 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
1693 {
1694 dbg_printk("op get context root\n");
1695 estack_push(stack, top, ax, bx, ax_t, bx_t);
1696 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
1697 /* "field" only needed for variants. */
1698 estack_ax(stack, top)->u.ptr.field = NULL;
1699 estack_ax(stack, top)->type = REG_PTR;
1700 next_pc += sizeof(struct load_op);
1701 PO;
1702 }
1703
1704 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
1705 {
1706 BUG_ON(1);
1707 PO;
1708 }
1709
1710 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
1711 {
1712 dbg_printk("op get app payload root\n");
1713 estack_push(stack, top, ax, bx, ax_t, bx_t);
1714 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
1715 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
1716 /* "field" only needed for variants. */
1717 estack_ax(stack, top)->u.ptr.field = NULL;
1718 estack_ax(stack, top)->type = REG_PTR;
1719 next_pc += sizeof(struct load_op);
1720 PO;
1721 }
1722
1723 OP(BYTECODE_OP_GET_SYMBOL):
1724 {
1725 dbg_printk("op get symbol\n");
1726 switch (estack_ax(stack, top)->u.ptr.type) {
1727 case LOAD_OBJECT:
1728 printk(KERN_WARNING "LTTng: bytecode: Nested fields not implemented yet.\n");
1729 ret = -EINVAL;
1730 goto end;
1731 case LOAD_ROOT_CONTEXT:
1732 case LOAD_ROOT_APP_CONTEXT:
1733 case LOAD_ROOT_PAYLOAD:
1734 /*
1735 * symbol lookup is performed by
1736 * specialization.
1737 */
1738 ret = -EINVAL;
1739 goto end;
1740 }
1741 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1742 PO;
1743 }
1744
1745 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
1746 {
1747 /*
1748 * Used for first variant encountered in a
1749 * traversal. Variants are not implemented yet.
1750 */
1751 ret = -EINVAL;
1752 goto end;
1753 }
1754
1755 OP(BYTECODE_OP_GET_INDEX_U16):
1756 {
1757 struct load_op *insn = (struct load_op *) pc;
1758 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
1759
1760 dbg_printk("op get index u16\n");
1761 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1762 if (ret)
1763 goto end;
1764 estack_ax_v = estack_ax(stack, top)->u.v;
1765 estack_ax_t = estack_ax(stack, top)->type;
1766 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1767 PO;
1768 }
1769
1770 OP(BYTECODE_OP_GET_INDEX_U64):
1771 {
1772 struct load_op *insn = (struct load_op *) pc;
1773 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
1774
1775 dbg_printk("op get index u64\n");
1776 ret = dynamic_get_index(lttng_probe_ctx, bytecode, index->index, estack_ax(stack, top));
1777 if (ret)
1778 goto end;
1779 estack_ax_v = estack_ax(stack, top)->u.v;
1780 estack_ax_t = estack_ax(stack, top)->type;
1781 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1782 PO;
1783 }
1784
1785 OP(BYTECODE_OP_LOAD_FIELD):
1786 {
1787 dbg_printk("op load field\n");
1788 ret = dynamic_load_field(estack_ax(stack, top));
1789 if (ret)
1790 goto end;
1791 estack_ax_v = estack_ax(stack, top)->u.v;
1792 estack_ax_t = estack_ax(stack, top)->type;
1793 next_pc += sizeof(struct load_op);
1794 PO;
1795 }
1796
1797 OP(BYTECODE_OP_LOAD_FIELD_S8):
1798 {
1799 dbg_printk("op load field s8\n");
1800
1801 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
1802 estack_ax_t = REG_S64;
1803 next_pc += sizeof(struct load_op);
1804 PO;
1805 }
1806 OP(BYTECODE_OP_LOAD_FIELD_S16):
1807 {
1808 dbg_printk("op load field s16\n");
1809
1810 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
1811 estack_ax_t = REG_S64;
1812 next_pc += sizeof(struct load_op);
1813 PO;
1814 }
1815 OP(BYTECODE_OP_LOAD_FIELD_S32):
1816 {
1817 dbg_printk("op load field s32\n");
1818
1819 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
1820 estack_ax_t = REG_S64;
1821 next_pc += sizeof(struct load_op);
1822 PO;
1823 }
1824 OP(BYTECODE_OP_LOAD_FIELD_S64):
1825 {
1826 dbg_printk("op load field s64\n");
1827
1828 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
1829 estack_ax_t = REG_S64;
1830 next_pc += sizeof(struct load_op);
1831 PO;
1832 }
1833 OP(BYTECODE_OP_LOAD_FIELD_U8):
1834 {
1835 dbg_printk("op load field u8\n");
1836
1837 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
1838 estack_ax_t = REG_S64;
1839 next_pc += sizeof(struct load_op);
1840 PO;
1841 }
1842 OP(BYTECODE_OP_LOAD_FIELD_U16):
1843 {
1844 dbg_printk("op load field u16\n");
1845
1846 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
1847 estack_ax_t = REG_S64;
1848 next_pc += sizeof(struct load_op);
1849 PO;
1850 }
1851 OP(BYTECODE_OP_LOAD_FIELD_U32):
1852 {
1853 dbg_printk("op load field u32\n");
1854
1855 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
1856 estack_ax_t = REG_S64;
1857 next_pc += sizeof(struct load_op);
1858 PO;
1859 }
1860 OP(BYTECODE_OP_LOAD_FIELD_U64):
1861 {
1862 dbg_printk("op load field u64\n");
1863
1864 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
1865 estack_ax_t = REG_S64;
1866 next_pc += sizeof(struct load_op);
1867 PO;
1868 }
1869 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
1870 {
1871 ret = -EINVAL;
1872 goto end;
1873 }
1874
1875 OP(BYTECODE_OP_LOAD_FIELD_STRING):
1876 {
1877 const char *str;
1878
1879 dbg_printk("op load field string\n");
1880 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
1881 estack_ax(stack, top)->u.s.str = str;
1882 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1883 dbg_printk("Bytecode warning: loading a NULL string.\n");
1884 ret = -EINVAL;
1885 goto end;
1886 }
1887 estack_ax(stack, top)->u.s.seq_len = LTTNG_SIZE_MAX;
1888 estack_ax(stack, top)->u.s.literal_type =
1889 ESTACK_STRING_LITERAL_TYPE_NONE;
1890 estack_ax(stack, top)->type = REG_STRING;
1891 estack_ax(stack, top)->u.s.user = 0;
1892 next_pc += sizeof(struct load_op);
1893 PO;
1894 }
1895
1896 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
1897 {
1898 const char *ptr;
1899
1900 dbg_printk("op load field string sequence\n");
1901 ptr = estack_ax(stack, top)->u.ptr.ptr;
1902 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
1903 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
1904 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1905 dbg_printk("Bytecode warning: loading a NULL sequence.\n");
1906 ret = -EINVAL;
1907 goto end;
1908 }
1909 estack_ax(stack, top)->u.s.literal_type =
1910 ESTACK_STRING_LITERAL_TYPE_NONE;
1911 estack_ax(stack, top)->type = REG_STRING;
1912 estack_ax(stack, top)->u.s.user = 0;
1913 next_pc += sizeof(struct load_op);
1914 PO;
1915 }
1916
1917 END_OP
1918 end:
1919 /* No need to prepare output if an error occurred. */
1920 if (ret)
1921 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
1922
1923 /* Prepare output. */
1924 switch (kernel_bytecode->type) {
1925 case LTTNG_KERNEL_BYTECODE_TYPE_FILTER:
1926 {
1927 struct lttng_kernel_bytecode_filter_ctx *filter_ctx =
1928 (struct lttng_kernel_bytecode_filter_ctx *) caller_ctx;
1929 if (retval)
1930 filter_ctx->result = LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT;
1931 else
1932 filter_ctx->result = LTTNG_KERNEL_BYTECODE_FILTER_REJECT;
1933 break;
1934 }
1935 case LTTNG_KERNEL_BYTECODE_TYPE_CAPTURE:
1936 ret = lttng_bytecode_interpret_format_output(estack_ax(stack, top),
1937 (struct lttng_interpreter_output *) caller_ctx);
1938 break;
1939 default:
1940 ret = -EINVAL;
1941 break;
1942 }
1943 if (ret)
1944 return LTTNG_KERNEL_BYTECODE_INTERPRETER_ERROR;
1945 else
1946 return LTTNG_KERNEL_BYTECODE_INTERPRETER_OK;
1947 }
1948 LTTNG_STACK_FRAME_NON_STANDARD(lttng_bytecode_interpret);
1949
1950 /*
1951 * Return LTTNG_KERNEL_EVENT_FILTER_ACCEPT or LTTNG_KERNEL_EVENT_FILTER_REJECT.
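*
* An event is accepted as soon as one attached filter bytecode both
* interprets successfully and reports ACCEPT; an interpreter error
* counts as a reject for that bytecode.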
1952 */
1953 int lttng_kernel_interpret_event_filter(const struct lttng_kernel_event_common *event,
1954 const char *interpreter_stack_data,
1955 struct lttng_kernel_probe_ctx *probe_ctx,
1956 void *event_filter_ctx __attribute__((unused)))
1957 {
1958 struct lttng_kernel_bytecode_runtime *filter_bc_runtime;
1959 struct list_head *filter_bytecode_runtime_head = &event->priv->filter_bytecode_runtime_head;
1960 struct lttng_kernel_bytecode_filter_ctx bytecode_filter_ctx;
1961 bool filter_record = false;
1962
1963 list_for_each_entry_rcu(filter_bc_runtime, filter_bytecode_runtime_head, node) {
1964 if (likely(filter_bc_runtime->interpreter_func(filter_bc_runtime,
1965 interpreter_stack_data, probe_ctx, &bytecode_filter_ctx) == LTTNG_KERNEL_BYTECODE_INTERPRETER_OK)) {
1966 if (unlikely(bytecode_filter_ctx.result == LTTNG_KERNEL_BYTECODE_FILTER_ACCEPT)) {
1967 filter_record = true;
1968 break;
1969 }
1970 }
1971 }
1972 if (filter_record)
1973 return LTTNG_KERNEL_EVENT_FILTER_ACCEPT;
1974 else
1975 return LTTNG_KERNEL_EVENT_FILTER_REJECT;
1976 }
1977
1978 #undef START_OP
1979 #undef OP
1980 #undef PO
1981 #undef END_OP