liblttng-ust/lttng-bytecode-interpreter.c (lttng-ust.git)
1 /*
2 * SPDX-License-Identifier: MIT
3 *
4 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
5 *
6 * LTTng UST bytecode interpreter.
7 */
8
9 #define _LGPL_SOURCE
10 #include <stddef.h>
11 #include <stdint.h>
12
13 #include <lttng/urcu/pointer.h>
14 #include <lttng/ust-endian.h>
15 #include <lttng/ust-events.h>
16 #include "ust-events-internal.h"
17
18 #include "lttng-bytecode.h"
19 #include "string-utils.h"
20
21
22 /*
23 * -1: wildcard found.
24 * -2: unknown escape char.
25 * 0: normal char.
26 */
27
28 static
29 int parse_char(const char **p)
30 {
31 switch (**p) {
32 case '\\':
33 (*p)++;
34 switch (**p) {
35 case '\\':
36 case '*':
37 return 0;
38 default:
39 return -2;
40 }
41 case '*':
42 return -1;
43 default:
44 return 0;
45 }
46 }
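
#if 0	/* Usage sketch (illustrative only; not part of the interpreter). */
/*
 * parse_char() reports -1 for an unescaped '*' (wildcard), -2 for an
 * unknown escape, and 0 for a normal character; for "\\" and "\*" it
 * first advances past the backslash. The hypothetical helper below only
 * illustrates that convention.
 */
static int literal_has_wildcard(const char *p)
{
	while (*p != '\0') {
		if (parse_char(&p) == -1)
			return 1;	/* Unescaped '*': wildcard. */
		p++;
	}
	return 0;
}
#endif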
47
48 /*
49 * Returns SIZE_MAX if the string is null-terminated, or the number of
50 * characters if not.
51 */
52 static
53 size_t get_str_or_seq_len(const struct estack_entry *entry)
54 {
55 return entry->u.s.seq_len;
56 }
57
58 static
59 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
60 {
61 const char *pattern;
62 const char *candidate;
63 size_t pattern_len;
64 size_t candidate_len;
65
66 /* Find out which side is the pattern vs. the candidate. */
67 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
68 pattern = estack_ax(stack, top)->u.s.str;
69 pattern_len = get_str_or_seq_len(estack_ax(stack, top));
70 candidate = estack_bx(stack, top)->u.s.str;
71 candidate_len = get_str_or_seq_len(estack_bx(stack, top));
72 } else {
73 pattern = estack_bx(stack, top)->u.s.str;
74 pattern_len = get_str_or_seq_len(estack_bx(stack, top));
75 candidate = estack_ax(stack, top)->u.s.str;
76 candidate_len = get_str_or_seq_len(estack_ax(stack, top));
77 }
78
79 /* Perform the match. Return 0 if the pattern matches the candidate. */
80 return !strutils_star_glob_match(pattern, pattern_len, candidate,
81 candidate_len);
82 }
83
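/*
 * Compare the two strings at the top of the stack (bx is the left
 * operand, ax the right one) and return a strcmp()-like result
 * (< 0, 0, > 0). In a plain string literal, "\*" and "\\" compare as
 * the literal characters '*' and '\', and an unescaped '*' makes the
 * comparison report equality regardless of the remaining characters.
 */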
84 static
85 int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
86 {
87 const char *p = estack_bx(stack, top)->u.s.str, *q = estack_ax(stack, top)->u.s.str;
88 int ret;
89 int diff;
90
91 for (;;) {
92 int escaped_r0 = 0;
93
94 if (unlikely(p - estack_bx(stack, top)->u.s.str >= estack_bx(stack, top)->u.s.seq_len || *p == '\0')) {
95 if (q - estack_ax(stack, top)->u.s.str >= estack_ax(stack, top)->u.s.seq_len || *q == '\0') {
96 return 0;
97 } else {
98 if (estack_ax(stack, top)->u.s.literal_type ==
99 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
100 ret = parse_char(&q);
101 if (ret == -1)
102 return 0;
103 }
104 return -1;
105 }
106 }
107 if (unlikely(q - estack_ax(stack, top)->u.s.str >= estack_ax(stack, top)->u.s.seq_len || *q == '\0')) {
108 if (estack_bx(stack, top)->u.s.literal_type ==
109 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
110 ret = parse_char(&p);
111 if (ret == -1)
112 return 0;
113 }
114 return 1;
115 }
116 if (estack_bx(stack, top)->u.s.literal_type ==
117 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
118 ret = parse_char(&p);
119 if (ret == -1) {
120 return 0;
121 } else if (ret == -2) {
122 escaped_r0 = 1;
123 }
124 /* else compare both char */
125 }
126 if (estack_ax(stack, top)->u.s.literal_type ==
127 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
128 ret = parse_char(&q);
129 if (ret == -1) {
130 return 0;
131 } else if (ret == -2) {
132 if (!escaped_r0)
133 return -1;
134 } else {
135 if (escaped_r0)
136 return 1;
137 }
138 } else {
139 if (escaped_r0)
140 return 1;
141 }
142 diff = *p - *q;
143 if (diff != 0)
144 break;
145 p++;
146 q++;
147 }
148 return diff;
149 }
150
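/*
 * Always-discard stubs: these can be installed in place of the real
 * interpreter (e.g. when interpretation is disabled or the bytecode
 * cannot be linked), so filters never match and captures yield nothing.
 */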
151 uint64_t lttng_bytecode_filter_interpret_false(void *filter_data,
152 const char *filter_stack_data)
153 {
154 return LTTNG_INTERPRETER_DISCARD;
155 }
156
157 uint64_t lttng_bytecode_capture_interpret_false(void *capture_data,
158 const char *capture_stack_data,
159 struct lttng_interpreter_output *output)
160 {
161 return LTTNG_INTERPRETER_DISCARD;
162 }
163
164 #ifdef INTERPRETER_USE_SWITCH
165
166 /*
167 * Fallback for compilers that do not support taking address of labels.
168 */
169
170 #define START_OP \
171 start_pc = &bytecode->code[0]; \
172 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
173 pc = next_pc) { \
174 dbg_printf("Executing op %s (%u)\n", \
175 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
176 (unsigned int) *(bytecode_opcode_t *) pc); \
177 switch (*(bytecode_opcode_t *) pc) {
178
179 #define OP(name) jump_target_##name: __attribute__((unused)); \
180 case name
181
182 #define PO break
183
184 #define END_OP } \
185 }
186
187 #define JUMP_TO(name) \
188 goto jump_target_##name
189
190 #else
191
192 /*
193 * Dispatch-table based interpreter.
194 */
195
196 #define START_OP \
197 start_pc = &bytecode->code[0]; \
198 pc = next_pc = start_pc; \
199 if (unlikely(pc - start_pc >= bytecode->len)) \
200 goto end; \
201 goto *dispatch[*(bytecode_opcode_t *) pc];
202
203 #define OP(name) \
204 LABEL_##name
205
206 #define PO \
207 pc = next_pc; \
208 goto *dispatch[*(bytecode_opcode_t *) pc];
209
210 #define END_OP
211
212 #define JUMP_TO(name) \
213 goto LABEL_##name
214
215 #endif
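
#if 0	/* Minimal sketch (illustrative only) of the threaded-dispatch idea. */
/*
 * Without INTERPRETER_USE_SWITCH, START_OP/PO expand to computed gotos:
 * each handler ends by indexing the `dispatch` table with the next
 * opcode and jumping straight to its label. The toy interpreter below
 * (hypothetical opcodes: 0 = halt, 1 = increment) shows the shape; the
 * real interpreter does the same with the BYTECODE_OP_* handlers.
 */
static int64_t mini_interpret(const uint8_t *code)	/* `code` must end with opcode 0. */
{
	static void *dispatch[] = { &&LABEL_HALT, &&LABEL_INC };
	const uint8_t *pc = code;
	int64_t acc = 0;

	goto *dispatch[*pc];
LABEL_INC:
	acc++;
	pc++;
	goto *dispatch[*pc];
LABEL_HALT:
	return acc;
}
#endif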
216
217 #define IS_INTEGER_REGISTER(reg_type) \
218 (reg_type == REG_U64 || reg_type == REG_S64)
219
220 static int context_get_index(struct lttng_ctx *ctx,
221 struct load_ptr *ptr,
222 uint32_t idx)
223 {
224
225 struct lttng_ctx_field *ctx_field;
226 struct lttng_event_field *field;
227 struct lttng_ctx_value v;
228
229 ctx_field = &ctx->fields[idx];
230 field = &ctx_field->event_field;
231 ptr->type = LOAD_OBJECT;
232 ptr->field = field;
233
234 switch (field->type.atype) {
235 case atype_integer:
236 ctx_field->get_value(ctx_field, &v);
237 if (field->type.u.integer.signedness) {
238 ptr->object_type = OBJECT_TYPE_S64;
239 ptr->u.s64 = v.u.s64;
240 ptr->ptr = &ptr->u.s64;
241 } else {
242 ptr->object_type = OBJECT_TYPE_U64;
243 ptr->u.u64 = v.u.s64; /* Cast. */
244 ptr->ptr = &ptr->u.u64;
245 }
246 break;
247 case atype_enum_nestable:
248 {
249 const struct lttng_integer_type *itype;
250
251 itype = &field->type.u.enum_nestable.container_type->u.integer;
252 ctx_field->get_value(ctx_field, &v);
253 if (itype->signedness) {
254 ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
255 ptr->u.s64 = v.u.s64;
256 ptr->ptr = &ptr->u.s64;
257 } else {
258 ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
259 ptr->u.u64 = v.u.s64; /* Cast. */
260 ptr->ptr = &ptr->u.u64;
261 }
262 break;
263 }
264 case atype_array_nestable:
265 if (field->type.u.array_nestable.elem_type->atype != atype_integer) {
266 ERR("Array nesting only supports integer types.");
267 return -EINVAL;
268 }
269 if (field->type.u.array_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
270 ERR("Only string arrays are supported for contexts.");
271 return -EINVAL;
272 }
273 ptr->object_type = OBJECT_TYPE_STRING;
274 ctx_field->get_value(ctx_field, &v);
275 ptr->ptr = v.u.str;
276 break;
277 case atype_sequence_nestable:
278 if (field->type.u.sequence_nestable.elem_type->atype != atype_integer) {
279 ERR("Sequence nesting only supports integer types.");
280 return -EINVAL;
281 }
282 if (field->type.u.sequence_nestable.elem_type->u.integer.encoding == lttng_encode_none) {
283 ERR("Only string sequences are supported for contexts.");
284 return -EINVAL;
285 }
286 ptr->object_type = OBJECT_TYPE_STRING;
287 ctx_field->get_value(ctx_field, &v);
288 ptr->ptr = v.u.str;
289 break;
290 case atype_string:
291 ptr->object_type = OBJECT_TYPE_STRING;
292 ctx_field->get_value(ctx_field, &v);
293 ptr->ptr = v.u.str;
294 break;
295 case atype_float:
296 ptr->object_type = OBJECT_TYPE_DOUBLE;
297 ctx_field->get_value(ctx_field, &v);
298 ptr->u.d = v.u.d;
299 ptr->ptr = &ptr->u.d;
300 break;
301 case atype_dynamic:
302 ctx_field->get_value(ctx_field, &v);
303 switch (v.sel) {
304 case LTTNG_UST_DYNAMIC_TYPE_NONE:
305 return -EINVAL;
306 case LTTNG_UST_DYNAMIC_TYPE_U8:
307 case LTTNG_UST_DYNAMIC_TYPE_U16:
308 case LTTNG_UST_DYNAMIC_TYPE_U32:
309 case LTTNG_UST_DYNAMIC_TYPE_U64:
310 ptr->object_type = OBJECT_TYPE_U64;
311 ptr->u.u64 = v.u.u64;
312 ptr->ptr = &ptr->u.u64;
313 dbg_printf("context get index dynamic u64 %" PRIu64 "\n", ptr->u.u64);
314 break;
315 case LTTNG_UST_DYNAMIC_TYPE_S8:
316 case LTTNG_UST_DYNAMIC_TYPE_S16:
317 case LTTNG_UST_DYNAMIC_TYPE_S32:
318 case LTTNG_UST_DYNAMIC_TYPE_S64:
319 ptr->object_type = OBJECT_TYPE_S64;
320 ptr->u.s64 = v.u.s64;
321 ptr->ptr = &ptr->u.s64;
322 dbg_printf("context get index dynamic s64 %" PRIi64 "\n", ptr->u.s64);
323 break;
324 case LTTNG_UST_DYNAMIC_TYPE_FLOAT:
325 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE:
326 ptr->object_type = OBJECT_TYPE_DOUBLE;
327 ptr->u.d = v.u.d;
328 ptr->ptr = &ptr->u.d;
329 dbg_printf("context get index dynamic double %g\n", ptr->u.d);
330 break;
331 case LTTNG_UST_DYNAMIC_TYPE_STRING:
332 ptr->object_type = OBJECT_TYPE_STRING;
333 ptr->ptr = v.u.str;
334 dbg_printf("context get index dynamic string %s\n", (const char *) ptr->ptr);
335 break;
336 default:
337 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v.sel);
338 return -EINVAL;
339 }
340 break;
341 default:
342 ERR("Unknown type: %d", (int) field->type.atype);
343 return -EINVAL;
344 }
345 return 0;
346 }
347
348 static int dynamic_get_index(struct lttng_ctx *ctx,
349 struct bytecode_runtime *runtime,
350 uint64_t index, struct estack_entry *stack_top)
351 {
352 int ret;
353 const struct bytecode_get_index_data *gid;
354
355 gid = (const struct bytecode_get_index_data *) &runtime->data[index];
356 switch (stack_top->u.ptr.type) {
357 case LOAD_OBJECT:
358 switch (stack_top->u.ptr.object_type) {
359 case OBJECT_TYPE_ARRAY:
360 {
361 const char *ptr;
362
363 assert(gid->offset < gid->array_len);
364 /* Skip count (unsigned long) */
365 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
366 ptr = ptr + gid->offset;
367 stack_top->u.ptr.ptr = ptr;
368 stack_top->u.ptr.object_type = gid->elem.type;
369 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
370 assert(stack_top->u.ptr.field->type.atype == atype_array_nestable);
371 stack_top->u.ptr.field = NULL;
372 break;
373 }
374 case OBJECT_TYPE_SEQUENCE:
375 {
376 const char *ptr;
377 size_t ptr_seq_len;
378
379 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
380 ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
381 if (gid->offset >= gid->elem.len * ptr_seq_len) {
382 ret = -EINVAL;
383 goto end;
384 }
385 ptr = ptr + gid->offset;
386 stack_top->u.ptr.ptr = ptr;
387 stack_top->u.ptr.object_type = gid->elem.type;
388 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
389 assert(stack_top->u.ptr.field->type.atype == atype_sequence_nestable);
390 stack_top->u.ptr.field = NULL;
391 break;
392 }
393 case OBJECT_TYPE_STRUCT:
394 ERR("Nested structures are not supported yet.");
395 ret = -EINVAL;
396 goto end;
397 case OBJECT_TYPE_VARIANT:
398 default:
399 ERR("Unexpected get index type %d",
400 (int) stack_top->u.ptr.object_type);
401 ret = -EINVAL;
402 goto end;
403 }
404 break;
405 case LOAD_ROOT_CONTEXT:
406 case LOAD_ROOT_APP_CONTEXT: /* Fall-through */
407 {
408 ret = context_get_index(ctx,
409 &stack_top->u.ptr,
410 gid->ctx_index);
411 if (ret) {
412 goto end;
413 }
414 break;
415 }
416 case LOAD_ROOT_PAYLOAD:
417 stack_top->u.ptr.ptr += gid->offset;
418 if (gid->elem.type == OBJECT_TYPE_STRING)
419 stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
420 stack_top->u.ptr.object_type = gid->elem.type;
421 stack_top->u.ptr.type = LOAD_OBJECT;
422 stack_top->u.ptr.field = gid->field;
423 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
424 break;
425 }
426
427 stack_top->type = REG_PTR;
428
429 return 0;
430
431 end:
432 return ret;
433 }
434
435 static int dynamic_load_field(struct estack_entry *stack_top)
436 {
437 int ret;
438
439 switch (stack_top->u.ptr.type) {
440 case LOAD_OBJECT:
441 break;
442 case LOAD_ROOT_CONTEXT:
443 case LOAD_ROOT_APP_CONTEXT:
444 case LOAD_ROOT_PAYLOAD:
445 default:
446 dbg_printf("Interpreter warning: cannot load root, missing field name.\n");
447 ret = -EINVAL;
448 goto end;
449 }
450 switch (stack_top->u.ptr.object_type) {
451 case OBJECT_TYPE_S8:
452 dbg_printf("op load field s8\n");
453 stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
454 stack_top->type = REG_S64;
455 break;
456 case OBJECT_TYPE_S16:
457 {
458 int16_t tmp;
459
460 dbg_printf("op load field s16\n");
461 tmp = *(int16_t *) stack_top->u.ptr.ptr;
462 if (stack_top->u.ptr.rev_bo)
463 tmp = bswap_16(tmp);
464 stack_top->u.v = tmp;
465 stack_top->type = REG_S64;
466 break;
467 }
468 case OBJECT_TYPE_S32:
469 {
470 int32_t tmp;
471
472 dbg_printf("op load field s32\n");
473 tmp = *(int32_t *) stack_top->u.ptr.ptr;
474 if (stack_top->u.ptr.rev_bo)
475 tmp = bswap_32(tmp);
476 stack_top->u.v = tmp;
477 stack_top->type = REG_S64;
478 break;
479 }
480 case OBJECT_TYPE_S64:
481 {
482 int64_t tmp;
483
484 dbg_printf("op load field s64\n");
485 tmp = *(int64_t *) stack_top->u.ptr.ptr;
486 if (stack_top->u.ptr.rev_bo)
487 tmp = bswap_64(tmp);
488 stack_top->u.v = tmp;
489 stack_top->type = REG_S64;
490 break;
491 }
492 case OBJECT_TYPE_SIGNED_ENUM:
493 {
494 int64_t tmp;
495
496 dbg_printf("op load field signed enumeration\n");
497 tmp = *(int64_t *) stack_top->u.ptr.ptr;
498 if (stack_top->u.ptr.rev_bo)
499 tmp = bswap_64(tmp);
500 stack_top->u.v = tmp;
501 stack_top->type = REG_S64;
502 break;
503 }
504 case OBJECT_TYPE_U8:
505 dbg_printf("op load field u8\n");
506 stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
507 stack_top->type = REG_U64;
508 break;
509 case OBJECT_TYPE_U16:
510 {
511 uint16_t tmp;
512
513 dbg_printf("op load field u16\n");
514 tmp = *(uint16_t *) stack_top->u.ptr.ptr;
515 if (stack_top->u.ptr.rev_bo)
516 tmp = bswap_16(tmp);
517 stack_top->u.v = tmp;
518 stack_top->type = REG_U64;
519 break;
520 }
521 case OBJECT_TYPE_U32:
522 {
523 uint32_t tmp;
524
525 dbg_printf("op load field u32\n");
526 tmp = *(uint32_t *) stack_top->u.ptr.ptr;
527 if (stack_top->u.ptr.rev_bo)
528 tmp = bswap_32(tmp);
529 stack_top->u.v = tmp;
530 stack_top->type = REG_U64;
531 break;
532 }
533 case OBJECT_TYPE_U64:
534 {
535 uint64_t tmp;
536
537 dbg_printf("op load field u64\n");
538 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
539 if (stack_top->u.ptr.rev_bo)
540 tmp = bswap_64(tmp);
541 stack_top->u.v = tmp;
542 stack_top->type = REG_U64;
543 break;
544 }
545 case OBJECT_TYPE_UNSIGNED_ENUM:
546 {
547 uint64_t tmp;
548
549 dbg_printf("op load field unsigned enumeration\n");
550 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
551 if (stack_top->u.ptr.rev_bo)
552 tmp = bswap_64(tmp);
553 stack_top->u.v = tmp;
554 stack_top->type = REG_U64;
555 break;
556 }
557 case OBJECT_TYPE_DOUBLE:
558 memcpy(&stack_top->u.d,
559 stack_top->u.ptr.ptr,
560 sizeof(struct literal_double));
561 stack_top->type = REG_DOUBLE;
562 break;
563 case OBJECT_TYPE_STRING:
564 {
565 const char *str;
566
567 dbg_printf("op load field string\n");
568 str = (const char *) stack_top->u.ptr.ptr;
569 stack_top->u.s.str = str;
570 if (unlikely(!stack_top->u.s.str)) {
571 dbg_printf("Interpreter warning: loading a NULL string.\n");
572 ret = -EINVAL;
573 goto end;
574 }
575 stack_top->u.s.seq_len = SIZE_MAX;
576 stack_top->u.s.literal_type =
577 ESTACK_STRING_LITERAL_TYPE_NONE;
578 stack_top->type = REG_STRING;
579 break;
580 }
581 case OBJECT_TYPE_STRING_SEQUENCE:
582 {
583 const char *ptr;
584
585 dbg_printf("op load field string sequence\n");
586 ptr = stack_top->u.ptr.ptr;
587 stack_top->u.s.seq_len = *(unsigned long *) ptr;
588 stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
589 stack_top->type = REG_STRING;
590 if (unlikely(!stack_top->u.s.str)) {
591 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
592 ret = -EINVAL;
593 goto end;
594 }
595 stack_top->u.s.literal_type =
596 ESTACK_STRING_LITERAL_TYPE_NONE;
597 break;
598 }
599 case OBJECT_TYPE_DYNAMIC:
600 /*
601 * Dynamic types in context are looked up
602 * by context get index.
603 */
604 ret = -EINVAL;
605 goto end;
606 case OBJECT_TYPE_SEQUENCE:
607 case OBJECT_TYPE_ARRAY:
608 case OBJECT_TYPE_STRUCT:
609 case OBJECT_TYPE_VARIANT:
610 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
611 ret = -EINVAL;
612 goto end;
613 }
614 return 0;
615
616 end:
617 return ret;
618 }
619
620 static
621 int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
622 struct lttng_interpreter_output *output)
623 {
624 int ret;
625
626 again:
627 switch (ax->type) {
628 case REG_S64:
629 output->type = LTTNG_INTERPRETER_TYPE_S64;
630 output->u.s = ax->u.v;
631 break;
632 case REG_U64:
633 output->type = LTTNG_INTERPRETER_TYPE_U64;
634 output->u.u = (uint64_t) ax->u.v;
635 break;
636 case REG_DOUBLE:
637 output->type = LTTNG_INTERPRETER_TYPE_DOUBLE;
638 output->u.d = ax->u.d;
639 break;
640 case REG_STRING:
641 output->type = LTTNG_INTERPRETER_TYPE_STRING;
642 output->u.str.str = ax->u.s.str;
643 output->u.str.len = ax->u.s.seq_len;
644 break;
645 case REG_PTR:
646 switch (ax->u.ptr.object_type) {
647 case OBJECT_TYPE_S8:
648 case OBJECT_TYPE_S16:
649 case OBJECT_TYPE_S32:
650 case OBJECT_TYPE_S64:
651 case OBJECT_TYPE_U8:
652 case OBJECT_TYPE_U16:
653 case OBJECT_TYPE_U32:
654 case OBJECT_TYPE_U64:
655 case OBJECT_TYPE_DOUBLE:
656 case OBJECT_TYPE_STRING:
657 case OBJECT_TYPE_STRING_SEQUENCE:
658 ret = dynamic_load_field(ax);
659 if (ret)
660 return ret;
661 /* Retry after loading ptr into stack top. */
662 goto again;
663 case OBJECT_TYPE_SEQUENCE:
664 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
665 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
666 output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
667 output->u.sequence.nested_type = ax->u.ptr.field->type.u.sequence_nestable.elem_type;
668 break;
669 case OBJECT_TYPE_ARRAY:
670 /* Skip count (unsigned long) */
671 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
672 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
673 output->u.sequence.nr_elem = ax->u.ptr.field->type.u.array_nestable.length;
674 output->u.sequence.nested_type = ax->u.ptr.field->type.u.array_nestable.elem_type;
675 break;
676 case OBJECT_TYPE_SIGNED_ENUM:
677 ret = dynamic_load_field(ax);
678 if (ret)
679 return ret;
680 output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
681 output->u.s = ax->u.v;
682 break;
683 case OBJECT_TYPE_UNSIGNED_ENUM:
684 ret = dynamic_load_field(ax);
685 if (ret)
686 return ret;
687 output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
688 output->u.u = ax->u.v;
689 break;
690 case OBJECT_TYPE_STRUCT:
691 case OBJECT_TYPE_VARIANT:
692 default:
693 return -EINVAL;
694 }
695
696 break;
697 case REG_STAR_GLOB_STRING:
698 case REG_UNKNOWN:
699 default:
700 return -EINVAL;
701 }
702
703 return LTTNG_INTERPRETER_RECORD_FLAG;
704 }
705
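#if 0	/* Caller-side sketch (illustrative only; `interpret` is a hypothetical callback). */
/*
 * With `output` == NULL (the filter case described below), callers only
 * test bit 0x1 (LTTNG_INTERPRETER_RECORD_FLAG) of the returned value to
 * decide whether the event is recorded.
 */
static int should_record_event(uint64_t (*interpret)(void *interpreter_data,
			const char *interpreter_stack_data),
		void *interpreter_data, const char *stack_data)
{
	return !!(interpret(interpreter_data, stack_data) &
			LTTNG_INTERPRETER_RECORD_FLAG);
}
#endif
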
706 /*
707 * For `output` equal to NULL:
708 * Return 0 (discard), or raise the 0x1 flag (log event).
709 * Currently, other flags are kept for future extensions and have no
710 * effect.
711 * For `output` not equal to NULL:
712 * Return 0 on success, negative error value on error.
713 */
714 static
715 uint64_t bytecode_interpret(void *interpreter_data,
716 const char *interpreter_stack_data,
717 struct lttng_interpreter_output *output)
718 {
719 struct bytecode_runtime *bytecode = interpreter_data;
720 struct lttng_ctx *ctx = lttng_ust_rcu_dereference(*bytecode->p.priv->pctx);
721 void *pc, *next_pc, *start_pc;
722 int ret = -EINVAL;
723 uint64_t retval = 0;
724 struct estack _stack;
725 struct estack *stack = &_stack;
726 register int64_t ax = 0, bx = 0;
727 register enum entry_type ax_t = REG_UNKNOWN, bx_t = REG_UNKNOWN;
728 register int top = INTERPRETER_STACK_EMPTY;
729 #ifndef INTERPRETER_USE_SWITCH
730 static void *dispatch[NR_BYTECODE_OPS] = {
731 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
732
733 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
734
735 /* binary */
736 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
737 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
738 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
739 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
740 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
741 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
742 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
743 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
744 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
745 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
746
747 /* binary comparators */
748 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
749 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
750 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
751 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
752 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
753 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
754
755 /* string binary comparator */
756 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
757 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
758 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
759 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
760 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
761 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
762
763 /* globbing pattern binary comparator */
764 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
765 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
766
767 /* s64 binary comparator */
768 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
769 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
770 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
771 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
772 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
773 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
774
775 /* double binary comparator */
776 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
777 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
778 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
779 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
780 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
781 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
782
783 /* Mixed S64-double binary comparators */
784 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
785 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
786 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
787 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
788 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
789 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
790
791 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
792 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
793 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
794 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
795 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
796 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
797
798 /* unary */
799 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
800 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
801 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
802 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
803 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
804 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
805 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
806 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
807 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
808
809 /* logical */
810 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
811 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
812
813 /* load field ref */
814 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
815 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
816 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
817 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
818 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
819
820 /* load from immediate operand */
821 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
822 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
823 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
824 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
825
826 /* cast */
827 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
828 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
829 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
830
831 /* get context ref */
832 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
833 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
834 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
835 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
836
837 /* Instructions for recursive traversal through composed types. */
838 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
839 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
840 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
841
842 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
843 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
844 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
845 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
846
847 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
848 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
849 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
850 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
851 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
852 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
853 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
854 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
855 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
856 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
857 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
858 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
859
860 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
861
862 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
863 };
864 #endif /* #ifndef INTERPRETER_USE_SWITCH */
865
866 START_OP
867
868 OP(BYTECODE_OP_UNKNOWN):
869 OP(BYTECODE_OP_LOAD_FIELD_REF):
870 #ifdef INTERPRETER_USE_SWITCH
871 default:
872 #endif /* INTERPRETER_USE_SWITCH */
873 ERR("unknown bytecode op %u",
874 (unsigned int) *(bytecode_opcode_t *) pc);
875 ret = -EINVAL;
876 goto end;
877
878 OP(BYTECODE_OP_RETURN):
879 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
880 /* Handle dynamic typing. */
881 switch (estack_ax_t) {
882 case REG_S64:
883 case REG_U64:
884 retval = !!estack_ax_v;
885 break;
886 case REG_DOUBLE:
887 case REG_STRING:
888 case REG_PTR:
889 if (!output) {
890 ret = -EINVAL;
891 goto end;
892 }
893 retval = 0;
894 break;
895 case REG_STAR_GLOB_STRING:
896 case REG_UNKNOWN:
897 default:
898 ret = -EINVAL;
899 goto end;
900 }
901 ret = 0;
902 goto end;
903
904 OP(BYTECODE_OP_RETURN_S64):
905 /* LTTNG_INTERPRETER_DISCARD or LTTNG_INTERPRETER_RECORD_FLAG */
906 retval = !!estack_ax_v;
907 ret = 0;
908 goto end;
909
910 /* binary */
911 OP(BYTECODE_OP_MUL):
912 OP(BYTECODE_OP_DIV):
913 OP(BYTECODE_OP_MOD):
914 OP(BYTECODE_OP_PLUS):
915 OP(BYTECODE_OP_MINUS):
916 ERR("unsupported bytecode op %u",
917 (unsigned int) *(bytecode_opcode_t *) pc);
918 ret = -EINVAL;
919 goto end;
920
921 OP(BYTECODE_OP_EQ):
922 {
923 /* Dynamic typing. */
924 switch (estack_ax_t) {
925 case REG_S64: /* Fall-through */
926 case REG_U64:
927 switch (estack_bx_t) {
928 case REG_S64: /* Fall-through */
929 case REG_U64:
930 JUMP_TO(BYTECODE_OP_EQ_S64);
931 case REG_DOUBLE:
932 JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64);
933 case REG_STRING: /* Fall-through */
934 case REG_STAR_GLOB_STRING:
935 ret = -EINVAL;
936 goto end;
937 default:
938 ERR("Unknown interpreter register type (%d)",
939 (int) estack_bx_t);
940 ret = -EINVAL;
941 goto end;
942 }
943 break;
944 case REG_DOUBLE:
945 switch (estack_bx_t) {
946 case REG_S64: /* Fall-through */
947 case REG_U64:
948 JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE);
949 case REG_DOUBLE:
950 JUMP_TO(BYTECODE_OP_EQ_DOUBLE);
951 case REG_STRING: /* Fall-through */
952 case REG_STAR_GLOB_STRING:
953 ret = -EINVAL;
954 goto end;
955 default:
956 ERR("Unknown interpreter register type (%d)",
957 (int) estack_bx_t);
958 ret = -EINVAL;
959 goto end;
960 }
961 break;
962 case REG_STRING:
963 switch (estack_bx_t) {
964 case REG_S64: /* Fall-through */
965 case REG_U64: /* Fall-through */
966 case REG_DOUBLE:
967 ret = -EINVAL;
968 goto end;
969 case REG_STRING:
970 JUMP_TO(BYTECODE_OP_EQ_STRING);
971 case REG_STAR_GLOB_STRING:
972 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING);
973 default:
974 ERR("Unknown interpreter register type (%d)",
975 (int) estack_bx_t);
976 ret = -EINVAL;
977 goto end;
978 }
979 break;
980 case REG_STAR_GLOB_STRING:
981 switch (estack_bx_t) {
982 case REG_S64: /* Fall-through */
983 case REG_U64: /* Fall-through */
984 case REG_DOUBLE:
985 ret = -EINVAL;
986 goto end;
987 case REG_STRING:
988 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING);
989 case REG_STAR_GLOB_STRING:
990 ret = -EINVAL;
991 goto end;
992 default:
993 ERR("Unknown interpreter register type (%d)",
994 (int) estack_bx_t);
995 ret = -EINVAL;
996 goto end;
997 }
998 break;
999 default:
1000 ERR("Unknown interpreter register type (%d)",
1001 (int) estack_ax_t);
1002 ret = -EINVAL;
1003 goto end;
1004 }
1005 }
1006 OP(BYTECODE_OP_NE):
1007 {
1008 /* Dynamic typing. */
1009 switch (estack_ax_t) {
1010 case REG_S64: /* Fall-through */
1011 case REG_U64:
1012 switch (estack_bx_t) {
1013 case REG_S64: /* Fall-through */
1014 case REG_U64:
1015 JUMP_TO(BYTECODE_OP_NE_S64);
1016 case REG_DOUBLE:
1017 JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64);
1018 case REG_STRING: /* Fall-through */
1019 case REG_STAR_GLOB_STRING:
1020 ret = -EINVAL;
1021 goto end;
1022 default:
1023 ERR("Unknown interpreter register type (%d)",
1024 (int) estack_bx_t);
1025 ret = -EINVAL;
1026 goto end;
1027 }
1028 break;
1029 case REG_DOUBLE:
1030 switch (estack_bx_t) {
1031 case REG_S64: /* Fall-through */
1032 case REG_U64:
1033 JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE);
1034 case REG_DOUBLE:
1035 JUMP_TO(BYTECODE_OP_NE_DOUBLE);
1036 case REG_STRING: /* Fall-through */
1037 case REG_STAR_GLOB_STRING:
1038 ret = -EINVAL;
1039 goto end;
1040 default:
1041 ERR("Unknown interpreter register type (%d)",
1042 (int) estack_bx_t);
1043 ret = -EINVAL;
1044 goto end;
1045 }
1046 break;
1047 case REG_STRING:
1048 switch (estack_bx_t) {
1049 case REG_S64: /* Fall-through */
1050 case REG_U64:
1051 case REG_DOUBLE:
1052 ret = -EINVAL;
1053 goto end;
1054 case REG_STRING:
1055 JUMP_TO(BYTECODE_OP_NE_STRING);
1056 case REG_STAR_GLOB_STRING:
1057 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING);
1058 default:
1059 ERR("Unknown interpreter register type (%d)",
1060 (int) estack_bx_t);
1061 ret = -EINVAL;
1062 goto end;
1063 }
1064 break;
1065 case REG_STAR_GLOB_STRING:
1066 switch (estack_bx_t) {
1067 case REG_S64: /* Fall-through */
1068 case REG_U64:
1069 case REG_DOUBLE:
1070 ret = -EINVAL;
1071 goto end;
1072 case REG_STRING:
1073 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING);
1074 case REG_STAR_GLOB_STRING:
1075 ret = -EINVAL;
1076 goto end;
1077 default:
1078 ERR("Unknown interpreter register type (%d)",
1079 (int) estack_bx_t);
1080 ret = -EINVAL;
1081 goto end;
1082 }
1083 break;
1084 default:
1085 ERR("Unknown interpreter register type (%d)",
1086 (int) estack_ax_t);
1087 ret = -EINVAL;
1088 goto end;
1089 }
1090 }
1091 OP(BYTECODE_OP_GT):
1092 {
1093 /* Dynamic typing. */
1094 switch (estack_ax_t) {
1095 case REG_S64: /* Fall-through */
1096 case REG_U64:
1097 switch (estack_bx_t) {
1098 case REG_S64: /* Fall-through */
1099 case REG_U64:
1100 JUMP_TO(BYTECODE_OP_GT_S64);
1101 case REG_DOUBLE:
1102 JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64);
1103 case REG_STRING: /* Fall-through */
1104 case REG_STAR_GLOB_STRING:
1105 ret = -EINVAL;
1106 goto end;
1107 default:
1108 ERR("Unknown interpreter register type (%d)",
1109 (int) estack_bx_t);
1110 ret = -EINVAL;
1111 goto end;
1112 }
1113 break;
1114 case REG_DOUBLE:
1115 switch (estack_bx_t) {
1116 case REG_S64: /* Fall-through */
1117 case REG_U64:
1118 JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE);
1119 case REG_DOUBLE:
1120 JUMP_TO(BYTECODE_OP_GT_DOUBLE);
1121 case REG_STRING: /* Fall-through */
1122 case REG_STAR_GLOB_STRING:
1123 ret = -EINVAL;
1124 goto end;
1125 default:
1126 ERR("Unknown interpreter register type (%d)",
1127 (int) estack_bx_t);
1128 ret = -EINVAL;
1129 goto end;
1130 }
1131 break;
1132 case REG_STRING:
1133 switch (estack_bx_t) {
1134 case REG_S64: /* Fall-through */
1135 case REG_U64: /* Fall-through */
1136 case REG_DOUBLE: /* Fall-through */
1137 case REG_STAR_GLOB_STRING:
1138 ret = -EINVAL;
1139 goto end;
1140 case REG_STRING:
1141 JUMP_TO(BYTECODE_OP_GT_STRING);
1142 default:
1143 ERR("Unknown interpreter register type (%d)",
1144 (int) estack_bx_t);
1145 ret = -EINVAL;
1146 goto end;
1147 }
1148 break;
1149 default:
1150 ERR("Unknown interpreter register type (%d)",
1151 (int) estack_ax_t);
1152 ret = -EINVAL;
1153 goto end;
1154 }
1155 }
1156 OP(BYTECODE_OP_LT):
1157 {
1158 /* Dynamic typing. */
1159 switch (estack_ax_t) {
1160 case REG_S64: /* Fall-through */
1161 case REG_U64:
1162 switch (estack_bx_t) {
1163 case REG_S64: /* Fall-through */
1164 case REG_U64:
1165 JUMP_TO(BYTECODE_OP_LT_S64);
1166 case REG_DOUBLE:
1167 JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64);
1168 case REG_STRING: /* Fall-through */
1169 case REG_STAR_GLOB_STRING:
1170 ret = -EINVAL;
1171 goto end;
1172 default:
1173 ERR("Unknown interpreter register type (%d)",
1174 (int) estack_bx_t);
1175 ret = -EINVAL;
1176 goto end;
1177 }
1178 break;
1179 case REG_DOUBLE:
1180 switch (estack_bx_t) {
1181 case REG_S64: /* Fall-through */
1182 case REG_U64:
1183 JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE);
1184 case REG_DOUBLE:
1185 JUMP_TO(BYTECODE_OP_LT_DOUBLE);
1186 case REG_STRING: /* Fall-through */
1187 case REG_STAR_GLOB_STRING:
1188 ret = -EINVAL;
1189 goto end;
1190 default:
1191 ERR("Unknown interpreter register type (%d)",
1192 (int) estack_bx_t);
1193 ret = -EINVAL;
1194 goto end;
1195 }
1196 break;
1197 case REG_STRING:
1198 switch (estack_bx_t) {
1199 case REG_S64: /* Fall-through */
1200 case REG_U64: /* Fall-through */
1201 case REG_DOUBLE: /* Fall-through */
1202 case REG_STAR_GLOB_STRING:
1203 ret = -EINVAL;
1204 goto end;
1205 case REG_STRING:
1206 JUMP_TO(BYTECODE_OP_LT_STRING);
1207 default:
1208 ERR("Unknown interpreter register type (%d)",
1209 (int) estack_bx_t);
1210 ret = -EINVAL;
1211 goto end;
1212 }
1213 break;
1214 default:
1215 ERR("Unknown interpreter register type (%d)",
1216 (int) estack_ax_t);
1217 ret = -EINVAL;
1218 goto end;
1219 }
1220 }
1221 OP(BYTECODE_OP_GE):
1222 {
1223 /* Dynamic typing. */
1224 switch (estack_ax_t) {
1225 case REG_S64: /* Fall-through */
1226 case REG_U64:
1227 switch (estack_bx_t) {
1228 case REG_S64: /* Fall-through */
1229 case REG_U64:
1230 JUMP_TO(BYTECODE_OP_GE_S64);
1231 case REG_DOUBLE:
1232 JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64);
1233 case REG_STRING: /* Fall-through */
1234 case REG_STAR_GLOB_STRING:
1235 ret = -EINVAL;
1236 goto end;
1237 default:
1238 ERR("Unknown interpreter register type (%d)",
1239 (int) estack_bx_t);
1240 ret = -EINVAL;
1241 goto end;
1242 }
1243 break;
1244 case REG_DOUBLE:
1245 switch (estack_bx_t) {
1246 case REG_S64: /* Fall-through */
1247 case REG_U64:
1248 JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE);
1249 case REG_DOUBLE:
1250 JUMP_TO(BYTECODE_OP_GE_DOUBLE);
1251 case REG_STRING: /* Fall-through */
1252 case REG_STAR_GLOB_STRING:
1253 ret = -EINVAL;
1254 goto end;
1255 default:
1256 ERR("Unknown interpreter register type (%d)",
1257 (int) estack_bx_t);
1258 ret = -EINVAL;
1259 goto end;
1260 }
1261 break;
1262 case REG_STRING:
1263 switch (estack_bx_t) {
1264 case REG_S64: /* Fall-through */
1265 case REG_U64: /* Fall-through */
1266 case REG_DOUBLE: /* Fall-through */
1267 case REG_STAR_GLOB_STRING:
1268 ret = -EINVAL;
1269 goto end;
1270 case REG_STRING:
1271 JUMP_TO(BYTECODE_OP_GE_STRING);
1272 default:
1273 ERR("Unknown interpreter register type (%d)",
1274 (int) estack_bx_t);
1275 ret = -EINVAL;
1276 goto end;
1277 }
1278 break;
1279 default:
1280 ERR("Unknown interpreter register type (%d)",
1281 (int) estack_ax_t);
1282 ret = -EINVAL;
1283 goto end;
1284 }
1285 }
1286 OP(BYTECODE_OP_LE):
1287 {
1288 /* Dynamic typing. */
1289 switch (estack_ax_t) {
1290 case REG_S64: /* Fall-through */
1291 case REG_U64:
1292 switch (estack_bx_t) {
1293 case REG_S64: /* Fall-through */
1294 case REG_U64:
1295 JUMP_TO(BYTECODE_OP_LE_S64);
1296 case REG_DOUBLE:
1297 JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64);
1298 case REG_STRING: /* Fall-through */
1299 case REG_STAR_GLOB_STRING:
1300 ret = -EINVAL;
1301 goto end;
1302 default:
1303 ERR("Unknown interpreter register type (%d)",
1304 (int) estack_bx_t);
1305 ret = -EINVAL;
1306 goto end;
1307 }
1308 break;
1309 case REG_DOUBLE:
1310 switch (estack_bx_t) {
1311 case REG_S64: /* Fall-through */
1312 case REG_U64:
1313 JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE);
1314 case REG_DOUBLE:
1315 JUMP_TO(BYTECODE_OP_LE_DOUBLE);
1316 case REG_STRING: /* Fall-through */
1317 case REG_STAR_GLOB_STRING:
1318 ret = -EINVAL;
1319 goto end;
1320 default:
1321 ERR("Unknown interpreter register type (%d)",
1322 (int) estack_bx_t);
1323 ret = -EINVAL;
1324 goto end;
1325 }
1326 break;
1327 case REG_STRING:
1328 switch (estack_bx_t) {
1329 case REG_S64: /* Fall-through */
1330 case REG_U64: /* Fall-through */
1331 case REG_DOUBLE: /* Fall-through */
1332 case REG_STAR_GLOB_STRING:
1333 ret = -EINVAL;
1334 goto end;
1335 case REG_STRING:
1336 JUMP_TO(BYTECODE_OP_LE_STRING);
1337 default:
1338 ERR("Unknown interpreter register type (%d)",
1339 (int) estack_bx_t);
1340 ret = -EINVAL;
1341 goto end;
1342 }
1343 break;
1344 default:
1345 ERR("Unknown interpreter register type (%d)",
1346 (int) estack_ax_t);
1347 ret = -EINVAL;
1348 goto end;
1349 }
1350 }
1351
1352 OP(BYTECODE_OP_EQ_STRING):
1353 {
1354 int res;
1355
1356 res = (stack_strcmp(stack, top, "==") == 0);
1357 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1358 estack_ax_v = res;
1359 estack_ax_t = REG_S64;
1360 next_pc += sizeof(struct binary_op);
1361 PO;
1362 }
1363 OP(BYTECODE_OP_NE_STRING):
1364 {
1365 int res;
1366
1367 res = (stack_strcmp(stack, top, "!=") != 0);
1368 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1369 estack_ax_v = res;
1370 estack_ax_t = REG_S64;
1371 next_pc += sizeof(struct binary_op);
1372 PO;
1373 }
1374 OP(BYTECODE_OP_GT_STRING):
1375 {
1376 int res;
1377
1378 res = (stack_strcmp(stack, top, ">") > 0);
1379 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1380 estack_ax_v = res;
1381 estack_ax_t = REG_S64;
1382 next_pc += sizeof(struct binary_op);
1383 PO;
1384 }
1385 OP(BYTECODE_OP_LT_STRING):
1386 {
1387 int res;
1388
1389 res = (stack_strcmp(stack, top, "<") < 0);
1390 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1391 estack_ax_v = res;
1392 estack_ax_t = REG_S64;
1393 next_pc += sizeof(struct binary_op);
1394 PO;
1395 }
1396 OP(BYTECODE_OP_GE_STRING):
1397 {
1398 int res;
1399
1400 res = (stack_strcmp(stack, top, ">=") >= 0);
1401 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1402 estack_ax_v = res;
1403 estack_ax_t = REG_S64;
1404 next_pc += sizeof(struct binary_op);
1405 PO;
1406 }
1407 OP(BYTECODE_OP_LE_STRING):
1408 {
1409 int res;
1410
1411 res = (stack_strcmp(stack, top, "<=") <= 0);
1412 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1413 estack_ax_v = res;
1414 estack_ax_t = REG_S64;
1415 next_pc += sizeof(struct binary_op);
1416 PO;
1417 }
1418
1419 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
1420 {
1421 int res;
1422
1423 res = (stack_star_glob_match(stack, top, "==") == 0);
1424 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1425 estack_ax_v = res;
1426 estack_ax_t = REG_S64;
1427 next_pc += sizeof(struct binary_op);
1428 PO;
1429 }
1430 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1431 {
1432 int res;
1433
1434 res = (stack_star_glob_match(stack, top, "!=") != 0);
1435 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1436 estack_ax_v = res;
1437 estack_ax_t = REG_S64;
1438 next_pc += sizeof(struct binary_op);
1439 PO;
1440 }
1441
1442 OP(BYTECODE_OP_EQ_S64):
1443 {
1444 int res;
1445
1446 res = (estack_bx_v == estack_ax_v);
1447 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1448 estack_ax_v = res;
1449 estack_ax_t = REG_S64;
1450 next_pc += sizeof(struct binary_op);
1451 PO;
1452 }
1453 OP(BYTECODE_OP_NE_S64):
1454 {
1455 int res;
1456
1457 res = (estack_bx_v != estack_ax_v);
1458 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1459 estack_ax_v = res;
1460 estack_ax_t = REG_S64;
1461 next_pc += sizeof(struct binary_op);
1462 PO;
1463 }
1464 OP(BYTECODE_OP_GT_S64):
1465 {
1466 int res;
1467
1468 res = (estack_bx_v > estack_ax_v);
1469 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1470 estack_ax_v = res;
1471 estack_ax_t = REG_S64;
1472 next_pc += sizeof(struct binary_op);
1473 PO;
1474 }
1475 OP(BYTECODE_OP_LT_S64):
1476 {
1477 int res;
1478
1479 res = (estack_bx_v < estack_ax_v);
1480 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1481 estack_ax_v = res;
1482 estack_ax_t = REG_S64;
1483 next_pc += sizeof(struct binary_op);
1484 PO;
1485 }
1486 OP(BYTECODE_OP_GE_S64):
1487 {
1488 int res;
1489
1490 res = (estack_bx_v >= estack_ax_v);
1491 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1492 estack_ax_v = res;
1493 estack_ax_t = REG_S64;
1494 next_pc += sizeof(struct binary_op);
1495 PO;
1496 }
1497 OP(BYTECODE_OP_LE_S64):
1498 {
1499 int res;
1500
1501 res = (estack_bx_v <= estack_ax_v);
1502 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1503 estack_ax_v = res;
1504 estack_ax_t = REG_S64;
1505 next_pc += sizeof(struct binary_op);
1506 PO;
1507 }
1508
1509 OP(BYTECODE_OP_EQ_DOUBLE):
1510 {
1511 int res;
1512
1513 res = (estack_bx(stack, top)->u.d == estack_ax(stack, top)->u.d);
1514 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1515 estack_ax_v = res;
1516 estack_ax_t = REG_S64;
1517 next_pc += sizeof(struct binary_op);
1518 PO;
1519 }
1520 OP(BYTECODE_OP_NE_DOUBLE):
1521 {
1522 int res;
1523
1524 res = (estack_bx(stack, top)->u.d != estack_ax(stack, top)->u.d);
1525 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1526 estack_ax_v = res;
1527 estack_ax_t = REG_S64;
1528 next_pc += sizeof(struct binary_op);
1529 PO;
1530 }
1531 OP(BYTECODE_OP_GT_DOUBLE):
1532 {
1533 int res;
1534
1535 res = (estack_bx(stack, top)->u.d > estack_ax(stack, top)->u.d);
1536 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1537 estack_ax_v = res;
1538 estack_ax_t = REG_S64;
1539 next_pc += sizeof(struct binary_op);
1540 PO;
1541 }
1542 OP(BYTECODE_OP_LT_DOUBLE):
1543 {
1544 int res;
1545
1546 res = (estack_bx(stack, top)->u.d < estack_ax(stack, top)->u.d);
1547 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1548 estack_ax_v = res;
1549 estack_ax_t = REG_S64;
1550 next_pc += sizeof(struct binary_op);
1551 PO;
1552 }
1553 OP(BYTECODE_OP_GE_DOUBLE):
1554 {
1555 int res;
1556
1557 res = (estack_bx(stack, top)->u.d >= estack_ax(stack, top)->u.d);
1558 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1559 estack_ax_v = res;
1560 estack_ax_t = REG_S64;
1561 next_pc += sizeof(struct binary_op);
1562 PO;
1563 }
1564 OP(BYTECODE_OP_LE_DOUBLE):
1565 {
1566 int res;
1567
1568 res = (estack_bx(stack, top)->u.d <= estack_ax(stack, top)->u.d);
1569 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1570 estack_ax_v = res;
1571 estack_ax_t = REG_S64;
1572 next_pc += sizeof(struct binary_op);
1573 PO;
1574 }
1575
1576 /* Mixed S64-double binary comparators */
1577 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1578 {
1579 int res;
1580
1581 res = (estack_bx(stack, top)->u.d == estack_ax_v);
1582 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1583 estack_ax_v = res;
1584 estack_ax_t = REG_S64;
1585 next_pc += sizeof(struct binary_op);
1586 PO;
1587 }
1588 OP(BYTECODE_OP_NE_DOUBLE_S64):
1589 {
1590 int res;
1591
1592 res = (estack_bx(stack, top)->u.d != estack_ax_v);
1593 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1594 estack_ax_v = res;
1595 estack_ax_t = REG_S64;
1596 next_pc += sizeof(struct binary_op);
1597 PO;
1598 }
1599 OP(BYTECODE_OP_GT_DOUBLE_S64):
1600 {
1601 int res;
1602
1603 res = (estack_bx(stack, top)->u.d > estack_ax_v);
1604 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1605 estack_ax_v = res;
1606 estack_ax_t = REG_S64;
1607 next_pc += sizeof(struct binary_op);
1608 PO;
1609 }
1610 OP(BYTECODE_OP_LT_DOUBLE_S64):
1611 {
1612 int res;
1613
1614 res = (estack_bx(stack, top)->u.d < estack_ax_v);
1615 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1616 estack_ax_v = res;
1617 estack_ax_t = REG_S64;
1618 next_pc += sizeof(struct binary_op);
1619 PO;
1620 }
1621 OP(BYTECODE_OP_GE_DOUBLE_S64):
1622 {
1623 int res;
1624
1625 res = (estack_bx(stack, top)->u.d >= estack_ax_v);
1626 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1627 estack_ax_v = res;
1628 estack_ax_t = REG_S64;
1629 next_pc += sizeof(struct binary_op);
1630 PO;
1631 }
1632 OP(BYTECODE_OP_LE_DOUBLE_S64):
1633 {
1634 int res;
1635
1636 res = (estack_bx(stack, top)->u.d <= estack_ax_v);
1637 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1638 estack_ax_v = res;
1639 estack_ax_t = REG_S64;
1640 next_pc += sizeof(struct binary_op);
1641 PO;
1642 }
1643
1644 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1645 {
1646 int res;
1647
1648 res = (estack_bx_v == estack_ax(stack, top)->u.d);
1649 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1650 estack_ax_v = res;
1651 estack_ax_t = REG_S64;
1652 next_pc += sizeof(struct binary_op);
1653 PO;
1654 }
1655 OP(BYTECODE_OP_NE_S64_DOUBLE):
1656 {
1657 int res;
1658
1659 res = (estack_bx_v != estack_ax(stack, top)->u.d);
1660 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1661 estack_ax_v = res;
1662 estack_ax_t = REG_S64;
1663 next_pc += sizeof(struct binary_op);
1664 PO;
1665 }
1666 OP(BYTECODE_OP_GT_S64_DOUBLE):
1667 {
1668 int res;
1669
1670 res = (estack_bx_v > estack_ax(stack, top)->u.d);
1671 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1672 estack_ax_v = res;
1673 estack_ax_t = REG_S64;
1674 next_pc += sizeof(struct binary_op);
1675 PO;
1676 }
1677 OP(BYTECODE_OP_LT_S64_DOUBLE):
1678 {
1679 int res;
1680
1681 res = (estack_bx_v < estack_ax(stack, top)->u.d);
1682 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1683 estack_ax_v = res;
1684 estack_ax_t = REG_S64;
1685 next_pc += sizeof(struct binary_op);
1686 PO;
1687 }
1688 OP(BYTECODE_OP_GE_S64_DOUBLE):
1689 {
1690 int res;
1691
1692 res = (estack_bx_v >= estack_ax(stack, top)->u.d);
1693 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1694 estack_ax_v = res;
1695 estack_ax_t = REG_S64;
1696 next_pc += sizeof(struct binary_op);
1697 PO;
1698 }
1699 OP(BYTECODE_OP_LE_S64_DOUBLE):
1700 {
1701 int res;
1702
1703 res = (estack_bx_v <= estack_ax(stack, top)->u.d);
1704 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1705 estack_ax_v = res;
1706 estack_ax_t = REG_S64;
1707 next_pc += sizeof(struct binary_op);
1708 PO;
1709 }
1710 OP(BYTECODE_OP_BIT_RSHIFT):
1711 {
1712 int64_t res;
1713
1714 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1715 ret = -EINVAL;
1716 goto end;
1717 }
1718
1719 /* Catch undefined behavior. */
1720 if (caa_unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1721 ret = -EINVAL;
1722 goto end;
1723 }
1724 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1725 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1726 estack_ax_v = res;
1727 estack_ax_t = REG_U64;
1728 next_pc += sizeof(struct binary_op);
1729 PO;
1730 }
1731 OP(BYTECODE_OP_BIT_LSHIFT):
1732 {
1733 int64_t res;
1734
1735 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1736 ret = -EINVAL;
1737 goto end;
1738 }
1739
1740 /* Catch undefined behavior. */
1741 if (caa_unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1742 ret = -EINVAL;
1743 goto end;
1744 }
1745 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1746 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1747 estack_ax_v = res;
1748 estack_ax_t = REG_U64;
1749 next_pc += sizeof(struct binary_op);
1750 PO;
1751 }
1752 OP(BYTECODE_OP_BIT_AND):
1753 {
1754 int64_t res;
1755
1756 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1757 ret = -EINVAL;
1758 goto end;
1759 }
1760
1761 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1762 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1763 estack_ax_v = res;
1764 estack_ax_t = REG_U64;
1765 next_pc += sizeof(struct binary_op);
1766 PO;
1767 }
1768 OP(BYTECODE_OP_BIT_OR):
1769 {
1770 int64_t res;
1771
1772 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1773 ret = -EINVAL;
1774 goto end;
1775 }
1776
1777 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1778 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1779 estack_ax_v = res;
1780 estack_ax_t = REG_U64;
1781 next_pc += sizeof(struct binary_op);
1782 PO;
1783 }
1784 OP(BYTECODE_OP_BIT_XOR):
1785 {
1786 int64_t res;
1787
1788 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1789 ret = -EINVAL;
1790 goto end;
1791 }
1792
1793 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1794 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1795 estack_ax_v = res;
1796 estack_ax_t = REG_U64;
1797 next_pc += sizeof(struct binary_op);
1798 PO;
1799 }
1800
1801 /* unary */
1802 OP(BYTECODE_OP_UNARY_PLUS):
1803 {
1804 /* Dynamic typing. */
1805 switch (estack_ax_t) {
1806 case REG_S64: /* Fall-through. */
1807 case REG_U64:
1808 JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64);
1809 case REG_DOUBLE:
1810 JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE);
1811 case REG_STRING: /* Fall-through */
1812 case REG_STAR_GLOB_STRING:
1813 ret = -EINVAL;
1814 goto end;
1815 default:
1816 ERR("Unknown interpreter register type (%d)",
1817 (int) estack_ax_t);
1818 ret = -EINVAL;
1819 goto end;
1820 }
1821 }
1822 OP(BYTECODE_OP_UNARY_MINUS):
1823 {
1824 /* Dynamic typing. */
1825 switch (estack_ax_t) {
1826 case REG_S64: /* Fall-through. */
1827 case REG_U64:
1828 JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64);
1829 case REG_DOUBLE:
1830 JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE);
1831 case REG_STRING: /* Fall-through */
1832 case REG_STAR_GLOB_STRING:
1833 ret = -EINVAL;
1834 goto end;
1835 default:
1836 ERR("Unknown interpreter register type (%d)",
1837 (int) estack_ax_t);
1838 ret = -EINVAL;
1839 goto end;
1840 }
1841 }
1842 OP(BYTECODE_OP_UNARY_NOT):
1843 {
1844 /* Dynamic typing. */
1845 switch (estack_ax_t) {
1846 case REG_S64: /* Fall-through. */
1847 case REG_U64:
1848 JUMP_TO(BYTECODE_OP_UNARY_NOT_S64);
1849 case REG_DOUBLE:
1850 JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE);
1851 case REG_STRING: /* Fall-through */
1852 case REG_STAR_GLOB_STRING:
1853 ret = -EINVAL;
1854 goto end;
1855 default:
1856 ERR("Unknown interpreter register type (%d)",
1857 (int) estack_ax_t);
1858 ret = -EINVAL;
1859 goto end;
1860 }
1861 next_pc += sizeof(struct unary_op);
1862 PO;
1863 }
1864
1865 OP(BYTECODE_OP_UNARY_BIT_NOT):
1866 {
1867 /* Dynamic typing. */
1868 if (!IS_INTEGER_REGISTER(estack_ax_t)) {
1869 ret = -EINVAL;
1870 goto end;
1871 }
1872
1873 estack_ax_v = ~(uint64_t) estack_ax_v;
1874 estack_ax_t = REG_U64;
1875 next_pc += sizeof(struct unary_op);
1876 PO;
1877 }
1878
1879 OP(BYTECODE_OP_UNARY_PLUS_S64):
1880 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1881 {
1882 next_pc += sizeof(struct unary_op);
1883 PO;
1884 }
1885 OP(BYTECODE_OP_UNARY_MINUS_S64):
1886 {
1887 estack_ax_v = -estack_ax_v;
1888 next_pc += sizeof(struct unary_op);
1889 PO;
1890 }
1891 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1892 {
1893 estack_ax(stack, top)->u.d = -estack_ax(stack, top)->u.d;
1894 next_pc += sizeof(struct unary_op);
1895 PO;
1896 }
1897 OP(BYTECODE_OP_UNARY_NOT_S64):
1898 {
1899 estack_ax_v = !estack_ax_v;
1900 estack_ax_t = REG_S64;
1901 next_pc += sizeof(struct unary_op);
1902 PO;
1903 }
1904 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1905 {
1906 estack_ax_v = !estack_ax(stack, top)->u.d;
1907 estack_ax_t = REG_S64;
1908 next_pc += sizeof(struct unary_op);
1909 PO;
1910 }
1911
1912 /* logical */
1913 OP(BYTECODE_OP_AND):
1914 {
1915 struct logical_op *insn = (struct logical_op *) pc;
1916
1917 if (estack_ax_t != REG_S64 && estack_ax_t != REG_U64) {
1918 ret = -EINVAL;
1919 goto end;
1920 }
1921 /* If AX is 0, skip and evaluate to 0 */
1922 if (unlikely(estack_ax_v == 0)) {
1923 dbg_printf("Jumping to bytecode offset %u\n",
1924 (unsigned int) insn->skip_offset);
1925 next_pc = start_pc + insn->skip_offset;
1926 } else {
1927 /* Pop 1 when jump not taken */
1928 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1929 next_pc += sizeof(struct logical_op);
1930 }
1931 PO;
1932 }
1933 OP(BYTECODE_OP_OR):
1934 {
1935 struct logical_op *insn = (struct logical_op *) pc;
1936
1937 if (estack_ax_t != REG_S64 && estack_ax_t != REG_U64) {
1938 ret = -EINVAL;
1939 goto end;
1940 }
1941 /* If AX is nonzero, skip and evaluate to 1 */
1942 if (unlikely(estack_ax_v != 0)) {
1943 estack_ax_v = 1;
1944 dbg_printf("Jumping to bytecode offset %u\n",
1945 (unsigned int) insn->skip_offset);
1946 next_pc = start_pc + insn->skip_offset;
1947 } else {
1948 /* Pop 1 when jump not taken */
1949 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1950 next_pc += sizeof(struct logical_op);
1951 }
1952 PO;
1953 }
1954
1955
1956 /* load field ref */
1957 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1958 {
1959 struct load_op *insn = (struct load_op *) pc;
1960 struct field_ref *ref = (struct field_ref *) insn->data;
1961
1962 dbg_printf("load field ref offset %u type string\n",
1963 ref->offset);
1964 estack_push(stack, top, ax, bx, ax_t, bx_t);
1965 estack_ax(stack, top)->u.s.str =
1966 *(const char * const *) &interpreter_stack_data[ref->offset];
1967 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1968 dbg_printf("Interpreter warning: loading a NULL string.\n");
1969 ret = -EINVAL;
1970 goto end;
1971 }
1972 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
1973 estack_ax(stack, top)->u.s.literal_type =
1974 ESTACK_STRING_LITERAL_TYPE_NONE;
1975 estack_ax_t = REG_STRING;
1976 dbg_printf("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1977 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1978 PO;
1979 }
1980
1981 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1982 {
1983 struct load_op *insn = (struct load_op *) pc;
1984 struct field_ref *ref = (struct field_ref *) insn->data;
1985
1986 dbg_printf("load field ref offset %u type sequence\n",
1987 ref->offset);
1988 estack_push(stack, top, ax, bx, ax_t, bx_t);
1989 estack_ax(stack, top)->u.s.seq_len =
1990 *(unsigned long *) &interpreter_stack_data[ref->offset];
1991 estack_ax(stack, top)->u.s.str =
1992 *(const char **) (&interpreter_stack_data[ref->offset
1993 + sizeof(unsigned long)]);
1994 estack_ax_t = REG_STRING;
1995 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1996 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
1997 ret = -EINVAL;
1998 goto end;
1999 }
2000 estack_ax(stack, top)->u.s.literal_type =
2001 ESTACK_STRING_LITERAL_TYPE_NONE;
2002 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2003 PO;
2004 }
2005
2006 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
2007 {
2008 struct load_op *insn = (struct load_op *) pc;
2009 struct field_ref *ref = (struct field_ref *) insn->data;
2010
2011 dbg_printf("load field ref offset %u type s64\n",
2012 ref->offset);
2013 estack_push(stack, top, ax, bx, ax_t, bx_t);
2014 estack_ax_v =
2015 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
2016 estack_ax_t = REG_S64;
2017 dbg_printf("ref load s64 %" PRIi64 "\n", estack_ax_v);
2018 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2019 PO;
2020 }
2021
2022 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
2023 {
2024 struct load_op *insn = (struct load_op *) pc;
2025 struct field_ref *ref = (struct field_ref *) insn->data;
2026
2027 dbg_printf("load field ref offset %u type double\n",
2028 ref->offset);
2029 estack_push(stack, top, ax, bx, ax_t, bx_t);
2030 memcpy(&estack_ax(stack, top)->u.d, &interpreter_stack_data[ref->offset],
2031 sizeof(struct literal_double));
2032 estack_ax_t = REG_DOUBLE;
2033 dbg_printf("ref load double %g\n", estack_ax(stack, top)->u.d);
2034 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2035 PO;
2036 }
2037
2038 /* load from immediate operand */
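/*
 * Immediate string operands are embedded directly in the instruction
 * stream: insn->data points at a NUL-terminated string that follows the
 * struct load_op header, which is why next_pc is advanced by
 * strlen(insn->data) + 1 below.
 */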
2039 OP(BYTECODE_OP_LOAD_STRING):
2040 {
2041 struct load_op *insn = (struct load_op *) pc;
2042
2043 dbg_printf("load string %s\n", insn->data);
2044 estack_push(stack, top, ax, bx, ax_t, bx_t);
2045 estack_ax(stack, top)->u.s.str = insn->data;
2046 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2047 estack_ax(stack, top)->u.s.literal_type =
2048 ESTACK_STRING_LITERAL_TYPE_PLAIN;
2049 estack_ax_t = REG_STRING;
2050 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
2051 PO;
2052 }
2053
2054 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
2055 {
2056 struct load_op *insn = (struct load_op *) pc;
2057
2058 dbg_printf("load globbing pattern %s\n", insn->data);
2059 estack_push(stack, top, ax, bx, ax_t, bx_t);
2060 estack_ax(stack, top)->u.s.str = insn->data;
2061 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2062 estack_ax(stack, top)->u.s.literal_type =
2063 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
2064 estack_ax_t = REG_STAR_GLOB_STRING;
2065 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
2066 PO;
2067 }
2068
2069 OP(BYTECODE_OP_LOAD_S64):
2070 {
2071 struct load_op *insn = (struct load_op *) pc;
2072
2073 estack_push(stack, top, ax, bx, ax_t, bx_t);
2074 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
2075 estack_ax_t = REG_S64;
2076 dbg_printf("load s64 %" PRIi64 "\n", estack_ax_v);
2077 next_pc += sizeof(struct load_op)
2078 + sizeof(struct literal_numeric);
2079 PO;
2080 }
2081
2082 OP(BYTECODE_OP_LOAD_DOUBLE):
2083 {
2084 struct load_op *insn = (struct load_op *) pc;
2085
2086 estack_push(stack, top, ax, bx, ax_t, bx_t);
2087 memcpy(&estack_ax(stack, top)->u.d, insn->data,
2088 sizeof(struct literal_double));
2089 estack_ax_t = REG_DOUBLE;
2090 dbg_printf("load double %g\n", estack_ax(stack, top)->u.d);
2091 next_pc += sizeof(struct load_op)
2092 + sizeof(struct literal_double);
2093 PO;
2094 }
2095
2096 /* cast */
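/*
 * Casts are dynamically typed: the handler below inspects the current
 * register type at run time, degenerating into a NOP when the value is
 * already an integer and into a conversion when it is a double. String
 * and star-glob string registers cannot be cast and yield -EINVAL.
 */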
2097 OP(BYTECODE_OP_CAST_TO_S64):
2098 {
2099 /* Dynamic typing. */
2100 switch (estack_ax_t) {
2101 case REG_S64:
2102 JUMP_TO(BYTECODE_OP_CAST_NOP);
2103 case REG_DOUBLE:
2104 JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64);
2105 case REG_U64:
2106 estack_ax_t = REG_S64;
2107 JUMP_TO(BYTECODE_OP_CAST_NOP);
2108 case REG_STRING: /* Fall-through */
2109 case REG_STAR_GLOB_STRING:
2110 ret = -EINVAL;
2111 goto end;
2112 default:
2113 ERR("Unknown interpreter register type (%d)",
2114 (int) estack_ax_t);
2115 ret = -EINVAL;
2116 goto end;
2117 }
2118 }
2119
2120 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
2121 {
2122 estack_ax_v = (int64_t) estack_ax(stack, top)->u.d;
2123 estack_ax_t = REG_S64;
2124 next_pc += sizeof(struct cast_op);
2125 PO;
2126 }
2127
2128 OP(BYTECODE_OP_CAST_NOP):
2129 {
2130 next_pc += sizeof(struct cast_op);
2131 PO;
2132 }
2133
2134 /* get context ref */
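/*
 * Context references do not read the event payload: the value is
 * fetched through the context field's get_value() callback into a
 * struct lttng_ctx_value. The generic handler below switches on the
 * selector (v.sel) to pick the register type at run time; the _STRING,
 * _S64 and _DOUBLE variants trust a type presumably established when
 * the bytecode was specialized.
 */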
2135 OP(BYTECODE_OP_GET_CONTEXT_REF):
2136 {
2137 struct load_op *insn = (struct load_op *) pc;
2138 struct field_ref *ref = (struct field_ref *) insn->data;
2139 struct lttng_ctx_field *ctx_field;
2140 struct lttng_ctx_value v;
2141
2142 dbg_printf("get context ref offset %u type dynamic\n",
2143 ref->offset);
2144 ctx_field = &ctx->fields[ref->offset];
2145 ctx_field->get_value(ctx_field, &v);
2146 estack_push(stack, top, ax, bx, ax_t, bx_t);
2147 switch (v.sel) {
2148 case LTTNG_UST_DYNAMIC_TYPE_NONE:
2149 ret = -EINVAL;
2150 goto end;
2151 case LTTNG_UST_DYNAMIC_TYPE_S64:
2152 estack_ax_v = v.u.s64;
2153 estack_ax_t = REG_S64;
2154 dbg_printf("ref get context dynamic s64 %" PRIi64 "\n", estack_ax_v);
2155 break;
2156 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE:
2157 estack_ax(stack, top)->u.d = v.u.d;
2158 estack_ax_t = REG_DOUBLE;
2159 dbg_printf("ref get context dynamic double %g\n", estack_ax(stack, top)->u.d);
2160 break;
2161 case LTTNG_UST_DYNAMIC_TYPE_STRING:
2162 estack_ax(stack, top)->u.s.str = v.u.str;
2163 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2164 dbg_printf("Interpreter warning: loading a NULL string.\n");
2165 ret = -EINVAL;
2166 goto end;
2167 }
2168 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2169 estack_ax(stack, top)->u.s.literal_type =
2170 ESTACK_STRING_LITERAL_TYPE_NONE;
2171 dbg_printf("ref get context dynamic string %s\n", estack_ax(stack, top)->u.s.str);
2172 estack_ax_t = REG_STRING;
2173 break;
2174 default:
2175 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v.sel);
2176 ret = -EINVAL;
2177 goto end;
2178 }
2179 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2180 PO;
2181 }
2182
2183 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
2184 {
2185 struct load_op *insn = (struct load_op *) pc;
2186 struct field_ref *ref = (struct field_ref *) insn->data;
2187 struct lttng_ctx_field *ctx_field;
2188 struct lttng_ctx_value v;
2189
2190 dbg_printf("get context ref offset %u type string\n",
2191 ref->offset);
2192 ctx_field = &ctx->fields[ref->offset];
2193 ctx_field->get_value(ctx_field, &v);
2194 estack_push(stack, top, ax, bx, ax_t, bx_t);
2195 estack_ax(stack, top)->u.s.str = v.u.str;
2196 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2197 dbg_printf("Interpreter warning: loading a NULL string.\n");
2198 ret = -EINVAL;
2199 goto end;
2200 }
2201 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2202 estack_ax(stack, top)->u.s.literal_type =
2203 ESTACK_STRING_LITERAL_TYPE_NONE;
2204 estack_ax_t = REG_STRING;
2205 dbg_printf("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
2206 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2207 PO;
2208 }
2209
2210 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
2211 {
2212 struct load_op *insn = (struct load_op *) pc;
2213 struct field_ref *ref = (struct field_ref *) insn->data;
2214 struct lttng_ctx_field *ctx_field;
2215 struct lttng_ctx_value v;
2216
2217 dbg_printf("get context ref offset %u type s64\n",
2218 ref->offset);
2219 ctx_field = &ctx->fields[ref->offset];
2220 ctx_field->get_value(ctx_field, &v);
2221 estack_push(stack, top, ax, bx, ax_t, bx_t);
2222 estack_ax_v = v.u.s64;
2223 estack_ax_t = REG_S64;
2224 dbg_printf("ref get context s64 %" PRIi64 "\n", estack_ax_v);
2225 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2226 PO;
2227 }
2228
2229 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
2230 {
2231 struct load_op *insn = (struct load_op *) pc;
2232 struct field_ref *ref = (struct field_ref *) insn->data;
2233 struct lttng_ctx_field *ctx_field;
2234 struct lttng_ctx_value v;
2235
2236 dbg_printf("get context ref offset %u type double\n",
2237 ref->offset);
2238 ctx_field = &ctx->fields[ref->offset];
2239 ctx_field->get_value(ctx_field, &v);
2240 estack_push(stack, top, ax, bx, ax_t, bx_t);
2241 memcpy(&estack_ax(stack, top)->u.d, &v.u.d, sizeof(struct literal_double));
2242 estack_ax_t = REG_DOUBLE;
2243 dbg_printf("ref get context double %g\n", estack_ax(stack, top)->u.d);
2244 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2245 PO;
2246 }
2247
2248 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
2249 {
2250 dbg_printf("op get context root\n");
2251 estack_push(stack, top, ax, bx, ax_t, bx_t);
2252 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
2253 /* "field" only needed for variants. */
2254 estack_ax(stack, top)->u.ptr.field = NULL;
2255 estack_ax_t = REG_PTR;
2256 next_pc += sizeof(struct load_op);
2257 PO;
2258 }
2259
2260 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
2261 {
2262 dbg_printf("op get app context root\n");
2263 estack_push(stack, top, ax, bx, ax_t, bx_t);
2264 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_APP_CONTEXT;
2265 /* "field" only needed for variants. */
2266 estack_ax(stack, top)->u.ptr.field = NULL;
2267 estack_ax_t = REG_PTR;
2268 next_pc += sizeof(struct load_op);
2269 PO;
2270 }
2271
2272 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
2273 {
2274 dbg_printf("op get app payload root\n");
2275 estack_push(stack, top, ax, bx, ax_t, bx_t);
2276 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
2277 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
2278 /* "field" only needed for variants. */
2279 estack_ax(stack, top)->u.ptr.field = NULL;
2280 estack_ax_t = REG_PTR;
2281 next_pc += sizeof(struct load_op);
2282 PO;
2283 }
2284
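/*
 * The three GET_*_ROOT ops above start an object traversal by pushing a
 * REG_PTR register describing where to look (context, app context or
 * payload). GET_SYMBOL / GET_INDEX_* then narrow that pointer down to a
 * specific field, and the LOAD_FIELD* ops further below dereference it
 * into a value register.
 */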
2285 OP(BYTECODE_OP_GET_SYMBOL):
2286 {
2287 dbg_printf("op get symbol\n");
2288 switch (estack_ax(stack, top)->u.ptr.type) {
2289 case LOAD_OBJECT:
2290 ERR("Nested fields not implemented yet.");
2291 ret = -EINVAL;
2292 goto end;
2293 case LOAD_ROOT_CONTEXT:
2294 case LOAD_ROOT_APP_CONTEXT:
2295 case LOAD_ROOT_PAYLOAD:
2296 /*
2297 * symbol lookup is performed by
2298 * specialization.
2299 */
2300 ret = -EINVAL;
2301 goto end;
2302 }
2303 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
2304 PO;
2305 }
2306
2307 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
2308 {
2309 /*
2310 * Used for first variant encountered in a
2311 * traversal. Variants are not implemented yet.
2312 */
2313 ret = -EINVAL;
2314 goto end;
2315 }
2316
2317 OP(BYTECODE_OP_GET_INDEX_U16):
2318 {
2319 struct load_op *insn = (struct load_op *) pc;
2320 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
2321
2322 dbg_printf("op get index u16\n");
2323 ret = dynamic_get_index(ctx, bytecode, index->index, estack_ax(stack, top));
2324 if (ret)
2325 goto end;
2326 estack_ax_v = estack_ax(stack, top)->u.v;
2327 estack_ax_t = estack_ax(stack, top)->type;
2328 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
2329 PO;
2330 }
2331
2332 OP(BYTECODE_OP_GET_INDEX_U64):
2333 {
2334 struct load_op *insn = (struct load_op *) pc;
2335 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
2336
2337 dbg_printf("op get index u64\n");
2338 ret = dynamic_get_index(ctx, bytecode, index->index, estack_ax(stack, top));
2339 if (ret)
2340 goto end;
2341 estack_ax_v = estack_ax(stack, top)->u.v;
2342 estack_ax_t = estack_ax(stack, top)->type;
2343 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
2344 PO;
2345 }
2346
2347 OP(BYTECODE_OP_LOAD_FIELD):
2348 {
2349 dbg_printf("op load field\n");
2350 ret = dynamic_load_field(estack_ax(stack, top));
2351 if (ret)
2352 goto end;
2353 estack_ax_v = estack_ax(stack, top)->u.v;
2354 estack_ax_t = estack_ax(stack, top)->type;
2355 next_pc += sizeof(struct load_op);
2356 PO;
2357 }
2358
2359 OP(BYTECODE_OP_LOAD_FIELD_S8):
2360 {
2361 dbg_printf("op load field s8\n");
2362
2363 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
2364 estack_ax_t = REG_S64;
2365 next_pc += sizeof(struct load_op);
2366 PO;
2367 }
2368 OP(BYTECODE_OP_LOAD_FIELD_S16):
2369 {
2370 dbg_printf("op load field s16\n");
2371
2372 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
2373 estack_ax_t = REG_S64;
2374 next_pc += sizeof(struct load_op);
2375 PO;
2376 }
2377 OP(BYTECODE_OP_LOAD_FIELD_S32):
2378 {
2379 dbg_printf("op load field s32\n");
2380
2381 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
2382 estack_ax_t = REG_S64;
2383 next_pc += sizeof(struct load_op);
2384 PO;
2385 }
2386 OP(BYTECODE_OP_LOAD_FIELD_S64):
2387 {
2388 dbg_printf("op load field s64\n");
2389
2390 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
2391 estack_ax_t = REG_S64;
2392 next_pc += sizeof(struct load_op);
2393 PO;
2394 }
2395 OP(BYTECODE_OP_LOAD_FIELD_U8):
2396 {
2397 dbg_printf("op load field u8\n");
2398
2399 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
2400 estack_ax_t = REG_U64;
2401 next_pc += sizeof(struct load_op);
2402 PO;
2403 }
2404 OP(BYTECODE_OP_LOAD_FIELD_U16):
2405 {
2406 dbg_printf("op load field u16\n");
2407
2408 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
2409 estack_ax_t = REG_U64;
2410 next_pc += sizeof(struct load_op);
2411 PO;
2412 }
2413 OP(BYTECODE_OP_LOAD_FIELD_U32):
2414 {
2415 dbg_printf("op load field u32\n");
2416
2417 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
2418 estack_ax_t = REG_U64;
2419 next_pc += sizeof(struct load_op);
2420 PO;
2421 }
2422 OP(BYTECODE_OP_LOAD_FIELD_U64):
2423 {
2424 dbg_printf("op load field u64\n");
2425
2426 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
2427 estack_ax_t = REG_U64;
2428 next_pc += sizeof(struct load_op);
2429 PO;
2430 }
2431 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
2432 {
2433 dbg_printf("op load field double\n");
2434
2435 memcpy(&estack_ax(stack, top)->u.d,
2436 estack_ax(stack, top)->u.ptr.ptr,
2437 sizeof(struct literal_double));
2438 estack_ax(stack, top)->type = REG_DOUBLE;
2439 next_pc += sizeof(struct load_op);
2440 PO;
2441 }
2442
2443 OP(BYTECODE_OP_LOAD_FIELD_STRING):
2444 {
2445 const char *str;
2446
2447 dbg_printf("op load field string\n");
2448 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
2449 estack_ax(stack, top)->u.s.str = str;
2450 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2451 dbg_printf("Interpreter warning: loading a NULL string.\n");
2452 ret = -EINVAL;
2453 goto end;
2454 }
2455 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2456 estack_ax(stack, top)->u.s.literal_type =
2457 ESTACK_STRING_LITERAL_TYPE_NONE;
2458 estack_ax(stack, top)->type = REG_STRING;
2459 next_pc += sizeof(struct load_op);
2460 PO;
2461 }
2462
2463 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
2464 {
2465 const char *ptr;
2466
2467 dbg_printf("op load field string sequence\n");
2468 ptr = estack_ax(stack, top)->u.ptr.ptr;
2469 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
2470 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
2471 estack_ax(stack, top)->type = REG_STRING;
2472 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2473 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2474 ret = -EINVAL;
2475 goto end;
2476 }
2477 estack_ax(stack, top)->u.s.literal_type =
2478 ESTACK_STRING_LITERAL_TYPE_NONE;
2479 next_pc += sizeof(struct load_op);
2480 PO;
2481 }
2482
2483 END_OP
2484 end:
2485 /* Return _DISCARD on error. */
2486 if (ret)
2487 return LTTNG_INTERPRETER_DISCARD;
2488
2489 if (output) {
2490 return lttng_bytecode_interpret_format_output(estack_ax(stack, top),
2491 output);
2492 }
2493
2494 return retval;
2495 }
2496
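/*
 * Both public entry points below are thin wrappers around
 * bytecode_interpret(): the filter variant passes a NULL output and the
 * caller acts on the returned verdict (LTTNG_INTERPRETER_DISCARD on
 * error), while the capture variant supplies a struct
 * lttng_interpreter_output that receives the formatted top-of-stack
 * value on success.
 */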
2497 uint64_t lttng_bytecode_filter_interpret(void *filter_data,
2498 const char *filter_stack_data)
2499 {
2500 return bytecode_interpret(filter_data, filter_stack_data, NULL);
2501 }
2502
2503 uint64_t lttng_bytecode_capture_interpret(void *capture_data,
2504 const char *capture_stack_data,
2505 struct lttng_interpreter_output *output)
2506 {
2507 return bytecode_interpret(capture_data, capture_stack_data,
2508 (struct lttng_interpreter_output *) output);
2509 }
2510
2511 #undef START_OP
2512 #undef OP
2513 #undef PO
2514 #undef END_OP