[lttng-ust.git] / liblttng-ust / lttng-bytecode-interpreter.c
1 /*
2 * SPDX-License-Identifier: MIT
3 *
4 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
5 *
6 * LTTng UST bytecode interpreter.
7 */
8
9 #define _LGPL_SOURCE
10 #include <stddef.h>
11 #include <stdint.h>
12
13 #include <lttng/urcu/pointer.h>
14 #include <urcu/rculist.h>
15 #include <lttng/ust-endian.h>
16 #include <lttng/ust-events.h>
17 #include "ust-events-internal.h"
18
19 #include "lttng-bytecode.h"
20 #include "string-utils.h"
21
22
23 /*
24 * -1: wildcard found.
25 * -2: unknown escape char.
26 * 0: normal char.
27 */
28
29 static
30 int parse_char(const char **p)
31 {
32 switch (**p) {
33 case '\\':
34 (*p)++;
35 switch (**p) {
36 case '\\':
37 case '*':
38 return 0;
39 default:
40 return -2;
41 }
42 case '*':
43 return -1;
44 default:
45 return 0;
46 }
47 }
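
/*
 * Illustrative sketch (not used by the interpreter): how a caller is meant
 * to drive parse_char() over a PLAIN string literal.  parse_char() consumes
 * a leading backslash itself and leaves *p on the character it classified,
 * so the caller advances past that character afterwards.  The helper name
 * below is hypothetical.
 */
static __attribute__((unused))
size_t example_literal_prefix_len(const char *str)
{
	const char *p = str;
	size_t len = 0;

	while (*p != '\0') {
		if (parse_char(&p) == -1)
			break;		/* Unescaped '*': wildcard reached. */
		if (*p == '\0')
			break;		/* Trailing '\' with nothing after it. */
		p++;			/* Consume the escaped or normal character. */
		len++;
	}
	return len;
}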
48
49 /*
50 * Returns SIZE_MAX if the string is null-terminated, or the number of
51 * characters if not.
52 */
53 static
54 size_t get_str_or_seq_len(const struct estack_entry *entry)
55 {
56 return entry->u.s.seq_len;
57 }
58
59 static
60 int stack_star_glob_match(struct estack *stack, int top, const char *cmp_type)
61 {
62 const char *pattern;
63 const char *candidate;
64 size_t pattern_len;
65 size_t candidate_len;
66
67 /* Find out which side is the pattern vs. the candidate. */
68 if (estack_ax(stack, top)->u.s.literal_type == ESTACK_STRING_LITERAL_TYPE_STAR_GLOB) {
69 pattern = estack_ax(stack, top)->u.s.str;
70 pattern_len = get_str_or_seq_len(estack_ax(stack, top));
71 candidate = estack_bx(stack, top)->u.s.str;
72 candidate_len = get_str_or_seq_len(estack_bx(stack, top));
73 } else {
74 pattern = estack_bx(stack, top)->u.s.str;
75 pattern_len = get_str_or_seq_len(estack_bx(stack, top));
76 candidate = estack_ax(stack, top)->u.s.str;
77 candidate_len = get_str_or_seq_len(estack_ax(stack, top));
78 }
79
80 /* Perform the match. Returns 0 when the result is true. */
81 return !strutils_star_glob_match(pattern, pattern_len, candidate,
82 candidate_len);
83 }
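
/*
 * Note: as with strcmp(), a return value of 0 from stack_star_glob_match()
 * means "the pattern matches the candidate"; the EQ/NE star-glob opcodes
 * below simply compare the result against 0.  Only the operand tagged
 * ESTACK_STRING_LITERAL_TYPE_STAR_GLOB is interpreted as a globbing
 * pattern; the other operand is matched verbatim.
 */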
84
85 static
86 int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
87 {
88 const char *p = estack_bx(stack, top)->u.s.str, *q = estack_ax(stack, top)->u.s.str;
89 int ret;
90 int diff;
91
92 for (;;) {
93 int escaped_r0 = 0;
94
95 if (unlikely(p - estack_bx(stack, top)->u.s.str >= estack_bx(stack, top)->u.s.seq_len || *p == '\0')) {
96 if (q - estack_ax(stack, top)->u.s.str >= estack_ax(stack, top)->u.s.seq_len || *q == '\0') {
97 return 0;
98 } else {
99 if (estack_ax(stack, top)->u.s.literal_type ==
100 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
101 ret = parse_char(&q);
102 if (ret == -1)
103 return 0;
104 }
105 return -1;
106 }
107 }
108 if (unlikely(q - estack_ax(stack, top)->u.s.str >= estack_ax(stack, top)->u.s.seq_len || *q == '\0')) {
109 if (estack_bx(stack, top)->u.s.literal_type ==
110 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
111 ret = parse_char(&p);
112 if (ret == -1)
113 return 0;
114 }
115 return 1;
116 }
117 if (estack_bx(stack, top)->u.s.literal_type ==
118 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
119 ret = parse_char(&p);
120 if (ret == -1) {
121 return 0;
122 } else if (ret == -2) {
123 escaped_r0 = 1;
124 }
125 /* else compare both char */
126 }
127 if (estack_ax(stack, top)->u.s.literal_type ==
128 ESTACK_STRING_LITERAL_TYPE_PLAIN) {
129 ret = parse_char(&q);
130 if (ret == -1) {
131 return 0;
132 } else if (ret == -2) {
133 if (!escaped_r0)
134 return -1;
135 } else {
136 if (escaped_r0)
137 return 1;
138 }
139 } else {
140 if (escaped_r0)
141 return 1;
142 }
143 diff = *p - *q;
144 if (diff != 0)
145 break;
146 p++;
147 q++;
148 }
149 return diff;
150 }
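
/*
 * Contract of stack_strcmp() as implemented above: the return value follows
 * the strcmp() convention (negative, zero, positive), operands may be
 * length-bounded sequences rather than null-terminated strings (seq_len),
 * and an unescaped '*' in a PLAIN string literal short-circuits the
 * comparison as equal (legacy wildcard behaviour).
 */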
151
152 int lttng_bytecode_interpret_error(struct lttng_ust_bytecode_runtime *bytecode_runtime,
153 const char *stack_data,
154 void *ctx)
155 {
156 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR;
157 }
158
159 #ifdef INTERPRETER_USE_SWITCH
160
161 /*
162 * Fallback for compilers that do not support taking address of labels.
163 */
164
165 #define START_OP \
166 start_pc = &bytecode->code[0]; \
167 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
168 pc = next_pc) { \
169 dbg_printf("Executing op %s (%u)\n", \
170 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc), \
171 (unsigned int) *(bytecode_opcode_t *) pc); \
172 switch (*(bytecode_opcode_t *) pc) {
173
174 #define OP(name) jump_target_##name: __attribute__((unused)); \
175 case name
176
177 #define PO break
178
179 #define END_OP } \
180 }
181
182 #define JUMP_TO(name) \
183 goto jump_target_##name
184
185 #else
186
187 /*
188 * Dispatch-table based interpreter.
189 */
190
191 #define START_OP \
192 start_pc = &bytecode->code[0]; \
193 pc = next_pc = start_pc; \
194 if (unlikely(pc - start_pc >= bytecode->len)) \
195 goto end; \
196 goto *dispatch[*(bytecode_opcode_t *) pc];
197
198 #define OP(name) \
199 LABEL_##name
200
201 #define PO \
202 pc = next_pc; \
203 goto *dispatch[*(bytecode_opcode_t *) pc];
204
205 #define END_OP
206
207 #define JUMP_TO(name) \
208 goto LABEL_##name
209
210 #endif
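
/*
 * Sketch of how the macros above combine in the interpreter below; both the
 * switch-based and the computed-goto variants expand from the same pattern:
 *
 *	START_OP
 *
 *	OP(BYTECODE_OP_FOO):
 *	{
 *		... interpret FOO, update the estack, advance next_pc ...
 *		PO;
 *	}
 *
 *	END_OP
 */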
211
212 #define IS_INTEGER_REGISTER(reg_type) \
213 (reg_type == REG_U64 || reg_type == REG_S64)
214
215 static int context_get_index(struct lttng_ust_ctx *ctx,
216 struct load_ptr *ptr,
217 uint32_t idx)
218 {
219
220 struct lttng_ust_ctx_field *ctx_field;
221 struct lttng_ust_event_field *field;
222 struct lttng_ust_ctx_value v;
223
224 ctx_field = ctx->fields[idx];
225 field = ctx_field->event_field;
226 ptr->type = LOAD_OBJECT;
227 ptr->field = field;
228
229 switch (field->type->type) {
230 case lttng_ust_type_integer:
231 ctx_field->get_value(ctx_field, &v);
232 if (lttng_ust_get_type_integer(field->type)->signedness) {
233 ptr->object_type = OBJECT_TYPE_S64;
234 ptr->u.s64 = v.u.s64;
235 ptr->ptr = &ptr->u.s64;
236 } else {
237 ptr->object_type = OBJECT_TYPE_U64;
238 ptr->u.u64 = v.u.s64; /* Cast. */
239 ptr->ptr = &ptr->u.u64;
240 }
241 break;
242 case lttng_ust_type_enum:
243 {
244 const struct lttng_ust_type_integer *itype;
245
246 itype = lttng_ust_get_type_integer(lttng_ust_get_type_enum(field->type)->container_type);
247 ctx_field->get_value(ctx_field, &v);
248 if (itype->signedness) {
249 ptr->object_type = OBJECT_TYPE_SIGNED_ENUM;
250 ptr->u.s64 = v.u.s64;
251 ptr->ptr = &ptr->u.s64;
252 } else {
253 ptr->object_type = OBJECT_TYPE_UNSIGNED_ENUM;
254 ptr->u.u64 = v.u.s64; /* Cast. */
255 ptr->ptr = &ptr->u.u64;
256 }
257 break;
258 }
259 case lttng_ust_type_array:
260 if (lttng_ust_get_type_array(field->type)->elem_type->type != lttng_ust_type_integer) {
261 ERR("Array nesting only supports integer types.");
262 return -EINVAL;
263 }
264 if (lttng_ust_get_type_array(field->type)->encoding == lttng_ust_string_encoding_none) {
265 ERR("Only string arrays are supported for contexts.");
266 return -EINVAL;
267 }
268 ptr->object_type = OBJECT_TYPE_STRING;
269 ctx_field->get_value(ctx_field, &v);
270 ptr->ptr = v.u.str;
271 break;
272 case lttng_ust_type_sequence:
273 if (lttng_ust_get_type_sequence(field->type)->elem_type->type != lttng_ust_type_integer) {
274 ERR("Sequence nesting only supports integer types.");
275 return -EINVAL;
276 }
277 if (lttng_ust_get_type_sequence(field->type)->encoding == lttng_ust_string_encoding_none) {
278 ERR("Only string sequences are supported for contexts.");
279 return -EINVAL;
280 }
281 ptr->object_type = OBJECT_TYPE_STRING;
282 ctx_field->get_value(ctx_field, &v);
283 ptr->ptr = v.u.str;
284 break;
285 case lttng_ust_type_string:
286 ptr->object_type = OBJECT_TYPE_STRING;
287 ctx_field->get_value(ctx_field, &v);
288 ptr->ptr = v.u.str;
289 break;
290 case lttng_ust_type_float:
291 ptr->object_type = OBJECT_TYPE_DOUBLE;
292 ctx_field->get_value(ctx_field, &v);
293 ptr->u.d = v.u.d;
294 ptr->ptr = &ptr->u.d;
295 break;
296 case lttng_ust_type_dynamic:
297 ctx_field->get_value(ctx_field, &v);
298 switch (v.sel) {
299 case LTTNG_UST_DYNAMIC_TYPE_NONE:
300 return -EINVAL;
301 case LTTNG_UST_DYNAMIC_TYPE_U8:
302 case LTTNG_UST_DYNAMIC_TYPE_U16:
303 case LTTNG_UST_DYNAMIC_TYPE_U32:
304 case LTTNG_UST_DYNAMIC_TYPE_U64:
305 ptr->object_type = OBJECT_TYPE_U64;
306 ptr->u.u64 = v.u.u64;
307 ptr->ptr = &ptr->u.u64;
308 dbg_printf("context get index dynamic u64 %" PRIu64 "\n", ptr->u.u64);
309 break;
310 case LTTNG_UST_DYNAMIC_TYPE_S8:
311 case LTTNG_UST_DYNAMIC_TYPE_S16:
312 case LTTNG_UST_DYNAMIC_TYPE_S32:
313 case LTTNG_UST_DYNAMIC_TYPE_S64:
314 ptr->object_type = OBJECT_TYPE_S64;
315 ptr->u.s64 = v.u.s64;
316 ptr->ptr = &ptr->u.s64;
317 dbg_printf("context get index dynamic s64 %" PRIi64 "\n", ptr->u.s64);
318 break;
319 case LTTNG_UST_DYNAMIC_TYPE_FLOAT:
320 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE:
321 ptr->object_type = OBJECT_TYPE_DOUBLE;
322 ptr->u.d = v.u.d;
323 ptr->ptr = &ptr->u.d;
324 dbg_printf("context get index dynamic double %g\n", ptr->u.d);
325 break;
326 case LTTNG_UST_DYNAMIC_TYPE_STRING:
327 ptr->object_type = OBJECT_TYPE_STRING;
328 ptr->ptr = v.u.str;
329 dbg_printf("context get index dynamic string %s\n", (const char *) ptr->ptr);
330 break;
331 default:
332 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v.sel);
333 return -EINVAL;
334 }
335 break;
336 default:
337 ERR("Unknown type: %d", (int) field->type->type);
338 return -EINVAL;
339 }
340 return 0;
341 }
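
/*
 * On success, context_get_index() leaves *ptr describing a LOAD_OBJECT:
 * object_type identifies the concrete kind and ptr->ptr points either at a
 * value cached inside ptr->u (integers, enumerations, doubles) or directly
 * at the context-provided string.  dynamic_load_field() below turns such an
 * object into a typed estack register.
 */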
342
343 static int dynamic_get_index(struct lttng_ust_ctx *ctx,
344 struct bytecode_runtime *runtime,
345 uint64_t index, struct estack_entry *stack_top)
346 {
347 int ret;
348 const struct bytecode_get_index_data *gid;
349
350 gid = (const struct bytecode_get_index_data *) &runtime->data[index];
351 switch (stack_top->u.ptr.type) {
352 case LOAD_OBJECT:
353 switch (stack_top->u.ptr.object_type) {
354 case OBJECT_TYPE_ARRAY:
355 {
356 const char *ptr;
357
358 assert(gid->offset < gid->array_len);
359 /* Skip count (unsigned long) */
360 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
361 ptr = ptr + gid->offset;
362 stack_top->u.ptr.ptr = ptr;
363 stack_top->u.ptr.object_type = gid->elem.type;
364 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
365 assert(stack_top->u.ptr.field->type->type == lttng_ust_type_array);
366 stack_top->u.ptr.field = NULL;
367 break;
368 }
369 case OBJECT_TYPE_SEQUENCE:
370 {
371 const char *ptr;
372 size_t ptr_seq_len;
373
374 ptr = *(const char **) (stack_top->u.ptr.ptr + sizeof(unsigned long));
375 ptr_seq_len = *(unsigned long *) stack_top->u.ptr.ptr;
376 if (gid->offset >= gid->elem.len * ptr_seq_len) {
377 ret = -EINVAL;
378 goto end;
379 }
380 ptr = ptr + gid->offset;
381 stack_top->u.ptr.ptr = ptr;
382 stack_top->u.ptr.object_type = gid->elem.type;
383 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
384 assert(stack_top->u.ptr.field->type->type == lttng_ust_type_sequence);
385 stack_top->u.ptr.field = NULL;
386 break;
387 }
388 case OBJECT_TYPE_STRUCT:
389 ERR("Nested structures are not supported yet.");
390 ret = -EINVAL;
391 goto end;
392 case OBJECT_TYPE_VARIANT:
393 default:
394 ERR("Unexpected get index type %d",
395 (int) stack_top->u.ptr.object_type);
396 ret = -EINVAL;
397 goto end;
398 }
399 break;
400 case LOAD_ROOT_CONTEXT:
401 case LOAD_ROOT_APP_CONTEXT: /* Fall-through */
402 {
403 ret = context_get_index(ctx,
404 &stack_top->u.ptr,
405 gid->ctx_index);
406 if (ret) {
407 goto end;
408 }
409 break;
410 }
411 case LOAD_ROOT_PAYLOAD:
412 stack_top->u.ptr.ptr += gid->offset;
413 if (gid->elem.type == OBJECT_TYPE_STRING)
414 stack_top->u.ptr.ptr = *(const char * const *) stack_top->u.ptr.ptr;
415 stack_top->u.ptr.object_type = gid->elem.type;
416 stack_top->u.ptr.type = LOAD_OBJECT;
417 stack_top->u.ptr.field = gid->field;
418 stack_top->u.ptr.rev_bo = gid->elem.rev_bo;
419 break;
420 }
421
422 stack_top->type = REG_PTR;
423
424 return 0;
425
426 end:
427 return ret;
428 }
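
/*
 * Bounds handling differs between the two compound kinds above: array
 * offsets are expected to have been validated before interpretation (hence
 * the assert()), whereas a sequence length is only known at runtime, so the
 * offset is checked against elem.len * seq_len and rejected with -EINVAL.
 */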
429
430 static int dynamic_load_field(struct estack_entry *stack_top)
431 {
432 int ret;
433
434 switch (stack_top->u.ptr.type) {
435 case LOAD_OBJECT:
436 break;
437 case LOAD_ROOT_CONTEXT:
438 case LOAD_ROOT_APP_CONTEXT:
439 case LOAD_ROOT_PAYLOAD:
440 default:
441 dbg_printf("Interpreter warning: cannot load root, missing field name.\n");
442 ret = -EINVAL;
443 goto end;
444 }
445 switch (stack_top->u.ptr.object_type) {
446 case OBJECT_TYPE_S8:
447 dbg_printf("op load field s8\n");
448 stack_top->u.v = *(int8_t *) stack_top->u.ptr.ptr;
449 stack_top->type = REG_S64;
450 break;
451 case OBJECT_TYPE_S16:
452 {
453 int16_t tmp;
454
455 dbg_printf("op load field s16\n");
456 tmp = *(int16_t *) stack_top->u.ptr.ptr;
457 if (stack_top->u.ptr.rev_bo)
458 tmp = bswap_16(tmp);
459 stack_top->u.v = tmp;
460 stack_top->type = REG_S64;
461 break;
462 }
463 case OBJECT_TYPE_S32:
464 {
465 int32_t tmp;
466
467 dbg_printf("op load field s32\n");
468 tmp = *(int32_t *) stack_top->u.ptr.ptr;
469 if (stack_top->u.ptr.rev_bo)
470 tmp = bswap_32(tmp);
471 stack_top->u.v = tmp;
472 stack_top->type = REG_S64;
473 break;
474 }
475 case OBJECT_TYPE_S64:
476 {
477 int64_t tmp;
478
479 dbg_printf("op load field s64\n");
480 tmp = *(int64_t *) stack_top->u.ptr.ptr;
481 if (stack_top->u.ptr.rev_bo)
482 tmp = bswap_64(tmp);
483 stack_top->u.v = tmp;
484 stack_top->type = REG_S64;
485 break;
486 }
487 case OBJECT_TYPE_SIGNED_ENUM:
488 {
489 int64_t tmp;
490
491 dbg_printf("op load field signed enumeration\n");
492 tmp = *(int64_t *) stack_top->u.ptr.ptr;
493 if (stack_top->u.ptr.rev_bo)
494 tmp = bswap_64(tmp);
495 stack_top->u.v = tmp;
496 stack_top->type = REG_S64;
497 break;
498 }
499 case OBJECT_TYPE_U8:
500 dbg_printf("op load field u8\n");
501 stack_top->u.v = *(uint8_t *) stack_top->u.ptr.ptr;
502 stack_top->type = REG_U64;
503 break;
504 case OBJECT_TYPE_U16:
505 {
506 uint16_t tmp;
507
508 dbg_printf("op load field u16\n");
509 tmp = *(uint16_t *) stack_top->u.ptr.ptr;
510 if (stack_top->u.ptr.rev_bo)
511 tmp = bswap_16(tmp);
512 stack_top->u.v = tmp;
513 stack_top->type = REG_U64;
514 break;
515 }
516 case OBJECT_TYPE_U32:
517 {
518 uint32_t tmp;
519
520 dbg_printf("op load field u32\n");
521 tmp = *(uint32_t *) stack_top->u.ptr.ptr;
522 if (stack_top->u.ptr.rev_bo)
523 tmp = bswap_32(tmp);
524 stack_top->u.v = tmp;
525 stack_top->type = REG_U64;
526 break;
527 }
528 case OBJECT_TYPE_U64:
529 {
530 uint64_t tmp;
531
532 dbg_printf("op load field u64\n");
533 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
534 if (stack_top->u.ptr.rev_bo)
535 tmp = bswap_64(tmp);
536 stack_top->u.v = tmp;
537 stack_top->type = REG_U64;
538 break;
539 }
540 case OBJECT_TYPE_UNSIGNED_ENUM:
541 {
542 uint64_t tmp;
543
544 dbg_printf("op load field unsigned enumeration\n");
545 tmp = *(uint64_t *) stack_top->u.ptr.ptr;
546 if (stack_top->u.ptr.rev_bo)
547 tmp = bswap_64(tmp);
548 stack_top->u.v = tmp;
549 stack_top->type = REG_U64;
550 break;
551 }
552 case OBJECT_TYPE_DOUBLE:
553 memcpy(&stack_top->u.d,
554 stack_top->u.ptr.ptr,
555 sizeof(struct literal_double));
556 stack_top->type = REG_DOUBLE;
557 break;
558 case OBJECT_TYPE_STRING:
559 {
560 const char *str;
561
562 dbg_printf("op load field string\n");
563 str = (const char *) stack_top->u.ptr.ptr;
564 stack_top->u.s.str = str;
565 if (unlikely(!stack_top->u.s.str)) {
566 dbg_printf("Interpreter warning: loading a NULL string.\n");
567 ret = -EINVAL;
568 goto end;
569 }
570 stack_top->u.s.seq_len = SIZE_MAX;
571 stack_top->u.s.literal_type =
572 ESTACK_STRING_LITERAL_TYPE_NONE;
573 stack_top->type = REG_STRING;
574 break;
575 }
576 case OBJECT_TYPE_STRING_SEQUENCE:
577 {
578 const char *ptr;
579
580 dbg_printf("op load field string sequence\n");
581 ptr = stack_top->u.ptr.ptr;
582 stack_top->u.s.seq_len = *(unsigned long *) ptr;
583 stack_top->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
584 stack_top->type = REG_STRING;
585 if (unlikely(!stack_top->u.s.str)) {
586 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
587 ret = -EINVAL;
588 goto end;
589 }
590 stack_top->u.s.literal_type =
591 ESTACK_STRING_LITERAL_TYPE_NONE;
592 break;
593 }
594 case OBJECT_TYPE_DYNAMIC:
595 /*
596 * Dynamic types in context are looked up
597 * by context get index.
598 */
599 ret = -EINVAL;
600 goto end;
601 case OBJECT_TYPE_SEQUENCE:
602 case OBJECT_TYPE_ARRAY:
603 case OBJECT_TYPE_STRUCT:
604 case OBJECT_TYPE_VARIANT:
605 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
606 ret = -EINVAL;
607 goto end;
608 }
609 return 0;
610
611 end:
612 return ret;
613 }
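
/*
 * After a successful dynamic_load_field(), stack_top->type is one of
 * REG_S64, REG_U64, REG_DOUBLE or REG_STRING, so the comparison and output
 * code paths never see a REG_PTR that still points at a scalar field.
 */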
614
615 static
616 int lttng_bytecode_interpret_format_output(struct estack_entry *ax,
617 struct lttng_interpreter_output *output)
618 {
619 int ret;
620
621 again:
622 switch (ax->type) {
623 case REG_S64:
624 output->type = LTTNG_INTERPRETER_TYPE_S64;
625 output->u.s = ax->u.v;
626 break;
627 case REG_U64:
628 output->type = LTTNG_INTERPRETER_TYPE_U64;
629 output->u.u = (uint64_t) ax->u.v;
630 break;
631 case REG_DOUBLE:
632 output->type = LTTNG_INTERPRETER_TYPE_DOUBLE;
633 output->u.d = ax->u.d;
634 break;
635 case REG_STRING:
636 output->type = LTTNG_INTERPRETER_TYPE_STRING;
637 output->u.str.str = ax->u.s.str;
638 output->u.str.len = ax->u.s.seq_len;
639 break;
640 case REG_PTR:
641 switch (ax->u.ptr.object_type) {
642 case OBJECT_TYPE_S8:
643 case OBJECT_TYPE_S16:
644 case OBJECT_TYPE_S32:
645 case OBJECT_TYPE_S64:
646 case OBJECT_TYPE_U8:
647 case OBJECT_TYPE_U16:
648 case OBJECT_TYPE_U32:
649 case OBJECT_TYPE_U64:
650 case OBJECT_TYPE_DOUBLE:
651 case OBJECT_TYPE_STRING:
652 case OBJECT_TYPE_STRING_SEQUENCE:
653 ret = dynamic_load_field(ax);
654 if (ret)
655 return ret;
656 /* Retry after loading ptr into stack top. */
657 goto again;
658 case OBJECT_TYPE_SEQUENCE:
659 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
660 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
661 output->u.sequence.nr_elem = *(unsigned long *) ax->u.ptr.ptr;
662 output->u.sequence.nested_type = lttng_ust_get_type_sequence(ax->u.ptr.field->type)->elem_type;
663 break;
664 case OBJECT_TYPE_ARRAY:
665 /* Skip count (unsigned long) */
666 output->type = LTTNG_INTERPRETER_TYPE_SEQUENCE;
667 output->u.sequence.ptr = *(const char **) (ax->u.ptr.ptr + sizeof(unsigned long));
668 output->u.sequence.nr_elem = lttng_ust_get_type_array(ax->u.ptr.field->type)->length;
669 output->u.sequence.nested_type = lttng_ust_get_type_array(ax->u.ptr.field->type)->elem_type;
670 break;
671 case OBJECT_TYPE_SIGNED_ENUM:
672 ret = dynamic_load_field(ax);
673 if (ret)
674 return ret;
675 output->type = LTTNG_INTERPRETER_TYPE_SIGNED_ENUM;
676 output->u.s = ax->u.v;
677 break;
678 case OBJECT_TYPE_UNSIGNED_ENUM:
679 ret = dynamic_load_field(ax);
680 if (ret)
681 return ret;
682 output->type = LTTNG_INTERPRETER_TYPE_UNSIGNED_ENUM;
683 output->u.u = ax->u.v;
684 break;
685 case OBJECT_TYPE_STRUCT:
686 case OBJECT_TYPE_VARIANT:
687 default:
688 return -EINVAL;
689 }
690
691 break;
692 case REG_STAR_GLOB_STRING:
693 case REG_UNKNOWN:
694 default:
695 return -EINVAL;
696 }
697
698 return 0;
699 }
700
701 /*
702 * Return LTTNG_UST_BYTECODE_INTERPRETER_OK on success.
703 * Return LTTNG_UST_BYTECODE_INTERPRETER_ERROR on error.
704 *
705 * For FILTER bytecode: expect a struct lttng_ust_bytecode_filter_ctx *
706 * as @ctx argument.
707 * For CAPTURE bytecode: expect a struct lttng_interpreter_output *
708 * as @ctx argument.
709 */
710 int lttng_bytecode_interpret(struct lttng_ust_bytecode_runtime *ust_bytecode,
711 const char *interpreter_stack_data,
712 void *caller_ctx)
713 {
714 struct bytecode_runtime *bytecode = caa_container_of(ust_bytecode, struct bytecode_runtime, p);
715 struct lttng_ust_ctx *ctx = lttng_ust_rcu_dereference(*ust_bytecode->pctx);
716 void *pc, *next_pc, *start_pc;
717 int ret = -EINVAL, retval = 0;
718 struct estack _stack;
719 struct estack *stack = &_stack;
720 register int64_t ax = 0, bx = 0;
721 register enum entry_type ax_t = REG_UNKNOWN, bx_t = REG_UNKNOWN;
722 register int top = INTERPRETER_STACK_EMPTY;
723 #ifndef INTERPRETER_USE_SWITCH
724 static void *dispatch[NR_BYTECODE_OPS] = {
725 [ BYTECODE_OP_UNKNOWN ] = &&LABEL_BYTECODE_OP_UNKNOWN,
726
727 [ BYTECODE_OP_RETURN ] = &&LABEL_BYTECODE_OP_RETURN,
728
729 /* binary */
730 [ BYTECODE_OP_MUL ] = &&LABEL_BYTECODE_OP_MUL,
731 [ BYTECODE_OP_DIV ] = &&LABEL_BYTECODE_OP_DIV,
732 [ BYTECODE_OP_MOD ] = &&LABEL_BYTECODE_OP_MOD,
733 [ BYTECODE_OP_PLUS ] = &&LABEL_BYTECODE_OP_PLUS,
734 [ BYTECODE_OP_MINUS ] = &&LABEL_BYTECODE_OP_MINUS,
735 [ BYTECODE_OP_BIT_RSHIFT ] = &&LABEL_BYTECODE_OP_BIT_RSHIFT,
736 [ BYTECODE_OP_BIT_LSHIFT ] = &&LABEL_BYTECODE_OP_BIT_LSHIFT,
737 [ BYTECODE_OP_BIT_AND ] = &&LABEL_BYTECODE_OP_BIT_AND,
738 [ BYTECODE_OP_BIT_OR ] = &&LABEL_BYTECODE_OP_BIT_OR,
739 [ BYTECODE_OP_BIT_XOR ] = &&LABEL_BYTECODE_OP_BIT_XOR,
740
741 /* binary comparators */
742 [ BYTECODE_OP_EQ ] = &&LABEL_BYTECODE_OP_EQ,
743 [ BYTECODE_OP_NE ] = &&LABEL_BYTECODE_OP_NE,
744 [ BYTECODE_OP_GT ] = &&LABEL_BYTECODE_OP_GT,
745 [ BYTECODE_OP_LT ] = &&LABEL_BYTECODE_OP_LT,
746 [ BYTECODE_OP_GE ] = &&LABEL_BYTECODE_OP_GE,
747 [ BYTECODE_OP_LE ] = &&LABEL_BYTECODE_OP_LE,
748
749 /* string binary comparator */
750 [ BYTECODE_OP_EQ_STRING ] = &&LABEL_BYTECODE_OP_EQ_STRING,
751 [ BYTECODE_OP_NE_STRING ] = &&LABEL_BYTECODE_OP_NE_STRING,
752 [ BYTECODE_OP_GT_STRING ] = &&LABEL_BYTECODE_OP_GT_STRING,
753 [ BYTECODE_OP_LT_STRING ] = &&LABEL_BYTECODE_OP_LT_STRING,
754 [ BYTECODE_OP_GE_STRING ] = &&LABEL_BYTECODE_OP_GE_STRING,
755 [ BYTECODE_OP_LE_STRING ] = &&LABEL_BYTECODE_OP_LE_STRING,
756
757 /* globbing pattern binary comparator */
758 [ BYTECODE_OP_EQ_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_EQ_STAR_GLOB_STRING,
759 [ BYTECODE_OP_NE_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_NE_STAR_GLOB_STRING,
760
761 /* s64 binary comparator */
762 [ BYTECODE_OP_EQ_S64 ] = &&LABEL_BYTECODE_OP_EQ_S64,
763 [ BYTECODE_OP_NE_S64 ] = &&LABEL_BYTECODE_OP_NE_S64,
764 [ BYTECODE_OP_GT_S64 ] = &&LABEL_BYTECODE_OP_GT_S64,
765 [ BYTECODE_OP_LT_S64 ] = &&LABEL_BYTECODE_OP_LT_S64,
766 [ BYTECODE_OP_GE_S64 ] = &&LABEL_BYTECODE_OP_GE_S64,
767 [ BYTECODE_OP_LE_S64 ] = &&LABEL_BYTECODE_OP_LE_S64,
768
769 /* double binary comparator */
770 [ BYTECODE_OP_EQ_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE,
771 [ BYTECODE_OP_NE_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_DOUBLE,
772 [ BYTECODE_OP_GT_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_DOUBLE,
773 [ BYTECODE_OP_LT_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_DOUBLE,
774 [ BYTECODE_OP_GE_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_DOUBLE,
775 [ BYTECODE_OP_LE_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_DOUBLE,
776
777 /* Mixed S64-double binary comparators */
778 [ BYTECODE_OP_EQ_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_EQ_DOUBLE_S64,
779 [ BYTECODE_OP_NE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_NE_DOUBLE_S64,
780 [ BYTECODE_OP_GT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GT_DOUBLE_S64,
781 [ BYTECODE_OP_LT_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LT_DOUBLE_S64,
782 [ BYTECODE_OP_GE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_GE_DOUBLE_S64,
783 [ BYTECODE_OP_LE_DOUBLE_S64 ] = &&LABEL_BYTECODE_OP_LE_DOUBLE_S64,
784
785 [ BYTECODE_OP_EQ_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_EQ_S64_DOUBLE,
786 [ BYTECODE_OP_NE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_NE_S64_DOUBLE,
787 [ BYTECODE_OP_GT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GT_S64_DOUBLE,
788 [ BYTECODE_OP_LT_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LT_S64_DOUBLE,
789 [ BYTECODE_OP_GE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_GE_S64_DOUBLE,
790 [ BYTECODE_OP_LE_S64_DOUBLE ] = &&LABEL_BYTECODE_OP_LE_S64_DOUBLE,
791
792 /* unary */
793 [ BYTECODE_OP_UNARY_PLUS ] = &&LABEL_BYTECODE_OP_UNARY_PLUS,
794 [ BYTECODE_OP_UNARY_MINUS ] = &&LABEL_BYTECODE_OP_UNARY_MINUS,
795 [ BYTECODE_OP_UNARY_NOT ] = &&LABEL_BYTECODE_OP_UNARY_NOT,
796 [ BYTECODE_OP_UNARY_PLUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_S64,
797 [ BYTECODE_OP_UNARY_MINUS_S64 ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_S64,
798 [ BYTECODE_OP_UNARY_NOT_S64 ] = &&LABEL_BYTECODE_OP_UNARY_NOT_S64,
799 [ BYTECODE_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_PLUS_DOUBLE,
800 [ BYTECODE_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_MINUS_DOUBLE,
801 [ BYTECODE_OP_UNARY_NOT_DOUBLE ] = &&LABEL_BYTECODE_OP_UNARY_NOT_DOUBLE,
802
803 /* logical */
804 [ BYTECODE_OP_AND ] = &&LABEL_BYTECODE_OP_AND,
805 [ BYTECODE_OP_OR ] = &&LABEL_BYTECODE_OP_OR,
806
807 /* load field ref */
808 [ BYTECODE_OP_LOAD_FIELD_REF ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF,
809 [ BYTECODE_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_STRING,
810 [ BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE,
811 [ BYTECODE_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_S64,
812 [ BYTECODE_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_REF_DOUBLE,
813
814 /* load from immediate operand */
815 [ BYTECODE_OP_LOAD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STRING,
816 [ BYTECODE_OP_LOAD_STAR_GLOB_STRING ] = &&LABEL_BYTECODE_OP_LOAD_STAR_GLOB_STRING,
817 [ BYTECODE_OP_LOAD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_S64,
818 [ BYTECODE_OP_LOAD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_DOUBLE,
819
820 /* cast */
821 [ BYTECODE_OP_CAST_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_TO_S64,
822 [ BYTECODE_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_BYTECODE_OP_CAST_DOUBLE_TO_S64,
823 [ BYTECODE_OP_CAST_NOP ] = &&LABEL_BYTECODE_OP_CAST_NOP,
824
825 /* get context ref */
826 [ BYTECODE_OP_GET_CONTEXT_REF ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF,
827 [ BYTECODE_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_STRING,
828 [ BYTECODE_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_S64,
829 [ BYTECODE_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_REF_DOUBLE,
830
831 /* Instructions for recursive traversal through composed types. */
832 [ BYTECODE_OP_GET_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_CONTEXT_ROOT,
833 [ BYTECODE_OP_GET_APP_CONTEXT_ROOT ] = &&LABEL_BYTECODE_OP_GET_APP_CONTEXT_ROOT,
834 [ BYTECODE_OP_GET_PAYLOAD_ROOT ] = &&LABEL_BYTECODE_OP_GET_PAYLOAD_ROOT,
835
836 [ BYTECODE_OP_GET_SYMBOL ] = &&LABEL_BYTECODE_OP_GET_SYMBOL,
837 [ BYTECODE_OP_GET_SYMBOL_FIELD ] = &&LABEL_BYTECODE_OP_GET_SYMBOL_FIELD,
838 [ BYTECODE_OP_GET_INDEX_U16 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U16,
839 [ BYTECODE_OP_GET_INDEX_U64 ] = &&LABEL_BYTECODE_OP_GET_INDEX_U64,
840
841 [ BYTECODE_OP_LOAD_FIELD ] = &&LABEL_BYTECODE_OP_LOAD_FIELD,
842 [ BYTECODE_OP_LOAD_FIELD_S8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S8,
843 [ BYTECODE_OP_LOAD_FIELD_S16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S16,
844 [ BYTECODE_OP_LOAD_FIELD_S32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S32,
845 [ BYTECODE_OP_LOAD_FIELD_S64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_S64,
846 [ BYTECODE_OP_LOAD_FIELD_U8 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U8,
847 [ BYTECODE_OP_LOAD_FIELD_U16 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U16,
848 [ BYTECODE_OP_LOAD_FIELD_U32 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U32,
849 [ BYTECODE_OP_LOAD_FIELD_U64 ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_U64,
850 [ BYTECODE_OP_LOAD_FIELD_STRING ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_STRING,
851 [ BYTECODE_OP_LOAD_FIELD_SEQUENCE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_SEQUENCE,
852 [ BYTECODE_OP_LOAD_FIELD_DOUBLE ] = &&LABEL_BYTECODE_OP_LOAD_FIELD_DOUBLE,
853
854 [ BYTECODE_OP_UNARY_BIT_NOT ] = &&LABEL_BYTECODE_OP_UNARY_BIT_NOT,
855
856 [ BYTECODE_OP_RETURN_S64 ] = &&LABEL_BYTECODE_OP_RETURN_S64,
857 };
858 #endif /* #ifndef INTERPRETER_USE_SWITCH */
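
/*
 * With computed gotos, the dispatch[] table above maps each opcode to a
 * label and PO re-dispatches directly from the end of every handler, so
 * there is no central loop; the START_OP/OP/PO macros hide this difference
 * from the per-opcode code below.
 */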
859
860 START_OP
861
862 OP(BYTECODE_OP_UNKNOWN):
863 OP(BYTECODE_OP_LOAD_FIELD_REF):
864 #ifdef INTERPRETER_USE_SWITCH
865 default:
866 #endif /* INTERPRETER_USE_SWITCH */
867 ERR("unknown bytecode op %u",
868 (unsigned int) *(bytecode_opcode_t *) pc);
869 ret = -EINVAL;
870 goto end;
871
872 OP(BYTECODE_OP_RETURN):
873 /* LTTNG_UST_BYTECODE_INTERPRETER_ERROR or LTTNG_UST_BYTECODE_INTERPRETER_OK */
874 /* Handle dynamic typing. */
875 switch (estack_ax_t) {
876 case REG_S64:
877 case REG_U64:
878 retval = !!estack_ax_v;
879 break;
880 case REG_DOUBLE:
881 case REG_STRING:
882 case REG_PTR:
883 if (ust_bytecode->type != LTTNG_UST_BYTECODE_TYPE_CAPTURE) {
884 ret = -EINVAL;
885 goto end;
886 }
887 retval = 0;
888 break;
889 case REG_STAR_GLOB_STRING:
890 case REG_UNKNOWN:
891 default:
892 ret = -EINVAL;
893 goto end;
894 }
895 ret = 0;
896 goto end;
897
898 OP(BYTECODE_OP_RETURN_S64):
899 /* LTTNG_UST_BYTECODE_INTERPRETER_ERROR or LTTNG_UST_BYTECODE_INTERPRETER_OK */
900 retval = !!estack_ax_v;
901 ret = 0;
902 goto end;
903
904 /* binary */
905 OP(BYTECODE_OP_MUL):
906 OP(BYTECODE_OP_DIV):
907 OP(BYTECODE_OP_MOD):
908 OP(BYTECODE_OP_PLUS):
909 OP(BYTECODE_OP_MINUS):
910 ERR("unsupported bytecode op %u",
911 (unsigned int) *(bytecode_opcode_t *) pc);
912 ret = -EINVAL;
913 goto end;
914
915 OP(BYTECODE_OP_EQ):
916 {
917 /* Dynamic typing. */
918 switch (estack_ax_t) {
919 case REG_S64: /* Fall-through */
920 case REG_U64:
921 switch (estack_bx_t) {
922 case REG_S64: /* Fall-through */
923 case REG_U64:
924 JUMP_TO(BYTECODE_OP_EQ_S64);
925 case REG_DOUBLE:
926 JUMP_TO(BYTECODE_OP_EQ_DOUBLE_S64);
927 case REG_STRING: /* Fall-through */
928 case REG_STAR_GLOB_STRING:
929 ret = -EINVAL;
930 goto end;
931 default:
932 ERR("Unknown interpreter register type (%d)",
933 (int) estack_bx_t);
934 ret = -EINVAL;
935 goto end;
936 }
937 break;
938 case REG_DOUBLE:
939 switch (estack_bx_t) {
940 case REG_S64: /* Fall-through */
941 case REG_U64:
942 JUMP_TO(BYTECODE_OP_EQ_S64_DOUBLE);
943 case REG_DOUBLE:
944 JUMP_TO(BYTECODE_OP_EQ_DOUBLE);
945 case REG_STRING: /* Fall-through */
946 case REG_STAR_GLOB_STRING:
947 ret = -EINVAL;
948 goto end;
949 default:
950 ERR("Unknown interpreter register type (%d)",
951 (int) estack_bx_t);
952 ret = -EINVAL;
953 goto end;
954 }
955 break;
956 case REG_STRING:
957 switch (estack_bx_t) {
958 case REG_S64: /* Fall-through */
959 case REG_U64: /* Fall-through */
960 case REG_DOUBLE:
961 ret = -EINVAL;
962 goto end;
963 case REG_STRING:
964 JUMP_TO(BYTECODE_OP_EQ_STRING);
965 case REG_STAR_GLOB_STRING:
966 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING);
967 default:
968 ERR("Unknown interpreter register type (%d)",
969 (int) estack_bx_t);
970 ret = -EINVAL;
971 goto end;
972 }
973 break;
974 case REG_STAR_GLOB_STRING:
975 switch (estack_bx_t) {
976 case REG_S64: /* Fall-through */
977 case REG_U64: /* Fall-through */
978 case REG_DOUBLE:
979 ret = -EINVAL;
980 goto end;
981 case REG_STRING:
982 JUMP_TO(BYTECODE_OP_EQ_STAR_GLOB_STRING);
983 case REG_STAR_GLOB_STRING:
984 ret = -EINVAL;
985 goto end;
986 default:
987 ERR("Unknown interpreter register type (%d)",
988 (int) estack_bx_t);
989 ret = -EINVAL;
990 goto end;
991 }
992 break;
993 default:
994 ERR("Unknown interpreter register type (%d)",
995 (int) estack_ax_t);
996 ret = -EINVAL;
997 goto end;
998 }
999 }
1000 OP(BYTECODE_OP_NE):
1001 {
1002 /* Dynamic typing. */
1003 switch (estack_ax_t) {
1004 case REG_S64: /* Fall-through */
1005 case REG_U64:
1006 switch (estack_bx_t) {
1007 case REG_S64: /* Fall-through */
1008 case REG_U64:
1009 JUMP_TO(BYTECODE_OP_NE_S64);
1010 case REG_DOUBLE:
1011 JUMP_TO(BYTECODE_OP_NE_DOUBLE_S64);
1012 case REG_STRING: /* Fall-through */
1013 case REG_STAR_GLOB_STRING:
1014 ret = -EINVAL;
1015 goto end;
1016 default:
1017 ERR("Unknown interpreter register type (%d)",
1018 (int) estack_bx_t);
1019 ret = -EINVAL;
1020 goto end;
1021 }
1022 break;
1023 case REG_DOUBLE:
1024 switch (estack_bx_t) {
1025 case REG_S64: /* Fall-through */
1026 case REG_U64:
1027 JUMP_TO(BYTECODE_OP_NE_S64_DOUBLE);
1028 case REG_DOUBLE:
1029 JUMP_TO(BYTECODE_OP_NE_DOUBLE);
1030 case REG_STRING: /* Fall-through */
1031 case REG_STAR_GLOB_STRING:
1032 ret = -EINVAL;
1033 goto end;
1034 default:
1035 ERR("Unknown interpreter register type (%d)",
1036 (int) estack_bx_t);
1037 ret = -EINVAL;
1038 goto end;
1039 }
1040 break;
1041 case REG_STRING:
1042 switch (estack_bx_t) {
1043 case REG_S64: /* Fall-through */
1044 case REG_U64:
1045 case REG_DOUBLE:
1046 ret = -EINVAL;
1047 goto end;
1048 case REG_STRING:
1049 JUMP_TO(BYTECODE_OP_NE_STRING);
1050 case REG_STAR_GLOB_STRING:
1051 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING);
1052 default:
1053 ERR("Unknown interpreter register type (%d)",
1054 (int) estack_bx_t);
1055 ret = -EINVAL;
1056 goto end;
1057 }
1058 break;
1059 case REG_STAR_GLOB_STRING:
1060 switch (estack_bx_t) {
1061 case REG_S64: /* Fall-through */
1062 case REG_U64:
1063 case REG_DOUBLE:
1064 ret = -EINVAL;
1065 goto end;
1066 case REG_STRING:
1067 JUMP_TO(BYTECODE_OP_NE_STAR_GLOB_STRING);
1068 case REG_STAR_GLOB_STRING:
1069 ret = -EINVAL;
1070 goto end;
1071 default:
1072 ERR("Unknown interpreter register type (%d)",
1073 (int) estack_bx_t);
1074 ret = -EINVAL;
1075 goto end;
1076 }
1077 break;
1078 default:
1079 ERR("Unknown interpreter register type (%d)",
1080 (int) estack_ax_t);
1081 ret = -EINVAL;
1082 goto end;
1083 }
1084 }
1085 OP(BYTECODE_OP_GT):
1086 {
1087 /* Dynamic typing. */
1088 switch (estack_ax_t) {
1089 case REG_S64: /* Fall-through */
1090 case REG_U64:
1091 switch (estack_bx_t) {
1092 case REG_S64: /* Fall-through */
1093 case REG_U64:
1094 JUMP_TO(BYTECODE_OP_GT_S64);
1095 case REG_DOUBLE:
1096 JUMP_TO(BYTECODE_OP_GT_DOUBLE_S64);
1097 case REG_STRING: /* Fall-through */
1098 case REG_STAR_GLOB_STRING:
1099 ret = -EINVAL;
1100 goto end;
1101 default:
1102 ERR("Unknown interpreter register type (%d)",
1103 (int) estack_bx_t);
1104 ret = -EINVAL;
1105 goto end;
1106 }
1107 break;
1108 case REG_DOUBLE:
1109 switch (estack_bx_t) {
1110 case REG_S64: /* Fall-through */
1111 case REG_U64:
1112 JUMP_TO(BYTECODE_OP_GT_S64_DOUBLE);
1113 case REG_DOUBLE:
1114 JUMP_TO(BYTECODE_OP_GT_DOUBLE);
1115 case REG_STRING: /* Fall-through */
1116 case REG_STAR_GLOB_STRING:
1117 ret = -EINVAL;
1118 goto end;
1119 default:
1120 ERR("Unknown interpreter register type (%d)",
1121 (int) estack_bx_t);
1122 ret = -EINVAL;
1123 goto end;
1124 }
1125 break;
1126 case REG_STRING:
1127 switch (estack_bx_t) {
1128 case REG_S64: /* Fall-through */
1129 case REG_U64: /* Fall-through */
1130 case REG_DOUBLE: /* Fall-through */
1131 case REG_STAR_GLOB_STRING:
1132 ret = -EINVAL;
1133 goto end;
1134 case REG_STRING:
1135 JUMP_TO(BYTECODE_OP_GT_STRING);
1136 default:
1137 ERR("Unknown interpreter register type (%d)",
1138 (int) estack_bx_t);
1139 ret = -EINVAL;
1140 goto end;
1141 }
1142 break;
1143 default:
1144 ERR("Unknown interpreter register type (%d)",
1145 (int) estack_ax_t);
1146 ret = -EINVAL;
1147 goto end;
1148 }
1149 }
1150 OP(BYTECODE_OP_LT):
1151 {
1152 /* Dynamic typing. */
1153 switch (estack_ax_t) {
1154 case REG_S64: /* Fall-through */
1155 case REG_U64:
1156 switch (estack_bx_t) {
1157 case REG_S64: /* Fall-through */
1158 case REG_U64:
1159 JUMP_TO(BYTECODE_OP_LT_S64);
1160 case REG_DOUBLE:
1161 JUMP_TO(BYTECODE_OP_LT_DOUBLE_S64);
1162 case REG_STRING: /* Fall-through */
1163 case REG_STAR_GLOB_STRING:
1164 ret = -EINVAL;
1165 goto end;
1166 default:
1167 ERR("Unknown interpreter register type (%d)",
1168 (int) estack_bx_t);
1169 ret = -EINVAL;
1170 goto end;
1171 }
1172 break;
1173 case REG_DOUBLE:
1174 switch (estack_bx_t) {
1175 case REG_S64: /* Fall-through */
1176 case REG_U64:
1177 JUMP_TO(BYTECODE_OP_LT_S64_DOUBLE);
1178 case REG_DOUBLE:
1179 JUMP_TO(BYTECODE_OP_LT_DOUBLE);
1180 case REG_STRING: /* Fall-through */
1181 case REG_STAR_GLOB_STRING:
1182 ret = -EINVAL;
1183 goto end;
1184 default:
1185 ERR("Unknown interpreter register type (%d)",
1186 (int) estack_bx_t);
1187 ret = -EINVAL;
1188 goto end;
1189 }
1190 break;
1191 case REG_STRING:
1192 switch (estack_bx_t) {
1193 case REG_S64: /* Fall-through */
1194 case REG_U64: /* Fall-through */
1195 case REG_DOUBLE: /* Fall-through */
1196 case REG_STAR_GLOB_STRING:
1197 ret = -EINVAL;
1198 goto end;
1199 case REG_STRING:
1200 JUMP_TO(BYTECODE_OP_LT_STRING);
1201 default:
1202 ERR("Unknown interpreter register type (%d)",
1203 (int) estack_bx_t);
1204 ret = -EINVAL;
1205 goto end;
1206 }
1207 break;
1208 default:
1209 ERR("Unknown interpreter register type (%d)",
1210 (int) estack_ax_t);
1211 ret = -EINVAL;
1212 goto end;
1213 }
1214 }
1215 OP(BYTECODE_OP_GE):
1216 {
1217 /* Dynamic typing. */
1218 switch (estack_ax_t) {
1219 case REG_S64: /* Fall-through */
1220 case REG_U64:
1221 switch (estack_bx_t) {
1222 case REG_S64: /* Fall-through */
1223 case REG_U64:
1224 JUMP_TO(BYTECODE_OP_GE_S64);
1225 case REG_DOUBLE:
1226 JUMP_TO(BYTECODE_OP_GE_DOUBLE_S64);
1227 case REG_STRING: /* Fall-through */
1228 case REG_STAR_GLOB_STRING:
1229 ret = -EINVAL;
1230 goto end;
1231 default:
1232 ERR("Unknown interpreter register type (%d)",
1233 (int) estack_bx_t);
1234 ret = -EINVAL;
1235 goto end;
1236 }
1237 break;
1238 case REG_DOUBLE:
1239 switch (estack_bx_t) {
1240 case REG_S64: /* Fall-through */
1241 case REG_U64:
1242 JUMP_TO(BYTECODE_OP_GE_S64_DOUBLE);
1243 case REG_DOUBLE:
1244 JUMP_TO(BYTECODE_OP_GE_DOUBLE);
1245 case REG_STRING: /* Fall-through */
1246 case REG_STAR_GLOB_STRING:
1247 ret = -EINVAL;
1248 goto end;
1249 default:
1250 ERR("Unknown interpreter register type (%d)",
1251 (int) estack_bx_t);
1252 ret = -EINVAL;
1253 goto end;
1254 }
1255 break;
1256 case REG_STRING:
1257 switch (estack_bx_t) {
1258 case REG_S64: /* Fall-through */
1259 case REG_U64: /* Fall-through */
1260 case REG_DOUBLE: /* Fall-through */
1261 case REG_STAR_GLOB_STRING:
1262 ret = -EINVAL;
1263 goto end;
1264 case REG_STRING:
1265 JUMP_TO(BYTECODE_OP_GE_STRING);
1266 default:
1267 ERR("Unknown interpreter register type (%d)",
1268 (int) estack_bx_t);
1269 ret = -EINVAL;
1270 goto end;
1271 }
1272 break;
1273 default:
1274 ERR("Unknown interpreter register type (%d)",
1275 (int) estack_ax_t);
1276 ret = -EINVAL;
1277 goto end;
1278 }
1279 }
1280 OP(BYTECODE_OP_LE):
1281 {
1282 /* Dynamic typing. */
1283 switch (estack_ax_t) {
1284 case REG_S64: /* Fall-through */
1285 case REG_U64:
1286 switch (estack_bx_t) {
1287 case REG_S64: /* Fall-through */
1288 case REG_U64:
1289 JUMP_TO(BYTECODE_OP_LE_S64);
1290 case REG_DOUBLE:
1291 JUMP_TO(BYTECODE_OP_LE_DOUBLE_S64);
1292 case REG_STRING: /* Fall-through */
1293 case REG_STAR_GLOB_STRING:
1294 ret = -EINVAL;
1295 goto end;
1296 default:
1297 ERR("Unknown interpreter register type (%d)",
1298 (int) estack_bx_t);
1299 ret = -EINVAL;
1300 goto end;
1301 }
1302 break;
1303 case REG_DOUBLE:
1304 switch (estack_bx_t) {
1305 case REG_S64: /* Fall-through */
1306 case REG_U64:
1307 JUMP_TO(BYTECODE_OP_LE_S64_DOUBLE);
1308 case REG_DOUBLE:
1309 JUMP_TO(BYTECODE_OP_LE_DOUBLE);
1310 case REG_STRING: /* Fall-through */
1311 case REG_STAR_GLOB_STRING:
1312 ret = -EINVAL;
1313 goto end;
1314 default:
1315 ERR("Unknown interpreter register type (%d)",
1316 (int) estack_bx_t);
1317 ret = -EINVAL;
1318 goto end;
1319 }
1320 break;
1321 case REG_STRING:
1322 switch (estack_bx_t) {
1323 case REG_S64: /* Fall-through */
1324 case REG_U64: /* Fall-through */
1325 case REG_DOUBLE: /* Fall-through */
1326 case REG_STAR_GLOB_STRING:
1327 ret = -EINVAL;
1328 goto end;
1329 case REG_STRING:
1330 JUMP_TO(BYTECODE_OP_LE_STRING);
1331 default:
1332 ERR("Unknown interpreter register type (%d)",
1333 (int) estack_bx_t);
1334 ret = -EINVAL;
1335 goto end;
1336 }
1337 break;
1338 default:
1339 ERR("Unknown interpreter register type (%d)",
1340 (int) estack_ax_t);
1341 ret = -EINVAL;
1342 goto end;
1343 }
1344 }
1345
1346 OP(BYTECODE_OP_EQ_STRING):
1347 {
1348 int res;
1349
1350 res = (stack_strcmp(stack, top, "==") == 0);
1351 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1352 estack_ax_v = res;
1353 estack_ax_t = REG_S64;
1354 next_pc += sizeof(struct binary_op);
1355 PO;
1356 }
1357 OP(BYTECODE_OP_NE_STRING):
1358 {
1359 int res;
1360
1361 res = (stack_strcmp(stack, top, "!=") != 0);
1362 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1363 estack_ax_v = res;
1364 estack_ax_t = REG_S64;
1365 next_pc += sizeof(struct binary_op);
1366 PO;
1367 }
1368 OP(BYTECODE_OP_GT_STRING):
1369 {
1370 int res;
1371
1372 res = (stack_strcmp(stack, top, ">") > 0);
1373 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1374 estack_ax_v = res;
1375 estack_ax_t = REG_S64;
1376 next_pc += sizeof(struct binary_op);
1377 PO;
1378 }
1379 OP(BYTECODE_OP_LT_STRING):
1380 {
1381 int res;
1382
1383 res = (stack_strcmp(stack, top, "<") < 0);
1384 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1385 estack_ax_v = res;
1386 estack_ax_t = REG_S64;
1387 next_pc += sizeof(struct binary_op);
1388 PO;
1389 }
1390 OP(BYTECODE_OP_GE_STRING):
1391 {
1392 int res;
1393
1394 res = (stack_strcmp(stack, top, ">=") >= 0);
1395 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1396 estack_ax_v = res;
1397 estack_ax_t = REG_S64;
1398 next_pc += sizeof(struct binary_op);
1399 PO;
1400 }
1401 OP(BYTECODE_OP_LE_STRING):
1402 {
1403 int res;
1404
1405 res = (stack_strcmp(stack, top, "<=") <= 0);
1406 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1407 estack_ax_v = res;
1408 estack_ax_t = REG_S64;
1409 next_pc += sizeof(struct binary_op);
1410 PO;
1411 }
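
	/*
	 * Each *_STRING opcode above folds the three-way stack_strcmp()
	 * result into a 0/1 value, pops one operand and leaves the boolean
	 * in AX typed as REG_S64, which is what the logical AND/OR opcodes
	 * expect.
	 */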
1412
1413 OP(BYTECODE_OP_EQ_STAR_GLOB_STRING):
1414 {
1415 int res;
1416
1417 res = (stack_star_glob_match(stack, top, "==") == 0);
1418 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1419 estack_ax_v = res;
1420 estack_ax_t = REG_S64;
1421 next_pc += sizeof(struct binary_op);
1422 PO;
1423 }
1424 OP(BYTECODE_OP_NE_STAR_GLOB_STRING):
1425 {
1426 int res;
1427
1428 res = (stack_star_glob_match(stack, top, "!=") != 0);
1429 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1430 estack_ax_v = res;
1431 estack_ax_t = REG_S64;
1432 next_pc += sizeof(struct binary_op);
1433 PO;
1434 }
1435
1436 OP(BYTECODE_OP_EQ_S64):
1437 {
1438 int res;
1439
1440 res = (estack_bx_v == estack_ax_v);
1441 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1442 estack_ax_v = res;
1443 estack_ax_t = REG_S64;
1444 next_pc += sizeof(struct binary_op);
1445 PO;
1446 }
1447 OP(BYTECODE_OP_NE_S64):
1448 {
1449 int res;
1450
1451 res = (estack_bx_v != estack_ax_v);
1452 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1453 estack_ax_v = res;
1454 estack_ax_t = REG_S64;
1455 next_pc += sizeof(struct binary_op);
1456 PO;
1457 }
1458 OP(BYTECODE_OP_GT_S64):
1459 {
1460 int res;
1461
1462 res = (estack_bx_v > estack_ax_v);
1463 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1464 estack_ax_v = res;
1465 estack_ax_t = REG_S64;
1466 next_pc += sizeof(struct binary_op);
1467 PO;
1468 }
1469 OP(BYTECODE_OP_LT_S64):
1470 {
1471 int res;
1472
1473 res = (estack_bx_v < estack_ax_v);
1474 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1475 estack_ax_v = res;
1476 estack_ax_t = REG_S64;
1477 next_pc += sizeof(struct binary_op);
1478 PO;
1479 }
1480 OP(BYTECODE_OP_GE_S64):
1481 {
1482 int res;
1483
1484 res = (estack_bx_v >= estack_ax_v);
1485 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1486 estack_ax_v = res;
1487 estack_ax_t = REG_S64;
1488 next_pc += sizeof(struct binary_op);
1489 PO;
1490 }
1491 OP(BYTECODE_OP_LE_S64):
1492 {
1493 int res;
1494
1495 res = (estack_bx_v <= estack_ax_v);
1496 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1497 estack_ax_v = res;
1498 estack_ax_t = REG_S64;
1499 next_pc += sizeof(struct binary_op);
1500 PO;
1501 }
1502
1503 OP(BYTECODE_OP_EQ_DOUBLE):
1504 {
1505 int res;
1506
1507 res = (estack_bx(stack, top)->u.d == estack_ax(stack, top)->u.d);
1508 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1509 estack_ax_v = res;
1510 estack_ax_t = REG_S64;
1511 next_pc += sizeof(struct binary_op);
1512 PO;
1513 }
1514 OP(BYTECODE_OP_NE_DOUBLE):
1515 {
1516 int res;
1517
1518 res = (estack_bx(stack, top)->u.d != estack_ax(stack, top)->u.d);
1519 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1520 estack_ax_v = res;
1521 estack_ax_t = REG_S64;
1522 next_pc += sizeof(struct binary_op);
1523 PO;
1524 }
1525 OP(BYTECODE_OP_GT_DOUBLE):
1526 {
1527 int res;
1528
1529 res = (estack_bx(stack, top)->u.d > estack_ax(stack, top)->u.d);
1530 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1531 estack_ax_v = res;
1532 estack_ax_t = REG_S64;
1533 next_pc += sizeof(struct binary_op);
1534 PO;
1535 }
1536 OP(BYTECODE_OP_LT_DOUBLE):
1537 {
1538 int res;
1539
1540 res = (estack_bx(stack, top)->u.d < estack_ax(stack, top)->u.d);
1541 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1542 estack_ax_v = res;
1543 estack_ax_t = REG_S64;
1544 next_pc += sizeof(struct binary_op);
1545 PO;
1546 }
1547 OP(BYTECODE_OP_GE_DOUBLE):
1548 {
1549 int res;
1550
1551 res = (estack_bx(stack, top)->u.d >= estack_ax(stack, top)->u.d);
1552 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1553 estack_ax_v = res;
1554 estack_ax_t = REG_S64;
1555 next_pc += sizeof(struct binary_op);
1556 PO;
1557 }
1558 OP(BYTECODE_OP_LE_DOUBLE):
1559 {
1560 int res;
1561
1562 res = (estack_bx(stack, top)->u.d <= estack_ax(stack, top)->u.d);
1563 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1564 estack_ax_v = res;
1565 estack_ax_t = REG_S64;
1566 next_pc += sizeof(struct binary_op);
1567 PO;
1568 }
1569
1570 /* Mixed S64-double binary comparators */
1571 OP(BYTECODE_OP_EQ_DOUBLE_S64):
1572 {
1573 int res;
1574
1575 res = (estack_bx(stack, top)->u.d == estack_ax_v);
1576 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1577 estack_ax_v = res;
1578 estack_ax_t = REG_S64;
1579 next_pc += sizeof(struct binary_op);
1580 PO;
1581 }
1582 OP(BYTECODE_OP_NE_DOUBLE_S64):
1583 {
1584 int res;
1585
1586 res = (estack_bx(stack, top)->u.d != estack_ax_v);
1587 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1588 estack_ax_v = res;
1589 estack_ax_t = REG_S64;
1590 next_pc += sizeof(struct binary_op);
1591 PO;
1592 }
1593 OP(BYTECODE_OP_GT_DOUBLE_S64):
1594 {
1595 int res;
1596
1597 res = (estack_bx(stack, top)->u.d > estack_ax_v);
1598 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1599 estack_ax_v = res;
1600 estack_ax_t = REG_S64;
1601 next_pc += sizeof(struct binary_op);
1602 PO;
1603 }
1604 OP(BYTECODE_OP_LT_DOUBLE_S64):
1605 {
1606 int res;
1607
1608 res = (estack_bx(stack, top)->u.d < estack_ax_v);
1609 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1610 estack_ax_v = res;
1611 estack_ax_t = REG_S64;
1612 next_pc += sizeof(struct binary_op);
1613 PO;
1614 }
1615 OP(BYTECODE_OP_GE_DOUBLE_S64):
1616 {
1617 int res;
1618
1619 res = (estack_bx(stack, top)->u.d >= estack_ax_v);
1620 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1621 estack_ax_v = res;
1622 estack_ax_t = REG_S64;
1623 next_pc += sizeof(struct binary_op);
1624 PO;
1625 }
1626 OP(BYTECODE_OP_LE_DOUBLE_S64):
1627 {
1628 int res;
1629
1630 res = (estack_bx(stack, top)->u.d <= estack_ax_v);
1631 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1632 estack_ax_v = res;
1633 estack_ax_t = REG_S64;
1634 next_pc += sizeof(struct binary_op);
1635 PO;
1636 }
1637
1638 OP(BYTECODE_OP_EQ_S64_DOUBLE):
1639 {
1640 int res;
1641
1642 res = (estack_bx_v == estack_ax(stack, top)->u.d);
1643 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1644 estack_ax_v = res;
1645 estack_ax_t = REG_S64;
1646 next_pc += sizeof(struct binary_op);
1647 PO;
1648 }
1649 OP(BYTECODE_OP_NE_S64_DOUBLE):
1650 {
1651 int res;
1652
1653 res = (estack_bx_v != estack_ax(stack, top)->u.d);
1654 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1655 estack_ax_v = res;
1656 estack_ax_t = REG_S64;
1657 next_pc += sizeof(struct binary_op);
1658 PO;
1659 }
1660 OP(BYTECODE_OP_GT_S64_DOUBLE):
1661 {
1662 int res;
1663
1664 res = (estack_bx_v > estack_ax(stack, top)->u.d);
1665 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1666 estack_ax_v = res;
1667 estack_ax_t = REG_S64;
1668 next_pc += sizeof(struct binary_op);
1669 PO;
1670 }
1671 OP(BYTECODE_OP_LT_S64_DOUBLE):
1672 {
1673 int res;
1674
1675 res = (estack_bx_v < estack_ax(stack, top)->u.d);
1676 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1677 estack_ax_v = res;
1678 estack_ax_t = REG_S64;
1679 next_pc += sizeof(struct binary_op);
1680 PO;
1681 }
1682 OP(BYTECODE_OP_GE_S64_DOUBLE):
1683 {
1684 int res;
1685
1686 res = (estack_bx_v >= estack_ax(stack, top)->u.d);
1687 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1688 estack_ax_v = res;
1689 estack_ax_t = REG_S64;
1690 next_pc += sizeof(struct binary_op);
1691 PO;
1692 }
1693 OP(BYTECODE_OP_LE_S64_DOUBLE):
1694 {
1695 int res;
1696
1697 res = (estack_bx_v <= estack_ax(stack, top)->u.d);
1698 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1699 estack_ax_v = res;
1700 estack_ax_t = REG_S64;
1701 next_pc += sizeof(struct binary_op);
1702 PO;
1703 }
1704 OP(BYTECODE_OP_BIT_RSHIFT):
1705 {
1706 int64_t res;
1707
1708 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1709 ret = -EINVAL;
1710 goto end;
1711 }
1712
1713 /* Catch undefined behavior. */
1714 if (caa_unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1715 ret = -EINVAL;
1716 goto end;
1717 }
1718 res = ((uint64_t) estack_bx_v >> (uint32_t) estack_ax_v);
1719 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1720 estack_ax_v = res;
1721 estack_ax_t = REG_U64;
1722 next_pc += sizeof(struct binary_op);
1723 PO;
1724 }
1725 OP(BYTECODE_OP_BIT_LSHIFT):
1726 {
1727 int64_t res;
1728
1729 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1730 ret = -EINVAL;
1731 goto end;
1732 }
1733
1734 /* Catch undefined behavior. */
1735 if (caa_unlikely(estack_ax_v < 0 || estack_ax_v >= 64)) {
1736 ret = -EINVAL;
1737 goto end;
1738 }
1739 res = ((uint64_t) estack_bx_v << (uint32_t) estack_ax_v);
1740 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1741 estack_ax_v = res;
1742 estack_ax_t = REG_U64;
1743 next_pc += sizeof(struct binary_op);
1744 PO;
1745 }
1746 OP(BYTECODE_OP_BIT_AND):
1747 {
1748 int64_t res;
1749
1750 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1751 ret = -EINVAL;
1752 goto end;
1753 }
1754
1755 res = ((uint64_t) estack_bx_v & (uint64_t) estack_ax_v);
1756 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1757 estack_ax_v = res;
1758 estack_ax_t = REG_U64;
1759 next_pc += sizeof(struct binary_op);
1760 PO;
1761 }
1762 OP(BYTECODE_OP_BIT_OR):
1763 {
1764 int64_t res;
1765
1766 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1767 ret = -EINVAL;
1768 goto end;
1769 }
1770
1771 res = ((uint64_t) estack_bx_v | (uint64_t) estack_ax_v);
1772 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1773 estack_ax_v = res;
1774 estack_ax_t = REG_U64;
1775 next_pc += sizeof(struct binary_op);
1776 PO;
1777 }
1778 OP(BYTECODE_OP_BIT_XOR):
1779 {
1780 int64_t res;
1781
1782 if (!IS_INTEGER_REGISTER(estack_ax_t) || !IS_INTEGER_REGISTER(estack_bx_t)) {
1783 ret = -EINVAL;
1784 goto end;
1785 }
1786
1787 res = ((uint64_t) estack_bx_v ^ (uint64_t) estack_ax_v);
1788 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1789 estack_ax_v = res;
1790 estack_ax_t = REG_U64;
1791 next_pc += sizeof(struct binary_op);
1792 PO;
1793 }
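
	/*
	 * Note on the bitwise opcodes above: shift amounts are validated to
	 * the [0, 63] range before shifting, so over-wide shifts (undefined
	 * behaviour in C) cannot be reached, and every result is retagged
	 * REG_U64.
	 */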
1794
1795 /* unary */
1796 OP(BYTECODE_OP_UNARY_PLUS):
1797 {
1798 /* Dynamic typing. */
1799 switch (estack_ax_t) {
1800 case REG_S64: /* Fall-through. */
1801 case REG_U64:
1802 JUMP_TO(BYTECODE_OP_UNARY_PLUS_S64);
1803 case REG_DOUBLE:
1804 JUMP_TO(BYTECODE_OP_UNARY_PLUS_DOUBLE);
1805 case REG_STRING: /* Fall-through */
1806 case REG_STAR_GLOB_STRING:
1807 ret = -EINVAL;
1808 goto end;
1809 default:
1810 ERR("Unknown interpreter register type (%d)",
1811 (int) estack_ax_t);
1812 ret = -EINVAL;
1813 goto end;
1814 }
1815 }
1816 OP(BYTECODE_OP_UNARY_MINUS):
1817 {
1818 /* Dynamic typing. */
1819 switch (estack_ax_t) {
1820 case REG_S64: /* Fall-through. */
1821 case REG_U64:
1822 JUMP_TO(BYTECODE_OP_UNARY_MINUS_S64);
1823 case REG_DOUBLE:
1824 JUMP_TO(BYTECODE_OP_UNARY_MINUS_DOUBLE);
1825 case REG_STRING: /* Fall-through */
1826 case REG_STAR_GLOB_STRING:
1827 ret = -EINVAL;
1828 goto end;
1829 default:
1830 ERR("Unknown interpreter register type (%d)",
1831 (int) estack_ax_t);
1832 ret = -EINVAL;
1833 goto end;
1834 }
1835 }
1836 OP(BYTECODE_OP_UNARY_NOT):
1837 {
1838 /* Dynamic typing. */
1839 switch (estack_ax_t) {
1840 case REG_S64: /* Fall-through. */
1841 case REG_U64:
1842 JUMP_TO(BYTECODE_OP_UNARY_NOT_S64);
1843 case REG_DOUBLE:
1844 JUMP_TO(BYTECODE_OP_UNARY_NOT_DOUBLE);
1845 case REG_STRING: /* Fall-through */
1846 case REG_STAR_GLOB_STRING:
1847 ret = -EINVAL;
1848 goto end;
1849 default:
1850 ERR("Unknown interpreter register type (%d)",
1851 (int) estack_ax_t);
1852 ret = -EINVAL;
1853 goto end;
1854 }
1855 next_pc += sizeof(struct unary_op);
1856 PO;
1857 }
1858
1859 OP(BYTECODE_OP_UNARY_BIT_NOT):
1860 {
1861 /* Dynamic typing. */
1862 if (!IS_INTEGER_REGISTER(estack_ax_t)) {
1863 ret = -EINVAL;
1864 goto end;
1865 }
1866
1867 estack_ax_v = ~(uint64_t) estack_ax_v;
1868 estack_ax_t = REG_U64;
1869 next_pc += sizeof(struct unary_op);
1870 PO;
1871 }
1872
1873 OP(BYTECODE_OP_UNARY_PLUS_S64):
1874 OP(BYTECODE_OP_UNARY_PLUS_DOUBLE):
1875 {
1876 next_pc += sizeof(struct unary_op);
1877 PO;
1878 }
1879 OP(BYTECODE_OP_UNARY_MINUS_S64):
1880 {
1881 estack_ax_v = -estack_ax_v;
1882 next_pc += sizeof(struct unary_op);
1883 PO;
1884 }
1885 OP(BYTECODE_OP_UNARY_MINUS_DOUBLE):
1886 {
1887 estack_ax(stack, top)->u.d = -estack_ax(stack, top)->u.d;
1888 next_pc += sizeof(struct unary_op);
1889 PO;
1890 }
1891 OP(BYTECODE_OP_UNARY_NOT_S64):
1892 {
1893 estack_ax_v = !estack_ax_v;
1894 estack_ax_t = REG_S64;
1895 next_pc += sizeof(struct unary_op);
1896 PO;
1897 }
1898 OP(BYTECODE_OP_UNARY_NOT_DOUBLE):
1899 {
1900 estack_ax_v = !estack_ax(stack, top)->u.d;
1901 estack_ax_t = REG_S64;
1902 next_pc += sizeof(struct unary_op);
1903 PO;
1904 }
1905
1906 /* logical */
1907 OP(BYTECODE_OP_AND):
1908 {
1909 struct logical_op *insn = (struct logical_op *) pc;
1910
1911 if (estack_ax_t != REG_S64 && estack_ax_t != REG_U64) {
1912 ret = -EINVAL;
1913 goto end;
1914 }
1915 /* If AX is 0, skip and evaluate to 0 */
1916 if (unlikely(estack_ax_v == 0)) {
1917 dbg_printf("Jumping to bytecode offset %u\n",
1918 (unsigned int) insn->skip_offset);
1919 next_pc = start_pc + insn->skip_offset;
1920 } else {
1921 /* Pop 1 when jump not taken */
1922 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1923 next_pc += sizeof(struct logical_op);
1924 }
1925 PO;
1926 }
1927 OP(BYTECODE_OP_OR):
1928 {
1929 struct logical_op *insn = (struct logical_op *) pc;
1930
1931 if (estack_ax_t != REG_S64 && estack_ax_t != REG_U64) {
1932 ret = -EINVAL;
1933 goto end;
1934 }
1935 /* If AX is nonzero, skip and evaluate to 1 */
1936 if (unlikely(estack_ax_v != 0)) {
1937 estack_ax_v = 1;
1938 dbg_printf("Jumping to bytecode offset %u\n",
1939 (unsigned int) insn->skip_offset);
1940 next_pc = start_pc + insn->skip_offset;
1941 } else {
1942 /* Pop 1 when jump not taken */
1943 estack_pop(stack, top, ax, bx, ax_t, bx_t);
1944 next_pc += sizeof(struct logical_op);
1945 }
1946 PO;
1947 }
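
	/*
	 * The logical AND/OR opcodes above implement short-circuit
	 * evaluation: skip_offset, emitted with the bytecode, points past
	 * the right-hand operand, so when the jump is taken AX already
	 * holds the result and the skipped operand is never evaluated.
	 */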
1948
1949
1950 /* load field ref */
1951 OP(BYTECODE_OP_LOAD_FIELD_REF_STRING):
1952 {
1953 struct load_op *insn = (struct load_op *) pc;
1954 struct field_ref *ref = (struct field_ref *) insn->data;
1955
1956 dbg_printf("load field ref offset %u type string\n",
1957 ref->offset);
1958 estack_push(stack, top, ax, bx, ax_t, bx_t);
1959 estack_ax(stack, top)->u.s.str =
1960 *(const char * const *) &interpreter_stack_data[ref->offset];
1961 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1962 dbg_printf("Interpreter warning: loading a NULL string.\n");
1963 ret = -EINVAL;
1964 goto end;
1965 }
1966 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
1967 estack_ax(stack, top)->u.s.literal_type =
1968 ESTACK_STRING_LITERAL_TYPE_NONE;
1969 estack_ax_t = REG_STRING;
1970 dbg_printf("ref load string %s\n", estack_ax(stack, top)->u.s.str);
1971 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1972 PO;
1973 }
1974
1975 OP(BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE):
1976 {
1977 struct load_op *insn = (struct load_op *) pc;
1978 struct field_ref *ref = (struct field_ref *) insn->data;
1979
1980 dbg_printf("load field ref offset %u type sequence\n",
1981 ref->offset);
1982 estack_push(stack, top, ax, bx, ax_t, bx_t);
1983 estack_ax(stack, top)->u.s.seq_len =
1984 *(unsigned long *) &interpreter_stack_data[ref->offset];
1985 estack_ax(stack, top)->u.s.str =
1986 *(const char **) (&interpreter_stack_data[ref->offset
1987 + sizeof(unsigned long)]);
1988 estack_ax_t = REG_STRING;
1989 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
1990 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
1991 ret = -EINVAL;
1992 goto end;
1993 }
1994 estack_ax(stack, top)->u.s.literal_type =
1995 ESTACK_STRING_LITERAL_TYPE_NONE;
1996 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1997 PO;
1998 }
1999
2000 OP(BYTECODE_OP_LOAD_FIELD_REF_S64):
2001 {
2002 struct load_op *insn = (struct load_op *) pc;
2003 struct field_ref *ref = (struct field_ref *) insn->data;
2004
2005 dbg_printf("load field ref offset %u type s64\n",
2006 ref->offset);
2007 estack_push(stack, top, ax, bx, ax_t, bx_t);
2008 estack_ax_v =
2009 ((struct literal_numeric *) &interpreter_stack_data[ref->offset])->v;
2010 estack_ax_t = REG_S64;
2011 dbg_printf("ref load s64 %" PRIi64 "\n", estack_ax_v);
2012 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2013 PO;
2014 }
2015
2016 OP(BYTECODE_OP_LOAD_FIELD_REF_DOUBLE):
2017 {
2018 struct load_op *insn = (struct load_op *) pc;
2019 struct field_ref *ref = (struct field_ref *) insn->data;
2020
2021 dbg_printf("load field ref offset %u type double\n",
2022 ref->offset);
2023 estack_push(stack, top, ax, bx, ax_t, bx_t);
2024 memcpy(&estack_ax(stack, top)->u.d, &interpreter_stack_data[ref->offset],
2025 sizeof(struct literal_double));
2026 estack_ax_t = REG_DOUBLE;
2027 dbg_printf("ref load double %g\n", estack_ax(stack, top)->u.d);
2028 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2029 PO;
2030 }
2031
2032 /* load from immediate operand */
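/*
 * Immediate operands are stored inline in the instruction stream
 * (insn->data); next_pc is advanced past the load_op header plus
 * the inline payload: the NUL-terminated string, or the
 * literal_numeric/literal_double struct.
 */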
2033 OP(BYTECODE_OP_LOAD_STRING):
2034 {
2035 struct load_op *insn = (struct load_op *) pc;
2036
2037 dbg_printf("load string %s\n", insn->data);
2038 estack_push(stack, top, ax, bx, ax_t, bx_t);
2039 estack_ax(stack, top)->u.s.str = insn->data;
2040 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2041 estack_ax(stack, top)->u.s.literal_type =
2042 ESTACK_STRING_LITERAL_TYPE_PLAIN;
2043 estack_ax_t = REG_STRING;
2044 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
2045 PO;
2046 }
2047
2048 OP(BYTECODE_OP_LOAD_STAR_GLOB_STRING):
2049 {
2050 struct load_op *insn = (struct load_op *) pc;
2051
2052 dbg_printf("load globbing pattern %s\n", insn->data);
2053 estack_push(stack, top, ax, bx, ax_t, bx_t);
2054 estack_ax(stack, top)->u.s.str = insn->data;
2055 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2056 estack_ax(stack, top)->u.s.literal_type =
2057 ESTACK_STRING_LITERAL_TYPE_STAR_GLOB;
2058 estack_ax_t = REG_STAR_GLOB_STRING;
2059 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
2060 PO;
2061 }
2062
2063 OP(BYTECODE_OP_LOAD_S64):
2064 {
2065 struct load_op *insn = (struct load_op *) pc;
2066
2067 estack_push(stack, top, ax, bx, ax_t, bx_t);
2068 estack_ax_v = ((struct literal_numeric *) insn->data)->v;
2069 estack_ax_t = REG_S64;
2070 dbg_printf("load s64 %" PRIi64 "\n", estack_ax_v);
2071 next_pc += sizeof(struct load_op)
2072 + sizeof(struct literal_numeric);
2073 PO;
2074 }
2075
2076 OP(BYTECODE_OP_LOAD_DOUBLE):
2077 {
2078 struct load_op *insn = (struct load_op *) pc;
2079
2080 estack_push(stack, top, ax, bx, ax_t, bx_t);
2081 memcpy(&estack_ax(stack, top)->u.d, insn->data,
2082 sizeof(struct literal_double));
2083 estack_ax_t = REG_DOUBLE;
2084 dbg_printf("load double %g\n", estack_ax(stack, top)->u.d);
2085 next_pc += sizeof(struct load_op)
2086 + sizeof(struct literal_double);
2087 PO;
2088 }
2089
2090 /* cast */
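/*
 * CAST_TO_S64 uses dynamic typing: it dispatches on the run-time
 * register type. Only numeric registers can be cast; string
 * registers are rejected.
 */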
2091 OP(BYTECODE_OP_CAST_TO_S64):
2092 {
2093 /* Dynamic typing. */
2094 switch (estack_ax_t) {
2095 case REG_S64:
2096 JUMP_TO(BYTECODE_OP_CAST_NOP);
2097 case REG_DOUBLE:
2098 JUMP_TO(BYTECODE_OP_CAST_DOUBLE_TO_S64);
2099 case REG_U64:
2100 estack_ax_t = REG_S64;
2101 JUMP_TO(BYTECODE_OP_CAST_NOP); /* u64 -> s64 is a reinterpretation; CAST_NOP advances next_pc and resumes dispatch rather than falling through to the error cases below. */
2102 case REG_STRING: /* Fall-through */
2103 case REG_STAR_GLOB_STRING:
2104 ret = -EINVAL;
2105 goto end;
2106 default:
2107 ERR("Unknown interpreter register type (%d)",
2108 (int) estack_ax_t);
2109 ret = -EINVAL;
2110 goto end;
2111 }
2112 }
2113
2114 OP(BYTECODE_OP_CAST_DOUBLE_TO_S64):
2115 {
2116 estack_ax_v = (int64_t) estack_ax(stack, top)->u.d;
2117 estack_ax_t = REG_S64;
2118 next_pc += sizeof(struct cast_op);
2119 PO;
2120 }
2121
2122 OP(BYTECODE_OP_CAST_NOP):
2123 {
2124 next_pc += sizeof(struct cast_op);
2125 PO;
2126 }
2127
2128 /* get context ref */
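/*
 * Context refs fetch their value through the context field's
 * get_value() callback and push it. The first (dynamic) variant
 * switches on the returned type selector; the typed variants read
 * the matching union member directly.
 */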
2129 OP(BYTECODE_OP_GET_CONTEXT_REF):
2130 {
2131 struct load_op *insn = (struct load_op *) pc;
2132 struct field_ref *ref = (struct field_ref *) insn->data;
2133 struct lttng_ust_ctx_field *ctx_field;
2134 struct lttng_ust_ctx_value v;
2135
2136 dbg_printf("get context ref offset %u type dynamic\n",
2137 ref->offset);
2138 ctx_field = ctx->fields[ref->offset];
2139 ctx_field->get_value(ctx_field, &v);
2140 estack_push(stack, top, ax, bx, ax_t, bx_t);
2141 switch (v.sel) {
2142 case LTTNG_UST_DYNAMIC_TYPE_NONE:
2143 ret = -EINVAL;
2144 goto end;
2145 case LTTNG_UST_DYNAMIC_TYPE_S64:
2146 estack_ax_v = v.u.s64;
2147 estack_ax_t = REG_S64;
2148 dbg_printf("ref get context dynamic s64 %" PRIi64 "\n", estack_ax_v);
2149 break;
2150 case LTTNG_UST_DYNAMIC_TYPE_DOUBLE:
2151 estack_ax(stack, top)->u.d = v.u.d;
2152 estack_ax_t = REG_DOUBLE;
2153 dbg_printf("ref get context dynamic double %g\n", estack_ax(stack, top)->u.d);
2154 break;
2155 case LTTNG_UST_DYNAMIC_TYPE_STRING:
2156 estack_ax(stack, top)->u.s.str = v.u.str;
2157 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2158 dbg_printf("Interpreter warning: loading a NULL string.\n");
2159 ret = -EINVAL;
2160 goto end;
2161 }
2162 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2163 estack_ax(stack, top)->u.s.literal_type =
2164 ESTACK_STRING_LITERAL_TYPE_NONE;
2165 dbg_printf("ref get context dynamic string %s\n", estack_ax(stack, top)->u.s.str);
2166 estack_ax_t = REG_STRING;
2167 break;
2168 default:
2169 dbg_printf("Interpreter warning: unknown dynamic type (%d).\n", (int) v.sel);
2170 ret = -EINVAL;
2171 goto end;
2172 }
2173 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2174 PO;
2175 }
2176
2177 OP(BYTECODE_OP_GET_CONTEXT_REF_STRING):
2178 {
2179 struct load_op *insn = (struct load_op *) pc;
2180 struct field_ref *ref = (struct field_ref *) insn->data;
2181 struct lttng_ust_ctx_field *ctx_field;
2182 struct lttng_ust_ctx_value v;
2183
2184 dbg_printf("get context ref offset %u type string\n",
2185 ref->offset);
2186 ctx_field = ctx->fields[ref->offset];
2187 ctx_field->get_value(ctx_field, &v);
2188 estack_push(stack, top, ax, bx, ax_t, bx_t);
2189 estack_ax(stack, top)->u.s.str = v.u.str;
2190 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2191 dbg_printf("Interpreter warning: loading a NULL string.\n");
2192 ret = -EINVAL;
2193 goto end;
2194 }
2195 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2196 estack_ax(stack, top)->u.s.literal_type =
2197 ESTACK_STRING_LITERAL_TYPE_NONE;
2198 estack_ax_t = REG_STRING;
2199 dbg_printf("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
2200 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2201 PO;
2202 }
2203
2204 OP(BYTECODE_OP_GET_CONTEXT_REF_S64):
2205 {
2206 struct load_op *insn = (struct load_op *) pc;
2207 struct field_ref *ref = (struct field_ref *) insn->data;
2208 struct lttng_ust_ctx_field *ctx_field;
2209 struct lttng_ust_ctx_value v;
2210
2211 dbg_printf("get context ref offset %u type s64\n",
2212 ref->offset);
2213 ctx_field = ctx->fields[ref->offset];
2214 ctx_field->get_value(ctx_field, &v);
2215 estack_push(stack, top, ax, bx, ax_t, bx_t);
2216 estack_ax_v = v.u.s64;
2217 estack_ax_t = REG_S64;
2218 dbg_printf("ref get context s64 %" PRIi64 "\n", estack_ax_v);
2219 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2220 PO;
2221 }
2222
2223 OP(BYTECODE_OP_GET_CONTEXT_REF_DOUBLE):
2224 {
2225 struct load_op *insn = (struct load_op *) pc;
2226 struct field_ref *ref = (struct field_ref *) insn->data;
2227 struct lttng_ust_ctx_field *ctx_field;
2228 struct lttng_ust_ctx_value v;
2229
2230 dbg_printf("get context ref offset %u type double\n",
2231 ref->offset);
2232 ctx_field = ctx->fields[ref->offset];
2233 ctx_field->get_value(ctx_field, &v);
2234 estack_push(stack, top, ax, bx, ax_t, bx_t);
2235 memcpy(&estack_ax(stack, top)->u.d, &v.u.d, sizeof(struct literal_double));
2236 estack_ax_t = REG_DOUBLE;
2237 dbg_printf("ref get context double %g\n", estack_ax(stack, top)->u.d);
2238 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
2239 PO;
2240 }
2241
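/*
 * Object traversal: the GET_*_ROOT ops push a REG_PTR register
 * naming the traversal root (context, app context or event
 * payload). GET_INDEX_* navigate from that root (symbol lookups
 * are resolved ahead of time by specialization, so GET_SYMBOL is
 * an error at this stage) and the LOAD_FIELD* ops dereference the
 * resulting pointer.
 */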
2242 OP(BYTECODE_OP_GET_CONTEXT_ROOT):
2243 {
2244 dbg_printf("op get context root\n");
2245 estack_push(stack, top, ax, bx, ax_t, bx_t);
2246 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_CONTEXT;
2247 /* "field" only needed for variants. */
2248 estack_ax(stack, top)->u.ptr.field = NULL;
2249 estack_ax_t = REG_PTR;
2250 next_pc += sizeof(struct load_op);
2251 PO;
2252 }
2253
2254 OP(BYTECODE_OP_GET_APP_CONTEXT_ROOT):
2255 {
2256 dbg_printf("op get app context root\n");
2257 estack_push(stack, top, ax, bx, ax_t, bx_t);
2258 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_APP_CONTEXT;
2259 /* "field" only needed for variants. */
2260 estack_ax(stack, top)->u.ptr.field = NULL;
2261 estack_ax_t = REG_PTR;
2262 next_pc += sizeof(struct load_op);
2263 PO;
2264 }
2265
2266 OP(BYTECODE_OP_GET_PAYLOAD_ROOT):
2267 {
2268 dbg_printf("op get app payload root\n");
2269 estack_push(stack, top, ax, bx, ax_t, bx_t);
2270 estack_ax(stack, top)->u.ptr.type = LOAD_ROOT_PAYLOAD;
2271 estack_ax(stack, top)->u.ptr.ptr = interpreter_stack_data;
2272 /* "field" only needed for variants. */
2273 estack_ax(stack, top)->u.ptr.field = NULL;
2274 estack_ax_t = REG_PTR;
2275 next_pc += sizeof(struct load_op);
2276 PO;
2277 }
2278
2279 OP(BYTECODE_OP_GET_SYMBOL):
2280 {
2281 dbg_printf("op get symbol\n");
2282 switch (estack_ax(stack, top)->u.ptr.type) {
2283 case LOAD_OBJECT:
2284 ERR("Nested fields not implemented yet.");
2285 ret = -EINVAL;
2286 goto end;
2287 case LOAD_ROOT_CONTEXT:
2288 case LOAD_ROOT_APP_CONTEXT:
2289 case LOAD_ROOT_PAYLOAD:
2290 /*
2291 * symbol lookup is performed by
2292 * specialization.
2293 */
2294 ret = -EINVAL;
2295 goto end;
2296 }
2297 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
2298 PO;
2299 }
2300
2301 OP(BYTECODE_OP_GET_SYMBOL_FIELD):
2302 {
2303 /*
2304 * Used for first variant encountered in a
2305 * traversal. Variants are not implemented yet.
2306 */
2307 ret = -EINVAL;
2308 goto end;
2309 }
2310
2311 OP(BYTECODE_OP_GET_INDEX_U16):
2312 {
2313 struct load_op *insn = (struct load_op *) pc;
2314 struct get_index_u16 *index = (struct get_index_u16 *) insn->data;
2315
2316 dbg_printf("op get index u16\n");
2317 ret = dynamic_get_index(ctx, bytecode, index->index, estack_ax(stack, top));
2318 if (ret)
2319 goto end;
2320 estack_ax_v = estack_ax(stack, top)->u.v;
2321 estack_ax_t = estack_ax(stack, top)->type;
2322 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
2323 PO;
2324 }
2325
2326 OP(BYTECODE_OP_GET_INDEX_U64):
2327 {
2328 struct load_op *insn = (struct load_op *) pc;
2329 struct get_index_u64 *index = (struct get_index_u64 *) insn->data;
2330
2331 dbg_printf("op get index u64\n");
2332 ret = dynamic_get_index(ctx, bytecode, index->index, estack_ax(stack, top));
2333 if (ret)
2334 goto end;
2335 estack_ax_v = estack_ax(stack, top)->u.v;
2336 estack_ax_t = estack_ax(stack, top)->type;
2337 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
2338 PO;
2339 }
2340
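/*
 * BYTECODE_OP_LOAD_FIELD dispatches on the object type at run time
 * (dynamic_load_field); the typed LOAD_FIELD_* variants below are
 * specialized fast paths that dereference u.ptr.ptr directly and
 * normalize integers to REG_S64/REG_U64.
 */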
2341 OP(BYTECODE_OP_LOAD_FIELD):
2342 {
2343 dbg_printf("op load field\n");
2344 ret = dynamic_load_field(estack_ax(stack, top));
2345 if (ret)
2346 goto end;
2347 estack_ax_v = estack_ax(stack, top)->u.v;
2348 estack_ax_t = estack_ax(stack, top)->type;
2349 next_pc += sizeof(struct load_op);
2350 PO;
2351 }
2352
2353 OP(BYTECODE_OP_LOAD_FIELD_S8):
2354 {
2355 dbg_printf("op load field s8\n");
2356
2357 estack_ax_v = *(int8_t *) estack_ax(stack, top)->u.ptr.ptr;
2358 estack_ax_t = REG_S64;
2359 next_pc += sizeof(struct load_op);
2360 PO;
2361 }
2362 OP(BYTECODE_OP_LOAD_FIELD_S16):
2363 {
2364 dbg_printf("op load field s16\n");
2365
2366 estack_ax_v = *(int16_t *) estack_ax(stack, top)->u.ptr.ptr;
2367 estack_ax_t = REG_S64;
2368 next_pc += sizeof(struct load_op);
2369 PO;
2370 }
2371 OP(BYTECODE_OP_LOAD_FIELD_S32):
2372 {
2373 dbg_printf("op load field s32\n");
2374
2375 estack_ax_v = *(int32_t *) estack_ax(stack, top)->u.ptr.ptr;
2376 estack_ax_t = REG_S64;
2377 next_pc += sizeof(struct load_op);
2378 PO;
2379 }
2380 OP(BYTECODE_OP_LOAD_FIELD_S64):
2381 {
2382 dbg_printf("op load field s64\n");
2383
2384 estack_ax_v = *(int64_t *) estack_ax(stack, top)->u.ptr.ptr;
2385 estack_ax_t = REG_S64;
2386 next_pc += sizeof(struct load_op);
2387 PO;
2388 }
2389 OP(BYTECODE_OP_LOAD_FIELD_U8):
2390 {
2391 dbg_printf("op load field u8\n");
2392
2393 estack_ax_v = *(uint8_t *) estack_ax(stack, top)->u.ptr.ptr;
2394 estack_ax_t = REG_U64;
2395 next_pc += sizeof(struct load_op);
2396 PO;
2397 }
2398 OP(BYTECODE_OP_LOAD_FIELD_U16):
2399 {
2400 dbg_printf("op load field u16\n");
2401
2402 estack_ax_v = *(uint16_t *) estack_ax(stack, top)->u.ptr.ptr;
2403 estack_ax_t = REG_U64;
2404 next_pc += sizeof(struct load_op);
2405 PO;
2406 }
2407 OP(BYTECODE_OP_LOAD_FIELD_U32):
2408 {
2409 dbg_printf("op load field u32\n");
2410
2411 estack_ax_v = *(uint32_t *) estack_ax(stack, top)->u.ptr.ptr;
2412 estack_ax_t = REG_U64;
2413 next_pc += sizeof(struct load_op);
2414 PO;
2415 }
2416 OP(BYTECODE_OP_LOAD_FIELD_U64):
2417 {
2418 dbg_printf("op load field u64\n");
2419
2420 estack_ax_v = *(uint64_t *) estack_ax(stack, top)->u.ptr.ptr;
2421 estack_ax_t = REG_U64;
2422 next_pc += sizeof(struct load_op);
2423 PO;
2424 }
2425 OP(BYTECODE_OP_LOAD_FIELD_DOUBLE):
2426 {
2427 dbg_printf("op load field double\n");
2428
2429 memcpy(&estack_ax(stack, top)->u.d,
2430 estack_ax(stack, top)->u.ptr.ptr,
2431 sizeof(struct literal_double));
2432 estack_ax(stack, top)->type = REG_DOUBLE;
2433 next_pc += sizeof(struct load_op);
2434 PO;
2435 }
2436
2437 OP(BYTECODE_OP_LOAD_FIELD_STRING):
2438 {
2439 const char *str;
2440
2441 dbg_printf("op load field string\n");
2442 str = (const char *) estack_ax(stack, top)->u.ptr.ptr;
2443 estack_ax(stack, top)->u.s.str = str;
2444 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2445 dbg_printf("Interpreter warning: loading a NULL string.\n");
2446 ret = -EINVAL;
2447 goto end;
2448 }
2449 estack_ax(stack, top)->u.s.seq_len = SIZE_MAX;
2450 estack_ax(stack, top)->u.s.literal_type =
2451 ESTACK_STRING_LITERAL_TYPE_NONE;
2452 estack_ax(stack, top)->type = REG_STRING;
2453 next_pc += sizeof(struct load_op);
2454 PO;
2455 }
2456
2457 OP(BYTECODE_OP_LOAD_FIELD_SEQUENCE):
2458 {
2459 const char *ptr;
2460
2461 dbg_printf("op load field string sequence\n");
2462 ptr = estack_ax(stack, top)->u.ptr.ptr;
2463 estack_ax(stack, top)->u.s.seq_len = *(unsigned long *) ptr;
2464 estack_ax(stack, top)->u.s.str = *(const char **) (ptr + sizeof(unsigned long));
2465 estack_ax(stack, top)->type = REG_STRING;
2466 if (unlikely(!estack_ax(stack, top)->u.s.str)) {
2467 dbg_printf("Interpreter warning: loading a NULL sequence.\n");
2468 ret = -EINVAL;
2469 goto end;
2470 }
2471 estack_ax(stack, top)->u.s.literal_type =
2472 ESTACK_STRING_LITERAL_TYPE_NONE;
2473 next_pc += sizeof(struct load_op);
2474 PO;
2475 }
2476
2477 END_OP
2478 end:
2479 /* No need to prepare output if an error occurred. */
2480 if (ret)
2481 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR;
2482
2483 /* Prepare output. */
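/*
 * Filter bytecode turns the interpreter's truth value (retval) into
 * an ACCEPT/REJECT result for the caller; capture bytecode formats
 * the top-of-stack register into the caller-provided interpreter
 * output structure.
 */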
2484 switch (ust_bytecode->type) {
2485 case LTTNG_UST_BYTECODE_TYPE_FILTER:
2486 {
2487 struct lttng_ust_bytecode_filter_ctx *filter_ctx =
2488 (struct lttng_ust_bytecode_filter_ctx *) caller_ctx;
2489 if (retval)
2490 filter_ctx->result = LTTNG_UST_BYTECODE_FILTER_ACCEPT;
2491 else
2492 filter_ctx->result = LTTNG_UST_BYTECODE_FILTER_REJECT;
2493 break;
2494 }
2495 case LTTNG_UST_BYTECODE_TYPE_CAPTURE:
2496 ret = lttng_bytecode_interpret_format_output(estack_ax(stack, top),
2497 (struct lttng_interpreter_output *) caller_ctx);
2498 break;
2499 default:
2500 ret = -EINVAL;
2501 break;
2502 }
2503 if (ret)
2504 return LTTNG_UST_BYTECODE_INTERPRETER_ERROR;
2505 else
2506 return LTTNG_UST_BYTECODE_INTERPRETER_OK;
2507 }
2508
2509 /*
2510 * Return LTTNG_UST_EVENT_FILTER_ACCEPT or LTTNG_UST_EVENT_FILTER_REJECT.
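 *
 * The event is accepted as soon as any attached filter bytecode
 * evaluates to true; a runtime whose interpreter returns an error
 * does not contribute to the decision.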
2511 */
2512 int lttng_ust_interpret_event_filter(struct lttng_ust_event_common *event,
2513 const char *interpreter_stack_data,
2514 void *event_filter_ctx)
2515 {
2516 struct lttng_ust_bytecode_runtime *filter_bc_runtime;
2517 struct cds_list_head *filter_bytecode_runtime_head = &event->priv->filter_bytecode_runtime_head;
2518 struct lttng_ust_bytecode_filter_ctx bytecode_filter_ctx;
2519 bool filter_record = false;
2520
2521 cds_list_for_each_entry_rcu(filter_bc_runtime, filter_bytecode_runtime_head, node) {
2522 if (caa_likely(filter_bc_runtime->interpreter_func(filter_bc_runtime,
2523 interpreter_stack_data, &bytecode_filter_ctx) == LTTNG_UST_BYTECODE_INTERPRETER_OK)) {
2524 if (caa_unlikely(bytecode_filter_ctx.result == LTTNG_UST_BYTECODE_FILTER_ACCEPT)) {
2525 filter_record = true;
2526 break;
2527 }
2528 }
2529 }
2530 if (filter_record)
2531 return LTTNG_UST_EVENT_FILTER_ACCEPT;
2532 else
2533 return LTTNG_UST_EVENT_FILTER_REJECT;
2534 }
2535
2536 #undef START_OP
2537 #undef OP
2538 #undef PO
2539 #undef END_OP