/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng UST bytecode validator.
 */
14 #include "rculfhash.h"
16 #include "lttng-bytecode.h"
17 #include "common/hash.h"
18 #include "common/strutils.h"
19 #include "lib/lttng-ust/events.h"
20 #include "common/macros.h"
/*
 * Number of merge points for hash table size. Hash table initialized to
 * that size, and we do not resize, because we do not want to trigger
 * RCU worker thread execution: fall-back on linear traversal if number
 * of merge points exceeds this value.
 */
28 #define DEFAULT_NR_MERGE_POINTS 128
29 #define MIN_NR_BUCKETS 128
30 #define MAX_NR_BUCKETS 128
/* merge point table node */
34 struct lttng_ust_lfht_node node
;
36 /* Context at merge point */
38 unsigned long target_pc
;
41 static unsigned long lttng_hash_seed
;
42 static unsigned int lttng_hash_seed_ready
;
45 int lttng_hash_match(struct lttng_ust_lfht_node
*node
, const void *key
)
47 struct lfht_mp_node
*mp_node
=
48 caa_container_of(node
, struct lfht_mp_node
, node
);
49 unsigned long key_pc
= (unsigned long) key
;
51 if (mp_node
->target_pc
== key_pc
)
58 int merge_points_compare(const struct vstack
*stacka
,
59 const struct vstack
*stackb
)
63 if (stacka
->top
!= stackb
->top
)
65 len
= stacka
->top
+ 1;
67 for (i
= 0; i
< len
; i
++) {
68 if (stacka
->e
[i
].type
!= REG_UNKNOWN
69 && stackb
->e
[i
].type
!= REG_UNKNOWN
70 && stacka
->e
[i
].type
!= stackb
->e
[i
].type
)
77 int merge_point_add_check(struct lttng_ust_lfht
*ht
, unsigned long target_pc
,
78 const struct vstack
*stack
)
80 struct lfht_mp_node
*node
;
81 unsigned long hash
= lttng_hash_mix((const char *) target_pc
,
84 struct lttng_ust_lfht_node
*ret
;
86 dbg_printf("Bytecode: adding merge point at offset %lu, hash %lu\n",
88 node
= zmalloc(sizeof(struct lfht_mp_node
));
91 node
->target_pc
= target_pc
;
92 memcpy(&node
->stack
, stack
, sizeof(node
->stack
));
93 ret
= lttng_ust_lfht_add_unique(ht
, hash
, lttng_hash_match
,
94 (const char *) target_pc
, &node
->node
);
95 if (ret
!= &node
->node
) {
96 struct lfht_mp_node
*ret_mp
=
97 caa_container_of(ret
, struct lfht_mp_node
, node
);
99 /* Key already present */
100 dbg_printf("Bytecode: compare merge points for offset %lu, hash %lu\n",
103 if (merge_points_compare(stack
, &ret_mp
->stack
)) {
104 ERR("Merge points differ for offset %lu\n",
/*
 * Binary comparators use top of stack and top of stack -1.
 * Return 0 if typing is known to match, 1 if typing is dynamic
 * (unknown), negative error value on error.
 */
118 int bin_op_compare_check(struct vstack
*stack
, bytecode_opcode_t opcode
,
121 if (unlikely(!vstack_ax(stack
) || !vstack_bx(stack
)))
124 switch (vstack_ax(stack
)->type
) {
131 switch (vstack_bx(stack
)->type
) {
139 case REG_STAR_GLOB_STRING
:
140 if (opcode
!= BYTECODE_OP_EQ
&& opcode
!= BYTECODE_OP_NE
) {
150 case REG_STAR_GLOB_STRING
:
151 switch (vstack_bx(stack
)->type
) {
158 if (opcode
!= BYTECODE_OP_EQ
&& opcode
!= BYTECODE_OP_NE
) {
162 case REG_STAR_GLOB_STRING
:
172 switch (vstack_bx(stack
)->type
) {
179 case REG_STAR_GLOB_STRING
:
194 ERR("type mismatch for '%s' binary operator\n", str
);
198 ERR("empty stack for '%s' binary operator\n", str
);
202 ERR("unknown type for '%s' binary operator\n", str
);
/*
 * Binary bitwise operators use top of stack and top of stack -1.
 * Return 0 if typing is known to match, 1 if typing is dynamic
 * (unknown), negative error value on error.
 */
212 int bin_op_bitwise_check(struct vstack
*stack
,
213 bytecode_opcode_t opcode
__attribute__((unused
)),
216 if (unlikely(!vstack_ax(stack
) || !vstack_bx(stack
)))
219 switch (vstack_ax(stack
)->type
) {
227 switch (vstack_bx(stack
)->type
) {
245 ERR("empty stack for '%s' binary operator\n", str
);
249 ERR("unknown type for '%s' binary operator\n", str
);
254 int validate_get_symbol(struct bytecode_runtime
*bytecode
,
255 const struct get_symbol
*sym
)
257 const char *str
, *str_limit
;
260 if (sym
->offset
>= bytecode
->p
.bc
->bc
.len
- bytecode
->p
.bc
->bc
.reloc_offset
)
263 str
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.reloc_offset
+ sym
->offset
;
264 str_limit
= bytecode
->p
.bc
->bc
.data
+ bytecode
->p
.bc
->bc
.len
;
265 len_limit
= str_limit
- str
;
266 if (strnlen(str
, len_limit
) == len_limit
)
/*
 * Validate bytecode range overflow within the validation pass.
 * Called for each instruction encountered.
 */
276 int bytecode_validate_overflow(struct bytecode_runtime
*bytecode
,
277 char *start_pc
, char *pc
)
281 switch (*(bytecode_opcode_t
*) pc
) {
282 case BYTECODE_OP_UNKNOWN
:
285 ERR("unknown bytecode op %u\n",
286 (unsigned int) *(bytecode_opcode_t
*) pc
);
291 case BYTECODE_OP_RETURN
:
292 case BYTECODE_OP_RETURN_S64
:
294 if (unlikely(pc
+ sizeof(struct return_op
)
295 > start_pc
+ bytecode
->len
)) {
302 case BYTECODE_OP_MUL
:
303 case BYTECODE_OP_DIV
:
304 case BYTECODE_OP_MOD
:
305 case BYTECODE_OP_PLUS
:
306 case BYTECODE_OP_MINUS
:
308 ERR("unsupported bytecode op %u\n",
309 (unsigned int) *(bytecode_opcode_t
*) pc
);
320 case BYTECODE_OP_EQ_STRING
:
321 case BYTECODE_OP_NE_STRING
:
322 case BYTECODE_OP_GT_STRING
:
323 case BYTECODE_OP_LT_STRING
:
324 case BYTECODE_OP_GE_STRING
:
325 case BYTECODE_OP_LE_STRING
:
326 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
327 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
328 case BYTECODE_OP_EQ_S64
:
329 case BYTECODE_OP_NE_S64
:
330 case BYTECODE_OP_GT_S64
:
331 case BYTECODE_OP_LT_S64
:
332 case BYTECODE_OP_GE_S64
:
333 case BYTECODE_OP_LE_S64
:
334 case BYTECODE_OP_EQ_DOUBLE
:
335 case BYTECODE_OP_NE_DOUBLE
:
336 case BYTECODE_OP_GT_DOUBLE
:
337 case BYTECODE_OP_LT_DOUBLE
:
338 case BYTECODE_OP_GE_DOUBLE
:
339 case BYTECODE_OP_LE_DOUBLE
:
340 case BYTECODE_OP_EQ_DOUBLE_S64
:
341 case BYTECODE_OP_NE_DOUBLE_S64
:
342 case BYTECODE_OP_GT_DOUBLE_S64
:
343 case BYTECODE_OP_LT_DOUBLE_S64
:
344 case BYTECODE_OP_GE_DOUBLE_S64
:
345 case BYTECODE_OP_LE_DOUBLE_S64
:
346 case BYTECODE_OP_EQ_S64_DOUBLE
:
347 case BYTECODE_OP_NE_S64_DOUBLE
:
348 case BYTECODE_OP_GT_S64_DOUBLE
:
349 case BYTECODE_OP_LT_S64_DOUBLE
:
350 case BYTECODE_OP_GE_S64_DOUBLE
:
351 case BYTECODE_OP_LE_S64_DOUBLE
:
352 case BYTECODE_OP_BIT_RSHIFT
:
353 case BYTECODE_OP_BIT_LSHIFT
:
354 case BYTECODE_OP_BIT_AND
:
355 case BYTECODE_OP_BIT_OR
:
356 case BYTECODE_OP_BIT_XOR
:
358 if (unlikely(pc
+ sizeof(struct binary_op
)
359 > start_pc
+ bytecode
->len
)) {
366 case BYTECODE_OP_UNARY_PLUS
:
367 case BYTECODE_OP_UNARY_MINUS
:
368 case BYTECODE_OP_UNARY_NOT
:
369 case BYTECODE_OP_UNARY_PLUS_S64
:
370 case BYTECODE_OP_UNARY_MINUS_S64
:
371 case BYTECODE_OP_UNARY_NOT_S64
:
372 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
373 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
374 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
375 case BYTECODE_OP_UNARY_BIT_NOT
:
377 if (unlikely(pc
+ sizeof(struct unary_op
)
378 > start_pc
+ bytecode
->len
)) {
385 case BYTECODE_OP_AND
:
388 if (unlikely(pc
+ sizeof(struct logical_op
)
389 > start_pc
+ bytecode
->len
)) {
395 /* load field and get context ref */
396 case BYTECODE_OP_LOAD_FIELD_REF
:
397 case BYTECODE_OP_GET_CONTEXT_REF
:
398 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
399 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
400 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
401 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
402 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
403 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
404 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
406 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct field_ref
)
407 > start_pc
+ bytecode
->len
)) {
413 /* load from immediate operand */
414 case BYTECODE_OP_LOAD_STRING
:
415 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
417 struct load_op
*insn
= (struct load_op
*) pc
;
418 uint32_t str_len
, maxlen
;
420 if (unlikely(pc
+ sizeof(struct load_op
)
421 > start_pc
+ bytecode
->len
)) {
426 maxlen
= start_pc
+ bytecode
->len
- pc
- sizeof(struct load_op
);
427 str_len
= strnlen(insn
->data
, maxlen
);
428 if (unlikely(str_len
>= maxlen
)) {
429 /* Final '\0' not found within range */
435 case BYTECODE_OP_LOAD_S64
:
437 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct literal_numeric
)
438 > start_pc
+ bytecode
->len
)) {
444 case BYTECODE_OP_LOAD_DOUBLE
:
446 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct literal_double
)
447 > start_pc
+ bytecode
->len
)) {
453 case BYTECODE_OP_CAST_TO_S64
:
454 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
455 case BYTECODE_OP_CAST_NOP
:
457 if (unlikely(pc
+ sizeof(struct cast_op
)
458 > start_pc
+ bytecode
->len
)) {
465 * Instructions for recursive traversal through composed types.
467 case BYTECODE_OP_GET_CONTEXT_ROOT
:
468 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
469 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
470 case BYTECODE_OP_LOAD_FIELD
:
471 case BYTECODE_OP_LOAD_FIELD_S8
:
472 case BYTECODE_OP_LOAD_FIELD_S16
:
473 case BYTECODE_OP_LOAD_FIELD_S32
:
474 case BYTECODE_OP_LOAD_FIELD_S64
:
475 case BYTECODE_OP_LOAD_FIELD_U8
:
476 case BYTECODE_OP_LOAD_FIELD_U16
:
477 case BYTECODE_OP_LOAD_FIELD_U32
:
478 case BYTECODE_OP_LOAD_FIELD_U64
:
479 case BYTECODE_OP_LOAD_FIELD_STRING
:
480 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
481 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
482 if (unlikely(pc
+ sizeof(struct load_op
)
483 > start_pc
+ bytecode
->len
)) {
488 case BYTECODE_OP_GET_SYMBOL
:
490 struct load_op
*insn
= (struct load_op
*) pc
;
491 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
493 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_symbol
)
494 > start_pc
+ bytecode
->len
)) {
498 ret
= validate_get_symbol(bytecode
, sym
);
502 case BYTECODE_OP_GET_SYMBOL_FIELD
:
503 ERR("Unexpected get symbol field");
507 case BYTECODE_OP_GET_INDEX_U16
:
508 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_index_u16
)
509 > start_pc
+ bytecode
->len
)) {
514 case BYTECODE_OP_GET_INDEX_U64
:
515 if (unlikely(pc
+ sizeof(struct load_op
) + sizeof(struct get_index_u64
)
516 > start_pc
+ bytecode
->len
)) {
526 unsigned long delete_all_nodes(struct lttng_ust_lfht
*ht
)
528 struct lttng_ust_lfht_iter iter
;
529 struct lfht_mp_node
*node
;
530 unsigned long nr_nodes
= 0;
532 lttng_ust_lfht_for_each_entry(ht
, &iter
, node
, node
) {
535 ret
= lttng_ust_lfht_del(ht
, lttng_ust_lfht_iter_get_node(&iter
));
537 /* note: this hash table is never used concurrently */
550 int validate_instruction_context(
551 struct bytecode_runtime
*bytecode
__attribute__((unused
)),
552 struct vstack
*stack
,
557 const bytecode_opcode_t opcode
= *(bytecode_opcode_t
*) pc
;
560 case BYTECODE_OP_UNKNOWN
:
563 ERR("unknown bytecode op %u\n",
564 (unsigned int) *(bytecode_opcode_t
*) pc
);
569 case BYTECODE_OP_RETURN
:
570 case BYTECODE_OP_RETURN_S64
:
576 case BYTECODE_OP_MUL
:
577 case BYTECODE_OP_DIV
:
578 case BYTECODE_OP_MOD
:
579 case BYTECODE_OP_PLUS
:
580 case BYTECODE_OP_MINUS
:
582 ERR("unsupported bytecode op %u\n",
583 (unsigned int) opcode
);
590 ret
= bin_op_compare_check(stack
, opcode
, "==");
597 ret
= bin_op_compare_check(stack
, opcode
, "!=");
604 ret
= bin_op_compare_check(stack
, opcode
, ">");
611 ret
= bin_op_compare_check(stack
, opcode
, "<");
618 ret
= bin_op_compare_check(stack
, opcode
, ">=");
625 ret
= bin_op_compare_check(stack
, opcode
, "<=");
631 case BYTECODE_OP_EQ_STRING
:
632 case BYTECODE_OP_NE_STRING
:
633 case BYTECODE_OP_GT_STRING
:
634 case BYTECODE_OP_LT_STRING
:
635 case BYTECODE_OP_GE_STRING
:
636 case BYTECODE_OP_LE_STRING
:
638 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
639 ERR("Empty stack\n");
643 if (vstack_ax(stack
)->type
!= REG_STRING
644 || vstack_bx(stack
)->type
!= REG_STRING
) {
645 ERR("Unexpected register type for string comparator\n");
652 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
653 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
655 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
656 ERR("Empty stack\n");
660 if (vstack_ax(stack
)->type
!= REG_STAR_GLOB_STRING
661 && vstack_bx(stack
)->type
!= REG_STAR_GLOB_STRING
) {
662 ERR("Unexpected register type for globbing pattern comparator\n");
669 case BYTECODE_OP_EQ_S64
:
670 case BYTECODE_OP_NE_S64
:
671 case BYTECODE_OP_GT_S64
:
672 case BYTECODE_OP_LT_S64
:
673 case BYTECODE_OP_GE_S64
:
674 case BYTECODE_OP_LE_S64
:
676 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
677 ERR("Empty stack\n");
681 switch (vstack_ax(stack
)->type
) {
686 ERR("Unexpected register type for s64 comparator\n");
690 switch (vstack_bx(stack
)->type
) {
695 ERR("Unexpected register type for s64 comparator\n");
702 case BYTECODE_OP_EQ_DOUBLE
:
703 case BYTECODE_OP_NE_DOUBLE
:
704 case BYTECODE_OP_GT_DOUBLE
:
705 case BYTECODE_OP_LT_DOUBLE
:
706 case BYTECODE_OP_GE_DOUBLE
:
707 case BYTECODE_OP_LE_DOUBLE
:
709 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
710 ERR("Empty stack\n");
714 if (vstack_ax(stack
)->type
!= REG_DOUBLE
&& vstack_bx(stack
)->type
!= REG_DOUBLE
) {
715 ERR("Double operator should have two double registers\n");
722 case BYTECODE_OP_EQ_DOUBLE_S64
:
723 case BYTECODE_OP_NE_DOUBLE_S64
:
724 case BYTECODE_OP_GT_DOUBLE_S64
:
725 case BYTECODE_OP_LT_DOUBLE_S64
:
726 case BYTECODE_OP_GE_DOUBLE_S64
:
727 case BYTECODE_OP_LE_DOUBLE_S64
:
729 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
730 ERR("Empty stack\n");
734 switch (vstack_ax(stack
)->type
) {
739 ERR("Double-S64 operator has unexpected register types\n");
743 switch (vstack_bx(stack
)->type
) {
747 ERR("Double-S64 operator has unexpected register types\n");
754 case BYTECODE_OP_EQ_S64_DOUBLE
:
755 case BYTECODE_OP_NE_S64_DOUBLE
:
756 case BYTECODE_OP_GT_S64_DOUBLE
:
757 case BYTECODE_OP_LT_S64_DOUBLE
:
758 case BYTECODE_OP_GE_S64_DOUBLE
:
759 case BYTECODE_OP_LE_S64_DOUBLE
:
761 if (!vstack_ax(stack
) || !vstack_bx(stack
)) {
762 ERR("Empty stack\n");
766 switch (vstack_ax(stack
)->type
) {
770 ERR("S64-Double operator has unexpected register types\n");
774 switch (vstack_bx(stack
)->type
) {
779 ERR("S64-Double operator has unexpected register types\n");
786 case BYTECODE_OP_BIT_RSHIFT
:
787 ret
= bin_op_bitwise_check(stack
, opcode
, ">>");
791 case BYTECODE_OP_BIT_LSHIFT
:
792 ret
= bin_op_bitwise_check(stack
, opcode
, "<<");
796 case BYTECODE_OP_BIT_AND
:
797 ret
= bin_op_bitwise_check(stack
, opcode
, "&");
801 case BYTECODE_OP_BIT_OR
:
802 ret
= bin_op_bitwise_check(stack
, opcode
, "|");
806 case BYTECODE_OP_BIT_XOR
:
807 ret
= bin_op_bitwise_check(stack
, opcode
, "^");
813 case BYTECODE_OP_UNARY_PLUS
:
814 case BYTECODE_OP_UNARY_MINUS
:
815 case BYTECODE_OP_UNARY_NOT
:
817 if (!vstack_ax(stack
)) {
818 ERR("Empty stack\n");
822 switch (vstack_ax(stack
)->type
) {
824 ERR("unknown register type\n");
829 case REG_STAR_GLOB_STRING
:
830 ERR("Unary op can only be applied to numeric or floating point registers\n");
844 case BYTECODE_OP_UNARY_BIT_NOT
:
846 if (!vstack_ax(stack
)) {
847 ERR("Empty stack\n");
851 switch (vstack_ax(stack
)->type
) {
853 ERR("unknown register type\n");
858 case REG_STAR_GLOB_STRING
:
860 ERR("Unary bitwise op can only be applied to numeric registers\n");
873 case BYTECODE_OP_UNARY_PLUS_S64
:
874 case BYTECODE_OP_UNARY_MINUS_S64
:
875 case BYTECODE_OP_UNARY_NOT_S64
:
877 if (!vstack_ax(stack
)) {
878 ERR("Empty stack\n");
882 if (vstack_ax(stack
)->type
!= REG_S64
&&
883 vstack_ax(stack
)->type
!= REG_U64
) {
884 ERR("Invalid register type\n");
891 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
892 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
893 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
895 if (!vstack_ax(stack
)) {
896 ERR("Empty stack\n");
900 if (vstack_ax(stack
)->type
!= REG_DOUBLE
) {
901 ERR("Invalid register type\n");
909 case BYTECODE_OP_AND
:
912 struct logical_op
*insn
= (struct logical_op
*) pc
;
914 if (!vstack_ax(stack
)) {
915 ERR("Empty stack\n");
919 if (vstack_ax(stack
)->type
!= REG_S64
920 && vstack_ax(stack
)->type
!= REG_U64
921 && vstack_ax(stack
)->type
!= REG_UNKNOWN
) {
922 ERR("Logical comparator expects S64, U64 or dynamic register\n");
927 dbg_printf("Validate jumping to bytecode offset %u\n",
928 (unsigned int) insn
->skip_offset
);
929 if (unlikely(start_pc
+ insn
->skip_offset
<= pc
)) {
930 ERR("Loops are not allowed in bytecode\n");
938 case BYTECODE_OP_LOAD_FIELD_REF
:
940 ERR("Unknown field ref type\n");
944 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
945 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
947 struct load_op
*insn
= (struct load_op
*) pc
;
948 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
950 dbg_printf("Validate load field ref offset %u type string\n",
954 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
956 struct load_op
*insn
= (struct load_op
*) pc
;
957 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
959 dbg_printf("Validate load field ref offset %u type s64\n",
963 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
965 struct load_op
*insn
= (struct load_op
*) pc
;
966 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
968 dbg_printf("Validate load field ref offset %u type double\n",
973 /* load from immediate operand */
974 case BYTECODE_OP_LOAD_STRING
:
975 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
980 case BYTECODE_OP_LOAD_S64
:
985 case BYTECODE_OP_LOAD_DOUBLE
:
990 case BYTECODE_OP_CAST_TO_S64
:
991 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
993 struct cast_op
*insn
= (struct cast_op
*) pc
;
995 if (!vstack_ax(stack
)) {
996 ERR("Empty stack\n");
1000 switch (vstack_ax(stack
)->type
) {
1002 ERR("unknown register type\n");
1007 case REG_STAR_GLOB_STRING
:
1008 ERR("Cast op can only be applied to numeric or floating point registers\n");
1020 if (insn
->op
== BYTECODE_OP_CAST_DOUBLE_TO_S64
) {
1021 if (vstack_ax(stack
)->type
!= REG_DOUBLE
) {
1022 ERR("Cast expects double\n");
1029 case BYTECODE_OP_CAST_NOP
:
1034 /* get context ref */
1035 case BYTECODE_OP_GET_CONTEXT_REF
:
1037 struct load_op
*insn
= (struct load_op
*) pc
;
1038 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1040 dbg_printf("Validate get context ref offset %u type dynamic\n",
1044 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1046 struct load_op
*insn
= (struct load_op
*) pc
;
1047 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1049 dbg_printf("Validate get context ref offset %u type string\n",
1053 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1055 struct load_op
*insn
= (struct load_op
*) pc
;
1056 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1058 dbg_printf("Validate get context ref offset %u type s64\n",
1062 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1064 struct load_op
*insn
= (struct load_op
*) pc
;
1065 struct field_ref
*ref
= (struct field_ref
*) insn
->data
;
1067 dbg_printf("Validate get context ref offset %u type double\n",
1073 * Instructions for recursive traversal through composed types.
1075 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1077 dbg_printf("Validate get context root\n");
1080 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1082 dbg_printf("Validate get app context root\n");
1085 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1087 dbg_printf("Validate get payload root\n");
1090 case BYTECODE_OP_LOAD_FIELD
:
1093 * We tolerate that field type is unknown at validation,
1094 * because we are performing the load specialization in
1095 * a phase after validation.
1097 dbg_printf("Validate load field\n");
1102 * Disallow already specialized bytecode op load field instructions to
1103 * ensure that the received bytecode does not read a memory area larger
1104 * than the memory targeted by the instrumentation.
1106 case BYTECODE_OP_LOAD_FIELD_S8
:
1107 case BYTECODE_OP_LOAD_FIELD_S16
:
1108 case BYTECODE_OP_LOAD_FIELD_S32
:
1109 case BYTECODE_OP_LOAD_FIELD_S64
:
1110 case BYTECODE_OP_LOAD_FIELD_U8
:
1111 case BYTECODE_OP_LOAD_FIELD_U16
:
1112 case BYTECODE_OP_LOAD_FIELD_U32
:
1113 case BYTECODE_OP_LOAD_FIELD_U64
:
1114 case BYTECODE_OP_LOAD_FIELD_STRING
:
1115 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1116 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1118 dbg_printf("Validate load field, reject specialized load instruction (%d)\n",
1124 case BYTECODE_OP_GET_SYMBOL
:
1126 struct load_op
*insn
= (struct load_op
*) pc
;
1127 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
1129 dbg_printf("Validate get symbol offset %u\n", sym
->offset
);
1133 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1135 struct load_op
*insn
= (struct load_op
*) pc
;
1136 struct get_symbol
*sym
= (struct get_symbol
*) insn
->data
;
1138 dbg_printf("Validate get symbol field offset %u\n", sym
->offset
);
1142 case BYTECODE_OP_GET_INDEX_U16
:
1144 struct load_op
*insn
= (struct load_op
*) pc
;
1145 struct get_index_u16
*get_index
= (struct get_index_u16
*) insn
->data
;
1147 dbg_printf("Validate get index u16 index %u\n", get_index
->index
);
1151 case BYTECODE_OP_GET_INDEX_U64
:
1153 struct load_op
*insn
= (struct load_op
*) pc
;
1154 struct get_index_u64
*get_index
= (struct get_index_u64
*) insn
->data
;
1156 dbg_printf("Validate get index u64 index %" PRIu64
"\n", get_index
->index
);
1170 int validate_instruction_all_contexts(struct bytecode_runtime
*bytecode
,
1171 struct lttng_ust_lfht
*merge_points
,
1172 struct vstack
*stack
,
1177 unsigned long target_pc
= pc
- start_pc
;
1178 struct lttng_ust_lfht_iter iter
;
1179 struct lttng_ust_lfht_node
*node
;
1180 struct lfht_mp_node
*mp_node
;
1183 /* Validate the context resulting from the previous instruction */
1184 ret
= validate_instruction_context(bytecode
, stack
, start_pc
, pc
);
1188 /* Validate merge points */
1189 hash
= lttng_hash_mix((const char *) target_pc
, sizeof(target_pc
),
1191 lttng_ust_lfht_lookup(merge_points
, hash
, lttng_hash_match
,
1192 (const char *) target_pc
, &iter
);
1193 node
= lttng_ust_lfht_iter_get_node(&iter
);
1195 mp_node
= caa_container_of(node
, struct lfht_mp_node
, node
);
1197 dbg_printf("Bytecode: validate merge point at offset %lu\n",
1199 if (merge_points_compare(stack
, &mp_node
->stack
)) {
1200 ERR("Merge points differ for offset %lu\n",
1204 /* Once validated, we can remove the merge point */
1205 dbg_printf("Bytecode: remove merge point at offset %lu\n",
1207 ret
= lttng_ust_lfht_del(merge_points
, node
);
/*
 * Validate load instructions: specialized instructions not accepted as input.
 *
 * Return value:
 * >0: going to next insn.
 * 0: success, stop iteration.
 * <0: error.
 */
1222 int validate_load(char **_next_pc
,
1226 char *next_pc
= *_next_pc
;
1228 switch (*(bytecode_opcode_t
*) pc
) {
1229 case BYTECODE_OP_UNKNOWN
:
1232 ERR("Unknown bytecode op %u\n",
1233 (unsigned int) *(bytecode_opcode_t
*) pc
);
1238 case BYTECODE_OP_RETURN
:
1240 next_pc
+= sizeof(struct return_op
);
1244 case BYTECODE_OP_RETURN_S64
:
1246 next_pc
+= sizeof(struct return_op
);
1251 case BYTECODE_OP_MUL
:
1252 case BYTECODE_OP_DIV
:
1253 case BYTECODE_OP_MOD
:
1254 case BYTECODE_OP_PLUS
:
1255 case BYTECODE_OP_MINUS
:
1257 ERR("Unsupported bytecode op %u\n",
1258 (unsigned int) *(bytecode_opcode_t
*) pc
);
1263 case BYTECODE_OP_EQ
:
1264 case BYTECODE_OP_NE
:
1265 case BYTECODE_OP_GT
:
1266 case BYTECODE_OP_LT
:
1267 case BYTECODE_OP_GE
:
1268 case BYTECODE_OP_LE
:
1269 case BYTECODE_OP_EQ_STRING
:
1270 case BYTECODE_OP_NE_STRING
:
1271 case BYTECODE_OP_GT_STRING
:
1272 case BYTECODE_OP_LT_STRING
:
1273 case BYTECODE_OP_GE_STRING
:
1274 case BYTECODE_OP_LE_STRING
:
1275 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
1276 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
1277 case BYTECODE_OP_EQ_S64
:
1278 case BYTECODE_OP_NE_S64
:
1279 case BYTECODE_OP_GT_S64
:
1280 case BYTECODE_OP_LT_S64
:
1281 case BYTECODE_OP_GE_S64
:
1282 case BYTECODE_OP_LE_S64
:
1283 case BYTECODE_OP_EQ_DOUBLE
:
1284 case BYTECODE_OP_NE_DOUBLE
:
1285 case BYTECODE_OP_GT_DOUBLE
:
1286 case BYTECODE_OP_LT_DOUBLE
:
1287 case BYTECODE_OP_GE_DOUBLE
:
1288 case BYTECODE_OP_LE_DOUBLE
:
1289 case BYTECODE_OP_EQ_DOUBLE_S64
:
1290 case BYTECODE_OP_NE_DOUBLE_S64
:
1291 case BYTECODE_OP_GT_DOUBLE_S64
:
1292 case BYTECODE_OP_LT_DOUBLE_S64
:
1293 case BYTECODE_OP_GE_DOUBLE_S64
:
1294 case BYTECODE_OP_LE_DOUBLE_S64
:
1295 case BYTECODE_OP_EQ_S64_DOUBLE
:
1296 case BYTECODE_OP_NE_S64_DOUBLE
:
1297 case BYTECODE_OP_GT_S64_DOUBLE
:
1298 case BYTECODE_OP_LT_S64_DOUBLE
:
1299 case BYTECODE_OP_GE_S64_DOUBLE
:
1300 case BYTECODE_OP_LE_S64_DOUBLE
:
1301 case BYTECODE_OP_BIT_RSHIFT
:
1302 case BYTECODE_OP_BIT_LSHIFT
:
1303 case BYTECODE_OP_BIT_AND
:
1304 case BYTECODE_OP_BIT_OR
:
1305 case BYTECODE_OP_BIT_XOR
:
1307 next_pc
+= sizeof(struct binary_op
);
1312 case BYTECODE_OP_UNARY_PLUS
:
1313 case BYTECODE_OP_UNARY_MINUS
:
1314 case BYTECODE_OP_UNARY_PLUS_S64
:
1315 case BYTECODE_OP_UNARY_MINUS_S64
:
1316 case BYTECODE_OP_UNARY_NOT_S64
:
1317 case BYTECODE_OP_UNARY_NOT
:
1318 case BYTECODE_OP_UNARY_BIT_NOT
:
1319 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
1320 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
1321 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
1323 next_pc
+= sizeof(struct unary_op
);
1328 case BYTECODE_OP_AND
:
1329 case BYTECODE_OP_OR
:
1331 next_pc
+= sizeof(struct logical_op
);
1335 /* load field ref */
1336 case BYTECODE_OP_LOAD_FIELD_REF
:
1337 /* get context ref */
1338 case BYTECODE_OP_GET_CONTEXT_REF
:
1340 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1343 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
1344 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
1345 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1346 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
1347 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1348 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
1349 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1352 * Reject specialized load field ref instructions.
1358 /* load from immediate operand */
1359 case BYTECODE_OP_LOAD_STRING
:
1360 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1362 struct load_op
*insn
= (struct load_op
*) pc
;
1364 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1368 case BYTECODE_OP_LOAD_S64
:
1370 next_pc
+= sizeof(struct load_op
) + sizeof(struct literal_numeric
);
1373 case BYTECODE_OP_LOAD_DOUBLE
:
1375 next_pc
+= sizeof(struct load_op
) + sizeof(struct literal_double
);
1379 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1380 case BYTECODE_OP_CAST_TO_S64
:
1381 case BYTECODE_OP_CAST_NOP
:
1383 next_pc
+= sizeof(struct cast_op
);
1388 * Instructions for recursive traversal through composed types.
1390 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1391 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1392 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1393 case BYTECODE_OP_LOAD_FIELD
:
1395 next_pc
+= sizeof(struct load_op
);
1399 case BYTECODE_OP_LOAD_FIELD_S8
:
1400 case BYTECODE_OP_LOAD_FIELD_S16
:
1401 case BYTECODE_OP_LOAD_FIELD_S32
:
1402 case BYTECODE_OP_LOAD_FIELD_S64
:
1403 case BYTECODE_OP_LOAD_FIELD_U8
:
1404 case BYTECODE_OP_LOAD_FIELD_U16
:
1405 case BYTECODE_OP_LOAD_FIELD_U32
:
1406 case BYTECODE_OP_LOAD_FIELD_U64
:
1407 case BYTECODE_OP_LOAD_FIELD_STRING
:
1408 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1409 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1412 * Reject specialized load field instructions.
1418 case BYTECODE_OP_GET_SYMBOL
:
1419 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1421 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1425 case BYTECODE_OP_GET_INDEX_U16
:
1427 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1431 case BYTECODE_OP_GET_INDEX_U64
:
1433 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
1439 *_next_pc
= next_pc
;
/*
 * Return value:
 * >0: going to next insn.
 * 0: success, stop iteration.
 * <0: error.
 */
1450 int exec_insn(struct bytecode_runtime
*bytecode
__attribute__((unused
)),
1451 struct lttng_ust_lfht
*merge_points
,
1452 struct vstack
*stack
,
1457 char *next_pc
= *_next_pc
;
1459 switch (*(bytecode_opcode_t
*) pc
) {
1460 case BYTECODE_OP_UNKNOWN
:
1463 ERR("unknown bytecode op %u\n",
1464 (unsigned int) *(bytecode_opcode_t
*) pc
);
1469 case BYTECODE_OP_RETURN
:
1471 if (!vstack_ax(stack
)) {
1472 ERR("Empty stack\n");
1476 switch (vstack_ax(stack
)->type
) {
1485 ERR("Unexpected register type %d at end of bytecode\n",
1486 (int) vstack_ax(stack
)->type
);
1494 case BYTECODE_OP_RETURN_S64
:
1496 if (!vstack_ax(stack
)) {
1497 ERR("Empty stack\n");
1501 switch (vstack_ax(stack
)->type
) {
1507 ERR("Unexpected register type %d at end of bytecode\n",
1508 (int) vstack_ax(stack
)->type
);
1518 case BYTECODE_OP_MUL
:
1519 case BYTECODE_OP_DIV
:
1520 case BYTECODE_OP_MOD
:
1521 case BYTECODE_OP_PLUS
:
1522 case BYTECODE_OP_MINUS
:
1524 ERR("unsupported bytecode op %u\n",
1525 (unsigned int) *(bytecode_opcode_t
*) pc
);
1530 case BYTECODE_OP_EQ
:
1531 case BYTECODE_OP_NE
:
1532 case BYTECODE_OP_GT
:
1533 case BYTECODE_OP_LT
:
1534 case BYTECODE_OP_GE
:
1535 case BYTECODE_OP_LE
:
1536 case BYTECODE_OP_EQ_STRING
:
1537 case BYTECODE_OP_NE_STRING
:
1538 case BYTECODE_OP_GT_STRING
:
1539 case BYTECODE_OP_LT_STRING
:
1540 case BYTECODE_OP_GE_STRING
:
1541 case BYTECODE_OP_LE_STRING
:
1542 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
1543 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
1544 case BYTECODE_OP_EQ_S64
:
1545 case BYTECODE_OP_NE_S64
:
1546 case BYTECODE_OP_GT_S64
:
1547 case BYTECODE_OP_LT_S64
:
1548 case BYTECODE_OP_GE_S64
:
1549 case BYTECODE_OP_LE_S64
:
1550 case BYTECODE_OP_EQ_DOUBLE
:
1551 case BYTECODE_OP_NE_DOUBLE
:
1552 case BYTECODE_OP_GT_DOUBLE
:
1553 case BYTECODE_OP_LT_DOUBLE
:
1554 case BYTECODE_OP_GE_DOUBLE
:
1555 case BYTECODE_OP_LE_DOUBLE
:
1556 case BYTECODE_OP_EQ_DOUBLE_S64
:
1557 case BYTECODE_OP_NE_DOUBLE_S64
:
1558 case BYTECODE_OP_GT_DOUBLE_S64
:
1559 case BYTECODE_OP_LT_DOUBLE_S64
:
1560 case BYTECODE_OP_GE_DOUBLE_S64
:
1561 case BYTECODE_OP_LE_DOUBLE_S64
:
1562 case BYTECODE_OP_EQ_S64_DOUBLE
:
1563 case BYTECODE_OP_NE_S64_DOUBLE
:
1564 case BYTECODE_OP_GT_S64_DOUBLE
:
1565 case BYTECODE_OP_LT_S64_DOUBLE
:
1566 case BYTECODE_OP_GE_S64_DOUBLE
:
1567 case BYTECODE_OP_LE_S64_DOUBLE
:
1570 if (vstack_pop(stack
)) {
1574 if (!vstack_ax(stack
)) {
1575 ERR("Empty stack\n");
1579 switch (vstack_ax(stack
)->type
) {
1584 case REG_STAR_GLOB_STRING
:
1588 ERR("Unexpected register type %d for operation\n",
1589 (int) vstack_ax(stack
)->type
);
1594 vstack_ax(stack
)->type
= REG_S64
;
1595 next_pc
+= sizeof(struct binary_op
);
1599 case BYTECODE_OP_BIT_RSHIFT
:
1600 case BYTECODE_OP_BIT_LSHIFT
:
1601 case BYTECODE_OP_BIT_AND
:
1602 case BYTECODE_OP_BIT_OR
:
1603 case BYTECODE_OP_BIT_XOR
:
1606 if (vstack_pop(stack
)) {
1610 if (!vstack_ax(stack
)) {
1611 ERR("Empty stack\n");
1615 switch (vstack_ax(stack
)->type
) {
1620 case REG_STAR_GLOB_STRING
:
1624 ERR("Unexpected register type %d for operation\n",
1625 (int) vstack_ax(stack
)->type
);
1630 vstack_ax(stack
)->type
= REG_U64
;
1631 next_pc
+= sizeof(struct binary_op
);
1636 case BYTECODE_OP_UNARY_PLUS
:
1637 case BYTECODE_OP_UNARY_MINUS
:
1640 if (!vstack_ax(stack
)) {
1641 ERR("Empty stack\n");
1645 switch (vstack_ax(stack
)->type
) {
1652 ERR("Unexpected register type %d for operation\n",
1653 (int) vstack_ax(stack
)->type
);
1657 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1658 next_pc
+= sizeof(struct unary_op
);
1662 case BYTECODE_OP_UNARY_PLUS_S64
:
1663 case BYTECODE_OP_UNARY_MINUS_S64
:
1664 case BYTECODE_OP_UNARY_NOT_S64
:
1667 if (!vstack_ax(stack
)) {
1668 ERR("Empty stack\n");
1672 switch (vstack_ax(stack
)->type
) {
1677 ERR("Unexpected register type %d for operation\n",
1678 (int) vstack_ax(stack
)->type
);
1683 next_pc
+= sizeof(struct unary_op
);
1687 case BYTECODE_OP_UNARY_NOT
:
1690 if (!vstack_ax(stack
)) {
1691 ERR("Empty stack\n");
1695 switch (vstack_ax(stack
)->type
) {
1702 ERR("Unexpected register type %d for operation\n",
1703 (int) vstack_ax(stack
)->type
);
1708 next_pc
+= sizeof(struct unary_op
);
1712 case BYTECODE_OP_UNARY_BIT_NOT
:
1715 if (!vstack_ax(stack
)) {
1716 ERR("Empty stack\n");
1720 switch (vstack_ax(stack
)->type
) {
1727 ERR("Unexpected register type %d for operation\n",
1728 (int) vstack_ax(stack
)->type
);
1733 vstack_ax(stack
)->type
= REG_U64
;
1734 next_pc
+= sizeof(struct unary_op
);
1738 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
1741 if (!vstack_ax(stack
)) {
1742 ERR("Empty stack\n");
1746 switch (vstack_ax(stack
)->type
) {
1750 ERR("Incorrect register type %d for operation\n",
1751 (int) vstack_ax(stack
)->type
);
1756 vstack_ax(stack
)->type
= REG_S64
;
1757 next_pc
+= sizeof(struct unary_op
);
1761 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
1762 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
1765 if (!vstack_ax(stack
)) {
1766 ERR("Empty stack\n");
1770 switch (vstack_ax(stack
)->type
) {
1774 ERR("Incorrect register type %d for operation\n",
1775 (int) vstack_ax(stack
)->type
);
1780 vstack_ax(stack
)->type
= REG_DOUBLE
;
1781 next_pc
+= sizeof(struct unary_op
);
1786 case BYTECODE_OP_AND
:
1787 case BYTECODE_OP_OR
:
1789 struct logical_op
*insn
= (struct logical_op
*) pc
;
1792 /* Add merge point to table */
1793 merge_ret
= merge_point_add_check(merge_points
,
1794 insn
->skip_offset
, stack
);
1800 if (!vstack_ax(stack
)) {
1801 ERR("Empty stack\n");
1805 /* There is always a cast-to-s64 operation before a or/and op. */
1806 switch (vstack_ax(stack
)->type
) {
1811 ERR("Incorrect register type %d for operation\n",
1812 (int) vstack_ax(stack
)->type
);
1817 /* Continue to next instruction */
1818 /* Pop 1 when jump not taken */
1819 if (vstack_pop(stack
)) {
1823 next_pc
+= sizeof(struct logical_op
);
1827 /* load field ref */
1828 case BYTECODE_OP_LOAD_FIELD_REF
:
1830 ERR("Unknown field ref type\n");
1834 /* get context ref */
1835 case BYTECODE_OP_GET_CONTEXT_REF
:
1837 if (vstack_push(stack
)) {
1841 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1842 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1845 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
1846 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
1847 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1849 if (vstack_push(stack
)) {
1853 vstack_ax(stack
)->type
= REG_STRING
;
1854 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1857 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
1858 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1860 if (vstack_push(stack
)) {
1864 vstack_ax(stack
)->type
= REG_S64
;
1865 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1868 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
1869 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1871 if (vstack_push(stack
)) {
1875 vstack_ax(stack
)->type
= REG_DOUBLE
;
1876 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1880 /* load from immediate operand */
1881 case BYTECODE_OP_LOAD_STRING
:
1883 struct load_op
*insn
= (struct load_op
*) pc
;
1885 if (vstack_push(stack
)) {
1889 vstack_ax(stack
)->type
= REG_STRING
;
1890 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1894 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1896 struct load_op
*insn
= (struct load_op
*) pc
;
1898 if (vstack_push(stack
)) {
1902 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
1903 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1907 case BYTECODE_OP_LOAD_S64
:
1909 if (vstack_push(stack
)) {
1913 vstack_ax(stack
)->type
= REG_S64
;
1914 next_pc
+= sizeof(struct load_op
)
1915 + sizeof(struct literal_numeric
);
1919 case BYTECODE_OP_LOAD_DOUBLE
:
1921 if (vstack_push(stack
)) {
1925 vstack_ax(stack
)->type
= REG_DOUBLE
;
1926 next_pc
+= sizeof(struct load_op
)
1927 + sizeof(struct literal_double
);
1931 case BYTECODE_OP_CAST_TO_S64
:
1932 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1935 if (!vstack_ax(stack
)) {
1936 ERR("Empty stack\n");
1940 switch (vstack_ax(stack
)->type
) {
1947 ERR("Incorrect register type %d for cast\n",
1948 (int) vstack_ax(stack
)->type
);
1952 vstack_ax(stack
)->type
= REG_S64
;
1953 next_pc
+= sizeof(struct cast_op
);
1956 case BYTECODE_OP_CAST_NOP
:
1958 next_pc
+= sizeof(struct cast_op
);
1963 * Instructions for recursive traversal through composed types.
1965 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1966 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1967 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1969 if (vstack_push(stack
)) {
1973 vstack_ax(stack
)->type
= REG_PTR
;
1974 next_pc
+= sizeof(struct load_op
);
1978 case BYTECODE_OP_LOAD_FIELD
:
1981 if (!vstack_ax(stack
)) {
1982 ERR("Empty stack\n");
1986 if (vstack_ax(stack
)->type
!= REG_PTR
) {
1987 ERR("Expecting pointer on top of stack\n");
1991 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1992 next_pc
+= sizeof(struct load_op
);
1996 case BYTECODE_OP_LOAD_FIELD_S8
:
1997 case BYTECODE_OP_LOAD_FIELD_S16
:
1998 case BYTECODE_OP_LOAD_FIELD_S32
:
1999 case BYTECODE_OP_LOAD_FIELD_S64
:
2002 if (!vstack_ax(stack
)) {
2003 ERR("Empty stack\n");
2007 if (vstack_ax(stack
)->type
!= REG_PTR
) {
2008 ERR("Expecting pointer on top of stack\n");
2012 vstack_ax(stack
)->type
= REG_S64
;
2013 next_pc
+= sizeof(struct load_op
);
2017 case BYTECODE_OP_LOAD_FIELD_U8
:
2018 case BYTECODE_OP_LOAD_FIELD_U16
:
2019 case BYTECODE_OP_LOAD_FIELD_U32
:
2020 case BYTECODE_OP_LOAD_FIELD_U64
:
2023 if (!vstack_ax(stack
)) {
2024 ERR("Empty stack\n");
2028 if (vstack_ax(stack
)->type
!= REG_PTR
) {
2029 ERR("Expecting pointer on top of stack\n");
2033 vstack_ax(stack
)->type
= REG_U64
;
2034 next_pc
+= sizeof(struct load_op
);
2038 case BYTECODE_OP_LOAD_FIELD_STRING
:
2039 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
2042 if (!vstack_ax(stack
)) {
2043 ERR("Empty stack\n");
2047 if (vstack_ax(stack
)->type
!= REG_PTR
) {
2048 ERR("Expecting pointer on top of stack\n");
2052 vstack_ax(stack
)->type
= REG_STRING
;
2053 next_pc
+= sizeof(struct load_op
);
2057 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
2060 if (!vstack_ax(stack
)) {
2061 ERR("Empty stack\n");
2065 if (vstack_ax(stack
)->type
!= REG_PTR
) {
2066 ERR("Expecting pointer on top of stack\n");
2070 vstack_ax(stack
)->type
= REG_DOUBLE
;
2071 next_pc
+= sizeof(struct load_op
);
2075 case BYTECODE_OP_GET_SYMBOL
:
2076 case BYTECODE_OP_GET_SYMBOL_FIELD
:
2079 if (!vstack_ax(stack
)) {
2080 ERR("Empty stack\n");
2084 if (vstack_ax(stack
)->type
!= REG_PTR
) {
2085 ERR("Expecting pointer on top of stack\n");
2089 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
2093 case BYTECODE_OP_GET_INDEX_U16
:
2096 if (!vstack_ax(stack
)) {
2097 ERR("Empty stack\n");
2101 if (vstack_ax(stack
)->type
!= REG_PTR
) {
2102 ERR("Expecting pointer on top of stack\n");
2106 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
2110 case BYTECODE_OP_GET_INDEX_U64
:
2113 if (!vstack_ax(stack
)) {
2114 ERR("Empty stack\n");
2118 if (vstack_ax(stack
)->type
!= REG_PTR
) {
2119 ERR("Expecting pointer on top of stack\n");
2123 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);
2129 *_next_pc
= next_pc
;
2133 int lttng_bytecode_validate_load(struct bytecode_runtime
*bytecode
)
2135 char *pc
, *next_pc
, *start_pc
;
2138 start_pc
= &bytecode
->code
[0];
2139 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
2141 ret
= bytecode_validate_overflow(bytecode
, start_pc
, pc
);
2144 ERR("Bytecode overflow\n");
2147 dbg_printf("Validating loads: op %s (%u)\n",
2148 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t
*) pc
),
2149 (unsigned int) *(bytecode_opcode_t
*) pc
);
2151 ret
= validate_load(&next_pc
, pc
);
2160 * Never called concurrently (hash seed is shared).
2162 int lttng_bytecode_validate(struct bytecode_runtime
*bytecode
)
2164 struct lttng_ust_lfht
*merge_points
;
2165 char *pc
, *next_pc
, *start_pc
;
2167 struct vstack stack
;
2169 vstack_init(&stack
);
2171 if (!lttng_hash_seed_ready
) {
2172 lttng_hash_seed
= time(NULL
);
2173 lttng_hash_seed_ready
= 1;
2176 * Note: merge_points hash table used by single thread, and
2177 * never concurrently resized. Therefore, we can use it without
2178 * holding RCU read-side lock and free nodes without using
2181 merge_points
= lttng_ust_lfht_new(DEFAULT_NR_MERGE_POINTS
,
2182 MIN_NR_BUCKETS
, MAX_NR_BUCKETS
,
2184 if (!merge_points
) {
2185 ERR("Error allocating hash table for bytecode validation\n");
2188 start_pc
= &bytecode
->code
[0];
2189 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
2191 ret
= bytecode_validate_overflow(bytecode
, start_pc
, pc
);
2194 ERR("Bytecode overflow\n");
2197 dbg_printf("Validating op %s (%u)\n",
2198 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t
*) pc
),
2199 (unsigned int) *(bytecode_opcode_t
*) pc
);
2202 * For each instruction, validate the current context
2203 * (traversal of entire execution flow), and validate
2204 * all merge points targeting this instruction.
2206 ret
= validate_instruction_all_contexts(bytecode
, merge_points
,
2207 &stack
, start_pc
, pc
);
2210 ret
= exec_insn(bytecode
, merge_points
, &stack
, &next_pc
, pc
);
2215 if (delete_all_nodes(merge_points
)) {
2217 ERR("Unexpected merge points\n");
2221 if (lttng_ust_lfht_destroy(merge_points
)) {
2222 ERR("Error destroying hash table\n");