[lttng-ust.git] src/lib/lttng-ust/lttng-bytecode-validator.c
1 /*
2 * SPDX-License-Identifier: MIT
3 *
4 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
5 *
6 * LTTng UST bytecode validator.
7 */
8
9 #define _LGPL_SOURCE
10 #include <stddef.h>
11 #include <stdint.h>
12 #include <time.h>
13
14 #include "rculfhash.h"
15
16 #include "lttng-bytecode.h"
17 #include "common/hash.h"
18 #include "common/strutils.h"
19 #include "lib/lttng-ust/events.h"
20 #include "common/macros.h"
21
 22 /*
 23  * Number of merge points used as the hash table size. The hash table is
 24  * initialized to that size and never resized, because we do not want to
 25  * trigger RCU worker thread execution: fall back on linear traversal if
 26  * the number of merge points exceeds this value.
 27  */
28 #define DEFAULT_NR_MERGE_POINTS 128
29 #define MIN_NR_BUCKETS 128
30 #define MAX_NR_BUCKETS 128
31
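/*
 * Illustrative example (added for clarity, not part of the original file):
 * why merge points are needed. For a hypothetical "A && B" filter, the AND
 * instruction may short-circuit by jumping over the evaluation of B:
 *
 *   ...evaluate A...           stack: [ result of A ]
 *   AND skip_offset   --.      record merge point at the jump target,
 *   ...evaluate B...     |     then pop 1 on the fall-through path
 *   <jump target>   <----'     stack: [ result of B ]
 *   RETURN
 *
 * The stack snapshot recorded when the jump is emitted (result of A on top)
 * and the stack reaching the target on the fall-through path (result of B
 * on top) must have the same depth and compatible register types;
 * merge_point_add_check() and merge_points_compare() below enforce this.
 */
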
32 /* merge point table node */
33 struct lfht_mp_node {
34 struct lttng_ust_lfht_node node;
35
36 /* Context at merge point */
37 struct vstack stack;
38 unsigned long target_pc;
39 };
40
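/*
 * The merge point above stores a snapshot of the validator's virtual
 * stack. The vstack type and its accessors are declared in
 * lttng-bytecode.h; the sketch below (illustrative only, not compiled)
 * shows the interface assumed throughout this file. Names and the depth
 * macro are inferred from usage here and may differ from the real
 * declarations.
 */
#if 0	/* illustrative sketch only */
#define VSTACK_SKETCH_DEPTH	10	/* hypothetical depth */

struct vstack_entry {
	enum entry_type type;	/* REG_S64, REG_U64, REG_DOUBLE, REG_STRING, ... */
};

struct vstack {
	int top;		/* -1 when the stack is empty */
	struct vstack_entry e[VSTACK_SKETCH_DEPTH];
};

/* Top of stack (ax) and top of stack - 1 (bx), or NULL when absent. */
static struct vstack_entry *vstack_ax(struct vstack *stack);
static struct vstack_entry *vstack_bx(struct vstack *stack);

/* Both return nonzero on overflow/underflow. */
static int vstack_push(struct vstack *stack);
static int vstack_pop(struct vstack *stack);
#endif
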
41 static unsigned long lttng_hash_seed;
42 static unsigned int lttng_hash_seed_ready;
43
44 static
45 int lttng_hash_match(struct lttng_ust_lfht_node *node, const void *key)
46 {
47 struct lfht_mp_node *mp_node =
48 caa_container_of(node, struct lfht_mp_node, node);
49 unsigned long key_pc = (unsigned long) key;
50
51 if (mp_node->target_pc == key_pc)
52 return 1;
53 else
54 return 0;
55 }
56
57 static
58 int merge_points_compare(const struct vstack *stacka,
59 const struct vstack *stackb)
60 {
61 int i, len;
62
63 if (stacka->top != stackb->top)
64 return 1;
65 len = stacka->top + 1;
66 assert(len >= 0);
67 for (i = 0; i < len; i++) {
68 if (stacka->e[i].type != REG_UNKNOWN
69 && stackb->e[i].type != REG_UNKNOWN
70 && stacka->e[i].type != stackb->e[i].type)
71 return 1;
72 }
73 return 0;
74 }
75
76 static
77 int merge_point_add_check(struct lttng_ust_lfht *ht, unsigned long target_pc,
78 const struct vstack *stack)
79 {
80 struct lfht_mp_node *node;
81 unsigned long hash = lttng_hash_mix((const char *) target_pc,
82 sizeof(target_pc),
83 lttng_hash_seed);
84 struct lttng_ust_lfht_node *ret;
85
86 dbg_printf("Bytecode: adding merge point at offset %lu, hash %lu\n",
87 target_pc, hash);
88 node = zmalloc(sizeof(struct lfht_mp_node));
89 if (!node)
90 return -ENOMEM;
91 node->target_pc = target_pc;
92 memcpy(&node->stack, stack, sizeof(node->stack));
93 ret = lttng_ust_lfht_add_unique(ht, hash, lttng_hash_match,
94 (const char *) target_pc, &node->node);
95 if (ret != &node->node) {
96 struct lfht_mp_node *ret_mp =
97 caa_container_of(ret, struct lfht_mp_node, node);
98
99 /* Key already present */
100 dbg_printf("Bytecode: compare merge points for offset %lu, hash %lu\n",
101 target_pc, hash);
102 free(node);
103 if (merge_points_compare(stack, &ret_mp->stack)) {
104 ERR("Merge points differ for offset %lu\n",
105 target_pc);
106 return -EINVAL;
107 }
108 }
109 return 0;
110 }
111
112 /*
113 * Binary comparators use top of stack and top of stack -1.
114 * Return 0 if typing is known to match, 1 if typing is dynamic
115 * (unknown), negative error value on error.
116 */
117 static
118 int bin_op_compare_check(struct vstack *stack, bytecode_opcode_t opcode,
119 const char *str)
120 {
121 if (unlikely(!vstack_ax(stack) || !vstack_bx(stack)))
122 goto error_empty;
123
124 switch (vstack_ax(stack)->type) {
125 default:
126 goto error_type;
127
128 case REG_UNKNOWN:
129 goto unknown;
130 case REG_STRING:
131 switch (vstack_bx(stack)->type) {
132 default:
133 goto error_type;
134
135 case REG_UNKNOWN:
136 goto unknown;
137 case REG_STRING:
138 break;
139 case REG_STAR_GLOB_STRING:
140 if (opcode != BYTECODE_OP_EQ && opcode != BYTECODE_OP_NE) {
141 goto error_mismatch;
142 }
143 break;
144 case REG_S64:
145 case REG_U64:
146 case REG_DOUBLE:
147 goto error_mismatch;
148 }
149 break;
150 case REG_STAR_GLOB_STRING:
151 switch (vstack_bx(stack)->type) {
152 default:
153 goto error_type;
154
155 case REG_UNKNOWN:
156 goto unknown;
157 case REG_STRING:
158 if (opcode != BYTECODE_OP_EQ && opcode != BYTECODE_OP_NE) {
159 goto error_mismatch;
160 }
161 break;
162 case REG_STAR_GLOB_STRING:
163 case REG_S64:
164 case REG_U64:
165 case REG_DOUBLE:
166 goto error_mismatch;
167 }
168 break;
169 case REG_S64:
170 case REG_U64:
171 case REG_DOUBLE:
172 switch (vstack_bx(stack)->type) {
173 default:
174 goto error_type;
175
176 case REG_UNKNOWN:
177 goto unknown;
178 case REG_STRING:
179 case REG_STAR_GLOB_STRING:
180 goto error_mismatch;
181 case REG_S64:
182 case REG_U64:
183 case REG_DOUBLE:
184 break;
185 }
186 break;
187 }
188 return 0;
189
190 unknown:
191 return 1;
192
193 error_mismatch:
194 ERR("type mismatch for '%s' binary operator\n", str);
195 return -EINVAL;
196
197 error_empty:
198 ERR("empty stack for '%s' binary operator\n", str);
199 return -EINVAL;
200
201 error_type:
202 ERR("unknown type for '%s' binary operator\n", str);
203 return -EINVAL;
204 }
205
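/*
 * Sketch of how the tri-state return value above is meant to be consumed
 * (hypothetical helper, for illustration only; the real callers are in
 * validate_instruction_context() below):
 */
#if 0	/* illustrative sketch only */
static int check_eq_operands(struct vstack *stack)
{
	int ret = bin_op_compare_check(stack, BYTECODE_OP_EQ, "==");

	if (ret < 0)
		return ret;	/* type mismatch: reject the bytecode */
	if (ret == 1)
		return 0;	/* dynamic typing: defer the check to runtime */
	return 0;		/* static types are known to match */
}
#endif
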
206 /*
207 * Binary bitwise operators use top of stack and top of stack -1.
208 * Return 0 if typing is known to match, 1 if typing is dynamic
209 * (unknown), negative error value on error.
210 */
211 static
212 int bin_op_bitwise_check(struct vstack *stack,
213 bytecode_opcode_t opcode __attribute__((unused)),
214 const char *str)
215 {
216 if (unlikely(!vstack_ax(stack) || !vstack_bx(stack)))
217 goto error_empty;
218
219 switch (vstack_ax(stack)->type) {
220 default:
221 goto error_type;
222
223 case REG_UNKNOWN:
224 goto unknown;
225 case REG_S64:
226 case REG_U64:
227 switch (vstack_bx(stack)->type) {
228 default:
229 goto error_type;
230
231 case REG_UNKNOWN:
232 goto unknown;
233 case REG_S64:
234 case REG_U64:
235 break;
236 }
237 break;
238 }
239 return 0;
240
241 unknown:
242 return 1;
243
244 error_empty:
245 ERR("empty stack for '%s' binary operator\n", str);
246 return -EINVAL;
247
248 error_type:
249 ERR("unknown type for '%s' binary operator\n", str);
250 return -EINVAL;
251 }
252
253 static
254 int validate_get_symbol(struct bytecode_runtime *bytecode,
255 const struct get_symbol *sym)
256 {
257 const char *str, *str_limit;
258 size_t len_limit;
259
260 if (sym->offset >= bytecode->p.bc->bc.len - bytecode->p.bc->bc.reloc_offset)
261 return -EINVAL;
262
263 str = bytecode->p.bc->bc.data + bytecode->p.bc->bc.reloc_offset + sym->offset;
264 str_limit = bytecode->p.bc->bc.data + bytecode->p.bc->bc.len;
265 len_limit = str_limit - str;
266 if (strnlen(str, len_limit) == len_limit)
267 return -EINVAL;
268 return 0;
269 }
270
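/*
 * Layout assumed by the bounds checks in validate_get_symbol() above
 * (sketch for illustration; see the bytecode definitions for the
 * authoritative format). The symbol strings live in the same blob as the
 * instructions, starting at reloc_offset:
 *
 *   bc.data                                          bc.data + bc.len
 *   |--------- instructions ---------|------ symbol strings ------|
 *                                    ^
 *                                    bc.data + bc.reloc_offset
 *
 * sym->offset is relative to bc.data + bc.reloc_offset, so it must be
 * smaller than (bc.len - bc.reloc_offset), and the referenced string must
 * be NUL-terminated before bc.data + bc.len.
 */
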
271 /*
272 * Validate bytecode range overflow within the validation pass.
273 * Called for each instruction encountered.
274 */
275 static
276 int bytecode_validate_overflow(struct bytecode_runtime *bytecode,
277 char *start_pc, char *pc)
278 {
279 int ret = 0;
280
281 switch (*(bytecode_opcode_t *) pc) {
282 case BYTECODE_OP_UNKNOWN:
283 default:
284 {
285 ERR("unknown bytecode op %u\n",
286 (unsigned int) *(bytecode_opcode_t *) pc);
287 ret = -EINVAL;
288 break;
289 }
290
291 case BYTECODE_OP_RETURN:
292 case BYTECODE_OP_RETURN_S64:
293 {
294 if (unlikely(pc + sizeof(struct return_op)
295 > start_pc + bytecode->len)) {
296 ret = -ERANGE;
297 }
298 break;
299 }
300
301 /* binary */
302 case BYTECODE_OP_MUL:
303 case BYTECODE_OP_DIV:
304 case BYTECODE_OP_MOD:
305 case BYTECODE_OP_PLUS:
306 case BYTECODE_OP_MINUS:
307 {
308 ERR("unsupported bytecode op %u\n",
309 (unsigned int) *(bytecode_opcode_t *) pc);
310 ret = -EINVAL;
311 break;
312 }
313
314 case BYTECODE_OP_EQ:
315 case BYTECODE_OP_NE:
316 case BYTECODE_OP_GT:
317 case BYTECODE_OP_LT:
318 case BYTECODE_OP_GE:
319 case BYTECODE_OP_LE:
320 case BYTECODE_OP_EQ_STRING:
321 case BYTECODE_OP_NE_STRING:
322 case BYTECODE_OP_GT_STRING:
323 case BYTECODE_OP_LT_STRING:
324 case BYTECODE_OP_GE_STRING:
325 case BYTECODE_OP_LE_STRING:
326 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
327 case BYTECODE_OP_NE_STAR_GLOB_STRING:
328 case BYTECODE_OP_EQ_S64:
329 case BYTECODE_OP_NE_S64:
330 case BYTECODE_OP_GT_S64:
331 case BYTECODE_OP_LT_S64:
332 case BYTECODE_OP_GE_S64:
333 case BYTECODE_OP_LE_S64:
334 case BYTECODE_OP_EQ_DOUBLE:
335 case BYTECODE_OP_NE_DOUBLE:
336 case BYTECODE_OP_GT_DOUBLE:
337 case BYTECODE_OP_LT_DOUBLE:
338 case BYTECODE_OP_GE_DOUBLE:
339 case BYTECODE_OP_LE_DOUBLE:
340 case BYTECODE_OP_EQ_DOUBLE_S64:
341 case BYTECODE_OP_NE_DOUBLE_S64:
342 case BYTECODE_OP_GT_DOUBLE_S64:
343 case BYTECODE_OP_LT_DOUBLE_S64:
344 case BYTECODE_OP_GE_DOUBLE_S64:
345 case BYTECODE_OP_LE_DOUBLE_S64:
346 case BYTECODE_OP_EQ_S64_DOUBLE:
347 case BYTECODE_OP_NE_S64_DOUBLE:
348 case BYTECODE_OP_GT_S64_DOUBLE:
349 case BYTECODE_OP_LT_S64_DOUBLE:
350 case BYTECODE_OP_GE_S64_DOUBLE:
351 case BYTECODE_OP_LE_S64_DOUBLE:
352 case BYTECODE_OP_BIT_RSHIFT:
353 case BYTECODE_OP_BIT_LSHIFT:
354 case BYTECODE_OP_BIT_AND:
355 case BYTECODE_OP_BIT_OR:
356 case BYTECODE_OP_BIT_XOR:
357 {
358 if (unlikely(pc + sizeof(struct binary_op)
359 > start_pc + bytecode->len)) {
360 ret = -ERANGE;
361 }
362 break;
363 }
364
365 /* unary */
366 case BYTECODE_OP_UNARY_PLUS:
367 case BYTECODE_OP_UNARY_MINUS:
368 case BYTECODE_OP_UNARY_NOT:
369 case BYTECODE_OP_UNARY_PLUS_S64:
370 case BYTECODE_OP_UNARY_MINUS_S64:
371 case BYTECODE_OP_UNARY_NOT_S64:
372 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
373 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
374 case BYTECODE_OP_UNARY_NOT_DOUBLE:
375 case BYTECODE_OP_UNARY_BIT_NOT:
376 {
377 if (unlikely(pc + sizeof(struct unary_op)
378 > start_pc + bytecode->len)) {
379 ret = -ERANGE;
380 }
381 break;
382 }
383
384 /* logical */
385 case BYTECODE_OP_AND:
386 case BYTECODE_OP_OR:
387 {
388 if (unlikely(pc + sizeof(struct logical_op)
389 > start_pc + bytecode->len)) {
390 ret = -ERANGE;
391 }
392 break;
393 }
394
395 /* load field ref */
396 case BYTECODE_OP_LOAD_FIELD_REF:
397 {
398 ERR("Unknown field ref type\n");
399 ret = -EINVAL;
400 break;
401 }
402
403 /* get context ref */
404 case BYTECODE_OP_GET_CONTEXT_REF:
405 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
406 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
407 case BYTECODE_OP_LOAD_FIELD_REF_S64:
408 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
409 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
410 case BYTECODE_OP_GET_CONTEXT_REF_S64:
411 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
412 {
413 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct field_ref)
414 > start_pc + bytecode->len)) {
415 ret = -ERANGE;
416 }
417 break;
418 }
419
420 /* load from immediate operand */
421 case BYTECODE_OP_LOAD_STRING:
422 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
423 {
424 struct load_op *insn = (struct load_op *) pc;
425 uint32_t str_len, maxlen;
426
427 if (unlikely(pc + sizeof(struct load_op)
428 > start_pc + bytecode->len)) {
429 ret = -ERANGE;
430 break;
431 }
432
433 maxlen = start_pc + bytecode->len - pc - sizeof(struct load_op);
434 str_len = strnlen(insn->data, maxlen);
435 if (unlikely(str_len >= maxlen)) {
436 /* Final '\0' not found within range */
437 ret = -ERANGE;
438 }
439 break;
440 }
441
442 case BYTECODE_OP_LOAD_S64:
443 {
444 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct literal_numeric)
445 > start_pc + bytecode->len)) {
446 ret = -ERANGE;
447 }
448 break;
449 }
450
451 case BYTECODE_OP_LOAD_DOUBLE:
452 {
453 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct literal_double)
454 > start_pc + bytecode->len)) {
455 ret = -ERANGE;
456 }
457 break;
458 }
459
460 case BYTECODE_OP_CAST_TO_S64:
461 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
462 case BYTECODE_OP_CAST_NOP:
463 {
464 if (unlikely(pc + sizeof(struct cast_op)
465 > start_pc + bytecode->len)) {
466 ret = -ERANGE;
467 }
468 break;
469 }
470
471 /*
472 * Instructions for recursive traversal through composed types.
473 */
474 case BYTECODE_OP_GET_CONTEXT_ROOT:
475 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
476 case BYTECODE_OP_GET_PAYLOAD_ROOT:
477 case BYTECODE_OP_LOAD_FIELD:
478 case BYTECODE_OP_LOAD_FIELD_S8:
479 case BYTECODE_OP_LOAD_FIELD_S16:
480 case BYTECODE_OP_LOAD_FIELD_S32:
481 case BYTECODE_OP_LOAD_FIELD_S64:
482 case BYTECODE_OP_LOAD_FIELD_U8:
483 case BYTECODE_OP_LOAD_FIELD_U16:
484 case BYTECODE_OP_LOAD_FIELD_U32:
485 case BYTECODE_OP_LOAD_FIELD_U64:
486 case BYTECODE_OP_LOAD_FIELD_STRING:
487 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
488 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
489 if (unlikely(pc + sizeof(struct load_op)
490 > start_pc + bytecode->len)) {
491 ret = -ERANGE;
492 }
493 break;
494
495 case BYTECODE_OP_GET_SYMBOL:
496 {
497 struct load_op *insn = (struct load_op *) pc;
498 struct get_symbol *sym = (struct get_symbol *) insn->data;
499
500 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_symbol)
501 > start_pc + bytecode->len)) {
502 ret = -ERANGE;
503 break;
504 }
505 ret = validate_get_symbol(bytecode, sym);
506 break;
507 }
508
509 case BYTECODE_OP_GET_SYMBOL_FIELD:
510 ERR("Unexpected get symbol field");
511 ret = -EINVAL;
512 break;
513
514 case BYTECODE_OP_GET_INDEX_U16:
515 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_index_u16)
516 > start_pc + bytecode->len)) {
517 ret = -ERANGE;
518 }
519 break;
520
521 case BYTECODE_OP_GET_INDEX_U64:
522 if (unlikely(pc + sizeof(struct load_op) + sizeof(struct get_index_u64)
523 > start_pc + bytecode->len)) {
524 ret = -ERANGE;
525 }
526 break;
527 }
528
529 return ret;
530 }
531
532 static
533 unsigned long delete_all_nodes(struct lttng_ust_lfht *ht)
534 {
535 struct lttng_ust_lfht_iter iter;
536 struct lfht_mp_node *node;
537 unsigned long nr_nodes = 0;
538
539 lttng_ust_lfht_for_each_entry(ht, &iter, node, node) {
540 int ret;
541
542 ret = lttng_ust_lfht_del(ht, lttng_ust_lfht_iter_get_node(&iter));
543 assert(!ret);
544 /* note: this hash table is never used concurrently */
545 free(node);
546 nr_nodes++;
547 }
548 return nr_nodes;
549 }
550
551 /*
552 * Return value:
553 * >=0: success
554 * <0: error
555 */
556 static
557 int validate_instruction_context(
558 struct bytecode_runtime *bytecode __attribute__((unused)),
559 struct vstack *stack,
560 char *start_pc,
561 char *pc)
562 {
563 int ret = 0;
564 const bytecode_opcode_t opcode = *(bytecode_opcode_t *) pc;
565
566 switch (opcode) {
567 case BYTECODE_OP_UNKNOWN:
568 default:
569 {
570 ERR("unknown bytecode op %u\n",
571 (unsigned int) *(bytecode_opcode_t *) pc);
572 ret = -EINVAL;
573 goto end;
574 }
575
576 case BYTECODE_OP_RETURN:
577 case BYTECODE_OP_RETURN_S64:
578 {
579 goto end;
580 }
581
582 /* binary */
583 case BYTECODE_OP_MUL:
584 case BYTECODE_OP_DIV:
585 case BYTECODE_OP_MOD:
586 case BYTECODE_OP_PLUS:
587 case BYTECODE_OP_MINUS:
588 {
589 ERR("unsupported bytecode op %u\n",
590 (unsigned int) opcode);
591 ret = -EINVAL;
592 goto end;
593 }
594
595 case BYTECODE_OP_EQ:
596 {
597 ret = bin_op_compare_check(stack, opcode, "==");
598 if (ret < 0)
599 goto end;
600 break;
601 }
602 case BYTECODE_OP_NE:
603 {
604 ret = bin_op_compare_check(stack, opcode, "!=");
605 if (ret < 0)
606 goto end;
607 break;
608 }
609 case BYTECODE_OP_GT:
610 {
611 ret = bin_op_compare_check(stack, opcode, ">");
612 if (ret < 0)
613 goto end;
614 break;
615 }
616 case BYTECODE_OP_LT:
617 {
618 ret = bin_op_compare_check(stack, opcode, "<");
619 if (ret < 0)
620 goto end;
621 break;
622 }
623 case BYTECODE_OP_GE:
624 {
625 ret = bin_op_compare_check(stack, opcode, ">=");
626 if (ret < 0)
627 goto end;
628 break;
629 }
630 case BYTECODE_OP_LE:
631 {
632 ret = bin_op_compare_check(stack, opcode, "<=");
633 if (ret < 0)
634 goto end;
635 break;
636 }
637
638 case BYTECODE_OP_EQ_STRING:
639 case BYTECODE_OP_NE_STRING:
640 case BYTECODE_OP_GT_STRING:
641 case BYTECODE_OP_LT_STRING:
642 case BYTECODE_OP_GE_STRING:
643 case BYTECODE_OP_LE_STRING:
644 {
645 if (!vstack_ax(stack) || !vstack_bx(stack)) {
646 ERR("Empty stack\n");
647 ret = -EINVAL;
648 goto end;
649 }
650 if (vstack_ax(stack)->type != REG_STRING
651 || vstack_bx(stack)->type != REG_STRING) {
652 ERR("Unexpected register type for string comparator\n");
653 ret = -EINVAL;
654 goto end;
655 }
656 break;
657 }
658
659 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
660 case BYTECODE_OP_NE_STAR_GLOB_STRING:
661 {
662 if (!vstack_ax(stack) || !vstack_bx(stack)) {
663 ERR("Empty stack\n");
664 ret = -EINVAL;
665 goto end;
666 }
667 if (vstack_ax(stack)->type != REG_STAR_GLOB_STRING
668 && vstack_bx(stack)->type != REG_STAR_GLOB_STRING) {
669 ERR("Unexpected register type for globbing pattern comparator\n");
670 ret = -EINVAL;
671 goto end;
672 }
673 break;
674 }
675
676 case BYTECODE_OP_EQ_S64:
677 case BYTECODE_OP_NE_S64:
678 case BYTECODE_OP_GT_S64:
679 case BYTECODE_OP_LT_S64:
680 case BYTECODE_OP_GE_S64:
681 case BYTECODE_OP_LE_S64:
682 {
683 if (!vstack_ax(stack) || !vstack_bx(stack)) {
684 ERR("Empty stack\n");
685 ret = -EINVAL;
686 goto end;
687 }
688 switch (vstack_ax(stack)->type) {
689 case REG_S64:
690 case REG_U64:
691 break;
692 default:
693 ERR("Unexpected register type for s64 comparator\n");
694 ret = -EINVAL;
695 goto end;
696 }
697 switch (vstack_bx(stack)->type) {
698 case REG_S64:
699 case REG_U64:
700 break;
701 default:
702 ERR("Unexpected register type for s64 comparator\n");
703 ret = -EINVAL;
704 goto end;
705 }
706 break;
707 }
708
709 case BYTECODE_OP_EQ_DOUBLE:
710 case BYTECODE_OP_NE_DOUBLE:
711 case BYTECODE_OP_GT_DOUBLE:
712 case BYTECODE_OP_LT_DOUBLE:
713 case BYTECODE_OP_GE_DOUBLE:
714 case BYTECODE_OP_LE_DOUBLE:
715 {
716 if (!vstack_ax(stack) || !vstack_bx(stack)) {
717 ERR("Empty stack\n");
718 ret = -EINVAL;
719 goto end;
720 }
721 if (vstack_ax(stack)->type != REG_DOUBLE && vstack_bx(stack)->type != REG_DOUBLE) {
722 ERR("Double operator should have two double registers\n");
723 ret = -EINVAL;
724 goto end;
725 }
726 break;
727 }
728
729 case BYTECODE_OP_EQ_DOUBLE_S64:
730 case BYTECODE_OP_NE_DOUBLE_S64:
731 case BYTECODE_OP_GT_DOUBLE_S64:
732 case BYTECODE_OP_LT_DOUBLE_S64:
733 case BYTECODE_OP_GE_DOUBLE_S64:
734 case BYTECODE_OP_LE_DOUBLE_S64:
735 {
736 if (!vstack_ax(stack) || !vstack_bx(stack)) {
737 ERR("Empty stack\n");
738 ret = -EINVAL;
739 goto end;
740 }
741 switch (vstack_ax(stack)->type) {
742 case REG_S64:
743 case REG_U64:
744 break;
745 default:
746 ERR("Double-S64 operator has unexpected register types\n");
747 ret = -EINVAL;
748 goto end;
749 }
750 switch (vstack_bx(stack)->type) {
751 case REG_DOUBLE:
752 break;
753 default:
754 ERR("Double-S64 operator has unexpected register types\n");
755 ret = -EINVAL;
756 goto end;
757 }
758 break;
759 }
760
761 case BYTECODE_OP_EQ_S64_DOUBLE:
762 case BYTECODE_OP_NE_S64_DOUBLE:
763 case BYTECODE_OP_GT_S64_DOUBLE:
764 case BYTECODE_OP_LT_S64_DOUBLE:
765 case BYTECODE_OP_GE_S64_DOUBLE:
766 case BYTECODE_OP_LE_S64_DOUBLE:
767 {
768 if (!vstack_ax(stack) || !vstack_bx(stack)) {
769 ERR("Empty stack\n");
770 ret = -EINVAL;
771 goto end;
772 }
773 switch (vstack_ax(stack)->type) {
774 case REG_DOUBLE:
775 break;
776 default:
777 ERR("S64-Double operator has unexpected register types\n");
778 ret = -EINVAL;
779 goto end;
780 }
781 switch (vstack_bx(stack)->type) {
782 case REG_S64:
783 case REG_U64:
784 break;
785 default:
786 ERR("S64-Double operator has unexpected register types\n");
787 ret = -EINVAL;
788 goto end;
789 }
790 break;
791 }
792
793 case BYTECODE_OP_BIT_RSHIFT:
794 ret = bin_op_bitwise_check(stack, opcode, ">>");
795 if (ret < 0)
796 goto end;
797 break;
798 case BYTECODE_OP_BIT_LSHIFT:
799 ret = bin_op_bitwise_check(stack, opcode, "<<");
800 if (ret < 0)
801 goto end;
802 break;
803 case BYTECODE_OP_BIT_AND:
804 ret = bin_op_bitwise_check(stack, opcode, "&");
805 if (ret < 0)
806 goto end;
807 break;
808 case BYTECODE_OP_BIT_OR:
809 ret = bin_op_bitwise_check(stack, opcode, "|");
810 if (ret < 0)
811 goto end;
812 break;
813 case BYTECODE_OP_BIT_XOR:
814 ret = bin_op_bitwise_check(stack, opcode, "^");
815 if (ret < 0)
816 goto end;
817 break;
818
819 /* unary */
820 case BYTECODE_OP_UNARY_PLUS:
821 case BYTECODE_OP_UNARY_MINUS:
822 case BYTECODE_OP_UNARY_NOT:
823 {
824 if (!vstack_ax(stack)) {
825 ERR("Empty stack\n");
826 ret = -EINVAL;
827 goto end;
828 }
829 switch (vstack_ax(stack)->type) {
830 default:
831 ERR("unknown register type\n");
832 ret = -EINVAL;
833 goto end;
834
835 case REG_STRING:
836 case REG_STAR_GLOB_STRING:
837 ERR("Unary op can only be applied to numeric or floating point registers\n");
838 ret = -EINVAL;
839 goto end;
840 case REG_S64:
841 break;
842 case REG_U64:
843 break;
844 case REG_DOUBLE:
845 break;
846 case REG_UNKNOWN:
847 break;
848 }
849 break;
850 }
851 case BYTECODE_OP_UNARY_BIT_NOT:
852 {
853 if (!vstack_ax(stack)) {
854 ERR("Empty stack\n");
855 ret = -EINVAL;
856 goto end;
857 }
858 switch (vstack_ax(stack)->type) {
859 default:
860 ERR("unknown register type\n");
861 ret = -EINVAL;
862 goto end;
863
864 case REG_STRING:
865 case REG_STAR_GLOB_STRING:
866 case REG_DOUBLE:
867 ERR("Unary bitwise op can only be applied to numeric registers\n");
868 ret = -EINVAL;
869 goto end;
870 case REG_S64:
871 break;
872 case REG_U64:
873 break;
874 case REG_UNKNOWN:
875 break;
876 }
877 break;
878 }
879
880 case BYTECODE_OP_UNARY_PLUS_S64:
881 case BYTECODE_OP_UNARY_MINUS_S64:
882 case BYTECODE_OP_UNARY_NOT_S64:
883 {
884 if (!vstack_ax(stack)) {
885 ERR("Empty stack\n");
886 ret = -EINVAL;
887 goto end;
888 }
889 if (vstack_ax(stack)->type != REG_S64 &&
890 vstack_ax(stack)->type != REG_U64) {
891 ERR("Invalid register type\n");
892 ret = -EINVAL;
893 goto end;
894 }
895 break;
896 }
897
898 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
899 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
900 case BYTECODE_OP_UNARY_NOT_DOUBLE:
901 {
902 if (!vstack_ax(stack)) {
903 ERR("Empty stack\n");
904 ret = -EINVAL;
905 goto end;
906 }
907 if (vstack_ax(stack)->type != REG_DOUBLE) {
908 ERR("Invalid register type\n");
909 ret = -EINVAL;
910 goto end;
911 }
912 break;
913 }
914
915 /* logical */
916 case BYTECODE_OP_AND:
917 case BYTECODE_OP_OR:
918 {
919 struct logical_op *insn = (struct logical_op *) pc;
920
921 if (!vstack_ax(stack)) {
922 ERR("Empty stack\n");
923 ret = -EINVAL;
924 goto end;
925 }
926 if (vstack_ax(stack)->type != REG_S64
927 && vstack_ax(stack)->type != REG_U64
928 && vstack_ax(stack)->type != REG_UNKNOWN) {
929 ERR("Logical comparator expects S64, U64 or dynamic register\n");
930 ret = -EINVAL;
931 goto end;
932 }
933
934 dbg_printf("Validate jumping to bytecode offset %u\n",
935 (unsigned int) insn->skip_offset);
936 if (unlikely(start_pc + insn->skip_offset <= pc)) {
937 ERR("Loops are not allowed in bytecode\n");
938 ret = -EINVAL;
939 goto end;
940 }
941 break;
942 }
943
944 /* load field ref */
945 case BYTECODE_OP_LOAD_FIELD_REF:
946 {
947 ERR("Unknown field ref type\n");
948 ret = -EINVAL;
949 goto end;
950 }
951 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
952 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
953 {
954 struct load_op *insn = (struct load_op *) pc;
955 struct field_ref *ref = (struct field_ref *) insn->data;
956
957 dbg_printf("Validate load field ref offset %u type string\n",
958 ref->offset);
959 break;
960 }
961 case BYTECODE_OP_LOAD_FIELD_REF_S64:
962 {
963 struct load_op *insn = (struct load_op *) pc;
964 struct field_ref *ref = (struct field_ref *) insn->data;
965
966 dbg_printf("Validate load field ref offset %u type s64\n",
967 ref->offset);
968 break;
969 }
970 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
971 {
972 struct load_op *insn = (struct load_op *) pc;
973 struct field_ref *ref = (struct field_ref *) insn->data;
974
975 dbg_printf("Validate load field ref offset %u type double\n",
976 ref->offset);
977 break;
978 }
979
980 /* load from immediate operand */
981 case BYTECODE_OP_LOAD_STRING:
982 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
983 {
984 break;
985 }
986
987 case BYTECODE_OP_LOAD_S64:
988 {
989 break;
990 }
991
992 case BYTECODE_OP_LOAD_DOUBLE:
993 {
994 break;
995 }
996
997 case BYTECODE_OP_CAST_TO_S64:
998 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
999 {
1000 struct cast_op *insn = (struct cast_op *) pc;
1001
1002 if (!vstack_ax(stack)) {
1003 ERR("Empty stack\n");
1004 ret = -EINVAL;
1005 goto end;
1006 }
1007 switch (vstack_ax(stack)->type) {
1008 default:
1009 ERR("unknown register type\n");
1010 ret = -EINVAL;
1011 goto end;
1012
1013 case REG_STRING:
1014 case REG_STAR_GLOB_STRING:
1015 ERR("Cast op can only be applied to numeric or floating point registers\n");
1016 ret = -EINVAL;
1017 goto end;
1018 case REG_S64:
1019 break;
1020 case REG_U64:
1021 break;
1022 case REG_DOUBLE:
1023 break;
1024 case REG_UNKNOWN:
1025 break;
1026 }
1027 if (insn->op == BYTECODE_OP_CAST_DOUBLE_TO_S64) {
1028 if (vstack_ax(stack)->type != REG_DOUBLE) {
1029 ERR("Cast expects double\n");
1030 ret = -EINVAL;
1031 goto end;
1032 }
1033 }
1034 break;
1035 }
1036 case BYTECODE_OP_CAST_NOP:
1037 {
1038 break;
1039 }
1040
1041 /* get context ref */
1042 case BYTECODE_OP_GET_CONTEXT_REF:
1043 {
1044 struct load_op *insn = (struct load_op *) pc;
1045 struct field_ref *ref = (struct field_ref *) insn->data;
1046
1047 dbg_printf("Validate get context ref offset %u type dynamic\n",
1048 ref->offset);
1049 break;
1050 }
1051 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
1052 {
1053 struct load_op *insn = (struct load_op *) pc;
1054 struct field_ref *ref = (struct field_ref *) insn->data;
1055
1056 dbg_printf("Validate get context ref offset %u type string\n",
1057 ref->offset);
1058 break;
1059 }
1060 case BYTECODE_OP_GET_CONTEXT_REF_S64:
1061 {
1062 struct load_op *insn = (struct load_op *) pc;
1063 struct field_ref *ref = (struct field_ref *) insn->data;
1064
1065 dbg_printf("Validate get context ref offset %u type s64\n",
1066 ref->offset);
1067 break;
1068 }
1069 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
1070 {
1071 struct load_op *insn = (struct load_op *) pc;
1072 struct field_ref *ref = (struct field_ref *) insn->data;
1073
1074 dbg_printf("Validate get context ref offset %u type double\n",
1075 ref->offset);
1076 break;
1077 }
1078
1079 /*
1080 * Instructions for recursive traversal through composed types.
1081 */
1082 case BYTECODE_OP_GET_CONTEXT_ROOT:
1083 {
1084 dbg_printf("Validate get context root\n");
1085 break;
1086 }
1087 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
1088 {
1089 dbg_printf("Validate get app context root\n");
1090 break;
1091 }
1092 case BYTECODE_OP_GET_PAYLOAD_ROOT:
1093 {
1094 dbg_printf("Validate get payload root\n");
1095 break;
1096 }
1097 case BYTECODE_OP_LOAD_FIELD:
1098 {
1099 /*
1100  * We tolerate the field type being unknown at validation time,
1101  * because load specialization is performed in a later phase,
1102  * after validation.
1103  */
1104 dbg_printf("Validate load field\n");
1105 break;
1106 }
1107
1108 /*
1109  * Disallow already-specialized load field bytecode instructions, to
1110  * ensure that the received bytecode does not read a memory area larger
1111  * than the memory targeted by the instrumentation.
1112  */
1113 case BYTECODE_OP_LOAD_FIELD_S8:
1114 case BYTECODE_OP_LOAD_FIELD_S16:
1115 case BYTECODE_OP_LOAD_FIELD_S32:
1116 case BYTECODE_OP_LOAD_FIELD_S64:
1117 case BYTECODE_OP_LOAD_FIELD_U8:
1118 case BYTECODE_OP_LOAD_FIELD_U16:
1119 case BYTECODE_OP_LOAD_FIELD_U32:
1120 case BYTECODE_OP_LOAD_FIELD_U64:
1121 case BYTECODE_OP_LOAD_FIELD_STRING:
1122 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
1123 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
1124 {
1125 dbg_printf("Validate load field, reject specialized load instruction (%d)\n",
1126 (int) opcode);
1127 ret = -EINVAL;
1128 goto end;
1129 }
1130
1131 case BYTECODE_OP_GET_SYMBOL:
1132 {
1133 struct load_op *insn = (struct load_op *) pc;
1134 struct get_symbol *sym = (struct get_symbol *) insn->data;
1135
1136 dbg_printf("Validate get symbol offset %u\n", sym->offset);
1137 break;
1138 }
1139
1140 case BYTECODE_OP_GET_SYMBOL_FIELD:
1141 {
1142 struct load_op *insn = (struct load_op *) pc;
1143 struct get_symbol *sym = (struct get_symbol *) insn->data;
1144
1145 dbg_printf("Validate get symbol field offset %u\n", sym->offset);
1146 break;
1147 }
1148
1149 case BYTECODE_OP_GET_INDEX_U16:
1150 {
1151 struct load_op *insn = (struct load_op *) pc;
1152 struct get_index_u16 *get_index = (struct get_index_u16 *) insn->data;
1153
1154 dbg_printf("Validate get index u16 index %u\n", get_index->index);
1155 break;
1156 }
1157
1158 case BYTECODE_OP_GET_INDEX_U64:
1159 {
1160 struct load_op *insn = (struct load_op *) pc;
1161 struct get_index_u64 *get_index = (struct get_index_u64 *) insn->data;
1162
1163 dbg_printf("Validate get index u64 index %" PRIu64 "\n", get_index->index);
1164 break;
1165 }
1166 }
1167 end:
1168 return ret;
1169 }
1170
1171 /*
1172 * Return value:
1173 * 0: success
1174 * <0: error
1175 */
1176 static
1177 int validate_instruction_all_contexts(struct bytecode_runtime *bytecode,
1178 struct lttng_ust_lfht *merge_points,
1179 struct vstack *stack,
1180 char *start_pc,
1181 char *pc)
1182 {
1183 int ret;
1184 unsigned long target_pc = pc - start_pc;
1185 struct lttng_ust_lfht_iter iter;
1186 struct lttng_ust_lfht_node *node;
1187 struct lfht_mp_node *mp_node;
1188 unsigned long hash;
1189
1190 /* Validate the context resulting from the previous instruction */
1191 ret = validate_instruction_context(bytecode, stack, start_pc, pc);
1192 if (ret < 0)
1193 return ret;
1194
1195 /* Validate merge points */
1196 hash = lttng_hash_mix((const char *) target_pc, sizeof(target_pc),
1197 lttng_hash_seed);
1198 lttng_ust_lfht_lookup(merge_points, hash, lttng_hash_match,
1199 (const char *) target_pc, &iter);
1200 node = lttng_ust_lfht_iter_get_node(&iter);
1201 if (node) {
1202 mp_node = caa_container_of(node, struct lfht_mp_node, node);
1203
1204 dbg_printf("Bytecode: validate merge point at offset %lu\n",
1205 target_pc);
1206 if (merge_points_compare(stack, &mp_node->stack)) {
1207 ERR("Merge points differ for offset %lu\n",
1208 target_pc);
1209 return -EINVAL;
1210 }
1211 /* Once validated, we can remove the merge point */
1212 dbg_printf("Bytecode: remove merge point at offset %lu\n",
1213 target_pc);
1214 ret = lttng_ust_lfht_del(merge_points, node);
1215 assert(!ret);
1216 }
1217 return 0;
1218 }
1219
1220 /*
1221 * Return value:
1222 * >0: going to next insn.
1223 * 0: success, stop iteration.
1224 * <0: error
1225 */
1226 static
1227 int exec_insn(struct bytecode_runtime *bytecode __attribute__((unused)),
1228 struct lttng_ust_lfht *merge_points,
1229 struct vstack *stack,
1230 char **_next_pc,
1231 char *pc)
1232 {
1233 int ret = 1;
1234 char *next_pc = *_next_pc;
1235
1236 switch (*(bytecode_opcode_t *) pc) {
1237 case BYTECODE_OP_UNKNOWN:
1238 default:
1239 {
1240 ERR("unknown bytecode op %u\n",
1241 (unsigned int) *(bytecode_opcode_t *) pc);
1242 ret = -EINVAL;
1243 goto end;
1244 }
1245
1246 case BYTECODE_OP_RETURN:
1247 {
1248 if (!vstack_ax(stack)) {
1249 ERR("Empty stack\n");
1250 ret = -EINVAL;
1251 goto end;
1252 }
1253 switch (vstack_ax(stack)->type) {
1254 case REG_S64:
1255 case REG_U64:
1256 case REG_DOUBLE:
1257 case REG_STRING:
1258 case REG_PTR:
1259 case REG_UNKNOWN:
1260 break;
1261 default:
1262 ERR("Unexpected register type %d at end of bytecode\n",
1263 (int) vstack_ax(stack)->type);
1264 ret = -EINVAL;
1265 goto end;
1266 }
1267
1268 ret = 0;
1269 goto end;
1270 }
1271 case BYTECODE_OP_RETURN_S64:
1272 {
1273 if (!vstack_ax(stack)) {
1274 ERR("Empty stack\n");
1275 ret = -EINVAL;
1276 goto end;
1277 }
1278 switch (vstack_ax(stack)->type) {
1279 case REG_S64:
1280 case REG_U64:
1281 break;
1282 default:
1283 case REG_UNKNOWN:
1284 ERR("Unexpected register type %d at end of bytecode\n",
1285 (int) vstack_ax(stack)->type);
1286 ret = -EINVAL;
1287 goto end;
1288 }
1289
1290 ret = 0;
1291 goto end;
1292 }
1293
1294 /* binary */
1295 case BYTECODE_OP_MUL:
1296 case BYTECODE_OP_DIV:
1297 case BYTECODE_OP_MOD:
1298 case BYTECODE_OP_PLUS:
1299 case BYTECODE_OP_MINUS:
1300 {
1301 ERR("unsupported bytecode op %u\n",
1302 (unsigned int) *(bytecode_opcode_t *) pc);
1303 ret = -EINVAL;
1304 goto end;
1305 }
1306
1307 case BYTECODE_OP_EQ:
1308 case BYTECODE_OP_NE:
1309 case BYTECODE_OP_GT:
1310 case BYTECODE_OP_LT:
1311 case BYTECODE_OP_GE:
1312 case BYTECODE_OP_LE:
1313 case BYTECODE_OP_EQ_STRING:
1314 case BYTECODE_OP_NE_STRING:
1315 case BYTECODE_OP_GT_STRING:
1316 case BYTECODE_OP_LT_STRING:
1317 case BYTECODE_OP_GE_STRING:
1318 case BYTECODE_OP_LE_STRING:
1319 case BYTECODE_OP_EQ_STAR_GLOB_STRING:
1320 case BYTECODE_OP_NE_STAR_GLOB_STRING:
1321 case BYTECODE_OP_EQ_S64:
1322 case BYTECODE_OP_NE_S64:
1323 case BYTECODE_OP_GT_S64:
1324 case BYTECODE_OP_LT_S64:
1325 case BYTECODE_OP_GE_S64:
1326 case BYTECODE_OP_LE_S64:
1327 case BYTECODE_OP_EQ_DOUBLE:
1328 case BYTECODE_OP_NE_DOUBLE:
1329 case BYTECODE_OP_GT_DOUBLE:
1330 case BYTECODE_OP_LT_DOUBLE:
1331 case BYTECODE_OP_GE_DOUBLE:
1332 case BYTECODE_OP_LE_DOUBLE:
1333 case BYTECODE_OP_EQ_DOUBLE_S64:
1334 case BYTECODE_OP_NE_DOUBLE_S64:
1335 case BYTECODE_OP_GT_DOUBLE_S64:
1336 case BYTECODE_OP_LT_DOUBLE_S64:
1337 case BYTECODE_OP_GE_DOUBLE_S64:
1338 case BYTECODE_OP_LE_DOUBLE_S64:
1339 case BYTECODE_OP_EQ_S64_DOUBLE:
1340 case BYTECODE_OP_NE_S64_DOUBLE:
1341 case BYTECODE_OP_GT_S64_DOUBLE:
1342 case BYTECODE_OP_LT_S64_DOUBLE:
1343 case BYTECODE_OP_GE_S64_DOUBLE:
1344 case BYTECODE_OP_LE_S64_DOUBLE:
1345 {
1346 /* Pop 2, push 1 */
1347 if (vstack_pop(stack)) {
1348 ret = -EINVAL;
1349 goto end;
1350 }
1351 if (!vstack_ax(stack)) {
1352 ERR("Empty stack\n");
1353 ret = -EINVAL;
1354 goto end;
1355 }
1356 switch (vstack_ax(stack)->type) {
1357 case REG_S64:
1358 case REG_U64:
1359 case REG_DOUBLE:
1360 case REG_STRING:
1361 case REG_STAR_GLOB_STRING:
1362 case REG_UNKNOWN:
1363 break;
1364 default:
1365 ERR("Unexpected register type %d for operation\n",
1366 (int) vstack_ax(stack)->type);
1367 ret = -EINVAL;
1368 goto end;
1369 }
1370
1371 vstack_ax(stack)->type = REG_S64;
1372 next_pc += sizeof(struct binary_op);
1373 break;
1374 }
1375
1376 case BYTECODE_OP_BIT_RSHIFT:
1377 case BYTECODE_OP_BIT_LSHIFT:
1378 case BYTECODE_OP_BIT_AND:
1379 case BYTECODE_OP_BIT_OR:
1380 case BYTECODE_OP_BIT_XOR:
1381 {
1382 /* Pop 2, push 1 */
1383 if (vstack_pop(stack)) {
1384 ret = -EINVAL;
1385 goto end;
1386 }
1387 if (!vstack_ax(stack)) {
1388 ERR("Empty stack\n");
1389 ret = -EINVAL;
1390 goto end;
1391 }
1392 switch (vstack_ax(stack)->type) {
1393 case REG_S64:
1394 case REG_U64:
1395 case REG_DOUBLE:
1396 case REG_STRING:
1397 case REG_STAR_GLOB_STRING:
1398 case REG_UNKNOWN:
1399 break;
1400 default:
1401 ERR("Unexpected register type %d for operation\n",
1402 (int) vstack_ax(stack)->type);
1403 ret = -EINVAL;
1404 goto end;
1405 }
1406
1407 vstack_ax(stack)->type = REG_U64;
1408 next_pc += sizeof(struct binary_op);
1409 break;
1410 }
1411
1412 /* unary */
1413 case BYTECODE_OP_UNARY_PLUS:
1414 case BYTECODE_OP_UNARY_MINUS:
1415 {
1416 /* Pop 1, push 1 */
1417 if (!vstack_ax(stack)) {
1418 ERR("Empty stack\n");
1419 ret = -EINVAL;
1420 goto end;
1421 }
1422 switch (vstack_ax(stack)->type) {
1423 case REG_UNKNOWN:
1424 case REG_DOUBLE:
1425 case REG_S64:
1426 case REG_U64:
1427 break;
1428 default:
1429 ERR("Unexpected register type %d for operation\n",
1430 (int) vstack_ax(stack)->type);
1431 ret = -EINVAL;
1432 goto end;
1433 }
1434 vstack_ax(stack)->type = REG_UNKNOWN;
1435 next_pc += sizeof(struct unary_op);
1436 break;
1437 }
1438
1439 case BYTECODE_OP_UNARY_PLUS_S64:
1440 case BYTECODE_OP_UNARY_MINUS_S64:
1441 case BYTECODE_OP_UNARY_NOT_S64:
1442 {
1443 /* Pop 1, push 1 */
1444 if (!vstack_ax(stack)) {
1445 ERR("Empty stack\n");
1446 ret = -EINVAL;
1447 goto end;
1448 }
1449 switch (vstack_ax(stack)->type) {
1450 case REG_S64:
1451 case REG_U64:
1452 break;
1453 default:
1454 ERR("Unexpected register type %d for operation\n",
1455 (int) vstack_ax(stack)->type);
1456 ret = -EINVAL;
1457 goto end;
1458 }
1459
1460 next_pc += sizeof(struct unary_op);
1461 break;
1462 }
1463
1464 case BYTECODE_OP_UNARY_NOT:
1465 {
1466 /* Pop 1, push 1 */
1467 if (!vstack_ax(stack)) {
1468 ERR("Empty stack\n");
1469 ret = -EINVAL;
1470 goto end;
1471 }
1472 switch (vstack_ax(stack)->type) {
1473 case REG_UNKNOWN:
1474 case REG_DOUBLE:
1475 case REG_S64:
1476 case REG_U64:
1477 break;
1478 default:
1479 ERR("Unexpected register type %d for operation\n",
1480 (int) vstack_ax(stack)->type);
1481 ret = -EINVAL;
1482 goto end;
1483 }
1484
1485 next_pc += sizeof(struct unary_op);
1486 break;
1487 }
1488
1489 case BYTECODE_OP_UNARY_BIT_NOT:
1490 {
1491 /* Pop 1, push 1 */
1492 if (!vstack_ax(stack)) {
1493 ERR("Empty stack\n");
1494 ret = -EINVAL;
1495 goto end;
1496 }
1497 switch (vstack_ax(stack)->type) {
1498 case REG_UNKNOWN:
1499 case REG_S64:
1500 case REG_U64:
1501 break;
1502 case REG_DOUBLE:
1503 default:
1504 ERR("Unexpected register type %d for operation\n",
1505 (int) vstack_ax(stack)->type);
1506 ret = -EINVAL;
1507 goto end;
1508 }
1509
1510 vstack_ax(stack)->type = REG_U64;
1511 next_pc += sizeof(struct unary_op);
1512 break;
1513 }
1514
1515 case BYTECODE_OP_UNARY_NOT_DOUBLE:
1516 {
1517 /* Pop 1, push 1 */
1518 if (!vstack_ax(stack)) {
1519 ERR("Empty stack\n");
1520 ret = -EINVAL;
1521 goto end;
1522 }
1523 switch (vstack_ax(stack)->type) {
1524 case REG_DOUBLE:
1525 break;
1526 default:
1527 ERR("Incorrect register type %d for operation\n",
1528 (int) vstack_ax(stack)->type);
1529 ret = -EINVAL;
1530 goto end;
1531 }
1532
1533 vstack_ax(stack)->type = REG_S64;
1534 next_pc += sizeof(struct unary_op);
1535 break;
1536 }
1537
1538 case BYTECODE_OP_UNARY_PLUS_DOUBLE:
1539 case BYTECODE_OP_UNARY_MINUS_DOUBLE:
1540 {
1541 /* Pop 1, push 1 */
1542 if (!vstack_ax(stack)) {
1543 ERR("Empty stack\n");
1544 ret = -EINVAL;
1545 goto end;
1546 }
1547 switch (vstack_ax(stack)->type) {
1548 case REG_DOUBLE:
1549 break;
1550 default:
1551 ERR("Incorrect register type %d for operation\n",
1552 (int) vstack_ax(stack)->type);
1553 ret = -EINVAL;
1554 goto end;
1555 }
1556
1557 vstack_ax(stack)->type = REG_DOUBLE;
1558 next_pc += sizeof(struct unary_op);
1559 break;
1560 }
1561
1562 /* logical */
1563 case BYTECODE_OP_AND:
1564 case BYTECODE_OP_OR:
1565 {
1566 struct logical_op *insn = (struct logical_op *) pc;
1567 int merge_ret;
1568
1569 /* Add merge point to table */
1570 merge_ret = merge_point_add_check(merge_points,
1571 insn->skip_offset, stack);
1572 if (merge_ret) {
1573 ret = merge_ret;
1574 goto end;
1575 }
1576
1577 if (!vstack_ax(stack)) {
1578 ERR("Empty stack\n");
1579 ret = -EINVAL;
1580 goto end;
1581 }
1582 /* There is always a cast-to-s64 operation before an or/and op. */
1583 switch (vstack_ax(stack)->type) {
1584 case REG_S64:
1585 case REG_U64:
1586 break;
1587 default:
1588 ERR("Incorrect register type %d for operation\n",
1589 (int) vstack_ax(stack)->type);
1590 ret = -EINVAL;
1591 goto end;
1592 }
1593
1594 /* Continue to next instruction */
1595 /* Pop 1 when jump not taken */
1596 if (vstack_pop(stack)) {
1597 ret = -EINVAL;
1598 goto end;
1599 }
1600 next_pc += sizeof(struct logical_op);
1601 break;
1602 }
1603
1604 /* load field ref */
1605 case BYTECODE_OP_LOAD_FIELD_REF:
1606 {
1607 ERR("Unknown field ref type\n");
1608 ret = -EINVAL;
1609 goto end;
1610 }
1611 /* get context ref */
1612 case BYTECODE_OP_GET_CONTEXT_REF:
1613 {
1614 if (vstack_push(stack)) {
1615 ret = -EINVAL;
1616 goto end;
1617 }
1618 vstack_ax(stack)->type = REG_UNKNOWN;
1619 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1620 break;
1621 }
1622 case BYTECODE_OP_LOAD_FIELD_REF_STRING:
1623 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE:
1624 case BYTECODE_OP_GET_CONTEXT_REF_STRING:
1625 {
1626 if (vstack_push(stack)) {
1627 ret = -EINVAL;
1628 goto end;
1629 }
1630 vstack_ax(stack)->type = REG_STRING;
1631 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1632 break;
1633 }
1634 case BYTECODE_OP_LOAD_FIELD_REF_S64:
1635 case BYTECODE_OP_GET_CONTEXT_REF_S64:
1636 {
1637 if (vstack_push(stack)) {
1638 ret = -EINVAL;
1639 goto end;
1640 }
1641 vstack_ax(stack)->type = REG_S64;
1642 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1643 break;
1644 }
1645 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE:
1646 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE:
1647 {
1648 if (vstack_push(stack)) {
1649 ret = -EINVAL;
1650 goto end;
1651 }
1652 vstack_ax(stack)->type = REG_DOUBLE;
1653 next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
1654 break;
1655 }
1656
1657 /* load from immediate operand */
1658 case BYTECODE_OP_LOAD_STRING:
1659 {
1660 struct load_op *insn = (struct load_op *) pc;
1661
1662 if (vstack_push(stack)) {
1663 ret = -EINVAL;
1664 goto end;
1665 }
1666 vstack_ax(stack)->type = REG_STRING;
1667 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1668 break;
1669 }
1670
1671 case BYTECODE_OP_LOAD_STAR_GLOB_STRING:
1672 {
1673 struct load_op *insn = (struct load_op *) pc;
1674
1675 if (vstack_push(stack)) {
1676 ret = -EINVAL;
1677 goto end;
1678 }
1679 vstack_ax(stack)->type = REG_STAR_GLOB_STRING;
1680 next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
1681 break;
1682 }
1683
1684 case BYTECODE_OP_LOAD_S64:
1685 {
1686 if (vstack_push(stack)) {
1687 ret = -EINVAL;
1688 goto end;
1689 }
1690 vstack_ax(stack)->type = REG_S64;
1691 next_pc += sizeof(struct load_op)
1692 + sizeof(struct literal_numeric);
1693 break;
1694 }
1695
1696 case BYTECODE_OP_LOAD_DOUBLE:
1697 {
1698 if (vstack_push(stack)) {
1699 ret = -EINVAL;
1700 goto end;
1701 }
1702 vstack_ax(stack)->type = REG_DOUBLE;
1703 next_pc += sizeof(struct load_op)
1704 + sizeof(struct literal_double);
1705 break;
1706 }
1707
1708 case BYTECODE_OP_CAST_TO_S64:
1709 case BYTECODE_OP_CAST_DOUBLE_TO_S64:
1710 {
1711 /* Pop 1, push 1 */
1712 if (!vstack_ax(stack)) {
1713 ERR("Empty stack\n");
1714 ret = -EINVAL;
1715 goto end;
1716 }
1717 switch (vstack_ax(stack)->type) {
1718 case REG_S64:
1719 case REG_U64:
1720 case REG_DOUBLE:
1721 case REG_UNKNOWN:
1722 break;
1723 default:
1724 ERR("Incorrect register type %d for cast\n",
1725 (int) vstack_ax(stack)->type);
1726 ret = -EINVAL;
1727 goto end;
1728 }
1729 vstack_ax(stack)->type = REG_S64;
1730 next_pc += sizeof(struct cast_op);
1731 break;
1732 }
1733 case BYTECODE_OP_CAST_NOP:
1734 {
1735 next_pc += sizeof(struct cast_op);
1736 break;
1737 }
1738
1739 /*
1740 * Instructions for recursive traversal through composed types.
1741 */
1742 case BYTECODE_OP_GET_CONTEXT_ROOT:
1743 case BYTECODE_OP_GET_APP_CONTEXT_ROOT:
1744 case BYTECODE_OP_GET_PAYLOAD_ROOT:
1745 {
1746 if (vstack_push(stack)) {
1747 ret = -EINVAL;
1748 goto end;
1749 }
1750 vstack_ax(stack)->type = REG_PTR;
1751 next_pc += sizeof(struct load_op);
1752 break;
1753 }
1754
1755 case BYTECODE_OP_LOAD_FIELD:
1756 {
1757 /* Pop 1, push 1 */
1758 if (!vstack_ax(stack)) {
1759 ERR("Empty stack\n");
1760 ret = -EINVAL;
1761 goto end;
1762 }
1763 if (vstack_ax(stack)->type != REG_PTR) {
1764 ERR("Expecting pointer on top of stack\n");
1765 ret = -EINVAL;
1766 goto end;
1767 }
1768 vstack_ax(stack)->type = REG_UNKNOWN;
1769 next_pc += sizeof(struct load_op);
1770 break;
1771 }
1772
1773 case BYTECODE_OP_LOAD_FIELD_S8:
1774 case BYTECODE_OP_LOAD_FIELD_S16:
1775 case BYTECODE_OP_LOAD_FIELD_S32:
1776 case BYTECODE_OP_LOAD_FIELD_S64:
1777 {
1778 /* Pop 1, push 1 */
1779 if (!vstack_ax(stack)) {
1780 ERR("Empty stack\n");
1781 ret = -EINVAL;
1782 goto end;
1783 }
1784 if (vstack_ax(stack)->type != REG_PTR) {
1785 ERR("Expecting pointer on top of stack\n");
1786 ret = -EINVAL;
1787 goto end;
1788 }
1789 vstack_ax(stack)->type = REG_S64;
1790 next_pc += sizeof(struct load_op);
1791 break;
1792 }
1793
1794 case BYTECODE_OP_LOAD_FIELD_U8:
1795 case BYTECODE_OP_LOAD_FIELD_U16:
1796 case BYTECODE_OP_LOAD_FIELD_U32:
1797 case BYTECODE_OP_LOAD_FIELD_U64:
1798 {
1799 /* Pop 1, push 1 */
1800 if (!vstack_ax(stack)) {
1801 ERR("Empty stack\n");
1802 ret = -EINVAL;
1803 goto end;
1804 }
1805 if (vstack_ax(stack)->type != REG_PTR) {
1806 ERR("Expecting pointer on top of stack\n");
1807 ret = -EINVAL;
1808 goto end;
1809 }
1810 vstack_ax(stack)->type = REG_U64;
1811 next_pc += sizeof(struct load_op);
1812 break;
1813 }
1814
1815 case BYTECODE_OP_LOAD_FIELD_STRING:
1816 case BYTECODE_OP_LOAD_FIELD_SEQUENCE:
1817 {
1818 /* Pop 1, push 1 */
1819 if (!vstack_ax(stack)) {
1820 ERR("Empty stack\n");
1821 ret = -EINVAL;
1822 goto end;
1823 }
1824 if (vstack_ax(stack)->type != REG_PTR) {
1825 ERR("Expecting pointer on top of stack\n");
1826 ret = -EINVAL;
1827 goto end;
1828 }
1829 vstack_ax(stack)->type = REG_STRING;
1830 next_pc += sizeof(struct load_op);
1831 break;
1832 }
1833
1834 case BYTECODE_OP_LOAD_FIELD_DOUBLE:
1835 {
1836 /* Pop 1, push 1 */
1837 if (!vstack_ax(stack)) {
1838 ERR("Empty stack\n");
1839 ret = -EINVAL;
1840 goto end;
1841 }
1842 if (vstack_ax(stack)->type != REG_PTR) {
1843 ERR("Expecting pointer on top of stack\n");
1844 ret = -EINVAL;
1845 goto end;
1846 }
1847 vstack_ax(stack)->type = REG_DOUBLE;
1848 next_pc += sizeof(struct load_op);
1849 break;
1850 }
1851
1852 case BYTECODE_OP_GET_SYMBOL:
1853 case BYTECODE_OP_GET_SYMBOL_FIELD:
1854 {
1855 /* Pop 1, push 1 */
1856 if (!vstack_ax(stack)) {
1857 ERR("Empty stack\n");
1858 ret = -EINVAL;
1859 goto end;
1860 }
1861 if (vstack_ax(stack)->type != REG_PTR) {
1862 ERR("Expecting pointer on top of stack\n");
1863 ret = -EINVAL;
1864 goto end;
1865 }
1866 next_pc += sizeof(struct load_op) + sizeof(struct get_symbol);
1867 break;
1868 }
1869
1870 case BYTECODE_OP_GET_INDEX_U16:
1871 {
1872 /* Pop 1, push 1 */
1873 if (!vstack_ax(stack)) {
1874 ERR("Empty stack\n");
1875 ret = -EINVAL;
1876 goto end;
1877 }
1878 if (vstack_ax(stack)->type != REG_PTR) {
1879 ERR("Expecting pointer on top of stack\n");
1880 ret = -EINVAL;
1881 goto end;
1882 }
1883 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u16);
1884 break;
1885 }
1886
1887 case BYTECODE_OP_GET_INDEX_U64:
1888 {
1889 /* Pop 1, push 1 */
1890 if (!vstack_ax(stack)) {
1891 ERR("Empty stack\n");
1892 ret = -EINVAL;
1893 goto end;
1894 }
1895 if (vstack_ax(stack)->type != REG_PTR) {
1896 ERR("Expecting pointer on top of stack\n");
1897 ret = -EINVAL;
1898 goto end;
1899 }
1900 next_pc += sizeof(struct load_op) + sizeof(struct get_index_u64);
1901 break;
1902 }
1903
1904 }
1905 end:
1906 *_next_pc = next_pc;
1907 return ret;
1908 }
1909
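/*
 * Illustrative trace (hypothetical bytecode, made-up field name and
 * constant) of how the two passes above cooperate for a simple filter
 * comparing a payload field against a constant. For each instruction,
 * exec_insn() updates the virtual stack after
 * validate_instruction_all_contexts() has checked the current context:
 *
 *   GET_PAYLOAD_ROOT        push REG_PTR
 *   GET_SYMBOL "myfield"    keep REG_PTR (pointer chased at runtime)
 *   LOAD_FIELD              REG_PTR -> REG_UNKNOWN (type known after
 *                           specialization)
 *   LOAD_S64 42             push REG_S64
 *   EQ                      pop 2, push REG_S64
 *   RETURN                  accept: top of stack holds a valid result type
 */
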
1910 /*
1911 * Never called concurrently (hash seed is shared).
1912 */
1913 int lttng_bytecode_validate(struct bytecode_runtime *bytecode)
1914 {
1915 struct lttng_ust_lfht *merge_points;
1916 char *pc, *next_pc, *start_pc;
1917 int ret = -EINVAL;
1918 struct vstack stack;
1919
1920 vstack_init(&stack);
1921
1922 if (!lttng_hash_seed_ready) {
1923 lttng_hash_seed = time(NULL);
1924 lttng_hash_seed_ready = 1;
1925 }
1926 /*
1927  * Note: the merge_points hash table is used by a single thread and
1928  * is never concurrently resized. Therefore, we can use it without
1929  * holding the RCU read-side lock, and free nodes without using
1930  * call_rcu.
1931  */
1932 merge_points = lttng_ust_lfht_new(DEFAULT_NR_MERGE_POINTS,
1933 MIN_NR_BUCKETS, MAX_NR_BUCKETS,
1934 0, NULL);
1935 if (!merge_points) {
1936 ERR("Error allocating hash table for bytecode validation\n");
1937 return -ENOMEM;
1938 }
1939 start_pc = &bytecode->code[0];
1940 for (pc = next_pc = start_pc; pc - start_pc < bytecode->len;
1941 pc = next_pc) {
1942 ret = bytecode_validate_overflow(bytecode, start_pc, pc);
1943 if (ret != 0) {
1944 if (ret == -ERANGE)
1945 ERR("Bytecode overflow\n");
1946 goto end;
1947 }
1948 dbg_printf("Validating op %s (%u)\n",
1949 lttng_bytecode_print_op((unsigned int) *(bytecode_opcode_t *) pc),
1950 (unsigned int) *(bytecode_opcode_t *) pc);
1951
1952 /*
1953 * For each instruction, validate the current context
1954 * (traversal of entire execution flow), and validate
1955 * all merge points targeting this instruction.
1956 */
1957 ret = validate_instruction_all_contexts(bytecode, merge_points,
1958 &stack, start_pc, pc);
1959 if (ret)
1960 goto end;
1961 ret = exec_insn(bytecode, merge_points, &stack, &next_pc, pc);
1962 if (ret <= 0)
1963 goto end;
1964 }
1965 end:
1966 if (delete_all_nodes(merge_points)) {
1967 if (!ret) {
1968 ERR("Unexpected merge points\n");
1969 ret = -EINVAL;
1970 }
1971 }
1972 if (lttng_ust_lfht_destroy(merge_points)) {
1973 ERR("Error destroying hash table\n");
1974 }
1975 return ret;
1976 }