Implement per-context filtering
[lttng-ust.git] / liblttng-ust / lttng-filter-interpreter.c
/*
 * lttng-filter-interpreter.c
 *
 * LTTng UST filter interpreter.
 *
 * Copyright (C) 2010-2012 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; only
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "lttng-filter.h"

/*
 * parse_char() return values:
 * -1: wildcard found.
 * -2: unknown escape char.
 *  0: normal char.
 */
static
int parse_char(const char **p)
{
	switch (**p) {
	case '\\':
		(*p)++;
		switch (**p) {
		case '\\':
		case '*':
			return 0;
		default:
			return -2;
		}
	case '*':
		return -1;
	default:
		return 0;
	}
}
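
/*
 * Illustration (added commentary, not in the original file): for the
 * literal "a\*b", parse_char() skips the backslash and returns 0 on the
 * escaped '*', so it is compared as a plain character; for "a*b" it
 * returns -1 on the unescaped '*', which stack_strcmp() below treats as
 * a wildcard match.
 */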

static
int stack_strcmp(struct estack *stack, int top, const char *cmp_type)
{
	const char *p = estack_bx(stack, top)->u.s.str, *q = estack_ax(stack, top)->u.s.str;
	int ret;
	int diff;

	for (;;) {
		int escaped_r0 = 0;

		if (unlikely(p - estack_bx(stack, top)->u.s.str > estack_bx(stack, top)->u.s.seq_len || *p == '\0')) {
			if (q - estack_ax(stack, top)->u.s.str > estack_ax(stack, top)->u.s.seq_len || *q == '\0') {
				return 0;
			} else {
				if (estack_ax(stack, top)->u.s.literal) {
					ret = parse_char(&q);
					if (ret == -1)
						return 0;
				}
				return -1;
			}
		}
		if (unlikely(q - estack_ax(stack, top)->u.s.str > estack_ax(stack, top)->u.s.seq_len || *q == '\0')) {
			if (p - estack_bx(stack, top)->u.s.str > estack_bx(stack, top)->u.s.seq_len || *p == '\0') {
				return 0;
			} else {
				if (estack_bx(stack, top)->u.s.literal) {
					ret = parse_char(&p);
					if (ret == -1)
						return 0;
				}
				return 1;
			}
		}
		if (estack_bx(stack, top)->u.s.literal) {
			ret = parse_char(&p);
			if (ret == -1) {
				return 0;
			} else if (ret == -2) {
				escaped_r0 = 1;
			}
			/* else compare both chars */
		}
		if (estack_ax(stack, top)->u.s.literal) {
			ret = parse_char(&q);
			if (ret == -1) {
				return 0;
			} else if (ret == -2) {
				if (!escaped_r0)
					return -1;
			} else {
				if (escaped_r0)
					return 1;
			}
		} else {
			if (escaped_r0)
				return 1;
		}
		diff = *p - *q;
		if (diff != 0)
			break;
		p++;
		q++;
	}
	return diff;
}
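
/*
 * Note (added commentary, not in the original file): stack_strcmp()
 * compares the two top-of-stack string operands.  seq_len is UINT_MAX
 * for NUL-terminated strings and carries the explicit length for
 * sequences, and the "literal" flag marks operands loaded from the
 * bytecode itself (e.g. a filter expression such as procname == "net*"),
 * which are the only operands where '*' acts as a wildcard.
 */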

uint64_t lttng_filter_false(void *filter_data,
		const char *filter_stack_data)
{
	return 0;
}
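
/*
 * Added note: lttng_filter_false() is presumably installed as the filter
 * callback when no usable bytecode is attached (for instance when
 * bytecode linking fails), so that such events are simply discarded.
 */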

#ifdef INTERPRETER_USE_SWITCH

/*
 * Fallback for compilers that do not support taking address of labels.
 */

#define START_OP						\
	start_pc = &bytecode->data[0];				\
	for (pc = next_pc = start_pc; pc - start_pc < bytecode->len; \
			pc = next_pc) {				\
		dbg_printf("Executing op %s (%u)\n",		\
			print_op((unsigned int) *(filter_opcode_t *) pc), \
			(unsigned int) *(filter_opcode_t *) pc); \
		switch (*(filter_opcode_t *) pc) {

#define OP(name)	case name

#define PO		break

#define END_OP		}					\
	}

#else

/*
 * Dispatch-table based interpreter.
 */

#define START_OP						\
	start_pc = &bytecode->data[0];				\
	pc = next_pc = start_pc;				\
	if (unlikely(pc - start_pc >= bytecode->len))		\
		goto end;					\
	goto *dispatch[*(filter_opcode_t *) pc];

#define OP(name)						\
LABEL_##name

#define PO							\
	pc = next_pc;						\
	goto *dispatch[*(filter_opcode_t *) pc];

#define END_OP

#endif
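
/*
 * Added illustration of how the two dispatch modes expand (derived from
 * the macros above, not part of the original file):
 *
 *   switch-based:    OP(FILTER_OP_RETURN):  ->  case FILTER_OP_RETURN:
 *                    PO;                    ->  break;  (back to the for loop)
 *
 *   computed goto:   OP(FILTER_OP_RETURN):  ->  LABEL_FILTER_OP_RETURN:
 *                    PO;                    ->  pc = next_pc;
 *                                               goto *dispatch[*(filter_opcode_t *) pc];
 */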

/*
 * Return 0 (discard), or raise the 0x1 flag (log event).
 * Currently, other flags are kept for future extensions and have no
 * effect.
 */
uint64_t lttng_filter_interpret_bytecode(void *filter_data,
		const char *filter_stack_data)
{
	struct bytecode_runtime *bytecode = filter_data;
	struct lttng_ctx *ctx = bytecode->p.bc->enabler->chan->ctx;
	void *pc, *next_pc, *start_pc;
	int ret = -EINVAL;
	uint64_t retval = 0;
	struct estack _stack;
	struct estack *stack = &_stack;
	register int64_t ax = 0, bx = 0;
	register int top = FILTER_STACK_EMPTY;
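	/*
	 * Added note: ax and bx shadow the integer value of the two
	 * top-of-stack entries so the compiler can keep them in registers;
	 * the estack_push()/estack_pop() macros are expected to keep them
	 * in sync with the underlying stack entries.
	 */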
#ifndef INTERPRETER_USE_SWITCH
	static void *dispatch[NR_FILTER_OPS] = {
		[ FILTER_OP_UNKNOWN ] = &&LABEL_FILTER_OP_UNKNOWN,

		[ FILTER_OP_RETURN ] = &&LABEL_FILTER_OP_RETURN,

		/* binary */
		[ FILTER_OP_MUL ] = &&LABEL_FILTER_OP_MUL,
		[ FILTER_OP_DIV ] = &&LABEL_FILTER_OP_DIV,
		[ FILTER_OP_MOD ] = &&LABEL_FILTER_OP_MOD,
		[ FILTER_OP_PLUS ] = &&LABEL_FILTER_OP_PLUS,
		[ FILTER_OP_MINUS ] = &&LABEL_FILTER_OP_MINUS,
		[ FILTER_OP_RSHIFT ] = &&LABEL_FILTER_OP_RSHIFT,
		[ FILTER_OP_LSHIFT ] = &&LABEL_FILTER_OP_LSHIFT,
		[ FILTER_OP_BIN_AND ] = &&LABEL_FILTER_OP_BIN_AND,
		[ FILTER_OP_BIN_OR ] = &&LABEL_FILTER_OP_BIN_OR,
		[ FILTER_OP_BIN_XOR ] = &&LABEL_FILTER_OP_BIN_XOR,

		/* binary comparators */
		[ FILTER_OP_EQ ] = &&LABEL_FILTER_OP_EQ,
		[ FILTER_OP_NE ] = &&LABEL_FILTER_OP_NE,
		[ FILTER_OP_GT ] = &&LABEL_FILTER_OP_GT,
		[ FILTER_OP_LT ] = &&LABEL_FILTER_OP_LT,
		[ FILTER_OP_GE ] = &&LABEL_FILTER_OP_GE,
		[ FILTER_OP_LE ] = &&LABEL_FILTER_OP_LE,

		/* string binary comparator */
		[ FILTER_OP_EQ_STRING ] = &&LABEL_FILTER_OP_EQ_STRING,
		[ FILTER_OP_NE_STRING ] = &&LABEL_FILTER_OP_NE_STRING,
		[ FILTER_OP_GT_STRING ] = &&LABEL_FILTER_OP_GT_STRING,
		[ FILTER_OP_LT_STRING ] = &&LABEL_FILTER_OP_LT_STRING,
		[ FILTER_OP_GE_STRING ] = &&LABEL_FILTER_OP_GE_STRING,
		[ FILTER_OP_LE_STRING ] = &&LABEL_FILTER_OP_LE_STRING,

		/* s64 binary comparator */
		[ FILTER_OP_EQ_S64 ] = &&LABEL_FILTER_OP_EQ_S64,
		[ FILTER_OP_NE_S64 ] = &&LABEL_FILTER_OP_NE_S64,
		[ FILTER_OP_GT_S64 ] = &&LABEL_FILTER_OP_GT_S64,
		[ FILTER_OP_LT_S64 ] = &&LABEL_FILTER_OP_LT_S64,
		[ FILTER_OP_GE_S64 ] = &&LABEL_FILTER_OP_GE_S64,
		[ FILTER_OP_LE_S64 ] = &&LABEL_FILTER_OP_LE_S64,

		/* double binary comparator */
		[ FILTER_OP_EQ_DOUBLE ] = &&LABEL_FILTER_OP_EQ_DOUBLE,
		[ FILTER_OP_NE_DOUBLE ] = &&LABEL_FILTER_OP_NE_DOUBLE,
		[ FILTER_OP_GT_DOUBLE ] = &&LABEL_FILTER_OP_GT_DOUBLE,
		[ FILTER_OP_LT_DOUBLE ] = &&LABEL_FILTER_OP_LT_DOUBLE,
		[ FILTER_OP_GE_DOUBLE ] = &&LABEL_FILTER_OP_GE_DOUBLE,
		[ FILTER_OP_LE_DOUBLE ] = &&LABEL_FILTER_OP_LE_DOUBLE,

		/* Mixed S64-double binary comparators */
		[ FILTER_OP_EQ_DOUBLE_S64 ] = &&LABEL_FILTER_OP_EQ_DOUBLE_S64,
		[ FILTER_OP_NE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_NE_DOUBLE_S64,
		[ FILTER_OP_GT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GT_DOUBLE_S64,
		[ FILTER_OP_LT_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LT_DOUBLE_S64,
		[ FILTER_OP_GE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_GE_DOUBLE_S64,
		[ FILTER_OP_LE_DOUBLE_S64 ] = &&LABEL_FILTER_OP_LE_DOUBLE_S64,

		[ FILTER_OP_EQ_S64_DOUBLE ] = &&LABEL_FILTER_OP_EQ_S64_DOUBLE,
		[ FILTER_OP_NE_S64_DOUBLE ] = &&LABEL_FILTER_OP_NE_S64_DOUBLE,
		[ FILTER_OP_GT_S64_DOUBLE ] = &&LABEL_FILTER_OP_GT_S64_DOUBLE,
		[ FILTER_OP_LT_S64_DOUBLE ] = &&LABEL_FILTER_OP_LT_S64_DOUBLE,
		[ FILTER_OP_GE_S64_DOUBLE ] = &&LABEL_FILTER_OP_GE_S64_DOUBLE,
		[ FILTER_OP_LE_S64_DOUBLE ] = &&LABEL_FILTER_OP_LE_S64_DOUBLE,

		/* unary */
		[ FILTER_OP_UNARY_PLUS ] = &&LABEL_FILTER_OP_UNARY_PLUS,
		[ FILTER_OP_UNARY_MINUS ] = &&LABEL_FILTER_OP_UNARY_MINUS,
		[ FILTER_OP_UNARY_NOT ] = &&LABEL_FILTER_OP_UNARY_NOT,
		[ FILTER_OP_UNARY_PLUS_S64 ] = &&LABEL_FILTER_OP_UNARY_PLUS_S64,
		[ FILTER_OP_UNARY_MINUS_S64 ] = &&LABEL_FILTER_OP_UNARY_MINUS_S64,
		[ FILTER_OP_UNARY_NOT_S64 ] = &&LABEL_FILTER_OP_UNARY_NOT_S64,
		[ FILTER_OP_UNARY_PLUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_PLUS_DOUBLE,
		[ FILTER_OP_UNARY_MINUS_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_MINUS_DOUBLE,
		[ FILTER_OP_UNARY_NOT_DOUBLE ] = &&LABEL_FILTER_OP_UNARY_NOT_DOUBLE,

		/* logical */
		[ FILTER_OP_AND ] = &&LABEL_FILTER_OP_AND,
		[ FILTER_OP_OR ] = &&LABEL_FILTER_OP_OR,

		/* load field ref */
		[ FILTER_OP_LOAD_FIELD_REF ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF,
		[ FILTER_OP_LOAD_FIELD_REF_STRING ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_STRING,
		[ FILTER_OP_LOAD_FIELD_REF_SEQUENCE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_SEQUENCE,
		[ FILTER_OP_LOAD_FIELD_REF_S64 ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_S64,
		[ FILTER_OP_LOAD_FIELD_REF_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_FIELD_REF_DOUBLE,

		/* load from immediate operand */
		[ FILTER_OP_LOAD_STRING ] = &&LABEL_FILTER_OP_LOAD_STRING,
		[ FILTER_OP_LOAD_S64 ] = &&LABEL_FILTER_OP_LOAD_S64,
		[ FILTER_OP_LOAD_DOUBLE ] = &&LABEL_FILTER_OP_LOAD_DOUBLE,

		/* cast */
		[ FILTER_OP_CAST_TO_S64 ] = &&LABEL_FILTER_OP_CAST_TO_S64,
		[ FILTER_OP_CAST_DOUBLE_TO_S64 ] = &&LABEL_FILTER_OP_CAST_DOUBLE_TO_S64,
		[ FILTER_OP_CAST_NOP ] = &&LABEL_FILTER_OP_CAST_NOP,

		/* get context ref */
		[ FILTER_OP_GET_CONTEXT_REF ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF,
		[ FILTER_OP_GET_CONTEXT_REF_STRING ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_STRING,
		[ FILTER_OP_GET_CONTEXT_REF_S64 ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_S64,
		[ FILTER_OP_GET_CONTEXT_REF_DOUBLE ] = &&LABEL_FILTER_OP_GET_CONTEXT_REF_DOUBLE,
	};
#endif /* #ifndef INTERPRETER_USE_SWITCH */

	START_OP

		OP(FILTER_OP_UNKNOWN):
		OP(FILTER_OP_LOAD_FIELD_REF):
		OP(FILTER_OP_GET_CONTEXT_REF):
#ifdef INTERPRETER_USE_SWITCH
		default:
#endif /* INTERPRETER_USE_SWITCH */
			ERR("unknown bytecode op %u\n",
				(unsigned int) *(filter_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		OP(FILTER_OP_RETURN):
			/* LTTNG_FILTER_DISCARD or LTTNG_FILTER_RECORD_FLAG */
			retval = !!estack_ax_v;
			ret = 0;
			goto end;

		/* binary */
		OP(FILTER_OP_MUL):
		OP(FILTER_OP_DIV):
		OP(FILTER_OP_MOD):
		OP(FILTER_OP_PLUS):
		OP(FILTER_OP_MINUS):
		OP(FILTER_OP_RSHIFT):
		OP(FILTER_OP_LSHIFT):
		OP(FILTER_OP_BIN_AND):
		OP(FILTER_OP_BIN_OR):
		OP(FILTER_OP_BIN_XOR):
			ERR("unsupported bytecode op %u\n",
				(unsigned int) *(filter_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		OP(FILTER_OP_EQ):
		OP(FILTER_OP_NE):
		OP(FILTER_OP_GT):
		OP(FILTER_OP_LT):
		OP(FILTER_OP_GE):
		OP(FILTER_OP_LE):
			ERR("unsupported non-specialized bytecode op %u\n",
				(unsigned int) *(filter_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		OP(FILTER_OP_EQ_STRING):
		{
			int res;

			res = (stack_strcmp(stack, top, "==") == 0);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_NE_STRING):
		{
			int res;

			res = (stack_strcmp(stack, top, "!=") != 0);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_GT_STRING):
		{
			int res;

			res = (stack_strcmp(stack, top, ">") > 0);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_LT_STRING):
		{
			int res;

			res = (stack_strcmp(stack, top, "<") < 0);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_GE_STRING):
		{
			int res;

			res = (stack_strcmp(stack, top, ">=") >= 0);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_LE_STRING):
		{
			int res;

			res = (stack_strcmp(stack, top, "<=") <= 0);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}

		OP(FILTER_OP_EQ_S64):
		{
			int res;

			res = (estack_bx_v == estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_NE_S64):
		{
			int res;

			res = (estack_bx_v != estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_GT_S64):
		{
			int res;

			res = (estack_bx_v > estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_LT_S64):
		{
			int res;

			res = (estack_bx_v < estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_GE_S64):
		{
			int res;

			res = (estack_bx_v >= estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_LE_S64):
		{
			int res;

			res = (estack_bx_v <= estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}

		OP(FILTER_OP_EQ_DOUBLE):
		{
			int res;

			res = (estack_bx(stack, top)->u.d == estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_NE_DOUBLE):
		{
			int res;

			res = (estack_bx(stack, top)->u.d != estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_GT_DOUBLE):
		{
			int res;

			res = (estack_bx(stack, top)->u.d > estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_LT_DOUBLE):
		{
			int res;

			res = (estack_bx(stack, top)->u.d < estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_GE_DOUBLE):
		{
			int res;

			res = (estack_bx(stack, top)->u.d >= estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_LE_DOUBLE):
		{
			int res;

			res = (estack_bx(stack, top)->u.d <= estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}

		/* Mixed S64-double binary comparators */
		OP(FILTER_OP_EQ_DOUBLE_S64):
		{
			int res;

			res = (estack_bx(stack, top)->u.d == estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_NE_DOUBLE_S64):
		{
			int res;

			res = (estack_bx(stack, top)->u.d != estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_GT_DOUBLE_S64):
		{
			int res;

			res = (estack_bx(stack, top)->u.d > estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_LT_DOUBLE_S64):
		{
			int res;

			res = (estack_bx(stack, top)->u.d < estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_GE_DOUBLE_S64):
		{
			int res;

			res = (estack_bx(stack, top)->u.d >= estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_LE_DOUBLE_S64):
		{
			int res;

			res = (estack_bx(stack, top)->u.d <= estack_ax_v);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}

		OP(FILTER_OP_EQ_S64_DOUBLE):
		{
			int res;

			res = (estack_bx_v == estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_NE_S64_DOUBLE):
		{
			int res;

			res = (estack_bx_v != estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_GT_S64_DOUBLE):
		{
			int res;

			res = (estack_bx_v > estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_LT_S64_DOUBLE):
		{
			int res;

			res = (estack_bx_v < estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_GE_S64_DOUBLE):
		{
			int res;

			res = (estack_bx_v >= estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}
		OP(FILTER_OP_LE_S64_DOUBLE):
		{
			int res;

			res = (estack_bx_v <= estack_ax(stack, top)->u.d);
			estack_pop(stack, top, ax, bx);
			estack_ax_v = res;
			next_pc += sizeof(struct binary_op);
			PO;
		}

		/* unary */
		OP(FILTER_OP_UNARY_PLUS):
		OP(FILTER_OP_UNARY_MINUS):
		OP(FILTER_OP_UNARY_NOT):
			ERR("unsupported non-specialized bytecode op %u\n",
				(unsigned int) *(filter_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		OP(FILTER_OP_UNARY_PLUS_S64):
		OP(FILTER_OP_UNARY_PLUS_DOUBLE):
		{
			next_pc += sizeof(struct unary_op);
			PO;
		}
		OP(FILTER_OP_UNARY_MINUS_S64):
		{
			estack_ax_v = -estack_ax_v;
			next_pc += sizeof(struct unary_op);
			PO;
		}
		OP(FILTER_OP_UNARY_MINUS_DOUBLE):
		{
			estack_ax(stack, top)->u.d = -estack_ax(stack, top)->u.d;
			next_pc += sizeof(struct unary_op);
			PO;
		}
		OP(FILTER_OP_UNARY_NOT_S64):
		{
			estack_ax_v = !estack_ax_v;
			next_pc += sizeof(struct unary_op);
			PO;
		}
		OP(FILTER_OP_UNARY_NOT_DOUBLE):
		{
			estack_ax(stack, top)->u.d = !estack_ax(stack, top)->u.d;
			next_pc += sizeof(struct unary_op);
			PO;
		}

		/* logical */
		OP(FILTER_OP_AND):
		{
			struct logical_op *insn = (struct logical_op *) pc;

			/* If AX is 0, skip and evaluate to 0 */
			if (unlikely(estack_ax_v == 0)) {
				dbg_printf("Jumping to bytecode offset %u\n",
					(unsigned int) insn->skip_offset);
				next_pc = start_pc + insn->skip_offset;
			} else {
				/* Pop 1 when jump not taken */
				estack_pop(stack, top, ax, bx);
				next_pc += sizeof(struct logical_op);
			}
			PO;
		}
		OP(FILTER_OP_OR):
		{
			struct logical_op *insn = (struct logical_op *) pc;

			/* If AX is nonzero, skip and evaluate to 1 */
			if (unlikely(estack_ax_v != 0)) {
				estack_ax_v = 1;
				dbg_printf("Jumping to bytecode offset %u\n",
					(unsigned int) insn->skip_offset);
				next_pc = start_pc + insn->skip_offset;
			} else {
				/* Pop 1 when jump not taken */
				estack_pop(stack, top, ax, bx);
				next_pc += sizeof(struct logical_op);
			}
			PO;
		}
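
		/*
		 * Added note: AND/OR implement short-circuit evaluation.
		 * The skip_offset stored in the bytecode points past the
		 * right-hand operand, so when the result is already known
		 * from AX the interpreter jumps there directly; otherwise
		 * the left operand is popped and the right-hand operand is
		 * evaluated into AX as usual.
		 */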

		/* load field ref */
		OP(FILTER_OP_LOAD_FIELD_REF_STRING):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;

			dbg_printf("load field ref offset %u type string\n",
				ref->offset);
			estack_push(stack, top, ax, bx);
			estack_ax(stack, top)->u.s.str =
				*(const char * const *) &filter_stack_data[ref->offset];
			if (unlikely(!estack_ax(stack, top)->u.s.str)) {
				dbg_printf("Filter warning: loading a NULL string.\n");
				ret = -EINVAL;
				goto end;
			}
			estack_ax(stack, top)->u.s.seq_len = UINT_MAX;
			estack_ax(stack, top)->u.s.literal = 0;
			dbg_printf("ref load string %s\n", estack_ax(stack, top)->u.s.str);
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		OP(FILTER_OP_LOAD_FIELD_REF_SEQUENCE):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;

			dbg_printf("load field ref offset %u type sequence\n",
				ref->offset);
			estack_push(stack, top, ax, bx);
			estack_ax(stack, top)->u.s.seq_len =
				*(unsigned long *) &filter_stack_data[ref->offset];
			estack_ax(stack, top)->u.s.str =
				*(const char **) (&filter_stack_data[ref->offset
							+ sizeof(unsigned long)]);
			if (unlikely(!estack_ax(stack, top)->u.s.str)) {
				dbg_printf("Filter warning: loading a NULL sequence.\n");
				ret = -EINVAL;
				goto end;
			}
			estack_ax(stack, top)->u.s.literal = 0;
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		OP(FILTER_OP_LOAD_FIELD_REF_S64):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;

			dbg_printf("load field ref offset %u type s64\n",
				ref->offset);
			estack_push(stack, top, ax, bx);
			estack_ax_v =
				((struct literal_numeric *) &filter_stack_data[ref->offset])->v;
			dbg_printf("ref load s64 %" PRIi64 "\n", estack_ax_v);
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		OP(FILTER_OP_LOAD_FIELD_REF_DOUBLE):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;

			dbg_printf("load field ref offset %u type double\n",
				ref->offset);
			estack_push(stack, top, ax, bx);
			memcpy(&estack_ax(stack, top)->u.d, &filter_stack_data[ref->offset],
				sizeof(struct literal_double));
			dbg_printf("ref load double %g\n", estack_ax(stack, top)->u.d);
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		/* load from immediate operand */
		OP(FILTER_OP_LOAD_STRING):
		{
			struct load_op *insn = (struct load_op *) pc;

			dbg_printf("load string %s\n", insn->data);
			estack_push(stack, top, ax, bx);
			estack_ax(stack, top)->u.s.str = insn->data;
			estack_ax(stack, top)->u.s.seq_len = UINT_MAX;
			estack_ax(stack, top)->u.s.literal = 1;
			next_pc += sizeof(struct load_op) + strlen(insn->data) + 1;
			PO;
		}

		OP(FILTER_OP_LOAD_S64):
		{
			struct load_op *insn = (struct load_op *) pc;

			estack_push(stack, top, ax, bx);
			estack_ax_v = ((struct literal_numeric *) insn->data)->v;
			dbg_printf("load s64 %" PRIi64 "\n", estack_ax_v);
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_numeric);
			PO;
		}

		OP(FILTER_OP_LOAD_DOUBLE):
		{
			struct load_op *insn = (struct load_op *) pc;

			estack_push(stack, top, ax, bx);
			memcpy(&estack_ax(stack, top)->u.d, insn->data,
				sizeof(struct literal_double));
			dbg_printf("load double %g\n", estack_ax(stack, top)->u.d);
			next_pc += sizeof(struct load_op)
					+ sizeof(struct literal_double);
			PO;
		}

		/* cast */
		OP(FILTER_OP_CAST_TO_S64):
			ERR("unsupported non-specialized bytecode op %u\n",
				(unsigned int) *(filter_opcode_t *) pc);
			ret = -EINVAL;
			goto end;

		OP(FILTER_OP_CAST_DOUBLE_TO_S64):
		{
			estack_ax_v = (int64_t) estack_ax(stack, top)->u.d;
			next_pc += sizeof(struct cast_op);
			PO;
		}

		OP(FILTER_OP_CAST_NOP):
		{
			next_pc += sizeof(struct cast_op);
			PO;
		}

		/* get context ref */
		OP(FILTER_OP_GET_CONTEXT_REF_STRING):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;
			struct lttng_ctx_field *ctx_field;
			union lttng_ctx_value v;

			dbg_printf("get context ref offset %u type string\n",
				ref->offset);
			ctx_field = &ctx->fields[ref->offset];
			ctx_field->get_value(ctx_field, &v);
			estack_push(stack, top, ax, bx);
			estack_ax(stack, top)->u.s.str = v.str;
			if (unlikely(!estack_ax(stack, top)->u.s.str)) {
				dbg_printf("Filter warning: loading a NULL string.\n");
				ret = -EINVAL;
				goto end;
			}
			estack_ax(stack, top)->u.s.seq_len = UINT_MAX;
			estack_ax(stack, top)->u.s.literal = 0;
			dbg_printf("ref get context string %s\n", estack_ax(stack, top)->u.s.str);
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		OP(FILTER_OP_GET_CONTEXT_REF_S64):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;
			struct lttng_ctx_field *ctx_field;
			union lttng_ctx_value v;

			dbg_printf("get context ref offset %u type s64\n",
				ref->offset);
			ctx_field = &ctx->fields[ref->offset];
			ctx_field->get_value(ctx_field, &v);
			estack_push(stack, top, ax, bx);
			estack_ax_v = v.s64;
			dbg_printf("ref get context s64 %" PRIi64 "\n", estack_ax_v);
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}

		OP(FILTER_OP_GET_CONTEXT_REF_DOUBLE):
		{
			struct load_op *insn = (struct load_op *) pc;
			struct field_ref *ref = (struct field_ref *) insn->data;
			struct lttng_ctx_field *ctx_field;
			union lttng_ctx_value v;

			dbg_printf("get context ref offset %u type double\n",
				ref->offset);
			ctx_field = &ctx->fields[ref->offset];
			ctx_field->get_value(ctx_field, &v);
			estack_push(stack, top, ax, bx);
			memcpy(&estack_ax(stack, top)->u.d, &v.d, sizeof(struct literal_double));
			dbg_printf("ref get context double %g\n", estack_ax(stack, top)->u.d);
			next_pc += sizeof(struct load_op) + sizeof(struct field_ref);
			PO;
		}
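
		/*
		 * Added note on per-context filtering: unlike the field refs
		 * above, which read from the filter stack data serialized
		 * with the event payload, the GET_CONTEXT_REF_* ops look the
		 * field up in the channel's lttng_ctx
		 * (ctx->fields[ref->offset]) and call its get_value()
		 * callback at filter evaluation time, so a filter on a
		 * context field such as vtid is resolved per event.
		 */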

	END_OP
end:
	/* return 0 (discard) on error */
	if (ret)
		return 0;
	return retval;
}

#undef START_OP
#undef OP
#undef PO
#undef END_OP