1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
4 */
5
6 #include <stdio.h>
7 #include <stdlib.h>
8
9 #define unlikely(cond) (cond)
10 #include <asm/insn.h>
11 #include "../../../arch/x86/lib/inat.c"
12 #include "../../../arch/x86/lib/insn.c"
13
14 #define CONFIG_64BIT 1
15 #include <asm/nops.h>
16
17 #include <asm/orc_types.h>
18 #include <objtool/check.h>
19 #include <objtool/elf.h>
20 #include <objtool/arch.h>
21 #include <objtool/warn.h>
22 #include <objtool/endianness.h>
23 #include <objtool/builtin.h>
24 #include <arch/elf.h>
25
/* Return true when @name is the compiler's ftrace call-site stub symbol. */
int arch_ftrace_match(char *name)
{
	return strcmp(name, "__fentry__") == 0;
}
30
is_x86_64(const struct elf * elf)31 static int is_x86_64(const struct elf *elf)
32 {
33 switch (elf->ehdr.e_machine) {
34 case EM_X86_64:
35 return 1;
36 case EM_386:
37 return 0;
38 default:
39 WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
40 return -1;
41 }
42 }
43
arch_callee_saved_reg(unsigned char reg)44 bool arch_callee_saved_reg(unsigned char reg)
45 {
46 switch (reg) {
47 case CFI_BP:
48 case CFI_BX:
49 case CFI_R12:
50 case CFI_R13:
51 case CFI_R14:
52 case CFI_R15:
53 return true;
54
55 case CFI_AX:
56 case CFI_CX:
57 case CFI_DX:
58 case CFI_SI:
59 case CFI_DI:
60 case CFI_SP:
61 case CFI_R8:
62 case CFI_R9:
63 case CFI_R10:
64 case CFI_R11:
65 case CFI_RA:
66 default:
67 return false;
68 }
69 }
70
/*
 * Convert a relocation addend to the destination offset.  A PC-relative
 * rel32 addend is biased by -4 (the size of the 32-bit field); undo it.
 */
unsigned long arch_dest_reloc_offset(int addend)
{
	return 4 + addend;
}
75
arch_jump_destination(struct instruction * insn)76 unsigned long arch_jump_destination(struct instruction *insn)
77 {
78 return insn->offset + insn->len + insn->immediate;
79 }
80
/*
 * All relocation types where P (the address of the target)
 * is included in the computation.
 */
bool arch_pc_relative_reloc(struct reloc *reloc)
{
	unsigned int type = reloc_type(reloc);

	return type == R_X86_64_PC8     ||
	       type == R_X86_64_PC16    ||
	       type == R_X86_64_PC32    ||
	       type == R_X86_64_PC64    ||
	       type == R_X86_64_PLT32   ||
	       type == R_X86_64_GOTPC32 ||
	       type == R_X86_64_GOTPCREL;
}
104
105 #define ADD_OP(op) \
106 if (!(op = calloc(1, sizeof(*op)))) \
107 return -1; \
108 else for (*ops_list = op, ops_list = &op->next; op; op = NULL)
109
110 /*
111 * Helpers to decode ModRM/SIB:
112 *
113 * r/m| AX CX DX BX | SP | BP | SI DI |
114 * | R8 R9 R10 R11 | R12 | R13 | R14 R15 |
115 * Mod+----------------+-----+-----+---------+
116 * 00 | [r/m] |[SIB]|[IP+]| [r/m] |
117 * 01 | [r/m + d8] |[S+d]| [r/m + d8] |
118 * 10 | [r/m + d32] |[S+D]| [r/m + d32] |
119 * 11 | r/ m |
120 */
121
122 #define mod_is_mem() (modrm_mod != 3)
123 #define mod_is_reg() (modrm_mod == 3)
124
125 #define is_RIP() ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
126 #define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())
127
128 /*
129 * Check the ModRM register. If there is a SIB byte then check with
130 * the SIB base register. But if the SIB base is 5 (i.e. CFI_BP) and
131 * ModRM mod is 0 then there is no base register.
132 */
133 #define rm_is(reg) (have_SIB() ? \
134 sib_base == (reg) && sib_index == CFI_SP && \
135 (sib_base != CFI_BP || modrm_mod != 0) : \
136 modrm_rm == (reg))
137
138 #define rm_is_mem(reg) (mod_is_mem() && !is_RIP() && rm_is(reg))
139 #define rm_is_reg(reg) (mod_is_reg() && modrm_rm == (reg))
140
has_notrack_prefix(struct insn * insn)141 static bool has_notrack_prefix(struct insn *insn)
142 {
143 int i;
144
145 for (i = 0; i < insn->prefixes.nbytes; i++) {
146 if (insn->prefixes.bytes[i] == 0x3e)
147 return true;
148 }
149
150 return false;
151 }
152
/*
 * Decode the single x86 instruction at @sec + @offset (at most @maxlen
 * bytes) and populate @insn: its byte length, its objtool classification
 * (INSN_*), its immediate, and the list of stack_ops describing how it
 * manipulates the stack frame (pushes, pops, moves and arithmetic on
 * %rsp/%rbp) for CFI tracking.
 *
 * Instructions objtool doesn't care about are left as INSN_OTHER with no
 * stack ops.  Returns 0 on success, -1 on decode failure.
 */
int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    struct instruction *insn)
{
	struct stack_op **ops_list = &insn->stack_ops;
	const struct elf *elf = file->elf;
	struct insn ins;
	int x86_64, ret;
	unsigned char op1, op2, op3, prefix,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&ins, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	insn->len = ins.length;
	insn->type = INSN_OTHER;

	/* VEX-encoded instructions are not classified further. */
	if (ins.vex_prefix.nbytes)
		return 0;

	prefix = ins.prefixes.bytes[0];

	op1 = ins.opcode.bytes[0];
	op2 = ins.opcode.bytes[1];
	op3 = ins.opcode.bytes[2];

	/* REX prefix: W selects 64-bit operands; R/X/B extend reg fields. */
	if (ins.rex_prefix.nbytes) {
		rex = ins.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	/* ModRM: reg/rm fields widened to 4 bits with REX.R / REX.B. */
	if (ins.modrm.nbytes) {
		modrm = ins.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm = X86_MODRM_RM(modrm) + 8*rex_b;
	}

	/* SIB: index/base widened with REX.X / REX.B; scale is unused here. */
	if (ins.sib.nbytes) {
		sib = ins.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base = X86_SIB_BASE(sib) + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		/* Jcc rel8 */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = ins.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			/* SUB is modeled as ADD of the negated immediate. */
			imm = -imm;
			fallthrough;
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = ins.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = ins.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		/* LEA requires a memory-form ModRM; mod==3 is malformed. */
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip RIP relative displacement */
		if (is_RIP())
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = ins.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		insn->type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:
		/* two/three-byte opcode space */

		if (op2 == 0x01) {

			/* 0F 01 /x group: disambiguate by last prefix + ModRM. */
			switch (insn_last_prefix_id(&ins)) {
			case INAT_PFX_REPE:
			case INAT_PFX_REPNE:
				if (modrm == 0xca)
					/* eretu/erets */
					insn->type = INSN_CONTEXT_SWITCH;
				break;
			default:
				if (modrm == 0xca)
					insn->type = INSN_CLAC;
				else if (modrm == 0xcb)
					insn->type = INSN_STAC;
				break;
			}
		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			/* Jcc rel32 */
			insn->type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			insn->type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			insn->type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			insn->type = INSN_NOP;

		} else if (op2 == 0x1e) {

			/* endbr64/endbr32: F3 0F 1E FA / F3 0F 1E FB */
			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
				insn->type = INSN_ENDBR;


		} else if (op2 == 0x38 && op3 == 0xf8) {
			if (ins.prefixes.nbytes == 1 &&
			    ins.prefixes.bytes[0] == 0xf2) {
				/* ENQCMD cannot be used in the kernel. */
				WARN("ENQCMD instruction at %s:%lx", sec->name,
				     offset);
			}

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xcc:
		/* int3 */
		insn->type = INSN_TRAP;
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		/* jmp rel32 / jmp rel8 */
		insn->type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		/* ret imm16 / ret */
		insn->type = INSN_RETURN;
		break;

	case 0xc7: /* mov imm, r/m */
		if (!opts.noinstr)
			break;

		/*
		 * Harvest .init.text paravirt patch sites: a 3-byte opcode
		 * plus a 4-byte displacement reloc (pv_ops slot) and a
		 * 4-byte immediate reloc (target function).
		 */
		if (ins.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
			struct reloc *immr, *disp;
			struct symbol *func;
			int idx;

			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);

			if (!immr || strcmp(immr->sym->name, "pv_ops"))
				break;

			idx = (reloc_addend(immr) + 8) / sizeof(void *);

			func = disp->sym;
			if (disp->sym->type == STT_SECTION)
				func = find_symbol_by_offset(disp->sym->sec, reloc_addend(disp));
			if (!func) {
				WARN("no func for pv_ops[]");
				return -1;
			}

			objtool_pv_add(file, idx, func);
		}

		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;

	case 0xca: /* retf */
	case 0xcb: /* retf */
		insn->type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe0: /* loopne */
	case 0xe1: /* loope */
	case 0xe2: /* loop */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe8:
		insn->type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		/* cld */
		insn->type = INSN_CLD;
		break;

	case 0xfd:
		/* std */
		insn->type = INSN_STD;
		break;

	case 0xff:
		/* group 5: call/jmp/push r/m, selected by ModRM.reg */
		if (modrm_reg == 2 || modrm_reg == 3) {

			/* call *r/m */
			insn->type = INSN_CALL_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 4) {

			/* jmp *r/m */
			insn->type = INSN_JUMP_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 5) {

			/* jmpf */
			insn->type = INSN_CONTEXT_SWITCH;

		} else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	insn->immediate = ins.immediate.nbytes ? ins.immediate.value : 0;

	return 0;
}
744
arch_initial_func_cfi_state(struct cfi_init_state * state)745 void arch_initial_func_cfi_state(struct cfi_init_state *state)
746 {
747 int i;
748
749 for (i = 0; i < CFI_NUM_REGS; i++) {
750 state->regs[i].base = CFI_UNDEFINED;
751 state->regs[i].offset = 0;
752 }
753
754 /* initial CFA (call frame address) */
755 state->cfa.base = CFI_SP;
756 state->cfa.offset = 8;
757
758 /* initial RA (return address) */
759 state->regs[CFI_RA].base = CFI_CFA;
760 state->regs[CFI_RA].offset = -8;
761 }
762
arch_nop_insn(int len)763 const char *arch_nop_insn(int len)
764 {
765 static const char nops[5][5] = {
766 { BYTES_NOP1 },
767 { BYTES_NOP2 },
768 { BYTES_NOP3 },
769 { BYTES_NOP4 },
770 { BYTES_NOP5 },
771 };
772
773 if (len < 1 || len > 5) {
774 WARN("invalid NOP size: %d\n", len);
775 return NULL;
776 }
777
778 return nops[len-1];
779 }
780
781 #define BYTE_RET 0xC3
782
arch_ret_insn(int len)783 const char *arch_ret_insn(int len)
784 {
785 static const char ret[5][5] = {
786 { BYTE_RET },
787 { BYTE_RET, 0xcc },
788 { BYTE_RET, 0xcc, BYTES_NOP1 },
789 { BYTE_RET, 0xcc, BYTES_NOP2 },
790 { BYTE_RET, 0xcc, BYTES_NOP3 },
791 };
792
793 if (len < 1 || len > 5) {
794 WARN("invalid RET size: %d\n", len);
795 return NULL;
796 }
797
798 return ret[len-1];
799 }
800
/*
 * Translate an ORC register encoding into the corresponding CFI base
 * register.  Returns 0 and stores the result in *base, or -1 (leaving
 * *base untouched) for an unknown encoding.
 */
int arch_decode_hint_reg(u8 sp_reg, int *base)
{
	static const struct {
		u8 orc;
		int cfi;
	} map[] = {
		{ ORC_REG_UNDEFINED,	CFI_UNDEFINED },
		{ ORC_REG_SP,		CFI_SP },
		{ ORC_REG_BP,		CFI_BP },
		{ ORC_REG_SP_INDIRECT,	CFI_SP_INDIRECT },
		{ ORC_REG_R10,		CFI_R10 },
		{ ORC_REG_R13,		CFI_R13 },
		{ ORC_REG_DI,		CFI_DI },
		{ ORC_REG_DX,		CFI_DX },
	};
	unsigned int i;

	for (i = 0; i < sizeof(map) / sizeof(map[0]); i++) {
		if (map[i].orc == sp_reg) {
			*base = map[i].cfi;
			return 0;
		}
	}

	return -1;
}
834
arch_is_retpoline(struct symbol * sym)835 bool arch_is_retpoline(struct symbol *sym)
836 {
837 return !strncmp(sym->name, "__x86_indirect_", 15);
838 }
839
arch_is_rethunk(struct symbol * sym)840 bool arch_is_rethunk(struct symbol *sym)
841 {
842 return !strcmp(sym->name, "__x86_return_thunk");
843 }
844
arch_is_embedded_insn(struct symbol * sym)845 bool arch_is_embedded_insn(struct symbol *sym)
846 {
847 return !strcmp(sym->name, "retbleed_return_thunk") ||
848 !strcmp(sym->name, "srso_safe_ret");
849 }
850