Lines matching refs:__xlated
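Each entry is a source line number followed by the matching line. __xlated() is an assertion macro from the BPF selftests' test_loader framework (tools/testing/selftests/bpf/progs/bpf_misc.h): each call records one instruction that must appear, in order, in the verifier-translated ("xlated") dump of the test program. A bare "..." entry skips over any number of intervening instructions, and a truncated entry such as "5: w0 = " appears to match only the start of an instruction, leaving a varying immediate unchecked. The recurring spill/fill pairs around "r0 = &(void __percpu *)(r0)", the inlined form of bpf_get_smp_processor_id(), suggest this listing comes from the bpf_fastcall verifier tests, though that attribution is an inference from the patterns, not something the listing states. A minimal sketch of how such a test is written, assuming the usual verifier_*.c conventions from bpf_misc.h (__naked, __imm, __clobber_all); the test name, instruction indices, and expected instructions are illustrative, not copied from the file behind this listing:

    #include <linux/bpf.h>
    #include <bpf/bpf_helpers.h>
    #include "bpf_misc.h"

    SEC("raw_tp")
    __arch_x86_64
    /* Assumes an x86_64 kernel that inlines bpf_get_smp_processor_id()
     * as a per-cpu address load plus a 32-bit read; the leading "..."
     * skips the per-cpu offset load, whose immediate varies.
     */
    __xlated("...")
    __xlated("1: r0 = &(void __percpu *)(r0)")
    __xlated("2: r0 = *(u32 *)(r0 +0)")
    __xlated("3: exit")
    __success
    __naked void inlined_processor_id(void)
    {
        asm volatile (
        "call %[bpf_get_smp_processor_id];"
        "exit;"
        :
        : __imm(bpf_get_smp_processor_id)
        : __clobber_all);
    }

    char _license[] SEC("license") = "GPL";
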
14 __xlated("4: r5 = 5")
15 __xlated("5: w0 = ")
16 __xlated("6: r0 = &(void __percpu *)(r0)")
17 __xlated("7: r0 = *(u32 *)(r0 +0)")
18 __xlated("8: exit")
64 __xlated("0: r1 = 1")
65 __xlated("1: call bpf_get_smp_processor_id")
66 __xlated("2: exit")
83 __xlated("1: r0 = &(void __percpu *)(r0)")
84 __xlated("...")
85 __xlated("3: exit")
100 __xlated("1: *(u64 *)(r10 -16) = r1")
101 __xlated("...")
102 __xlated("3: r0 = &(void __percpu *)(r0)")
103 __xlated("...")
104 __xlated("5: r2 = *(u64 *)(r10 -16)")
121 __xlated("1: *(u64 *)(r10 -16) = r6")
122 __xlated("...")
123 __xlated("3: r0 = &(void __percpu *)(r0)")
124 __xlated("...")
125 __xlated("5: r6 = *(u64 *)(r10 -16)")
142 __xlated("1: *(u64 *)(r10 -16) = r0")
143 __xlated("...")
144 __xlated("3: r0 = &(void __percpu *)(r0)")
145 __xlated("...")
146 __xlated("5: r0 = *(u64 *)(r10 -16)")
163 __xlated("2: *(u64 *)(r2 -16) = r1")
164 __xlated("...")
165 __xlated("4: r0 = &(void __percpu *)(r0)")
166 __xlated("...")
167 __xlated("6: r1 = *(u64 *)(r10 -16)")
185 __xlated("1: *(u64 *)(r10 -16) = r1")
186 __xlated("...")
187 __xlated("3: r0 = &(void __percpu *)(r0)")
188 __xlated("...")
189 __xlated("5: r2 = 1")
207 __xlated("2: *(u64 *)(r10 -16) = r1")
208 __xlated("...")
209 __xlated("4: r0 = &(void __percpu *)(r0)")
210 __xlated("...")
211 __xlated("6: r1 = *(u64 *)(r10 -8)")
229 __xlated("1: *(u32 *)(r10 -4) = r1")
230 __xlated("...")
231 __xlated("3: r0 = &(void __percpu *)(r0)")
232 __xlated("...")
233 __xlated("5: r1 = *(u32 *)(r10 -4)")
250 __xlated("1: *(u32 *)(r10 -16) = r1")
251 __xlated("...")
252 __xlated("3: r0 = &(void __percpu *)(r0)")
253 __xlated("...")
254 __xlated("5: r1 = *(u32 *)(r10 -16)")
271 __xlated("2: *(u32 *)(r10 -8) = r1")
272 __xlated("...")
273 __xlated("4: r0 = &(void __percpu *)(r0)")
274 __xlated("...")
275 __xlated("6: r1 = *(u32 *)(r10 -8)")
295 __xlated("0: r1 = 1")
296 __xlated("1: r2 = 2")
298 __xlated("2: *(u64 *)(r10 -8) = r1")
299 __xlated("3: *(u64 *)(r10 -16) = r2")
300 __xlated("...")
301 __xlated("5: r0 = &(void __percpu *)(r0)")
302 __xlated("...")
303 __xlated("7: r2 = *(u64 *)(r10 -16)")
304 __xlated("8: r1 = *(u64 *)(r10 -8)")
306 __xlated("...")
307 __xlated("10: r0 = &(void __percpu *)(r0)")
308 __xlated("...")
309 __xlated("12: exit")
336 __xlated("1: *(u64 *)(r10 -8) = r1")
337 __xlated("...")
338 __xlated("3: r0 = &(void __percpu *)(r0)")
339 __xlated("...")
340 __xlated("5: r1 = *(u64 *)(r10 -8)")
360 __xlated("1: *(u64 *)(r10 -8) = r1")
361 __xlated("...")
362 __xlated("3: r0 = &(void __percpu *)(r0)")
363 __xlated("...")
364 __xlated("5: r1 = *(u64 *)(r10 -8)")
384 __xlated("6: *(u64 *)(r10 -16) = r1")
385 __xlated("...")
386 __xlated("8: r0 = &(void __percpu *)(r0)")
387 __xlated("...")
388 __xlated("10: r1 = *(u64 *)(r10 -16)")
413 __xlated("6: *(u64 *)(r10 -16) = r1")
414 __xlated("...")
415 __xlated("8: r0 = &(void __percpu *)(r0)")
416 __xlated("...")
417 __xlated("10: r1 = *(u64 *)(r10 -16)")
442 __xlated("1: *(u64 *)(r10 -8) = r1")
443 __xlated("...")
444 __xlated("3: r0 = &(void __percpu *)(r0)")
445 __xlated("...")
446 __xlated("5: r1 = *(u64 *)(r10 -8)")
476 __xlated("1: *(u64 *)(r10 -8) = r1")
477 __xlated("...")
478 __xlated("3: r0 = &(void __percpu *)(r0)")
479 __xlated("...")
480 __xlated("5: r1 = *(u64 *)(r10 -8)")
506 __xlated("1: *(u64 *)(r10 -8) = r1")
507 __xlated("...")
508 __xlated("3: r0 = &(void __percpu *)(r0)")
509 __xlated("...")
510 __xlated("5: r1 = *(u64 *)(r10 -8)")
511 __xlated("...")
512 __xlated("9: call pc+1")
513 __xlated("...")
514 __xlated("10: exit")
516 __xlated("11: r1 = 1")
517 __xlated("...")
518 __xlated("13: r0 = &(void __percpu *)(r0)")
519 __xlated("...")
520 __xlated("15: exit")
556 __xlated("0: r1 = 1")
557 __xlated("...")
558 __xlated("2: r0 = &(void __percpu *)(r0)")
559 __xlated("...")
560 __xlated("4: call pc+1")
561 __xlated("5: exit")
563 __xlated("6: r1 = 1")
564 __xlated("...")
565 __xlated("8: r0 = &(void __percpu *)(r0)")
566 __xlated("...")
567 __xlated("10: *(u64 *)(r10 -16) = r1")
568 __xlated("11: exit")
602 __xlated("2: r0 = &(void __percpu *)(r0)")
625 __xlated("0: *(u64 *)(r10 -16) =")
626 __xlated("1: r1 = 1")
627 __xlated("...")
628 __xlated("3: r0 = &(void __percpu *)(r0)")
629 __xlated("...")
631 __xlated("5: r11 = *(u64 *)(r10 -16)")
632 __xlated("6: if r11 == 0x0 goto pc+3")
633 __xlated("7: r11 -= 1")
634 __xlated("8: *(u64 *)(r10 -16) = r11")
636 __xlated("9: *(u64 *)(r10 -8) = r1")
637 __xlated("10: exit")
668 __xlated("2: r1 = 1")
669 __xlated("3: w0 =")
670 __xlated("4: r0 = &(void __percpu *)(r0)")
671 __xlated("5: r0 = *(u32 *)(r0 +0)")
673 __xlated("6: r2 =")
674 __xlated("7: r3 = 0")
675 __xlated("8: r4 = 0")
676 __xlated("...")
678 __xlated("12: *(u64 *)(r10 -32) = r6")
679 __xlated("13: *(u64 *)(r10 -24) = r7")
680 __xlated("14: *(u64 *)(r10 -16) = r8")
681 __xlated("...")
682 __xlated("21: call pc+8") /* dummy_loop_callback */
684 __xlated("...")
685 __xlated("28: r0 = 0")
686 __xlated("29: exit")
688 __xlated("30: r0 = 0")
689 __xlated("31: exit")
717 __xlated("2: r1 = 42")
718 __xlated("3: w0 =")
719 __xlated("4: r0 = &(void __percpu *)(r0)")
720 __xlated("5: r0 = *(u32 *)(r0 +0)")
722 __xlated("6: *(u64 *)(r10 -16) = r1")
723 __xlated("7: call")
724 __xlated("8: r1 = *(u64 *)(r10 -16)")
725 __xlated("...")
727 __xlated("15: *(u64 *)(r10 -40) = r6")
728 __xlated("16: *(u64 *)(r10 -32) = r7")
729 __xlated("17: *(u64 *)(r10 -24) = r8")
762 __xlated("r0 = &(void __percpu *)(r0)")
794 __xlated("0: r2 = 1")
796 __xlated("1: r0 = r1")
797 __xlated("2: r0 = r2")
798 __xlated("3: exit")
815 __xlated("3: r3 = 1")
817 __xlated("4: r0 = r1")
818 __xlated("5: r0 = r3")