Lines Matching full:r2
15 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in __retval()
16 r0 -= r2; \ in __retval()
30 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in direct_packet_access_test1()
32 r0 = r2; \ in direct_packet_access_test1()
35 r0 = *(u8*)(r2 + 0); \ in direct_packet_access_test1()
61 r2 = *(u32*)(r1 + %[__sk_buff_len]); \ in direct_packet_access_test2()
62 r2 <<= 49; \ in direct_packet_access_test2()
63 r2 >>= 49; \ in direct_packet_access_test2()
64 r3 += r2; \ in direct_packet_access_test2()
65 r2 = r3; \ in direct_packet_access_test2()
66 r2 += 8; \ in direct_packet_access_test2()
68 if r2 > r3 goto l1_%=; \ in direct_packet_access_test2()
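The "<<= 49" / ">>= 49" pair in the test2 hits above is a masking idiom: skb->len is loaded as a 64-bit scalar, and the two logical shifts keep only the low 64 - 49 = 15 bits, so the verifier can bound the offset before "r3 += r2". A minimal plain-C sketch of the same arithmetic (the helper name is illustrative, not from the file being searched):

	/* After the two shifts only bits 0..14 survive, so the result is
	 * provably in [0, 0x7fff]; the verifier uses this range to keep
	 * the packet pointer bounded ahead of the data_end comparison.
	 */
	static unsigned long long mask_low_15_bits(unsigned long long len)
	{
		len <<= 49;	/* discard bits 15..63 */
		len >>= 49;	/* logical shift: zero-fill from the left */
		return len;	/* result is in [0, 0x7fff] */
	}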
86 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in direct_packet_access_test3()
100 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in direct_packet_access_test4_write()
102 r0 = r2; \ in direct_packet_access_test4_write()
105 *(u8*)(r2 + 0) = r2; \ in direct_packet_access_test4_write()
120 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in pkt_end_reg_good_access()
122 r0 = r2; \ in pkt_end_reg_good_access()
127 l0_%=: r0 = *(u8*)(r2 + 0); \ in pkt_end_reg_good_access()
142 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in pkt_end_reg_bad_access()
144 r0 = r2; \ in pkt_end_reg_bad_access()
147 r0 = *(u8*)(r2 + 0); \ in pkt_end_reg_bad_access()
164 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in pkt_end_reg_both_accesses()
166 r0 = r2; \ in pkt_end_reg_both_accesses()
169 r0 = *(u8*)(r2 + 0); \ in pkt_end_reg_both_accesses()
172 l0_%=: r0 = *(u8*)(r2 + 0); \ in pkt_end_reg_both_accesses()
187 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in test8_double_test_variant_1()
189 r0 = r2; \ in test8_double_test_variant_1()
193 r0 = *(u8*)(r2 + 0); \ in test8_double_test_variant_1()
196 l0_%=: r0 = *(u8*)(r2 + 0); \ in test8_double_test_variant_1()
211 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in test9_double_test_variant_2()
213 r0 = r2; \ in test9_double_test_variant_2()
219 r0 = *(u8*)(r2 + 0); \ in test9_double_test_variant_2()
220 l1_%=: r0 = *(u8*)(r2 + 0); \ in test9_double_test_variant_2()
235 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in packet_access_test10_write_invalid()
237 r0 = r2; \ in packet_access_test10_write_invalid()
242 l0_%=: *(u8*)(r2 + 0) = r2; \ in packet_access_test10_write_invalid()
257 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in access_test11_shift_good_access()
259 r0 = r2; \ in access_test11_shift_good_access()
266 r6 = r2; \ in access_test11_shift_good_access()
284 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in access_test12_and_good_access()
286 r0 = r2; \ in access_test12_and_good_access()
293 r6 = r2; \ in access_test12_and_good_access()
311 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in access_test13_branches_good_access()
313 r0 = r2; \ in access_test13_branches_good_access()
325 r6 = r2; \ in access_test13_branches_good_access()
344 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in _0_const_imm_good_access()
346 r0 = r2; \ in _0_const_imm_good_access()
351 r6 = r2; \ in _0_const_imm_good_access()
366 __failure __msg("R2 invalid mem access 'scalar'")
371 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in __flag()
373 r0 = r2; \ in __flag()
379 *(u64*)(r4 + 0) = r2; \ in __flag()
381 r2 = *(u64*)(r4 + 0); \ in __flag()
382 *(u32*)(r2 + 0) = r5; \ in __flag()
397 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in test16_arith_on_data_end()
399 r0 = r2; \ in test16_arith_on_data_end()
403 *(u8*)(r2 + 0) = r2; \ in test16_arith_on_data_end()
419 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in __flag()
422 r0 = r2; \ in __flag()
444 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in test18_imm_pkt_ptr_1()
447 r0 += r2; \ in test18_imm_pkt_ptr_1()
449 *(u8*)(r2 + 0) = r2; \ in test18_imm_pkt_ptr_1()
464 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in test19_imm_pkt_ptr_2()
466 r0 = r2; \ in test19_imm_pkt_ptr_2()
470 r4 += r2; \ in test19_imm_pkt_ptr_2()
486 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in __flag()
493 r4 += r2; \ in __flag()
513 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in __flag()
515 r0 = r2; \ in __flag()
522 r4 += r2; \ in __flag()
542 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in __flag()
544 r0 = r2; \ in __flag()
546 *(u64*)(r10 - 8) = r2; \ in __flag()
550 r2 = *(u64*)(r10 - 8); \ in __flag()
555 r4 += r2; \ in __flag()
559 r2 = 1; \ in __flag()
560 *(u16*)(r4 + 0) = r2; \ in __flag()
576 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in __flag()
585 r0 += r2; \ in __flag()
606 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in __flag()
615 r0 += r2; \ in __flag()
635 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in test25_marking_on_good_access()
637 r0 = r2; \ in test25_marking_on_good_access()
642 l0_%=: r0 = *(u8*)(r2 + 0); \ in test25_marking_on_good_access()
656 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in test26_marking_on_bad_access()
658 r0 = r2; \ in test26_marking_on_bad_access()
661 r0 = *(u8*)(r2 + 0); \ in test26_marking_on_bad_access()
677 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in test27_marking_on_good_access()
679 r0 = r2; \ in test27_marking_on_good_access()
682 r0 = *(u8*)(r2 + 0); \ in test27_marking_on_good_access()
697 r2 = *(u32*)(r1 + %[__sk_buff_data]); \ in test28_marking_on_bad_access()
699 r0 = r2; \ in test28_marking_on_bad_access()
704 l0_%=: r0 = *(u8*)(r2 + 0); \ in test28_marking_on_bad_access()
719 r2 = *(u32*)(r1 + %[__sk_buff_data_end]); \ in reg_pkt_end_in_subprog()
738 if r3 > r2 goto l0_%=; \ in reg_pkt_end_in_subprog__1()
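The two reg_pkt_end_in_subprog hits above come from a test where pkt_end is loaded in the caller and the bounds check happens inside a static subprogram, so the verifier must carry packet-pointer bounds across the BPF-to-BPF call. A hedged C equivalent of that shape (the helper name is illustrative, not from the file being searched):

	/* Sketch only: a __noinline helper doing the check that
	 * "if r3 > r2 goto l0_%=" performs in the asm above.
	 */
	static __noinline int check_first_byte(void *pos, void *data_end)
	{
		if (pos + 1 > data_end)		/* out of bounds */
			return 0;
		return *(unsigned char *)pos;	/* safe after the check */
	}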
746 __failure __msg("invalid access to packet, off=0 size=1, R2")
759 /* r2 = ctx->data \ in __flag()
763 r2 = *(u32*)(r9 + %[__sk_buff_data]); \ in __flag()
771 /* r2 += r6 ; this forces assignment of ID to r2\ in __flag()
772 * r2 += 1 ; get some fixed off for r2\ in __flag()
776 r2 += r6; \ in __flag()
777 r2 += 1; \ in __flag()
783 * r2 = r3 ; optionally share ID between r2 and r3\ in __flag()
786 r2 = r3; \ in __flag()
789 /* r5 = *(u8 *) (r2 - 1) ; access packet memory using r2,\ in __flag()
792 r5 = *(u8*)(r2 - 1); \ in __flag()
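Taken together, almost every hit above instantiates the canonical direct packet access pattern: load data and data_end from the context, bounds-check a derived pointer against data_end, and only then dereference. The final group of hits (lines 759-792, the check_id()/regsafe() test) stresses a subtlety of that pattern: after "r2 = r3" the two registers share an ID, so a later bounds check on r3 must also narrow r2. A self-contained BPF C sketch of the basic pattern (the program name is illustrative, not from the file being searched):

	#include <linux/bpf.h>
	#include <bpf/bpf_helpers.h>

	SEC("tc")
	int direct_packet_access_example(struct __sk_buff *skb)
	{
		/* r2 = *(u32 *)(r1 + offsetof(struct __sk_buff, data)) */
		void *data = (void *)(long)skb->data;
		/* r3 = *(u32 *)(r1 + offsetof(struct __sk_buff, data_end)) */
		void *data_end = (void *)(long)skb->data_end;

		/* r0 = r2; r0 += 8; if r0 > r3 goto l0_%= */
		if (data + 8 > data_end)
			return 0;	/* the verifier rejects any unchecked access */

		/* r0 = *(u8 *)(r2 + 0): safe only on this side of the check */
		return *(unsigned char *)data;
	}

	char _license[] SEC("license") = "GPL";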