Lines matching full:r1 (occurrences of the BPF register r1)
23 "r1 = 1;" in simple()
28 "*(u64 *)(r10 - 16) = r1;" in simple()
38 "r1 = *(u64 *)(r10 - 16);" in simple()
64 __xlated("0: r1 = 1")
71 "r1 = 1;" in canary_arm64_riscv64()
72 "*(u64 *)(r10 - 16) = r1;" in canary_arm64_riscv64()
74 "r1 = *(u64 *)(r10 - 16);" in canary_arm64_riscv64()
100 __xlated("1: *(u64 *)(r10 -16) = r1")
109 "r1 = 1;" in wrong_reg_in_pattern1()
110 "*(u64 *)(r10 - 16) = r1;" in wrong_reg_in_pattern1()
163 __xlated("2: *(u64 *)(r2 -16) = r1")
167 __xlated("6: r1 = *(u64 *)(r10 -16)")
172 "r1 = 1;" in wrong_base_in_pattern()
174 "*(u64 *)(r2 - 16) = r1;" in wrong_base_in_pattern()
176 "r1 = *(u64 *)(r10 - 16);" in wrong_base_in_pattern()
185 __xlated("1: *(u64 *)(r10 -16) = r1")
194 "r1 = 1;" in wrong_insn_in_pattern()
195 "*(u64 *)(r10 - 16) = r1;" in wrong_insn_in_pattern()
198 "r1 = *(u64 *)(r10 - 16);" in wrong_insn_in_pattern()
207 __xlated("2: *(u64 *)(r10 -16) = r1")
211 __xlated("6: r1 = *(u64 *)(r10 -8)")
216 "r1 = 1;" in wrong_off_in_pattern1()
217 "*(u64 *)(r10 - 8) = r1;" in wrong_off_in_pattern1()
218 "*(u64 *)(r10 - 16) = r1;" in wrong_off_in_pattern1()
220 "r1 = *(u64 *)(r10 - 8);" in wrong_off_in_pattern1()
229 __xlated("1: *(u32 *)(r10 -4) = r1")
233 __xlated("5: r1 = *(u32 *)(r10 -4)")
238 "r1 = 1;" in wrong_off_in_pattern2()
239 "*(u32 *)(r10 - 4) = r1;" in wrong_off_in_pattern2()
241 "r1 = *(u32 *)(r10 - 4);" in wrong_off_in_pattern2()
250 __xlated("1: *(u32 *)(r10 -16) = r1")
254 __xlated("5: r1 = *(u32 *)(r10 -16)")
259 "r1 = 1;" in wrong_size_in_pattern()
260 "*(u32 *)(r10 - 16) = r1;" in wrong_size_in_pattern()
262 "r1 = *(u32 *)(r10 - 16);" in wrong_size_in_pattern()
271 __xlated("2: *(u32 *)(r10 -8) = r1")
275 __xlated("6: r1 = *(u32 *)(r10 -8)")
280 "r1 = 1;" in partial_pattern()
282 "*(u32 *)(r10 - 8) = r1;" in partial_pattern()
286 "r1 = *(u32 *)(r10 - 8);" in partial_pattern()
295 __xlated("0: r1 = 1")
298 __xlated("2: *(u64 *)(r10 -8) = r1")
304 __xlated("8: r1 = *(u64 *)(r10 -8)")
314 "r1 = 1;" in min_stack_offset()
317 "*(u64 *)(r10 - 8) = r1;" in min_stack_offset()
321 "r1 = *(u64 *)(r10 - 8);" in min_stack_offset()
323 "*(u64 *)(r10 - 24) = r1;" in min_stack_offset()
327 "r1 = *(u64 *)(r10 - 24);" in min_stack_offset()
336 __xlated("1: *(u64 *)(r10 -8) = r1")
340 __xlated("5: r1 = *(u64 *)(r10 -8)")
345 "r1 = 1;" in bad_fixed_read()
346 "*(u64 *)(r10 - 8) = r1;" in bad_fixed_read()
348 "r1 = *(u64 *)(r10 - 8);" in bad_fixed_read()
349 "r1 = r10;" in bad_fixed_read()
350 "r1 += -8;" in bad_fixed_read()
351 "r1 = *(u64 *)(r1 - 0);" in bad_fixed_read()
360 __xlated("1: *(u64 *)(r10 -8) = r1")
364 __xlated("5: r1 = *(u64 *)(r10 -8)")
369 "r1 = 1;" in bad_fixed_write()
370 "*(u64 *)(r10 - 8) = r1;" in bad_fixed_write()
372 "r1 = *(u64 *)(r10 - 8);" in bad_fixed_write()
373 "r1 = r10;" in bad_fixed_write()
374 "r1 += -8;" in bad_fixed_write()
375 "*(u64 *)(r1 - 0) = r1;" in bad_fixed_write()
384 __xlated("6: *(u64 *)(r10 -16) = r1")
388 __xlated("10: r1 = *(u64 *)(r10 -16)")
393 "r6 = *(u64 *)(r1 + 0);" /* random scalar value */ in bad_varying_read()
398 "r1 = 1;" in bad_varying_read()
399 "*(u64 *)(r10 - 16) = r1;" in bad_varying_read()
401 "r1 = *(u64 *)(r10 - 16);" in bad_varying_read()
402 "r1 = r10;" in bad_varying_read()
403 "r1 += r7;" in bad_varying_read()
404 "r1 = *(u8 *)(r1 - 0);" /* touches slot [-16..-9] where spills are stored */ in bad_varying_read()
413 __xlated("6: *(u64 *)(r10 -16) = r1")
417 __xlated("10: r1 = *(u64 *)(r10 -16)")
422 "r6 = *(u64 *)(r1 + 0);" /* random scalar value */ in bad_varying_write()
427 "r1 = 1;" in bad_varying_write()
428 "*(u64 *)(r10 - 16) = r1;" in bad_varying_write()
430 "r1 = *(u64 *)(r10 - 16);" in bad_varying_write()
431 "r1 = r10;" in bad_varying_write()
432 "r1 += r7;" in bad_varying_write()
433 "*(u8 *)(r1 - 0) = r7;" /* touches slot [-16..-9] where spills are stored */ in bad_varying_write()
442 __xlated("1: *(u64 *)(r10 -8) = r1")
446 __xlated("5: r1 = *(u64 *)(r10 -8)")
451 "r1 = 1;" in bad_write_in_subprog()
452 "*(u64 *)(r10 - 8) = r1;" in bad_write_in_subprog()
454 "r1 = *(u64 *)(r10 - 8);" in bad_write_in_subprog()
455 "r1 = r10;" in bad_write_in_subprog()
456 "r1 += -8;" in bad_write_in_subprog()
469 "*(u64 *)(r1 - 0) = r0;" /* invalidates bpf_fastcall contract for caller: */ in bad_write_in_subprog_aux()
476 __xlated("1: *(u64 *)(r10 -8) = r1")
480 __xlated("5: r1 = *(u64 *)(r10 -8)")
485 "r1 = 1;" in bad_helper_write()
487 "*(u64 *)(r10 - 8) = r1;" in bad_helper_write()
489 "r1 = *(u64 *)(r10 - 8);" in bad_helper_write()
490 "r1 = r10;" in bad_helper_write()
491 "r1 += -8;" in bad_helper_write()
506 __xlated("1: *(u64 *)(r10 -8) = r1")
510 __xlated("5: r1 = *(u64 *)(r10 -8)")
516 __xlated("11: r1 = 1")
525 "r1 = 1;" in invalidate_one_subprog()
526 "*(u64 *)(r10 - 8) = r1;" in invalidate_one_subprog()
528 "r1 = *(u64 *)(r10 - 8);" in invalidate_one_subprog()
529 "r1 = r10;" in invalidate_one_subprog()
530 "r1 += -8;" in invalidate_one_subprog()
531 "r1 = *(u64 *)(r1 - 0);" in invalidate_one_subprog()
543 "r1 = 1;" in invalidate_one_subprog_aux()
544 "*(u64 *)(r10 - 8) = r1;" in invalidate_one_subprog_aux()
546 "r1 = *(u64 *)(r10 - 8);" in invalidate_one_subprog_aux()
556 __xlated("0: r1 = 1")
563 __xlated("6: r1 = 1")
567 __xlated("10: *(u64 *)(r10 -16) = r1")
573 "r1 = 1;" in subprogs_use_independent_offsets()
574 "*(u64 *)(r10 - 16) = r1;" in subprogs_use_independent_offsets()
576 "r1 = *(u64 *)(r10 - 16);" in subprogs_use_independent_offsets()
588 "r1 = 1;" in subprogs_use_independent_offsets_aux()
589 "*(u64 *)(r10 - 24) = r1;" in subprogs_use_independent_offsets_aux()
591 "r1 = *(u64 *)(r10 - 24);" in subprogs_use_independent_offsets_aux()
592 "*(u64 *)(r10 - 16) = r1;" in subprogs_use_independent_offsets_aux()
607 "r1 = 1;" in helper_call_does_not_prevent_bpf_fastcall()
608 "*(u64 *)(r10 - 8) = r1;" in helper_call_does_not_prevent_bpf_fastcall()
610 "r1 = *(u64 *)(r10 - 8);" in helper_call_does_not_prevent_bpf_fastcall()
611 "*(u64 *)(r10 - 8) = r1;" in helper_call_does_not_prevent_bpf_fastcall()
613 "r1 = *(u64 *)(r10 - 8);" in helper_call_does_not_prevent_bpf_fastcall()
626 __xlated("1: r1 = 1")
636 __xlated("9: *(u64 *)(r10 -8) = r1")
642 "r1 = 1;" in may_goto_interaction()
643 "*(u64 *)(r10 - 16) = r1;" in may_goto_interaction()
645 "r1 = *(u64 *)(r10 - 16);" in may_goto_interaction()
648 "*(u64 *)(r10 - 8) = r1;" in may_goto_interaction()
668 __xlated("2: r1 = 1")
694 "r1 = 1;" in bpf_loop_interaction1()
696 "*(u64 *)(r10 - 16) = r1;" in bpf_loop_interaction1()
698 "r1 = *(u64 *)(r10 - 16);" in bpf_loop_interaction1()
717 __xlated("2: r1 = 42")
722 __xlated("6: *(u64 *)(r10 -16) = r1")
724 __xlated("8: r1 = *(u64 *)(r10 -16)")
734 "r1 = 42;" in bpf_loop_interaction2()
736 "*(u64 *)(r10 - 16) = r1;" in bpf_loop_interaction2()
738 "r1 = *(u64 *)(r10 - 16);" in bpf_loop_interaction2()
739 "*(u64 *)(r10 - 16) = r1;" in bpf_loop_interaction2()
741 "r1 = *(u64 *)(r10 - 16);" in bpf_loop_interaction2()
772 "r1 = 42;" in cumulative_stack_depth()
773 "*(u64 *)(r10 - %[max_bpf_stack]) = r1;" in cumulative_stack_depth()
786 "*(u64 *)(r10 - 8) = r1;" in cumulative_stack_depth_subprog()
788 "r1 = *(u64 *)(r10 - 8);" in cumulative_stack_depth_subprog()
796 __xlated("1: r0 = r1")
817 __xlated("4: r0 = r1")