Lines Matching +full:0 +full:xcc

21  *		if (n != 0) {
26 * } while (--n != 0);
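The two fragments above are from the routine's header comment, which states memcpy's contract as the classic byte loop. For reference, a direct C transcription of that documented loop (the function name is mine, not from the source):

#include <stddef.h>

/* C rendering of the loop sketched in the header comment. */
void *memcpy_ref(void *dst, const void *src, size_t n)
{
	char *d = dst;
	const char *s = src;

	if (n != 0) {
		do {
			*d++ = *s++;	/* one byte per iteration */
		} while (--n != 0);
	}
	return dst;	/* "exit with dst addr", as the later comments put it */
}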
61 * copy final 0-63 bytes; exit with dst addr
75 * move final 0-31 bytes; exit with dst addr
137 #define STORE_ASI 0x80 /* ASI_P */
145 #define STORE_MRU_ASI 0x80 /* ASI_P */
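Both macros resolve to the same selector: ASI 0x80 is the SPARC V9 primary address space, ASI_P. Spelled out as plain C-preprocessor defines, with the identity made explicit:

#define ASI_P		0x80	/* SPARC V9 primary address space */
#define STORE_ASI	ASI_P	/* normal stores, no special ASI */
#define STORE_MRU_ASI	ASI_P	/* likewise for the MRU-store variant */

Configurations that use block-initializing stores would plug different ASI values into these two macros; in the lines matched here, both fall back to the plain primary space.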
185 cmp %g2, 0
186 tne %xcc, 5
212 bgu,pt %xcc, 7b
224 bgu,pn %xcc, .Llarge_align8_copy
233 ble,pn %xcc, .Lmedl63 ! skip big loop if less than 64 bytes
254 bgu,pt %xcc, .Lmedl64 ! repeat if at least 64 bytes left
258 ble,pt %xcc, .Lmedl31 ! to skip if 31 or fewer bytes left
273 ble,pt %xcc, .Lmedl15 ! skip if 15 or fewer bytes left
284 bz,pt %xcc, .Lsmallx ! exit if finished
286 blt,pt %xcc, .Lmedw7 ! skip if 7 or fewer bytes left
292 bnz,pn %xcc, .Lmedw7
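Lines 233-292 trace the medium-copy path stepping down through chunk sizes: a 64-byte main loop (.Lmedl64), then single 32-, 16-, and 8-byte steps, leaving at most 7 bytes for the byte tail. A hedged C sketch of that control flow (the helper is plain memcpy here; the real code keeps everything in registers):

#include <stddef.h>
#include <string.h>

/* Sketch of the .Lmedl64 -> .Lmedl31 -> .Lmedl15 -> .Lmedw7 cascade. */
static void medium_copy_sketch(char *dst, const char *src, size_t n)
{
	while (n >= 64) { memcpy(dst, src, 64); dst += 64; src += 64; n -= 64; }
	if (n > 31)     { memcpy(dst, src, 32); dst += 32; src += 32; n -= 32; }
	if (n > 15)     { memcpy(dst, src, 16); dst += 16; src += 16; n -= 16; }
	if (n > 7)      { memcpy(dst, src, 8);  dst += 8;  src += 8;  n -= 8;  }
	while (n--)
		*dst++ = *src++;	/* 0-7 byte tail */
}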
301 andcc %o1, 0x3, %o5 ! test word alignment
302 bnz,pt %xcc, .Lunalignsetup ! branch to skip if not word aligned
314 bge,pt %xcc, .Lunalignrejoin ! otherwise rejoin main loop
319 ble,pt %xcc, .Lmedw31 ! skip big loop if less than 32
343 bgu,pt %xcc, .Lmedw32 ! repeat if at least 32 bytes left
348 bz,pt %xcc, .Lsmallx ! exit if finished
351 blt,pt %xcc, .Lmedw15
367 bz,pt %xcc, .Lsmallx ! exit if finished
369 blt,pn %xcc, .Lmedw7 ! skip if 7 or fewer bytes left
378 bz,pt %xcc, .Lsmallx ! exit if finished
381 blt,pn %xcc, .Lsmallleft3 ! skip if 3 or fewer bytes left
395 andcc %o0, 0x3f, %o3 ! %o3 == 0 means dst is 64 byte aligned
441 andn %o2, 0x3f, %o5 ! %o5 is multiple of block size
442 and %o2, 0x3f, %o2 ! residue bytes in %o2
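This andn/and pair (it recurs at lines 556-557 and 670-671) splits the remaining count into a multiple of the 64-byte block size plus a 0-63 byte residue; the same idiom with mask 0x7 (lines 560, 675, 719-720, 723) yields 8-byte-aligned counts and addresses. In C terms (variable names are illustrative):

#include <stdint.h>
#include <stddef.h>

/* The mask idiom behind "andn %o2, 0x3f, %o5 / and %o2, 0x3f, %o2"
 * and "andn %o1, 0x7, %o4". */
static void split_count(size_t n, const void *src)
{
	size_t blocks = n & ~(size_t)0x3f;	/* multiple of the 64-byte block */
	size_t resid  = n & 0x3f;		/* 0-63 residue bytes */
	uintptr_t src_al =
		(uintptr_t)src & ~(uintptr_t)0x7;	/* long-word-aligned src */

	(void)blocks; (void)resid; (void)src_al;
}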
459 blu,pt %xcc, .Lalign_loop_fin
468 bgu %xcc, .Lalign_loop_start
500 bgu %xcc, .Lalign_loop_rest
505 bgu,pt %xcc, .Lalign_loop_start
508 cmp %o5, 0
530 bgu %xcc, .Lalign_loop_fin
554 bge,pt %xcc, .Lunalign_large
556 andn %o2, 0x3f, %o5 ! %o5 is multiple of block size
557 and %o2, 0x3f, %o2 ! residue bytes in %o2
560 andn %o1, 0x7, %o4 ! %o4 has long word aligned src address
595 bgu,pt %xcc, .Lunalign_loop
601 andcc %o0, 0x3f, %o3 ! is dst 64-byte block aligned?
602 bz %xcc, .Lunalignsrc
607 andcc %o1, 0x1, %o5
608 bnz %xcc, .Lunalignbyte ! check for byte alignment
611 bnz %xcc, .Lunalignhalf
621 bnz %xcc, .Lunalignword
638 bnz %xcc, .Lunalignhalf
664 bnz %xcc, .Lunalignbyte_loop
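Lines 607-664 pick the copy granularity from the source's low address bits: each set bit rules out wider naturally-aligned loads, routing the copy to the byte, halfword, or word loop. A hedged C rendering of the test (the loop bodies themselves are elided):

#include <stdint.h>

/* Sketch of the .Lunalignbyte/.Lunalignhalf/.Lunalignword dispatch:
 * the lowest set bit of the source address caps the safe access width. */
static unsigned max_access_size(const void *src)
{
	uintptr_t a = (uintptr_t)src;

	if (a & 0x1)
		return 1;	/* odd address: byte loads (ldub) only */
	if (a & 0x2)
		return 2;	/* halfword loads */
	if (a & 0x4)
		return 4;	/* word loads */
	return 8;		/* long-word loads (ldx) */
}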
670 andn %o2, 0x3f, %o5 ! %o5 is multiple of block size
671 and %o2, 0x3f, %o2 ! residue bytes in %o2
675 andn %o1, 0x7, %o4 ! %o4 has long word aligned src address
710 bgu,pt %xcc, .Lunalign_sloop
717 bleu %xcc, .Lunalign_short
719 andn %o2, 0x7, %o5 ! %o5 is multiple of 8
720 and %o2, 0x7, %o2 ! residue bytes in %o2
723 andn %o1, 0x7, %o4 ! %o4 has long word aligned src address
733 bgu,pt %xcc, .Lunalign_by8
758 LOAD(prefetch, %o1 + 0x40, #n_reads_strong)
759 andcc %g2, 0x7, %g0
760 bne,pn %xcc, .Lmedium_unaligned_cp
764 andncc %o2, 0x20 - 1, %o5
765 be,pn %xcc, 2f
767 1: EX_LD(LOAD(ldx, %o1 + 0x00, %o3), memcpy_retl_o2_plus_o5)
768 EX_LD(LOAD(ldx, %o1 + 0x08, %g2), memcpy_retl_o2_plus_o5)
769 EX_LD(LOAD(ldx, %o1 + 0x10, %g7), memcpy_retl_o2_plus_o5)
770 EX_LD(LOAD(ldx, %o1 + 0x18, %o4), memcpy_retl_o2_plus_o5)
771 add %o1, 0x20, %o1
772 subcc %o5, 0x20, %o5
773 EX_ST(STORE(stx, %o3, %o0 + 0x00), memcpy_retl_o2_plus_o5_plus_32)
774 EX_ST(STORE(stx, %g2, %o0 + 0x08), memcpy_retl_o2_plus_o5_plus_24)
775 EX_ST(STORE(stx, %g7, %o0 + 0x10), memcpy_retl_o2_plus_o5_plus_24)
776 EX_ST(STORE(stx, %o4, %o0 + 0x18), memcpy_retl_o2_plus_o5_plus_8)
777 bne,pt %xcc, 1b
778 add %o0, 0x20, %o0
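The loop at 767-778 moves 32 bytes per trip through four 64-bit registers, and every EX_LD/EX_ST wraps its access in an exception-table entry. The fixup names encode the bytes still uncopied on top of %o2 + %o5 (already decremented by 0x20 at line 772), so a fault at any point lets the handler report how much was left undone. A C sketch of the data movement alone, fault handling elided:

#include <stdint.h>
#include <stddef.h>

/* 32 bytes per iteration, as at listing lines 767-778; assumes both
 * pointers are 8-byte aligned, which the preceding checks guarantee.
 * n32 is the number of 32-byte trips. */
static void copy32_loop(uint64_t *dst, const uint64_t *src, size_t n32)
{
	while (n32--) {
		uint64_t a = src[0], b = src[1], c = src[2], d = src[3];

		dst[0] = a;	/* cf. the four stx stores */
		dst[1] = b;
		dst[2] = c;
		dst[3] = d;
		src += 4;
		dst += 4;
	}
}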
779 2: andcc %o2, 0x18, %o5
780 be,pt %xcc, 3f
782 1: EX_LD(LOAD(ldx, %o1 + 0x00, %o3), memcpy_retl_o2_plus_o5)
783 add %o1, 0x08, %o1
784 add %o0, 0x08, %o0
785 subcc %o5, 0x08, %o5
786 bne,pt %xcc, 1b
787 EX_ST(STORE(stx, %o3, %o0 - 0x08), memcpy_retl_o2_plus_o5_plus_8)
789 cmp %o2, 0x04
790 bl,pn %xcc, .Ltiny_cp
792 EX_LD(LOAD(lduw, %o1 + 0x00, %o3), memcpy_retl_o2)
793 add %o1, 0x04, %o1
794 add %o0, 0x04, %o0
795 subcc %o2, 0x04, %o2
796 bne,pn %xcc, .Ltiny_cp
797 EX_ST(STORE(stw, %o3, %o0 - 0x04), memcpy_retl_o2_plus_4)
798 ba,a,pt %xcc, .Lexit_cp
803 and %o3, 0x7, %o3
807 1: EX_LD(LOAD(ldub, %o1 + 0x00, %g2), memcpy_retl_o2_plus_g1)
811 bne,pt %xcc, 1b
812 EX_ST(STORE(stb, %g2, %o0 - 0x01), memcpy_retl_o2_plus_g1_plus_1)
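The byte loop at 807-812 nudges the destination up to an 8-byte boundary before the shifted main loop. The count it needs is the usual (-dst & 7) idiom; the "and %o3, 0x7, %o3" at 803 is its second half (the negation feeding %o3 is not among the matched lines). A sketch:

#include <stdint.h>
#include <stddef.h>

/* Destination-alignment prologue, cf. listing lines 803-812. Returns
 * the count remaining after the 0-7 alignment bytes are copied. */
static size_t align_dst_to_8(char **dstp, const char **srcp, size_t n)
{
	size_t pad = (size_t)(-(uintptr_t)*dstp & 0x7);

	if (pad > n)
		pad = n;
	n -= pad;
	while (pad--)
		*(*dstp)++ = *(*srcp)++;	/* byte copy, as ldub/stb do */
	return n;
}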
814 and %o1, 0x7, %o3
819 andn %o1, 0x7, %o1
820 EX_LD(LOAD(ldx, %o1 + 0x00, %o4), memcpy_retl_o2)
822 andn %o2, 0x08 - 1, %o5
825 1: EX_LD(LOAD(ldx, %o1 + 0x08, %g3), memcpy_retl_o2_plus_o5)
826 add %o1, 0x08, %o1
827 subcc %o5, 0x08, %o5
830 EX_ST(STORE(stx, %g7, %o0 + 0x00), memcpy_retl_o2_plus_o5_plus_8)
831 add %o0, 0x08, %o0
832 bne,pt %xcc, 1b
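Lines 814-832 are the classic unaligned-source technique: round src down to an 8-byte boundary, keep one aligned doubleword of lookahead, and build each output doubleword by shifting the previous and next aligned words together. The shift/merge instructions themselves are not among the matched lines, so the body below reconstructs the standard big-endian form (SPARC is big-endian):

#include <stdint.h>
#include <stddef.h>

/* Aligned-load/shift/merge copy, cf. listing lines 814-832. Only valid
 * when src is actually misaligned (shift != 0), which is exactly when
 * this path is taken; dst must already be 8-byte aligned. ndw is the
 * number of output doublewords. */
static void copy_shift_merge(uint64_t *dst, const char *src, size_t ndw)
{
	unsigned shift = ((uintptr_t)src & 0x7) * 8;	/* 8..56 bits */
	const uint64_t *p =
		(const uint64_t *)((uintptr_t)src & ~(uintptr_t)0x7);
	uint64_t prev = *p++;		/* first aligned doubleword */

	while (ndw--) {
		uint64_t next = *p++;	/* cf. "ldx [%o1 + 0x08]" */

		*dst++ = (prev << shift) | (next >> (64 - shift));
		prev = next;
	}
}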
838 ba,pt %xcc, .Lsmall_unaligned_cp
841 EX_LD(LOAD(ldub, %o1 + 0x00, %o3), memcpy_retl_o2)
843 be,pn %xcc, .Lexit_cp
844 EX_ST(STORE(stb, %o3, %o0 + 0x00), memcpy_retl_o2_plus_1)
845 EX_LD(LOAD(ldub, %o1 + 0x01, %o3), memcpy_retl_o2)
847 be,pn %xcc, .Lexit_cp
848 EX_ST(STORE(stb, %o3, %o0 + 0x01), memcpy_retl_o2_plus_1)
849 EX_LD(LOAD(ldub, %o1 + 0x02, %o3), memcpy_retl_o2)
850 ba,pt %xcc, .Lexit_cp
851 EX_ST(STORE(stb, %o3, %o0 + 0x02), memcpy_retl_o2)
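The stub at 841-851 finishes a 1-3 byte remainder with straight-line byte moves, exiting after each store once the count runs out. In C:

#include <stddef.h>

/* 1-3 byte tail, cf. .Lsmall_unaligned_cp at listing lines 841-851. */
static void copy_tail_1to3(char *dst, const char *src, size_t n)
{
	dst[0] = src[0];
	if (n == 1)
		return;		/* cf. "be,pn %xcc, .Lexit_cp" */
	dst[1] = src[1];
	if (n == 2)
		return;
	dst[2] = src[2];
}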
854 andcc %g2, 0x3, %g0
855 bne,pn %xcc, .Lsmall_unaligned_cp
856 andn %o2, 0x4 - 1, %o5
859 EX_LD(LOAD(lduw, %o1 + 0x00, %o3), memcpy_retl_o2_plus_o5)
860 add %o1, 0x04, %o1
861 subcc %o5, 0x04, %o5
862 add %o0, 0x04, %o0
863 bne,pt %xcc, 1b
864 EX_ST(STORE(stw, %o3, %o0 - 0x04), memcpy_retl_o2_plus_o5_plus_4)
867 ba,a,pt %xcc, .Ltiny_cp
870 1: EX_LD(LOAD(ldub, %o1 + 0x00, %o3), memcpy_retl_o2)
874 bne,pt %xcc, 1b
875 EX_ST(STORE(stb, %o3, %o0 - 0x01), memcpy_retl_o2_plus_1)
876 ba,a,pt %xcc, .Lexit_cp
880 bz,pt %xcc, .Lsmallx
882 blt,pn %xcc, .Lsmallleft3
896 bgu,pt %xcc, .Lsmallnotalign4 ! loop til 3 or fewer bytes remain
899 bz,pt %xcc, .Lsmallx
903 bz,pt %xcc, .Lsmallx
907 bz,pt %xcc, .Lsmallx
916 bnz,pn %xcc, .Lsmallleft3