Lines Matching full:volatile

69 asm volatile("vld $vr20, %0" : : "m" (qmul[0])); in raid6_2data_recov_lsx()
70 asm volatile("vld $vr21, %0" : : "m" (qmul[16])); in raid6_2data_recov_lsx()
71 asm volatile("vld $vr22, %0" : : "m" (pbmul[0])); in raid6_2data_recov_lsx()
72 asm volatile("vld $vr23, %0" : : "m" (pbmul[16])); in raid6_2data_recov_lsx()
76 asm volatile("vld $vr4, %0" : : "m" (q[0])); in raid6_2data_recov_lsx()
77 asm volatile("vld $vr5, %0" : : "m" (q[16])); in raid6_2data_recov_lsx()
78 asm volatile("vld $vr6, %0" : : "m" (q[32])); in raid6_2data_recov_lsx()
79 asm volatile("vld $vr7, %0" : : "m" (q[48])); in raid6_2data_recov_lsx()
81 asm volatile("vld $vr8, %0" : : "m" (dq[0])); in raid6_2data_recov_lsx()
82 asm volatile("vld $vr9, %0" : : "m" (dq[16])); in raid6_2data_recov_lsx()
83 asm volatile("vld $vr10, %0" : : "m" (dq[32])); in raid6_2data_recov_lsx()
84 asm volatile("vld $vr11, %0" : : "m" (dq[48])); in raid6_2data_recov_lsx()
85 asm volatile("vxor.v $vr4, $vr4, $vr8"); in raid6_2data_recov_lsx()
86 asm volatile("vxor.v $vr5, $vr5, $vr9"); in raid6_2data_recov_lsx()
87 asm volatile("vxor.v $vr6, $vr6, $vr10"); in raid6_2data_recov_lsx()
88 asm volatile("vxor.v $vr7, $vr7, $vr11"); in raid6_2data_recov_lsx()
90 asm volatile("vld $vr0, %0" : : "m" (p[0])); in raid6_2data_recov_lsx()
91 asm volatile("vld $vr1, %0" : : "m" (p[16])); in raid6_2data_recov_lsx()
92 asm volatile("vld $vr2, %0" : : "m" (p[32])); in raid6_2data_recov_lsx()
93 asm volatile("vld $vr3, %0" : : "m" (p[48])); in raid6_2data_recov_lsx()
95 asm volatile("vld $vr8, %0" : : "m" (dp[0])); in raid6_2data_recov_lsx()
96 asm volatile("vld $vr9, %0" : : "m" (dp[16])); in raid6_2data_recov_lsx()
97 asm volatile("vld $vr10, %0" : : "m" (dp[32])); in raid6_2data_recov_lsx()
98 asm volatile("vld $vr11, %0" : : "m" (dp[48])); in raid6_2data_recov_lsx()
99 asm volatile("vxor.v $vr0, $vr0, $vr8"); in raid6_2data_recov_lsx()
100 asm volatile("vxor.v $vr1, $vr1, $vr9"); in raid6_2data_recov_lsx()
101 asm volatile("vxor.v $vr2, $vr2, $vr10"); in raid6_2data_recov_lsx()
102 asm volatile("vxor.v $vr3, $vr3, $vr11"); in raid6_2data_recov_lsx()
105 asm volatile("vsrli.b $vr8, $vr4, 4"); in raid6_2data_recov_lsx()
106 asm volatile("vsrli.b $vr9, $vr5, 4"); in raid6_2data_recov_lsx()
107 asm volatile("vsrli.b $vr10, $vr6, 4"); in raid6_2data_recov_lsx()
108 asm volatile("vsrli.b $vr11, $vr7, 4"); in raid6_2data_recov_lsx()
110 asm volatile("vandi.b $vr4, $vr4, 0x0f"); in raid6_2data_recov_lsx()
111 asm volatile("vandi.b $vr5, $vr5, 0x0f"); in raid6_2data_recov_lsx()
112 asm volatile("vandi.b $vr6, $vr6, 0x0f"); in raid6_2data_recov_lsx()
113 asm volatile("vandi.b $vr7, $vr7, 0x0f"); in raid6_2data_recov_lsx()
115 asm volatile("vshuf.b $vr4, $vr20, $vr20, $vr4"); in raid6_2data_recov_lsx()
116 asm volatile("vshuf.b $vr5, $vr20, $vr20, $vr5"); in raid6_2data_recov_lsx()
117 asm volatile("vshuf.b $vr6, $vr20, $vr20, $vr6"); in raid6_2data_recov_lsx()
118 asm volatile("vshuf.b $vr7, $vr20, $vr20, $vr7"); in raid6_2data_recov_lsx()
120 asm volatile("vshuf.b $vr8, $vr21, $vr21, $vr8"); in raid6_2data_recov_lsx()
121 asm volatile("vshuf.b $vr9, $vr21, $vr21, $vr9"); in raid6_2data_recov_lsx()
122 asm volatile("vshuf.b $vr10, $vr21, $vr21, $vr10"); in raid6_2data_recov_lsx()
123 asm volatile("vshuf.b $vr11, $vr21, $vr21, $vr11"); in raid6_2data_recov_lsx()
125 asm volatile("vxor.v $vr16, $vr8, $vr4"); in raid6_2data_recov_lsx()
126 asm volatile("vxor.v $vr17, $vr9, $vr5"); in raid6_2data_recov_lsx()
127 asm volatile("vxor.v $vr18, $vr10, $vr6"); in raid6_2data_recov_lsx()
128 asm volatile("vxor.v $vr19, $vr11, $vr7"); in raid6_2data_recov_lsx()
131 asm volatile("vsrli.b $vr4, $vr0, 4"); in raid6_2data_recov_lsx()
132 asm volatile("vsrli.b $vr5, $vr1, 4"); in raid6_2data_recov_lsx()
133 asm volatile("vsrli.b $vr6, $vr2, 4"); in raid6_2data_recov_lsx()
134 asm volatile("vsrli.b $vr7, $vr3, 4"); in raid6_2data_recov_lsx()
136 asm volatile("vandi.b $vr12, $vr0, 0x0f"); in raid6_2data_recov_lsx()
137 asm volatile("vandi.b $vr13, $vr1, 0x0f"); in raid6_2data_recov_lsx()
138 asm volatile("vandi.b $vr14, $vr2, 0x0f"); in raid6_2data_recov_lsx()
139 asm volatile("vandi.b $vr15, $vr3, 0x0f"); in raid6_2data_recov_lsx()
141 asm volatile("vshuf.b $vr12, $vr22, $vr22, $vr12"); in raid6_2data_recov_lsx()
142 asm volatile("vshuf.b $vr13, $vr22, $vr22, $vr13"); in raid6_2data_recov_lsx()
143 asm volatile("vshuf.b $vr14, $vr22, $vr22, $vr14"); in raid6_2data_recov_lsx()
144 asm volatile("vshuf.b $vr15, $vr22, $vr22, $vr15"); in raid6_2data_recov_lsx()
146 asm volatile("vshuf.b $vr4, $vr23, $vr23, $vr4"); in raid6_2data_recov_lsx()
147 asm volatile("vshuf.b $vr5, $vr23, $vr23, $vr5"); in raid6_2data_recov_lsx()
148 asm volatile("vshuf.b $vr6, $vr23, $vr23, $vr6"); in raid6_2data_recov_lsx()
149 asm volatile("vshuf.b $vr7, $vr23, $vr23, $vr7"); in raid6_2data_recov_lsx()
151 asm volatile("vxor.v $vr4, $vr4, $vr12"); in raid6_2data_recov_lsx()
152 asm volatile("vxor.v $vr5, $vr5, $vr13"); in raid6_2data_recov_lsx()
153 asm volatile("vxor.v $vr6, $vr6, $vr14"); in raid6_2data_recov_lsx()
154 asm volatile("vxor.v $vr7, $vr7, $vr15"); in raid6_2data_recov_lsx()
157 asm volatile("vxor.v $vr4, $vr4, $vr16"); in raid6_2data_recov_lsx()
158 asm volatile("vxor.v $vr5, $vr5, $vr17"); in raid6_2data_recov_lsx()
159 asm volatile("vxor.v $vr6, $vr6, $vr18"); in raid6_2data_recov_lsx()
160 asm volatile("vxor.v $vr7, $vr7, $vr19"); in raid6_2data_recov_lsx()
161 asm volatile("vst $vr4, %0" : "=m" (dq[0])); in raid6_2data_recov_lsx()
162 asm volatile("vst $vr5, %0" : "=m" (dq[16])); in raid6_2data_recov_lsx()
163 asm volatile("vst $vr6, %0" : "=m" (dq[32])); in raid6_2data_recov_lsx()
164 asm volatile("vst $vr7, %0" : "=m" (dq[48])); in raid6_2data_recov_lsx()
167 asm volatile("vxor.v $vr0, $vr0, $vr4"); in raid6_2data_recov_lsx()
168 asm volatile("vxor.v $vr1, $vr1, $vr5"); in raid6_2data_recov_lsx()
169 asm volatile("vxor.v $vr2, $vr2, $vr6"); in raid6_2data_recov_lsx()
170 asm volatile("vxor.v $vr3, $vr3, $vr7"); in raid6_2data_recov_lsx()
171 asm volatile("vst $vr0, %0" : "=m" (dp[0])); in raid6_2data_recov_lsx()
172 asm volatile("vst $vr1, %0" : "=m" (dp[16])); in raid6_2data_recov_lsx()
173 asm volatile("vst $vr2, %0" : "=m" (dp[32])); in raid6_2data_recov_lsx()
174 asm volatile("vst $vr3, %0" : "=m" (dp[48])); in raid6_2data_recov_lsx()
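
Taken together, the raid6_2data_recov_lsx() matches above are the vector form of the standard two-failed-data reconstruction: with Px = P ^ P' and Qx = Q ^ Q' (the syndromes recomputed with the failed blocks zeroed), the missing blocks are Db = qmul(Qx) ^ pbmul(Px) and Da = Db ^ Px. A minimal scalar sketch of the same per-byte math follows; qmul_tbl and pbmul_tbl are hypothetical flat 256-entry multiply tables standing in for the split 16-entry nibble tables the vector code loads.

#include <stddef.h>

/* Scalar sketch of what the LSX loop does on 64-byte chunks. */
static void recov_2data_sketch(size_t bytes, unsigned char *p, unsigned char *q,
			       unsigned char *dp, unsigned char *dq,
			       const unsigned char *qmul_tbl,
			       const unsigned char *pbmul_tbl)
{
	while (bytes--) {
		unsigned char px = *p ^ *dp;           /* P syndrome delta        */
		unsigned char qx = qmul_tbl[*q ^ *dq]; /* qmul * (Q syndrome delta) */

		*dq = pbmul_tbl[px] ^ qx;  /* reconstructed data block B */
		*dp = *dq ^ px;            /* reconstructed data block A */
		p++; q++; dp++; dq++;
	}
}

In the vector code, $vr16-$vr19 hold qx, $vr4-$vr7 end up holding Db (stored to dq), and $vr0-$vr3 hold Px and are xored with Db to produce Da (stored to dp).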
215 asm volatile("vld $vr22, %0" : : "m" (qmul[0])); in raid6_datap_recov_lsx()
216 asm volatile("vld $vr23, %0" : : "m" (qmul[16])); in raid6_datap_recov_lsx()
220 asm volatile("vld $vr0, %0" : : "m" (p[0])); in raid6_datap_recov_lsx()
221 asm volatile("vld $vr1, %0" : : "m" (p[16])); in raid6_datap_recov_lsx()
222 asm volatile("vld $vr2, %0" : : "m" (p[32])); in raid6_datap_recov_lsx()
223 asm volatile("vld $vr3, %0" : : "m" (p[48])); in raid6_datap_recov_lsx()
225 asm volatile("vld $vr4, %0" : : "m" (dq[0])); in raid6_datap_recov_lsx()
226 asm volatile("vld $vr5, %0" : : "m" (dq[16])); in raid6_datap_recov_lsx()
227 asm volatile("vld $vr6, %0" : : "m" (dq[32])); in raid6_datap_recov_lsx()
228 asm volatile("vld $vr7, %0" : : "m" (dq[48])); in raid6_datap_recov_lsx()
230 asm volatile("vld $vr8, %0" : : "m" (q[0])); in raid6_datap_recov_lsx()
231 asm volatile("vld $vr9, %0" : : "m" (q[16])); in raid6_datap_recov_lsx()
232 asm volatile("vld $vr10, %0" : : "m" (q[32])); in raid6_datap_recov_lsx()
233 asm volatile("vld $vr11, %0" : : "m" (q[48])); in raid6_datap_recov_lsx()
234 asm volatile("vxor.v $vr4, $vr4, $vr8"); in raid6_datap_recov_lsx()
235 asm volatile("vxor.v $vr5, $vr5, $vr9"); in raid6_datap_recov_lsx()
236 asm volatile("vxor.v $vr6, $vr6, $vr10"); in raid6_datap_recov_lsx()
237 asm volatile("vxor.v $vr7, $vr7, $vr11"); in raid6_datap_recov_lsx()
240 asm volatile("vsrli.b $vr8, $vr4, 4"); in raid6_datap_recov_lsx()
241 asm volatile("vsrli.b $vr9, $vr5, 4"); in raid6_datap_recov_lsx()
242 asm volatile("vsrli.b $vr10, $vr6, 4"); in raid6_datap_recov_lsx()
243 asm volatile("vsrli.b $vr11, $vr7, 4"); in raid6_datap_recov_lsx()
245 asm volatile("vandi.b $vr4, $vr4, 0x0f"); in raid6_datap_recov_lsx()
246 asm volatile("vandi.b $vr5, $vr5, 0x0f"); in raid6_datap_recov_lsx()
247 asm volatile("vandi.b $vr6, $vr6, 0x0f"); in raid6_datap_recov_lsx()
248 asm volatile("vandi.b $vr7, $vr7, 0x0f"); in raid6_datap_recov_lsx()
250 asm volatile("vshuf.b $vr4, $vr22, $vr22, $vr4"); in raid6_datap_recov_lsx()
251 asm volatile("vshuf.b $vr5, $vr22, $vr22, $vr5"); in raid6_datap_recov_lsx()
252 asm volatile("vshuf.b $vr6, $vr22, $vr22, $vr6"); in raid6_datap_recov_lsx()
253 asm volatile("vshuf.b $vr7, $vr22, $vr22, $vr7"); in raid6_datap_recov_lsx()
255 asm volatile("vshuf.b $vr8, $vr23, $vr23, $vr8"); in raid6_datap_recov_lsx()
256 asm volatile("vshuf.b $vr9, $vr23, $vr23, $vr9"); in raid6_datap_recov_lsx()
257 asm volatile("vshuf.b $vr10, $vr23, $vr23, $vr10"); in raid6_datap_recov_lsx()
258 asm volatile("vshuf.b $vr11, $vr23, $vr23, $vr11"); in raid6_datap_recov_lsx()
260 asm volatile("vxor.v $vr4, $vr4, $vr8"); in raid6_datap_recov_lsx()
261 asm volatile("vxor.v $vr5, $vr5, $vr9"); in raid6_datap_recov_lsx()
262 asm volatile("vxor.v $vr6, $vr6, $vr10"); in raid6_datap_recov_lsx()
263 asm volatile("vxor.v $vr7, $vr7, $vr11"); in raid6_datap_recov_lsx()
264 asm volatile("vst $vr4, %0" : "=m" (dq[0])); in raid6_datap_recov_lsx()
265 asm volatile("vst $vr5, %0" : "=m" (dq[16])); in raid6_datap_recov_lsx()
266 asm volatile("vst $vr6, %0" : "=m" (dq[32])); in raid6_datap_recov_lsx()
267 asm volatile("vst $vr7, %0" : "=m" (dq[48])); in raid6_datap_recov_lsx()
270 asm volatile("vxor.v $vr0, $vr0, $vr4"); in raid6_datap_recov_lsx()
271 asm volatile("vxor.v $vr1, $vr1, $vr5"); in raid6_datap_recov_lsx()
272 asm volatile("vxor.v $vr2, $vr2, $vr6"); in raid6_datap_recov_lsx()
273 asm volatile("vxor.v $vr3, $vr3, $vr7"); in raid6_datap_recov_lsx()
274 asm volatile("vst $vr0, %0" : "=m" (p[0])); in raid6_datap_recov_lsx()
275 asm volatile("vst $vr1, %0" : "=m" (p[16])); in raid6_datap_recov_lsx()
276 asm volatile("vst $vr2, %0" : "=m" (p[32])); in raid6_datap_recov_lsx()
277 asm volatile("vst $vr3, %0" : "=m" (p[48])); in raid6_datap_recov_lsx()
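
The raid6_datap_recov_lsx() matches cover the simpler case where one data block and P are lost: the data block is D = qmul(Q ^ Q') and P is then repaired as P ^= D. A scalar sketch under the same assumption of a hypothetical flat 256-entry qmul_tbl:

#include <stddef.h>

/* Scalar equivalent of the datap loop: recover one data block from Q,
 * then fix up P with it. */
static void recov_datap_sketch(size_t bytes, unsigned char *p,
			       unsigned char *q, unsigned char *dq,
			       const unsigned char *qmul_tbl)
{
	while (bytes--) {
		*dq = qmul_tbl[*q ^ *dq]; /* missing data block D = qmul(Q ^ Q') */
		*p ^= *dq;                /* repair P: P ^= D                    */
		p++; q++; dq++;
	}
}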
343 asm volatile("vld $vr20, %0" : : "m" (qmul[0])); in raid6_2data_recov_lasx()
344 asm volatile("vld $vr21, %0" : : "m" (qmul[16])); in raid6_2data_recov_lasx()
345 asm volatile("vld $vr22, %0" : : "m" (pbmul[0])); in raid6_2data_recov_lasx()
346 asm volatile("vld $vr23, %0" : : "m" (pbmul[16])); in raid6_2data_recov_lasx()
347 asm volatile("xvreplve0.q $xr20, $xr20"); in raid6_2data_recov_lasx()
348 asm volatile("xvreplve0.q $xr21, $xr21"); in raid6_2data_recov_lasx()
349 asm volatile("xvreplve0.q $xr22, $xr22"); in raid6_2data_recov_lasx()
350 asm volatile("xvreplve0.q $xr23, $xr23"); in raid6_2data_recov_lasx()
354 asm volatile("xvld $xr0, %0" : : "m" (q[0])); in raid6_2data_recov_lasx()
355 asm volatile("xvld $xr1, %0" : : "m" (q[32])); in raid6_2data_recov_lasx()
357 asm volatile("xvld $xr4, %0" : : "m" (dq[0])); in raid6_2data_recov_lasx()
358 asm volatile("xvld $xr5, %0" : : "m" (dq[32])); in raid6_2data_recov_lasx()
359 asm volatile("xvxor.v $xr0, $xr0, $xr4"); in raid6_2data_recov_lasx()
360 asm volatile("xvxor.v $xr1, $xr1, $xr5"); in raid6_2data_recov_lasx()
362 asm volatile("xvld $xr2, %0" : : "m" (p[0])); in raid6_2data_recov_lasx()
363 asm volatile("xvld $xr3, %0" : : "m" (p[32])); in raid6_2data_recov_lasx()
365 asm volatile("xvld $xr4, %0" : : "m" (dp[0])); in raid6_2data_recov_lasx()
366 asm volatile("xvld $xr5, %0" : : "m" (dp[32])); in raid6_2data_recov_lasx()
367 asm volatile("xvxor.v $xr2, $xr2, $xr4"); in raid6_2data_recov_lasx()
368 asm volatile("xvxor.v $xr3, $xr3, $xr5"); in raid6_2data_recov_lasx()
371 asm volatile("xvsrli.b $xr4, $xr0, 4"); in raid6_2data_recov_lasx()
372 asm volatile("xvsrli.b $xr5, $xr1, 4"); in raid6_2data_recov_lasx()
374 asm volatile("xvandi.b $xr0, $xr0, 0x0f"); in raid6_2data_recov_lasx()
375 asm volatile("xvandi.b $xr1, $xr1, 0x0f"); in raid6_2data_recov_lasx()
377 asm volatile("xvshuf.b $xr0, $xr20, $xr20, $xr0"); in raid6_2data_recov_lasx()
378 asm volatile("xvshuf.b $xr1, $xr20, $xr20, $xr1"); in raid6_2data_recov_lasx()
380 asm volatile("xvshuf.b $xr4, $xr21, $xr21, $xr4"); in raid6_2data_recov_lasx()
381 asm volatile("xvshuf.b $xr5, $xr21, $xr21, $xr5"); in raid6_2data_recov_lasx()
383 asm volatile("xvxor.v $xr6, $xr4, $xr0"); in raid6_2data_recov_lasx()
384 asm volatile("xvxor.v $xr7, $xr5, $xr1"); in raid6_2data_recov_lasx()
387 asm volatile("xvsrli.b $xr4, $xr2, 4"); in raid6_2data_recov_lasx()
388 asm volatile("xvsrli.b $xr5, $xr3, 4"); in raid6_2data_recov_lasx()
390 asm volatile("xvandi.b $xr0, $xr2, 0x0f"); in raid6_2data_recov_lasx()
391 asm volatile("xvandi.b $xr1, $xr3, 0x0f"); in raid6_2data_recov_lasx()
393 asm volatile("xvshuf.b $xr0, $xr22, $xr22, $xr0"); in raid6_2data_recov_lasx()
394 asm volatile("xvshuf.b $xr1, $xr22, $xr22, $xr1"); in raid6_2data_recov_lasx()
396 asm volatile("xvshuf.b $xr4, $xr23, $xr23, $xr4"); in raid6_2data_recov_lasx()
397 asm volatile("xvshuf.b $xr5, $xr23, $xr23, $xr5"); in raid6_2data_recov_lasx()
399 asm volatile("xvxor.v $xr0, $xr0, $xr4"); in raid6_2data_recov_lasx()
400 asm volatile("xvxor.v $xr1, $xr1, $xr5"); in raid6_2data_recov_lasx()
403 asm volatile("xvxor.v $xr0, $xr0, $xr6"); in raid6_2data_recov_lasx()
404 asm volatile("xvxor.v $xr1, $xr1, $xr7"); in raid6_2data_recov_lasx()
407 asm volatile("xvxor.v $xr2, $xr2, $xr0"); in raid6_2data_recov_lasx()
408 asm volatile("xvxor.v $xr3, $xr3, $xr1"); in raid6_2data_recov_lasx()
410 asm volatile("xvst $xr0, %0" : "=m" (dq[0])); in raid6_2data_recov_lasx()
411 asm volatile("xvst $xr1, %0" : "=m" (dq[32])); in raid6_2data_recov_lasx()
412 asm volatile("xvst $xr2, %0" : "=m" (dp[0])); in raid6_2data_recov_lasx()
413 asm volatile("xvst $xr3, %0" : "=m" (dp[32])); in raid6_2data_recov_lasx()
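
raid6_2data_recov_lasx() repeats the same computation on 32-byte registers; the 16-byte lookup tables are loaded with vld and then duplicated into both 128-bit lanes with xvreplve0.q, since xvshuf.b shuffles within each lane independently. Each vshuf.b/xvshuf.b pair implements a constant GF(256) multiply by splitting every byte into nibbles and looking each half up in a 16-entry table, roughly as sketched below (lo and hi are the two 16-byte halves the code loads from qmul[0]/pbmul[0] and qmul[16]/pbmul[16]).

/* Per-byte view of the nibble-split table lookup: multiply x by a fixed
 * GF(256) constant c using two 16-entry tables, where lo[i] = c*i and
 * hi[i] = c*(i << 4).  Sketch of the trick only. */
static unsigned char gf256_mul_by_const(unsigned char x,
					const unsigned char lo[16],
					const unsigned char hi[16])
{
	return lo[x & 0x0f] ^ hi[x >> 4];
}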
454 asm volatile("vld $vr22, %0" : : "m" (qmul[0])); in raid6_datap_recov_lasx()
455 asm volatile("xvreplve0.q $xr22, $xr22"); in raid6_datap_recov_lasx()
456 asm volatile("vld $vr23, %0" : : "m" (qmul[16])); in raid6_datap_recov_lasx()
457 asm volatile("xvreplve0.q $xr23, $xr23"); in raid6_datap_recov_lasx()
461 asm volatile("xvld $xr0, %0" : : "m" (p[0])); in raid6_datap_recov_lasx()
462 asm volatile("xvld $xr1, %0" : : "m" (p[32])); in raid6_datap_recov_lasx()
464 asm volatile("xvld $xr2, %0" : : "m" (dq[0])); in raid6_datap_recov_lasx()
465 asm volatile("xvld $xr3, %0" : : "m" (dq[32])); in raid6_datap_recov_lasx()
467 asm volatile("xvld $xr4, %0" : : "m" (q[0])); in raid6_datap_recov_lasx()
468 asm volatile("xvld $xr5, %0" : : "m" (q[32])); in raid6_datap_recov_lasx()
469 asm volatile("xvxor.v $xr2, $xr2, $xr4"); in raid6_datap_recov_lasx()
470 asm volatile("xvxor.v $xr3, $xr3, $xr5"); in raid6_datap_recov_lasx()
473 asm volatile("xvsrli.b $xr4, $xr2, 4"); in raid6_datap_recov_lasx()
474 asm volatile("xvsrli.b $xr5, $xr3, 4"); in raid6_datap_recov_lasx()
476 asm volatile("xvandi.b $xr2, $xr2, 0x0f"); in raid6_datap_recov_lasx()
477 asm volatile("xvandi.b $xr3, $xr3, 0x0f"); in raid6_datap_recov_lasx()
479 asm volatile("xvshuf.b $xr2, $xr22, $xr22, $xr2"); in raid6_datap_recov_lasx()
480 asm volatile("xvshuf.b $xr3, $xr22, $xr22, $xr3"); in raid6_datap_recov_lasx()
482 asm volatile("xvshuf.b $xr4, $xr23, $xr23, $xr4"); in raid6_datap_recov_lasx()
483 asm volatile("xvshuf.b $xr5, $xr23, $xr23, $xr5"); in raid6_datap_recov_lasx()
485 asm volatile("xvxor.v $xr2, $xr2, $xr4"); in raid6_datap_recov_lasx()
486 asm volatile("xvxor.v $xr3, $xr3, $xr5"); in raid6_datap_recov_lasx()
489 asm volatile("xvxor.v $xr0, $xr0, $xr2"); in raid6_datap_recov_lasx()
490 asm volatile("xvxor.v $xr1, $xr1, $xr3"); in raid6_datap_recov_lasx()
492 asm volatile("xvst $xr2, %0" : "=m" (dq[0])); in raid6_datap_recov_lasx()
493 asm volatile("xvst $xr3, %0" : "=m" (dq[32])); in raid6_datap_recov_lasx()
494 asm volatile("xvst $xr0, %0" : "=m" (p[0])); in raid6_datap_recov_lasx()
495 asm volatile("xvst $xr1, %0" : "=m" (p[32])); in raid6_datap_recov_lasx()
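
Not visible in the volatile matches is where qmul and pbmul point. In the kernel's other SIMD recovery routines they are selected from the generated GF(256) tables before the loops, and the paired loads of qmul[0]/qmul[16] above suggest the same 32-byte row layout here (16 low-nibble products followed by 16 high-nibble products). A hedged sketch of that setup; the exact expressions are an assumption carried over from lib/raid6/recov_ssse3.c, not taken from this file.

#include <linux/raid/pq.h>

/* Assumed table selection for the two-failed-data case. */
static void pick_recovery_tables(int faila, int failb,
				 const u8 **pbmul, const u8 **qmul)
{
	*pbmul = raid6_vgfmul[raid6_gfexi[failb - faila]];
	*qmul  = raid6_vgfmul[raid6_gfinv[raid6_gfexp[faila] ^
					  raid6_gfexp[failb]]];
}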