Lines matching +full:1 +full:v: search hits for the terms "1" and "v" in arch/arm/include/asm/atomic.h, each entry prefixed with its line number in that header.

25 #define arch_atomic_read(v)	READ_ONCE((v)->counter)
26 #define arch_atomic_set(v,i) WRITE_ONCE(((v)->counter), (i))
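
arch_atomic_read() and arch_atomic_set() are plain loads and stores, not read-modify-write operations. A minimal usage sketch (illustrative only; nr_users and snapshot_and_clear are made-up names, and callers normally go through the atomic_read()/atomic_set() wrappers):

static atomic_t nr_users = ATOMIC_INIT(0);

static int snapshot_and_clear(void)
{
	int old = arch_atomic_read(&nr_users);	/* single volatile load of ->counter */

	arch_atomic_set(&nr_users, 0);		/* single volatile store, no ldrex/strex */
	return old;				/* the load+store pair is NOT atomic as a whole;
						   an xchg would be needed for that */
}
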
37 static inline void arch_atomic_##op(int i, atomic_t *v) \
42 prefetchw(&v->counter); \
44 "1: ldrex %0, [%3]\n" \
46 " strex %1, %0, [%3]\n" \
47 " teq %1, #0\n" \
48 " bne 1b" \
49 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
50 : "r" (&v->counter), "Ir" (i) \
55 static inline int arch_atomic_##op##_return_relaxed(int i, atomic_t *v) \
60 prefetchw(&v->counter); \
63 "1: ldrex %0, [%3]\n" \
65 " strex %1, %0, [%3]\n" \
66 " teq %1, #0\n" \
67 " bne 1b" \
68 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
69 : "r" (&v->counter), "Ir" (i) \
76 static inline int arch_atomic_fetch_##op##_relaxed(int i, atomic_t *v) \
81 prefetchw(&v->counter); \
84 "1: ldrex %0, [%4]\n" \
85 " " #asm_op " %1, %0, %5\n" \
86 " strex %2, %1, [%4]\n" \
88 " bne 1b" \
89 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
90 : "r" (&v->counter), "Ir" (i) \
115 "ldrex %1, [%3]\n" in arch_atomic_cmpxchg_relaxed()
117 "teq %1, %4\n" in arch_atomic_cmpxchg_relaxed()
128 static inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
134 prefetchw(&v->counter);
137 "1: ldrex %0, [%4]\n"
140 " add %1, %0, %6\n"
141 " strex %2, %1, [%4]\n"
143 " bne 1b\n"
145 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter)
146 : "r" (&v->counter), "r" (u), "r" (a)
163 static inline void arch_atomic_##op(int i, atomic_t *v) \
168 v->counter c_op i; \
173 static inline int arch_atomic_##op##_return(int i, atomic_t *v) \
179 v->counter c_op i; \
180 val = v->counter; \
187 static inline int arch_atomic_fetch_##op(int i, atomic_t *v) \
193 val = v->counter; \
194 v->counter c_op i; \
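
Lines 163-194 come from the fallbacks used on CPUs without ldrex/strex (the pre-ARMv6 parts, where SMP is not supported): the operation is written in plain C and, in the surrounding lines that did not match the query, is bracketed by raw_local_irq_save()/raw_local_irq_restore(). Expanded for an add, the returning variant reads roughly:

static inline int arch_atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int val;

	raw_local_irq_save(flags);	/* on a uniprocessor, masking interrupts suffices */
	v->counter += i;		/* line 179: the c_op supplied to the macro       */
	val = v->counter;		/* line 180                                       */
	raw_local_irq_restore(flags);

	return val;
}
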
210 static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
216 ret = v->counter;
218 v->counter = new;
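
The pre-ARMv6 arch_atomic_cmpxchg() at line 210 follows the same interrupts-off pattern; its logic, with a made-up name to mark it as a sketch:

static inline int arch_atomic_cmpxchg_sketch(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	raw_local_irq_save(flags);
	ret = v->counter;		/* line 216 */
	if (ret == old)
		v->counter = new;	/* line 218: store only when the value matched */
	raw_local_irq_restore(flags);

	return ret;			/* caller compares ret with old to learn whether the swap happened */
}
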
260 static inline s64 arch_atomic64_read(const atomic64_t *v)
265 " ldrd %0, %H0, [%1]"
267 : "r" (&v->counter), "Qo" (v->counter)
273 static inline void arch_atomic64_set(atomic64_t *v, s64 i)
276 " strd %2, %H2, [%1]"
277 : "=Qo" (v->counter)
278 : "r" (&v->counter), "r" (i)
282 static inline s64 arch_atomic64_read(const atomic64_t *v)
287 " ldrexd %0, %H0, [%1]"
289 : "r" (&v->counter), "Qo" (v->counter)
295 static inline void arch_atomic64_set(atomic64_t *v, s64 i)
299 prefetchw(&v->counter);
301 "1: ldrexd %0, %H0, [%2]\n"
304 " bne 1b"
305 : "=&r" (tmp), "=Qo" (v->counter)
306 : "r" (&v->counter), "r" (i)
312 static inline void arch_atomic64_##op(s64 i, atomic64_t *v) \
317 prefetchw(&v->counter); \
319 "1: ldrexd %0, %H0, [%3]\n" \
322 " strexd %1, %0, %H0, [%3]\n" \
323 " teq %1, #0\n" \
324 " bne 1b" \
325 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
326 : "r" (&v->counter), "r" (i) \
332 arch_atomic64_##op##_return_relaxed(s64 i, atomic64_t *v) \
337 prefetchw(&v->counter); \
340 "1: ldrexd %0, %H0, [%3]\n" \
343 " strexd %1, %0, %H0, [%3]\n" \
344 " teq %1, #0\n" \
345 " bne 1b" \
346 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter) \
347 : "r" (&v->counter), "r" (i) \
355 arch_atomic64_fetch_##op##_relaxed(s64 i, atomic64_t *v) \
360 prefetchw(&v->counter); \
363 "1: ldrexd %0, %H0, [%4]\n" \
366 " strexd %2, %1, %H1, [%4]\n" \
368 " bne 1b" \
369 : "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter) \
370 : "r" (&v->counter), "r" (i) \
420 "ldrexd %1, %H1, [%3]\n" in ATOMIC64_OPS()
422 "teq %1, %4\n" in ATOMIC64_OPS()
442 "1: ldrexd %0, %H0, [%3]\n" in arch_atomic64_xchg_relaxed()
443 " strexd %1, %4, %H4, [%3]\n" in arch_atomic64_xchg_relaxed()
444 " teq %1, #0\n" in arch_atomic64_xchg_relaxed()
445 " bne 1b" in arch_atomic64_xchg_relaxed()
454 static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
460 prefetchw(&v->counter);
463 "1: ldrexd %0, %H0, [%3]\n"
464 " subs %Q0, %Q0, #1\n"
468 " strexd %1, %0, %H0, [%3]\n"
469 " teq %1, #0\n"
470 " bne 1b\n"
472 : "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)
473 : "r" (&v->counter)
482 static inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
488 prefetchw(&v->counter);
491 "1: ldrexd %0, %H0, [%4]\n"
497 " strexd %2, %1, %H1, [%4]\n"
499 " bne 1b\n"
501 : "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter)
502 : "r" (&v->counter), "r" (u), "r" (a)