Lines matching full:s64 — identifier search hits across the raw atomic64 API (the raw_atomic64_*() operations); each hit carries its source line number and, where the tool could attribute it, the enclosing function.
2580 static __always_inline s64
2596 static __always_inline s64
2602 s64 ret; in raw_atomic64_read_acquire()
2618 * @i: s64 value to assign
2627 raw_atomic64_set(atomic64_t *v, s64 i) in raw_atomic64_set()
2635 * @i: s64 value to assign
2644 raw_atomic64_set_release(atomic64_t *v, s64 i) in raw_atomic64_set_release()
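
Where an architecture supplies no acquire/release forms, the bare `s64 ret;` at 2602 in raw_atomic64_read_acquire() is consistent with the kernel's generated fallback, which builds the ordering out of a plain access plus a fence (or a native acquire/release access when the word is single-copy atomic). A sketch of that pattern, assuming the generated atomic-arch-fallback scheme; exact code varies by kernel version:

    static __always_inline s64
    raw_atomic64_read_acquire(const atomic64_t *v)
    {
            s64 ret;

            if (__native_word(atomic64_t)) {
                    /* the 64-bit load is single-copy atomic: use an acquire load */
                    ret = smp_load_acquire(&(v)->counter);
            } else {
                    ret = raw_atomic64_read(v);
                    __atomic_acquire_fence();
            }

            return ret;
    }

    static __always_inline void
    raw_atomic64_set_release(atomic64_t *v, s64 i)
    {
            if (__native_word(atomic64_t)) {
                    smp_store_release(&(v)->counter, i);
            } else {
                    __atomic_release_fence();
                    raw_atomic64_set(v, i);
            }
    }
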
2660 * @i: s64 value to add
2670 raw_atomic64_add(s64 i, atomic64_t *v) in raw_atomic64_add()
2677 * @i: s64 value to add
2686 static __always_inline s64
2687 raw_atomic64_add_return(s64 i, atomic64_t *v) in raw_atomic64_add_return()
2692 s64 ret; in raw_atomic64_add_return()
2704 * @i: s64 value to add
2713 static __always_inline s64
2714 raw_atomic64_add_return_acquire(s64 i, atomic64_t *v) in raw_atomic64_add_return_acquire()
2719 s64 ret = arch_atomic64_add_return_relaxed(i, v); in raw_atomic64_add_return_acquire()
2731 * @i: s64 value to add
2740 static __always_inline s64
2741 raw_atomic64_add_return_release(s64 i, atomic64_t *v) in raw_atomic64_add_return_release()
2757 * @i: s64 value to add
2766 static __always_inline s64
2767 raw_atomic64_add_return_relaxed(s64 i, atomic64_t *v) in raw_atomic64_add_return_relaxed()
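
The four ordering variants of each return-value op are built mechanically: _relaxed is the architecture primitive, _acquire is the relaxed op followed by an acquire fence (exactly the shape of `s64 ret = arch_atomic64_add_return_relaxed(i, v);` at 2719), _release is a release fence followed by the relaxed op, and the fully ordered default brackets the relaxed op with full fences. A sketch of two of the generated fallbacks; details vary by kernel version:

    static __always_inline s64
    raw_atomic64_add_return_acquire(s64 i, atomic64_t *v)
    {
            s64 ret = arch_atomic64_add_return_relaxed(i, v);
            __atomic_acquire_fence();
            return ret;
    }

    static __always_inline s64
    raw_atomic64_add_return(s64 i, atomic64_t *v)
    {
            s64 ret;

            __atomic_pre_full_fence();
            ret = arch_atomic64_add_return_relaxed(i, v);
            __atomic_post_full_fence();
            return ret;
    }
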
2780 * @i: s64 value to add
2789 static __always_inline s64
2790 raw_atomic64_fetch_add(s64 i, atomic64_t *v) in raw_atomic64_fetch_add()
2795 s64 ret; in raw_atomic64_fetch_add()
2807 * @i: s64 value to add
2816 static __always_inline s64
2817 raw_atomic64_fetch_add_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_add_acquire()
2822 s64 ret = arch_atomic64_fetch_add_relaxed(i, v); in raw_atomic64_fetch_add_acquire()
2834 * @i: s64 value to add
2843 static __always_inline s64
2844 raw_atomic64_fetch_add_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_add_release()
2860 * @i: s64 value to add
2869 static __always_inline s64
2870 raw_atomic64_fetch_add_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_add_relaxed()
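
raw_atomic64_fetch_add() returns the counter's value from before the addition, while raw_atomic64_add_return() returns the value after it, so add_return(i, v) == fetch_add(i, v) + i. A minimal hypothetical use, where the old value serves as a unique ticket (take_ticket() and next_ticket are illustrative, not from the source):

    static atomic64_t next_ticket = ATOMIC64_INIT(0);

    static s64 take_ticket(void)
    {
            /* the old value is our ticket; the counter now names the next one */
            return raw_atomic64_fetch_add(1, &next_ticket);
    }
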
2883 * @i: s64 value to subtract
2893 raw_atomic64_sub(s64 i, atomic64_t *v) in raw_atomic64_sub()
2900 * @i: s64 value to subtract
2909 static __always_inline s64
2910 raw_atomic64_sub_return(s64 i, atomic64_t *v) in raw_atomic64_sub_return()
2915 s64 ret; in raw_atomic64_sub_return()
2927 * @i: s64 value to subtract
2936 static __always_inline s64
2937 raw_atomic64_sub_return_acquire(s64 i, atomic64_t *v) in raw_atomic64_sub_return_acquire()
2942 s64 ret = arch_atomic64_sub_return_relaxed(i, v); in raw_atomic64_sub_return_acquire()
2954 * @i: s64 value to subtract
2963 static __always_inline s64
2964 raw_atomic64_sub_return_release(s64 i, atomic64_t *v) in raw_atomic64_sub_return_release()
2980 * @i: s64 value to subtract
2989 static __always_inline s64
2990 raw_atomic64_sub_return_relaxed(s64 i, atomic64_t *v) in raw_atomic64_sub_return_relaxed()
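
raw_atomic64_sub_return() suits call sites where the post-subtraction value drives a decision, as in this hypothetical reference-drop sketch (struct obj, obj_put() and free_obj() are made up; real kernel code would normally use refcount_t):

    struct obj {
            atomic64_t refs;
            /* ... */
    };

    static void obj_put(struct obj *o)
    {
            /* sub_return() is fully ordered, which is sufficient here */
            if (raw_atomic64_sub_return(1, &o->refs) == 0)
                    free_obj(o);    /* we dropped the last reference */
    }
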
3003 * @i: s64 value to subtract
3012 static __always_inline s64
3013 raw_atomic64_fetch_sub(s64 i, atomic64_t *v) in raw_atomic64_fetch_sub()
3018 s64 ret; in raw_atomic64_fetch_sub()
3030 * @i: s64 value to subtract
3039 static __always_inline s64
3040 raw_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_sub_acquire()
3045 s64 ret = arch_atomic64_fetch_sub_relaxed(i, v); in raw_atomic64_fetch_sub_acquire()
3057 * @i: s64 value to subtract
3066 static __always_inline s64
3067 raw_atomic64_fetch_sub_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_sub_release()
3083 * @i: s64 value to subtract
3092 static __always_inline s64
3093 raw_atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_sub_relaxed()
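
Because fetch_sub also hands back the pre-subtraction value, it can implement optimistic charging against a shared budget. A simplified hypothetical sketch (budget and charge() are illustrative; concurrent overdraws can transiently drive the counter negative, which real code would have to tolerate):

    static atomic64_t budget = ATOMIC64_INIT(1024);

    static bool charge(s64 n)
    {
            /* the old value tells us whether the full amount was available */
            if (raw_atomic64_fetch_sub(n, &budget) >= n)
                    return true;

            raw_atomic64_add(n, &budget);   /* overdrew: refund and fail */
            return false;
    }
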
3134 static __always_inline s64
3140 s64 ret; in raw_atomic64_inc_return()
3160 static __always_inline s64
3166 s64 ret = arch_atomic64_inc_return_relaxed(v); in raw_atomic64_inc_return_acquire()
3186 static __always_inline s64
3211 static __always_inline s64
3233 static __always_inline s64
3239 s64 ret; in raw_atomic64_fetch_inc()
3259 static __always_inline s64
3265 s64 ret = arch_atomic64_fetch_inc_relaxed(v); in raw_atomic64_fetch_inc_acquire()
3285 static __always_inline s64
3310 static __always_inline s64
3352 static __always_inline s64
3358 s64 ret; in raw_atomic64_dec_return()
3378 static __always_inline s64
3384 s64 ret = arch_atomic64_dec_return_relaxed(v); in raw_atomic64_dec_return_acquire()
3404 static __always_inline s64
3429 static __always_inline s64
3451 static __always_inline s64
3457 s64 ret; in raw_atomic64_fetch_dec()
3477 static __always_inline s64
3483 s64 ret = arch_atomic64_fetch_dec_relaxed(v); in raw_atomic64_fetch_dec_acquire()
3503 static __always_inline s64
3528 static __always_inline s64
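
The inc/dec matches above show only `s64 ret` declarations and no `@i:` kernel-doc lines because these operations take no increment argument; on architectures without dedicated primitives, the generated fallbacks are assumed to delegate to the add/sub family with a constant 1, roughly:

    static __always_inline s64
    raw_atomic64_inc_return(atomic64_t *v)
    {
            return raw_atomic64_add_return(1, v);
    }

    static __always_inline s64
    raw_atomic64_fetch_dec(atomic64_t *v)
    {
            return raw_atomic64_fetch_sub(1, v);
    }
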
3542 * @i: s64 value
3552 raw_atomic64_and(s64 i, atomic64_t *v) in raw_atomic64_and()
3559 * @i: s64 value
3568 static __always_inline s64
3569 raw_atomic64_fetch_and(s64 i, atomic64_t *v) in raw_atomic64_fetch_and()
3574 s64 ret; in raw_atomic64_fetch_and()
3586 * @i: s64 value
3595 static __always_inline s64
3596 raw_atomic64_fetch_and_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_and_acquire()
3601 s64 ret = arch_atomic64_fetch_and_relaxed(i, v); in raw_atomic64_fetch_and_acquire()
3613 * @i: s64 value
3622 static __always_inline s64
3623 raw_atomic64_fetch_and_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_and_release()
3639 * @i: s64 value
3648 static __always_inline s64
3649 raw_atomic64_fetch_and_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_and_relaxed()
3662 * @i: s64 value
3672 raw_atomic64_andnot(s64 i, atomic64_t *v) in raw_atomic64_andnot()
3683 * @i: s64 value
3692 static __always_inline s64
3693 raw_atomic64_fetch_andnot(s64 i, atomic64_t *v) in raw_atomic64_fetch_andnot()
3698 s64 ret; in raw_atomic64_fetch_andnot()
3710 * @i: s64 value
3719 static __always_inline s64
3720 raw_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_andnot_acquire()
3725 s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v); in raw_atomic64_fetch_andnot_acquire()
3737 * @i: s64 value
3746 static __always_inline s64
3747 raw_atomic64_fetch_andnot_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_andnot_release()
3763 * @i: s64 value
3772 static __always_inline s64
3773 raw_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_andnot_relaxed()
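
raw_atomic64_andnot() atomically clears the bits of @i in *v (*v &= ~@i). Where an architecture lacks an andnot primitive, the fallback is the and of the complement, sketched here:

    static __always_inline s64
    raw_atomic64_fetch_andnot(s64 i, atomic64_t *v)
    {
            /* clear the bits in i by and-ing with their complement */
            return raw_atomic64_fetch_and(~i, v);
    }
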
3786 * @i: s64 value
3796 raw_atomic64_or(s64 i, atomic64_t *v) in raw_atomic64_or()
3803 * @i: s64 value
3812 static __always_inline s64
3813 raw_atomic64_fetch_or(s64 i, atomic64_t *v) in raw_atomic64_fetch_or()
3818 s64 ret; in raw_atomic64_fetch_or()
3830 * @i: s64 value
3839 static __always_inline s64
3840 raw_atomic64_fetch_or_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_or_acquire()
3845 s64 ret = arch_atomic64_fetch_or_relaxed(i, v); in raw_atomic64_fetch_or_acquire()
3857 * @i: s64 value
3866 static __always_inline s64
3867 raw_atomic64_fetch_or_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_or_release()
3883 * @i: s64 value
3892 static __always_inline s64
3893 raw_atomic64_fetch_or_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_or_relaxed()
3906 * @i: s64 value
3916 raw_atomic64_xor(s64 i, atomic64_t *v) in raw_atomic64_xor()
3923 * @i: s64 value
3932 static __always_inline s64
3933 raw_atomic64_fetch_xor(s64 i, atomic64_t *v) in raw_atomic64_fetch_xor()
3938 s64 ret; in raw_atomic64_fetch_xor()
3950 * @i: s64 value
3959 static __always_inline s64
3960 raw_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) in raw_atomic64_fetch_xor_acquire()
3965 s64 ret = arch_atomic64_fetch_xor_relaxed(i, v); in raw_atomic64_fetch_xor_acquire()
3977 * @i: s64 value
3986 static __always_inline s64
3987 raw_atomic64_fetch_xor_release(s64 i, atomic64_t *v) in raw_atomic64_fetch_xor_release()
4003 * @i: s64 value
4012 static __always_inline s64
4013 raw_atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v) in raw_atomic64_fetch_xor_relaxed()
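
Taken together, the and/or/xor families give atomic clear/set/toggle over a 64-bit flag word, and the fetch_ forms return the prior mask so a caller can tell whether it performed the transition. A hypothetical busy-flag sketch (FLAG_BUSY, flags, and the helpers are illustrative):

    #define FLAG_BUSY BIT_ULL(0)            /* hypothetical flag bit */

    static atomic64_t flags = ATOMIC64_INIT(0);

    static bool try_mark_busy(void)
    {
            /* the prior mask tells us whether we made the 0 -> 1 transition */
            return !(raw_atomic64_fetch_or(FLAG_BUSY, &flags) & FLAG_BUSY);
    }

    static void toggle_busy(void)
    {
            raw_atomic64_xor(FLAG_BUSY, &flags);
    }

    static void clear_busy(void)
    {
            raw_atomic64_and(~FLAG_BUSY, &flags);
    }
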
4027 * @new: s64 value to assign
4035 static __always_inline s64
4036 raw_atomic64_xchg(atomic64_t *v, s64 new) in raw_atomic64_xchg()
4041 s64 ret; in raw_atomic64_xchg()
4054 * @new: s64 value to assign
4062 static __always_inline s64
4063 raw_atomic64_xchg_acquire(atomic64_t *v, s64 new) in raw_atomic64_xchg_acquire()
4068 s64 ret = arch_atomic64_xchg_relaxed(v, new); in raw_atomic64_xchg_acquire()
4081 * @new: s64 value to assign
4089 static __always_inline s64
4090 raw_atomic64_xchg_release(atomic64_t *v, s64 new) in raw_atomic64_xchg_release()
4107 * @new: s64 value to assign
4115 static __always_inline s64
4116 raw_atomic64_xchg_relaxed(atomic64_t *v, s64 new) in raw_atomic64_xchg_relaxed()
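
raw_atomic64_xchg() unconditionally installs @new and returns whatever was stored before, which makes an atomic read-and-reset a one-liner; a hypothetical statistics-snapshot helper (read_and_reset() is illustrative):

    static s64 read_and_reset(atomic64_t *stat)
    {
            /* return the accumulated count and restart it at zero, atomically */
            return raw_atomic64_xchg(stat, 0);
    }
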
4130 * @old: s64 value to compare with
4131 * @new: s64 value to assign
4140 static __always_inline s64
4141 raw_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) in raw_atomic64_cmpxchg()
4146 s64 ret; in raw_atomic64_cmpxchg()
4159 * @old: s64 value to compare with
4160 * @new: s64 value to assign
4169 static __always_inline s64
4170 raw_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) in raw_atomic64_cmpxchg_acquire()
4175 s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new); in raw_atomic64_cmpxchg_acquire()
4188 * @old: s64 value to compare with
4189 * @new: s64 value to assign
4198 static __always_inline s64
4199 raw_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) in raw_atomic64_cmpxchg_release()
4216 * @old: s64 value to compare with
4217 * @new: s64 value to assign
4226 static __always_inline s64
4227 raw_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new) in raw_atomic64_cmpxchg_relaxed()
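
raw_atomic64_cmpxchg() returns the value it found in @v, which equals @old exactly when the swap took place, so update loops retry with the value they saw. A sketch of an atomic 64-bit maximum built this way (atomic64_max_cmpxchg() is an illustrative helper, not a kernel API):

    static void atomic64_max_cmpxchg(atomic64_t *v, s64 i)
    {
            s64 cur = raw_atomic64_read(v);

            while (cur < i) {
                    s64 seen = raw_atomic64_cmpxchg(v, cur, i);

                    if (seen == cur)
                            break;  /* the swap happened */
                    cur = seen;     /* lost the race; retry with what we saw */
            }
    }
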
4241 * @old: pointer to s64 value to compare with
4242 * @new: s64 value to assign
4253 raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new) in raw_atomic64_try_cmpxchg()
4264 s64 r, o = *old; in raw_atomic64_try_cmpxchg()
4275 * @old: pointer to s64 value to compare with
4276 * @new: s64 value to assign
4287 raw_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new) in raw_atomic64_try_cmpxchg_acquire()
4298 s64 r, o = *old; in raw_atomic64_try_cmpxchg_acquire()
4309 * @old: pointer to s64 value to compare with
4310 * @new: s64 value to assign
4321 raw_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new) in raw_atomic64_try_cmpxchg_release()
4331 s64 r, o = *old; in raw_atomic64_try_cmpxchg_release()
4342 * @old: pointer to s64 value to compare with
4343 * @new: s64 value to assign
4354 raw_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new) in raw_atomic64_try_cmpxchg_relaxed()
4361 s64 r, o = *old; in raw_atomic64_try_cmpxchg_relaxed()
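
The `s64 r, o = *old;` fragments above are the try_cmpxchg fallback: it runs a plain cmpxchg and, on failure, writes the observed value back through @old, returning true only on success. That makes retry loops tighter than with cmpxchg, since the reload comes for free. A sketch of both, consistent with the fragments (exact generated code may differ):

    static __always_inline bool
    raw_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
    {
            s64 r, o = *old;

            r = raw_atomic64_cmpxchg(v, o, new);
            if (unlikely(r != o))
                    *old = r;       /* tell the caller what we actually saw */
            return likely(r == o);
    }

    /* the max loop from the previous sketch, rewritten with try_cmpxchg */
    static void atomic64_max(atomic64_t *v, s64 i)
    {
            s64 cur = raw_atomic64_read(v);

            while (cur < i && !raw_atomic64_try_cmpxchg(v, &cur, i))
                    ;       /* cur was refreshed by the failed attempt */
    }
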
4371 * @i: s64 value to subtract
4381 raw_atomic64_sub_and_test(s64 i, atomic64_t *v) in raw_atomic64_sub_and_test()
4432 * @i: s64 value to add
4442 raw_atomic64_add_negative(s64 i, atomic64_t *v) in raw_atomic64_add_negative()
4459 * @i: s64 value to add
4469 raw_atomic64_add_negative_acquire(s64 i, atomic64_t *v) in raw_atomic64_add_negative_acquire()
4486 * @i: s64 value to add
4496 raw_atomic64_add_negative_release(s64 i, atomic64_t *v) in raw_atomic64_add_negative_release()
4512 * @i: s64 value to add
4522 raw_atomic64_add_negative_relaxed(s64 i, atomic64_t *v) in raw_atomic64_add_negative_relaxed()
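
The boolean forms fold the arithmetic and the test into one atomic, fully ordered step: sub_and_test() reports whether the result reached zero, add_negative() whether it went below it. Their generic fallbacks are assumed to be thin wrappers over the _return ops:

    static __always_inline bool
    raw_atomic64_sub_and_test(s64 i, atomic64_t *v)
    {
            return raw_atomic64_sub_return(i, v) == 0;
    }

    static __always_inline bool
    raw_atomic64_add_negative(s64 i, atomic64_t *v)
    {
            return raw_atomic64_add_return(i, v) < 0;
    }
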
4536 * @a: s64 value to add
4537 * @u: s64 value to compare with
4546 static __always_inline s64
4547 raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u) in raw_atomic64_fetch_add_unless()
4552 s64 c = raw_atomic64_read(v); in raw_atomic64_fetch_add_unless()
4566 * @a: s64 value to add
4567 * @u: s64 value to compare with
4577 raw_atomic64_add_unless(atomic64_t *v, s64 a, s64 u) in raw_atomic64_add_unless()
4624 s64 c = raw_atomic64_read(v); in raw_atomic64_inc_unless_negative()
4652 s64 c = raw_atomic64_read(v); in raw_atomic64_dec_unless_positive()
4674 static __always_inline s64
4680 s64 dec, c = raw_atomic64_read(v); in raw_atomic64_dec_if_positive()
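
All of the conditional operations share the shape visible in the final fragments (`s64 c = raw_atomic64_read(v);`, `s64 dec, c = ...`): read once, test the guard, and retry through try_cmpxchg, which refreshes the expected value on failure. A sketch of two of those fallbacks, consistent with the fragments above; exact generated code may differ by kernel version:

    static __always_inline s64
    raw_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
    {
            s64 c = raw_atomic64_read(v);

            do {
                    if (unlikely(c == u))
                            break;          /* hit the excluded value: don't add */
            } while (!raw_atomic64_try_cmpxchg(v, &c, c + a));

            return c;                       /* old value either way */
    }

    static __always_inline s64
    raw_atomic64_dec_if_positive(atomic64_t *v)
    {
            s64 dec, c = raw_atomic64_read(v);

            do {
                    dec = c - 1;
                    if (unlikely(dec < 0))
                            break;          /* would go negative: leave v alone */
            } while (!raw_atomic64_try_cmpxchg(v, &c, dec));

            return dec;                     /* (old value - 1), updated or not */
    }
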