#ifndef _ASM_X86_REFCOUNT_H
#define _ASM_X86_REFCOUNT_H
#ifndef CONFIG_PAX_REFCOUNT
#error PAX_REFCOUNT must be enabled!
#endif
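
/*
 * x86 refcount_t operations, hardened in the PaX REFCOUNT style: each
 * unprotected arithmetic op is followed by __PAX_REFCOUNT(), which
 * traps to the refcount exception handler on signed overflow or
 * underflow, turning a wrapped counter into a clean oops instead of a
 * use-after-free.
 *
 * Assumption: this header is pulled in from <linux/refcount.h>, which
 * supplies refcount_t and the atomic_*() helpers used below;
 * LOCK_PREFIX, __PAX_REFCOUNT and the GEN_*_RMWcc_refcount() macros
 * come from the arch atomic headers included there.
 */
#include <linux/refcount.h>
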
/*
 * Add @i to @r unless @r is zero.  Returns true on success; the value
 * observed before the update is reported through @oldp when non-NULL.
 * The checked asm computes the new value and traps on signed overflow;
 * the cmpxchg loop then publishes that checked result.
 */
static __always_inline __must_check bool __refcount_add_not_zero(unsigned int i, refcount_t *r, int *oldp)
{
	atomic_t *v = &r->refs;
	int new, old = atomic_read(v);

	do {
		if (!old)
			break;

		/* new = old + i, trapping on signed overflow */
		asm volatile("addl %2,%0\n\t"
			     __PAX_REFCOUNT(overflow, 32, s)
			     : "=r" (new)
			     : "0" (old), "ir" (i),
			       [counter] "m" (v->counter)
			     : "cc", "cx");
	} while (!atomic_try_cmpxchg_relaxed(v, &old, new));

	if (oldp)
		*oldp = old;

	return old;
}

static __always_inline __must_check bool refcount_add_not_zero(unsigned int i, refcount_t *r)
{
	return __refcount_add_not_zero(i, r, NULL);
}

static __always_inline void refcount_add(unsigned int i, refcount_t *r)
{
	asm volatile(LOCK_PREFIX "addl %1,%0\n\t"
		     __PAX_REFCOUNT(overflow, 32, s)
		     : [counter] "+m" (r->refs.counter)
		     : "ir" (i)
		     : "cc", "cx");
}

static __always_inline __must_check bool __refcount_inc_not_zero(refcount_t *r, int *oldp)
{
	return __refcount_add_not_zero(1, r, oldp);
}

static __always_inline __must_check bool refcount_inc_not_zero(refcount_t *r)
{
	return __refcount_inc_not_zero(r, NULL);
}

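/*
 * Usage sketch (illustrative only: struct obj, obj_lookup() and the
 * obj->ref member are hypothetical).  refcount_inc_not_zero() is the
 * "take a reference only if the object is still live" primitive,
 * typically used under RCU or a lookup lock:
 *
 *	struct obj *obj_get(unsigned long id)
 *	{
 *		struct obj *obj;
 *
 *		rcu_read_lock();
 *		obj = obj_lookup(id);
 *		if (obj && !refcount_inc_not_zero(&obj->ref))
 *			obj = NULL;	(raced with the final put)
 *		rcu_read_unlock();
 *
 *		return obj;
 *	}
 */
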
/* Generic (unchecked) increment that also reports the pre-inc value. */
static __always_inline void __refcount_inc(refcount_t *r, int *oldp)
{
	int old = atomic_fetch_add_relaxed(1, &r->refs);

	if (oldp)
		*oldp = old;
}

static __always_inline void refcount_inc(refcount_t *r)
{
	asm volatile(LOCK_PREFIX "incl %0\n\t"
		     __PAX_REFCOUNT(overflow, 32, s)
		     : [counter] "+m" (r->refs.counter)
		     : : "cc", "cx");
}

static __always_inline __must_check bool refcount_sub_and_test(unsigned int i, refcount_t *r)
{
	bool ret = GEN_BINARY_RMWcc_refcount(LOCK_PREFIX "subl",
					     r->refs.counter, underflow,
					     32, e, "er", i, "cx");

	if (ret) {
		/* Order the caller's free against all prior accesses. */
		smp_acquire__after_ctrl_dep();
		return true;
	}

	return false;
}

static __always_inline void refcount_sub(unsigned int i, refcount_t *r)
{
	asm volatile(LOCK_PREFIX "subl %1,%0\n\t"
		     __PAX_REFCOUNT(underflow, 32, s)
		     : [counter] "+m" (r->refs.counter)
		     : "ir" (i)
		     : "cc", "cx");
}

static __always_inline __must_check bool __refcount_dec_and_test(refcount_t *r, int *oldp)
{
	int old = atomic_fetch_sub_release(1, &r->refs);

	if (oldp)
		*oldp = old;

	if (old == 1) {
		smp_acquire__after_ctrl_dep();
		return true;
	}

	return false;
}

static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
{
	bool ret = GEN_UNARY_RMWcc_refcount(LOCK_PREFIX "decl",
					    r->refs.counter, underflow,
					    32, e, "cx");

	if (ret) {
		smp_acquire__after_ctrl_dep();
		return true;
	}

	return false;
}

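/*
 * Usage sketch (illustrative only: struct obj and obj_free() are
 * hypothetical).  refcount_dec_and_test() is the canonical put side;
 * the acquire barrier above ensures every prior access to the object
 * is ordered before the free:
 *
 *	void obj_put(struct obj *obj)
 *	{
 *		if (refcount_dec_and_test(&obj->ref))
 *			obj_free(obj);
 *	}
 */
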
static __always_inline void __refcount_dec(refcount_t *r, int *oldp)
{
	int old = atomic_fetch_sub_release(1, &r->refs);

	if (oldp)
		*oldp = old;
}

static __always_inline void refcount_dec(refcount_t *r)
{
	asm volatile(LOCK_PREFIX "decl %0\n\t"
		     __PAX_REFCOUNT(underflow, 32, s)
		     : [counter] "+m" (r->refs.counter)
		     : : "cc", "cx");
}

/* Drop the reference only if it is the last one (1 -> 0 transition). */
static __always_inline __must_check bool refcount_dec_if_one(refcount_t *r)
{
	int val = 1;

	return atomic_try_cmpxchg_release(&r->refs, &val, 0);
}

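/*
 * Usage sketch (illustrative only: obj_free() and
 * obj_schedule_teardown() are hypothetical).  refcount_dec_if_one()
 * lets a caller try to drop the last reference without blocking, e.g.
 * a teardown path that defers destruction when other references are
 * still outstanding:
 *
 *	if (refcount_dec_if_one(&obj->ref))
 *		obj_free(obj);
 *	else
 *		obj_schedule_teardown(obj);
 */
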
/*
 * Decrement @r unless that would drop the count to zero.  Used by the
 * lock-then-put helpers, which must not release the last reference
 * without the lock held.
 */
static __always_inline __must_check bool refcount_dec_not_one(refcount_t *r)
{
	atomic_t *v = &r->refs;
	int new, val = atomic_read(v);

	do {
		if (unlikely(val == 1))
			return false;

		/* new = val - 1, trapping on signed underflow */
		asm volatile("subl %2,%0\n\t"
			     __PAX_REFCOUNT(underflow, 32, s)
			     : "=r" (new)
			     : "0" (val), "ir" (1),
			       [counter] "m" (v->counter)
			     : "cc", "cx");
	} while (!atomic_try_cmpxchg_release(v, &val, new));

	return true;
}

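/*
 * Usage sketch: refcount_dec_not_one() is the lockless fast path of
 * the lock-then-put helpers (compare refcount_dec_and_mutex_lock() in
 * lib/refcount.c); the lock is only taken when we might be releasing
 * the last reference.  obj_put_locked() below is hypothetical:
 *
 *	bool obj_put_locked(refcount_t *r, struct mutex *lock)
 *	{
 *		if (refcount_dec_not_one(r))
 *			return false;	(not the last reference)
 *
 *		mutex_lock(lock);
 *		if (!refcount_dec_and_test(r)) {
 *			mutex_unlock(lock);
 *			return false;
 *		}
 *
 *		return true;	(caller frees, then unlocks)
 *	}
 */
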
#endif /* _ASM_X86_REFCOUNT_H */