mirror of https://gitee.com/openkylin/linux.git
arm64: asm: Kill 'asm/atomic_arch.h'
The contents of 'asm/atomic_arch.h' can be split across some of our other 'asm/' headers. Remove it.

Reviewed-by: Andrew Murray <andrew.murray@arm.com>
Signed-off-by: Will Deacon <will@kernel.org>
parent 0ca98b2456
commit 0533f97b43
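For orientation before the diff: every wrapper below funnels through the __lse_ll_sc_body() dispatcher, which calls the LSE (Large System Extensions) implementation when the CPU and kernel support it and the LL/SC (load-linked/store-conditional) implementation otherwise. A minimal user-space sketch of that pattern, assuming hypothetical stand-in helpers and a plain bool in place of the kernel's static key (illustrative only, not the kernel code):

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-ins for the kernel's __lse_* and __ll_sc_* helpers;
 * the real ones live in asm/atomic_lse.h and asm/atomic_ll_sc.h. */
static void __lse_atomic_add(int i, int *v)   { *v += i; /* real one: LSE STADD */ }
static void __ll_sc_atomic_add(int i, int *v) { *v += i; /* real one: LDXR/STXR loop */ }

/* The kernel keys this off a static branch; a plain bool stands in here. */
static bool system_uses_lse_atomics(void) { return true; }

#define __lse_ll_sc_body(op, ...)		\
	(system_uses_lse_atomics() ?		\
		__lse_##op(__VA_ARGS__) :	\
		__ll_sc_##op(__VA_ARGS__))

#define ATOMIC_OP(op)				\
static void arch_##op(int i, int *v)		\
{						\
	__lse_ll_sc_body(op, i, v);		\
}

ATOMIC_OP(atomic_add)	/* defines arch_atomic_add() */

int main(void)
{
	int counter = 0;
	arch_atomic_add(3, &counter);
	printf("%d\n", counter);	/* prints 3 */
	return 0;
}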
arch/arm64/include/asm/atomic.h
@@ -17,9 +17,84 @@
#ifdef __KERNEL__

#include <asm/atomic_arch.h>
#include <asm/cmpxchg.h>

#define ATOMIC_OP(op) \
static inline void arch_##op(int i, atomic_t *v) \
{ \
	__lse_ll_sc_body(op, i, v); \
}

ATOMIC_OP(atomic_andnot)
ATOMIC_OP(atomic_or)
ATOMIC_OP(atomic_xor)
ATOMIC_OP(atomic_add)
ATOMIC_OP(atomic_and)
ATOMIC_OP(atomic_sub)

#define ATOMIC_FETCH_OP(name, op) \
static inline int arch_##op##name(int i, atomic_t *v) \
{ \
	return __lse_ll_sc_body(op##name, i, v); \
}

#define ATOMIC_FETCH_OPS(op) \
	ATOMIC_FETCH_OP(_relaxed, op) \
	ATOMIC_FETCH_OP(_acquire, op) \
	ATOMIC_FETCH_OP(_release, op) \
	ATOMIC_FETCH_OP( , op)

ATOMIC_FETCH_OPS(atomic_fetch_andnot)
ATOMIC_FETCH_OPS(atomic_fetch_or)
ATOMIC_FETCH_OPS(atomic_fetch_xor)
ATOMIC_FETCH_OPS(atomic_fetch_add)
ATOMIC_FETCH_OPS(atomic_fetch_and)
ATOMIC_FETCH_OPS(atomic_fetch_sub)
ATOMIC_FETCH_OPS(atomic_add_return)
ATOMIC_FETCH_OPS(atomic_sub_return)

#define ATOMIC64_OP(op) \
static inline void arch_##op(long i, atomic64_t *v) \
{ \
	__lse_ll_sc_body(op, i, v); \
}

ATOMIC64_OP(atomic64_andnot)
ATOMIC64_OP(atomic64_or)
ATOMIC64_OP(atomic64_xor)
ATOMIC64_OP(atomic64_add)
ATOMIC64_OP(atomic64_and)
ATOMIC64_OP(atomic64_sub)

#define ATOMIC64_FETCH_OP(name, op) \
static inline long arch_##op##name(long i, atomic64_t *v) \
{ \
	return __lse_ll_sc_body(op##name, i, v); \
}

#define ATOMIC64_FETCH_OPS(op) \
	ATOMIC64_FETCH_OP(_relaxed, op) \
	ATOMIC64_FETCH_OP(_acquire, op) \
	ATOMIC64_FETCH_OP(_release, op) \
	ATOMIC64_FETCH_OP( , op)

ATOMIC64_FETCH_OPS(atomic64_fetch_andnot)
ATOMIC64_FETCH_OPS(atomic64_fetch_or)
ATOMIC64_FETCH_OPS(atomic64_fetch_xor)
ATOMIC64_FETCH_OPS(atomic64_fetch_add)
ATOMIC64_FETCH_OPS(atomic64_fetch_and)
ATOMIC64_FETCH_OPS(atomic64_fetch_sub)
ATOMIC64_FETCH_OPS(atomic64_add_return)
ATOMIC64_FETCH_OPS(atomic64_sub_return)

static inline long arch_atomic64_dec_if_positive(atomic64_t *v)
{
	return __lse_ll_sc_body(atomic64_dec_if_positive, v);
}

#define ATOMIC_INIT(i) { (i) }

#define arch_atomic_read(v) READ_ONCE((v)->counter)
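For readability, ATOMIC_FETCH_OPS(atomic_add_return) above generates one wrapper per memory-ordering variant; hand-expanded for illustration (the matching __lse_*/__ll_sc_* callees are declared in asm/atomic_lse.h and asm/atomic_ll_sc.h):

static inline int arch_atomic_add_return_relaxed(int i, atomic_t *v)
{
	return __lse_ll_sc_body(atomic_add_return_relaxed, i, v);
}

static inline int arch_atomic_add_return_acquire(int i, atomic_t *v)
{
	return __lse_ll_sc_body(atomic_add_return_acquire, i, v);
}

static inline int arch_atomic_add_return_release(int i, atomic_t *v)
{
	return __lse_ll_sc_body(atomic_add_return_release, i, v);
}

static inline int arch_atomic_add_return(int i, atomic_t *v)
{
	return __lse_ll_sc_body(atomic_add_return, i, v);
}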
arch/arm64/include/asm/atomic_arch.h (deleted file)
@@ -1,155 +0,0 @@
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Selection between LSE and LL/SC atomics.
 *
 * Copyright (C) 2018 ARM Ltd.
 * Author: Andrew Murray <andrew.murray@arm.com>
 */

#ifndef __ASM_ATOMIC_ARCH_H
#define __ASM_ATOMIC_ARCH_H

#include <linux/jump_label.h>

#include <asm/cpucaps.h>
#include <asm/atomic_ll_sc.h>
#include <asm/atomic_lse.h>

extern struct static_key_false cpu_hwcap_keys[ARM64_NCAPS];
extern struct static_key_false arm64_const_caps_ready;

static inline bool system_uses_lse_atomics(void)
{
	return (IS_ENABLED(CONFIG_ARM64_LSE_ATOMICS) &&
		IS_ENABLED(CONFIG_AS_LSE) &&
		static_branch_likely(&arm64_const_caps_ready)) &&
		static_branch_likely(&cpu_hwcap_keys[ARM64_HAS_LSE_ATOMICS]);
}

#define __lse_ll_sc_body(op, ...) \
({ \
	system_uses_lse_atomics() ? \
		__lse_##op(__VA_ARGS__) : \
		__ll_sc_##op(__VA_ARGS__); \
})

#define ATOMIC_OP(op) \
static inline void arch_##op(int i, atomic_t *v) \
{ \
	__lse_ll_sc_body(op, i, v); \
}

ATOMIC_OP(atomic_andnot)
ATOMIC_OP(atomic_or)
ATOMIC_OP(atomic_xor)
ATOMIC_OP(atomic_add)
ATOMIC_OP(atomic_and)
ATOMIC_OP(atomic_sub)

#define ATOMIC_FETCH_OP(name, op) \
static inline int arch_##op##name(int i, atomic_t *v) \
{ \
	return __lse_ll_sc_body(op##name, i, v); \
}

#define ATOMIC_FETCH_OPS(op) \
	ATOMIC_FETCH_OP(_relaxed, op) \
	ATOMIC_FETCH_OP(_acquire, op) \
	ATOMIC_FETCH_OP(_release, op) \
	ATOMIC_FETCH_OP( , op)

ATOMIC_FETCH_OPS(atomic_fetch_andnot)
ATOMIC_FETCH_OPS(atomic_fetch_or)
ATOMIC_FETCH_OPS(atomic_fetch_xor)
ATOMIC_FETCH_OPS(atomic_fetch_add)
ATOMIC_FETCH_OPS(atomic_fetch_and)
ATOMIC_FETCH_OPS(atomic_fetch_sub)
ATOMIC_FETCH_OPS(atomic_add_return)
ATOMIC_FETCH_OPS(atomic_sub_return)

#define ATOMIC64_OP(op) \
static inline void arch_##op(long i, atomic64_t *v) \
{ \
	__lse_ll_sc_body(op, i, v); \
}

ATOMIC64_OP(atomic64_andnot)
ATOMIC64_OP(atomic64_or)
ATOMIC64_OP(atomic64_xor)
ATOMIC64_OP(atomic64_add)
ATOMIC64_OP(atomic64_and)
ATOMIC64_OP(atomic64_sub)

#define ATOMIC64_FETCH_OP(name, op) \
static inline long arch_##op##name(long i, atomic64_t *v) \
{ \
	return __lse_ll_sc_body(op##name, i, v); \
}

#define ATOMIC64_FETCH_OPS(op) \
	ATOMIC64_FETCH_OP(_relaxed, op) \
	ATOMIC64_FETCH_OP(_acquire, op) \
	ATOMIC64_FETCH_OP(_release, op) \
	ATOMIC64_FETCH_OP( , op)

ATOMIC64_FETCH_OPS(atomic64_fetch_andnot)
ATOMIC64_FETCH_OPS(atomic64_fetch_or)
ATOMIC64_FETCH_OPS(atomic64_fetch_xor)
ATOMIC64_FETCH_OPS(atomic64_fetch_add)
ATOMIC64_FETCH_OPS(atomic64_fetch_and)
ATOMIC64_FETCH_OPS(atomic64_fetch_sub)
ATOMIC64_FETCH_OPS(atomic64_add_return)
ATOMIC64_FETCH_OPS(atomic64_sub_return)

static inline long arch_atomic64_dec_if_positive(atomic64_t *v)
{
	return __lse_ll_sc_body(atomic64_dec_if_positive, v);
}

#define __CMPXCHG_CASE(name, sz) \
static inline u##sz __cmpxchg_case_##name##sz(volatile void *ptr, \
					      u##sz old, \
					      u##sz new) \
{ \
	return __lse_ll_sc_body(_cmpxchg_case_##name##sz, \
				ptr, old, new); \
}

__CMPXCHG_CASE( , 8)
__CMPXCHG_CASE( , 16)
__CMPXCHG_CASE( , 32)
__CMPXCHG_CASE( , 64)
__CMPXCHG_CASE(acq_, 8)
__CMPXCHG_CASE(acq_, 16)
__CMPXCHG_CASE(acq_, 32)
__CMPXCHG_CASE(acq_, 64)
__CMPXCHG_CASE(rel_, 8)
__CMPXCHG_CASE(rel_, 16)
__CMPXCHG_CASE(rel_, 32)
__CMPXCHG_CASE(rel_, 64)
__CMPXCHG_CASE(mb_, 8)
__CMPXCHG_CASE(mb_, 16)
__CMPXCHG_CASE(mb_, 32)
__CMPXCHG_CASE(mb_, 64)

#define __CMPXCHG_DBL(name) \
static inline long __cmpxchg_double##name(unsigned long old1, \
					  unsigned long old2, \
					  unsigned long new1, \
					  unsigned long new2, \
					  volatile void *ptr) \
{ \
	return __lse_ll_sc_body(_cmpxchg_double##name, \
				old1, old2, new1, new2, ptr); \
}

__CMPXCHG_DBL( )
__CMPXCHG_DBL(_mb)

#endif /* __ASM_ATOMIC_LSE_H */
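As a worked example of the cmpxchg wrappers leaving this header, __CMPXCHG_CASE(acq_, 32) expands (by hand, for illustration) to an acquire-ordered 32-bit compare-and-exchange wrapper that again defers to the dispatcher:

static inline u32 __cmpxchg_case_acq_32(volatile void *ptr,
					u32 old,
					u32 new)
{
	return __lse_ll_sc_body(_cmpxchg_case_acq_32,
				ptr, old, new);
}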
arch/arm64/include/asm/cmpxchg.h
@@ -10,7 +10,6 @@
#include <linux/build_bug.h>
#include <linux/compiler.h>

#include <asm/atomic_arch.h>
#include <asm/barrier.h>
#include <asm/lse.h>

@@ -104,6 +103,46 @@ __XCHG_GEN(_mb)
#define arch_xchg_release(...)	__xchg_wrapper(_rel, __VA_ARGS__)
#define arch_xchg(...)		__xchg_wrapper( _mb, __VA_ARGS__)

#define __CMPXCHG_CASE(name, sz) \
static inline u##sz __cmpxchg_case_##name##sz(volatile void *ptr, \
					      u##sz old, \
					      u##sz new) \
{ \
	return __lse_ll_sc_body(_cmpxchg_case_##name##sz, \
				ptr, old, new); \
}

__CMPXCHG_CASE( , 8)
__CMPXCHG_CASE( , 16)
__CMPXCHG_CASE( , 32)
__CMPXCHG_CASE( , 64)
__CMPXCHG_CASE(acq_, 8)
__CMPXCHG_CASE(acq_, 16)
__CMPXCHG_CASE(acq_, 32)
__CMPXCHG_CASE(acq_, 64)
__CMPXCHG_CASE(rel_, 8)
__CMPXCHG_CASE(rel_, 16)
__CMPXCHG_CASE(rel_, 32)
__CMPXCHG_CASE(rel_, 64)
__CMPXCHG_CASE(mb_, 8)
__CMPXCHG_CASE(mb_, 16)
__CMPXCHG_CASE(mb_, 32)
__CMPXCHG_CASE(mb_, 64)

#define __CMPXCHG_DBL(name) \
static inline long __cmpxchg_double##name(unsigned long old1, \
					  unsigned long old2, \
					  unsigned long new1, \
					  unsigned long new2, \
					  volatile void *ptr) \
{ \
	return __lse_ll_sc_body(_cmpxchg_double##name, \
				old1, old2, new1, new2, ptr); \
}

__CMPXCHG_DBL( )
__CMPXCHG_DBL(_mb)

#define __CMPXCHG_GEN(sfx) \
static inline unsigned long __cmpxchg##sfx(volatile void *ptr, \
					   unsigned long old, \
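The wrappers above only choose an implementation; the contract is an ordinary compare-and-swap: return the value observed at ptr and store new only if that value equalled old. A rough user-space analogue using the GCC/Clang __atomic builtin (hypothetical helper name, not kernel code):

#include <stdint.h>
#include <stdio.h>

/* User-space stand-in for a full-barrier 32-bit cmpxchg. */
static uint32_t cmpxchg32(uint32_t *ptr, uint32_t old, uint32_t new)
{
	__atomic_compare_exchange_n(ptr, &old, new, 0,
				    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
	return old;	/* on failure the builtin writes the observed value here */
}

int main(void)
{
	uint32_t v = 5;
	printf("%u\n", cmpxchg32(&v, 5, 9));	/* prints 5; v is now 9 */
	printf("%u\n", cmpxchg32(&v, 5, 7));	/* prints 9; v is unchanged */
	return 0;
}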
arch/arm64/include/asm/lse.h
@@ -2,22 +2,46 @@
#ifndef __ASM_LSE_H
#define __ASM_LSE_H

#include <asm/atomic_ll_sc.h>

#if defined(CONFIG_AS_LSE) && defined(CONFIG_ARM64_LSE_ATOMICS)

#include <linux/compiler_types.h>
#include <linux/export.h>
#include <linux/jump_label.h>
#include <linux/stringify.h>
#include <asm/alternative.h>
#include <asm/atomic_lse.h>
#include <asm/cpucaps.h>

__asm__(".arch_extension lse");

extern struct static_key_false cpu_hwcap_keys[ARM64_NCAPS];
extern struct static_key_false arm64_const_caps_ready;

static inline bool system_uses_lse_atomics(void)
{
	return (static_branch_likely(&arm64_const_caps_ready)) &&
		static_branch_likely(&cpu_hwcap_keys[ARM64_HAS_LSE_ATOMICS]);
}

#define __lse_ll_sc_body(op, ...) \
({ \
	system_uses_lse_atomics() ? \
		__lse_##op(__VA_ARGS__) : \
		__ll_sc_##op(__VA_ARGS__); \
})

/* In-line patching at runtime */
#define ARM64_LSE_ATOMIC_INSN(llsc, lse) \
	ALTERNATIVE(llsc, lse, ARM64_HAS_LSE_ATOMICS)

#else	/* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */

static inline bool system_uses_lse_atomics(void) { return false; }

#define __lse_ll_sc_body(op, ...)	__ll_sc_##op(__VA_ARGS__)

#define ARM64_LSE_ATOMIC_INSN(llsc, lse)	llsc

#endif	/* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */
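With the #else branch in effect (the toolchain lacks LSE support or CONFIG_ARM64_LSE_ATOMICS is off), the dispatch disappears at compile time. For example, ATOMIC_OP(atomic_add) from asm/atomic.h above then reduces, hand-expanded for illustration, to a direct LL/SC call with no static key and no branch:

static inline void arch_atomic_add(int i, atomic_t *v)
{
	__ll_sc_atomic_add(i, v);
}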