[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index] [Xen-devel] [PATCH v3 14/46] xen: arm64: barriers and wait for interrupts/events
Signed-off-by: Ian Campbell <ian.campbell@xxxxxxxxxx> Acked-by: Tim Deegan <tim@xxxxxxx> --- v3: - smp barriers are the same as up (which are conservative) - add dmb --- xen/include/asm-arm/arm32/system.h | 29 +++++++++++++++++++++++++++++ xen/include/asm-arm/arm64/system.h | 29 +++++++++++++++++++++++++++++ xen/include/asm-arm/system.h | 20 ++++++++------------ 3 files changed, 66 insertions(+), 12 deletions(-) create mode 100644 xen/include/asm-arm/arm32/system.h create mode 100644 xen/include/asm-arm/arm64/system.h diff --git a/xen/include/asm-arm/arm32/system.h b/xen/include/asm-arm/arm32/system.h new file mode 100644 index 0000000..1380084 --- /dev/null +++ b/xen/include/asm-arm/arm32/system.h @@ -0,0 +1,29 @@ +/* Portions taken from Linux arch arm */ +#ifndef __ASM_ARM32_SYSTEM_H +#define __ASM_ARM32_SYSTEM_H + +#define sev() __asm__ __volatile__ ("sev" : : : "memory") +#define wfe() __asm__ __volatile__ ("wfe" : : : "memory") +#define wfi() __asm__ __volatile__ ("wfi" : : : "memory") + +#define isb() __asm__ __volatile__ ("isb" : : : "memory") +#define dsb() __asm__ __volatile__ ("dsb" : : : "memory") +#define dmb() __asm__ __volatile__ ("dmb" : : : "memory") + +#define mb() dsb() +#define rmb() dsb() +#define wmb() mb() + +#define smp_mb() mb() +#define smp_rmb() rmb() +#define smp_wmb() wmb() + +#endif +/* + * Local variables: + * mode: C + * c-set-style: "BSD" + * c-basic-offset: 4 + * indent-tabs-mode: nil + * End: + */ diff --git a/xen/include/asm-arm/arm64/system.h b/xen/include/asm-arm/arm64/system.h new file mode 100644 index 0000000..53c6f96 --- /dev/null +++ b/xen/include/asm-arm/arm64/system.h @@ -0,0 +1,29 @@ +/* Portions taken from Linux arch arm64 */ +#ifndef __ASM_ARM64_SYSTEM_H +#define __ASM_ARM64_SYSTEM_H + +#define sev() asm volatile("sev" : : : "memory") +#define wfe() asm volatile("wfe" : : : "memory") +#define wfi() asm volatile("wfi" : : : "memory") + +#define isb() asm volatile("isb" : : : "memory") +#define dsb() asm volatile("dsb sy" : : : "memory") +#define dmb() asm volatile("dmb sy" : : : "memory") + +#define mb() dsb() +#define rmb() dsb() +#define wmb() mb() + +#define smp_mb() mb() +#define smp_rmb() rmb() +#define smp_wmb() wmb() + +#endif +/* + * Local variables: + * mode: C + * c-set-style: "BSD" + * c-basic-offset: 4 + * indent-tabs-mode: nil + * End: + */ diff --git a/xen/include/asm-arm/system.h b/xen/include/asm-arm/system.h index f1e6f5e..2acef02 100644 --- a/xen/include/asm-arm/system.h +++ b/xen/include/asm-arm/system.h @@ -11,18 +11,6 @@ #define xchg(ptr,x) \ ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr)))) -#define isb() __asm__ __volatile__ ("isb" : : : "memory") -#define dsb() __asm__ __volatile__ ("dsb" : : : "memory") -#define dmb() __asm__ __volatile__ ("dmb" : : : "memory") - -#define mb() dsb() -#define rmb() dsb() -#define wmb() mb() - -#define smp_mb() dmb() -#define smp_rmb() dmb() -#define smp_wmb() dmb() - /* * This is used to ensure the compiler did actually allocate the register we * asked it for some inline assembly sequences. Apparently we can't trust */ #define __asmeq(x, y) ".ifnc " x "," y " ; .err ; .endif\n\t" +#if defined(CONFIG_ARM_32) +# include <asm/arm32/system.h> +#elif defined(CONFIG_ARM_64) +# include <asm/arm64/system.h> +#else +# error "unknown ARM variant" +#endif + extern void __bad_xchg(volatile void *, int); static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size) -- 1.7.2.5 _______________________________________________ Xen-devel mailing list Xen-devel@xxxxxxxxxxxxx http://lists.xen.org/xen-devel
|
Lists.xenproject.org is hosted with RackSpace, monitoring our |