--- /dev/null
+++ xen/include/asm-arm/arm32/system.h
+/* Portions taken from Linux arch arm */
+#ifndef __ASM_ARM32_SYSTEM_H
+#define __ASM_ARM32_SYSTEM_H
+
+#define sev() __asm__ __volatile__ ("sev" : : : "memory")
+#define wfe() __asm__ __volatile__ ("wfe" : : : "memory")
+#define wfi() __asm__ __volatile__ ("wfi" : : : "memory")
+
+#define isb() __asm__ __volatile__ ("isb" : : : "memory")
+#define dsb() __asm__ __volatile__ ("dsb" : : : "memory")
+#define dmb() __asm__ __volatile__ ("dmb" : : : "memory")
+
+#define mb() dsb()
+#define rmb() dsb()
+#define wmb() mb()
+
+#define smp_mb() mb()
+#define smp_rmb() rmb()
+#define smp_wmb() wmb()
+
+#endif
+/*
+ * Local variables:
+ * mode: C
+ * c-set-style: "BSD"
+ * c-basic-offset: 4
+ * indent-tabs-mode: nil
+ * End:
+ */
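
[Illustration, not part of the patch: a minimal producer/consumer sketch of
how the arm32 barriers above might be paired; the shared_data/shared_ready
names are hypothetical.]

static int shared_data;
static volatile int shared_ready;

static void producer(int v)
{
    shared_data = v;
    wmb();              /* order the data store before the flag store */
    shared_ready = 1;
    dsb();              /* make the flag store visible before signalling */
    sev();              /* wake any CPU sleeping in wfe() */
}

static int consumer(void)
{
    while ( !shared_ready )
        wfe();          /* low-power wait; sev() (or a spurious event) wakes us */
    rmb();              /* order the flag load before the data load */
    return shared_data;
}
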
--- /dev/null
+++ xen/include/asm-arm/arm64/system.h
+/* Portions taken from Linux arch arm64 */
+#ifndef __ASM_ARM64_SYSTEM_H
+#define __ASM_ARM64_SYSTEM_H
+
+#define sev() asm volatile("sev" : : : "memory")
+#define wfe() asm volatile("wfe" : : : "memory")
+#define wfi() asm volatile("wfi" : : : "memory")
+
+#define isb() asm volatile("isb" : : : "memory")
+#define dsb() asm volatile("dsb sy" : : : "memory")
+#define dmb() asm volatile("dmb sy" : : : "memory")
+
+#define mb() dsb()
+#define rmb() dsb()
+#define wmb() mb()
+
+#define smp_mb() mb()
+#define smp_rmb() rmb()
+#define smp_wmb() wmb()
+
+#endif
+/*
+ * Local variables:
+ * mode: C
+ * c-set-style: "BSD"
+ * c-basic-offset: 4
+ * indent-tabs-mode: nil
+ * End:
+ */
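
[Illustration, not part of the patch: unlike arm32, the arm64 dsb/dmb
instructions take an explicit domain argument, here "sy" (full system).
A hedged sketch of a flag wait/signal pair built on these definitions;
the function and parameter names are hypothetical.]

static inline void wait_for_flag(volatile unsigned long *flag)
{
    while ( !*flag )
        wfe();          /* expands to asm volatile("wfe" : : : "memory") */
    smp_rmb();          /* here rmb() -> dsb() -> "dsb sy": order the flag load first */
}

static inline void set_flag(volatile unsigned long *flag)
{
    smp_wmb();          /* order earlier stores before the flag store */
    *flag = 1;
    dsb();              /* "dsb sy": complete the store before sev() */
    sev();              /* wake any PE waiting in wfe() */
}
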
#define xchg(ptr,x) \
((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
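
[Illustration, not part of the patch: xchg() atomically stores a new value
and returns the old one; below is a hypothetical test-and-set lock built on
it, with example_lock a made-up name.]

static volatile unsigned int example_lock;

static inline void example_lock_acquire(void)
{
    while ( xchg(&example_lock, 1) != 0 )
        wfe();          /* old value 1 => already held; wait politely */
    smp_mb();           /* keep the critical section after the acquire */
}

static inline void example_lock_release(void)
{
    smp_mb();           /* keep the critical section before the release */
    example_lock = 0;
    dsb();              /* make the store visible before signalling */
    sev();              /* wake any wfe() waiter */
}
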
-#define isb() __asm__ __volatile__ ("isb" : : : "memory")
-#define dsb() __asm__ __volatile__ ("dsb" : : : "memory")
-#define dmb() __asm__ __volatile__ ("dmb" : : : "memory")
-
-#define mb() dsb()
-#define rmb() dsb()
-#define wmb() mb()
-
-#define smp_mb() dmb()
-#define smp_rmb() dmb()
-#define smp_wmb() dmb()
-
/*
 * This is used to ensure the compiler did actually allocate the register we
 * asked it for some inline assembly sequences. Apparently we can't trust
 * the compiler from one version to another so a bit of paranoia won't hurt.
 * This string is meant to be concatenated with the inline asm string and
 * will cause compilation to stop on mismatch. (for details, see gcc PR 15089)
 */
#define __asmeq(x, y) ".ifnc " x "," y " ; .err ; .endif\n\t"
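
[Illustration, not part of the patch: __asmeq is pasted in front of an inline
asm body so the assembler raises .err if the compiler did not place an operand
in the expected register. The svc call below is purely illustrative and
arm32-flavoured; example_syscall is a made-up name.]

static inline long example_syscall(long op)
{
    register long r0 asm("r0") = op;    /* op must live in r0 */

    asm volatile(
        __asmeq("%0", "r0")             /* .err unless %0 really is r0 */
        "svc #0"
        : "+r" (r0) : : "memory");

    return r0;
}
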
+#if defined(CONFIG_ARM_32)
+# include <asm/arm32/system.h>
+#elif defined(CONFIG_ARM_64)
+# include <asm/arm64/system.h>
+#else
+# error "unknown ARM variant"
+#endif
+
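
[Illustration, not part of the patch: common code keeps including
<asm/system.h> unchanged and the #if above dispatches to the right
sub-architecture header; flush_and_idle is a hypothetical consumer.]

#include <asm/system.h>

static void flush_and_idle(void)
{
    dsb();      /* arm32: "dsb"; arm64: "dsb sy" */
    isb();      /* resynchronise the instruction stream */
    wfi();      /* sleep until the next interrupt */
}
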
extern void __bad_xchg(volatile void *, int);
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)