Just like arm32ve, we should use dsb instruction instead of
dmb instruction for arch_mb(), arch_rmb(), and arch_wmb().
Signed-off-by: Anup Patel <
an...@brainfault.org>
Signed-off-by: Ashutosh Sharma <
ashutosh.s...@gmail.com>
---
arch/arm/cpu/arm64/cpu_vcpu_helper.c | 2 +-
arch/arm/cpu/arm64/include/arch_barrier.h | 20 ++++++++++----------
arch/arm/cpu/arm64/include/arch_gicv3.h | 4 ++--
arch/arm/cpu/arm64/include/cpu_mmu_lpae.h | 2 +-
4 files changed, 14 insertions(+), 14 deletions(-)
diff --git a/arch/arm/cpu/arm64/cpu_vcpu_helper.c b/arch/arm/cpu/arm64/cpu_vcpu_helper.c
index d3f26ac..8e3b22b 100644
--- a/arch/arm/cpu/arm64/cpu_vcpu_helper.c
+++ b/arch/arm/cpu/arm64/cpu_vcpu_helper.c
@@ -770,7 +770,7 @@ void arch_vcpu_switch(struct vmm_vcpu *tvcpu,
*/
inv_tlb_guest_allis();
/* Ensure changes are visible */
- dsb();
+ dsb(sy);
isb();
}
}
diff --git a/arch/arm/cpu/arm64/include/arch_barrier.h b/arch/arm/cpu/arm64/include/arch_barrier.h
index 40e95e4..d108101 100644
--- a/arch/arm/cpu/arm64/include/arch_barrier.h
+++ b/arch/arm/cpu/arm64/include/arch_barrier.h
@@ -18,32 +18,32 @@
*
* @file arch_barrier.h
* @author Sukanto Ghosh (
sukant...@gmail.com)
- * @brief architecure specific memory barriers
+ * @brief architecture specific memory barriers
*/
#ifndef __ARCH_BARRIER_H__
#define __ARCH_BARRIER_H__
-#define isb() asm volatile ("isb" : : : "memory")
-#define dsb() asm volatile ("dsb sy" : : : "memory")
-#define dmb() asm volatile ("dmb sy" : : : "memory")
+#define isb(opt) asm volatile("isb " #opt : : : "memory")
+#define dmb(opt) asm volatile("dmb " #opt : : : "memory")
+#define dsb(opt) asm volatile("dsb " #opt : : : "memory")
/* Read & Write Memory barrier */
-#define arch_mb() dmb()
+#define arch_mb() dsb(sy)
/* Read Memory barrier */
-#define arch_rmb() asm volatile ("dmb ld" : : : "memory")
+#define arch_rmb() dsb(ld)
/* Write Memory barrier */
-#define arch_wmb() asm volatile ("dmb st" : : : "memory")
+#define arch_wmb() dsb(st)
/* SMP Read & Write Memory barrier */
-#define arch_smp_mb() asm volatile ("dmb ish" : : : "memory")
+#define arch_smp_mb() dmb(ish)
/* SMP Read Memory barrier */
-#define arch_smp_rmb() asm volatile ("dmb ishld" : : : "memory")
+#define arch_smp_rmb() dmb(ishld)
/* SMP Write Memory barrier */
-#define arch_smp_wmb() asm volatile ("dmb ishst" : : : "memory")
+#define arch_smp_wmb() dmb(ishst)
/* CPU relax for busy loop */
#define arch_cpu_relax() asm volatile ("" : : : "memory")
diff --git a/arch/arm/cpu/arm64/include/arch_gicv3.h b/arch/arm/cpu/arm64/include/arch_gicv3.h
index 20561f8..c3102fb 100644
--- a/arch/arm/cpu/arm64/include/arch_gicv3.h
+++ b/arch/arm/cpu/arm64/include/arch_gicv3.h
@@ -117,7 +117,7 @@ static inline u64 arch_gic_read_iar(void)
asm volatile("mrs_s %0, " stringify(ICC_IAR1_EL1)
: "=r" (irqstat));
- dsb();
+ dsb(sy);
return irqstat;
}
@@ -138,7 +138,7 @@ static inline u64 arch_gic_read_iar_cavium_thunderx(void)
"mrs_s %0, " stringify(ICC_IAR1_EL1) "\n\t"
"nop;nop;nop;nop"
: "=r" (irqstat));
- dmb();
+ dmb(sy);
return irqstat;
}
diff --git a/arch/arm/cpu/arm64/include/cpu_mmu_lpae.h b/arch/arm/cpu/arm64/include/cpu_mmu_lpae.h
index a5a67c3..c032682 100644
--- a/arch/arm/cpu/arm64/include/cpu_mmu_lpae.h
+++ b/arch/arm/cpu/arm64/include/cpu_mmu_lpae.h
@@ -47,7 +47,7 @@
static inline void cpu_mmu_sync_tte(u64 *tte)
{
- dsb();
+ dsb(sy);
isb();
}
--
2.7.4