[PATCH 1/2] [ARM] arm64: Use dsb instead of dmb for arch_mb(), arch_rmb(), and arch_wmb()

642 views
Skip to first unread message

Anup Patel

unread,
Apr 4, 2017, 11:19:12 AM4/4/17
to xvisor...@googlegroups.com, Anup Patel, Ashutosh Sharma
Just like arm32ve, we should use the dsb instruction instead of
the dmb instruction for arch_mb(), arch_rmb(), and arch_wmb().

Signed-off-by: Anup Patel <an...@brainfault.org>
Signed-off-by: Ashutosh Sharma <ashutosh.s...@gmail.com>
---
arch/arm/cpu/arm64/cpu_vcpu_helper.c | 2 +-
arch/arm/cpu/arm64/include/arch_barrier.h | 20 ++++++++++----------
arch/arm/cpu/arm64/include/arch_gicv3.h | 4 ++--
arch/arm/cpu/arm64/include/cpu_mmu_lpae.h | 2 +-
4 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/arch/arm/cpu/arm64/cpu_vcpu_helper.c b/arch/arm/cpu/arm64/cpu_vcpu_helper.c
index d3f26ac..8e3b22b 100644
--- a/arch/arm/cpu/arm64/cpu_vcpu_helper.c
+++ b/arch/arm/cpu/arm64/cpu_vcpu_helper.c
@@ -770,7 +770,7 @@ void arch_vcpu_switch(struct vmm_vcpu *tvcpu,
*/
inv_tlb_guest_allis();
/* Ensure changes are visible */
- dsb();
+ dsb(sy);
isb();
}
}
diff --git a/arch/arm/cpu/arm64/include/arch_barrier.h b/arch/arm/cpu/arm64/include/arch_barrier.h
index 40e95e4..d108101 100644
--- a/arch/arm/cpu/arm64/include/arch_barrier.h
+++ b/arch/arm/cpu/arm64/include/arch_barrier.h
@@ -18,32 +18,32 @@
*
* @file arch_barrier.h
* @author Sukanto Ghosh (sukant...@gmail.com)
- * @brief architecure specific memory barriers
+ * @brief architecture specific memory barriers
*/
#ifndef __ARCH_BARRIER_H__
#define __ARCH_BARRIER_H__

-#define isb() asm volatile ("isb" : : : "memory")
-#define dsb() asm volatile ("dsb sy" : : : "memory")
-#define dmb() asm volatile ("dmb sy" : : : "memory")
+#define isb(opt) asm volatile("isb " #opt : : : "memory")
+#define dmb(opt) asm volatile("dmb " #opt : : : "memory")
+#define dsb(opt) asm volatile("dsb " #opt : : : "memory")

/* Read & Write Memory barrier */
-#define arch_mb() dmb()
+#define arch_mb() dsb(sy)

/* Read Memory barrier */
-#define arch_rmb() asm volatile ("dmb ld" : : : "memory")
+#define arch_rmb() dsb(ld)

/* Write Memory barrier */
-#define arch_wmb() asm volatile ("dmb st" : : : "memory")
+#define arch_wmb() dsb(st)

/* SMP Read & Write Memory barrier */
-#define arch_smp_mb() asm volatile ("dmb ish" : : : "memory")
+#define arch_smp_mb() dmb(ish)

/* SMP Read Memory barrier */
-#define arch_smp_rmb() asm volatile ("dmb ishld" : : : "memory")
+#define arch_smp_rmb() dmb(ishld)

/* SMP Write Memory barrier */
-#define arch_smp_wmb() asm volatile ("dmb ishst" : : : "memory")
+#define arch_smp_wmb() dmb(ishst)

/* CPU relax for busy loop */
#define arch_cpu_relax() asm volatile ("" : : : "memory")
diff --git a/arch/arm/cpu/arm64/include/arch_gicv3.h b/arch/arm/cpu/arm64/include/arch_gicv3.h
index 20561f8..c3102fb 100644
--- a/arch/arm/cpu/arm64/include/arch_gicv3.h
+++ b/arch/arm/cpu/arm64/include/arch_gicv3.h
@@ -117,7 +117,7 @@ static inline u64 arch_gic_read_iar(void)

asm volatile("mrs_s %0, " stringify(ICC_IAR1_EL1)
: "=r" (irqstat));
- dsb();
+ dsb(sy);
return irqstat;
}

@@ -138,7 +138,7 @@ static inline u64 arch_gic_read_iar_cavium_thunderx(void)
"mrs_s %0, " stringify(ICC_IAR1_EL1) "\n\t"
"nop;nop;nop;nop"
: "=r" (irqstat));
- dmb();
+ dmb(sy);

return irqstat;
}
diff --git a/arch/arm/cpu/arm64/include/cpu_mmu_lpae.h b/arch/arm/cpu/arm64/include/cpu_mmu_lpae.h
index a5a67c3..c032682 100644
--- a/arch/arm/cpu/arm64/include/cpu_mmu_lpae.h
+++ b/arch/arm/cpu/arm64/include/cpu_mmu_lpae.h
@@ -47,7 +47,7 @@

static inline void cpu_mmu_sync_tte(u64 *tte)
{
- dsb();
+ dsb(sy);
isb();
}

--
2.7.4

Anup Patel

unread,
Apr 4, 2017, 11:19:16 AM4/4/17
to xvisor...@googlegroups.com, Anup Patel
This patch makes isb(), dmb(), and dsb() more generic, like the
arm64 port.

Signed-off-by: Anup Patel <an...@brainfault.org>
---
arch/arm/cpu/arm32ve/include/arch_barrier.h | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/arch/arm/cpu/arm32ve/include/arch_barrier.h b/arch/arm/cpu/arm32ve/include/arch_barrier.h
index a3cfadb..b3627fe 100644
--- a/arch/arm/cpu/arm32ve/include/arch_barrier.h
+++ b/arch/arm/cpu/arm32ve/include/arch_barrier.h
@@ -23,9 +23,9 @@
#ifndef __ARCH_BARRIER_H__
#define __ARCH_BARRIER_H__

-#define isb() asm volatile ("isb" : : : "memory")
-#define dsb() asm volatile ("dsb" : : : "memory")
-#define dmb() asm volatile ("dmb" : : : "memory")
+#define isb(opt) asm volatile("isb " #opt : : : "memory")
+#define dmb(opt) asm volatile("dmb " #opt : : : "memory")
+#define dsb(opt) asm volatile("dsb " #opt : : : "memory")

#define wfi() asm volatile ("wfi" : : : "memory")
#define wfe() asm volatile ("wfe" : : : "memory")
@@ -38,16 +38,16 @@
#define arch_rmb() dsb()

/* Write Memory barrier */
-#define arch_wmb() dsb()
+#define arch_wmb() dsb(st)

/* SMP Read & Write Memory barrier */
-#define arch_smp_mb() dmb()
+#define arch_smp_mb() dmb(ish)

/* SMP Read Memory barrier */
-#define arch_smp_rmb() dmb()
+#define arch_smp_rmb() arch_smp_mb()

/* SMP Write Memory barrier */
-#define arch_smp_wmb() dmb()
+#define arch_smp_wmb() dmb(ishst)

/* CPU relax for busy loop */
#define arch_cpu_relax() asm volatile ("" : : : "memory")
--
2.7.4

Anup Patel

unread,
Apr 5, 2017, 10:52:40 AM4/5/17
to Xvisor Devel, Anup Patel, Ashutosh Sharma
Applied this patch to the xvisor-next tree.

Regards,
Anup

Anup Patel

unread,
Apr 5, 2017, 10:52:59 AM4/5/17
to Xvisor Devel, Anup Patel
Reply all
Reply to author
Forward
0 new messages