@@ -15,6 +15,8 @@
 #ifndef QEMU_ATOMIC_H
 #define QEMU_ATOMIC_H
 
+#include "compiler.h"
+
 /* Compiler barrier */
 #define barrier() ({ asm volatile("" ::: "memory"); (void)0; })
 
@@ -81,7 +83,7 @@
  * no processors except Alpha need a barrier here. Leave it in if
  * using Thread Sanitizer to avoid warnings, otherwise optimize it away.
  */
-#if defined(__SANITIZE_THREAD__)
+#ifdef QEMU_SANITIZE_THREAD
 #define smp_read_barrier_depends() ({ barrier(); __atomic_thread_fence(__ATOMIC_CONSUME); })
 #elif defined(__alpha__)
 #define smp_read_barrier_depends() asm volatile("mb":::"memory")
@@ -146,7 +148,7 @@
 /* See above: most compilers currently treat consume and acquire the
  * same, but this slows down qatomic_rcu_read unnecessarily.
  */
-#ifdef __SANITIZE_THREAD__
+#ifdef QEMU_SANITIZE_THREAD
 #define qatomic_rcu_read__nocheck(ptr, valptr) \
     __atomic_load(ptr, valptr, __ATOMIC_CONSUME);
 #else
@@ -254,7 +256,7 @@
 #define qatomic_mb_read(ptr) \
     qatomic_load_acquire(ptr)
 
-#if !defined(__SANITIZE_THREAD__) && \
+#if !defined(QEMU_SANITIZE_THREAD) && \
     (defined(__i386__) || defined(__x86_64__) || defined(__s390x__))
 /* This is more efficient than a store plus a fence. */
 # define qatomic_mb_set(ptr, i)  ((void)qatomic_xchg(ptr, i))