commit 177d9e0da0

We commonly define the header guard symbol without an explicit value.
Normalize the exceptions.

Done with scripts/clean-header-guards.pl.

Signed-off-by: Markus Armbruster <armbru@redhat.com>
Message-Id: <20190315145123.28030-8-armbru@redhat.com>
Reviewed-by: Philippe Mathieu-Daudé <philmd@redhat.com>
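The normalized style is simply a valueless definition of the guard symbol, as in this minimal, hypothetical header (the real header below does the same with QEMU_SYS_MEMBARRIER_H):

/* foo.h -- hypothetical example of the normalized guard style */
#ifndef FOO_H
#define FOO_H        /* no explicit value, i.e. not "#define FOO_H 1" */

/* declarations ... */

#endif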
		
			
				
	
	
		
28 lines · 737 B · C
	
/*
 * Process-global memory barriers
 *
 * Copyright (c) 2018 Red Hat, Inc.
 *
 * Author: Paolo Bonzini <pbonzini@redhat.com>
 */

#ifndef QEMU_SYS_MEMBARRIER_H
#define QEMU_SYS_MEMBARRIER_H

#ifdef CONFIG_MEMBARRIER
/* Only block reordering at the compiler level in the performance-critical
 * side.  The slow side forces processor-level ordering on all other cores
 * through a system call.
 */
extern void smp_mb_global_init(void);
extern void smp_mb_global(void);
#define smp_mb_placeholder()       barrier()
#else
/* Keep it simple, execute a real memory barrier on both sides.  */
static inline void smp_mb_global_init(void) {}
#define smp_mb_global()            smp_mb()
#define smp_mb_placeholder()       smp_mb()
#endif

#endif
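The fast/slow pairing these declarations support can be illustrated with a small Dekker-style sketch (hypothetical names fast_side_enter(), slow_side_wait(), fast_side_running, slow_side_pending; it assumes smp_mb_global_init() was called once at startup and elides the atomic accessors real code would use). The frequently executed side only pays for a compiler barrier, while the rarely executed side pays for the system call that forces a full barrier on every other core.

/* Hypothetical sketch, not copied from the QEMU sources. */
#include "qemu/osdep.h"
#include "qemu/atomic.h"            /* barrier(), smp_mb() */
#include "qemu/sys_membarrier.h"

static bool fast_side_running;
static bool slow_side_pending;

/* Performance-critical side: executed very often, so it only pays for a
 * compiler barrier between its store and its load. */
static bool fast_side_enter(void)
{
    fast_side_running = true;
    smp_mb_placeholder();           /* barrier() when CONFIG_MEMBARRIER is set */
    if (slow_side_pending) {
        fast_side_running = false;
        return false;               /* back off, the slow side wants exclusivity */
    }
    return true;
}

/* Slow side: executed rarely; smp_mb_global() issues the membarrier()
 * system call, forcing processor-level ordering on every other core so
 * the store/load pair above cannot be reordered against this one. */
static void slow_side_wait(void)
{
    slow_side_pending = true;
    smp_mb_global();                /* pairs with smp_mb_placeholder() */
    while (fast_side_running) {
        /* wait for the fast side to drain (details omitted) */
    }
}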