/*
 * Copyright IBM Corp. 1999, 2009
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */

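/*
 * Note: "bcr 15,0" is a branch-on-condition with an all-ones mask and
 * register 0 as the target, which never actually branches; its architected
 * side effect is to serialize the CPU, which is what lets it serve as a
 * full memory barrier below.
 */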
static inline void mb(void)
{
	asm volatile("bcr 15,0" : : : "memory");
}

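/*
 * Every barrier flavour below (read, write, SMP and bit-op variants) maps
 * to the same full mb(); no lighter-weight variant is used in this header.
 */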
#define rmb()				mb()
#define wmb()				mb()
#define read_barrier_depends()		do { } while (0)
#define smp_mb()			mb()
#define smp_rmb()			rmb()
#define smp_wmb()			wmb()
#define smp_read_barrier_depends()	read_barrier_depends()
#define smp_mb__before_clear_bit()	smp_mb()
#define smp_mb__after_clear_bit()	smp_mb()

#define set_mb(var, value)		do { var = value; mb(); } while (0)

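/*
 * Usage sketch (not part of the original header): a hypothetical
 * producer/consumer pair showing where the SMP barrier macros above would
 * sit. The names payload, ready, producer and consumer are illustrative
 * only, not anything defined by this header.
 *
 *	static int payload;
 *	static int ready;
 *
 *	void producer(void)
 *	{
 *		payload = 42;
 *		smp_wmb();	order the payload store before the flag store
 *		ready = 1;
 *	}
 *
 *	int consumer(void)
 *	{
 *		if (ready) {
 *			smp_rmb();	order the flag read before the payload read
 *			return payload;
 *		}
 *		return 0;
 *	}
 */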
33#endif /* __ASM_BARRIER_H */