| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 1 | #ifndef _LINUX_CPUSET_H | 
 | 2 | #define _LINUX_CPUSET_H | 
 | 3 | /* | 
 | 4 |  *  cpuset interface | 
 | 5 |  * | 
 | 6 |  *  Copyright (C) 2003 BULL SA | 
| Paul Jackson | 825a46a | 2006-03-24 03:16:03 -0800 | [diff] [blame] | 7 |  *  Copyright (C) 2004-2006 Silicon Graphics, Inc. | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 8 |  * | 
 | 9 |  */ | 
 | 10 |  | 
 | 11 | #include <linux/sched.h> | 
 | 12 | #include <linux/cpumask.h> | 
 | 13 | #include <linux/nodemask.h> | 
| Paul Menage | 8793d85 | 2007-10-18 23:39:39 -0700 | [diff] [blame] | 14 | #include <linux/cgroup.h> | 
| David Rientjes | a1bc5a4 | 2009-04-02 16:57:54 -0700 | [diff] [blame] | 15 | #include <linux/mm.h> | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 16 |  | 
 | 17 | #ifdef CONFIG_CPUSETS | 
 | 18 |  | 
| Paul Jackson | 202f72d | 2006-01-08 01:01:57 -0800 | [diff] [blame] | 19 | extern int number_of_cpusets;	/* How many cpusets are defined in system? */ | 
 | 20 |  | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 21 | extern int cpuset_init(void); | 
 | 22 | extern void cpuset_init_smp(void); | 
| Li Zefan | 6af866a | 2009-01-07 18:08:45 -0800 | [diff] [blame] | 23 | extern void cpuset_cpus_allowed(struct task_struct *p, struct cpumask *mask); | 
 | 24 | extern void cpuset_cpus_allowed_locked(struct task_struct *p, | 
 | 25 | 				       struct cpumask *mask); | 
| Paul Jackson | 909d75a | 2006-01-08 01:01:55 -0800 | [diff] [blame] | 26 | extern nodemask_t cpuset_mems_allowed(struct task_struct *p); | 
| Paul Jackson | 9276b1bc | 2006-12-06 20:31:48 -0800 | [diff] [blame] | 27 | #define cpuset_current_mems_allowed (current->mems_allowed) | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 28 | void cpuset_init_current_mems_allowed(void); | 
| Mel Gorman | 19770b3 | 2008-04-28 02:12:18 -0700 | [diff] [blame] | 29 | int cpuset_nodemask_valid_mems_allowed(nodemask_t *nodemask); | 
| Paul Jackson | 202f72d | 2006-01-08 01:01:57 -0800 | [diff] [blame] | 30 |  | 
| David Rientjes | a1bc5a4 | 2009-04-02 16:57:54 -0700 | [diff] [blame] | 31 | extern int __cpuset_node_allowed_softwall(int node, gfp_t gfp_mask); | 
 | 32 | extern int __cpuset_node_allowed_hardwall(int node, gfp_t gfp_mask); | 
| Paul Jackson | 02a0e53 | 2006-12-13 00:34:25 -0800 | [diff] [blame] | 33 |  | 
| David Rientjes | a1bc5a4 | 2009-04-02 16:57:54 -0700 | [diff] [blame] | 34 | static inline int cpuset_node_allowed_softwall(int node, gfp_t gfp_mask) | 
| Paul Jackson | 202f72d | 2006-01-08 01:01:57 -0800 | [diff] [blame] | 35 | { | 
| Paul Jackson | 02a0e53 | 2006-12-13 00:34:25 -0800 | [diff] [blame] | 36 | 	return number_of_cpusets <= 1 || | 
| David Rientjes | a1bc5a4 | 2009-04-02 16:57:54 -0700 | [diff] [blame] | 37 | 		__cpuset_node_allowed_softwall(node, gfp_mask); | 
| Paul Jackson | 02a0e53 | 2006-12-13 00:34:25 -0800 | [diff] [blame] | 38 | } | 
 | 39 |  | 
| David Rientjes | a1bc5a4 | 2009-04-02 16:57:54 -0700 | [diff] [blame] | 40 | static inline int cpuset_node_allowed_hardwall(int node, gfp_t gfp_mask) | 
| Paul Jackson | 02a0e53 | 2006-12-13 00:34:25 -0800 | [diff] [blame] | 41 | { | 
 | 42 | 	return number_of_cpusets <= 1 || | 
| David Rientjes | a1bc5a4 | 2009-04-02 16:57:54 -0700 | [diff] [blame] | 43 | 		__cpuset_node_allowed_hardwall(node, gfp_mask); | 
 | 44 | } | 
 | 45 |  | 
 | 46 | static inline int cpuset_zone_allowed_softwall(struct zone *z, gfp_t gfp_mask) | 
 | 47 | { | 
 | 48 | 	return cpuset_node_allowed_softwall(zone_to_nid(z), gfp_mask); | 
 | 49 | } | 
 | 50 |  | 
 | 51 | static inline int cpuset_zone_allowed_hardwall(struct zone *z, gfp_t gfp_mask) | 
 | 52 | { | 
 | 53 | 	return cpuset_node_allowed_hardwall(zone_to_nid(z), gfp_mask); | 
| Paul Jackson | 202f72d | 2006-01-08 01:01:57 -0800 | [diff] [blame] | 54 | } | 
 | 55 |  | 
| David Rientjes | bbe373f | 2007-10-16 23:25:58 -0700 | [diff] [blame] | 56 | extern int cpuset_mems_allowed_intersects(const struct task_struct *tsk1, | 
 | 57 | 					  const struct task_struct *tsk2); | 
| Paul Jackson | 3e0d98b | 2006-01-08 01:01:49 -0800 | [diff] [blame] | 58 |  | 
/*
 * Record a memory-pressure event for the current task's cpuset, but
 * only when cpuset_memory_pressure_enabled is set -- this keeps the
 * common (disabled) case down to a single global flag test with no
 * function call.
 */
#define cpuset_memory_pressure_bump() 				\
	do {							\
		if (cpuset_memory_pressure_enabled)		\
			__cpuset_memory_pressure_bump();	\
	} while (0)
 | 64 | extern int cpuset_memory_pressure_enabled; | 
 | 65 | extern void __cpuset_memory_pressure_bump(void); | 
 | 66 |  | 
| Arjan van de Ven | 5404732 | 2007-02-12 00:55:28 -0800 | [diff] [blame] | 67 | extern const struct file_operations proc_cpuset_operations; | 
| Eric W. Biederman | df5f831 | 2008-02-08 04:18:33 -0800 | [diff] [blame] | 68 | struct seq_file; | 
 | 69 | extern void cpuset_task_status_allowed(struct seq_file *m, | 
 | 70 | 					struct task_struct *task); | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 71 |  | 
| Paul Jackson | 505970b | 2006-01-14 13:21:06 -0800 | [diff] [blame] | 72 | extern void cpuset_lock(void); | 
 | 73 | extern void cpuset_unlock(void); | 
 | 74 |  | 
| Paul Jackson | 825a46a | 2006-03-24 03:16:03 -0800 | [diff] [blame] | 75 | extern int cpuset_mem_spread_node(void); | 
 | 76 |  | 
 | 77 | static inline int cpuset_do_page_mem_spread(void) | 
 | 78 | { | 
 | 79 | 	return current->flags & PF_SPREAD_PAGE; | 
 | 80 | } | 
 | 81 |  | 
 | 82 | static inline int cpuset_do_slab_mem_spread(void) | 
 | 83 | { | 
 | 84 | 	return current->flags & PF_SPREAD_SLAB; | 
 | 85 | } | 
 | 86 |  | 
| Paul Menage | 8793d85 | 2007-10-18 23:39:39 -0700 | [diff] [blame] | 87 | extern int current_cpuset_is_being_rebound(void); | 
 | 88 |  | 
| Max Krasnyansky | e761b77 | 2008-07-15 04:43:49 -0700 | [diff] [blame] | 89 | extern void rebuild_sched_domains(void); | 
 | 90 |  | 
| David Rientjes | 75aa199 | 2009-01-06 14:39:01 -0800 | [diff] [blame] | 91 | extern void cpuset_print_task_mems_allowed(struct task_struct *p); | 
 | 92 |  | 
/*
 * Replace the current task's mems_allowed nodemask wholesale with
 * @nodemask.  No locking is taken here; the caller must ensure it is
 * safe to write current->mems_allowed directly.
 */
static inline void set_mems_allowed(nodemask_t nodemask)
{
	current->mems_allowed = nodemask;
}
 | 97 |  | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 98 | #else /* !CONFIG_CPUSETS */ | 
 | 99 |  | 
/*
 * With CONFIG_CPUSETS disabled every cpuset interface collapses to a
 * trivial stub so callers need no #ifdefs: all possible CPUs and all
 * possible memory nodes are always considered allowed.
 */
static inline int cpuset_init(void) { return 0; }
static inline void cpuset_init_smp(void) {}

/* Without cpusets a task may run on any possible CPU. */
static inline void cpuset_cpus_allowed(struct task_struct *p,
				       struct cpumask *mask)
{
	cpumask_copy(mask, cpu_possible_mask);
}
/* Identical to cpuset_cpus_allowed() when cpusets are compiled out. */
static inline void cpuset_cpus_allowed_locked(struct task_struct *p,
					      struct cpumask *mask)
{
	cpumask_copy(mask, cpu_possible_mask);
}

/* Without cpusets a task may allocate from any possible node. */
static inline nodemask_t cpuset_mems_allowed(struct task_struct *p)
{
	return node_possible_map;
}

#define cpuset_current_mems_allowed (node_states[N_HIGH_MEMORY])
static inline void cpuset_init_current_mems_allowed(void) {}
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 121 |  | 
/* Any nodemask passes validation when no cpusets constrain memory. */
static inline int cpuset_nodemask_valid_mems_allowed(nodemask_t *nodemask)
{
	return 1;
}

/* Every node is allowed for both softwall and hardwall checks. */
static inline int cpuset_node_allowed_softwall(int node, gfp_t gfp_mask)
{
	return 1;
}

static inline int cpuset_node_allowed_hardwall(int node, gfp_t gfp_mask)
{
	return 1;
}

/* Likewise every zone is allowed. */
static inline int cpuset_zone_allowed_softwall(struct zone *z, gfp_t gfp_mask)
{
	return 1;
}

static inline int cpuset_zone_allowed_hardwall(struct zone *z, gfp_t gfp_mask)
{
	return 1;
}

/* With no per-task restrictions, any two tasks' masks intersect. */
static inline int cpuset_mems_allowed_intersects(const struct task_struct *tsk1,
						 const struct task_struct *tsk2)
{
	return 1;
}
 | 152 |  | 
/* Memory-pressure accounting is a cpuset feature; nothing to bump. */
static inline void cpuset_memory_pressure_bump(void) {}

/*
 * NOTE(review): 'struct seq_file' is forward-declared only inside the
 * CONFIG_CPUSETS branch above, so this prototype may rely on another
 * header having declared it already -- verify against the include
 * chain for !CONFIG_CPUSETS builds.
 */
static inline void cpuset_task_status_allowed(struct seq_file *m,
						struct task_struct *task)
{
}

static inline void cpuset_lock(void) {}
static inline void cpuset_unlock(void) {}

/* Memory spread is a cpuset feature: no cpusets, no spreading. */
static inline int cpuset_mem_spread_node(void)
{
	return 0;
}

static inline int cpuset_do_page_mem_spread(void)
{
	return 0;
}

static inline int cpuset_do_slab_mem_spread(void)
{
	return 0;
}
 | 177 |  | 
/* No cpuset can be mid-rebind when cpusets do not exist. */
static inline int current_cpuset_is_being_rebound(void)
{
	return 0;
}

/*
 * Without cpusets there is just one scheduler-domain partition; the
 * stub simply re-installs it via partition_sched_domains().
 */
static inline void rebuild_sched_domains(void)
{
	partition_sched_domains(1, NULL, NULL);
}

static inline void cpuset_print_task_mems_allowed(struct task_struct *p)
{
}

static inline void set_mems_allowed(nodemask_t nodemask)
{
}
 | 195 |  | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 196 | #endif /* !CONFIG_CPUSETS */ | 
 | 197 |  | 
 | 198 | #endif /* _LINUX_CPUSET_H */ |