/*
 * Copyright IBM Corp. 2008
 *
 * Guest page hinting for unused pages.
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */
 | 8 |  | 
 | 9 | #include <linux/kernel.h> | 
 | 10 | #include <linux/errno.h> | 
 | 11 | #include <linux/types.h> | 
 | 12 | #include <linux/mm.h> | 
 | 13 | #include <linux/init.h> | 
 | 14 |  | 
 | 15 | #define ESSA_SET_STABLE		1 | 
 | 16 | #define ESSA_SET_UNUSED		2 | 
 | 17 |  | 
| Heiko Carstens | 2ddddf3 | 2009-09-11 10:29:01 +0200 | [diff] [blame] | 18 | static int cmma_flag = 1; | 
| Martin Schwidefsky | 45e576b | 2008-05-07 09:22:59 +0200 | [diff] [blame] | 19 |  | 
 | 20 | static int __init cmma(char *str) | 
 | 21 | { | 
 | 22 | 	char *parm; | 
| Heiko Carstens | 2ddddf3 | 2009-09-11 10:29:01 +0200 | [diff] [blame] | 23 |  | 
| Martin Schwidefsky | 45e576b | 2008-05-07 09:22:59 +0200 | [diff] [blame] | 24 | 	parm = strstrip(str); | 
 | 25 | 	if (strcmp(parm, "yes") == 0 || strcmp(parm, "on") == 0) { | 
 | 26 | 		cmma_flag = 1; | 
 | 27 | 		return 1; | 
 | 28 | 	} | 
 | 29 | 	cmma_flag = 0; | 
 | 30 | 	if (strcmp(parm, "no") == 0 || strcmp(parm, "off") == 0) | 
 | 31 | 		return 1; | 
 | 32 | 	return 0; | 
 | 33 | } | 
| Martin Schwidefsky | 45e576b | 2008-05-07 09:22:59 +0200 | [diff] [blame] | 34 | __setup("cmma=", cmma); | 
 | 35 |  | 
/*
 * Probe for the ESSA instruction (extract and set storage attributes,
 * opcode 0xb9ab): execute it once and rely on the exception table entry
 * to catch the operation exception on machines without the facility.
 * On failure cmma_flag is cleared so the page hinting hooks become no-ops.
 */
void __init cmma_init(void)
{
	/* Operands are pinned to specific registers for the hand-coded insn. */
	register unsigned long tmp asm("0") = 0;
	register int rc asm("1") = -EOPNOTSUPP;

	if (!cmma_flag)
		return;
	asm volatile(
		"       .insn rrf,0xb9ab0000,%1,%1,0,0\n"
		/* Label 0 is reached only if ESSA did not fault; clear rc. */
		"0:     la      %0,0\n"
		"1:\n"
		/* A fault at 0b resumes at 1b, leaving rc = -EOPNOTSUPP. */
		EX_TABLE(0b,1b)
		: "+&d" (rc), "+&d" (tmp));
	if (rc)
		cmma_flag = 0;
}
 | 52 |  | 
/*
 * Hint to the host that every 4K page in this order-sized block is
 * unused: issue ESSA with the "set unused" operation code per page.
 * rc receives the previous page state but is intentionally ignored.
 */
static inline void set_page_unstable(struct page *page, int order)
{
	int i, rc;

	for (i = 0; i < (1 << order); i++)
		asm volatile(".insn rrf,0xb9ab0000,%0,%1,%2,0"
			     : "=&d" (rc)
			     : "a" (page_to_phys(page + i)),
			       "i" (ESSA_SET_UNUSED));
}
 | 63 |  | 
| Heiko Carstens | 846955c | 2009-09-22 22:58:44 +0200 | [diff] [blame] | 64 | void arch_free_page(struct page *page, int order) | 
 | 65 | { | 
 | 66 | 	if (!cmma_flag) | 
 | 67 | 		return; | 
 | 68 | 	set_page_unstable(page, order); | 
 | 69 | } | 
 | 70 |  | 
/*
 * Revoke the unused hint for every 4K page in this order-sized block:
 * issue ESSA with the "set stable" operation code per page, making the
 * contents guaranteed to be preserved by the host again.
 * rc receives the previous page state but is intentionally ignored.
 */
static inline void set_page_stable(struct page *page, int order)
{
	int i, rc;

	for (i = 0; i < (1 << order); i++)
		asm volatile(".insn rrf,0xb9ab0000,%0,%1,%2,0"
			     : "=&d" (rc)
			     : "a" (page_to_phys(page + i)),
			       "i" (ESSA_SET_STABLE));
}
 | 81 |  | 
| Martin Schwidefsky | 45e576b | 2008-05-07 09:22:59 +0200 | [diff] [blame] | 82 | void arch_alloc_page(struct page *page, int order) | 
 | 83 | { | 
| Heiko Carstens | 846955c | 2009-09-22 22:58:44 +0200 | [diff] [blame] | 84 | 	if (!cmma_flag) | 
 | 85 | 		return; | 
 | 86 | 	set_page_stable(page, order); | 
 | 87 | } | 
 | 88 |  | 
 | 89 | void arch_set_page_states(int make_stable) | 
 | 90 | { | 
 | 91 | 	unsigned long flags, order, t; | 
 | 92 | 	struct list_head *l; | 
 | 93 | 	struct page *page; | 
 | 94 | 	struct zone *zone; | 
| Martin Schwidefsky | 45e576b | 2008-05-07 09:22:59 +0200 | [diff] [blame] | 95 |  | 
 | 96 | 	if (!cmma_flag) | 
 | 97 | 		return; | 
| Heiko Carstens | 846955c | 2009-09-22 22:58:44 +0200 | [diff] [blame] | 98 | 	if (make_stable) | 
 | 99 | 		drain_local_pages(NULL); | 
 | 100 | 	for_each_populated_zone(zone) { | 
 | 101 | 		spin_lock_irqsave(&zone->lock, flags); | 
 | 102 | 		for_each_migratetype_order(order, t) { | 
 | 103 | 			list_for_each(l, &zone->free_area[order].free_list[t]) { | 
 | 104 | 				page = list_entry(l, struct page, lru); | 
 | 105 | 				if (make_stable) | 
 | 106 | 					set_page_stable(page, order); | 
 | 107 | 				else | 
 | 108 | 					set_page_unstable(page, order); | 
 | 109 | 			} | 
 | 110 | 		} | 
 | 111 | 		spin_unlock_irqrestore(&zone->lock, flags); | 
 | 112 | 	} | 
| Martin Schwidefsky | 45e576b | 2008-05-07 09:22:59 +0200 | [diff] [blame] | 113 | } |