/*
 * include/asm-sh/cpu-sh4/cacheflush.h
 *
 * Copyright (C) 1999 Niibe Yutaka
 * Copyright (C) 2003 Paul Mundt
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 */
#ifndef __ASM_CPU_SH4_CACHEFLUSH_H
#define __ASM_CPU_SH4_CACHEFLUSH_H

/*
 *  Caches are broken on SH-4 (unless we use write-through
 *  caching, in which case they're only semi-broken),
 *  so explicit cache flushing is needed.
 */
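/* Flush the entire cache, or just the entire D-cache, for this CPU. */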
void flush_cache_all(void);
void flush_dcache_all(void);
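/*
 * Flush all cache lines belonging to an address space; the fork-time
 * variant flush_cache_dup_mm() maps to the same operation here.
 */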
void flush_cache_mm(struct mm_struct *mm);
#define flush_cache_dup_mm(mm) flush_cache_mm(mm)
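/*
 * Ranged and per-page variants for user mappings, plus flush_dcache_page()
 * for keeping the D-cache coherent when the kernel touches a page that may
 * also be mapped into user space.
 */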
void flush_cache_range(struct vm_area_struct *vma, unsigned long start,
		       unsigned long end);
void flush_cache_page(struct vm_area_struct *vma, unsigned long addr,
		      unsigned long pfn);
void flush_dcache_page(struct page *pg);

#define flush_dcache_mmap_lock(mapping)		do { } while (0)
#define flush_dcache_mmap_unlock(mapping)	do { } while (0)

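/*
 * I-cache/D-cache coherency: used after the kernel writes instructions
 * (e.g. module text), after a signal trampoline is written to the user
 * stack, and when instructions are written into another process's pages
 * (e.g. via ptrace).
 */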
void flush_icache_range(unsigned long start, unsigned long end);
void flush_cache_sigtramp(unsigned long addr);
void flush_icache_user_range(struct vm_area_struct *vma, struct page *page,
			     unsigned long addr, int len);
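/*
 * Illustrative sketch only (not part of this header): after copying
 * instructions into a buffer, the I-cache must be brought in sync with
 * the D-cache before that code runs. code_buf/insns/len are hypothetical.
 *
 *	memcpy(code_buf, insns, len);
 *	flush_icache_range((unsigned long)code_buf,
 *			   (unsigned long)code_buf + len);
 */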

#define flush_icache_page(vma,pg)		do { } while (0)

/* Initialization of P3 area for copy_user_page */
void p3_cache_init(void);

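/*
 * PG_arch_1 is the architecture-private page flag; the SH-4 mm code uses it
 * (as PG_mapped) to remember whether a page has been mapped, so the
 * P3/copy_user_page() cache handling can tell when flushing is needed.
 */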
#define PG_mapped	PG_arch_1

#endif /* __ASM_CPU_SH4_CACHEFLUSH_H */