/*
 * include/asm-sh64/tlb.h
 *
 * Copyright (C) 2003  Paul Mundt
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 */
#ifndef __ASM_SH64_TLB_H
#define __ASM_SH64_TLB_H

/*
 * Note! These are mostly unused; we just need the xTLB_LAST_VAR_UNRESTRICTED
 * definitions for head.S. Once that limitation is gone, we can clean the
 * rest of this up.
 */

/* ITLB defines */
#define ITLB_FIXED	0x00000000	/* First fixed ITLB, see head.S */
#define ITLB_LAST_VAR_UNRESTRICTED	0x000003F0	/* Last ITLB */

/* DTLB defines */
#define DTLB_FIXED	0x00800000	/* First fixed DTLB, see head.S */
#define DTLB_LAST_VAR_UNRESTRICTED	0x008003F0	/* Last DTLB */

#ifndef __ASSEMBLY__

/**
 * for_each_dtlb_entry
 *
 * @tlb:	TLB entry
 *
 * Iterate over free (non-wired) DTLB entries.
 */
#define for_each_dtlb_entry(tlb)		\
	for (tlb = cpu_data->dtlb.first;	\
	     tlb <= cpu_data->dtlb.last;	\
	     tlb += cpu_data->dtlb.step)

/**
 * for_each_itlb_entry
 *
 * @tlb:	TLB entry
 *
 * Iterate over free (non-wired) ITLB entries.
 */
#define for_each_itlb_entry(tlb)		\
	for (tlb = cpu_data->itlb.first;	\
	     tlb <= cpu_data->itlb.last;	\
	     tlb += cpu_data->itlb.step)

/**
 * __flush_tlb_slot
 *
 * @slot:	Address of TLB slot.
 *
 * Flushes TLB slot @slot.
 */
static inline void __flush_tlb_slot(unsigned long long slot)
{
	__asm__ __volatile__ ("putcfg %0, 0, r63\n" : : "r" (slot));
}
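
/*
 * Illustrative sketch only (the helper name below is made up, not part of
 * this header): combining for_each_dtlb_entry() with __flush_tlb_slot() to
 * invalidate every free (non-wired) DTLB slot.
 */
static inline void __example_flush_free_dtlb(void)
{
	unsigned long long tlb;

	/* putcfg of r63 (the always-zero register) invalidates the slot. */
	for_each_dtlb_entry(tlb)
		__flush_tlb_slot(tlb);
}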

/* arch/sh64/mm/tlb.c */
extern int sh64_tlb_init(void);
extern unsigned long long sh64_next_free_dtlb_entry(void);
extern unsigned long long sh64_get_wired_dtlb_entry(void);
extern int sh64_put_wired_dtlb_entry(unsigned long long entry);

extern void sh64_setup_tlb_slot(unsigned long long config_addr, unsigned long eaddr, unsigned long asid, unsigned long paddr);
extern void sh64_teardown_tlb_slot(unsigned long long config_addr);
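
/*
 * Illustrative sketch only (hypothetical helper, placeholder arguments):
 * the intended lifecycle of a wired DTLB entry using the arch/sh64/mm/tlb.c
 * interfaces declared above.
 */
static inline int __example_wired_dtlb_lifecycle(unsigned long eaddr,
						 unsigned long asid,
						 unsigned long paddr)
{
	unsigned long long entry;

	/* Reserve a wired (non-replaceable) DTLB slot. */
	entry = sh64_get_wired_dtlb_entry();

	/* Program the slot with the eaddr/asid -> paddr translation. */
	sh64_setup_tlb_slot(entry, eaddr, asid, paddr);

	/* Invalidate the slot and return it to the free pool. */
	sh64_teardown_tlb_slot(entry);
	return sh64_put_wired_dtlb_entry(entry);
}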

#define tlb_start_vma(tlb, vma) \
	flush_cache_range(vma, vma->vm_start, vma->vm_end)

#define tlb_end_vma(tlb, vma)	\
	flush_tlb_range(vma, vma->vm_start, vma->vm_end)

#define __tlb_remove_tlb_entry(tlb, pte, address)	do { } while (0)

/*
 * Flush whole TLBs for MM
 */
#define tlb_flush(tlb)		flush_tlb_mm((tlb)->mm)
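
/*
 * The definitions above are the hooks <asm-generic/tlb.h> expects from the
 * architecture: tlb_start_vma()/tlb_end_vma() bracket the unmapping of each
 * VMA, __tlb_remove_tlb_entry() is called for every PTE torn down (a no-op
 * here), and tlb_flush() flushes the whole TLB for the mm once the gathered
 * pages are released.
 */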

#include <asm-generic/tlb.h>

#endif /* __ASSEMBLY__ */

#endif /* __ASM_SH64_TLB_H */