#ifndef __ASM_PPC64_PROCESSOR_H
#define __ASM_PPC64_PROCESSOR_H

/*
 * Copyright (C) 2001 PPC 64 Team, IBM Corp
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#include <linux/stringify.h>
#ifndef __ASSEMBLY__
#include <linux/config.h>
#include <asm/atomic.h>
#include <asm/ppcdebug.h>
#include <asm/a.out.h>
#endif
#include <asm/ptrace.h>
#include <asm/types.h>
#include <asm/systemcfg.h>

/* Machine State Register (MSR) Fields */
#define MSR_SF_LG 63 /* Enable 64 bit mode */
#define MSR_ISF_LG 61 /* Interrupt 64b mode valid on 630 */
#define MSR_HV_LG 60 /* Hypervisor state */
#define MSR_VEC_LG 25 /* Enable AltiVec */
#define MSR_POW_LG 18 /* Enable Power Management */
#define MSR_WE_LG 18 /* Wait State Enable */
#define MSR_TGPR_LG 17 /* TLB Update registers in use */
#define MSR_CE_LG 17 /* Critical Interrupt Enable */
#define MSR_ILE_LG 16 /* Interrupt Little Endian */
#define MSR_EE_LG 15 /* External Interrupt Enable */
#define MSR_PR_LG 14 /* Problem State / Privilege Level */
#define MSR_FP_LG 13 /* Floating Point enable */
#define MSR_ME_LG 12 /* Machine Check Enable */
#define MSR_FE0_LG 11 /* Floating Exception mode 0 */
#define MSR_SE_LG 10 /* Single Step */
#define MSR_BE_LG 9 /* Branch Trace */
#define MSR_DE_LG 9 /* Debug Exception Enable */
#define MSR_FE1_LG 8 /* Floating Exception mode 1 */
#define MSR_IP_LG 6 /* Exception prefix 0x000/0xFFF */
#define MSR_IR_LG 5 /* Instruction Relocate */
#define MSR_DR_LG 4 /* Data Relocate */
#define MSR_PE_LG 3 /* Protection Enable */
#define MSR_PX_LG 2 /* Protection Exclusive Mode */
#define MSR_PMM_LG 2 /* Performance monitor */
#define MSR_RI_LG 1 /* Recoverable Exception */
#define MSR_LE_LG 0 /* Little Endian */

#ifdef __ASSEMBLY__
#define __MASK(X) (1<<(X))
#else
#define __MASK(X) (1UL<<(X))
#endif

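/*
 * Illustration only: each MSR_xxx mask below is simply 1 shifted to the
 * matching MSR_xxx_LG bit position, so MSR_EE == __MASK(MSR_EE_LG)
 * == 1UL << 15 == 0x8000.  A sketch of masking external interrupts
 * around a critical region, using the mfmsr()/__mtmsrd() accessors
 * defined later in this file (the L=1 form of mtmsrd alters only the
 * EE and RI bits):
 *
 *	unsigned long msr = mfmsr();
 *	__mtmsrd(msr & ~MSR_EE, 1);
 *	... critical region ...
 *	__mtmsrd(msr, 1);
 */
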
#define MSR_SF __MASK(MSR_SF_LG) /* Enable 64 bit mode */
#define MSR_ISF __MASK(MSR_ISF_LG) /* Interrupt 64b mode valid on 630 */
#define MSR_HV __MASK(MSR_HV_LG) /* Hypervisor state */
#define MSR_VEC __MASK(MSR_VEC_LG) /* Enable AltiVec */
#define MSR_POW __MASK(MSR_POW_LG) /* Enable Power Management */
#define MSR_WE __MASK(MSR_WE_LG) /* Wait State Enable */
#define MSR_TGPR __MASK(MSR_TGPR_LG) /* TLB Update registers in use */
#define MSR_CE __MASK(MSR_CE_LG) /* Critical Interrupt Enable */
#define MSR_ILE __MASK(MSR_ILE_LG) /* Interrupt Little Endian */
#define MSR_EE __MASK(MSR_EE_LG) /* External Interrupt Enable */
#define MSR_PR __MASK(MSR_PR_LG) /* Problem State / Privilege Level */
#define MSR_FP __MASK(MSR_FP_LG) /* Floating Point enable */
#define MSR_ME __MASK(MSR_ME_LG) /* Machine Check Enable */
#define MSR_FE0 __MASK(MSR_FE0_LG) /* Floating Exception mode 0 */
#define MSR_SE __MASK(MSR_SE_LG) /* Single Step */
#define MSR_BE __MASK(MSR_BE_LG) /* Branch Trace */
#define MSR_DE __MASK(MSR_DE_LG) /* Debug Exception Enable */
#define MSR_FE1 __MASK(MSR_FE1_LG) /* Floating Exception mode 1 */
#define MSR_IP __MASK(MSR_IP_LG) /* Exception prefix 0x000/0xFFF */
#define MSR_IR __MASK(MSR_IR_LG) /* Instruction Relocate */
#define MSR_DR __MASK(MSR_DR_LG) /* Data Relocate */
#define MSR_PE __MASK(MSR_PE_LG) /* Protection Enable */
#define MSR_PX __MASK(MSR_PX_LG) /* Protection Exclusive Mode */
#define MSR_PMM __MASK(MSR_PMM_LG) /* Performance monitor */
#define MSR_RI __MASK(MSR_RI_LG) /* Recoverable Exception */
#define MSR_LE __MASK(MSR_LE_LG) /* Little Endian */

#define MSR_ MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_ISF
#define MSR_KERNEL MSR_ | MSR_SF | MSR_HV

#define MSR_USER32 MSR_ | MSR_PR | MSR_EE
#define MSR_USER64 MSR_USER32 | MSR_SF

/* Floating Point Status and Control Register (FPSCR) Fields */

#define FPSCR_FX 0x80000000 /* FPU exception summary */
#define FPSCR_FEX 0x40000000 /* FPU enabled exception summary */
#define FPSCR_VX 0x20000000 /* Invalid operation summary */
#define FPSCR_OX 0x10000000 /* Overflow exception summary */
#define FPSCR_UX 0x08000000 /* Underflow exception summary */
#define FPSCR_ZX 0x04000000 /* Zero-divide exception summary */
#define FPSCR_XX 0x02000000 /* Inexact exception summary */
#define FPSCR_VXSNAN 0x01000000 /* Invalid op for SNaN */
#define FPSCR_VXISI 0x00800000 /* Invalid op for Inv - Inv */
#define FPSCR_VXIDI 0x00400000 /* Invalid op for Inv / Inv */
#define FPSCR_VXZDZ 0x00200000 /* Invalid op for Zero / Zero */
#define FPSCR_VXIMZ 0x00100000 /* Invalid op for Inv * Zero */
#define FPSCR_VXVC 0x00080000 /* Invalid op for Compare */
#define FPSCR_FR 0x00040000 /* Fraction rounded */
#define FPSCR_FI 0x00020000 /* Fraction inexact */
#define FPSCR_FPRF 0x0001f000 /* FPU Result Flags */
#define FPSCR_FPCC 0x0000f000 /* FPU Condition Codes */
#define FPSCR_VXSOFT 0x00000400 /* Invalid op for software request */
#define FPSCR_VXSQRT 0x00000200 /* Invalid op for square root */
#define FPSCR_VXCVI 0x00000100 /* Invalid op for integer convert */
#define FPSCR_VE 0x00000080 /* Invalid op exception enable */
#define FPSCR_OE 0x00000040 /* IEEE overflow exception enable */
#define FPSCR_UE 0x00000020 /* IEEE underflow exception enable */
#define FPSCR_ZE 0x00000010 /* IEEE zero divide exception enable */
#define FPSCR_XE 0x00000008 /* FP inexact exception enable */
#define FPSCR_NI 0x00000004 /* FPU non IEEE-Mode */
#define FPSCR_RN 0x00000003 /* FPU rounding control */

/* Special Purpose Registers (SPRNs) */

#define SPRN_CDBCR 0x3D7 /* Cache Debug Control Register */
#define SPRN_CTR 0x009 /* Count Register */
#define SPRN_DABR 0x3F5 /* Data Address Breakpoint Register */
#define SPRN_DAC1 0x3F6 /* Data Address Compare 1 */
#define SPRN_DAC2 0x3F7 /* Data Address Compare 2 */
#define SPRN_DAR 0x013 /* Data Address Register */
#define SPRN_DBCR 0x3F2 /* Debug Control Register */
#define DBCR_EDM 0x80000000
#define DBCR_IDM 0x40000000
#define DBCR_RST(x) (((x) & 0x3) << 28)
#define DBCR_RST_NONE 0
#define DBCR_RST_CORE 1
#define DBCR_RST_CHIP 2
#define DBCR_RST_SYSTEM 3
#define DBCR_IC 0x08000000 /* Instruction Completion Debug Event */
#define DBCR_BT 0x04000000 /* Branch Taken Debug Event */
#define DBCR_EDE 0x02000000 /* Exception Debug Event */
#define DBCR_TDE 0x01000000 /* TRAP Debug Event */
#define DBCR_FER 0x00F80000 /* First Events Remaining Mask */
#define DBCR_FT 0x00040000 /* Freeze Timers on Debug Event */
#define DBCR_IA1 0x00020000 /* Instr. Addr. Compare 1 Enable */
#define DBCR_IA2 0x00010000 /* Instr. Addr. Compare 2 Enable */
#define DBCR_D1R 0x00008000 /* Data Addr. Compare 1 Read Enable */
#define DBCR_D1W 0x00004000 /* Data Addr. Compare 1 Write Enable */
#define DBCR_D1S(x) (((x) & 0x3) << 12) /* Data Addr. Compare 1 Size */
#define DAC_BYTE 0
#define DAC_HALF 1
#define DAC_WORD 2
#define DAC_QUAD 3
#define DBCR_D2R 0x00000800 /* Data Addr. Compare 2 Read Enable */
#define DBCR_D2W 0x00000400 /* Data Addr. Compare 2 Write Enable */
#define DBCR_D2S(x) (((x) & 0x3) << 8) /* Data Addr. Compare 2 Size */
#define DBCR_SBT 0x00000040 /* Second Branch Taken Debug Event */
#define DBCR_SED 0x00000020 /* Second Exception Debug Event */
#define DBCR_STD 0x00000010 /* Second Trap Debug Event */
#define DBCR_SIA 0x00000008 /* Second IAC Enable */
#define DBCR_SDA 0x00000004 /* Second DAC Enable */
#define DBCR_JOI 0x00000002 /* JTAG Serial Outbound Int. Enable */
#define DBCR_JII 0x00000001 /* JTAG Serial Inbound Int. Enable */
#define SPRN_DBCR0 0x3F2 /* Debug Control Register 0 */
#define SPRN_DBCR1 0x3BD /* Debug Control Register 1 */
#define SPRN_DBSR 0x3F0 /* Debug Status Register */
#define SPRN_DCCR 0x3FA /* Data Cache Cacheability Register */
#define DCCR_NOCACHE 0 /* Noncacheable */
#define DCCR_CACHE 1 /* Cacheable */
#define SPRN_DCMP 0x3D1 /* Data TLB Compare Register */
#define SPRN_DCWR 0x3BA /* Data Cache Write-thru Register */
#define DCWR_COPY 0 /* Copy-back */
#define DCWR_WRITE 1 /* Write-through */
#define SPRN_DEAR 0x3D5 /* Data Error Address Register */
#define SPRN_DEC 0x016 /* Decrement Register */
#define SPRN_DMISS 0x3D0 /* Data TLB Miss Register */
#define SPRN_DSISR 0x012 /* Data Storage Interrupt Status Register */
#define DSISR_NOHPTE 0x40000000 /* no translation found */
#define DSISR_PROTFAULT 0x08000000 /* protection fault */
#define DSISR_ISSTORE 0x02000000 /* access was a store */
#define DSISR_DABRMATCH 0x00400000 /* hit data breakpoint */
#define DSISR_NOSEGMENT 0x00200000 /* STAB/SLB miss */
#define SPRN_EAR 0x11A /* External Address Register */
#define SPRN_ESR 0x3D4 /* Exception Syndrome Register */
#define ESR_IMCP 0x80000000 /* Instr. Machine Check - Protection */
#define ESR_IMCN 0x40000000 /* Instr. Machine Check - Non-config */
#define ESR_IMCB 0x20000000 /* Instr. Machine Check - Bus error */
#define ESR_IMCT 0x10000000 /* Instr. Machine Check - Timeout */
#define ESR_PIL 0x08000000 /* Program Exception - Illegal */
#define ESR_PPR 0x04000000 /* Program Exception - Privileged */
#define ESR_PTR 0x02000000 /* Program Exception - Trap */
#define ESR_DST 0x00800000 /* Storage Exception - Data miss */
#define ESR_DIZ 0x00400000 /* Storage Exception - Zone fault */
#define SPRN_EVPR 0x3D6 /* Exception Vector Prefix Register */
#define SPRN_HASH1 0x3D2 /* Primary Hash Address Register */
#define SPRN_HASH2 0x3D3 /* Secondary Hash Address Register */
#define SPRN_HID0 0x3F0 /* Hardware Implementation Register 0 */
#define HID0_EMCP (1<<31) /* Enable Machine Check pin */
#define HID0_EBA (1<<29) /* Enable Bus Address Parity */
#define HID0_EBD (1<<28) /* Enable Bus Data Parity */
#define HID0_SBCLK (1<<27)
#define HID0_EICE (1<<26)
#define HID0_ECLK (1<<25)
#define HID0_PAR (1<<24)
#define HID0_DOZE (1<<23)
#define HID0_NAP (1<<22)
#define HID0_SLEEP (1<<21)
#define HID0_DPM (1<<20)
#define HID0_ICE (1<<15) /* Instruction Cache Enable */
#define HID0_DCE (1<<14) /* Data Cache Enable */
#define HID0_ILOCK (1<<13) /* Instruction Cache Lock */
#define HID0_DLOCK (1<<12) /* Data Cache Lock */
#define HID0_ICFI (1<<11) /* Instr. Cache Flash Invalidate */
#define HID0_DCI (1<<10) /* Data Cache Invalidate */
#define HID0_SPD (1<<9) /* Speculative disable */
#define HID0_SGE (1<<7) /* Store Gathering Enable */
#define HID0_SIED (1<<7) /* Serial Instr. Execution [Disable] */
#define HID0_BTIC (1<<5) /* Branch Target Instruction Cache Enable */
#define HID0_ABE (1<<3) /* Address Broadcast Enable */
#define HID0_BHTE (1<<2) /* Branch History Table Enable */
#define HID0_BTCD (1<<1) /* Branch target cache disable */
#define SPRN_MSRDORM 0x3F1 /* Hardware Implementation Register 1 */
#define SPRN_HID1 0x3F1 /* Hardware Implementation Register 1 */
#define SPRN_IABR 0x3F2 /* Instruction Address Breakpoint Register */
#define SPRN_NIADORM 0x3F3 /* Hardware Implementation Register 2 */
#define SPRN_HID4 0x3F4 /* 970 HID4 */
#define SPRN_HID5 0x3F6 /* 970 HID5 */
#define SPRN_TSC 0x3FD /* Thread switch control */
#define SPRN_TST 0x3FC /* Thread switch timeout */
#define SPRN_IAC1 0x3F4 /* Instruction Address Compare 1 */
#define SPRN_IAC2 0x3F5 /* Instruction Address Compare 2 */
#define SPRN_ICCR 0x3FB /* Instruction Cache Cacheability Register */
#define ICCR_NOCACHE 0 /* Noncacheable */
#define ICCR_CACHE 1 /* Cacheable */
#define SPRN_ICDBDR 0x3D3 /* Instruction Cache Debug Data Register */
#define SPRN_ICMP 0x3D5 /* Instruction TLB Compare Register */
#define SPRN_ICTC 0x3FB /* Instruction Cache Throttling Control Reg */
#define SPRN_IMISS 0x3D4 /* Instruction TLB Miss Register */
#define SPRN_IMMR 0x27E /* Internal Memory Map Register */
#define SPRN_L2CR 0x3F9 /* Level 2 Cache Control Register */
#define SPRN_LR 0x008 /* Link Register */
#define SPRN_PBL1 0x3FC /* Protection Bound Lower 1 */
#define SPRN_PBL2 0x3FE /* Protection Bound Lower 2 */
#define SPRN_PBU1 0x3FD /* Protection Bound Upper 1 */
#define SPRN_PBU2 0x3FF /* Protection Bound Upper 2 */
#define SPRN_PID 0x3B1 /* Process ID */
#define SPRN_PIR 0x3FF /* Processor Identification Register */
#define SPRN_PIT 0x3DB /* Programmable Interval Timer */
#define SPRN_PURR 0x135 /* Processor Utilization of Resources Register */
#define SPRN_PVR 0x11F /* Processor Version Register */
#define SPRN_RPA 0x3D6 /* Required Physical Address Register */
#define SPRN_SDA 0x3BF /* Sampled Data Address Register */
#define SPRN_SDR1 0x019 /* MMU Hash Base Register */
#define SPRN_SGR 0x3B9 /* Storage Guarded Register */
#define SGR_NORMAL 0
#define SGR_GUARDED 1
#define SPRN_SIA 0x3BB /* Sampled Instruction Address Register */
#define SPRN_SPRG0 0x110 /* Special Purpose Register General 0 */
#define SPRN_SPRG1 0x111 /* Special Purpose Register General 1 */
#define SPRN_SPRG2 0x112 /* Special Purpose Register General 2 */
#define SPRN_SPRG3 0x113 /* Special Purpose Register General 3 */
#define SPRN_SRR0 0x01A /* Save/Restore Register 0 */
#define SPRN_SRR1 0x01B /* Save/Restore Register 1 */
#define SPRN_TBRL 0x10C /* Time Base Read Lower Register (user, R/O) */
#define SPRN_TBRU 0x10D /* Time Base Read Upper Register (user, R/O) */
#define SPRN_TBWL 0x11C /* Time Base Write Lower Register (super, W/O) */
#define SPRN_TBWU 0x11D /* Time Base Write Upper Register (super, W/O) */
#define SPRN_HIOR 0x137 /* 970 Hypervisor interrupt offset */
#define SPRN_TCR 0x3DA /* Timer Control Register */
#define TCR_WP(x) (((x)&0x3)<<30) /* WDT Period */
#define WP_2_17 0 /* 2^17 clocks */
#define WP_2_21 1 /* 2^21 clocks */
#define WP_2_25 2 /* 2^25 clocks */
#define WP_2_29 3 /* 2^29 clocks */
#define TCR_WRC(x) (((x)&0x3)<<28) /* WDT Reset Control */
#define WRC_NONE 0 /* No reset will occur */
#define WRC_CORE 1 /* Core reset will occur */
#define WRC_CHIP 2 /* Chip reset will occur */
#define WRC_SYSTEM 3 /* System reset will occur */
#define TCR_WIE 0x08000000 /* WDT Interrupt Enable */
#define TCR_PIE 0x04000000 /* PIT Interrupt Enable */
#define TCR_FP(x) (((x)&0x3)<<24) /* FIT Period */
#define FP_2_9 0 /* 2^9 clocks */
#define FP_2_13 1 /* 2^13 clocks */
#define FP_2_17 2 /* 2^17 clocks */
#define FP_2_21 3 /* 2^21 clocks */
#define TCR_FIE 0x00800000 /* FIT Interrupt Enable */
#define TCR_ARE 0x00400000 /* Auto Reload Enable */
#define SPRN_THRM1 0x3FC /* Thermal Management Register 1 */
#define THRM1_TIN (1<<0)
#define THRM1_TIV (1<<1)
#define THRM1_THRES (0x7f<<2)
#define THRM1_TID (1<<29)
#define THRM1_TIE (1<<30)
#define THRM1_V (1<<31)
#define SPRN_THRM2 0x3FD /* Thermal Management Register 2 */
#define SPRN_THRM3 0x3FE /* Thermal Management Register 3 */
#define THRM3_E (1<<31)
#define SPRN_TSR 0x3D8 /* Timer Status Register */
#define TSR_ENW 0x80000000 /* Enable Next Watchdog */
#define TSR_WIS 0x40000000 /* WDT Interrupt Status */
#define TSR_WRS(x) (((x)&0x3)<<28) /* WDT Reset Status */
#define WRS_NONE 0 /* No WDT reset occurred */
#define WRS_CORE 1 /* WDT forced core reset */
#define WRS_CHIP 2 /* WDT forced chip reset */
#define WRS_SYSTEM 3 /* WDT forced system reset */
#define TSR_PIS 0x08000000 /* PIT Interrupt Status */
#define TSR_FIS 0x04000000 /* FIT Interrupt Status */
#define SPRN_USIA 0x3AB /* User Sampled Instruction Address Register */
#define SPRN_XER 0x001 /* Fixed Point Exception Register */
#define SPRN_ZPR 0x3B0 /* Zone Protection Register */
#define SPRN_VRSAVE 0x100 /* Vector save */

/* Performance monitor SPRs */
#define SPRN_SIAR 780
#define SPRN_SDAR 781
#define SPRN_MMCRA 786
#define MMCRA_SIHV 0x10000000UL /* state of MSR HV when SIAR set */
#define MMCRA_SIPR 0x08000000UL /* state of MSR PR when SIAR set */
#define MMCRA_SAMPLE_ENABLE 0x00000001UL /* enable sampling */
#define SPRN_PMC1 787
#define SPRN_PMC2 788
#define SPRN_PMC3 789
#define SPRN_PMC4 790
#define SPRN_PMC5 791
#define SPRN_PMC6 792
#define SPRN_PMC7 793
#define SPRN_PMC8 794
#define SPRN_MMCR0 795
#define MMCR0_FC 0x80000000UL /* freeze counters. set to 1 on a perfmon exception */
#define MMCR0_FCS 0x40000000UL /* freeze in supervisor state */
#define MMCR0_KERNEL_DISABLE MMCR0_FCS
#define MMCR0_FCP 0x20000000UL /* freeze in problem state */
#define MMCR0_PROBLEM_DISABLE MMCR0_FCP
#define MMCR0_FCM1 0x10000000UL /* freeze counters while MSR mark = 1 */
#define MMCR0_FCM0 0x08000000UL /* freeze counters while MSR mark = 0 */
#define MMCR0_PMXE 0x04000000UL /* performance monitor exception enable */
#define MMCR0_FCECE 0x02000000UL /* freeze counters on enabled condition or event */
#define MMCR0_TBEE 0x00400000UL /* time base exception enable */
#define MMCR0_PMC1CE 0x00008000UL /* PMC1 count enable */
#define MMCR0_PMCjCE 0x00004000UL /* PMCj count enable */
#define MMCR0_TRIGGER 0x00002000UL /* TRIGGER enable */
#define MMCR0_PMAO 0x00000080UL /* performance monitor alert has occurred, set to 0 after handling exception */
#define MMCR0_SHRFC 0x00000040UL /* share freeze conditions between threads */
#define MMCR0_FCTI 0x00000008UL /* freeze counters in tags inactive mode */
#define MMCR0_FCTA 0x00000004UL /* freeze counters in tags active mode */
#define MMCR0_FCWAIT 0x00000002UL /* freeze counter in WAIT state */
#define MMCR0_FCHV 0x00000001UL /* freeze conditions in hypervisor mode */
#define SPRN_MMCR1 798

/* Short-hand versions for a number of the above SPRNs */

#define CTR SPRN_CTR /* Counter Register */
#define DAR SPRN_DAR /* Data Address Register */
#define DABR SPRN_DABR /* Data Address Breakpoint Register */
#define DCMP SPRN_DCMP /* Data TLB Compare Register */
#define DEC SPRN_DEC /* Decrement Register */
#define DMISS SPRN_DMISS /* Data TLB Miss Register */
#define DSISR SPRN_DSISR /* Data Storage Interrupt Status Register */
#define EAR SPRN_EAR /* External Address Register */
#define HASH1 SPRN_HASH1 /* Primary Hash Address Register */
#define HASH2 SPRN_HASH2 /* Secondary Hash Address Register */
#define HID0 SPRN_HID0 /* Hardware Implementation Register 0 */
#define MSRDORM SPRN_MSRDORM /* MSR Dormant Register */
#define NIADORM SPRN_NIADORM /* NIA Dormant Register */
#define TSC SPRN_TSC /* Thread switch control */
#define TST SPRN_TST /* Thread switch timeout */
#define IABR SPRN_IABR /* Instruction Address Breakpoint Register */
#define ICMP SPRN_ICMP /* Instruction TLB Compare Register */
#define IMISS SPRN_IMISS /* Instruction TLB Miss Register */
#define IMMR SPRN_IMMR /* PPC 860/821 Internal Memory Map Register */
#define L2CR SPRN_L2CR /* PPC 750 L2 control register */
#define __LR SPRN_LR
#define PVR SPRN_PVR /* Processor Version */
#define PIR SPRN_PIR /* Processor ID */
#define PURR SPRN_PURR /* Processor Utilization of Resource Register */
//#define RPA SPRN_RPA /* Required Physical Address Register */
#define SDR1 SPRN_SDR1 /* MMU hash base register */
#define SPR0 SPRN_SPRG0 /* Supervisor Private Registers */
#define SPR1 SPRN_SPRG1
#define SPR2 SPRN_SPRG2
#define SPR3 SPRN_SPRG3
#define SPRG0 SPRN_SPRG0
#define SPRG1 SPRN_SPRG1
#define SPRG2 SPRN_SPRG2
#define SPRG3 SPRN_SPRG3
#define SRR0 SPRN_SRR0 /* Save and Restore Register 0 */
#define SRR1 SPRN_SRR1 /* Save and Restore Register 1 */
#define TBRL SPRN_TBRL /* Time Base Read Lower Register */
#define TBRU SPRN_TBRU /* Time Base Read Upper Register */
#define TBWL SPRN_TBWL /* Time Base Write Lower Register */
#define TBWU SPRN_TBWU /* Time Base Write Upper Register */
#define ICTC 1019
#define THRM1 SPRN_THRM1 /* Thermal Management Register 1 */
#define THRM2 SPRN_THRM2 /* Thermal Management Register 2 */
#define THRM3 SPRN_THRM3 /* Thermal Management Register 3 */
#define XER SPRN_XER

/* Processor Version Register (PVR) field extraction */

#define PVR_VER(pvr) (((pvr) >> 16) & 0xFFFF) /* Version field */
#define PVR_REV(pvr) (((pvr) >> 0) & 0xFFFF) /* Revision field */

/* Processor Version Numbers */
#define PV_NORTHSTAR 0x0033
#define PV_PULSAR 0x0034
#define PV_POWER4 0x0035
#define PV_ICESTAR 0x0036
#define PV_SSTAR 0x0037
#define PV_POWER4p 0x0038
#define PV_970 0x0039
#define PV_POWER5 0x003A
#define PV_POWER5p 0x003B
#define PV_970FX 0x003C
#define PV_630 0x0040
#define PV_630p 0x0041

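/*
 * Illustration only: the PVR carries the version number in its upper
 * halfword and the revision in its lower halfword, so a hypothetical
 * check for a PPC970FX could look like
 *
 *	unsigned long pvr = mfspr(SPRN_PVR);
 *	if (PVR_VER(pvr) == PV_970FX)
 *		...
 *
 * e.g. pvr == 0x003C0300 gives PVR_VER() == 0x003C and PVR_REV() == 0x0300
 * (mfspr() is defined later in this file).
 */
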
/* Platforms supported by PPC64 */
#define PLATFORM_PSERIES 0x0100
#define PLATFORM_PSERIES_LPAR 0x0101
#define PLATFORM_ISERIES_LPAR 0x0201
#define PLATFORM_LPAR 0x0001
#define PLATFORM_POWERMAC 0x0400
#define PLATFORM_MAPLE 0x0500

/* Compatibility with drivers coming from PPC32 world */
#define _machine (systemcfg->platform)
#define _MACH_Pmac PLATFORM_POWERMAC

/*
 * List of interrupt controllers.
 */
#define IC_INVALID 0
#define IC_OPEN_PIC 1
#define IC_PPC_XIC 2

#define XGLUE(a,b) a##b
#define GLUE(a,b) XGLUE(a,b)

/* iSeries CTRL register (for runlatch) */

#define CTRLT 0x098
#define CTRLF 0x088
#define RUNLATCH 0x0001

#ifdef __ASSEMBLY__

#define _GLOBAL(name) \
	.section ".text"; \
	.align 2 ; \
	.globl name; \
	.globl GLUE(.,name); \
	.section ".opd","aw"; \
name: \
	.quad GLUE(.,name); \
	.quad .TOC.@tocbase; \
	.quad 0; \
	.previous; \
	.type GLUE(.,name),@function; \
GLUE(.,name):

#define _STATIC(name) \
	.section ".text"; \
	.align 2 ; \
	.section ".opd","aw"; \
name: \
	.quad GLUE(.,name); \
	.quad .TOC.@tocbase; \
	.quad 0; \
	.previous; \
	.type GLUE(.,name),@function; \
GLUE(.,name):

#else /* __ASSEMBLY__ */

/*
 * Default implementation of macro that returns current
 * instruction pointer ("program counter").
 */
#define current_text_addr() ({ __label__ _l; _l: &&_l;})

/* Macros for setting and retrieving special purpose registers */

#define mfmsr() ({unsigned long rval; \
		asm volatile("mfmsr %0" : "=r" (rval)); rval;})

#define __mtmsrd(v, l) asm volatile("mtmsrd %0," __stringify(l) \
		: : "r" (v))
#define mtmsrd(v) __mtmsrd((v), 0)

#define mfspr(rn) ({unsigned long rval; \
		asm volatile("mfspr %0," __stringify(rn) \
		: "=r" (rval)); rval;})
#define mtspr(rn, v) asm volatile("mtspr " __stringify(rn) ",%0" : : "r" (v))

#define mftb() ({unsigned long rval; \
		asm volatile("mftb %0" : "=r" (rval)); rval;})

#define mttbl(v) asm volatile("mttbl %0":: "r"(v))
#define mttbu(v) asm volatile("mttbu %0":: "r"(v))

#define mfasr() ({unsigned long rval; \
		asm volatile("mfasr %0" : "=r" (rval)); rval;})

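/*
 * Illustration only: mfspr()/mtspr() paste the SPR number into the
 * instruction via __stringify(), so the argument must be a compile-time
 * constant such as one of the SPRN_xxx values above, e.g.
 *
 *	unsigned long dec = mfspr(SPRN_DEC);	read the decrementer
 *	mtspr(SPRN_DEC, ticks);			reload it ("ticks" is just
 *						an example variable)
 */
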
/*
 * Set the full 64-bit timebase.  The low half is cleared first so that a
 * carry out of TBL cannot increment TBU between the two writes.
 */
static inline void set_tb(unsigned int upper, unsigned int lower)
{
	mttbl(0);
	mttbu(upper);
	mttbl(lower);
}

#define __get_SP() ({unsigned long sp; \
		asm volatile("mr %0,1": "=r" (sp)); sp;})

#ifdef __KERNEL__

extern int have_of;
extern u64 ppc64_interrupt_controller;

struct task_struct;
void start_thread(struct pt_regs *regs, unsigned long fdptr, unsigned long sp);
void release_thread(struct task_struct *);

/* Prepare to copy thread state - unlazy all lazy status */
extern void prepare_to_copy(struct task_struct *tsk);

/* Create a new kernel thread. */
extern long kernel_thread(int (*fn)(void *), void *arg, unsigned long flags);

/* Lazy FPU handling on uni-processor */
extern struct task_struct *last_task_used_math;
extern struct task_struct *last_task_used_altivec;

/* 64-bit user address space is 41-bits (2TBs user VM) */
#define TASK_SIZE_USER64 (0x0000020000000000UL)

/*
 * 32-bit user address space is 4GB - 1 page
 * (this 1 page is needed so referencing of 0xFFFFFFFF generates EFAULT)
 */
#define TASK_SIZE_USER32 (0x0000000100000000UL - (1*PAGE_SIZE))

#define TASK_SIZE (test_thread_flag(TIF_32BIT) ? \
		TASK_SIZE_USER32 : TASK_SIZE_USER64)

/* We can't actually tell the TASK_SIZE given just the mm, but default
 * to the 64-bit case to make sure that enough gets cleaned up. */
#define MM_VM_SIZE(mm) TASK_SIZE_USER64

/* This decides where the kernel will search for a free chunk of vm
 * space during mmap's.
 */
#define TASK_UNMAPPED_BASE_USER32 (PAGE_ALIGN(TASK_SIZE_USER32 / 4))
#define TASK_UNMAPPED_BASE_USER64 (PAGE_ALIGN(TASK_SIZE_USER64 / 4))

#define TASK_UNMAPPED_BASE ((test_thread_flag(TIF_32BIT)||(ppcdebugset(PPCDBG_BINFMT_32ADDR))) ? \
		TASK_UNMAPPED_BASE_USER32 : TASK_UNMAPPED_BASE_USER64 )

typedef struct {
	unsigned long seg;
} mm_segment_t;

struct thread_struct {
	unsigned long ksp; /* Kernel stack pointer */
	unsigned long ksp_vsid;
	struct pt_regs *regs; /* Pointer to saved register state */
	mm_segment_t fs; /* for get_fs() validation */
	double fpr[32]; /* Complete floating point set */
	unsigned long fpscr; /* Floating point status (plus pad) */
	unsigned long fpexc_mode; /* Floating-point exception mode */
	unsigned long start_tb; /* Start purr when proc switched in */
	unsigned long accum_tb; /* Total accumulated purr for process */
	unsigned long vdso_base; /* base of the vDSO library */
#ifdef CONFIG_ALTIVEC
	/* Complete AltiVec register set */
	vector128 vr[32] __attribute((aligned(16)));
	/* AltiVec status */
	vector128 vscr __attribute((aligned(16)));
	unsigned long vrsave;
	int used_vr; /* set if process has used altivec */
#endif /* CONFIG_ALTIVEC */
};

#define ARCH_MIN_TASKALIGN 16

#define INIT_SP (sizeof(init_stack) + (unsigned long) &init_stack)

#define INIT_THREAD { \
	.ksp = INIT_SP, \
	.regs = (struct pt_regs *)INIT_SP - 1, \
	.fs = KERNEL_DS, \
	.fpr = {0}, \
	.fpscr = 0, \
	.fpexc_mode = MSR_FE0|MSR_FE1, \
}

/*
 * Note: the vm_start and vm_end fields here should *not*
 * be in kernel space. (Could vm_end == vm_start perhaps?)
 */
#define IOREMAP_MMAP { &ioremap_mm, 0, 0x1000, NULL, \
		PAGE_SHARED, VM_READ | VM_WRITE | VM_EXEC, \
		1, NULL, NULL }

extern struct mm_struct ioremap_mm;

/*
 * Return saved PC of a blocked thread. For now, this is the "user" PC
 */
#define thread_saved_pc(tsk) \
	((tsk)->thread.regs? (tsk)->thread.regs->nip: 0)

unsigned long get_wchan(struct task_struct *p);

#define KSTK_EIP(tsk) ((tsk)->thread.regs? (tsk)->thread.regs->nip: 0)
#define KSTK_ESP(tsk) ((tsk)->thread.regs? (tsk)->thread.regs->gpr[1]: 0)

/* Get/set floating-point exception mode */
#define GET_FPEXC_CTL(tsk, adr) get_fpexc_mode((tsk), (adr))
#define SET_FPEXC_CTL(tsk, val) set_fpexc_mode((tsk), (val))

extern int get_fpexc_mode(struct task_struct *tsk, unsigned long adr);
extern int set_fpexc_mode(struct task_struct *tsk, unsigned int val);

static inline unsigned int __unpack_fe01(unsigned long msr_bits)
{
	return ((msr_bits & MSR_FE0) >> 10) | ((msr_bits & MSR_FE1) >> 8);
}

static inline unsigned long __pack_fe01(unsigned int fpmode)
{
	return ((fpmode << 10) & MSR_FE0) | ((fpmode << 8) & MSR_FE1);
}
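
/*
 * Illustration only: the two MSR floating-point exception mode bits are
 * packed into a two-bit value with FE0 in bit 1 and FE1 in bit 0 (both
 * bits set is the precise-interrupts mode), so
 *
 *	__pack_fe01(3) == MSR_FE0 | MSR_FE1
 *	__pack_fe01(0) == 0
 *	__unpack_fe01(MSR_FE0) == 2
 *	__unpack_fe01(MSR_FE1) == 1
 */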

#define cpu_relax() do { HMT_low(); HMT_medium(); barrier(); } while (0)

/*
 * Prefetch macros.
 */
#define ARCH_HAS_PREFETCH
#define ARCH_HAS_PREFETCHW
#define ARCH_HAS_SPINLOCK_PREFETCH

static inline void prefetch(const void *x)
{
	__asm__ __volatile__ ("dcbt 0,%0" : : "r" (x));
}

static inline void prefetchw(const void *x)
{
	__asm__ __volatile__ ("dcbtst 0,%0" : : "r" (x));
}

#define spin_lock_prefetch(x) prefetchw(x)
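
/*
 * Illustration only: a sketch of prefetching the next element while
 * walking a singly linked list (assuming a hypothetical
 * "struct node { struct node *next; ... }"):
 *
 *	for (n = head; n; n = n->next) {
 *		prefetch(n->next);
 *		... process n ...
 *	}
 *
 * prefetchw() is the store variant (dcbtst), for data about to be written.
 */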

#define HAVE_ARCH_PICK_MMAP_LAYOUT

#endif /* __KERNEL__ */

#endif /* __ASSEMBLY__ */

/*
 * Number of entries in the SLB. If this ever changes we should handle
 * it with a cpu feature fixup.
 */
#define SLB_NUM_ENTRIES 64

#endif /* __ASM_PPC64_PROCESSOR_H */