	/* We need to carefully read the error status, ACK the errors,
	 * prevent recursive traps, and pass the information on to C
	 * code for logging.
	 *
	 * We pass the AFAR in as-is, and we encode the status
	 * information as described in asm-sparc64/sfafsr.h
	 */
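	/* A rough C sketch (illustrative only, not code from this file) of
	 * how the C logging side could unpack the status word assembled
	 * below; the shift names are the ones used in this file and come
	 * from asm-sparc64/sfafsr.h, and the masks mirror the "Paranoia"
	 * masks applied before packing:
	 *
	 *	unsigned long trap_type = (status >> SFSTAT_TRAP_TYPE_SHIFT) & 0x1ff;
	 *	unsigned long tl_gt_one = (status >> SFSTAT_TL_GT_ONE_SHIFT) & 0x1;
	 *	unsigned long udbh_stat = (status >> SFSTAT_UDBH_SHIFT) & 0x3ff;
	 *	unsigned long udbl_stat = (status >> SFSTAT_UDBL_SHIFT) & 0x3ff;
	 */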
	.type		__spitfire_access_error,#function
__spitfire_access_error:
	/* Disable ESTATE error reporting so that we do not take
	 * recursive traps and RED state the processor.
	 */
	stxa		%g0, [%g0] ASI_ESTATE_ERROR_EN
	membar		#Sync

	mov		UDBE_UE, %g1
	ldxa		[%g0] ASI_AFSR, %g4	! Get AFSR

	/* __spitfire_cee_trap branches here with AFSR in %g4 and
	 * UDBE_CE in %g1.  It only clears ESTATE_ERR_CE in the ESTATE
	 * Error Enable register.
	 */
__spitfire_cee_trap_continue:
	ldxa		[%g0] ASI_AFAR, %g5	! Get AFAR

	rdpr		%tt, %g3
	and		%g3, 0x1ff, %g3		! Paranoia
	sllx		%g3, SFSTAT_TRAP_TYPE_SHIFT, %g3
	or		%g4, %g3, %g4
	rdpr		%tl, %g3
	cmp		%g3, 1
	mov		1, %g3
	bleu		%xcc, 1f
	 sllx		%g3, SFSTAT_TL_GT_ONE_SHIFT, %g3

	or		%g4, %g3, %g4

	/* Read in the UDB error register state, clearing the sticky
	 * error bits as-needed.  We only clear them if the UE bit is
	 * set.  Likewise, __spitfire_cee_trap below will only do so
	 * if the CE bit is set.
	 *
	 * NOTE: UltraSparc-I/II have high and low UDB error
	 *       registers, corresponding to the two UDB units
	 *       present on those chips.  UltraSparc-IIi only
	 *       has a single UDB, called "SDB" in the manual.
	 *       For IIi the upper UDB register always reads
	 *       as zero so for our purposes things will just
	 *       work with the checks below.
	 */
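	/* In rough C (sketch only: udb_read()/udb_write() are stand-ins
	 * for the ldxa/stxa ASI accesses below, not real helpers), the
	 * pattern applied to each UDB is:
	 *
	 *	unsigned long udb = udb_read(reg) & 0x3ff;
	 *	status |= udb << shift;
	 *	if (udb & bit)			// bit is UDBE_UE or UDBE_CE
	 *		udb_write(reg, udb & bit);	// ack the sticky bit
	 */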
1:	ldxa		[%g0] ASI_UDBH_ERROR_R, %g3
	and		%g3, 0x3ff, %g7		! Paranoia
	sllx		%g7, SFSTAT_UDBH_SHIFT, %g7
	or		%g4, %g7, %g4
	andcc		%g3, %g1, %g3		! UDBE_UE or UDBE_CE
	be,pn		%xcc, 1f
	 nop
	stxa		%g3, [%g0] ASI_UDB_ERROR_W
	membar		#Sync

1:	mov		0x18, %g3
	ldxa		[%g3] ASI_UDBL_ERROR_R, %g3
	and		%g3, 0x3ff, %g7		! Paranoia
	sllx		%g7, SFSTAT_UDBL_SHIFT, %g7
	or		%g4, %g7, %g4
	andcc		%g3, %g1, %g3		! UDBE_UE or UDBE_CE
	be,pn		%xcc, 1f
	 nop
	mov		0x18, %g7
	stxa		%g3, [%g7] ASI_UDB_ERROR_W
	membar		#Sync

1:	/* Ok, now that we've latched the error state, clear the
	 * sticky bits in the AFSR.
	 */
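	/* Storing the value we read (with the error bits still set) back
	 * into the AFSR is what ACKs them; on these chips the sticky
	 * bits are cleared by writing ones to them.
	 */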
	stxa		%g4, [%g0] ASI_AFSR
	membar		#Sync

	rdpr		%tl, %g2
	cmp		%g2, 1
	rdpr		%pil, %g2
	bleu,pt		%xcc, 1f
	 wrpr		%g0, PIL_NORMAL_MAX, %pil

	ba,pt		%xcc, etraptl1
	 rd		%pc, %g7

	ba,pt		%xcc, 2f
	 nop

1:	ba,pt		%xcc, etrap_irq
	 rd		%pc, %g7

2:
#ifdef CONFIG_TRACE_IRQFLAGS
	call	trace_hardirqs_off
	 nop
#endif
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_access_error
	 add		%sp, PTREGS_OFF, %o0
	ba,pt		%xcc, rtrap
	 nop
	.size		__spitfire_access_error,.-__spitfire_access_error

	/* This is the trap handler entry point for ECC correctable
	 * errors.  They are corrected, but we listen for the trap so
	 * that the event can be logged.
	 *
	 * Disrupting errors are either:
	 * 1) single-bit ECC errors during UDB reads to system
	 *    memory
	 * 2) data parity errors during write-back events
	 *
	 * As far as I can make out from the manual, the CEE trap is
	 * only for correctable errors during memory read accesses by
	 * the front-end of the processor.
	 *
	 * The code below is only for trap level 1 CEE events, as that
	 * is the only situation in which we can safely record and log
	 * the event.  For trap level >1 we just clear the CE bit in
	 * the AFSR and return.
	 *
	 * This is just like __spitfire_access_error above, but it
	 * specifically handles correctable errors.  If an
	 * uncorrectable error is indicated in the AFSR we will branch
	 * directly above to __spitfire_access_error to handle it
	 * instead.  Uncorrectable therefore takes priority over
	 * correctable, and the error logging C code will notice this
	 * case by inspecting the trap type.
	 */
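	/* The uncorrectable-error priority check below, in rough C
	 * (sketch only; read_afsr() stands in for the ldxa from ASI_AFSR,
	 * and SFAFSR_UE_SHIFT comes from asm-sparc64/sfafsr.h):
	 *
	 *	unsigned long afsr = read_afsr();
	 *	if (afsr & (1UL << SFAFSR_UE_SHIFT))
	 *		goto __spitfire_access_error;	// UE wins over CE
	 *	... handle the correctable-only case ...
	 */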
	.type		__spitfire_cee_trap,#function
__spitfire_cee_trap:
	ldxa		[%g0] ASI_AFSR, %g4	! Get AFSR
	mov		1, %g3
	sllx		%g3, SFAFSR_UE_SHIFT, %g3
	andcc		%g4, %g3, %g0		! Check for UE
	bne,pn		%xcc, __spitfire_access_error
	 nop

	/* Ok, in this case we only have a correctable error.
	 * Indicate in register %g1 that we only wish to capture the CE
	 * state, and disable only CE error reporting, unlike the UE
	 * handling above which disables all error reporting.
	 */
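	/* Roughly, in C (estate_read()/estate_write() are stand-ins for
	 * the ldxa/stxa ASI_ESTATE_ERROR_EN accesses that follow):
	 *
	 *	estate_write(estate_read() & ~ESTATE_ERR_CE);
	 *
	 * contrast this with the UE path above, which writes 0 to shut
	 * off all error reporting.
	 */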
	ldxa		[%g0] ASI_ESTATE_ERROR_EN, %g3
	andn		%g3, ESTATE_ERR_CE, %g3
	stxa		%g3, [%g0] ASI_ESTATE_ERROR_EN
	membar		#Sync

	/* Preserve AFSR in %g4, indicate UDB state to capture in %g1 */
	ba,pt		%xcc, __spitfire_cee_trap_continue
	 mov		UDBE_CE, %g1
	.size		__spitfire_cee_trap,.-__spitfire_cee_trap

	.type		__spitfire_data_access_exception_tl1,#function
__spitfire_data_access_exception_tl1:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	mov		DMMU_SFAR, %g5
	ldxa		[%g3] ASI_DMMU, %g4	! Get SFSR
	ldxa		[%g5] ASI_DMMU, %g5	! Get SFAR
	stxa		%g0, [%g3] ASI_DMMU	! Clear SFSR.FaultValid bit
	membar		#Sync
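	/* If this exception was raised from inside a register window
	 * spill/fill handler (trap types 0x80-0xff), hand it off to
	 * winfix_dax for fixup instead of logging it here.
	 */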
	rdpr		%tt, %g3
	cmp		%g3, 0x80		! first win spill/fill trap
	blu,pn		%xcc, 1f
	 cmp		%g3, 0xff		! last win spill/fill trap
	bgu,pn		%xcc, 1f
	 nop
	ba,pt		%xcc, winfix_dax
	 rdpr		%tpc, %g3
1:	sethi		%hi(109f), %g7
	ba,pt		%xcc, etraptl1
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_data_access_exception_tl1
	 add		%sp, PTREGS_OFF, %o0
	ba,pt		%xcc, rtrap
	 nop
	.size		__spitfire_data_access_exception_tl1,.-__spitfire_data_access_exception_tl1

	.type		__spitfire_data_access_exception,#function
__spitfire_data_access_exception:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	mov		DMMU_SFAR, %g5
	ldxa		[%g3] ASI_DMMU, %g4	! Get SFSR
	ldxa		[%g5] ASI_DMMU, %g5	! Get SFAR
	stxa		%g0, [%g3] ASI_DMMU	! Clear SFSR.FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etrap
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_data_access_exception
	 add		%sp, PTREGS_OFF, %o0
	ba,pt		%xcc, rtrap
	 nop
	.size		__spitfire_data_access_exception,.-__spitfire_data_access_exception

	.type		__spitfire_insn_access_exception_tl1,#function
__spitfire_insn_access_exception_tl1:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	ldxa		[%g3] ASI_IMMU, %g4	! Get SFSR
	rdpr		%tpc, %g5		! IMMU has no SFAR, use TPC
	stxa		%g0, [%g3] ASI_IMMU	! Clear FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etraptl1
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_insn_access_exception_tl1
	 add		%sp, PTREGS_OFF, %o0
	ba,pt		%xcc, rtrap
	 nop
	.size		__spitfire_insn_access_exception_tl1,.-__spitfire_insn_access_exception_tl1

	.type		__spitfire_insn_access_exception,#function
__spitfire_insn_access_exception:
	rdpr		%pstate, %g4
	wrpr		%g4, PSTATE_MG|PSTATE_AG, %pstate
	mov		TLB_SFSR, %g3
	ldxa		[%g3] ASI_IMMU, %g4	! Get SFSR
	rdpr		%tpc, %g5		! IMMU has no SFAR, use TPC
	stxa		%g0, [%g3] ASI_IMMU	! Clear FaultValid bit
	membar		#Sync
	sethi		%hi(109f), %g7
	ba,pt		%xcc, etrap
109:	 or		%g7, %lo(109b), %g7
	mov		%l4, %o1
	mov		%l5, %o2
	call		spitfire_insn_access_exception
	 add		%sp, PTREGS_OFF, %o0
	ba,pt		%xcc, rtrap
	 nop
	.size		__spitfire_insn_access_exception,.-__spitfire_insn_access_exception