Commit | Line | Data |
---|---|---|
920dae64 AT |
1 | /* |
2 | * ========== Copyright Header Begin ========================================== | |
3 | * | |
4 | * Hypervisor Software File: error_asm.h | |
5 | * | |
6 | * Copyright (c) 2006 Sun Microsystems, Inc. All Rights Reserved. | |
7 | * | |
8 | * - Do no alter or remove copyright notices | |
9 | * | |
10 | * - Redistribution and use of this software in source and binary forms, with | |
11 | * or without modification, are permitted provided that the following | |
12 | * conditions are met: | |
13 | * | |
14 | * - Redistribution of source code must retain the above copyright notice, | |
15 | * this list of conditions and the following disclaimer. | |
16 | * | |
17 | * - Redistribution in binary form must reproduce the above copyright notice, | |
18 | * this list of conditions and the following disclaimer in the | |
19 | * documentation and/or other materials provided with the distribution. | |
20 | * | |
21 | * Neither the name of Sun Microsystems, Inc. or the names of contributors | |
22 | * may be used to endorse or promote products derived from this software | |
23 | * without specific prior written permission. | |
24 | * | |
25 | * This software is provided "AS IS," without a warranty of any kind. | |
26 | * ALL EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, | |
27 | * INCLUDING ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A | |
28 | * PARTICULAR PURPOSE OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN | |
29 | * MICROSYSTEMS, INC. ("SUN") AND ITS LICENSORS SHALL NOT BE LIABLE FOR | |
30 | * ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR | |
31 | * DISTRIBUTING THIS SOFTWARE OR ITS DERIVATIVES. IN NO EVENT WILL SUN | |
32 | * OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE, PROFIT OR DATA, OR | |
33 | * FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, INCIDENTAL OR PUNITIVE | |
34 | * DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY OF LIABILITY, | |
35 | * ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF | |
36 | * SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. | |
37 | * | |
38 | * You acknowledge that this software is not designed, licensed or | |
39 | * intended for use in the design, construction, operation or maintenance of | |
40 | * any nuclear facility. | |
41 | * | |
42 | * ========== Copyright Header End ============================================ | |
43 | */ | |
44 | /* | |
45 | * Copyright 2007 Sun Microsystems, Inc. All rights reserved. | |
46 | * Use is subject to license terms. | |
47 | */ | |
48 | ||
49 | #ifndef _NIAGARA2_ERROR_ASM_H | |
50 | #define _NIAGARA2_ERROR_ASM_H | |
51 | ||
52 | #pragma ident "@(#)error_asm.h 1.6 07/09/18 SMI" | |
53 | ||
54 | #ifdef __cplusplus | |
55 | extern "C" { | |
56 | #endif | |
57 | ||
58 | #include <sys/htypes.h> | |
59 | #include <util.h> | |
60 | #include <debug.h> | |
61 | #include <traps.h> | |
62 | #include <error_defs.h> | |
63 | #include <error_soc.h> | |
64 | ||
65 | /* BEGIN CSTYLED */ | |
66 | ||
/*
 * All the strand->strand_err_*[TL] data is stored as pointers, one per
 * trap level. To get to the appropriate pointer we multiply by
 * sizeof(uint64_t *), or use (sllx << 3).
 */
#define	STRAND_ERR_POINTER_SHIFT	3

/*
 * Load the per-trap-level pointer stored at byte 'offset' inside the
 * strand structure for the current trap level (index %tl - 1).
 *
 * out:      tl_entry = strand-><array at offset>[%tl - 1]
 * clobbers: scr1
 */
#define	GET_STRAND_ERR_TL_ENTRY(tl_entry, offset, scr1)	\
	STRAND_STRUCT(tl_entry)				;\
	rdpr	%tl, scr1				;\
	dec	scr1					;\
	sllx	scr1, STRAND_ERR_POINTER_SHIFT, scr1	;\
	add	scr1, offset, scr1			;\
	ldx	[tl_entry + scr1], tl_entry

/*
 * Store tl_entry into the per-trap-level pointer slot at byte 'offset'
 * inside the strand structure for the current trap level (index %tl - 1).
 *
 * in:       tl_entry = pointer to store
 * clobbers: scr1, scr2
 */
#define	SET_STRAND_ERR_TL_ENTRY(tl_entry, offset, scr1, scr2)	\
	STRAND_STRUCT(scr2)				;\
	rdpr	%tl, scr1				;\
	dec	scr1					;\
	sllx	scr1, STRAND_ERR_POINTER_SHIFT, scr1	;\
	add	scr1, offset, scr1			;\
	stx	tl_entry, [scr2 + scr1]
89 | ||
90 | ||
/*
 * returns strand->strand_err_table_entry[TL] in err_table_entry
 * (scr1 clobbered)
 */
#define	GET_ERR_TABLE_ENTRY(err_table_entry, scr1)	\
	GET_STRAND_ERR_TL_ENTRY(err_table_entry, STRAND_ERR_TABLE_ENTRY, scr1)

/*
 * returns strand->strand_diag_buf[TL] in diag_buf
 * (scr1 clobbered)
 */
#define	GET_ERR_DIAG_BUF(diag_buf, scr1)	\
	GET_STRAND_ERR_TL_ENTRY(diag_buf, STRAND_DIAG_BUF, scr1)

/*
 * returns strand->strand_diag_buf[TL].err_diag_data in diag_buf
 *
 * If the diag buf pointer is NULL, returns 0: the annulled 'add' in
 * the branch delay slot only executes when the pointer is non-zero.
 * (scr1 clobbered)
 */
#define	GET_ERR_DIAG_DATA_BUF(diag_data_buf, scr1)	\
	.pushlocals					;\
	GET_ERR_DIAG_BUF(diag_data_buf, scr1)		;\
	brnz,a,pt diag_data_buf, 1f			;\
	add	diag_data_buf, ERR_DIAG_RPRT_ERR_DIAG, diag_data_buf	;\
1:	.poplocals

/*
 * returns strand->strand_sun4v_rprt_buf[TL] in rprt_buf
 * (scr1 clobbered)
 */
#define	GET_ERR_SUN4V_RPRT_BUF(rprt_buf, scr1)	\
	GET_STRAND_ERR_TL_ENTRY(rprt_buf, STRAND_SUN4V_RPRT_BUF, scr1)
/*
 * puts err_table_entry in strand->strand_err_table_entry[TL]
 * (scr1, scr2 clobbered)
 */
#define	SET_ERR_TABLE_ENTRY(err_table_entry, scr1, scr2)	\
	SET_STRAND_ERR_TL_ENTRY(err_table_entry, STRAND_ERR_TABLE_ENTRY, scr1, scr2)
123 | ||
/*
 * The strand->strand_err_*[TL] ESR registers are stored as uint64_t, one per
 * trap level. To get to the appropriate pointer we multiply by
 * sizeof(uint64_t *), or use (sllx << 3).
 */
#define	STRAND_ERR_REG_SHIFT	3

/*
 * stores esr in strand->strand_err_{isfsr|dsfsr|dsfar|desr|dfesr}[TL]
 *
 * in:       desr = 64-bit value to save
 *           esr  = byte offset of the per-TL uint64_t array in the strand
 * clobbers: scr1, scr2
 */
#define	STORE_ERR_ESR(desr, esr, scr1, scr2)	\
	STRAND_STRUCT(scr2)			;\
	rdpr	%tl, scr1			;\
	dec	scr1				;\
	sllx	scr1, STRAND_ERR_REG_SHIFT, scr1 ;\
	add	scr1, esr, scr1			;\
	stx	desr, [scr2 + scr1]

/*
 * stores isfsr in strand->strand_err_isfsr[TL]
 */
#define	STORE_ERR_ISFSR(isfsr, scr1, scr2)	\
	STORE_ERR_ESR(isfsr, STRAND_ERR_ISFSR, scr1, scr2)
/*
 * stores dsfsr in strand->strand_err_dsfsr[TL]
 */
#define	STORE_ERR_DSFSR(dsfsr, scr1, scr2)	\
	STORE_ERR_ESR(dsfsr, STRAND_ERR_DSFSR, scr1, scr2)
/*
 * stores dsfar in strand->strand_err_dsfar[TL]
 */
#define	STORE_ERR_DSFAR(dsfar, scr1, scr2)	\
	STORE_ERR_ESR(dsfar, STRAND_ERR_DSFAR, scr1, scr2)
/*
 * stores desr in strand->strand_err_desr[TL]
 */
#define	STORE_ERR_DESR(desr, scr1, scr2)	\
	STORE_ERR_ESR(desr, STRAND_ERR_DESR, scr1, scr2)
/*
 * stores dfesr in strand->strand_err_dfesr[TL]
 */
#define	STORE_ERR_DFESR(dfesr, scr1, scr2)	\
	STORE_ERR_ESR(dfesr, STRAND_ERR_DFESR, scr1, scr2)

/*
 * stores reg in strand->strand_err_return_addr[TL]
 */
#define	STORE_ERR_RETURN_ADDR(reg, scr1, scr2)	\
	STORE_ERR_ESR(reg, STRAND_ERR_RETURN_ADDR, scr1, scr2)
/*
 * loads esr from strand->strand_err_{isfsr|dsfsr|dsfar|desr|dfesr}[TL]
 *
 * in:       esr = byte offset of the per-TL uint64_t array in the strand
 * out:      esr_reg = saved value for the current trap level (%tl - 1)
 * clobbers: scr1
 *
 * Note: uses STRAND_ERR_REG_SHIFT to index the uint64_t ESR arrays,
 * matching STORE_ERR_ESR (previously used STRAND_ERR_POINTER_SHIFT,
 * which has the same value but is documented for the pointer arrays).
 */
#define	GET_ERR_ESR(esr_reg, esr, scr1)	\
	STRAND_STRUCT(esr_reg)		;\
	rdpr	%tl, scr1		;\
	dec	scr1			;\
	sllx	scr1, STRAND_ERR_REG_SHIFT, scr1 ;\
	add	scr1, esr, scr1		;\
	ldx	[esr_reg + scr1], esr_reg
183 | ||
/*
 * returns strand->strand_err_isfsr[TL] in isfsr
 * (scr1 clobbered; same for all GET_ERR_* wrappers below)
 */
#define	GET_ERR_ISFSR(isfsr, scr1)	\
	GET_ERR_ESR(isfsr, STRAND_ERR_ISFSR, scr1)

/*
 * returns strand->strand_err_dsfsr[TL] in dsfsr
 */
#define	GET_ERR_DSFSR(dsfsr, scr1)	\
	GET_ERR_ESR(dsfsr, STRAND_ERR_DSFSR, scr1)

/*
 * returns strand->strand_err_dsfar[TL] in dsfar
 */
#define	GET_ERR_DSFAR(dsfar, scr1)	\
	GET_ERR_ESR(dsfar, STRAND_ERR_DSFAR, scr1)

/*
 * returns strand->strand_err_desr[TL] in desr
 */
#define	GET_ERR_DESR(desr, scr1)	\
	GET_ERR_ESR(desr, STRAND_ERR_DESR, scr1)

/*
 * returns strand->strand_err_dfesr[TL] in dfesr
 */
#define	GET_ERR_DFESR(dfesr, scr1)	\
	GET_ERR_ESR(dfesr, STRAND_ERR_DFESR, scr1)

/*
 * returns strand->strand_err_return_addr[TL] in reg
 */
#define	GET_ERR_RETURN_ADDR(reg, scr1)	\
	GET_ERR_ESR(reg, STRAND_ERR_RETURN_ADDR, scr1)
219 | ||
/*
 * Enable precise error traps: read-modify-write the per-core error
 * trap enable register (via ASI_ERR_EN), OR-ing in the ERR_PSCCE bits.
 * clobbers: scr1, scr2, scr3
 */
#define	ENABLE_PSCCE(scr1, scr2, scr3)	\
	setx	ERR_PSCCE, scr2, scr1	;\
	mov	CORE_ERR_TRAP_EN, scr2	;\
	ldxa	[scr2]ASI_ERR_EN, scr3	;\
	or	scr3, scr1, scr3	;\
	stxa	scr3, [scr2]ASI_ERR_EN

/*
 * Disable precise error traps: same read-modify-write, but clearing
 * the ERR_PSCCE bits (andn).
 * clobbers: scr1, scr2, scr3
 */
#define	DISABLE_PSCCE(scr1, scr2, scr3)	\
	setx	ERR_PSCCE, scr2, scr1	;\
	mov	CORE_ERR_TRAP_EN, scr2	;\
	ldxa	[scr2]ASI_ERR_EN, scr3	;\
	andn	scr3, scr1, scr3	;\
	stxa	scr3, [scr2]ASI_ERR_EN
239 | ||
/*
 * If TSTATE.GL == GL, we save GLOBALS[GL] -> cpu_globals[TL - 1]
 */

#ifdef IRF_ECC_ERRATA
/*
 * The IRF fix is to work around a problem with the way the N2
 * chip checks ECC on privileged/hyper-privileged and ASR
 * accesses. As well as checking the ECC on the target PR/ASR/HPR
 * of the instruction in question, it also checks the ECC of
 * an unrelated general purpose register. So, for example,
 * the instruction
 *
 *	rd %asr25, %g5	(%asr25 STICK)
 *
 * checks ECC on the STICK register, on %g5 (for the current
 * GL), but also checks the ECC for %i0. So if the original IRF
 * error trap happened on %i0, we get another nested trap.
 *
 * The workaround is not to use the ASR/PR/HPR registers that do
 * this as we can get nested IRF traps which eventually cause
 * RED_State.
 *
 * Note that the ASR/PR/HPR accesses which also check a global
 * register check the global register at GL == current GL, which
 * if we got the IRF on a global register, will not be the
 * same as Trap GL, so we don't get the nested traps.
 *
 * There is (of course) an exception, where if we get an IRF
 * trap at TL> MAXGL, we are out of options, as current GL == Trap
 * GL. We have to look at the trap stack array, and we have to
 * use global registers so if we get an IRF error on a global at
 * TL > MAXGL we are going to get nested traps and RED_State.
 */
/*
 * Return the current %gl in gl_reg without executing 'rdpr %gl' on the
 * precise-error (TT_PROCERR) path: the trap-time GL is recovered from
 * the D-SFAR instead, then incremented/masked to get the current GL.
 * For any other trap type the plain 'rdpr %gl' (annulled delay slot) is
 * used.
 */
#define	GET_ERR_GL(gl_reg)	\
	.pushlocals		;\
	rdpr	%tt, gl_reg	;\
	cmp	gl_reg, TT_PROCERR	;\
	bne,a,pt %xcc, 0f	;\
	rdpr	%gl, gl_reg	;\
	mov	MMU_SFAR, gl_reg	;\
	ldxa	[gl_reg]ASI_DMMU, gl_reg	/* D_SFAR */	;\
	srlx	gl_reg, DSFAR_IRF_GL_SHIFT, gl_reg	;\
	and	gl_reg, DSFAR_IRF_GL_MASK, gl_reg	;\
	/* gl_reg	%gl when error trap taken */	;\
	inc	gl_reg		;\
	/* gl_reg	saturated at MAXGL */	;\
	and	gl_reg, DSFAR_IRF_GL_MASK, gl_reg	;\
	/* gl_reg	current %gl */	;\
0:				;\
	.poplocals

/*
 * Return the current %cwp in cwp_reg; on the TT_PROCERR path the CWP is
 * read from %tstate rather than via 'rdpr %cwp' (see errata note above).
 */
#define	GET_ERR_CWP(cwp_reg)	\
	.pushlocals		;\
	rdpr	%tt, cwp_reg	;\
	cmp	cwp_reg, TT_PROCERR	;\
	bne,a,pt %xcc, 0f	;\
	rdpr	%cwp, cwp_reg	;\
	rdpr	%tstate, cwp_reg	;\
	srlx	cwp_reg, TSTATE_CWP_SHIFT, cwp_reg	;\
	and	cwp_reg, TSTATE_CWP_MASK, cwp_reg	;\
0:				;\
	.poplocals

/*
 * Return a time value in stick_reg; on the TT_PROCERR path %tick is
 * used instead of the STICK ASR (see errata note above).
 */
#define	GET_ERR_STICK(stick_reg)	\
	.pushlocals		;\
	rdpr	%tt, stick_reg	;\
	cmp	stick_reg, TT_PROCERR	;\
	bne,a,pt %xcc, 0f	;\
	rd	STICK, stick_reg	;\
	rd	%tick, stick_reg	;\
0:				;\
	.poplocals
#else

/* Non-errata variants: plain register reads. */
#define	GET_ERR_GL(gl_reg)	\
	rdpr	%gl, gl_reg

#define	GET_ERR_CWP(cwp_reg)	\
	rdpr	%cwp, cwp_reg

#define	GET_ERR_STICK(stick_reg)	\
	rd	STICK, stick_reg

#endif
325 | ||
/*
 * SAVE_GLOBALS: save the trap-time global registers (%g1-%g7) into
 * strand->strand_ue_globals[TL - 1] if TSTATE.GL equals the error-time
 * GL (i.e. the globals would otherwise be lost), setting
 * strand->strand_err_globals_saved as a flag for RESTORE_GLOBALS.
 *
 * Special cases handled first:
 *  - a precise error (TT_PROCERR) on the HSCRATCH_STRAND_STRUCT
 *    scratchpad register: the strand pointer is recomputed from
 *    &strands[] and written back, and no globals are saved;
 *  - a NULL strand scratchpad: the VCPU scratchpad is reset to NULL
 *    and nothing else can be done.
 *
 * Side effects: %asi is set to ASI_HSCRATCHPAD; the VCPU scratchpad is
 * temporarily overwritten with %g1 and restored via PID2VCPUP /
 * SET_VCPU_STRUCT at the end. STRAND_UE_TMP1-3 are used as spill slots.
 */
#define SAVE_GLOBALS() \
	.pushlocals						;\
	mov	ASI_HSCRATCHPAD, %asi				;\
	/*							;\
	 * We overwrite the VCPU scratchpad register		;\
	 */							;\
	stxa	%g1, [%g0 + HSCRATCH_VCPU_STRUCT]%asi		;\
								;\
	/*							;\
	 * If we have an error on the STRAND scratchpad		;\
	 * register we can't just load from it, or we will	;\
	 * get another error.					;\
	 * %g1	scratch						;\
	 */							;\
	rdpr	%tt, %g1					;\
	cmp	%g1, TT_PROCERR					;\
	bne,pt	%xcc, 1f					;\
	mov	MMU_SFSR, %g1					;\
	ldxa	[%g1]ASI_DMMU, %g1				;\
	cmp	%g1, DSFSR_SCAC					;\
	bl,pt	%xcc, 1f					;\
	cmp	%g1, DSFSR_SCAU					;\
	bg,pn	%xcc, 1f					;\
	mov	MMU_SFAR, %g1					;\
	ldxa	[%g1]ASI_DMMU, %g1				;\
	srlx	%g1, DSFAR_SCRATCHPAD_INDEX_SHIFT, %g1		;\
	and	%g1, DSFAR_SCRATCHPAD_INDEX_MASK, %g1		;\
	sllx	%g1, 3, %g1	/* VA of scratchpad reg (index * 8) */ ;\
	cmp	%g1, HSCRATCH_STRAND_STRUCT			;\
	bne,pt	%xcc, 1f					;\
	nop							;\
								;\
	/*							;\
	 * reset the strand struct, no globals saved		;\
	 */							;\
	PHYS_STRAND_ID(%g3)					;\
	set	STRAND_SIZE, %g2				;\
	mulx	%g3, %g2, %g3					;\
	setx	strands, %g4, %g5				;\
	RELOC_OFFSET(%g4, %g6)					;\
	sub	%g5, %g6, %g4	/* &strands */			;\
	add	%g3, %g4, %g3	/* &strands[core_id] */		;\
	mov	HSCRATCH_STRAND_STRUCT, %g2			;\
	stxa	%g3, [%g2]ASI_HSCRATCHPAD			;\
	ba,pt	%xcc, 2f					;\
	nop							;\
								;\
1:								;\
	/*							;\
	 * If the STRAND scratchpad is NULL, the VCPU		;\
	 * scratchpad will be also, so reset it back		;\
	 * to NULL here, nothing we can do			;\
	 */							;\
	ldxa	[%g0 + HSCRATCH_STRAND_STRUCT]%asi, %g1		;\
	brz,a,pn %g1, 3f					;\
	stxa	%g0, [%g0 + HSCRATCH_VCPU_STRUCT]%asi		;\
								;\
	/*							;\
	 * valid strand struct in %g1, save the globals		;\
	 */							;\
	stx	%g2, [%g1 + STRAND_UE_TMP3]			;\
								;\
	/*							;\
	 * %g1/%g2 now available for use			;\
	 */							;\
	rdpr	%tstate, %g1					;\
	srlx	%g1, TSTATE_GL_SHIFT, %g1			;\
	and	%g1, TSTATE_GL_MASK, %g1			;\
	GET_ERR_GL(%g2)						;\
	cmp	%g1, %g2					;\
	bne,pt	%xcc, 2f	/* nothing to do, not MAXGL */	;\
	nop							;\
								;\
	/*							;\
	 * get the strand struct back into %g1			;\
	 */							;\
	ldxa	[%g0 + HSCRATCH_STRAND_STRUCT]%asi, %g1		;\
								;\
	/*							;\
	 * get a couple of scratch registers			;\
	 */							;\
	stx	%o0, [%g1 + STRAND_UE_TMP1]			;\
	stx	%o1, [%g1 + STRAND_UE_TMP2]			;\
	mov	%g1, %o0	/* %o0	strandp */		;\
								;\
	/*							;\
	 * restore original %g2					;\
	 */							;\
	ldx	[%g1 + STRAND_UE_TMP3], %g2			;\
								;\
	/*							;\
	 * restore original %g1 from VCPU scratchpad		;\
	 */							;\
	ldxa	[%g0 + HSCRATCH_VCPU_STRUCT] %asi, %g1		;\
								;\
	rdpr	%tl, %o1					;\
	sub	%o1, 1, %o1					;\
	sllx	%o1, TRAPGLOBALS_SHIFT, %o1			;\
	add	%o0, %o1, %o1					;\
	stx	%g7, [%o1 + STRAND_UE_GLOBALS + (7*8)]		;\
	stx	%g6, [%o1 + STRAND_UE_GLOBALS + (6*8)]		;\
	stx	%g5, [%o1 + STRAND_UE_GLOBALS + (5*8)]		;\
	stx	%g4, [%o1 + STRAND_UE_GLOBALS + (4*8)]		;\
	stx	%g3, [%o1 + STRAND_UE_GLOBALS + (3*8)]		;\
	stx	%g2, [%o1 + STRAND_UE_GLOBALS + (2*8)]		;\
	stx	%g1, [%o1 + STRAND_UE_GLOBALS + (1*8)]		;\
	/*							;\
	 * Set globals-saved flag				;\
	 */							;\
	mov	1, %o1						;\
	stx	%o1, [%o0 + STRAND_ERR_GLOBALS_SAVED]		;\
	ldx	[%o0 + STRAND_UE_TMP2], %o1			;\
	ldx	[%o0 + STRAND_UE_TMP1], %o0			;\
2:								;\
	/*							;\
	 * Restore scratchpad VCPU pointer			;\
	 * all globals available				;\
	 */							;\
	STRAND_STRUCT(%g1)					;\
	ldub	[%g1 + STRAND_ID], %g1				;\
	PID2VCPUP(%g1, %g2, %g3, %g4)				;\
	SET_VCPU_STRUCT(%g2, %g3)				;\
3:								;\
	.poplocals
450 | ||
/*
 * If TSTATE.GL == GL, we restore GLOBALS[GL] from cpu_globals[TL - 1]
 * All registers are clobbered. Must issue a retry/done immediately
 * after this macro.
 *
 * Counterpart to SAVE_GLOBALS: if strand->strand_err_globals_saved is
 * set, reload %g1-%g7 from strand_ue_globals[TL - 1] and clear the
 * flag (the clear is in the branch delay slot so it happens on both
 * paths). %o0/%o1 are spilled to STRAND_UE_TMP1/2 and restored before
 * 'instr' (expected to be retry/done/nop) is issued.
 */
#define	RESTORE_GLOBALS(instr) \
	.pushlocals						;\
	/*							;\
	 * check if globals saved, clear flag in		;\
	 * delay slot						;\
	 */							;\
	STRAND_STRUCT(%g1)					;\
	ldx	[%g1 + STRAND_ERR_GLOBALS_SAVED], %g2		;\
	brz,pt	%g2, 1f						;\
	stx	%g0, [%g1 + STRAND_ERR_GLOBALS_SAVED]		;\
								;\
	/*							;\
	 * get a couple of scratch registers			;\
	 * %g1	strandp						;\
	 */							;\
	stx	%o0, [%g1 + STRAND_UE_TMP1]			;\
	stx	%o1, [%g1 + STRAND_UE_TMP2]			;\
	mov	%g1, %o0	/* %o0	strandp */		;\
	rdpr	%tl, %o1					;\
	sub	%o1, 1, %o1					;\
	sllx	%o1, TRAPGLOBALS_SHIFT, %o1			;\
	add	%o0, %o1, %o1					;\
	ldx	[%o1 + STRAND_UE_GLOBALS + (1*8)], %g1		;\
	ldx	[%o1 + STRAND_UE_GLOBALS + (2*8)], %g2		;\
	ldx	[%o1 + STRAND_UE_GLOBALS + (3*8)], %g3		;\
	ldx	[%o1 + STRAND_UE_GLOBALS + (4*8)], %g4		;\
	ldx	[%o1 + STRAND_UE_GLOBALS + (5*8)], %g5		;\
	ldx	[%o1 + STRAND_UE_GLOBALS + (6*8)], %g6		;\
	ldx	[%o1 + STRAND_UE_GLOBALS + (7*8)], %g7		;\
	ldx	[%o0 + STRAND_UE_TMP2], %o1			;\
	ldx	[%o0 + STRAND_UE_TMP1], %o0			;\
1:								;\
	instr	/* retry/done/nop */				;\
	.poplocals
490 | ||
/*
 * Generate a unique error handle.
 *
 * out: scr1 = EHDL (CPUID | TL | SEQ No) — a fresh, strand-unique
 *      error sequence value; the strand's STRAND_ERR_SEQ_NO counter
 *      is incremented as a side effect.
 * clobbers: scr2
 *
 * (The stray ';' statement separator after STRAND_STRUCT(scr2) has
 * been removed for consistency with the other macros in this file.)
 */
#define	GENERATE_EHDL(scr1, scr2) \
	STRAND_STRUCT(scr2)					;\
	ldx	[scr2 + STRAND_ERR_SEQ_NO], scr1 /* get current seq# */	;\
	add	scr1, 1, scr1		/* new seq# */		;\
	stx	scr1, [scr2 + STRAND_ERR_SEQ_NO] /* update seq# */	;\
	sllx	scr1, EHDL_SEQ_MASK_SHIFT, scr1			;\
	srlx	scr1, EHDL_SEQ_MASK_SHIFT, scr1	/* scr1 = normalized seq# */ ;\
	ldub	[scr2 + STRAND_ID], scr2 /* scr2 has CPUID */	;\
	sllx	scr2, EHDL_TL_BITS, scr2 /* scr2 << EHDL_TL_BITS */	;\
	sllx	scr2, EHDL_CPUTL_SHIFT, scr2 /* scr2 now has cpuid in 63:56 */ ;\
	or	scr2, scr1, scr1 /* scr1 now has ehdl without tl */	;\
	rdpr	%tl, scr2	/* scr2 = %tl */		;\
	sllx	scr2, EHDL_CPUTL_SHIFT, scr2 /* scr2 tl in position */	;\
	or	scr2, scr1, scr1 /* scr1 -> ehdl */
512 | ||
/*
 * relocate an address: subtract the hypervisor relocation offset
 * (config.reloc) from 'addr' in place.
 * clobbers: scr
 */
#define	RELOC_ADDR(addr, scr)			\
	ROOT_STRUCT(scr)			;\
	ldx	[scr + CONFIG_RELOC], scr	;\
	sub	addr, scr, addr
520 | ||
#ifdef DEBUG
/*
 * %g1	error table entry
 * the first 'ba puts' will print out the error name
 *
 * Debug-only dump of trap state (CPU, TPC, TT, TL, TSTATE and the
 * saved SFSR/SFAR/DESR/DFESR values) via the no-trap print macros.
 * Does nothing at TL == 0. %g1 is preserved via STRAND_UE_TMP1;
 * the remaining globals are clobbered. The 'ba puts / rd %pc, %g7'
 * sequence calls puts with the return address in %g7 — presumably
 * the error table entry begins with the name string (TODO confirm
 * against the error table layout).
 */
#define	PRINT_ERROR_TABLE_ENTRY() \
	.pushlocals					;\
	rdpr	%tl, %g3				;\
	brz,pn	%g3, 1f					;\
	nop						;\
	rdpr	%tpc, %g4				;\
	STRAND_STRUCT(%g2)				;\
	stx	%g1, [%g2 + STRAND_UE_TMP1]		;\
	stx	%g4, [%g2 + STRAND_UE_TMP2]		;\
	PRINT_NOTRAP("CPU: 0x")				;\
	mov	CMP_CORE_ID, %g4			;\
	ldxa	[%g4]ASI_CMP_CORE, %g4			;\
	and	%g4, 0x3f, %g4	/* strand_id bits [5:0] */ ;\
	PRINTX_NOTRAP(%g4)				;\
	PRINT_NOTRAP("\r\nTPC: 0x")			;\
	STRAND_STRUCT(%g2)				;\
	ldx	[%g2 + STRAND_UE_TMP2], %g4		;\
	PRINTX_NOTRAP(%g4)				;\
	PRINT_NOTRAP("\r\nTT: 0x")			;\
	rdpr	%tt, %g2				;\
	PRINTX_NOTRAP(%g2)				;\
	PRINT_NOTRAP("\r\nTL: 0x")			;\
	rdpr	%tl, %g2				;\
	PRINTX_NOTRAP(%g2)				;\
	PRINT_NOTRAP("\r\nTSTATE: 0x")			;\
	rdpr	%tstate, %g2				;\
	PRINTX_NOTRAP(%g2)				;\
	PRINT_NOTRAP("\r\nD-SFSR: 0x")			;\
	GET_ERR_DSFSR(%g2, %g3)				;\
	PRINTX_NOTRAP(%g2)				;\
	PRINT_NOTRAP("\r\nI-SFSR: 0x")			;\
	GET_ERR_ISFSR(%g2, %g3)				;\
	PRINTX_NOTRAP(%g2)				;\
	PRINT_NOTRAP("\r\nD-SFAR: 0x")			;\
	GET_ERR_DSFAR(%g2, %g3)				;\
	PRINTX_NOTRAP(%g2)				;\
	PRINT_NOTRAP("\r\nDESR: 0x")			;\
	GET_ERR_DESR(%g2, %g3)				;\
	PRINTX_NOTRAP(%g2)				;\
	PRINT_NOTRAP("\r\nDFESR: 0x")			;\
	GET_ERR_DFESR(%g2, %g3)				;\
	PRINTX_NOTRAP(%g2)				;\
	PRINT_NOTRAP("\r\n")				;\
	STRAND_STRUCT(%g2)				;\
	ldx	[%g2 + STRAND_UE_TMP1], %g1		;\
	ba	puts					;\
	rd	%pc, %g7				;\
	PRINT_NOTRAP("\r\n")				;\
	STRAND_STRUCT(%g2)				;\
	ldx	[%g2 + STRAND_UE_TMP1], %g1		;\
1:							;\
	.poplocals
#endif
579 | ||
/*
 * OR 'flag' into the 32-bit strand->strand_err_flag word.
 * clobbers: scr
 */
#define	SET_STRAND_ERR_FLAG(strand, flag, scr)	\
	lduw	[strand + STRAND_ERR_FLAG], scr	;\
	or	scr, flag, scr			;\
	stw	scr, [strand + STRAND_ERR_FLAG]

/*
 * Clear 'flag' bits in the 32-bit strand->strand_err_flag word.
 * clobbers: scr
 */
#define	CLEAR_STRAND_ERR_FLAG(strand, flag, scr)	\
	lduw	[strand + STRAND_ERR_FLAG], scr	;\
	andn	scr, flag, scr			;\
	stw	scr, [strand + STRAND_ERR_FLAG]

/*
 * Mark the current virtual CPU as being in the error state
 * (vcpu->status = CPU_STATE_ERROR).
 * clobbers: scr1 (vcpu pointer), scr2
 */
#define	SET_CPU_IN_ERROR(scr1, scr2)	\
	VCPU_STRUCT(scr1)		;\
	mov	CPU_STATE_ERROR, scr2	;\
	stx	scr2, [scr1 + CPU_STATUS]
594 | ||
/*
 * Unrecoverable-error exits: drop into the hypervisor abort path
 * (hvabort_exit). LEGION_EXIT(3) terminates the Legion simulator when
 * running under it. HPRIV_ERROR and FATAL_ERROR are deliberately
 * identical; the two names record intent at the call site.
 */
#define	HPRIV_ERROR()	\
	LEGION_EXIT(3)	;\
	ba,a,pt	%xcc, hvabort_exit

#define	FATAL_ERROR()	\
	LEGION_EXIT(3)	;\
	ba,a,pt	%xcc, hvabort_exit
602 | ||
/*
 * Translate I/O PA to RA
 *
 * Currently no offset is used for non-cacheable I/O addresses. This
 * may change in the future.
 */
#define	CPU_ERR_IO_PA_TO_RA(cpu, paddr, raddr)	\
	mov	paddr, raddr

/*
 * Translate PA to guest RA
 *
 * Note that this should only be used for DRAM PA translation.
 */
#define	CPU_ERR_INVALID_RA	(-1)

/*
 * Convert paddr to a guest real address in raddr using PA2RA_CONV;
 * on failure (non-zero scr2) raddr is set to CPU_ERR_INVALID_RA via
 * the annulled delay slot. 'vcpu' is temporarily replaced by its
 * guest struct and restored afterwards.
 * clobbers: scr1, scr2
 */
#define	CPU_ERR_PA_TO_RA(vcpu, paddr, raddr, scr1, scr2)	\
	.pushlocals						;\
	VCPU2GUEST_STRUCT(vcpu, vcpu)				;\
	PA2RA_CONV(vcpu, paddr, raddr, scr1, scr2)		;\
	brnz,a,pn scr2, 1f	/* ret 0 is success */		;\
	mov	CPU_ERR_INVALID_RA, raddr			;\
1:								;\
	VCPU_STRUCT(vcpu)	/* restore VCPU */		;\
	.poplocals
628 | ||
629 | ||
/*
 * Deliver a trap to the guest at address 'pc': push a new trap level,
 * aim %tnpc at 'pc', build the target %tstate (privileged, AM/IE
 * cleared, GL field replaced with the error-time GL from GET_ERR_GL),
 * mark %htstate as guest context, and issue 'done' (which transfers
 * control to %tnpc). Does not return; clobbers pc, scr1, scr2.
 */
#define	TRAP_GUEST(pc, scr1, scr2)	\
	/* Read _current_ tstate */	;\
	rdpr	%tstate, scr2		;\
	/* Bump %tl */			;\
	rdpr	%tl, scr1		;\
	inc	scr1			;\
	wrpr	scr1, %tl		;\
	/* Arrange for done to go to 'pc' */	;\
	wrpr	pc, %tnpc		;\
	/* Set up target %tl's pstate */	;\
	andn	scr2, (PSTATE_AM | PSTATE_IE) << TSTATE_PSTATE_SHIFT, scr2 ;\
	or	scr2, (PSTATE_PRIV) << TSTATE_PSTATE_SHIFT, scr2 ;\
	sllx	scr2, 64 - TSTATE_GL_SHIFT, scr2	;\
	srlx	scr2, 64 - TSTATE_GL_SHIFT, scr2	;\
	GET_ERR_GL(pc)			;\
	sllx	pc, TSTATE_GL_SHIFT, pc	;\
	wrpr	scr2, pc, %tstate	;\
	mov	HTSTATE_GUEST, scr1	;\
	wrhpr	scr1, %htstate		;\
	done
650 | ||
/*
 * When correcting an FRFC error, we need to convert the correction
 * mask from an integer register to a FP register, so we store it
 * in CPU_FP_TMP3. We then load it into freg_scr1, load the FP reg
 * in error into freg_scr2, XOR and put the corrected data back
 * into the FP reg in error.
 */
/* single-precision FP ops */
/*
 * Expects the correction mask already staged at STRAND_FP_TMP3.
 * freg_scr1/freg_scr2 are spilled to FP_TMP1/2 and restored; branches
 * to 'label' with the final restore in the delay slot.
 */
#define	CORRECT_FRFC_SP(strand, freg_in_error, freg_scr1, freg_scr2, label) \
	st	freg_scr1, [strand + STRAND_FP_TMP1]	;\
	st	freg_scr2, [strand + STRAND_FP_TMP2]	;\
	ld	[strand + STRAND_FP_TMP3], freg_scr1	;\
	fmovs	freg_in_error, freg_scr2		;\
	fxors	freg_scr2, freg_scr1, freg_scr2		;\
	fmovs	freg_scr2, freg_in_error		;\
	ld	[strand + STRAND_FP_TMP1], freg_scr1	;\
	ba	label					;\
	ld	[strand + STRAND_FP_TMP2], freg_scr2

/* double-precision FP ops */
#define	CORRECT_FRFC_DP(strand, freg_in_error, freg_scr1, freg_scr2, label) \
	std	freg_scr1, [strand + STRAND_FP_TMP1]	;\
	std	freg_scr2, [strand + STRAND_FP_TMP2]	;\
	ldd	[strand + STRAND_FP_TMP3], freg_scr1	;\
	fmovd	freg_in_error, freg_scr2		;\
	fxor	freg_scr2, freg_scr1, freg_scr2		;\
	fmovd	freg_scr2, freg_in_error		;\
	ldd	[strand + STRAND_FP_TMP1], freg_scr1	;\
	ba	label					;\
	ldd	[strand + STRAND_FP_TMP2], freg_scr2

/* 9 instructions in each of the CORRECT_FRFC_* sequences above */
#define	CORRECT_FRFC_SIZE	(9 * SZ_INSTR)

/*
 * Correct a single-bit integer register file error: XOR the register
 * with the correction mask and branch to 'label' (writeback in the
 * delay slot). clobbers: scr
 */
#define	CORRECT_IRFC(ireg, correction_mask, scr, label)	\
	mov	ireg, scr				;\
	xor	scr, correction_mask, scr		;\
	ba	label					;\
	mov	scr, ireg

#define	CORRECT_IRFC_SIZE	(4 * SZ_INSTR)
691 | ||
/*
 * macro to get a new error_table_entry. Must be within the same
 * error table.
 *
 * Releases the in-use diag and sun4v report buffers (when present),
 * rewinds the current error table entry by 'num_entries' slots, and
 * re-enters error_handler with the new entry in %g1. Does not return.
 * Globals %g1, %g2, %g4, %g5 are clobbered.
 */
#define	CONVERT_CE_TO_UE(num_entries)	\
	.pushlocals			;\
	/*				;\
	 * Clear the error report in_use field	;\
	 */				;\
	GET_ERR_DIAG_BUF(%g4, %g5)	;\
	brnz,a,pt %g4, 1f		;\
	stub	%g0, [%g4 + ERR_DIAG_RPRT_IN_USE]	;\
1:					;\
	/*				;\
	 * Clear the sun4v report in_use field	;\
	 */				;\
	GET_ERR_SUN4V_RPRT_BUF(%g4, %g5)	;\
	brnz,a,pt %g4, 1f		;\
	stub	%g0, [%g4 + ERR_SUN4V_RPRT_IN_USE]	;\
1:					;\
					;\
	/*				;\
	 * get the current error table entry and calculate where the	;\
	 * new entry is from the num_entries to offset by		;\
	 */				;\
	GET_ERR_TABLE_ENTRY(%g1, %g2)	;\
	sub	%g1, num_entries * ERROR_TABLE_ENTRY_SIZE, %g1	;\
	/*				;\
	 * And now just start all over again ...	;\
	 */				;\
	ba	error_handler		;\
	nop				;\
	.poplocals
725 | ||
/*
 * Correct bad ECC in a trapstack array privileged register
 * index	trap level of error
 *
 * Temporarily switches %tl to 'index', reads priv_reg, flips
 * bit_in_error (read/rewrite scrubs the TSA entry), then branches to
 * 'label' restoring the original %tl in the delay slot.
 * clobbers: scr1, scr2, scr3
 */
#define	CORRECT_TSA_PREG(priv_reg, index, bit_in_error, scr1, scr2, scr3, label) \
	rdpr	%tl, scr3		;\
	wrpr	index, %tl		;\
	rdpr	priv_reg, scr1		;\
	mov	1, scr2			;\
	sllx	scr2, bit_in_error, scr2 ;\
	xor	scr1, scr2, scr1	;\
	wrpr	scr1, priv_reg		;\
	ba	label			;\
	wrpr	scr3, %tl

/*
 * Correct bad ECC in a trapstack array hyper-privileged register
 * index	trap level of error
 * (same scheme as CORRECT_TSA_PREG, using rdhpr/wrhpr)
 */
#define	CORRECT_TSA_HREG(hpriv_reg, index, bit_in_error, scr1, scr2, scr3, label) \
	rdpr	%tl, scr3		;\
	wrpr	index, %tl		;\
	rdhpr	hpriv_reg, scr1		;\
	mov	1, scr2			;\
	sllx	scr2, bit_in_error, scr2 ;\
	xor	scr1, scr2, scr1	;\
	wrhpr	scr1, hpriv_reg		;\
	ba	label			;\
	wrpr	scr3, %tl
755 | ||
/*
 * Correct bad ECC in a trapstack array queue ASI
 *
 * Reads the queue register at VA 'va' via ASI_QUEUE, flips
 * bit_in_error, and writes the corrected value back (store in the
 * branch delay slot), then continues at 'label'.
 * clobbers: scr1, scr2, scr3
 *
 * BUG FIX: the writeback previously stored scr2 (the single-bit
 * correction mask) instead of scr1 (the corrected queue value),
 * corrupting the queue register instead of scrubbing it. Now stores
 * scr1, matching CORRECT_TSA_PREG/CORRECT_TSA_HREG.
 */
#define	CORRECT_TSA_QUEUE(va, bit_in_error, scr1, scr2, scr3, label) \
	mov	va, scr3		;\
	ldxa	[scr3]ASI_QUEUE, scr1	;\
	mov	1, scr2			;\
	sllx	scr2, bit_in_error, scr2 ;\
	xor	scr1, scr2, scr1	;\
	ba	label			;\
	stxa	scr1, [scr3]ASI_QUEUE
767 | ||
/*
 * Scrub all trap-stack-array state for 'trap_level': read and rewrite
 * every trapstack register (%tpc/%tnpc/%tt/%tstate/%htstate) at that
 * level, then read and rewrite every ASI_QUEUE head/tail register
 * (rewriting regenerates the ECC). Ends with a branch to 'label',
 * final queue store in the delay slot.
 * clobbers: scr1, scr2; %tl is saved and restored.
 */
#define	CORRECT_TSA_ALL_REGS(trap_level, scr1, scr2, label)	\
	rdpr	%tl, scr2			;\
	wrpr	trap_level, %tl			;\
	rdpr	%tpc, scr1			;\
	wrpr	scr1, %tpc			;\
	rdpr	%tnpc, scr1			;\
	wrpr	scr1, %tnpc			;\
	rdpr	%tt, scr1			;\
	wrpr	scr1, %tt			;\
	rdpr	%tstate, scr1			;\
	wrpr	scr1, %tstate			;\
	rdhpr	%htstate, scr1			;\
	wrhpr	scr1, %htstate			;\
	wrpr	scr2, %tl			;\
	mov	ERROR_NONRESUMABLE_QUEUE_TAIL, scr1	;\
	ldxa	[scr1]ASI_QUEUE, scr2		;\
	stxa	scr2, [scr1]ASI_QUEUE		;\
	mov	ERROR_NONRESUMABLE_QUEUE_HEAD, scr1	;\
	ldxa	[scr1]ASI_QUEUE, scr2		;\
	stxa	scr2, [scr1]ASI_QUEUE		;\
	mov	ERROR_RESUMABLE_QUEUE_TAIL, scr1	;\
	ldxa	[scr1]ASI_QUEUE, scr2		;\
	stxa	scr2, [scr1]ASI_QUEUE		;\
	mov	ERROR_RESUMABLE_QUEUE_HEAD, scr1	;\
	ldxa	[scr1]ASI_QUEUE, scr2		;\
	stxa	scr2, [scr1]ASI_QUEUE		;\
	mov	DEV_MONDO_QUEUE_TAIL, scr1	;\
	ldxa	[scr1]ASI_QUEUE, scr2		;\
	stxa	scr2, [scr1]ASI_QUEUE		;\
	mov	DEV_MONDO_QUEUE_HEAD, scr1	;\
	ldxa	[scr1]ASI_QUEUE, scr2		;\
	stxa	scr2, [scr1]ASI_QUEUE		;\
	mov	CPU_MONDO_QUEUE_TAIL, scr1	;\
	ldxa	[scr1]ASI_QUEUE, scr2		;\
	stxa	scr2, [scr1]ASI_QUEUE		;\
	mov	CPU_MONDO_QUEUE_HEAD, scr1	;\
	ldxa	[scr1]ASI_QUEUE, scr2		;\
	ba	label				;\
	stxa	scr2, [scr1]ASI_QUEUE
807 | ||
/*
 * If we get an SCAC/SCAU error on HSCRATCH_VCPU_STRUCT, we can't use the CPU_STRUCT()
 * macro as this will cause further errors. We will reload the HSCRATCH0
 * register with the appropriate config.cpus[] address, clobbering the
 * globals in the process.
 *
 * The SCAU handler will check for this register
 * and if it is in error - and we have not clobbered MAXGL globals - it
 * will convert the error into an SCAC and continue.
 *
 * Flow: if the D-SFSR is not SCAC/SCAU, or the faulting scratchpad
 * index (from D-SFAR) is neither the VCPU nor the STRAND slot, skip to
 * the end (label 3). Otherwise recompute &strands[phys_strand_id],
 * rewrite HSCRATCH_STRAND_STRUCT, and rebuild the VCPU scratchpad
 * pointer. Globals %g1-%g6 are clobbered.
 */
#define	SCRATCHPAD_ERROR()	\
	.pushlocals		;\
	mov	MMU_SFSR, %g1	;\
	ldxa	[%g1]ASI_DMMU, %g1	;\
	cmp	%g1, DSFSR_SCAU	;\
	be,pt	%xcc, 1f	;\
	cmp	%g1, DSFSR_SCAC	;\
	be,pt	%xcc, 1f	;\
	nop			;\
	ba,pt	%xcc, 3f	;\
	.empty			;\
1:				;\
	mov	MMU_SFAR, %g1	;\
	ldxa	[%g1]ASI_DMMU, %g1	;\
	srlx	%g1, DSFAR_SCRATCHPAD_INDEX_SHIFT, %g1	;\
	and	%g1, DSFAR_SCRATCHPAD_INDEX_MASK, %g1	;\
	sllx	%g1, 3, %g1	/* VA of scratchpad reg (index * 8) */	;\
	cmp	%g1, HSCRATCH_VCPU_STRUCT	;\
	be,pt	%xcc, 2f	;\
	cmp	%g1, HSCRATCH_STRAND_STRUCT	;\
	be,pt	%xcc, 2f	;\
	nop			;\
	ba,pt	%xcc, 3f	;\
	.empty			;\
2:				;\
	/*			;\
	 * First the strand struct	;\
	 */			;\
	PHYS_STRAND_ID(%g3)	;\
	set	STRAND_SIZE, %g2	;\
	mulx	%g3, %g2, %g3	;\
	setx	strands, %g4, %g5	;\
	RELOC_OFFSET(%g4, %g6)	;\
	sub	%g5, %g6, %g4	/* &strands */	;\
	add	%g3, %g4, %g3	/* &strands[core_id] */	;\
	mov	HSCRATCH_STRAND_STRUCT, %g2	;\
	stxa	%g3, [%g2]ASI_HSCRATCHPAD	;\
				;\
	/*			;\
	 * Restore scratchpad VCPU pointer for that strand	;\
	 */			;\
	STRAND_STRUCT(%g1)	;\
	ldub	[%g1 + STRAND_ID], %g1	;\
	PID2VCPUP(%g1, %g2, %g3, %g4)	;\
	SET_VCPU_STRUCT(%g2, %g3)	;\
3:				;\
	.poplocals
865 | ||
/*
 * IRF index in D-SFAR is not Sparc V9 index
 *
 * Where %cwp even
 *	index 0  --> %g0
 *	index 8  --> %o0
 *	index 16 --> %l0
 *	index 24 --> %i0
 *
 * For an odd window, the IN and OUT registers change position :-
 *	index 0  --> %g0
 *	index 24 --> %o0
 *	index 16 --> %l0
 *	index 8  --> %i0
 *
 * returns converted idx, scr clobbered
 *
 * (Fixed: the #define line previously ended with ';\', injecting a
 * spurious leading ';' statement separator into every expansion —
 * inconsistent with every other macro in this file.)
 */
#define	CONVERT_IRF_INDEX(idx, scr)	\
	.pushlocals			;\
	cmp	idx, 8	/* skip globals */	;\
	bl	%xcc, 2f		;\
	nop				;\
	sub	idx, 8, idx	/* idx - 8 , (lose globals) */	;\
	GET_ERR_CWP(scr)		;\
	btst	0x1, scr		;\
	bz,pt	%xcc, 1f /* even window, no change */	;\
	/* odd cwp, idx is register index - 8 */	;\
	cmp	idx, 15			;\
	bg,a,pt	%xcc, 1f		;\
	/* %o register, index 24->31 back to 8->15 (- 8 remember) */	;\
	sub	idx, 16, idx		;\
	cmp	idx, 8	/* (%o7 - 8 + 1) */	;\
	bl,a,pt	%xcc, 1f		;\
	/* %i register, index 8->15 up to 24->31 (- 8 of course) */	;\
	add	idx, 16, idx		;\
1:					;\
	add	idx, 8, idx	/* globals back in */	;\
2:					;\
	.poplocals
905 | ||
/*
 * Calculate parity over data
 *
 * Folds the 64-bit value in half repeatedly (32/16/8/4/2/1) and
 * leaves the even parity bit (0 or 1) in 'parity'.
 * NOTE: 'data' is destroyed in the process.
 */
#define	GEN_PARITY(data, parity)	\
	srlx	data, 32, parity	;\
	xor	parity, data, data	;\
	srlx	data, 16, parity	;\
	xor	parity, data, data	;\
	srlx	data, 8, parity		;\
	xor	parity, data, data	;\
	srlx	data, 4, parity		;\
	xor	parity, data, data	;\
	srlx	data, 2, parity		;\
	xor	parity, data, data	;\
	srlx	data, 1, parity		;\
	xor	parity, data, data	;\
	and	data, 1, parity
923 | ||
924 | ||
/*
 * Calculate check bits [6:0] for floating point data
 *
 * Walks the NO_FRF_ECC_MASKS entries of frfc_ecc_mask_table (relocated
 * via RELOC_OFFSET); for each mask, the parity of (data & mask) is
 * shifted into 'chk' MSB-first, yielding the ECC check bits in
 * chk[6:0].
 * clobbers: scr1, scr2, scr3; scr4 holds each per-mask parity
 * (GEN_PARITY destroys scr3, not 'data').
 */
#define	GEN_FRF_CHECK(data, chk, scr1, scr2, scr3, scr4)	\
	.pushlocals					;\
	setx	frfc_ecc_mask_table, scr2, scr1		;\
	RELOC_OFFSET(scr2, scr3)			;\
	sub	scr1, scr3, scr1			;\
							;\
	set	NO_FRF_ECC_MASKS, scr2			;\
	mov	0, chk					;\
	ba	2f					;\
	nop						;\
1:							;\
	add	scr1, ECC_MASK_TABLE_ENTRY_SIZE, scr1	;\
2:							;\
	lduw	[scr1], scr3	/* get appropriate mask */	;\
							;\
	and	scr3, data, scr3 /* mask off unwanted data */	;\
							;\
	GEN_PARITY(scr3, scr4)				;\
							;\
	sllx	chk, 1, chk				;\
							;\
	subcc	scr2, 1, scr2				;\
	bgt	%xcc, 1b				;\
	or	scr4, chk, chk				;\
	.poplocals
953 | ||
/*
 * Zero the SOC error injection register. Only meaningful on real
 * hardware debug builds; compiles away entirely otherwise (including
 * under the Legion simulator, which does not model the register).
 */
#if defined(DEBUG) && !defined(DEBUG_LEGION)
#define	CLEAR_SOC_INJECTOR_REG(scr1, scr2)	\
	setx	SOC_ERROR_INJECTION_REG, scr1, scr2	;\
	stx	%g0, [scr2]
#else
#define	CLEAR_SOC_INJECTOR_REG(scr1, scr2)
#endif
961 | ||
/*
 * CE-blackout gate: if the strand/config structures are unavailable,
 * or config.ce_blackout is zero (blackout disabled), return from the
 * caller via HVRET. Only falls through past the macro when a non-zero
 * blackout interval is configured.
 * clobbers: scr
 */
#define	CHECK_BLACKOUT_INTERVAL(scr)	\
	.pushlocals			;\
	STRAND_STRUCT(scr)		;\
	brz,pn	scr, 0f			;\
	nop				;\
	STRAND2CONFIG_STRUCT(scr, scr)	;\
	brz,pn	scr, 0f			;\
	nop				;\
	ldx	[scr + CONFIG_CE_BLACKOUT], scr	;\
	brnz,pn	scr, 1f	/* zero: blackout disabled */	;\
	nop				;\
0:					;\
	HVRET				;\
1:					;\
	.poplocals
977 | ||
978 | /* END CSTYLED */ | |
979 | ||
980 | #ifdef __cplusplus | |
981 | } | |
982 | #endif | |
983 | ||
984 | #endif /* _NIAGARA2_ERROR_ASM_H */ |