/*
 * Initial commit of OpenSPARC T2 architecture model.
 * [OpenSPARC-T2-SAM] / legion / src / procs / sunsparc / libniagara2 / niagara2_error.c
 */
/*
* ========== Copyright Header Begin ==========================================
*
* OpenSPARC T2 Processor File: niagara2_error.c
* Copyright (c) 2006 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES.
*
* The above named program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public
* License version 2 as published by the Free Software Foundation.
*
* The above named program is distributed in the hope that it will be
* useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this work; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
*
* ========== Copyright Header End ============================================
*/
/*
* Copyright 2007 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
*/
#pragma ident "@(#)niagara2_error.c 1.8 07/02/28 SMI"
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <strings.h>
#include "ss_common.h"
#include "niagara2.h"
#if ERROR_INJECTION
/*
* This file contains Niagara 2 specific error injection routines.
*/
/*
 * Build the Niagara 2 error description table and install each entry
 * into the global ss_error_list[], indexed by error type.  The E(),
 * T() and B() macros expand to the error number/name, trap type/name
 * and CERER enable-bit fields of ss_error_desc_t.
 *
 * NOTE(review): L2C and SOCU each appear twice below (once per trap
 * type); since the install loop indexes by error_type, the later
 * entry silently overwrites the earlier one -- confirm this is the
 * intended precedence.
 */
void niagara2_init_error_list()
{
	int i;
	/*
	 * Setup the error list in terms of the following format:
	 *
	 * Error <number, name>, Trap <type, name>, Trap <priority>, Encoded <errtype>, CERER.<bit>
	 *
	 */
	ss_error_desc_t error_init_list[] = {
	{ E( ITTM ), T( instruction_access_MMU_error), 1, 1, B( ITTM, 61 ) },
	{ E( ITTP ), T( instruction_access_MMU_error), 2, 2, B( ITTP, 63 ) },
	{ E( ITDP ), T( instruction_access_MMU_error), 3, 3, B( ITDP, 62 ) },
	{ E( ITMU ), T( instruction_access_MMU_error), 4, 4, B( HWTWMU, 59) },
	{ E( ITL2U ), T( instruction_access_MMU_error), 5, 5, B( (char *)0,0) },
	{ E( ITL2ND ), T( instruction_access_MMU_error), 5, 6, B( (char *)0, 0) },
	{ E( ICL2U ), T( instruction_access_MMU_error), 6, 1, B( ICL2U, 54) },
	{ E( ICL2ND ), T( instruction_access_MMU_error), 6, 2, B( ICL2ND, 53) },
	{ E( IRFU ), T( internal_processor_error ), 1, 1, B( IRF, 52) },
	{ E( IRFC ), T( internal_processor_error ), 2, 2, B( IRF, 52) },
	{ E( FRFU ), T( internal_processor_error ), 3, 3, B( FRF, 50) },
	{ E( FRFC ), T( internal_processor_error ), 4, 4, B( FRF, 50) },
	{ E( SBDLC ), T( internal_processor_error ), 10, 5, B( SBDLC, 37) },
	{ E( SBDLU ), T( internal_processor_error ), 10, 6, B( SBDLU, 36) },
	{ E( MRAU ), T( internal_processor_error ), 11, 7, B( MRAU, 33) },
	{ E( TSAC ), T( internal_processor_error ), 11, 8, B( TSAC, 32) },
	{ E( TSAU ), T( internal_processor_error ), 11, 9, B( TSAU, 31) },
	{ E( SCAC ), T( internal_processor_error ), 11, 10, B( SCAC, 30) },
	{ E( SCAU ), T( internal_processor_error ), 11, 11, B( SCAU, 29) },
	{ E( TCCP ), T( internal_processor_error ), 11, 12, B( TCCP, 28) },
	/*
	 * NOTE(review): enable bit 6 below collides with TCUD further
	 * down; TCCP is bit 28, so bit 27 may be intended for TCCU --
	 * confirm against the N2 PRM CERER layout before relying on it.
	 */
	{ E( TCCU ), T( internal_processor_error ), 11, 13, B( TCCU, 6) },
	{ E( DTTM ), T( data_access_MMU_error ), 5, 1, B( DTTM, 47) },
	{ E( DTTP ), T( data_access_MMU_error ), 6, 2, B( DTTP, 48) },
	{ E( DTDP ), T( data_access_MMU_error ), 7, 3, B( DTDP, 46) },
	{ E( DTMU ), T( data_access_MMU_error ), 8, 4, B( HWTWMU, 59) },
	{ E( DTL2U ), T( data_access_MMU_error ), 9, 5, B( (char *)0, 0) },
	{ E( DTL2ND ), T( data_access_MMU_error ), 9, 6, B( (char *)0, 0) },
	{ E( DCL2U ), T( data_access_error ), 11, 1, B( DCL2U, 39) },
	{ E( DCL2ND ), T( data_access_error ), 11, 2, B( DCL2ND, 38) },
	{ E( SOCU ), T( data_access_error ), 12, 4, B( (char *)0, 0) },
	{ E( ICVP ), T( hw_corrected_error ), Pri(11, 1), 1, B( ICVP, 18) },
	{ E( ICTP ), T( hw_corrected_error ), Pri(11, 2), 2, B( ICTP, 17) },
	{ E( ICTM ), T( hw_corrected_error ), Pri(11, 3), 3, B( ICTM, 16) },
	{ E( ICDP ), T( hw_corrected_error ), Pri(11, 4), 4, B( ICDP, 15) },
	{ E( DCVP ), T( hw_corrected_error ), Pri(12, 1), 5, B( DCVP, 14) },
	/*
	 * FIX: DCTP was Pri(12, 1), duplicating DCVP's sub-priority;
	 * the sub-priorities run 1,2,3,4 exactly as in the parallel
	 * ICVP..ICDP group above, so 2 is the consistent value.
	 */
	{ E( DCTP ), T( hw_corrected_error ), Pri(12, 2), 6, B( DCTP, 13) },
	{ E( DCTM ), T( hw_corrected_error ), Pri(12, 3), 7, B( DCTM, 12) },
	{ E( DCDP ), T( hw_corrected_error ), Pri(12, 4), 8, B( DCDP, 11) },
	{ E( L2C ), T( hw_corrected_error ), 13, 9, B( (char *)0, 0) },
	{ E( SBDPC ), T( hw_corrected_error ), 14, 10, B( SBDPC, 10) },
	{ E( SOCC ), T( hw_corrected_error ), 15, 11, B( (char *)0, 0) },
	{ E( SBDPU ), T( sw_recoverable_error), 1, 6, B( SBDPU, 9) },
	{ E( TCCD ), T( sw_recoverable_error), 2, 14, B( TCCD, 7) },
	{ E( TCUD ), T( sw_recoverable_error), 2, 15, B( TCUD, 6) },
	{ E( MAMU ), T( sw_recoverable_error), 3, 7, B( MAMU, 8) },
	{ E( MAL2C ), T( sw_recoverable_error), 3, 8, B( MAL2C, 5) },
	{ E( MAL2U ), T( sw_recoverable_error), 3, 9, B( MAL2U, 4) },
	{ E( MAL2ND ), T( sw_recoverable_error), 3, 10, B( MAL2ND,3) },
	{ E( CWQL2C ), T( sw_recoverable_error), 4, 11, B( CWQL2C, 2) },
	{ E( CWQL2U ), T( sw_recoverable_error), 4, 12, B( CWQL2U, 1) },
	{ E( CWQL2ND ), T( sw_recoverable_error), 4, 13, B( CWQL2ND, 0) },
	{ E( L2C ), T( sw_recoverable_error), 5, 20, B( (char *)0, 0) },
	{ E( L2U ), T( sw_recoverable_error), 5, 16, B( (char *)0, 0) },
	{ E( L2ND ), T( sw_recoverable_error), 5, 17, B( (char *)0, 0) },
	{ E( ITL2C ), T( sw_recoverable_error), 6, 1, B( (char *)0, 0) },
	{ E( ICL2C ), T( sw_recoverable_error), 6, 2, B( (char *)0, 0) },
	{ E( DTL2C ), T( sw_recoverable_error), 6, 3, B( (char *)0, 0) },
	{ E( DCL2C ), T( sw_recoverable_error), 6, 4, B( (char *)0, 0) },
	{ E( SOCU ), T( sw_recoverable_error), 7, 19, B( (char *)0, 0) },
	{ E( NONE ), 0, (char *)0, 0, 0, B( (char *)0, 0) }	/* sentinel */
	};

	/* Install every entry until the NONE sentinel is reached. */
	for (i = 0; error_init_list[i].error_type != NONE; i ++)
		ss_error_list[error_init_list[i].error_type] = error_init_list[i];
}
/*
 * Map the error-type token currently held by the lexer (lex.strp)
 * onto an ss_error_list[] entry, filling in errorconfp->type and a
 * strdup'd copy of the name.  Terminates parsing via lex_fatal() when
 * no table entry matches.
 */
void extract_error_type(error_conf_t *errorconfp)
{
	int slot;

	/* Entry 0 is never matched; scan the rest of the table. */
	for (slot = 1; slot < ERROR_MAXNUM; slot++) {
		if (!streq(lex.strp, ss_error_list[slot].error_name))
			continue;
		errorconfp->type = ss_error_list[slot].error_type;
		errorconfp->type_namep = strdup(lex.strp);
		return;
	}

	lex_fatal("unknown error type parsing error config");
}
/*
 * Recompute this cpu's fast-path error check flags from the error
 * configs currently installed: each flag is true exactly when at
 * least one matching error config exists.
 */
void update_errflags(simcpu_t * sp)
{
	error_t *errorp = sp->errorp;

	errorp->check_xdcache =
	    (find_errconf(sp, (LD|ST), (IRFC|IRFU|FRFC|FRFU)) != NULL);
	errorp->check_xicache =
	    (find_errconf(sp, IFETCH, (ICVP|ICTP|ICTM|ICDP)) != NULL);
	errorp->check_dtlb =
	    (find_errconf(sp, (LD|ST), (DTTM|DTTP|DTDP|DTMU)) != NULL);
}
/*
* If demap of tlb entry with parity error detected then remove error config
*/
/*
 * If a TLB entry being demapped is the one latched as carrying an
 * injected parity error, drop the corresponding error config and
 * clear the latch.  The body is unported Niagara 1 code and is
 * compiled out by ERROR_INJECTION_FIXME.
 */
void tlb_entry_error_match(simcpu_t *sp, ss_mmu_t *mmup, tlb_entry_t *tep)
{
	error_conf_t *ep;

	FIXME_WARNING(("tlb_entry_error_match() is not implemented!"));
#if ERROR_INJECTION_FIXME
	DBGERR( lprintf(sp->gid, "ss_tlb_insert(): errorp->itep=%x"
	    " errorp->dtep=%x tep=%x\n",
	    sp->errorp->itep, sp->errorp->dtep, tep); );

	if (sp->error_enabled) {
		/* ITLB side: entry matches and this is the IMMU. */
		if (sp->errorp->itep == tep && mmup->is_immu) {
			if ((ep = find_errconf(sp, IFETCH, IMDU)) == NULL)
				goto tlb_warning;
			/* Removing the last config clears all flags. */
			if (remove_errconf(sp, ep) == NULL)
				clear_errflags(sp); else update_errflags(sp);
			sp->errorp->itep = NULL;
			return;
		} else
		/* DTLB side: entry matches and this is the DMMU. */
		if (sp->errorp->dtep == tep && !mmup->is_immu) {
			if ((ep = find_errconf(sp, (LD|ST), DMDU)) == NULL)
				goto tlb_warning;
			if (remove_errconf(sp, ep) == NULL)
				clear_errflags(sp); else update_errflags(sp);
			sp->errorp->dtep = NULL;
			return;
		}
		return;

		/* Latched entry with no matching error config: complain. */
tlb_warning:	EXEC_WARNING(("tlb_entry_error_match(): tracking tlb"
		    " entry in error for non-existent error config"));
	}
#endif /* ERROR_INJECTION_FIXME */
}
/*
 * Check whether an ITLB hit on tep should raise an injected ITDP
 * error.  The first qualifying hit latches tep as the entry in error;
 * later hits only fire when the same entry is hit again.  Returns
 * true when the error condition was raised.
 */
bool_t itlb_hit_error_match(simcpu_t *sp, tlb_entry_t *tep)
{
	error_t *errorp = sp->errorp;
	error_conf_t *ep;

	if (!sp->error_check)
		return false;

	ep = find_errconf(sp, IFETCH, ITDP);
	if (ep == NULL)
		return false;

	if (!errorp->itep) {
		/* First hit: latch this entry as the one in error. */
		errorp->itep = tep;
		ss_error_condition(sp, ep);
		return true;
	}

	DBGERR( lprintf(sp->gid, "ss_xic_miss(): "
	    " errorp->itep=%x, tep=%x\n", errorp->itep, tep); );

	if ((tlb_entry_t *)errorp->itep == tep) {
		ss_error_condition(sp, ep);
		return true;
	}

	return false;
}
/*
 * Check whether a DTLB hit on tep should raise an injected DMDU/DMSU
 * error; returns true when an error condition was raised.  The body
 * is unported Niagara 1 code, compiled out by ERROR_INJECTION_FIXME.
 */
bool_t dtlb_hit_error_match(simcpu_t *sp, int op, tlb_entry_t *tep, tpaddr_t va)
{
	error_t *errorp = sp->errorp;
	error_conf_t *ep;

	FIXME_WARNING(("dtlb_hit_error_match() is not implemented!"));
#if ERROR_INJECTION_FIXME
	if (sp->error_check == true && errorp->check_dtlb) {
		bool_t is_load, is_store;

		is_load = IS_V9_MA_LOAD(op);
		is_store = IS_V9_MA_STORE(op);
		/*
		 * NOTE(review): ep is tested below but only assigned when
		 * the access is a load or a store -- it is read
		 * uninitialized for any other access type.  Fix before
		 * enabling this code.
		 */
		if (is_load)
			ep = find_errconf(sp, LD, DMDU);
		else
		if (is_store) ep = find_errconf(sp, ST, DMSU);

		if (ep) {
			if (errorp->dtep) {
				DBGERR( lprintf(sp->gid, "ss_memory_asi_access: "
				    "errorp->dtep=%x, tep=%x\n", errorp->dtep,tep); );
				/* Only the latched in-error entry fires. */
				if ((tlb_entry_t *)errorp->dtep == tep) {
					ss_error_condition(sp, ep);
					return true;
				}
			} else {
				/* First hit: latch the entry and VA, then fire. */
				errorp->dtep = tep;
				errorp->addr = va;
				ss_error_condition(sp, ep);
				return true;
			}
		}
	}
#endif /* ERROR_INJECTION_FIXME */
	return false;
}
/*
 * Check whether an instruction-cache access at pa should raise an
 * injected I-cache or L2 error.  The body is unported Niagara 1 code,
 * compiled out by ERROR_INJECTION_FIXME.
 */
void xicache_error_match(simcpu_t *sp, tpaddr_t pa)
{
	error_t *errorp = sp->errorp;

	FIXME_WARNING(("xicache_error_match() is not implemented!"));
#if ERROR_INJECTION_FIXME
	if (sp->error_check == true && errorp->check_xicache) {
		ss_proc_t *npp;
		error_conf_t *ep;

		DBGERR( lprintf(sp->gid, "ss_xic_miss(): ifetch cache hit\n"); );

		ep = find_errconf(sp, IFETCH, ITC|IDC|LDAC|LDAU|DAC|DAU);
		npp = sp->config_procp->procp;
		if (ep) {
			switch(ep->type) {
			case ITC:
			case IDC:
				errorp->addr = pa;
				ss_error_condition(sp, ep);
				break;
			case LDAC:
			case LDAU:
				/*
				 * NOTE(review): 'bank' is never declared in
				 * this function, so this code cannot compile
				 * as-is if the FIXME guard is ever enabled.
				 * Also, this arm and the DAC/DAU arms below
				 * are identical -- candidates for merging.
				 */
				for (bank=0; bank<npp->num_l2banks; bank++) {
					if (npp->l2p->control[bank] & L2_DIS) goto l2_disabled;
				}
				errorp->addr = pa;
				ss_error_condition(sp, ep);
				break;
			case DAC:
				for (bank=0; bank<npp->num_l2banks; bank++) {
					if (npp->l2p->control[bank] & L2_DIS) goto l2_disabled;
				}
				errorp->addr = pa;
				ss_error_condition(sp, ep);
				break;
			case DAU:
				for (bank=0; bank<npp->num_l2banks; bank++) {
					if (npp->l2p->control[bank] & L2_DIS) goto l2_disabled;
				}
				errorp->addr = pa;
				ss_error_condition(sp, ep);
				break;
			/* All L2-backed arms bail out here when any bank is disabled. */
l2_disabled:			DBGERR( lprintf(sp->gid, "ss_xic_miss: No LDAC/LDAU Error"
				    " - L2 disabled\n"); );
				break;
			default:
				break;
			}
		}
	}
#endif /* ERROR_INJECTION_FIXME */
}
/*
 * Check whether an L2/DRAM access (op at physical address pa) matches
 * either a processor-wide latched LDAC/LDAU error address or one of
 * this strand's error configs, raising the error condition when it
 * does.  Returns true when an error was raised.  The bulk of the body
 * is unported Niagara 1 code, compiled out by ERROR_INJECTION_FIXME.
 *
 * NOTE(review): the braces of the two top-level if statements straddle
 * the #if region deliberately -- with ERROR_INJECTION_FIXME off, the
 * final closing brace before 'return false' terminates the first if.
 */
bool_t l2dram_access_error_match(simcpu_t *sp, int op, tpaddr_t pa)
{
	error_t *errorp = sp->errorp;
	ss_proc_t *npp = sp->config_procp->procp;
	error_conf_t *ep;

	FIXME_WARNING(("l2dram_access_error_match() is not implemented!"));

	/* First: processor-wide latched error addresses. */
	if (npp->error_check) {
		l2c_t *l2p = npp->l2p;
		bool_t is_load, is_store, is_atomic;
		uint8_t bank;

		is_load = IS_V9_MA_LOAD(op);
		is_store = IS_V9_MA_STORE(op);
		is_atomic = IS_V9_MA_ATOMIC(op);
#if ERROR_INJECTION_FIXME
		/* check if direct-map mode displacement flushing the error cacheline */
		l2p = npp->l2p;
		bank = (pa >> 6) & 0x3;
		if (l2p->control[bank] & L2_DMMODE) {
			if ((pa & L2_DM_MASK) == (npp->errorp->ldac_addr & L2_DM_MASK)) {
				npp->errorp->ldac_addr = NULL;
				ss_set_errcheck(npp);
				goto npp_err_done;
			}
			if ((pa & L2_DM_MASK) == (npp->errorp->ldau_addr & L2_DM_MASK)) {
				/*
				 * FIX: was clearing ldac_addr (copy-paste from
				 * the branch above); it is the LDAU address
				 * that matched here.
				 */
				npp->errorp->ldau_addr = NULL;
				ss_set_errcheck(npp);
				goto npp_err_done;
			}
		}
		/*
		 * when accessing cacheline with error: load or partial store
		 * causes LDAC or LDAU, store to line with correctible error clears it,
		 * store to uncorrectible causes a writeback error
		 *
		 * NOTE(review): 'size' below is not declared anywhere in this
		 * function; it must be supplied (e.g. as a parameter, as in
		 * the Niagara 1 original) before this code can be enabled.
		 */
		if (pa == npp->errorp->ldac_addr) {
			if (is_load ||
			    (is_store && (size == MA_Size8 || size == MA_Size16))) {
				ep = new_errconf((is_load ? LD : ST), LDAC);
				ep->npp = true;
				goto lda_err;
			} else if (is_store) {
				npp->errorp->ldac_addr = NULL;
				ss_set_errcheck(npp);
			}
		} else if (pa == npp->errorp->ldau_addr) {
			/*
			 * FIX: was 'pa = npp->errorp->ldau_addr' -- an
			 * assignment used as the condition, which clobbered
			 * pa and made the branch taken whenever ldau_addr
			 * was nonzero.  An equality test is clearly intended.
			 */
			if (is_load ||
			    (is_store && (size == MA_Size8 || size == MA_Size16))) {
				ep = new_errconf((is_load ? LD : ST), LDAU);
				ep->npp = true;
				goto lda_err;
			} else if (is_store) {
				npp->errorp->ldau_addr = NULL;
				ss_set_errcheck(npp);
			}
		}
	}

npp_err_done:
	/* now check for errors to be generated from this thread's error list */
	if (sp->error_check && errorp->check_xdcache) {
		bool_t is_load, is_store, is_atomic;
		uint8_t bank;
		xicache_t * xicp;
		xicache_instn_t * xip;
		uint64_t xidx;
		tvaddr_t xpc;

		is_load = IS_V9_MA_LOAD(op);
		is_store = IS_V9_MA_STORE(op);
		is_atomic = IS_V9_MA_ATOMIC(op);

		/*
		 * NOTE(review): ep is tested below but only assigned for
		 * loads and stores -- it is read uninitialized for any
		 * other access type.  Fix before enabling this code.
		 */
		if (is_load) ep = find_errconf(sp, LD,
		    (DTC|DDC|IRC|IRU|FRC|FRU|LDAC|LDWC|LDAU|LDWU|DAC|DAU));
		else
		if (is_store) ep = find_errconf(sp, ST,
		    (DTC|DDC|IRC|IRU|FRC|FRU|LDAC|LDWC|LDAU|LDWU|DAC|DAU));

		if (ep)
			switch(ep->type) {
			case IRC:
			case IRU:
			case FRC:
			case FRU:
				/* register-file errors report the current instruction's rs1 */
				xicp = sp->xicachep;
				xpc = sp->pc;
				xidx = (xpc>>2) & XICACHE_NUM_INSTR_MASK;
				xip = &xicp->instn[xidx];
				errorp->reg = X_RS1(xip->rawi);
				ss_error_condition(sp, ep);
				return true;
			case DTC:
			case DDC:
				errorp->addr = pa;
				ss_error_condition(sp, ep);
				return true;
			/* lda_err is entered from the latched-address path above */
			lda_err: case LDAU:
			case LDAC:
				l2p = npp->l2p;
				for (bank=0; bank<npp->num_l2banks; bank++) {
					if (l2p->control[bank] & L2_DIS) goto l2_disabled;
				}
				if (is_load) {
					if (is_atomic) errorp->l2_write = L2_RW_bit;
					errorp->addr = pa;
					ss_error_condition(sp, ep);
					return true;
				} else
				if (is_store && (size == MA_Size8 || size == MA_Size16)) {
					errorp->l2_write = L2_RW_bit;
					errorp->partial_st = true;
					errorp->addr = pa;
					ss_error_condition(sp, ep);
					return true;
				}
				break;
			/* NOTE(review): nothing jumps to ldw_err -- dead label */
			ldw_err: case LDWU:
			case LDWC:
				l2p = npp->l2p;
				for (bank=0; bank<npp->num_l2banks; bank++) {
					if (l2p->control[bank] & L2_DIS) goto l2_disabled;
				}
				if (is_store) {
					errorp->addr = pa;
					ss_error_condition(sp, ep);
					return true;
				}
				break;
			case DAC:
				l2p = npp->l2p;
				for (bank=0; bank<npp->num_l2banks; bank++) {
					if (l2p->control[bank] & L2_DIS) goto l2_disabled;
				}
				if (ep->op == LD && is_load) {
					if (is_atomic) errorp->l2_write = L2_RW_bit;
					errorp->addr = pa;
					ss_error_condition(sp, ep);
					return true;
				} else
				if (ep->op == ST && is_store) {
					if (size == MA_Size8 || size == MA_Size16)
						errorp->partial_st = true;
					errorp->l2_write = L2_RW_bit;
					errorp->addr = pa;
					ss_error_condition(sp, ep);
					return true;
				}
				break;
			case DAU:
				l2p = npp->l2p;
				for (bank=0; bank<npp->num_l2banks; bank++) {
					if (l2p->control[bank] & L2_DIS) goto l2_disabled;
				}
				if (ep->op == LD && is_load) {
					if (is_atomic) errorp->l2_write = L2_RW_bit;
					errorp->addr = pa;
					ss_error_condition(sp, ep);
					return true;
				} else
				if (ep->op == ST && is_store) {
					if (size == MA_Size8 || size == MA_Size16)
						errorp->partial_st = true;
					errorp->l2_write = L2_RW_bit;
					errorp->addr = pa;
					ss_error_condition(sp, ep);
					return true;
				}
				break;
			l2_disabled: DBGERR( lprintf(sp->gid, "ss_memory_asi_access: "
				    "No LDAC/LDWC/LDAU/LDWU/DAC Error - L2 disabled\n"); );
				break;
			}
#endif /* ERROR_INJECTION_FIXME */
	}
	return false;
}
/*
 * Check whether an ASI load of TLB data at index idx should raise an
 * injected TLB data-parity error (ITDP for the IMMU, DTDP for the
 * DMMU).  Returns true when the error condition was raised.
 */
bool_t tlb_data_access_error_match(simcpu_t *sp, ss_mmu_t *mmup, uint64_t idx)
{
	error_conf_t *ep;

	FIXME_WARNING(("tlb_data_access_error_match() is not implemented!"));

	if (sp->error_check != true)
		return false;

	ep = find_errconf(sp, ASI_LD, ITDP|DTDP);
	if (ep == NULL)
		return false;

	/* The config must agree with the MMU side being accessed. */
	if (mmup->is_immu) {
		if (ep->type != ITDP)
			return false;
		sp->errorp->tlb_idx[IMDU_IDX] = idx;
	} else {
		if (ep->type != DTDP)
			return false;
		sp->errorp->tlb_idx[DMDU_IDX] = idx;
	}

	ss_error_condition(sp, ep);
	return true;
}
/*
 * Check whether an ASI load of TLB tags at index idx should raise an
 * injected TLB tag-parity error (ITTP for the IMMU, DTTP for the
 * DMMU).  Returns true when the error condition was raised.
 */
bool_t tlb_tag_access_error_match(simcpu_t *sp, ss_mmu_t *mmup, uint64_t idx)
{
	error_conf_t *ep;

	FIXME_WARNING(("tlb_tag_access_error_match() is not implemented!"));

	if (sp->error_check != true)
		return false;

	ep = find_errconf(sp, ASI_LD, ITTP|DTTP);
	if (ep == NULL)
		return false;

	/* The config must agree with the MMU side being accessed. */
	if (mmup->is_immu) {
		if (ep->type != ITTP)
			return false;
		sp->errorp->tlb_idx[IMTU_IDX] = idx;
	} else {
		if (ep->type != DTTP)
			return false;
		sp->errorp->tlb_idx[DMTU_IDX] = idx;
	}

	ss_error_condition(sp, ep);
	return true;
}
/*
 * Raise the error condition described by ep on strand sp: set the
 * relevant error status/address registers and post the corresponding
 * trap.  Only the ITDP case is currently live for Niagara 2; every
 * case between the ERROR_INJECTION_FIXME guards is unported Niagara 1
 * code and is compiled out.
 */
void ss_error_condition(simcpu_t *sp, error_conf_t *ep)
{
	ss_strand_t * nsp;
	ss_proc_t * npp;
	l2c_t * l2p;
	mcu_bank_t * dbp;
	simcpu_t * esp;
	sparcv9_cpu_t * v9p;
	sparcv9_trap_type_t tt;
	error_t * errorp;
	uint8_t bank,tid;
	int idx;

	v9p = sp->specificp;
	nsp = v9p->impl_specificp;
	npp = sp->config_procp->procp;
	errorp = sp->errorp;

	DBGERR( lprintf(sp->gid, "ss_error_condition() etype = %s\n", ep->type_namep); );

	switch (ep->type) {
	case ITDP:
		/*
		 * ITLB data parity: gated by the CERER enable bit from the
		 * error list; records the encoded error in the I-SFSR and
		 * posts the list's trap type with TPC set to the faulting PC.
		 */
		if (ep->op == IFETCH) {
			if (nsp->error.cerer & ss_error_list[ITDP].enable_bit) {
				nsp->error.isfsr = ss_error_list[ITDP].error_code;
				N_TPC(v9p, v9p->tl) = MMU_PC(sp->pc);
				tt = (sparcv9_trap_type_t)ss_error_list[ITDP].trap_type;
				v9p->post_precise_trap(sp, tt);
			}
		}
		return;
#if ERROR_INJECTION_FIXME
	case IRC:
		/* Integer register file, correctable. */
		nsp->error.status = NA_IRC_bit;
		nsp->error.addr = (I_REG_NUM(errorp->reg) | I_REG_WIN(v9p->cwp)
		    | I_SYND(IREG_FAKE_SYND_SINGLE));
		if (nsp->error.enabled & NA_CEEN) {
			tt = (sparcv9_trap_type_t)SS_trap_ECC_error;
			v9p->post_precise_trap(sp, tt);
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case IRU:
		/* Integer register file, uncorrectable. */
		nsp->error.status = NA_IRU_bit;
		nsp->error.addr = (I_REG_NUM(errorp->reg) | I_REG_WIN(v9p->cwp)
		    | I_SYND(IREG_FAKE_SYND_DOUBLE));
		if (nsp->error.enabled & NA_NCEEN) {
			tt = Sparcv9_trap_internal_processor_error;
			v9p->post_precise_trap(sp, tt);
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case FRC:
		/*
		 * FP register file, correctable.  NOTE(review): NULL is used
		 * as a zero odd-syndrome value here and in FRU below.
		 */
		nsp->error.status = NA_FRC_bit;
		nsp->error.addr = (F_REG_NUM(errorp->reg) |
		    EVEN_SYND(FREG_FAKE_SYND_SINGLE) | ODD_SYND(NULL));
		if (nsp->error.enabled & NA_CEEN) {
			tt = (sparcv9_trap_type_t)SS_trap_ECC_error;
			v9p->post_precise_trap(sp, tt);
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case FRU:
		/* FP register file, uncorrectable. */
		nsp->error.status = NA_FRU_bit;
		nsp->error.addr = (F_REG_NUM(errorp->reg) |
		    EVEN_SYND(FREG_FAKE_SYND_DOUBLE) | ODD_SYND(NULL));
		if (nsp->error.enabled & NA_NCEEN) {
			tt = Sparcv9_trap_internal_processor_error;
			v9p->post_precise_trap(sp, tt);
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case IMTU:
		nsp->error.status = (NA_PRIV_bit|NA_IMTU_bit);
		nsp->error.addr = TLB_INDEX(errorp->tlb_idx[IMTU_IDX]);
		errorp->tlb_idx[IMTU_IDX] = NULL;
		if (nsp->error.enabled & NA_NCEEN) {
			tt = Sparcv9_trap_data_access_error;
			v9p->post_precise_trap(sp, tt);
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case DMTU:
		/*
		 * NOTE(review): sets NA_IMTU_bit, identical to the IMTU case
		 * above; NA_DMTU_bit looks intended -- confirm before
		 * enabling this code.
		 */
		nsp->error.status = (NA_PRIV_bit|NA_IMTU_bit);
		nsp->error.addr = TLB_INDEX(errorp->tlb_idx[DMTU_IDX]);
		errorp->tlb_idx[DMTU_IDX] = NULL;
		if (nsp->error.enabled & NA_NCEEN) {
			tt = Sparcv9_trap_data_access_error;
			v9p->post_precise_trap(sp, tt);
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case DMDU:
		/*
		 * ASI-load path reports the TLB index and removes the
		 * config; memory-access path reports the VA and keeps it.
		 */
		if (ep->op == ASI_LD) {
			nsp->error.status = (NA_PRIV_bit|NA_DMDU_bit);
			nsp->error.addr = TLB_INDEX(errorp->tlb_idx[DMDU_IDX]);
			errorp->tlb_idx[DMDU_IDX] = NULL;
			if (nsp->error.enabled & NA_NCEEN) {
				tt = Sparcv9_trap_data_access_error;
				v9p->post_precise_trap(sp, tt);
			}
			if (remove_errconf(sp, ep) == NULL)
				clear_errflags(sp); else update_errflags(sp);
		} else {
			nsp->error.status = NA_DMDU_bit;
			nsp->error.status |= (ep->priv == V9_HyperPriv ||
			    ep->priv == V9_Priv) ? NA_PRIV_bit : 0;
			nsp->error.addr = MMU_VA(errorp->addr);
			if (nsp->error.enabled & NA_NCEEN) {
				tt = Sparcv9_trap_data_access_error;
				v9p->post_precise_trap(sp, tt);
			}
		}
		break;
	case DMSU:
		nsp->error.status = NA_DMSU_bit;
		nsp->error.status |= (ep->priv == V9_HyperPriv ||
		    ep->priv == V9_Priv) ? NA_PRIV_bit : 0;
		nsp->error.addr = MMU_VA(errorp->addr);
		if (nsp->error.enabled & NA_NCEEN) {
			tt = Sparcv9_trap_data_access_error;
			v9p->post_precise_trap(sp, tt);
		}
		break;
	case ITC:
		nsp->error.status = NA_ITC_bit;
		goto icache_error;
	case IDC:
		nsp->error.status = NA_IDC_bit;
		/* Common I-cache tail shared by ITC and IDC. */
	icache_error: nsp->error.status |= (ep->priv == V9_HyperPriv ||
		    ep->priv == V9_Priv) ? NA_PRIV_bit : 0;
		nsp->error.addr = L1_PA(errorp->addr);
		if (nsp->error.enabled & NA_CEEN) {
			tt = (sparcv9_trap_type_t)SS_trap_ECC_error;
			v9p->post_precise_trap(sp, tt);
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case DTC:
		nsp->error.status = NA_DTC_bit;
		goto dcache_error;
	case DDC:
		nsp->error.status = NA_DDC_bit;
		/* Common D-cache tail shared by DTC and DDC. */
	dcache_error: nsp->error.status |= (ep->priv == V9_HyperPriv ||
		    ep->priv == V9_Priv) ? NA_PRIV_bit : 0;
		nsp->error.addr = L1_PA(errorp->addr);
		if (nsp->error.enabled & NA_CEEN) {
			tt = (sparcv9_trap_type_t)SS_trap_ECC_error;
			v9p->post_precise_trap(sp, tt);
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case MAU:
		if (remove_errconf(sp, ep) == NULL) clear_errflags(sp);
		IMPL_WARNING(("Unimplemented Error Type: %s\n", ep->type_namep));
		break;
	case LDAC:
		/* L2 load hit, correctable. */
		bank = (errorp->addr >> 6) & 0x3;
		l2p = npp->l2p;
		tid = nsp->vcore_id;
		l2p->error_status[bank] = L2_LDAC_bit | L2_TID(tid) | L2_VEC_bit |
		    L2_FAKE_SYND_SINGLE | errorp->l2_write;
		l2p->error_address[bank] = L2_PA_LINE(errorp->addr);
		if ((nsp->error.enabled & NA_CEEN) &&
		    (l2p->error_enable[bank] & L2_CEEN)) {
			tt = (sparcv9_trap_type_t)SS_trap_ECC_error;
			v9p->post_precise_trap(sp, tt);
		}
		/* l2 corrected on partial store or atomic hit */
		if (errorp->l2_write) {
			npp->errorp->ldac_addr = NULL;
			ss_set_errcheck(npp);
		} else {
			int idx;	/* NOTE(review): shadows outer idx and is unused */
			/* l2 uncorrected on load/ifetch hit so make error proc-wide */
			npp->error_check = true;
			npp->errorp->ldac_addr = errorp->addr;
			/*
			 * NB: proper behavior is to flush all cpu xdcache's
			 * but there is no lock on the xdc so I didn't try it
			 */
			sp->xdcache_trans_flush_pending = true;
		}
		/* bit of a hack - some errorconf's aren't owned by sp's so free them */
		if (ep->npp) free(ep);
		else {
			if (remove_errconf(sp, ep) == NULL)
				clear_errflags(sp); else update_errflags(sp);
		}
		break;
	case LDWC:
		/*
		 * L2 writeback, correctable: the trap is steered to the
		 * strand named by the bank's L2_ERRORSTEER field.
		 */
		bank = (errorp->addr >> 6) & 0x3;
		l2p = npp->l2p;
		tid = nsp->vcore_id;
		l2p->error_status[bank] = L2_LDWC_bit | L2_TID(tid) | L2_VEC_bit |
		    L2_FAKE_SYND_SINGLE | L2_RW_bit;
		l2p->error_address[bank] = L2_PA_LINE(errorp->addr);
		tid = (l2p->control[bank] & L2_ERRORSTEER);
		v9p = npp->strand[STRANDID2IDX(npp, tid)];
		nsp = v9p->impl_specificp;
		esp = v9p->simp;
		if ((nsp->error.enabled & NA_CEEN) &&
		    (l2p->error_enable[bank] & L2_CEEN)) {
			tt = (sparcv9_trap_type_t)SS_trap_ECC_error;
			v9p->post_precise_trap(esp, tt);
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case LDRC:
	case LDSC:
		if (remove_errconf(sp, ep) == NULL) clear_errflags(sp);
		IMPL_WARNING(("Unimplemented Error Type: %s\n", ep->type_namep));
		break;
	case LDAU:
		/* L2 load hit, uncorrectable. */
		bank = (errorp->addr >> 6) & 0x3;
		l2p = npp->l2p;
		tid = nsp->vcore_id;
		l2p->error_status[bank] = L2_LDAU_bit | L2_TID(tid) | L2_VEU_bit |
		    L2_FAKE_SYND_DOUBLE | errorp->l2_write;
		l2p->error_address[bank] = L2_PA_LINE(errorp->addr);
		if (l2p->error_enable[bank] & L2_NCEEN) {
			nsp->error.status = NA_LDAU_bit;
			nsp->error.status |= (ep->priv == V9_HyperPriv ||
			    ep->priv == V9_Priv) ? NA_PRIV_bit : 0;
			nsp->error.addr = L1_PA(errorp->addr);
			if (nsp->error.enabled & NA_NCEEN) {
				/*
				 * NOTE(review): ep->type is LDAU here, so
				 * this comparison is always false; ep->op ==
				 * IFETCH was probably intended -- confirm.
				 */
				tt = (ep->type == IFETCH)
				    ? Sparcv9_trap_instruction_access_error
				    : Sparcv9_trap_data_access_error;
				v9p->post_precise_trap(sp, tt);
			}
		}
		/*
		 * store error info to cacheline for error handler diag access
		 * and to support direct-mapped mode displacement flushing
		 */
		/* index stores to a 32bit word and its ECC+rsvd bits */
		/*
		 * NOTE(review): '>>' binds tighter than '&', so in this and
		 * the two idx computations below the mask itself is shifted
		 * before being applied; '(addr & mask) >> shift' was
		 * probably intended -- confirm before enabling this code.
		 */
		idx = errorp->addr & (L2_WAY | L2_LINE | L2_BANK | L2_WORD) >> 2;
		/* put oddeven select bit low so data is in addr order */
		idx |= ((errorp->addr >> L2_ODDEVEN_SHIFT) & 1);
		l2p->diag_datap[idx] = ((0xabbadada << 7) | L2_FAKE_SYND_DOUBLE);
		/* index stores to a tag and its ECC+rsvd bits */
		idx = errorp->addr & (L2_WAY | L2_LINE | L2_BANK) >> 6;
		l2p->diag_tagp[idx] = (errorp->addr & L2_TAG) >> 12;
		/* index valid/dirty or alloc/used bits and parity */
		idx = errorp->addr & (L2_LINE | L2_BANK) >> 6;
		idx |= ((errorp->addr & L2_VDSEL) >> 10);
		l2p->diag_vuadp[idx] = 0xfff << 12; /* all lines valid/clean */
		/* uncorrectible error in l2 so make it proc-wide */
		npp->error_check = true;
		npp->errorp->ldau_addr = errorp->addr;
		sp->xdcache_trans_flush_pending = true;
		/* bit of a hack - some errorconf's aren't owned by sp's so free them */
		if (ep->npp) free(ep);
		else {
			if (remove_errconf(sp, ep) == NULL)
				clear_errflags(sp); else update_errflags(sp);
		}
		break;
	case LDWU:
		/* L2 writeback, uncorrectable: steered like LDWC. */
		bank = (errorp->addr >> 6) & 0x3;
		l2p = npp->l2p;
		tid = nsp->vcore_id;
		l2p->error_status[bank] = L2_LDWU_bit | L2_TID(tid) | L2_VEU_bit |
		    L2_FAKE_SYND_DOUBLE | L2_RW_bit;
		l2p->error_address[bank] = L2_PA_LINE(errorp->addr);
		if ((nsp->error.enabled & NA_NCEEN) &&
		    (l2p->error_enable[bank] & L2_NCEEN)) {
			tid = (l2p->control[bank] & L2_ERRORSTEER);
			v9p = npp->strand[STRANDID2IDX(npp, tid)];
			esp = v9p->simp;
			tt = (sparcv9_trap_type_t)N1_trap_data_error;
			v9p->post_precise_trap(esp, tt);
		}
		npp->error_check = true;
		npp->errorp->ldau_addr = errorp->addr;
		/* bit of a hack - some errorconf's aren't owned by sp's so free them */
		if (ep->npp) free(ep);
		else {
			if (remove_errconf(sp, ep) == NULL)
				clear_errflags(sp); else update_errflags(sp);
		}
		break;
	case LDRU:
	case LDSU:
	case LTC:
	case LVU:
	case LRU:
		if (remove_errconf(sp, ep) == NULL) clear_errflags(sp);
		IMPL_WARNING(("Unimplemented Error Type: %s\n", ep->type_namep));
		break;
	case DAC:
		/* DRAM access, correctable. */
		l2p = npp->l2p;
		bank = (errorp->addr >> 6) & 0x3;
		dbp = &(npp->mbankp[bank]);
		dbp->error_status = DRAM_DAC_bit | DRAM_FAKE_SYND_SINGLE;
		/* if store miss and L2 disabled then only set DRAM error status */
		/*
		 * NOTE(review): the loop below only exits itself -- its
		 * result is unused, so the L2 status is set in every case;
		 * looks like an unfinished early-out.
		 */
		if (ep->op == ST && !errorp->partial_st) {
			for (bank=0; bank<npp->num_l2banks; bank++) {
				if (l2p->control[bank] & L2_DIS)
					break;
			}
		}
		bank = (errorp->addr >> 6) & 0x3;
		tid = nsp->vcore_id;
		l2p->error_status[bank] = L2_DAC_bit | L2_TID(tid) | L2_VEC_bit |
		    errorp->l2_write;
		l2p->error_address[bank] = L2_PA_LINE(errorp->addr);
		if ((nsp->error.enabled & NA_CEEN) &&
		    (l2p->error_enable[bank] & L2_CEEN)) {
			/*
			 * partial stores and odd-numbered cache lines
			 * redirected to errorsteer thread
			 */
			if (errorp->partial_st || (errorp->addr & 0x40)) {
				tid = (l2p->control[bank] & L2_ERRORSTEER);
				v9p = npp->strand[STRANDID2IDX(npp, tid)];
				esp = v9p->simp;
				l2p->error_status[bank] &= ~(errorp->l2_write);
				tt = (sparcv9_trap_type_t)SS_trap_ECC_error;
				v9p->post_precise_trap(esp, tt);
			} else {
				tt = (sparcv9_trap_type_t)SS_trap_ECC_error;
				v9p->post_precise_trap(sp, tt);
			}
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case DSC:
		/* DSC shares the DAU handling below. */
	case DAU:
		/* DRAM access, uncorrectable. */
		l2p = npp->l2p;
		bank = (errorp->addr >> 6) & 0x3;
		dbp = &(npp->mbankp[bank]);
		dbp->error_status = DRAM_DAU_bit | DRAM_FAKE_SYND_DOUBLE;
		tid = nsp->vcore_id;
		l2p->error_status[bank] = L2_DAU_bit | L2_TID(tid) | L2_VEU_bit |
		    errorp->l2_write;
		l2p->error_address[bank] = L2_PA_LINE(errorp->addr);
		if (l2p->error_enable[bank] & L2_NCEEN) {
			nsp->error.status = NA_LDAU_bit; /* as per Table 12-4 of PRM */
			nsp->error.status |= (ep->priv == V9_HyperPriv ||
			    ep->priv == V9_Priv) ? NA_PRIV_bit : 0;
			/*
			 * partial stores and odd-numbered cache lines
			 * redirected to errorsteer thread
			 */
			if (errorp->partial_st || (errorp->addr & 0x40)) {
				tid = (l2p->control[bank] & L2_ERRORSTEER);
				v9p = npp->strand[STRANDID2IDX(npp, tid)];
				esp = v9p->simp;
				l2p->error_status[bank] &= ~(errorp->l2_write);
				/*
				 * set address to non-requested 16B block
				 * within the same 64B cache line
				 */
				if (!errorp->partial_st)
					errorp->addr = (errorp->addr & ~0x30) |
					    (((errorp->addr & 0x30) + 0x10) % 0x40);
				nsp->error.addr = L1_PA(errorp->addr);
				tt = (sparcv9_trap_type_t)N1_trap_data_error;
				v9p->post_precise_trap(esp, tt);
				/* NOTE(review): exits without removing the errconf below */
				break;
			}
			nsp->error.addr = L1_PA(errorp->addr);
			if (nsp->error.enabled & NA_NCEEN) {
				/* NOTE(review): ep->type is DSC/DAU here; see LDAU note */
				tt = (ep->type == IFETCH)
				    ? Sparcv9_trap_instruction_access_error
				    : Sparcv9_trap_data_access_error;
				v9p->post_precise_trap(sp, tt);
			}
		}
		if (remove_errconf(sp, ep) == NULL)
			clear_errflags(sp); else update_errflags(sp);
		break;
	case DSU:
	case DBU9:
	case DRAM:
		if (remove_errconf(sp, ep) == NULL) clear_errflags(sp);
		IMPL_WARNING(("Unimplemented Error Type: %s\n", ep->type_namep));
		break;
	default:
		if (remove_errconf(sp, ep) == NULL) clear_errflags(sp);
		EXEC_WARNING(("Unspecified Error Type: %s\n", ep->type_namep));
		break;
#endif /* ERROR_INJECTION_FIXME */
	}
	FIXME_WARNING(("ss_error_condition() is not implemented!"));
}
#endif /* ERROR_INJECTION */