* ========== Copyright Header Begin ==========================================
* OpenSPARC T2 Processor File: mmu_hptrap.s
* Copyright (C) 1995-2007 Sun Microsystems, Inc. All Rights Reserved
* 4150 Network Circle, Santa Clara, California 95054, U.S.A.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 2 of the License.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
* For the avoidance of doubt, and except that if any non-GPL license
* choice is available it will apply instead, Sun elects to use only
* the General Public License version 2 (GPLv2) at this time for any
* software where a choice of GPL license versions is made
* available with the language indicating that GPLv2 or any later version
* may be used, or where a choice of which version of the GPL is applied is
* otherwise unspecified.
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
* ========== Copyright Header End ============================================
!========================================================================
! .HTRAPS: hypervisor trap vector section, placed at HPTRAP_TEXT_PA.
!
! NOTE(review): this listing is a condensed excerpt -- most vector entries
! show only a representative instruction, so the comments below describe
! visible intent only.  The recurring pattern
!   setx <handler_label>, %g1, %g2
! forms the 64-bit address of an out-of-line handler in %g2 (%g1 is setx
! scratch); the jump that presumably follows is not shown -- TODO confirm
! against the full source.  Many vectors fall back to ext_trap_0x8_begin
! as a common/default handler.  Entries of the form
!   ldxa [%g1] 0x54, %g2  ! DSFAR
! read an MMU register via ASI 0x54 (per the original "DSFAR" comments,
! the D-SFAR fault address) -- the setup of %g1 is not visible here.
!========================================================================
SECTION .HTRAPS TEXT_VA=HPTRAP_TEXT_PA
! trap 0x8 area: vector to the shared external handler
setx ext_trap_0x8_begin, %g1, %g2
! trap 0x9: custom handler, then external handler
setx custom_trap_0x9_handler, %g1, %g2
setx ext_trap_0x9_begin, %g1, %g2
setx ext_trap_0x8_begin, %g1, %g2
! trap 0xc
setx custom_trap_0xc_handler, %g1, %g2
Instruction_address_range:
setx ext_trap_0x8_begin, %g1, %g2
! trap 0xe
setx custom_trap_0xe_handler, %g1, %g2
setx ext_trap_0x8_begin, %g1, %g2
ldxa [%g1] 0x54, %g2 ! DSFAR
ldxa [%g1] 0x54, %g2 ! DSFAR
ldxa [%g1] 0x54, %g2 ! DSFAR
ldxa [%g1] 0x54, %g2 ! DSFAR
! traps 0x1b, 0x2a, 0x2b, 0x2d, 0x31: mix of custom + external handlers
setx ext_trap_0x1b_begin, %g1, %g2
setx custom_trap_0x2a_handler, %g1, %g2
setx ext_trap_0x2a_begin, %g1, %g2
setx custom_trap_0x2b_handler, %g1, %g2
setx ext_trap_0x2b_begin, %g1, %g2
setx custom_trap_0x2d_handler, %g1, %g2
ldxa [%g1] 0x54, %g2 ! DSFAR
setx custom_trap_0x31_handler, %g1, %g2
setx ext_trap_0x31_begin, %g1, %g2
ldxa [%g1] 0x54, %g2 ! DSFAR
! FP load/store misalignment vectors: capture the fault address
LDDF_mem_address_not_aligned:
ldxa [%g1] 0x54, %g2 ! DSFAR
STDF_mem_address_not_aligned:
ldxa [%g1] 0x54, %g2 ! DSFAR
ldxa [%g1] 0x54, %g2 ! DSFAR
LDQF_mem_address_not_aligned:
ldxa [%g1] 0x54, %g2 ! DSFAR
STQF_mem_address_not_aligned:
ldxa [%g1] 0x54, %g2 ! DSFAR
ldxa [%g1] 0x54, %g2 ! DSFAR
! traps 0x3e/0x3f: presumably instruction/data real translation miss
! (UA2005 numbering) -- TODO confirm
setx ext_trap_0x3e_begin, %g1, %g2
setx ext_trap_0x3f_begin, %g1, %g2
ldxa [%g1] 0x54, %g2 ! DSFAR
ldxa [%g1] 0x54, %g2 ! DSFAR
! traps 0x64/0x68: presumably fast I/D MMU miss -- TODO confirm
setx custom_trap_0x64_handler, %g1, %g2
setx ext_trap_0x64_begin, %g1, %g2
setx custom_trap_0x68_handler, %g1, %g2
setx ext_trap_0x68_begin, %g1, %g2
fast_data_access_protection:
ldxa [%g1] 0x54, %g2 ! DSFAR
setx custom_trap_0x71_handler, %g1, %g2
setx custom_trap_0x72_handler, %g1, %g2
instruction_VA_watchpoint:
ldxa [%g1] 0x54, %g2 ! DSFAR
#ifdef MMU247 wrpr 1, %tl !Comment out for now..because handler is too big.
setx ext_trap_0x8_begin, %g1, %g2
ta HP_BAD_TRAP ! This is for trap 81, since this one is 1 instruction too long
! Comment out 1 align since the previous trap handler is one instruction too long
! For all demaps, assume register %i7 = {VA[63:13],13'h0}
! Demap vectors: stxa to the demap ASIs; the demap-type/VA encoding in
! %g1 is set up by code not visible in this excerpt.
stxa %g0, [%g1] ASI_IMMU_DEMAP
stxa %g0, [%g1] ASI_IMMU_DEMAP
stxa %g0, [%g1] ASI_IMMU_DEMAP
stxa %g0, [%g1] ASI_IMMU_DEMAP
stxa %g0, [%g1] ASI_IMMU_DEMAP
stxa %g0, [%g1] ASI_DMMU_DEMAP
stxa %g0, [%g1] ASI_DMMU_DEMAP
stxa %g0, [%g1] ASI_DMMU_DEMAP
stxa %g0, [%g1] ASI_DMMU_DEMAP
stxa %g0, [%g1] ASI_DMMU_DEMAP
stxa %g0, [%g1] ASI_DMMU_DEMAP
! Partition-ID read-modify-write (the modify step is not shown here)
mov ASI_PARTITION_ID_VAL, %g1
ldxa [%g1] ASI_PARTITION_ID, %g2
stxa %g2, [%g1] ASI_PARTITION_ID
!setx ext_trap_0x192_begin, %g1, %g2
delay_loop: /* %i7 = loop count */
! Semaphore lock/release entry points (bodies in .HPTRAPS_EXT_SECT)
setx ext_trap_sem_lock_begin, %g1, %g2
setx ext_trap_sem_release_begin, %g1, %g2
setx sem_data, %g5, %g4 ! %g4 = sem_data
setx sem_data, %g5, %g4 ! %g4 = sem_data
! %i7 = target address for supervisor code
! %i7 = target address for supervisor code
! Demap-all entry points; %i7 carries the page VA per the note above
hptrap_I_demap_all_pages:
stxa %g0, [%g1] ASI_IMMU_DEMAP
hptrap_I_demap_all_rpages:
stxa %g0, [%g1] ASI_IMMU_DEMAP
hptrap_D_demap_all_pages:
stxa %g0, [%g1] ASI_DMMU_DEMAP
hptrap_D_demap_all_rpages:
stxa %g0, [%g1] ASI_DMMU_DEMAP
! TSB-pointer access and TLB preload service vectors
setx ext_trap_access_itsb_ptr, %g1, %g2
setx ext_trap_access_dtsb_ptr, %g1, %g2
setx custom_trap_load_idata_in, %g1, %g2
setx ext_trap_load_idata_in, %g1, %g2
setx custom_trap_load_ddata_in, %g1, %g2
setx ext_trap_load_ddata_in, %g1, %g2
hptrap_read_idata_access:
setx custom_trap_read_idata_access, %g1, %g2
! We assume the index is in %i7
hptrap_read_ddata_access:
setx custom_trap_read_ddata_access, %g1, %g2
! We assume the index is in %i7
setx custom_user_trap, %g1, %g2
hptrap_load_idata_access:
setx custom_trap_load_idata_access, %g1, %g2
! We assume the index is in %i7
setx ext_trap_load_idata_access, %g1, %g2
hptrap_load_ddata_access:
setx custom_trap_load_ddata_access, %g1, %g2
! We assume the index is in %i7
setx ext_trap_load_ddata_access, %g1, %g2
! We assume the VA is in %i7
setx ext_trap_toggle_hwtw_demap, %g1, %g2
! We assume the index is in %i7
! Diagnostic TLB tag reads by index (%i7 = index, per comments above)
ldxa [%i7] MMU_ASI_I_TAG_READ_REG, %g1
! We assume the index is in %i7
ldxa [%i7] MMU_ASI_D_TAG_READ_REG, %g1
!******************************************************************************************
! .HPTRAPS_EXT_SECT: out-of-line bodies for the trap vectors above.
! The vectors in .HTRAPS are size-limited, so the real work happens in the
! handlers exported below and reached via the setx/jump stubs.
SECTION .HPTRAPS_EXT_SECT TEXT_VA=HPTRAPS_EXT_TEXT_PA, DATA_VA=HPTRAPS_EXT_DATA_PA
.global ext_trap_0x2a_begin
.global ext_trap_0x2b_begin
.global ext_trap_0x3e_begin
.global ext_trap_0x3f_begin
.global ext_trap_0x64_begin
.global ext_trap_0x68_begin
.global ext_trap_0x192_begin
.global ext_trap_sem_lock_begin
.global ext_trap_sem_release_begin
.global ext_trap_access_itsb_ptr
.global ext_trap_access_dtsb_ptr
.global ext_trap_0x8_begin
.global ext_trap_0x9_begin
.global ext_trap_0x1b_begin
.global ext_trap_0x31_begin
.global function_tsb_ptr_calc
.global ext_trap_load_idata_in
.global ext_trap_load_ddata_in
.global ext_trap_load_idata_access
.global ext_trap_load_ddata_access
.global ext_trap_toggle_hwtw_demap
ext_trap_toggle_hwtw_demap:
! Read-modify-write all eight TSB config registers (zero-context 0-3 and
! non-zero-context 0-3), then conditionally demap the I- and D-TLBs.
! NOTE(review): the modify step between each ldxa/stxa pair (presumably
! toggling the HWTW-enable bit, per the handler name) and the compare
! feeding the final be,a are not visible in this excerpt -- TODO confirm.
or %g0, MMU_ASI_TSB_CONFIG_REG, %g1
ldxa [MMU_ASI_Z_CTX_TSB_CONFIG_0_ADDR] %asi, %g2
stxa %g2, [MMU_ASI_Z_CTX_TSB_CONFIG_0_ADDR] %asi
ldxa [MMU_ASI_Z_CTX_TSB_CONFIG_1_ADDR] %asi, %g2
stxa %g2, [MMU_ASI_Z_CTX_TSB_CONFIG_1_ADDR] %asi
ldxa [MMU_ASI_Z_CTX_TSB_CONFIG_2_ADDR] %asi, %g2
stxa %g2, [MMU_ASI_Z_CTX_TSB_CONFIG_2_ADDR] %asi
ldxa [MMU_ASI_Z_CTX_TSB_CONFIG_3_ADDR] %asi, %g2
stxa %g2, [MMU_ASI_Z_CTX_TSB_CONFIG_3_ADDR] %asi
ldxa [MMU_ASI_NZ_CTX_TSB_CONFIG_0_ADDR] %asi, %g2
stxa %g2, [MMU_ASI_NZ_CTX_TSB_CONFIG_0_ADDR] %asi
ldxa [MMU_ASI_NZ_CTX_TSB_CONFIG_1_ADDR] %asi, %g2
stxa %g2, [MMU_ASI_NZ_CTX_TSB_CONFIG_1_ADDR] %asi
ldxa [MMU_ASI_NZ_CTX_TSB_CONFIG_2_ADDR] %asi, %g2
stxa %g2, [MMU_ASI_NZ_CTX_TSB_CONFIG_2_ADDR] %asi
ldxa [MMU_ASI_NZ_CTX_TSB_CONFIG_3_ADDR] %asi, %g2
stxa %g2, [MMU_ASI_NZ_CTX_TSB_CONFIG_3_ADDR] %asi
! Skip the demaps when the (unseen) compare says no demap is needed;
! the annulled delay slot holds the I-side demap.
be,a %xcc, ext_trap_toggle_hwtw_no_demap
stxa %g0, [%g3] MMU_ASI_I_DEMAP
stxa %g0, [%g3] MMU_ASI_D_DEMAP
ext_trap_toggle_hwtw_no_demap:
!****************************************************************************************
ext_trap_access_itsb_ptr:
! we use %l1 to determine which register to access
! Branch ladder selecting one of the ITSB pointer registers (the _58/_60/
! _68 suffixes presumably name the ASI VAs 0x58/0x60/0x68) or all of them.
! NOTE(review): the compares and the annulled delay-slot accesses are not
! visible in this excerpt; only the ladder skeleton is shown.
bne,a %xcc, ext_trap_access_itsb_ptr_58
ext_trap_access_itsb_ptr_58:
bne,a %xcc, ext_trap_access_itsb_ptr_60
ext_trap_access_itsb_ptr_60:
bne,a %xcc, ext_trap_access_itsb_ptr_68
ext_trap_access_itsb_ptr_68:
bne,a %xcc, ext_trap_access_itsb_ptr_all
ext_trap_access_itsb_ptr_all:
!****************************************************************************************
ext_trap_access_dtsb_ptr:
! we use %l1 to determine which register to access
! D-side twin of ext_trap_access_itsb_ptr: selects one of the DTSB pointer
! registers (_78/_80/_88, presumably ASI VAs 0x78/0x80/0x88) or all.
! NOTE(review): compares and delay slots not visible in this excerpt.
bne,a %xcc, ext_trap_access_dtsb_ptr_78
ext_trap_access_dtsb_ptr_78:
bne,a %xcc, ext_trap_access_dtsb_ptr_80
ext_trap_access_dtsb_ptr_80:
bne,a %xcc, ext_trap_access_dtsb_ptr_88
ext_trap_access_dtsb_ptr_88:
bne,a %xcc, ext_trap_access_dtsb_ptr_all
ext_trap_access_dtsb_ptr_all:
!****************************************************************************************
! Semaphore acquire (body of ext_trap_sem_lock_begin; the label itself is
! outside this excerpt).  Spin with casxa until the unlocked sentinel
! 0xfff at sem_data is replaced by this strand's id.
or %i7, %g0, %g7 ! save %i7
wr %g0, ASI_CORE_ID, %asi
ldxa [ASI_CORE_ID_VA] %asi, %g1 ! %g1 = core id + tid
or %g0, 0x0fff, %g3 ! %g3 = compare value
setx sem_data, %g5, %g4 ! %g4 = sem_data
! casxa: if [sem_data] == %g3 (0xfff = unlocked) store %g2, old value
! returned in %g2.  NOTE(review): the move of the strand id into %g2 and
! the compare feeding bne are not visible in this excerpt.
casxa [%g4] 0x80, %g3, %g2
bne,a %xcc, ext_trap_sem_lock_loop
or %g7, %g0, %i7 ! restore %i7
ext_trap_sem_release_begin:
! Semaphore release: swap the 0xfff sentinel back in, conditioned on the
! current holder being this strand (%g1 = own id as the compare value).
wr %g0, ASI_CORE_ID, %asi
ldxa [ASI_CORE_ID_VA] %asi, %g1 ! %g1 = core id + tid
or %g0, 0x0fff, %g3 ! %g3 = compare value
setx sem_data, %g5, %g4 ! %g4 = sem_data
casxa [%g4] 0x80, %g1, %g3
!****************************************************************************************
!****************************************************************************************
!****************************************************************************************
! Trap 0x3e handler body (presumably instruction_real_translation_miss,
! per UA2005 numbering -- TODO confirm): software walk of the ITSBs.
! For each of the 8 TSB config registers (4 zero-ctx + 4 non-zero-ctx) it
! reads the hardware TSB pointer, fetches the TTE pair, compares tags, and
! on a hit optionally translates RA->PA via the real-range registers
! before writing the TTE to the ITLB data-in register.
! NOTE(review): this excerpt omits many compares, delay slots and the
! loop-back branches; comments describe visible instructions only.
save ! Save %l2 and %l4 to be used in code below
ldxa [%g0] 0x50, %g1 ! %g1 = immu tag target
! Mask out context before comparing tags
andn %g1, %g3, %g1 ! %g1 = masked tag target
! Randomly pick zero or non-zero ctx to start with. We use context[0] as random seed
! context[0] = 0 ==> zero ctx is picked first
! context[0] = 1 ==> non-zero ctx is picked first
! The other consideration is we need to write the correct context (zero or non-zero)
! to the tag access register in order to get the correct tsb pointer. If we pick the
! zero context first, then we need to program the tag access register with a zero
! context. Otherwise, we will program the tag access register with a non-zero context.
! Case 1: zero context is picked first
! We read tsb pointers for the four zero context tsb config regs.
! If no entry was found, we invert the context in tag access reg
! Now we read the four non-zero context tsb config regs
! Case 2: non-zero context is picked first
! We invert the context in tag access reg
! Read tsb pointers for the four non-zero context tsb config regs
! If no entry was found, clear the context in tag access reg
! Read tsb pointers for the four zero context tsb config regs
setx tsb_addresses_zero, %g2, %l4
ldxa [%g5] 0x50, %l2 ! %l2 = tag access
and %l2, %g4, %l2 ! %l2 = tag access with zero context
! We use bit VA[28] as random selection between zero and non-zero context
brz,a %g3, trap_0x3e_zero_ctx
add %l4, 72, %l4 ! %l4 = tsb_addresses_non_zero
or %l2, 1, %l2 ! %l2 = tag access with non-zero context
stxa %l2, [%g5] 0x50 ! update tag access with proper context
ldx [%l4], %g4 ! %g4 = va to tsb config reg
or %g0, 0x50, %g2 ! %g2 = va to tsb pointer 0
! %g1=masked tag, %g2=va to tsb ptr, %g4=va to tsb config
! -- per-TSB loop body: fetch pointer, load TTE pair, compare tag --
ldxa [%g2] 0x54, %g3 ! %g3 = dtsb pointer
ldda [%g3] ASI_NUCLEUS_QUAD_LDD, %g6 ! %g6 = TTE_TAG, %g7 = TTE_DATA
andn %g6, %g3, %g3 ! %g3 has the masked tag
! Need to mask out va[27:22] if page size is 5
ldxa [%g4] 0x54, %g5 ! %g5 = TSB_CONFIG
and %g5, 0x70, %g4 ! %g4 = PSIZE
! %g1=tag target, %g2=va to tsb ptr, %g3=masked tte tag
! %g4=page size, %g5=tsb config reg
bne,a %xcc, trap_0x3e_next_tsb_ptr
brz,a %g3, trap_0x3e_next_tsb_ptr
be,a %xcc, trap_0x3e_skip_ra
! look up realrange registers
! RA->PA translation: find the real-range register that covers the RA and
! add its physical offset.  NOTE(review): the compares feeding the three
! be,a's (presumably a page-size dispatch) are not shown.
setx trap_ra_mask, %g3, %g6
be,a trap_0x3e_get_rr_limits
be,a trap_0x3e_get_rr_limits
be,a trap_0x3e_get_rr_limits
ldda [%g6] 0x24, %g2 ! %g2 = RA_max mask, %g3 = RA_min mask
sllx %g5, 13, %g5 ! %g5 = RA
or %g5, %g2, %g2 ! %g2 = RA_max
and %g5, %g3, %g3 ! %g3 = RA_min
or %g0, 0x108, %g1 ! %g1 points to REAL_RANGE_REG
bl,a %xcc, trap_0x3e_get_rr
ldxa [%g1] 0x52, %g5 ! %g5 = REAL_RANGE
srlx %g5, 63, %g6 ! Check for the enable bit
brz,a %g6, trap_0x3e_next_rr
sllx %g5, 10, %g6 ! %g6 = RR left shift by 10
sllx %g6, 13, %g6 ! %g6 = RA_max
bl,a %xcc, trap_0x3e_next_rr
srlx %g6, 24, %g6 ! %g6 = RA_min
bg,a %xcc, trap_0x3e_next_rr
ldxa [%g1] 0x52, %g1 ! %g1 = physical offset
! Hit: load the TLB, then restore the tag access context
stxa %l2, [%g4] 0x50 ! write non-zero context to tag access
stxa %g7, [%g5] 0x54 ! write to data in
! Miss path: advance to the next TSB pointer / config register pair
bne,a %xcc, trap_0x3e_next_tsb_va
or %g0, 0x50, %g2 ! Need to wrap it back to 0x50 (itsb ptr 0)
stxa %l2, [%g5] 0x50 ! write inverted context to tag access
add %l4, 8, %l4 ! go to next VA for tsb config reg
cmp %g4, 0x0ff ! we have exhausted all 8 regs if value is 0xff
bne,a %xcc, trap_0x3e_next_tte
!****************************************************************************************
! Trap 0x3f handler body (presumably data_real_translation_miss, per
! UA2005 numbering -- TODO confirm): D-side twin of the 0x3e walk above.
! Same structure, but uses the DMMU tag target (ASI VA 0x58), DTSB
! pointers (base VA 0x70) and DTLB data-in (VA 0x5c).
! NOTE(review): this excerpt omits many compares, delay slots and the
! loop-back branches; comments describe visible instructions only.
save ! Save %l2 and %l4 to be used in code below
ldxa [%g0] 0x58, %g1 ! %g1 = dmmu tag target
! Mask out context before comparing tags
andn %g1, %g3, %g1 ! %g1 = masked tag target
! Randomly pick zero or non-zero ctx to start with. We use context[0] as random seed
! context[0] = 0 ==> zero ctx is picked first
! context[0] = 1 ==> non-zero ctx is picked first
! The other consideration is we need to write the correct context (zero or non-zero)
! to the tag access register in order to get the correct tsb pointer. For a real miss
! the context is zero in the tag access register.
! Case 1: zero context is picked first
! We read tsb pointers for the four zero context tsb config regs.
! If no entry was found, we invert the context in tag access reg
! Now we read the four non-zero context tsb config regs
! Case 2: non-zero context is picked first
! We invert the context in tag access reg
! Read tsb pointers for the four non-zero context tsb config regs
! If no entry was found, clear the context in tag access reg
! Read tsb pointers for the four zero context tsb config regs
setx tsb_addresses_zero, %g2, %l4
ldxa [%g5] 0x58, %l2 ! %l2 = tag access
and %l2, %g4, %l2 ! %l2 = tag access with zero context
! We use bit VA[28] as random selection between zero and non-zero context
brz,a %g3, trap_0x3f_zero_ctx
add %l4, 72, %l4 ! %l4 = tsb_addresses_non_zero
stxa %l2, [%g5] 0x58 ! update tag access with proper context
ldx [%l4], %g4 ! %g4 = va to tsb config reg
or %g0, 0x70, %g2 ! %g2 = va to tsb pointer 0
! %g1=masked tag, %g2=va to tsb ptr, %g4=va to tsb config
! -- per-TSB loop body: fetch pointer, load TTE pair, compare tag --
ldxa [%g2] 0x54, %g3 ! %g3 = dtsb pointer
ldda [%g3] ASI_NUCLEUS_QUAD_LDD, %g6 ! %g6 = TTE_TAG, %g7 = TTE_DATA
andn %g6, %g3, %g3 ! %g3 has the tte tag with zero context
! Need to mask out va[27:22] if page size is 5
ldxa [%g4] 0x54, %g5 ! %g5 = TSB_CONFIG
and %g5, 0x70, %g4 ! %g4 = PSIZE
! %g1=tag target, %g2=va to tsb ptr, %g3=masked tte tag
! %g4=page size, %g5=tsb config reg
bne,a %xcc, trap_0x3f_next_tsb_ptr
brz,a %g3, trap_0x3f_next_tsb_ptr
be,a %xcc, trap_0x3f_skip_ra
! look up realrange registers
! RA->PA translation via real-range registers (compares feeding the three
! be,a's not shown in this excerpt)
setx trap_ra_mask, %g3, %g6
be,a trap_0x3f_get_rr_limits
be,a trap_0x3f_get_rr_limits
be,a trap_0x3f_get_rr_limits
ldda [%g6] 0x24, %g2 ! %g2 = RA_max mask, %g3 = RA_min mask
sllx %g5, 13, %g5 ! %g5 = RA
or %g5, %g2, %g2 ! %g2 = RA_max
and %g5, %g3, %g3 ! %g3 = RA_min
or %g0, 0x108, %g1 ! %g1 points to REAL_RANGE_REG
bl,a %xcc, trap_0x3f_get_rr
ldxa [%g1] 0x52, %g5 ! %g5 = REAL_RANGE
srlx %g5, 63, %g6 ! Check for the enable bit
brz,a %g6, trap_0x3f_next_rr
sllx %g5, 10, %g6 ! %g6 = RR left shift by 10
sllx %g6, 13, %g6 ! %g6 = RA_max
bl,a %xcc, trap_0x3f_next_rr
srlx %g6, 24, %g6 ! %g6 = RA_min
bg,a %xcc, trap_0x3f_next_rr
ldxa [%g1] 0x52, %g1 ! %g1 = physical offset
! Hit: load the DTLB, then restore the tag access context
stxa %l2, [%g4] 0x58 ! write non-zero context to tag access
stxa %g7, [%g5] 0x5c ! write to data in
! Miss path: advance to the next TSB pointer / config register pair
bne,a %xcc, trap_0x3f_next_tsb_va
or %g0, 0x70, %g2 ! Need to wrap it back to 0x70 (dtsb ptr 0)
stxa %l2, [%g5] 0x58 ! write inverted context to tag access
add %l4, 8, %l4 ! go to next VA for tsb config reg
cmp %g4, 0x0ff ! we have exhausted all 8 regs if value is 0xff
bne,a %xcc, trap_0x3f_next_tte
! Instead of going to bad trap, we just go ahead and skip this ld/st instruction
!****************************************************************************************
! Trap 0x64 handler body (ext_trap_0x64_begin; presumably fast
! instruction-access MMU miss -- TODO confirm).  Walks the ITSBs using the
! hardware TSB pointer registers: pick zero/non-zero-context config regs
! by the requested context, fetch the TTE, check tag + valid, optionally
! translate RA->PA, fix up the tag-access context to avoid multi-hits,
! and load the TTE into the ITLB.
! NOTE(review): this excerpt omits several compares and delay slots;
! comments describe visible instructions only.
ldxa [%g0] MMU_ASI_ITSB_TAG_TARGET_REG, %g1 ! %g1 = tag target
or %g0, MMU_ASI_ITSB_PTR_0_ADDR, %g2 ! %g2 = tsb ptr addr
srlx %g1, 48, %g3 ! %g3 = req_ctx
! ctx==0 -> zero-context config regs; else non-zero-context config regs
brz,a %g3, trap_0x64_load_tsb_config
or %g0, MMU_ASI_Z_CTX_TSB_CONFIG_0_ADDR, %g3
or %g0, MMU_ASI_NZ_CTX_TSB_CONFIG_0_ADDR, %g3
trap_0x64_load_tsb_config:
ldxa [%g3] MMU_ASI_TSB_CONFIG_REG, %g4 ! %g4 = tsb config
stxa %g4, [%g0] MMU_ASI_HYP_SCRATCHPAD_REG
ldxa [%g2] MMU_ASI_TSB_PTR_REG, %g5 ! %g5 = tsb ptr
ldda [%g5] ASI_NUCLEUS_QUAD_LDD, %g6 ! %g6=TTE_TAG, %g7=TTE_DATA
! If page size is 256m, mask out va[27:22]
ldxa [%g3] MMU_ASI_TSB_CONFIG_REG, %g4 ! %g4 = tsb config
bne,a %xcc, trap_0x64_not_256m_page
cmp %g6, %o1 ! Compare TAG
bne,a %xcc, trap_0x64_next_ptr
srlx %g7, 63, %g5 ! Check Valid bit
brz,a %g5, trap_0x64_next_ptr
! At this point, we have got the TTE to be loaded into the TLB.
! We just need to figure out what physical offset to use.
! Register %g4 and %g7 are restricted.
setx trap_ra_mask, %g5, %g6
ldxa [%g0] 0x4f, %g5 ! %g5 = TSB_CONFIG
and %g5, 0x100, %g3 ! %g3 has the ranotpa bit
brz,a %g3, trap_0x64_skip_ra
and %g5, 0x70, %g3 ! %g3 = PSIZE
! Page-size dispatch into the RA-mask table (compares not shown)
be,a trap_0x64_get_rr_limits
be,a trap_0x64_get_rr_limits
be,a trap_0x64_get_rr_limits
ldda [%g6] 0x24, %g2 ! %g2 = RA_max mask, %g3 = RA_min mask
sllx %g5, 13, %g5 ! %g5 = RA
or %g5, %g2, %g2 ! %g2 = RA_max
and %g5, %g3, %g3 ! %g3 = RA_min
or %g0, 0x108, %g1 ! %g1 points to REAL_RANGE_REG
bl,a %xcc, trap_0x64_get_rr
ldxa [%g1] 0x52, %g5 ! %g5 = REAL_RANGE
srlx %g5, 63, %g6 ! Check for the enable bit
brz,a %g6, trap_0x64_next_rr
sllx %g5, 10, %g6 ! %g6 = RR left shift by 10
sllx %g6, 13, %g6 ! %g6 = RA_max
bl,a %xcc, trap_0x64_next_rr
srlx %g6, 24, %g6 ! %g6 = RA_min
bg,a %xcc, trap_0x64_next_rr
ldxa [%g1] 0x52, %g1 ! %g1 = physical offset
! To prevent multiple hits as a result of loading the itlb with entries with
! different contexts, we need to look at the use_context bits in the TSB config.
! The pseudo algorithm is
! We are assuming the context in the tte is the primary context 0
! if (not nucleus context) {
! store pctx_1 in tag_access reg
ldxa [%g0] 0x50, %l1 ! %l1 = itlb tag target
srlx %l1, 48, %l1 ! %l1 = req_ctx
brz,a %l1, trap_0x64_not_ctx_1
ldxa [%g0] 0x4f, %g5 ! %g5 = TSB_CONFIG
bne,a %xcc, trap_0x64_not_ctx_1
ldxa [%l1] 0x50, %l1 ! %l1 = itlb tag access
srlx %l0, 51, %l0 ! %l0 = context mask
andn %l1, %l0, %l1 ! %l1 = tag access with zero context
ldxa [%l2] 0x21, %l2 ! %l2 = pctx_1
or %l1, %l2, %l1 ! %l1 = tag access with pctx_1
stxa %l1, [%l3] 0x50 ! update tag access with pctx_1
stxa %g7, [%g0] MMU_ASI_I_DATA_IN_REG
! Loop back while TSB config registers remain; on exhaustion fall back to
! the shared trap-0x8 handler.
bl,a %xcc, trap_0x64_load_tsb_config
setx ext_trap_0x8_begin, %g2, %g1
!****************************************************************************************
! Trap 0x68 handler body (ext_trap_0x68_begin; presumably fast
! data-access MMU miss -- TODO confirm).  D-side twin of trap_0x64, with
! an extra step: before loading the DTLB it rewrites the tag-access
! context according to the use_context_0/1 bits so the loaded entry
! cannot multi-hit against a different context.
! NOTE(review): this excerpt omits several compares and delay slots;
! comments describe visible instructions only.
ldxa [%g0] MMU_ASI_DTSB_TAG_TARGET_REG, %g1 ! %g1 = dmmu_tag_target
or %g0, MMU_ASI_DTSB_PTR_0_ADDR, %g2 ! %g2 points to dtsb_ptr_0
srlx %g1, 48, %g3 ! %g3 = incoming ctx
brz,a %g3, trap_0x68_load_tsb_config
or %g0, MMU_ASI_Z_CTX_TSB_CONFIG_0_ADDR, %g3
or %g0, MMU_ASI_NZ_CTX_TSB_CONFIG_0_ADDR, %g3
trap_0x68_load_tsb_config:
ldxa [%g3] MMU_ASI_TSB_CONFIG_REG, %g4 ! %g4 = tsb_config
stxa %g4, [%g0] MMU_ASI_HYP_SCRATCHPAD_REG ! save tsb_config in scratch pad
ldxa [%g2] MMU_ASI_TSB_PTR_REG, %g5 ! %g5 = dtsb_ptr
ldda [%g5] ASI_NUCLEUS_QUAD_LDD, %g6 ! %g6=TTE_TAG, %g7=TTE_DATA
srlx %g7, 63, %g5 ! Check Valid bit
brz,a %g5, trap_0x68_next_ptr
! Need to check for context right here
! case (req_ctx == pctx0 && use_ctx_0 == 1):
! update_tag_access_with_pctx0
! case (req_ctx == pctx0 && use_ctx_0 == 0 && use_ctx_1 == 1):
! update_tag_access_with_pctx1
! case (req_ctx == sctx0 && use_ctx_0 == 1):
! update_tag_access_with_sctx0
! case (req_ctx == sctx0 && use_ctx_0 == 0 && use_ctx_1 == 1):
! update_tag_access_with_sctx1
ldxa [%g0] MMU_ASI_DTSB_TAG_TARGET_REG, %i0 ! %i0 = dtlb tag target
srlx %i0, 48, %i1 ! %i1 = req_ctx
brz,a %i1, trap_0x68_bypass_update_tag_access
! the req_ctx is non-zero, we need to look at the use_context bits next
ldxa [%g0] MMU_ASI_HYP_SCRATCHPAD_REG, %i2 ! %i2 = tsb config
be,a %xcc, trap_0x68_bypass_update_tag_access
be,a %xcc, trap_0x68_not_use_ctx_0
! use_context_0 = 1 path: pick primary or secondary context 0
or %g0, MMU_ASI_PRIMARY_CONTEXT_0_ADDR, %i0
ldxa [%i0] MMU_ASI_CONTEXT_REG, %i3
be,a %xcc, trap_0x68_update_tag_access
or %g0, MMU_ASI_SECONDARY_CONTEXT_0_ADDR, %i0
ldxa [%i0] MMU_ASI_CONTEXT_REG, %i3
ba trap_0x68_update_tag_access
! use_context_0 = 0 && use_context_1 = 1
or %g0, MMU_ASI_PRIMARY_CONTEXT_0_ADDR, %i0
ldxa [%i0] MMU_ASI_CONTEXT_REG, %i3
bne,a %xcc, trap_0x68_secondary_ctx
or %g0, MMU_ASI_PRIMARY_CONTEXT_1_ADDR, %i0
ldxa [%i0] MMU_ASI_CONTEXT_REG, %i3
ba trap_0x68_update_tag_access
or %g0, MMU_ASI_SECONDARY_CONTEXT_1_ADDR, %i0
ldxa [%i0] MMU_ASI_CONTEXT_REG, %i3
trap_0x68_update_tag_access:
! %i3 has the new context
! we also need to mask out the context in the tte_tag (%g6)
srlx %g1, 16, %g1 ! %g1 = expected tag target with masked context
srlx %g6, 16, %g6 ! %g6 = actual tag target with masked context
or %g0, MMU_ASI_D_TAG_ACCESS_ADDR, %i0
ldxa [%i0] MMU_ASI_D_TAG_ACCESS_REG, %i0
or %i0, %i3, %i3 ! %i3 = tag access with updated context
or %g0, MMU_ASI_D_TAG_ACCESS_ADDR, %i0
stxa %i3, [%i0] MMU_ASI_D_TAG_ACCESS_REG
trap_0x68_bypass_update_tag_access:
! If page size is 256m, mask out va[27:22]
ldxa [%g0] MMU_ASI_HYP_SCRATCHPAD_REG, %g4 ! %g4 = TSB_CONFIG
bne,a %xcc, trap_0x68_not_256m_page
cmp %g6, %o1 ! Compare TAG
bne,a %xcc, trap_0x68_next_ptr
! At this point, we have got the TTE to be loaded into the TLB.
! We just need to figure out what physical offset to use.
! Register %g4 and %g7 are restricted.
setx trap_ra_mask, %g5, %g6
ldxa [%g0] MMU_ASI_HYP_SCRATCHPAD_REG, %g5 ! %g5 = TSB_CONFIG
and %g5, 0x100, %g3 ! %g3 has the ranotpa bit
brz,a %g3, trap_0x68_skip_ra
and %g5, 0x70, %g3 ! %g3 = PSIZE
! Page-size dispatch into the RA-mask table (compares not shown)
be,a trap_0x68_get_rr_limits
be,a trap_0x68_get_rr_limits
be,a trap_0x68_get_rr_limits
ldda [%g6] 0x24, %g2 ! %g2 = RA_max mask, %g3 = RA_min mask
sllx %g5, 13, %g5 ! %g5 = RA
or %g5, %g2, %g2 ! %g2 = RA_max
and %g5, %g3, %g3 ! %g3 = RA_min
or %g0, MMU_ASI_REAL_RANGE_0_ADDR, %g1 ! %g1 points to REAL_RANGE_REG
bl,a %xcc, trap_0x68_get_rr
ldxa [%g1] MMU_ASI_REAL_RANGE_REG, %g5 ! %g5 = REAL_RANGE
brz,a %g6, trap_0x68_next_rr
sllx %g5, 10, %g6 ! %g6 = RR left shift by 10
sllx %g6, 13, %g6 ! %g6 = RA_hi
bl,a %xcc, trap_0x68_next_rr
srlx %g6, 24, %g6 ! %g6 = RA_lo
bg,a %xcc, trap_0x68_next_rr
ldxa [%g1] MMU_ASI_PHY_OFFSET_REG, %g1 ! %g1 = physical offset
! Hit: load the DTLB; loop back while config registers remain
stxa %g7, [%g0] MMU_ASI_D_DATA_IN_REG
bl,a %xcc, trap_0x68_load_tsb_config
/*************************************************************************/
! ext_trap_load_idata_in body (label outside this excerpt): preload a
! batch of TTEs from the hptrap itte table into the ITLB via the
! tag-access + data-in (replacement-policy) interface, starting at a
! pseudo-random table entry and striding 8 entries (7 skipped + 1 used,
! 16 bytes each) with wrap-around.
! NOTE(review): loop-back branch and some delay slots are not shown.
! %i1[43:40] = count, %i7[11:0] = random offset
setx hptrap_itte_end, %g1, %g2 ! %g2 = pointer to count
ldx [%g2], %g3 ! %g3 = max count
umul %g3, 16, %g1 ! %g1 = length
sub %g2, %g1, %g1 ! %g1 = pointer to first entry
and %g3, 0xf, %g3 ! %g3 = count
brz,a %g3, load_idata_set_count
and %i7, 0x0fff, %i7 ! %i7 = random offset
add %g1, %i7, %i7 ! %i7 = pointer to random entry
! %g3 = number of entries to preload
! %i7 = pointer to random entry
ldda [%i7] ASI_NUCLEUS_QUAD_LDD, %g6 ! %g6 = tag access, %g7 = data
stxa %g6, [%g4] MMU_ASI_I_TAG_ACCESS_REG ! write to tag access
stxa %g7, [%g0] MMU_ASI_I_DATA_IN_REG ! write to data in
add %i7, 112, %i7 ! skip 7 entries
bcs,a %xcc, load_idata_in_no_wrap
add %g1, %i7, %i7 ! wrap %i7
brnz,a %g3, load_idata_in_next
/*************************************************************************/
! ext_trap_load_ddata_in body (label outside this excerpt): D-side twin
! of load_idata_in -- preload TTEs from the hptrap dtte table into the
! DTLB via tag-access + data-in, random start, stride 8 entries, wrap.
! NOTE(review): loop-back branch and some delay slots are not shown.
! %i1[43:40] = count, %i7[11:0] = random offset
setx hptrap_dtte_end, %g1, %g2 ! %g2 = pointer to count
ldx [%g2], %g3 ! %g3 = max count
umul %g3, 16, %g1 ! %g1 = length
sub %g2, %g1, %g1 ! %g1 = pointer to first entry
and %g3, 0xf, %g3 ! %g3 = count
brz,a %g3, load_ddata_set_count
and %i7, 0x0fff, %i7 ! %i7 = random offset
add %g1, %i7, %i7 ! %i7 = pointer to random entry
! %g3 = number of entries to preload
! %i7 = pointer to random entry
ldda [%i7] ASI_NUCLEUS_QUAD_LDD, %g6 ! %g6 = tag access, %g7 = data
stxa %g6, [%g4] MMU_ASI_D_TAG_ACCESS_REG ! write to tag access
stxa %g7, [%g0] MMU_ASI_D_DATA_IN_REG ! write to data in
add %i7, 112, %i7 ! skip 7 entries
bcs,a %xcc, load_ddata_in_no_wrap
add %g1, %i7, %i7 ! wrap %i7
brnz,a %g3, load_ddata_in_next
/*************************************************************************/
ext_trap_load_idata_access:
! Preload ITLB entries through the tag-access + DATA_ACCESS (diagnostic,
! indexed) interface, starting at a pseudo-random itte-table entry and a
! caller-supplied TLB index, stride 8 table entries / 3 TLB indexes.
! In: %i7[9:3] = index, %i7[2:0] = count, %i7[11:0] = random offset.
! NOTE(review): loop-back branch and some delay slots are not shown.
! %i7[9:3] = index , %i7[2:0] = count, %i7[11:0] = random offset
setx hptrap_itte_end, %g1, %g2 ! %g2 = pointer to count
ldx [%g2], %g3 ! %g3 = max count
umul %g3, 16, %g1 ! %g1 = length
sub %g2, %g1, %g1 ! %g1 = pointer to first entry
and %i7, 0x7, %g3 ! %g3 = count
brz,a %g3, load_idata_access_set_count
load_idata_access_set_count:
and %i7, 0x05f8, %g5 ! %g5 = Real + index
add %g1, %i7, %i7 ! %i7 = pointer to random entry
! %g3 = number of entries to preload
! %i7 = pointer to random entry
ldda [%i7] ASI_NUCLEUS_QUAD_LDD, %g6 ! %g6 = tag access, %g7 = data
stxa %g6, [%g4] MMU_ASI_I_TAG_ACCESS_REG ! write to tag access
stxa %g7, [%g5] MMU_ASI_I_DATA_ACCESS_REG ! write to data access
add %i7, 112, %i7 ! skip 7 entries
bcs,a %xcc, load_idata_access_no_wrap
add %g1, %i7, %i7 ! wrap %i7
load_idata_access_no_wrap:
add %g5, 0x18, %g5 ! increment index by 3
brnz,a %g3, load_idata_access_next
/*************************************************************************/
ext_trap_load_ddata_access:
! Preload DTLB entries through the tag-access + DATA_ACCESS (diagnostic,
! indexed) interface -- D-side twin of ext_trap_load_idata_access.
! In: %i7[9:3] = index, %i7[2:0] = count, %i7[11:0] = random offset.
! Clobbers (visible here): %g1-%g7, %i7.
! NOTE(review): this listing is incomplete (loop-back branch / some delay
! slots are not shown); comments describe only the visible instructions.
! %i7[9:3] = index , %i7[2:0] = count, %i7[11:0] = random offset
! FIX: read the table size from the D-side terminator hptrap_dtte_end,
! not hptrap_itte_end (copy-paste from the I-side handler above); the
! sibling D-side handler ext_trap_load_ddata_in already uses
! hptrap_dtte_end, and this walk feeds the D-TLB.
setx hptrap_dtte_end, %g1, %g2 ! %g2 = pointer to count
ldx [%g2], %g3 ! %g3 = max count
umul %g3, 16, %g1 ! %g1 = length
sub %g2, %g1, %g1 ! %g1 = pointer to first entry
and %i7, 0x7, %g3 ! %g3 = count
brz,a %g3, load_ddata_access_set_count
load_ddata_access_set_count:
and %i7, 0x07f8, %g5 ! %g5 = Real + index
add %g1, %i7, %i7 ! %i7 = pointer to random entry
! %g3 = number of entries to preload
! %i7 = pointer to random entry
ldda [%i7] ASI_NUCLEUS_QUAD_LDD, %g6 ! %g6 = tag access, %g7 = data
stxa %g6, [%g4] MMU_ASI_D_TAG_ACCESS_REG ! write to tag access
stxa %g7, [%g5] MMU_ASI_D_DATA_ACCESS_REG ! write to data access
add %i7, 112, %i7 ! skip 7 entries
bcs,a %xcc, load_ddata_access_no_wrap
add %g1, %i7, %i7 ! wrap %i7
load_ddata_access_no_wrap:
add %g5, 0x18, %g5 ! increment index by 3
brnz,a %g3, load_ddata_access_next
!****************************************************************************************
! ext_trap_0x8 body (label outside this excerpt): shared trap handler
! that dispatches on hardware thread id to a per-thread table of user
! code pointers, then advances a rotating offset through that table so
! each occurrence of the trap returns to a fresh code page.
! NOTE(review): the cmp instructions feeding the bne ladder and the final
! done/retry sequence are not visible in this excerpt.
! This handler processes the exception based on tid. For each thread, a pointer to a new
! code page is used to provide the target of the return address from the done.
wr %g0, ASI_CORE_ID, %asi
ldxa [ASI_CORE_ID_VA] %asi, %g1
and %g1, 7, %g1 ! %g1 = tid
! tid dispatch ladder: each annulled delay slot loads the table base for
! the matching thread, then jumps to the common pointer-advance code.
bne,a %xcc, ext_trap_0x8_check_tid1
setx hptrap_user_code_ptrs_0_begin, %g3, %g2
ba ext_trap_0x8_get_ucptr
bne,a %xcc, ext_trap_0x8_check_tid2
setx hptrap_user_code_ptrs_1_begin, %g3, %g2
ba ext_trap_0x8_get_ucptr
bne,a %xcc, ext_trap_0x8_check_tid3
setx hptrap_user_code_ptrs_2_begin, %g3, %g2
ba ext_trap_0x8_get_ucptr
bne,a %xcc, ext_trap_0x8_check_tid4
setx hptrap_user_code_ptrs_3_begin, %g3, %g2
ba ext_trap_0x8_get_ucptr
bne,a %xcc, ext_trap_0x8_check_tid5
setx hptrap_user_code_ptrs_4_begin, %g3, %g2
ba ext_trap_0x8_get_ucptr
bne,a %xcc, ext_trap_0x8_check_tid6
setx hptrap_user_code_ptrs_5_begin, %g3, %g2
ba ext_trap_0x8_get_ucptr
bne,a %xcc, ext_trap_0x8_check_tid7
setx hptrap_user_code_ptrs_6_begin, %g3, %g2
ba ext_trap_0x8_get_ucptr
setx hptrap_user_code_ptrs_7_begin, %g3, %g2
! Common: fetch current offset, resolve the next code pointer, handle
! wrap-around, and persist the next offset back into the table.
ldx [%g2], %g3 ! %g3 = offset to new pointer set
add %g2, %g3, %g3 ! %g3 = pointer to next pointer set
ldx [%g3], %g4 ! %g4 = pointer to new code
bne,a %xcc, ext_trap_0x8_no_wrap_around
ldx [%g3], %g4 ! %g4 = pointer to new code
ext_trap_0x8_no_wrap_around:
ldx [%g3 + 8], %g5 ! %g5 = new offset
stx %g5, [%g2] ! save next offset
! We are returning to the top of the page except for the first code section
! This prevents re-initializing loop counter
/*************************************************************************/
! trap 0x9 handler fragment: check an occurrence counter (ext_trap_0x9_cnt)
! and, while below the limit, fall back into the trap-0x8 path.
! NOTE(review): the counter load/compare is not visible in this excerpt.
setx ext_trap_0x9_cnt, %g1, %g2
bl,a %xcc, ext_trap_0x9_continue
setx ext_trap_0x8_begin, %g2, %g1
/*************************************************************************/
! trap 0x1b handler fragment: index the per-thread trap-occurrence count
! array by tid, locate the trap-0x1b slot, and clear it once it reaches
! MAX_OCCURENCE_CNT.  NOTE(review): the counter load feeding bge is not
! visible in this excerpt.
wr %g0, ASI_CORE_ID, %asi
ldxa [ASI_CORE_ID_VA] %asi, %l7
and %l7, %g1, %l7 ! %l7 has TID
setx Thr0_trap_occurence_cnt_begin, %g2, %g1
or %g0, NUM_IMPL_TRAPS, %g3
umul %l7, %g2, %g2 ! %g2 points to thread offset
add %g1, %g2, %g1 ! %g1 points to thread
add %g1, TRAP_0x1B_OFFSET, %g1 ! %g1 points to trap 0x1b occurence count
or %g0, MAX_OCCURENCE_CNT, %g4
bge %xcc, clear_occurence_cnt
/*************************************************************************/
! trap 0x31 handler fragment: same counter-and-continue pattern as the
! trap 0x9 fragment above (compare not visible in this excerpt).
setx ext_trap_0x31_cnt, %g1, %g2
bl,a %xcc, ext_trap_0x31_continue
/*************************************************************************/
! Fragments: occurrence-count clearing / register re-initialization from
! Thr0_register_initial_values, followed by what is presumably the
! function_tsb_ptr_calc body (declared .global above): derive a TSB base
! and per-page-size VA mask from a TSB config value in %g2 -- TODO
! confirm; this excerpt shows only isolated instructions.
brnz %g3, clear_occurence_cnt
setx Thr0_register_initial_values, %i6, %i7
add %i7, %i6, %i7 ! %i7 points to register values
! %i6 + 8 = return address
and %g2, 0x0f, %g3 ! %g3 = TSB size
setx tsb_base_mask, %g4, %g5
ldx [%g5+%g3], %g4 ! %g4 = tsb mask
and %g2, %g4, %i7 ! %i7 = TSB base
srlx %g4, 4, %g4 ! %g4 = page size
setx tsb_va_8k_mask, %g5, %g6
ldx [%g6 + %g3], %g5 ! %g5 = VA mask
ldx [%g6 + %g3], %g5 ! %g5 = VA mask
ldx [%g6 + %g3], %g5 ! %g5 = VA mask
ldx [%g6 + %g3], %g5 ! %g5 = VA mask
/*************************************************************************/
! Data tables for the software TSB walks above.  The tsb_addresses_zero /
! tsb_addresses_non_zero labels themselves are outside this excerpt; the
! .xword groups below are 16-entry mask tables, presumably the
! tsb_base_mask and per-page-size VA-mask tables referenced by
! function_tsb_ptr_calc (tsb_base_mask, tsb_va_8k_mask, ...) -- TODO
! confirm which label heads each group in the full source.
.global tsb_addresses_zero
.global tsb_addresses_non_zero
! Group 1: base masks -- each entry clears one more low-order bit
! (0xffffffe000 down to 0xfff0000000), i.e. doubling TSB sizes.
.xword 0x000000ffffffe000
.xword 0x000000ffffffc000
.xword 0x000000ffffff8000
.xword 0x000000ffffff0000
.xword 0x000000fffffe0000
.xword 0x000000fffffc0000
.xword 0x000000fffff80000
.xword 0x000000fffff00000
.xword 0x000000ffffe00000
.xword 0x000000ffffc00000
.xword 0x000000ffff800000
.xword 0x000000ffff000000
.xword 0x000000fffe000000
.xword 0x000000fffc000000
.xword 0x000000fff8000000
.xword 0x000000fff0000000
! Group 2: VA masks with 8K granularity (low bit 13), widening one bit
! per entry -- presumably the 8K-page TSB index masks.
.xword 0x00000000003fe000
.xword 0x00000000007fe000
.xword 0x0000000000ffe000
.xword 0x0000000001ffe000
.xword 0x0000000003ffe000
.xword 0x0000000007ffe000
.xword 0x000000000fffe000
.xword 0x000000001fffe000
.xword 0x000000003fffe000
.xword 0x000000007fffe000
.xword 0x00000000ffffe000
.xword 0x00000001ffffe000
.xword 0x00000003ffffe000
.xword 0x00000007ffffe000
.xword 0x0000000fffffe000
.xword 0x0000001fffffe000
! Group 3: VA masks with 64K granularity (low bit 16)
.xword 0x0000000001ff0000
.xword 0x0000000003ff0000
.xword 0x0000000007ff0000
.xword 0x000000000fff0000
.xword 0x000000001fff0000
.xword 0x000000003fff0000
.xword 0x000000007fff0000
.xword 0x00000000ffff0000
.xword 0x00000001ffff0000
.xword 0x00000003ffff0000
.xword 0x00000007ffff0000
.xword 0x0000000fffff0000
.xword 0x0000001fffff0000
.xword 0x0000003fffff0000
.xword 0x0000007fffff0000
.xword 0x000000ffffff0000
! Group 4: VA masks with 4M granularity (low bit 22)
.xword 0x000000007fc00000
.xword 0x00000000ffc00000
.xword 0x00000001ffc00000
.xword 0x00000003ffc00000
.xword 0x00000007ffc00000
.xword 0x0000000fffc00000
.xword 0x0000001fffc00000
.xword 0x0000003fffc00000
.xword 0x0000007fffc00000
.xword 0x000000ffffc00000
.xword 0x000001ffffc00000
.xword 0x000003ffffc00000
.xword 0x000007ffffc00000
.xword 0x00000fffffc00000
.xword 0x00001fffffc00000
.xword 0x00003fffffc00000
! Group 5: coarsest VA masks (low bit 29) -- presumably for the largest
! supported page size; TODO confirm mapping to PSIZE encoding.
.xword 0x0000001fe0000000
.xword 0x0000003fe0000000
.xword 0x0000007fe0000000
.xword 0x000000ffe0000000
.xword 0x000001ffe0000000
.xword 0x000003ffe0000000
.xword 0x000007ffe0000000
.xword 0x00000fffe0000000
.xword 0x00001fffe0000000
.xword 0x00003fffe0000000
.xword 0x00007fffe0000000
.xword 0x0000ffffe0000000
.xword 0x0001ffffe0000000
.xword 0x0003ffffe0000000
.xword 0x0007ffffe0000000
.xword 0x000fffffe0000000