Changeset 25a92bc1 in rtems


Timestamp:
Jul 11, 2008, 10:02:12 AM
Author:
Thomas Doerfler <Thomas.Doerfler@…>
Branches:
4.10, 4.11, 4.9, master
Children:
80a0ae8
Parents:
a86f3aac
Message:

adapted powerpc exception code

Location:
c/src/lib/libcpu/powerpc
Files:
15 edited

Legend:

  unchanged line
- removed line
+ added line
… unchanged lines omitted
  • c/src/lib/libcpu/powerpc/ChangeLog

    ra86f3aac r25a92bc1  
+2008-07-11	Sebastian Huber <sebastian.huber@embedded-brains.de>
+
+	* Makefile.am: Install powerpc-utility.h.
+
+	* shared/include/cpuIdent.h, shared/include/cpuIdent.c: Added e200 and
+	e300 features.
+
+	* new-exceptions/cpu.c: Removed PR288 bugfix check.
+
+	* new-exceptions/e500_raw_exc_init.c: Added initialization for e200.
+	Set IVPR register for e200 and e500 to ppc_exc_vector_base.
+
+	* new-exceptions/raw_exception.c, new-exceptions/raw_exception.h: Added
+	vector categories for e200 and e300 cores.  Added global variable
+	ppc_exc_vector_base for CPUs with IVPR register.
+
+	* new-exceptions/bspsupport/ppc_exc.S,
+	new-exceptions/bspsupport/ppc_exc_asm_macros.h,
+	new-exceptions/bspsupport/ppc_exc_bspsupp.h,
+	new-exceptions/bspsupport/ppc_exc_hdl.c,
+	new-exceptions/bspsupport/vectors.h,
+	new-exceptions/bspsupport/vectors_init.c: Consistent code layout in
+	most assembler code sections and usage of defines for registers.
+	Usage of standard header files to avoid multiple definitions.
+
+	Optimized exception code: removed many branches and exploited branch
+	prediction for asynchronous exceptions, moved common wrap code into
+	the WRAP macro to eliminate a branch, and statically initialized the
+	handler table with a default handler to eliminate the test whether a
+	handler is present.  Register CR6 is no longer cleared because the
+	exception handler functions are not variadic.
+
+	New method to switch to the interrupt stack: it tests whether the
+	exception stack pointer already lies inside the interrupt stack area,
+	so it is no longer necessary to disable interrupts.  SPRG1 and SPRG2
+	are used to store the initial interrupt stack pointer and the start
+	of the interrupt stack memory area.
+
+	Removed the variable ppc_exc_msr_irq_mask; the general interrupt
+	disable mask from SPRG0 is used instead.
+
+	New initialization routine ppc_exc_initialize() for bsp_start().  It
+	takes the interrupt disable mask, interrupt stack start and size as
+	parameters.
+
+	Added packed prologues for CPUs with IVPR and IVOR registers to save
+	memory space.
+
+	Reverted the ppc_exc_crit_always_enabled change from yesterday.
+
+	WARNING: Tests with critical interrupt exceptions crash the system at
+	least on MPC8313ERDB and MPC8349EAMDS.  There may be a serious bug
+	somewhere in the new code.
+
 2008-07-10	Till Straumann <strauman@slac.stanford.edu>
 
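Editor's note: the entry above describes ppc_exc_initialize() and the new
interrupt stack test only in prose.  The following compile-only C sketch
restates both; the prototype and the SPRG roles are assumptions reconstructed
from the wording above, not the actual interface (see
new-exceptions/bspsupport/ppc_exc_bspsupp.h for that).

#include <stdint.h>

/* Assumed prototype, reconstructed from the ChangeLog wording */
void ppc_exc_initialize(
  uint32_t interrupt_disable_mask, /* general mask, also kept in SPRG0 */
  uint32_t interrupt_stack_start,  /* area start, kept in SPRG2 */
  uint32_t interrupt_stack_size    /* SPRG1 = start + size */
);

/*
 * C model of the new stack switch test: one bounds check against
 * (SPRG2, SPRG1] replaces disabling interrupts.
 */
static int already_on_interrupt_stack(uint32_t sp, uint32_t sprg1, uint32_t sprg2)
{
  return sprg2 < sp && sp <= sprg1;
}

int main(void)
{
  /* A BSP would call, with board-specific placeholder values:
   *   ppc_exc_initialize(mask, stack_start, stack_size);
   */
  return !already_on_interrupt_stack(0x1800, 0x2000, 0x1000);
}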
  • c/src/lib/libcpu/powerpc/Makefile.am

    ra86f3aac r25a92bc1  
 include_libcpudir = $(includedir)/libcpu
 
-include_libcpu_HEADERS =
+include_libcpu_HEADERS = shared/include/powerpc-utility.h
 
 EXTRA_DIST =
  • c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/README

    ra86f3aac r25a92bc1  
 
 	And one more note: We never want to disable
-	machine-check exceptions to avoid a checkstop.
+	machine-check exceptions to avoid a checkstop.
 	This means that we cannot use enabling/disabling
 	this type of exception for protection of critical
…
 	use OS primitives and currently there are no
 	asynchronous machine-checks defined.
+
+   Epilogue:
+
+   You have to disable all asynchronous exceptions which may cause a context
+   switch before restoring the SRRs and executing the RFI.  Reason:
+
+      Suppose we are in the epilogue code of an EE between the move to the
+      SRRs and the RFI.  Here EE is disabled but CE is enabled.  Now a CE
+      happens.  The handler decides that a thread dispatch is necessary.
+      The CE checks if this is possible:
+
+         o The thread dispatch disable level is 0, because the EE has already
+           decremented it.
+         o The EE lock variable is cleared.
+         o The EE is not executing its first instruction.
+
+      Hence a thread dispatch is allowed.  The CE issues a context switch to
+      a task with EE enabled (for example a task waiting for a semaphore).
+      Now an EE happens and the current content of the SRRs is lost.
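Editor's note: the scenario above can be pinned down with a small, purely
illustrative C model.  Every name below is invented for this sketch (the real
code is the assembly epilogue in ppc_exc.S); it only restates the required
ordering.

#include <stdint.h>

static uint32_t msr, srr0, srr1;

#define MSR_EE 0x00008000u /* external exception enable */
#define MSR_CE 0x00020000u /* critical exception enable (Book E) */

static void set_msr(uint32_t new_msr) { msr = new_msr; }
static void rfi(void) { /* restores PC from SRR0 and MSR from SRR1 */ }

void epilogue(uint32_t saved_pc, uint32_t saved_msr)
{
  /*
   * Disable ALL asynchronous exceptions that may cause a context switch
   * BEFORE writing the SRRs: a CE taken after the SRRs are written but
   * before the RFI may dispatch to another task, which re-enables EE; the
   * next EE then overwrites SRR0/SRR1 and this context is lost.
   */
  set_msr(msr & ~(MSR_EE | MSR_CE));
  srr0 = saved_pc;  /* move to SRR0 */
  srr1 = saved_msr; /* move to SRR1 */
  rfi();            /* the SRRs must still be intact here */
}

int main(void)
{
  epilogue(0x1000, MSR_EE); /* placeholder values */
  return 0;
}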
  • c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc.S

    ra86f3aac r25a92bc1  
  *
  * Modified and partially rewritten by Till Straumann, 2007
+ *
+ * Modified by Sebastian Huber <sebastian.huber@embedded-brains.de>, 2008.
  *
  * Low-level assembly code for PPC exceptions.
…
 	 * and E500 machine-check, synchronous and asynchronous exceptions
 	 */
-	PPC_EXC_MIN_PROLOG_SYNC  _NAME=tmpl_std        _VEC=0 _PRI=std  _FLVR=std
-	PPC_EXC_MIN_PROLOG_SYNC  _NAME=tmpl_p405_crit  _VEC=0 _PRI=crit _FLVR=p405_crit
-	PPC_EXC_MIN_PROLOG_SYNC  _NAME=tmpl_bookE_crit _VEC=0 _PRI=crit _FLVR=bookE_crit
-	PPC_EXC_MIN_PROLOG_SYNC  _NAME=tmpl_e500_mchk  _VEC=0 _PRI=mchk _FLVR=e500_mchk
+	PPC_EXC_MIN_PROLOG_SYNC _NAME=tmpl_std        _VEC=0 _PRI=std  _FLVR=std
+	PPC_EXC_MIN_PROLOG_SYNC _NAME=tmpl_p405_crit  _VEC=0 _PRI=crit _FLVR=p405_crit
+	PPC_EXC_MIN_PROLOG_SYNC _NAME=tmpl_bookE_crit _VEC=0 _PRI=crit _FLVR=bookE_crit
+	PPC_EXC_MIN_PROLOG_SYNC _NAME=tmpl_e500_mchk  _VEC=0 _PRI=mchk _FLVR=e500_mchk
 
 	PPC_EXC_MIN_PROLOG_ASYNC _NAME=tmpl_std        _VEC=0 _PRI=std  _FLVR=std
…
 ppc_exc_min_prolog_size      = 4 * 4
 
-    /* Special prologue for 603e-style CPUs.
-     *
-     * 603e shadows GPR0..GPR3 for certain exceptions. We must switch
-     * that off before we can use the stack pointer. Note that this is
-     * ONLY safe if the shadowing is actually active -- otherwise, r1
-     * is destroyed. We deliberately use r1 so problems become obvious
-     * if this is misused!
-     */
+/* Special prologue for 603e-style CPUs.
+ *
+ * 603e shadows GPR0..GPR3 for certain exceptions. We must switch
+ * that off before we can use the stack pointer. Note that this is
+ * ONLY safe if the shadowing is actually active -- otherwise, r1
+ * is destroyed. We deliberately use r1 so problems become obvious
+ * if this is misused!
+ */
 	.global ppc_exc_tgpr_clr_prolog
 ppc_exc_tgpr_clr_prolog:
-    mfmsr   r1
-    rlwinm  r1,r1,0,15,13
-    mtmsr   r1
-    isync
+	mfmsr   r1
+	rlwinm  r1,r1,0,15,13
+	mtmsr   r1
+	isync
 	/* FALL THRU TO 'auto' PROLOG */
 
…
 ppc_exc_min_prolog_auto:
 	stwu    r1, -EXCEPTION_FRAME_END(r1)
-	stw     r3, GPR3_OFFSET(r1)
-	mflr    r3
-	bla  wrap_auto
+	stw     VECTOR_REGISTER, VECTOR_OFFSET(r1)
+	mflr    VECTOR_REGISTER
+	bla     wrap_auto
+
+/**
+ * @brief Use vector offsets with 16 byte boundaries.
+ *
+ * @see ppc_exc_min_prolog_auto();
+ */
+	.global ppc_exc_min_prolog_auto_packed
+ppc_exc_min_prolog_auto_packed:
+	stwu    r1, -EXCEPTION_FRAME_END(r1)
+	stw     VECTOR_REGISTER, VECTOR_OFFSET(r1)
+	mflr    VECTOR_REGISTER
+	bla     wrap_auto_packed
 
 	.global ppc_exc_tgpr_clr_prolog_size
…
 	.global ppc_exc_min_prolog_auto_async
 ppc_exc_min_prolog_auto_async:
-	stw     r1, ppc_exc_lock_std@sdarel(r13)
-	stw     r3, ppc_exc_gpr3_std@sdarel(r13)
-	mflr    r3
-	bla  wrap_auto_async
+	stw     r1, ppc_exc_lock_std@sdarel(r13)
+	stw     VECTOR_REGISTER, ppc_exc_vector_register_std@sdarel(r13)
+	mflr    VECTOR_REGISTER
+	bla     wrap_auto_async
 
 /******************************************************/
…
 
 	/* Standard/classic powerpc */
-	WRAP _FLVR=std _PRI=std _SRR0=srr0 _SRR1=srr1 _RFI=rfi
+	WRAP    _FLVR=std _PRI=std _SRR0=srr0 _SRR1=srr1 _RFI=rfi
 
 	/* ppc405 has a critical exception using srr2/srr3 */
-	WRAP _FLVR=p405_crit _PRI=crit _SRR0=srr2 _SRR1=srr3 _RFI=rfci
+	WRAP    _FLVR=p405_crit _PRI=crit _SRR0=srr2 _SRR1=srr3 _RFI=rfci
 
 	/* bookE has critical exception using csrr0 csrr1 */
-	WRAP _FLVR=bookE_crit _PRI=crit _SRR0=csrr0 _SRR1=csrr1 _RFI=rfci
+	WRAP    _FLVR=bookE_crit _PRI=crit _SRR0=csrr0 _SRR1=csrr1 _RFI=rfci
 
 	/* e500 has machine-check exception using mcsrr0 mcsrr1 */
-	WRAP _FLVR=e500_mchk _PRI=mchk _SRR0=mcsrr0 _SRR1=mcsrr1 _RFI=rfmci
+	WRAP    _FLVR=e500_mchk _PRI=mchk _SRR0=mcsrr0 _SRR1=mcsrr1 _RFI=rfmci
 
+	/* LR holds vector, VECTOR_REGISTER holds orig. LR */
+wrap_auto:
+	stw     FRAME_REGISTER, FRAME_OFFSET(r1)
 
-	/* LR holds vector, r3 holds orig. LR */
-wrap_auto:
-	stw     r14, GPR14_OFFSET(r1)
-	/* find address where we jumped from */
-	mflr    r14
-	/* restore LR     */
-	mtlr    r3
-	/* compute vector into R3 */
-	rlwinm  r3, r14, 24, 26, 31
-	/* we're now in almost the same state as if called by
-	 * min_prolog_std but we must skip saving r14
+	/* Find address where we jumped from */
+	mflr    FRAME_REGISTER
+
+	/* Restore LR */
+	mtlr    VECTOR_REGISTER
+
+	/* Compute vector into R3 */
+	rlwinm  VECTOR_REGISTER, FRAME_REGISTER, 24, 26, 31
+
+	/*
+	 * We're now in almost the same state as if called by
+	 * min_prolog_std but we must skip saving FRAME_REGISTER
 	 * since that's done already
 	 */
-	b       wrap_no_save_r14_std
+	b       wrap_no_save_frame_register_std
+
+	/* See: wrap_auto */
+wrap_auto_packed:
+	stw     FRAME_REGISTER, FRAME_OFFSET(r1)
+	mflr    FRAME_REGISTER
+	mtlr    VECTOR_REGISTER
+	rlwinm  VECTOR_REGISTER, FRAME_REGISTER, 28, 26, 31
+	b       wrap_no_save_frame_register_std
 
 wrap_auto_async:
 	stwu    r1, -EXCEPTION_FRAME_END(r1)
-	stw     r14, GPR14_OFFSET(r1)
+	stw     FRAME_REGISTER, FRAME_OFFSET(r1)
 	/* find address where we jumped from */
-	mflr    r14
+	mflr    FRAME_REGISTER
 	/* restore LR     */
-	mtlr    r3
+	mtlr    VECTOR_REGISTER
 	/* set upper bits to indicate that non-volatile
 	 * registers should not be saved/restored.
 	 */
-	li      r3, 0xffff8000
+	li      VECTOR_REGISTER, 0xffff8000
 	/* compute vector into R3 */
-	rlwimi  r3, r14, 24, 26, 31
+	rlwimi  VECTOR_REGISTER, FRAME_REGISTER, 24, 26, 31
 	/* we're now in almost the same state as if called by
-	 * min_prolog_std but we must skip saving r14
+	 * min_prolog_std but we must skip saving FRAME_REGISTER
 	 * since that's done already
 	 */
-	b       wrap_no_save_r14_std
+	b       wrap_no_save_frame_register_std
 
-/*
- * Common code for all flavors of exception and whether
- * they are synchronous or asynchronous.
- *
- * Call with
- *  r3 : vector
- *  r4 : srr0
- *  r5 : srr1
- *  r14: exception frame
- *  cr4: OR of lower-priority locks
- *  cr2: exception type (asyn/isr [<0] or synchronous [>=0])
- *  lr : is updated by 'bl'
- * all others: original state
- *
- * If this is an asynchronous exception ( cr2 < 0 ):
- *   - save volatile registers only,
- *   - disable thread dispatching,
- *   - switch to interrupt stack (if necessary),
- *   - call the C-dispatcher,
- *   - switch back the stack,
- *   - decrement the dispatch-disable level
- *   - check if it is safe to dispatch (disable-level must be 0
- *     AND no lower-priority asynchronous exception must be under
- *     way (as indicated by the lock variables).
- *   - If it would be OK to dispatch, call the C-wrapup code.
- *   - restore volatile registers
- *
- * Otherwise, i.e., if we are dealing with a synchronous exception
- * then:
- *   - save all registers
- *   - call the C-dispatcher
- *   - restore registers
- */
-
-wrap_common:
-	stw     r4, SRR0_FRAME_OFFSET(r14)
-	stw     r5, SRR1_FRAME_OFFSET(r14)
-
-	/* prepare for calling C code; */
-
-	/* use non-volatile r15 for remembering lr */
-	stw     r15, GPR15_OFFSET(r14)
-
-	/* save vector; negative if only scratch regs. are valid */
-	stw     r3,  EXCEPTION_NUMBER_OFFSET(r14)
-
-	/* save scratch registers */
-
-	/* r2 should be unused or fixed anyways (eabi sdata2) */
-	stw     r0,  GPR0_OFFSET(r14)
-	stw     r2,  GPR2_OFFSET(r14)
-	stw     r6,  GPR6_OFFSET(r14)
-	stw     r7,  GPR7_OFFSET(r14)
-	stw     r8,  GPR8_OFFSET(r14)
-	stw     r9,  GPR9_OFFSET(r14)
-	stw     r10, GPR10_OFFSET(r14)
-	stw     r11, GPR11_OFFSET(r14)
-	stw     r12, GPR12_OFFSET(r14)
-	/* r13 must be fixed anyways (sysv sdata) */
-
-	/* save LR */
-	mflr    r15
-
-	mfctr   r4
-	mfxer   r5
-	stw     r4,  EXC_CTR_OFFSET(r14)
-	stw     r5,  EXC_XER_OFFSET(r14)
-
-	/*
-	 * Switch MMU / RI on if necessary;
-	 * remember decision in cr3
-	 */
-	lwz     r4,  ppc_exc_msr_bits@sdarel(r13)
-	cmpwi   cr3, r4, 0
-	beq     cr3, 1f
-	mfmsr   r5
-	or      r5, r5, r4
-	mtmsr   r5
-	sync
-	isync
-1:
-
-	/* If this is a asynchronous exception we skip ahead */
-	blt     cr2, skip_save_nonvolatile_regs
-
-	/* YES; they want everything ('normal exception') */
-
-	/* save original stack pointer */
-	lwz     r5,  EXC_MIN_GPR1(r14)
-	stw     r5,  GPR1_OFFSET(r14)
-
-	stw     r13, GPR13_OFFSET(r14)
-
-	/* store r16..r31 into the exception frame */
-	stmw    r16, GPR16_OFFSET(r14)
-
-skip_save_nonvolatile_regs:
-	/* store address of exception frame in r4; vector is in r3 */
-	addi    r4, r14, FRAME_LINK_SPACE
-
-	/* load hi-halfword of C wrapper address */
-	lis     r5, ppc_exc_C_wrapper@h
-	/* clear CR[6] to make sure no vararg callee assumes that
-	 * there are any valid FP regs
-	 */
-	crxor   6,6,6
-	/* merge lo-halfword of C wrapper address */
-	ori     r5, r5, ppc_exc_C_wrapper@l
-	/* Far branch to ppc_C_wrapper */
-	mtlr    r5
-	blrl
-
-	/* do not clobber r3 since we pass the return value
-	 * of ppc_exc_C_wrapper on to ppc_exc_wrapup
-	 */
-
-	/* skip decrementing the thread-dispatch disable level
-	 * and calling ppc_exc_wrapup if this is a synchronous
-	 * exception.
-	 */
-	bge     cr2, restore_nonvolatile_regs
-
-	/* decrement ISR nest level;
-	 * disable all interrupts.
-	 * (Disabling IRQs here is not necessary if we
-	 * use the stack-switching strategy which tests
-	 * if we are alreay on the ISR-stack as opposed
-	 * to test the nesting level; see ppc_exc_asm_macros.h)
-	 */
-	lwz     r4,  ppc_exc_msr_irq_mask@sdarel(r13)
-	mfmsr   r5
-	andc    r4, r5, r4
-	mtmsr   r4
-	lwz     r4, _ISR_Nest_level@sdarel(r13)
-	addi    r4, r4, -1
-	stw     r4, _ISR_Nest_level@sdarel(r13)
-
-	/*
-	 * switch back to original stack (r14 == r1 if we are
-	 * still on the IRQ stack).
-	 */
-	mr      r1, r14
-
-	/* restore interrupt mask */
-	mtmsr   r5
-
-	/* decrement thread_dispatch level and check
-	 * if we have to run the dispatcher.
-	 */
-	lwz     r5,  _Thread_Dispatch_disable_level@sdarel(r13)
-	addic.  r5,  r5, -1
-	stw     r5,  _Thread_Dispatch_disable_level@sdarel(r13)
-
-	/* test _Thread_Dispatch_disable nesting level AND
-	 * lower priority locks (in cr4); ONLY if
-	 * _Thread_Dispatch_disable_level == 0 AND no lock is set
-	 * then call ppc_exc_wrapup which may do a context switch.
-	 */
-	crand   EQ(cr0), EQ(cr0), EQ(cr4)
-	bne     2f
-	crxor   6,6,6
-	/* Far branch to ppc_exc_wrapup */
-	lis     r5, ppc_exc_wrapup@h
-	addi    r4, r14, FRAME_LINK_SPACE
-	ori     r5, r5, ppc_exc_wrapup@l
-	mtlr    r5
-	blrl
-2:
-	lwz     r14, GPR14_OFFSET(r1)
-
-	/* we can skip restoring r16..r31 */
-	b       skip_restore_nonvolatile_regs
-
-restore_nonvolatile_regs:
-	/* synchronous exc: restore everything from the exception frame */
-	lwz     r14, GPR14_OFFSET(r1)
-
-	/* restore stack pointer */
-	lwz     r5,  GPR1_OFFSET(r1)
-	stw     r5,  EXC_MIN_GPR1(r1)
-
-	/* restore non-volatile regs */
-	lwz     r13, GPR13_OFFSET(r1)
-	lmw     r16, GPR16_OFFSET(r1)
-
-skip_restore_nonvolatile_regs:
-	lwz     r3,  EXC_XER_OFFSET(r1)
-	lwz     r4,  EXC_CTR_OFFSET(r1)
-	mtxer   r3
-	mtctr   r4
-
-	/* restore lr, r15 */
-	mtlr    r15
-	lwz     r15, GPR15_OFFSET(r1)
-
-	/* restore scratch regs */
-	lwz     r12, GPR12_OFFSET(r1)
-	lwz     r11, GPR11_OFFSET(r1)
-	lwz     r10, GPR10_OFFSET(r1)
-	lwz     r9,  GPR9_OFFSET(r1)
-	lwz     r8,  GPR8_OFFSET(r1)
-	lwz     r7,  GPR7_OFFSET(r1)
-	lwz     r6,  GPR6_OFFSET(r1)
-	/* r4, r5 are eventually restored by caller */
-	lwz     r3,  GPR3_OFFSET(r1)
-	lwz     r2,  GPR2_OFFSET(r1)
-	/* r1, is eventually restored by caller */
-	lwz     r0,  GPR0_OFFSET(r1)
-
-	beq     cr3, 2f
-	/* restore MSR settings */
-	lwz     r5,  ppc_exc_msr_bits@sdarel(r13)
-	mfmsr   r4
-	andc    r4, r4, r5
-	mtmsr   r4
-	sync
-	isync
-2:
-
-	lwz     r4,  EXC_CR_OFFSET(r1)
-	mtcr    r4
-
-	/* Must disable interrupts prior to restoring SSRs.
-	 * Here's a scenario discovered by Sebastian Huber:
-	 *  1) CE happens between writing to SRR and RFI
-	 *  2) CE handler does something which requires a task switch
-	 *  3) CE wrapper returns and determines that task switch
-	 *     is OK since EE lock is not held, dispatch-disable level
-	 *     is zero etc.
-	 *  4) switch to other task enables EE
-	 *  5) eventually, switch back to task interrupted by 1)
-	 *  6) RFI happens but SRR contents have been clobbered.
-	 */
-	lwz     r4,  ppc_exc_msr_irq_mask@sdarel(r13)
-	mfmsr   r5
-	andc    r4, r5, r4
-	mtmsr   r4
-
-	/* restore SRR and stack */
-	lwz     r4,  SRR0_FRAME_OFFSET(r1)
-	lwz     r5,  SRR1_FRAME_OFFSET(r1)
-	blr
-
 	.global __ppc_exc_wrappers_end
 __ppc_exc_wrappers_end = .
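Editor's note: the vector encoding used by the prologues and the wrap code
above can be restated in C.  The function names below are invented for
illustration; the constants come from the assembly ('li VECTOR_REGISTER,
( _VEC | 0xffff8000 )', the sign test on the vector register and the rlwinm
instructions in wrap_auto/wrap_auto_packed).

#include <assert.h>
#include <stdint.h>

/* Asynchronous prologues set the upper bits, so the MSB flags the type */
static int32_t encode_async_vector(uint32_t vec)
{
  return (int32_t) (vec | 0xffff8000u);
}

/* The wrap code tests the sign: negative means asynchronous (interrupt like) */
static int is_asynchronous(int32_t vector_register)
{
  return vector_register < 0;
}

/*
 * The auto prologues recover the vector from an address inside the vector
 * slot: a rotate by 24 extracts it for 256 byte slots, a rotate by 28 for
 * the packed 16 byte slots.
 */
static uint32_t vector_from_prologue_address(uint32_t addr, int packed)
{
  return (addr >> (packed ? 4 : 8)) & 0x3f;
}

int main(void)
{
  assert(is_asynchronous(encode_async_vector(0x5)));
  assert(!is_asynchronous(0x5)); /* synchronous prologues load the bare vector */
  assert(vector_from_prologue_address(0x050c, 0) == 0x5);
  return 0;
}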
  • c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_asm_macros.h

    ra86f3aac r25a92bc1  
  *
  * Modified and partially rewritten by Till Straumann, 2007-2008
+ *
+ * Modified by Sebastian Huber <sebastian.huber@embedded-brains.de>, 2008.
  *
  * Low-level assembly code for PPC exceptions (macros).
…
  */
 
-#include <rtems/score/cpu.h>
-#include <bsp/vectors.h>
+#include <libcpu/powerpc-utility.h>
 #include <libcpu/raw_exception.h>
 
-#define EXC_MIN_GPR1            0
-#define FRAME_LINK_SPACE        8
-
-
-#define r0      0
-#define r1      1
-#define r2      2
-#define r3      3
-#define r4      4
-#define r5      5
-#define r6      6
-#define r7      7
-#define r8      8
-#define r9      9
-#define r10     10
-#define r11     11
-#define r12     12
-#define r13     13
-#define r14     14
-#define r15     15
-#define r16     16
-#define r17     17
-#define r18     18
-#define r19     19
-#define r20     20
-#define r21     21
-#define r22     22
-#define r23     23
-#define r24     24
-#define r25     25
-#define r26     26
-#define r27     27
-#define r28     28
-#define r29     29
-#define r30     30
-#define r31     31
-
-#define cr0 0
-#define cr1 1
-#define cr4 4
-
-#define LT(cr)  ((cr)*4+0)
-#define GT(cr)  ((cr)*4+1)
-#define EQ(cr)  ((cr)*4+2)
-
-#define NOFRAME 0xffff8000
-
-/* Opcode of  'stw r1, off(r13)' */
-#define STW_R1_R13(off) ((((36<<10)|(r1<<5)|(r13))<<16) | ((off)&0xffff))
-
-/*
- **********************************************************************
- * MACRO: SWITCH_STACK
- **********************************************************************
- *
- * Increment _ISR_Nest_level and switch r1 to interrupt
- * stack if not already there.
- *
- * USES:    RA, RB, cr0
- * ON EXIT: RA, RB available, r1 points into interrupt
- *          stack.
- *
- * NOTES:
- *   - NEVER store stuff in a frame before
- *     reserving it (stwu r1) - otherwise
- *     higher-priority exception may overwrite.
- *   - algorithm should allow nesting of higher
- *     priority exceptions (HPE) (by disabling
- *     them while the stack is switched).
- */
-#if 1
-	.macro  SWITCH_STACK RA RB FLVR
-	mfspr   \RB, SPRG1
-	cmplw   cr0, r1, \RB
-	bgt     do_r1_reload_\FLVR
-	lwz     \RA, ppc_exc_intr_stack_size@sdarel(r13)
-	subf    \RB, \RA, \RB
-	cmplw   cr0, r1, \RB
-	bge     no_r1_reload_\FLVR
-do_r1_reload_\FLVR:
-	mfspr   r1, SPRG1
-no_r1_reload_\FLVR:
-	lwz     \RA, _ISR_Nest_level@sdarel(r13)
-	addi    \RA, \RA, 1
-	stw     \RA, _ISR_Nest_level@sdarel(r13)
-	.endm
-#else
-	.macro  SWITCH_STACK RA RB FLVR
-	/* disable interrupts */
-	lwz     \RA, ppc_exc_msr_irq_mask@sdarel(r13)
-	mfmsr   \RB
-	andc    \RA, \RB, \RA
-	mtmsr   \RA
-	/* increment nest level */
-	lwz     \RA, _ISR_Nest_level@sdarel(r13)
-	cmplwi  cr0, \RA, 0
-	bne     no_r1_reload_\FLVR
-	/* reload r1            */
-	mfspr   r1, SPRG1
-no_r1_reload_\FLVR:
-	addi    \RA, \RA, 1
-	stw     \RA, _ISR_Nest_level@sdarel(r13)
-	/* restore IRQ mask     */
-	mtmsr   \RB
-	.endm
-#endif
+#include "vectors.h"
+
+#define LT(cr) ((cr)*4+0)
+#define GT(cr) ((cr)*4+1)
+#define EQ(cr) ((cr)*4+2)
+
+/* Opcode of 'stw r1, off(r13)' */
+#define STW_R1_R13(off) ((((36<<10)|(r1<<5)|(r13))<<16) | ((off)&0xffff))
+
+#define FRAME_REGISTER r14
+#define VECTOR_REGISTER r4
+#define SCRATCH_REGISTER_0 r5
+#define SCRATCH_REGISTER_1 r6
+#define SCRATCH_REGISTER_2 r7
+
+#define FRAME_OFFSET( r) GPR14_OFFSET( r)
+#define VECTOR_OFFSET( r) GPR4_OFFSET( r)
+#define SCRATCH_REGISTER_0_OFFSET( r) GPR5_OFFSET( r)
+#define SCRATCH_REGISTER_1_OFFSET( r) GPR6_OFFSET( r)
+#define SCRATCH_REGISTER_2_OFFSET( r) GPR7_OFFSET( r)
+
+#define CR_TYPE 2
+#define CR_MSR 3
+#define CR_LOCK 4
 
 	/*
…
 
 /*
- **********************************************************************
+ *****************************************************************************
  * MACRO: PPC_EXC_MIN_PROLOG_ASYNC
- **********************************************************************
- * USES:    r3
- * ON EXIT: vector in r3
- *
- * NOTES:   r3 saved in special variable 'ppc_exc_gpr3_\_PRI'
+ *****************************************************************************
+ * USES:    VECTOR_REGISTER
+ * ON EXIT: Vector in VECTOR_REGISTER
+ *
+ * NOTES:   VECTOR_REGISTER saved in special variable
+ *          'ppc_exc_vector_register_\_PRI'.
  *
  */
 	.macro  PPC_EXC_MIN_PROLOG_ASYNC _NAME _VEC _PRI _FLVR
+
 	.global ppc_exc_min_prolog_async_\_NAME
 ppc_exc_min_prolog_async_\_NAME:
-	/* Atomically write lock variable in 1st instruction with non-zero value
-	 * (r1 is always nonzero; r13 could also be used)
+	/* Atomically write lock variable in 1st instruction with non-zero
+	 * value (r1 is always nonzero; r13 could also be used)
 	 *
 	 * NOTE: raising an exception and executing this first instruction
-	 *       of the exception handler is apparently NOT atomic, i.e.,
-	 *       a low-priority IRQ could set the PC to this location and
-	 *       a critical IRQ could intervene just at this point.
+	 *       of the exception handler is apparently NOT atomic, i.e., a
+	 *       low-priority IRQ could set the PC to this location and a
+	 *       critical IRQ could intervene just at this point.
 	 *
 	 *       We check against this pathological case by checking the
…
 	 *             is not used anywhere else (probably a safe assumption).
 	 */
-	stw     r1, ppc_exc_lock_\_PRI@sdarel(r13)
-	/* We have no stack frame yet; store r3 in special area;
+	stw     r1, ppc_exc_lock_\_PRI@sdarel(r13)
+	/*      We have no stack frame yet; store VECTOR_REGISTER in special area;
 	 * a higher-priority (critical) interrupt uses a different area
 	 * (hence the different prologue snippets) (\PRI)
 	 */
-	stw     r3, ppc_exc_gpr3_\_PRI@sdarel(r13)
-	/* Load vector.
-	 */
-	li      r3, ( \_VEC | 0xffff8000 )
-	/* Branch (must be within 32MB)
-	 */
-	ba      wrap_\_FLVR
-	.endm
-
-/*
- **********************************************************************
+	stw     VECTOR_REGISTER, ppc_exc_vector_register_\_PRI@sdarel(r13)
+	/*      Load vector.
+	 */
+	li      VECTOR_REGISTER, ( \_VEC | 0xffff8000 )
+	/*      Branch (must be within 32MB)
+	 */
+	ba      wrap_\_FLVR
+
+	.endm
+
+/*
+ *****************************************************************************
  * MACRO: PPC_EXC_MIN_PROLOG_SYNC
- **********************************************************************
- * USES:    r3
- * ON EXIT: vector in r3
- *
- * NOTES:   exception stack frame pushed; r3 saved in frame
- *
- */
-	.macro PPC_EXC_MIN_PROLOG_SYNC _NAME _VEC _PRI _FLVR
-	.global ppc_exc_min_prolog_sync_\_NAME
+ *****************************************************************************
+ * USES:    VECTOR_REGISTER
+ * ON EXIT: vector in VECTOR_REGISTER
+ *
+ * NOTES:   exception stack frame pushed; VECTOR_REGISTER saved in frame
+ *
+ */
+	.macro  PPC_EXC_MIN_PROLOG_SYNC _NAME _VEC _PRI _FLVR
+
+	.global ppc_exc_min_prolog_sync_\_NAME
 ppc_exc_min_prolog_sync_\_NAME:
 	stwu    r1, -EXCEPTION_FRAME_END(r1)
-	stw     r3, GPR3_OFFSET(r1)
-	li      r3, \_VEC
-	ba      wrap_nopush_\_FLVR
+	stw     VECTOR_REGISTER, VECTOR_OFFSET(r1)
+	li      VECTOR_REGISTER, \_VEC
+	ba      wrap_nopush_\_FLVR
+
 	.endm
 
 /*
- **********************************************************************
+ *****************************************************************************
  * MACRO: TEST_1ST_OPCODE_crit
- **********************************************************************
- *
- * USES:    REG, cr4
- * ON EXIT: REG available (contains *pc - STW_R1_R13(0)), return value in cr4
- *
- * test opcode interrupted by critical (asynchronous) exception;
- * set cr4 if
- *
- *   *SRR0 == 'stw r1, ppc_exc_std_lock@sdarel(r13)'
- *
- */
-	.macro TEST_1ST_OPCODE_crit _REG _SRR0
-	mf\_SRR0 \_REG
-	lwz     \_REG, 0(\_REG)
-	/* opcode now in REG */
-
-	/* subtract upper 16bits of 'stw r1, 0(r13)' instruction */
+ *****************************************************************************
+ *
+ * USES:    REG, CR_LOCK
+ * ON EXIT: REG available (contains *pc - STW_R1_R13(0)),
+ *          return value in CR_LOCK.
+ *
+ * test opcode interrupted by critical (asynchronous) exception; set CR_LOCK if
+ *
+ *   *SRR0 == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
+ *
+ */
+	.macro  TEST_1ST_OPCODE_crit _REG
+
+	lwz     \_REG, SRR0_FRAME_OFFSET(FRAME_REGISTER)
+	lwz     \_REG, 0(\_REG)
+	/*      opcode now in REG */
+
+	/*      subtract upper 16bits of 'stw r1, 0(r13)' instruction */
 	subis   \_REG, \_REG, STW_R1_R13(0)@h
 	/*
…
 	 * address offset then we have a match...
 	 */
-	cmpli   cr4, \_REG, ppc_exc_lock_std@sdarel
-	.endm
-
-/*
- **********************************************************************
- * MACRO: TEST_1ST_OPCODE_mchk
- **********************************************************************
- * USES:    REG, cr0, cr4
- * ON EXIT: REG, cr0 available, return value in cr4
- *
- * test opcode interrupted by (asynchronous) machine-check exception;
- * set cr4 if
- *
- *   *SRR0 == 'stw r1, ppc_exc_std_lock@sdarel(r13)'
- *
- * OR
- *
- *   *SRR0 == 'stw r1, ppc_exc_crit_lock@sdarel(r13)'
- *
- */
-	.macro TEST_1ST_OPCODE_mchk _REG _SRR0
-	TEST_1ST_OPCODE_crit _REG=\_REG _SRR0=\_SRR0
-	cmpli    cr0, \_REG, ppc_exc_lock_crit@sdarel
-	/* cr4 set if 1st opcode matches writing either lock */
-	cror     EQ(cr4), EQ(cr4), EQ(cr0)
-	.endm
-
-/*
- **********************************************************************
+	cmpli   CR_LOCK, \_REG, ppc_exc_lock_std@sdarel
+
+	.endm
+
+/*
+ *****************************************************************************
  * MACRO: TEST_LOCK_std
- **********************************************************************
- *
- * USES:    cr4
- * ON EXIT: cr4 is set (indicates no lower-priority locks are engaged)
- *
- */
-	.macro TEST_LOCK_std _SRR0 _FLVR
-	/* 'std' is lowest level, i.e., can not be locked -> EQ(cr4) = 1 */
-	creqv EQ(cr4), EQ(cr4), EQ(cr4)
-	.endm
-
-/*
- **********************************************************************
+ *****************************************************************************
+ *
+ * USES:    CR_LOCK
+ * ON EXIT: CR_LOCK is set (indicates no lower-priority locks are engaged)
+ *
+ */
+	.macro  TEST_LOCK_std _FLVR
+	/* 'std' is lowest level, i.e., can not be locked -> EQ(CR_LOCK) = 1 */
+	creqv   EQ(CR_LOCK), EQ(CR_LOCK), EQ(CR_LOCK)
+	.endm
+
+/*
+ ******************************************************************************
  * MACRO: TEST_LOCK_crit
- **********************************************************************
- *
- * USES:    cr4, cr0, r4, r5
- * ON EXIT: cr0, r4, r5 available, returns result in cr4
+ ******************************************************************************
+ *
+ * USES:    CR_LOCK, cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
+ * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available,
+ *          returns result in CR_LOCK.
  *
  * critical-exception wrapper has to check 'std' lock:
  *
- * Return cr4 = (   ppc_std_lock == 0
- *               && * _SRR0 != <write std lock instruction>
- *               && ppc_exc_crit_always_enabled == 0 )
- *
- */
-	.macro TEST_LOCK_crit _SRR0 _FLVR
-	/* Are critical exceptions always enabled ? */
-	lwz     r4, ppc_exc_crit_always_enabled@sdarel(r13)
-	cmpwi   cr4, r4, 0
-	bne     cr4, TEST_LOCK_crit_done_\_FLVR
-
-	/* STD interrupt could have been interrupted before
-	 * executing the 1st instruction which sets the lock;
-	 * check this case by looking at the opcode present
-	 * at the interrupted PC location.
-	 */
-	TEST_1ST_OPCODE_crit _REG=r4 _SRR0=\_SRR0
-	/*
-	 * At this point cr4 is set if
+ * Return CR_LOCK = (   ppc_lock_std == 0
+ *               && * SRR0 != <write std lock instruction> )
+ *
+ */
+	.macro  TEST_LOCK_crit _FLVR
+
+	/* STD interrupt could have been interrupted before executing the 1st
+	 * instruction which sets the lock; check this case by looking at the
+	 * opcode present at the interrupted PC location.
+	 */
+	TEST_1ST_OPCODE_crit    _REG=SCRATCH_REGISTER_0
+	/*
+	 * At this point CR_LOCK is set if
 	 *
 	 *   *(PC) == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
…
     
    313210       
    314211        /* check lock */
    315         lwz r5, ppc_exc_lock_std@sdarel(r13)
    316         cmpli   cr0, r5, 0
    317         /*
    318          *
    319          * cr4 = (   *pc != <write std lock instruction>
     212        lwz     SCRATCH_REGISTER_1, ppc_exc_lock_std@sdarel(r13)
     213        cmpli   cr0, SCRATCH_REGISTER_1, 0
     214        /*
     215         *
     216         * CR_LOCK = (   *pc != <write std lock instruction>
    320217         *        && ppc_exc_lock_std == 0 )
    321218         */
    322         crandc  EQ(cr4), EQ(cr0), EQ(cr4)
    323 TEST_LOCK_crit_done_\_FLVR:
    324         .endm
    325 
    326 #if 0
    327 /*
    328  **********************************************************************
     219        crandc  EQ(CR_LOCK), EQ(cr0), EQ(CR_LOCK)
     220
     221        .endm
     222
     223/*
     224 ******************************************************************************
    329225 * MACRO: TEST_LOCK_mchk
    330  **********************************************************************
    331  *
    332  * USES:    cr4, cr0, r4, r5
    333  * ON EXIT: cr0, r4, r5 available, returns result in cr4
    334  *
    335  * machine-check wrapper has to check 'std' and 'crit' locks, i.e.,
    336  *
    337  * Return cr4 = (   * _SRR0 != <write std  lock instruction>
    338  *               && * _SRR0 != <write crit lock instruction> )
    339  *               && ppc_std_lock  == 0
    340  *               && ppc_crit_lock == 0 )
    341  */
    342         .macro TEST_LOCK_mchk _SRR0 _FLVR
    343         TEST_1ST_OPCODE_mchk _REG=r4 _SRR0=\_SRR0
    344         /* cr4 set if 1st opcode matches writing either lock */
    345 
    346         /* proceed checking the locks */
    347         lwz r5, ppc_exc_lock_std@sdarel(r13)
    348         lwz r4, ppc_exc_lock_crit@sdarel(r13)
    349         /* set cr0 if neither lock is set */
    350         or.     r4, r4, r5
    351         /* set cr4 if
    352          *     cr0 is set   (neither lock set)
    353          * AND cr4 is clear (interrupted opcode doesn't match writing any lock)
    354          */
    355         crandc  EQ(cr4), EQ(cr0), EQ(cr4)
    356         .endm
    357 #else
    358 /*
    359  **********************************************************************
    360  * MACRO: TEST_LOCK_mchk
    361  **********************************************************************
    362  *
    363  * USES:    cr4
    364  * ON EXIT: cr4 is cleared.
    365  *
    366  * We never want to disable machine-check exceptions to avoid
    367  * a checkstop. This means that we cannot use enabling/disabling
    368  * this type of exception for protection of critical OS data structures.
    369  * Therefore, calling OS primitives from a machine-check handler
    370  * is ILLEGAL. Since machine-checks can happen anytime it is not
    371  * legal to perform a context switch (since the exception could
    372  * hit a IRQ protected section of code).
    373  * We simply let this test return 0 so that ppc_exc_wrapup is
    374  * never called after handling a machine-check.
    375  */
    376         .macro TEST_LOCK_mchk _SRR0 _FLVR
    377         crxor   EQ(cr4), EQ(cr4), EQ(cr4)
    378         .endm
    379 #endif
    380 
    381 
    382 /*
    383  **********************************************************************
     226 ******************************************************************************
     227 *
     228 * USES:    CR_LOCK
     229 * ON EXIT: CR_LOCK is cleared.
     230 *
     231 * We never want to disable machine-check exceptions to avoid a checkstop. This
     232 * means that we cannot use enabling/disabling this type of exception for
     233 * protection of critical OS data structures.  Therefore, calling OS primitives
     234 * from a machine-check handler is ILLEGAL. Since machine-checks can happen
     235 * anytime it is not legal to perform a context switch (since the exception
     236 * could hit a IRQ protected section of code).  We simply let this test return
     237 * 0 so that ppc_exc_wrapup is never called after handling a machine-check.
     238 */
     239        .macro  TEST_LOCK_mchk _SRR0 _FLVR
     240
     241        crxor   EQ(CR_LOCK), EQ(CR_LOCK), EQ(CR_LOCK)
     242
     243        .endm
     244
     245/*
     246 ******************************************************************************
     247 * MACRO: RECOVER_CHECK_\PRI
     248 ******************************************************************************
     249 *
     250 * USES:    cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
     251 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available
     252 *
     253 * Checks if the exception is recoverable for exceptions which need such a
     254 * test.
     255 */
     256
     257/* Standard*/
     258        .macro  RECOVER_CHECK_std _FLVR
     259
     260        /* Check if exception is recoverable */
     261        lwz     SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
     262        lwz     SCRATCH_REGISTER_1, ppc_exc_msr_bits@sdarel(r13)
     263        xor     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
     264        andi.   SCRATCH_REGISTER_0, SCRATCH_REGISTER_1, MSR_RI
     265
     266recover_check_twiddle_std_\_FLVR:
     267
     268        /* Not recoverable? */
     269        bne     recover_check_twiddle_std_\_FLVR
     270
     271        .endm
     272
     273/* Critical */
     274        .macro  RECOVER_CHECK_crit _FLVR
     275
     276        /* Nothing to do */
     277
     278        .endm
     279
     280/* Machine check */
     281        .macro  RECOVER_CHECK_mchk _FLVR
     282
     283        /* Check if exception is recoverable */
     284        lwz     SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
     285        lwz     SCRATCH_REGISTER_1, ppc_exc_msr_bits@sdarel(r13)
     286        xor     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
     287        andi.   SCRATCH_REGISTER_0, SCRATCH_REGISTER_1, MSR_RI
     288
     289recover_check_twiddle_mchk_\_FLVR:
     290
     291        /* Not recoverable? */
     292        bne     recover_check_twiddle_mchk_\_FLVR
     293
     294        .endm
     295
     296/*
     297 ******************************************************************************
    384298 * MACRO: WRAP
    385  **********************************************************************
    386  *
    387  * Minimal prologue snippets jump into WRAP
    388  * which prepares calling code common to all
    389  * flavors of exceptions.
    390  * We must have this macro instantiated for
    391  * each possible flavor of exception so that
    392  * we use the proper lock variable, SRR register pair and
    393  * RFI instruction.
    394  *
     299 ******************************************************************************
     300 *
     301 * Minimal prologue snippets jump into WRAP which calls the high level
     302 * exception handler.  We must have this macro  instantiated for each possible
     303 * flavor of exception so that we use the proper lock variable, SRR register
     304 * pair and RFI instruction.
     305 *
     306 * We have two types of exceptions: synchronous and asynchronous (= interrupt
     307 * like).  The type is encoded in the vector register (= VECTOR_REGISTER).  For
     308 * interrupt like exceptions the MSB in the vector register is set.  The
     309 * exception type is kept in the comparison register CR_TYPE.  Normal
     310 * exceptions (MSB is clear) use the task stack and a context switch may happen
     311 * at any time.  The interrupt like exceptions disable thread dispatching and
     312 * switch to the interrupt stack (base address is in SPRG1).
     313 *
     314 *                                      +
     315 *                                      |
     316 *                                      | Minimal prologue
     317 *                                      |
     318 *                                      +
     319 *                                      |
     320 *                                      | o Setup frame pointer
     321 *                                      | o Save basic registers
     322 *                                      | o Determine exception type:
     323 *                                      |   synchronous or asynchronous
     324 *                                      |
     325 *                                +-----+
     326 * Synchronous exceptions:        |     | Asynchronous exceptions:
     327 *                                |     |
     328 * Save non-volatile registers    |     | o Increment thread dispatch
     329 *                                |     |   disable level
     330 *                                |     | o Increment ISR nest level
     331 *                                |     | o Clear lock
     332 *                                |     | o Switch stack if necessary
     333 *                                |     |
     334 *                                +---->+
     335 *                                      |
     336 *                                      | o Save volatile registers
     337 *                                      | o Change MSR if necessary
     338 *                                      | o Call high level handler
     339 *                                      | o Call global handler if necessary
     340 *                                      | o Check if exception is recoverable
     341 *                                      |
     342 *                                +-----+
     343 * Synchronous exceptions:        |     | Asynchronous exceptions:
     344 *                                |     |
     345 * Restore non-volatile registers |     | o Decrement ISR nest level
     346 *                                |     | o Switch stack
     347 *                                |     | o Decrement thread dispatch
     348 *                                |     |   disable level
     349 *                                |     | o Test lock
     350 *                                |     | o May do a context switch
     351 *                                |     |
     352 *                                +---->+
     353 *                                      |
     354 *                                      | o Restore MSR if necessary
     355 *                                      | o Restore volatile registers
     356 *                                      | o Restore frame pointer
     357 *                                      | o Return
     358 *                                      |
     359 *                                      +
    395360 */
    396361        .macro  WRAP _FLVR _PRI _SRR0 _SRR1 _RFI
     362
    397363wrap_\_FLVR:
    398         stwu    r1,  -EXCEPTION_FRAME_END(r1)
     364
     365        /* Push exception frame */
     366        stwu    r1, -EXCEPTION_FRAME_END(r1)
     367
    399368wrap_nopush_\_FLVR:
    400         stw             r14, GPR14_OFFSET(r1)
    401 wrap_no_save_r14_\_FLVR:
    402 
    403         /* Save r4 r5 and CR; we want CR soon */
    404         mfcr    r14
    405         stw             r4,  GPR4_OFFSET(r1)
    406         stw             r5,  GPR5_OFFSET(r1)
    407         stw             r14, EXC_CR_OFFSET(r1)
    408 
    409         /* Check if this is an 'interrupt-type' exception
    410          * (MSB vector is set).
    411          * 'interrupt-type' exceptions disable thread dispatching
    412          * and switch to a private stack.
    413          * The type of exception is kept in (non-volatile) cr2
    414          *  < 0 -> interrupt-type
    415          *  > 0 -> 'normal' exception; always on task stack,
    416          *         may switch context at any time.
    417          */
    418         cmpwi   cr2, r3, 0
    419 
    420         /*
    421          * Save frame address in r14
    422          */
    423         mr      r14, r1
    424 
    425         bge     cr2, no_thread_dispatch_disable_\_FLVR
    426 
    427         /* first thing we need to
    428          * increment the thread-dispatch disable level
    429          * in case a higher priority exception occurs
    430          * we don't want it to run the scheduler.
    431          * (It is safe to increment this w/o disabling
    432          * higher priority interrupts since those will
    433          * see that we wrote the lock anyways).
    434          */
    435         lwz             r5,  _Thread_Dispatch_disable_level@sdarel(r13)
    436         addi    r5,  r5, 1
    437         stw             r5,  _Thread_Dispatch_disable_level@sdarel(r13)
    438 
    439         /* clear lock; no higher-priority interrupt occurring after
    440          * this point can cause a context switch.
    441          */
    442         li              r5,  0
    443         stw             r5,  ppc_exc_lock_\_PRI@sdarel(r13)
    444 
    445         /* test lower-priority locks; result in (non-volatile) cr4 */
    446         TEST_LOCK_\_PRI _SRR0=\_SRR0 _FLVR=\_FLVR
    447 
    448         /* Peform stack switch if necessary */
    449         SWITCH_STACK RA=r4 RB=r5 FLVR=\_FLVR
    450 
    451         /* save r3, in exception frame */
    452         lwz             r5,  ppc_exc_gpr3_\_PRI@sdarel(r13)
    453         stw             r5,  GPR3_OFFSET(r14)
    454 
    455 no_thread_dispatch_disable_\_FLVR:
    456 
    457         /* save lr into exception frame */
    458         mflr    r4
    459         stw     r4,  EXC_LR_OFFSET(r14)
    460 
    461         /* we now have r4,r5,lr,cr available;
    462          * r3  still holds the vector,
    463          * r14 a pointer to the exception frame (always on
    464          *     task stack)
    465          * r1  is the stack pointer, either on the task stack
    466          *     or on the IRQ stack
    467          */
    468 
    469         /* retrieve SRR0/SRR1 */
    470         mf\_SRR0        r4
    471         mf\_SRR1        r5
    472 
    473         /*
    474          * branch to common routine;
    475          *
    476          * r1, r3, r4, r5, cr, lr and r14 are saved on the
    477          * stack at this point.
    478          */
    479         bl              wrap_common
    480 
    481         /*
    482          * restore SRRs, r4, r5, r1 (stack pointer) and lr;
    483          * wrap_common restores r3, r14 and cr for us.
    484          *
    485          * NOTE: we restore r1 from the frame rather than
    486          * just popping (adding to current r1) since the
    487          * exception handler might have done strange things
    488          * (e.g., a debugger moving and relocating the stack).
    489          */
    490         mt\_SRR0        r4
    491         mt\_SRR1        r5
    492         /* restore lr */
    493         lwz             r5,  EXC_LR_OFFSET(r1)
    494         lwz             r4,  GPR4_OFFSET(r1)
    495         mtlr    r5
    496         lwz             r5,  GPR5_OFFSET(r1)
    497         lwz             r1,  EXC_MIN_GPR1(r1)
     369
     370        /* Save frame register */
     371        stw     FRAME_REGISTER, FRAME_OFFSET(r1)
     372
     373wrap_no_save_frame_register_\_FLVR:
     374
     375        /*
     376         * We save at first only some scratch registers
     377         * and the CR.  We use a non-volatile register
     378         * for the exception frame pointer (= FRAME_REGISTER).
     379         */
     380
     381        /* Move frame address in non-volatile FRAME_REGISTER */
     382        mr      FRAME_REGISTER, r1
     383
     384        /* Save scratch registers */
     385        stw     SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(FRAME_REGISTER)
     386        stw     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(FRAME_REGISTER)
     387        stw     SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(FRAME_REGISTER)
     388
     389        /* Save CR */
     390        mfcr    SCRATCH_REGISTER_0
     391        stw     SCRATCH_REGISTER_0, EXC_CR_OFFSET(FRAME_REGISTER)
     392
     393        /* Check exception type and remember it in non-volatile CR_TYPE */
     394        cmpwi   CR_TYPE, VECTOR_REGISTER, 0
     395
     396        /*
     397         * Depending on the exception type we do now save the non-volatile
     398         * registers or disable thread dispatching and switch to the ISR stack.
     399         */
     400
     401        /* Branch for synchronous exceptions */
     402        bge     CR_TYPE, wrap_save_non_volatile_regs_\_FLVR
     403
     404        /*
     405         * Increment the thread dispatch disable level in case a higher
     406         * priority exception occurs we don't want it to run the scheduler.  It
     407         * is safe to increment this without disabling higher priority
     408         * exceptions since those will see that we wrote the lock anyways.
     409         */
     410
     411        /* Increment ISR nest level and thread dispatch disable level */
     412        lwz     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
     413        lwz     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)
     414        addi    SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, 1
     415        addi    SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, 1
     416        stw     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
     417        stw     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)
     418
     419        /*
     420         * No higher-priority exception occurring after this point
     421         * can cause a context switch.
     422         */
     423
     424        /* Clear lock */
     425        li      SCRATCH_REGISTER_0, 0
     426        stw     SCRATCH_REGISTER_0, ppc_exc_lock_\_PRI@sdarel(r13)
     427
     428        /* Switch stack if necessary */
     429        mfspr   SCRATCH_REGISTER_0, SPRG1
     430        cmpw    SCRATCH_REGISTER_0, r1
     431        blt     wrap_stack_switch_\_FLVR
     432        mfspr   SCRATCH_REGISTER_1, SPRG2
     433        cmpw    SCRATCH_REGISTER_1, r1
     434        blt     wrap_stack_switch_done_\_FLVR
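                /*
                 * In C terms the test above reads (a sketch; SPRG1 holds the
                 * initial interrupt stack pointer, SPRG2 the interrupt stack
                 * area start):
                 *
                 *   if (!(sprg2 < r1 && r1 <= sprg1)) {
                 *           r1 = sprg1;
                 *   }
                 *
                 * The stack is only switched if r1 is not already inside the
                 * interrupt stack area.
                 */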
     435
     436wrap_stack_switch_\_FLVR:
     437
     438        mr      r1, SCRATCH_REGISTER_0
     439
     440wrap_stack_switch_done_\_FLVR:
     441
     442        /*
     443         * Load the pristine VECTOR_REGISTER from a special location for
     444         * asynchronous exceptions.  The synchronous exceptions save the
     445         * VECTOR_REGISTER in their minimal prologue.
     446         */
     447        lwz     SCRATCH_REGISTER_2, ppc_exc_vector_register_\_PRI@sdarel(r13)
     448
     449        /* Save pristine vector register */
     450        stw     SCRATCH_REGISTER_2, VECTOR_OFFSET(FRAME_REGISTER)
     451
     452wrap_disable_thread_dispatching_done_\_FLVR:
     453
     454        /*
     455         * We now have SCRATCH_REGISTER_0, SCRATCH_REGISTER_1,
     456         * SCRATCH_REGISTER_2 and CR available.  VECTOR_REGISTER still holds
     457         * the vector (and exception type).  FRAME_REGISTER is a pointer to the
     458         * exception frame (always on the stack of the interrupted context).
     459         * r1 is the stack pointer, either on the task stack or on the ISR
     460         * stack.  CR_TYPE holds the exception type.
     461         */
     462
     463        /* Save SRR0 */
     464        mfspr   SCRATCH_REGISTER_0, \_SRR0
     465        stw     SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(FRAME_REGISTER)
     466
     467        /* Save SRR1 */
     468        mfspr   SCRATCH_REGISTER_0, \_SRR1
     469        stw     SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
     470
     471        /* Save CTR */
     472        mfctr   SCRATCH_REGISTER_0
     473        stw     SCRATCH_REGISTER_0, EXC_CTR_OFFSET(FRAME_REGISTER)
     474
     475        /* Save XER */
     476        mfxer   SCRATCH_REGISTER_0
     477        stw     SCRATCH_REGISTER_0, EXC_XER_OFFSET(FRAME_REGISTER)
     478
     479        /* Save LR */
     480        mflr    SCRATCH_REGISTER_0
     481        stw     SCRATCH_REGISTER_0, EXC_LR_OFFSET(FRAME_REGISTER)
     482
     483        /* Save volatile registers */
     484        stw     r0, GPR0_OFFSET(FRAME_REGISTER)
     485        stw     r3, GPR3_OFFSET(FRAME_REGISTER)
     486        stw     r8, GPR8_OFFSET(FRAME_REGISTER)
     487        stw     r9, GPR9_OFFSET(FRAME_REGISTER)
     488        stw     r10, GPR10_OFFSET(FRAME_REGISTER)
     489        stw     r11, GPR11_OFFSET(FRAME_REGISTER)
     490        stw     r12, GPR12_OFFSET(FRAME_REGISTER)
     491
     492        /* Save read-only small data area anchor (EABI) */
     493        stw     r2, GPR2_OFFSET(FRAME_REGISTER)
     494
     495        /* Save vector number and exception type */
     496        stw     VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER)
     497
     498        /* Load MSR bit mask */
     499        lwz     SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)
     500
     501        /*
     502         * Change the MSR if necessary (MMU, RI),
     503         * remember decision in non-volatile CR_MSR
     504         */
     505        cmpwi   CR_MSR, SCRATCH_REGISTER_0, 0
     506        bne     CR_MSR, wrap_change_msr_\_FLVR
     507
     508wrap_change_msr_done_\_FLVR:
     509
     510        /*
     511         * Call high level exception handler
     512         */
     513
     514        /*
     515         * Get the handler table index from the vector number.  We have to
      516         * discard the exception type.  Take only the least significant five
      517         * bits of the vector register (the table has LAST_VALID_EXC + 1
      518         * entries).  Multiply by four (= size of a function pointer).
     519         */
     520        rlwinm  SCRATCH_REGISTER_1, VECTOR_REGISTER, 2, 25, 29
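                /* In C terms (sketch): offset = (vector & 0x1f) << 2 */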
     521
     522        /* Load handler table address */
     523        LA      SCRATCH_REGISTER_0, ppc_exc_handler_table
     524
     525        /* Load handler address */
     526        lwzx    SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
     527
     528        /*
     529         * First parameter = exception frame pointer + FRAME_LINK_SPACE
     530         *
     531         * We add FRAME_LINK_SPACE to the frame pointer because the high level
     532         * handler expects a BSP_Exception_frame structure.
     533         */
     534        addi    r3, FRAME_REGISTER, FRAME_LINK_SPACE
     535
     536        /*
     537         * Second parameter = vector number (r4 is the VECTOR_REGISTER)
     538         *
     539         * Discard the exception type and store the vector number
     540         * in the vector register.  Take only the least significant
      541         * five bits (valid vectors are 0 .. LAST_VALID_EXC).
     542         */
     543        rlwinm  VECTOR_REGISTER, VECTOR_REGISTER, 0, 27, 31
     544
     545        /* Call handler */
     546        mtctr   SCRATCH_REGISTER_0
     547        bctrl
     548
     549        /* Check return value and call global handler if necessary */
     550        cmpwi   r3, 0
     551        bne     wrap_call_global_handler_\_FLVR
     552
     553wrap_handler_done_\_FLVR:
     554
     555        /* Check if exception is recoverable */
     556        RECOVER_CHECK_\_PRI     _FLVR=\_FLVR
     557
     558        /*
      559         * Depending on the exception type, we now either restore the non-volatile
      560         * registers, or enable thread dispatching and switch back from the ISR
     561         * stack.
     562         */
     563
     564        /* Branch for synchronous exceptions */
     565        bge     CR_TYPE, wrap_restore_non_volatile_regs_\_FLVR
     566
     567        /*
     568         * Switch back to original stack (FRAME_REGISTER == r1 if we are still
     569         * on the IRQ stack).
     570         */
     571        mr      r1, FRAME_REGISTER
     572
     573        /*
     574         * Check thread dispatch disable level AND lower priority locks (in
      575         * CR_LOCK): only if the thread dispatch disable level == 0 AND no lock
      576         * is set do we call ppc_exc_wrapup(), which may do a context switch.  If
      577         * the level is nonzero we can skip TEST_LOCK, because it has no side effects.
     578         */
     579
     580        /* Decrement ISR nest level and thread dispatch disable level */
     581        lwz     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
     582        lwz     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)
     583        subi    SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, 1
     584        subic.  SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, 1
     585        stw     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
     586        stw     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)
     587
     588        /* Branch to skip thread dispatching */
     589        bne     wrap_thread_dispatching_done_\_FLVR
     590
     591        /* Test lower-priority locks (result in non-volatile CR_LOCK) */
     592        TEST_LOCK_\_PRI _FLVR=\_FLVR
     593
     594        /* Branch to skip thread dispatching */
     595        bne     CR_LOCK, wrap_thread_dispatching_done_\_FLVR
     596
     597        /* Load address of ppc_exc_wrapup() */
     598        LA      SCRATCH_REGISTER_0, ppc_exc_wrapup
     599
     600        /* First parameter = exception frame pointer + FRAME_LINK_SPACE */
     601        addi    r3, FRAME_REGISTER, FRAME_LINK_SPACE
     602
     603        /* Call ppc_exc_wrapup() */
     604        mtctr   SCRATCH_REGISTER_0
     605        bctrl
     606
     607wrap_thread_dispatching_done_\_FLVR:
     608
     609        /* Restore MSR? */
     610        bne     CR_MSR, wrap_restore_msr_\_FLVR
     611
     612wrap_restore_msr_done_\_FLVR:
     613
     614        /*
     615         * At this point r1 is a valid exception frame pointer and
     616         * FRAME_REGISTER is no longer needed.
     617         */
     618
     619        /* Restore frame register */
     620        lwz     FRAME_REGISTER, FRAME_OFFSET(r1)
     621
     622        /* Restore XER and CTR */
     623        lwz     SCRATCH_REGISTER_0, EXC_XER_OFFSET(r1)
     624        lwz     SCRATCH_REGISTER_1, EXC_CTR_OFFSET(r1)
     625        mtxer   SCRATCH_REGISTER_0
     626        mtctr   SCRATCH_REGISTER_1
     627
     628        /* Restore CR and LR */
     629        lwz     SCRATCH_REGISTER_0, EXC_CR_OFFSET(r1)
     630        lwz     SCRATCH_REGISTER_1, EXC_LR_OFFSET(r1)
     631        mtcr    SCRATCH_REGISTER_0
     632        mtlr    SCRATCH_REGISTER_1
     633
     634        /* Restore volatile registers */
     635        lwz     r0, GPR0_OFFSET(r1)
     636        lwz     r3, GPR3_OFFSET(r1)
     637        lwz     r8, GPR8_OFFSET(r1)
     638        lwz     r9, GPR9_OFFSET(r1)
     639        lwz     r10, GPR10_OFFSET(r1)
     640        lwz     r11, GPR11_OFFSET(r1)
     641        lwz     r12, GPR12_OFFSET(r1)
     642
     643        /* Restore read-only small data area anchor (EABI) */
     644        lwz     r2, GPR2_OFFSET(r1)
     645
     646        /* Restore vector register */
     647        lwz     VECTOR_REGISTER, VECTOR_OFFSET(r1)
     648
     649        /*
     650         * Disable all asynchronous exceptions which can do a thread dispatch.
     651         * See README.
     652         */
     653        INTERRUPT_DISABLE       SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
     654
     655        /* Restore scratch registers and SRRs */
     656        lwz     SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(r1)
     657        lwz     SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
     658        lwz     SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(r1)
     659        mtspr   \_SRR0, SCRATCH_REGISTER_0
     660        lwz     SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(r1)
     661        mtspr   \_SRR1, SCRATCH_REGISTER_1
     662        lwz     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(r1)
     663
     664        /*
     665         * We restore r1 from the frame rather than just popping (adding to
     666         * current r1) since the exception handler might have done strange
     667         * things (e.g. a debugger moving and relocating the stack).
     668         */
     669        lwz     r1, 0(r1)
     670
     671        /* Return */
    498672        \_RFI
    499         .endm
     673
     674wrap_change_msr_\_FLVR:
     675
     676        mfmsr   SCRATCH_REGISTER_1
     677        or      SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
     678        mtmsr   SCRATCH_REGISTER_1
     679        msync
     680        isync
     681        b       wrap_change_msr_done_\_FLVR
     682
     683wrap_restore_msr_\_FLVR:
     684
     685        lwz     SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)
     686        mfmsr   SCRATCH_REGISTER_1
     687        andc    SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
     688        mtmsr   SCRATCH_REGISTER_1
     689        msync
     690        isync
     691        b       wrap_restore_msr_done_\_FLVR
     692
     693wrap_save_non_volatile_regs_\_FLVR:
     694
     695        /* Load pristine stack pointer */
     696        lwz     SCRATCH_REGISTER_1, 0(FRAME_REGISTER)
     697
     698        /* Save small data area anchor (SYSV) */
     699        stw     r13, GPR13_OFFSET(FRAME_REGISTER)
     700
     701        /* Save pristine stack pointer */
     702        stw     SCRATCH_REGISTER_1, GPR1_OFFSET(FRAME_REGISTER)
     703
     704        /* r14 is the FRAME_REGISTER and will be saved elsewhere */
     705
     706        /* Save non-volatile registers r15 .. r31 */
     707        stmw    r15, GPR15_OFFSET(FRAME_REGISTER)
     708
     709        b       wrap_disable_thread_dispatching_done_\_FLVR
     710
     711wrap_restore_non_volatile_regs_\_FLVR:
     712
     713        /* Load stack pointer */
     714        lwz     SCRATCH_REGISTER_0, GPR1_OFFSET(r1)
     715
     716        /* Restore small data area anchor (SYSV) */
     717        lwz     r13, GPR13_OFFSET(r1)
     718       
     719        /* r14 is the FRAME_REGISTER and will be restored elsewhere */
     720
     721        /* Restore non-volatile registers r15 .. r31 */
     722        lmw     r15, GPR15_OFFSET(r1)
     723
     724        /* Restore stack pointer */
     725        stw     SCRATCH_REGISTER_0, 0(r1)
     726
     727        b       wrap_thread_dispatching_done_\_FLVR
     728
     729wrap_call_global_handler_\_FLVR:
     730
     731        /* First parameter = exception frame pointer + FRAME_LINK_SPACE */
     732        addi    r3, FRAME_REGISTER, FRAME_LINK_SPACE
     733
     734        /* Load global handler address */
     735        LW      SCRATCH_REGISTER_0, globalExceptHdl
     736
     737        /* Check address */
     738        cmpwi   SCRATCH_REGISTER_0, 0
     739        beq     wrap_handler_done_\_FLVR
     740
     741        /* Call global handler */
     742        mtctr   SCRATCH_REGISTER_0
     743        bctrl
     744
     745        b       wrap_handler_done_\_FLVR
     746
     747        .endm
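
The handler dispatch in the WRAP macro above reduces to the following C sketch (names are taken from this changeset; the pointer arithmetic stands in for the addi on FRAME_REGISTER):

        /* Sketch only -- a C model of the wrapper's handler dispatch,
         * assuming the declarations from ppc_exc_bspsupp.h and vectors.h. */
        unsigned int index = vector_register & LAST_VALID_EXC;
        BSP_Exception_frame *f =
                (BSP_Exception_frame *) ((char *) frame + FRAME_LINK_SPACE);

        if (ppc_exc_handler_table[index](f, index) != 0) {
                /* Not handled: the wrapper branches to the global handler
                 * (globalExceptHdl) next. */
        }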
  • c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_bspsupp.h

    ra86f3aac r25a92bc1  
    1313#ifndef PPC_EXC_SHARED_H
    1414#define PPC_EXC_SHARED_H
     15
     16#include <stdint.h>
     17
     18#include "vectors.h"
    1519
    1620#ifdef __cplusplus
     
    3438 * Other return values are reserved.
    3539 */
    36 
    3740typedef int (*ppc_exc_handler_t)(BSP_Exception_frame *f, unsigned int vector);
    3841
     
    4649 */
    4750extern uint32_t ppc_exc_msr_bits;
    48 
    49 /*
    50  * Set of MSR bits required to disable all
    51  * asynchronous exceptions (depends on CPU type;
    52  * must be set during initialization).
    53  * Interrupt are disabled by writing the
    54  * one's complement of this mask to msr:
    55  *  msr &= ~ppc_exc_msr_irq_mask;
    56  */
    57 extern uint32_t ppc_exc_msr_irq_mask;
    58 
    59 /*
    60  * Cache size of the interrupt stack in a SDA variable
    61  */
    62 extern uint32_t ppc_exc_intr_stack_size;
    63 
    64 /*
    65  * This variable defines the semantics of asynchronous
    66  * critical exceptions ("critical interrupts")
    67  * on BookE-style CPUs.
    68  *
    69  * There are the following ways of using these interrupts
    70  *
    71  *  1) permanently disabled; no support
    72  *  2) permanently enabled; handlers for critical interrupts
    73  *     MUST NOT use any RTEMS primitives at all. They cannot
    74  *     directly e.g., release a semaphore.
    75  *  3) enabled/disabled by the OS during critical sections.
    76  *     In this scenario critical interrupts are not much
    77  *     different from 'normal' interrupts but handlers may
    78  *     safely use RTEMS primitives (i.e., the subset which
    79  *     is OK to use from ISR context).
    80  *
    81  * The BSP (or application) may initialize this
    82  * variable PRIOR to calling 'initialize_exceptions'
    83  * to any of the following values:
    84  *
    85  * NOTE: so far, OS_SUPPORT is not supported by the cpukit
    86  *       yet since the IRQ/enabling-disabling primitives
    87  *       do not mask MSR_CE yet.
    88  */
    89 #define PPC_EXC_CRIT_NO_OS_SUPPORT       1
    90 #define PPC_EXC_CRIT_OS_SUPPORT      0
    91 #define PPC_EXC_CRIT_DISABLED      (-1)
    92 
    93 extern int32_t ppc_exc_crit_always_enabled;
    9451
    9552/* (See README under CAVEATS). During initialization
     
    170127extern void ppc_exc_min_prolog_auto();
    171128
     129extern void ppc_exc_min_prolog_auto_packed();
     130
    172131
    173132/* CPU support may store the address of a function here
  • c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_hdl.c

    ra86f3aac r25a92bc1  
    4040uint32_t ppc_exc_lock_mchk = 0;
    4141
    42 uint32_t ppc_exc_gpr3_std     = 0;
    43 uint32_t ppc_exc_gpr3_crit    = 0;
    44 uint32_t ppc_exc_gpr3_mchk    = 0;
    45 
    46 uint32_t ppc_exc_msr_irq_mask =  MSR_EE;
     42uint32_t ppc_exc_vector_register_std     = 0;
     43uint32_t ppc_exc_vector_register_crit    = 0;
     44uint32_t ppc_exc_vector_register_mchk    = 0;
    4745
    4846/* MSR bits to enable once critical status info is saved and the stack
     
    5452uint32_t ppc_exc_msr_bits     = MSR_IR | MSR_DR | MSR_RI;
    5553
    56 uint32_t ppc_exc_intr_stack_size = 0;
    57 
    58 int32_t ppc_exc_crit_always_enabled = PPC_EXC_CRIT_NO_OS_SUPPORT;
    59 
     54int ppc_exc_handler_default( BSP_Exception_frame *f, unsigned int vector)
     55{
     56        return 1;
     57}
    6058
    6159/* Table of C-handlers */
    62 static ppc_exc_handler_t ppc_exc_handlers[LAST_VALID_EXC + 1] = {0, };
     60ppc_exc_handler_t ppc_exc_handler_table [LAST_VALID_EXC + 1] = {
     61        [0 ... LAST_VALID_EXC] = ppc_exc_handler_default
     62};
    6363
    64 ppc_exc_handler_t
    65 ppc_exc_get_handler(unsigned vector)
     64ppc_exc_handler_t ppc_exc_get_handler( unsigned vector)
    6665{
    67         if ( vector > LAST_VALID_EXC )
     66        ppc_exc_handler_t handler = NULL;
     67        if (vector > LAST_VALID_EXC) {
    6868                return 0;
    69         return ppc_exc_handlers[vector];
     69        }
     70        if (ppc_exc_handler_table [vector] != ppc_exc_handler_default) {
     71                handler = ppc_exc_handler_table [vector];
     72        }
     73        return handler;
    7074}
    7175
    72 int
    73 ppc_exc_set_handler(unsigned vector, ppc_exc_handler_t hdl)
     76int ppc_exc_set_handler( unsigned vector, ppc_exc_handler_t handler)
    7477{
    75         if ( vector > LAST_VALID_EXC )
     78        if (vector > LAST_VALID_EXC) {
    7679                return -1;
    77         ppc_exc_handlers[vector] = hdl;
     80        }
     81        if (handler == NULL) {
     82                ppc_exc_handler_table [vector] = ppc_exc_handler_default;
     83        } else {
     84                ppc_exc_handler_table [vector] = handler;
     85        }
    7886        return 0;
    7987}
    8088
    81 /* This routine executes on the interrupt stack (if vect < 0) */
    82 int
    83 ppc_exc_C_wrapper(int vect, BSP_Exception_frame *f)
     89void
     90ppc_exc_wrapup( BSP_Exception_frame *f)
    8491{
    85 unsigned int i    = vect & 0x3f;
    86 int          rval = 1;
    87 
    88         if ( i <= LAST_VALID_EXC  && ppc_exc_handlers[i] ) {
    89                 rval = ppc_exc_handlers[i](f, i);
    90         }
    91 
    92         if ( rval ) {
    93                 /* not handled, so far ... */
    94                 if ( globalExceptHdl ) {
    95                         /*
    96                          * global handler must be prepared to
    97                          * deal with asynchronous exceptions!
    98                          */
    99                         globalExceptHdl(f);
    100                 }
    101                 rval = 0;
    102         }
    103 
    104         if ( (ppc_exc_msr_bits ^ f->EXC_SRR1) & MSR_RI ) {
    105                 printk("unrecoverable exception (RI was clear), spinning to death.\n");
    106                 while (1)
    107                         ;
    108         }
    109 
    110         return rval;
    111 }
    112 
    113 void
    114 ppc_exc_wrapup(int ll_rval, BSP_Exception_frame *f)
    115 {
    116         /* Check if we need to run the global handler now */
    117         if ( ll_rval ) {
    118                 /* We get here if ppc_exc_C_wrapper() returned nonzero.
    119                  * This could be useful if we need to do something
    120                  * with thread-dispatching enabled (at this point it is)
    121                  * after handling an asynchronous exception.
    122                  */
    123         }
    12492        /* dispatch_disable level is decremented from assembly code.  */
    12593        if ( _Context_Switch_necessary ) {
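
The statically initialized table means the wrapper never has to test for a NULL entry; installing and removing a handler stays symmetric. A hedged usage sketch (my_dec_handler is hypothetical and not part of this changeset):

        /* Hypothetical BSP fragment: install a decrementer handler and
         * later fall back to the default behaviour. */
        static int my_dec_handler(BSP_Exception_frame *f, unsigned int vector)
        {
                /* ... service the decrementer ... */
                return 0; /* 0 = handled; nonzero invokes the global handler */
        }

        void example(void)
        {
                ppc_exc_set_handler(ASM_BOOKE_DEC_VECTOR, my_dec_handler);
                /* Passing NULL restores ppc_exc_handler_default */
                ppc_exc_set_handler(ASM_BOOKE_DEC_VECTOR, NULL);
        }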
  • c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h

    ra86f3aac r25a92bc1  
    1515#ifndef LIBCPU_POWERPC_BSPSUPP_VECTORS_H
    1616#define LIBCPU_POWERPC_BSPSUPP_VECTORS_H
     17
    1718#include <libcpu/raw_exception.h>
    1819
     
    6263#define EXC_XER_OFFSET 156
    6364#define EXC_LR_OFFSET 160
     65
     66/* Exception stack frame -> BSP_Exception_frame */
     67#define FRAME_LINK_SPACE 8
     68
    6469/*
     6570 * maintain the EABI requested 8-byte alignment
     
    6772 * exception may need more registers to be processed...)
    6873 */
    69 #define    EXCEPTION_FRAME_END 176
     74#define EXCEPTION_FRAME_END 176
    7075
    7176#ifndef ASM
     77
     78#include <stdint.h>
    7279
    7380/* codemove is like memmove, but it also gets the cache line size
     
    8188extern void exception_nop_enable(const rtems_raw_except_connect_data* ptr);
    8289extern int  exception_always_enabled(const rtems_raw_except_connect_data* ptr);
    83 extern void initialize_exceptions();
     90
     91void ppc_exc_initialize(
     92  uint32_t interrupt_disable_mask,
     93  uint32_t interrupt_stack_start,
     94  uint32_t interrupt_stack_size
     95);
    8496
    8597typedef struct _BSP_Exception_frame {
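
The new ppc_exc_initialize() replaces initialize_exceptions(). A hedged bsp_start() fragment (the mask value and the stack symbol are assumptions, not part of this changeset):

        /* Sketch: how a BSP might call the new initialization routine.
         * IntrStack_start is a hypothetical linker symbol; MSR_EE is the
         * minimal interrupt disable mask on classic PPC. */
        extern char IntrStack_start [];

        void bsp_start_fragment(void)
        {
                ppc_exc_initialize(
                        MSR_EE,
                        (uint32_t) IntrStack_start,
                        rtems_configuration_get_interrupt_stack_size()
                );
        }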
  • c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors_init.c

    ra86f3aac r25a92bc1  
    4848#define STACK_CLAMP 50  /* in case we have a corrupted bottom */
    4949
     50SPR_RW(SPRG1)
     51SPR_RW(SPRG2)
    5052SPR_RO(LR)
    5153SPR_RO(DAR)
     
    251253}
    252254
    253 void ppc_exc_init(
     255void ppc_exc_table_init(
    254256        rtems_raw_except_connect_data    *exception_table,
    255257        int                               nEntries)
    256258{
    257 int i,v;
     259unsigned i,v;
    258260ppc_raw_exception_category cat;
    259261uintptr_t vaddr;
     
    271273        exception_config.defaultRawEntry.exceptIndex    = 0;
    272274        exception_config.defaultRawEntry.hdl.vector     = 0;
    273         /* Note that the 'auto' handler cannot be used for everything; in particular,
    274          * it assumes classic exceptions with a vector offset aligned on a 256-byte
    275          * boundary.
    276          */
    277         exception_config.defaultRawEntry.hdl.raw_hdl    = ppc_exc_min_prolog_auto;
     275
     276        if (ppc_cpu_has_ivpr_and_ivor()) {
     277                /* Use packed version with 16-byte boundaries for CPUs with IVPR and IVOR registers */
     278                exception_config.defaultRawEntry.hdl.raw_hdl = ppc_exc_min_prolog_auto_packed;
     279        } else {
     280                /* Note that the 'auto' handler cannot be used for everything; in particular,
     281                 * it assumes classic exceptions with a vector offset aligned on a 256-byte
     282                 * boundary.
     283                 */
     284                exception_config.defaultRawEntry.hdl.raw_hdl = ppc_exc_min_prolog_auto;
     285        }
    278286
    279287        /*
     
    307315                         * with a vector offset aligned on a 256-byte boundary.
    308316                         */
    309                         if ( PPC_EXC_CLASSIC == cat && 0 == ( vaddr & 0xff ) ) {
     317                        if (cat == PPC_EXC_CLASSIC && ((vaddr & 0xff) == 0 || (ppc_cpu_has_ivpr_and_ivor() && (vaddr & 0xf) == 0))) {
    310318                                exception_table[i].hdl.raw_hdl_size = exception_config.defaultRawEntry.hdl.raw_hdl_size;
    311319                                exception_table[i].hdl.raw_hdl      = exception_config.defaultRawEntry.hdl.raw_hdl;
     
    330338}
    331339
    332 void initialize_exceptions()
    333 {
    334 int i;
    335 int n = sizeof(exception_table)/sizeof(exception_table[0]);
     340
     341void ppc_exc_initialize(
     342        uint32_t interrupt_disable_mask,
     343        uint32_t interrupt_stack_start,
     344        uint32_t interrupt_stack_size
     345)
     346{
     347        int i;
     348        int n = sizeof(exception_table)/sizeof(exception_table[0]);
     349
     350        uint32_t interrupt_stack_end = 0;
     351        uint32_t interrupt_stack_pointer = 0;
     352        uint32_t *p = NULL;
     353
     354        /* Ensure proper interrupt stack alignment */
     355        interrupt_stack_start &= ~(CPU_STACK_ALIGNMENT - 1);
     356        interrupt_stack_size &= ~(CPU_STACK_ALIGNMENT - 1);
     357
     358        /* Interrupt stack end and pointer */
     359        interrupt_stack_end = interrupt_stack_start + interrupt_stack_size;
     360        interrupt_stack_pointer = interrupt_stack_end - PPC_MINIMUM_STACK_FRAME_SIZE;
     361
     362        /* Tag interrupt stack bottom */
     363        p = (uint32_t *) interrupt_stack_pointer;
     364        *p = 0;
     365
     366        /* Move interrupt stack values to special purpose registers */
     367        _write_SPRG1( interrupt_stack_pointer);
     368        _write_SPRG2( interrupt_stack_start);
     369
     370        /* Interrupt disable mask */
     371        ppc_interrupt_set_disable_mask( interrupt_disable_mask);
    336372
    337373        /* Use current MMU / RI settings when running C exception handlers */
    338374        ppc_exc_msr_bits = _read_MSR() & ( MSR_DR | MSR_IR | MSR_RI );
    339375
    340         /* Cache size of the interrupt stack in a SDA variable */
    341         ppc_exc_intr_stack_size = rtems_configuration_get_interrupt_stack_size();
    342 
    343         /* Copy into a SDA variable that is easy to access from
    344          * assembly code
    345          */
    346         if ( ppc_cpu_is_bookE() ) {
    347                 ppc_exc_msr_irq_mask = MSR_EE | MSR_CE | MSR_DE ;
    348                 switch (ppc_exc_crit_always_enabled) {
    349                         case PPC_EXC_CRIT_NO_OS_SUPPORT:
    350                                 _write_MSR(_read_MSR() | (MSR_CE | MSR_DE));
    351                         break;
    352 
    353                         case PPC_EXC_CRIT_OS_SUPPORT:
    354                                 printk("ppc_exc: PPC_EXC_CRIT_OS_SUPPORT not yet implemented\n");
    355                                 /* fall thru */
    356 
    357                         case PPC_EXC_CRIT_DISABLED:
    358                         default:
    359                                 ppc_exc_crit_always_enabled = PPC_EXC_CRIT_DISABLED;
    360                                 _write_MSR(_read_MSR() & ~(MSR_CE | MSR_DE));
    361                         break;
    362                 }
    363         } else {
    364                 ppc_exc_msr_irq_mask = MSR_EE ;
    365         }
    366 
    367376        for ( i=0; i<n; i++ )
    368377                exception_table[i].hdl.vector = i;
    369         ppc_exc_init(exception_table, n);
     378        ppc_exc_table_init(exception_table, n);
    370379
    371380        /* If we are on a classic PPC with MSR_DR enabled then
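
For illustration, assuming CPU_STACK_ALIGNMENT == 16 (a power of two), the alignment step above maps for example:

        /* start = 0x00400013 -> 0x00400010, size = 0x1005 -> 0x1000, hence
         * SPRG2 = 0x00400010 and
         * SPRG1 = 0x00401010 - PPC_MINIMUM_STACK_FRAME_SIZE. */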
  • c/src/lib/libcpu/powerpc/new-exceptions/cpu.c

    ra86f3aac r25a92bc1  
    4848)
    4949{
    50   {
    51     unsigned hasFixed = 0;
    52     /* assert that our BSP has fixed PR288 */
    53     __asm__ __volatile__ ("mfspr %0, %2":
    54                           "=r"(hasFixed):"0"(hasFixed),"i"(SPRG0));
    55     if ( PPC_BSP_HAS_FIXED_PR288 != hasFixed ) {
    56       BSP_panic("This BSP needs to fix PR#288");
    57     }
    58   }
     50  /* Do nothing */
    5951}
    6052
  • c/src/lib/libcpu/powerpc/new-exceptions/e500_raw_exc_init.c

    ra86f3aac r25a92bc1  
    22#include <libcpu/raw_exception.h>
    33
     4#define MTIVPR(prefix) asm volatile("mtivpr %0"::"r"(prefix));
    45#define MTIVOR(x, vec) asm volatile("mtivor"#x" %0"::"r"(vec));
    56
     
    89e500_setup_raw_exceptions()
    910{
    10 unsigned c;
     11        unsigned c;
    1112        if ( ! (c = ppc_cpu_is_bookE()) || PPC_BOOKE_405 == c )
    1213                return;
    13         asm volatile("mtivpr %0"::"r"(0));
     14
      15        /* Set interrupt vector prefix register */
     16        MTIVPR( ppc_exc_vector_base);
     17
    1418        /* setup vectors to be compatible with classic PPC */
    1519        MTIVOR(0,  ppc_get_vector_addr(ASM_BOOKE_CRIT_VECTOR)); /* Critical input not (yet) supported; use reset vector */
     
    3438        MTIVOR(35, ppc_get_vector_addr(ASM_60X_PERFMON_VECTOR));
    3539}
     40
     41void e200_setup_raw_exceptions()
     42{
     43        if (current_ppc_cpu != PPC_e200z6) {
     44                return;
     45        }
     46
      47        /* Interrupt vector prefix register */
     48        MTIVPR( ppc_exc_vector_base);
     49
      50        /* Interrupt vector offset register */
     51        MTIVOR( 0,  0); /* Critical input */
     52        MTIVOR( 1,  ppc_get_vector_addr( ASM_MACH_VECTOR));
     53        MTIVOR( 2,  ppc_get_vector_addr( ASM_PROT_VECTOR));
     54        MTIVOR( 3,  ppc_get_vector_addr( ASM_ISI_VECTOR));
     55        MTIVOR( 4,  ppc_get_vector_addr( ASM_EXT_VECTOR));
     56        MTIVOR( 5,  ppc_get_vector_addr( ASM_ALIGN_VECTOR));
     57        MTIVOR( 6,  ppc_get_vector_addr( ASM_PROG_VECTOR));
     58        MTIVOR( 7,  ppc_get_vector_addr( ASM_FLOAT_VECTOR));
     59        MTIVOR( 8,  ppc_get_vector_addr( ASM_SYS_VECTOR));
     60        MTIVOR( 9,  0); /* APU unavailable */
     61        MTIVOR( 10, ppc_get_vector_addr( ASM_BOOKE_DEC_VECTOR));
     62        MTIVOR( 11, ppc_get_vector_addr( ASM_BOOKE_FIT_VECTOR));
     63        MTIVOR( 12, ppc_get_vector_addr( ASM_BOOKE_WDOG_VECTOR));
     64        MTIVOR( 13, ppc_get_vector_addr( ASM_BOOKE_ITLBMISS_VECTOR));
     65        MTIVOR( 14, ppc_get_vector_addr( ASM_BOOKE_DTLBMISS_VECTOR));
     66        MTIVOR( 15, ppc_get_vector_addr( ASM_TRACE_VECTOR));
     67        MTIVOR( 32, ppc_get_vector_addr( ASM_E200_SPE_UNAVAILABLE_VECTOR));
     68        MTIVOR( 33, ppc_get_vector_addr( ASM_E200_SPE_DATA_VECTOR));
     69        MTIVOR( 34, ppc_get_vector_addr( ASM_E200_SPE_ROUND_VECTOR));
     70}
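
On these cores the hardware forms the exception entry point from IVPR and the per-exception IVOR register; roughly (a sketch, with the usual 16-byte aligned offsets assumed):

        /* effective_entry = (ivpr & 0xffff0000) | (ivor & 0x0000fff0) */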
  • c/src/lib/libcpu/powerpc/new-exceptions/raw_exception.c

    ra86f3aac r25a92bc1  
    5757uint32_t bsp_raw_vector_is_405_critical = 0;
    5858
     59uint32_t ppc_exc_vector_base = 0;
     60
    5961void* ppc_get_vector_addr(rtems_vector vector)
    6062{
     
    9597    break;
    9698  }
    97   if ( bsp_exceptions_in_RAM )
    98     return ((void*)  vaddr);
     99  if (bsp_exceptions_in_RAM) {
     100    if (ppc_cpu_has_ivpr_and_ivor()) {
     101      return ((void*) ((vaddr >> 4) + ppc_exc_vector_base));
     102    } else {
     103      return ((void*) (vaddr + ppc_exc_vector_base));
     104    }
     105  }
    99106
    100107  return ((void*)  (vaddr + 0xfff00000));
    101108}
    102109
    103 static cat_ini_t mpc_860_vector_categories[LAST_VALID_EXC + 1] = {
     110static const cat_ini_t mpc_860_vector_categories[LAST_VALID_EXC + 1] = {
    104111  [ ASM_RESET_VECTOR           ] = PPC_EXC_CLASSIC,
    105112  [ ASM_MACH_VECTOR            ] = PPC_EXC_CLASSIC,
     
    129136
    130137
    131 static cat_ini_t mpc_5xx_vector_categories[LAST_VALID_EXC + 1] = {
     138static const cat_ini_t mpc_5xx_vector_categories[LAST_VALID_EXC + 1] = {
    132139  [ ASM_RESET_VECTOR           ] = PPC_EXC_CLASSIC,
    133140  [ ASM_MACH_VECTOR            ] = PPC_EXC_CLASSIC,
     
    154161};
    155162
    156 static cat_ini_t ppc_405_vector_categories[LAST_VALID_EXC + 1] = {
     163static const cat_ini_t ppc_405_vector_categories[LAST_VALID_EXC + 1] = {
    157164  [ ASM_EXT_VECTOR             ] = PPC_EXC_CLASSIC | PPC_EXC_ASYNC,
    158165  [ ASM_BOOKE_DEC_VECTOR       ] = PPC_EXC_CLASSIC | PPC_EXC_ASYNC,
     
    198205}
    199206
    200 static cat_ini_t mpc_750_vector_categories[LAST_VALID_EXC + 1] = {
     207static const cat_ini_t mpc_750_vector_categories[LAST_VALID_EXC + 1] = {
    201208        PPC_BASIC_VECS,
    202209  [ ASM_60X_SYSMGMT_VECTOR ] = PPC_EXC_CLASSIC | PPC_EXC_ASYNC,
     
    205212};
    206213
    207 static cat_ini_t psim_vector_categories[LAST_VALID_EXC + 1] = {
     214static const cat_ini_t psim_vector_categories[LAST_VALID_EXC + 1] = {
    208215  [ ASM_RESET_VECTOR       ] = PPC_EXC_CLASSIC,
    209216  [ ASM_MACH_VECTOR        ] = PPC_EXC_CLASSIC,
     
    226233};
    227234
    228 static cat_ini_t mpc_603_vector_categories[LAST_VALID_EXC + 1] = {
     235static const cat_ini_t mpc_603_vector_categories[LAST_VALID_EXC + 1] = {
    229236        PPC_BASIC_VECS,
    230237  [ ASM_60X_PERFMON_VECTOR ] = PPC_EXC_INVALID,
     
    237244};
    238245
    239 static cat_ini_t mpc_604_vector_categories[LAST_VALID_EXC + 1] = {
     246static const cat_ini_t mpc_604_vector_categories[LAST_VALID_EXC + 1] = {
    240247        PPC_BASIC_VECS,
    241248  [ ASM_60X_PERFMON_VECTOR ] = PPC_EXC_CLASSIC,
     
    248255};
    249256
    250 static cat_ini_t e500_vector_categories[LAST_VALID_EXC + 1] = {
     257static const cat_ini_t e200_vector_categories [LAST_VALID_EXC + 1] = {
     258        [ASM_MACH_VECTOR]                 = PPC_EXC_BOOKE_CRITICAL | PPC_EXC_ASYNC,
     259        [ASM_PROT_VECTOR]                 = PPC_EXC_CLASSIC,
     260        [ASM_ISI_VECTOR]                  = PPC_EXC_CLASSIC,
     261        [ASM_EXT_VECTOR]                  = PPC_EXC_CLASSIC | PPC_EXC_ASYNC,
     262        [ASM_ALIGN_VECTOR]                = PPC_EXC_CLASSIC,
     263        [ASM_PROG_VECTOR]                 = PPC_EXC_CLASSIC,
     264        [ASM_FLOAT_VECTOR]                = PPC_EXC_CLASSIC,
     265        [ASM_SYS_VECTOR]                  = PPC_EXC_CLASSIC,
     266        [ASM_BOOKE_DEC_VECTOR]            = PPC_EXC_CLASSIC | PPC_EXC_ASYNC,
     267        [ASM_BOOKE_FIT_VECTOR]            = PPC_EXC_CLASSIC | PPC_EXC_ASYNC,
     268        [ASM_BOOKE_WDOG_VECTOR]           = PPC_EXC_BOOKE_CRITICAL,
     269        [ASM_BOOKE_ITLBMISS_VECTOR]       = PPC_EXC_CLASSIC,
     270        [ASM_BOOKE_DTLBMISS_VECTOR]       = PPC_EXC_CLASSIC,
     271
     272        /* FIXME: Depending on HDI0[DAPUEN] this is a critical or debug exception */
     273        [ASM_TRACE_VECTOR]                = PPC_EXC_CLASSIC | PPC_EXC_BOOKE_CRITICAL,
     274
     275        [ASM_E200_SPE_UNAVAILABLE_VECTOR] = PPC_EXC_CLASSIC,
     276        [ASM_E200_SPE_DATA_VECTOR]        = PPC_EXC_CLASSIC,
     277        [ASM_E200_SPE_ROUND_VECTOR]       = PPC_EXC_CLASSIC,
     278};
     279
     280static const cat_ini_t e300_vector_categories [LAST_VALID_EXC + 1] = {
     281        PPC_BASIC_VECS,
     282        [ASM_E300_CRIT_VECTOR]    = PPC_EXC_BOOKE_CRITICAL | PPC_EXC_ASYNC,
     283        [ASM_E300_PERFMON_VECTOR] = PPC_EXC_CLASSIC,
     284        [ASM_E300_IMISS_VECTOR]   = PPC_EXC_CLASSIC,
     285        [ASM_E300_DLMISS_VECTOR]  = PPC_EXC_CLASSIC,
     286        [ASM_E300_DSMISS_VECTOR]  = PPC_EXC_CLASSIC,
     287        [ASM_E300_ADDR_VECTOR]    = PPC_EXC_CLASSIC,
     288        [ASM_E300_SYSMGMT_VECTOR] = PPC_EXC_CLASSIC | PPC_EXC_ASYNC,
     289};
     290
     291static const cat_ini_t e500_vector_categories[LAST_VALID_EXC + 1] = {
    251292  [ ASM_MACH_VECTOR                 ] = PPC_EXC_E500_MACHCHK,
    252293
     
    308349        /* case PPC_8240: -- same value as 8260 */
    309350        case PPC_8245:
     351                        rval = mpc_603_vector_categories[vector];
     352            break;
    310353        case PPC_e300c1:
    311354        case PPC_e300c2:
    312355        case PPC_e300c3:
    313                         rval = mpc_603_vector_categories[vector];
     356                        rval = e300_vector_categories[vector];
    314357            break;
    315358        case PPC_PSIM:
    316359                        rval = psim_vector_categories[vector];
    317360            break;
    318                 case PPC_8540:
     361        case PPC_8540:
    319362                        rval = e500_vector_categories[vector];
    320                         break;
     363            break;
     364        case PPC_e200z6:
     365                        rval = e200_vector_categories[vector];
     366            break;
    321367        case PPC_5XX:
    322368                        rval = mpc_5xx_vector_categories[vector];
     
    455501    rtems_interrupt_disable(k);
    456502
    457         if ( (c = ppc_cpu_is_bookE()) && PPC_BOOKE_405 != c ) {
     503        /* FIXME special case selection method */
     504        if (current_ppc_cpu == PPC_e200z6) {
     505                e200_setup_raw_exceptions();
     506        } else if ( (c = ppc_cpu_is_bookE()) && PPC_BOOKE_405 != c ) {
    458507                e500_setup_raw_exceptions();
    459508        }
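
The packed branch of ppc_get_vector_addr() above compresses the classic 256-byte vector spacing to the 16-byte spacing of the packed prologues; a sketch:

        /* packed_addr = (classic_vaddr >> 4) + ppc_exc_vector_base;
         * e.g. the external input vector offset 0x0500 maps to base + 0x50. */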
  • c/src/lib/libcpu/powerpc/new-exceptions/raw_exception.h

    ra86f3aac r25a92bc1  
    4444#define ASM_FLOAT_VECTOR                             0x08
    4545#define ASM_DEC_VECTOR                               0x09
    46 #define ASM_60X_VEC_VECTOR                               0x0A
    4746#define ASM_SYS_VECTOR                               0x0C
    4847#define ASM_TRACE_VECTOR                             0x0D
     
    8180
    8281
     82#define ASM_60X_VEC_VECTOR                   0x0A
    8383#define ASM_60X_PERFMON_VECTOR               0x0F
    8484#define ASM_60X_IMISS_VECTOR                 0x10
     
    9090#define ASM_60X_ITM_VECTOR                   0x17
    9191
     92/* e200 */
     93#define ASM_E200_SPE_UNAVAILABLE_VECTOR      0x15
     94#define ASM_E200_SPE_DATA_VECTOR             0x16
     95#define ASM_E200_SPE_ROUND_VECTOR            0x17
     96
     97/* e300 */
     98#define ASM_E300_CRIT_VECTOR                 0x0A
     99#define ASM_E300_PERFMON_VECTOR              0x0F
     100#define ASM_E300_IMISS_VECTOR                ASM_60X_IMISS_VECTOR  /* Special case: Shadowed GPRs */
     101#define ASM_E300_DLMISS_VECTOR               ASM_60X_DLMISS_VECTOR /* Special case: Shadowed GPRs */
     102#define ASM_E300_DSMISS_VECTOR               ASM_60X_DSMISS_VECTOR /* Special case: Shadowed GPRs */
     103#define ASM_E300_ADDR_VECTOR                 0x13
     104#define ASM_E300_SYSMGMT_VECTOR              0x14
     105
     106/*
      107 * If you change this number, make sure to adjust the wrapper code in ppc_exc.S
      108 * and verify that ppc_exc_handler_table is still initialized correctly.
     109 */
    92110#define LAST_VALID_EXC                       0x1F
    93111
     
    228246extern boolean bsp_exceptions_in_RAM;
    229247
     248/**
     249 * @brief Vector base address for CPUs (for example e200 and e500) with IVPR
     250 * and IVOR registers.
     251 */
     252extern uint32_t ppc_exc_vector_base;
     253
    230254# endif /* ASM */
    231255
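
A category combines flags such as PPC_EXC_ASYNC with a base class, so consumers test bits rather than compare for equality; a hedged sketch:

        /* cat obtained from the per-CPU category tables (sketch) */
        if ((cat & PPC_EXC_ASYNC) != 0) {
                /* asynchronous exception: the wrapper may switch to the
                 * ISR stack */
        }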
  • c/src/lib/libcpu/powerpc/shared/include/cpuIdent.c

    ra86f3aac r25a92bc1  
    4848    case PPC_8260:              return "MPC8260";
    4949    case PPC_8245:              return "MPC8245";
    50         case PPC_8540:          return "MPC8540";
    51         case PPC_PSIM:          return "PSIM";
     50    case PPC_8540:              return "MPC8540";
     51    case PPC_PSIM:              return "PSIM";
     52    case PPC_e200z6:            return "e200z6";
    5253    default:
    5354      printk("Unknown CPU value of 0x%x. Please add it to "
     
    8586    case PPC_8260:
    8687    case PPC_8245:
    87         case PPC_PSIM:
    88         case PPC_8540:
     88    case PPC_PSIM:
     89    case PPC_8540:
     90    case PPC_e200z6:
     91    case PPC_e300c1:
     92    case PPC_e300c2:
     93    case PPC_e300c3:
    8994      break;
    9095    default:
     
    127132        break;
    128133        case PPC_8540:
     134        case PPC_e200z6:
    129135                current_ppc_features.is_bookE                   = PPC_BOOKE_E500;
    130136        default:
     
    151157        break;
    152158  }
    153                
     159
     160        switch (current_ppc_cpu) {
     161                case PPC_e200z6:
     162                        current_ppc_features.has_ivpr_and_ivor = 1;
     163                        break;
     164                default:
     165                        break;
     166        }
     167
    154168  return current_ppc_cpu;
    155169}
  • c/src/lib/libcpu/powerpc/shared/include/cpuIdent.h

    ra86f3aac r25a92bc1  
    1616#ifndef _LIBCPU_CPUIDENT_H
    1717#define _LIBCPU_CPUIDENT_H
     18
     19#include <stdbool.h>
    1820
    1921#ifndef ASM
     
    4547  PPC_e300c2  = 0x8084, /* e300c2  core */
    4648  PPC_e300c3  = 0x8085, /* e300c3  core */
     49  PPC_e200z6 = 0x8115,
    4750  PPC_PSIM = 0xfffe,  /* GDB PowerPC simulator -- fake version */
    4851  PPC_UNKNOWN = 0xffff
     
    6871        unsigned has_epic           : 1;
    6972        unsigned has_shadowed_gprs  : 1;
     73        unsigned has_ivpr_and_ivor  : 1;
    7074} ppc_feature_t;
    7175
     
    8286/* PUBLIC ACCESS ROUTINES */
    8387#define _PPC_FEAT_DECL(x) \
    84 static inline ppc_cpu_##x() { \
     88static inline unsigned ppc_cpu_##x() { \
    8589  if ( PPC_UNKNOWN == current_ppc_cpu ) \
    8690    get_ppc_cpu_type(); \
     
    96100_PPC_FEAT_DECL(has_epic)
    97101_PPC_FEAT_DECL(has_shadowed_gprs)
     102_PPC_FEAT_DECL(has_ivpr_and_ivor)
     103
     104static inline bool ppc_cpu_is_e300()
     105{
     106        if (current_ppc_cpu == PPC_UNKNOWN) {
     107                get_ppc_cpu_type();
     108        }
     109        return current_ppc_cpu == PPC_e300c1
     110                || current_ppc_cpu == PPC_e300c2
     111                || current_ppc_cpu == PPC_e300c3;
     112}
    98113
    99114#undef _PPC_FEAT_DECL
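
The new feature bit and the e300 helper are queried like the existing ones; a short sketch:

        /* Sketch: the inline helpers call get_ppc_cpu_type() on first
         * use to fill the identification cache. */
        if (ppc_cpu_has_ivpr_and_ivor()) {
                /* program IVPR/IVOR and use the packed prologues */
        }
        if (ppc_cpu_is_e300()) {
                /* e300_vector_categories applies */
        }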