source: rtems/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_asm_macros.h @ 5166513c

Last change on this file since 5166513c was 5166513c, checked in by Till Straumann <strauman@…>, on 07/16/08 at 21:44:14

2008-07-16 Till Straumann <strauman@…>

  • new-exceptions/bspsupport/ppc_exc_asm_macros.h: Added a test to TEST_LOCK_crit so that a context switch is always prevented if MSR_CE is not set in the interrupt mask. (Supports a mode where the user wants to leave MSR_CE always enabled but abstains from calling OS primitives from the exception handler.)
  • Property mode set to 100644
File size: 23.6 KB
/*
 * (c) 1999, Eric Valette valette@crf.canon.fr
 *
 * Modified and partially rewritten by Till Straumann, 2007-2008
 *
 * Modified by Sebastian Huber <sebastian.huber@embedded-brains.de>, 2008.
 *
 * Low-level assembly code for PPC exceptions (macros).
 *
 * This file was written with the goal to eliminate
 * ALL #ifdef <cpu_flavor> conditionals -- please do not
 * reintroduce such statements.
 */

#include <libcpu/powerpc-utility.h>
#include <libcpu/raw_exception.h>

#include "vectors.h"

#define LT(cr) ((cr)*4+0)
#define GT(cr) ((cr)*4+1)
#define EQ(cr) ((cr)*4+2)

/* Opcode of 'stw r1, off(r13)' */
#define STW_R1_R13(off) ((((36<<10)|(r1<<5)|(r13))<<16) | ((off)&0xffff))
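/*
 * For illustration: with the usual register numbering (r1 = 1, r13 = 13) and
 * the 'stw' primary opcode 36, the upper halfword is
 * (36 << 10) | (1 << 5) | 13 = 0x902d, so STW_R1_R13(0) evaluates to
 * 0x902d0000, the encoding of 'stw r1, 0(r13)'.
 */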

#define FRAME_REGISTER r14
#define VECTOR_REGISTER r4
#define SCRATCH_REGISTER_0 r5
#define SCRATCH_REGISTER_1 r6
#define SCRATCH_REGISTER_2 r7

#define FRAME_OFFSET( r) GPR14_OFFSET( r)
#define VECTOR_OFFSET( r) GPR4_OFFSET( r)
#define SCRATCH_REGISTER_0_OFFSET( r) GPR5_OFFSET( r)
#define SCRATCH_REGISTER_1_OFFSET( r) GPR6_OFFSET( r)
#define SCRATCH_REGISTER_2_OFFSET( r) GPR7_OFFSET( r)

#define CR_TYPE 2
#define CR_MSR 3
#define CR_LOCK 4
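/*
 * CR_TYPE, CR_MSR and CR_LOCK name condition register fields (cr2, cr3 and
 * cr4, which the ABI treats as non-volatile, hence the 'non-volatile CR_*'
 * remarks below).  The LT/GT/EQ macros above turn a field number into a CR
 * bit index, e.g. EQ(CR_LOCK) = 4*4 + 2 = 18, the bit operated on by
 * creqv/crxor/crandc in the TEST_LOCK_* macros.
 */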

        /*
         * Minimal prologue snippets:
         *
         * Rationale: on some PPCs the vector offsets are spaced
         * as closely as 16 bytes.
         *
         * If we deal with asynchronous exceptions ('interrupts')
         * then we can use 4 instructions to
         *   1. atomically write lock to indicate ISR is in progress
         *      (we cannot atomically increase the Thread_Dispatch_disable_level,
         *      see README)
         *   2. save a register in special area
         *   3. load register with vector info
         *   4. branch
         *
         * If we deal with a synchronous exception (no stack switch
         * nor dispatch-disabling necessary) then it's easier:
         *   1. push stack frame
         *   2. save register on stack
         *   3. load register with vector info
         *   4. branch
         *
         */
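        /*
         * (Four 4-byte instructions occupy exactly 16 bytes, so even the
         * most tightly spaced vector slots can hold one of these snippets.)
         */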

/*
 *****************************************************************************
 * MACRO: PPC_EXC_MIN_PROLOG_ASYNC
 *****************************************************************************
 * USES:    VECTOR_REGISTER
 * ON EXIT: Vector in VECTOR_REGISTER
 *
 * NOTES:   VECTOR_REGISTER saved in special variable
 *          'ppc_exc_vector_register_\_PRI'.
 *
 */
        .macro  PPC_EXC_MIN_PROLOG_ASYNC _NAME _VEC _PRI _FLVR

        .global ppc_exc_min_prolog_async_\_NAME
ppc_exc_min_prolog_async_\_NAME:
        /* Atomically write lock variable in 1st instruction with non-zero
         * value (r1 is always nonzero; r13 could also be used)
         *
         * NOTE: raising an exception and executing this first instruction
         *       of the exception handler is apparently NOT atomic, i.e., a
         *       low-priority IRQ could set the PC to this location and a
         *       critical IRQ could intervene just at this point.
         *
         *       We check against this pathological case by checking the
         *       opcode/instruction at the interrupted PC for matching
         *
         *         stw r1, ppc_exc_lock_XXX@sdarel(r13)
         *
         *       ASSUMPTION:
         *          1) ALL 'asynchronous' exceptions (which disable thread-
         *             dispatching) execute THIS 'magical' instruction
         *             FIRST.
         *          2) This instruction (including the address offset)
         *             is not used anywhere else (probably a safe assumption).
         */
        stw     r1, ppc_exc_lock_\_PRI@sdarel(r13)
        /*      We have no stack frame yet; store VECTOR_REGISTER in special area;
         * a higher-priority (critical) interrupt uses a different area
         * (hence the different prologue snippets) (\_PRI)
         */
        stw     VECTOR_REGISTER, ppc_exc_vector_register_\_PRI@sdarel(r13)
        /*      Load vector.
         */
        li      VECTOR_REGISTER, ( \_VEC | 0xffff8000 )
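        /*      Note: the immediate above is negative; after sign extension
         *      the MSB of VECTOR_REGISTER is set, which is how WRAP (via
         *      'cmpwi CR_TYPE, VECTOR_REGISTER, 0') recognizes the exception
         *      as asynchronous.
         */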
        /*      Branch (must be within 32MB)
         */
        ba      wrap_\_FLVR

        .endm

/*
 *****************************************************************************
 * MACRO: PPC_EXC_MIN_PROLOG_SYNC
 *****************************************************************************
 * USES:    VECTOR_REGISTER
 * ON EXIT: vector in VECTOR_REGISTER
 *
 * NOTES:   exception stack frame pushed; VECTOR_REGISTER saved in frame
 *
 */
        .macro  PPC_EXC_MIN_PROLOG_SYNC _NAME _VEC _PRI _FLVR

        .global ppc_exc_min_prolog_sync_\_NAME
ppc_exc_min_prolog_sync_\_NAME:
        stwu    r1, -EXCEPTION_FRAME_END(r1)
        stw     VECTOR_REGISTER, VECTOR_OFFSET(r1)
        li      VECTOR_REGISTER, \_VEC
        ba      wrap_nopush_\_FLVR

        .endm

/*
 *****************************************************************************
 * MACRO: TEST_1ST_OPCODE_crit
 *****************************************************************************
 *
 * USES:    REG, cr0
 * ON EXIT: REG available (contains *pc - STW_R1_R13(0)),
 *          return value in cr0.
 *
 * Test the opcode at the PC that was interrupted by the critical
 * (asynchronous) exception; EQ(cr0) is set if
 *
 *   *SRR0 == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
 *
 */
        .macro  TEST_1ST_OPCODE_crit _REG

        lwz     \_REG, SRR0_FRAME_OFFSET(FRAME_REGISTER)
        lwz     \_REG, 0(\_REG)
        /*      opcode now in REG */

        /*      subtract upper 16 bits of the 'stw r1, 0(r13)' instruction */
        subis   \_REG, \_REG, STW_R1_R13(0)@h
        /*
         * if what's left compares against the 'ppc_exc_lock_std@sdarel'
         * address offset then we have a match...
         */
        cmplwi  cr0, \_REG, ppc_exc_lock_std@sdarel

        .endm

/*
 *****************************************************************************
 * MACRO: TEST_LOCK_std
 *****************************************************************************
 *
 * USES:    CR_LOCK
 * ON EXIT: CR_LOCK is set (indicates no lower-priority locks are engaged)
 *
 */
        .macro  TEST_LOCK_std _FLVR
        /* 'std' is lowest level, i.e., can not be locked -> EQ(CR_LOCK) = 1 */
        creqv   EQ(CR_LOCK), EQ(CR_LOCK), EQ(CR_LOCK)
        .endm

/*
 *****************************************************************************
 * MACRO: TEST_LOCK_crit
 *****************************************************************************
 *
 * USES:    CR_LOCK, cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available,
 *          returns result in CR_LOCK.
 *
 * critical-exception wrapper has to check 'std' lock:
 *
 * Return CR_LOCK = (   (interrupt_mask & MSR_CE) != 0
 *                   && ppc_exc_lock_std == 0
 *                   && *SRR0 != <write std lock instruction> )
 *
 */
        .macro  TEST_LOCK_crit _FLVR
        /* If MSR_CE is not in the IRQ mask then we must never allow
         * thread-dispatching!
         */
        GET_INTERRUPT_MASK mask=SCRATCH_REGISTER_1
        /* EQ(cr0) = ((interrupt_mask & MSR_CE) == 0) */
        andis.  SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, MSR_CE@h
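        /*      MSR_CE lives in the upper halfword of the MSR, hence the
         *      'andis.' with MSR_CE@h; cr0 now reflects the test above.
         */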
        beq     TEST_LOCK_crit_done_\_FLVR

        /* STD interrupt could have been interrupted before executing the 1st
         * instruction which sets the lock; check this case by looking at the
         * opcode present at the interrupted PC location.
         */
        TEST_1ST_OPCODE_crit    _REG=SCRATCH_REGISTER_0
        /*
         * At this point cr0 is set if
         *
         *   *(PC) == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
         *
         */

        /* check lock */
        lwz     SCRATCH_REGISTER_1, ppc_exc_lock_std@sdarel(r13)
        cmplwi  CR_LOCK, SCRATCH_REGISTER_1, 0

        /* set EQ(CR_LOCK) to result */
TEST_LOCK_crit_done_\_FLVR:
        /* If we end up here because the interrupt mask did not contain
         * MSR_CE then cr0 is set and therefore the value of CR_LOCK
         * does not matter since   x && !1 == 0:
         *
         *  if ( (interrupt_mask & MSR_CE) == 0 ) {
         *      EQ(CR_LOCK) = EQ(CR_LOCK) && ! ((interrupt_mask & MSR_CE) == 0)
         *  } else {
         *      EQ(CR_LOCK) = (ppc_exc_lock_std == 0) && ! (*pc == <write std lock instruction>)
         *  }
         */
        crandc  EQ(CR_LOCK), EQ(CR_LOCK), EQ(cr0)

        .endm

/*
 *****************************************************************************
 * MACRO: TEST_LOCK_mchk
 *****************************************************************************
 *
 * USES:    CR_LOCK
 * ON EXIT: CR_LOCK is cleared.
 *
 * We never want to disable machine-check exceptions to avoid a checkstop. This
 * means that we cannot use enabling/disabling this type of exception for
 * protection of critical OS data structures.  Therefore, calling OS primitives
 * from a machine-check handler is ILLEGAL. Since machine-checks can happen at
 * any time, it is not legal to perform a context switch (since the exception
 * could hit an IRQ protected section of code).  We simply let this test return
 * 0 so that ppc_exc_wrapup is never called after handling a machine-check.
 */
        .macro  TEST_LOCK_mchk _SRR0 _FLVR

        crxor   EQ(CR_LOCK), EQ(CR_LOCK), EQ(CR_LOCK)
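        /*      crxor of a bit with itself is always 0, so EQ(CR_LOCK) is
         *      cleared unconditionally and ppc_exc_wrapup() is never reached
         *      after a machine-check.
         */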

        .endm

/*
 *****************************************************************************
 * MACRO: RECOVER_CHECK_\PRI
 *****************************************************************************
 *
 * USES:    cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available
 *
 * Checks if the exception is recoverable for exceptions which need such a
 * test.
 */

/* Standard */
        .macro  RECOVER_CHECK_std _FLVR

        /* Check if exception is recoverable */
        lwz     SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
        lwz     SCRATCH_REGISTER_1, ppc_exc_msr_bits@sdarel(r13)
        xor     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
        andi.   SCRATCH_REGISTER_0, SCRATCH_REGISTER_1, MSR_RI
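        /*      The result is non-zero (cr0 'not equal') iff the RI bit in the
         *      saved SRR1 differs from the expected value in ppc_exc_msr_bits;
         *      in that case the bne below spins forever at the twiddle label.
         */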

recover_check_twiddle_std_\_FLVR:

        /* Not recoverable? */
        bne     recover_check_twiddle_std_\_FLVR

        .endm

/* Critical */
        .macro  RECOVER_CHECK_crit _FLVR

        /* Nothing to do */

        .endm

/* Machine check */
        .macro  RECOVER_CHECK_mchk _FLVR

        /* Check if exception is recoverable */
        lwz     SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
        lwz     SCRATCH_REGISTER_1, ppc_exc_msr_bits@sdarel(r13)
        xor     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
        andi.   SCRATCH_REGISTER_0, SCRATCH_REGISTER_1, MSR_RI

recover_check_twiddle_mchk_\_FLVR:

        /* Not recoverable? */
        bne     recover_check_twiddle_mchk_\_FLVR

        .endm

/*
 *****************************************************************************
 * MACRO: WRAP
 *****************************************************************************
 *
 * Minimal prologue snippets jump into WRAP which calls the high level
 * exception handler.  We must have this macro instantiated for each possible
 * flavor of exception so that we use the proper lock variable, SRR register
 * pair and RFI instruction.
 *
 * We have two types of exceptions: synchronous and asynchronous
 * (= interrupt-like).  The type is encoded in the vector register
 * (= VECTOR_REGISTER).  For interrupt-like exceptions the MSB in the vector
 * register is set.  The exception type is kept in the condition register
 * field CR_TYPE.  Normal exceptions (MSB is clear) use the task stack and a
 * context switch may happen at any time.  The interrupt-like exceptions
 * disable thread dispatching and switch to the interrupt stack (base address
 * is in SPRG1).
 *
 *                                      +
 *                                      |
 *                                      | Minimal prologue
 *                                      |
 *                                      +
 *                                      |
 *                                      | o Setup frame pointer
 *                                      | o Save basic registers
 *                                      | o Determine exception type:
 *                                      |   synchronous or asynchronous
 *                                      |
 *                                +-----+
 * Synchronous exceptions:        |     | Asynchronous exceptions:
 *                                |     |
 * Save non-volatile registers    |     | o Increment thread dispatch
 *                                |     |   disable level
 *                                |     | o Increment ISR nest level
 *                                |     | o Clear lock
 *                                |     | o Switch stack if necessary
 *                                |     |
 *                                +---->+
 *                                      |
 *                                      | o Save volatile registers
 *                                      | o Change MSR if necessary
 *                                      | o Call high level handler
 *                                      | o Call global handler if necessary
 *                                      | o Check if exception is recoverable
 *                                      |
 *                                +-----+
 * Synchronous exceptions:        |     | Asynchronous exceptions:
 *                                |     |
 * Restore non-volatile registers |     | o Decrement ISR nest level
 *                                |     | o Switch stack
 *                                |     | o Decrement thread dispatch
 *                                |     |   disable level
 *                                |     | o Test lock
 *                                |     | o May do a context switch
 *                                |     |
 *                                +---->+
 *                                      |
 *                                      | o Restore MSR if necessary
 *                                      | o Restore volatile registers
 *                                      | o Restore frame pointer
 *                                      | o Return
 *                                      |
 *                                      +
 */
        .macro  WRAP _FLVR _PRI _SRR0 _SRR1 _RFI

wrap_\_FLVR:

        /* Push exception frame */
        stwu    r1, -EXCEPTION_FRAME_END(r1)

wrap_nopush_\_FLVR:

        /* Save frame register */
        stw     FRAME_REGISTER, FRAME_OFFSET(r1)

wrap_no_save_frame_register_\_FLVR:

        /*
         * At first we save only some scratch registers
         * and the CR.  We use a non-volatile register
         * for the exception frame pointer (= FRAME_REGISTER).
         */

        /* Move frame address into non-volatile FRAME_REGISTER */
        mr      FRAME_REGISTER, r1

        /* Save scratch registers */
        stw     SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(FRAME_REGISTER)
        stw     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(FRAME_REGISTER)
        stw     SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(FRAME_REGISTER)

        /* Save CR */
        mfcr    SCRATCH_REGISTER_0
        stw     SCRATCH_REGISTER_0, EXC_CR_OFFSET(FRAME_REGISTER)

        /* Check exception type and remember it in non-volatile CR_TYPE */
        cmpwi   CR_TYPE, VECTOR_REGISTER, 0

        /*
         * Depending on the exception type we now either save the
         * non-volatile registers or disable thread dispatching and switch
         * to the ISR stack.
         */

        /* Branch for synchronous exceptions */
        bge     CR_TYPE, wrap_save_non_volatile_regs_\_FLVR

        /*
         * Increment the thread dispatch disable level so that a higher
         * priority exception occurring from here on cannot run the
         * scheduler.  It is safe to increment this without disabling higher
         * priority exceptions since those will see that we wrote the lock
         * anyway.
         */

        /* Increment ISR nest level and thread dispatch disable level */
        lwz     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
        lwz     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)
        addi    SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, 1
        addi    SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, 1
        stw     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
        stw     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)

        /*
         * No higher-priority exception occurring after this point
         * can cause a context switch.
         */

        /* Clear lock */
        li      SCRATCH_REGISTER_0, 0
        stw     SCRATCH_REGISTER_0, ppc_exc_lock_\_PRI@sdarel(r13)

        /* Switch stack if necessary */
        mfspr   SCRATCH_REGISTER_0, SPRG1
        cmpw    SCRATCH_REGISTER_0, r1
        blt     wrap_stack_switch_\_FLVR
        mfspr   SCRATCH_REGISTER_1, SPRG2
        cmpw    SCRATCH_REGISTER_1, r1
        blt     wrap_stack_switch_done_\_FLVR
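        /*      Note: r1 is kept only if SPRG2 < r1 <= SPRG1 (presumably the
         *      bounds of the interrupt stack whose base is in SPRG1, see the
         *      WRAP description above); otherwise it is replaced below by the
         *      value read from SPRG1.
         */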

wrap_stack_switch_\_FLVR:

        mr      r1, SCRATCH_REGISTER_0

wrap_stack_switch_done_\_FLVR:

        /*
         * Load the pristine VECTOR_REGISTER from a special location for
         * asynchronous exceptions.  The synchronous exceptions save the
         * VECTOR_REGISTER in their minimal prologue.
         */
        lwz     SCRATCH_REGISTER_2, ppc_exc_vector_register_\_PRI@sdarel(r13)

        /* Save pristine vector register */
        stw     SCRATCH_REGISTER_2, VECTOR_OFFSET(FRAME_REGISTER)

wrap_disable_thread_dispatching_done_\_FLVR:

        /*
         * We now have SCRATCH_REGISTER_0, SCRATCH_REGISTER_1,
         * SCRATCH_REGISTER_2 and CR available.  VECTOR_REGISTER still holds
         * the vector (and exception type).  FRAME_REGISTER is a pointer to the
         * exception frame (always on the stack of the interrupted context).
         * r1 is the stack pointer, either on the task stack or on the ISR
         * stack.  CR_TYPE holds the exception type.
         */

        /* Save SRR0 */
        mfspr   SCRATCH_REGISTER_0, \_SRR0
        stw     SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(FRAME_REGISTER)

        /* Save SRR1 */
        mfspr   SCRATCH_REGISTER_0, \_SRR1
        stw     SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)

        /* Save CTR */
        mfctr   SCRATCH_REGISTER_0
        stw     SCRATCH_REGISTER_0, EXC_CTR_OFFSET(FRAME_REGISTER)

        /* Save XER */
        mfxer   SCRATCH_REGISTER_0
        stw     SCRATCH_REGISTER_0, EXC_XER_OFFSET(FRAME_REGISTER)

        /* Save LR */
        mflr    SCRATCH_REGISTER_0
        stw     SCRATCH_REGISTER_0, EXC_LR_OFFSET(FRAME_REGISTER)

        /* Save volatile registers */
        stw     r0, GPR0_OFFSET(FRAME_REGISTER)
        stw     r3, GPR3_OFFSET(FRAME_REGISTER)
        stw     r8, GPR8_OFFSET(FRAME_REGISTER)
        stw     r9, GPR9_OFFSET(FRAME_REGISTER)
        stw     r10, GPR10_OFFSET(FRAME_REGISTER)
        stw     r11, GPR11_OFFSET(FRAME_REGISTER)
        stw     r12, GPR12_OFFSET(FRAME_REGISTER)

        /* Save read-only small data area anchor (EABI) */
        stw     r2, GPR2_OFFSET(FRAME_REGISTER)

        /* Save vector number and exception type */
        stw     VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER)

        /* Load MSR bit mask */
        lwz     SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)

        /*
         * Change the MSR if necessary (MMU, RI),
         * remember decision in non-volatile CR_MSR
         */
        cmpwi   CR_MSR, SCRATCH_REGISTER_0, 0
        bne     CR_MSR, wrap_change_msr_\_FLVR

wrap_change_msr_done_\_FLVR:

        /*
         * Call high level exception handler
         */

        /*
         * Get the handler table index from the vector number.  We have to
         * discard the exception type.  Take only the least significant five
         * bits (= LAST_VALID_EXC + 1) from the vector register.  Multiply by
         * four (= size of function pointer).
         */
        rlwinm  SCRATCH_REGISTER_1, VECTOR_REGISTER, 2, 25, 29
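        /*      rlwinm rotates left by 2 and keeps only mask bits 25..29, so
         *      this is equivalent to
         *        SCRATCH_REGISTER_1 = (VECTOR_REGISTER & 0x1f) * 4
         *      i.e. the vector number times the size of a function pointer.
         */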

        /* Load handler table address */
        LA      SCRATCH_REGISTER_0, ppc_exc_handler_table

        /* Load handler address */
        lwzx    SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1

        /*
         * First parameter = exception frame pointer + FRAME_LINK_SPACE
         *
         * We add FRAME_LINK_SPACE to the frame pointer because the high level
         * handler expects a BSP_Exception_frame structure.
         */
        addi    r3, FRAME_REGISTER, FRAME_LINK_SPACE

        /*
         * Second parameter = vector number (r4 is the VECTOR_REGISTER)
         *
         * Discard the exception type and store the vector number
         * in the vector register.  Take only the least significant
         * five bits (= LAST_VALID_EXC + 1).
         */
        rlwinm  VECTOR_REGISTER, VECTOR_REGISTER, 0, 27, 31
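        /*      With a zero rotate and mask bits 27..31 this simply clears the
         *      upper 27 bits, i.e. VECTOR_REGISTER &= 0x1f, dropping the
         *      asynchronous-exception flag in the MSB.
         */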

        /* Call handler */
        mtctr   SCRATCH_REGISTER_0
        bctrl

        /* Check return value and call global handler if necessary */
        cmpwi   r3, 0
        bne     wrap_call_global_handler_\_FLVR

wrap_handler_done_\_FLVR:

        /* Check if exception is recoverable */
        RECOVER_CHECK_\_PRI     _FLVR=\_FLVR

        /*
         * Depending on the exception type we now either restore the
         * non-volatile registers or enable thread dispatching and switch
         * back from the ISR stack.
         */

        /* Branch for synchronous exceptions */
        bge     CR_TYPE, wrap_restore_non_volatile_regs_\_FLVR

        /*
         * Switch back to original stack (FRAME_REGISTER == r1 if we are still
         * on the IRQ stack).
         */
        mr      r1, FRAME_REGISTER

        /*
         * Check thread dispatch disable level AND lower priority locks (in
         * CR_LOCK): ONLY if the thread dispatch disable level == 0 AND no lock
         * is set then call ppc_exc_wrapup() which may do a context switch.  We
         * can skip TEST_LOCK because it has no side effects.
         */

        /* Decrement ISR nest level and thread dispatch disable level */
        lwz     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
        lwz     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)
        subi    SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, 1
        subic.  SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, 1
        stw     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
        stw     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)
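        /*      Note: subic. (subtract and record) updates cr0, so the bne
         *      below skips thread dispatching unless the decremented
         *      dispatch disable level is exactly zero.
         */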

        /* Branch to skip thread dispatching */
        bne     wrap_thread_dispatching_done_\_FLVR

        /* Test lower-priority locks (result in non-volatile CR_LOCK) */
        TEST_LOCK_\_PRI _FLVR=\_FLVR

        /* Branch to skip thread dispatching */
        bne     CR_LOCK, wrap_thread_dispatching_done_\_FLVR

        /* Load address of ppc_exc_wrapup() */
        LA      SCRATCH_REGISTER_0, ppc_exc_wrapup

        /* First parameter = exception frame pointer + FRAME_LINK_SPACE */
        addi    r3, FRAME_REGISTER, FRAME_LINK_SPACE

        /* Call ppc_exc_wrapup() */
        mtctr   SCRATCH_REGISTER_0
        bctrl

wrap_thread_dispatching_done_\_FLVR:

        /* Restore MSR? */
        bne     CR_MSR, wrap_restore_msr_\_FLVR

wrap_restore_msr_done_\_FLVR:

        /*
         * At this point r1 is a valid exception frame pointer and
         * FRAME_REGISTER is no longer needed.
         */

        /* Restore frame register */
        lwz     FRAME_REGISTER, FRAME_OFFSET(r1)

        /* Restore XER and CTR */
        lwz     SCRATCH_REGISTER_0, EXC_XER_OFFSET(r1)
        lwz     SCRATCH_REGISTER_1, EXC_CTR_OFFSET(r1)
        mtxer   SCRATCH_REGISTER_0
        mtctr   SCRATCH_REGISTER_1

        /* Restore CR and LR */
        lwz     SCRATCH_REGISTER_0, EXC_CR_OFFSET(r1)
        lwz     SCRATCH_REGISTER_1, EXC_LR_OFFSET(r1)
        mtcr    SCRATCH_REGISTER_0
        mtlr    SCRATCH_REGISTER_1

        /* Restore volatile registers */
        lwz     r0, GPR0_OFFSET(r1)
        lwz     r3, GPR3_OFFSET(r1)
        lwz     r8, GPR8_OFFSET(r1)
        lwz     r9, GPR9_OFFSET(r1)
        lwz     r10, GPR10_OFFSET(r1)
        lwz     r11, GPR11_OFFSET(r1)
        lwz     r12, GPR12_OFFSET(r1)

        /* Restore read-only small data area anchor (EABI) */
        lwz     r2, GPR2_OFFSET(r1)

        /* Restore vector register */
        lwz     VECTOR_REGISTER, VECTOR_OFFSET(r1)

        /*
         * Disable all asynchronous exceptions which can do a thread dispatch.
         * See README.
         */
        INTERRUPT_DISABLE       SCRATCH_REGISTER_1, SCRATCH_REGISTER_0

        /* Restore scratch registers and SRRs */
        lwz     SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(r1)
        lwz     SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
        lwz     SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(r1)
        mtspr   \_SRR0, SCRATCH_REGISTER_0
        lwz     SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(r1)
        mtspr   \_SRR1, SCRATCH_REGISTER_1
        lwz     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(r1)

        /*
         * We restore r1 from the frame rather than just popping (adding to
         * current r1) since the exception handler might have done strange
         * things (e.g. a debugger moving and relocating the stack).
         */
        lwz     r1, 0(r1)

        /* Return */
        \_RFI

wrap_change_msr_\_FLVR:

        mfmsr   SCRATCH_REGISTER_1
        or      SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
        mtmsr   SCRATCH_REGISTER_1
        msync
        isync
        b       wrap_change_msr_done_\_FLVR

wrap_restore_msr_\_FLVR:

        lwz     SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)
        mfmsr   SCRATCH_REGISTER_1
        andc    SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
        mtmsr   SCRATCH_REGISTER_1
        msync
        isync
        b       wrap_restore_msr_done_\_FLVR

wrap_save_non_volatile_regs_\_FLVR:

        /* Load pristine stack pointer */
        lwz     SCRATCH_REGISTER_1, 0(FRAME_REGISTER)

        /* Save small data area anchor (SYSV) */
        stw     r13, GPR13_OFFSET(FRAME_REGISTER)

        /* Save pristine stack pointer */
        stw     SCRATCH_REGISTER_1, GPR1_OFFSET(FRAME_REGISTER)

        /* r14 is the FRAME_REGISTER and will be saved elsewhere */

        /* Save non-volatile registers r15 .. r31 */
        stmw    r15, GPR15_OFFSET(FRAME_REGISTER)

        b       wrap_disable_thread_dispatching_done_\_FLVR

wrap_restore_non_volatile_regs_\_FLVR:

        /* Load stack pointer */
        lwz     SCRATCH_REGISTER_0, GPR1_OFFSET(r1)

        /* Restore small data area anchor (SYSV) */
        lwz     r13, GPR13_OFFSET(r1)

        /* r14 is the FRAME_REGISTER and will be restored elsewhere */

        /* Restore non-volatile registers r15 .. r31 */
        lmw     r15, GPR15_OFFSET(r1)

        /* Restore stack pointer */
        stw     SCRATCH_REGISTER_0, 0(r1)

        b       wrap_thread_dispatching_done_\_FLVR

wrap_call_global_handler_\_FLVR:

        /* First parameter = exception frame pointer + FRAME_LINK_SPACE */
        addi    r3, FRAME_REGISTER, FRAME_LINK_SPACE

        /* Load global handler address */
        LW      SCRATCH_REGISTER_0, globalExceptHdl

        /* Check address */
        cmpwi   SCRATCH_REGISTER_0, 0
        beq     wrap_handler_done_\_FLVR

        /* Call global handler */
        mtctr   SCRATCH_REGISTER_0
        bctrl

        b       wrap_handler_done_\_FLVR

        .endm