source: rtems/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_asm_macros.h @ 2d2de4eb

Branches: 4.10, 4.11, 5
Last change on this file since 2d2de4eb was 2d2de4eb, checked in by Thomas Doerfler <Thomas.Doerfler@…>, on 10/23/09 at 07:32:46

Update for exception support changes.

  • Property mode set to 100644
File size: 23.5 KB
Line 
1/*
2 * (c) 1999, Eric Valette valette@crf.canon.fr
3 *
4 * Modified and partially rewritten by Till Straumann, 2007-2008
5 *
6 * Modified by Sebastian Huber <sebastian.huber@embedded-brains.de>, 2008.
7 *
8 * Low-level assembly code for PPC exceptions (macros).
9 *
10 * This file was written with the goal to eliminate
11 * ALL #ifdef <cpu_flavor> conditionals -- please do not
12 * reintroduce such statements.
13 */
14
#include <bsp/vectors.h>

/* Bit numbers of the LT/GT/EQ bits within condition register field 'cr'
 * (each CR field cr0..cr7 is 4 bits wide, so field 'cr' starts at CR bit
 * cr*4).  Suitable as operands of the CR-logical instructions
 * (creqv, crxor, crandc, ...).
 */
#define LT(cr) ((cr)*4+0)
#define GT(cr) ((cr)*4+1)
#define EQ(cr) ((cr)*4+2)

/* Opcode of 'stw r1, off(r13)'
 * (primary opcode 36 = stw in the top 6 bits of the upper halfword,
 * then rS = r1 and rA = r13; 'off' occupies the low 16 bits).
 * Used by TEST_1ST_OPCODE_crit to recognize the lock-write instruction
 * of an interrupted standard prologue.
 */
#define STW_R1_R13(off) ((((36<<10)|(r1<<5)|(r13))<<16) | ((off)&0xffff))

/* Register aliases used throughout the wrapper code:
 * FRAME_REGISTER is a non-volatile register holding the exception frame
 * pointer; VECTOR_REGISTER carries the vector number (and exception type
 * in its MSB); the SCRATCH registers are saved early and then used freely.
 */
#define FRAME_REGISTER r14
#define VECTOR_REGISTER r4
#define SCRATCH_REGISTER_0 r5
#define SCRATCH_REGISTER_1 r6
#define SCRATCH_REGISTER_2 r7

/* Exception frame offsets corresponding to the register aliases above */
#define FRAME_OFFSET( r) GPR14_OFFSET( r)
#define VECTOR_OFFSET( r) GPR4_OFFSET( r)
#define SCRATCH_REGISTER_0_OFFSET( r) GPR5_OFFSET( r)
#define SCRATCH_REGISTER_1_OFFSET( r) GPR6_OFFSET( r)
#define SCRATCH_REGISTER_2_OFFSET( r) GPR7_OFFSET( r)

/* Non-volatile CR fields used to remember state across the handler call:
 * CR_TYPE: exception type (synchronous vs. asynchronous),
 * CR_MSR:  whether the MSR was modified and must be restored,
 * CR_LOCK: result of the lower-priority lock test.
 */
#define CR_TYPE 2
#define CR_MSR 3
#define CR_LOCK 4
39
40        /*
41         * Minimal prologue snippets:
42         *
43         * Rationale: on some PPCs the vector offsets are spaced
44         * as closely as 16 bytes.
45         *
46         * If we deal with asynchronous exceptions ('interrupts')
47         * then we can use 4 instructions to
48         *   1. atomically write lock to indicate ISR is in progress
49         *      (we cannot atomically increase the Thread_Dispatch_disable_level,
50         *      see README)
51         *   2. save a register in special area
52         *   3. load register with vector info
53         *   4. branch
54         *
55         * If we deal with a synchronous exception (no stack switch
56         * nor dispatch-disabling necessary) then it's easier:
57         *   1. push stack frame
58         *   2. save register on stack
59         *   3. load register with vector info
60         *   4. branch
61         *
62         */
63
/*
 *****************************************************************************
 * MACRO: PPC_EXC_MIN_PROLOG_ASYNC
 *****************************************************************************
 * USES:    VECTOR_REGISTER
 * ON EXIT: Vector in VECTOR_REGISTER
 *
 * NOTES:   VECTOR_REGISTER saved in special variable
 *          'ppc_exc_vector_register_\_PRI'.
 *
 */
        .macro  PPC_EXC_MIN_PROLOG_ASYNC _NAME _VEC _PRI _FLVR

        .global ppc_exc_min_prolog_async_\_NAME
ppc_exc_min_prolog_async_\_NAME:
        /* Atomically write lock variable in 1st instruction with non-zero
         * value (r1 is always nonzero; r13 could also be used)
         *
         * NOTE: raising an exception and executing this first instruction
         *       of the exception handler is apparently NOT atomic, i.e., a
         *       low-priority IRQ could set the PC to this location and a
         *       critical IRQ could intervene just at this point.
         *
         *       We check against this pathological case by checking the
         *       opcode/instruction at the interrupted PC for matching
         *
         *         stw r1, ppc_exc_lock_XXX@sdarel(r13)
         *
         *       ASSUMPTION:
         *          1) ALL 'asynchronous' exceptions (which disable thread-
         *             dispatching) execute THIS 'magical' instruction
         *             FIRST.
         *          2) This instruction (including the address offset)
         *             is not used anywhere else (probably a safe assumption).
         */
        stw     r1, ppc_exc_lock_\_PRI@sdarel(r13)
        /*      We have no stack frame yet; store VECTOR_REGISTER in special area;
         * a higher-priority (critical) interrupt uses a different area
         * (hence the different prologue snippets) (\_PRI)
         */
        stw     VECTOR_REGISTER, ppc_exc_vector_register_\_PRI@sdarel(r13)
        /*      Load vector.  'li' sign-extends its 16-bit immediate, so ORing
         * with 0xffff8000 both keeps the immediate in range and sets the MSB,
         * which marks this as an asynchronous (interrupt-like) exception.
         */
        li      VECTOR_REGISTER, ( \_VEC | 0xffff8000 )
        /*      Branch to the common wrapper (absolute branch; the target
         * must be within the first 32MB of the address space).
         */
        ba      wrap_\_FLVR

        .endm
113
/*
 *****************************************************************************
 * MACRO: PPC_EXC_MIN_PROLOG_SYNC
 *****************************************************************************
 * USES:    VECTOR_REGISTER
 * ON EXIT: vector in VECTOR_REGISTER
 *
 * NOTES:   exception stack frame pushed; VECTOR_REGISTER saved in frame
 *
 */
        .macro  PPC_EXC_MIN_PROLOG_SYNC _NAME _VEC _PRI _FLVR

        .global ppc_exc_min_prolog_sync_\_NAME
ppc_exc_min_prolog_sync_\_NAME:
        /* Allocate the exception frame on the interrupted context's stack */
        stwu    r1, -EXCEPTION_FRAME_END(r1)
        /* Save VECTOR_REGISTER in the frame (no special save area needed
         * for synchronous exceptions) */
        stw     VECTOR_REGISTER, VECTOR_OFFSET(r1)
        /* Load vector; the MSB stays clear, marking a synchronous exception */
        li      VECTOR_REGISTER, \_VEC
        /* Branch to the common wrapper, skipping its frame push (already
         * done above; absolute branch, target must be within 32MB) */
        ba      wrap_nopush_\_FLVR

        .endm
/*
 *****************************************************************************
 * MACRO: TEST_1ST_OPCODE_crit
 *****************************************************************************
 *
 * USES:    REG, cr0
 * ON EXIT: REG available (contains *pc - STW_R1_R13(0)),
 *          return value in cr0.
 *
 * Test the opcode interrupted by a critical (asynchronous) exception;
 * set EQ(cr0) if
 *
 *   *SRR0 == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
 *
 * i.e., if the critical exception hit a standard prologue right before its
 * lock-write instruction could execute (the caller folds this into CR_LOCK).
 */
        .macro  TEST_1ST_OPCODE_crit _REG

        /* Fetch interrupted PC (the SRR0 saved in the frame), then the
         * instruction word at that address */
        lwz     \_REG, SRR0_FRAME_OFFSET(FRAME_REGISTER)
        lwz     \_REG, 0(\_REG)
        /*      opcode now in REG */

        /*      subtract upper 16bits of 'stw r1, 0(r13)' instruction */
        subis   \_REG, \_REG, STW_R1_R13(0)@h
        /*
         * if what's left compares against the 'ppc_exc_lock_std@sdarel'
         * address offset then we have a match...
         */
        cmplwi  cr0, \_REG, ppc_exc_lock_std@sdarel

        .endm
164
/*
 *****************************************************************************
 * MACRO: TEST_LOCK_std
 *****************************************************************************
 *
 * USES:    CR_LOCK
 * ON EXIT: CR_LOCK is set (indicates no lower-priority locks are engaged)
 *
 */
        .macro  TEST_LOCK_std _FLVR
        /* 'std' is the lowest priority level; nothing below it can hold a
         * lock, so the answer is unconditionally "unlocked".  Set
         * EQ(CR_LOCK) = 1 via the crset simplified mnemonic (the assembler
         * encodes it as creqv bit, bit, bit).
         */
        crset   EQ(CR_LOCK)
        .endm
178
/*
 ******************************************************************************
 * MACRO: TEST_LOCK_crit
 ******************************************************************************
 *
 * USES:    CR_LOCK, cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available,
 *          returns result in CR_LOCK.
 *
 * critical-exception wrapper has to check 'std' lock:
 *
 * Return CR_LOCK = (   (interrupt_mask & MSR_CE) != 0
 *               &&                  ppc_lock_std == 0
 *               && * SRR0 != <write std lock instruction> )
 *
 */
        .macro  TEST_LOCK_crit _FLVR
        /* If MSR_CE is not in the IRQ mask then we must never allow
         * thread-dispatching!
         */
        GET_INTERRUPT_MASK mask=SCRATCH_REGISTER_1
        /* EQ(cr0) = ((interrupt_mask & MSR_CE) == 0) */
        andis.  SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, MSR_CE@h
        beq     TEST_LOCK_crit_done_\_FLVR

        /* STD interrupt could have been interrupted before executing the 1st
         * instruction which sets the lock; check this case by looking at the
         * opcode present at the interrupted PC location.
         */
        TEST_1ST_OPCODE_crit    _REG=SCRATCH_REGISTER_0
        /*
         * At this point cr0 is set if
         *
         *   *(PC) == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
         *
         */

        /* check lock: EQ(CR_LOCK) = (ppc_exc_lock_std == 0) */
        lwz     SCRATCH_REGISTER_1, ppc_exc_lock_std@sdarel(r13)
        cmplwi  CR_LOCK, SCRATCH_REGISTER_1, 0

        /* set EQ(CR_LOCK) to result */
TEST_LOCK_crit_done_\_FLVR:
        /* If we end up here because the interrupt mask did not contain
         * MSR_CE then cr0 is set and therefore the value of CR_LOCK
         * does not matter since   x && !1 == 0:
         *
         *  if ( (interrupt_mask & MSR_CE) == 0 ) {
         *      EQ(CR_LOCK) = EQ(CR_LOCK) && ! ((interrupt_mask & MSR_CE) == 0)
         *  } else {
         *      EQ(CR_LOCK) = (ppc_exc_lock_std == 0) && ! (*pc == <write std lock instruction>)
         *  }
         */
        crandc  EQ(CR_LOCK), EQ(CR_LOCK), EQ(cr0)

        .endm
235
/*
 ******************************************************************************
 * MACRO: TEST_LOCK_mchk
 ******************************************************************************
 *
 * USES:    CR_LOCK
 * ON EXIT: CR_LOCK is cleared.
 *
 * We never want to disable machine-check exceptions to avoid a checkstop. This
 * means that we cannot use enabling/disabling this type of exception for
 * protection of critical OS data structures.  Therefore, calling OS primitives
 * from a machine-check handler is ILLEGAL. Since machine-checks can happen
 * anytime it is not legal to perform a context switch (since the exception
 * could hit a IRQ protected section of code).  We simply let this test return
 * 0 so that ppc_exc_wrapup is never called after handling a machine-check.
 */
        .macro  TEST_LOCK_mchk _SRR0 _FLVR

        /* Always report "locked": clear EQ(CR_LOCK) with the crclr
         * simplified mnemonic (the assembler encodes it as
         * crxor bit, bit, bit). */
        crclr   EQ(CR_LOCK)

        .endm
257
/*
 ******************************************************************************
 * MACRO: RECOVER_CHECK_\PRI
 ******************************************************************************
 *
 * USES:    cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available
 *
 * Checks if the exception is recoverable for exceptions which need such a
 * test.
 */

/* Standard*/
        .macro  RECOVER_CHECK_std _FLVR

        /* Check if exception is recoverable: XOR the saved SRR1 with the
         * expected MSR bits and test MSR_RI (recoverable-interrupt bit);
         * a non-zero result means the RI state differs from what we expect.
         */
        lwz     SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
        lwz     SCRATCH_REGISTER_1, ppc_exc_msr_bits@sdarel(r13)
        xor     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
        andi.   SCRATCH_REGISTER_0, SCRATCH_REGISTER_1, MSR_RI

recover_check_twiddle_std_\_FLVR:

        /* Not recoverable? Then spin here forever (deliberate halt: there is
         * no safe way to continue from an unrecoverable exception). */
        bne     recover_check_twiddle_std_\_FLVR

        .endm
285
/* Critical */
        .macro  RECOVER_CHECK_crit _FLVR

        /* Nothing to do: no recoverability test for critical exceptions */

        .endm
292
/* Machine check */
        .macro  RECOVER_CHECK_mchk _FLVR

        /* Check if exception is recoverable: XOR the saved SRR1 with the
         * expected MSR bits and test MSR_RI (recoverable-interrupt bit);
         * same test as RECOVER_CHECK_std, with flavor-unique labels.
         */
        lwz     SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
        lwz     SCRATCH_REGISTER_1, ppc_exc_msr_bits@sdarel(r13)
        xor     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
        andi.   SCRATCH_REGISTER_0, SCRATCH_REGISTER_1, MSR_RI

recover_check_twiddle_mchk_\_FLVR:

        /* Not recoverable? Then spin here forever (deliberate halt). */
        bne     recover_check_twiddle_mchk_\_FLVR

        .endm
308
/*
 ******************************************************************************
 * MACRO: WRAP
 ******************************************************************************
 *
 * Minimal prologue snippets jump into WRAP which calls the high level
 * exception handler.  We must have this macro instantiated for each possible
 * flavor of exception so that we use the proper lock variable, SRR register
 * pair and RFI instruction.
 *
 * We have two types of exceptions: synchronous and asynchronous (= interrupt
 * like).  The type is encoded in the vector register (= VECTOR_REGISTER).  For
 * interrupt like exceptions the MSB in the vector register is set.  The
 * exception type is kept in the comparison register CR_TYPE.  Normal
 * exceptions (MSB is clear) use the task stack and a context switch may happen
 * at any time.  The interrupt like exceptions disable thread dispatching and
 * switch to the interrupt stack (base address is in SPRG1).
 *
 *                                      +
 *                                      |
 *                                      | Minimal prologue
 *                                      |
 *                                      +
 *                                      |
 *                                      | o Setup frame pointer
 *                                      | o Save basic registers
 *                                      | o Determine exception type:
 *                                      |   synchronous or asynchronous
 *                                      |
 *                                +-----+
 * Synchronous exceptions:        |     | Asynchronous exceptions:
 *                                |     |
 * Save non-volatile registers    |     | o Increment thread dispatch
 *                                |     |   disable level
 *                                |     | o Increment ISR nest level
 *                                |     | o Clear lock
 *                                |     | o Switch stack if necessary
 *                                |     |
 *                                +---->+
 *                                      |
 *                                      | o Save volatile registers
 *                                      | o Change MSR if necessary
 *                                      | o Call high level handler
 *                                      | o Call global handler if necessary
 *                                      | o Check if exception is recoverable
 *                                      |
 *                                +-----+
 * Synchronous exceptions:        |     | Asynchronous exceptions:
 *                                |     |
 * Restore non-volatile registers |     | o Decrement ISR nest level
 *                                |     | o Switch stack
 *                                |     | o Decrement thread dispatch
 *                                |     |   disable level
 *                                |     | o Test lock
 *                                |     | o May do a context switch
 *                                |     |
 *                                +---->+
 *                                      |
 *                                      | o Restore MSR if necessary
 *                                      | o Restore volatile registers
 *                                      | o Restore frame pointer
 *                                      | o Return
 *                                      |
 *                                      +
 */
        .macro  WRAP _FLVR _PRI _SRR0 _SRR1 _RFI

wrap_\_FLVR:

        /* Push exception frame (async prologues enter here) */
        stwu    r1, -EXCEPTION_FRAME_END(r1)

wrap_nopush_\_FLVR:

        /* Save frame register (sync prologues enter here; frame already pushed) */
        stw     FRAME_REGISTER, FRAME_OFFSET(r1)

wrap_no_save_frame_register_\_FLVR:

        /*
         * We save at first only some scratch registers
         * and the CR.  We use a non-volatile register
         * for the exception frame pointer (= FRAME_REGISTER).
         */

        /* Move frame address in non-volatile FRAME_REGISTER */
        mr      FRAME_REGISTER, r1

        /* Save scratch registers */
        stw     SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(FRAME_REGISTER)
        stw     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(FRAME_REGISTER)
        stw     SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(FRAME_REGISTER)

        /* Save CR */
        mfcr    SCRATCH_REGISTER_0
        stw     SCRATCH_REGISTER_0, EXC_CR_OFFSET(FRAME_REGISTER)

        /* Check exception type and remember it in non-volatile CR_TYPE:
         * asynchronous vectors have the MSB set and thus compare < 0 */
        cmpwi   CR_TYPE, VECTOR_REGISTER, 0

        /*
         * Depending on the exception type we do now save the non-volatile
         * registers or disable thread dispatching and switch to the ISR stack.
         */

        /* Branch for synchronous exceptions */
        bge     CR_TYPE, wrap_save_non_volatile_regs_\_FLVR

        /*
         * Increment the thread dispatch disable level in case a higher
         * priority exception occurs we don't want it to run the scheduler.  It
         * is safe to increment this without disabling higher priority
         * exceptions since those will see that we wrote the lock anyways.
         */

        /* Increment ISR nest level and thread dispatch disable level */
        lwz     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
        lwz     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)
        addi    SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, 1
        addi    SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, 1
        stw     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
        stw     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)

        /*
         * No higher-priority exception occurring after this point
         * can cause a context switch.
         */

        /* Clear lock (written non-zero by the minimal prologue) */
        li      SCRATCH_REGISTER_0, 0
        stw     SCRATCH_REGISTER_0, ppc_exc_lock_\_PRI@sdarel(r13)

        /* Switch stack if necessary: switch to the ISR stack (top in SPRG1)
         * unless r1 already lies within the ISR stack area, i.e.
         * SPRG2 < r1 <= SPRG1 (SPRG2 presumably holds the ISR stack bottom
         * -- set up elsewhere; confirm against the BSP stack initialization) */
        mfspr   SCRATCH_REGISTER_0, SPRG1
        cmpw    SCRATCH_REGISTER_0, r1
        blt     wrap_stack_switch_\_FLVR
        mfspr   SCRATCH_REGISTER_1, SPRG2
        cmpw    SCRATCH_REGISTER_1, r1
        blt     wrap_stack_switch_done_\_FLVR

wrap_stack_switch_\_FLVR:

        mr      r1, SCRATCH_REGISTER_0

wrap_stack_switch_done_\_FLVR:

        /*
         * Load the pristine VECTOR_REGISTER from a special location for
         * asynchronous exceptions.  The synchronous exceptions save the
         * VECTOR_REGISTER in their minimal prologue.
         */
        lwz     SCRATCH_REGISTER_2, ppc_exc_vector_register_\_PRI@sdarel(r13)

        /* Save pristine vector register */
        stw     SCRATCH_REGISTER_2, VECTOR_OFFSET(FRAME_REGISTER)

wrap_disable_thread_dispatching_done_\_FLVR:

        /*
         * We now have SCRATCH_REGISTER_0, SCRATCH_REGISTER_1,
         * SCRATCH_REGISTER_2 and CR available.  VECTOR_REGISTER still holds
         * the vector (and exception type).  FRAME_REGISTER is a pointer to the
         * exception frame (always on the stack of the interrupted context).
         * r1 is the stack pointer, either on the task stack or on the ISR
         * stack.  CR_TYPE holds the exception type.
         */

        /* Save SRR0 (interrupted PC for this exception flavor) */
        mfspr   SCRATCH_REGISTER_0, \_SRR0
        stw     SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(FRAME_REGISTER)

        /* Save SRR1 (interrupted MSR for this exception flavor) */
        mfspr   SCRATCH_REGISTER_0, \_SRR1
        stw     SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)

        /* Save CTR */
        mfctr   SCRATCH_REGISTER_0
        stw     SCRATCH_REGISTER_0, EXC_CTR_OFFSET(FRAME_REGISTER)

        /* Save XER */
        mfxer   SCRATCH_REGISTER_0
        stw     SCRATCH_REGISTER_0, EXC_XER_OFFSET(FRAME_REGISTER)

        /* Save LR */
        mflr    SCRATCH_REGISTER_0
        stw     SCRATCH_REGISTER_0, EXC_LR_OFFSET(FRAME_REGISTER)

        /* Save volatile registers */
        stw     r0, GPR0_OFFSET(FRAME_REGISTER)
        stw     r3, GPR3_OFFSET(FRAME_REGISTER)
        stw     r8, GPR8_OFFSET(FRAME_REGISTER)
        stw     r9, GPR9_OFFSET(FRAME_REGISTER)
        stw     r10, GPR10_OFFSET(FRAME_REGISTER)
        stw     r11, GPR11_OFFSET(FRAME_REGISTER)
        stw     r12, GPR12_OFFSET(FRAME_REGISTER)

        /* Save read-only small data area anchor (EABI) */
        stw     r2, GPR2_OFFSET(FRAME_REGISTER)

        /* Save vector number and exception type */
        stw     VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER)

        /* Load MSR bit mask */
        lwz     SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)

        /*
         * Change the MSR if necessary (MMU, RI),
         * remember decision in non-volatile CR_MSR
         */
        cmpwi   CR_MSR, SCRATCH_REGISTER_0, 0
        bne     CR_MSR, wrap_change_msr_\_FLVR

wrap_change_msr_done_\_FLVR:

        /*
         * Call high level exception handler
         */

        /*
         * Get the handler table index from the vector number.  We have to
         * discard the exception type.  Take only the least significant five
         * bits (= LAST_VALID_EXC + 1) from the vector register.  Multiply by
         * four (= size of function pointer).
         */
        rlwinm  SCRATCH_REGISTER_1, VECTOR_REGISTER, 2, 25, 29

        /* Load handler table address */
        LA      SCRATCH_REGISTER_0, ppc_exc_handler_table

        /* Load handler address */
        lwzx    SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1

        /*
         * First parameter = exception frame pointer + FRAME_LINK_SPACE
         *
         * We add FRAME_LINK_SPACE to the frame pointer because the high level
         * handler expects a BSP_Exception_frame structure.
         */
        addi    r3, FRAME_REGISTER, FRAME_LINK_SPACE

        /*
         * Second parameter = vector number (r4 is the VECTOR_REGISTER)
         *
         * Discard the exception type and store the vector number
         * in the vector register.  Take only the least significant
         * five bits (= LAST_VALID_EXC + 1).
         */
        rlwinm  VECTOR_REGISTER, VECTOR_REGISTER, 0, 27, 31

        /* Call handler */
        mtctr   SCRATCH_REGISTER_0
        bctrl

        /* Check return value and call global handler if necessary
         * (non-zero return means the handler did not accept the exception) */
        cmpwi   r3, 0
        bne     wrap_call_global_handler_\_FLVR

wrap_handler_done_\_FLVR:

        /* Check if exception is recoverable (flavor-specific test) */
        RECOVER_CHECK_\_PRI     _FLVR=\_FLVR

        /*
         * Depending on the exception type we do now restore the non-volatile
         * registers or enable thread dispatching and switch back from the ISR
         * stack.
         */

        /* Branch for synchronous exceptions */
        bge     CR_TYPE, wrap_restore_non_volatile_regs_\_FLVR

        /*
         * Switch back to original stack (FRAME_REGISTER == r1 if we are still
         * on the IRQ stack).
         */
        mr      r1, FRAME_REGISTER

        /*
         * Check thread dispatch disable level AND lower priority locks (in
         * CR_LOCK): ONLY if the thread dispatch disable level == 0 AND no lock
         * is set then call ppc_exc_wrapup() which may do a context switch.  We
         * can skip TEST_LOCK, because it has no side effects.
         */

        /* Decrement ISR nest level and thread dispatch disable level
         * (subic. sets cr0: EQ iff the disable level reached zero) */
        lwz     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
        lwz     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)
        subi    SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, 1
        subic.  SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, 1
        stw     SCRATCH_REGISTER_0, _ISR_Nest_level@sdarel(r13)
        stw     SCRATCH_REGISTER_1, _Thread_Dispatch_disable_level@sdarel(r13)

        /* Branch to skip thread dispatching */
        bne     wrap_thread_dispatching_done_\_FLVR

        /* Test lower-priority locks (result in non-volatile CR_LOCK) */
        TEST_LOCK_\_PRI _FLVR=\_FLVR

        /* Branch to skip thread dispatching */
        bne     CR_LOCK, wrap_thread_dispatching_done_\_FLVR

        /* Load address of ppc_exc_wrapup() */
        LA      SCRATCH_REGISTER_0, ppc_exc_wrapup

        /* First parameter = exception frame pointer + FRAME_LINK_SPACE */
        addi    r3, FRAME_REGISTER, FRAME_LINK_SPACE

        /* Call ppc_exc_wrapup() */
        mtctr   SCRATCH_REGISTER_0
        bctrl

wrap_thread_dispatching_done_\_FLVR:

        /* Restore MSR? (CR_MSR still holds the decision made above) */
        bne     CR_MSR, wrap_restore_msr_\_FLVR

wrap_restore_msr_done_\_FLVR:

        /*
         * At this point r1 is a valid exception frame pointer and
         * FRAME_REGISTER is no longer needed.
         */

        /* Restore frame register */
        lwz     FRAME_REGISTER, FRAME_OFFSET(r1)

        /* Restore XER and CTR */
        lwz     SCRATCH_REGISTER_0, EXC_XER_OFFSET(r1)
        lwz     SCRATCH_REGISTER_1, EXC_CTR_OFFSET(r1)
        mtxer   SCRATCH_REGISTER_0
        mtctr   SCRATCH_REGISTER_1

        /* Restore CR and LR */
        lwz     SCRATCH_REGISTER_0, EXC_CR_OFFSET(r1)
        lwz     SCRATCH_REGISTER_1, EXC_LR_OFFSET(r1)
        mtcr    SCRATCH_REGISTER_0
        mtlr    SCRATCH_REGISTER_1

        /* Restore volatile registers */
        lwz     r0, GPR0_OFFSET(r1)
        lwz     r3, GPR3_OFFSET(r1)
        lwz     r8, GPR8_OFFSET(r1)
        lwz     r9, GPR9_OFFSET(r1)
        lwz     r10, GPR10_OFFSET(r1)
        lwz     r11, GPR11_OFFSET(r1)
        lwz     r12, GPR12_OFFSET(r1)

        /* Restore read-only small data area anchor (EABI) */
        lwz     r2, GPR2_OFFSET(r1)

        /* Restore vector register */
        lwz     VECTOR_REGISTER, VECTOR_OFFSET(r1)

        /*
         * Disable all asynchronous exceptions which can do a thread dispatch.
         * See README.
         */
        INTERRUPT_DISABLE       SCRATCH_REGISTER_1, SCRATCH_REGISTER_0

        /* Restore scratch registers and SRRs.  The loads and mtspr are
         * interleaved so that each scratch register is reloaded only after
         * its last use as an SRR staging register. */
        lwz     SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(r1)
        lwz     SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
        lwz     SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(r1)
        mtspr   \_SRR0, SCRATCH_REGISTER_0
        lwz     SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(r1)
        mtspr   \_SRR1, SCRATCH_REGISTER_1
        lwz     SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(r1)

        /*
         * We restore r1 from the frame rather than just popping (adding to
         * current r1) since the exception handler might have done strange
         * things (e.g. a debugger moving and relocating the stack).
         */
        lwz     r1, 0(r1)

        /* Return (flavor-specific rfi variant) */
        \_RFI

wrap_change_msr_\_FLVR:

        /* OR the configured bits into the MSR; msync/isync make the change
         * visible before continuing (context-synchronizing) */
        mfmsr   SCRATCH_REGISTER_1
        or      SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
        mtmsr   SCRATCH_REGISTER_1
        msync
        isync
        b       wrap_change_msr_done_\_FLVR

wrap_restore_msr_\_FLVR:

        /* Clear the previously set bits again (AND with complement) */
        lwz     SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)
        mfmsr   SCRATCH_REGISTER_1
        andc    SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
        mtmsr   SCRATCH_REGISTER_1
        msync
        isync
        b       wrap_restore_msr_done_\_FLVR

wrap_save_non_volatile_regs_\_FLVR:

        /* Load pristine stack pointer (back-chain word of the frame) */
        lwz     SCRATCH_REGISTER_1, 0(FRAME_REGISTER)

        /* Save small data area anchor (SYSV) */
        stw     r13, GPR13_OFFSET(FRAME_REGISTER)

        /* Save pristine stack pointer */
        stw     SCRATCH_REGISTER_1, GPR1_OFFSET(FRAME_REGISTER)

        /* r14 is the FRAME_REGISTER and will be saved elsewhere */

        /* Save non-volatile registers r15 .. r31 */
        stmw    r15, GPR15_OFFSET(FRAME_REGISTER)

        b       wrap_disable_thread_dispatching_done_\_FLVR

wrap_restore_non_volatile_regs_\_FLVR:

        /* Load stack pointer */
        lwz     SCRATCH_REGISTER_0, GPR1_OFFSET(r1)

        /* Restore small data area anchor (SYSV) */
        lwz     r13, GPR13_OFFSET(r1)

        /* r14 is the FRAME_REGISTER and will be restored elsewhere */

        /* Restore non-volatile registers r15 .. r31 */
        lmw     r15, GPR15_OFFSET(r1)

        /* Restore stack pointer (write back-chain word of the frame) */
        stw     SCRATCH_REGISTER_0, 0(r1)

        b       wrap_thread_dispatching_done_\_FLVR

wrap_call_global_handler_\_FLVR:

        /* First parameter = exception frame pointer + FRAME_LINK_SPACE */
        addi    r3, FRAME_REGISTER, FRAME_LINK_SPACE

        /* Load global handler address */
        LW      SCRATCH_REGISTER_0, globalExceptHdl

        /* Check address: skip the call if no global handler is installed */
        cmpwi   SCRATCH_REGISTER_0, 0
        beq     wrap_handler_done_\_FLVR

        /* Call global handler */
        mtctr   SCRATCH_REGISTER_0
        bctrl

        b       wrap_handler_done_\_FLVR

        .endm
Note: See TracBrowser for help on using the repository browser.