source: rtems/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S @ 5018894e

Last change on this file since 5018894e was 5018894e, checked in by Sebastian Huber <sebastian.huber@…>, on Nov 20, 2017 at 6:45:15 AM

bsps/powerpc: Fix PPC_EXC_CONFIG_USE_FIXED_HANDLER

For the SPE support we must store the upper half of r3 as well.

Update #3085.

/*
 * Copyright (c) 2011, 2017 embedded brains GmbH.  All rights reserved.
 *
 *  embedded brains GmbH
 *  Dornierstr. 4
 *  82178 Puchheim
 *  Germany
 *  <rtems@embedded-brains.de>
 *
 * The license and distribution terms for this file may be
 * found in the file LICENSE in this distribution or at
 * http://www.rtems.org/license/LICENSE.
 */

#include <bspopts.h>
#include <rtems/score/percpu.h>
#include <bsp/vectors.h>

#ifdef PPC_EXC_CONFIG_USE_FIXED_HANDLER

#define SCRATCH_0_REGISTER r0
#define SCRATCH_1_REGISTER r3
#define SCRATCH_2_REGISTER r4
#define SCRATCH_3_REGISTER r5
#define SCRATCH_4_REGISTER r6
#define SCRATCH_5_REGISTER r7
#define SCRATCH_6_REGISTER r8
#define SCRATCH_7_REGISTER r9
#define SCRATCH_8_REGISTER r10
#define SCRATCH_9_REGISTER r11
#define SCRATCH_10_REGISTER r12
#define FRAME_REGISTER r14

#define SCRATCH_0_OFFSET GPR0_OFFSET
#define SCRATCH_1_OFFSET GPR3_OFFSET
#define SCRATCH_2_OFFSET GPR4_OFFSET
#define SCRATCH_3_OFFSET GPR5_OFFSET
#define SCRATCH_4_OFFSET GPR6_OFFSET
#define SCRATCH_5_OFFSET GPR7_OFFSET
#define SCRATCH_6_OFFSET GPR8_OFFSET
#define SCRATCH_7_OFFSET GPR9_OFFSET
#define SCRATCH_8_OFFSET GPR10_OFFSET
#define SCRATCH_9_OFFSET GPR11_OFFSET
#define SCRATCH_10_OFFSET GPR12_OFFSET
#define FRAME_OFFSET PPC_EXC_INTERRUPT_FRAME_OFFSET

#ifdef RTEMS_PROFILING
.macro GET_TIME_BASE REG
#if defined(__PPC_CPU_E6500__)
        mfspr \REG, FSL_EIS_ATBL
#elif defined(ppc8540)
        mfspr   \REG, TBRL
#else /* ppc8540 */
        mftb    \REG
#endif /* ppc8540 */
.endm
#endif /* RTEMS_PROFILING */

        .global ppc_exc_min_prolog_async_tmpl_normal
        .global ppc_exc_interrupt

ppc_exc_min_prolog_async_tmpl_normal:

        stwu    r1, -PPC_EXC_INTERRUPT_FRAME_SIZE(r1)
        PPC_REG_STORE   SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
        li      SCRATCH_1_REGISTER, 0xffff8000
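        /*
         * Presumably the immediate above is only a placeholder and the low
         * half is patched with the actual vector number when the prologue is
         * installed, see ppc_exc_make_prologue().
         */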

        /*
         * We store the absolute branch target address here.  It will be used
         * to generate the branch operation in ppc_exc_make_prologue().
         */
        .int    ppc_exc_interrupt

ppc_exc_interrupt:

        /* Save non-volatile FRAME_REGISTER */
        PPC_REG_STORE   FRAME_REGISTER, FRAME_OFFSET(r1)

#ifdef RTEMS_PROFILING
        /* Get entry instant */
        GET_TIME_BASE   FRAME_REGISTER
        stw     FRAME_REGISTER, PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET(r1)
#endif /* RTEMS_PROFILING */

#ifdef __SPE__
        /* Enable SPE */
        mfmsr   FRAME_REGISTER
        oris    FRAME_REGISTER, FRAME_REGISTER, MSR_SPE >> 16
        mtmsr   FRAME_REGISTER
        isync

        /*
         * Save high order part of SCRATCH_1_REGISTER here.  The low order part
         * was saved in the minimal prologue.
         */
        evmergehi       SCRATCH_1_REGISTER, SCRATCH_1_REGISTER, FRAME_REGISTER
        PPC_REG_STORE   FRAME_REGISTER, GPR3_OFFSET(r1)
#endif

#if defined(PPC_MULTILIB_FPU) || defined(PPC_MULTILIB_ALTIVEC)
        /* Enable FPU and/or AltiVec */
        mfmsr   FRAME_REGISTER
#ifdef PPC_MULTILIB_FPU
        ori     FRAME_REGISTER, FRAME_REGISTER, MSR_FP
#endif
#ifdef PPC_MULTILIB_ALTIVEC
        oris    FRAME_REGISTER, FRAME_REGISTER, MSR_VE >> 16
#endif
        mtmsr   FRAME_REGISTER
        isync
#endif

        /* Move frame pointer to non-volatile FRAME_REGISTER */
        mr      FRAME_REGISTER, r1

        /*
         * Save volatile registers.  The SCRATCH_1_REGISTER has already been
         * saved in the minimal prologue.
         */
        PPC_GPR_STORE   SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
#ifdef __powerpc64__
        PPC_GPR_STORE   r2, GPR2_OFFSET(r1)
        LA32    r2, .TOC.
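        /*
         * On 64-bit targets r2 is the TOC pointer.  The interrupted value was
         * saved above; presumably LA32 reloads r2 with the address of the
         * linker-provided .TOC. symbol so that the calls below use this
         * handler's own TOC.
         */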
#endif
        PPC_GPR_STORE   SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
        GET_SELF_CPU_CONTROL    SCRATCH_2_REGISTER
        PPC_GPR_STORE   SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
        PPC_GPR_STORE   SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
        PPC_GPR_STORE   SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
        PPC_GPR_STORE   SCRATCH_6_REGISTER, SCRATCH_6_OFFSET(r1)
        PPC_GPR_STORE   SCRATCH_7_REGISTER, SCRATCH_7_OFFSET(r1)
        PPC_GPR_STORE   SCRATCH_8_REGISTER, SCRATCH_8_OFFSET(r1)
        PPC_GPR_STORE   SCRATCH_9_REGISTER, SCRATCH_9_OFFSET(r1)
        PPC_GPR_STORE   SCRATCH_10_REGISTER, SCRATCH_10_OFFSET(r1)

        /* Load ISR nest level and thread dispatch disable level */
        lwz     SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_2_REGISTER)
        lwz     SCRATCH_4_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_2_REGISTER)

        /* Save SRR0, SRR1, CR, XER, CTR, and LR */
        mfsrr0  SCRATCH_0_REGISTER
        mfsrr1  SCRATCH_5_REGISTER
        mfcr    SCRATCH_6_REGISTER
        mfxer   SCRATCH_7_REGISTER
        mfctr   SCRATCH_8_REGISTER
        mflr    SCRATCH_9_REGISTER
        PPC_REG_STORE   SCRATCH_0_REGISTER, SRR0_FRAME_OFFSET(r1)
        PPC_REG_STORE   SCRATCH_5_REGISTER, SRR1_FRAME_OFFSET(r1)
        stw     SCRATCH_6_REGISTER, EXC_CR_OFFSET(r1)
        stw     SCRATCH_7_REGISTER, EXC_XER_OFFSET(r1)
        PPC_REG_STORE   SCRATCH_8_REGISTER, EXC_CTR_OFFSET(r1)
        PPC_REG_STORE   SCRATCH_9_REGISTER, EXC_LR_OFFSET(r1)

#ifdef __SPE__
        /* Save SPEFSCR and ACC */
        mfspr   SCRATCH_0_REGISTER, FSL_EIS_SPEFSCR
        evxor   SCRATCH_5_REGISTER, SCRATCH_5_REGISTER, SCRATCH_5_REGISTER
        evmwumiaa       SCRATCH_5_REGISTER, SCRATCH_5_REGISTER, SCRATCH_5_REGISTER
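        /*
         * There is no direct move-from-ACC instruction.  The evxor above
         * zeroes the register and evmwumiaa adds the zero products to the
         * accumulator, which leaves a copy of ACC in SCRATCH_5_REGISTER.
         */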
        stw     SCRATCH_0_REGISTER, PPC_EXC_SPEFSCR_OFFSET(r1)
        evstdd  SCRATCH_5_REGISTER, PPC_EXC_ACC_OFFSET(r1)
#endif

#ifdef PPC_MULTILIB_ALTIVEC
        /* Save volatile AltiVec context */
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
        stvx    v0, r1, SCRATCH_0_REGISTER
        mfvscr  v0
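        /* v0 was saved above, so it can carry the VSCR until it is stored below */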
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
        stvx    v1, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
        stvx    v2, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
        stvx    v3, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
        stvx    v4, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
        stvx    v5, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
        stvx    v6, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
        stvx    v7, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
        stvx    v8, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
        stvx    v9, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
        stvx    v10, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
        stvx    v11, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
        stvx    v12, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
        stvx    v13, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
        stvx    v14, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
        stvx    v15, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
        stvx    v16, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
        stvx    v17, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
        stvx    v18, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
        stvx    v19, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
        stvewx  v0, r1, SCRATCH_0_REGISTER
#endif

#ifdef PPC_MULTILIB_FPU
        /* Save volatile FPU context */
        stfd    f0, PPC_EXC_MIN_FR_OFFSET(0)(r1)
        mffs    f0
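        /* f0 was saved above, so it can carry the FPSCR until it is stored below */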
        stfd    f1, PPC_EXC_MIN_FR_OFFSET(1)(r1)
        stfd    f2, PPC_EXC_MIN_FR_OFFSET(2)(r1)
        stfd    f3, PPC_EXC_MIN_FR_OFFSET(3)(r1)
        stfd    f4, PPC_EXC_MIN_FR_OFFSET(4)(r1)
        stfd    f5, PPC_EXC_MIN_FR_OFFSET(5)(r1)
        stfd    f6, PPC_EXC_MIN_FR_OFFSET(6)(r1)
        stfd    f7, PPC_EXC_MIN_FR_OFFSET(7)(r1)
        stfd    f8, PPC_EXC_MIN_FR_OFFSET(8)(r1)
        stfd    f9, PPC_EXC_MIN_FR_OFFSET(9)(r1)
        stfd    f10, PPC_EXC_MIN_FR_OFFSET(10)(r1)
        stfd    f11, PPC_EXC_MIN_FR_OFFSET(11)(r1)
        stfd    f12, PPC_EXC_MIN_FR_OFFSET(12)(r1)
        stfd    f13, PPC_EXC_MIN_FR_OFFSET(13)(r1)
        stfd    f0, PPC_EXC_MIN_FPSCR_OFFSET(r1)
#endif

        /* Increment ISR nest level and thread dispatch disable level */
        cmpwi   SCRATCH_3_REGISTER, 0
#ifdef RTEMS_PROFILING
        cmpwi   cr2, SCRATCH_3_REGISTER, 0
#endif
        addi    SCRATCH_3_REGISTER, SCRATCH_3_REGISTER, 1
        addi    SCRATCH_4_REGISTER, SCRATCH_4_REGISTER, 1
        stw     SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_2_REGISTER)
        stw     SCRATCH_4_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_2_REGISTER)

        /* Switch stack if necessary */
        mfspr   SCRATCH_0_REGISTER, SPRG1
        iselgt  r1, r1, SCRATCH_0_REGISTER
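        /*
         * The iselgt above keeps the current stack if we were already nested
         * (CR0 still holds the compare of the previous ISR nest level with
         * zero) and otherwise switches to the stack loaded from SPRG1
         * (assumption: SPRG1 holds the interrupt stack pointer set up during
         * exception initialization).
         */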

        /* Call fixed high level handler */
        bl      bsp_interrupt_dispatch
        PPC64_NOP_FOR_LINKER_TOC_POINTER_RESTORE

#ifdef RTEMS_PROFILING
        /* Update profiling data if necessary */
        bne     cr2, .Lprofiling_done
        GET_SELF_CPU_CONTROL    r3
        lwz     r4, PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET(FRAME_REGISTER)
        GET_TIME_BASE   r5
        bl      _Profiling_Outer_most_interrupt_entry_and_exit
        PPC64_NOP_FOR_LINKER_TOC_POINTER_RESTORE
.Lprofiling_done:
#endif /* RTEMS_PROFILING */

        /* Load some per-CPU variables */
        GET_SELF_CPU_CONTROL    SCRATCH_1_REGISTER
        lbz     SCRATCH_0_REGISTER, PER_CPU_DISPATCH_NEEDED(SCRATCH_1_REGISTER)
        lwz     SCRATCH_5_REGISTER, PER_CPU_ISR_DISPATCH_DISABLE(SCRATCH_1_REGISTER)
        lwz     SCRATCH_6_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_1_REGISTER)
        lwz     SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_1_REGISTER)

        /*
         * Switch back to original stack (FRAME_REGISTER == r1 if we are still
         * on the IRQ stack) and restore FRAME_REGISTER.
         */
        mr      r1, FRAME_REGISTER
        PPC_REG_LOAD    FRAME_REGISTER, FRAME_OFFSET(r1)

        /* Decrement levels and determine thread dispatch state */
        xori    SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, 1
        or      SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, SCRATCH_5_REGISTER
        subi    SCRATCH_4_REGISTER, SCRATCH_6_REGISTER, 1
        or.     SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, SCRATCH_4_REGISTER
        subi    SCRATCH_3_REGISTER, SCRATCH_3_REGISTER, 1
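        /*
         * After the sequence above, SCRATCH_0_REGISTER is zero exactly if a
         * thread dispatch is needed, ISR dispatch is not disabled, and the new
         * thread dispatch disable level is zero.  The or. set CR0 accordingly
         * for the branch below.
         */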

        /* Store thread dispatch disable and ISR nest levels */
        stw     SCRATCH_4_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_1_REGISTER)
        stw     SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_1_REGISTER)

        /*
         * Check the thread dispatch necessary indicator, the ISR dispatch
         * disable flag, and the thread dispatch disable level.
         */
        bne     .Lthread_dispatch_done

        /* Thread dispatch */
.Ldo_thread_dispatch:

        /* Set ISR dispatch disable and thread dispatch disable level to one */
        li      SCRATCH_0_REGISTER, 1
        stw     SCRATCH_0_REGISTER, PER_CPU_ISR_DISPATCH_DISABLE(SCRATCH_1_REGISTER)
        stw     SCRATCH_0_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_1_REGISTER)

        /*
         * Call _Thread_Do_dispatch(); this function will enable interrupts.
         * Note that r3 is SCRATCH_1_REGISTER.
         */
        mfmsr   r4
        ori     r4, r4, MSR_EE
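        /*
         * Presumably r3 (the per-CPU control loaded above) and r4 (the MSR
         * image with external exceptions enabled) are the two arguments of
         * _Thread_Do_dispatch(), which uses the latter as the ISR level to
         * enable interrupts while dispatching.
         */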
        bl      _Thread_Do_dispatch
        PPC64_NOP_FOR_LINKER_TOC_POINTER_RESTORE

        /* Disable interrupts */
        wrteei  0

        /* SCRATCH_1_REGISTER is volatile, so we must set it again */
        GET_SELF_CPU_CONTROL    SCRATCH_1_REGISTER

        /* Check if we have to do the thread dispatch again */
        lbz     SCRATCH_0_REGISTER, PER_CPU_DISPATCH_NEEDED(SCRATCH_1_REGISTER)
        cmpwi   SCRATCH_0_REGISTER, 0
        bne     .Ldo_thread_dispatch

        /* We are done with thread dispatching */
        li      SCRATCH_0_REGISTER, 0
        stw     SCRATCH_0_REGISTER, PER_CPU_ISR_DISPATCH_DISABLE(SCRATCH_1_REGISTER)

.Lthread_dispatch_done:

#ifdef PPC_MULTILIB_ALTIVEC
        /* Restore volatile AltiVec context */
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
        lvewx   v0, r1, SCRATCH_0_REGISTER
        mtvscr  v0
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
        lvx     v0, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
        lvx     v1, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
        lvx     v2, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
        lvx     v3, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
        lvx     v4, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
        lvx     v5, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
        lvx     v6, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
        lvx     v7, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
        lvx     v8, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
        lvx     v9, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
        lvx     v10, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
        lvx     v11, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
        lvx     v12, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
        lvx     v13, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
        lvx     v14, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
        lvx     v15, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
        lvx     v16, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
        lvx     v17, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
        lvx     v18, r1, SCRATCH_0_REGISTER
        li      SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
        lvx     v19, r1, SCRATCH_0_REGISTER
#endif

#ifdef PPC_MULTILIB_FPU
        /* Restore volatile FPU context */
        lfd     f0, PPC_EXC_MIN_FPSCR_OFFSET(r1)
        mtfsf   0xff, f0
        lfd     f0, PPC_EXC_MIN_FR_OFFSET(0)(r1)
        lfd     f1, PPC_EXC_MIN_FR_OFFSET(1)(r1)
        lfd     f2, PPC_EXC_MIN_FR_OFFSET(2)(r1)
        lfd     f3, PPC_EXC_MIN_FR_OFFSET(3)(r1)
        lfd     f4, PPC_EXC_MIN_FR_OFFSET(4)(r1)
        lfd     f5, PPC_EXC_MIN_FR_OFFSET(5)(r1)
        lfd     f6, PPC_EXC_MIN_FR_OFFSET(6)(r1)
        lfd     f7, PPC_EXC_MIN_FR_OFFSET(7)(r1)
        lfd     f8, PPC_EXC_MIN_FR_OFFSET(8)(r1)
        lfd     f9, PPC_EXC_MIN_FR_OFFSET(9)(r1)
        lfd     f10, PPC_EXC_MIN_FR_OFFSET(10)(r1)
        lfd     f11, PPC_EXC_MIN_FR_OFFSET(11)(r1)
        lfd     f12, PPC_EXC_MIN_FR_OFFSET(12)(r1)
        lfd     f13, PPC_EXC_MIN_FR_OFFSET(13)(r1)
#endif

#ifdef __SPE__
        /* Load SPEFSCR and ACC */
        lwz     SCRATCH_3_REGISTER, PPC_EXC_SPEFSCR_OFFSET(r1)
        evldd   SCRATCH_4_REGISTER, PPC_EXC_ACC_OFFSET(r1)
#endif

        /*
         * We must clear reservations here, since otherwise compare-and-swap
         * atomic operations with interrupts enabled may yield wrong results.
         * The compiler generates a compare-and-swap atomic operation like
         * this:
         *
         *   .L1:
         *     lwarx  r9, r0, r3
         *     cmpw   r9, r4
         *     bne-   .L2
         *     stwcx. r5, r0, r3
         *     bne-   .L1
         *   .L2:
         *
         * Consider the following scenario.  A thread is interrupted right
         * before the stwcx.  The interrupt updates the value using a
         * compare-and-swap sequence.  Everything is fine up to this point.
         * The interrupt now performs a compare-and-swap sequence which fails
         * with a branch to .L2.  The current processor now holds a
         * reservation.  The interrupt returns without a further stwcx.  The
         * thread then updates the value using the unrelated reservation left
         * behind by the interrupt.
         */
        li      SCRATCH_0_REGISTER, FRAME_OFFSET
        stwcx.  SCRATCH_0_REGISTER, r1, SCRATCH_0_REGISTER
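        /*
         * The store conditional above targets the FRAME_REGISTER save slot,
         * which was already reloaded, so the value written does not matter;
         * only the side effect of clearing an outstanding reservation is of
         * interest.
         */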

        /* Load SRR0, SRR1, CR, XER, CTR, and LR */
        PPC_REG_LOAD    SCRATCH_5_REGISTER, SRR0_FRAME_OFFSET(r1)
        PPC_REG_LOAD    SCRATCH_6_REGISTER, SRR1_FRAME_OFFSET(r1)
        lwz     SCRATCH_7_REGISTER, EXC_CR_OFFSET(r1)
        lwz     SCRATCH_8_REGISTER, EXC_XER_OFFSET(r1)
        PPC_REG_LOAD    SCRATCH_9_REGISTER, EXC_CTR_OFFSET(r1)
        PPC_REG_LOAD    SCRATCH_10_REGISTER, EXC_LR_OFFSET(r1)

        /* Restore volatile registers */
        PPC_GPR_LOAD    SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
#ifdef __powerpc64__
        PPC_GPR_LOAD    r2, GPR2_OFFSET(r1)
#endif
        PPC_GPR_LOAD    SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
        PPC_GPR_LOAD    SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)

#ifdef __SPE__
        /* Restore SPEFSCR and ACC */
        mtspr   FSL_EIS_SPEFSCR, SCRATCH_3_REGISTER
        evmra   SCRATCH_4_REGISTER, SCRATCH_4_REGISTER
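        /* evmra copies the register into the accumulator and thus restores ACC */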
#endif

        /* Restore volatile registers */
        PPC_GPR_LOAD    SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
        PPC_GPR_LOAD    SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)

        /* Restore SRR0, SRR1, CR, XER, CTR, and LR plus volatile registers */
        mtsrr0  SCRATCH_5_REGISTER
        PPC_GPR_LOAD    SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
        mtsrr1  SCRATCH_6_REGISTER
        PPC_GPR_LOAD    SCRATCH_6_REGISTER, SCRATCH_6_OFFSET(r1)
        mtcr    SCRATCH_7_REGISTER
        PPC_GPR_LOAD    SCRATCH_7_REGISTER, SCRATCH_7_OFFSET(r1)
        mtxer   SCRATCH_8_REGISTER
        PPC_GPR_LOAD    SCRATCH_8_REGISTER, SCRATCH_8_OFFSET(r1)
        mtctr   SCRATCH_9_REGISTER
        PPC_GPR_LOAD    SCRATCH_9_REGISTER, SCRATCH_9_OFFSET(r1)
        mtlr    SCRATCH_10_REGISTER
        PPC_GPR_LOAD    SCRATCH_10_REGISTER, SCRATCH_10_OFFSET(r1)

        /* Pop stack */
        addi    r1, r1, PPC_EXC_INTERRUPT_FRAME_SIZE

        /* Return */
        rfi

/* Symbol provided for debugging and tracing */
ppc_exc_interrupt_end:

#endif /* PPC_EXC_CONFIG_USE_FIXED_HANDLER */