source: rtems/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S @ 057c294

4.115
Last change on this file since 057c294 was 057c294, checked in by Sebastian Huber <sebastian.huber@…>, on 12/03/13 at 08:55:52

bsps/powerpc: Unconditionally clear reservations

  • Property mode set to 100644
File size: 9.9 KB
Line 
1/*  cpu_asm.s   1.1 - 95/12/04
2 *
3 *  This file contains the assembly code for the PowerPC implementation
4 *  of RTEMS.
5 *
6 *  Author:     Andrew Bray <andy@i-cubed.co.uk>
7 *
8 *  COPYRIGHT (c) 1995 by i-cubed ltd.
9 *
10 *  To anyone who acknowledges that this file is provided "AS IS"
11 *  without any express or implied warranty:
12 *      permission to use, copy, modify, and distribute this file
13 *      for any purpose is hereby granted without fee, provided that
14 *      the above copyright notice and this notice appears in all
15 *      copies, and that the name of i-cubed limited not be used in
16 *      advertising or publicity pertaining to distribution of the
17 *      software without specific, written prior permission.
18 *      i-cubed limited makes no representations about the suitability
19 *      of this software for any purpose.
20 *
21 *  Derived from c/src/exec/cpu/no_cpu/cpu_asm.c:
22 *
23 *  COPYRIGHT (c) 1989-1997.
24 *  On-Line Applications Research Corporation (OAR).
25 *
26 *  Copyright (c) 2011-2013 embedded brains GmbH.
27 *
28 *  The license and distribution terms for this file may in
29 *  the file LICENSE in this distribution or at
30 *  http://www.rtems.com/license/LICENSE.
31 */
32
#include <rtems/asm.h>
#include <rtems/powerpc/powerpc.h>
#include <rtems/score/cpu.h>
#include <bspopts.h>

/* The cache-line layout of the context (see PPC_CONTEXT_CACHE_LINE_*
 * below) is hard-wired for 32-byte lines; refuse to build otherwise. */
#if PPC_DEFAULT_CACHE_LINE_SIZE != 32
  #error "unexpected PPC_DEFAULT_CACHE_LINE_SIZE value"
#endif

/* Optional data-cache-block-touch (dcbt) prefetch of rega + regb.
 * Expands to nothing when the BSP does not enable it. */
#ifdef BSP_USE_DATA_CACHE_BLOCK_TOUCH
  #define DATA_CACHE_TOUCH(rega, regb) \
	dcbt rega, regb
#else
  #define DATA_CACHE_TOUCH(rega, regb)
#endif

/* Zero (dcbz) the save-context cache line at r3 + offset — avoids the
 * line fill for a line we are about to overwrite completely — and
 * prefetch the matching restore-context line at r4 + offset.  Only
 * valid with a data cache and 32-byte lines; otherwise a no-op.
 * Note: reg is left holding the offset as a side effect (the context
 * switch code relies on this for the stwcx. index below). */
#if BSP_DATA_CACHE_ENABLED && PPC_CACHE_ALIGNMENT == 32
  #define DATA_CACHE_ZERO_AND_TOUCH(reg, offset) \
	li reg, offset; dcbz reg, r3; DATA_CACHE_TOUCH(reg, r4)
#else
  #define DATA_CACHE_ZERO_AND_TOUCH(reg, offset)
#endif

/* Byte offsets of the second through sixth 32-byte cache lines of a
 * Context_Control, used to schedule dcbz/dcbt between register stores. */
#define PPC_CONTEXT_CACHE_LINE_0 32
#define PPC_CONTEXT_CACHE_LINE_1 64
#define PPC_CONTEXT_CACHE_LINE_2 96
#define PPC_CONTEXT_CACHE_LINE_3 128
#define PPC_CONTEXT_CACHE_LINE_4 160

/*
 * Offsets for various Contexts
 */

/* Each FP slot is 8 bytes (lfd/stfd) with hardware double support,
 * 4 bytes (lfs/stfs) otherwise. */
#if (PPC_HAS_DOUBLE==1)
	.set	FP_SIZE,	8
#define LDF	lfd
#define STF	stfd
#else
	.set	FP_SIZE,	4
#define LDF	lfs
#define STF	stfs
#endif

/* Offsets of f0..f31 and FPSCR within Context_Control_fp. */
	.set	FP_0, 0
	.set	FP_1, (FP_0 + FP_SIZE)
	.set	FP_2, (FP_1 + FP_SIZE)
	.set	FP_3, (FP_2 + FP_SIZE)
	.set	FP_4, (FP_3 + FP_SIZE)
	.set	FP_5, (FP_4 + FP_SIZE)
	.set	FP_6, (FP_5 + FP_SIZE)
	.set	FP_7, (FP_6 + FP_SIZE)
	.set	FP_8, (FP_7 + FP_SIZE)
	.set	FP_9, (FP_8 + FP_SIZE)
	.set	FP_10, (FP_9 + FP_SIZE)
	.set	FP_11, (FP_10 + FP_SIZE)
	.set	FP_12, (FP_11 + FP_SIZE)
	.set	FP_13, (FP_12 + FP_SIZE)
	.set	FP_14, (FP_13 + FP_SIZE)
	.set	FP_15, (FP_14 + FP_SIZE)
	.set	FP_16, (FP_15 + FP_SIZE)
	.set	FP_17, (FP_16 + FP_SIZE)
	.set	FP_18, (FP_17 + FP_SIZE)
	.set	FP_19, (FP_18 + FP_SIZE)
	.set	FP_20, (FP_19 + FP_SIZE)
	.set	FP_21, (FP_20 + FP_SIZE)
	.set	FP_22, (FP_21 + FP_SIZE)
	.set	FP_23, (FP_22 + FP_SIZE)
	.set	FP_24, (FP_23 + FP_SIZE)
	.set	FP_25, (FP_24 + FP_SIZE)
	.set	FP_26, (FP_25 + FP_SIZE)
	.set	FP_27, (FP_26 + FP_SIZE)
	.set	FP_28, (FP_27 + FP_SIZE)
	.set	FP_29, (FP_28 + FP_SIZE)
	.set	FP_30, (FP_29 + FP_SIZE)
	.set	FP_31, (FP_30 + FP_SIZE)
	.set	FP_FPSCR, (FP_31 + FP_SIZE)

	BEGIN_CODE
/*
 *  _CPU_Context_save_fp
 *
 *  This routine is responsible for saving the FP context
 *  at *fp_context_ptr.  If the point to load the FP context
 *  from is changed then the pointer is modified by this routine.
 *
 *  Sometimes a macro implementation of this is in cpu.h which dereferences
 *  the ** and a similarly named routine in this file is passed something
 *  like a (Context_Control_fp *).  The general rule on making this decision
 *  is to avoid writing assembly language.
 *
 *  In:    r3 = Context_Control_fp ** (dereferenced below)
 *  Clobb: r3, r4, r5, f2 is read back from the save area only via the
 *         store, CR0, MSR[FP] is restored to its entry value
 */

	ALIGN (PPC_CACHE_ALIGNMENT, PPC_CACHE_ALIGN_POWER)
	PUBLIC_PROC (_CPU_Context_save_fp)
PROC (_CPU_Context_save_fp):
#if (PPC_HAS_FPU == 1)
/* A FP context switch may occur in an ISR or exception handler when the FPU is not
 * available. Therefore, we must explicitly enable it here!
 */
	mfmsr	r4			/* r4 = MSR on entry */
	andi.	r5,r4,MSR_FP		/* CR0.eq set iff FPU currently off */
	bne	1f			/* already enabled: skip mtmsr */
	ori	r5,r4,MSR_FP
	mtmsr	r5			/* turn MSR[FP] on */
	isync				/* context-synchronize the MSR change */
1:
	lwz	r3, 0(r3)		/* r3 = *fp_context_ptr (save area) */
	STF	f0, FP_0(r3)
	STF	f1, FP_1(r3)
	STF	f2, FP_2(r3)
	STF	f3, FP_3(r3)
	STF	f4, FP_4(r3)
	STF	f5, FP_5(r3)
	STF	f6, FP_6(r3)
	STF	f7, FP_7(r3)
	STF	f8, FP_8(r3)
	STF	f9, FP_9(r3)
	STF	f10, FP_10(r3)
	STF	f11, FP_11(r3)
	STF	f12, FP_12(r3)
	STF	f13, FP_13(r3)
	STF	f14, FP_14(r3)
	STF	f15, FP_15(r3)
	STF	f16, FP_16(r3)
	STF	f17, FP_17(r3)
	STF	f18, FP_18(r3)
	STF	f19, FP_19(r3)
	STF	f20, FP_20(r3)
	STF	f21, FP_21(r3)
	STF	f22, FP_22(r3)
	STF	f23, FP_23(r3)
	STF	f24, FP_24(r3)
	STF	f25, FP_25(r3)
	STF	f26, FP_26(r3)
	STF	f27, FP_27(r3)
	STF	f28, FP_28(r3)
	STF	f29, FP_29(r3)
	STF	f30, FP_30(r3)
	mffs	f2			/* f2 = FPSCR (f2 itself already saved) */
	STF	f31, FP_31(r3)
	STF	f2, FP_FPSCR(r3)
	/* CR0 is still the andi. result from above — no instruction in
	 * between alters CR0.  Skip the MSR restore if FPU was already on. */
	bne	1f
	mtmsr	r4			/* restore entry MSR (FPU back off) */
	isync
1:
#endif
	blr
179
/*
 *  _CPU_Context_restore_fp
 *
 *  This routine is responsible for restoring the FP context
 *  at *fp_context_ptr.  If the point to load the FP context
 *  from is changed then the pointer is modified by this routine.
 *
 *  Sometimes a macro implementation of this is in cpu.h which dereferences
 *  the ** and a similarly named routine in this file is passed something
 *  like a (Context_Control_fp *).  The general rule on making this decision
 *  is to avoid writing assembly language.
 *
 *  In:    r3 = Context_Control_fp ** (dereferenced below)
 *  Clobb: r3, r4, r5, f0..f31, FPSCR, CR0; MSR[FP] restored to entry value
 */

	ALIGN (PPC_CACHE_ALIGNMENT, PPC_CACHE_ALIGN_POWER)
	PUBLIC_PROC (_CPU_Context_restore_fp)
PROC (_CPU_Context_restore_fp):
#if (PPC_HAS_FPU == 1)
	lwz	r3, 0(r3)		/* r3 = *fp_context_ptr (restore area) */
/* A FP context switch may occur in an ISR or exception handler when the FPU is not
 * available. Therefore, we must explicitly enable it here!
 */
	mfmsr	r4			/* r4 = MSR on entry */
	andi.	r5,r4,MSR_FP		/* CR0.eq set iff FPU currently off */
	bne	1f			/* already enabled: skip mtmsr */
	ori	r5,r4,MSR_FP
	mtmsr	r5			/* turn MSR[FP] on */
	isync				/* context-synchronize the MSR change */
1:
	/* Restore FPSCR first, through f2; f2 is reloaded afterwards. */
	LDF	f2, FP_FPSCR(r3)
	mtfsf	255, f2			/* write all 8 FPSCR fields */
	LDF	f0, FP_0(r3)
	LDF	f1, FP_1(r3)
	LDF	f2, FP_2(r3)
	LDF	f3, FP_3(r3)
	LDF	f4, FP_4(r3)
	LDF	f5, FP_5(r3)
	LDF	f6, FP_6(r3)
	LDF	f7, FP_7(r3)
	LDF	f8, FP_8(r3)
	LDF	f9, FP_9(r3)
	LDF	f10, FP_10(r3)
	LDF	f11, FP_11(r3)
	LDF	f12, FP_12(r3)
	LDF	f13, FP_13(r3)
	LDF	f14, FP_14(r3)
	LDF	f15, FP_15(r3)
	LDF	f16, FP_16(r3)
	LDF	f17, FP_17(r3)
	LDF	f18, FP_18(r3)
	LDF	f19, FP_19(r3)
	LDF	f20, FP_20(r3)
	LDF	f21, FP_21(r3)
	LDF	f22, FP_22(r3)
	LDF	f23, FP_23(r3)
	LDF	f24, FP_24(r3)
	LDF	f25, FP_25(r3)
	LDF	f26, FP_26(r3)
	LDF	f27, FP_27(r3)
	LDF	f28, FP_28(r3)
	LDF	f29, FP_29(r3)
	LDF	f30, FP_30(r3)
	LDF	f31, FP_31(r3)
	/* CR0 still holds the andi. result — loads/mtfsf do not touch CR0.
	 * Skip the MSR restore if FPU was already on at entry. */
	bne	1f
	mtmsr	r4			/* restore entry MSR (FPU back off) */
	isync
1:
#endif
	blr
248
/*
 *  _CPU_Context_switch
 *
 *  In:  r3 = executing thread's Context_Control (save target)
 *       r4 = heir thread's Context_Control (restore source)
 *
 *  Saves the integer context (GPR1, GPR2, GPR14..GPR31, MSR, LR, CR) of
 *  the executing thread into *r3, then falls through to restore_context
 *  to load the heir's context from *r4 and return on the heir's stack.
 *  dcbz/dcbt scheduling via DATA_CACHE_ZERO_AND_TOUCH assumes the
 *  context starts on a 32-byte cache line boundary.
 */
	ALIGN (PPC_CACHE_ALIGNMENT, PPC_CACHE_ALIGN_POWER)
	PUBLIC_PROC (_CPU_Context_switch)
PROC (_CPU_Context_switch):

#ifdef BSP_USE_SYNC_IN_CONTEXT_SWITCH
	sync
	isync
#endif

	/* Align to a cache line (clear the low 5 bits of both pointers) */
	clrrwi	r3, r3, 5
	clrrwi	r4, r4, 5

	/* Zero the first two save-area lines / prefetch the restore side.
	 * Side effect: r10 = PPC_CONTEXT_CACHE_LINE_0 when this expands. */
	DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_0)
	DATA_CACHE_ZERO_AND_TOUCH(r11, PPC_CONTEXT_CACHE_LINE_1)

	/* Save context to r3 */

	mfmsr	r5
	mflr	r6
	mfcr	r7

	/*
	 * We have to clear the reservation of the executing thread.  See also
	 * Book E section 6.1.6.2 "Atomic Update Primitives".  Recent GCC
	 * versions use atomic operations in the C++ library for example.
	 */
#if PPC_CONTEXT_OFFSET_GPR1 != PPC_CONTEXT_CACHE_LINE_0 \
  || !BSP_DATA_CACHE_ENABLED \
  || PPC_CACHE_ALIGNMENT != 32
	/* r10 was not (or not correctly) set by the dcbz macro above */
	li	r10, PPC_CONTEXT_OFFSET_GPR1
#endif
	/* stwcx. to a valid location: stores GPR1 only if a stale
	 * reservation exists, and clears the reservation either way */
	stwcx.	r1, r3, r10

	stw	r1, PPC_CONTEXT_OFFSET_GPR1(r3)
	stw	r5, PPC_CONTEXT_OFFSET_MSR(r3)
	stw	r6, PPC_CONTEXT_OFFSET_LR(r3)
	stw	r7, PPC_CONTEXT_OFFSET_CR(r3)
	PPC_GPR_STORE	r14, PPC_CONTEXT_OFFSET_GPR14(r3)
	PPC_GPR_STORE	r15, PPC_CONTEXT_OFFSET_GPR15(r3)

	/* Zero/touch later lines just before their first store; the exact
	 * offset that maps to each line depends on PPC_GPR_STORE's size */
#if PPC_CONTEXT_OFFSET_GPR20 == PPC_CONTEXT_CACHE_LINE_2
	DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_2)
#endif

	PPC_GPR_STORE	r16, PPC_CONTEXT_OFFSET_GPR16(r3)
	PPC_GPR_STORE	r17, PPC_CONTEXT_OFFSET_GPR17(r3)

#if PPC_CONTEXT_OFFSET_GPR26 == PPC_CONTEXT_CACHE_LINE_2
	DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_2)
#endif

	PPC_GPR_STORE	r18, PPC_CONTEXT_OFFSET_GPR18(r3)
	PPC_GPR_STORE	r19, PPC_CONTEXT_OFFSET_GPR19(r3)

#if PPC_CONTEXT_OFFSET_GPR24 == PPC_CONTEXT_CACHE_LINE_3
	DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_3)
#endif

	PPC_GPR_STORE	r20, PPC_CONTEXT_OFFSET_GPR20(r3)
	PPC_GPR_STORE	r21, PPC_CONTEXT_OFFSET_GPR21(r3)
	PPC_GPR_STORE	r22, PPC_CONTEXT_OFFSET_GPR22(r3)
	PPC_GPR_STORE	r23, PPC_CONTEXT_OFFSET_GPR23(r3)

#if PPC_CONTEXT_OFFSET_GPR28 == PPC_CONTEXT_CACHE_LINE_4
	DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_4)
#endif

	PPC_GPR_STORE	r24, PPC_CONTEXT_OFFSET_GPR24(r3)
	PPC_GPR_STORE	r25, PPC_CONTEXT_OFFSET_GPR25(r3)
	PPC_GPR_STORE	r26, PPC_CONTEXT_OFFSET_GPR26(r3)
	PPC_GPR_STORE	r27, PPC_CONTEXT_OFFSET_GPR27(r3)

	PPC_GPR_STORE	r28, PPC_CONTEXT_OFFSET_GPR28(r3)
	PPC_GPR_STORE	r29, PPC_CONTEXT_OFFSET_GPR29(r3)
	PPC_GPR_STORE	r30, PPC_CONTEXT_OFFSET_GPR30(r3)
	PPC_GPR_STORE	r31, PPC_CONTEXT_OFFSET_GPR31(r3)

	stw	r2, PPC_CONTEXT_OFFSET_GPR2(r3)

	/* Restore context from r4 (also entered from _CPU_Context_restore) */
restore_context:

#ifdef __ALTIVEC__
	/* r14 is safe scratch here: it is reloaded from the heir context
	 * right after the call */
	mr	r14, r4
	.extern _CPU_Context_switch_altivec
	bl	_CPU_Context_switch_altivec
	mr	r4, r14
#endif

	lwz	r1, PPC_CONTEXT_OFFSET_GPR1(r4)
	lwz	r5, PPC_CONTEXT_OFFSET_MSR(r4)
	lwz	r6, PPC_CONTEXT_OFFSET_LR(r4)
	lwz	r7, PPC_CONTEXT_OFFSET_CR(r4)

	PPC_GPR_LOAD	r14, PPC_CONTEXT_OFFSET_GPR14(r4)
	PPC_GPR_LOAD	r15, PPC_CONTEXT_OFFSET_GPR15(r4)

	/* Prefetch the heir's stack top (r1 just became valid) */
	DATA_CACHE_TOUCH(r0, r1)

	PPC_GPR_LOAD	r16, PPC_CONTEXT_OFFSET_GPR16(r4)
	PPC_GPR_LOAD	r17, PPC_CONTEXT_OFFSET_GPR17(r4)
	PPC_GPR_LOAD	r18, PPC_CONTEXT_OFFSET_GPR18(r4)
	PPC_GPR_LOAD	r19, PPC_CONTEXT_OFFSET_GPR19(r4)

	PPC_GPR_LOAD	r20, PPC_CONTEXT_OFFSET_GPR20(r4)
	PPC_GPR_LOAD	r21, PPC_CONTEXT_OFFSET_GPR21(r4)
	PPC_GPR_LOAD	r22, PPC_CONTEXT_OFFSET_GPR22(r4)
	PPC_GPR_LOAD	r23, PPC_CONTEXT_OFFSET_GPR23(r4)

	PPC_GPR_LOAD	r24, PPC_CONTEXT_OFFSET_GPR24(r4)
	PPC_GPR_LOAD	r25, PPC_CONTEXT_OFFSET_GPR25(r4)
	PPC_GPR_LOAD	r26, PPC_CONTEXT_OFFSET_GPR26(r4)
	PPC_GPR_LOAD	r27, PPC_CONTEXT_OFFSET_GPR27(r4)

	PPC_GPR_LOAD	r28, PPC_CONTEXT_OFFSET_GPR28(r4)
	PPC_GPR_LOAD	r29, PPC_CONTEXT_OFFSET_GPR29(r4)
	PPC_GPR_LOAD	r30, PPC_CONTEXT_OFFSET_GPR30(r4)
	PPC_GPR_LOAD	r31, PPC_CONTEXT_OFFSET_GPR31(r4)

	lwz	r2, PPC_CONTEXT_OFFSET_GPR2(r4)

	mtcr	r7
	mtlr	r6
	mtmsr	r5			/* heir's MSR last — may change FP/EE */

#ifdef BSP_USE_SYNC_IN_CONTEXT_SWITCH
	isync
#endif

	blr				/* return through the heir's LR */

380
/*
 *  _CPU_Context_restore
 *
 *  In:  r3 = Context_Control to restore (no context is saved)
 *
 *  Moves the argument into r4 and jumps into the restore half of
 *  _CPU_Context_switch above.
 */
	PUBLIC_PROC (_CPU_Context_restore)
PROC (_CPU_Context_restore):
	/* Align to a cache line; restore_context expects the context in r4 */
	clrrwi	r4, r3, 5

#ifdef __ALTIVEC__
	/* r3 = 0 for _CPU_Context_switch_altivec — presumably signals that
	 * there is no executing context to save; confirm against that
	 * routine's definition */
	li	r3, 0
#endif

	b	restore_context
Note: See TracBrowser for help on using the repository browser.