source: rtems/cpukit/score/cpu/arm/arm_exc_interrupt.S @ 2afb22b

5
Last change on this file since 2afb22b was d5c8756, checked in by Sebastian Huber <sebastian.huber@…>, on 05/23/17 at 08:03:56

arm: Fix profiling support of Thumb-1 targets

Update #2751.

  • Property mode set to 100644
File size: 7.2 KB
Line 
1/**
2 * @file
3 *
4 * @ingroup ScoreCPU
5 *
6 * @brief ARM interrupt exception prologue and epilogue.
7 */
8
9/*
10 * Copyright (c) 2009, 2016 embedded brains GmbH.  All rights reserved.
11 *
12 *  embedded brains GmbH
13 *  Dornierstr. 4
14 *  82178 Puchheim
15 *  Germany
16 *  <rtems@embedded-brains.de>
17 *
18 * The license and distribution terms for this file may be
19 * found in the file LICENSE in this distribution or at
20 * http://www.rtems.org/license/LICENSE.
21 */
22
23/*
24 * The upper EXCHANGE_SIZE bytes of the INT stack area are used for data
25 * exchange between INT and SVC mode.  Below this is the actual INT stack.
26 * The exchange area is only accessed if INT is disabled.
27 */
28
29#ifdef HAVE_CONFIG_H
30#include "config.h"
31#endif
32
33#include <rtems/asm.h>
34
35#ifdef ARM_MULTILIB_ARCH_V4
36
/*
 * Registers carried from INT to SVC mode through the exchange area.  They
 * are stored below the INT stack pointer on entry and reloaded once the
 * handler runs in SVC mode.
 */
37#define EXCHANGE_LR r4
38#define EXCHANGE_SPSR r5
39#define EXCHANGE_CPSR r6
40#define EXCHANGE_INT_SP r8
41
42#define EXCHANGE_LIST {EXCHANGE_LR, EXCHANGE_SPSR, EXCHANGE_CPSR, EXCHANGE_INT_SP}
43#define EXCHANGE_SIZE 16
44
45#define SELF_CPU_CONTROL r7
46#define NON_VOLATILE_SCRATCH r9
47
48#define CONTEXT_LIST {r0, r1, r2, r3, EXCHANGE_LR, EXCHANGE_SPSR, SELF_CPU_CONTROL, r12}
49#define CONTEXT_SIZE 32
50
51.arm
52.globl _ARMV4_Exception_interrupt
53_ARMV4_Exception_interrupt:
54
55        /* Save exchange registers to exchange area (IRQ is still masked here) */
56        stmdb   sp, EXCHANGE_LIST
57
58        /* Set exchange registers */
59        mov     EXCHANGE_LR, lr
60        mrs     EXCHANGE_SPSR, SPSR
61        mrs     EXCHANGE_CPSR, CPSR
62        sub     EXCHANGE_INT_SP, sp, #EXCHANGE_SIZE
63
64        /* Switch to SVC mode (the INT and SVC mode encodings differ only in bit 0) */
65        orr     EXCHANGE_CPSR, EXCHANGE_CPSR, #0x1
66        msr     CPSR_c, EXCHANGE_CPSR
67
68        /*
69         * Save context.  We save the link register separately because it has
70         * to be restored in SVC mode.  The other registers can be restored in
71         * INT mode.  Ensure that stack remains 8 byte aligned.  Use register
72         * necessary for the stack alignment for the stack pointer of the
73         * interrupted context.
74         */
75        stmdb   sp!, CONTEXT_LIST
76        stmdb   sp!, {NON_VOLATILE_SCRATCH, lr}
77
78#ifdef ARM_MULTILIB_VFP
79        /* Save VFP context (r1 is pushed only to keep the stack 8-byte aligned) */
80        vmrs    r0, FPSCR
81        vstmdb  sp!, {d0-d7}
82#ifdef ARM_MULTILIB_VFP_D32
83        vstmdb  sp!, {d16-d31}
84#endif
85        stmdb   sp!, {r0, r1}
86#endif /* ARM_MULTILIB_VFP */
87
88        /* Get per-CPU control of current processor */
89        GET_SELF_CPU_CONTROL    SELF_CPU_CONTROL
90
91        /* Remember INT stack pointer (base of the exchange area) */
92        mov     r1, EXCHANGE_INT_SP
93
94        /* Restore exchange registers from exchange area */
95        ldmia   r1, EXCHANGE_LIST
96
97        /* Get interrupt nest level */
98        ldr     r2, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]
99
100        /* Save the SVC stack pointer; only the outermost interrupt switches to the INT stack */
101        mov     NON_VOLATILE_SCRATCH, sp
102        cmp     r2, #0
103        moveq   sp, r1
104
105        /* Switch to Thumb-2 instructions if necessary */
106        SWITCH_FROM_ARM_TO_THUMB_2      r1
107
108        /* Increment interrupt nest and thread dispatch disable level */
109        ldr     r3, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
110        add     r2, #1
111        add     r3, #1
112        str     r2, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]
113        str     r3, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
114
115        /* Call BSP dependent interrupt dispatcher */
116#ifdef RTEMS_PROFILING
117        cmp     r2, #1
118        bne     .Lskip_profiling
119        BLX_TO_THUMB_1  _CPU_Counter_read
        /* Keep the entry time stamp in a register preserved across the calls below */
120        mov     SELF_CPU_CONTROL, r0
121        BLX_TO_THUMB_1  bsp_interrupt_dispatch
122        BLX_TO_THUMB_1  _CPU_Counter_read
        /* Arguments: r0 = per-CPU control, r1 = entry time stamp, r2 = exit time stamp */
123        mov     r2, r0
124        mov     r1, SELF_CPU_CONTROL
125        GET_SELF_CPU_CONTROL    r0
126        mov     SELF_CPU_CONTROL, r0
127        BLX_TO_THUMB_1  _Profiling_Outer_most_interrupt_entry_and_exit
128.Lprofiling_done:
129#else
130        BLX_TO_THUMB_1  bsp_interrupt_dispatch
131#endif
132
133        /* Load some per-CPU variables */
134        ldr     r0, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
135        ldrb    r1, [SELF_CPU_CONTROL, #PER_CPU_DISPATCH_NEEDED]
136        ldr     r2, [SELF_CPU_CONTROL, #PER_CPU_ISR_DISPATCH_DISABLE]
137        ldr     r3, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]
138
139        /* Restore stack pointer */
140        mov     sp, NON_VOLATILE_SCRATCH
141
142        /* Save CPSR in non-volatile register */
143        mrs     NON_VOLATILE_SCRATCH, CPSR
144
145        /*
         * Decrement levels and determine thread dispatch state.  Afterwards
         * r1 == 0 exactly if the thread dispatch disable level was one, a
         * thread dispatch is necessary and ISR dispatch is not disabled.
         */
146        eor     r1, r0
147        sub     r0, #1
148        orr     r1, r0
149        orr     r1, r2
150        sub     r3, #1
151
152        /* Store thread dispatch disable and ISR nest levels */
153        str     r0, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
154        str     r3, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]
155
156        /*
157         * Check thread dispatch necessary, ISR dispatch disable and thread
158         * dispatch disable level.
159         */
160        cmp     r1, #0
161        bne     .Lthread_dispatch_done
162
163        /* Thread dispatch */
164        mrs     NON_VOLATILE_SCRATCH, CPSR
165
166.Ldo_thread_dispatch:
167
168        /* Set ISR dispatch disable and thread dispatch disable level to one */
169        mov     r0, #1
170        str     r0, [SELF_CPU_CONTROL, #PER_CPU_ISR_DISPATCH_DISABLE]
171        str     r0, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
172
173        /*
         * Call _Thread_Do_dispatch().  The level argument in r1 is the saved
         * CPSR with the IRQ mask bit (0x80) cleared, thus this function will
         * enable interrupts.
         */
174        mov     r0, SELF_CPU_CONTROL
175        mov     r1, NON_VOLATILE_SCRATCH
176        mov     r2, #0x80
177        bic     r1, r2
178        BLX_TO_THUMB_1  _Thread_Do_dispatch
179
180        /* Disable interrupts by restoring the previously saved CPSR */
181        msr     CPSR, NON_VOLATILE_SCRATCH
182
183#ifdef RTEMS_SMP
        /* Reload the per-CPU control; it may have changed during the dispatch */
184        GET_SELF_CPU_CONTROL    SELF_CPU_CONTROL
185#endif
186
187        /* Check if we have to do the thread dispatch again */
188        ldrb    r0, [SELF_CPU_CONTROL, #PER_CPU_DISPATCH_NEEDED]
189        cmp     r0, #0
190        bne     .Ldo_thread_dispatch
191
192        /* We are done with thread dispatching */
193        mov     r0, #0
194        str     r0, [SELF_CPU_CONTROL, #PER_CPU_ISR_DISPATCH_DISABLE]
195
196.Lthread_dispatch_done:
197
198        /* Switch to ARM instructions if necessary */
199        SWITCH_FROM_THUMB_2_TO_ARM
200
201#ifdef ARM_MULTILIB_VFP
202        /* Restore VFP context (r1 was alignment filler and is ignored) */
203        ldmia   sp!, {r0, r1}
204#ifdef ARM_MULTILIB_VFP_D32
205        vldmia  sp!, {d16-d31}
206#endif
207        vldmia  sp!, {d0-d7}
208        vmsr    FPSCR, r0
209#endif /* ARM_MULTILIB_VFP */
210
211        /* Restore NON_VOLATILE_SCRATCH register and link register */
212        ldmia   sp!, {NON_VOLATILE_SCRATCH, lr}
213
214        /*
215         * XXX: Remember and restore stack pointer.  The data on the stack is
216         * still in use.  So the stack is now in an inconsistent state.  The
217         * FIQ handler implementation must not use this area.
218         */
219        mov     r0, sp
220        add     sp, #CONTEXT_SIZE
221
222        /* Get INT mode program status register (clear bit 0 to go back from SVC to INT) */
223        mrs     r1, CPSR
224        bic     r1, r1, #0x1
225
226        /* Switch to INT mode */
227        msr     CPSR_c, r1
228
229        /* Save EXCHANGE_LR and EXCHANGE_SPSR registers to exchange area */
230        stmdb   sp!, {EXCHANGE_LR, EXCHANGE_SPSR}
231
232        /* Restore context */
233        ldmia   r0, CONTEXT_LIST
234
235        /* Set return address and program status */
236        mov     lr, EXCHANGE_LR
237        msr     SPSR_fsxc, EXCHANGE_SPSR
238
239        /* Restore EXCHANGE_LR and EXCHANGE_SPSR registers from exchange area */
240        ldmia   sp!, {EXCHANGE_LR, EXCHANGE_SPSR}
241
242#ifdef ARM_MULTILIB_HAS_LOAD_STORE_EXCLUSIVE
243        /*
244         * We must clear reservations here, since otherwise compare-and-swap
245         * atomic operations with interrupts enabled may yield wrong results.
246         * A compare-and-swap atomic operation is generated by the compiler
247         * like this:
248         *
249         *   .L1:
250         *     ldrex r1, [r0]
251         *     cmp   r1, r3
252         *     bne   .L2
253         *     strex r3, r2, [r0]
254         *     cmp   r3, #0
255         *     bne   .L1
256         *   .L2:
257         *
258         * Consider the following scenario.  A thread is interrupted right
259         * before the strex.  The interrupt updates the value using a
260         * compare-and-swap sequence.  Everything is fine up to this point.
261         * The interrupt performs now a compare-and-swap sequence which fails
262         * with a branch to .L2.  The current processor has now a reservation.
263         * The interrupt returns without further strex.  The thread updates the
264         * value using the unrelated reservation of the interrupt.
265         */
266        clrex
267#endif
268
269        /* Return from interrupt; subs pc also restores the CPSR from the SPSR */
270        subs    pc, lr, #4
271
272#ifdef RTEMS_PROFILING
273#ifdef __thumb2__
274.thumb
275#else
276.arm
277#endif
/* Nested interrupts skip the outer-most entry/exit profiling above */
278.Lskip_profiling:
279        BLX_TO_THUMB_1  bsp_interrupt_dispatch
280        b       .Lprofiling_done
281#endif
282
283#endif /* ARM_MULTILIB_ARCH_V4 */
Note: See TracBrowser for help on using the repository browser.