/* SPDX-License-Identifier: BSD-2-Clause */

/**
 * @file
 *
 * @ingroup RTEMSScoreCPUARM
 *
 * @brief ARM interrupt exception prologue and epilogue.
 */

/*
 * Copyright (c) 2009, 2022 embedded brains GmbH. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * The upper EXCHANGE_SIZE bytes of the INT stack area are used for data
 * exchange between INT and SVC mode. Below this is the actual INT stack.
 * The exchange area is only accessed while INT is disabled.
 */
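
/*
 * A sketch of this layout (full descending stack; SP_irq stays at the
 * top of the INT stack area since the stores below use no writeback):
 *
 *   +------------------------------+ <- SP_irq (top of INT stack area)
 *   | exchange area                |    EXCHANGE_LR, EXCHANGE_SPSR,
 *   | (EXCHANGE_SIZE bytes)        |    EXCHANGE_CPSR, EXCHANGE_INT_SP
 *   +------------------------------+ <- EXCHANGE_INT_SP == SP_irq - 16
 *   | actual INT stack             |
 *   |             ...              |
 *   +------------------------------+
 */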

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <rtems/asm.h>

#ifdef ARM_MULTILIB_ARCH_V4

#define STACK_POINTER_ADJUST r7
#define NON_VOLATILE_SCRATCH r9

#ifndef ARM_MULTILIB_HAS_STORE_RETURN_STATE

#define EXCHANGE_LR r4
#define EXCHANGE_SPSR r5
#define EXCHANGE_CPSR r6
#define EXCHANGE_INT_SP r8

#define EXCHANGE_LIST {EXCHANGE_LR, EXCHANGE_SPSR, EXCHANGE_CPSR, EXCHANGE_INT_SP}
#define EXCHANGE_SIZE 16

#define CONTEXT_LIST {r0, r1, r2, r3, EXCHANGE_LR, EXCHANGE_SPSR, NON_VOLATILE_SCRATCH, r12}
#define CONTEXT_SIZE 32

#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */

.arm
.globl _ARMV4_Exception_interrupt
_ARMV4_Exception_interrupt:

#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
	/* Prepare return from interrupt */
	sub	lr, lr, #4
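	/*
	 * For an IRQ exception, the preferred return address is LR_irq minus
	 * four, hence the adjustment above.
	 */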

	/* Save LR_irq and SPSR_irq to the SVC stack */
	srsfd	sp!, #ARM_PSR_M_SVC

	/* Switch to SVC mode */
	cps	#ARM_PSR_M_SVC

	/*
	 * Save the volatile registers, two non-volatile registers used for
	 * interrupt processing, and the link register.
	 */
	push	{r0-r3, STACK_POINTER_ADJUST, NON_VOLATILE_SCRATCH, r12, lr}
#else /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
	/* Save exchange registers to exchange area */
	stmdb	sp, EXCHANGE_LIST
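	/*
	 * Note that the store above uses no writeback, so SP_irq keeps
	 * pointing to the top of the INT stack area.
	 */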

	/* Set exchange registers */
	mov	EXCHANGE_LR, lr
	mrs	EXCHANGE_SPSR, SPSR
	mrs	EXCHANGE_CPSR, CPSR
	sub	EXCHANGE_INT_SP, sp, #EXCHANGE_SIZE

	/* Switch to SVC mode */
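	/*
	 * Setting the least significant mode bit turns IRQ mode (0b10010)
	 * into SVC mode (0b10011).
	 */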
	orr	EXCHANGE_CPSR, EXCHANGE_CPSR, #0x1
	msr	CPSR_c, EXCHANGE_CPSR

	/*
	 * Save context. We save the link register separately because it has
	 * to be restored in SVC mode. The other registers can be restored in
	 * INT mode. Ensure that the size of the saved registers is an
	 * integral multiple of 8 bytes. Provide a non-volatile scratch
	 * register which may be used across function calls.
	 */
	push	CONTEXT_LIST
	push	{STACK_POINTER_ADJUST, lr}
#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */

	/*
	 * On a public interface, the stack pointer must be aligned on an
	 * 8-byte boundary. However, it may temporarily be only aligned on a
	 * 4-byte boundary. Make sure the stack pointer is aligned on an
	 * 8-byte boundary.
	 */
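	/*
	 * For example (addresses are illustrative), sp == 0x2001ff4c yields
	 * an adjustment of four and an aligned sp == 0x2001ff48, while an
	 * already 8-byte aligned sp yields an adjustment of zero.
	 */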
	and	STACK_POINTER_ADJUST, sp, #0x4
	sub	sp, sp, STACK_POINTER_ADJUST

	/* Get per-CPU control of current processor */
	GET_SELF_CPU_CONTROL	r0

#ifdef ARM_MULTILIB_VFP
	/* Save VFP context */
	vmrs	r2, FPSCR
	vpush	{d0-d7}
#ifdef ARM_MULTILIB_VFP_D32
	vpush	{d16-d31}
#endif
	push	{r2, r3}
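	/*
	 * The r3 slot above is only a filler which keeps the stack 8-byte
	 * aligned; only the saved FPSCR in the r2 slot is restored later.
	 */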
#endif /* ARM_MULTILIB_VFP */

#ifndef ARM_MULTILIB_HAS_STORE_RETURN_STATE
	/* Remember INT stack pointer */
	mov	r1, EXCHANGE_INT_SP

	/* Restore exchange registers from exchange area */
	ldmia	r1, EXCHANGE_LIST
#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */

	/* Get interrupt nest level */
	ldr	r2, [r0, #PER_CPU_ISR_NEST_LEVEL]

	/* Switch stack if necessary and save original stack pointer */
	mov	NON_VOLATILE_SCRATCH, sp
	cmp	r2, #0
#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
	ldreq	sp, [r0, #PER_CPU_INTERRUPT_STACK_HIGH]
#else
	moveq	sp, r1
#endif

	/* Increment interrupt nest and thread dispatch disable level */
	ldr	r3, [r0, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
	add	r2, r2, #1
	add	r3, r3, #1
	str	r2, [r0, #PER_CPU_ISR_NEST_LEVEL]
	str	r3, [r0, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]

	/* Call BSP dependent interrupt dispatcher */
#ifdef RTEMS_PROFILING
	cmp	r2, #1
	bne	.Lskip_profiling
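	/*
	 * For the outermost interrupt, read the CPU counter immediately
	 * before and after the interrupt dispatch and report both instants
	 * to the profiling support. The push of {r0, r1} keeps the stack
	 * 8-byte aligned; only the saved r0 (the entry instant) is used
	 * afterwards, the r1 slot is popped into r3 and discarded.
	 */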
	BLX_TO_THUMB_1	_CPU_Counter_read
	push	{r0, r1}
	GET_SELF_CPU_CONTROL	r0
	BLX_TO_THUMB_1	bsp_interrupt_dispatch
	BLX_TO_THUMB_1	_CPU_Counter_read
	pop	{r1, r3}
	mov	r2, r0
	GET_SELF_CPU_CONTROL	r0
	BLX_TO_THUMB_1	_Profiling_Outer_most_interrupt_entry_and_exit
.Lprofiling_done:
#else
	BLX_TO_THUMB_1	bsp_interrupt_dispatch
#endif

	/* Get per-CPU control of current processor */
	GET_SELF_CPU_CONTROL	r0

	/* Load some per-CPU variables */
	ldr	r12, [r0, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
	ldrb	r1, [r0, #PER_CPU_DISPATCH_NEEDED]
	ldr	r2, [r0, #PER_CPU_ISR_DISPATCH_DISABLE]
	ldr	r3, [r0, #PER_CPU_ISR_NEST_LEVEL]

	/* Restore stack pointer */
	mov	sp, NON_VOLATILE_SCRATCH

	/* Decrement levels and determine thread dispatch state */
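	/*
	 * The sequence below computes a branch-free dispatch predicate:
	 * afterwards, r1 == 0 if and only if a thread dispatch is needed
	 * (the dispatch needed indicator in r1 is one), the thread dispatch
	 * disable level drops from one to zero (r12 == 1 on entry), and ISR
	 * dispatching is not disabled (r2 == 0).
	 */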
	eor	r1, r1, r12
	sub	r12, r12, #1
	orr	r1, r1, r12
	orr	r1, r1, r2
	sub	r3, r3, #1

	/* Store thread dispatch disable and ISR nest levels */
	str	r12, [r0, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
	str	r3, [r0, #PER_CPU_ISR_NEST_LEVEL]

	/*
	 * Check thread dispatch necessary, ISR dispatch disable and thread
	 * dispatch disable level.
	 */
	cmp	r1, #0
	bne	.Lthread_dispatch_done

	/* Save CPSR in non-volatile register */
	mrs	NON_VOLATILE_SCRATCH, CPSR

	/* Thread dispatch */

.Ldo_thread_dispatch:

	/* Set ISR dispatch disable and thread dispatch disable level to one */
	mov	r12, #1
	str	r12, [r0, #PER_CPU_ISR_DISPATCH_DISABLE]
	str	r12, [r0, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]

	/* Call _Thread_Do_dispatch(), this function will enable interrupts */
	bic	r1, NON_VOLATILE_SCRATCH, #0x80
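	/*
	 * Bit 0x80 is the CPSR I bit, so r1 is the saved program status with
	 * IRQs enabled; it is passed to _Thread_Do_dispatch() as the ISR
	 * level argument in addition to the per-CPU control in r0.
	 */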
	BLX_TO_THUMB_1	_Thread_Do_dispatch

	/* Disable interrupts */
	msr	CPSR, NON_VOLATILE_SCRATCH

	/*
	 * Get per-CPU control of current processor. In SMP configurations,
	 * we may run on another processor after the _Thread_Do_dispatch()
	 * call.
	 */
	GET_SELF_CPU_CONTROL	r0

	/* Check if we have to do the thread dispatch again */
	ldrb	r12, [r0, #PER_CPU_DISPATCH_NEEDED]
	cmp	r12, #0
	bne	.Ldo_thread_dispatch

	/* We are done with thread dispatching */
	mov	r12, #0
	str	r12, [r0, #PER_CPU_ISR_DISPATCH_DISABLE]

.Lthread_dispatch_done:

#ifdef ARM_MULTILIB_VFP
	/* Restore VFP context */
	pop	{r2, r3}
#ifdef ARM_MULTILIB_VFP_D32
	vpop	{d16-d31}
#endif
	vpop	{d0-d7}
	vmsr	FPSCR, r2
#endif /* ARM_MULTILIB_VFP */

	/* Undo stack pointer adjustment */
	add	sp, sp, STACK_POINTER_ADJUST

#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
	/*
	 * Restore the volatile registers, two non-volatile registers used
	 * for interrupt processing, and the link register.
	 */
	pop	{r0-r3, STACK_POINTER_ADJUST, NON_VOLATILE_SCRATCH, r12, lr}
#else /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
	/* Restore STACK_POINTER_ADJUST register and link register */
	pop	{STACK_POINTER_ADJUST, lr}

	/*
	 * XXX: Remember and restore stack pointer. The data on the stack is
	 * still in use. So the stack is now in an inconsistent state. The
	 * FIQ handler implementation must not use this area.
	 */
	mov	r12, sp
	add	sp, #CONTEXT_SIZE

	/* Get INT mode program status register */
	mrs	r1, CPSR
	bic	r1, r1, #0x1

	/* Switch to INT mode */
	msr	CPSR_c, r1

	/* Save EXCHANGE_LR and EXCHANGE_SPSR registers to exchange area */
	push	{EXCHANGE_LR, EXCHANGE_SPSR}

	/* Restore context */
	ldmia	r12, CONTEXT_LIST

	/* Set return address and program status */
	mov	lr, EXCHANGE_LR
	msr	SPSR_fsxc, EXCHANGE_SPSR

	/* Restore EXCHANGE_LR and EXCHANGE_SPSR registers from exchange area */
	pop	{EXCHANGE_LR, EXCHANGE_SPSR}
#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */

#ifdef ARM_MULTILIB_HAS_LOAD_STORE_EXCLUSIVE
	/*
	 * We must clear reservations here, since otherwise compare-and-swap
	 * atomic operations with interrupts enabled may yield wrong results.
	 * A compare-and-swap atomic operation is generated by the compiler
	 * like this:
	 *
	 *   .L1:
	 *     ldrex r1, [r0]
	 *     cmp   r1, r3
	 *     bne   .L2
	 *     strex r3, r2, [r0]
	 *     cmp   r3, #0
	 *     bne   .L1
	 *   .L2:
	 *
	 * Consider the following scenario. A thread is interrupted right
	 * before the strex. The interrupt updates the value using a
	 * compare-and-swap sequence. Everything is fine up to this point.
	 * The interrupt now performs a compare-and-swap sequence which fails
	 * with a branch to .L2. The current processor has now a reservation.
	 * The interrupt returns without further strex. The thread updates
	 * the value using the unrelated reservation of the interrupt.
	 */
	clrex
#endif

	/* Return from interrupt */
#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
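	/*
	 * RFE pops the return address and the SPSR which were saved by the
	 * SRS instruction in the prologue and resumes the interrupted
	 * context.
	 */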
	rfefd	sp!
#else
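	/*
	 * With PC as destination, the S suffix also copies SPSR to CPSR, so
	 * this single instruction restores the mode, the interrupt masks,
	 * and the program counter of the interrupted context.
	 */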
	subs	pc, lr, #4
#endif

#ifdef RTEMS_PROFILING
.Lskip_profiling:
	BLX_TO_THUMB_1	bsp_interrupt_dispatch
	b	.Lprofiling_done
#endif

#endif /* ARM_MULTILIB_ARCH_V4 */