1 | /* SPDX-License-Identifier: BSD-2-Clause */ |
---|
2 | |
---|
3 | /** |
---|
4 | * @file |
---|
5 | * |
---|
6 | * @ingroup RTEMSScoreCPUARM |
---|
7 | * |
---|
8 | * @brief ARM architecture support implementation. |
---|
9 | */ |
---|
10 | |
---|
11 | /* |
---|
12 | * This file contains all assembly code for the ARM implementation |
---|
13 | * of RTEMS. |
---|
14 | * |
---|
15 | * Copyright (c) 2007 by Ray Xu, <Rayx.cn@gmail.com> |
---|
16 | * Thumb support added. |
---|
17 | * |
---|
18 | * Copyright (c) 2002 by Advent Networks, Inc. |
---|
19 | * Jay Monkman <jmonkman@adventnetworks.com> |
---|
20 | * |
---|
21 | * COPYRIGHT (c) 2000 Canon Research Centre France SA. |
---|
22 | * Emmanuel Raguet, mailto:raguet@crf.canon.fr |
---|
23 | * |
---|
24 | * Copyright (C) 2013, 2017 embedded brains GmbH & Co. KG |
---|
25 | * |
---|
26 | * Redistribution and use in source and binary forms, with or without |
---|
27 | * modification, are permitted provided that the following conditions |
---|
28 | * are met: |
---|
29 | * 1. Redistributions of source code must retain the above copyright |
---|
30 | * notice, this list of conditions and the following disclaimer. |
---|
31 | * 2. Redistributions in binary form must reproduce the above copyright |
---|
32 | * notice, this list of conditions and the following disclaimer in the |
---|
33 | * documentation and/or other materials provided with the distribution. |
---|
34 | * |
---|
35 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" |
---|
36 | * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
---|
37 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE |
---|
38 | * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE |
---|
39 | * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
---|
40 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
---|
41 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
---|
42 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
---|
43 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
---|
44 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
---|
45 | * POSSIBILITY OF SUCH DAMAGE. |
---|
46 | * |
---|
47 | */ |
---|
48 | |
---|
49 | #ifdef HAVE_CONFIG_H |
---|
50 | #include "config.h" |
---|
51 | #endif |
---|
52 | |
---|
53 | #include <rtems/asm.h> |
---|
54 | |
---|
55 | #ifdef ARM_MULTILIB_ARCH_V4 |
---|
56 | |
---|
57 | .text |
---|
58 | |
---|
/*
 * void _CPU_Context_switch( run_context, heir_context )
 * void _CPU_Context_restore( new_context )
 *
 * This routine performs a normal non-FP context switch.
 *
 * R0 = run_context    R1 = heir_context
 *
 * This function copies the current registers to where r0 points, then
 * restores the ones from where r1 points.
 *
 * Using the ldm/stm opcodes saves 2-3 us on a 100 MHz ARM9TDMI with
 * a 16 bit data bus.
 *
 */
---|
74 | |
---|
DEFINE_FUNCTION_ARM(_CPU_Context_switch)
	/*
	 * _CPU_Context_switch_no_return() is an alias: it shares this code,
	 * but callers use it when they know the switch never returns.
	 */
	.globl	_CPU_Context_switch_no_return
	.set	_CPU_Context_switch_no_return, _CPU_Context_switch

	/* Start saving context: r0 = executing context, r1 = heir context */
	GET_SELF_CPU_CONTROL r2
	ldr	r3, [r2, #PER_CPU_ISR_DISPATCH_DISABLE]
	/* Save the callee-saved integer registers plus SP (r13) and LR (r14) */
	stm	r0, {r4, r5, r6, r7, r8, r9, r10, r11, r13, r14}

#ifdef ARM_MULTILIB_VFP
	/* Save the callee-saved VFP registers d8-d15 */
	add	r5, r0, #ARM_CONTEXT_CONTROL_D8_OFFSET
	vstm	r5, {d8-d15}
#endif

	/* Save the ISR dispatch disable level fetched above into the context */
	str	r3, [r0, #ARM_CONTEXT_CONTROL_ISR_DISPATCH_DISABLE]

#ifdef RTEMS_SMP
	/*
	 * The executing thread no longer executes on this processor.  Switch
	 * the stack to the temporary interrupt stack of this processor.  Mark
	 * the context of the executing thread as not executing.  The dmb
	 * orders the context save above before the is-executing clear below.
	 */
	dmb
	add	sp, r2, #(PER_CPU_INTERRUPT_FRAME_AREA + CPU_INTERRUPT_FRAME_SIZE)
	mov	r3, #0
	strb	r3, [r0, #ARM_CONTEXT_CONTROL_IS_EXECUTING_OFFSET]

.L_check_is_executing:

	/* Check the is executing indicator of the heir context */
	add	r3, r1, #ARM_CONTEXT_CONTROL_IS_EXECUTING_OFFSET
	ldrexb	r4, [r3]
	cmp	r4, #0
	/* Heir still marked executing elsewhere: look for a newer heir */
	bne	.L_get_potential_new_heir

	/* Try to update the is executing indicator of the heir context */
	mov	r4, #1
	strexb	r5, r4, [r3]
	cmp	r5, #0
	/* Store-exclusive failed (r5 != 0): retry via the heir re-check */
	bne	.L_get_potential_new_heir
	/* Order the is-executing acquire before the context restore below */
	dmb
#endif

/* Start restoring context */
.L_restore:
#if !defined(RTEMS_SMP) && defined(ARM_MULTILIB_HAS_LOAD_STORE_EXCLUSIVE)
	/*
	 * Clear the local exclusive monitor so a stale reservation of the
	 * preempted thread cannot leak into the heir (SMP path clears it
	 * through its own strexb above).
	 */
	clrex
#endif

#ifdef ARM_MULTILIB_HAS_THREAD_ID_REGISTER
	ldr	r3, [r1, #ARM_CONTEXT_CONTROL_THREAD_ID_OFFSET]
#endif

	ldr	r4, [r1, #ARM_CONTEXT_CONTROL_ISR_DISPATCH_DISABLE]

#ifdef ARM_MULTILIB_VFP
	/* Restore the callee-saved VFP registers d8-d15 */
	add	r5, r1, #ARM_CONTEXT_CONTROL_D8_OFFSET
	vldm	r5, {d8-d15}
#endif

#ifdef ARM_MULTILIB_HAS_THREAD_ID_REGISTER
	/* Write the heir's thread ID to CP15 c13 (thread ID register) */
	mcr	p15, 0, r3, c13, c0, 3
#endif

	/* Restore the heir's ISR dispatch disable level into the per-CPU area */
	str	r4, [r2, #PER_CPU_ISR_DISPATCH_DISABLE]

	/* In ARMv5T and above the load of PC is an interworking branch */
#if __ARM_ARCH >= 5
	ldm	r1, {r4, r5, r6, r7, r8, r9, r10, r11, r13, pc}
#else
	/* ARMv4: load LR (r14) and return with bx for interworking */
	ldm	r1, {r4, r5, r6, r7, r8, r9, r10, r11, r13, r14}
	bx	lr
#endif
---|
148 | |
---|
/*
 * void _CPU_Context_restore( new_context )
 *
 * This function restores the registers from where r0 points without
 * saving the current context.  It must match the restore path of
 * _CPU_Context_switch().
 */
DEFINE_FUNCTION_ARM(_CPU_Context_restore)
	/* Place the context in r1 and the per-CPU control in r2, exactly as
	 * the .L_restore path of _CPU_Context_switch() expects them. */
	mov	r1, r0
	GET_SELF_CPU_CONTROL r2
	b	.L_restore
---|
160 | |
---|
#ifdef RTEMS_SMP
.L_get_potential_new_heir:

	/* We may have a new heir */

	/* Read the executing and heir */
	ldr	r4, [r2, #PER_CPU_OFFSET_EXECUTING]
	ldr	r5, [r2, #PER_CPU_OFFSET_HEIR]

	/*
	 * Update the executing only if necessary to avoid cache line
	 * monopolization.
	 */
	cmp	r4, r5
	beq	.L_check_is_executing

	/*
	 * Calculate the heir context pointer: r1 - r4 is the offset of the
	 * context within the thread control block (r1 = old heir context,
	 * r4 = old heir TCB); adding it to the new heir TCB (r5) yields the
	 * new heir context.
	 */
	sub	r4, r1, r4
	add	r1, r5, r4

	/* Update the executing */
	str	r5, [r2, #PER_CPU_OFFSET_EXECUTING]

	b	.L_check_is_executing
---|
/*
 * void _ARM_Start_multitasking( heir_context )
 *
 * SMP entry into multitasking: r0 holds the first heir context.  There
 * is no previous context to save, so jump straight into the heir
 * acquisition loop of _CPU_Context_switch().
 */
DEFINE_FUNCTION_ARM(_ARM_Start_multitasking)
	/* r1 = heir context, r2 = per-CPU control, as .L_check_is_executing
	 * expects them */
	mov	r1, r0
	GET_SELF_CPU_CONTROL r2

	/* Switch the stack to the temporary interrupt stack of this processor */
	add	sp, r2, #(PER_CPU_INTERRUPT_FRAME_AREA + CPU_INTERRUPT_FRAME_SIZE)

	/* Enable IRQ interrupts */
	cpsie	i

	b	.L_check_is_executing
#endif
---|
198 | |
---|
199 | #endif /* ARM_MULTILIB_ARCH_V4 */ |
---|