source: rtems/cpukit/score/cpu/powerpc/include/rtems/score/cpuimpl.h @ 03e4d1e9

Last change on this file was 03e4d1e9, checked in by Sebastian Huber <sebastian.huber@…> on 06/29/22 at 12:37:47

score: Add _CPU_Use_thread_local_storage()

At some point during system initialization, the idle threads are created.
Afterwards, the boot processor basically executes within the context of an idle
thread with thread dispatching disabled. On some architectures, the
thread-local storage area of the associated thread must be set in dedicated
processor registers. Add the new CPU port function to do this:

void _CPU_Use_thread_local_storage( const Context_Control *context )

Close #4672.
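
Below is a minimal usage sketch, not taken from the RTEMS sources: the wrapper name switch_boot_processor_to_idle_tls and the direct inclusion of <rtems/score/cpuimpl.h> are illustrative assumptions, while Thread_Control and its Registers (Context_Control) member are existing score types. It shows how the boot processor could activate the idle thread's thread-local storage area once the idle threads exist.

#include <rtems/score/cpuimpl.h>
#include <rtems/score/thread.h>

/* Hypothetical helper, not the actual RTEMS call site */
static void switch_boot_processor_to_idle_tls( Thread_Control *idle )
{
  /*
   * On PowerPC this loads the thread pointer saved in the idle thread's
   * context into r2 (32-bit) or r13 (64-bit), so TLS accesses made while
   * thread dispatching is disabled resolve into the idle thread's TLS area.
   */
  _CPU_Use_thread_local_storage( &idle->Registers );
}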

/* SPDX-License-Identifier: BSD-2-Clause */

/**
 * @file
 *
 * @brief CPU Port Implementation API
 */

/*
 * Copyright (C) 1999 Eric Valette (valette@crf.canon.fr)
 *                    Canon Centre Recherche France.
 *
 * Copyright (C) 2007 Till Straumann <strauman@slac.stanford.edu>
 *
 * Copyright (c) 2009, 2017 embedded brains GmbH
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RTEMS_SCORE_CPUIMPL_H
#define _RTEMS_SCORE_CPUIMPL_H

#include <rtems/score/cpu.h>

/**
 * @defgroup RTEMSScoreCPUPowerPC PowerPC
 *
 * @ingroup RTEMSScoreCPU
 *
 * @brief PowerPC Architecture Support
 *
 * @{
 */

/* Exception stack frame -> BSP_Exception_frame */
#ifdef __powerpc64__
  #define FRAME_LINK_SPACE 32
#else
  #define FRAME_LINK_SPACE 8
#endif

#define SRR0_FRAME_OFFSET FRAME_LINK_SPACE
#define SRR1_FRAME_OFFSET (SRR0_FRAME_OFFSET + PPC_REG_SIZE)
#define EXCEPTION_NUMBER_OFFSET (SRR1_FRAME_OFFSET + PPC_REG_SIZE)
#define PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET (EXCEPTION_NUMBER_OFFSET + 4)
#define EXC_CR_OFFSET (EXCEPTION_NUMBER_OFFSET + 8)
#define EXC_XER_OFFSET (EXC_CR_OFFSET + 4)
#define EXC_CTR_OFFSET (EXC_XER_OFFSET + 4)
#define EXC_LR_OFFSET (EXC_CTR_OFFSET + PPC_REG_SIZE)
#define PPC_EXC_INTERRUPT_FRAME_OFFSET (EXC_LR_OFFSET + PPC_REG_SIZE)

#ifndef __SPE__
  #define PPC_EXC_GPR_OFFSET(gpr) \
    ((gpr) * PPC_GPR_SIZE + PPC_EXC_INTERRUPT_FRAME_OFFSET + PPC_REG_SIZE)
  #define PPC_EXC_GPR3_PROLOGUE_OFFSET PPC_EXC_GPR_OFFSET(3)
  #if defined(PPC_MULTILIB_ALTIVEC) && defined(PPC_MULTILIB_FPU)
    #define PPC_EXC_VRSAVE_OFFSET PPC_EXC_GPR_OFFSET(33)
    #define PPC_EXC_VSCR_OFFSET (PPC_EXC_VRSAVE_OFFSET + 28)
    #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + PPC_EXC_VSCR_OFFSET + 4)
    #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + PPC_EXC_VR_OFFSET(32))
    #define PPC_EXC_FPSCR_OFFSET PPC_EXC_FR_OFFSET(32)
    #define PPC_EXC_FRAME_SIZE PPC_EXC_FR_OFFSET(34)
    #define PPC_EXC_MIN_VSCR_OFFSET (PPC_EXC_GPR_OFFSET(13) + 12)
    #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + PPC_EXC_MIN_VSCR_OFFSET + 4)
    #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + PPC_EXC_MIN_VR_OFFSET(20))
    #define PPC_EXC_MIN_FPSCR_OFFSET PPC_EXC_MIN_FR_OFFSET(14)
    #define CPU_INTERRUPT_FRAME_SIZE \
      (PPC_EXC_MIN_FR_OFFSET(16) + PPC_STACK_RED_ZONE_SIZE)
  #elif defined(PPC_MULTILIB_ALTIVEC)
    #define PPC_EXC_VRSAVE_OFFSET PPC_EXC_GPR_OFFSET(33)
    #define PPC_EXC_VSCR_OFFSET (PPC_EXC_VRSAVE_OFFSET + 28)
    #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + PPC_EXC_VSCR_OFFSET + 4)
    #define PPC_EXC_FRAME_SIZE PPC_EXC_VR_OFFSET(32)
    #define PPC_EXC_MIN_VSCR_OFFSET (PPC_EXC_GPR_OFFSET(13) + 12)
    #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + PPC_EXC_MIN_VSCR_OFFSET + 4)
    #define CPU_INTERRUPT_FRAME_SIZE \
      (PPC_EXC_MIN_VR_OFFSET(20) + PPC_STACK_RED_ZONE_SIZE)
  #elif defined(PPC_MULTILIB_FPU)
    #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + PPC_EXC_GPR_OFFSET(33))
    #define PPC_EXC_FPSCR_OFFSET PPC_EXC_FR_OFFSET(32)
    #define PPC_EXC_FRAME_SIZE PPC_EXC_FR_OFFSET(34)
    #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + PPC_EXC_GPR_OFFSET(13))
    #define PPC_EXC_MIN_FPSCR_OFFSET PPC_EXC_MIN_FR_OFFSET(14)
    #define CPU_INTERRUPT_FRAME_SIZE \
      (PPC_EXC_MIN_FR_OFFSET(16) + PPC_STACK_RED_ZONE_SIZE)
  #else
    #define PPC_EXC_FRAME_SIZE PPC_EXC_GPR_OFFSET(33)
    #define CPU_INTERRUPT_FRAME_SIZE \
      (PPC_EXC_GPR_OFFSET(13) + PPC_STACK_RED_ZONE_SIZE)
  #endif
#else
  #define PPC_EXC_SPEFSCR_OFFSET 44
  #define PPC_EXC_ACC_OFFSET 48
  #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 56)
  #define PPC_EXC_GPR3_PROLOGUE_OFFSET (PPC_EXC_GPR_OFFSET(3) + 4)
  #define CPU_INTERRUPT_FRAME_SIZE (160 + PPC_STACK_RED_ZONE_SIZE)
  #define PPC_EXC_FRAME_SIZE 320
#endif

#define GPR0_OFFSET PPC_EXC_GPR_OFFSET(0)
#define GPR1_OFFSET PPC_EXC_GPR_OFFSET(1)
#define GPR2_OFFSET PPC_EXC_GPR_OFFSET(2)
#define GPR3_OFFSET PPC_EXC_GPR_OFFSET(3)
#define GPR4_OFFSET PPC_EXC_GPR_OFFSET(4)
#define GPR5_OFFSET PPC_EXC_GPR_OFFSET(5)
#define GPR6_OFFSET PPC_EXC_GPR_OFFSET(6)
#define GPR7_OFFSET PPC_EXC_GPR_OFFSET(7)
#define GPR8_OFFSET PPC_EXC_GPR_OFFSET(8)
#define GPR9_OFFSET PPC_EXC_GPR_OFFSET(9)
#define GPR10_OFFSET PPC_EXC_GPR_OFFSET(10)
#define GPR11_OFFSET PPC_EXC_GPR_OFFSET(11)
#define GPR12_OFFSET PPC_EXC_GPR_OFFSET(12)
#define GPR13_OFFSET PPC_EXC_GPR_OFFSET(13)
#define GPR14_OFFSET PPC_EXC_GPR_OFFSET(14)
#define GPR15_OFFSET PPC_EXC_GPR_OFFSET(15)
#define GPR16_OFFSET PPC_EXC_GPR_OFFSET(16)
#define GPR17_OFFSET PPC_EXC_GPR_OFFSET(17)
#define GPR18_OFFSET PPC_EXC_GPR_OFFSET(18)
#define GPR19_OFFSET PPC_EXC_GPR_OFFSET(19)
#define GPR20_OFFSET PPC_EXC_GPR_OFFSET(20)
#define GPR21_OFFSET PPC_EXC_GPR_OFFSET(21)
#define GPR22_OFFSET PPC_EXC_GPR_OFFSET(22)
#define GPR23_OFFSET PPC_EXC_GPR_OFFSET(23)
#define GPR24_OFFSET PPC_EXC_GPR_OFFSET(24)
#define GPR25_OFFSET PPC_EXC_GPR_OFFSET(25)
#define GPR26_OFFSET PPC_EXC_GPR_OFFSET(26)
#define GPR27_OFFSET PPC_EXC_GPR_OFFSET(27)
#define GPR28_OFFSET PPC_EXC_GPR_OFFSET(28)
#define GPR29_OFFSET PPC_EXC_GPR_OFFSET(29)
#define GPR30_OFFSET PPC_EXC_GPR_OFFSET(30)
#define GPR31_OFFSET PPC_EXC_GPR_OFFSET(31)

#define CPU_PER_CPU_CONTROL_SIZE 0

#ifdef RTEMS_SMP

/* Use SPRG0 for the per-CPU control of the current processor */
#define PPC_PER_CPU_CONTROL_REGISTER 272

#endif /* RTEMS_SMP */

#ifndef ASM

#ifdef __cplusplus
extern "C" {
#endif

typedef struct {
  uintptr_t FRAME_SP;
  #ifdef __powerpc64__
    uint32_t FRAME_CR;
    uint32_t FRAME_RESERVED;
  #endif
  uintptr_t FRAME_LR;
  #ifdef __powerpc64__
    uintptr_t FRAME_TOC;
  #endif
  uintptr_t EXC_SRR0;
  uintptr_t EXC_SRR1;
  uint32_t RESERVED_FOR_ALIGNMENT_0;
  uint32_t EXC_INTERRUPT_ENTRY_INSTANT;
  uint32_t EXC_CR;
  uint32_t EXC_XER;
  uintptr_t EXC_CTR;
  uintptr_t EXC_LR;
  uintptr_t EXC_INTERRUPT_FRAME;
  #ifdef __SPE__
    uint32_t EXC_SPEFSCR;
    uint64_t EXC_ACC;
  #endif
  PPC_GPR_TYPE GPR0;
  PPC_GPR_TYPE GPR1;
  PPC_GPR_TYPE GPR2;
  PPC_GPR_TYPE GPR3;
  PPC_GPR_TYPE GPR4;
  PPC_GPR_TYPE GPR5;
  PPC_GPR_TYPE GPR6;
  PPC_GPR_TYPE GPR7;
  PPC_GPR_TYPE GPR8;
  PPC_GPR_TYPE GPR9;
  PPC_GPR_TYPE GPR10;
  PPC_GPR_TYPE GPR11;
  PPC_GPR_TYPE GPR12;
  #ifdef PPC_MULTILIB_ALTIVEC
    /* This field must take stvewx/lvewx requirements into account */
    uint32_t RESERVED_FOR_ALIGNMENT_3[3];
    uint32_t VSCR;

    uint8_t V0[16];
    uint8_t V1[16];
    uint8_t V2[16];
    uint8_t V3[16];
    uint8_t V4[16];
    uint8_t V5[16];
    uint8_t V6[16];
    uint8_t V7[16];
    uint8_t V8[16];
    uint8_t V9[16];
    uint8_t V10[16];
    uint8_t V11[16];
    uint8_t V12[16];
    uint8_t V13[16];
    uint8_t V14[16];
    uint8_t V15[16];
    uint8_t V16[16];
    uint8_t V17[16];
    uint8_t V18[16];
    uint8_t V19[16];
  #endif
  #ifdef PPC_MULTILIB_FPU
    double F0;
    double F1;
    double F2;
    double F3;
    double F4;
    double F5;
    double F6;
    double F7;
    double F8;
    double F9;
    double F10;
    double F11;
    double F12;
    double F13;
    uint64_t FPSCR;
    uint64_t RESERVED_FOR_ALIGNMENT_4;
  #endif
  #if PPC_STACK_RED_ZONE_SIZE > 0
    uint8_t RED_ZONE[ PPC_STACK_RED_ZONE_SIZE ];
  #endif
} CPU_Interrupt_frame;

#ifdef RTEMS_SMP

static inline struct Per_CPU_Control *_PPC_Get_current_per_CPU_control( void )
{
  struct Per_CPU_Control *cpu_self;

  __asm__ volatile (
    "mfspr %0, " RTEMS_XSTRING( PPC_PER_CPU_CONTROL_REGISTER )
    : "=r" ( cpu_self )
  );

  return cpu_self;
}

#define _CPU_Get_current_per_CPU_control() _PPC_Get_current_per_CPU_control()

#endif /* RTEMS_SMP */

RTEMS_NO_RETURN void _CPU_Fatal_halt( uint32_t source, CPU_Uint32ptr error );

void _CPU_Context_volatile_clobber( uintptr_t pattern );

void _CPU_Context_validate( uintptr_t pattern );

RTEMS_INLINE_ROUTINE void _CPU_Instruction_illegal( void )
{
  __asm__ volatile ( ".long 0" );
}

RTEMS_INLINE_ROUTINE void _CPU_Instruction_no_operation( void )
{
  __asm__ volatile ( "nop" );
}

RTEMS_INLINE_ROUTINE void _CPU_Use_thread_local_storage(
  const Context_Control *context
)
{
#ifdef __powerpc64__
   register uintptr_t tp __asm__( "13" );
#else
   register uintptr_t tp __asm__( "2" );
#endif

   tp = ppc_get_context( context )->tp;

   /* Make sure that the register assignment is not optimized away */
   __asm__ volatile ( "" : : "r" ( tp ) );
}

#ifdef __cplusplus
}
#endif

#endif /* ASM */

/** @} */

#endif /* _RTEMS_SCORE_CPUIMPL_H */