Changeset 1869bb7 in rtems
- Timestamp: 05/18/12 13:47:23
- Branches: 4.11, 5, master
- Children: 105ccdd5
- Parents: 721fe34
- git-author: Sebastian Huber <sebastian.huber@…> (05/18/12 13:47:23)
- git-committer: Sebastian Huber <sebastian.huber@…> (06/04/12 07:54:31)
- Files: 8 edited
Legend:
- Unmodified lines are shown with a two-space prefix
- Added lines are prefixed with "+"
- Removed lines are prefixed with "-"
c/src/lib/libcpu/powerpc/mpc6xx/altivec/vec_sup.c
r721fe34 r1869bb7

     * Therefore, we compute it here and store it in memory...
     */
-   _CPU_altivec_ctxt_off = (uint32_t) &((Context_Control*)0)->altivec;
+   _CPU_altivec_ctxt_off = offsetof(ppc_context, altivec);
    /*
     * Add space possibly needed for alignment
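The replaced line is worth a note: forming a member address through a null Context_Control pointer is the classic pre-offsetof trick, and it is undefined behavior in ISO C even though most compilers tolerate it. A minimal sketch of the two idioms, with a hypothetical stand-in struct (the real ppc_context lives in <rtems/score/cpu.h>):

    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical stand-in for the real ppc_context, only to
     * illustrate the idiom change made in this file. */
    typedef struct {
      uint32_t gpr1;
      uint32_t msr;
      uint8_t  altivec[16 * 12 + 32 + 32];
    } example_context;

    /* Old idiom: take the member address through a null pointer.
     * Most compilers fold this, but it is undefined behavior. */
    #define OFFSET_VIA_NULL(type, member) \
      ((uint32_t) &((type *) 0)->member)

    int main(void)
    {
      /* New idiom: offsetof() is well defined and folds at compile time. */
      return offsetof(example_context, altivec)
          == OFFSET_VIA_NULL(example_context, altivec) ? 0 : 1;
    }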
c/src/lib/libcpu/powerpc/mpc6xx/altivec/vec_sup_asm.S
r721fe34 r1869bb7

      blr
  
-     .global _CPU_Context_restore_altivec
- _CPU_Context_restore_altivec:
-     /* Restore is like 'switch' but we don't have
-      * to save an old context.
-      * Move argument to second arg and load NULL pointer
-      * to first one, then jump to 'switch' routine.
-      */
-     mr r4, r3
-     li r3, 0
-     b _CPU_Context_switch_altivec
- 
      .global _CPU_Context_switch_altivec
  _CPU_Context_switch_altivec:
c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S
r721fe34 r1869bb7

  
      /* Load ISR nest level and thread dispatch disable level */
-     PPC_EXC_GPR_STORE ISR_NEST_HADDR_REGISTER, ISR_NEST_HADDR_OFFSET(r1)
+     PPC_GPR_STORE ISR_NEST_HADDR_REGISTER, ISR_NEST_HADDR_OFFSET(r1)
      lis ISR_NEST_HADDR_REGISTER, ISR_NEST_LEVEL@ha
-     PPC_EXC_GPR_STORE ISR_NEST_REGISTER, ISR_NEST_OFFSET(r1)
+     PPC_GPR_STORE ISR_NEST_REGISTER, ISR_NEST_OFFSET(r1)
      lwz ISR_NEST_REGISTER, ISR_NEST_LEVEL@l(ISR_NEST_HADDR_REGISTER)
-     PPC_EXC_GPR_STORE DISPATCH_LEVEL_REGISTER, DISPATCH_LEVEL_OFFSET(r1)
+     PPC_GPR_STORE DISPATCH_LEVEL_REGISTER, DISPATCH_LEVEL_OFFSET(r1)
      lwz DISPATCH_LEVEL_REGISTER, _Thread_Dispatch_disable_level@sdarel(r13)
  
-     PPC_EXC_GPR_STORE SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
+     PPC_GPR_STORE SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
  
  #ifdef __SPE__
…
  #endif
  
-     PPC_EXC_GPR_STORE HANDLER_REGISTER, HANDLER_OFFSET(r1)
+     PPC_GPR_STORE HANDLER_REGISTER, HANDLER_OFFSET(r1)
  
      /*
…
      lwzx HANDLER_REGISTER, HANDLER_REGISTER, SCRATCH_0_REGISTER
  
-     PPC_EXC_GPR_STORE SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
-     PPC_EXC_GPR_STORE SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
-     PPC_EXC_GPR_STORE SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
-     PPC_EXC_GPR_STORE SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
-     PPC_EXC_GPR_STORE SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
+     PPC_GPR_STORE SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
+     PPC_GPR_STORE SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
+     PPC_GPR_STORE SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
+     PPC_GPR_STORE SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
+     PPC_GPR_STORE SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
  
      /* Save SRR0, SRR1, CR, CTR, XER, and LR */
…
      lwz SCRATCH_5_REGISTER, EXC_LR_OFFSET(r1)
  
-     PPC_EXC_GPR_LOAD VECTOR_REGISTER, VECTOR_OFFSET(r1)
-     PPC_EXC_GPR_LOAD ISR_NEST_HADDR_REGISTER, ISR_NEST_HADDR_OFFSET(r1)
-     PPC_EXC_GPR_LOAD ISR_NEST_REGISTER, ISR_NEST_OFFSET(r1)
+     PPC_GPR_LOAD VECTOR_REGISTER, VECTOR_OFFSET(r1)
+     PPC_GPR_LOAD ISR_NEST_HADDR_REGISTER, ISR_NEST_HADDR_OFFSET(r1)
+     PPC_GPR_LOAD ISR_NEST_REGISTER, ISR_NEST_OFFSET(r1)
  
  #ifdef __SPE__
      mtspr FSL_EIS_SPEFSCR, DISPATCH_LEVEL_REGISTER
  #endif
-     PPC_EXC_GPR_LOAD DISPATCH_LEVEL_REGISTER, DISPATCH_LEVEL_OFFSET(r1)
+     PPC_GPR_LOAD DISPATCH_LEVEL_REGISTER, DISPATCH_LEVEL_OFFSET(r1)
  
  #ifdef __SPE__
      evmra HANDLER_REGISTER, HANDLER_REGISTER
  #endif
-     PPC_EXC_GPR_LOAD HANDLER_REGISTER, HANDLER_OFFSET(r1)
+     PPC_GPR_LOAD HANDLER_REGISTER, HANDLER_OFFSET(r1)
  
      /* Restore SRR0, SRR1, CR, CTR, XER, and LR */
      mtsrr0 SCRATCH_0_REGISTER
-     PPC_EXC_GPR_LOAD SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
+     PPC_GPR_LOAD SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
      mtsrr1 SCRATCH_1_REGISTER
-     PPC_EXC_GPR_LOAD SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
+     PPC_GPR_LOAD SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
      mtcr SCRATCH_2_REGISTER
-     PPC_EXC_GPR_LOAD SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
+     PPC_GPR_LOAD SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
      mtctr SCRATCH_3_REGISTER
-     PPC_EXC_GPR_LOAD SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
+     PPC_GPR_LOAD SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
      mtxer SCRATCH_4_REGISTER
-     PPC_EXC_GPR_LOAD SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
+     PPC_GPR_LOAD SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
      mtlr SCRATCH_5_REGISTER
-     PPC_EXC_GPR_LOAD SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
+     PPC_GPR_LOAD SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
  
      /* Pop stack */
c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h
r721fe34 r1869bb7

  #ifndef __SPE__
-   #define PPC_EXC_GPR_TYPE unsigned
-   #define PPC_EXC_GPR_SIZE 4
-   #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_EXC_GPR_SIZE + 36)
+   #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 36)
    #define PPC_EXC_VECTOR_PROLOGUE_OFFSET PPC_EXC_GPR_OFFSET(4)
-   #define PPC_EXC_GPR_LOAD lwz
-   #define PPC_EXC_GPR_STORE stw
    #define PPC_EXC_MINIMAL_FRAME_SIZE 96
    #define PPC_EXC_FRAME_SIZE 176
  #else
-   #define PPC_EXC_GPR_TYPE uint64_t
-   #define PPC_EXC_GPR_SIZE 8
    #define PPC_EXC_SPEFSCR_OFFSET 36
    #define PPC_EXC_ACC_OFFSET 40
-   #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_EXC_GPR_SIZE + 48)
+   #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 48)
    #define PPC_EXC_VECTOR_PROLOGUE_OFFSET (PPC_EXC_GPR_OFFSET(4) + 4)
-   #define PPC_EXC_GPR_LOAD evldd
-   #define PPC_EXC_GPR_STORE evstdd
    #define PPC_EXC_MINIMAL_FRAME_SIZE 160
    #define PPC_EXC_FRAME_SIZE 320
…
    uint64_t EXC_ACC;
  #endif
-   PPC_EXC_GPR_TYPE GPR0;
-   PPC_EXC_GPR_TYPE GPR1;
-   PPC_EXC_GPR_TYPE GPR2;
-   PPC_EXC_GPR_TYPE GPR3;
-   PPC_EXC_GPR_TYPE GPR4;
-   PPC_EXC_GPR_TYPE GPR5;
-   PPC_EXC_GPR_TYPE GPR6;
-   PPC_EXC_GPR_TYPE GPR7;
-   PPC_EXC_GPR_TYPE GPR8;
-   PPC_EXC_GPR_TYPE GPR9;
-   PPC_EXC_GPR_TYPE GPR10;
-   PPC_EXC_GPR_TYPE GPR11;
-   PPC_EXC_GPR_TYPE GPR12;
-   PPC_EXC_GPR_TYPE GPR13;
-   PPC_EXC_GPR_TYPE GPR14;
-   PPC_EXC_GPR_TYPE GPR15;
-   PPC_EXC_GPR_TYPE GPR16;
-   PPC_EXC_GPR_TYPE GPR17;
-   PPC_EXC_GPR_TYPE GPR18;
-   PPC_EXC_GPR_TYPE GPR19;
-   PPC_EXC_GPR_TYPE GPR20;
-   PPC_EXC_GPR_TYPE GPR21;
-   PPC_EXC_GPR_TYPE GPR22;
-   PPC_EXC_GPR_TYPE GPR23;
-   PPC_EXC_GPR_TYPE GPR24;
-   PPC_EXC_GPR_TYPE GPR25;
-   PPC_EXC_GPR_TYPE GPR26;
-   PPC_EXC_GPR_TYPE GPR27;
-   PPC_EXC_GPR_TYPE GPR28;
-   PPC_EXC_GPR_TYPE GPR29;
-   PPC_EXC_GPR_TYPE GPR30;
-   PPC_EXC_GPR_TYPE GPR31;
+   PPC_GPR_TYPE GPR0;
+   PPC_GPR_TYPE GPR1;
+   PPC_GPR_TYPE GPR2;
+   PPC_GPR_TYPE GPR3;
+   PPC_GPR_TYPE GPR4;
+   PPC_GPR_TYPE GPR5;
+   PPC_GPR_TYPE GPR6;
+   PPC_GPR_TYPE GPR7;
+   PPC_GPR_TYPE GPR8;
+   PPC_GPR_TYPE GPR9;
+   PPC_GPR_TYPE GPR10;
+   PPC_GPR_TYPE GPR11;
+   PPC_GPR_TYPE GPR12;
+   PPC_GPR_TYPE GPR13;
+   PPC_GPR_TYPE GPR14;
+   PPC_GPR_TYPE GPR15;
+   PPC_GPR_TYPE GPR16;
+   PPC_GPR_TYPE GPR17;
+   PPC_GPR_TYPE GPR18;
+   PPC_GPR_TYPE GPR19;
+   PPC_GPR_TYPE GPR20;
+   PPC_GPR_TYPE GPR21;
+   PPC_GPR_TYPE GPR22;
+   PPC_GPR_TYPE GPR23;
+   PPC_GPR_TYPE GPR24;
+   PPC_GPR_TYPE GPR25;
+   PPC_GPR_TYPE GPR26;
+   PPC_GPR_TYPE GPR27;
+   PPC_GPR_TYPE GPR28;
+   PPC_GPR_TYPE GPR29;
+   PPC_GPR_TYPE GPR30;
+   PPC_GPR_TYPE GPR31;
    unsigned EXC_MSR;
    unsigned EXC_DAR;
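With the shared PPC_GPR_SIZE now coming from <rtems/score/cpu.h> (4 without __SPE__, 8 with), the frame offsets above can be checked by hand. A small sketch for the non-SPE case, using only values visible in this hunk:

    #include <assert.h>

    /* Values for a non-SPE build, copied from the diff above. */
    #define PPC_GPR_SIZE 4
    #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 36)

    int main(void)
    {
      assert(PPC_EXC_GPR_OFFSET(0)  == 36);   /* GPR0 follows the 36-byte header */
      assert(PPC_EXC_GPR_OFFSET(4)  == 52);   /* PPC_EXC_VECTOR_PROLOGUE_OFFSET */
      assert(PPC_EXC_GPR_OFFSET(31) == 160);  /* last GPR fits the 176-byte frame */
      return 0;
    }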
c/src/lib/libcpu/powerpc/new-exceptions/cpu.c
r721fe34 r1869bb7

  )
  {
+   ppc_context *the_ppc_context;
    uint32_t msr_value;
    uint32_t sp;
…
    memset( the_context, 0, sizeof( *the_context ) );
  
-   PPC_CONTEXT_SET_SP( the_context, sp );
-   PPC_CONTEXT_SET_PC( the_context, (uint32_t) entry_point );
-   PPC_CONTEXT_SET_MSR( the_context, msr_value );
- 
- #ifndef __SPE__
- #if (PPC_ABI == PPC_ABI_SVR4)
-   /*
-    * SVR4 says R2 is for 'system-reserved' use; it cannot hurt to
-    * propagate R2 to all task contexts.
-    */
-   { uint32_t r2 = 0;
-     unsigned r13 = 0;
-     __asm__ volatile ("mr %0,2; mr %1,13" : "=r" ((r2)), "=r" ((r13)));
- 
-     the_context->gpr2 = r2;
-     the_context->gpr13 = r13;
-   }
- #elif (PPC_ABI == PPC_ABI_EABI)
-   { uint32_t r2 = 0;
-     unsigned r13 = 0;
-     __asm__ volatile ("mr %0,2; mr %1,13" : "=r" ((r2)), "=r" ((r13)));
- 
-     the_context->gpr2 = r2;
-     the_context->gpr13 = r13;
-   }
- #else
- #error unsupported PPC_ABI
- #endif
- #endif /* __SPE__ */
+   the_ppc_context = ppc_get_context( the_context );
+   the_ppc_context->gpr1 = sp;
+   the_ppc_context->msr = msr_value;
+   the_ppc_context->lr = (uint32_t) entry_point;
  
  #ifdef __ALTIVEC__
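The per-ABI PPC_CONTEXT_SET_* macros are gone: the code now obtains the cache-aligned ppc_context and assigns its fields directly. A self-contained sketch of that pattern, with illustrative stand-in types for the real ones in <rtems/score/cpu.h>:

    #include <stdint.h>
    #include <string.h>

    #define CLSZ 32u  /* assumed cache line size, as in the headers */

    typedef struct {
      uint32_t gpr1;  /* stack pointer */
      uint32_t msr;
      uint32_t lr;    /* entry point */
      uint32_t cr;
    } ppc_context_sketch;

    typedef struct {
      /* One leading cache line plus the payload rounded up to a line. */
      uint8_t context[CLSZ + sizeof(ppc_context_sketch)
        + (sizeof(ppc_context_sketch) % CLSZ == 0
            ? 0 : CLSZ - sizeof(ppc_context_sketch) % CLSZ)];
    } context_control_sketch;

    static ppc_context_sketch *get_context(context_control_sketch *c)
    {
      uintptr_t addr = (uintptr_t) c;
      return (ppc_context_sketch *) ((addr & ~(uintptr_t)(CLSZ - 1)) + CLSZ);
    }

    static void context_initialize(context_control_sketch *the_context,
                                   uint32_t sp, uint32_t msr_value,
                                   uint32_t entry_point)
    {
      ppc_context_sketch *ctx;

      memset(the_context, 0, sizeof(*the_context));

      ctx = get_context(the_context);
      ctx->gpr1 = sp;
      ctx->msr = msr_value;
      ctx->lr = entry_point;
    }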
c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S
r721fe34 r1869bb7

   * On-Line Applications Research Corporation (OAR).
   *
- * Copyright (c) 2011 embedded brains GmbH.
+ * Copyright (c) 2011-2012 embedded brains GmbH.
   *
   * The license and distribution terms for this file may in
…
  #include <bspopts.h>
  
+ #if PPC_DEFAULT_CACHE_LINE_SIZE != 32
+   #error "unexpected PPC_DEFAULT_CACHE_LINE_SIZE value"
+ #endif
+ 
+ #ifdef BSP_USE_DATA_CACHE_BLOCK_TOUCH
+   #define DATA_CACHE_TOUCH(rega, regb) \
+     dcbt rega, regb
+ #else
+   #define DATA_CACHE_TOUCH(rega, regb)
+ #endif
+ 
  #if BSP_DATA_CACHE_ENABLED && PPC_CACHE_ALIGNMENT == 32
- #define DATA_CACHE_ALIGNMENT(reg) \
-     li reg, PPC_CACHE_ALIGNMENT
- #define DATA_CACHE_ZERO(rega, regb) \
-     dcbz rega, regb
- #define DATA_CACHE_TOUCH(rega, regb) \
-     dcbt rega, regb
  #define DATA_CACHE_ZERO_AND_TOUCH(reg, offset) \
-     li reg, offset; dcbz reg, r3; dcbt reg, r4
+     li reg, offset; dcbz reg, r3; DATA_CACHE_TOUCH(reg, r4)
  #else
- #define DATA_CACHE_ALIGNMENT(reg)
- #define DATA_CACHE_ZERO(rega, regb)
- #define DATA_CACHE_TOUCH(rega, regb)
- #define DATA_CACHE_ZERO_AND_TOUCH(reg, offset) \
-     li reg, offset
+ #define DATA_CACHE_ZERO_AND_TOUCH(reg, offset)
  #endif
+ 
+ #define PPC_CONTEXT_CACHE_LINE_0 32
+ #define PPC_CONTEXT_CACHE_LINE_1 64
+ #define PPC_CONTEXT_CACHE_LINE_2 96
+ #define PPC_CONTEXT_CACHE_LINE_3 128
+ #define PPC_CONTEXT_CACHE_LINE_4 160
  
  /*
   * Offsets for various Contexts
   */
-     .set GP_1, 0
-     .set GP_2, (GP_1 + 4)
-     .set GP_13, (GP_2 + 4)
-     .set GP_14, (GP_13 + 4)
- 
-     .set GP_15, (GP_14 + 4)
-     .set GP_16, (GP_15 + 4)
-     .set GP_17, (GP_16 + 4)
-     .set GP_18, (GP_17 + 4)
- 
-     .set GP_19, (GP_18 + 4)
-     .set GP_20, (GP_19 + 4)
-     .set GP_21, (GP_20 + 4)
-     .set GP_22, (GP_21 + 4)
- 
-     .set GP_23, (GP_22 + 4)
-     .set GP_24, (GP_23 + 4)
-     .set GP_25, (GP_24 + 4)
-     .set GP_26, (GP_25 + 4)
- 
-     .set GP_27, (GP_26 + 4)
-     .set GP_28, (GP_27 + 4)
-     .set GP_29, (GP_28 + 4)
-     .set GP_30, (GP_29 + 4)
- 
-     .set GP_31, (GP_30 + 4)
-     .set GP_CR, (GP_31 + 4)
-     .set GP_PC, (GP_CR + 4)
-     .set GP_MSR, (GP_PC + 4)
  
  #if (PPC_HAS_DOUBLE==1)
…
      .set FP_31, (FP_30 + FP_SIZE)
      .set FP_FPSCR, (FP_31 + FP_SIZE)
- 
-     .set IP_LINK, 0
-     .set IP_0, (IP_LINK + 8)
-     .set IP_2, (IP_0 + 4)
- 
-     .set IP_3, (IP_2 + 4)
-     .set IP_4, (IP_3 + 4)
-     .set IP_5, (IP_4 + 4)
-     .set IP_6, (IP_5 + 4)
- 
-     .set IP_7, (IP_6 + 4)
-     .set IP_8, (IP_7 + 4)
-     .set IP_9, (IP_8 + 4)
-     .set IP_10, (IP_9 + 4)
- 
-     .set IP_11, (IP_10 + 4)
-     .set IP_12, (IP_11 + 4)
-     .set IP_13, (IP_12 + 4)
-     .set IP_28, (IP_13 + 4)
- 
-     .set IP_29, (IP_28 + 4)
-     .set IP_30, (IP_29 + 4)
-     .set IP_31, (IP_30 + 4)
-     .set IP_CR, (IP_31 + 4)
- 
-     .set IP_CTR, (IP_CR + 4)
-     .set IP_XER, (IP_CTR + 4)
-     .set IP_LR, (IP_XER + 4)
-     .set IP_PC, (IP_LR + 4)
- 
-     .set IP_MSR, (IP_PC + 4)
-     .set IP_END, (IP_MSR + 16)
  
      BEGIN_CODE
…
      blr
  
- /* _CPU_Context_switch
-  *
-  * This routine performs a normal non-FP context switch.
-  */
      ALIGN (PPC_CACHE_ALIGNMENT, PPC_CACHE_ALIGN_POWER)
      PUBLIC_PROC (_CPU_Context_switch)
  PROC (_CPU_Context_switch):
- #ifndef __SPE__
+ 
+ #ifdef BSP_USE_SYNC_IN_CONTEXT_SWITCH
      sync
      isync
-     /* This assumes that all the registers are in the given order */
-     DATA_CACHE_ALIGNMENT(r5)
-     addi r9,r3,-4
-     DATA_CACHE_ZERO(r5, r9)
- #ifdef RTEMS_MULTIPROCESSING
-     /*
-      * We have to clear the reservation of the executing thread. See also
-      * Book E section 6.1.6.2 "Atomic Update Primitives".
-      */
-     li r10, GP_1 + 4
-     stwcx. r1, r9, r10
- #endif
-     stw r1, GP_1+4(r9)
-     stw r2, GP_2+4(r9)
- #if (PPC_USE_MULTIPLE == 1)
-     addi r9, r9, GP_18+4
-     DATA_CACHE_ZERO(r5, r9)
-     stmw r13, GP_13-GP_18(r9)
- #else
-     stw r13, GP_13+4(r9)
-     stw r14, GP_14+4(r9)
-     stw r15, GP_15+4(r9)
-     stw r16, GP_16+4(r9)
-     stw r17, GP_17+4(r9)
-     stwu r18, GP_18+4(r9)
-     DATA_CACHE_ZERO(r5, r9)
-     stw r19, GP_19-GP_18(r9)
-     stw r20, GP_20-GP_18(r9)
-     stw r21, GP_21-GP_18(r9)
-     stw r22, GP_22-GP_18(r9)
-     stw r23, GP_23-GP_18(r9)
-     stw r24, GP_24-GP_18(r9)
-     stw r25, GP_25-GP_18(r9)
-     stw r26, GP_26-GP_18(r9)
-     stw r27, GP_27-GP_18(r9)
-     stw r28, GP_28-GP_18(r9)
-     stw r29, GP_29-GP_18(r9)
-     stw r30, GP_30-GP_18(r9)
-     stw r31, GP_31-GP_18(r9)
- #endif
-     DATA_CACHE_TOUCH(r0, r4)
-     mfcr r6
-     stw r6, GP_CR-GP_18(r9)
-     mflr r7
-     stw r7, GP_PC-GP_18(r9)
-     mfmsr r8
-     stw r8, GP_MSR-GP_18(r9)
- 
- #ifdef __ALTIVEC__
-     mr r14, r4
-     EXTERN_PROC(_CPU_Context_switch_altivec)
-     bl _CPU_Context_switch_altivec
-     mr r4, r14
-     DATA_CACHE_ALIGNMENT(r5)
- #endif
- 
-     DATA_CACHE_TOUCH(r5, r4)
-     lwz r1, GP_1(r4)
-     lwz r2, GP_2(r4)
- #if (PPC_USE_MULTIPLE == 1)
-     addi r4, r4, GP_19
-     DATA_CACHE_TOUCH(r5, r4)
-     lmw r13, GP_13-GP_19(r4)
- #else
-     lwz r13, GP_13(r4)
-     lwz r14, GP_14(r4)
-     lwz r15, GP_15(r4)
-     lwz r16, GP_16(r4)
-     lwz r17, GP_17(r4)
-     lwz r18, GP_18(r4)
-     lwzu r19, GP_19(r4)
-     DATA_CACHE_TOUCH(r5, r4)
-     lwz r20, GP_20-GP_19(r4)
-     lwz r21, GP_21-GP_19(r4)
-     lwz r22, GP_22-GP_19(r4)
-     lwz r23, GP_23-GP_19(r4)
-     lwz r24, GP_24-GP_19(r4)
-     lwz r25, GP_25-GP_19(r4)
-     lwz r26, GP_26-GP_19(r4)
-     lwz r27, GP_27-GP_19(r4)
-     lwz r28, GP_28-GP_19(r4)
-     lwz r29, GP_29-GP_19(r4)
-     lwz r30, GP_30-GP_19(r4)
-     lwz r31, GP_31-GP_19(r4)
- #endif
-     lwz r6, GP_CR-GP_19(r4)
-     lwz r7, GP_PC-GP_19(r4)
-     lwz r8, GP_MSR-GP_19(r4)
-     mtcrf 255, r6
-     mtlr r7
-     mtmsr r8
-     isync
- 
-     blr
- #else /* __SPE__ */
+ #endif
+ 
      /* Align to a cache line */
      clrrwi r3, r3, 5
…
       * We have to clear the reservation of the executing thread. See also
       * Book E section 6.1.6.2 "Atomic Update Primitives".
-      *
-      * Here we assume PPC_CONTEXT_OFFSET_SP == PPC_CONTEXT_CACHE_LINE_0.
       */
+ #if PPC_CONTEXT_OFFSET_GPR1 != PPC_CONTEXT_CACHE_LINE_0 \
+   || !BSP_DATA_CACHE_ENABLED \
+   || PPC_CACHE_ALIGNMENT != 32
+     li r10, PPC_CONTEXT_OFFSET_GPR1
+ #endif
      stwcx. r1, r3, r10
  #endif
-     stw r1, PPC_CONTEXT_OFFSET_SP(r3)
+     stw r1, PPC_CONTEXT_OFFSET_GPR1(r3)
      stw r5, PPC_CONTEXT_OFFSET_MSR(r3)
      stw r6, PPC_CONTEXT_OFFSET_LR(r3)
      stw r7, PPC_CONTEXT_OFFSET_CR(r3)
-     evstdd r14, PPC_CONTEXT_OFFSET_GPR14(r3)
-     evstdd r15, PPC_CONTEXT_OFFSET_GPR15(r3)
- 
+     PPC_GPR_STORE r14, PPC_CONTEXT_OFFSET_GPR14(r3)
+     PPC_GPR_STORE r15, PPC_CONTEXT_OFFSET_GPR15(r3)
+ 
+ #if PPC_CONTEXT_OFFSET_GPR20 == PPC_CONTEXT_CACHE_LINE_2
      DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_2)
- 
-     evstdd r16, PPC_CONTEXT_OFFSET_GPR16(r3)
-     evstdd r17, PPC_CONTEXT_OFFSET_GPR17(r3)
-     evstdd r18, PPC_CONTEXT_OFFSET_GPR18(r3)
-     evstdd r19, PPC_CONTEXT_OFFSET_GPR19(r3)
- 
+ #endif
+ 
+     PPC_GPR_STORE r16, PPC_CONTEXT_OFFSET_GPR16(r3)
+     PPC_GPR_STORE r17, PPC_CONTEXT_OFFSET_GPR17(r3)
+ 
+ #if PPC_CONTEXT_OFFSET_GPR26 == PPC_CONTEXT_CACHE_LINE_2
+     DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_2)
+ #endif
+ 
+     PPC_GPR_STORE r18, PPC_CONTEXT_OFFSET_GPR18(r3)
+     PPC_GPR_STORE r19, PPC_CONTEXT_OFFSET_GPR19(r3)
+ 
+ #if PPC_CONTEXT_OFFSET_GPR24 == PPC_CONTEXT_CACHE_LINE_3
      DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_3)
- 
-     evstdd r20, PPC_CONTEXT_OFFSET_GPR20(r3)
-     evstdd r21, PPC_CONTEXT_OFFSET_GPR21(r3)
-     evstdd r22, PPC_CONTEXT_OFFSET_GPR22(r3)
-     evstdd r23, PPC_CONTEXT_OFFSET_GPR23(r3)
- 
+ #endif
+ 
+     PPC_GPR_STORE r20, PPC_CONTEXT_OFFSET_GPR20(r3)
+     PPC_GPR_STORE r21, PPC_CONTEXT_OFFSET_GPR21(r3)
+     PPC_GPR_STORE r22, PPC_CONTEXT_OFFSET_GPR22(r3)
+     PPC_GPR_STORE r23, PPC_CONTEXT_OFFSET_GPR23(r3)
+ 
+ #if PPC_CONTEXT_OFFSET_GPR28 == PPC_CONTEXT_CACHE_LINE_4
      DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_4)
- 
-     evstdd r24, PPC_CONTEXT_OFFSET_GPR24(r3)
-     evstdd r25, PPC_CONTEXT_OFFSET_GPR25(r3)
-     evstdd r26, PPC_CONTEXT_OFFSET_GPR26(r3)
-     evstdd r27, PPC_CONTEXT_OFFSET_GPR27(r3)
- 
-     evstdd r28, PPC_CONTEXT_OFFSET_GPR28(r3)
-     evstdd r29, PPC_CONTEXT_OFFSET_GPR29(r3)
-     evstdd r30, PPC_CONTEXT_OFFSET_GPR30(r3)
-     evstdd r31, PPC_CONTEXT_OFFSET_GPR31(r3)
+ #endif
+ 
+     PPC_GPR_STORE r24, PPC_CONTEXT_OFFSET_GPR24(r3)
+     PPC_GPR_STORE r25, PPC_CONTEXT_OFFSET_GPR25(r3)
+     PPC_GPR_STORE r26, PPC_CONTEXT_OFFSET_GPR26(r3)
+     PPC_GPR_STORE r27, PPC_CONTEXT_OFFSET_GPR27(r3)
+ 
+     PPC_GPR_STORE r28, PPC_CONTEXT_OFFSET_GPR28(r3)
+     PPC_GPR_STORE r29, PPC_CONTEXT_OFFSET_GPR29(r3)
+     PPC_GPR_STORE r30, PPC_CONTEXT_OFFSET_GPR30(r3)
+     PPC_GPR_STORE r31, PPC_CONTEXT_OFFSET_GPR31(r3)
  
      /* Restore context from r4 */
  restore_context:
  
-     lwz r1, PPC_CONTEXT_OFFSET_SP(r4)
+ #ifdef __ALTIVEC__
+     mr r14, r4
+     .extern _CPU_Context_switch_altivec
+     bl _CPU_Context_switch_altivec
+     mr r4, r14
+ #endif
+ 
+     lwz r1, PPC_CONTEXT_OFFSET_GPR1(r4)
      lwz r5, PPC_CONTEXT_OFFSET_MSR(r4)
      lwz r6, PPC_CONTEXT_OFFSET_LR(r4)
      lwz r7, PPC_CONTEXT_OFFSET_CR(r4)
  
-     evldd r14, PPC_CONTEXT_OFFSET_GPR14(r4)
-     evldd r15, PPC_CONTEXT_OFFSET_GPR15(r4)
+     PPC_GPR_LOAD r14, PPC_CONTEXT_OFFSET_GPR14(r4)
+     PPC_GPR_LOAD r15, PPC_CONTEXT_OFFSET_GPR15(r4)
  
      DATA_CACHE_TOUCH(r0, r1)
  
-     evldd r16, PPC_CONTEXT_OFFSET_GPR16(r4)
-     evldd r17, PPC_CONTEXT_OFFSET_GPR17(r4)
-     evldd r18, PPC_CONTEXT_OFFSET_GPR18(r4)
-     evldd r19, PPC_CONTEXT_OFFSET_GPR19(r4)
- 
-     evldd r20, PPC_CONTEXT_OFFSET_GPR20(r4)
-     evldd r21, PPC_CONTEXT_OFFSET_GPR21(r4)
-     evldd r22, PPC_CONTEXT_OFFSET_GPR22(r4)
-     evldd r23, PPC_CONTEXT_OFFSET_GPR23(r4)
- 
-     evldd r24, PPC_CONTEXT_OFFSET_GPR24(r4)
-     evldd r25, PPC_CONTEXT_OFFSET_GPR25(r4)
-     evldd r26, PPC_CONTEXT_OFFSET_GPR26(r4)
-     evldd r27, PPC_CONTEXT_OFFSET_GPR27(r4)
- 
-     evldd r28, PPC_CONTEXT_OFFSET_GPR28(r4)
-     evldd r29, PPC_CONTEXT_OFFSET_GPR29(r4)
-     evldd r30, PPC_CONTEXT_OFFSET_GPR30(r4)
-     evldd r31, PPC_CONTEXT_OFFSET_GPR31(r4)
+     PPC_GPR_LOAD r16, PPC_CONTEXT_OFFSET_GPR16(r4)
+     PPC_GPR_LOAD r17, PPC_CONTEXT_OFFSET_GPR17(r4)
+     PPC_GPR_LOAD r18, PPC_CONTEXT_OFFSET_GPR18(r4)
+     PPC_GPR_LOAD r19, PPC_CONTEXT_OFFSET_GPR19(r4)
+ 
+     PPC_GPR_LOAD r20, PPC_CONTEXT_OFFSET_GPR20(r4)
+     PPC_GPR_LOAD r21, PPC_CONTEXT_OFFSET_GPR21(r4)
+     PPC_GPR_LOAD r22, PPC_CONTEXT_OFFSET_GPR22(r4)
+     PPC_GPR_LOAD r23, PPC_CONTEXT_OFFSET_GPR23(r4)
+ 
+     PPC_GPR_LOAD r24, PPC_CONTEXT_OFFSET_GPR24(r4)
+     PPC_GPR_LOAD r25, PPC_CONTEXT_OFFSET_GPR25(r4)
+     PPC_GPR_LOAD r26, PPC_CONTEXT_OFFSET_GPR26(r4)
+     PPC_GPR_LOAD r27, PPC_CONTEXT_OFFSET_GPR27(r4)
+ 
+     PPC_GPR_LOAD r28, PPC_CONTEXT_OFFSET_GPR28(r4)
+     PPC_GPR_LOAD r29, PPC_CONTEXT_OFFSET_GPR29(r4)
+     PPC_GPR_LOAD r30, PPC_CONTEXT_OFFSET_GPR30(r4)
+     PPC_GPR_LOAD r31, PPC_CONTEXT_OFFSET_GPR31(r4)
  
      mtcr r7
…
      mtmsr r5
  
+ #ifdef BSP_USE_SYNC_IN_CONTEXT_SWITCH
+     isync
+ #endif
+ 
      blr
- #endif /* __SPE__ */
- 
- /*
-  * _CPU_Context_restore
-  *
-  * This routine is generallu used only to restart self in an
-  * efficient manner. It may simply be a label in _CPU_Context_switch.
-  *
-  * NOTE: May be unnecessary to reload some registers.
-  */
- /*
-  * ACB: Don't worry about cache optimisation here - this is not THAT critical.
-  */
-     ALIGN (PPC_CACHE_ALIGNMENT, PPC_CACHE_ALIGN_POWER)
+ 
      PUBLIC_PROC (_CPU_Context_restore)
  PROC (_CPU_Context_restore):
- #ifndef __SPE__
-     lwz r5, GP_CR(r3)
-     lwz r6, GP_PC(r3)
-     lwz r7, GP_MSR(r3)
-     mtcrf 255, r5
-     mtlr r6
-     mtmsr r7
-     isync
-     lwz r1, GP_1(r3)
-     lwz r2, GP_2(r3)
- #if (PPC_USE_MULTIPLE == 1)
-     lmw r13, GP_13(r3)
- #else
-     lwz r13, GP_13(r3)
-     lwz r14, GP_14(r3)
-     lwz r15, GP_15(r3)
-     lwz r16, GP_16(r3)
-     lwz r17, GP_17(r3)
-     lwz r18, GP_18(r3)
-     lwz r19, GP_19(r3)
-     lwz r20, GP_20(r3)
-     lwz r21, GP_21(r3)
-     lwz r22, GP_22(r3)
-     lwz r23, GP_23(r3)
-     lwz r24, GP_24(r3)
-     lwz r25, GP_25(r3)
-     lwz r26, GP_26(r3)
-     lwz r27, GP_27(r3)
-     lwz r28, GP_28(r3)
-     lwz r29, GP_29(r3)
-     lwz r30, GP_30(r3)
-     lwz r31, GP_31(r3)
- #endif
- #ifdef __ALTIVEC__
-     EXTERN_PROC(_CPU_Context_restore_altivec)
-     b _CPU_Context_restore_altivec
- #endif
-     blr
- #else /* __SPE__ */
      /* Align to a cache line */
      clrrwi r4, r3, 5
  
+ #ifdef __ALTIVEC__
+     li r3, 0
+ #endif
+ 
      b restore_context
- #endif /* __SPE__ */
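The `clrrwi r3, r3, 5` at the top of the switch path clears the low five bits of the Context_Control pointer, i.e. rounds it down to a 32-byte cache line; the fixed offsets starting at PPC_CONTEXT_OFFSET_GPR1 = 32 then land on the same bytes that ppc_get_context() reaches from C. A sketch of that arithmetic, assuming the 32-byte line size the file now #error-checks:

    #include <assert.h>
    #include <stdint.h>

    #define CACHE_LINE 32u

    /* What `clrrwi r3, r3, 5` computes, followed by an access at
     * a fixed offset 32 + o from the rounded-down base. */
    static uintptr_t asm_view(uintptr_t ctx, uintptr_t o)
    {
      return (ctx & ~(uintptr_t)(CACHE_LINE - 1)) + 32 + o;
    }

    /* What ppc_get_context() computes, followed by a struct
     * member access at offset o. */
    static uintptr_t c_view(uintptr_t ctx, uintptr_t o)
    {
      return ((ctx & ~(uintptr_t)(CACHE_LINE - 1)) + CACHE_LINE) + o;
    }

    int main(void)
    {
      uintptr_t ctx;

      /* The two views agree wherever the instance starts within a line. */
      for (ctx = 0x1000; ctx < 0x1000 + CACHE_LINE; ++ctx)
        assert(asm_view(ctx, 0) == c_view(ctx, 0));
      return 0;
    }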
cpukit/score/cpu/powerpc/cpu.c
r721fe34 r1869bb7

  #ifdef HAVE_CONFIG_H
- #include "config.h"
+   #include "config.h"
  #endif
  
+ #include <rtems/system.h>
+ #include <rtems/score/cpu.h>
+ 
+ #define PPC_ASSERT_OFFSET(field, off) \
+   RTEMS_STATIC_ASSERT( \
+     offsetof(ppc_context, field) + PPC_DEFAULT_CACHE_LINE_SIZE \
+       == PPC_CONTEXT_OFFSET_ ## off, \
+     ppc_context_offset_ ## field \
+   )
+ 
+ PPC_ASSERT_OFFSET(gpr1, GPR1);
+ PPC_ASSERT_OFFSET(msr, MSR);
+ PPC_ASSERT_OFFSET(lr, LR);
+ PPC_ASSERT_OFFSET(cr, CR);
+ PPC_ASSERT_OFFSET(gpr14, GPR14);
+ PPC_ASSERT_OFFSET(gpr15, GPR15);
+ PPC_ASSERT_OFFSET(gpr16, GPR16);
+ PPC_ASSERT_OFFSET(gpr17, GPR17);
+ PPC_ASSERT_OFFSET(gpr18, GPR18);
+ PPC_ASSERT_OFFSET(gpr19, GPR19);
+ PPC_ASSERT_OFFSET(gpr20, GPR20);
+ PPC_ASSERT_OFFSET(gpr21, GPR21);
+ PPC_ASSERT_OFFSET(gpr22, GPR22);
+ PPC_ASSERT_OFFSET(gpr23, GPR23);
+ PPC_ASSERT_OFFSET(gpr24, GPR24);
+ PPC_ASSERT_OFFSET(gpr25, GPR25);
+ PPC_ASSERT_OFFSET(gpr26, GPR26);
+ PPC_ASSERT_OFFSET(gpr27, GPR27);
+ PPC_ASSERT_OFFSET(gpr28, GPR28);
+ PPC_ASSERT_OFFSET(gpr29, GPR29);
+ PPC_ASSERT_OFFSET(gpr30, GPR30);
+ PPC_ASSERT_OFFSET(gpr31, GPR31);
+ 
+ RTEMS_STATIC_ASSERT(
+   sizeof(Context_Control) % PPC_DEFAULT_CACHE_LINE_SIZE == 0,
+   ppc_context_size
+ );
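These assertions pin the C struct layout to the fixed offsets used by the assembly code. The definition of RTEMS_STATIC_ASSERT is not part of this changeset, so in the sketch below C11 _Static_assert stands in for it, with illustrative local names:

    #include <stddef.h>
    #include <stdint.h>

    /* Illustrative fragment of the layout being checked. */
    typedef struct {
      uint32_t gpr1;
      uint32_t msr;
      uint32_t lr;
      uint32_t cr;
    } example_ppc_context;

    #define EXAMPLE_CACHE_LINE_SIZE 32
    #define EXAMPLE_OFFSET_MSR 36   /* offset hard-coded in the assembly */

    _Static_assert(
      offsetof(example_ppc_context, msr) + EXAMPLE_CACHE_LINE_SIZE
        == EXAMPLE_OFFSET_MSR,
      "MSR offset used by the assembly must match the C struct layout"
    );

Compiling such a translation unit (for example with gcc -std=c11 -c) fails as soon as the struct layout drifts away from the assembly's expectations, which is exactly the protection these new checks add.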
cpukit/score/cpu/powerpc/rtems/score/cpu.h
r721fe34 r1869bb7

   * Copyright (c) 2001 Surrey Satellite Technology Limited (SSTL).
   *
- * Copyright (c) 2010-2011 embedded brains GmbH.
+ * Copyright (c) 2010-2012 embedded brains GmbH.
   *
   * The license and distribution terms for this file may be
…
   */
  
+ #ifndef __SPE__
+   #define PPC_GPR_TYPE uint32_t
+   #define PPC_GPR_SIZE 4
+   #define PPC_GPR_LOAD lwz
+   #define PPC_GPR_STORE stw
+ #else
+   #define PPC_GPR_TYPE uint64_t
+   #define PPC_GPR_SIZE 8
+   #define PPC_GPR_LOAD evldd
+   #define PPC_GPR_STORE evstdd
+ #endif
+ 
+ #define PPC_DEFAULT_CACHE_LINE_SIZE 32
+ 
  #ifndef ASM
  
+ /* Non-volatile context according to E500ABIUG and EABI */
  typedef struct {
- #ifndef __SPE__
-   uint32_t gpr1;  /* Stack pointer for all */
-   uint32_t gpr2;  /* Reserved SVR4, section ptr EABI + */
-   uint32_t gpr13; /* Section ptr SVR4/EABI */
-   uint32_t gpr14; /* Non volatile for all */
-   uint32_t gpr15; /* Non volatile for all */
-   uint32_t gpr16; /* Non volatile for all */
-   uint32_t gpr17; /* Non volatile for all */
-   uint32_t gpr18; /* Non volatile for all */
-   uint32_t gpr19; /* Non volatile for all */
-   uint32_t gpr20; /* Non volatile for all */
-   uint32_t gpr21; /* Non volatile for all */
-   uint32_t gpr22; /* Non volatile for all */
-   uint32_t gpr23; /* Non volatile for all */
-   uint32_t gpr24; /* Non volatile for all */
-   uint32_t gpr25; /* Non volatile for all */
-   uint32_t gpr26; /* Non volatile for all */
-   uint32_t gpr27; /* Non volatile for all */
-   uint32_t gpr28; /* Non volatile for all */
-   uint32_t gpr29; /* Non volatile for all */
-   uint32_t gpr30; /* Non volatile for all */
-   uint32_t gpr31; /* Non volatile for all */
-   uint32_t cr;    /* PART of the CR is non volatile for all */
-   uint32_t pc;    /* Program counter/Link register */
-   uint32_t msr;   /* Initial interrupt level */
- #ifdef __ALTIVEC__
-   /*
-    * 12 non-volatile vector registers, cache-aligned area for vscr/vrsave
-    * and padding to ensure cache-alignment. Unfortunately, we can't verify
-    * the cache line size here in the cpukit but altivec support code will
-    * produce an error if this is ever different from 32 bytes.
-    *
-    * Note: it is the BSP/CPU-support's responsibility to save/restore
-    * volatile vregs across interrupts and exceptions.
-    */
-   uint8_t altivec[16*12 + 32 + 32];
- #endif
- #else
-   /* Non-volatile context according to E500ABIUG and EABI */
-   uint32_t context [
-     8 /* Cache line padding */
-     + 1 /* Stack pointer */
-     + 1 /* MSR */
-     + 1 /* LR */
-     + 1 /* CR */
-     + 18 * 2 /* GPR 14 to GPR 31 */
-   ];
+   uint32_t gpr1;
+   uint32_t msr;
+   uint32_t lr;
+   uint32_t cr;
+   PPC_GPR_TYPE gpr14;
+   PPC_GPR_TYPE gpr15;
+   PPC_GPR_TYPE gpr16;
+   PPC_GPR_TYPE gpr17;
+   PPC_GPR_TYPE gpr18;
+   PPC_GPR_TYPE gpr19;
+   PPC_GPR_TYPE gpr20;
+   PPC_GPR_TYPE gpr21;
+   PPC_GPR_TYPE gpr22;
+   PPC_GPR_TYPE gpr23;
+   PPC_GPR_TYPE gpr24;
+   PPC_GPR_TYPE gpr25;
+   PPC_GPR_TYPE gpr26;
+   PPC_GPR_TYPE gpr27;
+   PPC_GPR_TYPE gpr28;
+   PPC_GPR_TYPE gpr29;
+   PPC_GPR_TYPE gpr30;
+   PPC_GPR_TYPE gpr31;
+ #ifdef __ALTIVEC__
+   /*
+    * 12 non-volatile vector registers, cache-aligned area for vscr/vrsave
+    * and padding to ensure cache-alignment. Unfortunately, we can't verify
+    * the cache line size here in the cpukit but altivec support code will
+    * produce an error if this is ever different from 32 bytes.
+    *
+    * Note: it is the BSP/CPU-support's responsibility to save/restore
+    * volatile vregs across interrupts and exceptions.
+    */
+   uint8_t altivec[16*12 + 32 + PPC_DEFAULT_CACHE_LINE_SIZE];
  #endif
+ } ppc_context;
+ 
+ typedef struct {
+   uint8_t context [
+     PPC_DEFAULT_CACHE_LINE_SIZE
+     + sizeof(ppc_context)
+     + (sizeof(ppc_context) % PPC_DEFAULT_CACHE_LINE_SIZE == 0
+       ? 0
+       : PPC_DEFAULT_CACHE_LINE_SIZE
+         - sizeof(ppc_context) % PPC_DEFAULT_CACHE_LINE_SIZE)
+   ];
  } Context_Control;
+ 
+ static inline ppc_context *ppc_get_context( Context_Control *context )
+ {
+   uintptr_t clsz = PPC_DEFAULT_CACHE_LINE_SIZE;
+   uintptr_t mask = clsz - 1;
+   uintptr_t addr = (uintptr_t) context;
+ 
+   return (ppc_context *) ((addr & ~mask) + clsz);
+ }
+ 
+ #define _CPU_Context_Get_SP( _context ) \
+   ppc_get_context(_context)->gpr1
  #endif /* ASM */
  
- #ifndef __SPE__
- #define PPC_CONTEXT_SET_SP( _context, _sp ) \
-   do { \
-     (_context)->gpr1 = _sp; \
-   } while (0)
- 
- #define PPC_CONTEXT_GET_CR( _context ) \
-   (_context)->cr
- 
- #define PPC_CONTEXT_GET_MSR( _context ) \
-   (_context)->msr
- 
- #define PPC_CONTEXT_SET_MSR( _context, _msr ) \
-   do { \
-     (_context)->msr = _msr; \
-   } while (0)
- 
- #define PPC_CONTEXT_FIRST_SAVED_GPR 13
- 
- #define PPC_CONTEXT_GET_FIRST_SAVED( _context ) \
-   (_context)->gpr13
- 
- #define PPC_CONTEXT_GET_PC( _context ) \
-   (_context)->pc
- 
- #define PPC_CONTEXT_SET_PC( _context, _pc ) \
-   do { \
-     (_context)->pc = _pc; \
-   } while (0)
- 
- #define _CPU_Context_Get_SP( _context ) \
-   (_context)->gpr1
- #else
- #define PPC_CONTEXT_CACHE_LINE_0 32
- #define PPC_CONTEXT_OFFSET_SP 32
- #define PPC_CONTEXT_OFFSET_MSR 36
- #define PPC_CONTEXT_OFFSET_LR 40
- #define PPC_CONTEXT_OFFSET_CR 44
- #define PPC_CONTEXT_OFFSET_GPR14 48
- #define PPC_CONTEXT_OFFSET_GPR15 56
- #define PPC_CONTEXT_CACHE_LINE_1 64
- #define PPC_CONTEXT_OFFSET_GPR16 64
- #define PPC_CONTEXT_OFFSET_GPR17 72
- #define PPC_CONTEXT_OFFSET_GPR18 80
- #define PPC_CONTEXT_OFFSET_GPR19 88
- #define PPC_CONTEXT_CACHE_LINE_2 96
- #define PPC_CONTEXT_OFFSET_GPR20 96
- #define PPC_CONTEXT_OFFSET_GPR21 104
- #define PPC_CONTEXT_OFFSET_GPR22 112
- #define PPC_CONTEXT_OFFSET_GPR23 120
- #define PPC_CONTEXT_CACHE_LINE_3 128
- #define PPC_CONTEXT_OFFSET_GPR24 128
- #define PPC_CONTEXT_OFFSET_GPR25 136
- #define PPC_CONTEXT_OFFSET_GPR26 144
- #define PPC_CONTEXT_OFFSET_GPR27 152
- #define PPC_CONTEXT_CACHE_LINE_4 160
- #define PPC_CONTEXT_OFFSET_GPR28 160
- #define PPC_CONTEXT_OFFSET_GPR29 168
- #define PPC_CONTEXT_OFFSET_GPR30 176
- #define PPC_CONTEXT_OFFSET_GPR31 184
- 
- #define PPC_CONTEXT_AREA( _context ) \
-   ((uint32_t *) (((uintptr_t) (_context)) & ~0x1fU))
- 
- #define PPC_CONTEXT_FIELD( _context, _offset ) \
-   PPC_CONTEXT_AREA( _context ) [(_offset) / 4]
- 
- #define PPC_CONTEXT_SET_SP( _context, _sp ) \
-   do { \
-     PPC_CONTEXT_FIELD( _context, PPC_CONTEXT_OFFSET_SP ) = _sp; \
-   } while (0)
- 
- #define PPC_CONTEXT_GET_CR( _context ) \
-   PPC_CONTEXT_FIELD( _context, PPC_CONTEXT_OFFSET_CR )
- 
- #define PPC_CONTEXT_GET_MSR( _context ) \
-   PPC_CONTEXT_FIELD( _context, PPC_CONTEXT_OFFSET_MSR )
- 
- #define PPC_CONTEXT_SET_MSR( _context, _msr ) \
-   do { \
-     PPC_CONTEXT_FIELD( _context, PPC_CONTEXT_OFFSET_MSR ) = _msr; \
-   } while (0)
- 
- #define PPC_CONTEXT_FIRST_SAVED_GPR 14
- 
- #define PPC_CONTEXT_GET_FIRST_SAVED( _context ) \
-   PPC_CONTEXT_FIELD( _context, PPC_CONTEXT_OFFSET_GPR14 )
- 
- #define PPC_CONTEXT_GET_PC( _context ) \
-   PPC_CONTEXT_FIELD( _context, PPC_CONTEXT_OFFSET_LR )
- 
- #define PPC_CONTEXT_SET_PC( _context, _pc ) \
-   do { \
-     PPC_CONTEXT_FIELD( _context, PPC_CONTEXT_OFFSET_LR ) = _pc; \
-   } while (0)
- 
- #define _CPU_Context_Get_SP( _context ) \
-   PPC_CONTEXT_FIELD( _context, PPC_CONTEXT_OFFSET_SP )
- #endif
+ #define PPC_CONTEXT_OFFSET_GPR1 32
+ #define PPC_CONTEXT_OFFSET_MSR 36
+ #define PPC_CONTEXT_OFFSET_LR 40
+ #define PPC_CONTEXT_OFFSET_CR 44
+ 
+ #define PPC_CONTEXT_GPR_OFFSET( gpr ) \
+   (((gpr) - 14) * PPC_GPR_SIZE + 48)
+ 
+ #define PPC_CONTEXT_OFFSET_GPR14 PPC_CONTEXT_GPR_OFFSET( 14 )
+ #define PPC_CONTEXT_OFFSET_GPR15 PPC_CONTEXT_GPR_OFFSET( 15 )
+ #define PPC_CONTEXT_OFFSET_GPR16 PPC_CONTEXT_GPR_OFFSET( 16 )
+ #define PPC_CONTEXT_OFFSET_GPR17 PPC_CONTEXT_GPR_OFFSET( 17 )
+ #define PPC_CONTEXT_OFFSET_GPR18 PPC_CONTEXT_GPR_OFFSET( 18 )
+ #define PPC_CONTEXT_OFFSET_GPR19 PPC_CONTEXT_GPR_OFFSET( 19 )
+ #define PPC_CONTEXT_OFFSET_GPR20 PPC_CONTEXT_GPR_OFFSET( 20 )
+ #define PPC_CONTEXT_OFFSET_GPR21 PPC_CONTEXT_GPR_OFFSET( 21 )
+ #define PPC_CONTEXT_OFFSET_GPR22 PPC_CONTEXT_GPR_OFFSET( 22 )
+ #define PPC_CONTEXT_OFFSET_GPR23 PPC_CONTEXT_GPR_OFFSET( 23 )
+ #define PPC_CONTEXT_OFFSET_GPR24 PPC_CONTEXT_GPR_OFFSET( 24 )
+ #define PPC_CONTEXT_OFFSET_GPR25 PPC_CONTEXT_GPR_OFFSET( 25 )
+ #define PPC_CONTEXT_OFFSET_GPR26 PPC_CONTEXT_GPR_OFFSET( 26 )
+ #define PPC_CONTEXT_OFFSET_GPR27 PPC_CONTEXT_GPR_OFFSET( 27 )
+ #define PPC_CONTEXT_OFFSET_GPR28 PPC_CONTEXT_GPR_OFFSET( 28 )
+ #define PPC_CONTEXT_OFFSET_GPR29 PPC_CONTEXT_GPR_OFFSET( 29 )
+ #define PPC_CONTEXT_OFFSET_GPR30 PPC_CONTEXT_GPR_OFFSET( 30 )
+ #define PPC_CONTEXT_OFFSET_GPR31 PPC_CONTEXT_GPR_OFFSET( 31 )
  
  #ifndef ASM
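Note that ppc_get_context() always advances to the next cache-line boundary, even when the Context_Control instance happens to be aligned already, which is why the context array reserves a full leading line. A worked example of that rounding, assuming the 32-byte PPC_DEFAULT_CACHE_LINE_SIZE defined above:

    #include <assert.h>
    #include <stdint.h>

    #define CLSZ ((uintptr_t) 32)   /* PPC_DEFAULT_CACHE_LINE_SIZE */

    /* Address arithmetic performed by ppc_get_context(). */
    static uintptr_t get_context_addr(uintptr_t context)
    {
      return (context & ~(CLSZ - 1)) + CLSZ;
    }

    int main(void)
    {
      assert(get_context_addr(0x2008) == 0x2020);  /* mid-line: rounds up */
      assert(get_context_addr(0x2020) == 0x2040);  /* aligned: still advances */
      return 0;
    }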