/*
 * (c) 1999, Eric Valette valette@crf.canon.fr
 *
 * Modified and partially rewritten by Till Straumann, 2007-2008
 *
 * Modified by Sebastian Huber <sebastian.huber@embedded-brains.de>, 2008-2012.
 *
 * Low-level assembly code for PPC exceptions (macros).
 *
 * This file was written with the goal to eliminate
 * ALL #ifdef <cpu_flavor> conditionals -- please do not
 * reintroduce such statements.
 */
---|
14 | |
---|
15 | #include <bspopts.h> |
---|
16 | #include <bsp/vectors.h> |
---|
17 | #include <libcpu/powerpc-utility.h> |
---|
18 | |
---|
/* Bit index of the LT/GT/EQ bit within condition register field 'cr',
 * as expected by the CR logical instructions (crand, creqv, crxor, ...).
 * Each CR field is 4 bits wide: LT, GT, EQ, SO.
 */
#define LT(cr) ((cr)*4+0)
#define GT(cr) ((cr)*4+1)
#define EQ(cr) ((cr)*4+2)

/* Opcode of 'stw r1, off(r13)'
 *
 * Used to recognize the "write lock" instruction of an interrupted
 * lower-priority prologue (see TEST_1ST_OPCODE_crit below).
 * 36 is the primary opcode of 'stw'; source register and base register
 * are encoded in the upper halfword, the 16-bit displacement in the lower.
 */
#define STW_R1_R13(off) ((((36<<10)|(r1<<5)|(r13))<<16) | ((off)&0xffff))

/* Non-volatile register holding the exception frame pointer; it survives
 * the calls made by the wrapper code.
 */
#define FRAME_REGISTER r14

/* Volatile registers used by the wrapper code */
#define VECTOR_REGISTER r4
#define SCRATCH_REGISTER_0 r5
#define SCRATCH_REGISTER_1 r6
#define SCRATCH_REGISTER_2 r7

/* Exception frame offsets corresponding to the register aliases above */
#define FRAME_OFFSET( r) GPR14_OFFSET( r)
#define VECTOR_OFFSET( r) GPR4_OFFSET( r)
#define SCRATCH_REGISTER_0_OFFSET( r) GPR5_OFFSET( r)
#define SCRATCH_REGISTER_1_OFFSET( r) GPR6_OFFSET( r)
#define SCRATCH_REGISTER_2_OFFSET( r) GPR7_OFFSET( r)

/* Non-volatile CR fields used to remember state across function calls:
 * CR_TYPE - exception type (synchronous vs. asynchronous)
 * CR_MSR  - whether the MSR was changed and must be restored
 * CR_LOCK - result of the lower-priority lock test
 */
#define CR_TYPE 2
#define CR_MSR 3
#define CR_LOCK 4
---|
41 | |
---|
42 | /* |
---|
43 | * Minimal prologue snippets: |
---|
44 | * |
---|
45 | * Rationale: on some PPCs the vector offsets are spaced |
---|
46 | * as closely as 16 bytes. |
---|
47 | * |
---|
48 | * If we deal with asynchronous exceptions ('interrupts') |
---|
49 | * then we can use 4 instructions to |
---|
50 | * 1. atomically write lock to indicate ISR is in progress |
---|
51 | * (we cannot atomically increase the Thread_Dispatch_disable_level, |
---|
52 | * see README) |
---|
53 | * 2. save a register in special area |
---|
54 | * 3. load register with vector info |
---|
55 | * 4. branch |
---|
56 | * |
---|
57 | * If we deal with a synchronous exception (no stack switch |
---|
58 | * nor dispatch-disabling necessary) then it's easier: |
---|
59 | * 1. push stack frame |
---|
60 | * 2. save register on stack |
---|
61 | * 3. load register with vector info |
---|
62 | * 4. branch |
---|
63 | * |
---|
64 | */ |
---|
65 | |
---|
/*
 *****************************************************************************
 * MACRO: PPC_EXC_MIN_PROLOG_ASYNC
 *****************************************************************************
 * USES:    VECTOR_REGISTER
 * ON EXIT: Vector in VECTOR_REGISTER
 *
 * NOTES:   VECTOR_REGISTER saved in special variable
 *          'ppc_exc_vector_register_\_PRI'.
 *
 * Minimal (4 instruction + branch target) prologue for asynchronous
 * ('interrupt-like') exceptions; must fit into the smallest vector
 * spacing (16 bytes) of some PPCs.
 */
	.macro	PPC_EXC_MIN_PROLOG_ASYNC _NAME _VEC _PRI _FLVR

	.global	ppc_exc_min_prolog_async_\_NAME
ppc_exc_min_prolog_async_\_NAME:
	/* Atomically write lock variable in 1st instruction with non-zero
	 * value (r1 is always nonzero; r13 could also be used)
	 *
	 * NOTE: raising an exception and executing this first instruction
	 *       of the exception handler is apparently NOT atomic, i.e., a
	 *       low-priority IRQ could set the PC to this location and a
	 *       critical IRQ could intervene just at this point.
	 *
	 *       We check against this pathological case by checking the
	 *       opcode/instruction at the interrupted PC for matching
	 *
	 *         stw r1, ppc_exc_lock_XXX@sdarel(r13)
	 *
	 *       ASSUMPTION:
	 *          1) ALL 'asynchronous' exceptions (which disable thread-
	 *             dispatching) execute THIS 'magical' instruction
	 *             FIRST.
	 *          2) This instruction (including the address offset)
	 *             is not used anywhere else (probably a safe assumption).
	 */
	stw	r1, ppc_exc_lock_\_PRI@sdarel(r13)
	/* We have no stack frame yet; store VECTOR_REGISTER in special area;
	 * a higher-priority (critical) interrupt uses a different area
	 * (hence the different prologue snippets) (\_PRI)
	 */
	stw	VECTOR_REGISTER, ppc_exc_vector_register_\_PRI@sdarel(r13)
	/* Load vector; the set MSB flags this as an asynchronous exception
	 * (see the WRAP macro, which tests the sign of VECTOR_REGISTER).
	 */
	li	VECTOR_REGISTER, ( \_VEC | 0xffff8000 )

	/*
	 * We store the absolute branch target address here. It will be used
	 * to generate the branch operation in ppc_exc_make_prologue().
	 */
	.int	ppc_exc_wrap_\_FLVR

	.endm
---|
118 | |
---|
/*
 *****************************************************************************
 * MACRO: PPC_EXC_MIN_PROLOG_SYNC
 *****************************************************************************
 * USES:    VECTOR_REGISTER
 * ON EXIT: vector in VECTOR_REGISTER
 *
 * NOTES:   exception stack frame pushed; VECTOR_REGISTER saved in frame
 *
 * Minimal prologue for synchronous exceptions: no stack switch and no
 * dispatch-disabling are necessary, so we can push a frame right away
 * and save the vector register in it (no lock, no special save area).
 */
	.macro	PPC_EXC_MIN_PROLOG_SYNC _NAME _VEC _PRI _FLVR

	.global	ppc_exc_min_prolog_sync_\_NAME
ppc_exc_min_prolog_sync_\_NAME:
	stwu	r1, -EXCEPTION_FRAME_END(r1)
	stw	VECTOR_REGISTER, VECTOR_OFFSET(r1)
	/* MSB stays clear: marks this as a synchronous exception */
	li	VECTOR_REGISTER, \_VEC

	/*
	 * We store the absolute branch target address here. It will be used
	 * to generate the branch operation in ppc_exc_make_prologue().
	 */
	.int	ppc_exc_wrap_nopush_\_FLVR

	.endm
---|
144 | |
---|
/*
 *****************************************************************************
 * MACRO: TEST_1ST_OPCODE_crit
 *****************************************************************************
 *
 * USES:    REG, cr0
 * ON EXIT: REG available (contains *pc - STW_R1_R13(0)),
 *          return value in cr0.
 *
 * test opcode interrupted by critical (asynchronous) exception; set EQ(cr0)
 * if
 *
 *   *SRR0 == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
 *
 * i.e., if the critical exception hit a standard prologue before its very
 * first instruction (the lock write) could execute.
 */
	.macro	TEST_1ST_OPCODE_crit _REG

	/* Fetch the instruction at the interrupted PC (saved SRR0) */
	lwz	\_REG, SRR0_FRAME_OFFSET(FRAME_REGISTER)
	lwz	\_REG, 0(\_REG)
	/* opcode now in REG */

	/* subtract upper 16bits of 'stw r1, 0(r13)' instruction */
	subis	\_REG, \_REG, STW_R1_R13(0)@h
	/*
	 * if what's left compares against the 'ppc_exc_lock_std@sdarel'
	 * address offset then we have a match...
	 */
	cmplwi	cr0, \_REG, ppc_exc_lock_std@sdarel

	.endm
---|
174 | |
---|
/*
 *****************************************************************************
 * MACRO: TEST_LOCK_std
 *****************************************************************************
 *
 * USES:    CR_LOCK
 * ON EXIT: CR_LOCK is set (indicates no lower-priority locks are engaged)
 *
 */
	.macro	TEST_LOCK_std _FLVR
	/* 'std' is lowest level, i.e., can not be locked -> EQ(CR_LOCK) = 1
	 * (creqv of a bit with itself unconditionally sets that bit)
	 */
	creqv	EQ(CR_LOCK), EQ(CR_LOCK), EQ(CR_LOCK)
	.endm
---|
188 | |
---|
/*
 ******************************************************************************
 * MACRO: TEST_LOCK_crit
 ******************************************************************************
 *
 * USES:    CR_LOCK, cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available,
 *          returns result in CR_LOCK.
 *
 * critical-exception wrapper has to check 'std' lock:
 *
 * Return CR_LOCK = (   (interrupt_mask & MSR_CE) != 0
 *                   && ppc_lock_std == 0
 *                   && *SRR0 != <write std lock instruction> )
 */
	.macro	TEST_LOCK_crit _FLVR
	/* If MSR_CE is not in the IRQ mask then we must never allow
	 * thread-dispatching!
	 */
	GET_INTERRUPT_MASK mask=SCRATCH_REGISTER_1
	/* EQ(cr0) = ((interrupt_mask & MSR_CE) == 0) */
	andis.	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, MSR_CE@h
	/* Skip the lock/opcode tests; EQ(cr0) = 1 forces CR_LOCK clear below */
	beq	TEST_LOCK_crit_done_\_FLVR

	/* STD interrupt could have been interrupted before executing the 1st
	 * instruction which sets the lock; check this case by looking at the
	 * opcode present at the interrupted PC location.
	 */
	TEST_1ST_OPCODE_crit _REG=SCRATCH_REGISTER_0
	/*
	 * At this point cr0 is set if
	 *
	 *   *(PC) == 'stw r1, ppc_exc_lock_std@sdarel(r13)'
	 *
	 */

	/* check lock; EQ(CR_LOCK) = (ppc_exc_lock_std == 0) */
	lwz	SCRATCH_REGISTER_1, ppc_exc_lock_std@sdarel(r13)
	cmplwi	CR_LOCK, SCRATCH_REGISTER_1, 0

	/* set EQ(CR_LOCK) to result */
TEST_LOCK_crit_done_\_FLVR:
	/* If we end up here because the interrupt mask did not contain
	 * MSR_CE then cr0 is set and therefore the value of CR_LOCK
	 * does not matter since x && !1 == 0:
	 *
	 *   if ( (interrupt_mask & MSR_CE) == 0 ) {
	 *     EQ(CR_LOCK) = EQ(CR_LOCK) && ! ((interrupt_mask & MSR_CE) == 0)
	 *   } else {
	 *     EQ(CR_LOCK) = (ppc_exc_lock_std == 0) && ! (*pc == <write std lock instruction>)
	 *   }
	 */
	crandc	EQ(CR_LOCK), EQ(CR_LOCK), EQ(cr0)

	.endm
---|
245 | |
---|
/*
 ******************************************************************************
 * MACRO: TEST_LOCK_mchk
 ******************************************************************************
 *
 * USES:    CR_LOCK
 * ON EXIT: CR_LOCK is cleared.
 *
 * We never want to disable machine-check exceptions to avoid a checkstop.
 * This means that we cannot use enabling/disabling this type of exception for
 * protection of critical OS data structures. Therefore, calling OS primitives
 * from a machine-check handler is ILLEGAL. Since machine-checks can happen
 * anytime it is not legal to perform a context switch (since the exception
 * could hit a IRQ protected section of code). We simply let this test return
 * 0 so that ppc_exc_wrapup is never called after handling a machine-check.
 */
	.macro	TEST_LOCK_mchk _SRR0 _FLVR

	/* crxor of a bit with itself unconditionally clears that bit */
	crxor	EQ(CR_LOCK), EQ(CR_LOCK), EQ(CR_LOCK)

	.endm
---|
267 | |
---|
/*
 ******************************************************************************
 * MACRO: RECOVER_CHECK_\PRI
 ******************************************************************************
 *
 * USES:    cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1
 * ON EXIT: cr0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1 available
 *
 * Checks if the exception is recoverable for exceptions which need such a
 * test: compares the MSR_RI bit of the interrupted context (saved SRR1)
 * with the expected value from ppc_exc_msr_bits.  If the bits differ the
 * exception is NOT recoverable and we deliberately spin forever (the
 * 'twiddle' label branches to itself) rather than return into an
 * unrecoverable context.
 */

/* Standard*/
	.macro	RECOVER_CHECK_std _FLVR

#ifndef PPC_EXC_CONFIG_BOOKE_ONLY

	/* Check if exception is recoverable */
	lwz	SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
	lwz	SCRATCH_REGISTER_1, ppc_exc_msr_bits@sdarel(r13)
	/* XOR isolates the bits that differ from the expected MSR bits */
	xor	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
	andi.	SCRATCH_REGISTER_0, SCRATCH_REGISTER_1, MSR_RI

recover_check_twiddle_std_\_FLVR:

	/* Not recoverable? Then spin here forever (intentional). */
	bne	recover_check_twiddle_std_\_FLVR

#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */

	.endm
---|
299 | |
---|
/* Critical: no recoverability test is performed for critical exceptions */
	.macro	RECOVER_CHECK_crit _FLVR

	/* Nothing to do */

	.endm
---|
306 | |
---|
/* Machine check: same recoverability test as the standard flavor
 * (compare MSR_RI of the saved SRR1 against ppc_exc_msr_bits; spin
 * forever if the exception is not recoverable).
 */
	.macro	RECOVER_CHECK_mchk _FLVR

#ifndef PPC_EXC_CONFIG_BOOKE_ONLY

	/* Check if exception is recoverable */
	lwz	SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)
	lwz	SCRATCH_REGISTER_1, ppc_exc_msr_bits@sdarel(r13)
	/* XOR isolates the bits that differ from the expected MSR bits */
	xor	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
	andi.	SCRATCH_REGISTER_0, SCRATCH_REGISTER_1, MSR_RI

recover_check_twiddle_mchk_\_FLVR:

	/* Not recoverable? Then spin here forever (intentional). */
	bne	recover_check_twiddle_mchk_\_FLVR

#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */

	.endm
---|
326 | |
---|
/*
 ******************************************************************************
 * MACRO: WRAP
 ******************************************************************************
 *
 * Minimal prologue snippets jump into WRAP which calls the high level
 * exception handler.  We must have this macro instantiated for each possible
 * flavor of exception so that we use the proper lock variable, SRR register
 * pair and RFI instruction.
 *
 * We have two types of exceptions: synchronous and asynchronous (= interrupt
 * like).  The type is encoded in the vector register (= VECTOR_REGISTER).  For
 * interrupt like exceptions the MSB in the vector register is set.  The
 * exception type is kept in the comparison register CR_TYPE.  Normal
 * exceptions (MSB is clear) use the task stack and a context switch may happen
 * at any time.  The interrupt like exceptions disable thread dispatching and
 * switch to the interrupt stack (base address is in SPRG1).
 *
 *                                      +
 *                                      |
 *                                      | Minimal prologue
 *                                      |
 *                                      +
 *                                      |
 *                                      | o Setup frame pointer
 *                                      | o Save basic registers
 *                                      | o Determine exception type:
 *                                      |   synchronous or asynchronous
 *                                      |
 *                                +-----+
 * Synchronous exceptions:        |     | Asynchronous exceptions:
 *                                |     |
 * Save non-volatile registers    |     | o Increment thread dispatch
 *                                |     |   disable level
 *                                |     | o Increment ISR nest level
 *                                |     | o Clear lock
 *                                |     | o Switch stack if necessary
 *                                |     |
 *                                +---->+
 *                                      |
 *                                      | o Save volatile registers
 *                                      | o Change MSR if necessary
 *                                      | o Call high level handler
 *                                      | o Call global handler if necessary
 *                                      | o Check if exception is recoverable
 *                                      |
 *                                +-----+
 * Synchronous exceptions:        |     | Asynchronous exceptions:
 *                                |     |
 * Restore non-volatile registers |     | o Decrement ISR nest level
 *                                |     | o Switch stack
 *                                |     | o Decrement thread dispatch
 *                                |     |   disable level
 *                                |     | o Test lock
 *                                |     | o May do a context switch
 *                                |     |
 *                                +---->+
 *                                      |
 *                                      | o Restore MSR if necessary
 *                                      | o Restore volatile registers
 *                                      | o Restore frame pointer
 *                                      | o Return
 *                                      |
 *                                      +
 *
 * Arguments:
 *   _FLVR - flavor name used to make labels unique per instantiation
 *   _PRI  - priority suffix selecting lock variable / TEST_LOCK_* /
 *           RECOVER_CHECK_* (std, crit, mchk)
 *   _SRR0 - save/restore register 0 SPR of this flavor (e.g. SRR0, CSRR0)
 *   _SRR1 - save/restore register 1 SPR of this flavor
 *   _RFI  - return-from-interrupt instruction of this flavor (rfi, rfci, ...)
 */
	.macro	WRAP _FLVR _PRI _SRR0 _SRR1 _RFI

	/* Entry for asynchronous prologues (no frame pushed yet) */
	.global	ppc_exc_wrap_\_FLVR
ppc_exc_wrap_\_FLVR:

	/* Push exception frame */
	stwu	r1, -EXCEPTION_FRAME_END(r1)

	/* Entry for synchronous prologues (frame already pushed) */
	.global	ppc_exc_wrap_nopush_\_FLVR
ppc_exc_wrap_nopush_\_FLVR:

	/* Save frame register */
	stw	FRAME_REGISTER, FRAME_OFFSET(r1)

wrap_no_save_frame_register_\_FLVR:

	/*
	 * We save at first only some scratch registers
	 * and the CR.  We use a non-volatile register
	 * for the exception frame pointer (= FRAME_REGISTER).
	 */

	/* Move frame address in non-volatile FRAME_REGISTER */
	mr	FRAME_REGISTER, r1

	/* Save scratch registers */
	stw	SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(FRAME_REGISTER)
	stw	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(FRAME_REGISTER)
	stw	SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(FRAME_REGISTER)

	/* Save CR */
	mfcr	SCRATCH_REGISTER_0
	stw	SCRATCH_REGISTER_0, EXC_CR_OFFSET(FRAME_REGISTER)

	/* Check exception type and remember it in non-volatile CR_TYPE
	 * (asynchronous exceptions have the MSB set, i.e. compare < 0)
	 */
	cmpwi	CR_TYPE, VECTOR_REGISTER, 0

	/*
	 * Depending on the exception type we do now save the non-volatile
	 * registers or disable thread dispatching and switch to the ISR stack.
	 */

	/* Branch for synchronous exceptions */
	bge	CR_TYPE, wrap_save_non_volatile_regs_\_FLVR

	/*
	 * Increment the thread dispatch disable level in case a higher
	 * priority exception occurs we don't want it to run the scheduler.  It
	 * is safe to increment this without disabling higher priority
	 * exceptions since those will see that we wrote the lock anyways.
	 */

	/* Increment ISR nest level and thread dispatch disable level */
	GET_SELF_CPU_CONTROL	SCRATCH_REGISTER_2
	lwz	SCRATCH_REGISTER_0, PER_CPU_ISR_NEST_LEVEL@l(SCRATCH_REGISTER_2)
	lwz	SCRATCH_REGISTER_1, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_REGISTER_2)
	addi	SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, 1
	addi	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, 1
	stw	SCRATCH_REGISTER_0, PER_CPU_ISR_NEST_LEVEL@l(SCRATCH_REGISTER_2)
	stw	SCRATCH_REGISTER_1, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_REGISTER_2)

	/*
	 * No higher-priority exception occurring after this point
	 * can cause a context switch.
	 */

	/* Clear lock (set by the minimal prologue's first instruction) */
	li	SCRATCH_REGISTER_0, 0
	stw	SCRATCH_REGISTER_0, ppc_exc_lock_\_PRI@sdarel(r13)

	/* Switch stack if necessary: switch to the ISR stack unless r1
	 * already lies within [SPRG2 (= stack limit), SPRG1 (= stack base))
	 */
	mfspr	SCRATCH_REGISTER_0, SPRG1
	cmpw	SCRATCH_REGISTER_0, r1
	blt	wrap_stack_switch_\_FLVR
	mfspr	SCRATCH_REGISTER_1, SPRG2
	cmpw	SCRATCH_REGISTER_1, r1
	blt	wrap_stack_switch_done_\_FLVR

wrap_stack_switch_\_FLVR:

	mr	r1, SCRATCH_REGISTER_0

wrap_stack_switch_done_\_FLVR:

	/*
	 * Load the pristine VECTOR_REGISTER from a special location for
	 * asynchronous exceptions.  The synchronous exceptions save the
	 * VECTOR_REGISTER in their minimal prologue.
	 */
	lwz	SCRATCH_REGISTER_2, ppc_exc_vector_register_\_PRI@sdarel(r13)

	/* Save pristine vector register */
	stw	SCRATCH_REGISTER_2, VECTOR_OFFSET(FRAME_REGISTER)

wrap_disable_thread_dispatching_done_\_FLVR:

	/*
	 * We now have SCRATCH_REGISTER_0, SCRATCH_REGISTER_1,
	 * SCRATCH_REGISTER_2 and CR available.  VECTOR_REGISTER still holds
	 * the vector (and exception type).  FRAME_REGISTER is a pointer to the
	 * exception frame (always on the stack of the interrupted context).
	 * r1 is the stack pointer, either on the task stack or on the ISR
	 * stack.  CR_TYPE holds the exception type.
	 */

	/* Save SRR0 (interrupted PC of this exception flavor) */
	mfspr	SCRATCH_REGISTER_0, \_SRR0
	stw	SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(FRAME_REGISTER)

	/* Save SRR1 (interrupted MSR of this exception flavor) */
	mfspr	SCRATCH_REGISTER_0, \_SRR1
	stw	SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(FRAME_REGISTER)

	/* Save CTR */
	mfctr	SCRATCH_REGISTER_0
	stw	SCRATCH_REGISTER_0, EXC_CTR_OFFSET(FRAME_REGISTER)

	/* Save XER */
	mfxer	SCRATCH_REGISTER_0
	stw	SCRATCH_REGISTER_0, EXC_XER_OFFSET(FRAME_REGISTER)

	/* Save LR */
	mflr	SCRATCH_REGISTER_0
	stw	SCRATCH_REGISTER_0, EXC_LR_OFFSET(FRAME_REGISTER)

	/* Save volatile registers */
	stw	r0, GPR0_OFFSET(FRAME_REGISTER)
	stw	r3, GPR3_OFFSET(FRAME_REGISTER)
	stw	r8, GPR8_OFFSET(FRAME_REGISTER)
	stw	r9, GPR9_OFFSET(FRAME_REGISTER)
	stw	r10, GPR10_OFFSET(FRAME_REGISTER)
	stw	r11, GPR11_OFFSET(FRAME_REGISTER)
	stw	r12, GPR12_OFFSET(FRAME_REGISTER)

	/* Save read-only small data area anchor (EABI) */
	stw	r2, GPR2_OFFSET(FRAME_REGISTER)

	/* Save vector number and exception type */
	stw	VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER)

#ifndef PPC_EXC_CONFIG_BOOKE_ONLY

	/* Load MSR bit mask */
	lwz	SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)

	/*
	 * Change the MSR if necessary (MMU, RI),
	 * remember decision in non-volatile CR_MSR
	 */
	cmpwi	CR_MSR, SCRATCH_REGISTER_0, 0
	bne	CR_MSR, wrap_change_msr_\_FLVR

wrap_change_msr_done_\_FLVR:

#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */

#ifdef __ALTIVEC__
	/* Save the volatile AltiVec context into the frame's vector area */
	LA	SCRATCH_REGISTER_0, _CPU_save_altivec_volatile
	mtctr	SCRATCH_REGISTER_0
	addi	r3, FRAME_REGISTER, EXC_VEC_OFFSET
	bctrl
	/*
	 * Establish defaults for vrsave and vscr
	 */
	li	SCRATCH_REGISTER_0, 0
	mtvrsave	SCRATCH_REGISTER_0
	/*
	 * Use java/c9x mode; clear saturation bit
	 */
	vxor	0, 0, 0
	mtvscr	0
	/*
	 * Reload VECTOR_REGISTER (r4 was clobbered by the call above)
	 */
	lwz	VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER)
#endif

	/*
	 * Call high level exception handler
	 */

	/*
	 * Get the handler table index from the vector number.  We have to
	 * discard the exception type.  Take only the least significant five
	 * bits (= LAST_VALID_EXC + 1) from the vector register.  Multiply by
	 * four (= size of function pointer).
	 */
	rlwinm	SCRATCH_REGISTER_1, VECTOR_REGISTER, 2, 25, 29

	/* Load handler table address */
	LA	SCRATCH_REGISTER_0, ppc_exc_handler_table

	/* Load handler address */
	lwzx	SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1

	/*
	 * First parameter = exception frame pointer + FRAME_LINK_SPACE
	 *
	 * We add FRAME_LINK_SPACE to the frame pointer because the high level
	 * handler expects a BSP_Exception_frame structure.
	 */
	addi	r3, FRAME_REGISTER, FRAME_LINK_SPACE

	/*
	 * Second parameter = vector number (r4 is the VECTOR_REGISTER)
	 *
	 * Discard the exception type and store the vector number
	 * in the vector register.  Take only the least significant
	 * five bits (= LAST_VALID_EXC + 1).
	 */
	rlwinm	VECTOR_REGISTER, VECTOR_REGISTER, 0, 27, 31

	/* Call handler */
	mtctr	SCRATCH_REGISTER_0
	bctrl

	/* Check return value and call global handler if necessary
	 * (non-zero return = handler did not fully handle the exception)
	 */
	cmpwi	r3, 0
	bne	wrap_call_global_handler_\_FLVR

wrap_handler_done_\_FLVR:

	/* Check if exception is recoverable */
	RECOVER_CHECK_\_PRI	_FLVR=\_FLVR

	/*
	 * Depending on the exception type we do now restore the non-volatile
	 * registers or enable thread dispatching and switch back from the ISR
	 * stack.
	 */

	/* Branch for synchronous exceptions */
	bge	CR_TYPE, wrap_restore_non_volatile_regs_\_FLVR

	/*
	 * Switch back to original stack (FRAME_REGISTER == r1 if we are still
	 * on the IRQ stack).
	 */
	mr	r1, FRAME_REGISTER

	/*
	 * Check thread dispatch disable level AND lower priority locks (in
	 * CR_LOCK): ONLY if the thread dispatch disable level == 0 AND no lock
	 * is set then call ppc_exc_wrapup() which may do a context switch.  We
	 * can skip TEST_LOCK, because it has no side effects.
	 */

	/* Decrement ISR nest level and thread dispatch disable level;
	 * 'subic.' sets cr0 so we can test the dispatch level for zero
	 */
	GET_SELF_CPU_CONTROL	SCRATCH_REGISTER_2
	lwz	SCRATCH_REGISTER_0, PER_CPU_ISR_NEST_LEVEL@l(SCRATCH_REGISTER_2)
	lwz	SCRATCH_REGISTER_1, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_REGISTER_2)
	subi	SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, 1
	subic.	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, 1
	stw	SCRATCH_REGISTER_0, PER_CPU_ISR_NEST_LEVEL@l(SCRATCH_REGISTER_2)
	stw	SCRATCH_REGISTER_1, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_REGISTER_2)

	/* Branch to skip thread dispatching (dispatch level != 0) */
	bne	wrap_thread_dispatching_done_\_FLVR

	/* Test lower-priority locks (result in non-volatile CR_LOCK) */
	TEST_LOCK_\_PRI	_FLVR=\_FLVR

	/* Branch to skip thread dispatching (a lower-priority lock is set) */
	bne	CR_LOCK, wrap_thread_dispatching_done_\_FLVR

	/* Load address of ppc_exc_wrapup() */
	LA	SCRATCH_REGISTER_0, ppc_exc_wrapup

	/* First parameter = exception frame pointer + FRAME_LINK_SPACE */
	addi	r3, FRAME_REGISTER, FRAME_LINK_SPACE

	/* Call ppc_exc_wrapup() */
	mtctr	SCRATCH_REGISTER_0
	bctrl

wrap_thread_dispatching_done_\_FLVR:

#ifdef __ALTIVEC__
	/* Restore the volatile AltiVec context saved above */
	LA	SCRATCH_REGISTER_0, _CPU_load_altivec_volatile
	mtctr	SCRATCH_REGISTER_0
	addi	r3, FRAME_REGISTER, EXC_VEC_OFFSET
	bctrl
#endif

#ifndef PPC_EXC_CONFIG_BOOKE_ONLY

	/* Restore MSR?  (CR_MSR was set when the MSR was changed above) */
	bne	CR_MSR, wrap_restore_msr_\_FLVR

wrap_restore_msr_done_\_FLVR:

#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */

	/*
	 * At this point r1 is a valid exception frame pointer and
	 * FRAME_REGISTER is no longer needed.
	 */

	/* Restore frame register */
	lwz	FRAME_REGISTER, FRAME_OFFSET(r1)

	/* Restore XER and CTR */
	lwz	SCRATCH_REGISTER_0, EXC_XER_OFFSET(r1)
	lwz	SCRATCH_REGISTER_1, EXC_CTR_OFFSET(r1)
	mtxer	SCRATCH_REGISTER_0
	mtctr	SCRATCH_REGISTER_1

	/* Restore CR and LR */
	lwz	SCRATCH_REGISTER_0, EXC_CR_OFFSET(r1)
	lwz	SCRATCH_REGISTER_1, EXC_LR_OFFSET(r1)
	mtcr	SCRATCH_REGISTER_0
	mtlr	SCRATCH_REGISTER_1

	/* Restore volatile registers */
	lwz	r0, GPR0_OFFSET(r1)
	lwz	r3, GPR3_OFFSET(r1)
	lwz	r8, GPR8_OFFSET(r1)
	lwz	r9, GPR9_OFFSET(r1)
	lwz	r10, GPR10_OFFSET(r1)
	lwz	r11, GPR11_OFFSET(r1)
	lwz	r12, GPR12_OFFSET(r1)

	/* Restore read-only small data area anchor (EABI) */
	lwz	r2, GPR2_OFFSET(r1)

	/* Restore vector register */
	lwz	VECTOR_REGISTER, VECTOR_OFFSET(r1)

	/*
	 * Disable all asynchronous exceptions which can do a thread dispatch.
	 * See README.
	 */
	INTERRUPT_DISABLE	SCRATCH_REGISTER_1, SCRATCH_REGISTER_0

	/* Restore scratch registers and SRRs; the loads are interleaved with
	 * the mtspr so each scratch register is only reloaded after its last
	 * use as an SRR staging register.
	 */
	lwz	SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(r1)
	lwz	SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
	lwz	SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(r1)
	mtspr	\_SRR0, SCRATCH_REGISTER_0
	lwz	SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(r1)
	mtspr	\_SRR1, SCRATCH_REGISTER_1
	lwz	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(r1)

	/*
	 * We restore r1 from the frame rather than just popping (adding to
	 * current r1) since the exception handler might have done strange
	 * things (e.g. a debugger moving and relocating the stack).
	 */
	lwz	r1, 0(r1)

	/* Return */
	\_RFI

#ifndef PPC_EXC_CONFIG_BOOKE_ONLY

/* Out-of-line: OR the configured MSR bits into the MSR (e.g. enable MMU/RI) */
wrap_change_msr_\_FLVR:

	mfmsr	SCRATCH_REGISTER_1
	or	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
	mtmsr	SCRATCH_REGISTER_1
	msync
	isync
	b	wrap_change_msr_done_\_FLVR

/* Out-of-line: clear the configured MSR bits again before returning */
wrap_restore_msr_\_FLVR:

	lwz	SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)
	mfmsr	SCRATCH_REGISTER_1
	andc	SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
	mtmsr	SCRATCH_REGISTER_1
	msync
	isync
	b	wrap_restore_msr_done_\_FLVR

#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */

/* Out-of-line: save path for synchronous exceptions */
wrap_save_non_volatile_regs_\_FLVR:

	/* Load pristine stack pointer (back-chain word of the frame) */
	lwz	SCRATCH_REGISTER_1, 0(FRAME_REGISTER)

	/* Save small data area anchor (SYSV) */
	stw	r13, GPR13_OFFSET(FRAME_REGISTER)

	/* Save pristine stack pointer */
	stw	SCRATCH_REGISTER_1, GPR1_OFFSET(FRAME_REGISTER)

	/* r14 is the FRAME_REGISTER and will be saved elsewhere */

	/* Save non-volatile registers r15 .. r31
	 * (store-multiple is unusable with SPE, which has 64-bit GPRs)
	 */
#ifndef __SPE__
	stmw	r15, GPR15_OFFSET(FRAME_REGISTER)
#else
	stw	r15, GPR15_OFFSET(FRAME_REGISTER)
	stw	r16, GPR16_OFFSET(FRAME_REGISTER)
	stw	r17, GPR17_OFFSET(FRAME_REGISTER)
	stw	r18, GPR18_OFFSET(FRAME_REGISTER)
	stw	r19, GPR19_OFFSET(FRAME_REGISTER)
	stw	r20, GPR20_OFFSET(FRAME_REGISTER)
	stw	r21, GPR21_OFFSET(FRAME_REGISTER)
	stw	r22, GPR22_OFFSET(FRAME_REGISTER)
	stw	r23, GPR23_OFFSET(FRAME_REGISTER)
	stw	r24, GPR24_OFFSET(FRAME_REGISTER)
	stw	r25, GPR25_OFFSET(FRAME_REGISTER)
	stw	r26, GPR26_OFFSET(FRAME_REGISTER)
	stw	r27, GPR27_OFFSET(FRAME_REGISTER)
	stw	r28, GPR28_OFFSET(FRAME_REGISTER)
	stw	r29, GPR29_OFFSET(FRAME_REGISTER)
	stw	r30, GPR30_OFFSET(FRAME_REGISTER)
	stw	r31, GPR31_OFFSET(FRAME_REGISTER)
#endif

	b	wrap_disable_thread_dispatching_done_\_FLVR

/* Out-of-line: restore path for synchronous exceptions
 * (here FRAME_REGISTER == r1: synchronous exceptions never switch stacks)
 */
wrap_restore_non_volatile_regs_\_FLVR:

	/* Load stack pointer */
	lwz	SCRATCH_REGISTER_0, GPR1_OFFSET(r1)

	/* Restore small data area anchor (SYSV) */
	lwz	r13, GPR13_OFFSET(r1)

	/* r14 is the FRAME_REGISTER and will be restored elsewhere */

	/* Restore non-volatile registers r15 .. r31 */
#ifndef __SPE__
	lmw	r15, GPR15_OFFSET(r1)
#else
	lwz	r15, GPR15_OFFSET(FRAME_REGISTER)
	lwz	r16, GPR16_OFFSET(FRAME_REGISTER)
	lwz	r17, GPR17_OFFSET(FRAME_REGISTER)
	lwz	r18, GPR18_OFFSET(FRAME_REGISTER)
	lwz	r19, GPR19_OFFSET(FRAME_REGISTER)
	lwz	r20, GPR20_OFFSET(FRAME_REGISTER)
	lwz	r21, GPR21_OFFSET(FRAME_REGISTER)
	lwz	r22, GPR22_OFFSET(FRAME_REGISTER)
	lwz	r23, GPR23_OFFSET(FRAME_REGISTER)
	lwz	r24, GPR24_OFFSET(FRAME_REGISTER)
	lwz	r25, GPR25_OFFSET(FRAME_REGISTER)
	lwz	r26, GPR26_OFFSET(FRAME_REGISTER)
	lwz	r27, GPR27_OFFSET(FRAME_REGISTER)
	lwz	r28, GPR28_OFFSET(FRAME_REGISTER)
	lwz	r29, GPR29_OFFSET(FRAME_REGISTER)
	lwz	r30, GPR30_OFFSET(FRAME_REGISTER)
	lwz	r31, GPR31_OFFSET(FRAME_REGISTER)
#endif

	/* Restore stack pointer (write back-chain word of the frame) */
	stw	SCRATCH_REGISTER_0, 0(r1)

	b	wrap_thread_dispatching_done_\_FLVR

/* Out-of-line: high level handler returned non-zero */
wrap_call_global_handler_\_FLVR:

	/* First parameter = exception frame pointer + FRAME_LINK_SPACE */
	addi	r3, FRAME_REGISTER, FRAME_LINK_SPACE

#ifndef PPC_EXC_CONFIG_USE_FIXED_HANDLER

	/* Load global handler address */
	LW	SCRATCH_REGISTER_0, globalExceptHdl

	/* Check address; skip the call if no global handler is installed */
	cmpwi	SCRATCH_REGISTER_0, 0
	beq	wrap_handler_done_\_FLVR

	/* Call global handler */
	mtctr	SCRATCH_REGISTER_0
	bctrl

#else /* PPC_EXC_CONFIG_USE_FIXED_HANDLER */

	/* Call fixed global handler */
	bl	C_exception_handler

#endif /* PPC_EXC_CONFIG_USE_FIXED_HANDLER */

	b	wrap_handler_done_\_FLVR

	.endm
---|