source: rtems/cpukit/score/cpu/aarch64/aarch64-exception-default.S @ a27ba3f

Last change on this file since a27ba3f was 9951cee, checked in by Kinsey Moore <kinsey.moore@…>, on 02/15/21 at 15:05:10

bsps/aarch64: RTEMS_DEBUG stack alignment faults

Run with stack alignment faults enabled under RTEMS_DEBUG to catch any
stack misalignments early. This makes it easier to track them down
should they ever occur.

/* SPDX-License-Identifier: BSD-2-Clause */

/**
 * @file
 *
 * @ingroup RTEMSScoreCPUAArch64
 *
 * @brief Implementation of AArch64 exception vector table.
 *
 * This file implements the AArch64 exception vector table and its embedded
 * jump handlers along with the code necessary to call higher level C handlers.
 */

/*
 * Copyright (C) 2020 On-Line Applications Research Corporation (OAR)
 * Written by Kinsey Moore <kinsey.moore@oarcorp.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <rtems/asm.h>

.extern _AArch64_Exception_default

.globl  bsp_start_vector_table_begin
.globl  bsp_start_vector_table_end
.globl  bsp_start_vector_table_size
.globl  bsp_vector_table_size

.section ".text"

/*
 * This is the exception vector table and the pointers to the default
 * exception handlers. Each vector in the table has space for up to 32
 * instructions. The space of the last two instructions in each vector is used
 * for the exception handler pointer.
 *
 * The operation of all exceptions is as follows:
 * * An exception occurs
 * * A vector is chosen based on the exception type and machine state
 * * Execution begins at the chosen vector
 * * X0 and LR are pushed onto the current stack
 * * An unconditional branch and link is taken to the next instruction to get
 *   the PC
 * * The exception handler pointer (EHP) is retrieved from the current vector
 *   using the PC
 * * Branch and link to the EHP
 * * X0 and LR are popped from the current stack after returning from the EHP
 * * The exception returns to the previous execution state
 */

/*
 * TODO(kmoore) The current implementation here assumes that SP is not
 * misaligned.
 */
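/*
 * If SP were misaligned here with stack alignment checking enabled, the stp
 * in each vector prologue would itself fault and re-enter the synchronous
 * exception vector.
 */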
        .macro  JUMP_HANDLER_SHORT
/* Mask to use in BIC, lower 7 bits */
        mov x0, #0x7f
/* LR contains PC, mask off to the base of the current vector */
        bic x0, lr,     x0
/* Load address from the last word in the vector */
        ldr x0, [x0,    #0x78]
/*
 * Branch and link to the address in x0. There is no reason to save the current
 * LR since it has already been saved and the current contents are junk.
 */
        blr x0
/* Pop x0,lr from stack */
        ldp x0, lr,     [sp],   #0x10
/* Return from exception */
        eret
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        .endm

        .macro  JUMP_HANDLER
        JUMP_HANDLER_SHORT
        nop
        .endm
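
/*
 * Note: JUMP_HANDLER_SHORT is 27 instructions and JUMP_HANDLER is 28. With
 * the two-instruction vector prologue (stp, bl) and the two-instruction
 * handler pointer slot, each vector fills exactly 32 instructions (0x80
 * bytes). A vector with an extra prologue instruction must therefore use
 * JUMP_HANDLER_SHORT to stay within its slot.
 */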

        .macro  JUMP_TARGET_SP0
/* Takes up the space of 2 instructions */
#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
        .word .print_exception_dump_sp0
        .word 0x0
#else
        .dword .print_exception_dump_sp0
#endif
        .endm

        .macro  JUMP_TARGET_SPx
/* Takes up the space of 2 instructions */
#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
        .word .print_exception_dump_spx
        .word 0x0
#else
        .dword .print_exception_dump_spx
#endif
        .endm
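
/*
 * In each vector, the JUMP_TARGET_* pointer lands at offset 0x78 (30
 * instructions of 4 bytes each precede it), which is exactly where
 * JUMP_HANDLER_SHORT loads it from with "ldr x0, [x0, #0x78]".
 */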

bsp_start_vector_table_begin:
.balign 0x800
Vector_table_el3:
/*
 * The exception handler for synchronous exceptions from the current EL
 * using SP0.
 */
curr_el_sp0_sync:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_sp0_sync_get_pc      /* Get current execution address */
curr_el_sp0_sync_get_pc:                /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SP0
.balign 0x80
/* The exception handler for IRQ exceptions from the current EL using SP0. */
curr_el_sp0_irq:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_sp0_irq_get_pc       /* Get current execution address */
curr_el_sp0_irq_get_pc:                 /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SP0
.balign 0x80
/* The exception handler for FIQ exceptions from the current EL using SP0. */
curr_el_sp0_fiq:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_sp0_fiq_get_pc       /* Get current execution address */
curr_el_sp0_fiq_get_pc:                 /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SP0
.balign 0x80
/*
 * The exception handler for system error exceptions from the current EL using
 * SP0.
 */
curr_el_sp0_serror:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_sp0_serror_get_pc    /* Get current execution address */
curr_el_sp0_serror_get_pc:              /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SP0
.balign 0x80
/*
 * The exception handler for synchronous exceptions from the current EL using
 * the current SP.
 */
curr_el_spx_sync:
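/*
 * Note: clearing SCTLR_EL1 disables, among other things, the MMU, data
 * cache, and the SA/SA0 stack alignment checks, which presumably lets this
 * fatal path run even when the exception was itself a stack alignment fault
 * (see the RTEMS_DEBUG stack alignment faults change above).
 */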
        msr SCTLR_EL1, XZR
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_spx_sync_get_pc      /* Get current execution address */
curr_el_spx_sync_get_pc:                /* The current PC is now in LR */
/* Use short jump handler since this has an extra instruction to clear SCTLR */
        JUMP_HANDLER_SHORT
        JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for IRQ exceptions from the current EL using the
 * current SP.
 */
curr_el_spx_irq:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_spx_irq_get_pc       /* Get current execution address */
curr_el_spx_irq_get_pc:                 /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for FIQ exceptions from the current EL using the
 * current SP.
 */
curr_el_spx_fiq:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_spx_fiq_get_pc       /* Get current execution address */
curr_el_spx_fiq_get_pc:                 /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for system error exceptions from the current EL using
 * the current SP.
 */
curr_el_spx_serror:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl curr_el_spx_serror_get_pc    /* Get current execution address */
curr_el_spx_serror_get_pc:              /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for synchronous exceptions from a lower EL (AArch64).
 */
lower_el_aarch64_sync:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch64_sync_get_pc /* Get current execution address */
lower_el_aarch64_sync_get_pc:           /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for IRQ exceptions from a lower EL (AArch64). */
lower_el_aarch64_irq:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch64_irq_get_pc  /* Get current execution address */
lower_el_aarch64_irq_get_pc:            /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for FIQ exceptions from a lower EL (AArch64). */
lower_el_aarch64_fiq:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch64_fiq_get_pc  /* Get current execution address */
lower_el_aarch64_fiq_get_pc:            /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for system error exceptions from a lower EL
 * (AArch64).
 */
lower_el_aarch64_serror:
/* Push x0,lr on to the stack */
        stp x0, lr,     [sp, #-0x10]!
/* Get current execution address */
        bl lower_el_aarch64_serror_get_pc
lower_el_aarch64_serror_get_pc:         /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for the synchronous exception from a lower EL
 * (AArch32).
 */
lower_el_aarch32_sync:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch32_sync_get_pc /* Get current execution address */
lower_el_aarch32_sync_get_pc:           /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for the IRQ exception from a lower EL (AArch32). */
lower_el_aarch32_irq:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch32_irq_get_pc  /* Get current execution address */
lower_el_aarch32_irq_get_pc:            /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for the FIQ exception from a lower EL (AArch32). */
lower_el_aarch32_fiq:
        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
        bl lower_el_aarch32_fiq_get_pc  /* Get current execution address */
lower_el_aarch32_fiq_get_pc:            /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for the system error exception from a lower EL
 * (AArch32).
 */
lower_el_aarch32_serror:
/* Push x0,lr on to the stack */
        stp x0, lr,     [sp, #-0x10]!
/* Get current execution address */
        bl lower_el_aarch32_serror_get_pc
lower_el_aarch32_serror_get_pc:         /* The current PC is now in LR */
        JUMP_HANDLER
        JUMP_TARGET_SPx

bsp_start_vector_table_end:

        .set    bsp_start_vector_table_size, bsp_start_vector_table_end - bsp_start_vector_table_begin
        .set    bsp_vector_table_size, bsp_start_vector_table_size
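
/*
 * The table consists of 16 vectors of 0x80 bytes each, so both sizes above
 * evaluate to 0x800 bytes, matching the .balign 0x800 of the table itself.
 */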

/*
 * This involves switching a few things around. The real x0 and LR are on the
 * SPx stack and need to be retrieved, while the LR at entry contains the
 * pointer into the AArch64 vector table.
 */
.print_exception_dump_spx:
/* Switch to exception stack (SP0) */
        msr spsel, #0
/* Save space for exception context */
        sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
/*
 * Push exception vector, LR currently points into the actual exception vector
 * table
 */
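/*
 * The table is 0x800-aligned and each vector is 0x80 bytes, so bits [10:7]
 * of LR identify the vector that was taken; the and/lsr below reduce LR to a
 * vector index in the range 0-15.
 */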
        and lr, lr, #0x780
        lsr lr, lr, #7
        str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
/* Pop x0,lr from stack, saved by generic handler */
/*
 * This modifies the stack pointer back to the pre-vector-handler value, which
 * is safe because this will never return.
 */
        msr spsel, #1
        ldp x0, lr, [sp], #0x10
        msr spsel, #0
/* Save LR */
        str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
/* Push the start of the context */
        bl .push_exception_context_start
/* Save original sp in x0 for .push_exception_context_finish */
        msr spsel, #1
        mov x0, sp
        msr spsel, #0
/* Push the remainder of the context */
        bl .push_exception_context_finish
/* Save sp into x0 for handler */
        mov x0, sp
/* Jump into the handler */
        bl _AArch64_Exception_default

        /* Just in case */
        b       twiddle

.print_exception_dump_sp0:
/* Save space for exception context */
        sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
/*
 * Push exception vector, LR currently points into the actual exception vector
 * table
 */
        and lr, lr, #0x780
        lsr lr, lr, #7
        str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
/* Get x0,lr from stack, saved by generic handler */
        add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
        ldp x0, lr, [sp]
        sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
/* Save LR */
        str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
/* Push the start of the context */
        bl .push_exception_context_start
/* Save original sp in x0 for .push_exception_context_finish */
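/*
 * The original sp is the current sp plus the exception frame plus the 0x10
 * bytes of x0/lr pushed by the vector prologue.
 */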
        add x0, sp, #(AARCH64_EXCEPTION_FRAME_SIZE + 0x10)
/* Push the remainder of the context */
        bl .push_exception_context_finish
/* Save sp (exception frame) into x0 for handler */
        mov x0, sp
/* Jump into the handler */
        bl _AArch64_Exception_default

        /* Just in case */
twiddle:
        b       twiddle

/* Assumes SP is at the base of the context and LR has already been pushed */
.push_exception_context_start:
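/*
 * The x0 stored at offset 0 is the pre-exception value: both callers restore
 * x0 and LR from the vector prologue's stack push before branching here.
 */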
/* Push x0-x29(fp) */
        stp x0,  x1,  [sp, #0x00]
        stp x2,  x3,  [sp, #0x10]
        stp x4,  x5,  [sp, #0x20]
        stp x6,  x7,  [sp, #0x30]
        stp x8,  x9,  [sp, #0x40]
        stp x10, x11, [sp, #0x50]
        stp x12, x13, [sp, #0x60]
        stp x14, x15, [sp, #0x70]
        stp x16, x17, [sp, #0x80]
        stp x18, x19, [sp, #0x90]
        stp x20, x21, [sp, #0xa0]
        stp x22, x23, [sp, #0xb0]
        stp x24, x25, [sp, #0xc0]
        stp x26, x27, [sp, #0xd0]
        stp x28, x29, [sp, #0xe0]
        ret

/* Expects original SP to be stored in x0 */
.push_exception_context_finish:
/* Get exception LR for PC */
        mrs x1, ELR_EL1
/* Push sp and pc */
        stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET]
/* Get daif and spsr */
        mrs x0, DAIF
        mrs x1, SPSR_EL1
/* Push daif and spsr */
        stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET]
/* Get ESR and FAR */
        mrs x0, ESR_EL1
        mrs x1, FAR_EL1
/* Push ESR and FAR */
        stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET]
/* Get fpsr and fpcr */
        mrs x0, FPSR
        mrs x1, FPCR
/* Push fpsr and fpcr */
        stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET]
/* Push VFP registers */
        stp q0,  q1,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x000)]
        stp q2,  q3,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x020)]
        stp q4,  q5,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x040)]
        stp q6,  q7,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x060)]
        stp q8,  q9,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x080)]
        stp q10, q11, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0a0)]
        stp q12, q13, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0c0)]
        stp q14, q15, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0e0)]
        stp q16, q17, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x100)]
        stp q18, q19, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x120)]
        stp q20, q21, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x140)]
        stp q22, q23, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x160)]
        stp q24, q25, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x180)]
        stp q26, q27, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1a0)]
        stp q28, q29, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1c0)]
        stp q30, q31, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1e0)]
/* Done, return to exception handler */
        ret

/*
 * Apply the exception frame to the current register status; SP points to the
 * exception frame.
 */
.pop_exception_context_and_ret:
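/*
 * x2 and x3 serve as scratch registers here; their saved values are reloaded
 * from the frame below, and x0/x1 are restored last because they are still
 * needed to pop the saved sp and ELR.
 */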
/* Pop daif and spsr */
        ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET]
/* Restore daif and spsr */
        msr DAIF, x2
        msr SPSR_EL1, x3
/* Pop ESR and FAR */
        ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET]
/* Restore ESR and FAR */
        msr ESR_EL1, x2
        msr FAR_EL1, x3
/* Pop fpsr and fpcr */
        ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET]
/* Restore fpsr and fpcr */
        msr FPSR, x2
        msr FPCR, x3
/* Restore LR */
        ldr lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
/* Pop VFP registers */
        ldp q0,  q1,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x000)]
        ldp q2,  q3,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x020)]
        ldp q4,  q5,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x040)]
        ldp q6,  q7,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x060)]
        ldp q8,  q9,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x080)]
        ldp q10, q11, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0a0)]
        ldp q12, q13, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0c0)]
        ldp q14, q15, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0e0)]
        ldp q16, q17, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x100)]
        ldp q18, q19, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x120)]
        ldp q20, q21, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x140)]
        ldp q22, q23, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x160)]
        ldp q24, q25, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x180)]
        ldp q26, q27, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1a0)]
        ldp q28, q29, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1c0)]
        ldp q30, q31, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1e0)]
/* Pop x0-x29(fp) */
        ldp x2,  x3,  [sp, #0x10]
        ldp x4,  x5,  [sp, #0x20]
        ldp x6,  x7,  [sp, #0x30]
        ldp x8,  x9,  [sp, #0x40]
        ldp x10, x11, [sp, #0x50]
        ldp x12, x13, [sp, #0x60]
        ldp x14, x15, [sp, #0x70]
        ldp x16, x17, [sp, #0x80]
        ldp x18, x19, [sp, #0x90]
        ldp x20, x21, [sp, #0xa0]
        ldp x22, x23, [sp, #0xb0]
        ldp x24, x25, [sp, #0xc0]
        ldp x26, x27, [sp, #0xd0]
        ldp x28, x29, [sp, #0xe0]
/* Pop sp (ignored, since sp is adjusted past the frame below anyway) and ELR */
        ldp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET]
/* Restore exception LR */
        msr ELR_EL1, x1
        ldp x0,  x1,  [sp, #0x00]
        add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE

/* We must clear reservations to ensure consistency with atomic operations */
        clrex

        ret