source: rtems/cpukit/score/cpu/aarch64/aarch64-exception-default.S @ 1a7afb5

Last change on this file since 1a7afb5 was d188e6e8, checked in by Kinsey Moore <kinsey.moore@oarcorp.com>, on 12/08/20 at 15:11:34

cpukit/aarch64: Add explanation of exception flow

  • Property mode set to 100644
File size: 16.5 KB
Line 
1/* SPDX-License-Identifier: BSD-2-Clause */
2
3/**
4 * @file
5 *
6 * @ingroup RTEMSScoreCPUAArch64
7 *
8 * @brief Implementation of AArch64 exception vector table.
9 *
10 * This file implements the AArch64 exception vector table and its embedded
11 * jump handlers along with the code necessary to call higher level C handlers.
12 */
13
14/*
15 * Copyright (C) 2020 On-Line Applications Research Corporation (OAR)
16 * Written by Kinsey Moore <kinsey.moore@oarcorp.com>
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions
20 * are met:
21 * 1. Redistributions of source code must retain the above copyright
22 *    notice, this list of conditions and the following disclaimer.
23 * 2. Redistributions in binary form must reproduce the above copyright
24 *    notice, this list of conditions and the following disclaimer in the
25 *    documentation and/or other materials provided with the distribution.
26 *
27 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
28 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
29 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
30 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
31 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
32 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
33 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
34 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
35 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
36 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
37 * POSSIBILITY OF SUCH DAMAGE.
38 */
39
40#ifdef HAVE_CONFIG_H
41#include "config.h"
42#endif
43
44#include <rtems/asm.h>
45
46.extern _AArch64_Exception_default
47
48.globl  bsp_start_vector_table_begin
49.globl  bsp_start_vector_table_end
50.globl  bsp_start_vector_table_size
51.globl  bsp_vector_table_size
52
53.section ".text"
54
55/*
56 * This is the exception vector table and the pointers to the default
57 * exceptions handlers. Each vector in the table has space for up to 32
58 * instructions. The space of the last two instructions in each vector is used
59 * for the exception handler pointer.
60 *
61 * The operation of all exceptions is as follows:
62 * * An exception occurs
63 * * A vector is chosen based on the exception type and machine state
64 * * Execution begins at the chosen vector
65 * * X0 and LR are pushed onto the current stack
66 * * An unconditional branch and link is taken to the next instruction to get
67 *   the PC
68 * * The exception handler pointer (EHP) is retrieved from the current vector using
69 *   the PC
70 * * Branch and link to the EHP
71 * * X0 and LR are popped from the current stack after returning from the EHP
72 * * The exception returns to the previous execution state
73 */
74
75/*
76 * TODO(kmoore) The current implementation here assumes that SP is not
77 * misaligned.
78 */
79        .macro  JUMP_HANDLER
/*
 * Common tail expanded into every exception vector: recover the 0x80-byte
 * aligned base of the current vector from LR, load the exception handler
 * pointer (EHP) stored in the last two instruction slots of the vector
 * (offset 0x78), and branch-and-link to it. Assumes the vector entry has
 * already pushed x0/lr and executed a "bl" so that LR holds the PC inside
 * this vector.
 */
80/* Mask to use in BIC, lower 7 bits */
81        mov x0, #0x7f
82/* LR contains PC, mask off to the base of the current vector */
83        bic x0, lr,     x0
84/* Load address from the last word in the vector */
85        ldr x0, [x0,    #0x78]
86/*
87 * Branch and link to the address in x0. There is no reason to save the current
88 * LR since it has already been saved and the current contents are junk.
89 */
90        blr x0
91/* Pop x0,lr from stack */
92        ldp x0, lr,     [sp],   #0x10
93/* Return from exception */
94        eret
/*
 * Padding: each vector is 32 instruction slots (0x80 bytes). The 2
 * instructions in the vector entry plus the 6 above leave 22 nop slots
 * before the final 2 slots that hold the handler pointer (JUMP_TARGET_*).
 */
95        nop
96        nop
97        nop
98        nop
99        nop
100        nop
101        nop
102        nop
103        nop
104        nop
105        nop
106        nop
107        nop
108        nop
109        nop
110        nop
111        nop
112        nop
113        nop
114        nop
115        nop
116        nop
117        .endm
118
119        .macro  JUMP_TARGET_SP0
/*
 * Emits the exception handler pointer for SP0 vectors into the last two
 * instruction slots of the vector; JUMP_HANDLER reads it at offset 0x78.
 * Under ILP32 the pointer is a 32-bit word padded with a zero word so the
 * slot size stays 8 bytes.
 */
120/* Takes up the space of 2 instructions */
121#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
122        .word .print_exception_dump_sp0
123        .word 0x0
124#else
125        .dword .print_exception_dump_sp0
126#endif
127        .endm
128
129        .macro  JUMP_TARGET_SPx
/*
 * Emits the exception handler pointer for SPx (and lower-EL) vectors into
 * the last two instruction slots of the vector; JUMP_HANDLER reads it at
 * offset 0x78. Under ILP32 the pointer is a 32-bit word padded with a zero
 * word so the slot size stays 8 bytes.
 */
130/* Takes up the space of 2 instructions */
131#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
132        .word .print_exception_dump_spx
133        .word 0x0
134#else
135        .dword .print_exception_dump_spx
136#endif
137        .endm
138
139bsp_start_vector_table_begin:
140.balign 0x800
141Vector_table_el3:
/*
 * Table base is aligned to 0x800 bytes as required for a vector base
 * address register, with each of the 16 entries below padded to 0x80 bytes
 * by ".balign 0x80".
 * NOTE(review): the label says EL3, but handlers elsewhere in this file
 * access *_EL1 system registers — confirm which EL installs this table.
 */
142/*
143 * The exception handler for synchronous exceptions from the current EL
144 * using SP0.
145 */
146curr_el_sp0_sync:
147        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
148        bl curr_el_sp0_sync_get_pc      /* Get current execution address */
149curr_el_sp0_sync_get_pc:                /* The current PC is now in LR */
150        JUMP_HANDLER
151        JUMP_TARGET_SP0
152.balign 0x80
153/* The exception handler for IRQ exceptions from the current EL using SP0. */
154curr_el_sp0_irq:
155        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
156        bl curr_el_sp0_irq_get_pc       /* Get current execution address */
157curr_el_sp0_irq_get_pc:                 /* The current PC is now in LR */
158        JUMP_HANDLER
159        JUMP_TARGET_SP0
160.balign 0x80
161/* The exception handler for FIQ exceptions from the current EL using SP0. */
162curr_el_sp0_fiq:
163        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
164        bl curr_el_sp0_fiq_get_pc       /* Get current execution address */
165curr_el_sp0_fiq_get_pc:                 /* The current PC is now in LR */
166        JUMP_HANDLER
167        JUMP_TARGET_SP0
168.balign 0x80
169/*
170 * The exception handler for system error exceptions from the current EL using
171 * SP0.
172 */
173curr_el_sp0_serror:
174        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
175        bl curr_el_sp0_serror_get_pc    /* Get current execution address */
176curr_el_sp0_serror_get_pc:              /* The current PC is now in LR */
177        JUMP_HANDLER
178        JUMP_TARGET_SP0
179.balign 0x80
/*
 * Current-EL vectors taken while SP_ELx is selected. These use
 * JUMP_TARGET_SPx, so JUMP_HANDLER dispatches to .print_exception_dump_spx.
 */
180/*
181 * The exception handler for synchronous exceptions from the current EL using
182 * the current SP.
183 */
184curr_el_spx_sync:
185        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
186        bl curr_el_spx_sync_get_pc      /* Get current execution address */
187curr_el_spx_sync_get_pc:                /* The current PC is now in LR */
188        JUMP_HANDLER
189        JUMP_TARGET_SPx
190.balign 0x80
191/*
192 * The exception handler for IRQ exceptions from the current EL using the
193 * current SP.
194 */
195curr_el_spx_irq:
196        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
197        bl curr_el_spx_irq_get_pc       /* Get current execution address */
198curr_el_spx_irq_get_pc:                 /* The current PC is now in LR */
199        JUMP_HANDLER
200        JUMP_TARGET_SPx
201.balign 0x80
202/*
203 * The exception handler for FIQ exceptions from the current EL using the
204 * current SP.
205 */
206curr_el_spx_fiq:
207        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
208        bl curr_el_spx_fiq_get_pc       /* Get current execution address */
209curr_el_spx_fiq_get_pc:                 /* The current PC is now in LR */
210        JUMP_HANDLER
211        JUMP_TARGET_SPx
212.balign 0x80
213/*
214 * The exception handler for system error exceptions from the current EL using
215 * the current SP.
216 */
217curr_el_spx_serror:
218        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
219        bl curr_el_spx_serror_get_pc    /* Get current execution address */
220curr_el_spx_serror_get_pc:              /* The current PC is now in LR */
221        JUMP_HANDLER
222        JUMP_TARGET_SPx
223.balign 0x80
/*
 * Lower-EL (AArch64) vectors. These also use JUMP_TARGET_SPx, so they
 * dispatch to .print_exception_dump_spx.
 */
224/*
225 * The exception handler for synchronous exceptions from a lower EL (AArch64).
226 */
227lower_el_aarch64_sync:
228        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
229        bl lower_el_aarch64_sync_get_pc /* Get current execution address */
230lower_el_aarch64_sync_get_pc:           /* The current PC is now in LR */
231        JUMP_HANDLER
232        JUMP_TARGET_SPx
233.balign 0x80
234/* The exception handler for IRQ exceptions from a lower EL (AArch64). */
235lower_el_aarch64_irq:
236        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
237        bl lower_el_aarch64_irq_get_pc  /* Get current execution address */
238lower_el_aarch64_irq_get_pc:            /* The current PC is now in LR */
239        JUMP_HANDLER
240        JUMP_TARGET_SPx
241.balign 0x80
242/* The exception handler for FIQ exceptions from a lower EL (AArch64). */
243lower_el_aarch64_fiq:
244        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
245        bl lower_el_aarch64_fiq_get_pc  /* Get current execution address */
246lower_el_aarch64_fiq_get_pc:            /* The current PC is now in LR */
247        JUMP_HANDLER
248        JUMP_TARGET_SPx
249.balign 0x80
250/*
251 * The exception handler for system error exceptions from a lower EL (AArch64).
252 */
253lower_el_aarch64_serror:
254/* Push x0,lr on to the stack */
255        stp x0, lr,     [sp, #-0x10]!
256/* Get current execution address */
257        bl lower_el_aarch64_serror_get_pc
258lower_el_aarch64_serror_get_pc:         /* The current PC is now in LR */
259        JUMP_HANDLER
260        JUMP_TARGET_SPx
261.balign 0x80
/*
 * Lower-EL (AArch32) vectors for sync/IRQ/FIQ. These also use
 * JUMP_TARGET_SPx, so they dispatch to .print_exception_dump_spx.
 */
262/*
263 * The exception handler for the synchronous exception from a lower EL (AArch32).
264 */
265lower_el_aarch32_sync:
266        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
267        bl lower_el_aarch32_sync_get_pc /* Get current execution address */
268lower_el_aarch32_sync_get_pc:           /* The current PC is now in LR */
269        JUMP_HANDLER
270        JUMP_TARGET_SPx
271.balign 0x80
272/* The exception handler for the IRQ exception from a lower EL (AArch32). */
273lower_el_aarch32_irq:
274        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
275        bl lower_el_aarch32_irq_get_pc  /* Get current execution address */
276lower_el_aarch32_irq_get_pc:            /* The current PC is now in LR */
277        JUMP_HANDLER
278        JUMP_TARGET_SPx
279.balign 0x80
280/* The exception handler for the FIQ exception from a lower EL (AArch32). */
281lower_el_aarch32_fiq:
282        stp x0, lr,     [sp, #-0x10]!   /* Push x0,lr on to the stack */
283        bl lower_el_aarch32_fiq_get_pc  /* Get current execution address */
284lower_el_aarch32_fiq_get_pc:            /* The current PC is now in LR */
285        JUMP_HANDLER
286        JUMP_TARGET_SPx
287.balign 0x80
288/*
289 * The exception handler for the system error exception from a lower EL
290 * (AArch32). Entry sequence matches every other vector: push x0/lr, load
291 * the PC into LR via "bl", then let JUMP_HANDLER dispatch through the
292 * pointer emitted by JUMP_TARGET_SPx.
 */
292lower_el_aarch32_serror:
293/* Push x0,lr on to the stack */
294        stp x0, lr,     [sp, #-0x10]!
295/* Get current execution address */
296        bl lower_el_aarch32_serror_get_pc
/*
 * Fixed: the label previously had stray whitespace before the colon
 * ("..._get_pc  :"); GNU as defines a label as a symbol immediately
 * followed by a colon, and every other label in this file follows that
 * form.
 */
297lower_el_aarch32_serror_get_pc:         /* The current PC is now in LR */
298        JUMP_HANDLER
299        JUMP_TARGET_SPx
300
301bsp_start_vector_table_end:
302
303        .set    bsp_start_vector_table_size, bsp_start_vector_table_end - bsp_start_vector_table_begin
304        .set    bsp_vector_table_size, bsp_start_vector_table_size
305
306/*
307 * This involves switching a few things around. The real x0 and lr are on SPx
308 * and need to be retrieved while the lr upon entry contains the pointer into
309 * the AArch64 vector table
310 */
311.print_exception_dump_spx:
312/* Switch to exception stack (SP0) */
313        msr spsel, #0
314/* Save space for exception context */
315        sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
316/*
317 * Push exception vector, LR currently points into the actual exception vector
318 * table
319 */
/*
 * Bits [10:7] of the offset within the 0x800-byte table select one of the
 * 16 vectors, so (lr & 0x780) >> 7 yields the vector index 0..15.
 */
320        and lr, lr, #0x780
321        lsr lr, lr, #7
322        str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
323/* Pop x0,lr from stack, saved by generic handler */
324/*
325 * This modifies the stack pointer back to the pre-vector-handler value which is
326 * safe because this will never return
327 */
/* spsel=1 selects SP_ELx (where the vector pushed x0/lr), spsel=0 SP_EL0 */
328        msr spsel, #1
329        ldp x0, lr, [sp], #0x10
330        msr spsel, #0
331/* Save LR */
332        str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
333/* Push the start of the context */
334        bl .push_exception_context_start
335/* Save original sp in x0 for .push_exception_context_finish */
336        msr spsel, #1
337        mov x0, sp
338        msr spsel, #0
339/* Push the remainder of the context */
340        bl .push_exception_context_finish
341/* Save sp into x0 for handler */
342        mov x0, sp
343/* Jump into the handler */
344        bl _AArch64_Exception_default
345
346        /* Just in case */
347        b       twiddle
348
/*
 * Handler for vectors taken while SP0 was already selected: no stack-pointer
 * switch is needed, but the x0/lr pair pushed by the vector entry sits
 * directly above the newly reserved exception frame on the same stack.
 */
349.print_exception_dump_sp0:
350/* Save space for exception context */
351        sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
352/*
353 * Push exception vector, LR currently points into the actual exception vector
354 */
/* (lr & 0x780) >> 7 yields the vector index 0..15 within the table */
355        and lr, lr, #0x780
356        lsr lr, lr, #7
357        str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
358/* Get x0,lr from stack, saved by generic handler */
/* Temporarily step over the frame to reach the pair pushed by the vector */
359        add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
360        ldp x0, lr, [sp]
361        sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
362/* Save LR */
363        str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
364/* Push the start of the context */
365        bl .push_exception_context_start
366/* Save original sp in x0 for .push_exception_context_finish */
/* Original SP = above the frame plus the 0x10-byte x0/lr pair */
367        add x0, sp, #(AARCH64_EXCEPTION_FRAME_SIZE + 0x10)
368/* Push the remainder of the context */
369        bl .push_exception_context_finish
370/* Save sp (exception frame) into x0 for handler */
371        mov x0, sp
372/* Jump into the handler */
373        bl _AArch64_Exception_default
374
375        /* Just in case */
376twiddle:
377        b       twiddle
378
379/* Assumes SP is at the base of the context and LR has already been pushed */
/*
 * Stores x0-x29 at offsets 0x00-0xe8 of the exception frame. x0 must still
 * hold the value it had at exception entry (restored by the caller before
 * this is reached).
 */
380.push_exception_context_start:
381/* Push x0-x29(fp) */
382        stp x0,  x1,  [sp, #0x00]
383        stp x2,  x3,  [sp, #0x10]
384        stp x4,  x5,  [sp, #0x20]
385        stp x6,  x7,  [sp, #0x30]
386        stp x8,  x9,  [sp, #0x40]
387        stp x10, x11, [sp, #0x50]
388        stp x12, x13, [sp, #0x60]
389        stp x14, x15, [sp, #0x70]
390        stp x16, x17, [sp, #0x80]
391        stp x18, x19, [sp, #0x90]
392        stp x20, x21, [sp, #0xa0]
393        stp x22, x23, [sp, #0xb0]
394        stp x24, x25, [sp, #0xc0]
395        stp x26, x27, [sp, #0xd0]
396        stp x28, x29, [sp, #0xe0]
397        ret
398
399/* Expects original SP to be stored in x0 */
/*
 * Completes the exception frame: saves SP/ELR, DAIF/SPSR, ESR/FAR,
 * FPSR/FPCR and the full q0-q31 VFP register file. Clobbers x0 and x1
 * (their real values were already stored by .push_exception_context_start).
 */
400.push_exception_context_finish:
401/* Get exception LR for PC */
402        mrs x1, ELR_EL1
403/* Push sp and pc */
404        stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET]
405/* Get daif and spsr */
406        mrs x0, DAIF
407        mrs x1, SPSR_EL1
408/* Push daif and spsr */
409        stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET]
410/* Get ESR and FAR */
411        mrs x0, ESR_EL1
412        mrs x1, FAR_EL1
413/* Push ESR and FAR */
414        stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET]
415/* Get fpsr and fpcr */
416        mrs x0, FPSR
417        mrs x1, FPCR
418/* Push fpsr and fpcr */
419        stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET]
420/* Push VFP registers */
421        stp q0,  q1,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x000)]
422        stp q2,  q3,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x020)]
423        stp q4,  q5,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x040)]
424        stp q6,  q7,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x060)]
425        stp q8,  q9,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x080)]
426        stp q10, q11, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0a0)]
427        stp q12, q13, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0c0)]
428        stp q14, q15, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0e0)]
429        stp q16, q17, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x100)]
430        stp q18, q19, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x120)]
431        stp q20, q21, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x140)]
432        stp q22, q23, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x160)]
433        stp q24, q25, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x180)]
434        stp q26, q27, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1a0)]
435        stp q28, q29, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1c0)]
436        stp q30, q31, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1e0)]
437/* Done, return to exception handler */
438        ret
439
440/*
441 * Apply the exception frame to the current register status, SP points to the EF
442 */
/*
 * Inverse of the push helpers: restores system registers, VFP registers and
 * x0-x29 from the frame, then discards the frame and returns. x2/x3 are used
 * as scratch first and re-loaded from the frame afterwards; x0/x1 are
 * restored last for the same reason.
 * NOTE(review): nothing in this file branches here — presumably referenced
 * from elsewhere in the port; confirm before removing.
 */
443.pop_exception_context_and_ret:
444/* Pop daif and spsr */
445        ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET]
446/* Restore daif and spsr */
447        msr DAIF, x2
448        msr SPSR_EL1, x3
449/* Pop ESR and FAR */
450        ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET]
451/* Restore ESR and FAR */
452        msr ESR_EL1, x2
453        msr FAR_EL1, x3
454/* Pop fpsr and fpcr */
455        ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET]
456/* Restore fpsr and fpcr */
457        msr FPSR, x2
458        msr FPCR, x3
459/* Restore LR */
460        ldr lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
461/* Pop VFP registers */
462        ldp q0,  q1,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x000)]
463        ldp q2,  q3,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x020)]
464        ldp q4,  q5,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x040)]
465        ldp q6,  q7,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x060)]
466        ldp q8,  q9,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x080)]
467        ldp q10, q11, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0a0)]
468        ldp q12, q13, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0c0)]
469        ldp q14, q15, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0e0)]
470        ldp q16, q17, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x100)]
471        ldp q18, q19, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x120)]
472        ldp q20, q21, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x140)]
473        ldp q22, q23, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x160)]
474        ldp q24, q25, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x180)]
475        ldp q26, q27, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1a0)]
476        ldp q28, q29, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1c0)]
477        ldp q30, q31, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1e0)]
478/* Pop x0-x29(fp) */
479        ldp x2,  x3,  [sp, #0x10]
480        ldp x4,  x5,  [sp, #0x20]
481        ldp x6,  x7,  [sp, #0x30]
482        ldp x8,  x9,  [sp, #0x40]
483        ldp x10, x11, [sp, #0x50]
484        ldp x12, x13, [sp, #0x60]
485        ldp x14, x15, [sp, #0x70]
486        ldp x16, x17, [sp, #0x80]
487        ldp x18, x19, [sp, #0x90]
488        ldp x20, x21, [sp, #0xa0]
489        ldp x22, x23, [sp, #0xb0]
490        ldp x24, x25, [sp, #0xc0]
491        ldp x26, x27, [sp, #0xd0]
492        ldp x28, x29, [sp, #0xe0]
493/* Pop sp (ignored since sp should be shortly restored anyway) and ELR */
494        ldp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET]
495/* Restore exception LR */
496        msr ELR_EL1, x1
/* x0/x1 restored last: they were used as scratch just above */
497        ldp x0,  x1,  [sp, #0x00]
498        add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
499
500/* We must clear reservations to ensure consistency with atomic operations */
501        clrex
502
503        ret
Note: See TracBrowser for help on using the repository browser.