Changeset ea7b1b7 in rtems


Ignore:
Timestamp:
01/06/22 19:03:04 (6 months ago)
Author:
Gedare Bloom <gedare@…>
Branches:
master
Children:
63d27156
Parents:
37377b0
git-author:
Gedare Bloom <gedare@…> (01/06/22 19:03:04)
git-committer:
Gedare Bloom <gedare@…> (01/12/22 16:00:19)
Message:

bsps/aarch64: refactor register init and hooks

Location:
bsps/aarch64
Files:
2 edited

Legend:

Unmodified
Added
Removed
  • bsps/aarch64/shared/start/start.S

    r37377b0 rea7b1b7  
    56 56  mov x6, x2    /* physical address of ATAGs or DTB */
    57 57#else /* BSP_START_NEEDS_REGISTER_INITIALIZATION */
     58  /*
     59   * This block is dead code. No aarch64 targets require this. It might be
     60   * needed for hardware simulations or in future processor variants with
     61   * lock-step cores.
     62   */
    58 63  mov x0, XZR
    59 64  mov x1, XZR
     
    88 93  mov x30, XZR
    89 94#ifdef AARCH64_MULTILIB_VFP
    90 #endif
    91 #endif
    92 
    93   /* Initialize SCTLR_EL1 */
    94   mov x0, XZR
    95 #if defined(RTEMS_DEBUG)
    96   /* Enable Stack alignment checking */
    97   orr x0, x0, #(1<<3)
    98 #endif
    99   msr SCTLR_EL1, x0
    100 
    101   mrs x0, CurrentEL
    102   cmp x0, #(1<<2)
    103   b.eq _el1_start
    104   cmp x0, #(2<<2)
    105   b.eq _el2_start
    106 
    107 _el3_start:
    108   /* Drop from EL3 to EL2 */
    109 
    110   /* Initialize HCR_EL2 and SCTLR_EL2 */
    111   msr HCR_EL2, XZR
    112   msr SCTLR_EL2, XZR
    113   /* Set EL2 Execution state via SCR_EL3 */
    114   mrs x0, SCR_EL3
    115   /* Set EL2 to AArch64 */
    116   orr x0, x0, #(1<<10)
    117 #ifdef AARCH64_IS_NONSECURE
    118   /* Set EL1 to NS */
    119   orr x0, x0, #1
    120 #endif
    121   msr SCR_EL3, x0
    122 
    123   /* set EL2h mode for eret */
    124 #ifdef AARCH64_IS_NONSECURE
    125   mov x0, #0b01001
    126 #else
    127   mov x0, #0b00101
    128 #endif
    129 
    130   msr SPSR_EL3, x0
    131 
    132   /* Set EL2 entry point */
    133 #ifdef AARCH64_IS_NONSECURE
    134   adr x0, _el2_start
    135 #else
    136   adr x0, _el1_start
    137 #endif
    138   msr ELR_EL3, x0
    139   eret
    140 
    141 _el2_start:
    142   /* Drop from EL2 to EL1 */
    143 
    144   /* Configure HCR_EL2 */
    145   mrs x0, HCR_EL2
    146   /* Set EL1 Execution state to AArch64 */
    147   orr x0, x0, #(1<<31)
    148   /* Disable ID traps */
    149   bic x0, x0, #(1<<15)
    150   bic x0, x0, #(1<<16)
    151   bic x0, x0, #(1<<17)
    152   bic x0, x0, #(1<<18)
    153   msr HCR_EL2, x0
    154 
    155   /* Set to EL1h mode for eret */
    156   mov x0, #0b00101
    157   msr SPSR_EL2, x0
    158 
    159   /* Set EL1 entry point */
    160   adr x0, _el1_start
    161   msr ELR_EL2, x0
    162   eret
    163 
    164 _el1_start:
    165 
    166 #ifdef RTEMS_SMP
    167   /* Read MPIDR and get current processor index */
    168   mrs x7, mpidr_el1
    169   and x7, x7, #0xff
    170 #endif
    171 
    172 #ifdef RTEMS_SMP
    173   /*
    174    * Get current per-CPU control and store it in PL1 only Thread ID
    175    * Register (TPIDR_EL1).
    176    */
    177 #ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
    178   ldr w1, =_Per_CPU_Information
    179 #else
    180   ldr x1, =_Per_CPU_Information
    181 #endif
    182   add x1, x1, x7, lsl #PER_CPU_CONTROL_SIZE_LOG2
    183   msr TPIDR_EL1, x1
    184 
    185 #endif
    186 
    187   /* Calculate interrupt stack area end for current processor */
    188 #ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
    189   ldr w1, =_ISR_Stack_size
    190 #else
    191   ldr x1, =_ISR_Stack_size
    192 #endif
    193 #ifdef RTEMS_SMP
    194   add x3, x7, #1
    195   mul x1, x1, x3
    196 #endif
    197 #ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
    198   ldr w2, =_ISR_Stack_area_begin
    199 #else
    200   ldr x2, =_ISR_Stack_area_begin
    201 #endif
    202   add x3, x1, x2
    203 
    204   /* Disable interrupts and debug */
    205   msr DAIFSet, #0xa
    206 
    207 #ifdef BSP_START_NEEDS_REGISTER_INITIALIZATION
    208   mov x8, XZR
    209   mov x9, XZR
    210   mov x10, XZR
    211   mov x11, XZR
    212   mov x12, XZR
    213   mov x13, XZR
    214   mov x14, XZR
    215   mov x15, XZR
    216 #endif
    217 
    218   /*
    219    * SPx: the stack pointer corresponding to the current exception level
    220    * Normal operation for RTEMS on AArch64 uses SPx and runs on EL1
    221    * Exception operation (synchronous errors, IRQ, FIQ, System Errors) uses SP0
    222   */
    223 #ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
    224   ldr w1, =bsp_stack_exception_size
    225 #else
    226   ldr x1, =bsp_stack_exception_size
    227 #endif
    228   /* Switch to SP0 and set exception stack */
    229   msr spsel, #0
    230   mov sp, x3
    231   /* Switch back to SPx for normal operation */
    232   msr spsel, #1
    233   sub x3, x3, x1
    234 
    235   /* Set SP1 stack used for normal operation */
    236   mov sp, x3
    237 
    238   /* Stay in EL1 mode */
    239 
    240 #ifdef AARCH64_MULTILIB_VFP
    241 #ifdef AARCH64_MULTILIB_HAS_CPACR
    242   /* Read CPACR */
    243   mrs x0, CPACR_EL1
    244 
    245   /* Enable EL1 access permissions for CP10 */
    246   orr x0, x0, #(1 << 20)
    247 
    248   /* Write CPACR */
    249   msr CPACR_EL1, x0
    250   isb
    251 #endif
    252 
    253   /* FPU does not need to be enabled on AArch64 */
    254 
    255 #ifdef BSP_START_NEEDS_REGISTER_INITIALIZATION
    256   mov x0, #0
    257 95  mov CPTR_EL3, XZR
    258 96  mov CPTR_EL2, XZR
     
    289 127  mov d30, XZR
    290 128  mov d31, XZR
     129#endif /* AARCH64_MULTILIB_VFP */
    291 130#endif /* BSP_START_NEEDS_REGISTER_INITIALIZATION */
     131
     132  /* Initialize SCTLR_EL1 */
     133  mov x0, XZR
     134#if defined(RTEMS_DEBUG)
     135  /* Enable Stack alignment checking */
     136  orr x0, x0, #(1<<3)
     137#endif
     138  msr SCTLR_EL1, x0
     139
     140  mrs x0, CurrentEL
     141  cmp x0, #(1<<2)
     142  b.eq _el1_start
     143  cmp x0, #(2<<2)
     144  b.eq _el2_start
     145
     146_el3_start:
     147  /* Drop from EL3 to EL2 */
     148
     149  /* Initialize HCR_EL2 and SCTLR_EL2 */
     150  msr HCR_EL2, XZR
     151  msr SCTLR_EL2, XZR
     152  /* Set EL2 Execution state via SCR_EL3 */
     153  mrs x0, SCR_EL3
     154  /* Set EL2 to AArch64 */
     155  orr x0, x0, #(1<<10)
     156#ifdef AARCH64_IS_NONSECURE
     157  /* Set EL1 to NS */
     158  orr x0, x0, #1
     159#endif
     160  msr SCR_EL3, x0
     161
     162  /* set EL2h mode for eret */
     163#ifdef AARCH64_IS_NONSECURE
     164  mov x0, #0b01001
     165#else
     166  mov x0, #0b00101
     167#endif
     168
     169  msr SPSR_EL3, x0
     170
     171  /* Set EL2 entry point */
     172#ifdef AARCH64_IS_NONSECURE
     173  adr x0, _el2_start
     174#else
     175  adr x0, _el1_start
     176#endif
     177  msr ELR_EL3, x0
     178  eret
     179
     180_el2_start:
     181  /* Drop from EL2 to EL1 */
     182
     183  /* Configure HCR_EL2 */
     184  mrs x0, HCR_EL2
     185  /* Set EL1 Execution state to AArch64 */
     186  orr x0, x0, #(1<<31)
     187  /* Disable ID traps */
     188  bic x0, x0, #(1<<15)
     189  bic x0, x0, #(1<<16)
     190  bic x0, x0, #(1<<17)
     191  bic x0, x0, #(1<<18)
     192  msr HCR_EL2, x0
     193
     194  /* Set to EL1h mode for eret */
     195  mov x0, #0b00101
     196  msr SPSR_EL2, x0
     197
     198  /* Set EL1 entry point */
     199  adr x0, _el1_start
     200  msr ELR_EL2, x0
     201  eret
     202
     203_el1_start:
     204
     205#ifdef RTEMS_SMP
     206  /* Read MPIDR and get current processor index */
     207  mrs x7, mpidr_el1
     208  and x7, x7, #0xff
     209#endif
     210
     211#ifdef RTEMS_SMP
     212  /*
     213   * Get current per-CPU control and store it in PL1 only Thread ID
     214   * Register (TPIDR_EL1).
     215   */
     216#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
     217  ldr w1, =_Per_CPU_Information
     218#else
     219  ldr x1, =_Per_CPU_Information
     220#endif
     221  add x1, x1, x7, lsl #PER_CPU_CONTROL_SIZE_LOG2
     222  msr TPIDR_EL1, x1
     223
     224#endif
     225
     226  /* Calculate interrupt stack area end for current processor */
     227#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
     228  ldr w1, =_ISR_Stack_size
     229#else
     230  ldr x1, =_ISR_Stack_size
     231#endif
     232#ifdef RTEMS_SMP
     233  add x3, x7, #1
     234  mul x1, x1, x3
     235#endif
     236#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
     237  ldr w2, =_ISR_Stack_area_begin
     238#else
     239  ldr x2, =_ISR_Stack_area_begin
     240#endif
     241  add x3, x1, x2
     242
     243  /* Disable interrupts and debug */
     244  msr DAIFSet, #0xa
     245
     246#ifdef BSP_START_NEEDS_REGISTER_INITIALIZATION
     247  mov x8, XZR
     248  mov x9, XZR
     249  mov x10, XZR
     250  mov x11, XZR
     251  mov x12, XZR
     252  mov x13, XZR
     253  mov x14, XZR
     254  mov x15, XZR
     255#endif
     256
     257  /*
     258   * SPx: the stack pointer corresponding to the current exception level
     259   * Normal operation for RTEMS on AArch64 uses SPx and runs on EL1
     260   * Exception operation (synchronous errors, IRQ, FIQ, System Errors) uses SP0
     261  */
     262#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
     263  ldr w1, =bsp_stack_exception_size
     264#else
     265  ldr x1, =bsp_stack_exception_size
     266#endif
     267  /* Switch to SP0 and set exception stack */
     268  msr spsel, #0
     269  mov sp, x3
     270  /* Switch back to SPx for normal operation */
     271  msr spsel, #1
     272  sub x3, x3, x1
     273
     274  /* Set SP1 stack used for normal operation */
     275  mov sp, x3
     276
     277  /* Stay in EL1 mode */
     278
     279#ifdef AARCH64_MULTILIB_VFP
     280#ifdef AARCH64_MULTILIB_HAS_CPACR
     281  /* Read CPACR */
     282  mrs x0, CPACR_EL1
     283
     284  /* Enable EL1 access permissions for CP10 */
     285  orr x0, x0, #(1 << 20)
     286
     287  /* Write CPACR */
     288  msr CPACR_EL1, x0
     289  isb
     290#endif
     291
     292  /* FPU does not need to be enabled on AArch64 */
    292 293
    293 294#endif /* AARCH64_MULTILIB_VFP */
  • bsps/aarch64/xilinx-zynqmp/start/bspstarthooks.c

    r37377b0 rea7b1b7  
    48 48BSP_START_TEXT_SECTION void bsp_start_hook_0( void )
    49 49{
     50  /* do nothing */
     51}
     52
     53BSP_START_TEXT_SECTION void bsp_start_hook_1( void )
     54{
    50 55#ifdef RTEMS_SMP
    51 56  uint32_t cpu_index_self;
     
    73 78      _Per_CPU_Get_by_index( cpu_index_self )
    74 79    );
     80    /* Unreached */
    75 81  }
     82#endif /* RTEMS_SMP */
    76 83
    77 #endif
    78 }
    79 
    80 BSP_START_TEXT_SECTION void bsp_start_hook_1( void )
    81 {
    82 84  AArch64_start_set_vector_base();
    83 85  bsp_start_copy_sections();
Note: See TracChangeset for help on using the changeset viewer.