Split ARM and ARM64 architectures.

Details:
- CONFIG_ARM64 is decoupled from CONFIG_ARM (it is no longer a subset)
- Arch and include AArch64 files live in dedicated directories
  (arch/arm64 and include/arch/arm64)
- AArch64 boards and SoCs are moved to soc/arm64 and boards/arm64
- AArch64-specific DTS files are moved to dts/arm64
- The A72 support for the bcm_vk/viper board is moved into the
  boards/bcm_vk/viper directory

Signed-off-by: Carlo Caione <ccaione@baylibre.com>
169 lines
2.9 KiB
ArmAsm
/*
 * Copyright (c) 2019 Carlo Caione <ccaione@baylibre.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */
/* Zephyr core headers */
#include <toolchain.h>
#include <linker/sections.h>
#include <arch/cpu.h>

/* Local (arch/arm64) helpers: vector table symbols and asm macros */
#include "vector_table.h"
#include "macro_priv.inc"

_ASM_FILE_PROLOGUE
|
/*
 * Platform specific pre-C init code
 *
 * Note: - Stack is not yet available
 *       - x23 must be preserved
 */

/*
 * Weak default EL3 hook: does nothing. A platform can provide a strong
 * definition to run EL3-specific setup before C code is entered.
 */
WTEXT(z_arm64_el3_plat_prep_c)
SECTION_FUNC(TEXT,z_arm64_el3_plat_prep_c)
	ret
/*
 * Weak default EL2 hook: does nothing. Overridable by the platform for
 * EL2-specific pre-C initialization (stack still unavailable here).
 */
WTEXT(z_arm64_el2_plat_prep_c)
SECTION_FUNC(TEXT,z_arm64_el2_plat_prep_c)
	ret
/*
 * Weak default EL1 hook: does nothing. Overridable by the platform for
 * EL1-specific pre-C initialization (stack still unavailable here).
 */
WTEXT(z_arm64_el1_plat_prep_c)
SECTION_FUNC(TEXT,z_arm64_el1_plat_prep_c)
	ret
/*
 * Set the minimum necessary to safely call C code
 *
 * In:      x30 - return address (the stack is NOT usable on entry)
 * Out:     sp  - this CPU's stack (interrupt stack on the primary core,
 *                the stack published in arm64_cpu_init[] on secondaries)
 * Uses:    x0, x1, x2; x23 carries the return address across the bl calls
 */

GTEXT(__reset_prep_c)
SECTION_SUBSEC_FUNC(TEXT,_reset_section,__reset_prep_c)
	/* return address: x23 (no stack yet, so it cannot be pushed) */
	mov	x23, x30

	switch_el x0, 3f, 2f, 1f
3:
	/* Reinitialize SCTLR from scratch in EL3 */
	ldr	w0, =(SCTLR_EL3_RES1 | SCTLR_I_BIT | SCTLR_SA_BIT)
	msr	sctlr_el3, x0

	/* Custom plat prep_c init */
	bl	z_arm64_el3_plat_prep_c

	b	out
2:
	/* Disable alignment fault checking */
	mrs	x0, sctlr_el2
	bic	x0, x0, SCTLR_A_BIT
	msr	sctlr_el2, x0

	/* Custom plat prep_c init */
	bl	z_arm64_el2_plat_prep_c

	b	out
1:
	/* Disable alignment fault checking */
	mrs	x0, sctlr_el1
	bic	x0, x0, SCTLR_A_BIT
	msr	sctlr_el1, x0

	/* Custom plat prep_c init */
	bl	z_arm64_el1_plat_prep_c

out:
	isb

	/* Select SP_EL0 */
	msr	SPSel, #0

#if CONFIG_MP_NUM_CPUS > 1
	get_cpu_id	x0
	cbnz	x0, L_secondary_stack
#endif

	/* Primary core: initialize sp to the interrupt stack */
	ldr	x0, =(z_interrupt_stacks)
	add	x0, x0, #(CONFIG_ISR_STACK_SIZE)
	mov	sp, x0

	ret	x23

#if CONFIG_MP_NUM_CPUS > 1
L_secondary_stack:
	/* sp = arm64_cpu_init[cpu_id].stack for this secondary core */
	get_cpu_id	x1
	adr	x0, arm64_cpu_init
	mov	x2, #ARM64_CPU_INIT_SIZE
	madd	x0, x1, x2, x0
	ldr	x0, [x0]
	/*
	 * Spin until the primary core publishes our stack pointer
	 * (non-NULL slot). BUGFIX: this used to branch to
	 * L_enable_secondary on a NULL slot, entering C code with no
	 * stack set up; instead, keep polling the slot.
	 */
	cbz	x0, L_secondary_stack
	/* Acquire barrier: order the stack load before its first use */
	dmb	ld

	mov	sp, x0

	ret	x23
#endif
/*
 * Reset vector
 *
 * Ran when the system comes out of reset. The processor is in thread mode with
 * privileged level. At this point, neither SP_EL0 nor SP_ELx point to a valid
 * area in SRAM.
 */

GTEXT(__reset)
SECTION_SUBSEC_FUNC(TEXT,_reset_section,__reset)

GTEXT(__start)
SECTION_SUBSEC_FUNC(TEXT,_reset_section,__start)
	/* Mask all exceptions (D, A, I, F) until init is done */
	msr	DAIFSet, #0xf

	/* Minimal environment (SCTLR, SPSel, sp) to safely call C code */
	bl	__reset_prep_c

	/* Platform hook for the highest implemented EL */
	bl	z_arm64_el_highest_init

switch_el:
	switch_el x0, 3f, 2f, 1f
3:
	/* EL3 init */
	bl	z_arm64_el3_init

	/*
	 * Ask for the next EL to drop to; the eret target is this same
	 * dispatcher (switch_el), re-run at the lower EL.
	 */
	adr	x0, switch_el
	bl	z_arm64_el3_get_next_el
	eret
2:
	/* EL2 init */
	bl	z_arm64_el2_init

	/* Drop to EL1 (SP_EL0/EL1t) with all exceptions masked */
	mov_imm	x0, (SPSR_DAIF_MASK | SPSR_MODE_EL1T)
	msr	spsr_el2, x0

	adr	x0, 1f
	msr	elr_el2, x0
	eret
1:
	/* EL1 init */
	bl	z_arm64_el1_init

	/* Unmask SError only; IRQ/FIQ stay masked until the kernel is up */
	msr	DAIFClr, #(DAIFCLR_ABT_BIT)
	isb

#if CONFIG_MP_NUM_CPUS > 1
	/* Secondary cores take the dedicated SMP bring-up path */
	get_cpu_id	x0
	cbnz	x0, L_enable_secondary
#endif

	b	z_arm64_prep_c

#if CONFIG_MP_NUM_CPUS > 1
L_enable_secondary:
	b	z_arm64_secondary_prep_c
#endif