******************************************************************************/
void bl1_arch_setup(void)
{
- unsigned long tmp_reg = 0;
-
- /* Enable alignment checks */
- tmp_reg = read_sctlr_el3();
- tmp_reg |= (SCTLR_A_BIT | SCTLR_SA_BIT);
- write_sctlr_el3(tmp_reg);
- isb();
-
/*
 * Set the next EL to be AArch64 and route external abort and SError
 * interrupts to EL3.
*/
- tmp_reg = SCR_RES1_BITS | SCR_RW_BIT | SCR_EA_BIT;
- write_scr(tmp_reg);
+ write_scr_el3(SCR_RES1_BITS | SCR_RW_BIT | SCR_EA_BIT);
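+ /*
+  * For reference: SCR_EL3.EA is bit 3, the RES1 bits are bits 4-5 and
+  * SCR_EL3.RW is bit 10, so the value written above is
+  * (0x3 << 4) | (1 << 10) | (1 << 3) = 0x438.
+  */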
/*
* Enable SError and Debug exceptions
func bl1_entrypoint
/* ---------------------------------------------
* Set the CPU endianness before doing anything
- * that might involve memory reads or writes
+ * that might involve memory reads or writes.
* ---------------------------------------------
*/
mrs x0, sctlr_el3
*/
bl cpu_reset_handler
- /* -------------------------------
- * Enable the instruction cache.
- * -------------------------------
+ /* ---------------------------------------------
+ * Enable the instruction cache, stack pointer
+ * and data access alignment checks
+ * ---------------------------------------------
*/
+ mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
mrs x0, sctlr_el3
- orr x0, x0, #SCTLR_I_BIT
+ orr x0, x0, x1
msr sctlr_el3, x0
isb
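+ /*
+  * Note: the combined mask (bits 1, 3 and 12, i.e. 0x100a) is not
+  * encodable as an AArch64 logical immediate, hence the mov into x1
+  * followed by the register form of orr. The isb ensures the new
+  * SCTLR_EL3 settings take effect before any subsequent instruction.
+  */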
msr vbar_el1, x0
/* ---------------------------------------------
- * Enable the instruction cache.
+ * Enable the instruction cache, stack pointer
+ * and data access alignment checks
* ---------------------------------------------
*/
+ mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
mrs x0, sctlr_el1
- orr x0, x0, #SCTLR_I_BIT
+ orr x0, x0, x1
msr sctlr_el1, x0
isb
******************************************************************************/
void bl31_arch_setup(void)
{
- unsigned long tmp_reg = 0;
- uint64_t counter_freq;
-
- /* Enable alignment checks */
- tmp_reg = read_sctlr_el3();
- tmp_reg |= (SCTLR_A_BIT | SCTLR_SA_BIT);
- write_sctlr_el3(tmp_reg);
-
/*
 * Route external abort and SError interrupts to EL3. Other SCR
 * bits will be configured before exiting to a lower exception
 * level.
*/
- tmp_reg = SCR_RES1_BITS | SCR_EA_BIT;
- write_scr(tmp_reg);
+ write_scr_el3(SCR_RES1_BITS | SCR_EA_BIT);
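+ /*
+  * For reference: unlike in bl1_arch_setup(), SCR_EL3.RW is left clear
+  * here, so the value written is just (0x3 << 4) | (1 << 3) = 0x38; RW
+  * is programmed per lower EL before exiting EL3.
+  */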
/*
* Enable SError and Debug exceptions
enable_debug_exceptions();
/* Program the counter frequency */
- counter_freq = plat_get_syscnt_freq();
- write_cntfrq_el0(counter_freq);
+ write_cntfrq_el0(plat_get_syscnt_freq());
}
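
As an aside, a minimal sketch of how the frequency programmed above is
typically consumed. ticks_to_us() is hypothetical, and read_cntfrq_el0()
is assumed to be the read counterpart of the write_cntfrq_el0() accessor
used in this patch; the sketch also ignores overflow for very large tick
counts:

    /* Hypothetical helper: convert generic timer ticks to microseconds
     * using the frequency programmed into CNTFRQ_EL0 above. */
    static uint64_t ticks_to_us(uint64_t ticks)
    {
        return (ticks * 1000000ULL) / read_cntfrq_el0();
    }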
mov x20, x0
mov x21, x1
#else
+ /* ---------------------------------------------
+ * Set the CPU endianness before doing anything
+ * that might involve memory reads or writes.
+ * ---------------------------------------------
+ */
+ mrs x0, sctlr_el3
+ bic x0, x0, #SCTLR_EE_BIT
+ msr sctlr_el3, x0
+ isb
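+ /*
+  * SCTLR_EE_BIT is bit 25; clearing it selects little-endian data
+  * accesses and translation table walks at EL3.
+  */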
/* -----------------------------------------------------
* Perform any processor specific actions upon reset
 * -----------------------------------------------------
 */
bl cpu_reset_handler
#endif
-
/* ---------------------------------------------
- * Enable the instruction cache.
+ * Enable the instruction cache, stack pointer
+ * and data access alignment checks
* ---------------------------------------------
*/
- mrs x1, sctlr_el3
- orr x1, x1, #SCTLR_I_BIT
- msr sctlr_el3, x1
+ mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
+ mrs x0, sctlr_el3
+ orr x0, x0, x1
+ msr sctlr_el3, x0
isb
/* ---------------------------------------------
msr vbar_el1, x0
/* ---------------------------------------------
- * Enable the instruction cache.
+ * Enable the instruction cache, stack pointer
+ * and data access alignment checks
* ---------------------------------------------
*/
+ mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
mrs x0, sctlr_el1
- orr x0, x0, #SCTLR_I_BIT
+ orr x0, x0, x1
msr sctlr_el1, x0
isb
msr vbar_el1, x0
/* ---------------------------------------------
- * Enable the instruction cache.
+ * Enable the instruction cache, stack pointer
+ * and data access alignment checks
* ---------------------------------------------
*/
+ mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
mrs x0, sctlr_el1
- orr x0, x0, #SCTLR_I_BIT
+ orr x0, x0, x1
msr sctlr_el1, x0
isb
#define SCTLR_A_BIT (1 << 1)
#define SCTLR_C_BIT (1 << 2)
#define SCTLR_SA_BIT (1 << 3)
-#define SCTLR_B_BIT (1 << 7)
-#define SCTLR_Z_BIT (1 << 11)
#define SCTLR_I_BIT (1 << 12)
#define SCTLR_WXN_BIT (1 << 19)
-#define SCTLR_EXCEPTION_BITS (0x3 << 6)
#define SCTLR_EE_BIT (1 << 25)
/* CPUECTLR definitions */
isb(); \
\
sctlr = read_sctlr_el##_el(); \
- sctlr |= SCTLR_WXN_BIT | SCTLR_M_BIT | SCTLR_I_BIT; \
- sctlr |= SCTLR_A_BIT; \
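+ /* The I and A bits are now set once in the entrypoint code and \
+  * need not be set again here. */ \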
+ sctlr |= SCTLR_WXN_BIT | SCTLR_M_BIT; \
\
if (flags & DISABLE_DCACHE) \
sctlr &= ~SCTLR_C_BIT; \
adr x23, psci_afflvl_suspend_finishers
psci_aff_common_finish_entry:
+#if !RESET_TO_BL31
+ /* ---------------------------------------------
+ * Enable the instruction cache, stack pointer
+ * and data access alignment checks. Also, set
+ * the EL3 exception endianness to little-endian.
+ * It can be assumed that BL3-1 entrypoint code
+ * will do this when RESET_TO_BL31 is set. The
+ * same assumption cannot be made when another
+ * boot loader executes before BL3-1 in the warm
+ * boot path, e.g. BL1.
+ * ---------------------------------------------
+ */
+ mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
+ mrs x0, sctlr_el3
+ orr x0, x0, x1
+ bic x0, x0, #SCTLR_EE_BIT
+ msr sctlr_el3, x0
+ isb
+#endif
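+ /* Note: the MMU-enable code changed earlier in this patch no longer
+  * sets SCTLR_I_BIT and SCTLR_A_BIT, so it relies on them having been
+  * set here, or by the BL3-1 entrypoint when RESET_TO_BL31. */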
+
/* ---------------------------------------------
* Initialise the pcpu cache pointer for the CPU
* ---------------------------------------------