/*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
.globl sp_min_vector_table
.globl sp_min_entrypoint
.globl sp_min_warm_entrypoint
+ .globl sp_min_handle_smc
+ .globl sp_min_handle_fiq
.macro route_fiq_to_sp_min reg
/* -----------------------------------------------------
vector_base sp_min_vector_table
b sp_min_entrypoint
b plat_panic_handler /* Undef */
- b handle_smc /* Syscall */
+ b sp_min_handle_smc /* Syscall */
b plat_panic_handler /* Prefetch abort */
b plat_panic_handler /* Data abort */
b plat_panic_handler /* Reserved */
b plat_panic_handler /* IRQ */
- b handle_fiq /* FIQ */
+ b sp_min_handle_fiq /* FIQ */
/*
* SMC handling function for SP_MIN.
*/
-func handle_smc
+func sp_min_handle_smc
/* On SMC entry, `sp` points to `smc_ctx_t`. Save `lr`. */
str lr, [sp, #SMC_CTX_LR_MON]
/* `r0` points to `smc_ctx_t` */
b sp_min_exit
-endfunc handle_smc
+endfunc sp_min_handle_smc
/*
* Secure Interrupts handling function for SP_MIN.
*/
-func handle_fiq
+func sp_min_handle_fiq
#if !SP_MIN_WITH_SECURE_FIQ
b plat_panic_handler
#else
b sp_min_exit
#endif
-endfunc handle_fiq
+endfunc sp_min_handle_fiq
/*
* The Warm boot entrypoint for SP_MIN.
#
-# Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+# Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
BL32_SOURCES += lib/extensions/amu/aarch32/amu.c
endif
+ifeq (${WORKAROUND_CVE_2017_5715},1)
+BL32_SOURCES += bl32/sp_min/workaround_cve_2017_5715_bpiall.S \
+ bl32/sp_min/workaround_cve_2017_5715_icache_inv.S
+endif
+
BL32_LINKERFILE := bl32/sp_min/sp_min.ld.S
# Include the platform-specific SP_MIN Makefile
--- /dev/null
+/*
+ * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <asm_macros.S>
+
+ .globl workaround_bpiall_runtime_exceptions
+
+vector_base workaround_bpiall_runtime_exceptions
+ /* We encode the exception entry in the bottom 3 bits of SP */
+ add sp, sp, #1 /* Reset: 0b111 */
+ add sp, sp, #1 /* Undef: 0b110 */
+ add sp, sp, #1 /* Syscall: 0b101 */
+ add sp, sp, #1 /* Prefetch abort: 0b100 */
+ add sp, sp, #1 /* Data abort: 0b011 */
+ add sp, sp, #1 /* Reserved: 0b010 */
+ add sp, sp, #1 /* IRQ: 0b001 */
+ nop /* FIQ: 0b000 */
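+
+ /*
+ * Each vector slot above holds a single instruction and execution
+ * falls through the remaining slots, so SP is incremented once for
+ * every slot between the vector taken and this point. Because SP
+ * enters with its bottom 3 bits clear (see the smc_ctx_t padding
+ * added in this patch), the taken exception ends up recorded in
+ * SP[2:0] as per the encodings noted above.
+ */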
+
+ /*
+ * Invalidate the branch predictor. `r0` is a dummy register
+ * whose value is ignored.
+ */
+ stcopr r0, BPIALL
+ isb
+
+ /*
+ * As we cannot use any temporary registers and cannot
+ * clobber SP, we can decode the exception entry using
+ * an unrolled binary search.
+ *
+ * Note, if this code is re-used by other secure payloads,
+ * the below exception entry vectors must be changed to
+ * the vectors specific to that secure payload.
+ */
+
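+ /*
+ * Bit 2 first separates entries 4-7 (Reset/Undef/Syscall/Prefetch
+ * abort) from entries 0-3 (Data abort/Reserved/IRQ/FIQ); bits 1 and
+ * 0 then select within each group. SP is restored by clearing its
+ * bottom 3 bits before branching; the BIC does not disturb the
+ * flags set by the preceding TST.
+ */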
+ tst sp, #4
+ bne 1f
+
+ tst sp, #2
+ bne 3f
+
+ /* Expected encoding: 0x1 and 0x0 */
+ tst sp, #1
+ /* Restore original value of SP by clearing the bottom 3 bits */
+ bic sp, sp, #0x7
+ bne plat_panic_handler /* IRQ */
+ b sp_min_handle_fiq /* FIQ */
+
+1:
+ tst sp, #2
+ bne 2f
+
+ /* Expected encoding: 0x4 and 0x5 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne sp_min_handle_smc /* Syscall */
+ b plat_panic_handler /* Prefetch abort */
+
+2:
+ /* Expected encoding: 0x7 and 0x6 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne sp_min_entrypoint /* Reset */
+ b plat_panic_handler /* Undef */
+
+3:
+ /* Expected encoding: 0x2 and 0x3 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne plat_panic_handler /* Data abort */
+ b plat_panic_handler /* Reserved */
--- /dev/null
+/*
+ * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <asm_macros.S>
+
+ .globl workaround_icache_inv_runtime_exceptions
+
+vector_base workaround_icache_inv_runtime_exceptions
+ /* We encode the exception entry in the bottom 3 bits of SP */
+ add sp, sp, #1 /* Reset: 0b111 */
+ add sp, sp, #1 /* Undef: 0b110 */
+ add sp, sp, #1 /* Syscall: 0b101 */
+ add sp, sp, #1 /* Prefetch abort: 0b100 */
+ add sp, sp, #1 /* Data abort: 0b011 */
+ add sp, sp, #1 /* Reserved: 0b010 */
+ add sp, sp, #1 /* IRQ: 0b001 */
+ nop /* FIQ: 0b000 */
+
+ /*
+ * Invalidate the instruction cache, which we assume also
+ * invalidates the branch predictor. This may depend on
+ * other CPU specific changes (e.g. an ACTLR setting).
+ */
+ stcopr r0, ICIALLU
+ isb
+
+ /*
+ * As we cannot use any temporary registers and cannot
+ * clobber SP, we can decode the exception entry using
+ * an unrolled binary search.
+ *
+ * Note, if this code is re-used by other secure payloads,
+ * the below exception entry vectors must be changed to
+ * the vectors specific to that secure payload.
+ */
+
+ tst sp, #4
+ bne 1f
+
+ tst sp, #2
+ bne 3f
+
+ /* Expected encoding: 0x1 and 0x0 */
+ tst sp, #1
+ /* Restore original value of SP by clearing the bottom 3 bits */
+ bic sp, sp, #0x7
+ bne plat_panic_handler /* IRQ */
+ b sp_min_handle_fiq /* FIQ */
+
+1:
+ tst sp, #2
+ bne 2f
+
+ /* Expected encoding: 0x4 and 0x5 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne sp_min_handle_smc /* Syscall */
+ b plat_panic_handler /* Prefetch abort */
+
+2:
+ /* Expected encoding: 0x7 and 0x6 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne sp_min_entrypoint /* Reset */
+ b plat_panic_handler /* Undef */
+
+3:
+ /* Expected encoding: 0x2 and 0x3 */
+ tst sp, #1
+ bic sp, sp, #0x7
+ bne plat_panic_handler /* Data abort */
+ b plat_panic_handler /* Reserved */
/*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
/*
* Helper macro to initialise EL3 registers we care about.
*/
- .macro el3_arch_init_common _exception_vectors
+ .macro el3_arch_init_common
/* ---------------------------------------------------------------------
* SCTLR has already been initialised - read current value before
* modifying.
stcopr r0, SCTLR
isb
- /* ---------------------------------------------------------------------
- * Set the exception vectors (VBAR/MVBAR).
- * ---------------------------------------------------------------------
- */
- ldr r0, =\_exception_vectors
- stcopr r0, VBAR
- stcopr r0, MVBAR
- isb
-
/* ---------------------------------------------------------------------
* Initialise SCR, setting all fields rather than relying on the hw.
*
bxne r0
.endif /* _warm_boot_mailbox */
+ /* ---------------------------------------------------------------------
+ * Set the exception vectors (VBAR/MVBAR).
+ * ---------------------------------------------------------------------
+ */
+ ldr r0, =\_exception_vectors
+ stcopr r0, VBAR
+ stcopr r0, MVBAR
+ isb
+
/* ---------------------------------------------------------------------
* It is a cold boot.
* Perform any processor specific actions upon reset e.g. cache, TLB
*/
bl reset_handler
- el3_arch_init_common \_exception_vectors
+ el3_arch_init_common
.if \_secondary_cold_boot
/* -------------------------------------------------------------
/*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#define TLBIMVAA p15, 0, c8, c7, 3
#define TLBIMVAAIS p15, 0, c8, c3, 3
#define BPIALLIS p15, 0, c7, c1, 6
+#define BPIALL p15, 0, c7, c5, 6
+#define ICIALLU p15, 0, c7, c5, 0
#define HSCTLR p15, 4, c1, c0, 0
#define HCR p15, 4, c1, c1, 0
#define HCPTR p15, 4, c1, c1, 2
/*
- * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#define SMC_CTX_LR_MON 0x80
#define SMC_CTX_SCR 0x84
#define SMC_CTX_PMCR 0x88
-#define SMC_CTX_SIZE 0x8C
+#define SMC_CTX_SIZE 0x90
#ifndef __ASSEMBLY__
#include <cassert.h>
u_register_t lr_mon;
u_register_t scr;
u_register_t pmcr;
-} smc_ctx_t;
+ /*
+ * The workaround for CVE-2017-5715 requires storing information in
+ * the bottom 3 bits of the stack pointer. Add a padding field to
+ * force the size of the struct to be a multiple of 8.
+ */
+ u_register_t pad;
+} smc_ctx_t __aligned(8);
/*
* Compile time assertions related to the 'smc_context' structure to
CASSERT(SMC_CTX_SPSR_MON == __builtin_offsetof(smc_ctx_t, spsr_mon), \
assert_smc_ctx_spsr_mon_offset_mismatch);
+CASSERT((sizeof(smc_ctx_t) & 0x7) == 0, assert_smc_ctx_not_aligned);
CASSERT(SMC_CTX_SIZE == sizeof(smc_ctx_t), assert_smc_ctx_size_mismatch);
/* Convenience macros to return from SMC handler */