$(eval $(call assert_boolean,CREATE_KEYS))
$(eval $(call assert_boolean,CTX_INCLUDE_AARCH32_REGS))
$(eval $(call assert_boolean,CTX_INCLUDE_FPREGS))
+$(eval $(call assert_boolean,CTX_INCLUDE_PAUTH_REGS))
$(eval $(call assert_boolean,DEBUG))
$(eval $(call assert_boolean,DYN_DISABLE_AUTH))
$(eval $(call assert_boolean,EL3_EXCEPTION_HANDLING))
$(eval $(call add_define,COLD_BOOT_SINGLE_CPU))
$(eval $(call add_define,CTX_INCLUDE_AARCH32_REGS))
$(eval $(call add_define,CTX_INCLUDE_FPREGS))
+$(eval $(call add_define,CTX_INCLUDE_PAUTH_REGS))
$(eval $(call add_define,EL3_EXCEPTION_HANDLING))
$(eval $(call add_define,ENABLE_AMU))
$(eval $(call add_define,ENABLE_ASSERTIONS))
/*
- * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
/* Save GP registers */
bl save_gp_registers
+#if CTX_INCLUDE_PAUTH_REGS
+ bl pauth_context_save
+#endif
+
/* Setup exception class and syndrome arguments for platform handler */
mov x0, #ERROR_EA_SYNC
mrs x1, esr_el3
/* Save GP registers */
bl save_gp_registers
+#if CTX_INCLUDE_PAUTH_REGS
+ bl pauth_context_save
+#endif
+
/* Setup exception class and syndrome arguments for platform handler */
mov x0, #ERROR_EA_ASYNC
mrs x1, esr_el3
* ---------------------------------------------------------------------
*/
.macro handle_interrupt_exception label
+
bl save_gp_registers
+
+#if CTX_INCLUDE_PAUTH_REGS
+ bl pauth_context_save
+#endif
+
/* Save the EL3 system registers needed to return from this exception */
mrs x0, spsr_el3
mrs x1, elr_el3
tbnz x0, #FUNCID_CC_SHIFT, smc_prohibited
smc_handler64:
+ /* NOTE: The code below must preserve x0-x4 */
+
+ /* Save general purpose registers */
+ bl save_gp_registers
+
+#if CTX_INCLUDE_PAUTH_REGS
+ bl pauth_context_save
+#endif
+
/*
* Populate the parameters for the SMC handler.
* We already have x0-x4 in place. x5 will point to a cookie (not used
* now). x6 will point to the context structure (SP_EL3) and x7 will
* contain flags we need to pass to the handler.
*/
- bl save_gp_registers
-
mov x5, xzr
mov x6, sp
Armv8.3-A
~~~~~~~~~
-- Pointer Authentication features of Armv8.3-A are unconditionally enabled so
- that lower ELs are allowed to use them without causing a trap to EL3.
+- Pointer authentication features of Armv8.3-A are unconditionally enabled in
+ the Non-secure world so that lower ELs are allowed to use them without
+ causing a trap to EL3.
+
+ In order to enable the Secure world to use them, ``CTX_INCLUDE_PAUTH_REGS``
+ must be set to 1. This will add all pointer authentication system registers
+ to the context that is saved when doing a world switch.
Armv7-A
~~~~~~~
registers to be included when saving and restoring the CPU context. Default
is 0.
+- ``CTX_INCLUDE_PAUTH_REGS``: Boolean option that, when set to 1, causes the
+ ARMv8.3-PAuth registers to be included when saving and restoring the CPU
+ context. If this option is set to 0, pointer authentication is enabled in the
+ Non-secure world only, so that the values of the key registers cannot be
+ leaked between the Secure and Non-secure worlds. The default is 0.
+
- ``DEBUG``: Chooses between a debug and release build. It can take either 0
(release) or 1 (debug) as values. 0 is the default.
#define ID_AA64PFR0_GIC_SHIFT U(24)
#define ID_AA64PFR0_GIC_WIDTH U(4)
-#define ID_AA64PFR0_GIC_MASK ((ULL(1) << ID_AA64PFR0_GIC_WIDTH) - ULL(1))
+#define ID_AA64PFR0_GIC_MASK ULL(0xf)
/* ID_AA64ISAR1_EL1 definitions */
+#define ID_AA64ISAR1_EL1 S3_0_C0_C6_1
#define ID_AA64ISAR1_GPI_SHIFT U(28)
#define ID_AA64ISAR1_GPI_WIDTH U(4)
+#define ID_AA64ISAR1_GPI_MASK ULL(0xf)
#define ID_AA64ISAR1_GPA_SHIFT U(24)
#define ID_AA64ISAR1_GPA_WIDTH U(4)
+#define ID_AA64ISAR1_GPA_MASK ULL(0xf)
#define ID_AA64ISAR1_API_SHIFT U(8)
#define ID_AA64ISAR1_API_WIDTH U(4)
+#define ID_AA64ISAR1_API_MASK ULL(0xf)
#define ID_AA64ISAR1_APA_SHIFT U(4)
#define ID_AA64ISAR1_APA_WIDTH U(4)
-
-#define ID_AA64ISAR1_GPI_MASK \
- (((ULL(1) << ID_AA64ISAR1_GPI_WIDTH) - ULL(1)) << ID_AA64ISAR1_GPI_SHIFT)
-#define ID_AA64ISAR1_GPA_MASK \
- (((ULL(1) << ID_AA64ISAR1_GPA_WIDTH) - ULL(1)) << ID_AA64ISAR1_GPA_SHIFT)
-#define ID_AA64ISAR1_API_MASK \
- (((ULL(1) << ID_AA64ISAR1_API_WIDTH) - ULL(1)) << ID_AA64ISAR1_API_SHIFT)
-#define ID_AA64ISAR1_APA_MASK \
- (((ULL(1) << ID_AA64ISAR1_APA_WIDTH) - ULL(1)) << ID_AA64ISAR1_APA_SHIFT)
+#define ID_AA64ISAR1_APA_MASK ULL(0xf)
/* ID_AA64MMFR0_EL1 definitions */
#define ID_AA64MMFR0_EL1_PARANGE_SHIFT U(0)
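As an aside, the APA/API/GPA/GPI fields defined above are what software uses to probe for PAuth support at run time. The sketch below is not part of this change; the helper name is made up, and it reads ID_AA64ISAR1_EL1 through the S3_0_C0_C6_1 encoding introduced above so that it assembles even with toolchains that do not know the register by name:

#include <stdbool.h>
#include <stdint.h>

/*
 * Hypothetical helper (not part of this change): returns true if the PE
 * implements any form of pointer authentication, by checking the APA, API,
 * GPA and GPI fields of ID_AA64ISAR1_EL1. The shifts and the 0xf field mask
 * mirror the ID_AA64ISAR1_* definitions added above.
 */
static bool cpu_supports_pauth(void)
{
	uint64_t isar1;
	uint64_t apa, api, gpa, gpi;

	/* S3_0_C0_C6_1 is the encoding of ID_AA64ISAR1_EL1. */
	__asm__ volatile("mrs %0, S3_0_C0_C6_1" : "=r" (isar1));

	apa = (isar1 >> 4U) & 0xfULL;	/* ID_AA64ISAR1_APA_SHIFT */
	api = (isar1 >> 8U) & 0xfULL;	/* ID_AA64ISAR1_API_SHIFT */
	gpa = (isar1 >> 24U) & 0xfULL;	/* ID_AA64ISAR1_GPA_SHIFT */
	gpi = (isar1 >> 28U) & 0xfULL;	/* ID_AA64ISAR1_GPI_SHIFT */

	return ((apa | api) != 0ULL) || ((gpa | gpi) != 0ULL);
}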
#define SCTLR_E0E_BIT (ULL(1) << 24)
#define SCTLR_EE_BIT (ULL(1) << 25)
#define SCTLR_UCI_BIT (ULL(1) << 26)
#define SCTLR_TRE_BIT (ULL(1) << 28)
#define SCTLR_AFE_BIT (ULL(1) << 29)
#define SCTLR_TE_BIT (ULL(1) << 30)
+#define SCTLR_EnIA_BIT (ULL(1) << 31)
#define SCTLR_DSSBS_BIT (ULL(1) << 44)
#define SCTLR_RESET_VAL SCTLR_EL3_RES1
/*******************************************************************************
* Armv8.3 Pointer Authentication Registers
******************************************************************************/
+#define APIAKeyLo_EL1 S3_0_C2_C1_0
+#define APIAKeyHi_EL1 S3_0_C2_C1_1
+#define APIBKeyLo_EL1 S3_0_C2_C1_2
+#define APIBKeyHi_EL1 S3_0_C2_C1_3
+#define APDAKeyLo_EL1 S3_0_C2_C2_0
+#define APDAKeyHi_EL1 S3_0_C2_C2_1
+#define APDBKeyLo_EL1 S3_0_C2_C2_2
+#define APDBKeyHi_EL1 S3_0_C2_C2_3
#define APGAKeyLo_EL1 S3_0_C2_C3_0
+#define APGAKeyHi_EL1 S3_0_C2_C3_1
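For illustration only, and not something this change itself does: combining the key register encodings above with the SCTLR_EnIA_BIT definition added earlier, an EL3 image could program its own instruction key A and enable the PACIA/AUTIA instructions roughly as sketched below. The function name is hypothetical and key generation is platform-specific:

#include <stdint.h>

/* Bit 31 of SCTLR_EL3; mirrors the SCTLR_EnIA_BIT definition in arch.h. */
#define SCTLR_ENIA_BIT		(1ULL << 31)

/*
 * Hypothetical sketch: install an instruction key A at EL3 and enable the
 * PACIA/AUTIA instructions there. S3_0_C2_C1_0 and S3_0_C2_C1_1 are the
 * encodings of APIAKeyLo_EL1 and APIAKeyHi_EL1 listed above. Where the key
 * comes from is outside the scope of this change.
 */
static void el3_enable_pauth_ia(uint64_t key_lo, uint64_t key_hi)
{
	uint64_t sctlr;

	__asm__ volatile("msr S3_0_C2_C1_0, %0" : : "r" (key_lo)); /* APIAKeyLo_EL1 */
	__asm__ volatile("msr S3_0_C2_C1_1, %0" : : "r" (key_hi)); /* APIAKeyHi_EL1 */

	__asm__ volatile("mrs %0, sctlr_el3" : "=r" (sctlr));
	sctlr |= SCTLR_ENIA_BIT;
	__asm__ volatile("msr sctlr_el3, %0" : : "r" (sctlr));
	__asm__ volatile("isb" ::: "memory");
}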
/*******************************************************************************
* Armv8.4 Data Independent Timing Registers
* authentication instructions from lower ELs.
* ---------------------------------------------------------------------
*/
- mov_imm x0, ((SCR_RESET_VAL | SCR_EA_BIT | SCR_SIF_BIT | \
- SCR_API_BIT | SCR_APK_BIT) \
+ mov_imm x0, ((SCR_RESET_VAL | SCR_EA_BIT | SCR_SIF_BIT) \
& ~(SCR_TWE_BIT | SCR_TWI_BIT | SCR_SMD_BIT))
+#if CTX_INCLUDE_PAUTH_REGS
+ /*
+ * If the pointer authentication registers are saved during world
+ * switches, enable pointer authentication everywhere, as it is safe to
+ * do so.
+ */
+ orr x0, x0, #(SCR_API_BIT | SCR_APK_BIT)
+#endif
msr scr_el3, x0
/* ---------------------------------------------------------------------
#define CTX_CVE_2018_3639_DISABLE U(0)
#define CTX_CVE_2018_3639_END U(0x10) /* Align to the next 16 byte boundary */
+/*******************************************************************************
+ * Registers related to ARMv8.3-PAuth.
+ ******************************************************************************/
+#define CTX_PAUTH_REGS_OFFSET (CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
+#if CTX_INCLUDE_PAUTH_REGS
+#define CTX_PACIAKEY_LO U(0x0)
+#define CTX_PACIAKEY_HI U(0x8)
+#define CTX_PACIBKEY_LO U(0x10)
+#define CTX_PACIBKEY_HI U(0x18)
+#define CTX_PACDAKEY_LO U(0x20)
+#define CTX_PACDAKEY_HI U(0x28)
+#define CTX_PACDBKEY_LO U(0x30)
+#define CTX_PACDBKEY_HI U(0x38)
+#define CTX_PACGAKEY_LO U(0x40)
+#define CTX_PACGAKEY_HI U(0x48)
+#define CTX_PACGAKEY_END U(0x50)
+#define CTX_PAUTH_REGS_END U(0x60) /* Align to the next 16 byte boundary */
+#else
+#define CTX_PAUTH_REGS_END U(0)
+#endif /* CTX_INCLUDE_PAUTH_REGS */
+
#ifndef __ASSEMBLY__
#include <stdint.h>
#endif
#define CTX_EL3STATE_ALL (CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL (CTX_CVE_2018_3639_END >> DWORD_SHIFT)
+#if CTX_INCLUDE_PAUTH_REGS
+# define CTX_PAUTH_REGS_ALL (CTX_PAUTH_REGS_END >> DWORD_SHIFT)
+#endif
/*
* AArch64 general purpose register context structure. Usually x0-x18,
/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);
+/* Registers associated with ARMv8.3-PAuth */
+#if CTX_INCLUDE_PAUTH_REGS
+DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
+#endif
+
/*
* Macros to access members of any of the above structures using their
* offsets
fp_regs_t fpregs_ctx;
#endif
cve_2018_3639_t cve_2018_3639_ctx;
+#if CTX_INCLUDE_PAUTH_REGS
+ pauth_t pauth_ctx;
+#endif
} cpu_context_t;
/* Macros to access members of the 'cpu_context_t' structure */
#define get_sysregs_ctx(h) (&((cpu_context_t *) h)->sysregs_ctx)
#define get_gpregs_ctx(h) (&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h) (&((cpu_context_t *) h)->cve_2018_3639_ctx)
+#if CTX_INCLUDE_PAUTH_REGS
+# define get_pauth_ctx(h) (&((cpu_context_t *) h)->pauth_ctx)
+#endif
/*
* Compile time assertions related to the 'cpu_context' structure to
assert_core_context_el3state_offset_mismatch);
CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx), \
assert_core_context_cve_2018_3639_offset_mismatch);
+#if CTX_INCLUDE_PAUTH_REGS
+CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx), \
+ assert_core_context_pauth_offset_mismatch);
+#endif
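As a usage illustration (again, not code from this change): once pauth_context_save has run for a context, other EL3 C code could read a saved key back through the new accessor, assuming the existing read_ctx_reg() macro from this header. The helper below is hypothetical:

#if CTX_INCLUDE_PAUTH_REGS
/*
 * Hypothetical example: fetch the instruction key A saved in a CPU context.
 * 'ctx' is a cpu_context_t pointer, e.g. the one returned by cm_get_context()
 * for a given security state; read_ctx_reg() is the existing accessor macro
 * from this header.
 */
static inline void get_saved_apiakey(void *ctx, uint64_t *lo, uint64_t *hi)
{
	pauth_t *pauth_ctx = get_pauth_ctx(ctx);

	*lo = read_ctx_reg(pauth_ctx, CTX_PACIAKEY_LO);
	*hi = read_ctx_reg(pauth_ctx, CTX_PACIAKEY_HI);
}
#endif /* CTX_INCLUDE_PAUTH_REGS */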
/*
* Helper macro to set the general purpose registers that correspond to
#if CTX_INCLUDE_FPREGS
.global fpregs_context_save
.global fpregs_context_restore
+#endif
+#if CTX_INCLUDE_PAUTH_REGS
+ .global pauth_context_restore
+ .global pauth_context_save
#endif
.global save_gp_registers
.global restore_gp_registers
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
+#if CTX_INCLUDE_PAUTH_REGS
+/* -----------------------------------------------------
+ * The following function strictly follows the AArch64
+ * PCS to use x9-x17 (temporary caller-saved registers)
+ * to save the ARMv8.3-PAuth register context. It assumes
+ * that 'sp' is pointing to a 'cpu_context_t' structure
+ * to where the register context will be saved.
+ * -----------------------------------------------------
+ */
+func pauth_context_save
+ add x11, sp, #CTX_PAUTH_REGS_OFFSET
+
+ mrs x9, APIAKeyLo_EL1
+ mrs x10, APIAKeyHi_EL1
+ stp x9, x10, [x11, #CTX_PACIAKEY_LO]
+
+ mrs x9, APIBKeyLo_EL1
+ mrs x10, APIBKeyHi_EL1
+ stp x9, x10, [x11, #CTX_PACIBKEY_LO]
+
+ mrs x9, APDAKeyLo_EL1
+ mrs x10, APDAKeyHi_EL1
+ stp x9, x10, [x11, #CTX_PACDAKEY_LO]
+
+ mrs x9, APDBKeyLo_EL1
+ mrs x10, APDBKeyHi_EL1
+ stp x9, x10, [x11, #CTX_PACDBKEY_LO]
+
+ mrs x9, APGAKeyLo_EL1
+ mrs x10, APGAKeyHi_EL1
+ stp x9, x10, [x11, #CTX_PACGAKEY_LO]
+
+ ret
+endfunc pauth_context_save
+
+/* -----------------------------------------------------
+ * The following function strictly follows the AArch64
+ * PCS to use x9-x17 (temporary caller-saved registers)
+ * to restore the ARMv8.3-PAuth register context. It assumes
+ * that 'sp' is pointing to a 'cpu_context_t' structure
+ * from where the register context will be restored.
+ * -----------------------------------------------------
+ */
+func pauth_context_restore
+ add x11, sp, #CTX_PAUTH_REGS_OFFSET
+
+ ldp x9, x10, [x11, #CTX_PACIAKEY_LO]
+ msr APIAKeyLo_EL1, x9
+ msr APIAKeyHi_EL1, x10
+
+ ldp x9, x10, [x11, #CTX_PACIBKEY_LO]
+ msr APIBKeyLo_EL1, x9
+ msr APIBKeyHi_EL1, x10
+
+ ldp x9, x10, [x11, #CTX_PACDAKEY_LO]
+ msr APDAKeyLo_EL1, x9
+ msr APDAKeyHi_EL1, x10
+
+ ldp x9, x10, [x11, #CTX_PACDBKEY_LO]
+ msr APDBKeyLo_EL1, x9
+ msr APDBKeyHi_EL1, x10
+
+ ldp x9, x10, [x11, #CTX_PACGAKEY_LO]
+ msr APGAKeyLo_EL1, x9
+ msr APGAKeyHi_EL1, x10
+
+ ret
+endfunc pauth_context_restore
+#endif /* CTX_INCLUDE_PAUTH_REGS */
+
/* -----------------------------------------------------
* The following functions are used to save and restore
* all the general purpose registers. Ideally we would
1:
#endif
+#if CTX_INCLUDE_PAUTH_REGS
+ /* Restore ARMv8.3-PAuth registers */
+ bl pauth_context_restore
+#endif
+
/* Restore saved general purpose registers and return */
b restore_gp_registers_eret
endfunc el3_exit
scr_el3 |= SCR_FIEN_BIT;
#endif
+#if !CTX_INCLUDE_PAUTH_REGS
+ /*
+ * If the pointer authentication registers aren't saved during world
+ * switches, the values of the registers can be leaked from the Secure to
+ * the Non-secure world. To prevent this, rather than enabling pointer
+ * authentication everywhere, we only enable it in the Non-secure world.
+ *
+ * If the Secure world wants to use pointer authentication,
+ * CTX_INCLUDE_PAUTH_REGS must be set to 1.
+ */
+ if (security_state == NON_SECURE)
+ scr_el3 |= SCR_API_BIT | SCR_APK_BIT;
+#endif /* !CTX_INCLUDE_PAUTH_REGS */
+
#ifdef IMAGE_BL31
/*
* SCR_EL3.IRQ, SCR_EL3.FIQ: Enable the physical FIQ and IRQ routing as
# Include FP registers in cpu context
CTX_INCLUDE_FPREGS := 0
+# Include pointer authentication (ARMv8.3-PAuth) registers in cpu context. This
+# must be set to 1 if the platform wants to use this feature in the Secure
+# world. It is not needed for the Non-secure world to use the feature.
+CTX_INCLUDE_PAUTH_REGS := 0
+
# Debug build
DEBUG := 0