$(info Branch Protection is an experimental feature)
endif
+ifeq ($(CTX_INCLUDE_MTE_REGS),1)
+    ifneq (${ARCH},aarch64)
+        $(error CTX_INCLUDE_MTE_REGS requires AArch64)
+    else
+        $(info CTX_INCLUDE_MTE_REGS is an experimental feature)
+    endif
+endif
+
################################################################################
# Process platform overrideable behaviour
################################################################################
$(eval $(call assert_boolean,CTX_INCLUDE_AARCH32_REGS))
$(eval $(call assert_boolean,CTX_INCLUDE_FPREGS))
$(eval $(call assert_boolean,CTX_INCLUDE_PAUTH_REGS))
+$(eval $(call assert_boolean,CTX_INCLUDE_MTE_REGS))
$(eval $(call assert_boolean,DEBUG))
$(eval $(call assert_boolean,DYN_DISABLE_AUTH))
$(eval $(call assert_boolean,EL3_EXCEPTION_HANDLING))
$(eval $(call add_define,CTX_INCLUDE_FPREGS))
$(eval $(call add_define,CTX_INCLUDE_PAUTH_REGS))
$(eval $(call add_define,EL3_EXCEPTION_HANDLING))
+$(eval $(call add_define,CTX_INCLUDE_MTE_REGS))
$(eval $(call add_define,ENABLE_AMU))
$(eval $(call add_define,ENABLE_ASSERTIONS))
$(eval $(call add_define,ENABLE_BTI))
*/
        tsp_get_magic(service_args);
+#if CTX_INCLUDE_MTE_REGS
+        /*
+         * Write a dummy value to an MTE register, to simulate usage in the
+         * secure world
+         */
+        write_gcr_el1(0x99);
+#endif
+
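As a quick sanity check of the save/restore path, a normal-world test could confirm that this dummy secure-world write no longer leaks across the world switch. A minimal sketch, assuming a hypothetical smc_to_secure_world() helper that reaches the TSP code above:

#include <assert.h>
#include <stdint.h>

extern uint64_t read_gcr_el1(void);        /* accessor from the sysreg helpers */
extern void write_gcr_el1(uint64_t v);
extern void smc_to_secure_world(void);     /* assumed world-switch helper */

static void check_gcr_el1_preserved(void)
{
        write_gcr_el1(0x1);                /* normal-world value */
        smc_to_secure_world();             /* TSP writes 0x99 to GCR_EL1 */
        /* With CTX_INCLUDE_MTE_REGS=1, EL3 restores the value on return */
        assert(read_gcr_el1() == 0x1);
}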
        /* Determine the function to perform based on the function ID */
        switch (TSP_BARE_FID(func)) {
        case TSP_ADD:
******************************************************************************/
#define SSBS S3_3_C4_C2_6
+/*******************************************************************************
+ * Armv8.5 - Memory Tagging Extension Registers
+ ******************************************************************************/
+#define TFSRE0_EL1 S3_0_C5_C6_1
+#define TFSR_EL1 S3_0_C5_C6_0
+#define RGSR_EL1 S3_0_C1_C0_5
+#define GCR_EL1 S3_0_C1_C0_6
+
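Each S3_<op1>_C<CRn>_C<CRm>_<op2> token spells out the MRS/MSR operand fields (with op0 = 3), so the registers remain accessible even when the assembler does not know them by name: TFSR_EL1 (S3_0_C5_C6_0), for instance, decodes as op0 = 3, op1 = 0, CRn = 5, CRm = 6, op2 = 0. A minimal sketch of a raw read by encoding (the helper name is illustrative):

#include <stdint.h>

static inline uint64_t read_tfsr_el1_by_encoding(void)
{
        uint64_t val;

        /* GCC and Clang accept the raw S3_... encoding as an mrs operand */
        __asm__ volatile("mrs %0, S3_0_C5_C6_0" : "=r"(val));
        return val;
}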
#endif /* ARCH_H */
DEFINE_RENAME_SYSREG_RW_FUNCS(apiakeyhi_el1, APIAKeyHi_EL1)
DEFINE_RENAME_SYSREG_RW_FUNCS(apiakeylo_el1, APIAKeyLo_EL1)
+/* Armv8.5 MTE Registers */
+DEFINE_RENAME_SYSREG_RW_FUNCS(tfsre0_el1, TFSRE0_EL1)
+DEFINE_RENAME_SYSREG_RW_FUNCS(tfsr_el1, TFSR_EL1)
+DEFINE_RENAME_SYSREG_RW_FUNCS(rgsr_el1, RGSR_EL1)
+DEFINE_RENAME_SYSREG_RW_FUNCS(gcr_el1, GCR_EL1)
+
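Each DEFINE_RENAME_SYSREG_RW_FUNCS() line expands into a read_<name>()/write_<name>() accessor pair, which is what the TSP code above relies on for write_gcr_el1(). An illustrative use (the helper name is mine; GCR_EL1.Exclude occupies bits [15:0]):

/* Exclude every tag from IRG random-tag generation; illustrative only */
static inline void exclude_all_irg_tags(void)
{
        write_gcr_el1(read_gcr_el1() | 0xffffULL);
}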
#define IS_IN_EL(x) \
        (GET_EL(read_CurrentEl()) == MODE_EL##x)
#define CTX_TIMER_SYSREGS_END CTX_AARCH32_END
#endif /* NS_TIMER_SWITCH */
+#if CTX_INCLUDE_MTE_REGS
+#define CTX_TFSRE0_EL1 (CTX_TIMER_SYSREGS_END + U(0x0))
+#define CTX_TFSR_EL1 (CTX_TIMER_SYSREGS_END + U(0x8))
+#define CTX_RGSR_EL1 (CTX_TIMER_SYSREGS_END + U(0x10))
+#define CTX_GCR_EL1 (CTX_TIMER_SYSREGS_END + U(0x18))
+
+/* Align to the next 16 byte boundary */
+#define CTX_MTE_REGS_END (CTX_TIMER_SYSREGS_END + U(0x20))
+#else
+#define CTX_MTE_REGS_END CTX_TIMER_SYSREGS_END
+#endif /* CTX_INCLUDE_MTE_REGS */
+
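Each of the four registers occupies one 8-byte slot, so the block spans 0x20 bytes and CTX_MTE_REGS_END lands back on a 16-byte boundary, as the stp/ldp pairs in the context save/restore code expect. A compile-time sketch of those invariants (literals mirror the offsets above):

_Static_assert(4 * 8 == 0x20, "four 8-byte MTE register slots fill the block");
_Static_assert(0x20 % 16 == 0, "CTX_MTE_REGS_END keeps 16-byte alignment");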
/*
* End of system registers.
*/
-#define CTX_SYSREGS_END CTX_TIMER_SYSREGS_END
+#define CTX_SYSREGS_END CTX_MTE_REGS_END
/*******************************************************************************
* Constants that allow assembler code to access members of and the 'fp_regs'
        str     x14, [x0, #CTX_CNTKCTL_EL1]
#endif
+        /* Save MTE system registers if the build is configured to do so */
+#if CTX_INCLUDE_MTE_REGS
+        mrs     x15, TFSRE0_EL1
+        mrs     x16, TFSR_EL1
+        stp     x15, x16, [x0, #CTX_TFSRE0_EL1]
+
+        mrs     x9, RGSR_EL1
+        mrs     x10, GCR_EL1
+        stp     x9, x10, [x0, #CTX_RGSR_EL1]
+#endif
+
        ret
endfunc el1_sysregs_context_save
        ldr     x14, [x0, #CTX_CNTKCTL_EL1]
        msr     cntkctl_el1, x14
#endif
+        /* Restore MTE system registers if the build is configured to do so */
+#if CTX_INCLUDE_MTE_REGS
+        ldp     x11, x12, [x0, #CTX_TFSRE0_EL1]
+        msr     TFSRE0_EL1, x11
+        msr     TFSR_EL1, x12
+
+        ldp     x13, x14, [x0, #CTX_RGSR_EL1]
+        msr     RGSR_EL1, x13
+        msr     GCR_EL1, x14
+#endif
        /* No explicit ISB required here as ERET covers it */
        ret
        scr_el3 |= SCR_API_BIT | SCR_APK_BIT;
#endif /* !CTX_INCLUDE_PAUTH_REGS */
-        unsigned int mte = get_armv8_5_mte_support();
-
        /*
-         * Enable MTE support unilaterally for normal world if the CPU supports
-         * it.
+         * Enable MTE support. Support is enabled unilaterally for the normal
+         * world, and only for the secure world when CTX_INCLUDE_MTE_REGS is
+         * set.
         */
-        if (mte != MTE_UNIMPLEMENTED) {
-                if (security_state == NON_SECURE) {
-                        scr_el3 |= SCR_ATA_BIT;
-                }
+        unsigned int mte = get_armv8_5_mte_support();
+#if CTX_INCLUDE_MTE_REGS
+        assert(mte == MTE_IMPLEMENTED_ELX);
+        scr_el3 |= SCR_ATA_BIT;
+#else
+        if (mte == MTE_IMPLEMENTED_EL0) {
+                /*
+                 * Can enable MTE across both worlds as no MTE registers are
+                 * used
+                 */
+                scr_el3 |= SCR_ATA_BIT;
+        } else if (mte == MTE_IMPLEMENTED_ELX && security_state == NON_SECURE) {
+                /*
+                 * Can only enable MTE in Non-Secure world without register
+                 * saving
+                 */
+                scr_el3 |= SCR_ATA_BIT;
        }
+#endif
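For context, the probe behind this decision reads the ID_AA64PFR1_EL1.MTE field (bits [11:8]): 0 means MTE is unimplemented, 1 means instruction-only MTE at EL0, and 2 means full MTE at all exception levels. A minimal sketch of such a probe, under the assumption that the real helper works the same way:

#include <stdint.h>

#define MTE_UNIMPLEMENTED       0U      /* no MTE */
#define MTE_IMPLEMENTED_EL0     1U      /* instruction-only MTE at EL0 */
#define MTE_IMPLEMENTED_ELX     2U      /* full MTE at all ELs */

static inline unsigned int mte_support_sketch(void)
{
        uint64_t pfr1;

        /* ID_AA64PFR1_EL1.MTE lives in bits [11:8] */
        __asm__ volatile("mrs %0, id_aa64pfr1_el1" : "=r"(pfr1));
        return (unsigned int)((pfr1 >> 8) & 0xfU);
}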
#ifdef IMAGE_BL31
/*
override ENABLE_SPE_FOR_LOWER_ELS := 0
endif
+# Include Memory Tagging Extension registers in cpu context. This must be set
+# to 1 if the platform wants to use this feature in the Secure world and MTE is
+# enabled at ELX.
+CTX_INCLUDE_MTE_REGS := 0
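A minimal usage sketch (the platform name is a placeholder):

        make PLAT=<aarch64-platform> CTX_INCLUDE_MTE_REGS=1 all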
+
ENABLE_AMU := 0
# By default, enable Scalable Vector Extension if implemented for Non-secure