.globl el3_exit
.globl get_exception_stack
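+ /* -----------------------------------------------------
+ * Save x18 - x29 and sp_el0 into the gp_regs context
+ * area addressed by the current stack pointer. x18 is
+ * reused as a scratch register to capture sp_el0 once
+ * its own value has been saved.
+ * -----------------------------------------------------
+ */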
+ .macro save_x18_to_x29_sp_el0
+ stp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
+ stp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
+ stp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
+ stp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
+ stp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
+ stp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
+ mrs x18, sp_el0
+ str x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
+ .endm
+
.section .vectors, "ax"; .align 11
.align 7
stp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
stp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
+ /* Save the rest of the gpregs and sp_el0 */
+ save_x18_to_x29_sp_el0
+
mov x5, xzr
mov x6, sp
adr x14, rt_svc_descs_indices
ldrb w15, [x14, x16]
- /* Save x18 and SP_EL0 */
- mrs x17, sp_el0
- stp x18, x17, [x6, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
-
/* -----------------------------------------------------
* Restore the saved C runtime stack value which will
* become the new SP_EL0 i.e. EL3 runtime stack. It was
msr elr_el3, x17
/* Restore saved general purpose registers and return */
- bl restore_scratch_registers
- ldp x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+ bl restore_gp_registers
+ ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
eret
smc_unknown:
* content). Either way, we aren't leaking any secure information
* through them
*/
- bl restore_scratch_registers_callee
+ bl restore_gp_registers_callee
smc_prohibited:
- ldp x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+ ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
mov w0, #SMC_UNK
eret
/* -----------------------------------------------------
* The following functions are used to save and restore
- * all the caller saved registers as per the aapcs_64.
+ * all the general purpose registers. Ideally we would
+ * only save and restore the callee-saved registers when
+ * a world switch occurs, but that type of implementation
+ * is more complex. So currently we always save and
+ * restore these registers on entry to and exit from EL3.
* These are not macros to ensure their invocation fits
* within the 32 instructions per exception vector.
* -----------------------------------------------------
*/
-func save_scratch_registers
+func save_gp_registers
stp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
stp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
stp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
stp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
stp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
stp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
- mrs x17, sp_el0
- stp x18, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
+ save_x18_to_x29_sp_el0
ret
-func restore_scratch_registers
+func restore_gp_registers
ldp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
-restore_scratch_registers_callee:
- ldp x18, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
+restore_gp_registers_callee:
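+ /*
+ * Stage the saved sp_el0 value in x17; sp_el0 is written further
+ * down and x16/x17 are only reloaded after that.
+ */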
+ ldr x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
ldp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
ldp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
ldp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
ldp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
ldp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
-
msr sp_el0, x17
ldp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
+ ldp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
+ ldp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
+ ldp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
+ ldp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
+ ldp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
+ ldp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
ret
/* -----------------------------------------------------
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
-
#include <arch.h>
#include <context.h>
-
- /* ---------------------------------------------
- * Zero out the callee saved register to prevent
- * leakage of secure state into the normal world
- * during the first ERET after a cold/warm boot.
- * ---------------------------------------------
- */
- .macro zero_callee_saved_regs
- mov x19, xzr
- mov x20, xzr
- mov x21, xzr
- mov x22, xzr
- mov x23, xzr
- mov x24, xzr
- mov x25, xzr
- mov x26, xzr
- mov x27, xzr
- mov x28, xzr
- mov x29, xzr
- .endm
-
.macro switch_to_exception_stack reg1 reg2
mov \reg1 , sp
ldr \reg2, [\reg1, #CTX_EL3STATE_OFFSET + CTX_EXCEPTION_SP]
* -----------------------------------------------------
*/
.macro handle_sync_exception
- stp x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+ str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
mrs x30, esr_el3
ubfx x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
* not expect any such exceptions.
* -----------------------------------------------------
*/
- bl save_scratch_registers
+ bl save_gp_registers
switch_to_exception_stack x0 x1
/* Save the core_context pointer for handled faults */
ldp x0, xzr, [sp], #0x10
mov sp, x0
- bl restore_scratch_registers
- ldp x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+ bl restore_gp_registers
+ ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
eret
.endm
* -----------------------------------------------------
*/
.macro handle_async_exception type
- stp x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
- bl save_scratch_registers
+ str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+ bl save_gp_registers
switch_to_exception_stack x0 x1
/* Save the core_context pointer */
ldp x0, xzr, [sp], #0x10
mov sp, x0
- bl restore_scratch_registers
- ldp x30, xzr, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+ bl restore_gp_registers
+ ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
.endm
#define CTX_GPREG_X16 0x80
#define CTX_GPREG_X17 0x88
#define CTX_GPREG_X18 0x90
-#define CTX_GPREG_SP_EL0 0x98
-#define CTX_GPREG_LR 0xa0
-/* Unused space to allow registers to be stored as pairs */
-#define CTX_GPREGS_END 0xb0
+#define CTX_GPREG_X19 0x98
+#define CTX_GPREG_X20 0xa0
+#define CTX_GPREG_X21 0xa8
+#define CTX_GPREG_X22 0xb0
+#define CTX_GPREG_X23 0xb8
+#define CTX_GPREG_X24 0xc0
+#define CTX_GPREG_X25 0xc8
+#define CTX_GPREG_X26 0xd0
+#define CTX_GPREG_X27 0xd8
+#define CTX_GPREG_X28 0xe0
+#define CTX_GPREG_X29 0xe8
+#define CTX_GPREG_LR 0xf0
+#define CTX_GPREG_SP_EL0 0xf8
+#define CTX_GPREGS_END 0x100
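+/*
+ * The gp_regs context thus spans 32 doubleword slots: x0 - x29, lr and
+ * sp_el0, i.e. 0x100 bytes in total.
+ */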
/*******************************************************************************
* Constants that allow assembler code to access members of and the 'el3_state'
#define CTX_EL3STATE_ALL (CTX_EL3STATE_END >> DWORD_SHIFT)
/*
- * AArch64 general purpose register context structure. Only x0-x18, lr
- * are saved as the compiler is expected to preserve the remaining
+ * AArch64 general purpose register context structure. Usually x0-x18
+ * and lr are saved, as the compiler is expected to preserve the remaining
* callee saved registers if used by the C runtime and the assembler
- * does not touch the remaining.
+ * does not touch the remaining. But in the case of a world switch
+ * during exception handling, the callee-saved registers need to be
+ * saved as well.
*/
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);
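+/*
+ * Usage sketch: assuming the read_ctx_reg() accessor defined in this
+ * header, a register saved in a gp_regs context can be read back using
+ * the byte offsets above, e.g.
+ *
+ *	uint64_t saved_lr = read_ctx_reg(gpregs_ctx, CTX_GPREG_LR);
+ *	uint64_t saved_sp = read_ctx_reg(gpregs_ctx, CTX_GPREG_SP_EL0);
+ *
+ * where gpregs_ctx points to the gp_regs structure saved on entry to EL3.
+ */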