If unsure, say N
+if ARM64_PSEUDO_NMI
+config ARM64_DEBUG_PRIORITY_MASKING
+ bool "Debug interrupt priority masking"
+ help
+ This adds runtime checks to the functions that enable/disable
+ interrupts when priority masking is in use. The checks verify that
+ ICC_PMR_EL1 holds a valid value whenever those functions are called.
+
+ If unsure, say N
+endif
+
config RELOCATABLE
bool
help
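The new option is only offered when ARM64_PSEUDO_NMI is selected, and the checks it adds only fire once the kernel actually uses priority masking (see system_uses_irq_prio_masking() below). An illustrative configuration fragment for a GICv3 system, not taken from the patch:

	CONFIG_ARM64_PSEUDO_NMI=y
	CONFIG_ARM64_DEBUG_PRIORITY_MASKING=y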
cpus_have_const_cap(ARM64_HAS_IRQ_PRIO_MASKING);
}
+static inline bool system_has_prio_mask_debugging(void)
+{
+ return IS_ENABLED(CONFIG_ARM64_DEBUG_PRIORITY_MASKING) &&
+ system_uses_irq_prio_masking();
+}
+
#define ARM64_SSBD_UNKNOWN -1
#define ARM64_SSBD_FORCE_DISABLE 0
#define ARM64_SSBD_KERNEL 1
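A note on the guard: IS_ENABLED() expands to a compile-time constant, so with the option disabled the helper is effectively the following and every check behind it can be discarded by the compiler. A minimal sketch, assuming CONFIG_ARM64_DEBUG_PRIORITY_MASKING=n:

	static inline bool system_has_prio_mask_debugging(void)
	{
		return 0 &&	/* IS_ENABLED() folds to 0 when the option is off */
		       system_uses_irq_prio_masking();
	}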
/* mask/save/unmask/restore all exceptions, including interrupts. */
static inline void local_daif_mask(void)
{
+ WARN_ON(system_has_prio_mask_debugging() &&
+ (read_sysreg_s(SYS_ICC_PMR_EL1) == (GIC_PRIO_IRQOFF |
+ GIC_PRIO_PSR_I_SET)));
+
asm volatile(
"msr daifset, #0xf // local_daif_mask\n"
:
{
bool irq_disabled = flags & PSR_I_BIT;
+ WARN_ON(system_has_prio_mask_debugging() &&
+ !(read_sysreg(daif) & PSR_I_BIT));
+
if (!irq_disabled) {
trace_hardirqs_on();
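The new assertion encodes the calling convention that local_daif_restore() is only reached with PSR.I still set from a matching mask/save. A minimal usage sketch, assuming the usual local_daif_save()/local_daif_restore() pairing (not part of the patch):

	unsigned long flags;

	flags = local_daif_save();	/* masks all of D, A, I, F and returns the old state */
	/* ... section that must not be interrupted by anything ... */
	local_daif_restore(flags);	/* PSR.I is still set here, so the new WARN_ON stays quiet */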
*/
static inline void arch_local_irq_enable(void)
{
+ if (system_has_prio_mask_debugging()) {
+ u32 pmr = read_sysreg_s(SYS_ICC_PMR_EL1);
+
+ WARN_ON_ONCE(pmr != GIC_PRIO_IRQON && pmr != GIC_PRIO_IRQOFF);
+ }
+
asm volatile(ALTERNATIVE(
"msr daifclr, #2 // arch_local_irq_enable\n"
"nop",
static inline void arch_local_irq_disable(void)
{
+ if (system_has_prio_mask_debugging()) {
+ u32 pmr = read_sysreg_s(SYS_ICC_PMR_EL1);
+
+ WARN_ON_ONCE(pmr != GIC_PRIO_IRQON && pmr != GIC_PRIO_IRQOFF);
+ }
+
asm volatile(ALTERNATIVE(
"msr daifset, #2 // arch_local_irq_disable",
__msr_s(SYS_ICC_PMR_EL1, "%0"),
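The disable side checks the same invariant, and local_irq_save()/local_irq_restore() are built on these primitives, so the same two PMR values are all that should ever be observed across the irqflags API. A short usage sketch under that assumption (not part of the patch):

	unsigned long flags;

	local_irq_save(flags);		/* disables IRQs via the path above, records the old state */
	/* ... PMR is only ever observed as GIC_PRIO_IRQON or GIC_PRIO_IRQOFF ... */
	local_irq_restore(flags);	/* re-enables IRQs only if they were enabled before */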