asm volatile("MOV TXMASKI,%0\n" : : "r" (flags) : "memory");
}
-static inline void arch_local_irq_enable(void)
-{
#ifdef CONFIG_SMP
- preempt_disable();
- arch_local_irq_restore(get_trigger_mask());
- preempt_enable_no_resched();
+/* Out of line (SMP): preempt_disable() needs <linux/preempt.h>, which would create a circular include here */
+void arch_local_irq_enable(void);
#else
+static inline void arch_local_irq_enable(void)
+{
arch_local_irq_restore(get_trigger_mask());
-#endif
}
+#endif
#endif /* (__ASSEMBLY__) */
#include <linux/types.h>
#include <linux/init.h>
#include <linux/interrupt.h>
+#include <linux/preempt.h>
#include <linux/ptrace.h>
#include <linux/module.h>
#include <linux/kallsyms.h>
#endif
#ifdef CONFIG_SMP
-unsigned int get_trigger_mask(void)
+static inline unsigned int _get_trigger_mask(void)
{
unsigned long cpu = smp_processor_id();
return per_cpu(trigger_mask, cpu);
}
+unsigned int get_trigger_mask(void)
+{
+ return _get_trigger_mask();
+}
+
static void set_trigger_mask(unsigned int mask)
{
unsigned long cpu = smp_processor_id();
per_cpu(trigger_mask, cpu) = mask;
}
+
+void arch_local_irq_enable(void)
+{
+ preempt_disable();
+ arch_local_irq_restore(_get_trigger_mask());
+ preempt_enable_no_resched();
+}
+EXPORT_SYMBOL(arch_local_irq_enable);
#else
static void set_trigger_mask(unsigned int mask)
{