/*
 * include/asm-i386/irqflags.h
 *
 * IRQ flags handling
 *
 * This file gets included from lowlevel asm headers too, to provide
 * wrapped versions of the local_irq_*() APIs, based on the
 * raw_local_irq_*() functions from the lowlevel headers.
 */
#ifndef _ASM_IRQFLAGS_H
#define _ASM_IRQFLAGS_H

#include <asm/processor-flags.h>

#ifndef __ASSEMBLY__

/*
 * Read the CPU's EFLAGS register via pushfl/popl.  The "=g"
 * constraint lets the compiler pick a register or memory slot
 * for the result.
 */
static inline unsigned long native_save_fl(void)
{
	unsigned long f;

	asm volatile("pushfl ; popl %0":"=g" (f): /* no input */);
	return f;
}

/*
 * Write @f back into EFLAGS via pushl/popfl.  Clobbers "memory"
 * and "cc": popfl can change the interrupt flag and the condition
 * codes, so the compiler must not cache memory across it.
 */
static inline void native_restore_fl(unsigned long f)
{
	asm volatile("pushl %0 ; popfl": /* no output */
		     :"g" (f)
		     :"memory", "cc");
}

/*
 * Disable hardware interrupts on this CPU ("cli").  The "memory"
 * clobber keeps the compiler from moving memory accesses across
 * the barrier.
 */
static inline void native_irq_disable(void)
{
	asm volatile("cli": : :"memory");
}

/* Enable hardware interrupts on this CPU ("sti"); memory barrier. */
static inline void native_irq_enable(void)
{
	asm volatile("sti": : :"memory");
}

/*
 * Enable interrupts and halt in one asm sequence ("sti; hlt") so
 * no interrupt can be taken between the two instructions.
 */
static inline void native_safe_halt(void)
{
	asm volatile("sti; hlt": : :"memory");
}

/* Halt the CPU ("hlt") without touching the interrupt flag. */
static inline void native_halt(void)
{
	asm volatile("hlt": : :"memory");
}
#endif /* __ASSEMBLY__ */

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#ifndef __ASSEMBLY__

/* Return the current EFLAGS value (non-paravirt build). */
static inline unsigned long __raw_local_save_flags(void)
{
	return native_save_fl();
}

/* Restore a previously saved EFLAGS value. */
static inline void raw_local_irq_restore(unsigned long flags)
{
	native_restore_fl(flags);
}

/* Disable interrupts on the local CPU. */
static inline void raw_local_irq_disable(void)
{
	native_irq_disable();
}

/* Enable interrupts on the local CPU. */
static inline void raw_local_irq_enable(void)
{
	native_irq_enable();
}

/*
 * Used in the idle loop; sti takes one instruction cycle
 * to complete:
 */
static inline void raw_safe_halt(void)
{
	native_safe_halt();
}

/*
 * Used when interrupts are already enabled or to
 * shutdown the processor:
 */
static inline void halt(void)
{
	native_halt();
}

/*
 * For spinlocks, etc.: sample the current EFLAGS, then disable
 * interrupts.  The caller hands the returned value back to
 * raw_local_irq_restore() later.
 */
static inline unsigned long __raw_local_irq_save(void)
{
	unsigned long fl;

	fl = __raw_local_save_flags();
	raw_local_irq_disable();
	return fl;
}

#else
/* Non-paravirt case: the asm-visible macros are the bare instructions. */
#define DISABLE_INTERRUPTS(clobbers)	cli
#define ENABLE_INTERRUPTS(clobbers)	sti
#define ENABLE_INTERRUPTS_SYSEXIT	sti; sysexit
#define INTERRUPT_RETURN		iret
#define GET_CR0_INTO_EAX		movl %cr0, %eax
#endif /* __ASSEMBLY__ */
#endif /* CONFIG_PARAVIRT */

#ifndef __ASSEMBLY__
/* Statement-form wrappers used by the generic irqflags code. */
#define raw_local_save_flags(flags) \
	do { (flags) = __raw_local_save_flags(); } while (0)

#define raw_local_irq_save(flags) \
	do { (flags) = __raw_local_irq_save(); } while (0)

static inline int raw_irqs_disabled_flags(unsigned long flags)
|
|
|
|
{
|
2007-05-02 11:27:10 -06:00
|
|
|
return !(flags & X86_EFLAGS_IF);
|
2006-12-06 18:14:07 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Are interrupts currently disabled on this CPU? */
static inline int raw_irqs_disabled(void)
{
	return raw_irqs_disabled_flags(__raw_local_save_flags());
}

#endif /* __ASSEMBLY__ */

/*
 * Do the CPU's IRQ-state tracing from assembly code. We call a
 * C function, so save all the C-clobbered registers:
 */
#ifdef CONFIG_TRACE_IRQFLAGS

# define TRACE_IRQS_ON				\
	pushl %eax;				\
	pushl %ecx;				\
	pushl %edx;				\
	call trace_hardirqs_on;			\
	popl %edx;				\
	popl %ecx;				\
	popl %eax;

# define TRACE_IRQS_OFF				\
	pushl %eax;				\
	pushl %ecx;				\
	pushl %edx;				\
	call trace_hardirqs_off;		\
	popl %edx;				\
	popl %ecx;				\
	popl %eax;

#else
/* Tracing disabled: the markers compile away to nothing. */
# define TRACE_IRQS_ON
# define TRACE_IRQS_OFF
#endif

#ifdef CONFIG_DEBUG_LOCK_ALLOC
/*
 * Notify lockdep on return to user space; saves and restores the
 * C-clobbered registers around the C call.
 */
# define LOCKDEP_SYS_EXIT			\
	pushl %eax;				\
	pushl %ecx;				\
	pushl %edx;				\
	call lockdep_sys_exit;			\
	popl %edx;				\
	popl %ecx;				\
	popl %eax;
#else
# define LOCKDEP_SYS_EXIT
#endif

#endif /* _ASM_IRQFLAGS_H */