[ARM] 4133/1: Add ISB after changes to CP15 registers
According to the ARM ARM, changes to the CP15 registers are only guaranteed to be visible after an Instruction Synchronization Barrier (ISB). This patch adds an ISB at the end of the set_cr() and set_copro_access() functions, and also moves them further down in the file, below the isb() macro definition.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
parent e6a5d66f58
commit 56660faf9e

1 changed file with 40 additions and 38 deletions: include/asm-arm/system.h
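For context: the set_cr() and set_copro_access() helpers in the diff below now end with isb(), which is why they had to move below the isb() macro definition in the same header. A minimal sketch of what such an isb() definition can look like on ARMv6, where the barrier is issued through a CP15 operation rather than a dedicated instruction (the exact in-tree definition may differ, so treat this as an illustration):

    /* Illustrative sketch of an ISB on ARMv6: writing to CP15 c7, c5, 4
     * ("flush prefetch buffer") discards already-fetched instructions,
     * forcing a refetch after the CP15 change has taken effect. The
     * "memory" clobber keeps the compiler from reordering memory
     * accesses across the barrier. */
    #define isb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
                                        : : "r" (0) : "memory")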
@@ -140,44 +140,6 @@ static inline int cpu_is_xsc3(void)
 #define cpu_is_xscale() 1
 #endif
 
-extern unsigned long cr_no_alignment; /* defined in entry-armv.S */
-extern unsigned long cr_alignment;    /* defined in entry-armv.S */
-
-static inline unsigned int get_cr(void)
-{
-	unsigned int val;
-	asm("mrc p15, 0, %0, c1, c0, 0 @ get CR" : "=r" (val) : : "cc");
-	return val;
-}
-
-static inline void set_cr(unsigned int val)
-{
-	asm volatile("mcr p15, 0, %0, c1, c0, 0 @ set CR"
-	  : : "r" (val) : "cc");
-}
-
-#ifndef CONFIG_SMP
-extern void adjust_cr(unsigned long mask, unsigned long set);
-#endif
-
-#define CPACC_FULL(n)    (3 << (n * 2))
-#define CPACC_SVC(n)     (1 << (n * 2))
-#define CPACC_DISABLE(n) (0 << (n * 2))
-
-static inline unsigned int get_copro_access(void)
-{
-	unsigned int val;
-	asm("mrc p15, 0, %0, c1, c0, 2 @ get copro access"
-	  : "=r" (val) : : "cc");
-	return val;
-}
-
-static inline void set_copro_access(unsigned int val)
-{
-	asm volatile("mcr p15, 0, %0, c1, c0, 2 @ set copro access"
-	  : : "r" (val) : "cc");
-}
-
 #define UDBG_UNDEFINED (1 << 0)
 #define UDBG_SYSCALL   (1 << 1)
 #define UDBG_BADABORT  (1 << 2)
@@ -212,6 +174,46 @@ extern unsigned int user_debug;
 #define set_mb(var, value) do { var = value; mb(); } while (0)
 #define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");
 
+extern unsigned long cr_no_alignment; /* defined in entry-armv.S */
+extern unsigned long cr_alignment;    /* defined in entry-armv.S */
+
+static inline unsigned int get_cr(void)
+{
+	unsigned int val;
+	asm("mrc p15, 0, %0, c1, c0, 0 @ get CR" : "=r" (val) : : "cc");
+	return val;
+}
+
+static inline void set_cr(unsigned int val)
+{
+	asm volatile("mcr p15, 0, %0, c1, c0, 0 @ set CR"
+	  : : "r" (val) : "cc");
+	isb();
+}
+
+#ifndef CONFIG_SMP
+extern void adjust_cr(unsigned long mask, unsigned long set);
+#endif
+
+#define CPACC_FULL(n)    (3 << (n * 2))
+#define CPACC_SVC(n)     (1 << (n * 2))
+#define CPACC_DISABLE(n) (0 << (n * 2))
+
+static inline unsigned int get_copro_access(void)
+{
+	unsigned int val;
+	asm("mrc p15, 0, %0, c1, c0, 2 @ get copro access"
+	  : "=r" (val) : : "cc");
+	return val;
+}
+
+static inline void set_copro_access(unsigned int val)
+{
+	asm volatile("mcr p15, 0, %0, c1, c0, 2 @ set copro access"
+	  : : "r" (val) : "cc");
+	isb();
+}
+
 /*
  * switch_mm() may do a full cache flush over the context switch,
  * so enable interrupts over the context switch to avoid high
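With the barrier folded into the helpers, call sites need no explicit synchronization of their own. A hypothetical example (coprocessor numbers 10 and 11 are the standard VFP assignments; the variable name is illustrative, not from this patch):

    /* Grant full access to coprocessors 10 and 11 (VFP). The isb() inside
     * set_copro_access() guarantees the new access permissions are visible
     * before the next instruction executes. */
    unsigned int access = get_copro_access();
    set_copro_access(access | CPACC_FULL(10) | CPACC_FULL(11));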