include/asm-x86/xor_64.h: checkpatch cleanups - formatting only

Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Author:    Joe Perches <joe@perches.com>  2008-03-23 01:04:03 -07:00
Committer: Ingo Molnar <mingo@elte.hu>
parent 8fdf765527
commit 687c805409

--- a/include/asm-x86/xor_64.h
+++ b/include/asm-x86/xor_64.h
@@ -31,11 +31,14 @@
  * no advantages to be gotten from x86-64 here anyways.
  */
 
-typedef struct { unsigned long a,b; } __attribute__((aligned(16))) xmm_store_t;
+typedef struct {
+	unsigned long a, b;
+} __attribute__((aligned(16))) xmm_store_t;
 
 /* Doesn't use gcc to save the XMM registers, because there is no easy way to
    tell it to do a clts before the register saving. */
-#define XMMS_SAVE do { \
+#define XMMS_SAVE \
+do { \
 	preempt_disable(); \
 	asm volatile( \
 		"movq %%cr0,%0 ;\n\t" \
@@ -49,7 +52,8 @@ typedef struct { unsigned long a,b; } __attribute__((aligned(16))) xmm_store_t;
 		: "memory"); \
 } while (0)
 
-#define XMMS_RESTORE do { \
+#define XMMS_RESTORE \
+do { \
 	asm volatile( \
 		"sfence ;\n\t" \
 		"movups (%1),%%xmm0 ;\n\t" \
@@ -141,7 +145,7 @@ xor_sse_3(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 
 	XMMS_SAVE;
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 		PF1(i) \
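
[Editor's note: each of the asm hunks touches the same construct: BLOCK is
re-#defined immediately before the asm statement, and its expansion is pasted
into the assembler template as adjacent string literals. A hypothetical
reduced illustration of the technique (LD/XO1/ST here are simplified stand-ins
for the file's real helper macros):

	#define LD(i)	"	movaps " #i "*16(%[p1]), %%xmm0	;\n"
	#define XO1(i)	"	xorps " #i "*16(%[p2]), %%xmm0	;\n"
	#define ST(i)	"	movaps %%xmm0, " #i "*16(%[p1])	;\n"

	#undef BLOCK
	#define BLOCK(i)	LD(i) XO1(i) ST(i)

	/* XORs two 32-byte, 16-byte-aligned buffers (hypothetical reduction) */
	static void xor_two_blocks(unsigned long *p1, const unsigned long *p2)
	{
		asm volatile(
			BLOCK(0)	/* pastes three instruction strings */
			BLOCK(1)
			:
			: [p1] "r" (p1), [p2] "r" (p2)
			: "memory", "xmm0");
	}
]
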
@@ -200,7 +204,7 @@ xor_sse_4(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 
 	XMMS_SAVE;
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 		PF1(i) \
@@ -267,7 +271,7 @@ xor_sse_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 
 	XMMS_SAVE;
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 		PF1(i) \
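
[Editor's note: the three __asm__ __volatile__ -> asm volatile hunks are pure
spelling changes: gcc treats the two forms identically, and checkpatch flags
the underscored spelling. A hypothetical standalone illustration, not from
this file:

	/* old style: compiles, but checkpatch complains */
	static inline unsigned long long rdtsc_old_style(void)
	{
		unsigned int lo, hi;
		__asm__ __volatile__("rdtsc" : "=a" (lo), "=d" (hi));
		return ((unsigned long long)hi << 32) | lo;
	}

	/* preferred style: identical code generation */
	static inline unsigned long long rdtsc_new_style(void)
	{
		unsigned int lo, hi;
		asm volatile("rdtsc" : "=a" (lo), "=d" (hi));
		return ((unsigned long long)hi << 32) | lo;
	}
]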