
include/asm-x86/xor_32.h: checkpatch cleanups - formatting only

Signed-off-by: Joe Perches <joe@perches.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Joe Perches, 2008-03-23 01:04:02 -07:00; committed by Ingo Molnar
parent d6ae390a0b
commit 8fdf765527
1 changed file with 248 additions and 246 deletions
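
The whole patch is one substitution applied throughout the file: GNU C accepts both the reserved spellings __asm__/__volatile__ and the plain keywords asm/volatile, and checkpatch.pl warns about the former. A minimal sketch of the equivalence (hypothetical macro names, not from the patch):

	/* Both expand to identical code; only the spelling differs.
	 * checkpatch.pl flags the reserved-keyword form in barrier_old(). */
	#define barrier_old()	__asm__ __volatile__("" : : : "memory")
	#define barrier_new()	asm volatile(""	: : : "memory")

The patch also splits the XMMS_SAVE/XMMS_RESTORE macro heads onto their own lines (see the hunks at file lines 531 and 545 below); behaviour is unchanged, hence "formatting only".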


@@ -32,7 +32,7 @@ xor_pII_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2)
 	kernel_fpu_begin();
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 	LD(i, 0) \
@@ -76,7 +76,7 @@ xor_pII_mmx_3(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	kernel_fpu_begin();
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 	LD(i, 0) \
@@ -125,7 +125,7 @@ xor_pII_mmx_4(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	kernel_fpu_begin();
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 	LD(i, 0) \
@@ -186,9 +186,9 @@ xor_pII_mmx_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	   because we modify p4 and p5 there, but we can't mark them
 	   as read/write, otherwise we'd overflow the 10-asm-operands
 	   limit of GCC < 3.1. */
-	__asm__ ("" : "+r" (p4), "+r" (p5));
+	asm("" : "+r" (p4), "+r" (p5));
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 	LD(i, 0) \
@@ -239,7 +239,7 @@ xor_pII_mmx_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	/* p4 and p5 were modified, and now the variables are dead.
 	   Clobber them just to be sure nobody does something stupid
 	   like assuming they have some legal value. */
-	__asm__ ("" : "=r" (p4), "=r" (p5));
+	asm("" : "=r" (p4), "=r" (p5));
 
 	kernel_fpu_end();
 }
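
An editorial note on the empty asm statements these hunks restyle: an asm with a "+r" operand emits no instructions but tells GCC the variable is both read and modified, so the large BLOCK() asm can update p4/p5 behind the compiler's back without listing them as operands, staying under the 10-operand limit of GCC < 3.1. A self-contained sketch of the idiom, with the MMX block replaced by plain C:

	/* Sketch only: the pointer arithmetic stands in for asm that
	 * advances p without declaring it as an output operand. */
	static unsigned long sum_words(unsigned long *p, unsigned long n)
	{
		unsigned long s = 0;

		/* No code is emitted, but GCC must now assume p has been
		 * read and rewritten, so it cannot cache the old value. */
		asm("" : "+r" (p));

		while (n--)
			s += *p++;

		/* p is dead here; clobber it so nothing relies on it. */
		asm("" : "=r" (p));

		return s;
	}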
@@ -259,7 +259,7 @@ xor_p5_mmx_2(unsigned long bytes, unsigned long *p1, unsigned long *p2)
 	kernel_fpu_begin();
 
-	__asm__ __volatile__ (
+	asm volatile(
 	" .align 32 ;\n"
 	" 1: ;\n"
 	" movq (%1), %%mm0 ;\n"
@@ -307,7 +307,7 @@ xor_p5_mmx_3(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	kernel_fpu_begin();
 
-	__asm__ __volatile__ (
+	asm volatile(
 	" .align 32,0x90 ;\n"
 	" 1: ;\n"
 	" movq (%1), %%mm0 ;\n"
@@ -364,7 +364,7 @@ xor_p5_mmx_4(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	kernel_fpu_begin();
 
-	__asm__ __volatile__ (
+	asm volatile(
 	" .align 32,0x90 ;\n"
 	" 1: ;\n"
 	" movq (%1), %%mm0 ;\n"
@@ -436,9 +436,9 @@ xor_p5_mmx_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	   because we modify p4 and p5 there, but we can't mark them
 	   as read/write, otherwise we'd overflow the 10-asm-operands
 	   limit of GCC < 3.1. */
-	__asm__ ("" : "+r" (p4), "+r" (p5));
+	asm("" : "+r" (p4), "+r" (p5));
 
-	__asm__ __volatile__ (
+	asm volatile(
 	" .align 32,0x90 ;\n"
 	" 1: ;\n"
 	" movq (%1), %%mm0 ;\n"
@@ -505,7 +505,7 @@ xor_p5_mmx_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	/* p4 and p5 were modified, and now the variables are dead.
 	   Clobber them just to be sure nobody does something stupid
 	   like assuming they have some legal value. */
-	__asm__ ("" : "=r" (p4), "=r" (p5));
+	asm("" : "=r" (p4), "=r" (p5));
 
 	kernel_fpu_end();
 }
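
Another pattern visible as context in every hunk above: the MMX routines bracket their assembly with kernel_fpu_begin()/kernel_fpu_end(), because kernel code may not touch FPU/MMX registers until the user's FPU state has been saved and preemption disabled. A hedged sketch of the pairing (the include path is the modern one and an assumption; the 2.6-era file got these functions from its own headers):

	#include <asm/fpu/api.h>	/* kernel_fpu_begin()/kernel_fpu_end(); path is an assumption */

	/* XOR one 8-byte quad with MMX, following the diff's pattern:
	 * FPU use is legal only inside the begin/end bracket. */
	static void xor_quad_mmx(unsigned long *p1, unsigned long *p2)
	{
		kernel_fpu_begin();	/* save user FPU/MMX state, disable preemption */
		asm volatile("movq (%0), %%mm0\n\t"
			     "pxor (%1), %%mm0\n\t"
			     "movq %%mm0, (%0)"
			     : : "r" (p1), "r" (p2) : "memory");
		kernel_fpu_end();	/* restore state, re-enable preemption */
	}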
@@ -531,11 +531,12 @@ static struct xor_block_template xor_block_p5_mmx = {
  * Copyright (C) 1999 Zach Brown (with obvious credit due Ingo)
  */
 
-#define XMMS_SAVE do { \
+#define XMMS_SAVE \
+do { \
 	preempt_disable(); \
 	cr0 = read_cr0(); \
 	clts(); \
-	__asm__ __volatile__ ( \
+	asm volatile( \
 		"movups %%xmm0,(%0) ;\n\t" \
 		"movups %%xmm1,0x10(%0) ;\n\t" \
 		"movups %%xmm2,0x20(%0) ;\n\t" \
@@ -545,8 +546,9 @@ static struct xor_block_template xor_block_p5_mmx = {
 	: "memory"); \
 } while (0)
 
-#define XMMS_RESTORE do { \
-	__asm__ __volatile__ ( \
+#define XMMS_RESTORE \
+do { \
+	asm volatile( \
 		"sfence ;\n\t" \
 		"movups (%0),%%xmm0 ;\n\t" \
 		"movups 0x10(%0),%%xmm1 ;\n\t" \
@@ -587,7 +589,7 @@ xor_sse_2(unsigned long bytes, unsigned long *p1, unsigned long *p2)
 	XMMS_SAVE;
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 	LD(i, 0) \
@@ -641,7 +643,7 @@ xor_sse_3(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	XMMS_SAVE;
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 	PF1(i) \
@@ -702,7 +704,7 @@ xor_sse_4(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	XMMS_SAVE;
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 	PF1(i) \
@@ -776,9 +778,9 @@ xor_sse_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	   because we modify p4 and p5 there, but we can't mark them
 	   as read/write, otherwise we'd overflow the 10-asm-operands
 	   limit of GCC < 3.1. */
-	__asm__ ("" : "+r" (p4), "+r" (p5));
+	asm("" : "+r" (p4), "+r" (p5));
 
-	__asm__ __volatile__ (
+	asm volatile(
 #undef BLOCK
 #define BLOCK(i) \
 	PF1(i) \
@@ -843,7 +845,7 @@ xor_sse_5(unsigned long bytes, unsigned long *p1, unsigned long *p2,
 	/* p4 and p5 were modified, and now the variables are dead.
 	   Clobber them just to be sure nobody does something stupid
 	   like assuming they have some legal value. */
-	__asm__ ("" : "=r" (p4), "=r" (p5));
+	asm("" : "=r" (p4), "=r" (p5));
 
 	XMMS_RESTORE;
 }
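
Stepping back from the formatting: every routine touched above implements the same contract, XORing the source buffers into p1 over `bytes` bytes, with the MMX/SSE assembly only changing how many bytes move per loop iteration. A portable-C sketch of the two-source case, in the spirit of the kernel's generic fallback but not code from this file (assumes bytes is a nonzero multiple of the 32-byte block handled per pass on 32-bit):

	static void xor_c_2(unsigned long bytes, unsigned long *p1,
			    unsigned long *p2)
	{
		long lines = bytes / (8 * sizeof(unsigned long));

		do {
			p1[0] ^= p2[0];
			p1[1] ^= p2[1];
			p1[2] ^= p2[2];
			p1[3] ^= p2[3];
			p1[4] ^= p2[4];
			p1[5] ^= p2[5];
			p1[6] ^= p2[6];
			p1[7] ^= p2[7];
			p1 += 8;
			p2 += 8;
		} while (--lines > 0);
	}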