x86: Clean up the loadsegment() macro

Make it readable in the source too, not just in the assembly output.
No change in functionality.

Cc: Brian Gerst <brgerst@gmail.com>
LKML-Reference: <1259176706-5908-1-git-send-email-brgerst@gmail.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
commit 64b028b226
parent 79b0379cee
Ingo Molnar  2009-11-26 10:37:55 +01:00
1 changed file with 15 additions and 13 deletions

@@ -155,19 +155,21 @@ extern void native_load_gs_index(unsigned);
  * Load a segment. Fall back on loading the zero
  * segment if something goes wrong..
  */
-#define loadsegment(seg, value)                 \
-do {                                            \
-        unsigned short __val = value;           \
-        asm volatile("\n"                       \
-                     "1:\t"                     \
-                     "movl %k0,%%" #seg "\n"    \
-                     ".section .fixup,\"ax\"\n" \
-                     "2:\t"                     \
-                     "xorl %k0,%k0\n\t"         \
-                     "jmp 1b\n"                 \
-                     ".previous\n"              \
-                     _ASM_EXTABLE(1b, 2b)       \
-                     : "+r" (__val) : : "memory"); \
+#define loadsegment(seg, value)                                 \
+do {                                                            \
+        unsigned short __val = (value);                         \
+                                                                \
+        asm volatile("                                  \n"     \
+                     "1:        movl %k0,%%" #seg "     \n"     \
+                                                                \
+                     ".section .fixup,\"ax\"            \n"     \
+                     "2:        xorl %k0,%k0            \n"     \
+                     "          jmp 1b                  \n"     \
+                     ".previous                         \n"     \
+                                                                \
+                     _ASM_EXTABLE(1b, 2b)                       \
+                                                                \
+                     : "+r" (__val) : : "memory");              \
 } while (0)
 
 /*
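
For illustration only (not part of this commit): a minimal sketch of how a caller might use the macro. The function name and the selector argument below are hypothetical; only loadsegment() itself comes from the patch above. If the movl at label 1 faults on a bad selector, the exception table transfers control to the .fixup code at label 2, which zeroes the register and jumps back, so the segment register ends up holding the null selector rather than the fault going unhandled.

        /* Hypothetical caller, for illustration only: */
        static void example_set_user_fs(unsigned short sel)
        {
                /*
                 * A faulting selector load is patched up by the .fixup
                 * path: __val is zeroed and the movl is retried, so %fs
                 * falls back to the null selector.
                 */
                loadsegment(fs, sel);
        }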