[PATCH] x86_64: Small assembly improvements

Save a byte here and there.  Ultimately useless, but these things always
catch my eye when reading the code, so just fix them for now.

Also I already got at least one patch fixing one of them, which gives a
good excuse.
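
For the curious, a rough sketch of where the bytes go (encodings as I
read them from the AMD64 manual; the patch doesn't depend on this).
Writing a 32-bit register zero-extends into the full 64-bit register,
so the 32-bit xor is equivalent but drops the REX.W prefix:

	48 31 c0	xorq %rax,%rax		# 3 bytes
	31 c0		xorl %eax,%eax		# 2 bytes, %rax still fully cleared

Likewise incl/decl on a memory operand (ff /0, ff /1) drop the one-byte
immediate that addl/subl $1 (83 /0 ib, 83 /5 ib) carry.  inc/dec don't
touch CF, but nothing here consumes the flags afterwards.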

Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
diff --git a/arch/x86_64/kernel/entry.S b/arch/x86_64/kernel/entry.S
index 2881749..0696e02 100644
--- a/arch/x86_64/kernel/entry.S
+++ b/arch/x86_64/kernel/entry.S
@@ -76,7 +76,7 @@
 
 	.macro FAKE_STACK_FRAME child_rip
 	/* push in order ss, rsp, eflags, cs, rip */
-	xorq %rax, %rax
+	xorl %eax, %eax
 	pushq %rax /* ss */
 	CFI_ADJUST_CFA_OFFSET	8
 	pushq %rax /* rsp */
@@ -423,7 +423,7 @@
 	testl $3,CS(%rdi)
 	je 1f
 	swapgs	
-1:	addl $1,%gs:pda_irqcount	# RED-PEN should check preempt count
+1:	incl	%gs:pda_irqcount	# RED-PEN should check preempt count
 	movq %gs:pda_irqstackptr,%rax
 	cmoveq %rax,%rsp							
 	pushq %rdi			# save old stack	
@@ -436,7 +436,7 @@
 ret_from_intr:		
 	popq  %rdi
 	cli	
-	subl $1,%gs:pda_irqcount
+	decl %gs:pda_irqcount
 #ifdef CONFIG_DEBUG_INFO
 	movq RBP(%rdi),%rbp
 #endif
@@ -494,7 +494,7 @@
 	sti
 	SAVE_REST
 	movq $-1,ORIG_RAX(%rsp) 			
-	xorq %rsi,%rsi		# oldset
+	xorl %esi,%esi		# oldset
 	movq %rsp,%rdi		# &pt_regs
 	call do_notify_resume
 	RESTORE_REST
@@ -752,7 +752,7 @@
 	movq %rsi, %rdi
 	call *%rax
 	# exit
-	xorq %rdi, %rdi
+	xorl %edi, %edi
 	call do_exit
 
 /*
diff --git a/arch/x86_64/kernel/head.S b/arch/x86_64/kernel/head.S
index 8d765aa..98ff5eb 100644
--- a/arch/x86_64/kernel/head.S
+++ b/arch/x86_64/kernel/head.S
@@ -137,14 +137,14 @@
 	wrmsr
 
 	/* Setup cr0 */
-	xorq	%rax, %rax
-	btsq	$31, %rax			/* Enable paging */
-	btsq	$0, %rax			/* Enable protected mode */
-	btsq	$1, %rax			/* Enable MP */
-	btsq	$4, %rax			/* Enable ET */
-	btsq	$5, %rax			/* Enable NE */
-	btsq	$16, %rax			/* Enable WP */
-	btsq	$18, %rax			/* Enable AM */
+#define CR0_PM				1		/* protected mode */
+#define CR0_MP				(1<<1)
+#define CR0_ET				(1<<4)
+#define CR0_NE				(1<<5)
+#define CR0_WP				(1<<16)
+#define CR0_AM				(1<<18)
+#define CR0_PAGING 			(1<<31)
+	movl $CR0_PM|CR0_MP|CR0_ET|CR0_NE|CR0_WP|CR0_AM|CR0_PAGING,%eax
 	/* Make changes effective */
 	movq	%rax, %cr0
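
As a sanity check on the head.S change (my own arithmetic, not part of
the patch): the OR of the CR0_* defines works out to

	# 0x1 PM | 0x2 MP | 0x10 ET | 0x20 NE
	# | 0x10000 WP | 0x40000 AM | 0x80000000 paging
	movl	$0x80050033,%eax	# b8 33 00 05 80 -- 5 bytes

so the single 5-byte movl replaces the old 3-byte xorq plus seven 5-byte
btsq instructions, around 33 bytes saved.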