summaryrefslogtreecommitdiffstats
path: root/arch/x86/boot/pmjump.S
diff options
context:
space:
mode:
author    H. Peter Anvin <hpa@zytor.com>    2008-01-30 13:33:01 +0100
committer Ingo Molnar <mingo@elte.hu>    2008-01-30 13:33:01 +0100
commit   c4d9ba6da9f050ebb7e0d70769e3dca0fd45334f (patch)
tree     03c771875a9cf2cc2026066f8a686cf53f0739f4 /arch/x86/boot/pmjump.S
parent   x86: <asm/segment.h>: boot GDT entries are 32/64-independent (diff)
download linux-c4d9ba6da9f050ebb7e0d70769e3dca0fd45334f.tar.xz
         linux-c4d9ba6da9f050ebb7e0d70769e3dca0fd45334f.zip
x86 setup: make PM transition more paranoid; cleanup 32-bit entry
Make the transition to protected mode more paranoid by having back-to-back near jump (to synchronize the 386/486 prefetch queue) and far jump (to set up the code segment.) While we're at it, zero as many registers as practical (for future expandability of the 32-bit entry interface) and enter 32-bit mode with a valid stack. Note that the 32-bit code cannot rely on this stack, or we'll break all other existing users of the 32-bit entrypoint, but it may make debugging hacks easier to write. Signed-off-by: H. Peter Anvin <hpa@zytor.com> Signed-off-by: Ingo Molnar <mingo@elte.hu> Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Diffstat (limited to 'arch/x86/boot/pmjump.S')
-rw-r--r--  arch/x86/boot/pmjump.S | 44
1 file changed, 32 insertions(+), 12 deletions(-)
diff --git a/arch/x86/boot/pmjump.S b/arch/x86/boot/pmjump.S
index fa6bed1fac14..ef0da1f2c7fd 100644
--- a/arch/x86/boot/pmjump.S
+++ b/arch/x86/boot/pmjump.S
@@ -29,12 +29,13 @@
*/
protected_mode_jump:
movl %edx, %esi # Pointer to boot_params table
- movl %eax, 2f # Patch ljmpl instruction
+
+ xorl %ebx, %ebx
+ movw %cs, %bx
+ shll $4, %ebx
+ addl %ebx, 2f
movw $__BOOT_DS, %cx
- xorl %ebx, %ebx # Per the 32-bit boot protocol
- xorl %ebp, %ebp # Per the 32-bit boot protocol
- xorl %edi, %edi # Per the 32-bit boot protocol
movl %cr0, %edx
orb $1, %dl # Protected mode (PE) bit
@@ -42,15 +43,34 @@ protected_mode_jump:
jmp 1f # Short jump to serialize on 386/486
1:
- movw %cx, %ds
- movw %cx, %es
- movw %cx, %fs
- movw %cx, %gs
- movw %cx, %ss
-
- # Jump to the 32-bit entrypoint
+ # Transition to 32-bit mode
.byte 0x66, 0xea # ljmpl opcode
-2: .long 0 # offset
+2: .long in_pm32 # offset
.word __BOOT_CS # segment
.size protected_mode_jump, .-protected_mode_jump
+
+ .code32
+ .type in_pm32, @function
+in_pm32:
+ # Set up data segments for flat 32-bit mode
+ movl %ecx, %ds
+ movl %ecx, %es
+ movl %ecx, %fs
+ movl %ecx, %gs
+ movl %ecx, %ss
+ # The 32-bit code sets up its own stack, but this way we do have
+ # a valid stack if some debugging hack wants to use it.
+ addl %ebx, %esp
+
+ # Clear registers to allow for future extensions to the
+ # 32-bit boot protocol
+ xorl %ecx, %ecx
+ xorl %edx, %edx
+ xorl %ebx, %ebx
+ xorl %ebp, %ebp
+ xorl %edi, %edi
+
+ jmpl *%eax # Jump to the 32-bit entrypoint
+
+ .size in_pm32, .-in_pm32