diff options
author:    Richard Braun <rbraun@sceen.net>  2014-05-28 21:10:55 +0200
committer: Richard Braun <rbraun@sceen.net>  2014-05-28 21:10:55 +0200
commit:    2cc712601311cd6d9524c1af368a0d2576b3896f (patch)
tree:      4aa10eb7b60ba7c52489f007c2316fabe3441f03 /arch/x86/machine/boot_asm.S
parent:    8e0fd5a7964d7869fd37360fd1d61f09c7abc728 (diff)
x86/{boot,cpu}: dynamically allocate AP boot stacks
The main goal of this change is to reduce the size of the kernel
executable image since boot stacks, despite being uninitialized,
aren't declared as common symbols.
Diffstat (limited to 'arch/x86/machine/boot_asm.S')
-rw-r--r--  arch/x86/machine/boot_asm.S | 21
1 file changed, 9 insertions(+), 12 deletions(-)
diff --git a/arch/x86/machine/boot_asm.S b/arch/x86/machine/boot_asm.S
index da2b8f1..c52cc7f 100644
--- a/arch/x86/machine/boot_asm.S
+++ b/arch/x86/machine/boot_asm.S
@@ -58,7 +58,7 @@ ASM_ENTRY(_start)
  ljmp $8, $1f

 1:
- movl $(boot_stacks - KERNEL_OFFSET + BOOT_STACK_SIZE), %esp
+ movl $(boot_stack + STACK_SIZE), %esp

 #ifdef __LP64__
  call boot_check_long_mode
@@ -85,7 +85,7 @@ ASM_ENTRY(_start)

 #ifdef __LP64__
  movq %rax, %cr3
- movq $(boot_stacks + BOOT_STACK_SIZE), %rsp
+ movq $(boot_stack + STACK_SIZE), %rsp
 #else /* __LP64__ */
  movl %eax, %cr3
  movl %cr0, %eax
@@ -94,7 +94,7 @@ ASM_ENTRY(_start)
  ljmp $8, $1f

 1:
- movl $(boot_stacks + BOOT_STACK_SIZE), %esp
+ movl $(boot_stack + STACK_SIZE), %esp
 #endif /* __LP64__ */

  xorl %ebp, %ebp
@@ -233,10 +233,7 @@ ASM_ENTRY(boot_ap_start32)
  ljmp $8, $1f

 1:
- movl boot_ap_id, %esp
- incl %esp
- shll $BOOT_STACK_SHIFT, %esp
- addl $(boot_stacks - KERNEL_OFFSET), %esp
+ movl $(boot_ap_stack + STACK_SIZE), %esp

 #ifdef __LP64__
  call boot_setup_long_mode
@@ -262,14 +259,14 @@ ASM_ENTRY(boot_ap_start32)
 1:
 #endif /* __LP64__ */

- movl boot_ap_id, %esp
- incl %esp
- shll $BOOT_STACK_SHIFT, %esp
+ call cpu_get_boot_stack

 #ifdef __LP64__
- addq $boot_stacks, %rsp
+ movq %rax, %rsp
+ addq $STACK_SIZE, %rsp
 #else /* __LP64__ */
- addl $boot_stacks, %esp
+ movl %eax, %esp
+ addl $STACK_SIZE, %esp
 #endif /* __LP64__ */

  xorl %ebp, %ebp