author     Thomas Weißschuh <linux@weissschuh.net>   2025-03-16 20:46:07 +0100
committer  Thomas Weißschuh <linux@weissschuh.net>   2025-04-14 07:51:25 +0200
commit     60ccc16f530a480abfde399bf80779e5b021b3fe
tree       bff5516b4c2dae0c373d25fb8a56bf3266d86eda
parent     8e1930296f921d3246f331c394c0516feb36993a
tools/nolibc: drop manual stack pointer alignment
The stack pointer is already aligned by the kernel to a multiple of 16.
See STACK_ROUND() in fs/binfmt_elf.c.
The manual realignment is unnecessary, drop it.

Signed-off-by: Thomas Weißschuh <linux@weissschuh.net>
Acked-by: Willy Tarreau <w@1wt.eu>
Link: https://lore.kernel.org/r/20250316-nolibc-sp-align-v1-1-1e1fb073ca1e@weissschuh.net
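For context on why this is safe: when the ELF loader lays out argc/argv/envp/auxv
on the initial stack, it rounds the stack pointer so the value handed to _start is
a multiple of 16 (see STACK_ROUND() in fs/binfmt_elf.c). The sketch below only
illustrates that guarantee; the macro and helper names are hypothetical, not the
kernel's exact definitions.

	/*
	 * Illustrative sketch only: the real rounding is STACK_ROUND() in
	 * fs/binfmt_elf.c; the names here are made up for illustration.
	 */
	#define INITIAL_SP_ALIGN	16UL

	/* Round an address down to the initial-stack alignment. */
	#define INITIAL_SP_ROUND(sp)	((unsigned long)(sp) & ~(INITIAL_SP_ALIGN - 1))

	/* A hypothetical check _start_c() could perform on the sp it receives. */
	static inline int sp_is_aligned(const void *sp)
	{
		return ((unsigned long)sp & (INITIAL_SP_ALIGN - 1)) == 0;
	}

Because the loader already performs this rounding, the per-architecture "and"/
"clrrdi"/"bstrins" realignment in the _start stubs below is redundant and can go.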
 tools/include/nolibc/arch-aarch64.h   | 1 -
 tools/include/nolibc/arch-arm.h       | 2 --
 tools/include/nolibc/arch-i386.h      | 2 --
 tools/include/nolibc/arch-loongarch.h | 7 -------
 tools/include/nolibc/arch-powerpc.h   | 2 --
 tools/include/nolibc/arch-riscv.h     | 1 -
 tools/include/nolibc/arch-x86_64.h    | 1 -
 7 files changed, 0 insertions(+), 16 deletions(-)
diff --git a/tools/include/nolibc/arch-aarch64.h b/tools/include/nolibc/arch-aarch64.h
index 06fdef7b291a..937a348da42e 100644
--- a/tools/include/nolibc/arch-aarch64.h
+++ b/tools/include/nolibc/arch-aarch64.h
@@ -146,7 +146,6 @@ void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector _s
 {
 	__asm__ volatile (
 		"mov x0, sp\n" /* save stack pointer to x0, as arg1 of _start_c */
-		"and sp, x0, -16\n" /* sp must be 16-byte aligned in the callee */
 		"bl _start_c\n" /* transfer to c runtime */
 	);
 	__nolibc_entrypoint_epilogue();
diff --git a/tools/include/nolibc/arch-arm.h b/tools/include/nolibc/arch-arm.h
index 6180ff99ab43..1f66e7e5a444 100644
--- a/tools/include/nolibc/arch-arm.h
+++ b/tools/include/nolibc/arch-arm.h
@@ -189,8 +189,6 @@ void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector _s
 {
 	__asm__ volatile (
 		"mov r0, sp\n" /* save stack pointer to %r0, as arg1 of _start_c */
-		"and ip, r0, #-8\n" /* sp must be 8-byte aligned in the callee */
-		"mov sp, ip\n"
 		"bl _start_c\n" /* transfer to c runtime */
 	);
 	__nolibc_entrypoint_epilogue();
diff --git a/tools/include/nolibc/arch-i386.h b/tools/include/nolibc/arch-i386.h
index ff5afc35bbd8..7c9b38e96418 100644
--- a/tools/include/nolibc/arch-i386.h
+++ b/tools/include/nolibc/arch-i386.h
@@ -167,8 +167,6 @@ void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector _s
 	__asm__ volatile (
 		"xor %ebp, %ebp\n" /* zero the stack frame */
 		"mov %esp, %eax\n" /* save stack pointer to %eax, as arg1 of _start_c */
-		"add $12, %esp\n" /* avoid over-estimating after the 'and' & 'sub' below */
-		"and $-16, %esp\n" /* the %esp must be 16-byte aligned on 'call' */
 		"sub $12, %esp\n" /* sub 12 to keep it aligned after the push %eax */
 		"push %eax\n" /* push arg1 on stack to support plain stack modes too */
 		"call _start_c\n" /* transfer to c runtime */
diff --git a/tools/include/nolibc/arch-loongarch.h b/tools/include/nolibc/arch-loongarch.h
index fb519545959e..5511705303ea 100644
--- a/tools/include/nolibc/arch-loongarch.h
+++ b/tools/include/nolibc/arch-loongarch.h
@@ -142,18 +142,11 @@
 	_arg1; \
 })
 
-#if __loongarch_grlen == 32
-#define LONG_BSTRINS "bstrins.w"
-#else /* __loongarch_grlen == 64 */
-#define LONG_BSTRINS "bstrins.d"
-#endif
-
 /* startup code */
 void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector _start(void)
 {
 	__asm__ volatile (
 		"move $a0, $sp\n" /* save stack pointer to $a0, as arg1 of _start_c */
-		LONG_BSTRINS " $sp, $zero, 3, 0\n" /* $sp must be 16-byte aligned */
 		"bl _start_c\n" /* transfer to c runtime */
 	);
 	__nolibc_entrypoint_epilogue();
diff --git a/tools/include/nolibc/arch-powerpc.h b/tools/include/nolibc/arch-powerpc.h
index ee2fdb8d601d..204564bbcd32 100644
--- a/tools/include/nolibc/arch-powerpc.h
+++ b/tools/include/nolibc/arch-powerpc.h
@@ -201,7 +201,6 @@ void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector _s
 
 	__asm__ volatile (
 		"mr 3, 1\n" /* save stack pointer to r3, as arg1 of _start_c */
-		"clrrdi 1, 1, 4\n" /* align the stack to 16 bytes */
 		"li 0, 0\n" /* zero the frame pointer */
 		"stdu 1, -32(1)\n" /* the initial stack frame */
 		"bl _start_c\n" /* transfer to c runtime */
@@ -209,7 +208,6 @@ void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector _s
 #else
 	__asm__ volatile (
 		"mr 3, 1\n" /* save stack pointer to r3, as arg1 of _start_c */
-		"clrrwi 1, 1, 4\n" /* align the stack to 16 bytes */
 		"li 0, 0\n" /* zero the frame pointer */
 		"stwu 1, -16(1)\n" /* the initial stack frame */
 		"bl _start_c\n" /* transfer to c runtime */
diff --git a/tools/include/nolibc/arch-riscv.h b/tools/include/nolibc/arch-riscv.h
index 8827bf936212..885383a86c38 100644
--- a/tools/include/nolibc/arch-riscv.h
+++ b/tools/include/nolibc/arch-riscv.h
@@ -148,7 +148,6 @@ void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector _s
"lla gp, __global_pointer$\n"
".option pop\n"
"mv a0, sp\n" /* save stack pointer to a0, as arg1 of _start_c */
- "andi sp, a0, -16\n" /* sp must be 16-byte aligned */
"call _start_c\n" /* transfer to c runtime */
);
__nolibc_entrypoint_epilogue();
diff --git a/tools/include/nolibc/arch-x86_64.h b/tools/include/nolibc/arch-x86_64.h
index 1e40620a2b33..67305e24dbef 100644
--- a/tools/include/nolibc/arch-x86_64.h
+++ b/tools/include/nolibc/arch-x86_64.h
@@ -166,7 +166,6 @@ void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector _s
 	__asm__ volatile (
 		"xor %ebp, %ebp\n" /* zero the stack frame */
 		"mov %rsp, %rdi\n" /* save stack pointer to %rdi, as arg1 of _start_c */
-		"and $-16, %rsp\n" /* %rsp must be 16-byte aligned before call */
 		"call _start_c\n" /* transfer to c runtime */
 		"hlt\n" /* ensure it does not return */
 	);