author    Stefan Liebler <stli@linux.vnet.ibm.com>   2015-01-16 09:17:32 +0100
committer Andreas Krebbel <krebbel@linux.vnet.ibm.com>   2015-01-16 09:17:32 +0100
commit    1d53248326dbd3c620a2bca16d35eff3019d900a (patch)
tree      0a1fc9df0fd77039c5086acbcbe98183cfb64521
parent    53fbd16918791258ebc1684b1a1afd543b814780 (diff)
S390: Get rid of linknamespace failures for string functions.
-rw-r--r--  ChangeLog                                        |  62
-rw-r--r--  NEWS                                             |   2
-rw-r--r--  sysdeps/s390/s390-32/memcmp.S                    |   4
-rw-r--r--  sysdeps/s390/s390-32/memcpy.S                    |  10
-rw-r--r--  sysdeps/s390/s390-32/memset.S                    |   4
-rw-r--r--  sysdeps/s390/s390-32/multiarch/ifunc-resolve.c   |  16
-rw-r--r--  sysdeps/s390/s390-32/multiarch/memcmp.S          |  12
-rw-r--r--  sysdeps/s390/s390-32/multiarch/memcpy.S          |  14
-rw-r--r--  sysdeps/s390/s390-32/multiarch/memset.S          |  18
-rw-r--r--  sysdeps/s390/s390-64/memcmp.S                    |   4
-rw-r--r--  sysdeps/s390/s390-64/memcpy.S                    |  10
-rw-r--r--  sysdeps/s390/s390-64/memset.S                    |   4
-rw-r--r--  sysdeps/s390/s390-64/multiarch/ifunc-resolve.c   |  18
-rw-r--r--  sysdeps/s390/s390-64/multiarch/memcmp.S          |  12
-rw-r--r--  sysdeps/s390/s390-64/multiarch/memcpy.S          |  14
-rw-r--r--  sysdeps/s390/s390-64/multiarch/memset.S          |  18
16 files changed, 142 insertions, 80 deletions
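
Context for the patch: the linknamespace failures (BZ #17848) arise because the ISO C string functions define and reference non-reserved symbol names such as memcmp_g5 or memcpy_mvcle. The commit renames the CPU-specific implementations to reserved __-prefixed names and points the ifunc resolvers and public aliases at the new names, so only reserved identifiers remain visible from the standard entry points. Below is a minimal C sketch of that reserved-name-plus-alias pattern; all names in it are made up for illustration and are not glibc code.

/* Minimal sketch, not glibc code: the optimized implementation lives
   under a reserved (double-underscore) name, and the public symbols
   are only aliases, so no non-reserved helper symbol leaks into the
   namespace of a standard function.  */
#include <stddef.h>

int
__demo_memcmp_g5 (const void *s1, const void *s2, size_t n)
{
  const unsigned char *a = s1, *b = s2;
  for (; n > 0; --n, ++a, ++b)
    if (*a != *b)
      return *a < *b ? -1 : 1;
  return 0;
}

/* Public names are aliases of the reserved-name implementation,
   mirroring the ".set memcmp,__memcmp_g5" and ".weak bcmp" /
   ".set bcmp,__memcmp_g5" directives in the assembly below.  */
extern __typeof (__demo_memcmp_g5) demo_memcmp
  __attribute__ ((alias ("__demo_memcmp_g5")));
extern __typeof (__demo_memcmp_g5) demo_bcmp
  __attribute__ ((weak, alias ("__demo_memcmp_g5")));
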
diff --git a/ChangeLog b/ChangeLog
index a410620b38..15e004f7ac 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,65 @@
+2015-01-16 Stefan Liebler <stli@linux.vnet.ibm.com>
+
+ [BZ #17848]
+ * sysdeps/s390/s390-32/memcmp.S
+ (memcmp_g5): Rename to __memcmp_g5.
+ * sysdeps/s390/s390-32/memcpy.S
+ (memcpy_g5): Rename to __memcpy_g5.
+ Jump to __memcpy_mvcle instead of memcpy_mvcle.
+ (memcpy_mvcle): Rename to __memcpy_mvcle.
+ * sysdeps/s390/s390-32/memset.S
+ (memset_g5): Rename to __memset_g5.
+ * sysdeps/s390/s390-32/multiarch/ifunc-resolve.c
+ (IFUNC_RESOLVE): Prefix ifunc-resolve function
+ and use prefixed functions.
+ * sysdeps/s390/s390-32/multiarch/memcmp.S
+ (memcmp_z196): Rename to __memcmp_z196.
+ (memcmp_z10): Rename to __memcmp_z10.
+ (memcmp): Set alias to __memcmp_g5.
+ (bcmp): Set alias to __memcmp_g5.
+ * sysdeps/s390/s390-32/multiarch/memcpy.S
+ (memcpy_z196): Rename to __memcpy_z196.
+ Jump to __memcpy_mvcle instead of memcpy_mvcle.
+ (memcpy_z10): Rename to __memcpy_z10.
+ Jump to __memcpy_mvcle instead of memcpy_mvcle.
+ (memcpy): Set alias to __memcpy_g5.
+ * sysdeps/s390/s390-32/multiarch/memset.S
+ (memset_z196): Rename to __memset_z196.
+ Jump to __memset_mvcle instead of memset_mvcle.
+ (memset_z10): Rename to __memset_z10.
+ Jump to __memset_mvcle instead of memset_mvcle.
+ (memset_mvcle): Rename to __memset_mvcle.
+ (memset): Set alias to __memset_g5.
+ * sysdeps/s390/s390-64/memcmp.S
+ (memcmp_z900): Rename to __memcmp_z900.
+ * sysdeps/s390/s390-64/memcpy.S
+ (memcpy_z900): Rename to __memcpy_z900.
+ Jump to __memcpy_mvcle instead of memcpy_mvcle.
+ (memcpy_mvcle): Rename to __memcpy_mvcle.
+ * sysdeps/s390/s390-64/memset.S
+ (memset_z900): Rename to __memset_z900.
+ * sysdeps/s390/s390-64/multiarch/ifunc-resolve.c
+ (IFUNC_RESOLVE): Prefix ifunc-resolve function
+ and use prefixed functions.
+ * sysdeps/s390/s390-64/multiarch/memcmp.S
+ (memcmp_z196): Rename to __memcmp_z196.
+ (memcmp_z10): Rename to __memcmp_z10.
+ (memcmp): Set alias to __memcmp_z900.
+ (bcmp): Set alias to __memcmp_z900.
+ * sysdeps/s390/s390-64/multiarch/memcpy.S
+ (memcpy_z196): Rename to __memcpy_z196.
+ Jump to __memcpy_mvcle instead of memcpy_mvcle.
+ (memcpy_z10): Rename to __memcpy_z10.
+ Jump to __memcpy_mvcle instead of memcpy_mvcle.
+ (memcpy): Set alias to __memcpy_z900.
+ * sysdeps/s390/s390-64/multiarch/memset.S
+ (memset_z196): Rename to __memset_z196.
+ Jump to __memset_mvcle instead of memset_mvcle.
+ (memset_z10): Rename to __memset_z10.
+ Jump to __memset_mvcle instead of memset_mvcle.
+ (memset_mvcle): Rename to __memset_mvcle.
+ (memset): Set alias to __memset_z900.
+
2015-01-14 Joseph Myers <joseph@codesourcery.com>
[BZ #17748]
diff --git a/NEWS b/NEWS
index 3bdc96a402..d908425e6c 100644
--- a/NEWS
+++ b/NEWS
@@ -17,7 +17,7 @@ Version 2.21
17630, 17633, 17634, 17635, 17647, 17653, 17657, 17658, 17664, 17665,
17668, 17682, 17717, 17719, 17722, 17723, 17724, 17725, 17732, 17733,
17744, 17745, 17746, 17747, 17748, 17775, 17777, 17780, 17781, 17782,
- 17791, 17793, 17796, 17797, 17803, 17806, 17834
+ 17791, 17793, 17796, 17797, 17803, 17806, 17834, 17848
* Optimized strcpy, stpcpy, strncpy, stpncpy, strcmp, and strncmp
implementations for powerpc64/powerpc64le.
diff --git a/sysdeps/s390/s390-32/memcmp.S b/sysdeps/s390/s390-32/memcmp.S
index 191ff5b209..93f06d57a3 100644
--- a/sysdeps/s390/s390-32/memcmp.S
+++ b/sysdeps/s390/s390-32/memcmp.S
@@ -27,7 +27,7 @@
.text
#ifdef USE_MULTIARCH
-ENTRY(memcmp_g5)
+ENTRY(__memcmp_g5)
#else
ENTRY(memcmp)
#endif
@@ -58,7 +58,7 @@ ENTRY(memcmp)
.L_G5_17:
clc 0(1,%r3),0(%r2)
#ifdef USE_MULTIARCH
-END(memcmp_g5)
+END(__memcmp_g5)
#else
END(memcmp)
libc_hidden_builtin_def (memcmp)
diff --git a/sysdeps/s390/s390-32/memcpy.S b/sysdeps/s390/s390-32/memcpy.S
index d088625a28..f26fd00ff8 100644
--- a/sysdeps/s390/s390-32/memcpy.S
+++ b/sysdeps/s390/s390-32/memcpy.S
@@ -26,7 +26,7 @@
%r4 = number of bytes to copy. */
#ifdef USE_MULTIARCH
-ENTRY(memcpy_g5)
+ENTRY(__memcpy_g5)
#else
ENTRY(memcpy)
#endif
@@ -49,7 +49,7 @@ ENTRY(memcpy)
br %r14
.L_G5_13:
chi %r5,4096 # Switch to mvcle for copies >1MB
- jh memcpy_mvcle
+ jh __memcpy_mvcle
.L_G5_12:
mvc 0(256,%r1),0(%r3)
la %r1,256(%r1)
@@ -60,13 +60,13 @@ ENTRY(memcpy)
.L_G5_17:
mvc 0(1,%r1),0(%r3)
#ifdef USE_MULTIARCH
-END(memcpy_g5)
+END(__memcpy_g5)
#else
END(memcpy)
libc_hidden_builtin_def (memcpy)
#endif
-ENTRY(memcpy_mvcle)
+ENTRY(__memcpy_mvcle)
# Using as standalone function will result in unexpected
# results since the length field is incremented by 1 in order to
# compensate the changes already done in the functions above.
@@ -79,4 +79,4 @@ ENTRY(memcpy_mvcle)
jo .L_MVCLE_1
lr %r2,%r1 # return destination address
br %r14
-END(memcpy_mvcle)
+END(__memcpy_mvcle)
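
For reference, the dispatch visible in this hunk: %r5 appears to hold the number of 256-byte blocks still to copy, and once it exceeds 4096 (that is, more than 1 MiB, per the comment in the assembly) the code branches to the MVCLE-based routine instead of looping over 256-byte MVC moves. A hedged C sketch of that threshold logic follows; the helper functions are hypothetical stand-ins, not the real routines.

/* Illustrative sketch only; helper names are hypothetical.  The
   threshold matches the comment above: more than 4096 blocks of
   256 bytes (> 1 MiB) switches to the MVCLE path.  */
#include <stddef.h>
#include <string.h>

static void
copy_with_mvc_loop (void *dst, const void *src, size_t len)
{
  memcpy (dst, src, len);       /* stand-in for the 256-byte MVC loop */
}

static void
copy_with_mvcle (void *dst, const void *src, size_t len)
{
  memcpy (dst, src, len);       /* stand-in for the MVCLE routine */
}

static void
copy_dispatch (void *dst, const void *src, size_t len)
{
  size_t blocks = len / 256;    /* analogue of the block count in %r5 */
  if (blocks > 4096)            /* copies larger than 1 MiB */
    copy_with_mvcle (dst, src, len);
  else
    copy_with_mvc_loop (dst, src, len);
}
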
diff --git a/sysdeps/s390/s390-32/memset.S b/sysdeps/s390/s390-32/memset.S
index dc7ce0a649..0abba61c22 100644
--- a/sysdeps/s390/s390-32/memset.S
+++ b/sysdeps/s390/s390-32/memset.S
@@ -28,7 +28,7 @@
.text
#ifdef USE_MULTIARCH
-ENTRY(memset_g5)
+ENTRY(__memset_g5)
#else
ENTRY(memset)
#endif
@@ -58,7 +58,7 @@ ENTRY(memset)
.L_G5_20:
mvc 1(1,%r1),0(%r1)
#ifdef USE_MULTIARCH
-END(memset_g5)
+END(__memset_g5)
#else
END(memset)
libc_hidden_builtin_def (memset)
diff --git a/sysdeps/s390/s390-32/multiarch/ifunc-resolve.c b/sysdeps/s390/s390-32/multiarch/ifunc-resolve.c
index c9abd1b6f1..8e0cdd5df1 100644
--- a/sysdeps/s390/s390-32/multiarch/ifunc-resolve.c
+++ b/sysdeps/s390/s390-32/multiarch/ifunc-resolve.c
@@ -28,17 +28,17 @@
#define IFUNC_RESOLVE(FUNC) \
asm (".globl " #FUNC "\n\t" \
".type " #FUNC ",@gnu_indirect_function\n\t" \
- ".set " #FUNC ",resolve_" #FUNC "\n\t" \
+ ".set " #FUNC ",__resolve_" #FUNC "\n\t" \
".globl __GI_" #FUNC "\n\t" \
".set __GI_" #FUNC "," #FUNC "\n"); \
\
/* Make the declarations of the optimized functions hidden in order
to prevent GOT slots being generated for them. */ \
- extern void *FUNC##_z196 attribute_hidden; \
- extern void *FUNC##_z10 attribute_hidden; \
- extern void *FUNC##_g5 attribute_hidden; \
+ extern void *__##FUNC##_z196 attribute_hidden; \
+ extern void *__##FUNC##_z10 attribute_hidden; \
+ extern void *__##FUNC##_g5 attribute_hidden; \
\
- void *resolve_##FUNC (unsigned long int dl_hwcap) \
+ void *__resolve_##FUNC (unsigned long int dl_hwcap) \
{ \
if ((dl_hwcap & HWCAP_S390_STFLE) \
&& (dl_hwcap & HWCAP_S390_ZARCH) \
@@ -53,11 +53,11 @@
: : "cc"); \
\
if ((stfle_bits & (1ULL << (63 - STFLE_BITS_Z196))) != 0) \
- return &FUNC##_z196; \
+ return &__##FUNC##_z196; \
else if ((stfle_bits & (1ULL << (63 - STFLE_BITS_Z10))) != 0) \
- return &FUNC##_z10; \
+ return &__##FUNC##_z10; \
} \
- return &FUNC##_g5; \
+ return &__##FUNC##_g5; \
}
IFUNC_RESOLVE(memset)
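
The IFUNC_RESOLVE macro above wires each function up as a GNU indirect function: the assembler directives mark the public symbol as @gnu_indirect_function and bind it to the now __-prefixed resolver, which the dynamic linker calls at relocation time to pick the z196, z10, or g5 variant. A compiler-level sketch of the same mechanism using GCC's ifunc attribute is shown below; all names are illustrative and do not correspond to the glibc macro.

/* Sketch of a GNU indirect function via GCC's ifunc attribute;
   illustrative only, not the IFUNC_RESOLVE macro.  */
#include <string.h>

static void *
__demo_memcpy_generic (void *dst, const void *src, size_t n)
{
  return memcpy (dst, src, n);  /* baseline implementation */
}

static void *
__demo_memcpy_fast (void *dst, const void *src, size_t n)
{
  return memcpy (dst, src, n);  /* stand-in for a CPU-specific variant */
}

typedef void *(*demo_memcpy_fn) (void *, const void *, size_t);

/* The resolver runs once, when the dynamic linker resolves the symbol,
   and returns the implementation the symbol should bind to.  */
static demo_memcpy_fn
demo_memcpy_resolver (void)
{
  int cpu_has_fast_copy = 0;    /* real code would test hwcap/STFLE bits */
  return cpu_has_fast_copy ? __demo_memcpy_fast : __demo_memcpy_generic;
}

void *demo_memcpy (void *, const void *, size_t)
  __attribute__ ((ifunc ("demo_memcpy_resolver")));
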
diff --git a/sysdeps/s390/s390-32/multiarch/memcmp.S b/sysdeps/s390/s390-32/multiarch/memcmp.S
index 0d97c5856d..584dc99792 100644
--- a/sysdeps/s390/s390-32/multiarch/memcmp.S
+++ b/sysdeps/s390/s390-32/multiarch/memcmp.S
@@ -29,7 +29,7 @@
#if IS_IN (libc)
-ENTRY(memcmp_z196)
+ENTRY(__memcmp_z196)
.machine "z196"
.machinemode "zarch_nohighgprs"
ltr %r4,%r4
@@ -61,9 +61,9 @@ ENTRY(memcmp_z196)
br %r14
.L_Z196_14:
clc 0(1,%r3),0(%r2)
-END(memcmp_z196)
+END(__memcmp_z196)
-ENTRY(memcmp_z10)
+ENTRY(__memcmp_z10)
.machine "z10"
.machinemode "zarch_nohighgprs"
ltr %r4,%r4
@@ -90,7 +90,7 @@ ENTRY(memcmp_z10)
j .L_Z10_3
.L_Z10_15:
clc 0(1,%r3),0(%r2)
-END(memcmp_z10)
+END(__memcmp_z10)
#endif
@@ -98,7 +98,7 @@ END(memcmp_z10)
#if !IS_IN (libc)
.globl memcmp
-.set memcmp,memcmp_g5
+.set memcmp,__memcmp_g5
.weak bcmp
-.set bcmp,memcmp_g5
+.set bcmp,__memcmp_g5
#endif
diff --git a/sysdeps/s390/s390-32/multiarch/memcpy.S b/sysdeps/s390/s390-32/multiarch/memcpy.S
index 725d8688b1..51f4fcff15 100644
--- a/sysdeps/s390/s390-32/multiarch/memcpy.S
+++ b/sysdeps/s390/s390-32/multiarch/memcpy.S
@@ -29,7 +29,7 @@
#if defined SHARED && IS_IN (libc)
-ENTRY(memcpy_z196)
+ENTRY(__memcpy_z196)
.machine "z196"
.machinemode "zarch_nohighgprs"
llgfr %r4,%r4
@@ -46,7 +46,7 @@ ENTRY(memcpy_z196)
br %r14
.L_Z196_5:
cgfi %r5,262144 # Switch to mvcle for copies >64MB
- jh memcpy_mvcle
+ jh __memcpy_mvcle
.L_Z196_2:
pfd 1,768(%r3)
pfd 2,768(%r1)
@@ -58,9 +58,9 @@ ENTRY(memcpy_z196)
j .L_Z196_3
.L_Z196_14:
mvc 0(1,%r1),0(%r3)
-END(memcpy_z196)
+END(__memcpy_z196)
-ENTRY(memcpy_z10)
+ENTRY(__memcpy_z10)
.machine "z10"
.machinemode "zarch_nohighgprs"
llgfr %r4,%r4
@@ -75,7 +75,7 @@ ENTRY(memcpy_z10)
br %r14
.L_Z10_13:
cgfi %r5,65535 # Switch to mvcle for copies >16MB
- jh memcpy_mvcle
+ jh __memcpy_mvcle
.L_Z10_12:
pfd 1,768(%r3)
pfd 2,768(%r1)
@@ -86,7 +86,7 @@ ENTRY(memcpy_z10)
j .L_Z10_3
.L_Z10_15:
mvc 0(1,%r1),0(%r3)
-END(memcpy_z10)
+END(__memcpy_z10)
#endif
@@ -94,5 +94,5 @@ END(memcpy_z10)
#if !defined SHARED || !IS_IN (libc)
.globl memcpy
-.set memcpy,memcpy_g5
+.set memcpy,__memcpy_g5
#endif
diff --git a/sysdeps/s390/s390-32/multiarch/memset.S b/sysdeps/s390/s390-32/multiarch/memset.S
index ab97af04e9..1a7b45f369 100644
--- a/sysdeps/s390/s390-32/multiarch/memset.S
+++ b/sysdeps/s390/s390-32/multiarch/memset.S
@@ -29,7 +29,7 @@
#if IS_IN (libc)
-ENTRY(memset_z196)
+ENTRY(__memset_z196)
.machine "z196"
.machinemode "zarch_nohighgprs"
llgfr %r4,%r4
@@ -49,7 +49,7 @@ ENTRY(memset_z196)
br %r14
.L_Z196_1:
cgfi %r5,1048576
- jh memset_mvcle # Switch to mvcle for >256MB
+ jh __memset_mvcle # Switch to mvcle for >256MB
.L_Z196_2:
pfd 2,1024(%r1)
mvc 1(256,%r1),0(%r1)
@@ -59,9 +59,9 @@ ENTRY(memset_z196)
j .L_Z196_3
.L_Z196_17:
mvc 1(1,%r1),0(%r1)
-END(memset_z196)
+END(__memset_z196)
-ENTRY(memset_z10)
+ENTRY(__memset_z10)
.machine "z10"
.machinemode "zarch_nohighgprs"
llgfr %r4,%r4
@@ -78,7 +78,7 @@ ENTRY(memset_z10)
br %r14
.L_Z10_15:
cgfi %r5,163840 # Switch to mvcle for >40MB
- jh memset_mvcle
+ jh __memset_mvcle
.L_Z10_14:
pfd 2,1024(%r1)
mvc 1(256,%r1),0(%r1)
@@ -87,9 +87,9 @@ ENTRY(memset_z10)
j .L_Z10_3
.L_Z10_18:
mvc 1(1,%r1),0(%r1)
-END(memset_z10)
+END(__memset_z10)
-ENTRY(memset_mvcle)
+ENTRY(__memset_mvcle)
ahi %r4,2 # take back the change done by the caller
lr %r0,%r2 # save source address
lr %r1,%r3 # move pad byte to R1
@@ -101,7 +101,7 @@ ENTRY(memset_mvcle)
lr %r2,%r0 # return value is source address
.L1:
br %r14
-END(memset_mvcle)
+END(__memset_mvcle)
#endif
@@ -109,5 +109,5 @@ END(memset_mvcle)
#if !IS_IN (libc)
.globl memset
-.set memset,memset_g5
+.set memset,__memset_g5
#endif
diff --git a/sysdeps/s390/s390-64/memcmp.S b/sysdeps/s390/s390-64/memcmp.S
index 327d7fab38..6767438f28 100644
--- a/sysdeps/s390/s390-64/memcmp.S
+++ b/sysdeps/s390/s390-64/memcmp.S
@@ -27,7 +27,7 @@
.text
#ifdef USE_MULTIARCH
-ENTRY(memcmp_z900)
+ENTRY(__memcmp_z900)
#else
ENTRY(memcmp)
#endif
@@ -56,7 +56,7 @@ ENTRY(memcmp)
.L_Z900_15:
clc 0(1,%r3),0(%r2)
#ifdef USE_MULTIARCH
-END(memcmp_z900)
+END(__memcmp_z900)
#else
END(memcmp)
libc_hidden_builtin_def (memcmp)
diff --git a/sysdeps/s390/s390-64/memcpy.S b/sysdeps/s390/s390-64/memcpy.S
index da387afe6c..3f122dcf0f 100644
--- a/sysdeps/s390/s390-64/memcpy.S
+++ b/sysdeps/s390/s390-64/memcpy.S
@@ -29,7 +29,7 @@
.text
#ifdef USE_MULTIARCH
-ENTRY(memcpy_z900)
+ENTRY(__memcpy_z900)
#else
ENTRY(memcpy)
#endif
@@ -48,7 +48,7 @@ ENTRY(memcpy)
br %r14
.L_Z900_13:
chi %r5,4096 # Switch to mvcle for copies >1MB
- jh memcpy_mvcle
+ jh __memcpy_mvcle
.L_Z900_12:
mvc 0(256,%r1),0(%r3)
la %r1,256(%r1)
@@ -59,13 +59,13 @@ ENTRY(memcpy)
mvc 0(1,%r1),0(%r3)
#ifdef USE_MULTIARCH
-END(memcpy_z900)
+END(__memcpy_z900)
#else
END(memcpy)
libc_hidden_builtin_def (memcpy)
#endif
-ENTRY(memcpy_mvcle)
+ENTRY(__memcpy_mvcle)
# Using as standalone function will result in unexpected
# results since the length field is incremented by 1 in order to
# compensate the changes already done in the functions above.
@@ -78,4 +78,4 @@ ENTRY(memcpy_mvcle)
jo .L_MVCLE_1
lgr %r2,%r1 # return destination address
br %r14
-END(memcpy_mvcle)
+END(__memcpy_mvcle)
diff --git a/sysdeps/s390/s390-64/memset.S b/sysdeps/s390/s390-64/memset.S
index 78e74a05a4..1e307d7ec8 100644
--- a/sysdeps/s390/s390-64/memset.S
+++ b/sysdeps/s390/s390-64/memset.S
@@ -29,7 +29,7 @@
.text
#ifdef USE_MULTIARCH
-ENTRY(memset_z900)
+ENTRY(__memset_z900)
#else
ENTRY(memset)
#endif
@@ -57,7 +57,7 @@ ENTRY(memset)
.L_Z900_18:
mvc 1(1,%r1),0(%r1)
#ifdef USE_MULTIARCH
-END(memset_z900)
+END(__memset_z900)
#else
END(memset)
libc_hidden_builtin_def (memset)
diff --git a/sysdeps/s390/s390-64/multiarch/ifunc-resolve.c b/sysdeps/s390/s390-64/multiarch/ifunc-resolve.c
index fce8ef6401..b303304f31 100644
--- a/sysdeps/s390/s390-64/multiarch/ifunc-resolve.c
+++ b/sysdeps/s390/s390-64/multiarch/ifunc-resolve.c
@@ -28,17 +28,17 @@
#define IFUNC_RESOLVE(FUNC) \
asm (".globl " #FUNC "\n\t" \
".type " #FUNC ",@gnu_indirect_function\n\t" \
- ".set " #FUNC ",resolve_" #FUNC "\n\t" \
+ ".set " #FUNC ",__resolve_" #FUNC "\n\t" \
".globl __GI_" #FUNC "\n\t" \
".set __GI_" #FUNC "," #FUNC "\n"); \
\
/* Make the declarations of the optimized functions hidden in order
to prevent GOT slots being generated for them. */ \
- extern void *FUNC##_z196 attribute_hidden; \
- extern void *FUNC##_z10 attribute_hidden; \
- extern void *FUNC##_z900 attribute_hidden; \
+ extern void *__##FUNC##_z196 attribute_hidden; \
+ extern void *__##FUNC##_z10 attribute_hidden; \
+ extern void *__##FUNC##_z900 attribute_hidden; \
\
- void *resolve_##FUNC (unsigned long int dl_hwcap) \
+ void *__resolve_##FUNC (unsigned long int dl_hwcap) \
{ \
if (dl_hwcap & HWCAP_S390_STFLE) \
{ \
@@ -54,14 +54,14 @@
: : "cc"); \
\
if ((stfle_bits & (1UL << (63 - STFLE_BITS_Z196))) != 0) \
- return &FUNC##_z196; \
+ return &__##FUNC##_z196; \
else if ((stfle_bits & (1UL << (63 - STFLE_BITS_Z10))) != 0) \
- return &FUNC##_z10; \
+ return &__##FUNC##_z10; \
else \
- return &FUNC##_z900; \
+ return &__##FUNC##_z900; \
} \
else \
- return &FUNC##_z900; \
+ return &__##FUNC##_z900; \
}
IFUNC_RESOLVE(memset)
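
A note on the bit arithmetic in both resolvers: STFLE numbers facility bits from the most significant bit of each result doubleword, so facility bit N of the first doubleword is tested with (1UL << (63 - N)), as in the hunks above. A small sketch of that check; the helper below is illustrative, not a glibc interface.

/* Illustrative helper, not part of glibc: test facility bit N in the
   first STFLE result doubleword.  STFLE counts bits from the most
   significant end, hence the 63 - N shift used by the resolvers.  */
#include <stdbool.h>
#include <stdint.h>

static bool
stfle_facility_bit_set (uint64_t stfle_word0, unsigned int bit)
{
  return (stfle_word0 & (1ULL << (63 - bit))) != 0;
}
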
diff --git a/sysdeps/s390/s390-64/multiarch/memcmp.S b/sysdeps/s390/s390-64/multiarch/memcmp.S
index eed49c71f5..049847d9cf 100644
--- a/sysdeps/s390/s390-64/multiarch/memcmp.S
+++ b/sysdeps/s390/s390-64/multiarch/memcmp.S
@@ -29,7 +29,7 @@
#if IS_IN (libc)
-ENTRY(memcmp_z196)
+ENTRY(__memcmp_z196)
.machine "z196"
ltgr %r4,%r4
je .L_Z196_4
@@ -60,9 +60,9 @@ ENTRY(memcmp_z196)
br %r14
.L_Z196_14:
clc 0(1,%r3),0(%r2)
-END(memcmp_z196)
+END(__memcmp_z196)
-ENTRY(memcmp_z10)
+ENTRY(__memcmp_z10)
.machine "z10"
ltgr %r4,%r4
je .L_Z10_4
@@ -87,7 +87,7 @@ ENTRY(memcmp_z10)
j .L_Z10_3
.L_Z10_15:
clc 0(1,%r3),0(%r2)
-END(memcmp_z10)
+END(__memcmp_z10)
#endif
@@ -95,7 +95,7 @@ END(memcmp_z10)
#if !IS_IN (libc)
.globl memcmp
-.set memcmp,memcmp_z900
+.set memcmp,__memcmp_z900
.weak bcmp
-.set bcmp,memcmp_z900
+.set bcmp,__memcmp_z900
#endif
diff --git a/sysdeps/s390/s390-64/multiarch/memcpy.S b/sysdeps/s390/s390-64/multiarch/memcpy.S
index 575ff4307e..fc670c7ac4 100644
--- a/sysdeps/s390/s390-64/multiarch/memcpy.S
+++ b/sysdeps/s390/s390-64/multiarch/memcpy.S
@@ -29,7 +29,7 @@
#if defined SHARED && IS_IN (libc)
-ENTRY(memcpy_z196)
+ENTRY(__memcpy_z196)
.machine "z196"
ltgr %r4,%r4
je .L_Z196_4
@@ -44,7 +44,7 @@ ENTRY(memcpy_z196)
br %r14
.L_Z196_5:
cgfi %r5,262144 # Switch to mvcle for copies >64MB
- jh memcpy_mvcle
+ jh __memcpy_mvcle
.L_Z196_2:
pfd 1,768(%r3)
pfd 2,768(%r1)
@@ -56,9 +56,9 @@ ENTRY(memcpy_z196)
j .L_Z196_3
.L_Z196_14:
mvc 0(1,%r1),0(%r3)
-END(memcpy_z196)
+END(__memcpy_z196)
-ENTRY(memcpy_z10)
+ENTRY(__memcpy_z10)
.machine "z10"
cgije %r4,0,.L_Z10_4
aghi %r4,-1
@@ -71,7 +71,7 @@ ENTRY(memcpy_z10)
br %r14
.L_Z10_13:
cgfi %r5,65535 # Switch to mvcle for copies >16MB
- jh memcpy_mvcle
+ jh __memcpy_mvcle
.L_Z10_12:
pfd 1,768(%r3)
pfd 2,768(%r1)
@@ -82,7 +82,7 @@ ENTRY(memcpy_z10)
j .L_Z10_3
.L_Z10_15:
mvc 0(1,%r1),0(%r3)
-END(memcpy_z10)
+END(__memcpy_z10)
#endif
@@ -90,5 +90,5 @@ END(memcpy_z10)
#if !defined SHARED || !IS_IN (libc)
.globl memcpy
-.set memcpy,memcpy_z900
+.set memcpy,__memcpy_z900
#endif
diff --git a/sysdeps/s390/s390-64/multiarch/memset.S b/sysdeps/s390/s390-64/multiarch/memset.S
index 74345c01e8..3ac110a7e0 100644
--- a/sysdeps/s390/s390-64/multiarch/memset.S
+++ b/sysdeps/s390/s390-64/multiarch/memset.S
@@ -29,7 +29,7 @@
#if IS_IN (libc)
-ENTRY(memset_z196)
+ENTRY(__memset_z196)
.machine "z196"
ltgr %r4,%r4
je .L_Z196_4
@@ -47,7 +47,7 @@ ENTRY(memset_z196)
br %r14
.L_Z196_1:
cgfi %r5,1048576
- jh memset_mvcle # Switch to mvcle for >256MB
+ jh __memset_mvcle # Switch to mvcle for >256MB
.L_Z196_2:
pfd 2,1024(%r1)
mvc 1(256,%r1),0(%r1)
@@ -57,9 +57,9 @@ ENTRY(memset_z196)
j .L_Z196_3
.L_Z196_17:
mvc 1(1,%r1),0(%r1)
-END(memset_z196)
+END(__memset_z196)
-ENTRY(memset_z10)
+ENTRY(__memset_z10)
.machine "z10"
cgije %r4,0,.L_Z10_4
stc %r3,0(%r2)
@@ -74,7 +74,7 @@ ENTRY(memset_z10)
br %r14
.L_Z10_15:
cgfi %r5,163840 # Switch to mvcle for >40MB
- jh memset_mvcle
+ jh __memset_mvcle
.L_Z10_14:
pfd 2,1024(%r1)
mvc 1(256,%r1),0(%r1)
@@ -83,9 +83,9 @@ ENTRY(memset_z10)
j .L_Z10_3
.L_Z10_18:
mvc 1(1,%r1),0(%r1)
-END(memset_z10)
+END(__memset_z10)
-ENTRY(memset_mvcle)
+ENTRY(__memset_mvcle)
aghi %r4,2 # take back the change done by the caller
lgr %r0,%r2 # save source address
lgr %r1,%r3 # move pad byte to R1
@@ -97,7 +97,7 @@ ENTRY(memset_mvcle)
lgr %r2,%r0 # return value is source address
.L1:
br %r14
-END(memset_mvcle)
+END(__memset_mvcle)
#endif
@@ -105,5 +105,5 @@ END(memset_mvcle)
#if !IS_IN (libc)
.globl memset
-.set memset,memset_z900
+.set memset,__memset_z900
#endif