author     Stefan Liebler <stli@linux.vnet.ibm.com>    2016-05-24 10:39:13 +0200
committer  Stefan Liebler <stli@linux.vnet.ibm.com>    2016-05-24 10:39:13 +0200
commit     4c011268960c6f24650672597deed756f21ad363 (patch)
tree       c4c8aba80f0dbdd15fe0bbe2f9383dd6d0091b15 /sysdeps/s390/s390-32
parent     71655832555411915d157b17253c8ffe0848533a (diff)
S390: Implement mempcpy with help of memcpy. [BZ #19765]
There exist optimized memcpy functions on s390, but no optimized mempcpy. This patch adds mempcpy entry points in the memcpy.S files, which reuse the memcpy implementation. Now mempcpy itself is also an IFUNC function, as memcpy is, and the variants are listed in ifunc-impl-list.c.

The s390 string.h does not define _HAVE_STRING_ARCH_mempcpy. Instead, string/string.h inlines mempcpy as memcpy() + n. If n is constant and small enough, GCC emits instructions like mvi or mvc and avoids the function call to memcpy. If n is not constant, then memcpy is called and n is added afterwards. If _HAVE_STRING_ARCH_mempcpy were defined, mempcpy would be called in every case.

According to PR70140 "Inefficient expansion of __builtin_mempcpy" (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70140), GCC should handle a call to mempcpy in the same way as memcpy. Then either the mempcpy macro in string/string.h has to be removed or _HAVE_STRING_ARCH_mempcpy has to be defined for S390.

ChangeLog:

[BZ #19765]
* sysdeps/s390/mempcpy.S: New File.
* sysdeps/s390/multiarch/mempcpy.c: Likewise.
* sysdeps/s390/multiarch/Makefile (sysdep_routines): Add mempcpy.
* sysdeps/s390/multiarch/ifunc-impl-list.c
(__libc_ifunc_impl_list): Add mempcpy variants.
* sysdeps/s390/s390-32/memcpy.S: Add mempcpy entry point.
(memcpy): Adjust to be usable from mempcpy entry point.
(__memcpy_mvcle): Likewise.
* sysdeps/s390/s390-64/memcpy.S: Likewise.
* sysdeps/s390/s390-32/multiarch/memcpy-s390.S: Add entry points
____mempcpy_z196, ____mempcpy_z10 and add __GI_ symbols for mempcpy.
(__memcpy_z196): Adjust to be usable from mempcpy entry point.
(__memcpy_z10): Likewise.
* sysdeps/s390/s390-64/multiarch/memcpy-s390x.S: Likewise.
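As background for the string/string.h behavior described above, the relation the patch relies on can be written in a few lines of C. This is a minimal sketch for illustration only, not code from glibc or from this patch, and the function name mempcpy_via_memcpy is hypothetical:

/* Sketch only: mempcpy copies n bytes like memcpy but returns dest + n.
   For non-constant n, and with _HAVE_STRING_ARCH_mempcpy undefined, this
   is effectively what the string/string.h inline expands to: a call to
   memcpy followed by adding n to the result.  */
#include <stddef.h>
#include <string.h>

static void *
mempcpy_via_memcpy (void *dest, const void *src, size_t n)
{
  return (char *) memcpy (dest, src, n) + n;
}

When n is a small compile-time constant, GCC can expand the memcpy inline (e.g. with mvi or mvc on s390), so no call is emitted at all; the assembly entry points added by this patch cover the remaining, non-inlined calls.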
Diffstat (limited to 'sysdeps/s390/s390-32')
-rw-r--r--  sysdeps/s390/s390-32/memcpy.S                 | 50
-rw-r--r--  sysdeps/s390/s390-32/multiarch/memcpy-s390.S  | 31
2 files changed, 59 insertions, 22 deletions
diff --git a/sysdeps/s390/s390-32/memcpy.S b/sysdeps/s390/s390-32/memcpy.S
index 2ac51ab62e..6be5104b68 100644
--- a/sysdeps/s390/s390-32/memcpy.S
+++ b/sysdeps/s390/s390-32/memcpy.S
@@ -25,12 +25,23 @@
%r3 = address of source memory area
%r4 = number of bytes to copy. */
-#ifdef USE_MULTIARCH
-ENTRY(__memcpy_default)
-#else
-ENTRY(memcpy)
+ .text
+ENTRY(__mempcpy)
+ .machine "g5"
+ lr %r1,%r2 # Use as dest
+ la %r2,0(%r4,%r2) # Return dest + n
+ j .L_G5_start
+END(__mempcpy)
+#ifndef USE_MULTIARCH
+libc_hidden_def (__mempcpy)
+weak_alias (__mempcpy, mempcpy)
+libc_hidden_builtin_def (mempcpy)
#endif
+
+ENTRY(memcpy)
.machine "g5"
+ lr %r1,%r2 # r1: Use as dest ; r2: Return dest
+.L_G5_start:
st %r13,52(%r15)
.cfi_offset 13, -44
basr %r13,0
@@ -41,14 +52,13 @@ ENTRY(memcpy)
lr %r5,%r4
srl %r5,8
ltr %r5,%r5
- lr %r1,%r2
jne .L_G5_13
ex %r4,.L_G5_17-.L_G5_16(%r13)
.L_G5_4:
l %r13,52(%r15)
br %r14
.L_G5_13:
- chi %r5,4096 # Switch to mvcle for copies >1MB
+ chi %r5,4096 # Switch to mvcle for copies >1MB
jh __memcpy_mvcle
.L_G5_12:
mvc 0(256,%r1),0(%r3)
@@ -59,24 +69,24 @@ ENTRY(memcpy)
j .L_G5_4
.L_G5_17:
mvc 0(1,%r1),0(%r3)
-#ifdef USE_MULTIARCH
-END(__memcpy_default)
-#else
END(memcpy)
+#ifndef USE_MULTIARCH
libc_hidden_builtin_def (memcpy)
#endif
ENTRY(__memcpy_mvcle)
- # Using as standalone function will result in unexpected
- # results since the length field is incremented by 1 in order to
- # compensate the changes already done in the functions above.
- ahi %r4,1 # length + 1
- lr %r5,%r4 # source length
- lr %r4,%r3 # source address
- lr %r3,%r5 # destination length = source length
+ # Using as standalone function will result in unexpected
+ # results since the length field is incremented by 1 in order to
+ # compensate the changes already done in the functions above.
+ lr %r0,%r2 # backup return dest [ + n ]
+ ahi %r4,1 # length + 1
+ lr %r5,%r4 # source length
+ lr %r4,%r3 # source address
+ lr %r2,%r1 # destination address
+ lr %r3,%r5 # destination length = source length
.L_MVCLE_1:
- mvcle %r2,%r4,0 # thats it, MVCLE is your friend
- jo .L_MVCLE_1
- lr %r2,%r1 # return destination address
- br %r14
+ mvcle %r2,%r4,0 # thats it, MVCLE is your friend
+ jo .L_MVCLE_1
+ lr %r2,%r0 # return destination address
+ br %r14
END(__memcpy_mvcle)
diff --git a/sysdeps/s390/s390-32/multiarch/memcpy-s390.S b/sysdeps/s390/s390-32/multiarch/memcpy-s390.S
index 92ffaea5ed..297a8943b1 100644
--- a/sysdeps/s390/s390-32/multiarch/memcpy-s390.S
+++ b/sysdeps/s390/s390-32/multiarch/memcpy-s390.S
@@ -29,14 +29,23 @@
#if defined SHARED && IS_IN (libc)
+ENTRY(____mempcpy_z196)
+ .machine "z196"
+ .machinemode "zarch_nohighgprs"
+ lr %r1,%r2 # Use as dest
+ la %r2,0(%r4,%r2) # Return dest + n
+ j .L_Z196_start
+END(____mempcpy_z196)
+
ENTRY(__memcpy_z196)
.machine "z196"
.machinemode "zarch_nohighgprs"
+ lr %r1,%r2 # r1: Use as dest ; r2: Return dest
+.L_Z196_start:
llgfr %r4,%r4
ltgr %r4,%r4
je .L_Z196_4
aghi %r4,-1
- lr %r1,%r2
srlg %r5,%r4,8
ltgr %r5,%r5
jne .L_Z196_5
@@ -60,13 +69,22 @@ ENTRY(__memcpy_z196)
mvc 0(1,%r1),0(%r3)
END(__memcpy_z196)
+ENTRY(____mempcpy_z10)
+ .machine "z10"
+ .machinemode "zarch_nohighgprs"
+ lr %r1,%r2 # Use as dest
+ la %r2,0(%r4,%r2) # Return dest + n
+ j .L_Z10_start
+END(____mempcpy_z10)
+
ENTRY(__memcpy_z10)
.machine "z10"
.machinemode "zarch_nohighgprs"
+ lr %r1,%r2 # r1: Use as dest ; r2: Return dest
+.L_Z10_start:
llgfr %r4,%r4
cgije %r4,0,.L_Z10_4
aghi %r4,-1
- lr %r1,%r2
srlg %r5,%r4,8
cgijlh %r5,0,.L_Z10_13
.L_Z10_3:
@@ -88,14 +106,23 @@ ENTRY(__memcpy_z10)
mvc 0(1,%r1),0(%r3)
END(__memcpy_z10)
+# define __mempcpy ____mempcpy_default
#endif /* SHARED && IS_IN (libc) */
+#define memcpy __memcpy_default
#include "../memcpy.S"
+#undef memcpy
#if defined SHARED && IS_IN (libc)
.globl __GI_memcpy
.set __GI_memcpy,__memcpy_default
+.globl __GI_mempcpy
+.set __GI_mempcpy,____mempcpy_default
+.globl __GI___mempcpy
+.set __GI___mempcpy,____mempcpy_default
#else
.globl memcpy
.set memcpy,__memcpy_default
+.weak mempcpy
+.set mempcpy,__mempcpy
#endif