about summary refs log tree commit diff
path: root/sysdeps/aarch64/multiarch/ifunc-impl-list.c
diff options
context:
space:
mode:
author: Wilco Dijkstra <wilco.dijkstra@arm.com> 2024-12-24 18:01:59 +0000
committer: Wilco Dijkstra <wilco.dijkstra@arm.com> 2025-02-20 15:31:50 +0000
commit: 163b1bbb76caba4d9673c07940c5930a1afa7548 (patch)
tree: 8b99d9c978ffdd89299c245ea01916ea4c0b1621 /sysdeps/aarch64/multiarch/ifunc-impl-list.c
parent: 5a4573be6f96ff49111bb6cae767676b5aafa7a8 (diff)
downloadglibc-163b1bbb76caba4d9673c07940c5930a1afa7548.tar.xz
glibc-163b1bbb76caba4d9673c07940c5930a1afa7548.zip
AArch64: Add SVE memset
Add SVE memset based on the generic memset with predicated load for sizes < 16. Unaligned memsets of 128-1024 are improved by ~20% on average by using aligned stores for the last 64 bytes. Performance of random memset benchmark improves by ~2% on Neoverse V1. Reviewed-by: Yury Khrustalev <yury.khrustalev@arm.com>
Diffstat (limited to 'sysdeps/aarch64/multiarch/ifunc-impl-list.c')
-rw-r--r--  sysdeps/aarch64/multiarch/ifunc-impl-list.c | 1 +
1 file changed, 1 insertion(+), 0 deletions(-)
diff --git a/sysdeps/aarch64/multiarch/ifunc-impl-list.c b/sysdeps/aarch64/multiarch/ifunc-impl-list.c
index 0481e450be..8dc314b67d 100644
--- a/sysdeps/aarch64/multiarch/ifunc-impl-list.c
+++ b/sysdeps/aarch64/multiarch/ifunc-impl-list.c
@@ -57,6 +57,7 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
IFUNC_IMPL_ADD (array, i, memset, 1, __memset_kunpeng)
#if HAVE_AARCH64_SVE_ASM
IFUNC_IMPL_ADD (array, i, memset, sve && !bti && zva_size == 256, __memset_a64fx)
+ IFUNC_IMPL_ADD (array, i, memset, sve && zva_size == 64, __memset_sve_zva64)
#endif
IFUNC_IMPL_ADD (array, i, memset, mops, __memset_mops)
IFUNC_IMPL_ADD (array, i, memset, 1, __memset_generic))