668eaab0c7
* Fri Jul 22 2022 Arjun Shankar <arjun@redhat.com> - 2.34-40
- Sync with upstream branch release/2.34/master,
  commit b2f32e746492615a6eb3e66fac1e766e32e8deb1:
- malloc: Simplify implementation of __malloc_assert
- Update syscall-names.list for Linux 5.18
- x86: Add missing IS_IN (libc) check to strncmp-sse4_2.S
- x86: Move mem{p}{mov|cpy}_{chk_}erms to its own file
- x86: Move and slightly improve memset_erms
- x86: Add definition for __wmemset_chk AVX2 RTM in ifunc impl list
- x86: Put wcs{n}len-sse4.1 in the sse4.1 text section
- x86: Align entry for memrchr to 64-bytes.
- x86: Add BMI1/BMI2 checks for ISA_V3 check
- x86: Cleanup bounds checking in large memcpy case
- x86: Add bounds `x86_non_temporal_threshold`
- x86: Add sse42 implementation to strcmp's ifunc
- x86: Fix misordered logic for setting `rep_movsb_stop_threshold`
- x86: Align varshift table to 32-bytes
- x86: ZERO_UPPER_VEC_REGISTERS_RETURN_XTEST expect no transactions
- x86: Shrink code size of memchr-evex.S
- x86: Shrink code size of memchr-avx2.S
- x86: Optimize memrchr-avx2.S
- x86: Optimize memrchr-evex.S
- x86: Optimize memrchr-sse2.S
- x86: Add COND_VZEROUPPER that can replace vzeroupper if no `ret`
- x86: Create header for VEC classes in x86 strings library
- x86_64: Add strstr function with 512-bit EVEX
- x86-64: Ignore r_addend for R_X86_64_GLOB_DAT/R_X86_64_JUMP_SLOT
- x86_64: Implement evex512 version of strlen, strnlen, wcslen and wcsnlen
- x86_64: Remove bzero optimization
- x86_64: Remove end of line trailing spaces
- nptl: Fix ___pthread_unregister_cancel_restore asynchronous restore
- linux: Fix mq_timereceive check for 32 bit fallback code (BZ 29304)
Resolves: #2109505
commit aadd0a1c7c89d016e1186c81c0efcafa36bf84fc
Author: Noah Goldstein <goldstein.w.n@gmail.com>
Date:   Fri Jun 24 09:42:15 2022 -0700

    x86: Put wcs{n}len-sse4.1 in the sse4.1 text section

    Previously was missing but the two implementations shouldn't get in
    the sse2 (generic) text section.

    (cherry picked from commit afc6e4328ff80973bde50d5401691b4c4b2e522c)

diff --git a/sysdeps/x86_64/multiarch/strlen-vec.S b/sysdeps/x86_64/multiarch/strlen-vec.S
index 031753a91763b351..762f4755020c35f9 100644
--- a/sysdeps/x86_64/multiarch/strlen-vec.S
+++ b/sysdeps/x86_64/multiarch/strlen-vec.S
@@ -28,6 +28,10 @@
 # define SHIFT_RETURN
 #endif

+#ifndef SECTION
+# define SECTION(p)	p
+#endif
+
 /* Long lived register in strlen(s), strnlen(s, n) are:

 	%xmm3 - zero
@@ -37,7 +41,7 @@
 */


-.text
+	.section SECTION(.text),"ax",@progbits
 ENTRY(strlen)

 /* Test 64 bytes from %rax for zero.  Save result as bitmask in %rdx.  */
diff --git a/sysdeps/x86_64/multiarch/wcslen-sse4_1.S b/sysdeps/x86_64/multiarch/wcslen-sse4_1.S
index 7e62621afc729492..e306a77f51e650d1 100644
--- a/sysdeps/x86_64/multiarch/wcslen-sse4_1.S
+++ b/sysdeps/x86_64/multiarch/wcslen-sse4_1.S
@@ -1,4 +1,5 @@
 #define AS_WCSLEN
 #define strlen __wcslen_sse4_1
+#define SECTION(p)	p##.sse4.1

 #include "strlen-vec.S"
diff --git a/sysdeps/x86_64/multiarch/wcsnlen-sse4_1.S b/sysdeps/x86_64/multiarch/wcsnlen-sse4_1.S
index 5fa51fe07cbbdf5c..d2f7dd6e2254736c 100644
--- a/sysdeps/x86_64/multiarch/wcsnlen-sse4_1.S
+++ b/sysdeps/x86_64/multiarch/wcsnlen-sse4_1.S
@@ -1,5 +1,6 @@
 #define AS_WCSLEN
 #define AS_STRNLEN
 #define strlen __wcsnlen_sse4_1
+#define SECTION(p)	p##.sse4.1

 #include "strlen-vec.S"
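A minimal sketch of what the SECTION macro override accomplishes (not part of the patch above; the file name and symbol below are hypothetical). With the per-ISA override defined before including strlen-vec.S, the shared .section directive expands to an sse4.1-specific text section instead of the generic one:

/* demo-section-macro.S -- illustrative only; assumes GNU as with cpp.  */
#define SECTION(p) p##.sse4.1                     /* per-ISA override, as in wcslen-sse4_1.S */

	.section SECTION(.text),"ax",@progbits    /* expands to: .section .text.sse4.1 */
	.globl demo
demo:
	ret

/* Files that do not define SECTION pick up the fallback
   "# define SECTION(p) p" added to strlen-vec.S, so the same
   directive expands to the plain ".section .text" and the generic
   sse2 build keeps its previous placement.  */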