[x86_64] Update memcpy, mempcpy and memmove selection order for Excavator CPU (BZ #19583)

Submitter Pawar, Amit
Date March 18, 2016, 1:22 p.m.
Message ID <SN1PR12MB0733522F9520520B45459C24978C0@SN1PR12MB0733.namprd12.prod.outlook.com>
Permalink /patch/11376/
State New

Comments

Pawar, Amit - March 18, 2016, 1:22 p.m.
>No, it isn't fixed.  Avoid_AVX_Fast_Unaligned_Load should disable __memcpy_avx_unaligned and nothing more.  Also you need to fix ALL selections.

Will update all the IFUNC selectors if this is OK; otherwise, please suggest an alternative.

--Amit Pawar

Patch

diff --git a/sysdeps/x86_64/multiarch/memcpy.S b/sysdeps/x86_64/multiarch/memcpy.S
index 8882590..a5afaf4 100644
--- a/sysdeps/x86_64/multiarch/memcpy.S
+++ b/sysdeps/x86_64/multiarch/memcpy.S
@@ -39,6 +39,8 @@ ENTRY(__new_memcpy)
 	ret
 #endif
 1:	lea	__memcpy_avx_unaligned(%rip), %RAX_LP
+	HAS_ARCH_FEATURE (Avoid_AVX_Fast_Unaligned_Load)
+	jnz	3f
 	HAS_ARCH_FEATURE (AVX_Fast_Unaligned_Load)
 	jnz	2f
 	lea	__memcpy_sse2_unaligned(%rip), %RAX_LP
@@ -52,6 +54,8 @@ ENTRY(__new_memcpy)
 	jnz	2f
 	lea	__memcpy_ssse3(%rip), %RAX_LP
 2:	ret
+3:	lea	__memcpy_ssse3(%rip), %RAX_LP
+	ret
 END(__new_memcpy)
 
 # undef ENTRY
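
For reference, the following is a minimal, self-contained C sketch of the selection idea the assembly above implements. It is not glibc source: the variant functions, the my_memcpy name, and the "bdver4" (Excavator) check are illustrative stand-ins for glibc's HAS_ARCH_FEATURE (Avoid_AVX_Fast_Unaligned_Load) logic, and it assumes GCC on an ELF x86-64 target where the ifunc attribute and the __builtin_cpu_* builtins are available.

#include <stddef.h>
#include <stdio.h>
#include <string.h>

/* Illustrative variants; glibc's real __memcpy_avx_unaligned and
   __memcpy_ssse3 are hand-written assembly.  */
static void *
memcpy_avx_variant (void *dst, const void *src, size_t n)
{
  return memcpy (dst, src, n);
}

static void *
memcpy_ssse3_variant (void *dst, const void *src, size_t n)
{
  return memcpy (dst, src, n);
}

/* IFUNC resolver: runs once at relocation time and returns the
   implementation to bind.  __builtin_cpu_is ("bdver4") stands in for
   the Avoid_AVX_Fast_Unaligned_Load feature bit set on Excavator.  */
static void *(*
resolve_memcpy (void)) (void *, const void *, size_t)
{
  __builtin_cpu_init ();
  if (__builtin_cpu_is ("bdver4"))
    return memcpy_ssse3_variant;   /* Skip the AVX variant entirely.  */
  if (__builtin_cpu_supports ("avx"))
    return memcpy_avx_variant;
  return memcpy_ssse3_variant;
}

void *my_memcpy (void *, const void *, size_t)
  __attribute__ ((ifunc ("resolve_memcpy")));

int
main (void)
{
  char buf[8];
  my_memcpy (buf, "ifunc", 6);
  puts (buf);
  return 0;
}

As in the patch, the point of the ordering is that the avoid-AVX check must come before the AVX availability check: on Excavator both conditions hold, so testing AVX first would still select the AVX variant.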