b1dca6
commit 107e6a3c2212ba7a3a4ec7cae8d82d73f7c95d0b
b1dca6
Author: H.J. Lu <hjl.tools@gmail.com>
b1dca6
Date:   Mon Jun 29 16:36:08 2020 -0700
b1dca6
b1dca6
    x86: Support usable check for all CPU features
b1dca6
    
b1dca6
    Support usable check for all CPU features with the following changes:
b1dca6
    
b1dca6
    1. Change struct cpu_features to
b1dca6
    
b1dca6
    struct cpuid_features
b1dca6
    {
b1dca6
      struct cpuid_registers cpuid;
b1dca6
      struct cpuid_registers usable;
b1dca6
    };
b1dca6
    
b1dca6
    struct cpu_features
b1dca6
    {
b1dca6
      struct cpu_features_basic basic;
b1dca6
      struct cpuid_features features[COMMON_CPUID_INDEX_MAX];
b1dca6
      unsigned int preferred[PREFERRED_FEATURE_INDEX_MAX];
b1dca6
    ...
b1dca6
    };
b1dca6
    
b1dca6
    so that there is a usable bit for each cpuid bit.
b1dca6
    2. After the cpuid bits have been initialized, copy the known bits to the
b1dca6
    usable bits.  EAX/EBX from INDEX_1 and EAX from INDEX_7 aren't used for
b1dca6
    CPU feature detection.
b1dca6
    3. Clear the usable bits which require OS support.
b1dca6
    4. If the feature is supported by OS, copy its cpuid bit to its usable
b1dca6
    bit.
b1dca6
    5. Replace HAS_CPU_FEATURE and CPU_FEATURES_CPU_P with CPU_FEATURE_USABLE
b1dca6
    and CPU_FEATURE_USABLE_P to check if a feature is usable.
b1dca6
    6. Add DEPR_FPU_CS_DS for INDEX_7_EBX_13.
b1dca6
    7. Unset MPX feature since it has been deprecated.
b1dca6
    
b1dca6
    The results are
b1dca6
    
b1dca6
    1. If the feature is known and doesn't require OS support, its usable bit
b1dca6
    is copied from the cpuid bit.
b1dca6
    2. Otherwise, its usable bit is copied from the cpuid bit only if the
b1dca6
    feature is known to be supported by OS.
b1dca6
    3. CPU_FEATURE_USABLE/CPU_FEATURE_USABLE_P are used to check if the
b1dca6
    feature can be used.
b1dca6
    4. HAS_CPU_FEATURE/CPU_FEATURE_CPU_P are used to check if CPU supports
b1dca6
    the feature.
b1dca6
b1dca6
diff --git a/sysdeps/i386/fpu/fclrexcpt.c b/sysdeps/i386/fpu/fclrexcpt.c
b1dca6
index 8463b102e7b79f07..9eff917e88235c64 100644
b1dca6
--- a/sysdeps/i386/fpu/fclrexcpt.c
b1dca6
+++ b/sysdeps/i386/fpu/fclrexcpt.c
b1dca6
@@ -41,7 +41,7 @@ __feclearexcept (int excepts)
b1dca6
   __asm__ ("fldenv %0" : : "m" (*&temp));
b1dca6
 
b1dca6
   /* If the CPU supports SSE, we clear the MXCSR as well.  */
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     {
b1dca6
       unsigned int xnew_exc;
b1dca6
 
b1dca6
diff --git a/sysdeps/i386/fpu/fedisblxcpt.c b/sysdeps/i386/fpu/fedisblxcpt.c
b1dca6
index e2738e6d6c8304fe..3b5436018d08a269 100644
b1dca6
--- a/sysdeps/i386/fpu/fedisblxcpt.c
b1dca6
+++ b/sysdeps/i386/fpu/fedisblxcpt.c
b1dca6
@@ -38,7 +38,7 @@ fedisableexcept (int excepts)
b1dca6
   __asm__ ("fldcw %0" : : "m" (*&new_exc));
b1dca6
 
b1dca6
   /* If the CPU supports SSE we set the MXCSR as well.  */
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     {
b1dca6
       unsigned int xnew_exc;
b1dca6
 
b1dca6
diff --git a/sysdeps/i386/fpu/feenablxcpt.c b/sysdeps/i386/fpu/feenablxcpt.c
b1dca6
index a4d986266636835b..88f46f6078e12e2c 100644
b1dca6
--- a/sysdeps/i386/fpu/feenablxcpt.c
b1dca6
+++ b/sysdeps/i386/fpu/feenablxcpt.c
b1dca6
@@ -38,7 +38,7 @@ feenableexcept (int excepts)
b1dca6
   __asm__ ("fldcw %0" : : "m" (*&new_exc));
b1dca6
 
b1dca6
   /* If the CPU supports SSE we set the MXCSR as well.  */
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     {
b1dca6
       unsigned int xnew_exc;
b1dca6
 
b1dca6
diff --git a/sysdeps/i386/fpu/fegetenv.c b/sysdeps/i386/fpu/fegetenv.c
b1dca6
index 2a1a8507bac9bfa5..2a800fb6d6e856f3 100644
b1dca6
--- a/sysdeps/i386/fpu/fegetenv.c
b1dca6
+++ b/sysdeps/i386/fpu/fegetenv.c
b1dca6
@@ -31,7 +31,7 @@ __fegetenv (fenv_t *envp)
b1dca6
      would block all exceptions.  */
b1dca6
   __asm__ ("fldenv %0" : : "m" (*envp));
b1dca6
 
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     __asm__ ("stmxcsr %0" : "=m" (envp->__eip));
b1dca6
 
b1dca6
   /* Success.  */
b1dca6
diff --git a/sysdeps/i386/fpu/fegetmode.c b/sysdeps/i386/fpu/fegetmode.c
b1dca6
index 86de9f5548f4b0b4..b01ca64fc9187b10 100644
b1dca6
--- a/sysdeps/i386/fpu/fegetmode.c
b1dca6
+++ b/sysdeps/i386/fpu/fegetmode.c
b1dca6
@@ -26,7 +26,7 @@ int
b1dca6
 fegetmode (femode_t *modep)
b1dca6
 {
b1dca6
   _FPU_GETCW (modep->__control_word);
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     __asm__ ("stmxcsr %0" : "=m" (modep->__mxcsr));
b1dca6
   return 0;
b1dca6
 }
b1dca6
diff --git a/sysdeps/i386/fpu/feholdexcpt.c b/sysdeps/i386/fpu/feholdexcpt.c
b1dca6
index 270554df31928cda..e2f3f97b9494f900 100644
b1dca6
--- a/sysdeps/i386/fpu/feholdexcpt.c
b1dca6
+++ b/sysdeps/i386/fpu/feholdexcpt.c
b1dca6
@@ -30,7 +30,7 @@ __feholdexcept (fenv_t *envp)
b1dca6
   __asm__ volatile ("fnstenv %0; fnclex" : "=m" (*envp));
b1dca6
 
b1dca6
   /* If the CPU supports SSE we set the MXCSR as well.  */
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     {
b1dca6
       unsigned int xwork;
b1dca6
 
b1dca6
diff --git a/sysdeps/i386/fpu/fesetenv.c b/sysdeps/i386/fpu/fesetenv.c
b1dca6
index 6df6849da4007a45..5c8bf1f71a474aa9 100644
b1dca6
--- a/sysdeps/i386/fpu/fesetenv.c
b1dca6
+++ b/sysdeps/i386/fpu/fesetenv.c
b1dca6
@@ -79,7 +79,7 @@ __fesetenv (const fenv_t *envp)
b1dca6
 
b1dca6
   __asm__ ("fldenv %0" : : "m" (temp));
b1dca6
 
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     {
b1dca6
       unsigned int mxcsr;
b1dca6
       __asm__ ("stmxcsr %0" : "=m" (mxcsr));
b1dca6
diff --git a/sysdeps/i386/fpu/fesetmode.c b/sysdeps/i386/fpu/fesetmode.c
b1dca6
index 9aad6ea99f810786..35881b6adf5b0aed 100644
b1dca6
--- a/sysdeps/i386/fpu/fesetmode.c
b1dca6
+++ b/sysdeps/i386/fpu/fesetmode.c
b1dca6
@@ -35,7 +35,7 @@ fesetmode (const femode_t *modep)
b1dca6
   else
b1dca6
     cw = modep->__control_word;
b1dca6
   _FPU_SETCW (cw);
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     {
b1dca6
       unsigned int mxcsr;
b1dca6
       __asm__ ("stmxcsr %0" : "=m" (mxcsr));
b1dca6
diff --git a/sysdeps/i386/fpu/fesetround.c b/sysdeps/i386/fpu/fesetround.c
b1dca6
index d260046c65d0aba0..5d38b6b8624bdaef 100644
b1dca6
--- a/sysdeps/i386/fpu/fesetround.c
b1dca6
+++ b/sysdeps/i386/fpu/fesetround.c
b1dca6
@@ -37,7 +37,7 @@ __fesetround (int round)
b1dca6
   __asm__ ("fldcw %0" : : "m" (*&cw));
b1dca6
 
b1dca6
   /* If the CPU supports SSE we set the MXCSR as well.  */
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     {
b1dca6
       unsigned int xcw;
b1dca6
 
b1dca6
diff --git a/sysdeps/i386/fpu/feupdateenv.c b/sysdeps/i386/fpu/feupdateenv.c
b1dca6
index db3ff96dfa8336ec..1246b21e30740922 100644
b1dca6
--- a/sysdeps/i386/fpu/feupdateenv.c
b1dca6
+++ b/sysdeps/i386/fpu/feupdateenv.c
b1dca6
@@ -32,7 +32,7 @@ __feupdateenv (const fenv_t *envp)
b1dca6
   __asm__ ("fnstsw %0" : "=m" (*&temp));
b1dca6
 
b1dca6
   /* If the CPU supports SSE we test the MXCSR as well.  */
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     __asm__ ("stmxcsr %0" : "=m" (*&xtemp));
b1dca6
 
b1dca6
   temp = (temp | xtemp) & FE_ALL_EXCEPT;
b1dca6
diff --git a/sysdeps/i386/fpu/fgetexcptflg.c b/sysdeps/i386/fpu/fgetexcptflg.c
b1dca6
index 39d1f7df3aa24b25..acb2ae15ea681c13 100644
b1dca6
--- a/sysdeps/i386/fpu/fgetexcptflg.c
b1dca6
+++ b/sysdeps/i386/fpu/fgetexcptflg.c
b1dca6
@@ -34,7 +34,7 @@ __fegetexceptflag (fexcept_t *flagp, int excepts)
b1dca6
   *flagp = temp & excepts & FE_ALL_EXCEPT;
b1dca6
 
b1dca6
   /* If the CPU supports SSE, we clear the MXCSR as well.  */
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     {
b1dca6
       unsigned int sse_exc;
b1dca6
 
b1dca6
diff --git a/sysdeps/i386/fpu/fsetexcptflg.c b/sysdeps/i386/fpu/fsetexcptflg.c
b1dca6
index 21e70251cfbf8a73..caa15c0cf105a9bc 100644
b1dca6
--- a/sysdeps/i386/fpu/fsetexcptflg.c
b1dca6
+++ b/sysdeps/i386/fpu/fsetexcptflg.c
b1dca6
@@ -41,7 +41,7 @@ __fesetexceptflag (const fexcept_t *flagp, int excepts)
b1dca6
   __asm__ ("fldenv %0" : : "m" (*&temp));
b1dca6
 
b1dca6
   /* If the CPU supports SSE, we set the MXCSR as well.  */
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     {
b1dca6
       unsigned int xnew_exc;
b1dca6
 
b1dca6
diff --git a/sysdeps/i386/fpu/ftestexcept.c b/sysdeps/i386/fpu/ftestexcept.c
b1dca6
index c1b5e90356bae9da..06d6134e0d85eeef 100644
b1dca6
--- a/sysdeps/i386/fpu/ftestexcept.c
b1dca6
+++ b/sysdeps/i386/fpu/ftestexcept.c
b1dca6
@@ -32,7 +32,7 @@ fetestexcept (int excepts)
b1dca6
   __asm__ ("fnstsw %0" : "=a" (temp));
b1dca6
 
b1dca6
   /* If the CPU supports SSE we test the MXCSR as well.  */
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     __asm__ ("stmxcsr %0" : "=m" (*&xtemp));
b1dca6
 
b1dca6
   return (temp | xtemp) & excepts & FE_ALL_EXCEPT;
b1dca6
diff --git a/sysdeps/i386/i686/fpu/multiarch/s_cosf.c b/sysdeps/i386/i686/fpu/multiarch/s_cosf.c
b1dca6
index a4556a478d16974a..c31592f238d67916 100644
b1dca6
--- a/sysdeps/i386/i686/fpu/multiarch/s_cosf.c
b1dca6
+++ b/sysdeps/i386/i686/fpu/multiarch/s_cosf.c
b1dca6
@@ -23,7 +23,7 @@ extern float __cosf_sse2 (float);
b1dca6
 extern float __cosf_ia32 (float);
b1dca6
 float __cosf (float);
b1dca6
 
b1dca6
-libm_ifunc (__cosf, HAS_CPU_FEATURE (SSE2) ? __cosf_sse2 : __cosf_ia32);
b1dca6
+libm_ifunc (__cosf, CPU_FEATURE_USABLE (SSE2) ? __cosf_sse2 : __cosf_ia32);
b1dca6
 libm_alias_float (__cos, cos);
b1dca6
 
b1dca6
 #define COSF __cosf_ia32
b1dca6
diff --git a/sysdeps/i386/i686/fpu/multiarch/s_sincosf.c b/sysdeps/i386/i686/fpu/multiarch/s_sincosf.c
b1dca6
index 5f21f5c0eda20fd1..116c541dba54dd16 100644
b1dca6
--- a/sysdeps/i386/i686/fpu/multiarch/s_sincosf.c
b1dca6
+++ b/sysdeps/i386/i686/fpu/multiarch/s_sincosf.c
b1dca6
@@ -24,7 +24,7 @@ extern void __sincosf_ia32 (float, float *, float *);
b1dca6
 void __sincosf (float, float *, float *);
b1dca6
 
b1dca6
 libm_ifunc (__sincosf,
b1dca6
-	    HAS_CPU_FEATURE (SSE2) ? __sincosf_sse2 : __sincosf_ia32);
b1dca6
+	    CPU_FEATURE_USABLE (SSE2) ? __sincosf_sse2 : __sincosf_ia32);
b1dca6
 libm_alias_float (__sincos, sincos);
b1dca6
 
b1dca6
 #define SINCOSF __sincosf_ia32
b1dca6
diff --git a/sysdeps/i386/i686/fpu/multiarch/s_sinf.c b/sysdeps/i386/i686/fpu/multiarch/s_sinf.c
b1dca6
index 80a7ffaa1e36b492..63abd34c21a1c83f 100644
b1dca6
--- a/sysdeps/i386/i686/fpu/multiarch/s_sinf.c
b1dca6
+++ b/sysdeps/i386/i686/fpu/multiarch/s_sinf.c
b1dca6
@@ -23,7 +23,7 @@ extern float __sinf_sse2 (float);
b1dca6
 extern float __sinf_ia32 (float);
b1dca6
 float __sinf (float);
b1dca6
 
b1dca6
-libm_ifunc (__sinf, HAS_CPU_FEATURE (SSE2) ? __sinf_sse2 : __sinf_ia32);
b1dca6
+libm_ifunc (__sinf, CPU_FEATURE_USABLE (SSE2) ? __sinf_sse2 : __sinf_ia32);
b1dca6
 libm_alias_float (__sin, sin);
b1dca6
 #define SINF __sinf_ia32
b1dca6
 #include <sysdeps/ieee754/flt-32/s_sinf.c>
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/ifunc-impl-list.c b/sysdeps/i386/i686/multiarch/ifunc-impl-list.c
b1dca6
index a926b04acdfbb889..06e7231d94e21c02 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/ifunc-impl-list.c
b1dca6
+++ b/sysdeps/i386/i686/multiarch/ifunc-impl-list.c
b1dca6
@@ -38,35 +38,35 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/bcopy.S.  */
b1dca6
   IFUNC_IMPL (i, name, bcopy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, bcopy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, bcopy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __bcopy_ssse3_rep)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, bcopy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, bcopy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __bcopy_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, bcopy, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, bcopy, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __bcopy_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, bcopy, 1, __bcopy_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/bzero.S.  */
b1dca6
   IFUNC_IMPL (i, name, bzero,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, bzero, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, bzero, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __bzero_sse2_rep)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, bzero, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, bzero, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __bzero_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, bzero, 1, __bzero_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/memchr.S.  */
b1dca6
   IFUNC_IMPL (i, name, memchr,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memchr_sse2_bsf)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memchr_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memchr, 1, __memchr_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/memcmp.S.  */
b1dca6
   IFUNC_IMPL (i, name, memcmp,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memcmp, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memcmp, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __memcmp_sse4_2)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memcmp, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memcmp, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memcmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcmp, 1, __memcmp_ia32))
b1dca6
 
b1dca6
@@ -74,13 +74,13 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/i386/i686/multiarch/memmove_chk.S.  */
b1dca6
   IFUNC_IMPL (i, name, __memmove_chk,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memmove_chk_ssse3_rep)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memmove_chk_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSE2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memmove_chk_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk, 1,
b1dca6
 			      __memmove_chk_ia32))
b1dca6
@@ -88,19 +88,19 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/memmove.S.  */
b1dca6
   IFUNC_IMPL (i, name, memmove,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memmove, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memmove, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memmove_ssse3_rep)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memmove, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memmove, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memmove_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memmove, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memmove, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memmove_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memmove, 1, __memmove_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/memrchr.S.  */
b1dca6
   IFUNC_IMPL (i, name, memrchr,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memrchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memrchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memrchr_sse2_bsf)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memrchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memrchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memrchr_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memrchr, 1, __memrchr_ia32))
b1dca6
 
b1dca6
@@ -108,10 +108,10 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/i386/i686/multiarch/memset_chk.S.  */
b1dca6
   IFUNC_IMPL (i, name, __memset_chk,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memset_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSE2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memset_chk_sse2_rep)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memset_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSE2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memset_chk_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memset_chk, 1,
b1dca6
 			      __memset_chk_ia32))
b1dca6
@@ -119,102 +119,102 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/memset.S.  */
b1dca6
   IFUNC_IMPL (i, name, memset,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memset, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memset, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memset_sse2_rep)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memset, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memset, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memset_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memset, 1, __memset_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/rawmemchr.S.  */
b1dca6
   IFUNC_IMPL (i, name, rawmemchr,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, rawmemchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, rawmemchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __rawmemchr_sse2_bsf)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, rawmemchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, rawmemchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __rawmemchr_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, rawmemchr, 1, __rawmemchr_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/stpncpy.S.  */
b1dca6
   IFUNC_IMPL (i, name, stpncpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, stpncpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, stpncpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __stpncpy_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, stpncpy, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, stpncpy, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __stpncpy_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, stpncpy, 1, __stpncpy_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/stpcpy.S.  */
b1dca6
   IFUNC_IMPL (i, name, stpcpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, stpcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, stpcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __stpcpy_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, stpcpy, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, stpcpy, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __stpcpy_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, stpcpy, 1, __stpcpy_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strcasecmp.S.  */
b1dca6
   IFUNC_IMPL (i, name, strcasecmp,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp,
b1dca6
-			      HAS_CPU_FEATURE (SSE4_2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strcasecmp_sse4_2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strcasecmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp, 1, __strcasecmp_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strcasecmp_l.S.  */
b1dca6
   IFUNC_IMPL (i, name, strcasecmp_l,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp_l,
b1dca6
-			      HAS_CPU_FEATURE (SSE4_2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strcasecmp_l_sse4_2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp_l,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strcasecmp_l_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp_l, 1,
b1dca6
 			      __strcasecmp_l_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strcat.S.  */
b1dca6
   IFUNC_IMPL (i, name, strcat,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcat, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcat, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strcat_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcat, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcat, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strcat_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcat, 1, __strcat_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strchr.S.  */
b1dca6
   IFUNC_IMPL (i, name, strchr,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strchr_sse2_bsf)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strchr_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strchr, 1, __strchr_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strcmp.S.  */
b1dca6
   IFUNC_IMPL (i, name, strcmp,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcmp, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcmp, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strcmp_sse4_2)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcmp, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcmp, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strcmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcmp, 1, __strcmp_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strcpy.S.  */
b1dca6
   IFUNC_IMPL (i, name, strcpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strcpy_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcpy, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcpy, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strcpy_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcpy, 1, __strcpy_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strcspn.S.  */
b1dca6
   IFUNC_IMPL (i, name, strcspn,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcspn, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcspn, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strcspn_sse42)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcspn, 1, __strcspn_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strncase.S.  */
b1dca6
   IFUNC_IMPL (i, name, strncasecmp,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp,
b1dca6
-			      HAS_CPU_FEATURE (SSE4_2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strncasecmp_sse4_2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strncasecmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp, 1,
b1dca6
 			      __strncasecmp_ia32))
b1dca6
@@ -222,91 +222,91 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strncase_l.S.  */
b1dca6
   IFUNC_IMPL (i, name, strncasecmp_l,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp_l,
b1dca6
-			      HAS_CPU_FEATURE (SSE4_2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strncasecmp_l_sse4_2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp_l,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strncasecmp_l_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp_l, 1,
b1dca6
 			      __strncasecmp_l_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strncat.S.  */
b1dca6
   IFUNC_IMPL (i, name, strncat,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncat, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncat, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strncat_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncat, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncat, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strncat_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncat, 1, __strncat_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strncpy.S.  */
b1dca6
   IFUNC_IMPL (i, name, strncpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strncpy_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncpy, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncpy, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strncpy_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncpy, 1, __strncpy_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strnlen.S.  */
b1dca6
   IFUNC_IMPL (i, name, strnlen,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strnlen, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strnlen, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strnlen_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strnlen, 1, __strnlen_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strpbrk.S.  */
b1dca6
   IFUNC_IMPL (i, name, strpbrk,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strpbrk, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strpbrk, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strpbrk_sse42)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strpbrk, 1, __strpbrk_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strrchr.S.  */
b1dca6
   IFUNC_IMPL (i, name, strrchr,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strrchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strrchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strrchr_sse2_bsf)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strrchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strrchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strrchr_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strrchr, 1, __strrchr_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strspn.S.  */
b1dca6
   IFUNC_IMPL (i, name, strspn,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strspn, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strspn, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strspn_sse42)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strspn, 1, __strspn_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/wcschr.S.  */
b1dca6
   IFUNC_IMPL (i, name, wcschr,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, wcschr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, wcschr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __wcschr_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcschr, 1, __wcschr_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/wcscmp.S.  */
b1dca6
   IFUNC_IMPL (i, name, wcscmp,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, wcscmp, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, wcscmp, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __wcscmp_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcscmp, 1, __wcscmp_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/wcscpy.S.  */
b1dca6
   IFUNC_IMPL (i, name, wcscpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, wcscpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, wcscpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __wcscpy_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcscpy, 1, __wcscpy_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/wcslen.S.  */
b1dca6
   IFUNC_IMPL (i, name, wcslen,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, wcslen, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, wcslen, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __wcslen_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcslen, 1, __wcslen_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/wcsrchr.S.  */
b1dca6
   IFUNC_IMPL (i, name, wcsrchr,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, wcsrchr, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, wcsrchr, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __wcsrchr_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcsrchr, 1, __wcsrchr_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/wmemcmp.S.  */
b1dca6
   IFUNC_IMPL (i, name, wmemcmp,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, wmemcmp, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, wmemcmp, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __wmemcmp_sse4_2)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, wmemcmp, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, wmemcmp, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __wmemcmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wmemcmp, 1, __wmemcmp_ia32))
b1dca6
 
b1dca6
@@ -314,64 +314,64 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/i386/i686/multiarch/memcpy_chk.S.  */
b1dca6
   IFUNC_IMPL (i, name, __memcpy_chk,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memcpy_chk_ssse3_rep)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memcpy_chk_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSE2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memcpy_chk_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk, 1,
b1dca6
 			      __memcpy_chk_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/memcpy.S.  */
b1dca6
   IFUNC_IMPL (i, name, memcpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memcpy_ssse3_rep)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memcpy_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memcpy, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memcpy, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __memcpy_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcpy, 1, __memcpy_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/mempcpy_chk.S.  */
b1dca6
   IFUNC_IMPL (i, name, __mempcpy_chk,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __mempcpy_chk_ssse3_rep)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __mempcpy_chk_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSE2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __mempcpy_chk_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk, 1,
b1dca6
 			      __mempcpy_chk_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/mempcpy.S.  */
b1dca6
   IFUNC_IMPL (i, name, mempcpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, mempcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, mempcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __mempcpy_ssse3_rep)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, mempcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, mempcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __mempcpy_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, mempcpy, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, mempcpy, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __mempcpy_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, mempcpy, 1, __mempcpy_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strlen.S.  */
b1dca6
   IFUNC_IMPL (i, name, strlen,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strlen, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strlen, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strlen_sse2_bsf)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strlen, HAS_CPU_FEATURE (SSE2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strlen, CPU_FEATURE_USABLE (SSE2),
b1dca6
 			      __strlen_sse2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strlen, 1, __strlen_ia32))
b1dca6
 
b1dca6
   /* Support sysdeps/i386/i686/multiarch/strncmp.S.  */
b1dca6
   IFUNC_IMPL (i, name, strncmp,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncmp, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncmp, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strncmp_sse4_2)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncmp, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncmp, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strncmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncmp, 1, __strncmp_ia32))
b1dca6
 #endif
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/ifunc-memmove.h b/sysdeps/i386/i686/multiarch/ifunc-memmove.h
b1dca6
index f0e97561784a82d5..cd4333f84b114552 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/ifunc-memmove.h
b1dca6
+++ b/sysdeps/i386/i686/multiarch/ifunc-memmove.h
b1dca6
@@ -33,7 +33,7 @@ IFUNC_SELECTOR (void)
b1dca6
   if (CPU_FEATURES_ARCH_P (cpu_features, Fast_Unaligned_Load))
b1dca6
     return OPTIMIZE (sse2_unaligned);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
b1dca6
     {
b1dca6
       if (CPU_FEATURES_ARCH_P (cpu_features, Fast_Rep_String))
b1dca6
 	return OPTIMIZE (ssse3_rep);
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/ifunc-memset.h b/sysdeps/i386/i686/multiarch/ifunc-memset.h
b1dca6
index e96609439aef30d1..6cf96ebcd480dba4 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/ifunc-memset.h
b1dca6
+++ b/sysdeps/i386/i686/multiarch/ifunc-memset.h
b1dca6
@@ -28,7 +28,7 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE2))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE2))
b1dca6
     {
b1dca6
       if (CPU_FEATURES_ARCH_P (cpu_features, Fast_Rep_String))
b1dca6
 	return OPTIMIZE (sse2_rep);
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/ifunc-sse2-bsf.h b/sysdeps/i386/i686/multiarch/ifunc-sse2-bsf.h
b1dca6
index f5e7f1b846c28454..de30f004db53f227 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/ifunc-sse2-bsf.h
b1dca6
+++ b/sysdeps/i386/i686/multiarch/ifunc-sse2-bsf.h
b1dca6
@@ -28,7 +28,7 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE2))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE2))
b1dca6
     {
b1dca6
       if (CPU_FEATURES_ARCH_P (cpu_features, Slow_BSF))
b1dca6
 	return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/ifunc-sse2-ssse3.h b/sysdeps/i386/i686/multiarch/ifunc-sse2-ssse3.h
b1dca6
index a33fe44f504bd178..299d73e3144698d7 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/ifunc-sse2-ssse3.h
b1dca6
+++ b/sysdeps/i386/i686/multiarch/ifunc-sse2-ssse3.h
b1dca6
@@ -29,11 +29,11 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE2)
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE2)
b1dca6
       && CPU_FEATURES_ARCH_P (cpu_features, Fast_Rep_String))
b1dca6
     return OPTIMIZE (sse2);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
b1dca6
     return OPTIMIZE (ssse3);
b1dca6
 
b1dca6
   return OPTIMIZE (ia32);
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/ifunc-sse2.h b/sysdeps/i386/i686/multiarch/ifunc-sse2.h
b1dca6
index 706c0329c9a76573..e1ba025299037bfb 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/ifunc-sse2.h
b1dca6
+++ b/sysdeps/i386/i686/multiarch/ifunc-sse2.h
b1dca6
@@ -27,7 +27,7 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE2))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE2))
b1dca6
     return OPTIMIZE (sse2);
b1dca6
 
b1dca6
   return OPTIMIZE (ia32);
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/ifunc-sse4_2.h b/sysdeps/i386/i686/multiarch/ifunc-sse4_2.h
b1dca6
index de7fa2f185ad9a59..641cec2ced510524 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/ifunc-sse4_2.h
b1dca6
+++ b/sysdeps/i386/i686/multiarch/ifunc-sse4_2.h
b1dca6
@@ -27,7 +27,7 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_2))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_2))
b1dca6
     return OPTIMIZE (sse42);
b1dca6
 
b1dca6
   return OPTIMIZE (ia32);
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/ifunc-ssse3-sse4_2.h b/sysdeps/i386/i686/multiarch/ifunc-ssse3-sse4_2.h
b1dca6
index bd772a9298ab7d6b..6b2b461e47e94b66 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/ifunc-ssse3-sse4_2.h
b1dca6
+++ b/sysdeps/i386/i686/multiarch/ifunc-ssse3-sse4_2.h
b1dca6
@@ -29,10 +29,10 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_2))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_2))
b1dca6
     return OPTIMIZE (sse4_2);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
b1dca6
     return OPTIMIZE (ssse3);
b1dca6
 
b1dca6
   return OPTIMIZE (ia32);
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/s_fma.c b/sysdeps/i386/i686/multiarch/s_fma.c
b1dca6
index 7f39f5fdc972fcc7..0cf6e41b03043911 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/s_fma.c
b1dca6
+++ b/sysdeps/i386/i686/multiarch/s_fma.c
b1dca6
@@ -27,7 +27,7 @@ extern double __fma_ia32 (double x, double y, double z) attribute_hidden;
b1dca6
 extern double __fma_fma (double x, double y, double z) attribute_hidden;
b1dca6
 
b1dca6
 libm_ifunc (__fma,
b1dca6
-	    HAS_ARCH_FEATURE (FMA_Usable) ? __fma_fma : __fma_ia32);
b1dca6
+	    CPU_FEATURE_USABLE (FMA) ? __fma_fma : __fma_ia32);
b1dca6
 libm_alias_double (__fma, fma)
b1dca6
 
b1dca6
 #define __fma __fma_ia32
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/s_fmaf.c b/sysdeps/i386/i686/multiarch/s_fmaf.c
b1dca6
index 1ebb6e975ee86f54..638cd5b10ba57592 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/s_fmaf.c
b1dca6
+++ b/sysdeps/i386/i686/multiarch/s_fmaf.c
b1dca6
@@ -27,7 +27,7 @@ extern float __fmaf_ia32 (float x, float y, float z) attribute_hidden;
b1dca6
 extern float __fmaf_fma (float x, float y, float z) attribute_hidden;
b1dca6
 
b1dca6
 libm_ifunc (__fmaf,
b1dca6
-	    HAS_ARCH_FEATURE (FMA_Usable) ? __fmaf_fma : __fmaf_ia32);
b1dca6
+	    CPU_FEATURE_USABLE (FMA) ? __fmaf_fma : __fmaf_ia32);
b1dca6
 libm_alias_float (__fma, fma)
b1dca6
 
b1dca6
 #define __fmaf __fmaf_ia32
b1dca6
diff --git a/sysdeps/i386/i686/multiarch/wcscpy.c b/sysdeps/i386/i686/multiarch/wcscpy.c
b1dca6
index be89ab81b066d463..ea149b0d3af357f2 100644
b1dca6
--- a/sysdeps/i386/i686/multiarch/wcscpy.c
b1dca6
+++ b/sysdeps/i386/i686/multiarch/wcscpy.c
b1dca6
@@ -34,7 +34,7 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
b1dca6
     return OPTIMIZE (ssse3);
b1dca6
 
b1dca6
   return OPTIMIZE (ia32);
b1dca6
diff --git a/sysdeps/i386/setfpucw.c b/sysdeps/i386/setfpucw.c
b1dca6
index 931302bcd03d221b..3fa2651d46a70ab6 100644
b1dca6
--- a/sysdeps/i386/setfpucw.c
b1dca6
+++ b/sysdeps/i386/setfpucw.c
b1dca6
@@ -39,7 +39,7 @@ __setfpucw (fpu_control_t set)
b1dca6
   __asm__ ("fldcw %0" : : "m" (*&cw));
b1dca6
 
b1dca6
   /* If the CPU supports SSE, we set the MXCSR as well.  */
b1dca6
-  if (HAS_CPU_FEATURE (SSE))
b1dca6
+  if (CPU_FEATURE_USABLE (SSE))
b1dca6
     {
b1dca6
       unsigned int xnew_exc;
b1dca6
 
b1dca6
diff --git a/sysdeps/unix/sysv/linux/x86/elision-conf.c b/sysdeps/unix/sysv/linux/x86/elision-conf.c
b1dca6
index 22af294426596add..bdfc514a238f92a8 100644
b1dca6
--- a/sysdeps/unix/sysv/linux/x86/elision-conf.c
b1dca6
+++ b/sysdeps/unix/sysv/linux/x86/elision-conf.c
b1dca6
@@ -64,7 +64,7 @@ do_set_elision_enable (int32_t elision_enable)
b1dca6
      if __libc_enable_secure isn't enabled since elision_enable will be set
b1dca6
      according to the default, which is disabled.  */
b1dca6
   if (elision_enable == 1)
b1dca6
-    __pthread_force_elision = HAS_CPU_FEATURE (RTM) ? 1 : 0;
b1dca6
+    __pthread_force_elision = CPU_FEATURE_USABLE (RTM) ? 1 : 0;
b1dca6
 }
b1dca6
 
b1dca6
 /* The pthread->elision_enable tunable is 0 or 1 indicating that elision
b1dca6
diff --git a/sysdeps/x86/cacheinfo.c b/sysdeps/x86/cacheinfo.c
b1dca6
index f4edbc0103beb435..fdfe2684759d968c 100644
b1dca6
--- a/sysdeps/x86/cacheinfo.c
b1dca6
+++ b/sysdeps/x86/cacheinfo.c
b1dca6
@@ -583,7 +583,7 @@ get_common_cache_info (long int *shared_ptr, unsigned int *threads_ptr,
b1dca6
 
b1dca6
   /* A value of 0 for the HTT bit indicates there is only a single
b1dca6
      logical processor.  */
b1dca6
-  if (HAS_CPU_FEATURE (HTT))
b1dca6
+  if (CPU_FEATURE_USABLE (HTT))
b1dca6
     {
b1dca6
       /* Figure out the number of logical threads that share the
b1dca6
          highest cache level.  */
b1dca6
@@ -732,7 +732,7 @@ intel_bug_no_cache_info:
b1dca6
           /* Assume that all logical threads share the highest cache
b1dca6
              level.  */
b1dca6
           threads
b1dca6
-            = ((cpu_features->cpuid[COMMON_CPUID_INDEX_1].ebx
b1dca6
+            = ((cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.ebx
b1dca6
                 >> 16) & 0xff);
b1dca6
         }
b1dca6
 
b1dca6
@@ -887,14 +887,14 @@ init_cacheinfo (void)
b1dca6
   unsigned int minimum_rep_movsb_threshold;
b1dca6
   /* NB: The default REP MOVSB threshold is 2048 * (VEC_SIZE / 16).  */
b1dca6
   unsigned int rep_movsb_threshold;
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable)
b1dca6
-      && !CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_AVX512))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX512F)
b1dca6
+      && !CPU_FEATURE_PREFERRED_P (cpu_features, Prefer_No_AVX512))
b1dca6
     {
b1dca6
       rep_movsb_threshold = 2048 * (64 / 16);
b1dca6
       minimum_rep_movsb_threshold = 64 * 8;
b1dca6
     }
b1dca6
-  else if (CPU_FEATURES_ARCH_P (cpu_features,
b1dca6
-				AVX_Fast_Unaligned_Load))
b1dca6
+  else if (CPU_FEATURE_PREFERRED_P (cpu_features,
b1dca6
+				    AVX_Fast_Unaligned_Load))
b1dca6
     {
b1dca6
       rep_movsb_threshold = 2048 * (32 / 16);
b1dca6
       minimum_rep_movsb_threshold = 32 * 8;
b1dca6
diff --git a/sysdeps/x86/cpu-features.c b/sysdeps/x86/cpu-features.c
b1dca6
index ad470f79ef7769fc..f13a1df4555c7000 100644
b1dca6
--- a/sysdeps/x86/cpu-features.c
b1dca6
+++ b/sysdeps/x86/cpu-features.c
b1dca6
@@ -42,73 +42,109 @@ extern void TUNABLE_CALLBACK (set_x86_shstk) (tunable_val_t *)
b1dca6
 #endif
b1dca6
 
b1dca6
 static void
b1dca6
-get_extended_indices (struct cpu_features *cpu_features)
b1dca6
+update_usable (struct cpu_features *cpu_features)
b1dca6
 {
b1dca6
-  unsigned int eax, ebx, ecx, edx;
b1dca6
-  __cpuid (0x80000000, eax, ebx, ecx, edx);
b1dca6
-  if (eax >= 0x80000001)
b1dca6
-    __cpuid (0x80000001,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000001].eax,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000001].ebx,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000001].ecx,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000001].edx);
b1dca6
-  if (eax >= 0x80000007)
b1dca6
-    __cpuid (0x80000007,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000007].eax,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000007].ebx,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000007].ecx,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000007].edx);
b1dca6
-  if (eax >= 0x80000008)
b1dca6
-    __cpuid (0x80000008,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000008].eax,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000008].ebx,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000008].ecx,
b1dca6
-	     cpu_features->cpuid[COMMON_CPUID_INDEX_80000008].edx);
b1dca6
-}
b1dca6
-
b1dca6
-static void
b1dca6
-get_common_indices (struct cpu_features *cpu_features,
b1dca6
-		    unsigned int *family, unsigned int *model,
b1dca6
-		    unsigned int *extended_model, unsigned int *stepping)
b1dca6
-{
b1dca6
-  if (family)
b1dca6
-    {
b1dca6
-      unsigned int eax;
b1dca6
-      __cpuid (1, eax, cpu_features->cpuid[COMMON_CPUID_INDEX_1].ebx,
b1dca6
-	       cpu_features->cpuid[COMMON_CPUID_INDEX_1].ecx,
b1dca6
-	       cpu_features->cpuid[COMMON_CPUID_INDEX_1].edx);
b1dca6
-      cpu_features->cpuid[COMMON_CPUID_INDEX_1].eax = eax;
b1dca6
-      *family = (eax >> 8) & 0x0f;
b1dca6
-      *model = (eax >> 4) & 0x0f;
b1dca6
-      *extended_model = (eax >> 12) & 0xf0;
b1dca6
-      *stepping = eax & 0x0f;
b1dca6
-      if (*family == 0x0f)
b1dca6
-	{
b1dca6
-	  *family += (eax >> 20) & 0xff;
b1dca6
-	  *model += *extended_model;
b1dca6
-	}
b1dca6
-    }
b1dca6
-
b1dca6
-  if (cpu_features->basic.max_cpuid >= 7)
b1dca6
-    {
b1dca6
-      __cpuid_count (7, 0,
b1dca6
-		     cpu_features->cpuid[COMMON_CPUID_INDEX_7].eax,
b1dca6
-		     cpu_features->cpuid[COMMON_CPUID_INDEX_7].ebx,
b1dca6
-		     cpu_features->cpuid[COMMON_CPUID_INDEX_7].ecx,
b1dca6
-		     cpu_features->cpuid[COMMON_CPUID_INDEX_7].edx);
b1dca6
-      __cpuid_count (7, 1,
b1dca6
-		     cpu_features->cpuid[COMMON_CPUID_INDEX_7_ECX_1].eax,
b1dca6
-		     cpu_features->cpuid[COMMON_CPUID_INDEX_7_ECX_1].ebx,
b1dca6
-		     cpu_features->cpuid[COMMON_CPUID_INDEX_7_ECX_1].ecx,
b1dca6
-		     cpu_features->cpuid[COMMON_CPUID_INDEX_7_ECX_1].edx);
b1dca6
-    }
b1dca6
-
b1dca6
-  if (cpu_features->basic.max_cpuid >= 0xd)
b1dca6
-    __cpuid_count (0xd, 1,
b1dca6
-		   cpu_features->cpuid[COMMON_CPUID_INDEX_D_ECX_1].eax,
b1dca6
-		   cpu_features->cpuid[COMMON_CPUID_INDEX_D_ECX_1].ebx,
b1dca6
-		   cpu_features->cpuid[COMMON_CPUID_INDEX_D_ECX_1].ecx,
b1dca6
-		   cpu_features->cpuid[COMMON_CPUID_INDEX_D_ECX_1].edx);
b1dca6
+  /* Before COMMON_CPUID_INDEX_80000001, copy the cpuid array elements to
b1dca6
+     the usable array.  */
b1dca6
+  unsigned int i;
b1dca6
+  for (i = 0; i < COMMON_CPUID_INDEX_80000001; i++)
b1dca6
+    cpu_features->features[i].usable = cpu_features->features[i].cpuid;
b1dca6
+
b1dca6
+  /* Before COMMON_CPUID_INDEX_80000001, clear the unknown usable bits
b1dca6
+     and the always zero bits.  */
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_1_ECX_16);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_1_ECX_31);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_1_EDX_10);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_1_EDX_20);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_1_EDX_30);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EBX_6);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EBX_22);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_13);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_15);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_16);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_23);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_24);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_ECX_26);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_0);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_1);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_5);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_6);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_7);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_9);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_11);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_12);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_13);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_17);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_19);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_21);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, INDEX_7_EDX_23);
b1dca6
+
b1dca6
+  /* EAX/EBX from COMMON_CPUID_INDEX_1 and EAX from COMMON_CPUID_INDEX_7
b1dca6
+     aren't used for CPU feature detection.  */
b1dca6
+  cpu_features->features[COMMON_CPUID_INDEX_1].usable.eax = 0;
b1dca6
+  cpu_features->features[COMMON_CPUID_INDEX_1].usable.ebx = 0;
b1dca6
+  cpu_features->features[COMMON_CPUID_INDEX_7].usable.eax = 0;
b1dca6
+
b1dca6
+  /* Starting from COMMON_CPUID_INDEX_80000001, copy the cpuid bits to
b1dca6
+     usable bits.  */
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, LAHF64_SAHF64);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, SVM);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, LZCNT);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, SSE4A);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, PREFETCHW);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, XOP);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, LWP);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, FMA4);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, TBM);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, SYSCALL_SYSRET);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, NX);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, PAGE1GB);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, RDTSCP);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, LM);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, XSAVEOPT);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, XSAVEC);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, XGETBV_ECX_1);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, XSAVES);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, XFD);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, INVARIANT_TSC);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, WBNOINVD);
b1dca6
+  CPU_FEATURE_SET_USABLE (cpu_features, AVX512_BF16);
b1dca6
+
b1dca6
+  /* MPX has been deprecated.  */
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, MPX);
b1dca6
+
b1dca6
+  /* Clear the usable bits which require OS support.  */
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, FMA);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, F16C);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX2);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512F);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512DQ);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512_IFMA);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512PF);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512ER);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512CD);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512BW);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512VL);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512_VBMI);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, PKU);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512_VBMI2);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, VAES);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, VPCLMULQDQ);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512_VNNI);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512_BITALG);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512_VPOPCNTDQ);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512_4VNNIW);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512_4FMAPS);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512_VP2INTERSECT);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AMX_BF16);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AMX_TILE);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AMX_INT8);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, XOP);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, FMA4);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, XSAVEC);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, XFD);
b1dca6
+  CPU_FEATURE_UNSET (cpu_features, AVX512_BF16);
b1dca6
 
b1dca6
   /* Can we call xgetbv?  */
b1dca6
   if (CPU_FEATURES_CPU_P (cpu_features, OSXSAVE))
b1dca6
@@ -123,40 +159,28 @@ get_common_indices (struct cpu_features *cpu_features,
b1dca6
 	  /* Determine if AVX is usable.  */
b1dca6
 	  if (CPU_FEATURES_CPU_P (cpu_features, AVX))
b1dca6
 	    {
b1dca6
-	      cpu_features->usable[index_arch_AVX_Usable]
b1dca6
-		|= bit_arch_AVX_Usable;
b1dca6
+	      CPU_FEATURE_SET (cpu_features, AVX);
b1dca6
 	      /* The following features depend on AVX being usable.  */
b1dca6
 	      /* Determine if AVX2 is usable.  */
b1dca6
 	      if (CPU_FEATURES_CPU_P (cpu_features, AVX2))
b1dca6
-	      {
b1dca6
-		cpu_features->usable[index_arch_AVX2_Usable]
b1dca6
-		  |= bit_arch_AVX2_Usable;
b1dca6
-
b1dca6
-	        /* Unaligned load with 256-bit AVX registers are faster on
b1dca6
-	           Intel/AMD processors with AVX2.  */
b1dca6
-	        cpu_features->preferred[index_arch_AVX_Fast_Unaligned_Load]
b1dca6
-		  |= bit_arch_AVX_Fast_Unaligned_Load;
b1dca6
-	      }
b1dca6
+		{
b1dca6
+		  CPU_FEATURE_SET (cpu_features, AVX2);
b1dca6
+
b1dca6
+		  /* Unaligned load with 256-bit AVX registers are faster
b1dca6
+		     on Intel/AMD processors with AVX2.  */
b1dca6
+		  cpu_features->preferred[index_arch_AVX_Fast_Unaligned_Load]
b1dca6
+		    |= bit_arch_AVX_Fast_Unaligned_Load;
b1dca6
+		}
b1dca6
 	      /* Determine if FMA is usable.  */
b1dca6
-	      if (CPU_FEATURES_CPU_P (cpu_features, FMA))
b1dca6
-		cpu_features->usable[index_arch_FMA_Usable]
b1dca6
-		  |= bit_arch_FMA_Usable;
b1dca6
+	      CPU_FEATURE_SET_USABLE (cpu_features, FMA);
b1dca6
 	      /* Determine if VAES is usable.  */
b1dca6
-	      if (CPU_FEATURES_CPU_P (cpu_features, VAES))
b1dca6
-		cpu_features->usable[index_arch_VAES_Usable]
b1dca6
-		  |= bit_arch_VAES_Usable;
b1dca6
+	      CPU_FEATURE_SET_USABLE (cpu_features, VAES);
b1dca6
 	      /* Determine if VPCLMULQDQ is usable.  */
b1dca6
-	      if (CPU_FEATURES_CPU_P (cpu_features, VPCLMULQDQ))
b1dca6
-		cpu_features->usable[index_arch_VPCLMULQDQ_Usable]
b1dca6
-		  |= bit_arch_VPCLMULQDQ_Usable;
b1dca6
+	      CPU_FEATURE_SET_USABLE (cpu_features, VPCLMULQDQ);
b1dca6
 	      /* Determine if XOP is usable.  */
b1dca6
-	      if (CPU_FEATURES_CPU_P (cpu_features, XOP))
b1dca6
-		cpu_features->usable[index_arch_XOP_Usable]
b1dca6
-		  |= bit_arch_XOP_Usable;
b1dca6
+	      CPU_FEATURE_SET_USABLE (cpu_features, XOP);
b1dca6
 	      /* Determine if F16C is usable.  */
b1dca6
-	      if (CPU_FEATURES_CPU_P (cpu_features, F16C))
b1dca6
-		cpu_features->usable[index_arch_F16C_Usable]
b1dca6
-		  |= bit_arch_F16C_Usable;
b1dca6
+	      CPU_FEATURE_SET_USABLE (cpu_features, F16C);
b1dca6
 	    }
b1dca6
 
b1dca6
 	  /* Check if OPMASK state, upper 256-bit of ZMM0-ZMM15 and
b1dca6
@@ -168,73 +192,41 @@ get_common_indices (struct cpu_features *cpu_features,
b1dca6
 	      /* Determine if AVX512F is usable.  */
b1dca6
 	      if (CPU_FEATURES_CPU_P (cpu_features, AVX512F))
b1dca6
 		{
b1dca6
-		  cpu_features->usable[index_arch_AVX512F_Usable]
b1dca6
-		    |= bit_arch_AVX512F_Usable;
b1dca6
+		  CPU_FEATURE_SET (cpu_features, AVX512F);
b1dca6
 		  /* Determine if AVX512CD is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512CD))
b1dca6
-		    cpu_features->usable[index_arch_AVX512CD_Usable]
b1dca6
-		      |= bit_arch_AVX512CD_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512CD);
b1dca6
 		  /* Determine if AVX512ER is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512ER))
b1dca6
-		    cpu_features->usable[index_arch_AVX512ER_Usable]
b1dca6
-		      |= bit_arch_AVX512ER_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512ER);
b1dca6
 		  /* Determine if AVX512PF is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512PF))
b1dca6
-		    cpu_features->usable[index_arch_AVX512PF_Usable]
b1dca6
-		      |= bit_arch_AVX512PF_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512PF);
b1dca6
 		  /* Determine if AVX512VL is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512VL))
b1dca6
-		    cpu_features->usable[index_arch_AVX512VL_Usable]
b1dca6
-		      |= bit_arch_AVX512VL_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512VL);
b1dca6
 		  /* Determine if AVX512DQ is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512DQ))
b1dca6
-		    cpu_features->usable[index_arch_AVX512DQ_Usable]
b1dca6
-		      |= bit_arch_AVX512DQ_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512DQ);
b1dca6
 		  /* Determine if AVX512BW is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512BW))
b1dca6
-		    cpu_features->usable[index_arch_AVX512BW_Usable]
b1dca6
-		      |= bit_arch_AVX512BW_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512BW);
b1dca6
 		  /* Determine if AVX512_4FMAPS is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512_4FMAPS))
b1dca6
-		    cpu_features->usable[index_arch_AVX512_4FMAPS_Usable]
b1dca6
-		      |= bit_arch_AVX512_4FMAPS_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512_4FMAPS);
b1dca6
 		  /* Determine if AVX512_4VNNIW is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512_4VNNIW))
b1dca6
-		    cpu_features->usable[index_arch_AVX512_4VNNIW_Usable]
b1dca6
-		      |= bit_arch_AVX512_4VNNIW_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512_4VNNIW);
b1dca6
 		  /* Determine if AVX512_BITALG is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512_BITALG))
b1dca6
-		    cpu_features->usable[index_arch_AVX512_BITALG_Usable]
b1dca6
-		      |= bit_arch_AVX512_BITALG_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512_BITALG);
b1dca6
 		  /* Determine if AVX512_IFMA is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512_IFMA))
b1dca6
-		    cpu_features->usable[index_arch_AVX512_IFMA_Usable]
b1dca6
-		      |= bit_arch_AVX512_IFMA_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512_IFMA);
b1dca6
 		  /* Determine if AVX512_VBMI is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512_VBMI))
b1dca6
-		    cpu_features->usable[index_arch_AVX512_VBMI_Usable]
b1dca6
-		      |= bit_arch_AVX512_VBMI_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512_VBMI);
b1dca6
 		  /* Determine if AVX512_VBMI2 is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512_VBMI2))
b1dca6
-		    cpu_features->usable[index_arch_AVX512_VBMI2_Usable]
b1dca6
-		      |= bit_arch_AVX512_VBMI2_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512_VBMI2);
b1dca6
 		  /* Determine if is AVX512_VNNI usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512_VNNI))
b1dca6
-		    cpu_features->usable[index_arch_AVX512_VNNI_Usable]
b1dca6
-		      |= bit_arch_AVX512_VNNI_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512_VNNI);
b1dca6
 		  /* Determine if AVX512_VPOPCNTDQ is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512_VPOPCNTDQ))
b1dca6
-		    cpu_features->usable[index_arch_AVX512_VPOPCNTDQ_Usable]
b1dca6
-		      |= bit_arch_AVX512_VPOPCNTDQ_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features,
b1dca6
+					  AVX512_VPOPCNTDQ);
b1dca6
 		  /* Determine if AVX512_VP2INTERSECT is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features,
b1dca6
-					  AVX512_VP2INTERSECT))
b1dca6
-		    cpu_features->usable[index_arch_AVX512_VP2INTERSECT_Usable]
b1dca6
-		      |= bit_arch_AVX512_VP2INTERSECT_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features,
b1dca6
+					  AVX512_VP2INTERSECT);
b1dca6
 		  /* Determine if AVX512_BF16 is usable.  */
b1dca6
-		  if (CPU_FEATURES_CPU_P (cpu_features, AVX512_BF16))
b1dca6
-		    cpu_features->usable[index_arch_AVX512_BF16_Usable]
b1dca6
-		      |= bit_arch_AVX512_BF16_Usable;
b1dca6
+		  CPU_FEATURE_SET_USABLE (cpu_features, AVX512_BF16);
b1dca6
 		}
b1dca6
 	    }
b1dca6
 	}
b1dca6
@@ -244,19 +236,17 @@ get_common_indices (struct cpu_features *cpu_features,
b1dca6
 	  == (bit_XTILECFG_state | bit_XTILEDATA_state))
b1dca6
 	{
b1dca6
 	  /* Determine if AMX_BF16 is usable.  */
b1dca6
-	  if (CPU_FEATURES_CPU_P (cpu_features, AMX_BF16))
b1dca6
-	    cpu_features->usable[index_arch_AMX_BF16_Usable]
b1dca6
-	      |= bit_arch_AMX_BF16_Usable;
b1dca6
+	  CPU_FEATURE_SET_USABLE (cpu_features, AMX_BF16);
b1dca6
 	  /* Determine if AMX_TILE is usable.  */
b1dca6
-	  if (CPU_FEATURES_CPU_P (cpu_features, AMX_TILE))
b1dca6
-	    cpu_features->usable[index_arch_AMX_TILE_Usable]
b1dca6
-	      |= bit_arch_AMX_TILE_Usable;
b1dca6
+	  CPU_FEATURE_SET_USABLE (cpu_features, AMX_TILE);
b1dca6
 	  /* Determine if AMX_INT8 is usable.  */
b1dca6
-	  if (CPU_FEATURES_CPU_P (cpu_features, AMX_INT8))
b1dca6
-	    cpu_features->usable[index_arch_AMX_INT8_Usable]
b1dca6
-	      |= bit_arch_AMX_INT8_Usable;
b1dca6
+	  CPU_FEATURE_SET_USABLE (cpu_features, AMX_INT8);
b1dca6
 	}
b1dca6
 
b1dca6
+
b1dca6
+      /* XFD is usable only when OSXSAVE is enabled.  */
b1dca6
+      CPU_FEATURE_SET_USABLE (cpu_features, XFD);
b1dca6
+
b1dca6
       /* For _dl_runtime_resolve, set xsave_state_size to xsave area
b1dca6
 	 size + integer register save size and align it to 64 bytes.  */
b1dca6
       if (cpu_features->basic.max_cpuid >= 0xd)
b1dca6
@@ -318,8 +308,7 @@ get_common_indices (struct cpu_features *cpu_features,
b1dca6
 		    {
b1dca6
 		      cpu_features->xsave_state_size
b1dca6
 			= ALIGN_UP (size + STATE_SAVE_OFFSET, 64);
b1dca6
-		      cpu_features->usable[index_arch_XSAVEC_Usable]
b1dca6
-			|= bit_arch_XSAVEC_Usable;
b1dca6
+		      CPU_FEATURE_SET (cpu_features, XSAVEC);
b1dca6
 		    }
b1dca6
 		}
b1dca6
 	    }
b1dca6
@@ -328,8 +317,79 @@ get_common_indices (struct cpu_features *cpu_features,
b1dca6
 
b1dca6
   /* Determine if PKU is usable.  */
b1dca6
   if (CPU_FEATURES_CPU_P (cpu_features, OSPKE))
b1dca6
-    cpu_features->usable[index_arch_PKU_Usable]
b1dca6
-      |= bit_arch_PKU_Usable;
b1dca6
+    CPU_FEATURE_SET (cpu_features, PKU);
b1dca6
+}
b1dca6
+
b1dca6
+static void
b1dca6
+get_extended_indices (struct cpu_features *cpu_features)
b1dca6
+{
b1dca6
+  unsigned int eax, ebx, ecx, edx;
b1dca6
+  __cpuid (0x80000000, eax, ebx, ecx, edx);
b1dca6
+  if (eax >= 0x80000001)
b1dca6
+    __cpuid (0x80000001,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000001].cpuid.eax,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000001].cpuid.ebx,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000001].cpuid.ecx,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000001].cpuid.edx);
b1dca6
+  if (eax >= 0x80000007)
b1dca6
+    __cpuid (0x80000007,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000007].cpuid.eax,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000007].cpuid.ebx,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000007].cpuid.ecx,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000007].cpuid.edx);
b1dca6
+  if (eax >= 0x80000008)
b1dca6
+    __cpuid (0x80000008,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000008].cpuid.eax,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000008].cpuid.ebx,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000008].cpuid.ecx,
b1dca6
+	     cpu_features->features[COMMON_CPUID_INDEX_80000008].cpuid.edx);
b1dca6
+}
b1dca6
+
b1dca6
+static void
b1dca6
+get_common_indices (struct cpu_features *cpu_features,
b1dca6
+		    unsigned int *family, unsigned int *model,
b1dca6
+		    unsigned int *extended_model, unsigned int *stepping)
b1dca6
+{
b1dca6
+  if (family)
b1dca6
+    {
b1dca6
+      unsigned int eax;
b1dca6
+      __cpuid (1, eax,
b1dca6
+	       cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.ebx,
b1dca6
+	       cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.ecx,
b1dca6
+	       cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.edx);
b1dca6
+      cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.eax = eax;
b1dca6
+      *family = (eax >> 8) & 0x0f;
b1dca6
+      *model = (eax >> 4) & 0x0f;
b1dca6
+      *extended_model = (eax >> 12) & 0xf0;
b1dca6
+      *stepping = eax & 0x0f;
b1dca6
+      if (*family == 0x0f)
b1dca6
+	{
b1dca6
+	  *family += (eax >> 20) & 0xff;
b1dca6
+	  *model += *extended_model;
b1dca6
+	}
b1dca6
+    }
b1dca6
+
b1dca6
+  if (cpu_features->basic.max_cpuid >= 7)
b1dca6
+    {
b1dca6
+      __cpuid_count (7, 0,
b1dca6
+		     cpu_features->features[COMMON_CPUID_INDEX_7].cpuid.eax,
b1dca6
+		     cpu_features->features[COMMON_CPUID_INDEX_7].cpuid.ebx,
b1dca6
+		     cpu_features->features[COMMON_CPUID_INDEX_7].cpuid.ecx,
b1dca6
+		     cpu_features->features[COMMON_CPUID_INDEX_7].cpuid.edx);
b1dca6
+      __cpuid_count (7, 1,
b1dca6
+		     cpu_features->features[COMMON_CPUID_INDEX_7_ECX_1].cpuid.eax,
b1dca6
+		     cpu_features->features[COMMON_CPUID_INDEX_7_ECX_1].cpuid.ebx,
b1dca6
+		     cpu_features->features[COMMON_CPUID_INDEX_7_ECX_1].cpuid.ecx,
b1dca6
+		     cpu_features->features[COMMON_CPUID_INDEX_7_ECX_1].cpuid.edx);
b1dca6
+    }
b1dca6
+
b1dca6
+  if (cpu_features->basic.max_cpuid >= 0xd)
b1dca6
+    __cpuid_count (0xd, 1,
b1dca6
+		   cpu_features->features[COMMON_CPUID_INDEX_D_ECX_1].cpuid.eax,
b1dca6
+		   cpu_features->features[COMMON_CPUID_INDEX_D_ECX_1].cpuid.ebx,
b1dca6
+		   cpu_features->features[COMMON_CPUID_INDEX_D_ECX_1].cpuid.ecx,
b1dca6
+		   cpu_features->features[COMMON_CPUID_INDEX_D_ECX_1].cpuid.edx);
b1dca6
+
b1dca6
 }
b1dca6
 
b1dca6
 _Static_assert (((index_arch_Fast_Unaligned_Load
b1dca6
@@ -353,8 +413,6 @@ init_cpu_features (struct cpu_features *cpu_features)
b1dca6
   unsigned int stepping = 0;
b1dca6
   enum cpu_features_kind kind;
b1dca6
 
b1dca6
-  cpu_features->usable_p = cpu_features->usable;
b1dca6
-
b1dca6
 #if !HAS_CPUID
b1dca6
   if (__get_cpuid_max (0, 0) == 0)
b1dca6
     {
b1dca6
@@ -377,6 +435,8 @@ init_cpu_features (struct cpu_features *cpu_features)
b1dca6
 
b1dca6
       get_extended_indices (cpu_features);
b1dca6
 
b1dca6
+      update_usable (cpu_features);
b1dca6
+
b1dca6
       if (family == 0x06)
b1dca6
 	{
b1dca6
 	  model += extended_model;
b1dca6
@@ -473,7 +533,7 @@ init_cpu_features (struct cpu_features *cpu_features)
b1dca6
 		 with stepping >= 4) to avoid TSX on kernels that weren't
b1dca6
 		 updated with the latest microcode package (which disables
b1dca6
 		 broken feature by default).  */
b1dca6
-	      cpu_features->cpuid[index_cpu_RTM].reg_RTM &= ~bit_cpu_RTM;
b1dca6
+	      CPU_FEATURE_UNSET (cpu_features, RTM);
b1dca6
 	      break;
b1dca6
 	    }
b1dca6
 	}
b1dca6
@@ -501,15 +561,15 @@ init_cpu_features (struct cpu_features *cpu_features)
b1dca6
 
b1dca6
       get_extended_indices (cpu_features);
b1dca6
 
b1dca6
-      ecx = cpu_features->cpuid[COMMON_CPUID_INDEX_1].ecx;
b1dca6
+      update_usable (cpu_features);
b1dca6
 
b1dca6
-      if (HAS_ARCH_FEATURE (AVX_Usable))
b1dca6
+      ecx = cpu_features->features[COMMON_CPUID_INDEX_1].cpuid.ecx;
b1dca6
+
b1dca6
+      if (CPU_FEATURE_USABLE_P (cpu_features, AVX))
b1dca6
 	{
b1dca6
 	  /* Since the FMA4 bit is in COMMON_CPUID_INDEX_80000001 and
b1dca6
 	     FMA4 requires AVX, determine if FMA4 is usable here.  */
b1dca6
-	  if (CPU_FEATURES_CPU_P (cpu_features, FMA4))
b1dca6
-	    cpu_features->usable[index_arch_FMA4_Usable]
b1dca6
-	      |= bit_arch_FMA4_Usable;
b1dca6
+	  CPU_FEATURE_SET_USABLE (cpu_features, FMA4);
b1dca6
 	}
b1dca6
 
b1dca6
       if (family == 0x15)
b1dca6
@@ -540,13 +600,15 @@ init_cpu_features (struct cpu_features *cpu_features)
b1dca6
 
b1dca6
       get_extended_indices (cpu_features);
b1dca6
 
b1dca6
+      update_usable (cpu_features);
b1dca6
+
b1dca6
       model += extended_model;
b1dca6
       if (family == 0x6)
b1dca6
         {
b1dca6
           if (model == 0xf || model == 0x19)
b1dca6
             {
b1dca6
-              cpu_features->usable[index_arch_AVX_Usable]
b1dca6
-                &= ~(bit_arch_AVX_Usable | bit_arch_AVX2_Usable);
b1dca6
+	      CPU_FEATURE_UNSET (cpu_features, AVX);
b1dca6
+	      CPU_FEATURE_UNSET (cpu_features, AVX2);
b1dca6
 
b1dca6
               cpu_features->preferred[index_arch_Slow_SSE4_2]
b1dca6
                 |= bit_arch_Slow_SSE4_2;
b1dca6
@@ -559,8 +621,8 @@ init_cpu_features (struct cpu_features *cpu_features)
b1dca6
         {
b1dca6
 	  if (model == 0x1b)
b1dca6
 	    {
b1dca6
-	      cpu_features->usable[index_arch_AVX_Usable]
b1dca6
-		&= ~(bit_arch_AVX_Usable | bit_arch_AVX2_Usable);
b1dca6
+	      CPU_FEATURE_UNSET (cpu_features, AVX);
b1dca6
+	      CPU_FEATURE_UNSET (cpu_features, AVX2);
b1dca6
 
b1dca6
 	      cpu_features->preferred[index_arch_Slow_SSE4_2]
b1dca6
 		|= bit_arch_Slow_SSE4_2;
b1dca6
@@ -570,8 +632,8 @@ init_cpu_features (struct cpu_features *cpu_features)
b1dca6
 	    }
b1dca6
 	  else if (model == 0x3b)
b1dca6
 	    {
b1dca6
-	      cpu_features->usable[index_arch_AVX_Usable]
b1dca6
-		&= ~(bit_arch_AVX_Usable | bit_arch_AVX2_Usable);
b1dca6
+	      CPU_FEATURE_UNSET (cpu_features, AVX);
b1dca6
+	      CPU_FEATURE_UNSET (cpu_features, AVX2);
b1dca6
 
b1dca6
 	      cpu_features->preferred[index_arch_AVX_Fast_Unaligned_Load]
b1dca6
 		&= ~bit_arch_AVX_Fast_Unaligned_Load;
b1dca6
@@ -582,6 +644,7 @@ init_cpu_features (struct cpu_features *cpu_features)
b1dca6
     {
b1dca6
       kind = arch_kind_other;
b1dca6
       get_common_indices (cpu_features, NULL, NULL, NULL, NULL);
b1dca6
+      update_usable (cpu_features);
b1dca6
     }
b1dca6
 
b1dca6
   /* Support i586 if CX8 is available.  */
b1dca6
@@ -628,31 +691,30 @@ no_cpuid:
b1dca6
     {
b1dca6
       const char *platform = NULL;
b1dca6
 
b1dca6
-      if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable)
b1dca6
-	  && CPU_FEATURES_CPU_P (cpu_features, AVX512CD))
b1dca6
+      if (CPU_FEATURE_USABLE_P (cpu_features, AVX512CD))
b1dca6
 	{
b1dca6
-	  if (CPU_FEATURES_CPU_P (cpu_features, AVX512ER))
b1dca6
+	  if (CPU_FEATURE_USABLE_P (cpu_features, AVX512ER))
b1dca6
 	    {
b1dca6
-	      if (CPU_FEATURES_CPU_P (cpu_features, AVX512PF))
b1dca6
+	      if (CPU_FEATURE_USABLE_P (cpu_features, AVX512PF))
b1dca6
 		platform = "xeon_phi";
b1dca6
 	    }
b1dca6
 	  else
b1dca6
 	    {
b1dca6
-	      if (CPU_FEATURES_CPU_P (cpu_features, AVX512BW)
b1dca6
-		  && CPU_FEATURES_CPU_P (cpu_features, AVX512DQ)
b1dca6
-		  && CPU_FEATURES_CPU_P (cpu_features, AVX512VL))
b1dca6
+	      if (CPU_FEATURE_USABLE_P (cpu_features, AVX512BW)
b1dca6
+		  && CPU_FEATURE_USABLE_P (cpu_features, AVX512DQ)
b1dca6
+		  && CPU_FEATURE_USABLE_P (cpu_features, AVX512VL))
b1dca6
 		GLRO(dl_hwcap) |= HWCAP_X86_AVX512_1;
b1dca6
 	    }
b1dca6
 	}
b1dca6
 
b1dca6
       if (platform == NULL
b1dca6
-	  && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
b1dca6
-	  && CPU_FEATURES_ARCH_P (cpu_features, FMA_Usable)
b1dca6
-	  && CPU_FEATURES_CPU_P (cpu_features, BMI1)
b1dca6
-	  && CPU_FEATURES_CPU_P (cpu_features, BMI2)
b1dca6
-	  && CPU_FEATURES_CPU_P (cpu_features, LZCNT)
b1dca6
-	  && CPU_FEATURES_CPU_P (cpu_features, MOVBE)
b1dca6
-	  && CPU_FEATURES_CPU_P (cpu_features, POPCNT))
b1dca6
+	  && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
b1dca6
+	  && CPU_FEATURE_USABLE_P (cpu_features, FMA)
b1dca6
+	  && CPU_FEATURE_USABLE_P (cpu_features, BMI1)
b1dca6
+	  && CPU_FEATURE_USABLE_P (cpu_features, BMI2)
b1dca6
+	  && CPU_FEATURE_USABLE_P (cpu_features, LZCNT)
b1dca6
+	  && CPU_FEATURE_USABLE_P (cpu_features, MOVBE)
b1dca6
+	  && CPU_FEATURE_USABLE_P (cpu_features, POPCNT))
b1dca6
 	platform = "haswell";
b1dca6
 
b1dca6
       if (platform != NULL)
b1dca6
@@ -660,7 +722,7 @@ no_cpuid:
b1dca6
     }
b1dca6
 #else
b1dca6
   GLRO(dl_hwcap) = 0;
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE2))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE2))
b1dca6
     GLRO(dl_hwcap) |= HWCAP_X86_SSE2;
b1dca6
 
b1dca6
   if (CPU_FEATURES_ARCH_P (cpu_features, I686))
b1dca6
@@ -695,9 +757,9 @@ no_cpuid:
b1dca6
 	     GLIBC_TUNABLES=glibc.cpu.hwcaps=-IBT,-SHSTK
b1dca6
 	   */
b1dca6
 	  unsigned int cet_feature = 0;
b1dca6
-	  if (!HAS_CPU_FEATURE (IBT))
b1dca6
+	  if (!CPU_FEATURE_USABLE (IBT))
b1dca6
 	    cet_feature |= GNU_PROPERTY_X86_FEATURE_1_IBT;
b1dca6
-	  if (!HAS_CPU_FEATURE (SHSTK))
b1dca6
+	  if (!CPU_FEATURE_USABLE (SHSTK))
b1dca6
 	    cet_feature |= GNU_PROPERTY_X86_FEATURE_1_SHSTK;
b1dca6
 
b1dca6
 	  if (cet_feature)
b1dca6
diff --git a/sysdeps/x86/cpu-features.h b/sysdeps/x86/cpu-features.h
b1dca6
index 0f19c64352c4d7f1..21708c028a12dbb2 100644
b1dca6
--- a/sysdeps/x86/cpu-features.h
b1dca6
+++ b/sysdeps/x86/cpu-features.h
b1dca6
@@ -18,15 +18,6 @@
b1dca6
 #ifndef cpu_features_h
b1dca6
 #define cpu_features_h
b1dca6
 
b1dca6
-enum
b1dca6
-{
b1dca6
-  /* The integer bit array index for the first set of usable feature
b1dca6
-     bits.  */
b1dca6
-  USABLE_FEATURE_INDEX_1 = 0,
b1dca6
-  /* The current maximum size of the feature integer bit array.  */
b1dca6
-  USABLE_FEATURE_INDEX_MAX
b1dca6
-};
b1dca6
-
b1dca6
 enum
b1dca6
 {
b1dca6
   /* The integer bit array index for the first set of preferred feature
b1dca6
@@ -57,6 +48,12 @@ struct cpuid_registers
b1dca6
   unsigned int edx;
b1dca6
 };
b1dca6
 
b1dca6
+struct cpuid_features
b1dca6
+{
b1dca6
+  struct cpuid_registers cpuid;
b1dca6
+  struct cpuid_registers usable;
b1dca6
+};
b1dca6
+
b1dca6
 enum cpu_features_kind
b1dca6
 {
b1dca6
   arch_kind_unknown = 0,
b1dca6
@@ -78,9 +75,7 @@ struct cpu_features_basic
b1dca6
 struct cpu_features
b1dca6
 {
b1dca6
   struct cpu_features_basic basic;
b1dca6
-  unsigned int *usable_p;
b1dca6
-  struct cpuid_registers cpuid[COMMON_CPUID_INDEX_MAX];
b1dca6
-  unsigned int usable[USABLE_FEATURE_INDEX_MAX];
b1dca6
+  struct cpuid_features features[COMMON_CPUID_INDEX_MAX];
b1dca6
   unsigned int preferred[PREFERRED_FEATURE_INDEX_MAX];
b1dca6
   /* The state size for XSAVEC or XSAVE.  The type must be unsigned long
b1dca6
      int so that we use
b1dca6
@@ -91,7 +86,7 @@ struct cpu_features
b1dca6
   unsigned long int xsave_state_size;
b1dca6
   /* The full state size for XSAVE when XSAVEC is disabled by
b1dca6
 
b1dca6
-     GLIBC_TUNABLES=glibc.cpu.hwcaps=-XSAVEC_Usable
b1dca6
+     GLIBC_TUNABLES=glibc.cpu.hwcaps=-XSAVEC
b1dca6
    */
b1dca6
   unsigned int xsave_state_full_size;
b1dca6
   /* Data cache size for use in memory and string routines, typically
b1dca6
@@ -114,117 +109,40 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
      __attribute__ ((const));
b1dca6
 
b1dca6
 /* Only used directly in cpu-features.c.  */
b1dca6
-# define CPU_FEATURES_CPU_P(ptr, name) \
b1dca6
-  ((ptr->cpuid[index_cpu_##name].reg_##name & (bit_cpu_##name)) != 0)
b1dca6
-# define CPU_FEATURES_ARCH_P(ptr, name) \
b1dca6
-  ((ptr->feature_##name[index_arch_##name] & (bit_arch_##name)) != 0)
b1dca6
+#define CPU_FEATURE_CHECK_P(ptr, name, check) \
b1dca6
+  ((ptr->features[index_cpu_##name].check.reg_##name \
b1dca6
+    & bit_cpu_##name) != 0)
b1dca6
+#define CPU_FEATURE_SET(ptr, name) \
b1dca6
+  ptr->features[index_cpu_##name].usable.reg_##name |= bit_cpu_##name;
b1dca6
+#define CPU_FEATURE_UNSET(ptr, name) \
b1dca6
+  ptr->features[index_cpu_##name].usable.reg_##name &= ~bit_cpu_##name;
b1dca6
+#define CPU_FEATURE_SET_USABLE(ptr, name) \
b1dca6
+  ptr->features[index_cpu_##name].usable.reg_##name \
b1dca6
+     |= ptr->features[index_cpu_##name].cpuid.reg_##name & bit_cpu_##name;
b1dca6
+#define CPU_FEATURE_PREFERRED_P(ptr, name) \
b1dca6
+  ((ptr->preferred[index_arch_##name] & bit_arch_##name) != 0)
b1dca6
+#define CPU_FEATURE_CPU_P(ptr, name) \
b1dca6
+  CPU_FEATURE_CHECK_P (ptr, name, cpuid)
b1dca6
+#define CPU_FEATURE_USABLE_P(ptr, name) \
b1dca6
+  CPU_FEATURE_CHECK_P (ptr, name, usable)
b1dca6
 
b1dca6
 /* HAS_CPU_FEATURE evaluates to true if CPU supports the feature.  */
b1dca6
 #define HAS_CPU_FEATURE(name) \
b1dca6
-  CPU_FEATURES_CPU_P (__get_cpu_features (), name)
b1dca6
-/* HAS_ARCH_FEATURE evaluates to true if we may use the feature at
b1dca6
-   runtime.  */
b1dca6
-# define HAS_ARCH_FEATURE(name) \
b1dca6
-  CPU_FEATURES_ARCH_P (__get_cpu_features (), name)
b1dca6
+  CPU_FEATURE_CPU_P (__get_cpu_features (), name)
b1dca6
 /* CPU_FEATURE_USABLE evaluates to true if the feature is usable.  */
b1dca6
 #define CPU_FEATURE_USABLE(name) \
b1dca6
-  HAS_ARCH_FEATURE (name##_Usable)
b1dca6
-
b1dca6
-/* Architecture features.  */
b1dca6
-
b1dca6
-/* USABLE_FEATURE_INDEX_1.  */
b1dca6
-#define bit_arch_AVX_Usable			(1u << 0)
b1dca6
-#define bit_arch_AVX2_Usable			(1u << 1)
b1dca6
-#define bit_arch_AVX512F_Usable			(1u << 2)
b1dca6
-#define bit_arch_AVX512CD_Usable		(1u << 3)
b1dca6
-#define bit_arch_AVX512ER_Usable		(1u << 4)
b1dca6
-#define bit_arch_AVX512PF_Usable		(1u << 5)
b1dca6
-#define bit_arch_AVX512VL_Usable		(1u << 6)
b1dca6
-#define bit_arch_AVX512DQ_Usable		(1u << 7)
b1dca6
-#define bit_arch_AVX512BW_Usable		(1u << 8)
b1dca6
-#define bit_arch_AVX512_4FMAPS_Usable		(1u << 9)
b1dca6
-#define bit_arch_AVX512_4VNNIW_Usable		(1u << 10)
b1dca6
-#define bit_arch_AVX512_BITALG_Usable		(1u << 11)
b1dca6
-#define bit_arch_AVX512_IFMA_Usable		(1u << 12)
b1dca6
-#define bit_arch_AVX512_VBMI_Usable		(1u << 13)
b1dca6
-#define bit_arch_AVX512_VBMI2_Usable		(1u << 14)
b1dca6
-#define bit_arch_AVX512_VNNI_Usable		(1u << 15)
b1dca6
-#define bit_arch_AVX512_VPOPCNTDQ_Usable	(1u << 16)
b1dca6
-#define bit_arch_FMA_Usable			(1u << 17)
b1dca6
-#define bit_arch_FMA4_Usable			(1u << 18)
b1dca6
-#define bit_arch_VAES_Usable			(1u << 19)
b1dca6
-#define bit_arch_VPCLMULQDQ_Usable		(1u << 20)
b1dca6
-#define bit_arch_XOP_Usable			(1u << 21)
b1dca6
-#define bit_arch_XSAVEC_Usable			(1u << 22)
b1dca6
-#define bit_arch_F16C_Usable			(1u << 23)
b1dca6
-#define bit_arch_AVX512_VP2INTERSECT_Usable	(1u << 24)
b1dca6
-#define bit_arch_AVX512_BF16_Usable		(1u << 25)
b1dca6
-#define bit_arch_PKU_Usable			(1u << 26)
b1dca6
-#define bit_arch_AMX_BF16_Usable		(1u << 27)
b1dca6
-#define bit_arch_AMX_TILE_Usable		(1u << 28)
b1dca6
-#define bit_arch_AMX_INT8_Usable		(1u << 29)
b1dca6
-
b1dca6
-#define index_arch_AVX_Usable			USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX2_Usable			USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512F_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512CD_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512ER_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512PF_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512VL_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512BW_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512DQ_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512_4FMAPS_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512_4VNNIW_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512_BITALG_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512_IFMA_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512_VBMI_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512_VBMI2_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512_VNNI_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512_VPOPCNTDQ_Usable	USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_FMA_Usable			USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_FMA4_Usable			USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_VAES_Usable			USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_VPCLMULQDQ_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_XOP_Usable			USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_XSAVEC_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_F16C_Usable			USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512_VP2INTERSECT_Usable	USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AVX512_BF16_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_PKU_Usable			USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AMX_BF16_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AMX_TILE_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-#define index_arch_AMX_INT8_Usable		USABLE_FEATURE_INDEX_1
b1dca6
-
b1dca6
-#define feature_AVX_Usable			usable
b1dca6
-#define feature_AVX2_Usable			usable
b1dca6
-#define feature_AVX512F_Usable			usable
b1dca6
-#define feature_AVX512CD_Usable			usable
b1dca6
-#define feature_AVX512ER_Usable			usable
b1dca6
-#define feature_AVX512PF_Usable			usable
b1dca6
-#define feature_AVX512VL_Usable			usable
b1dca6
-#define feature_AVX512BW_Usable			usable
b1dca6
-#define feature_AVX512DQ_Usable			usable
b1dca6
-#define feature_AVX512_4FMAPS_Usable		usable
b1dca6
-#define feature_AVX512_4VNNIW_Usable		usable
b1dca6
-#define feature_AVX512_BITALG_Usable		usable
b1dca6
-#define feature_AVX512_IFMA_Usable		usable
b1dca6
-#define feature_AVX512_VBMI_Usable		usable
b1dca6
-#define feature_AVX512_VBMI2_Usable		usable
b1dca6
-#define feature_AVX512_VNNI_Usable		usable
b1dca6
-#define feature_AVX512_VPOPCNTDQ_Usable		usable
b1dca6
-#define feature_FMA_Usable			usable
b1dca6
-#define feature_FMA4_Usable			usable
b1dca6
-#define feature_VAES_Usable			usable
b1dca6
-#define feature_VPCLMULQDQ_Usable		usable
b1dca6
-#define feature_XOP_Usable			usable
b1dca6
-#define feature_XSAVEC_Usable			usable
b1dca6
-#define feature_F16C_Usable			usable
b1dca6
-#define feature_AVX512_VP2INTERSECT_Usable	usable
b1dca6
-#define feature_AVX512_BF16_Usable		usable
b1dca6
-#define feature_PKU_Usable			usable
b1dca6
-#define feature_AMX_BF16_Usable			usable
b1dca6
-#define feature_AMX_TILE_Usable			usable
b1dca6
-#define feature_AMX_INT8_Usable			usable
b1dca6
+  CPU_FEATURE_USABLE_P (__get_cpu_features (), name)
b1dca6
+/* CPU_FEATURE_PREFER evaluates to true if we prefer the feature at
b1dca6
+   runtime.  */
b1dca6
+#define CPU_FEATURE_PREFERRED(name) \
b1dca6
+  CPU_FEATURE_PREFERRED_P(__get_cpu_features (), name)
b1dca6
+
b1dca6
+#define CPU_FEATURES_CPU_P(ptr, name) \
b1dca6
+  CPU_FEATURE_CPU_P (ptr, name)
b1dca6
+#define CPU_FEATURES_ARCH_P(ptr, name) \
b1dca6
+  CPU_FEATURE_PREFERRED_P (ptr, name)
b1dca6
+#define HAS_ARCH_FEATURE(name) \
b1dca6
+  CPU_FEATURE_PREFERRED (name)
b1dca6
 
b1dca6
 /* CPU features.  */
b1dca6
 
b1dca6
@@ -247,6 +165,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define bit_cpu_CMPXCHG16B	(1u << 13)
b1dca6
 #define bit_cpu_XTPRUPDCTRL	(1u << 14)
b1dca6
 #define bit_cpu_PDCM		(1u << 15)
b1dca6
+#define bit_cpu_INDEX_1_ECX_16	(1u << 16)
b1dca6
 #define bit_cpu_PCID		(1u << 17)
b1dca6
 #define bit_cpu_DCA		(1u << 18)
b1dca6
 #define bit_cpu_SSE4_1		(1u << 19)
b1dca6
@@ -261,6 +180,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define bit_cpu_AVX		(1u << 28)
b1dca6
 #define bit_cpu_F16C		(1u << 29)
b1dca6
 #define bit_cpu_RDRAND		(1u << 30)
b1dca6
+#define bit_cpu_INDEX_1_ECX_31	(1u << 31)
b1dca6
 
b1dca6
 /* EDX.  */
b1dca6
 #define bit_cpu_FPU		(1u << 0)
b1dca6
@@ -273,6 +193,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define bit_cpu_MCE		(1u << 7)
b1dca6
 #define bit_cpu_CX8		(1u << 8)
b1dca6
 #define bit_cpu_APIC		(1u << 9)
b1dca6
+#define bit_cpu_INDEX_1_EDX_10	(1u << 10)
b1dca6
 #define bit_cpu_SEP		(1u << 11)
b1dca6
 #define bit_cpu_MTRR		(1u << 12)
b1dca6
 #define bit_cpu_PGE		(1u << 13)
b1dca6
@@ -282,6 +203,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define bit_cpu_PSE_36		(1u << 17)
b1dca6
 #define bit_cpu_PSN		(1u << 18)
b1dca6
 #define bit_cpu_CLFSH		(1u << 19)
b1dca6
+#define bit_cpu_INDEX_1_EDX_20	(1u << 20)
b1dca6
 #define bit_cpu_DS		(1u << 21)
b1dca6
 #define bit_cpu_ACPI		(1u << 22)
b1dca6
 #define bit_cpu_MMX		(1u << 23)
b1dca6
@@ -291,6 +213,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define bit_cpu_SS		(1u << 27)
b1dca6
 #define bit_cpu_HTT		(1u << 28)
b1dca6
 #define bit_cpu_TM		(1u << 29)
b1dca6
+#define bit_cpu_INDEX_1_EDX_30	(1u << 30)
b1dca6
 #define bit_cpu_PBE		(1u << 31)
b1dca6
 
b1dca6
 /* COMMON_CPUID_INDEX_7.  */
b1dca6
@@ -302,12 +225,14 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define bit_cpu_BMI1		(1u << 3)
b1dca6
 #define bit_cpu_HLE		(1u << 4)
b1dca6
 #define bit_cpu_AVX2		(1u << 5)
b1dca6
+#define bit_cpu_INDEX_7_EBX_6	(1u << 6)
b1dca6
 #define bit_cpu_SMEP		(1u << 7)
b1dca6
 #define bit_cpu_BMI2		(1u << 8)
b1dca6
 #define bit_cpu_ERMS		(1u << 9)
b1dca6
 #define bit_cpu_INVPCID		(1u << 10)
b1dca6
 #define bit_cpu_RTM		(1u << 11)
b1dca6
 #define bit_cpu_PQM		(1u << 12)
b1dca6
+#define bit_cpu_DEPR_FPU_CS_DS	(1u << 13)
b1dca6
 #define bit_cpu_MPX		(1u << 14)
b1dca6
 #define bit_cpu_PQE		(1u << 15)
b1dca6
 #define bit_cpu_AVX512F		(1u << 16)
b1dca6
@@ -316,6 +241,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define bit_cpu_ADX		(1u << 19)
b1dca6
 #define bit_cpu_SMAP		(1u << 20)
b1dca6
 #define bit_cpu_AVX512_IFMA	(1u << 21)
b1dca6
+#define bit_cpu_INDEX_7_EBX_22	(1u << 22)
b1dca6
 #define bit_cpu_CLFLUSHOPT	(1u << 23)
b1dca6
 #define bit_cpu_CLWB		(1u << 24)
b1dca6
 #define bit_cpu_TRACE		(1u << 25)
b1dca6
@@ -340,9 +266,17 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define bit_cpu_VPCLMULQDQ	(1u << 10)
b1dca6
 #define bit_cpu_AVX512_VNNI	(1u << 11)
b1dca6
 #define bit_cpu_AVX512_BITALG	(1u << 12)
b1dca6
+#define bit_cpu_INDEX_7_ECX_13	(1u << 13)
b1dca6
 #define bit_cpu_AVX512_VPOPCNTDQ (1u << 14)
b1dca6
+#define bit_cpu_INDEX_7_ECX_15	(1u << 15)
b1dca6
+#define bit_cpu_INDEX_7_ECX_16	(1u << 16)
b1dca6
+/* Note: Bits 17-21: The value of MAWAU used by the BNDLDX and BNDSTX
b1dca6
+   instructions in 64-bit mode.  */
b1dca6
 #define bit_cpu_RDPID		(1u << 22)
b1dca6
+#define bit_cpu_INDEX_7_ECX_23	(1u << 23)
b1dca6
+#define bit_cpu_INDEX_7_ECX_24	(1u << 24)
b1dca6
 #define bit_cpu_CLDEMOTE	(1u << 25)
b1dca6
+#define bit_cpu_INDEX_7_ECX_26	(1u << 26)
b1dca6
 #define bit_cpu_MOVDIRI		(1u << 27)
b1dca6
 #define bit_cpu_MOVDIR64B	(1u << 28)
b1dca6
 #define bit_cpu_ENQCMD		(1u << 29)
b1dca6
@@ -350,17 +284,30 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define bit_cpu_PKS		(1u << 31)
b1dca6
 
b1dca6
 /* EDX.  */
b1dca6
+#define bit_cpu_INDEX_7_EDX_0	(1u << 0)
b1dca6
+#define bit_cpu_INDEX_7_EDX_1	(1u << 1)
b1dca6
 #define bit_cpu_AVX512_4VNNIW	(1u << 2)
b1dca6
 #define bit_cpu_AVX512_4FMAPS	(1u << 3)
b1dca6
 #define bit_cpu_FSRM		(1u << 4)
b1dca6
+#define bit_cpu_INDEX_7_EDX_5	(1u << 5)
b1dca6
+#define bit_cpu_INDEX_7_EDX_6	(1u << 6)
b1dca6
+#define bit_cpu_INDEX_7_EDX_7	(1u << 7)
b1dca6
 #define bit_cpu_AVX512_VP2INTERSECT (1u << 8)
b1dca6
+#define bit_cpu_INDEX_7_EDX_9	(1u << 9)
b1dca6
 #define bit_cpu_MD_CLEAR	(1u << 10)
b1dca6
+#define bit_cpu_INDEX_7_EDX_11	(1u << 11)
b1dca6
+#define bit_cpu_INDEX_7_EDX_12	(1u << 12)
b1dca6
+#define bit_cpu_INDEX_7_EDX_13	(1u << 13)
b1dca6
 #define bit_cpu_SERIALIZE	(1u << 14)
b1dca6
 #define bit_cpu_HYBRID		(1u << 15)
b1dca6
 #define bit_cpu_TSXLDTRK	(1u << 16)
b1dca6
+#define bit_cpu_INDEX_7_EDX_17	(1u << 17)
b1dca6
 #define bit_cpu_PCONFIG		(1u << 18)
b1dca6
+#define bit_cpu_INDEX_7_EDX_19	(1u << 19)
b1dca6
 #define bit_cpu_IBT		(1u << 20)
b1dca6
+#define bit_cpu_INDEX_7_EDX_21	(1u << 21)
b1dca6
 #define bit_cpu_AMX_BF16	(1u << 22)
b1dca6
+#define bit_cpu_INDEX_7_EDX_23	(1u << 23)
b1dca6
 #define bit_cpu_AMX_TILE	(1u << 24)
b1dca6
 #define bit_cpu_AMX_INT8	(1u << 25)
b1dca6
 #define bit_cpu_IBRS_IBPB	(1u << 26)
b1dca6
@@ -433,6 +380,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define index_cpu_CMPXCHG16B	COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_XTPRUPDCTRL	COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_PDCM		COMMON_CPUID_INDEX_1
b1dca6
+#define index_cpu_INDEX_1_ECX_16 COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_PCID		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_DCA		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_SSE4_1	COMMON_CPUID_INDEX_1
b1dca6
@@ -447,6 +395,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define index_cpu_AVX		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_F16C		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_RDRAND	COMMON_CPUID_INDEX_1
b1dca6
+#define index_cpu_INDEX_1_ECX_31 COMMON_CPUID_INDEX_1
b1dca6
 
b1dca6
 /* ECX.  */
b1dca6
 #define index_cpu_FPU		COMMON_CPUID_INDEX_1
b1dca6
@@ -459,6 +408,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define index_cpu_MCE		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_CX8		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_APIC		COMMON_CPUID_INDEX_1
b1dca6
+#define index_cpu_INDEX_1_EDX_10 COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_SEP		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_MTRR		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_PGE		COMMON_CPUID_INDEX_1
b1dca6
@@ -468,6 +418,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define index_cpu_PSE_36	COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_PSN		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_CLFSH		COMMON_CPUID_INDEX_1
b1dca6
+#define index_cpu_INDEX_1_EDX_20 COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_DS		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_ACPI		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_MMX		COMMON_CPUID_INDEX_1
b1dca6
@@ -477,6 +428,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define index_cpu_SS		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_HTT		COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_TM		COMMON_CPUID_INDEX_1
b1dca6
+#define index_cpu_INDEX_1_EDX_30 COMMON_CPUID_INDEX_1
b1dca6
 #define index_cpu_PBE		COMMON_CPUID_INDEX_1
b1dca6
 
b1dca6
 /* COMMON_CPUID_INDEX_7.  */
b1dca6
@@ -488,12 +440,14 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define index_cpu_BMI1		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_HLE		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AVX2		COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EBX_6	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_SMEP		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_BMI2		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_ERMS		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_INVPCID	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_RTM		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_PQM		COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_DEPR_FPU_CS_DS COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_MPX		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_PQE		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AVX512F	COMMON_CPUID_INDEX_7
b1dca6
@@ -502,6 +456,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define index_cpu_ADX		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_SMAP		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AVX512_IFMA	COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EBX_22 COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_CLFLUSHOPT	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_CLWB		COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_TRACE		COMMON_CPUID_INDEX_7
b1dca6
@@ -526,9 +481,15 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define index_cpu_VPCLMULQDQ	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AVX512_VNNI	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AVX512_BITALG COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_ECX_13 COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AVX512_VPOPCNTDQ COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_ECX_15 COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_ECX_16 COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_RDPID		COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_ECX_23 COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_ECX_24 COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_CLDEMOTE	COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_ECX_26 COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_MOVDIRI	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_MOVDIR64B	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_ENQCMD	COMMON_CPUID_INDEX_7
b1dca6
@@ -536,17 +497,30 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define index_cpu_PKS		COMMON_CPUID_INDEX_7
b1dca6
 
b1dca6
 /* EDX.  */
b1dca6
+#define index_cpu_INDEX_7_EDX_0	COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_1	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AVX512_4VNNIW COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AVX512_4FMAPS	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_FSRM		COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_5	COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_6	COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_7	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AVX512_VP2INTERSECT COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_9	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_MD_CLEAR	COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_11 COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_12 COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_13 COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_SERIALIZE	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_HYBRID	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_TSXLDTRK	COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_17 COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_PCONFIG	COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_19 COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_IBT		COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_21 COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AMX_BF16	COMMON_CPUID_INDEX_7
b1dca6
+#define index_cpu_INDEX_7_EDX_23 COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AMX_TILE	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_AMX_INT8	COMMON_CPUID_INDEX_7
b1dca6
 #define index_cpu_IBRS_IBPB	COMMON_CPUID_INDEX_7
b1dca6
@@ -619,6 +593,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define reg_CMPXCHG16B		ecx
b1dca6
 #define reg_XTPRUPDCTRL		ecx
b1dca6
 #define reg_PDCM		ecx
b1dca6
+#define reg_INDEX_1_ECX_16	ecx
b1dca6
 #define reg_PCID		ecx
b1dca6
 #define reg_DCA			ecx
b1dca6
 #define reg_SSE4_1		ecx
b1dca6
@@ -633,6 +608,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define reg_AVX			ecx
b1dca6
 #define reg_F16C		ecx
b1dca6
 #define reg_RDRAND		ecx
b1dca6
+#define reg_INDEX_1_ECX_31	ecx
b1dca6
 
b1dca6
 /* EDX.  */
b1dca6
 #define reg_FPU			edx
b1dca6
@@ -645,6 +621,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define reg_MCE			edx
b1dca6
 #define reg_CX8			edx
b1dca6
 #define reg_APIC		edx
b1dca6
+#define reg_INDEX_1_EDX_10	edx
b1dca6
 #define reg_SEP			edx
b1dca6
 #define reg_MTRR		edx
b1dca6
 #define reg_PGE			edx
b1dca6
@@ -654,6 +631,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define reg_PSE_36		edx
b1dca6
 #define reg_PSN			edx
b1dca6
 #define reg_CLFSH		edx
b1dca6
+#define reg_INDEX_1_EDX_20	edx
b1dca6
 #define reg_DS			edx
b1dca6
 #define reg_ACPI		edx
b1dca6
 #define reg_MMX			edx
b1dca6
@@ -663,6 +641,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define reg_SS			edx
b1dca6
 #define reg_HTT			edx
b1dca6
 #define reg_TM			edx
b1dca6
+#define reg_INDEX_1_EDX_30	edx
b1dca6
 #define reg_PBE			edx
b1dca6
 
b1dca6
 /* COMMON_CPUID_INDEX_7.  */
b1dca6
@@ -675,11 +654,13 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define reg_HLE			ebx
b1dca6
 #define reg_BMI2		ebx
b1dca6
 #define reg_AVX2		ebx
b1dca6
+#define reg_INDEX_7_EBX_6	ebx
b1dca6
 #define reg_SMEP		ebx
b1dca6
 #define reg_ERMS		ebx
b1dca6
 #define reg_INVPCID		ebx
b1dca6
 #define reg_RTM			ebx
b1dca6
 #define reg_PQM			ebx
b1dca6
+#define reg_DEPR_FPU_CS_DS	ebx
b1dca6
 #define reg_MPX			ebx
b1dca6
 #define reg_PQE			ebx
b1dca6
 #define reg_AVX512F		ebx
b1dca6
@@ -688,6 +669,7 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define reg_ADX			ebx
b1dca6
 #define reg_SMAP		ebx
b1dca6
 #define reg_AVX512_IFMA		ebx
b1dca6
+#define reg_INDEX_7_EBX_22	ebx
b1dca6
 #define reg_CLFLUSHOPT		ebx
b1dca6
 #define reg_CLWB		ebx
b1dca6
 #define reg_TRACE		ebx
b1dca6
@@ -712,9 +694,15 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define reg_VPCLMULQDQ		ecx
b1dca6
 #define reg_AVX512_VNNI		ecx
b1dca6
 #define reg_AVX512_BITALG	ecx
b1dca6
+#define reg_INDEX_7_ECX_13	ecx
b1dca6
 #define reg_AVX512_VPOPCNTDQ	ecx
b1dca6
+#define reg_INDEX_7_ECX_15	ecx
b1dca6
+#define reg_INDEX_7_ECX_16	ecx
b1dca6
 #define reg_RDPID		ecx
b1dca6
+#define reg_INDEX_7_ECX_23	ecx
b1dca6
+#define reg_INDEX_7_ECX_24	ecx
b1dca6
 #define reg_CLDEMOTE		ecx
b1dca6
+#define reg_INDEX_7_ECX_26	ecx
b1dca6
 #define reg_MOVDIRI		ecx
b1dca6
 #define reg_MOVDIR64B		ecx
b1dca6
 #define reg_ENQCMD		ecx
b1dca6
@@ -722,17 +710,30 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define reg_PKS			ecx
b1dca6
 
b1dca6
 /* EDX.  */
b1dca6
+#define reg_INDEX_7_EDX_0	edx
b1dca6
+#define reg_INDEX_7_EDX_1	edx
b1dca6
 #define reg_AVX512_4VNNIW	edx
b1dca6
 #define reg_AVX512_4FMAPS	edx
b1dca6
 #define reg_FSRM		edx
b1dca6
+#define reg_INDEX_7_EDX_5	edx
b1dca6
+#define reg_INDEX_7_EDX_6	edx
b1dca6
+#define reg_INDEX_7_EDX_7	edx
b1dca6
 #define reg_AVX512_VP2INTERSECT	edx
b1dca6
+#define reg_INDEX_7_EDX_9	edx
b1dca6
 #define reg_MD_CLEAR		edx
b1dca6
+#define reg_INDEX_7_EDX_11	edx
b1dca6
+#define reg_INDEX_7_EDX_12	edx
b1dca6
+#define reg_INDEX_7_EDX_13	edx
b1dca6
 #define reg_SERIALIZE		edx
b1dca6
 #define reg_HYBRID		edx
b1dca6
 #define reg_TSXLDTRK		edx
b1dca6
+#define reg_INDEX_7_EDX_17	edx
b1dca6
 #define reg_PCONFIG		edx
b1dca6
+#define reg_INDEX_7_EDX_19	edx
b1dca6
 #define reg_IBT			edx
b1dca6
+#define reg_INDEX_7_EDX_21	edx
b1dca6
 #define reg_AMX_BF16		edx
b1dca6
+#define reg_INDEX_7_EDX_23	edx
b1dca6
 #define reg_AMX_TILE		edx
b1dca6
 #define reg_AMX_INT8		edx
b1dca6
 #define reg_IBRS_IBPB		edx
b1dca6
@@ -821,23 +822,6 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 #define index_arch_MathVec_Prefer_No_AVX512	PREFERRED_FEATURE_INDEX_1
b1dca6
 #define index_arch_Prefer_FSRM			PREFERRED_FEATURE_INDEX_1
b1dca6
 
b1dca6
-#define feature_Fast_Rep_String			preferred
b1dca6
-#define feature_Fast_Copy_Backward		preferred
b1dca6
-#define feature_Slow_BSF			preferred
b1dca6
-#define feature_Fast_Unaligned_Load		preferred
b1dca6
-#define feature_Prefer_PMINUB_for_stringop 	preferred
b1dca6
-#define feature_Fast_Unaligned_Copy		preferred
b1dca6
-#define feature_I586				preferred
b1dca6
-#define feature_I686				preferred
b1dca6
-#define feature_Slow_SSE4_2			preferred
b1dca6
-#define feature_AVX_Fast_Unaligned_Load		preferred
b1dca6
-#define feature_Prefer_MAP_32BIT_EXEC		preferred
b1dca6
-#define feature_Prefer_No_VZEROUPPER		preferred
b1dca6
-#define feature_Prefer_ERMS			preferred
b1dca6
-#define feature_Prefer_No_AVX512		preferred
b1dca6
-#define feature_MathVec_Prefer_No_AVX512	preferred
b1dca6
-#define feature_Prefer_FSRM			preferred
b1dca6
-
b1dca6
 /* XCR0 Feature flags.  */
b1dca6
 #define bit_XMM_state		(1u << 1)
b1dca6
 #define bit_YMM_state		(1u << 2)
b1dca6
@@ -851,8 +835,6 @@ extern const struct cpu_features *__get_cpu_features (void)
b1dca6
 /* Unused for x86.  */
b1dca6
 #  define INIT_ARCH()
b1dca6
 #  define __get_cpu_features()	(&GLRO(dl_x86_cpu_features))
b1dca6
-#  define x86_get_cpuid_registers(i) \
b1dca6
-       (&(GLRO(dl_x86_cpu_features).cpuid[i]))
b1dca6
 # endif
b1dca6
 
b1dca6
 #ifdef __x86_64__
b1dca6
diff --git a/sysdeps/x86/cpu-tunables.c b/sysdeps/x86/cpu-tunables.c
b1dca6
index 012ae48933055eaa..0728023007a0f423 100644
b1dca6
--- a/sysdeps/x86/cpu-tunables.c
b1dca6
+++ b/sysdeps/x86/cpu-tunables.c
b1dca6
@@ -43,66 +43,45 @@ extern __typeof (memcmp) DEFAULT_MEMCMP;
b1dca6
   _Static_assert (sizeof (#name) - 1 == len, #name " != " #len);	\
b1dca6
   if (!DEFAULT_MEMCMP (f, #name, len))					\
b1dca6
     {									\
b1dca6
-      cpu_features->cpuid[index_cpu_##name].reg_##name			\
b1dca6
-	&= ~bit_cpu_##name;						\
b1dca6
+      CPU_FEATURE_UNSET (cpu_features, name)				\
b1dca6
       break;								\
b1dca6
     }
b1dca6
 
b1dca6
-/* Disable an ARCH feature NAME.  We don't enable an ARCH feature which
b1dca6
-   isn't available.  */
b1dca6
-# define CHECK_GLIBC_IFUNC_ARCH_OFF(f, cpu_features, name, len)		\
b1dca6
+/* Disable a preferred feature NAME.  We don't enable a preferred feature
b1dca6
+   which isn't available.  */
b1dca6
+# define CHECK_GLIBC_IFUNC_PREFERRED_OFF(f, cpu_features, name, len)	\
b1dca6
   _Static_assert (sizeof (#name) - 1 == len, #name " != " #len);	\
b1dca6
   if (!DEFAULT_MEMCMP (f, #name, len))					\
b1dca6
     {									\
b1dca6
-      cpu_features->feature_##name[index_arch_##name]			\
b1dca6
+      cpu_features->preferred[index_arch_##name]			\
b1dca6
 	&= ~bit_arch_##name;						\
b1dca6
       break;								\
b1dca6
     }
b1dca6
 
b1dca6
-/* Enable/disable an ARCH feature NAME.  */
b1dca6
-# define CHECK_GLIBC_IFUNC_ARCH_BOTH(f, cpu_features, name, disable,	\
b1dca6
-				    len)				\
b1dca6
+/* Enable/disable a preferred feature NAME.  */
b1dca6
+# define CHECK_GLIBC_IFUNC_PREFERRED_BOTH(f, cpu_features, name,	\
b1dca6
+					  disable, len)			\
b1dca6
   _Static_assert (sizeof (#name) - 1 == len, #name " != " #len);	\
b1dca6
   if (!DEFAULT_MEMCMP (f, #name, len))					\
b1dca6
     {									\
b1dca6
       if (disable)							\
b1dca6
-	cpu_features->feature_##name[index_arch_##name]			\
b1dca6
-	  &= ~bit_arch_##name;						\
b1dca6
+	cpu_features->preferred[index_arch_##name] &= ~bit_arch_##name;	\
b1dca6
       else								\
b1dca6
-	cpu_features->feature_##name[index_arch_##name]			\
b1dca6
-	  |= bit_arch_##name;						\
b1dca6
+	cpu_features->preferred[index_arch_##name] |= bit_arch_##name;	\
b1dca6
       break;								\
b1dca6
     }
b1dca6
 
b1dca6
-/* Enable/disable an ARCH feature NAME.  Enable an ARCH feature only
b1dca6
-   if the ARCH feature NEED is also enabled.  */
b1dca6
-# define CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH(f, cpu_features, name,	\
b1dca6
+/* Enable/disable a preferred feature NAME.  Enable a preferred feature
b1dca6
+   only if the feature NEED is usable.  */
b1dca6
+# define CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH(f, cpu_features, name,	\
b1dca6
 					       need, disable, len)	\
b1dca6
   _Static_assert (sizeof (#name) - 1 == len, #name " != " #len);	\
b1dca6
   if (!DEFAULT_MEMCMP (f, #name, len))					\
b1dca6
     {									\
b1dca6
       if (disable)							\
b1dca6
-	cpu_features->feature_##name[index_arch_##name]			\
b1dca6
-	  &= ~bit_arch_##name;						\
b1dca6
-      else if (CPU_FEATURES_ARCH_P (cpu_features, need))		\
b1dca6
-	cpu_features->feature_##name[index_arch_##name]			\
b1dca6
-	  |= bit_arch_##name;						\
b1dca6
-      break;								\
b1dca6
-    }
b1dca6
-
b1dca6
-/* Enable/disable an ARCH feature NAME.  Enable an ARCH feature only
b1dca6
-   if the CPU feature NEED is also enabled.  */
b1dca6
-# define CHECK_GLIBC_IFUNC_ARCH_NEED_CPU_BOTH(f, cpu_features, name,	\
b1dca6
-					      need, disable, len)	\
b1dca6
-  _Static_assert (sizeof (#name) - 1 == len, #name " != " #len);	\
b1dca6
-  if (!DEFAULT_MEMCMP (f, #name, len))					\
b1dca6
-    {									\
b1dca6
-      if (disable)							\
b1dca6
-	cpu_features->feature_##name[index_arch_##name]			\
b1dca6
-	  &= ~bit_arch_##name;						\
b1dca6
-      else if (CPU_FEATURES_CPU_P (cpu_features, need))			\
b1dca6
-	cpu_features->feature_##name[index_arch_##name]			\
b1dca6
-	  |= bit_arch_##name;						\
b1dca6
+	cpu_features->preferred[index_arch_##name] &= ~bit_arch_##name;	\
b1dca6
+      else if (CPU_FEATURE_USABLE_P (cpu_features, need))		\
b1dca6
+	cpu_features->preferred[index_arch_##name] |= bit_arch_##name;	\
b1dca6
       break;								\
b1dca6
     }
b1dca6
 
b1dca6
@@ -178,8 +157,8 @@ TUNABLE_CALLBACK (set_hwcaps) (tunable_val_t *valp)
b1dca6
 	      CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, ERMS, 4);
b1dca6
 	      CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, FMA4, 4);
b1dca6
 	      CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, SSE2, 4);
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, I586, 4);
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, I686, 4);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_OFF (n, cpu_features, I586, 4);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_OFF (n, cpu_features, I686, 4);
b1dca6
 	    }
b1dca6
 	  break;
b1dca6
 	case 5:
b1dca6
@@ -197,6 +176,13 @@ TUNABLE_CALLBACK (set_hwcaps) (tunable_val_t *valp)
b1dca6
 	      CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, POPCNT, 6);
b1dca6
 	      CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, SSE4_1, 6);
b1dca6
 	      CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, SSE4_2, 6);
b1dca6
+	      if (!DEFAULT_MEMCMP (n, "XSAVEC", 6))
b1dca6
+		{
b1dca6
+		  /* Update xsave_state_size to XSAVE state size.  */
b1dca6
+		  cpu_features->xsave_state_size
b1dca6
+		    = cpu_features->xsave_state_full_size;
b1dca6
+		  CPU_FEATURE_UNSET (cpu_features, XSAVEC);
b1dca6
+		}
b1dca6
 	    }
b1dca6
 	  break;
b1dca6
 	case 7:
b1dca6
@@ -216,115 +202,85 @@ TUNABLE_CALLBACK (set_hwcaps) (tunable_val_t *valp)
b1dca6
 	      CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, AVX512PF, 8);
b1dca6
 	      CHECK_GLIBC_IFUNC_CPU_OFF (n, cpu_features, AVX512VL, 8);
b1dca6
 	    }
b1dca6
-	  CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features, Slow_BSF,
b1dca6
-				       disable, 8);
b1dca6
-	  break;
b1dca6
-	case 10:
b1dca6
-	  if (disable)
b1dca6
-	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, AVX_Usable,
b1dca6
-					  10);
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, FMA_Usable,
b1dca6
-					  10);
b1dca6
-	    }
b1dca6
+	  CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features, Slow_BSF,
b1dca6
+					    disable, 8);
b1dca6
 	  break;
b1dca6
 	case 11:
b1dca6
-	  if (disable)
b1dca6
 	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, AVX2_Usable,
b1dca6
-					  11);
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features, FMA4_Usable,
b1dca6
-					  11);
b1dca6
-	    }
b1dca6
-	  CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features, Prefer_ERMS,
b1dca6
-				       disable, 11);
b1dca6
-	  CHECK_GLIBC_IFUNC_ARCH_NEED_CPU_BOTH (n, cpu_features,
b1dca6
-						Slow_SSE4_2, SSE4_2,
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
b1dca6
+						Prefer_ERMS,
b1dca6
 						disable, 11);
b1dca6
-	  CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features, Prefer_FSRM,
b1dca6
-				       disable, 11);
b1dca6
-	  break;
b1dca6
-	case 13:
b1dca6
-	  if (disable)
b1dca6
-	    {
b1dca6
-	      /* Update xsave_state_size to XSAVE state size.  */
b1dca6
-	      cpu_features->xsave_state_size
b1dca6
-		= cpu_features->xsave_state_full_size;
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features,
b1dca6
-					  XSAVEC_Usable, 13);
b1dca6
-	    }
b1dca6
-	  break;
b1dca6
-	case 14:
b1dca6
-	  if (disable)
b1dca6
-	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features,
b1dca6
-					  AVX512F_Usable, 14);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
b1dca6
+						Prefer_FSRM,
b1dca6
+						disable, 11);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH (n, cpu_features,
b1dca6
+						     Slow_SSE4_2,
b1dca6
+						     SSE4_2,
b1dca6
+						     disable, 11);
b1dca6
 	    }
b1dca6
 	  break;
b1dca6
 	case 15:
b1dca6
-	  if (disable)
b1dca6
 	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_OFF (n, cpu_features,
b1dca6
-					  AVX512DQ_Usable, 15);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
b1dca6
+						Fast_Rep_String,
b1dca6
+						disable, 15);
b1dca6
 	    }
b1dca6
-	  CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features, Fast_Rep_String,
b1dca6
-				       disable, 15);
b1dca6
 	  break;
b1dca6
 	case 16:
b1dca6
 	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH
b1dca6
-		(n, cpu_features, Prefer_No_AVX512, AVX512F_Usable,
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH
b1dca6
+		(n, cpu_features, Prefer_No_AVX512, AVX512F,
b1dca6
 		 disable, 16);
b1dca6
 	    }
b1dca6
 	  break;
b1dca6
 	case 18:
b1dca6
 	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features,
b1dca6
-					   Fast_Copy_Backward, disable,
b1dca6
-					   18);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
b1dca6
+						Fast_Copy_Backward,
b1dca6
+						disable, 18);
b1dca6
 	    }
b1dca6
 	  break;
b1dca6
 	case 19:
b1dca6
 	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features,
b1dca6
-					   Fast_Unaligned_Load, disable,
b1dca6
-					   19);
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features,
b1dca6
-					   Fast_Unaligned_Copy, disable,
b1dca6
-					   19);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
b1dca6
+						Fast_Unaligned_Load,
b1dca6
+						disable, 19);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
b1dca6
+						Fast_Unaligned_Copy,
b1dca6
+						disable, 19);
b1dca6
 	    }
b1dca6
 	  break;
b1dca6
 	case 20:
b1dca6
 	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH
b1dca6
-		(n, cpu_features, Prefer_No_VZEROUPPER, AVX_Usable,
b1dca6
-		 disable, 20);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH
b1dca6
+		(n, cpu_features, Prefer_No_VZEROUPPER, AVX, disable,
b1dca6
+		 20);
b1dca6
 	    }
b1dca6
 	  break;
b1dca6
 	case 21:
b1dca6
 	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_BOTH (n, cpu_features,
b1dca6
-					   Prefer_MAP_32BIT_EXEC, disable,
b1dca6
-					   21);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_BOTH (n, cpu_features,
b1dca6
+						Prefer_MAP_32BIT_EXEC,
b1dca6
+						disable, 21);
b1dca6
 	    }
b1dca6
 	  break;
b1dca6
 	case 23:
b1dca6
 	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH
b1dca6
-		(n, cpu_features, AVX_Fast_Unaligned_Load, AVX_Usable,
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH
b1dca6
+		(n, cpu_features, AVX_Fast_Unaligned_Load, AVX,
b1dca6
 		 disable, 23);
b1dca6
 	    }
b1dca6
 	  break;
b1dca6
 	case 24:
b1dca6
 	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_NEED_ARCH_BOTH
b1dca6
-		(n, cpu_features, MathVec_Prefer_No_AVX512,
b1dca6
-		 AVX512F_Usable, disable, 24);
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH
b1dca6
+		(n, cpu_features, MathVec_Prefer_No_AVX512, AVX512F,
b1dca6
+		 disable, 24);
b1dca6
 	    }
b1dca6
 	  break;
b1dca6
 	case 26:
b1dca6
 	    {
b1dca6
-	      CHECK_GLIBC_IFUNC_ARCH_NEED_CPU_BOTH
b1dca6
+	      CHECK_GLIBC_IFUNC_PREFERRED_NEED_BOTH
b1dca6
 		(n, cpu_features, Prefer_PMINUB_for_stringop, SSE2,
b1dca6
 		 disable, 26);
b1dca6
 	    }
b1dca6
diff --git a/sysdeps/x86/dl-cet.c b/sysdeps/x86/dl-cet.c
b1dca6
index d481bddc27e5d7cc..11ff0618fae7230f 100644
b1dca6
--- a/sysdeps/x86/dl-cet.c
b1dca6
+++ b/sysdeps/x86/dl-cet.c
b1dca6
@@ -74,10 +74,10 @@ dl_cet_check (struct link_map *m, const char *program)
b1dca6
 
b1dca6
 	     GLIBC_TUNABLES=glibc.cpu.hwcaps=-IBT,-SHSTK
b1dca6
 	   */
b1dca6
-	  enable_ibt &= (HAS_CPU_FEATURE (IBT)
b1dca6
+	  enable_ibt &= (CPU_FEATURE_USABLE (IBT)
b1dca6
 			 && (enable_ibt_type == cet_always_on
b1dca6
 			     || (m->l_cet & lc_ibt) != 0));
b1dca6
-	  enable_shstk &= (HAS_CPU_FEATURE (SHSTK)
b1dca6
+	  enable_shstk &= (CPU_FEATURE_USABLE (SHSTK)
b1dca6
 			   && (enable_shstk_type == cet_always_on
b1dca6
 			       || (m->l_cet & lc_shstk) != 0));
b1dca6
 	}
b1dca6
diff --git a/sysdeps/x86/tst-get-cpu-features.c b/sysdeps/x86/tst-get-cpu-features.c
b1dca6
index c56f309ba0736c0d..85ec9d5a091e2c88 100644
b1dca6
--- a/sysdeps/x86/tst-get-cpu-features.c
b1dca6
+++ b/sysdeps/x86/tst-get-cpu-features.c
b1dca6
@@ -137,6 +137,7 @@ do_test (void)
b1dca6
   CHECK_CPU_FEATURE (INVPCID);
b1dca6
   CHECK_CPU_FEATURE (RTM);
b1dca6
   CHECK_CPU_FEATURE (PQM);
b1dca6
+  CHECK_CPU_FEATURE (DEPR_FPU_CS_DS);
b1dca6
   CHECK_CPU_FEATURE (MPX);
b1dca6
   CHECK_CPU_FEATURE (PQE);
b1dca6
   CHECK_CPU_FEATURE (AVX512F);
b1dca6
@@ -218,35 +219,156 @@ do_test (void)
b1dca6
   CHECK_CPU_FEATURE (AVX512_BF16);
b1dca6
 
b1dca6
   printf ("Usable CPU features:\n");
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SSE3);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PCLMULQDQ);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (DTES64);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MONITOR);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (DS_CPL);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (VMX);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SMX);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (EST);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (TM2);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SSSE3);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (CNXT_ID);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SDBG);
b1dca6
   CHECK_CPU_FEATURE_USABLE (FMA);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (CMPXCHG16B);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (XTPRUPDCTRL);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PDCM);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PCID);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (DCA);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SSE4_1);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SSE4_2);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (X2APIC);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MOVBE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (POPCNT);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (TSC_DEADLINE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (AES);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (XSAVE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (OSXSAVE);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX);
b1dca6
   CHECK_CPU_FEATURE_USABLE (F16C);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (RDRAND);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (FPU);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (VME);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (DE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PSE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (TSC);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MSR);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PAE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MCE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (CX8);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (APIC);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SEP);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MTRR);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PGE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MCA);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (CMOV);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PAT);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PSE_36);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PSN);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (CLFSH);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (DS);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (ACPI);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MMX);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (FXSR);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SSE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SSE2);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SS);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (HTT);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (TM);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PBE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (FSGSBASE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (TSC_ADJUST);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SGX);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (BMI1);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (HLE);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX2);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SMEP);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (BMI2);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (ERMS);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (INVPCID);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (RTM);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PQM);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (DEPR_FPU_CS_DS);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MPX);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PQE);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512F);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512DQ);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (RDSEED);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (ADX);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SMAP);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512_IFMA);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (CLFLUSHOPT);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (CLWB);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (TRACE);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512PF);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512ER);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512CD);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SHA);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512BW);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512VL);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PREFETCHWT1);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512_VBMI);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (UMIP);
b1dca6
   CHECK_CPU_FEATURE_USABLE (PKU);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (OSPKE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (WAITPKG);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512_VBMI2);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SHSTK);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (GFNI);
b1dca6
   CHECK_CPU_FEATURE_USABLE (VAES);
b1dca6
   CHECK_CPU_FEATURE_USABLE (VPCLMULQDQ);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512_VNNI);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512_BITALG);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512_VPOPCNTDQ);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (RDPID);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (CLDEMOTE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MOVDIRI);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MOVDIR64B);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (ENQCMD);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SGX_LC);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PKS);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512_4VNNIW);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512_4FMAPS);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (FSRM);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512_VP2INTERSECT);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (MD_CLEAR);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SERIALIZE);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (HYBRID);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (TSXLDTRK);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PCONFIG);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (IBT);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AMX_BF16);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AMX_TILE);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AMX_INT8);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (IBRS_IBPB);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (STIBP);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (L1D_FLUSH);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (ARCH_CAPABILITIES);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (CORE_CAPABILITIES);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SSBD);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (LAHF64_SAHF64);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SVM);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (LZCNT);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SSE4A);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PREFETCHW);
b1dca6
   CHECK_CPU_FEATURE_USABLE (XOP);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (LWP);
b1dca6
   CHECK_CPU_FEATURE_USABLE (FMA4);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (TBM);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (SYSCALL_SYSRET);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (NX);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (PAGE1GB);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (RDTSCP);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (LM);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (XSAVEOPT);
b1dca6
   CHECK_CPU_FEATURE_USABLE (XSAVEC);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (XGETBV_ECX_1);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (XSAVES);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (XFD);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (INVARIANT_TSC);
b1dca6
+  CHECK_CPU_FEATURE_USABLE (WBNOINVD);
b1dca6
   CHECK_CPU_FEATURE_USABLE (AVX512_BF16);
b1dca6
 
b1dca6
   return 0;
b1dca6
diff --git a/sysdeps/x86_64/Makefile b/sysdeps/x86_64/Makefile
b1dca6
index e3bb45d78811d70f..42b97c5cc73892cc 100644
b1dca6
--- a/sysdeps/x86_64/Makefile
b1dca6
+++ b/sysdeps/x86_64/Makefile
b1dca6
@@ -57,7 +57,7 @@ modules-names += x86_64/tst-x86_64mod-1
b1dca6
 LDFLAGS-tst-x86_64mod-1.so = -Wl,-soname,tst-x86_64mod-1.so
b1dca6
 ifneq (no,$(have-tunables))
b1dca6
 # Test the state size for XSAVE when XSAVEC is disabled.
b1dca6
-tst-x86_64-1-ENV = GLIBC_TUNABLES=glibc.cpu.hwcaps=-XSAVEC_Usable
b1dca6
+tst-x86_64-1-ENV = GLIBC_TUNABLES=glibc.cpu.hwcaps=-XSAVEC
b1dca6
 endif
b1dca6
 
b1dca6
 $(objpfx)tst-x86_64-1: $(objpfx)x86_64/tst-x86_64mod-1.so
b1dca6
@@ -71,10 +71,10 @@ CFLAGS-tst-platformmod-2.c = -mno-avx
b1dca6
 LDFLAGS-tst-platformmod-2.so = -Wl,-soname,tst-platformmod-2.so
b1dca6
 $(objpfx)tst-platform-1: $(objpfx)tst-platformmod-1.so
b1dca6
 $(objpfx)tst-platform-1.out: $(objpfx)x86_64/tst-platformmod-2.so
b1dca6
-# Turn off AVX512F_Usable and AVX2_Usable so that GLRO(dl_platform) is
b1dca6
+# Turn off AVX512F and AVX2 so that GLRO(dl_platform) is
b1dca6
 # always set to x86_64.
b1dca6
 tst-platform-1-ENV = LD_PRELOAD=$(objpfx)\$$PLATFORM/tst-platformmod-2.so \
b1dca6
-	GLIBC_TUNABLES=glibc.cpu.hwcaps=-AVX512F_Usable,-AVX2_Usable
b1dca6
+	GLIBC_TUNABLES=glibc.cpu.hwcaps=-AVX512F,-AVX2
b1dca6
 endif
b1dca6
 
b1dca6
 tests += tst-audit3 tst-audit4 tst-audit5 tst-audit6 tst-audit7 \
b1dca6
diff --git a/sysdeps/x86_64/dl-machine.h b/sysdeps/x86_64/dl-machine.h
b1dca6
index 23afb3c05dbe17d6..d58298d787ef352c 100644
b1dca6
--- a/sysdeps/x86_64/dl-machine.h
b1dca6
+++ b/sysdeps/x86_64/dl-machine.h
b1dca6
@@ -99,9 +99,9 @@ elf_machine_runtime_setup (struct link_map *l, int lazy, int profile)
b1dca6
 	 end in this function.  */
b1dca6
       if (__glibc_unlikely (profile))
b1dca6
 	{
b1dca6
-	  if (HAS_ARCH_FEATURE (AVX512F_Usable))
b1dca6
+	  if (CPU_FEATURE_USABLE (AVX512F))
b1dca6
 	    *(ElfW(Addr) *) (got + 2) = (ElfW(Addr)) &_dl_runtime_profile_avx512;
b1dca6
-	  else if (HAS_ARCH_FEATURE (AVX_Usable))
b1dca6
+	  else if (CPU_FEATURE_USABLE (AVX))
b1dca6
 	    *(ElfW(Addr) *) (got + 2) = (ElfW(Addr)) &_dl_runtime_profile_avx;
b1dca6
 	  else
b1dca6
 	    *(ElfW(Addr) *) (got + 2) = (ElfW(Addr)) &_dl_runtime_profile_sse;
b1dca6
@@ -119,7 +119,7 @@ elf_machine_runtime_setup (struct link_map *l, int lazy, int profile)
b1dca6
 	     the resolved address.  */
b1dca6
 	  if (GLRO(dl_x86_cpu_features).xsave_state_size != 0)
b1dca6
 	    *(ElfW(Addr) *) (got + 2)
b1dca6
-	      = (HAS_ARCH_FEATURE (XSAVEC_Usable)
b1dca6
+	      = (CPU_FEATURE_USABLE (XSAVEC)
b1dca6
 		 ? (ElfW(Addr)) &_dl_runtime_resolve_xsavec
b1dca6
 		 : (ElfW(Addr)) &_dl_runtime_resolve_xsave);
b1dca6
 	  else
b1dca6
diff --git a/sysdeps/x86_64/fpu/math-tests-arch.h b/sysdeps/x86_64/fpu/math-tests-arch.h
b1dca6
index a5df133292ce39b0..61955d70863321fd 100644
b1dca6
--- a/sysdeps/x86_64/fpu/math-tests-arch.h
b1dca6
+++ b/sysdeps/x86_64/fpu/math-tests-arch.h
b1dca6
@@ -24,7 +24,7 @@
b1dca6
 # define CHECK_ARCH_EXT                                        \
b1dca6
   do                                                           \
b1dca6
     {                                                          \
b1dca6
-      if (!HAS_ARCH_FEATURE (AVX_Usable)) return;              \
b1dca6
+      if (!CPU_FEATURE_USABLE (AVX)) return;                   \
b1dca6
     }                                                          \
b1dca6
   while (0)
b1dca6
 
b1dca6
@@ -34,7 +34,7 @@
b1dca6
 # define CHECK_ARCH_EXT                                        \
b1dca6
   do                                                           \
b1dca6
     {                                                          \
b1dca6
-      if (!HAS_ARCH_FEATURE (AVX2_Usable)) return;             \
b1dca6
+      if (!CPU_FEATURE_USABLE (AVX2)) return;                  \
b1dca6
     }                                                          \
b1dca6
   while (0)
b1dca6
 
b1dca6
@@ -44,7 +44,7 @@
b1dca6
 # define CHECK_ARCH_EXT                                        \
b1dca6
   do                                                           \
b1dca6
     {                                                          \
b1dca6
-      if (!HAS_ARCH_FEATURE (AVX512F_Usable)) return;          \
b1dca6
+      if (!CPU_FEATURE_USABLE (AVX512F)) return;               \
b1dca6
     }                                                          \
b1dca6
   while (0)
b1dca6
 
b1dca6
diff --git a/sysdeps/x86_64/fpu/multiarch/ifunc-avx-fma4.h b/sysdeps/x86_64/fpu/multiarch/ifunc-avx-fma4.h
b1dca6
index a5f9375afc683663..399ed90362f476b7 100644
b1dca6
--- a/sysdeps/x86_64/fpu/multiarch/ifunc-avx-fma4.h
b1dca6
+++ b/sysdeps/x86_64/fpu/multiarch/ifunc-avx-fma4.h
b1dca6
@@ -29,14 +29,14 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA_Usable)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2))
b1dca6
     return OPTIMIZE (fma);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA4_Usable))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA4))
b1dca6
     return OPTIMIZE (fma4);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX_Usable))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX))
b1dca6
     return OPTIMIZE (avx);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/fpu/multiarch/ifunc-fma.h b/sysdeps/x86_64/fpu/multiarch/ifunc-fma.h
b1dca6
index 63a8cd221fb34e28..c6717d65dfd160e7 100644
b1dca6
--- a/sysdeps/x86_64/fpu/multiarch/ifunc-fma.h
b1dca6
+++ b/sysdeps/x86_64/fpu/multiarch/ifunc-fma.h
b1dca6
@@ -26,8 +26,8 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA_Usable)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2))
b1dca6
     return OPTIMIZE (fma);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/fpu/multiarch/ifunc-fma4.h b/sysdeps/x86_64/fpu/multiarch/ifunc-fma4.h
b1dca6
index a2526a2ee0e55e18..76c677198dac5cb0 100644
b1dca6
--- a/sysdeps/x86_64/fpu/multiarch/ifunc-fma4.h
b1dca6
+++ b/sysdeps/x86_64/fpu/multiarch/ifunc-fma4.h
b1dca6
@@ -28,11 +28,11 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA_Usable)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2))
b1dca6
     return OPTIMIZE (fma);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA4_Usable))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA4))
b1dca6
     return OPTIMIZE (fma4);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-avx2.h b/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-avx2.h
b1dca6
index bd2d32e4186c11e3..d84d82a3a22f0e86 100644
b1dca6
--- a/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-avx2.h
b1dca6
+++ b/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-avx2.h
b1dca6
@@ -31,8 +31,8 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, FMA_Usable)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, FMA)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2))
b1dca6
     return OPTIMIZE (avx2);
b1dca6
 
b1dca6
   return OPTIMIZE (sse_wrapper);
b1dca6
diff --git a/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-avx512.h b/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-avx512.h
b1dca6
index 174e462cfbcfa0a5..a2d9972e5a02b87c 100644
b1dca6
--- a/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-avx512.h
b1dca6
+++ b/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-avx512.h
b1dca6
@@ -34,10 +34,10 @@ IFUNC_SELECTOR (void)
b1dca6
 
b1dca6
   if (!CPU_FEATURES_ARCH_P (cpu_features, MathVec_Prefer_No_AVX512))
b1dca6
     {
b1dca6
-      if (CPU_FEATURES_ARCH_P (cpu_features, AVX512DQ_Usable))
b1dca6
+      if (CPU_FEATURE_USABLE_P (cpu_features, AVX512DQ))
b1dca6
 	return OPTIMIZE (skx);
b1dca6
 
b1dca6
-      if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable))
b1dca6
+      if (CPU_FEATURE_USABLE_P (cpu_features, AVX512F))
b1dca6
 	return OPTIMIZE (knl);
b1dca6
     }
b1dca6
 
b1dca6
diff --git a/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-sse4_1.h b/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-sse4_1.h
b1dca6
index c1e70ebfc1b424e6..64d03f6cb1caa9b7 100644
b1dca6
--- a/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-sse4_1.h
b1dca6
+++ b/sysdeps/x86_64/fpu/multiarch/ifunc-mathvec-sse4_1.h
b1dca6
@@ -31,7 +31,7 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_1))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_1))
b1dca6
     return OPTIMIZE (sse4);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/fpu/multiarch/ifunc-sse4_1.h b/sysdeps/x86_64/fpu/multiarch/ifunc-sse4_1.h
b1dca6
index a8710ba80226f13f..81bca1c9ecde9fb7 100644
b1dca6
--- a/sysdeps/x86_64/fpu/multiarch/ifunc-sse4_1.h
b1dca6
+++ b/sysdeps/x86_64/fpu/multiarch/ifunc-sse4_1.h
b1dca6
@@ -26,7 +26,7 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_1))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_1))
b1dca6
     return OPTIMIZE (sse41);
b1dca6
 
b1dca6
   return OPTIMIZE (c);
b1dca6
diff --git a/sysdeps/x86_64/fpu/multiarch/s_fma.c b/sysdeps/x86_64/fpu/multiarch/s_fma.c
b1dca6
index 875c76d3727e6d3c..9ea8d368d08220a9 100644
b1dca6
--- a/sysdeps/x86_64/fpu/multiarch/s_fma.c
b1dca6
+++ b/sysdeps/x86_64/fpu/multiarch/s_fma.c
b1dca6
@@ -41,8 +41,8 @@ __fma_fma4 (double x, double y, double z)
b1dca6
 }
b1dca6
 
b1dca6
 
b1dca6
-libm_ifunc (__fma, HAS_ARCH_FEATURE (FMA_Usable)
b1dca6
-	    ? __fma_fma3 : (HAS_ARCH_FEATURE (FMA4_Usable)
b1dca6
+libm_ifunc (__fma, CPU_FEATURE_USABLE (FMA)
b1dca6
+	    ? __fma_fma3 : (CPU_FEATURE_USABLE (FMA4)
b1dca6
 			    ? __fma_fma4 : __fma_sse2));
b1dca6
 libm_alias_double (__fma, fma)
b1dca6
 
b1dca6
diff --git a/sysdeps/x86_64/fpu/multiarch/s_fmaf.c b/sysdeps/x86_64/fpu/multiarch/s_fmaf.c
b1dca6
index 5f4c2ec0be15c2dc..33e64ef8d1a03269 100644
b1dca6
--- a/sysdeps/x86_64/fpu/multiarch/s_fmaf.c
b1dca6
+++ b/sysdeps/x86_64/fpu/multiarch/s_fmaf.c
b1dca6
@@ -40,8 +40,8 @@ __fmaf_fma4 (float x, float y, float z)
b1dca6
 }
b1dca6
 
b1dca6
 
b1dca6
-libm_ifunc (__fmaf, HAS_ARCH_FEATURE (FMA_Usable)
b1dca6
-	    ? __fmaf_fma3 : (HAS_ARCH_FEATURE (FMA4_Usable)
b1dca6
+libm_ifunc (__fmaf, CPU_FEATURE_USABLE (FMA)
b1dca6
+	    ? __fmaf_fma3 : (CPU_FEATURE_USABLE (FMA4)
b1dca6
 			     ? __fmaf_fma4 : __fmaf_sse2));
b1dca6
 libm_alias_float (__fma, fma)
b1dca6
 
b1dca6
diff --git a/sysdeps/x86_64/multiarch/ifunc-avx2.h b/sysdeps/x86_64/multiarch/ifunc-avx2.h
b1dca6
index 9cab837642b7af21..5c88640a2d901ec6 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/ifunc-avx2.h
b1dca6
+++ b/sysdeps/x86_64/multiarch/ifunc-avx2.h
b1dca6
@@ -28,7 +28,7 @@ IFUNC_SELECTOR (void)
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
b1dca6
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
b1dca6
     return OPTIMIZE (avx2);
b1dca6
 
b1dca6
diff --git a/sysdeps/x86_64/multiarch/ifunc-impl-list.c b/sysdeps/x86_64/multiarch/ifunc-impl-list.c
b1dca6
index 8b55bb6954000cc2..fe13505ca1ac7ef0 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/ifunc-impl-list.c
b1dca6
+++ b/sysdeps/x86_64/multiarch/ifunc-impl-list.c
b1dca6
@@ -41,19 +41,19 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/memchr.c.  */
b1dca6
   IFUNC_IMPL (i, name, memchr,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memchr,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __memchr_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memchr, 1, __memchr_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/memcmp.c.  */
b1dca6
   IFUNC_IMPL (i, name, memcmp,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcmp,
b1dca6
-			      (HAS_ARCH_FEATURE (AVX2_Usable)
b1dca6
-			       && HAS_CPU_FEATURE (MOVBE)),
b1dca6
+			      (CPU_FEATURE_USABLE (AVX2)
b1dca6
+			       && CPU_FEATURE_USABLE (MOVBE)),
b1dca6
 			      __memcmp_avx2_movbe)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memcmp, HAS_CPU_FEATURE (SSE4_1),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memcmp, CPU_FEATURE_USABLE (SSE4_1),
b1dca6
 			      __memcmp_sse4_1)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memcmp, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memcmp, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memcmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcmp, 1, __memcmp_sse2))
b1dca6
 
b1dca6
@@ -61,25 +61,25 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/memmove_chk.c.  */
b1dca6
   IFUNC_IMPL (i, name, __memmove_chk,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memmove_chk_avx512_no_vzeroupper)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memmove_chk_avx512_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memmove_chk_avx512_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __memmove_chk_avx_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __memmove_chk_avx_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memmove_chk_ssse3_back)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memmove_chk_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memmove_chk, 1,
b1dca6
 			      __memmove_chk_sse2_unaligned)
b1dca6
@@ -92,23 +92,23 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/memmove.c.  */
b1dca6
   IFUNC_IMPL (i, name, memmove,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memmove,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __memmove_avx_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memmove,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __memmove_avx_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memmove,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memmove_avx512_no_vzeroupper)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memmove,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memmove_avx512_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memmove,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memmove_avx512_unaligned_erms)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memmove, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memmove, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memmove_ssse3_back)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memmove, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memmove, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memmove_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memmove, 1, __memmove_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memmove, 1,
b1dca6
@@ -119,7 +119,7 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/memrchr.c.  */
b1dca6
   IFUNC_IMPL (i, name, memrchr,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memrchr,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __memrchr_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memrchr, 1, __memrchr_sse2))
b1dca6
 
b1dca6
@@ -133,19 +133,19 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memset_chk, 1,
b1dca6
 			      __memset_chk_sse2_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memset_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __memset_chk_avx2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memset_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __memset_chk_avx2_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memset_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memset_chk_avx512_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memset_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memset_chk_avx512_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memset_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memset_chk_avx512_no_vzeroupper)
b1dca6
 	      )
b1dca6
 #endif
b1dca6
@@ -158,48 +158,48 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
 			      __memset_sse2_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memset, 1, __memset_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memset,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __memset_avx2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memset,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __memset_avx2_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memset,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memset_avx512_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memset,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memset_avx512_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memset,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memset_avx512_no_vzeroupper)
b1dca6
 	     )
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/rawmemchr.c.  */
b1dca6
   IFUNC_IMPL (i, name, rawmemchr,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, rawmemchr,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __rawmemchr_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, rawmemchr, 1, __rawmemchr_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strlen.c.  */
b1dca6
   IFUNC_IMPL (i, name, strlen,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strlen,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strlen_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strlen, 1, __strlen_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strnlen.c.  */
b1dca6
   IFUNC_IMPL (i, name, strnlen,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strnlen,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strnlen_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strnlen, 1, __strnlen_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/stpncpy.c.  */
b1dca6
   IFUNC_IMPL (i, name, stpncpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, stpncpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, stpncpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __stpncpy_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, stpncpy, HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, stpncpy, CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __stpncpy_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, stpncpy, 1,
b1dca6
 			      __stpncpy_sse2_unaligned)
b1dca6
@@ -207,9 +207,9 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/stpcpy.c.  */
b1dca6
   IFUNC_IMPL (i, name, stpcpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, stpcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, stpcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __stpcpy_ssse3)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, stpcpy, HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, stpcpy, CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __stpcpy_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, stpcpy, 1, __stpcpy_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, stpcpy, 1, __stpcpy_sse2))
b1dca6
@@ -217,35 +217,35 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/strcasecmp_l.c.  */
b1dca6
   IFUNC_IMPL (i, name, strcasecmp,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __strcasecmp_avx)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp,
b1dca6
-			      HAS_CPU_FEATURE (SSE4_2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strcasecmp_sse42)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strcasecmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp, 1, __strcasecmp_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strcasecmp_l.c.  */
b1dca6
   IFUNC_IMPL (i, name, strcasecmp_l,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp_l,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __strcasecmp_l_avx)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp_l,
b1dca6
-			      HAS_CPU_FEATURE (SSE4_2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strcasecmp_l_sse42)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp_l,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strcasecmp_l_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcasecmp_l, 1,
b1dca6
 			      __strcasecmp_l_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strcat.c.  */
b1dca6
   IFUNC_IMPL (i, name, strcat,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcat, HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcat, CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strcat_avx2)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcat, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcat, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strcat_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcat, 1, __strcat_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcat, 1, __strcat_sse2))
b1dca6
@@ -253,7 +253,7 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/strchr.c.  */
b1dca6
   IFUNC_IMPL (i, name, strchr,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strchr,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strchr_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strchr, 1, __strchr_sse2_no_bsf)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strchr, 1, __strchr_sse2))
b1dca6
@@ -261,54 +261,54 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/strchrnul.c.  */
b1dca6
   IFUNC_IMPL (i, name, strchrnul,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strchrnul,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strchrnul_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strchrnul, 1, __strchrnul_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strrchr.c.  */
b1dca6
   IFUNC_IMPL (i, name, strrchr,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strrchr,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strrchr_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strrchr, 1, __strrchr_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strcmp.c.  */
b1dca6
   IFUNC_IMPL (i, name, strcmp,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcmp,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strcmp_avx2)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcmp, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcmp, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strcmp_sse42)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcmp, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcmp, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strcmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcmp, 1, __strcmp_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcmp, 1, __strcmp_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strcpy.c.  */
b1dca6
   IFUNC_IMPL (i, name, strcpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcpy, HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcpy, CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strcpy_avx2)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strcpy_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcpy, 1, __strcpy_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcpy, 1, __strcpy_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strcspn.c.  */
b1dca6
   IFUNC_IMPL (i, name, strcspn,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strcspn, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strcspn, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strcspn_sse42)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strcspn, 1, __strcspn_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strncase_l.c.  */
b1dca6
   IFUNC_IMPL (i, name, strncasecmp,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __strncasecmp_avx)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp,
b1dca6
-			      HAS_CPU_FEATURE (SSE4_2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strncasecmp_sse42)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strncasecmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp, 1,
b1dca6
 			      __strncasecmp_sse2))
b1dca6
@@ -316,22 +316,22 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/strncase_l.c.  */
b1dca6
   IFUNC_IMPL (i, name, strncasecmp_l,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp_l,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __strncasecmp_l_avx)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp_l,
b1dca6
-			      HAS_CPU_FEATURE (SSE4_2),
b1dca6
+			      CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strncasecmp_l_sse42)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp_l,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strncasecmp_l_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncasecmp_l, 1,
b1dca6
 			      __strncasecmp_l_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strncat.c.  */
b1dca6
   IFUNC_IMPL (i, name, strncat,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncat, HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncat, CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strncat_avx2)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncat, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncat, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strncat_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncat, 1,
b1dca6
 			      __strncat_sse2_unaligned)
b1dca6
@@ -339,9 +339,9 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strncpy.c.  */
b1dca6
   IFUNC_IMPL (i, name, strncpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncpy, HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncpy, CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strncpy_avx2)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strncpy_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncpy, 1,
b1dca6
 			      __strncpy_sse2_unaligned)
b1dca6
@@ -349,14 +349,14 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strpbrk.c.  */
b1dca6
   IFUNC_IMPL (i, name, strpbrk,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strpbrk, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strpbrk, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strpbrk_sse42)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strpbrk, 1, __strpbrk_sse2))
b1dca6
 
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/strspn.c.  */
b1dca6
   IFUNC_IMPL (i, name, strspn,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strspn, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strspn, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strspn_sse42)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strspn, 1, __strspn_sse2))
b1dca6
 
b1dca6
@@ -368,70 +368,70 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/wcschr.c.  */
b1dca6
   IFUNC_IMPL (i, name, wcschr,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcschr,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __wcschr_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcschr, 1, __wcschr_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/wcsrchr.c.  */
b1dca6
   IFUNC_IMPL (i, name, wcsrchr,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcsrchr,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __wcsrchr_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcsrchr, 1, __wcsrchr_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/wcscmp.c.  */
b1dca6
   IFUNC_IMPL (i, name, wcscmp,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcscmp,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __wcscmp_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcscmp, 1, __wcscmp_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/wcsncmp.c.  */
b1dca6
   IFUNC_IMPL (i, name, wcsncmp,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcsncmp,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __wcsncmp_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcsncmp, 1, __wcsncmp_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/wcscpy.c.  */
b1dca6
   IFUNC_IMPL (i, name, wcscpy,
b1dca6
-	      IFUNC_IMPL_ADD (array, i, wcscpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, wcscpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __wcscpy_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcscpy, 1, __wcscpy_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/wcslen.c.  */
b1dca6
   IFUNC_IMPL (i, name, wcslen,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcslen,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __wcslen_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcslen, 1, __wcslen_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/wcsnlen.c.  */
b1dca6
   IFUNC_IMPL (i, name, wcsnlen,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcsnlen,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __wcsnlen_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcsnlen,
b1dca6
-			      HAS_CPU_FEATURE (SSE4_1),
b1dca6
+			      CPU_FEATURE_USABLE (SSE4_1),
b1dca6
 			      __wcsnlen_sse4_1)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wcsnlen, 1, __wcsnlen_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/wmemchr.c.  */
b1dca6
   IFUNC_IMPL (i, name, wmemchr,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wmemchr,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __wmemchr_avx2)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wmemchr, 1, __wmemchr_sse2))
b1dca6
 
b1dca6
   /* Support sysdeps/x86_64/multiarch/wmemcmp.c.  */
b1dca6
   IFUNC_IMPL (i, name, wmemcmp,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wmemcmp,
b1dca6
-			      (HAS_ARCH_FEATURE (AVX2_Usable)
b1dca6
-			       && HAS_CPU_FEATURE (MOVBE)),
b1dca6
+			      (CPU_FEATURE_USABLE (AVX2)
b1dca6
+			       && CPU_FEATURE_USABLE (MOVBE)),
b1dca6
 			      __wmemcmp_avx2_movbe)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, wmemcmp, HAS_CPU_FEATURE (SSE4_1),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, wmemcmp, CPU_FEATURE_USABLE (SSE4_1),
b1dca6
 			      __wmemcmp_sse4_1)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, wmemcmp, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, wmemcmp, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __wmemcmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wmemcmp, 1, __wmemcmp_sse2))
b1dca6
 
b1dca6
@@ -440,35 +440,35 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wmemset, 1,
b1dca6
 			      __wmemset_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wmemset,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __wmemset_avx2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, wmemset,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __wmemset_avx512_unaligned))
b1dca6
 
b1dca6
 #ifdef SHARED
b1dca6
   /* Support sysdeps/x86_64/multiarch/memcpy_chk.c.  */
b1dca6
   IFUNC_IMPL (i, name, __memcpy_chk,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memcpy_chk_avx512_no_vzeroupper)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memcpy_chk_avx512_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memcpy_chk_avx512_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __memcpy_chk_avx_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __memcpy_chk_avx_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memcpy_chk_ssse3_back)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memcpy_chk_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __memcpy_chk, 1,
b1dca6
 			      __memcpy_chk_sse2_unaligned)
b1dca6
@@ -481,23 +481,23 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/memcpy.c.  */
b1dca6
   IFUNC_IMPL (i, name, memcpy,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcpy,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __memcpy_avx_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcpy,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __memcpy_avx_unaligned_erms)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memcpy_ssse3_back)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, memcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, memcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __memcpy_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcpy,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memcpy_avx512_no_vzeroupper)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcpy,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memcpy_avx512_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcpy,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __memcpy_avx512_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcpy, 1, __memcpy_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, memcpy, 1,
b1dca6
@@ -508,25 +508,25 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/mempcpy_chk.c.  */
b1dca6
   IFUNC_IMPL (i, name, __mempcpy_chk,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __mempcpy_chk_avx512_no_vzeroupper)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __mempcpy_chk_avx512_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __mempcpy_chk_avx512_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __mempcpy_chk_avx_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __mempcpy_chk_avx_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __mempcpy_chk_ssse3_back)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk,
b1dca6
-			      HAS_CPU_FEATURE (SSSE3),
b1dca6
+			      CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __mempcpy_chk_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __mempcpy_chk, 1,
b1dca6
 			      __mempcpy_chk_sse2_unaligned)
b1dca6
@@ -539,23 +539,23 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/mempcpy.c.  */
b1dca6
   IFUNC_IMPL (i, name, mempcpy,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, mempcpy,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __mempcpy_avx512_no_vzeroupper)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, mempcpy,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __mempcpy_avx512_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, mempcpy,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __mempcpy_avx512_unaligned_erms)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, mempcpy,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __mempcpy_avx_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, mempcpy,
b1dca6
-			      HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX),
b1dca6
 			      __mempcpy_avx_unaligned_erms)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, mempcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, mempcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __mempcpy_ssse3_back)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, mempcpy, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, mempcpy, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __mempcpy_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, mempcpy, 1,
b1dca6
 			      __mempcpy_sse2_unaligned)
b1dca6
@@ -566,11 +566,11 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
   /* Support sysdeps/x86_64/multiarch/strncmp.c.  */
b1dca6
   IFUNC_IMPL (i, name, strncmp,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncmp,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __strncmp_avx2)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncmp, HAS_CPU_FEATURE (SSE4_2),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncmp, CPU_FEATURE_USABLE (SSE4_2),
b1dca6
 			      __strncmp_sse42)
b1dca6
-	      IFUNC_IMPL_ADD (array, i, strncmp, HAS_CPU_FEATURE (SSSE3),
b1dca6
+	      IFUNC_IMPL_ADD (array, i, strncmp, CPU_FEATURE_USABLE (SSSE3),
b1dca6
 			      __strncmp_ssse3)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, strncmp, 1, __strncmp_sse2))
b1dca6
 
b1dca6
@@ -580,10 +580,10 @@ __libc_ifunc_impl_list (const char *name, struct libc_ifunc_impl *array,
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __wmemset_chk, 1,
b1dca6
 			      __wmemset_chk_sse2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __wmemset_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX2_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX2),
b1dca6
 			      __wmemset_chk_avx2_unaligned)
b1dca6
 	      IFUNC_IMPL_ADD (array, i, __wmemset_chk,
b1dca6
-			      HAS_ARCH_FEATURE (AVX512F_Usable),
b1dca6
+			      CPU_FEATURE_USABLE (AVX512F),
b1dca6
 			      __wmemset_chk_avx512_unaligned))
b1dca6
 #endif
b1dca6
 
b1dca6
diff --git a/sysdeps/x86_64/multiarch/ifunc-memcmp.h b/sysdeps/x86_64/multiarch/ifunc-memcmp.h
b1dca6
index bf5ab8eb7ffd0002..6c1f3153579d19c4 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/ifunc-memcmp.h
b1dca6
+++ b/sysdeps/x86_64/multiarch/ifunc-memcmp.h
b1dca6
@@ -30,15 +30,15 @@ IFUNC_SELECTOR (void)
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
b1dca6
-      && CPU_FEATURES_CPU_P (cpu_features, MOVBE)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, MOVBE)
b1dca6
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
b1dca6
     return OPTIMIZE (avx2_movbe);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_1))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_1))
b1dca6
     return OPTIMIZE (sse4_1);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
b1dca6
     return OPTIMIZE (ssse3);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/multiarch/ifunc-memmove.h b/sysdeps/x86_64/multiarch/ifunc-memmove.h
b1dca6
index 5b1eb1c92c2f199b..5e5f02994531ec14 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/ifunc-memmove.h
b1dca6
+++ b/sysdeps/x86_64/multiarch/ifunc-memmove.h
b1dca6
@@ -45,13 +45,13 @@ IFUNC_SELECTOR (void)
b1dca6
       || CPU_FEATURES_ARCH_P (cpu_features, Prefer_FSRM))
b1dca6
     return OPTIMIZE (erms);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable)
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX512F)
b1dca6
       && !CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_AVX512))
b1dca6
     {
b1dca6
       if (CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER))
b1dca6
 	return OPTIMIZE (avx512_no_vzeroupper);
b1dca6
 
b1dca6
-      if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
b1dca6
+      if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
b1dca6
 	return OPTIMIZE (avx512_unaligned_erms);
b1dca6
 
b1dca6
       return OPTIMIZE (avx512_unaligned);
b1dca6
@@ -59,16 +59,16 @@ IFUNC_SELECTOR (void)
b1dca6
 
b1dca6
   if (CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
b1dca6
     {
b1dca6
-      if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
b1dca6
+      if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
b1dca6
 	return OPTIMIZE (avx_unaligned_erms);
b1dca6
 
b1dca6
       return OPTIMIZE (avx_unaligned);
b1dca6
     }
b1dca6
 
b1dca6
-  if (!CPU_FEATURES_CPU_P (cpu_features, SSSE3)
b1dca6
+  if (!CPU_FEATURE_USABLE_P (cpu_features, SSSE3)
b1dca6
       || CPU_FEATURES_ARCH_P (cpu_features, Fast_Unaligned_Copy))
b1dca6
     {
b1dca6
-      if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
b1dca6
+      if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
b1dca6
 	return OPTIMIZE (sse2_unaligned_erms);
b1dca6
 
b1dca6
       return OPTIMIZE (sse2_unaligned);
b1dca6
diff --git a/sysdeps/x86_64/multiarch/ifunc-memset.h b/sysdeps/x86_64/multiarch/ifunc-memset.h
b1dca6
index 19b5ae676c2d5d53..708bd72e2c3d3963 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/ifunc-memset.h
b1dca6
+++ b/sysdeps/x86_64/multiarch/ifunc-memset.h
b1dca6
@@ -42,27 +42,27 @@ IFUNC_SELECTOR (void)
b1dca6
   if (CPU_FEATURES_ARCH_P (cpu_features, Prefer_ERMS))
b1dca6
     return OPTIMIZE (erms);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable)
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX512F)
b1dca6
       && !CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_AVX512))
b1dca6
     {
b1dca6
       if (CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER))
b1dca6
 	return OPTIMIZE (avx512_no_vzeroupper);
b1dca6
 
b1dca6
-      if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
b1dca6
+      if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
b1dca6
 	return OPTIMIZE (avx512_unaligned_erms);
b1dca6
 
b1dca6
       return OPTIMIZE (avx512_unaligned);
b1dca6
     }
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX2))
b1dca6
     {
b1dca6
-      if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
b1dca6
+      if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
b1dca6
 	return OPTIMIZE (avx2_unaligned_erms);
b1dca6
       else
b1dca6
 	return OPTIMIZE (avx2_unaligned);
b1dca6
     }
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, ERMS))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, ERMS))
b1dca6
     return OPTIMIZE (sse2_unaligned_erms);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2_unaligned);
b1dca6
diff --git a/sysdeps/x86_64/multiarch/ifunc-sse4_2.h b/sysdeps/x86_64/multiarch/ifunc-sse4_2.h
b1dca6
index f2b791cccf12c425..73383f4b583b29c8 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/ifunc-sse4_2.h
b1dca6
+++ b/sysdeps/x86_64/multiarch/ifunc-sse4_2.h
b1dca6
@@ -27,7 +27,7 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_2))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_2))
b1dca6
     return OPTIMIZE (sse42);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/multiarch/ifunc-strcasecmp.h b/sysdeps/x86_64/multiarch/ifunc-strcasecmp.h
b1dca6
index 1ca170b663a4e65c..6a4bb07849a11f51 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/ifunc-strcasecmp.h
b1dca6
+++ b/sysdeps/x86_64/multiarch/ifunc-strcasecmp.h
b1dca6
@@ -29,14 +29,14 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_ARCH_P (cpu_features, AVX_Usable))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, AVX))
b1dca6
     return OPTIMIZE (avx);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_2)
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_2)
b1dca6
       && !CPU_FEATURES_ARCH_P (cpu_features, Slow_SSE4_2))
b1dca6
     return OPTIMIZE (sse42);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
b1dca6
     return OPTIMIZE (ssse3);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/multiarch/ifunc-strcpy.h b/sysdeps/x86_64/multiarch/ifunc-strcpy.h
b1dca6
index 4f2286fefccda069..100dca5cde0ecac5 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/ifunc-strcpy.h
b1dca6
+++ b/sysdeps/x86_64/multiarch/ifunc-strcpy.h
b1dca6
@@ -32,14 +32,14 @@ IFUNC_SELECTOR (void)
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
b1dca6
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
b1dca6
     return OPTIMIZE (avx2);
b1dca6
 
b1dca6
   if (CPU_FEATURES_ARCH_P (cpu_features, Fast_Unaligned_Load))
b1dca6
     return OPTIMIZE (sse2_unaligned);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
b1dca6
     return OPTIMIZE (ssse3);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/multiarch/ifunc-wmemset.h b/sysdeps/x86_64/multiarch/ifunc-wmemset.h
b1dca6
index 2f1085f5fc483c70..eb2422104751b235 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/ifunc-wmemset.h
b1dca6
+++ b/sysdeps/x86_64/multiarch/ifunc-wmemset.h
b1dca6
@@ -28,10 +28,10 @@ IFUNC_SELECTOR (void)
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
b1dca6
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
b1dca6
     {
b1dca6
-      if (CPU_FEATURES_ARCH_P (cpu_features, AVX512F_Usable)
b1dca6
+      if (CPU_FEATURE_USABLE_P (cpu_features, AVX512F)
b1dca6
 	  && !CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_AVX512))
b1dca6
 	return OPTIMIZE (avx512_unaligned);
b1dca6
       else
b1dca6
diff --git a/sysdeps/x86_64/multiarch/sched_cpucount.c b/sysdeps/x86_64/multiarch/sched_cpucount.c
b1dca6
index 7949119dcdb5a94b..b38ff37c6511ca1b 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/sched_cpucount.c
b1dca6
+++ b/sysdeps/x86_64/multiarch/sched_cpucount.c
b1dca6
@@ -33,4 +33,4 @@
b1dca6
 #undef __sched_cpucount
b1dca6
 
b1dca6
 libc_ifunc (__sched_cpucount,
b1dca6
-	    HAS_CPU_FEATURE (POPCNT) ? popcount_cpucount : generic_cpucount);
b1dca6
+	    CPU_FEATURE_USABLE (POPCNT) ? popcount_cpucount : generic_cpucount);
b1dca6
diff --git a/sysdeps/x86_64/multiarch/strchr.c b/sysdeps/x86_64/multiarch/strchr.c
b1dca6
index 76d64fb378e9bbac..329547132c3a301b 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/strchr.c
b1dca6
+++ b/sysdeps/x86_64/multiarch/strchr.c
b1dca6
@@ -36,7 +36,7 @@ IFUNC_SELECTOR (void)
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
b1dca6
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
b1dca6
     return OPTIMIZE (avx2);
b1dca6
 
b1dca6
diff --git a/sysdeps/x86_64/multiarch/strcmp.c b/sysdeps/x86_64/multiarch/strcmp.c
b1dca6
index b903e418df151ec1..3f433fbccf9e7121 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/strcmp.c
b1dca6
+++ b/sysdeps/x86_64/multiarch/strcmp.c
b1dca6
@@ -37,14 +37,14 @@ IFUNC_SELECTOR (void)
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
b1dca6
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
b1dca6
     return OPTIMIZE (avx2);
b1dca6
 
b1dca6
   if (CPU_FEATURES_ARCH_P (cpu_features, Fast_Unaligned_Load))
b1dca6
     return OPTIMIZE (sse2_unaligned);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
b1dca6
     return OPTIMIZE (ssse3);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/multiarch/strncmp.c b/sysdeps/x86_64/multiarch/strncmp.c
b1dca6
index 02b6d0b6f5717e2a..686d654f3da84379 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/strncmp.c
b1dca6
+++ b/sysdeps/x86_64/multiarch/strncmp.c
b1dca6
@@ -37,15 +37,15 @@ IFUNC_SELECTOR (void)
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
b1dca6
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
b1dca6
     return OPTIMIZE (avx2);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_2)
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_2)
b1dca6
       && !CPU_FEATURES_ARCH_P (cpu_features, Slow_SSE4_2))
b1dca6
     return OPTIMIZE (sse42);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
b1dca6
     return OPTIMIZE (ssse3);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/multiarch/test-multiarch.c b/sysdeps/x86_64/multiarch/test-multiarch.c
b1dca6
index 417147c3d5f325a5..cc2ea56a6753402d 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/test-multiarch.c
b1dca6
+++ b/sysdeps/x86_64/multiarch/test-multiarch.c
b1dca6
@@ -75,18 +75,18 @@ do_test (int argc, char **argv)
b1dca6
   int fails;
b1dca6
 
b1dca6
   get_cpuinfo ();
b1dca6
-  fails = check_proc ("avx", HAS_ARCH_FEATURE (AVX_Usable),
b1dca6
-		      "HAS_ARCH_FEATURE (AVX_Usable)");
b1dca6
-  fails += check_proc ("fma4", HAS_ARCH_FEATURE (FMA4_Usable),
b1dca6
-		       "HAS_ARCH_FEATURE (FMA4_Usable)");
b1dca6
-  fails += check_proc ("sse4_2", HAS_CPU_FEATURE (SSE4_2),
b1dca6
-		       "HAS_CPU_FEATURE (SSE4_2)");
b1dca6
-  fails += check_proc ("sse4_1", HAS_CPU_FEATURE (SSE4_1)
b1dca6
-		       , "HAS_CPU_FEATURE (SSE4_1)");
b1dca6
-  fails += check_proc ("ssse3", HAS_CPU_FEATURE (SSSE3),
b1dca6
-		       "HAS_CPU_FEATURE (SSSE3)");
b1dca6
-  fails += check_proc ("popcnt", HAS_CPU_FEATURE (POPCNT),
b1dca6
-		       "HAS_CPU_FEATURE (POPCNT)");
b1dca6
+  fails = check_proc ("avx", CPU_FEATURE_USABLE (AVX),
b1dca6
+		      "CPU_FEATURE_USABLE (AVX)");
b1dca6
+  fails += check_proc ("fma4", CPU_FEATURE_USABLE (FMA4),
b1dca6
+		       "CPU_FEATURE_USABLE (FMA4)");
b1dca6
+  fails += check_proc ("sse4_2", CPU_FEATURE_USABLE (SSE4_2),
b1dca6
+		       "CPU_FEATURE_USABLE (SSE4_2)");
b1dca6
+  fails += check_proc ("sse4_1", CPU_FEATURE_USABLE (SSE4_1)
b1dca6
+		       , "CPU_FEATURE_USABLE (SSE4_1)");
b1dca6
+  fails += check_proc ("ssse3", CPU_FEATURE_USABLE (SSSE3),
b1dca6
+		       "CPU_FEATURE_USABLE (SSSE3)");
b1dca6
+  fails += check_proc ("popcnt", CPU_FEATURE_USABLE (POPCNT),
b1dca6
+		       "CPU_FEATURE_USABLE (POPCNT)");
b1dca6
 
b1dca6
   printf ("%d differences between /proc/cpuinfo and glibc code.\n", fails);
b1dca6
 
b1dca6
diff --git a/sysdeps/x86_64/multiarch/wcscpy.c b/sysdeps/x86_64/multiarch/wcscpy.c
b1dca6
index f23b1fd853a4dcb4..8fffb5c3163ab3e4 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/wcscpy.c
b1dca6
+++ b/sysdeps/x86_64/multiarch/wcscpy.c
b1dca6
@@ -34,7 +34,7 @@ IFUNC_SELECTOR (void)
b1dca6
 {
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSSE3))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSSE3))
b1dca6
     return OPTIMIZE (ssse3);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);
b1dca6
diff --git a/sysdeps/x86_64/multiarch/wcsnlen.c b/sysdeps/x86_64/multiarch/wcsnlen.c
b1dca6
index bd376057e3e26ed6..b3144c938df70b1e 100644
b1dca6
--- a/sysdeps/x86_64/multiarch/wcsnlen.c
b1dca6
+++ b/sysdeps/x86_64/multiarch/wcsnlen.c
b1dca6
@@ -36,11 +36,11 @@ IFUNC_SELECTOR (void)
b1dca6
   const struct cpu_features* cpu_features = __get_cpu_features ();
b1dca6
 
b1dca6
   if (!CPU_FEATURES_ARCH_P (cpu_features, Prefer_No_VZEROUPPER)
b1dca6
-      && CPU_FEATURES_ARCH_P (cpu_features, AVX2_Usable)
b1dca6
+      && CPU_FEATURE_USABLE_P (cpu_features, AVX2)
b1dca6
       && CPU_FEATURES_ARCH_P (cpu_features, AVX_Fast_Unaligned_Load))
b1dca6
     return OPTIMIZE (avx2);
b1dca6
 
b1dca6
-  if (CPU_FEATURES_CPU_P (cpu_features, SSE4_1))
b1dca6
+  if (CPU_FEATURE_USABLE_P (cpu_features, SSE4_1))
b1dca6
     return OPTIMIZE (sse4_1);
b1dca6
 
b1dca6
   return OPTIMIZE (sse2);