author    Andy Polyakov <appro@openssl.org>  2017-03-12 14:45:06 +0100
committer Andy Polyakov <appro@openssl.org>  2017-03-13 18:42:10 +0100
commit    1aed5e1ac28790cc915ad03e86e2d5e896a4ea13 (patch)
tree      09b002ec478012fb1315acaa81b72de3719eaba8 /crypto/x86_64cpuid.pl
parent    Document in CHANGES that config now recognises 64-bit mingw (diff)
crypto/x86*cpuid.pl: move extended feature detection.
Extended feature flags were not pulled on AMD processors, and as a result a number of extensions were effectively masked on Ryzen. The original fix for x86_64cpuid.pl addressed this problem but messed up processor vendor detection. This fix moves extended feature detection past basic feature detection, where it belongs. The 32-bit counterpart is harmonized too.

Reviewed-by: Rich Salz <rsalz@openssl.org>
Reviewed-by: Richard Levitte <levitte@openssl.org>
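For context, the reordering comes down to a basic CPUID rule: leaf 7 (the extended feature flags) may only be queried after leaf 0 has reported that the processor supports at least that many standard leaves; querying it unconditionally, and before the vendor string is examined, is what broke the old code. The stand-alone C sketch below illustrates that ordering using GCC/Clang's <cpuid.h>. It is an illustrative approximation, not OpenSSL code, and the variable names are made up.

    #include <stdio.h>
    #include <cpuid.h>

    int main(void)
    {
        unsigned int sig = 0;
        /* EAX=0: highest supported standard leaf (the value kept in %r11d in the
         * assembly below), with the vendor signature word returned via *sig. */
        unsigned int max_leaf = __get_cpuid_max(0, &sig);
        unsigned int ext_flags = 0;   /* analogue of the word cleared at 8(%rdi) */

        if (max_leaf >= 7) {          /* mirrors "cmp $7,%r11d; jb .Lno_extended_info" */
            unsigned int eax, ebx, ecx, edx;
            __cpuid_count(7, 0, eax, ebx, ecx, edx);  /* leaf 7, subleaf 0 */
            ext_flags = ebx;          /* EBX carries flags such as AVX2 and BMI1/2 */
        }

        printf("max standard leaf: %u, leaf-7 EBX flags: 0x%08x\n", max_leaf, ext_flags);
        return 0;
    }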
Diffstat (limited to 'crypto/x86_64cpuid.pl')
-rw-r--r--  crypto/x86_64cpuid.pl | 21
1 file changed, 10 insertions(+), 11 deletions(-)
diff --git a/crypto/x86_64cpuid.pl b/crypto/x86_64cpuid.pl
index c2a7d72b0e..2467af7e9e 100644
--- a/crypto/x86_64cpuid.pl
+++ b/crypto/x86_64cpuid.pl
@@ -68,20 +68,10 @@ OPENSSL_ia32_cpuid:
.cfi_register %rbx,%r8
xor %eax,%eax
- mov %eax,8(%rdi) # clear 3rd word
+ mov %eax,8(%rdi) # clear extended feature flags
cpuid
mov %eax,%r11d # max value for standard query level
- cmp \$7,%eax
- jb .Lno_extended_info
-
- mov \$7,%eax
- xor %ecx,%ecx
- cpuid
- mov %ebx,8(%rdi)
-
-.Lno_extended_info:
-
xor %eax,%eax
cmp \$0x756e6547,%ebx # "Genu"
setne %al
@@ -175,6 +165,15 @@ OPENSSL_ia32_cpuid:
or %ecx,%r9d # merge AMD XOP flag
mov %edx,%r10d # %r9d:%r10d is copy of %ecx:%edx
+
+ cmp \$7,%r11d
+ jb .Lno_extended_info
+ mov \$7,%eax
+ xor %ecx,%ecx
+ cpuid
+ mov %ebx,8(%rdi) # save extended feature flags
+.Lno_extended_info:
+
bt \$27,%r9d # check OSXSAVE bit
jnc .Lclear_avx
xor %ecx,%ecx # XCR0