Diffstat (limited to 'openssl/crypto/modes')
-rw-r--r--  openssl/crypto/modes/asm/ghash-x86.pl      6
-rw-r--r--  openssl/crypto/modes/asm/ghash-x86_64.pl   3
-rw-r--r--  openssl/crypto/modes/gcm128.c              4
3 files changed, 7 insertions, 6 deletions
diff --git a/openssl/crypto/modes/asm/ghash-x86.pl b/openssl/crypto/modes/asm/ghash-x86.pl
index 6b09669d4..83c727e07 100644
--- a/openssl/crypto/modes/asm/ghash-x86.pl
+++ b/openssl/crypto/modes/asm/ghash-x86.pl
@@ -635,7 +635,7 @@ sub mmx_loop() {
{ my @lo = ("mm0","mm1","mm2");
my @hi = ("mm3","mm4","mm5");
my @tmp = ("mm6","mm7");
- my $off1=0,$off2=0,$i;
+ my ($off1,$off2,$i) = (0,0,);
&add ($Htbl,128); # optimize for size
&lea ("edi",&DWP(16+128,"esp"));
@@ -883,7 +883,7 @@ sub reduction_alg9 { # 17/13 times faster than Intel version
my ($Xhi,$Xi) = @_;
# 1st phase
- &movdqa ($T1,$Xi) #
+ &movdqa ($T1,$Xi); #
&psllq ($Xi,1);
&pxor ($Xi,$T1); #
&psllq ($Xi,5); #
@@ -1019,7 +1019,7 @@ my ($Xhi,$Xi) = @_;
&movdqa ($Xhn,$Xn);
&pxor ($Xhi,$T1); # "Ii+Xi", consume early
- &movdqa ($T1,$Xi) #&reduction_alg9($Xhi,$Xi); 1st phase
+ &movdqa ($T1,$Xi); #&reduction_alg9($Xhi,$Xi); 1st phase
&psllq ($Xi,1);
&pxor ($Xi,$T1); #
&psllq ($Xi,5); #
diff --git a/openssl/crypto/modes/asm/ghash-x86_64.pl b/openssl/crypto/modes/asm/ghash-x86_64.pl
index a5ae18088..38d779edb 100644
--- a/openssl/crypto/modes/asm/ghash-x86_64.pl
+++ b/openssl/crypto/modes/asm/ghash-x86_64.pl
@@ -50,7 +50,8 @@ $0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or
die "can't locate x86_64-xlate.pl";
-open STDOUT,"| $^X $xlate $flavour $output";
+open OUT,"| \"$^X\" $xlate $flavour $output";
+*STDOUT=*OUT;
# common register layout
$nlo="%rax";
diff --git a/openssl/crypto/modes/gcm128.c b/openssl/crypto/modes/gcm128.c
index 7d6d03497..0e6ff8b0a 100644
--- a/openssl/crypto/modes/gcm128.c
+++ b/openssl/crypto/modes/gcm128.c
@@ -723,7 +723,7 @@ void CRYPTO_gcm128_init(GCM128_CONTEXT *ctx,void *key,block128_f block)
# endif
gcm_init_4bit(ctx->Htable,ctx->H.u);
# if defined(GHASH_ASM_X86) /* x86 only */
-# if defined(OPENSSL_IA32_SSE2)
+#  if defined(OPENSSL_IA32_SSE2)
if (OPENSSL_ia32cap_P[0]&(1<<25)) { /* check SSE bit */
# else
if (OPENSSL_ia32cap_P[0]&(1<<23)) { /* check MMX bit */
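For reference, the two feature tests in the hunk above read bits from OpenSSL's cached CPUID word: OPENSSL_ia32cap_P[0] mirrors EDX of CPUID leaf 1, where bit 23 signals MMX and bit 25 signals SSE, and the result gates the assembly GHASH path selected a few lines further down. A minimal sketch of that kind of bit test, assuming an illustrative helper name (cap_bit_set) rather than anything taken from gcm128.c:

    #include <stdint.h>

    /* Illustrative helper, not OpenSSL code: test one feature bit in a
     * capability word such as OPENSSL_ia32cap_P[0] (EDX of CPUID leaf 1). */
    static int cap_bit_set(uint32_t cap_word, int bit)
    {
        return (cap_word >> bit) & 1;
    }

    /* usage: cap_bit_set(cap, 25) -> SSE present; cap_bit_set(cap, 23) -> MMX present */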
@@ -1398,7 +1398,7 @@ int CRYPTO_gcm128_finish(GCM128_CONTEXT *ctx,const unsigned char *tag,
void (*gcm_gmult_p)(u64 Xi[2],const u128 Htable[16]) = ctx->gmult;
#endif
- if (ctx->mres)
+ if (ctx->mres || ctx->ares)
GCM_MUL(ctx,Xi);
if (is_endian.little) {
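The second gcm128.c hunk is the one behavioral fix in this diff. In gcm128.c, ctx->ares counts AAD bytes that have been XORed into the GHASH accumulator Xi but not yet multiplied by H, and ctx->mres does the same for ciphertext bytes; an AAD-only (GMAC-style) message whose length is not a multiple of 16 therefore reaches CRYPTO_gcm128_finish with mres == 0 but ares != 0, and testing only mres would skip the final GCM_MUL and yield a wrong tag. A stripped-down sketch of that bookkeeping, using assumed names (gcm_sketch, ghash_mul, gcm_sketch_finish) rather than OpenSSL's actual types:

    #include <stddef.h>
    #include <stdint.h>

    /* Illustrative structure only; OpenSSL's GCM128_CONTEXT holds more state. */
    typedef struct {
        uint8_t Xi[16];   /* running GHASH accumulator */
        size_t  ares;     /* AAD bytes mixed into Xi, final multiply still pending */
        size_t  mres;     /* ciphertext bytes mixed into Xi, multiply still pending */
    } gcm_sketch;

    static void ghash_mul(gcm_sketch *ctx)   /* stand-in for Xi <- Xi * H */
    {
        (void)ctx;
    }

    static void gcm_sketch_finish(gcm_sketch *ctx)
    {
        /* Checking mres alone (the pre-fix behaviour) leaves the buffered
         * AAD block out of the hash when no message data followed it. */
        if (ctx->mres || ctx->ares)
            ghash_mul(ctx);
        /* ... the 128-bit length block and the final XOR with E(K, Y0)
         *     would follow here to produce the tag ... */
    }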