| author | miod <> | 2014-04-13 15:16:40 +0000 |
|---|---|---|
| committer | miod <> | 2014-04-13 15:16:40 +0000 |
| commit | dbf195d7fd4efbe4ee578f1dc5c72e7f3deadf18 (patch) | |
| tree | 0de23bccc2c4fc24d466d6a4291987efc8e44770 /src/lib/libcrypto/modes | |
| parent | 210506dd94a697493eb809fb31df7283912ded73 (diff) | |
| parent | 52628ee3f51f011b463aaedb1a28aa0524b43cb3 (diff) | |
This commit was generated by cvs2git to track changes on a CVS vendor
branch.
Diffstat (limited to 'src/lib/libcrypto/modes')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | src/lib/libcrypto/modes/asm/ghash-alpha.pl | 25 |
| -rw-r--r-- | src/lib/libcrypto/modes/asm/ghash-parisc.pl | 1 |
| -rw-r--r-- | src/lib/libcrypto/modes/asm/ghash-x86.pl | 6 |
| -rw-r--r-- | src/lib/libcrypto/modes/asm/ghash-x86_64.pl | 3 |
| -rw-r--r-- | src/lib/libcrypto/modes/cbc128.c | 25 |
| -rw-r--r-- | src/lib/libcrypto/modes/ccm128.c | 2 |
| -rw-r--r-- | src/lib/libcrypto/modes/cts128.c | 28 |
| -rw-r--r-- | src/lib/libcrypto/modes/gcm128.c | 196 |
| -rw-r--r-- | src/lib/libcrypto/modes/modes_lcl.h | 9 |
9 files changed, 221 insertions, 74 deletions
diff --git a/src/lib/libcrypto/modes/asm/ghash-alpha.pl b/src/lib/libcrypto/modes/asm/ghash-alpha.pl
index 6358b2750f..aa36029386 100644
--- a/src/lib/libcrypto/modes/asm/ghash-alpha.pl
+++ b/src/lib/libcrypto/modes/asm/ghash-alpha.pl
| @@ -266,8 +266,8 @@ gcm_gmult_4bit: | |||
| 266 | ldq $Xlo,8($Xi) | 266 | ldq $Xlo,8($Xi) |
| 267 | ldq $Xhi,0($Xi) | 267 | ldq $Xhi,0($Xi) |
| 268 | 268 | ||
| 269 | br $rem_4bit,.Lpic1 | 269 | bsr $t0,picmeup |
| 270 | .Lpic1: lda $rem_4bit,rem_4bit-.Lpic1($rem_4bit) | 270 | nop |
| 271 | ___ | 271 | ___ |
| 272 | 272 | ||
| 273 | &loop(); | 273 | &loop(); |
| @@ -341,8 +341,8 @@ gcm_ghash_4bit: | |||
| 341 | ldq $Xhi,0($Xi) | 341 | ldq $Xhi,0($Xi) |
| 342 | ldq $Xlo,8($Xi) | 342 | ldq $Xlo,8($Xi) |
| 343 | 343 | ||
| 344 | br $rem_4bit,.Lpic2 | 344 | bsr $t0,picmeup |
| 345 | .Lpic2: lda $rem_4bit,rem_4bit-.Lpic2($rem_4bit) | 345 | nop |
| 346 | 346 | ||
| 347 | .Louter: | 347 | .Louter: |
| 348 | extql $inhi,$inp,$inhi | 348 | extql $inhi,$inp,$inhi |
| @@ -436,11 +436,20 @@ $code.=<<___; | |||
| 436 | .end gcm_ghash_4bit | 436 | .end gcm_ghash_4bit |
| 437 | 437 | ||
| 438 | .align 4 | 438 | .align 4 |
| 439 | .ent picmeup | ||
| 440 | picmeup: | ||
| 441 | .frame sp,0,$t0 | ||
| 442 | .prologue 0 | ||
| 443 | br $rem_4bit,.Lpic | ||
| 444 | .Lpic: lda $rem_4bit,12($rem_4bit) | ||
| 445 | ret ($t0) | ||
| 446 | .end picmeup | ||
| 447 | nop | ||
| 439 | rem_4bit: | 448 | rem_4bit: |
| 440 | .quad 0x0000<<48, 0x1C20<<48, 0x3840<<48, 0x2460<<48 | 449 | .long 0,0x0000<<16, 0,0x1C20<<16, 0,0x3840<<16, 0,0x2460<<16 |
| 441 | .quad 0x7080<<48, 0x6CA0<<48, 0x48C0<<48, 0x54E0<<48 | 450 | .long 0,0x7080<<16, 0,0x6CA0<<16, 0,0x48C0<<16, 0,0x54E0<<16 |
| 442 | .quad 0xE100<<48, 0xFD20<<48, 0xD940<<48, 0xC560<<48 | 451 | .long 0,0xE100<<16, 0,0xFD20<<16, 0,0xD940<<16, 0,0xC560<<16 |
| 443 | .quad 0x9180<<48, 0x8DA0<<48, 0xA9C0<<48, 0xB5E0<<48 | 452 | .long 0,0x9180<<16, 0,0x8DA0<<16, 0,0xA9C0<<16, 0,0xB5E0<<16 |
| 444 | .ascii "GHASH for Alpha, CRYPTOGAMS by <appro\@openssl.org>" | 453 | .ascii "GHASH for Alpha, CRYPTOGAMS by <appro\@openssl.org>" |
| 445 | .align 4 | 454 | .align 4 |
| 446 | 455 | ||
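
Two things change in the Alpha GHASH code: the PIC address of rem_4bit is now loaded through a small picmeup subroutine, and the rem_4bit table is re-encoded from 64-bit `.quad X<<48` entries into pairs of 32-bit `.long 0, X<<16` words. On a little-endian machine such as the Alpha the two encodings lay down identical bytes; a minimal, purely illustrative C check of that equivalence (not part of the commit, assumes a little-endian host):

```c
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main(void)
{
	/* one representative rem_4bit entry in both encodings */
	uint64_t quad = (uint64_t)0x1C20 << 48;		/* .quad 0x1C20<<48    */
	uint32_t longs[2] = { 0, 0x1C20 << 16 };	/* .long 0,0x1C20<<16  */

	/* identical in memory on a little-endian host (low word first) */
	assert(sizeof(quad) == sizeof(longs));
	assert(memcmp(&quad, longs, sizeof(quad)) == 0);
	return 0;
}
```
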
diff --git a/src/lib/libcrypto/modes/asm/ghash-parisc.pl b/src/lib/libcrypto/modes/asm/ghash-parisc.pl
index 8c7454ee93..d5ad96b403 100644
--- a/src/lib/libcrypto/modes/asm/ghash-parisc.pl
+++ b/src/lib/libcrypto/modes/asm/ghash-parisc.pl
| @@ -724,6 +724,7 @@ foreach (split("\n",$code)) { | |||
| 724 | s/cmpb,\*/comb,/; | 724 | s/cmpb,\*/comb,/; |
| 725 | s/,\*/,/; | 725 | s/,\*/,/; |
| 726 | } | 726 | } |
| 727 | s/\bbv\b/bve/ if ($SIZE_T==8); | ||
| 727 | print $_,"\n"; | 728 | print $_,"\n"; |
| 728 | } | 729 | } |
| 729 | 730 | ||
diff --git a/src/lib/libcrypto/modes/asm/ghash-x86.pl b/src/lib/libcrypto/modes/asm/ghash-x86.pl
index 6b09669d47..83c727e07f 100644
--- a/src/lib/libcrypto/modes/asm/ghash-x86.pl
+++ b/src/lib/libcrypto/modes/asm/ghash-x86.pl
| @@ -635,7 +635,7 @@ sub mmx_loop() { | |||
| 635 | { my @lo = ("mm0","mm1","mm2"); | 635 | { my @lo = ("mm0","mm1","mm2"); |
| 636 | my @hi = ("mm3","mm4","mm5"); | 636 | my @hi = ("mm3","mm4","mm5"); |
| 637 | my @tmp = ("mm6","mm7"); | 637 | my @tmp = ("mm6","mm7"); |
| 638 | my $off1=0,$off2=0,$i; | 638 | my ($off1,$off2,$i) = (0,0,); |
| 639 | 639 | ||
| 640 | &add ($Htbl,128); # optimize for size | 640 | &add ($Htbl,128); # optimize for size |
| 641 | &lea ("edi",&DWP(16+128,"esp")); | 641 | &lea ("edi",&DWP(16+128,"esp")); |
| @@ -883,7 +883,7 @@ sub reduction_alg9 { # 17/13 times faster than Intel version | |||
| 883 | my ($Xhi,$Xi) = @_; | 883 | my ($Xhi,$Xi) = @_; |
| 884 | 884 | ||
| 885 | # 1st phase | 885 | # 1st phase |
| 886 | &movdqa ($T1,$Xi) # | 886 | &movdqa ($T1,$Xi); # |
| 887 | &psllq ($Xi,1); | 887 | &psllq ($Xi,1); |
| 888 | &pxor ($Xi,$T1); # | 888 | &pxor ($Xi,$T1); # |
| 889 | &psllq ($Xi,5); # | 889 | &psllq ($Xi,5); # |
| @@ -1019,7 +1019,7 @@ my ($Xhi,$Xi) = @_; | |||
| 1019 | &movdqa ($Xhn,$Xn); | 1019 | &movdqa ($Xhn,$Xn); |
| 1020 | &pxor ($Xhi,$T1); # "Ii+Xi", consume early | 1020 | &pxor ($Xhi,$T1); # "Ii+Xi", consume early |
| 1021 | 1021 | ||
| 1022 | &movdqa ($T1,$Xi) #&reduction_alg9($Xhi,$Xi); 1st phase | 1022 | &movdqa ($T1,$Xi); #&reduction_alg9($Xhi,$Xi); 1st phase |
| 1023 | &psllq ($Xi,1); | 1023 | &psllq ($Xi,1); |
| 1024 | &pxor ($Xi,$T1); # | 1024 | &pxor ($Xi,$T1); # |
| 1025 | &psllq ($Xi,5); # | 1025 | &psllq ($Xi,5); # |
diff --git a/src/lib/libcrypto/modes/asm/ghash-x86_64.pl b/src/lib/libcrypto/modes/asm/ghash-x86_64.pl
index a5ae180882..38d779edbc 100644
--- a/src/lib/libcrypto/modes/asm/ghash-x86_64.pl
+++ b/src/lib/libcrypto/modes/asm/ghash-x86_64.pl
| @@ -50,7 +50,8 @@ $0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1; | |||
| 50 | ( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or | 50 | ( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or |
| 51 | die "can't locate x86_64-xlate.pl"; | 51 | die "can't locate x86_64-xlate.pl"; |
| 52 | 52 | ||
| 53 | open STDOUT,"| $^X $xlate $flavour $output"; | 53 | open OUT,"| \"$^X\" $xlate $flavour $output"; |
| 54 | *STDOUT=*OUT; | ||
| 54 | 55 | ||
| 55 | # common register layout | 56 | # common register layout |
| 56 | $nlo="%rax"; | 57 | $nlo="%rax"; |
diff --git a/src/lib/libcrypto/modes/cbc128.c b/src/lib/libcrypto/modes/cbc128.c
index 3d3782cbe1..0e54f75470 100644
--- a/src/lib/libcrypto/modes/cbc128.c
+++ b/src/lib/libcrypto/modes/cbc128.c
| @@ -117,7 +117,7 @@ void CRYPTO_cbc128_decrypt(const unsigned char *in, unsigned char *out, | |||
| 117 | unsigned char ivec[16], block128_f block) | 117 | unsigned char ivec[16], block128_f block) |
| 118 | { | 118 | { |
| 119 | size_t n; | 119 | size_t n; |
| 120 | union { size_t align; unsigned char c[16]; } tmp; | 120 | union { size_t t[16/sizeof(size_t)]; unsigned char c[16]; } tmp; |
| 121 | 121 | ||
| 122 | assert(in && out && key && ivec); | 122 | assert(in && out && key && ivec); |
| 123 | 123 | ||
| @@ -137,11 +137,13 @@ void CRYPTO_cbc128_decrypt(const unsigned char *in, unsigned char *out, | |||
| 137 | out += 16; | 137 | out += 16; |
| 138 | } | 138 | } |
| 139 | } | 139 | } |
| 140 | else { | 140 | else if (16%sizeof(size_t) == 0) { /* always true */ |
| 141 | while (len>=16) { | 141 | while (len>=16) { |
| 142 | size_t *out_t=(size_t *)out, *iv_t=(size_t *)iv; | ||
| 143 | |||
| 142 | (*block)(in, out, key); | 144 | (*block)(in, out, key); |
| 143 | for(n=0; n<16; n+=sizeof(size_t)) | 145 | for(n=0; n<16/sizeof(size_t); n++) |
| 144 | *(size_t *)(out+n) ^= *(size_t *)(iv+n); | 146 | out_t[n] ^= iv_t[n]; |
| 145 | iv = in; | 147 | iv = in; |
| 146 | len -= 16; | 148 | len -= 16; |
| 147 | in += 16; | 149 | in += 16; |
| @@ -165,15 +167,16 @@ void CRYPTO_cbc128_decrypt(const unsigned char *in, unsigned char *out, | |||
| 165 | out += 16; | 167 | out += 16; |
| 166 | } | 168 | } |
| 167 | } | 169 | } |
| 168 | else { | 170 | else if (16%sizeof(size_t) == 0) { /* always true */ |
| 169 | size_t c; | ||
| 170 | while (len>=16) { | 171 | while (len>=16) { |
| 172 | size_t c, *out_t=(size_t *)out, *ivec_t=(size_t *)ivec; | ||
| 173 | const size_t *in_t=(const size_t *)in; | ||
| 174 | |||
| 171 | (*block)(in, tmp.c, key); | 175 | (*block)(in, tmp.c, key); |
| 172 | for(n=0; n<16; n+=sizeof(size_t)) { | 176 | for(n=0; n<16/sizeof(size_t); n++) { |
| 173 | c = *(size_t *)(in+n); | 177 | c = in_t[n]; |
| 174 | *(size_t *)(out+n) = | 178 | out_t[n] = tmp.t[n] ^ ivec_t[n]; |
| 175 | *(size_t *)(tmp.c+n) ^ *(size_t *)(ivec+n); | 179 | ivec_t[n] = c; |
| 176 | *(size_t *)(ivec+n) = c; | ||
| 177 | } | 180 | } |
| 178 | len -= 16; | 181 | len -= 16; |
| 179 | in += 16; | 182 | in += 16; |
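
The rewritten cbc128.c loops XOR the 16-byte block one machine word at a time through typed `size_t` pointers instead of casting byte pointers at arbitrary byte offsets. A minimal sketch of the pattern, with a hypothetical helper name; like the !STRICT_ALIGNMENT path it assumes word-sized loads and stores at these addresses are acceptable on the target:

```c
#include <stddef.h>

/* XOR a 16-byte block word by word, mirroring the new cbc128.c loop. */
static void
xor_block16(unsigned char *out, const unsigned char *iv)
{
	size_t *out_t = (size_t *)out;
	const size_t *iv_t = (const size_t *)iv;
	size_t n;

	for (n = 0; n < 16 / sizeof(size_t); n++)
		out_t[n] ^= iv_t[n];
}
```
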
diff --git a/src/lib/libcrypto/modes/ccm128.c b/src/lib/libcrypto/modes/ccm128.c
index c9b35e5b35..3ce11d0d98 100644
--- a/src/lib/libcrypto/modes/ccm128.c
+++ b/src/lib/libcrypto/modes/ccm128.c
| @@ -87,7 +87,7 @@ int CRYPTO_ccm128_setiv(CCM128_CONTEXT *ctx, | |||
| 87 | ctx->nonce.c[11] = (u8)(mlen>>(32%(sizeof(mlen)*8))); | 87 | ctx->nonce.c[11] = (u8)(mlen>>(32%(sizeof(mlen)*8))); |
| 88 | } | 88 | } |
| 89 | else | 89 | else |
| 90 | *(u32*)(&ctx->nonce.c[8]) = 0; | 90 | ctx->nonce.u[1] = 0; |
| 91 | 91 | ||
| 92 | ctx->nonce.c[12] = (u8)(mlen>>24); | 92 | ctx->nonce.c[12] = (u8)(mlen>>24); |
| 93 | ctx->nonce.c[13] = (u8)(mlen>>16); | 93 | ctx->nonce.c[13] = (u8)(mlen>>16); |
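
The pointer-cast store `*(u32*)(&ctx->nonce.c[8]) = 0` becomes `ctx->nonce.u[1] = 0`: the nonce block is a union of 64-bit and byte views, so the second 64-bit word can be cleared directly. The new form zeroes bytes 8..15 rather than 8..11, but bytes 12..15 are overwritten with the message length immediately afterwards, so the result is unchanged. A rough model of the union (spelled with <stdint.h> types for illustration):

```c
#include <stdint.h>

/* Approximate shape of the nonce/CMAC blocks in CCM128_CONTEXT: the same
 * 16 bytes viewed either as two 64-bit words or as individual bytes.
 * nonce.u[1] = 0 clears c[8] through c[15] in a single aligned store. */
union ccm_block {
	uint64_t u[2];
	uint8_t  c[16];
};
```
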
diff --git a/src/lib/libcrypto/modes/cts128.c b/src/lib/libcrypto/modes/cts128.c
index c0e1f3696c..2d583de6f6 100644
--- a/src/lib/libcrypto/modes/cts128.c
+++ b/src/lib/libcrypto/modes/cts128.c
| @@ -108,12 +108,8 @@ size_t CRYPTO_cts128_encrypt(const unsigned char *in, unsigned char *out, | |||
| 108 | (*cbc)(in,out-16,residue,key,ivec,1); | 108 | (*cbc)(in,out-16,residue,key,ivec,1); |
| 109 | memcpy(out,tmp.c,residue); | 109 | memcpy(out,tmp.c,residue); |
| 110 | #else | 110 | #else |
| 111 | { | 111 | memset(tmp.c,0,sizeof(tmp)); |
| 112 | size_t n; | ||
| 113 | for (n=0; n<16; n+=sizeof(size_t)) | ||
| 114 | *(size_t *)(tmp.c+n) = 0; | ||
| 115 | memcpy(tmp.c,in,residue); | 112 | memcpy(tmp.c,in,residue); |
| 116 | } | ||
| 117 | memcpy(out,out-16,residue); | 113 | memcpy(out,out-16,residue); |
| 118 | (*cbc)(tmp.c,out-16,16,key,ivec,1); | 114 | (*cbc)(tmp.c,out-16,16,key,ivec,1); |
| 119 | #endif | 115 | #endif |
| @@ -144,12 +140,8 @@ size_t CRYPTO_nistcts128_encrypt(const unsigned char *in, unsigned char *out, | |||
| 144 | #if defined(CBC_HANDLES_TRUNCATED_IO) | 140 | #if defined(CBC_HANDLES_TRUNCATED_IO) |
| 145 | (*cbc)(in,out-16+residue,residue,key,ivec,1); | 141 | (*cbc)(in,out-16+residue,residue,key,ivec,1); |
| 146 | #else | 142 | #else |
| 147 | { | 143 | memset(tmp.c,0,sizeof(tmp)); |
| 148 | size_t n; | ||
| 149 | for (n=0; n<16; n+=sizeof(size_t)) | ||
| 150 | *(size_t *)(tmp.c+n) = 0; | ||
| 151 | memcpy(tmp.c,in,residue); | 144 | memcpy(tmp.c,in,residue); |
| 152 | } | ||
| 153 | (*cbc)(tmp.c,out-16+residue,16,key,ivec,1); | 145 | (*cbc)(tmp.c,out-16+residue,16,key,ivec,1); |
| 154 | #endif | 146 | #endif |
| 155 | return len+residue; | 147 | return len+residue; |
| @@ -177,8 +169,7 @@ size_t CRYPTO_cts128_decrypt_block(const unsigned char *in, unsigned char *out, | |||
| 177 | 169 | ||
| 178 | (*block)(in,tmp.c+16,key); | 170 | (*block)(in,tmp.c+16,key); |
| 179 | 171 | ||
| 180 | for (n=0; n<16; n+=sizeof(size_t)) | 172 | memcpy(tmp.c,tmp.c+16,16); |
| 181 | *(size_t *)(tmp.c+n) = *(size_t *)(tmp.c+16+n); | ||
| 182 | memcpy(tmp.c,in+16,residue); | 173 | memcpy(tmp.c,in+16,residue); |
| 183 | (*block)(tmp.c,tmp.c,key); | 174 | (*block)(tmp.c,tmp.c,key); |
| 184 | 175 | ||
| @@ -220,8 +211,7 @@ size_t CRYPTO_nistcts128_decrypt_block(const unsigned char *in, unsigned char *o | |||
| 220 | 211 | ||
| 221 | (*block)(in+residue,tmp.c+16,key); | 212 | (*block)(in+residue,tmp.c+16,key); |
| 222 | 213 | ||
| 223 | for (n=0; n<16; n+=sizeof(size_t)) | 214 | memcpy(tmp.c,tmp.c+16,16); |
| 224 | *(size_t *)(tmp.c+n) = *(size_t *)(tmp.c+16+n); | ||
| 225 | memcpy(tmp.c,in,residue); | 215 | memcpy(tmp.c,in,residue); |
| 226 | (*block)(tmp.c,tmp.c,key); | 216 | (*block)(tmp.c,tmp.c,key); |
| 227 | 217 | ||
| @@ -240,7 +230,7 @@ size_t CRYPTO_nistcts128_decrypt_block(const unsigned char *in, unsigned char *o | |||
| 240 | size_t CRYPTO_cts128_decrypt(const unsigned char *in, unsigned char *out, | 230 | size_t CRYPTO_cts128_decrypt(const unsigned char *in, unsigned char *out, |
| 241 | size_t len, const void *key, | 231 | size_t len, const void *key, |
| 242 | unsigned char ivec[16], cbc128_f cbc) | 232 | unsigned char ivec[16], cbc128_f cbc) |
| 243 | { size_t residue, n; | 233 | { size_t residue; |
| 244 | union { size_t align; unsigned char c[32]; } tmp; | 234 | union { size_t align; unsigned char c[32]; } tmp; |
| 245 | 235 | ||
| 246 | assert (in && out && key && ivec); | 236 | assert (in && out && key && ivec); |
| @@ -257,8 +247,7 @@ size_t CRYPTO_cts128_decrypt(const unsigned char *in, unsigned char *out, | |||
| 257 | out += len; | 247 | out += len; |
| 258 | } | 248 | } |
| 259 | 249 | ||
| 260 | for (n=16; n<32; n+=sizeof(size_t)) | 250 | memset(tmp.c,0,sizeof(tmp)); |
| 261 | *(size_t *)(tmp.c+n) = 0; | ||
| 262 | /* this places in[16] at &tmp.c[16] and decrypted block at &tmp.c[0] */ | 251 | /* this places in[16] at &tmp.c[16] and decrypted block at &tmp.c[0] */ |
| 263 | (*cbc)(in,tmp.c,16,key,tmp.c+16,0); | 252 | (*cbc)(in,tmp.c,16,key,tmp.c+16,0); |
| 264 | 253 | ||
| @@ -275,7 +264,7 @@ size_t CRYPTO_cts128_decrypt(const unsigned char *in, unsigned char *out, | |||
| 275 | size_t CRYPTO_nistcts128_decrypt(const unsigned char *in, unsigned char *out, | 264 | size_t CRYPTO_nistcts128_decrypt(const unsigned char *in, unsigned char *out, |
| 276 | size_t len, const void *key, | 265 | size_t len, const void *key, |
| 277 | unsigned char ivec[16], cbc128_f cbc) | 266 | unsigned char ivec[16], cbc128_f cbc) |
| 278 | { size_t residue, n; | 267 | { size_t residue; |
| 279 | union { size_t align; unsigned char c[32]; } tmp; | 268 | union { size_t align; unsigned char c[32]; } tmp; |
| 280 | 269 | ||
| 281 | assert (in && out && key && ivec); | 270 | assert (in && out && key && ivec); |
| @@ -297,8 +286,7 @@ size_t CRYPTO_nistcts128_decrypt(const unsigned char *in, unsigned char *out, | |||
| 297 | out += len; | 286 | out += len; |
| 298 | } | 287 | } |
| 299 | 288 | ||
| 300 | for (n=16; n<32; n+=sizeof(size_t)) | 289 | memset(tmp.c,0,sizeof(tmp)); |
| 301 | *(size_t *)(tmp.c+n) = 0; | ||
| 302 | /* this places in[16] at &tmp.c[16] and decrypted block at &tmp.c[0] */ | 290 | /* this places in[16] at &tmp.c[16] and decrypted block at &tmp.c[0] */ |
| 303 | (*cbc)(in+residue,tmp.c,16,key,tmp.c+16,0); | 291 | (*cbc)(in+residue,tmp.c,16,key,tmp.c+16,0); |
| 304 | 292 | ||
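
The removed cts128.c loops zeroed and copied the scratch block one `size_t` at a time; plain `memset()`/`memcpy()` do the same work without the casts. The otherwise unused `align` member of the `tmp` union only forces word alignment of the byte buffer, which is what made the old word-store loop legal in the first place. A small sketch of that idiom (function name illustrative):

```c
#include <string.h>

static void
clear_scratch(void)
{
	/* "align" is never read; it only forces size_t alignment of c[] */
	union { size_t align; unsigned char c[32]; } tmp;

	/* replaces hand-rolled word stores such as
	 *   for (n = 16; n < 32; n += sizeof(size_t))
	 *           *(size_t *)(tmp.c + n) = 0;
	 * by clearing the whole scratch block at once */
	memset(tmp.c, 0, sizeof(tmp));
}
```
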
diff --git a/src/lib/libcrypto/modes/gcm128.c b/src/lib/libcrypto/modes/gcm128.c
index 7d6d034970..e1dc2b0f47 100644
--- a/src/lib/libcrypto/modes/gcm128.c
+++ b/src/lib/libcrypto/modes/gcm128.c
| @@ -723,7 +723,7 @@ void CRYPTO_gcm128_init(GCM128_CONTEXT *ctx,void *key,block128_f block) | |||
| 723 | # endif | 723 | # endif |
| 724 | gcm_init_4bit(ctx->Htable,ctx->H.u); | 724 | gcm_init_4bit(ctx->Htable,ctx->H.u); |
| 725 | # if defined(GHASH_ASM_X86) /* x86 only */ | 725 | # if defined(GHASH_ASM_X86) /* x86 only */ |
| 726 | # if defined(OPENSSL_IA32_SSE2) | 726 | # if defined(OPENSSL_IA32_SSE2) |
| 727 | if (OPENSSL_ia32cap_P[0]&(1<<25)) { /* check SSE bit */ | 727 | if (OPENSSL_ia32cap_P[0]&(1<<25)) { /* check SSE bit */ |
| 728 | # else | 728 | # else |
| 729 | if (OPENSSL_ia32cap_P[0]&(1<<23)) { /* check MMX bit */ | 729 | if (OPENSSL_ia32cap_P[0]&(1<<23)) { /* check MMX bit */ |
| @@ -810,7 +810,11 @@ void CRYPTO_gcm128_setiv(GCM128_CONTEXT *ctx,const unsigned char *iv,size_t len) | |||
| 810 | GCM_MUL(ctx,Yi); | 810 | GCM_MUL(ctx,Yi); |
| 811 | 811 | ||
| 812 | if (is_endian.little) | 812 | if (is_endian.little) |
| 813 | #ifdef BSWAP4 | ||
| 814 | ctr = BSWAP4(ctx->Yi.d[3]); | ||
| 815 | #else | ||
| 813 | ctr = GETU32(ctx->Yi.c+12); | 816 | ctr = GETU32(ctx->Yi.c+12); |
| 817 | #endif | ||
| 814 | else | 818 | else |
| 815 | ctr = ctx->Yi.d[3]; | 819 | ctr = ctx->Yi.d[3]; |
| 816 | } | 820 | } |
| @@ -818,7 +822,11 @@ void CRYPTO_gcm128_setiv(GCM128_CONTEXT *ctx,const unsigned char *iv,size_t len) | |||
| 818 | (*ctx->block)(ctx->Yi.c,ctx->EK0.c,ctx->key); | 822 | (*ctx->block)(ctx->Yi.c,ctx->EK0.c,ctx->key); |
| 819 | ++ctr; | 823 | ++ctr; |
| 820 | if (is_endian.little) | 824 | if (is_endian.little) |
| 825 | #ifdef BSWAP4 | ||
| 826 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 827 | #else | ||
| 821 | PUTU32(ctx->Yi.c+12,ctr); | 828 | PUTU32(ctx->Yi.c+12,ctr); |
| 829 | #endif | ||
| 822 | else | 830 | else |
| 823 | ctx->Yi.d[3] = ctr; | 831 | ctx->Yi.d[3] = ctr; |
| 824 | } | 832 | } |
| @@ -913,7 +921,11 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, | |||
| 913 | } | 921 | } |
| 914 | 922 | ||
| 915 | if (is_endian.little) | 923 | if (is_endian.little) |
| 924 | #ifdef BSWAP4 | ||
| 925 | ctr = BSWAP4(ctx->Yi.d[3]); | ||
| 926 | #else | ||
| 916 | ctr = GETU32(ctx->Yi.c+12); | 927 | ctr = GETU32(ctx->Yi.c+12); |
| 928 | #endif | ||
| 917 | else | 929 | else |
| 918 | ctr = ctx->Yi.d[3]; | 930 | ctr = ctx->Yi.d[3]; |
| 919 | 931 | ||
| @@ -941,15 +953,21 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, | |||
| 941 | size_t j=GHASH_CHUNK; | 953 | size_t j=GHASH_CHUNK; |
| 942 | 954 | ||
| 943 | while (j) { | 955 | while (j) { |
| 956 | size_t *out_t=(size_t *)out; | ||
| 957 | const size_t *in_t=(const size_t *)in; | ||
| 958 | |||
| 944 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 959 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 945 | ++ctr; | 960 | ++ctr; |
| 946 | if (is_endian.little) | 961 | if (is_endian.little) |
| 962 | #ifdef BSWAP4 | ||
| 963 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 964 | #else | ||
| 947 | PUTU32(ctx->Yi.c+12,ctr); | 965 | PUTU32(ctx->Yi.c+12,ctr); |
| 966 | #endif | ||
| 948 | else | 967 | else |
| 949 | ctx->Yi.d[3] = ctr; | 968 | ctx->Yi.d[3] = ctr; |
| 950 | for (i=0; i<16; i+=sizeof(size_t)) | 969 | for (i=0; i<16/sizeof(size_t); ++i) |
| 951 | *(size_t *)(out+i) = | 970 | out_t[i] = in_t[i] ^ ctx->EKi.t[i]; |
| 952 | *(size_t *)(in+i)^*(size_t *)(ctx->EKi.c+i); | ||
| 953 | out += 16; | 971 | out += 16; |
| 954 | in += 16; | 972 | in += 16; |
| 955 | j -= 16; | 973 | j -= 16; |
| @@ -961,15 +979,21 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, | |||
| 961 | size_t j=i; | 979 | size_t j=i; |
| 962 | 980 | ||
| 963 | while (len>=16) { | 981 | while (len>=16) { |
| 982 | size_t *out_t=(size_t *)out; | ||
| 983 | const size_t *in_t=(const size_t *)in; | ||
| 984 | |||
| 964 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 985 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 965 | ++ctr; | 986 | ++ctr; |
| 966 | if (is_endian.little) | 987 | if (is_endian.little) |
| 988 | #ifdef BSWAP4 | ||
| 989 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 990 | #else | ||
| 967 | PUTU32(ctx->Yi.c+12,ctr); | 991 | PUTU32(ctx->Yi.c+12,ctr); |
| 992 | #endif | ||
| 968 | else | 993 | else |
| 969 | ctx->Yi.d[3] = ctr; | 994 | ctx->Yi.d[3] = ctr; |
| 970 | for (i=0; i<16; i+=sizeof(size_t)) | 995 | for (i=0; i<16/sizeof(size_t); ++i) |
| 971 | *(size_t *)(out+i) = | 996 | out_t[i] = in_t[i] ^ ctx->EKi.t[i]; |
| 972 | *(size_t *)(in+i)^*(size_t *)(ctx->EKi.c+i); | ||
| 973 | out += 16; | 997 | out += 16; |
| 974 | in += 16; | 998 | in += 16; |
| 975 | len -= 16; | 999 | len -= 16; |
| @@ -978,16 +1002,22 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, | |||
| 978 | } | 1002 | } |
| 979 | #else | 1003 | #else |
| 980 | while (len>=16) { | 1004 | while (len>=16) { |
| 1005 | size_t *out_t=(size_t *)out; | ||
| 1006 | const size_t *in_t=(const size_t *)in; | ||
| 1007 | |||
| 981 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1008 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 982 | ++ctr; | 1009 | ++ctr; |
| 983 | if (is_endian.little) | 1010 | if (is_endian.little) |
| 1011 | #ifdef BSWAP4 | ||
| 1012 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1013 | #else | ||
| 984 | PUTU32(ctx->Yi.c+12,ctr); | 1014 | PUTU32(ctx->Yi.c+12,ctr); |
| 1015 | #endif | ||
| 985 | else | 1016 | else |
| 986 | ctx->Yi.d[3] = ctr; | 1017 | ctx->Yi.d[3] = ctr; |
| 987 | for (i=0; i<16; i+=sizeof(size_t)) | 1018 | for (i=0; i<16/sizeof(size_t); ++i) |
| 988 | *(size_t *)(ctx->Xi.c+i) ^= | 1019 | ctx->Xi.t[i] ^= |
| 989 | *(size_t *)(out+i) = | 1020 | out_t[i] = in_t[i]^ctx->EKi.t[i]; |
| 990 | *(size_t *)(in+i)^*(size_t *)(ctx->EKi.c+i); | ||
| 991 | GCM_MUL(ctx,Xi); | 1021 | GCM_MUL(ctx,Xi); |
| 992 | out += 16; | 1022 | out += 16; |
| 993 | in += 16; | 1023 | in += 16; |
| @@ -998,7 +1028,11 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, | |||
| 998 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1028 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 999 | ++ctr; | 1029 | ++ctr; |
| 1000 | if (is_endian.little) | 1030 | if (is_endian.little) |
| 1031 | #ifdef BSWAP4 | ||
| 1032 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1033 | #else | ||
| 1001 | PUTU32(ctx->Yi.c+12,ctr); | 1034 | PUTU32(ctx->Yi.c+12,ctr); |
| 1035 | #endif | ||
| 1002 | else | 1036 | else |
| 1003 | ctx->Yi.d[3] = ctr; | 1037 | ctx->Yi.d[3] = ctr; |
| 1004 | while (len--) { | 1038 | while (len--) { |
| @@ -1016,7 +1050,11 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, | |||
| 1016 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1050 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1017 | ++ctr; | 1051 | ++ctr; |
| 1018 | if (is_endian.little) | 1052 | if (is_endian.little) |
| 1053 | #ifdef BSWAP4 | ||
| 1054 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1055 | #else | ||
| 1019 | PUTU32(ctx->Yi.c+12,ctr); | 1056 | PUTU32(ctx->Yi.c+12,ctr); |
| 1057 | #endif | ||
| 1020 | else | 1058 | else |
| 1021 | ctx->Yi.d[3] = ctr; | 1059 | ctx->Yi.d[3] = ctr; |
| 1022 | } | 1060 | } |
| @@ -1060,7 +1098,11 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, | |||
| 1060 | } | 1098 | } |
| 1061 | 1099 | ||
| 1062 | if (is_endian.little) | 1100 | if (is_endian.little) |
| 1101 | #ifdef BSWAP4 | ||
| 1102 | ctr = BSWAP4(ctx->Yi.d[3]); | ||
| 1103 | #else | ||
| 1063 | ctr = GETU32(ctx->Yi.c+12); | 1104 | ctr = GETU32(ctx->Yi.c+12); |
| 1105 | #endif | ||
| 1064 | else | 1106 | else |
| 1065 | ctr = ctx->Yi.d[3]; | 1107 | ctr = ctx->Yi.d[3]; |
| 1066 | 1108 | ||
| @@ -1091,15 +1133,21 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, | |||
| 1091 | 1133 | ||
| 1092 | GHASH(ctx,in,GHASH_CHUNK); | 1134 | GHASH(ctx,in,GHASH_CHUNK); |
| 1093 | while (j) { | 1135 | while (j) { |
| 1136 | size_t *out_t=(size_t *)out; | ||
| 1137 | const size_t *in_t=(const size_t *)in; | ||
| 1138 | |||
| 1094 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1139 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1095 | ++ctr; | 1140 | ++ctr; |
| 1096 | if (is_endian.little) | 1141 | if (is_endian.little) |
| 1142 | #ifdef BSWAP4 | ||
| 1143 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1144 | #else | ||
| 1097 | PUTU32(ctx->Yi.c+12,ctr); | 1145 | PUTU32(ctx->Yi.c+12,ctr); |
| 1146 | #endif | ||
| 1098 | else | 1147 | else |
| 1099 | ctx->Yi.d[3] = ctr; | 1148 | ctx->Yi.d[3] = ctr; |
| 1100 | for (i=0; i<16; i+=sizeof(size_t)) | 1149 | for (i=0; i<16/sizeof(size_t); ++i) |
| 1101 | *(size_t *)(out+i) = | 1150 | out_t[i] = in_t[i]^ctx->EKi.t[i]; |
| 1102 | *(size_t *)(in+i)^*(size_t *)(ctx->EKi.c+i); | ||
| 1103 | out += 16; | 1151 | out += 16; |
| 1104 | in += 16; | 1152 | in += 16; |
| 1105 | j -= 16; | 1153 | j -= 16; |
| @@ -1109,15 +1157,21 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, | |||
| 1109 | if ((i = (len&(size_t)-16))) { | 1157 | if ((i = (len&(size_t)-16))) { |
| 1110 | GHASH(ctx,in,i); | 1158 | GHASH(ctx,in,i); |
| 1111 | while (len>=16) { | 1159 | while (len>=16) { |
| 1160 | size_t *out_t=(size_t *)out; | ||
| 1161 | const size_t *in_t=(const size_t *)in; | ||
| 1162 | |||
| 1112 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1163 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1113 | ++ctr; | 1164 | ++ctr; |
| 1114 | if (is_endian.little) | 1165 | if (is_endian.little) |
| 1166 | #ifdef BSWAP4 | ||
| 1167 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1168 | #else | ||
| 1115 | PUTU32(ctx->Yi.c+12,ctr); | 1169 | PUTU32(ctx->Yi.c+12,ctr); |
| 1170 | #endif | ||
| 1116 | else | 1171 | else |
| 1117 | ctx->Yi.d[3] = ctr; | 1172 | ctx->Yi.d[3] = ctr; |
| 1118 | for (i=0; i<16; i+=sizeof(size_t)) | 1173 | for (i=0; i<16/sizeof(size_t); ++i) |
| 1119 | *(size_t *)(out+i) = | 1174 | out_t[i] = in_t[i]^ctx->EKi.t[i]; |
| 1120 | *(size_t *)(in+i)^*(size_t *)(ctx->EKi.c+i); | ||
| 1121 | out += 16; | 1175 | out += 16; |
| 1122 | in += 16; | 1176 | in += 16; |
| 1123 | len -= 16; | 1177 | len -= 16; |
| @@ -1125,16 +1179,23 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, | |||
| 1125 | } | 1179 | } |
| 1126 | #else | 1180 | #else |
| 1127 | while (len>=16) { | 1181 | while (len>=16) { |
| 1182 | size_t *out_t=(size_t *)out; | ||
| 1183 | const size_t *in_t=(const size_t *)in; | ||
| 1184 | |||
| 1128 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1185 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1129 | ++ctr; | 1186 | ++ctr; |
| 1130 | if (is_endian.little) | 1187 | if (is_endian.little) |
| 1188 | #ifdef BSWAP4 | ||
| 1189 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1190 | #else | ||
| 1131 | PUTU32(ctx->Yi.c+12,ctr); | 1191 | PUTU32(ctx->Yi.c+12,ctr); |
| 1192 | #endif | ||
| 1132 | else | 1193 | else |
| 1133 | ctx->Yi.d[3] = ctr; | 1194 | ctx->Yi.d[3] = ctr; |
| 1134 | for (i=0; i<16; i+=sizeof(size_t)) { | 1195 | for (i=0; i<16/sizeof(size_t); ++i) { |
| 1135 | size_t c = *(size_t *)(in+i); | 1196 | size_t c = in_t[i]; |
| 1136 | *(size_t *)(out+i) = c^*(size_t *)(ctx->EKi.c+i); | 1197 | out_t[i] = c^ctx->EKi.t[i]; |
| 1137 | *(size_t *)(ctx->Xi.c+i) ^= c; | 1198 | ctx->Xi.t[i] ^= c; |
| 1138 | } | 1199 | } |
| 1139 | GCM_MUL(ctx,Xi); | 1200 | GCM_MUL(ctx,Xi); |
| 1140 | out += 16; | 1201 | out += 16; |
| @@ -1146,7 +1207,11 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, | |||
| 1146 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1207 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1147 | ++ctr; | 1208 | ++ctr; |
| 1148 | if (is_endian.little) | 1209 | if (is_endian.little) |
| 1210 | #ifdef BSWAP4 | ||
| 1211 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1212 | #else | ||
| 1149 | PUTU32(ctx->Yi.c+12,ctr); | 1213 | PUTU32(ctx->Yi.c+12,ctr); |
| 1214 | #endif | ||
| 1150 | else | 1215 | else |
| 1151 | ctx->Yi.d[3] = ctr; | 1216 | ctx->Yi.d[3] = ctr; |
| 1152 | while (len--) { | 1217 | while (len--) { |
| @@ -1167,7 +1232,11 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, | |||
| 1167 | (*block)(ctx->Yi.c,ctx->EKi.c,key); | 1232 | (*block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1168 | ++ctr; | 1233 | ++ctr; |
| 1169 | if (is_endian.little) | 1234 | if (is_endian.little) |
| 1235 | #ifdef BSWAP4 | ||
| 1236 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1237 | #else | ||
| 1170 | PUTU32(ctx->Yi.c+12,ctr); | 1238 | PUTU32(ctx->Yi.c+12,ctr); |
| 1239 | #endif | ||
| 1171 | else | 1240 | else |
| 1172 | ctx->Yi.d[3] = ctr; | 1241 | ctx->Yi.d[3] = ctr; |
| 1173 | } | 1242 | } |
| @@ -1212,7 +1281,11 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx, | |||
| 1212 | } | 1281 | } |
| 1213 | 1282 | ||
| 1214 | if (is_endian.little) | 1283 | if (is_endian.little) |
| 1284 | #ifdef BSWAP4 | ||
| 1285 | ctr = BSWAP4(ctx->Yi.d[3]); | ||
| 1286 | #else | ||
| 1215 | ctr = GETU32(ctx->Yi.c+12); | 1287 | ctr = GETU32(ctx->Yi.c+12); |
| 1288 | #endif | ||
| 1216 | else | 1289 | else |
| 1217 | ctr = ctx->Yi.d[3]; | 1290 | ctr = ctx->Yi.d[3]; |
| 1218 | 1291 | ||
| @@ -1234,7 +1307,11 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx, | |||
| 1234 | (*stream)(in,out,GHASH_CHUNK/16,key,ctx->Yi.c); | 1307 | (*stream)(in,out,GHASH_CHUNK/16,key,ctx->Yi.c); |
| 1235 | ctr += GHASH_CHUNK/16; | 1308 | ctr += GHASH_CHUNK/16; |
| 1236 | if (is_endian.little) | 1309 | if (is_endian.little) |
| 1310 | #ifdef BSWAP4 | ||
| 1311 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1312 | #else | ||
| 1237 | PUTU32(ctx->Yi.c+12,ctr); | 1313 | PUTU32(ctx->Yi.c+12,ctr); |
| 1314 | #endif | ||
| 1238 | else | 1315 | else |
| 1239 | ctx->Yi.d[3] = ctr; | 1316 | ctx->Yi.d[3] = ctr; |
| 1240 | GHASH(ctx,out,GHASH_CHUNK); | 1317 | GHASH(ctx,out,GHASH_CHUNK); |
| @@ -1249,7 +1326,11 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx, | |||
| 1249 | (*stream)(in,out,j,key,ctx->Yi.c); | 1326 | (*stream)(in,out,j,key,ctx->Yi.c); |
| 1250 | ctr += (unsigned int)j; | 1327 | ctr += (unsigned int)j; |
| 1251 | if (is_endian.little) | 1328 | if (is_endian.little) |
| 1329 | #ifdef BSWAP4 | ||
| 1330 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1331 | #else | ||
| 1252 | PUTU32(ctx->Yi.c+12,ctr); | 1332 | PUTU32(ctx->Yi.c+12,ctr); |
| 1333 | #endif | ||
| 1253 | else | 1334 | else |
| 1254 | ctx->Yi.d[3] = ctr; | 1335 | ctx->Yi.d[3] = ctr; |
| 1255 | in += i; | 1336 | in += i; |
| @@ -1269,7 +1350,11 @@ int CRYPTO_gcm128_encrypt_ctr32(GCM128_CONTEXT *ctx, | |||
| 1269 | (*ctx->block)(ctx->Yi.c,ctx->EKi.c,key); | 1350 | (*ctx->block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1270 | ++ctr; | 1351 | ++ctr; |
| 1271 | if (is_endian.little) | 1352 | if (is_endian.little) |
| 1353 | #ifdef BSWAP4 | ||
| 1354 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1355 | #else | ||
| 1272 | PUTU32(ctx->Yi.c+12,ctr); | 1356 | PUTU32(ctx->Yi.c+12,ctr); |
| 1357 | #endif | ||
| 1273 | else | 1358 | else |
| 1274 | ctx->Yi.d[3] = ctr; | 1359 | ctx->Yi.d[3] = ctr; |
| 1275 | while (len--) { | 1360 | while (len--) { |
| @@ -1311,7 +1396,11 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx, | |||
| 1311 | } | 1396 | } |
| 1312 | 1397 | ||
| 1313 | if (is_endian.little) | 1398 | if (is_endian.little) |
| 1399 | #ifdef BSWAP4 | ||
| 1400 | ctr = BSWAP4(ctx->Yi.d[3]); | ||
| 1401 | #else | ||
| 1314 | ctr = GETU32(ctx->Yi.c+12); | 1402 | ctr = GETU32(ctx->Yi.c+12); |
| 1403 | #endif | ||
| 1315 | else | 1404 | else |
| 1316 | ctr = ctx->Yi.d[3]; | 1405 | ctr = ctx->Yi.d[3]; |
| 1317 | 1406 | ||
| @@ -1336,7 +1425,11 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx, | |||
| 1336 | (*stream)(in,out,GHASH_CHUNK/16,key,ctx->Yi.c); | 1425 | (*stream)(in,out,GHASH_CHUNK/16,key,ctx->Yi.c); |
| 1337 | ctr += GHASH_CHUNK/16; | 1426 | ctr += GHASH_CHUNK/16; |
| 1338 | if (is_endian.little) | 1427 | if (is_endian.little) |
| 1428 | #ifdef BSWAP4 | ||
| 1429 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1430 | #else | ||
| 1339 | PUTU32(ctx->Yi.c+12,ctr); | 1431 | PUTU32(ctx->Yi.c+12,ctr); |
| 1432 | #endif | ||
| 1340 | else | 1433 | else |
| 1341 | ctx->Yi.d[3] = ctr; | 1434 | ctx->Yi.d[3] = ctr; |
| 1342 | out += GHASH_CHUNK; | 1435 | out += GHASH_CHUNK; |
| @@ -1362,7 +1455,11 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx, | |||
| 1362 | (*stream)(in,out,j,key,ctx->Yi.c); | 1455 | (*stream)(in,out,j,key,ctx->Yi.c); |
| 1363 | ctr += (unsigned int)j; | 1456 | ctr += (unsigned int)j; |
| 1364 | if (is_endian.little) | 1457 | if (is_endian.little) |
| 1458 | #ifdef BSWAP4 | ||
| 1459 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1460 | #else | ||
| 1365 | PUTU32(ctx->Yi.c+12,ctr); | 1461 | PUTU32(ctx->Yi.c+12,ctr); |
| 1462 | #endif | ||
| 1366 | else | 1463 | else |
| 1367 | ctx->Yi.d[3] = ctr; | 1464 | ctx->Yi.d[3] = ctr; |
| 1368 | out += i; | 1465 | out += i; |
| @@ -1373,7 +1470,11 @@ int CRYPTO_gcm128_decrypt_ctr32(GCM128_CONTEXT *ctx, | |||
| 1373 | (*ctx->block)(ctx->Yi.c,ctx->EKi.c,key); | 1470 | (*ctx->block)(ctx->Yi.c,ctx->EKi.c,key); |
| 1374 | ++ctr; | 1471 | ++ctr; |
| 1375 | if (is_endian.little) | 1472 | if (is_endian.little) |
| 1473 | #ifdef BSWAP4 | ||
| 1474 | ctx->Yi.d[3] = BSWAP4(ctr); | ||
| 1475 | #else | ||
| 1376 | PUTU32(ctx->Yi.c+12,ctr); | 1476 | PUTU32(ctx->Yi.c+12,ctr); |
| 1477 | #endif | ||
| 1377 | else | 1478 | else |
| 1378 | ctx->Yi.d[3] = ctr; | 1479 | ctx->Yi.d[3] = ctr; |
| 1379 | while (len--) { | 1480 | while (len--) { |
| @@ -1398,7 +1499,7 @@ int CRYPTO_gcm128_finish(GCM128_CONTEXT *ctx,const unsigned char *tag, | |||
| 1398 | void (*gcm_gmult_p)(u64 Xi[2],const u128 Htable[16]) = ctx->gmult; | 1499 | void (*gcm_gmult_p)(u64 Xi[2],const u128 Htable[16]) = ctx->gmult; |
| 1399 | #endif | 1500 | #endif |
| 1400 | 1501 | ||
| 1401 | if (ctx->mres) | 1502 | if (ctx->mres || ctx->ares) |
| 1402 | GCM_MUL(ctx,Xi); | 1503 | GCM_MUL(ctx,Xi); |
| 1403 | 1504 | ||
| 1404 | if (is_endian.little) { | 1505 | if (is_endian.little) { |
| @@ -1669,6 +1770,46 @@ static const u8 IV18[]={0x93,0x13,0x22,0x5d,0xf8,0x84,0x06,0xe5,0x55,0x90,0x9c,0 | |||
| 1669 | 0xa2,0x41,0x89,0x97,0x20,0x0e,0xf8,0x2e,0x44,0xae,0x7e,0x3f}, | 1770 | 0xa2,0x41,0x89,0x97,0x20,0x0e,0xf8,0x2e,0x44,0xae,0x7e,0x3f}, |
| 1670 | T18[]= {0xa4,0x4a,0x82,0x66,0xee,0x1c,0x8e,0xb0,0xc8,0xb5,0xd4,0xcf,0x5a,0xe9,0xf1,0x9a}; | 1771 | T18[]= {0xa4,0x4a,0x82,0x66,0xee,0x1c,0x8e,0xb0,0xc8,0xb5,0xd4,0xcf,0x5a,0xe9,0xf1,0x9a}; |
| 1671 | 1772 | ||
| 1773 | /* Test Case 19 */ | ||
| 1774 | #define K19 K1 | ||
| 1775 | #define P19 P1 | ||
| 1776 | #define IV19 IV1 | ||
| 1777 | #define C19 C1 | ||
| 1778 | static const u8 A19[]= {0xd9,0x31,0x32,0x25,0xf8,0x84,0x06,0xe5,0xa5,0x59,0x09,0xc5,0xaf,0xf5,0x26,0x9a, | ||
| 1779 | 0x86,0xa7,0xa9,0x53,0x15,0x34,0xf7,0xda,0x2e,0x4c,0x30,0x3d,0x8a,0x31,0x8a,0x72, | ||
| 1780 | 0x1c,0x3c,0x0c,0x95,0x95,0x68,0x09,0x53,0x2f,0xcf,0x0e,0x24,0x49,0xa6,0xb5,0x25, | ||
| 1781 | 0xb1,0x6a,0xed,0xf5,0xaa,0x0d,0xe6,0x57,0xba,0x63,0x7b,0x39,0x1a,0xaf,0xd2,0x55, | ||
| 1782 | 0x52,0x2d,0xc1,0xf0,0x99,0x56,0x7d,0x07,0xf4,0x7f,0x37,0xa3,0x2a,0x84,0x42,0x7d, | ||
| 1783 | 0x64,0x3a,0x8c,0xdc,0xbf,0xe5,0xc0,0xc9,0x75,0x98,0xa2,0xbd,0x25,0x55,0xd1,0xaa, | ||
| 1784 | 0x8c,0xb0,0x8e,0x48,0x59,0x0d,0xbb,0x3d,0xa7,0xb0,0x8b,0x10,0x56,0x82,0x88,0x38, | ||
| 1785 | 0xc5,0xf6,0x1e,0x63,0x93,0xba,0x7a,0x0a,0xbc,0xc9,0xf6,0x62,0x89,0x80,0x15,0xad}, | ||
| 1786 | T19[]= {0x5f,0xea,0x79,0x3a,0x2d,0x6f,0x97,0x4d,0x37,0xe6,0x8e,0x0c,0xb8,0xff,0x94,0x92}; | ||
| 1787 | |||
| 1788 | /* Test Case 20 */ | ||
| 1789 | #define K20 K1 | ||
| 1790 | #define A20 A1 | ||
| 1791 | static const u8 IV20[64]={0xff,0xff,0xff,0xff}, /* this results in 0xff in counter LSB */ | ||
| 1792 | P20[288], | ||
| 1793 | C20[]= {0x56,0xb3,0x37,0x3c,0xa9,0xef,0x6e,0x4a,0x2b,0x64,0xfe,0x1e,0x9a,0x17,0xb6,0x14, | ||
| 1794 | 0x25,0xf1,0x0d,0x47,0xa7,0x5a,0x5f,0xce,0x13,0xef,0xc6,0xbc,0x78,0x4a,0xf2,0x4f, | ||
| 1795 | 0x41,0x41,0xbd,0xd4,0x8c,0xf7,0xc7,0x70,0x88,0x7a,0xfd,0x57,0x3c,0xca,0x54,0x18, | ||
| 1796 | 0xa9,0xae,0xff,0xcd,0x7c,0x5c,0xed,0xdf,0xc6,0xa7,0x83,0x97,0xb9,0xa8,0x5b,0x49, | ||
| 1797 | 0x9d,0xa5,0x58,0x25,0x72,0x67,0xca,0xab,0x2a,0xd0,0xb2,0x3c,0xa4,0x76,0xa5,0x3c, | ||
| 1798 | 0xb1,0x7f,0xb4,0x1c,0x4b,0x8b,0x47,0x5c,0xb4,0xf3,0xf7,0x16,0x50,0x94,0xc2,0x29, | ||
| 1799 | 0xc9,0xe8,0xc4,0xdc,0x0a,0x2a,0x5f,0xf1,0x90,0x3e,0x50,0x15,0x11,0x22,0x13,0x76, | ||
| 1800 | 0xa1,0xcd,0xb8,0x36,0x4c,0x50,0x61,0xa2,0x0c,0xae,0x74,0xbc,0x4a,0xcd,0x76,0xce, | ||
| 1801 | 0xb0,0xab,0xc9,0xfd,0x32,0x17,0xef,0x9f,0x8c,0x90,0xbe,0x40,0x2d,0xdf,0x6d,0x86, | ||
| 1802 | 0x97,0xf4,0xf8,0x80,0xdf,0xf1,0x5b,0xfb,0x7a,0x6b,0x28,0x24,0x1e,0xc8,0xfe,0x18, | ||
| 1803 | 0x3c,0x2d,0x59,0xe3,0xf9,0xdf,0xff,0x65,0x3c,0x71,0x26,0xf0,0xac,0xb9,0xe6,0x42, | ||
| 1804 | 0x11,0xf4,0x2b,0xae,0x12,0xaf,0x46,0x2b,0x10,0x70,0xbe,0xf1,0xab,0x5e,0x36,0x06, | ||
| 1805 | 0x87,0x2c,0xa1,0x0d,0xee,0x15,0xb3,0x24,0x9b,0x1a,0x1b,0x95,0x8f,0x23,0x13,0x4c, | ||
| 1806 | 0x4b,0xcc,0xb7,0xd0,0x32,0x00,0xbc,0xe4,0x20,0xa2,0xf8,0xeb,0x66,0xdc,0xf3,0x64, | ||
| 1807 | 0x4d,0x14,0x23,0xc1,0xb5,0x69,0x90,0x03,0xc1,0x3e,0xce,0xf4,0xbf,0x38,0xa3,0xb6, | ||
| 1808 | 0x0e,0xed,0xc3,0x40,0x33,0xba,0xc1,0x90,0x27,0x83,0xdc,0x6d,0x89,0xe2,0xe7,0x74, | ||
| 1809 | 0x18,0x8a,0x43,0x9c,0x7e,0xbc,0xc0,0x67,0x2d,0xbd,0xa4,0xdd,0xcf,0xb2,0x79,0x46, | ||
| 1810 | 0x13,0xb0,0xbe,0x41,0x31,0x5e,0xf7,0x78,0x70,0x8a,0x70,0xee,0x7d,0x75,0x16,0x5c}, | ||
| 1811 | T20[]= {0x8b,0x30,0x7f,0x6b,0x33,0x28,0x6d,0x0a,0xb0,0x26,0xa9,0xed,0x3f,0xe1,0xe8,0x5f}; | ||
| 1812 | |||
| 1672 | #define TEST_CASE(n) do { \ | 1813 | #define TEST_CASE(n) do { \ |
| 1673 | u8 out[sizeof(P##n)]; \ | 1814 | u8 out[sizeof(P##n)]; \ |
| 1674 | AES_set_encrypt_key(K##n,sizeof(K##n)*8,&key); \ | 1815 | AES_set_encrypt_key(K##n,sizeof(K##n)*8,&key); \ |
| @@ -1713,6 +1854,8 @@ int main() | |||
| 1713 | TEST_CASE(16); | 1854 | TEST_CASE(16); |
| 1714 | TEST_CASE(17); | 1855 | TEST_CASE(17); |
| 1715 | TEST_CASE(18); | 1856 | TEST_CASE(18); |
| 1857 | TEST_CASE(19); | ||
| 1858 | TEST_CASE(20); | ||
| 1716 | 1859 | ||
| 1717 | #ifdef OPENSSL_CPUID_OBJ | 1860 | #ifdef OPENSSL_CPUID_OBJ |
| 1718 | { | 1861 | { |
| @@ -1743,11 +1886,16 @@ int main() | |||
| 1743 | ctr_t/(double)sizeof(buf), | 1886 | ctr_t/(double)sizeof(buf), |
| 1744 | (gcm_t-ctr_t)/(double)sizeof(buf)); | 1887 | (gcm_t-ctr_t)/(double)sizeof(buf)); |
| 1745 | #ifdef GHASH | 1888 | #ifdef GHASH |
| 1746 | GHASH(&ctx,buf.c,sizeof(buf)); | 1889 | { |
| 1890 | void (*gcm_ghash_p)(u64 Xi[2],const u128 Htable[16], | ||
| 1891 | const u8 *inp,size_t len) = ctx.ghash; | ||
| 1892 | |||
| 1893 | GHASH((&ctx),buf.c,sizeof(buf)); | ||
| 1747 | start = OPENSSL_rdtsc(); | 1894 | start = OPENSSL_rdtsc(); |
| 1748 | for (i=0;i<100;++i) GHASH(&ctx,buf.c,sizeof(buf)); | 1895 | for (i=0;i<100;++i) GHASH((&ctx),buf.c,sizeof(buf)); |
| 1749 | gcm_t = OPENSSL_rdtsc() - start; | 1896 | gcm_t = OPENSSL_rdtsc() - start; |
| 1750 | printf("%.2f\n",gcm_t/(double)sizeof(buf)/(double)i); | 1897 | printf("%.2f\n",gcm_t/(double)sizeof(buf)/(double)i); |
| 1898 | } | ||
| 1751 | #endif | 1899 | #endif |
| 1752 | } | 1900 | } |
| 1753 | #endif | 1901 | #endif |
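
Throughout gcm128.c the 32-bit counter held in the last four bytes of Yi is now written back with `BSWAP4()` when that macro is available and with `PUTU32()` otherwise, and the keystream XOR loops index whole machine words through the new `.t` union member instead of casting byte pointers. A small, self-contained check that the two counter stores produce the same bytes on a little-endian host; the BSWAP4 here is only a portable stand-in for the real macro, which is normally a compiler intrinsic or inline assembly:

```c
#include <stdint.h>
#include <string.h>

#ifndef BSWAP4
# define BSWAP4(x) ((((x) & 0xff000000U) >> 24) | (((x) & 0x00ff0000U) >> 8) | \
		    (((x) & 0x0000ff00U) <<  8) | (((x) & 0x000000ffU) << 24))
#endif

static int
counter_stores_agree(uint32_t ctr)
{
	union { uint32_t d; unsigned char c[4]; } a, b;

	a.d = BSWAP4(ctr);			/* word store of the swapped value */
	b.c[0] = (unsigned char)(ctr >> 24);	/* the byte stores PUTU32() amounts to */
	b.c[1] = (unsigned char)(ctr >> 16);
	b.c[2] = (unsigned char)(ctr >> 8);
	b.c[3] = (unsigned char)(ctr);

	return memcmp(a.c, b.c, 4) == 0;	/* holds on little-endian hosts */
}
```
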
diff --git a/src/lib/libcrypto/modes/modes_lcl.h b/src/lib/libcrypto/modes/modes_lcl.h
index b6dc3c336f..9d83e12844 100644
--- a/src/lib/libcrypto/modes/modes_lcl.h
+++ b/src/lib/libcrypto/modes/modes_lcl.h
| @@ -29,10 +29,7 @@ typedef unsigned char u8; | |||
| 29 | #if defined(__i386) || defined(__i386__) || \ | 29 | #if defined(__i386) || defined(__i386__) || \ |
| 30 | defined(__x86_64) || defined(__x86_64__) || \ | 30 | defined(__x86_64) || defined(__x86_64__) || \ |
| 31 | defined(_M_IX86) || defined(_M_AMD64) || defined(_M_X64) || \ | 31 | defined(_M_IX86) || defined(_M_AMD64) || defined(_M_X64) || \ |
| 32 | defined(__s390__) || defined(__s390x__) || \ | 32 | defined(__s390__) || defined(__s390x__) |
| 33 | ( (defined(__arm__) || defined(__arm)) && \ | ||
| 34 | (defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || \ | ||
| 35 | defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__)) ) | ||
| 36 | # undef STRICT_ALIGNMENT | 33 | # undef STRICT_ALIGNMENT |
| 37 | #endif | 34 | #endif |
| 38 | 35 | ||
| @@ -101,8 +98,8 @@ typedef struct { u64 hi,lo; } u128; | |||
| 101 | 98 | ||
| 102 | struct gcm128_context { | 99 | struct gcm128_context { |
| 103 | /* Following 6 names follow names in GCM specification */ | 100 | /* Following 6 names follow names in GCM specification */ |
| 104 | union { u64 u[2]; u32 d[4]; u8 c[16]; } Yi,EKi,EK0,len, | 101 | union { u64 u[2]; u32 d[4]; u8 c[16]; size_t t[16/sizeof(size_t)]; } |
| 105 | Xi,H; | 102 | Yi,EKi,EK0,len,Xi,H; |
| 106 | /* Relative position of Xi, H and pre-computed Htable is used | 103 | /* Relative position of Xi, H and pre-computed Htable is used |
| 107 | * in some assembler modules, i.e. don't change the order! */ | 104 | * in some assembler modules, i.e. don't change the order! */ |
| 108 | #if TABLE_BITS==8 | 105 | #if TABLE_BITS==8 |
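
The 16-byte block union in `gcm128_context` gains a `size_t t[16/sizeof(size_t)]` view, which is what lets the gcm128.c loops above write `ctx->EKi.t[i]` and `ctx->Xi.t[i]` rather than casting byte pointers; the first hunk also drops ARMv7 from the list of targets that undefine STRICT_ALIGNMENT. A sketch of the resulting union, using <stdint.h> names in place of the library's u64/u32/u8 typedefs:

```c
#include <stddef.h>
#include <stdint.h>

/* Same 16 bytes, four views: 64-bit words for the assembler interfaces,
 * 32-bit words for the counter, bytes for I/O, and machine words (t) for
 * the portable XOR loops added in gcm128.c. */
union gcm_block {
	uint64_t u[2];
	uint32_t d[4];
	uint8_t  c[16];
	size_t   t[16 / sizeof(size_t)];
};

/* all views must describe exactly 16 bytes */
typedef char gcm_block_is_16_bytes[sizeof(union gcm_block) == 16 ? 1 : -1];
```
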
