author	tb <>	2023-12-18 21:15:00 +0000
committer	tb <>	2023-12-18 21:15:00 +0000
commit	b94f27252e5d924442c45f66956492249d0a6d26 (patch)
tree	2a7878f701742ce293d9fe98104bd41cfcd38715
parent	0e07bfe346298d2a344167ad16d300f830c4b9ea (diff)
cmac: use block_size rather than bl
This is purely mechanical apart from a single line wrap.
-rw-r--r--	src/lib/libcrypto/cmac/cmac.c | 71
1 file changed, 36 insertions(+), 35 deletions(-)
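
For context, the renamed variable only ever holds the cipher's block length as returned by EVP_CIPHER_CTX_block_size(); it is internal and invisible to callers. Below is a minimal usage sketch (not part of this commit) of the public CMAC API whose internals are renamed here. It assumes AES-128-CBC; the key bytes are the RFC 4493 example key and are illustrative only.

#include <stdio.h>
#include <string.h>

#include <openssl/cmac.h>
#include <openssl/evp.h>

int
main(void)
{
	/* RFC 4493 example key; any 16-byte AES key would do. */
	const unsigned char key[16] = {
		0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6,
		0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c,
	};
	const char msg[] = "sample message";
	unsigned char mac[EVP_MAX_BLOCK_LENGTH];
	size_t maclen, i;
	CMAC_CTX *ctx;

	if ((ctx = CMAC_CTX_new()) == NULL)
		return 1;

	/* The block size comes from the cipher: 16 bytes for AES. */
	if (!CMAC_Init(ctx, key, sizeof(key), EVP_aes_128_cbc(), NULL))
		goto err;
	if (!CMAC_Update(ctx, msg, strlen(msg)))
		goto err;
	/* CMAC_Final() reports the tag length (the block size) via maclen. */
	if (!CMAC_Final(ctx, mac, &maclen))
		goto err;

	for (i = 0; i < maclen; i++)
		printf("%02x", mac[i]);
	printf("\n");

	CMAC_CTX_free(ctx);
	return 0;

 err:
	CMAC_CTX_free(ctx);
	return 1;
}
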
diff --git a/src/lib/libcrypto/cmac/cmac.c b/src/lib/libcrypto/cmac/cmac.c
index 0df40277bc..9fe907609a 100644
--- a/src/lib/libcrypto/cmac/cmac.c
+++ b/src/lib/libcrypto/cmac/cmac.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: cmac.c,v 1.17 2023/12/15 13:45:05 tb Exp $ */
+/* $OpenBSD: cmac.c,v 1.18 2023/12/18 21:15:00 tb Exp $ */
 /* Written by Dr Stephen N Henson (steve@openssl.org) for the OpenSSL
  * project.
  */
@@ -90,21 +90,21 @@ struct CMAC_CTX_st {
  * and R64 = (1 << 64) | 0x1b for the only supported block sizes 128 and 64.
  */
 static void
-make_kn(unsigned char *kn, const unsigned char *l, int bl)
+make_kn(unsigned char *kn, const unsigned char *l, int block_size)
 {
 	unsigned char mask, Rb;
 	int i;
 
 	/* Choose Rb according to the block size in bytes. */
-	Rb = bl == 16 ? 0x87 : 0x1b;
+	Rb = block_size == 16 ? 0x87 : 0x1b;
 
 	/* Compute l << 1 up to last byte. */
-	for (i = 0; i < bl - 1; i++)
+	for (i = 0; i < block_size - 1; i++)
 		kn[i] = (l[i] << 1) | (l[i + 1] >> 7);
 
 	/* Only xor with Rb if the MSB is one. */
 	mask = 0 - (l[0] >> 7);
-	kn[bl - 1] = (l[bl - 1] << 1) ^ (Rb & mask);
+	kn[block_size - 1] = (l[block_size - 1] << 1) ^ (Rb & mask);
 }
 
 CMAC_CTX *
@@ -154,17 +154,17 @@ LCRYPTO_ALIAS(CMAC_CTX_free);
 int
 CMAC_CTX_copy(CMAC_CTX *out, const CMAC_CTX *in)
 {
-	int bl;
+	int block_size;
 
 	if (in->nlast_block == -1)
 		return 0;
 	if (!EVP_CIPHER_CTX_copy(&out->cctx, &in->cctx))
 		return 0;
-	bl = EVP_CIPHER_CTX_block_size(&in->cctx);
-	memcpy(out->k1, in->k1, bl);
-	memcpy(out->k2, in->k2, bl);
-	memcpy(out->tbl, in->tbl, bl);
-	memcpy(out->last_block, in->last_block, bl);
+	block_size = EVP_CIPHER_CTX_block_size(&in->cctx);
+	memcpy(out->k1, in->k1, block_size);
+	memcpy(out->k2, in->k2, block_size);
+	memcpy(out->tbl, in->tbl, block_size);
+	memcpy(out->last_block, in->last_block, block_size);
 	out->nlast_block = in->nlast_block;
 	return 1;
 }
@@ -175,7 +175,7 @@ CMAC_Init(CMAC_CTX *ctx, const void *key, size_t keylen,
     const EVP_CIPHER *cipher, ENGINE *impl)
 {
 	static unsigned char zero_iv[EVP_MAX_BLOCK_LENGTH];
-	int bl;
+	int block_size;
 
 	/* All zeros means restart */
 	if (key == NULL && cipher == NULL && keylen == 0) {
@@ -208,8 +208,8 @@ CMAC_Init(CMAC_CTX *ctx, const void *key, size_t keylen,
 		return 0;
 
 	/* make_kn() only supports block sizes of 8 and 16 bytes. */
-	bl = EVP_CIPHER_CTX_block_size(&ctx->cctx);
-	if (bl != 8 && bl != 16)
+	block_size = EVP_CIPHER_CTX_block_size(&ctx->cctx);
+	if (block_size != 8 && block_size != 16)
 		return 0;
 
 	/*
@@ -220,13 +220,13 @@ CMAC_Init(CMAC_CTX *ctx, const void *key, size_t keylen,
 		return 0;
 	if (!EVP_EncryptInit_ex(&ctx->cctx, NULL, NULL, key, zero_iv))
 		return 0;
-	if (!EVP_Cipher(&ctx->cctx, ctx->tbl, zero_iv, bl))
+	if (!EVP_Cipher(&ctx->cctx, ctx->tbl, zero_iv, block_size))
 		return 0;
 
 	/* Section 6.1, step 2: compute k1 from intermediate secret. */
-	make_kn(ctx->k1, ctx->tbl, bl);
+	make_kn(ctx->k1, ctx->tbl, block_size);
 	/* Section 6.1, step 3: compute k2 from k1. */
-	make_kn(ctx->k2, ctx->k1, bl);
+	make_kn(ctx->k2, ctx->k1, block_size);
 
 	/* Destroy intermediate secret and reset last block count. */
 	explicit_bzero(ctx->tbl, sizeof(ctx->tbl));
@@ -245,18 +245,18 @@ int
 CMAC_Update(CMAC_CTX *ctx, const void *in, size_t dlen)
 {
 	const unsigned char *data = in;
-	size_t bl;
+	size_t block_size;
 
 	if (ctx->nlast_block == -1)
 		return 0;
 	if (dlen == 0)
 		return 1;
-	bl = EVP_CIPHER_CTX_block_size(&ctx->cctx);
+	block_size = EVP_CIPHER_CTX_block_size(&ctx->cctx);
 	/* Copy into partial block if we need to */
 	if (ctx->nlast_block > 0) {
 		size_t nleft;
 
-		nleft = bl - ctx->nlast_block;
+		nleft = block_size - ctx->nlast_block;
 		if (dlen < nleft)
 			nleft = dlen;
 		memcpy(ctx->last_block + ctx->nlast_block, data, nleft);
@@ -267,15 +267,16 @@ CMAC_Update(CMAC_CTX *ctx, const void *in, size_t dlen)
 			return 1;
 		data += nleft;
 		/* Else not final block so encrypt it */
-		if (!EVP_Cipher(&ctx->cctx, ctx->tbl, ctx->last_block, bl))
+		if (!EVP_Cipher(&ctx->cctx, ctx->tbl, ctx->last_block,
+		    block_size))
 			return 0;
 	}
 	/* Encrypt all but one of the complete blocks left */
-	while (dlen > bl) {
-		if (!EVP_Cipher(&ctx->cctx, ctx->tbl, data, bl))
+	while (dlen > block_size) {
+		if (!EVP_Cipher(&ctx->cctx, ctx->tbl, data, block_size))
 			return 0;
-		dlen -= bl;
-		data += bl;
+		dlen -= block_size;
+		data += block_size;
 	}
 	/* Copy any data left to last block buffer */
 	memcpy(ctx->last_block, data, dlen);
@@ -287,28 +288,28 @@ LCRYPTO_ALIAS(CMAC_Update);
 int
 CMAC_Final(CMAC_CTX *ctx, unsigned char *out, size_t *poutlen)
 {
-	int i, bl, lb;
+	int i, block_size, lb;
 
 	if (ctx->nlast_block == -1)
 		return 0;
-	bl = EVP_CIPHER_CTX_block_size(&ctx->cctx);
-	*poutlen = (size_t)bl;
+	block_size = EVP_CIPHER_CTX_block_size(&ctx->cctx);
+	*poutlen = (size_t)block_size;
 	if (!out)
 		return 1;
 	lb = ctx->nlast_block;
 	/* Is last block complete? */
-	if (lb == bl) {
-		for (i = 0; i < bl; i++)
+	if (lb == block_size) {
+		for (i = 0; i < block_size; i++)
 			out[i] = ctx->last_block[i] ^ ctx->k1[i];
 	} else {
 		ctx->last_block[lb] = 0x80;
-		if (bl - lb > 1)
-			memset(ctx->last_block + lb + 1, 0, bl - lb - 1);
-		for (i = 0; i < bl; i++)
+		if (block_size - lb > 1)
+			memset(ctx->last_block + lb + 1, 0, block_size - lb - 1);
+		for (i = 0; i < block_size; i++)
 			out[i] = ctx->last_block[i] ^ ctx->k2[i];
 	}
-	if (!EVP_Cipher(&ctx->cctx, out, out, bl)) {
-		explicit_bzero(out, bl);
+	if (!EVP_Cipher(&ctx->cctx, out, out, block_size)) {
+		explicit_bzero(out, block_size);
 		return 0;
 	}
 	return 1;