Diffstat (limited to 'src/lib/libcrypto/evp/evp_enc.c')
-rw-r--r-- | src/lib/libcrypto/evp/evp_enc.c | 672
1 file changed, 333 insertions, 339 deletions
diff --git a/src/lib/libcrypto/evp/evp_enc.c b/src/lib/libcrypto/evp/evp_enc.c
index e8ca502633..0c3e8fcc39 100644
--- a/src/lib/libcrypto/evp/evp_enc.c
+++ b/src/lib/libcrypto/evp/evp_enc.c
@@ -5,21 +5,21 @@
5 | * This package is an SSL implementation written | 5 | * This package is an SSL implementation written |
6 | * by Eric Young (eay@cryptsoft.com). | 6 | * by Eric Young (eay@cryptsoft.com). |
7 | * The implementation was written so as to conform with Netscapes SSL. | 7 | * The implementation was written so as to conform with Netscapes SSL. |
8 | * | 8 | * |
9 | * This library is free for commercial and non-commercial use as long as | 9 | * This library is free for commercial and non-commercial use as long as |
10 | * the following conditions are aheared to. The following conditions | 10 | * the following conditions are aheared to. The following conditions |
11 | * apply to all code found in this distribution, be it the RC4, RSA, | 11 | * apply to all code found in this distribution, be it the RC4, RSA, |
12 | * lhash, DES, etc., code; not just the SSL code. The SSL documentation | 12 | * lhash, DES, etc., code; not just the SSL code. The SSL documentation |
13 | * included with this distribution is covered by the same copyright terms | 13 | * included with this distribution is covered by the same copyright terms |
14 | * except that the holder is Tim Hudson (tjh@cryptsoft.com). | 14 | * except that the holder is Tim Hudson (tjh@cryptsoft.com). |
15 | * | 15 | * |
16 | * Copyright remains Eric Young's, and as such any Copyright notices in | 16 | * Copyright remains Eric Young's, and as such any Copyright notices in |
17 | * the code are not to be removed. | 17 | * the code are not to be removed. |
18 | * If this package is used in a product, Eric Young should be given attribution | 18 | * If this package is used in a product, Eric Young should be given attribution |
19 | * as the author of the parts of the library used. | 19 | * as the author of the parts of the library used. |
20 | * This can be in the form of a textual message at program startup or | 20 | * This can be in the form of a textual message at program startup or |
21 | * in documentation (online or textual) provided with the package. | 21 | * in documentation (online or textual) provided with the package. |
22 | * | 22 | * |
23 | * Redistribution and use in source and binary forms, with or without | 23 | * Redistribution and use in source and binary forms, with or without |
24 | * modification, are permitted provided that the following conditions | 24 | * modification, are permitted provided that the following conditions |
25 | * are met: | 25 | * are met: |
@@ -34,10 +34,10 @@
34 | * Eric Young (eay@cryptsoft.com)" | 34 | * Eric Young (eay@cryptsoft.com)" |
35 | * The word 'cryptographic' can be left out if the rouines from the library | 35 | * The word 'cryptographic' can be left out if the rouines from the library |
36 | * being used are not cryptographic related :-). | 36 | * being used are not cryptographic related :-). |
37 | * 4. If you include any Windows specific code (or a derivative thereof) from | 37 | * 4. If you include any Windows specific code (or a derivative thereof) from |
38 | * the apps directory (application code) you must include an acknowledgement: | 38 | * the apps directory (application code) you must include an acknowledgement: |
39 | * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" | 39 | * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" |
40 | * | 40 | * |
41 | * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND | 41 | * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND |
42 | * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | 42 | * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
43 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE | 43 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE |
@@ -49,7 +49,7 @@
49 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY | 49 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY |
50 | * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF | 50 | * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF |
51 | * SUCH DAMAGE. | 51 | * SUCH DAMAGE. |
52 | * | 52 | * |
53 | * The licence and distribution terms for any publically available version or | 53 | * The licence and distribution terms for any publically available version or |
54 | * derivative of this code cannot be changed. i.e. this code cannot simply be | 54 | * derivative of this code cannot be changed. i.e. this code cannot simply be |
55 | * copied and put under another distribution licence | 55 | * copied and put under another distribution licence |
@@ -68,498 +68,486 @@
68 | 68 | ||
69 | #define M_do_cipher(ctx, out, in, inl) ctx->cipher->do_cipher(ctx, out, in, inl) | 69 | #define M_do_cipher(ctx, out, in, inl) ctx->cipher->do_cipher(ctx, out, in, inl) |
70 | 70 | ||
71 | const char EVP_version[]="EVP" OPENSSL_VERSION_PTEXT; | 71 | const char EVP_version[] = "EVP" OPENSSL_VERSION_PTEXT; |
72 | 72 | ||
73 | void EVP_CIPHER_CTX_init(EVP_CIPHER_CTX *ctx) | 73 | void |
74 | { | 74 | EVP_CIPHER_CTX_init(EVP_CIPHER_CTX *ctx) |
75 | memset(ctx,0,sizeof(EVP_CIPHER_CTX)); | 75 | { |
76 | memset(ctx, 0, sizeof(EVP_CIPHER_CTX)); | ||
76 | /* ctx->cipher=NULL; */ | 77 | /* ctx->cipher=NULL; */ |
77 | } | 78 | } |
78 | 79 | ||
79 | EVP_CIPHER_CTX *EVP_CIPHER_CTX_new(void) | 80 | EVP_CIPHER_CTX * |
80 | { | 81 | EVP_CIPHER_CTX_new(void) |
81 | EVP_CIPHER_CTX *ctx=malloc(sizeof *ctx); | 82 | { |
83 | EVP_CIPHER_CTX *ctx = malloc(sizeof *ctx); | ||
82 | if (ctx) | 84 | if (ctx) |
83 | EVP_CIPHER_CTX_init(ctx); | 85 | EVP_CIPHER_CTX_init(ctx); |
84 | return ctx; | 86 | return ctx; |
85 | } | 87 | } |
86 | 88 | ||
87 | int EVP_CipherInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, | 89 | int |
88 | const unsigned char *key, const unsigned char *iv, int enc) | 90 | EVP_CipherInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, |
89 | { | 91 | const unsigned char *key, const unsigned char *iv, int enc) |
92 | { | ||
90 | if (cipher) | 93 | if (cipher) |
91 | EVP_CIPHER_CTX_init(ctx); | 94 | EVP_CIPHER_CTX_init(ctx); |
92 | return EVP_CipherInit_ex(ctx,cipher,NULL,key,iv,enc); | 95 | return EVP_CipherInit_ex(ctx, cipher, NULL, key, iv, enc); |
93 | } | 96 | } |
94 | 97 | ||
95 | int EVP_CipherInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, ENGINE *impl, | 98 | int |
96 | const unsigned char *key, const unsigned char *iv, int enc) | 99 | EVP_CipherInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, ENGINE *impl, |
97 | { | 100 | const unsigned char *key, const unsigned char *iv, int enc) |
101 | { | ||
98 | if (enc == -1) | 102 | if (enc == -1) |
99 | enc = ctx->encrypt; | 103 | enc = ctx->encrypt; |
100 | else | 104 | else { |
101 | { | ||
102 | if (enc) | 105 | if (enc) |
103 | enc = 1; | 106 | enc = 1; |
104 | ctx->encrypt = enc; | 107 | ctx->encrypt = enc; |
105 | } | 108 | } |
106 | #ifndef OPENSSL_NO_ENGINE | 109 | #ifndef OPENSSL_NO_ENGINE |
107 | /* Whether it's nice or not, "Inits" can be used on "Final"'d contexts | 110 | /* Whether it's nice or not, "Inits" can be used on "Final"'d contexts |
108 | * so this context may already have an ENGINE! Try to avoid releasing | 111 | * so this context may already have an ENGINE! Try to avoid releasing |
109 | * the previous handle, re-querying for an ENGINE, and having a | 112 | * the previous handle, re-querying for an ENGINE, and having a |
110 | * reinitialisation, when it may all be unecessary. */ | 113 | * reinitialisation, when it may all be unecessary. */ |
111 | if (ctx->engine && ctx->cipher && (!cipher || | 114 | if (ctx->engine && ctx->cipher && |
112 | (cipher && (cipher->nid == ctx->cipher->nid)))) | 115 | (!cipher || (cipher && (cipher->nid == ctx->cipher->nid)))) |
113 | goto skip_to_init; | 116 | goto skip_to_init; |
114 | #endif | 117 | #endif |
115 | if (cipher) | 118 | if (cipher) { |
116 | { | ||
117 | /* Ensure a context left lying around from last time is cleared | 119 | /* Ensure a context left lying around from last time is cleared |
118 | * (the previous check attempted to avoid this if the same | 120 | * (the previous check attempted to avoid this if the same |
119 | * ENGINE and EVP_CIPHER could be used). */ | 121 | * ENGINE and EVP_CIPHER could be used). */ |
120 | if (ctx->cipher) | 122 | if (ctx->cipher) { |
121 | { | ||
122 | unsigned long flags = ctx->flags; | 123 | unsigned long flags = ctx->flags; |
123 | EVP_CIPHER_CTX_cleanup(ctx); | 124 | EVP_CIPHER_CTX_cleanup(ctx); |
124 | /* Restore encrypt and flags */ | 125 | /* Restore encrypt and flags */ |
125 | ctx->encrypt = enc; | 126 | ctx->encrypt = enc; |
126 | ctx->flags = flags; | 127 | ctx->flags = flags; |
127 | } | 128 | } |
128 | #ifndef OPENSSL_NO_ENGINE | 129 | #ifndef OPENSSL_NO_ENGINE |
129 | if(impl) | 130 | if (impl) { |
130 | { | 131 | if (!ENGINE_init(impl)) { |
131 | if (!ENGINE_init(impl)) | 132 | EVPerr(EVP_F_EVP_CIPHERINIT_EX, |
132 | { | 133 | EVP_R_INITIALIZATION_ERROR); |
133 | EVPerr(EVP_F_EVP_CIPHERINIT_EX, EVP_R_INITIALIZATION_ERROR); | ||
134 | return 0; | 134 | return 0; |
135 | } | ||
136 | } | 135 | } |
137 | else | 136 | } else |
138 | /* Ask if an ENGINE is reserved for this job */ | 137 | /* Ask if an ENGINE is reserved for this job */ |
139 | impl = ENGINE_get_cipher_engine(cipher->nid); | 138 | impl = ENGINE_get_cipher_engine(cipher->nid); |
140 | if(impl) | 139 | if (impl) { |
141 | { | ||
142 | /* There's an ENGINE for this job ... (apparently) */ | 140 | /* There's an ENGINE for this job ... (apparently) */ |
143 | const EVP_CIPHER *c = ENGINE_get_cipher(impl, cipher->nid); | 141 | const EVP_CIPHER *c = |
144 | if(!c) | 142 | ENGINE_get_cipher(impl, cipher->nid); |
145 | { | 143 | if (!c) { |
146 | /* One positive side-effect of US's export | 144 | /* One positive side-effect of US's export |
147 | * control history, is that we should at least | 145 | * control history, is that we should at least |
148 | * be able to avoid using US mispellings of | 146 | * be able to avoid using US mispellings of |
149 | * "initialisation"? */ | 147 | * "initialisation"? */ |
150 | EVPerr(EVP_F_EVP_CIPHERINIT_EX, EVP_R_INITIALIZATION_ERROR); | 148 | EVPerr(EVP_F_EVP_CIPHERINIT_EX, |
149 | EVP_R_INITIALIZATION_ERROR); | ||
151 | return 0; | 150 | return 0; |
152 | } | 151 | } |
153 | /* We'll use the ENGINE's private cipher definition */ | 152 | /* We'll use the ENGINE's private cipher definition */ |
154 | cipher = c; | 153 | cipher = c; |
155 | /* Store the ENGINE functional reference so we know | 154 | /* Store the ENGINE functional reference so we know |
156 | * 'cipher' came from an ENGINE and we need to release | 155 | * 'cipher' came from an ENGINE and we need to release |
157 | * it when done. */ | 156 | * it when done. */ |
158 | ctx->engine = impl; | 157 | ctx->engine = impl; |
159 | } | 158 | } else |
160 | else | ||
161 | ctx->engine = NULL; | 159 | ctx->engine = NULL; |
162 | #endif | 160 | #endif |
163 | 161 | ||
164 | ctx->cipher=cipher; | 162 | ctx->cipher = cipher; |
165 | if (ctx->cipher->ctx_size) | 163 | if (ctx->cipher->ctx_size) { |
166 | { | 164 | ctx->cipher_data = malloc(ctx->cipher->ctx_size); |
167 | ctx->cipher_data=malloc(ctx->cipher->ctx_size); | 165 | if (!ctx->cipher_data) { |
168 | if (!ctx->cipher_data) | 166 | EVPerr(EVP_F_EVP_CIPHERINIT_EX, |
169 | { | 167 | ERR_R_MALLOC_FAILURE); |
170 | EVPerr(EVP_F_EVP_CIPHERINIT_EX, ERR_R_MALLOC_FAILURE); | ||
171 | return 0; | 168 | return 0; |
172 | } | ||
173 | } | 169 | } |
174 | else | 170 | } else { |
175 | { | ||
176 | ctx->cipher_data = NULL; | 171 | ctx->cipher_data = NULL; |
177 | } | 172 | } |
178 | ctx->key_len = cipher->key_len; | 173 | ctx->key_len = cipher->key_len; |
179 | ctx->flags = 0; | 174 | ctx->flags = 0; |
180 | if(ctx->cipher->flags & EVP_CIPH_CTRL_INIT) | 175 | if (ctx->cipher->flags & EVP_CIPH_CTRL_INIT) { |
181 | { | 176 | if (!EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_INIT, 0, NULL)) { |
182 | if(!EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_INIT, 0, NULL)) | 177 | EVPerr(EVP_F_EVP_CIPHERINIT_EX, |
183 | { | 178 | EVP_R_INITIALIZATION_ERROR); |
184 | EVPerr(EVP_F_EVP_CIPHERINIT_EX, EVP_R_INITIALIZATION_ERROR); | ||
185 | return 0; | 179 | return 0; |
186 | } | ||
187 | } | 180 | } |
188 | } | 181 | } |
189 | else if(!ctx->cipher) | 182 | } else if (!ctx->cipher) { |
190 | { | ||
191 | EVPerr(EVP_F_EVP_CIPHERINIT_EX, EVP_R_NO_CIPHER_SET); | 183 | EVPerr(EVP_F_EVP_CIPHERINIT_EX, EVP_R_NO_CIPHER_SET); |
192 | return 0; | 184 | return 0; |
193 | } | 185 | } |
194 | #ifndef OPENSSL_NO_ENGINE | 186 | #ifndef OPENSSL_NO_ENGINE |
195 | skip_to_init: | 187 | skip_to_init: |
196 | #endif | 188 | #endif |
197 | /* we assume block size is a power of 2 in *cryptUpdate */ | 189 | /* we assume block size is a power of 2 in *cryptUpdate */ |
198 | OPENSSL_assert(ctx->cipher->block_size == 1 | 190 | OPENSSL_assert(ctx->cipher->block_size == 1 || |
199 | || ctx->cipher->block_size == 8 | 191 | ctx->cipher->block_size == 8 || |
200 | || ctx->cipher->block_size == 16); | 192 | ctx->cipher->block_size == 16); |
201 | 193 | ||
202 | if(!(EVP_CIPHER_CTX_flags(ctx) & EVP_CIPH_CUSTOM_IV)) { | 194 | if (!(EVP_CIPHER_CTX_flags(ctx) & EVP_CIPH_CUSTOM_IV)) { |
203 | switch(EVP_CIPHER_CTX_mode(ctx)) { | 195 | switch (EVP_CIPHER_CTX_mode(ctx)) { |
204 | 196 | ||
205 | case EVP_CIPH_STREAM_CIPHER: | 197 | case EVP_CIPH_STREAM_CIPHER: |
206 | case EVP_CIPH_ECB_MODE: | 198 | case EVP_CIPH_ECB_MODE: |
207 | break; | 199 | break; |
208 | 200 | ||
209 | case EVP_CIPH_CFB_MODE: | 201 | case EVP_CIPH_CFB_MODE: |
210 | case EVP_CIPH_OFB_MODE: | 202 | case EVP_CIPH_OFB_MODE: |
211 | 203 | ||
212 | ctx->num = 0; | 204 | ctx->num = 0; |
213 | /* fall-through */ | 205 | /* fall-through */ |
214 | 206 | ||
215 | case EVP_CIPH_CBC_MODE: | 207 | case EVP_CIPH_CBC_MODE: |
216 | 208 | ||
217 | OPENSSL_assert(EVP_CIPHER_CTX_iv_length(ctx) <= | 209 | OPENSSL_assert(EVP_CIPHER_CTX_iv_length(ctx) <= |
218 | (int)sizeof(ctx->iv)); | 210 | (int)sizeof(ctx->iv)); |
219 | if(iv) memcpy(ctx->oiv, iv, EVP_CIPHER_CTX_iv_length(ctx)); | 211 | if (iv) |
220 | memcpy(ctx->iv, ctx->oiv, EVP_CIPHER_CTX_iv_length(ctx)); | 212 | memcpy(ctx->oiv, iv, |
213 | EVP_CIPHER_CTX_iv_length(ctx)); | ||
214 | memcpy(ctx->iv, ctx->oiv, | ||
215 | EVP_CIPHER_CTX_iv_length(ctx)); | ||
221 | break; | 216 | break; |
222 | 217 | ||
223 | case EVP_CIPH_CTR_MODE: | 218 | case EVP_CIPH_CTR_MODE: |
224 | ctx->num = 0; | 219 | ctx->num = 0; |
225 | /* Don't reuse IV for CTR mode */ | 220 | /* Don't reuse IV for CTR mode */ |
226 | if(iv) | 221 | if (iv) |
227 | memcpy(ctx->iv, iv, EVP_CIPHER_CTX_iv_length(ctx)); | 222 | memcpy(ctx->iv, iv, |
223 | EVP_CIPHER_CTX_iv_length(ctx)); | ||
228 | break; | 224 | break; |
229 | 225 | ||
230 | default: | 226 | default: |
231 | return 0; | 227 | return 0; |
232 | break; | 228 | break; |
233 | } | 229 | } |
234 | } | 230 | } |
235 | 231 | ||
236 | if(key || (ctx->cipher->flags & EVP_CIPH_ALWAYS_CALL_INIT)) { | 232 | if (key || (ctx->cipher->flags & EVP_CIPH_ALWAYS_CALL_INIT)) { |
237 | if(!ctx->cipher->init(ctx,key,iv,enc)) return 0; | 233 | if (!ctx->cipher->init(ctx, key, iv, enc)) |
234 | return 0; | ||
238 | } | 235 | } |
239 | ctx->buf_len=0; | 236 | ctx->buf_len = 0; |
240 | ctx->final_used=0; | 237 | ctx->final_used = 0; |
241 | ctx->block_mask=ctx->cipher->block_size-1; | 238 | ctx->block_mask = ctx->cipher->block_size - 1; |
242 | return 1; | 239 | return 1; |
243 | } | 240 | } |
244 | 241 | ||
245 | int EVP_CipherUpdate(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl, | 242 | int |
246 | const unsigned char *in, int inl) | 243 | EVP_CipherUpdate(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl, |
247 | { | 244 | const unsigned char *in, int inl) |
245 | { | ||
248 | if (ctx->encrypt) | 246 | if (ctx->encrypt) |
249 | return EVP_EncryptUpdate(ctx,out,outl,in,inl); | 247 | return EVP_EncryptUpdate(ctx, out, outl, in, inl); |
250 | else return EVP_DecryptUpdate(ctx,out,outl,in,inl); | 248 | else |
251 | } | 249 | return EVP_DecryptUpdate(ctx, out, outl, in, inl); |
250 | } | ||
252 | 251 | ||
253 | int EVP_CipherFinal_ex(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) | 252 | int |
254 | { | 253 | EVP_CipherFinal_ex(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) |
254 | { | ||
255 | if (ctx->encrypt) | 255 | if (ctx->encrypt) |
256 | return EVP_EncryptFinal_ex(ctx,out,outl); | 256 | return EVP_EncryptFinal_ex(ctx, out, outl); |
257 | else return EVP_DecryptFinal_ex(ctx,out,outl); | 257 | else |
258 | } | 258 | return EVP_DecryptFinal_ex(ctx, out, outl); |
259 | } | ||
259 | 260 | ||
260 | int EVP_CipherFinal(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) | 261 | int |
261 | { | 262 | EVP_CipherFinal(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) |
263 | { | ||
262 | if (ctx->encrypt) | 264 | if (ctx->encrypt) |
263 | return EVP_EncryptFinal(ctx,out,outl); | 265 | return EVP_EncryptFinal(ctx, out, outl); |
264 | else return EVP_DecryptFinal(ctx,out,outl); | 266 | else |
265 | } | 267 | return EVP_DecryptFinal(ctx, out, outl); |
268 | } | ||
266 | 269 | ||
267 | int EVP_EncryptInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, | 270 | int |
268 | const unsigned char *key, const unsigned char *iv) | 271 | EVP_EncryptInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, |
269 | { | 272 | const unsigned char *key, const unsigned char *iv) |
273 | { | ||
270 | return EVP_CipherInit(ctx, cipher, key, iv, 1); | 274 | return EVP_CipherInit(ctx, cipher, key, iv, 1); |
271 | } | 275 | } |
272 | 276 | ||
273 | int EVP_EncryptInit_ex(EVP_CIPHER_CTX *ctx,const EVP_CIPHER *cipher, ENGINE *impl, | 277 | int |
274 | const unsigned char *key, const unsigned char *iv) | 278 | EVP_EncryptInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, ENGINE *impl, |
275 | { | 279 | const unsigned char *key, const unsigned char *iv) |
280 | { | ||
276 | return EVP_CipherInit_ex(ctx, cipher, impl, key, iv, 1); | 281 | return EVP_CipherInit_ex(ctx, cipher, impl, key, iv, 1); |
277 | } | 282 | } |
278 | 283 | ||
279 | int EVP_DecryptInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, | 284 | int |
280 | const unsigned char *key, const unsigned char *iv) | 285 | EVP_DecryptInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, |
281 | { | 286 | const unsigned char *key, const unsigned char *iv) |
287 | { | ||
282 | return EVP_CipherInit(ctx, cipher, key, iv, 0); | 288 | return EVP_CipherInit(ctx, cipher, key, iv, 0); |
283 | } | 289 | } |
284 | 290 | ||
285 | int EVP_DecryptInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, ENGINE *impl, | 291 | int |
286 | const unsigned char *key, const unsigned char *iv) | 292 | EVP_DecryptInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, ENGINE *impl, |
287 | { | 293 | const unsigned char *key, const unsigned char *iv) |
294 | { | ||
288 | return EVP_CipherInit_ex(ctx, cipher, impl, key, iv, 0); | 295 | return EVP_CipherInit_ex(ctx, cipher, impl, key, iv, 0); |
289 | } | 296 | } |
290 | 297 | ||
291 | int EVP_EncryptUpdate(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl, | 298 | int |
292 | const unsigned char *in, int inl) | 299 | EVP_EncryptUpdate(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl, |
293 | { | 300 | const unsigned char *in, int inl) |
294 | int i,j,bl; | 301 | { |
302 | int i, j, bl; | ||
295 | 303 | ||
296 | if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) | 304 | if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) { |
297 | { | ||
298 | i = M_do_cipher(ctx, out, in, inl); | 305 | i = M_do_cipher(ctx, out, in, inl); |
299 | if (i < 0) | 306 | if (i < 0) |
300 | return 0; | 307 | return 0; |
301 | else | 308 | else |
302 | *outl = i; | 309 | *outl = i; |
303 | return 1; | 310 | return 1; |
304 | } | 311 | } |
305 | 312 | ||
306 | if (inl <= 0) | 313 | if (inl <= 0) { |
307 | { | ||
308 | *outl = 0; | 314 | *outl = 0; |
309 | return inl == 0; | 315 | return inl == 0; |
310 | } | 316 | } |
311 | 317 | ||
312 | if(ctx->buf_len == 0 && (inl&(ctx->block_mask)) == 0) | 318 | if (ctx->buf_len == 0 && (inl&(ctx->block_mask)) == 0) { |
313 | { | 319 | if (M_do_cipher(ctx, out, in, inl)) { |
314 | if(M_do_cipher(ctx,out,in,inl)) | 320 | *outl = inl; |
315 | { | ||
316 | *outl=inl; | ||
317 | return 1; | 321 | return 1; |
318 | } | 322 | } else { |
319 | else | 323 | *outl = 0; |
320 | { | ||
321 | *outl=0; | ||
322 | return 0; | 324 | return 0; |
323 | } | ||
324 | } | 325 | } |
325 | i=ctx->buf_len; | 326 | } |
326 | bl=ctx->cipher->block_size; | 327 | i = ctx->buf_len; |
328 | bl = ctx->cipher->block_size; | ||
327 | OPENSSL_assert(bl <= (int)sizeof(ctx->buf)); | 329 | OPENSSL_assert(bl <= (int)sizeof(ctx->buf)); |
328 | if (i != 0) | 330 | if (i != 0) { |
329 | { | 331 | if (i + inl < bl) { |
330 | if (i+inl < bl) | 332 | memcpy(&(ctx->buf[i]), in, inl); |
331 | { | 333 | ctx->buf_len += inl; |
332 | memcpy(&(ctx->buf[i]),in,inl); | 334 | *outl = 0; |
333 | ctx->buf_len+=inl; | ||
334 | *outl=0; | ||
335 | return 1; | 335 | return 1; |
336 | } | 336 | } else { |
337 | else | 337 | j = bl - i; |
338 | { | 338 | memcpy(&(ctx->buf[i]), in, j); |
339 | j=bl-i; | 339 | if (!M_do_cipher(ctx, out, ctx->buf, bl)) |
340 | memcpy(&(ctx->buf[i]),in,j); | 340 | return 0; |
341 | if(!M_do_cipher(ctx,out,ctx->buf,bl)) return 0; | 341 | inl -= j; |
342 | inl-=j; | 342 | in += j; |
343 | in+=j; | 343 | out += bl; |
344 | out+=bl; | 344 | *outl = bl; |
345 | *outl=bl; | ||
346 | } | ||
347 | } | 345 | } |
348 | else | 346 | } else |
349 | *outl = 0; | 347 | *outl = 0; |
350 | i=inl&(bl-1); | 348 | i = inl&(bl - 1); |
351 | inl-=i; | 349 | inl -= i; |
352 | if (inl > 0) | 350 | if (inl > 0) { |
353 | { | 351 | if (!M_do_cipher(ctx, out, in, inl)) |
354 | if(!M_do_cipher(ctx,out,in,inl)) return 0; | 352 | return 0; |
355 | *outl+=inl; | 353 | *outl += inl; |
356 | } | 354 | } |
357 | 355 | ||
358 | if (i != 0) | 356 | if (i != 0) |
359 | memcpy(ctx->buf,&(in[inl]),i); | 357 | memcpy(ctx->buf, &(in[inl]), i); |
360 | ctx->buf_len=i; | 358 | ctx->buf_len = i; |
361 | return 1; | 359 | return 1; |
362 | } | 360 | } |
363 | 361 | ||
364 | int EVP_EncryptFinal(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) | 362 | int |
365 | { | 363 | EVP_EncryptFinal(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) |
364 | { | ||
366 | int ret; | 365 | int ret; |
366 | |||
367 | ret = EVP_EncryptFinal_ex(ctx, out, outl); | 367 | ret = EVP_EncryptFinal_ex(ctx, out, outl); |
368 | return ret; | 368 | return ret; |
369 | } | 369 | } |
370 | 370 | ||
371 | int EVP_EncryptFinal_ex(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) | 371 | int |
372 | { | 372 | EVP_EncryptFinal_ex(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) |
373 | int n,ret; | 373 | { |
374 | int n, ret; | ||
374 | unsigned int i, b, bl; | 375 | unsigned int i, b, bl; |
375 | 376 | ||
376 | if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) | 377 | if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) { |
377 | { | ||
378 | ret = M_do_cipher(ctx, out, NULL, 0); | 378 | ret = M_do_cipher(ctx, out, NULL, 0); |
379 | if (ret < 0) | 379 | if (ret < 0) |
380 | return 0; | 380 | return 0; |
381 | else | 381 | else |
382 | *outl = ret; | 382 | *outl = ret; |
383 | return 1; | 383 | return 1; |
384 | } | 384 | } |
385 | 385 | ||
386 | b=ctx->cipher->block_size; | 386 | b = ctx->cipher->block_size; |
387 | OPENSSL_assert(b <= sizeof ctx->buf); | 387 | OPENSSL_assert(b <= sizeof ctx->buf); |
388 | if (b == 1) | 388 | if (b == 1) { |
389 | { | 389 | *outl = 0; |
390 | *outl=0; | ||
391 | return 1; | 390 | return 1; |
392 | } | 391 | } |
393 | bl=ctx->buf_len; | 392 | bl = ctx->buf_len; |
394 | if (ctx->flags & EVP_CIPH_NO_PADDING) | 393 | if (ctx->flags & EVP_CIPH_NO_PADDING) { |
395 | { | 394 | if (bl) { |
396 | if(bl) | 395 | EVPerr(EVP_F_EVP_ENCRYPTFINAL_EX, |
397 | { | 396 | EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH); |
398 | EVPerr(EVP_F_EVP_ENCRYPTFINAL_EX,EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH); | ||
399 | return 0; | 397 | return 0; |
400 | } | 398 | } |
401 | *outl = 0; | 399 | *outl = 0; |
402 | return 1; | 400 | return 1; |
403 | } | 401 | } |
404 | 402 | ||
405 | n=b-bl; | 403 | n = b - bl; |
406 | for (i=bl; i<b; i++) | 404 | for (i = bl; i < b; i++) |
407 | ctx->buf[i]=n; | 405 | ctx->buf[i] = n; |
408 | ret=M_do_cipher(ctx,out,ctx->buf,b); | 406 | ret = M_do_cipher(ctx, out, ctx->buf, b); |
409 | 407 | ||
410 | 408 | ||
411 | if(ret) | 409 | if (ret) |
412 | *outl=b; | 410 | *outl = b; |
413 | 411 | ||
414 | return ret; | 412 | return ret; |
415 | } | 413 | } |
416 | 414 | ||
417 | int EVP_DecryptUpdate(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl, | 415 | int |
418 | const unsigned char *in, int inl) | 416 | EVP_DecryptUpdate(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl, |
419 | { | 417 | const unsigned char *in, int inl) |
418 | { | ||
420 | int fix_len; | 419 | int fix_len; |
421 | unsigned int b; | 420 | unsigned int b; |
422 | 421 | ||
423 | if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) | 422 | if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) { |
424 | { | ||
425 | fix_len = M_do_cipher(ctx, out, in, inl); | 423 | fix_len = M_do_cipher(ctx, out, in, inl); |
426 | if (fix_len < 0) | 424 | if (fix_len < 0) { |
427 | { | ||
428 | *outl = 0; | 425 | *outl = 0; |
429 | return 0; | 426 | return 0; |
430 | } | 427 | } else |
431 | else | ||
432 | *outl = fix_len; | 428 | *outl = fix_len; |
433 | return 1; | 429 | return 1; |
434 | } | 430 | } |
435 | 431 | ||
436 | if (inl <= 0) | 432 | if (inl <= 0) { |
437 | { | ||
438 | *outl = 0; | 433 | *outl = 0; |
439 | return inl == 0; | 434 | return inl == 0; |
440 | } | 435 | } |
441 | 436 | ||
442 | if (ctx->flags & EVP_CIPH_NO_PADDING) | 437 | if (ctx->flags & EVP_CIPH_NO_PADDING) |
443 | return EVP_EncryptUpdate(ctx, out, outl, in, inl); | 438 | return EVP_EncryptUpdate(ctx, out, outl, in, inl); |
444 | 439 | ||
445 | b=ctx->cipher->block_size; | 440 | b = ctx->cipher->block_size; |
446 | OPENSSL_assert(b <= sizeof ctx->final); | 441 | OPENSSL_assert(b <= sizeof ctx->final); |
447 | 442 | ||
448 | if(ctx->final_used) | 443 | if (ctx->final_used) { |
449 | { | 444 | memcpy(out, ctx->final, b); |
450 | memcpy(out,ctx->final,b); | 445 | out += b; |
451 | out+=b; | ||
452 | fix_len = 1; | 446 | fix_len = 1; |
453 | } | 447 | } else |
454 | else | ||
455 | fix_len = 0; | 448 | fix_len = 0; |
456 | 449 | ||
457 | 450 | ||
458 | if(!EVP_EncryptUpdate(ctx,out,outl,in,inl)) | 451 | if (!EVP_EncryptUpdate(ctx, out, outl, in, inl)) |
459 | return 0; | 452 | return 0; |
460 | 453 | ||
461 | /* if we have 'decrypted' a multiple of block size, make sure | 454 | /* if we have 'decrypted' a multiple of block size, make sure |
462 | * we have a copy of this last block */ | 455 | * we have a copy of this last block */ |
463 | if (b > 1 && !ctx->buf_len) | 456 | if (b > 1 && !ctx->buf_len) { |
464 | { | 457 | *outl -= b; |
465 | *outl-=b; | 458 | ctx->final_used = 1; |
466 | ctx->final_used=1; | 459 | memcpy(ctx->final, &out[*outl], b); |
467 | memcpy(ctx->final,&out[*outl],b); | 460 | } else |
468 | } | ||
469 | else | ||
470 | ctx->final_used = 0; | 461 | ctx->final_used = 0; |
471 | 462 | ||
472 | if (fix_len) | 463 | if (fix_len) |
473 | *outl += b; | 464 | *outl += b; |
474 | 465 | ||
475 | return 1; | 466 | return 1; |
476 | } | 467 | } |
477 | 468 | ||
478 | int EVP_DecryptFinal(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) | 469 | int |
479 | { | 470 | EVP_DecryptFinal(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) |
471 | { | ||
480 | int ret; | 472 | int ret; |
473 | |||
481 | ret = EVP_DecryptFinal_ex(ctx, out, outl); | 474 | ret = EVP_DecryptFinal_ex(ctx, out, outl); |
482 | return ret; | 475 | return ret; |
483 | } | 476 | } |
484 | 477 | ||
485 | int EVP_DecryptFinal_ex(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) | 478 | int |
486 | { | 479 | EVP_DecryptFinal_ex(EVP_CIPHER_CTX *ctx, unsigned char *out, int *outl) |
487 | int i,n; | 480 | { |
481 | int i, n; | ||
488 | unsigned int b; | 482 | unsigned int b; |
489 | *outl=0; | 483 | *outl = 0; |
490 | 484 | ||
491 | if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) | 485 | if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) { |
492 | { | ||
493 | i = M_do_cipher(ctx, out, NULL, 0); | 486 | i = M_do_cipher(ctx, out, NULL, 0); |
494 | if (i < 0) | 487 | if (i < 0) |
495 | return 0; | 488 | return 0; |
496 | else | 489 | else |
497 | *outl = i; | 490 | *outl = i; |
498 | return 1; | 491 | return 1; |
499 | } | 492 | } |
500 | 493 | ||
501 | b=ctx->cipher->block_size; | 494 | b = ctx->cipher->block_size; |
502 | if (ctx->flags & EVP_CIPH_NO_PADDING) | 495 | if (ctx->flags & EVP_CIPH_NO_PADDING) { |
503 | { | 496 | if (ctx->buf_len) { |
504 | if(ctx->buf_len) | 497 | EVPerr(EVP_F_EVP_DECRYPTFINAL_EX, |
505 | { | 498 | EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH); |
506 | EVPerr(EVP_F_EVP_DECRYPTFINAL_EX,EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH); | ||
507 | return 0; | 499 | return 0; |
508 | } | 500 | } |
509 | *outl = 0; | 501 | *outl = 0; |
510 | return 1; | 502 | return 1; |
503 | } | ||
504 | if (b > 1) { | ||
505 | if (ctx->buf_len || !ctx->final_used) { | ||
506 | EVPerr(EVP_F_EVP_DECRYPTFINAL_EX, | ||
507 | EVP_R_WRONG_FINAL_BLOCK_LENGTH); | ||
508 | return (0); | ||
511 | } | 509 | } |
512 | if (b > 1) | ||
513 | { | ||
514 | if (ctx->buf_len || !ctx->final_used) | ||
515 | { | ||
516 | EVPerr(EVP_F_EVP_DECRYPTFINAL_EX,EVP_R_WRONG_FINAL_BLOCK_LENGTH); | ||
517 | return(0); | ||
518 | } | ||
519 | OPENSSL_assert(b <= sizeof ctx->final); | 510 | OPENSSL_assert(b <= sizeof ctx->final); |
520 | n=ctx->final[b-1]; | 511 | n = ctx->final[b - 1]; |
521 | if (n == 0 || n > (int)b) | 512 | if (n == 0 || n > (int)b) { |
522 | { | 513 | EVPerr(EVP_F_EVP_DECRYPTFINAL_EX, EVP_R_BAD_DECRYPT); |
523 | EVPerr(EVP_F_EVP_DECRYPTFINAL_EX,EVP_R_BAD_DECRYPT); | 514 | return (0); |
524 | return(0); | 515 | } |
525 | } | 516 | for (i = 0; i < n; i++) { |
526 | for (i=0; i<n; i++) | 517 | if (ctx->final[--b] != n) { |
527 | { | 518 | EVPerr(EVP_F_EVP_DECRYPTFINAL_EX, |
528 | if (ctx->final[--b] != n) | 519 | EVP_R_BAD_DECRYPT); |
529 | { | 520 | return (0); |
530 | EVPerr(EVP_F_EVP_DECRYPTFINAL_EX,EVP_R_BAD_DECRYPT); | ||
531 | return(0); | ||
532 | } | ||
533 | } | 521 | } |
534 | n=ctx->cipher->block_size-n; | ||
535 | for (i=0; i<n; i++) | ||
536 | out[i]=ctx->final[i]; | ||
537 | *outl=n; | ||
538 | } | 522 | } |
539 | else | 523 | n = ctx->cipher->block_size - n; |
540 | *outl=0; | 524 | for (i = 0; i < n; i++) |
541 | return(1); | 525 | out[i] = ctx->final[i]; |
542 | } | 526 | *outl = n; |
527 | } else | ||
528 | *outl = 0; | ||
529 | return (1); | ||
530 | } | ||
543 | 531 | ||
544 | void EVP_CIPHER_CTX_free(EVP_CIPHER_CTX *ctx) | 532 | void |
545 | { | 533 | EVP_CIPHER_CTX_free(EVP_CIPHER_CTX *ctx) |
546 | if (ctx) | 534 | { |
547 | { | 535 | if (ctx) { |
548 | EVP_CIPHER_CTX_cleanup(ctx); | 536 | EVP_CIPHER_CTX_cleanup(ctx); |
549 | free(ctx); | 537 | free(ctx); |
550 | } | ||
551 | } | 538 | } |
539 | } | ||
552 | 540 | ||
553 | int EVP_CIPHER_CTX_cleanup(EVP_CIPHER_CTX *c) | 541 | int |
554 | { | 542 | EVP_CIPHER_CTX_cleanup(EVP_CIPHER_CTX *c) |
555 | if (c->cipher != NULL) | 543 | { |
556 | { | 544 | if (c->cipher != NULL) { |
557 | if(c->cipher->cleanup && !c->cipher->cleanup(c)) | 545 | if (c->cipher->cleanup && !c->cipher->cleanup(c)) |
558 | return 0; | 546 | return 0; |
559 | /* Cleanse cipher context data */ | 547 | /* Cleanse cipher context data */ |
560 | if (c->cipher_data) | 548 | if (c->cipher_data) |
561 | OPENSSL_cleanse(c->cipher_data, c->cipher->ctx_size); | 549 | OPENSSL_cleanse(c->cipher_data, c->cipher->ctx_size); |
562 | } | 550 | } |
563 | if (c->cipher_data) | 551 | if (c->cipher_data) |
564 | free(c->cipher_data); | 552 | free(c->cipher_data); |
565 | #ifndef OPENSSL_NO_ENGINE | 553 | #ifndef OPENSSL_NO_ENGINE |
@@ -568,93 +556,99 @@ int EVP_CIPHER_CTX_cleanup(EVP_CIPHER_CTX *c)
568 | * functional reference we held for this reason. */ | 556 | * functional reference we held for this reason. */ |
569 | ENGINE_finish(c->engine); | 557 | ENGINE_finish(c->engine); |
570 | #endif | 558 | #endif |
571 | memset(c,0,sizeof(EVP_CIPHER_CTX)); | 559 | memset(c, 0, sizeof(EVP_CIPHER_CTX)); |
572 | return 1; | 560 | return 1; |
573 | } | 561 | } |
574 | 562 | ||
575 | int EVP_CIPHER_CTX_set_key_length(EVP_CIPHER_CTX *c, int keylen) | 563 | int |
576 | { | 564 | EVP_CIPHER_CTX_set_key_length(EVP_CIPHER_CTX *c, int keylen) |
577 | if(c->cipher->flags & EVP_CIPH_CUSTOM_KEY_LENGTH) | 565 | { |
578 | return EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_SET_KEY_LENGTH, keylen, NULL); | 566 | if (c->cipher->flags & EVP_CIPH_CUSTOM_KEY_LENGTH) |
579 | if(c->key_len == keylen) return 1; | 567 | return EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_SET_KEY_LENGTH, |
580 | if((keylen > 0) && (c->cipher->flags & EVP_CIPH_VARIABLE_LENGTH)) | 568 | keylen, NULL); |
581 | { | 569 | if (c->key_len == keylen) |
570 | return 1; | ||
571 | if ((keylen > 0) && (c->cipher->flags & EVP_CIPH_VARIABLE_LENGTH)) { | ||
582 | c->key_len = keylen; | 572 | c->key_len = keylen; |
583 | return 1; | 573 | return 1; |
584 | } | ||
585 | EVPerr(EVP_F_EVP_CIPHER_CTX_SET_KEY_LENGTH,EVP_R_INVALID_KEY_LENGTH); | ||
586 | return 0; | ||
587 | } | 574 | } |
575 | EVPerr(EVP_F_EVP_CIPHER_CTX_SET_KEY_LENGTH, EVP_R_INVALID_KEY_LENGTH); | ||
576 | return 0; | ||
577 | } | ||
588 | 578 | ||
589 | int EVP_CIPHER_CTX_set_padding(EVP_CIPHER_CTX *ctx, int pad) | 579 | int |
590 | { | 580 | EVP_CIPHER_CTX_set_padding(EVP_CIPHER_CTX *ctx, int pad) |
591 | if (pad) ctx->flags &= ~EVP_CIPH_NO_PADDING; | 581 | { |
592 | else ctx->flags |= EVP_CIPH_NO_PADDING; | 582 | if (pad) |
583 | ctx->flags &= ~EVP_CIPH_NO_PADDING; | ||
584 | else | ||
585 | ctx->flags |= EVP_CIPH_NO_PADDING; | ||
593 | return 1; | 586 | return 1; |
594 | } | 587 | } |
595 | 588 | ||
596 | int EVP_CIPHER_CTX_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg, void *ptr) | 589 | int |
590 | EVP_CIPHER_CTX_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg, void *ptr) | ||
597 | { | 591 | { |
598 | int ret; | 592 | int ret; |
599 | if(!ctx->cipher) { | 593 | |
594 | if (!ctx->cipher) { | ||
600 | EVPerr(EVP_F_EVP_CIPHER_CTX_CTRL, EVP_R_NO_CIPHER_SET); | 595 | EVPerr(EVP_F_EVP_CIPHER_CTX_CTRL, EVP_R_NO_CIPHER_SET); |
601 | return 0; | 596 | return 0; |
602 | } | 597 | } |
603 | 598 | ||
604 | if(!ctx->cipher->ctrl) { | 599 | if (!ctx->cipher->ctrl) { |
605 | EVPerr(EVP_F_EVP_CIPHER_CTX_CTRL, EVP_R_CTRL_NOT_IMPLEMENTED); | 600 | EVPerr(EVP_F_EVP_CIPHER_CTX_CTRL, EVP_R_CTRL_NOT_IMPLEMENTED); |
606 | return 0; | 601 | return 0; |
607 | } | 602 | } |
608 | 603 | ||
609 | ret = ctx->cipher->ctrl(ctx, type, arg, ptr); | 604 | ret = ctx->cipher->ctrl(ctx, type, arg, ptr); |
610 | if(ret == -1) { | 605 | if (ret == -1) { |
611 | EVPerr(EVP_F_EVP_CIPHER_CTX_CTRL, EVP_R_CTRL_OPERATION_NOT_IMPLEMENTED); | 606 | EVPerr(EVP_F_EVP_CIPHER_CTX_CTRL, |
607 | EVP_R_CTRL_OPERATION_NOT_IMPLEMENTED); | ||
612 | return 0; | 608 | return 0; |
613 | } | 609 | } |
614 | return ret; | 610 | return ret; |
615 | } | 611 | } |
616 | 612 | ||
617 | int EVP_CIPHER_CTX_rand_key(EVP_CIPHER_CTX *ctx, unsigned char *key) | 613 | int |
618 | { | 614 | EVP_CIPHER_CTX_rand_key(EVP_CIPHER_CTX *ctx, unsigned char *key) |
615 | { | ||
619 | if (ctx->cipher->flags & EVP_CIPH_RAND_KEY) | 616 | if (ctx->cipher->flags & EVP_CIPH_RAND_KEY) |
620 | return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_RAND_KEY, 0, key); | 617 | return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_RAND_KEY, 0, key); |
621 | if (RAND_bytes(key, ctx->key_len) <= 0) | 618 | if (RAND_bytes(key, ctx->key_len) <= 0) |
622 | return 0; | 619 | return 0; |
623 | return 1; | 620 | return 1; |
624 | } | 621 | } |
625 | 622 | ||
626 | int EVP_CIPHER_CTX_copy(EVP_CIPHER_CTX *out, const EVP_CIPHER_CTX *in) | 623 | int |
627 | { | 624 | EVP_CIPHER_CTX_copy(EVP_CIPHER_CTX *out, const EVP_CIPHER_CTX *in) |
628 | if ((in == NULL) || (in->cipher == NULL)) | 625 | { |
629 | { | 626 | if ((in == NULL) || (in->cipher == NULL)) { |
630 | EVPerr(EVP_F_EVP_CIPHER_CTX_COPY,EVP_R_INPUT_NOT_INITIALIZED); | 627 | EVPerr(EVP_F_EVP_CIPHER_CTX_COPY, EVP_R_INPUT_NOT_INITIALIZED); |
631 | return 0; | 628 | return 0; |
632 | } | 629 | } |
633 | #ifndef OPENSSL_NO_ENGINE | 630 | #ifndef OPENSSL_NO_ENGINE |
634 | /* Make sure it's safe to copy a cipher context using an ENGINE */ | 631 | /* Make sure it's safe to copy a cipher context using an ENGINE */ |
635 | if (in->engine && !ENGINE_init(in->engine)) | 632 | if (in->engine && !ENGINE_init(in->engine)) { |
636 | { | 633 | EVPerr(EVP_F_EVP_CIPHER_CTX_COPY, ERR_R_ENGINE_LIB); |
637 | EVPerr(EVP_F_EVP_CIPHER_CTX_COPY,ERR_R_ENGINE_LIB); | ||
638 | return 0; | 634 | return 0; |
639 | } | 635 | } |
640 | #endif | 636 | #endif |
641 | 637 | ||
642 | EVP_CIPHER_CTX_cleanup(out); | 638 | EVP_CIPHER_CTX_cleanup(out); |
643 | memcpy(out,in,sizeof *out); | 639 | memcpy(out, in, sizeof *out); |
644 | 640 | ||
645 | if (in->cipher_data && in->cipher->ctx_size) | 641 | if (in->cipher_data && in->cipher->ctx_size) { |
646 | { | 642 | out->cipher_data = malloc(in->cipher->ctx_size); |
647 | out->cipher_data=malloc(in->cipher->ctx_size); | 643 | if (!out->cipher_data) { |
648 | if (!out->cipher_data) | 644 | EVPerr(EVP_F_EVP_CIPHER_CTX_COPY, ERR_R_MALLOC_FAILURE); |
649 | { | ||
650 | EVPerr(EVP_F_EVP_CIPHER_CTX_COPY,ERR_R_MALLOC_FAILURE); | ||
651 | return 0; | 645 | return 0; |
652 | } | ||
653 | memcpy(out->cipher_data,in->cipher_data,in->cipher->ctx_size); | ||
654 | } | 646 | } |
647 | memcpy(out->cipher_data, in->cipher_data, in->cipher->ctx_size); | ||
648 | } | ||
655 | 649 | ||
656 | if (in->cipher->flags & EVP_CIPH_CUSTOM_COPY) | 650 | if (in->cipher->flags & EVP_CIPH_CUSTOM_COPY) |
657 | return in->cipher->ctrl((EVP_CIPHER_CTX *)in, EVP_CTRL_COPY, 0, out); | 651 | return in->cipher->ctrl((EVP_CIPHER_CTX *)in, |
652 | EVP_CTRL_COPY, 0, out); | ||
658 | return 1; | 653 | return 1; |
659 | } | 654 | } |
660 | |||