Lines Matching defs:md_size

199 /* ssl3_cbc_copy_mac copies |md_size| bytes from the end of |rec| to |out| in
207 * rec->orig_len >= md_size
208 * md_size <= EVP_MAX_MD_SIZE
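
The comment fragments at 199-208 give the contract of ssl3_cbc_copy_mac: copy the last md_size bytes of the record into out in constant time, with the caller guaranteeing the two bounds that the asserts at 239-240 repeat. A hedged reconstruction of the prototype, pieced together from these fragments (the exact parameter layout is an assumption based on OpenSSL 1.0.x's s3_cbc.c, not a quote from the matched lines):

/* Copies the |md_size| bytes of MAC from the end of |rec| into |out| in
 * constant time with respect to the (secret) amount of CBC padding.
 * Preconditions, per lines 207-208:
 *   rec->orig_len >= md_size
 *   md_size <= EVP_MAX_MD_SIZE                                          */
void ssl3_cbc_copy_mac(unsigned char *out,
                       const SSL3_RECORD *rec,
                       unsigned md_size, unsigned orig_len);
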
220 unsigned md_size,unsigned orig_len)
231 unsigned mac_start = mac_end - md_size;
239 OPENSSL_assert(orig_len >= md_size);
240 OPENSSL_assert(md_size <= EVP_MAX_MD_SIZE);
247 if (orig_len > md_size + 255 + 1)
248 scan_start = orig_len - (md_size + 255 + 1);
249 /* div_spoiler contains a multiple of md_size that is used to cause the
253 * The aim of right-shifting md_size is so that the compiler doesn't
255 * to prove that md_size is always even, which I hope is beyond it. */
256 div_spoiler = md_size >> 1;
258 rotate_offset = (div_spoiler + mac_start - scan_start) % md_size;
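
Lines 249-258 are the constant-time modulo trick: rotate_offset depends on the secret mac_start, and a bare % md_size could take a padding-dependent number of cycles on some CPUs, so a large multiple of md_size (div_spoiler) is added first to keep the dividend big regardless of the padding. The stand-alone check below illustrates why this cannot change the result; the left shift that widens div_spoiler sits on line 257, which does not mention md_size and so is not in this listing, so its exact form here is an assumption.

/* Stand-alone illustration (not OpenSSL code) that adding div_spoiler
 * before the modulo never changes the remainder, for every digest size
 * used in this file.                                                    */
#include <assert.h>
#include <stdio.h>

int main(void)
{
        static const unsigned sizes[] = { 16, 20, 28, 32, 48, 64 };
        unsigned i, x;

        for (i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++) {
                unsigned md_size = sizes[i];
                unsigned div_spoiler = md_size >> 1;
                /* Assumed form of line 257; with 4-byte unsigned this is
                 * (md_size/2) << 24.  Because every md_size here is even,
                 * that equals md_size << 23, a multiple of md_size. */
                div_spoiler <<= (sizeof(div_spoiler) - 1) * 8;
                for (x = 0; x < 4 * md_size; x++)
                        assert((div_spoiler + x) % md_size == x % md_size);
        }
        printf("div_spoiler never changes the remainder\n");
        return 0;
}
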
260 memset(rotated_mac, 0, md_size);
267 j &= constant_time_lt(j,md_size);
273 for (i = 0; i < md_size; i++)
278 rotate_offset &= constant_time_lt(rotate_offset,md_size);
281 memset(out, 0, md_size);
282 rotate_offset = md_size - rotate_offset;
283 rotate_offset &= constant_time_lt(rotate_offset,md_size);
284 for (i = 0; i < md_size; i++)
286 for (j = 0; j < md_size; j++)
289 rotate_offset &= constant_time_lt(rotate_offset,md_size);
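
Lines 260-289 are the two constant-time passes: the loop at 267-278 reads every byte from scan_start to the end of the record and ORs the MAC bytes into rotated_mac, which leaves them rotated by rotate_offset; the nested loops at 284-286 then un-rotate into out by touching every byte of rotated_mac for every output position, so no load address ever depends on the secret padding length. The sketch below shows the same technique with hypothetical ct_* helpers of my own (OpenSSL's constant_time_* internals differ in detail, and the real code also applies the div_spoiler fix from above before the modulo).

#include <string.h>

/* 0xFF..FF when a >= b, 0 otherwise; assumes a and b fit in 31 bits. */
static unsigned ct_ge(unsigned a, unsigned b)
{
        return 0u - (((a - b) >> (sizeof(unsigned) * 8 - 1)) ^ 1u);
}

/* 0xFF..FF when a < b, 0 otherwise. */
static unsigned ct_lt(unsigned a, unsigned b)
{
        return ~ct_ge(a, b);
}

/* 0xFF when a == b, 0 otherwise; assumes a, b < 256. */
static unsigned char ct_eq_8(unsigned a, unsigned b)
{
        return (unsigned char)(((a ^ b) - 1u) >> 8);
}

/* Copy the md_size MAC bytes starting at the secret offset mac_start out
 * of rec[0..orig_len) without any secret-dependent branch or index. */
static void ct_copy_mac(unsigned char *out, const unsigned char *rec,
                        unsigned orig_len, unsigned mac_start,
                        unsigned md_size)
{
        unsigned char rotated_mac[64];          /* assumes md_size <= 64 */
        unsigned scan_start = 0, rotate_offset, offset, i, j;

        /* The MAC lies within the last md_size + 255 + 1 bytes: at most
         * 255 bytes of CBC padding plus the padding-length byte. */
        if (orig_len > md_size + 255 + 1)
                scan_start = orig_len - (md_size + 255 + 1);
        /* The real code adds div_spoiler here so the divide itself is
         * constant time; omitted to keep the sketch short. */
        rotate_offset = (mac_start - scan_start) % md_size;

        /* Pass 1: read every candidate byte; only the MAC bytes survive
         * the masks, landing in rotated_mac rotated by rotate_offset. */
        memset(rotated_mac, 0, md_size);
        for (i = scan_start, j = 0; i < orig_len; i++) {
                unsigned char started = (unsigned char)ct_ge(i, mac_start);
                unsigned char ended = (unsigned char)ct_ge(i, mac_start + md_size);
                rotated_mac[j++] |= rec[i] & started & ~ended;
                j &= ct_lt(j, md_size);         /* wrap j without branching */
        }

        /* Pass 2: un-rotate.  out[i] wants rotated_mac[(i + rotate_offset)
         * % md_size]; scanning all of rotated_mac under an equality mask
         * keeps the secret offset out of every address calculation. */
        memset(out, 0, md_size);
        offset = rotate_offset;
        for (i = 0; i < md_size; i++) {
                for (j = 0; j < md_size; j++)
                        out[i] |= rotated_mac[j] & ct_eq_8(j, offset);
                offset++;
                offset &= ct_lt(offset, md_size);
        }
}

The cost of never indexing by a secret is the md_size * md_size inner loop; for a 20- or 32-byte MAC that is negligible next to decrypting the record.
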
416 unsigned md_size, md_block_size = 64;
443 md_size = 16;
451 md_size = 20;
458 md_size = 224/8;
464 md_size = 32;
472 md_size = 384/8;
480 md_size = 64;
497 OPENSSL_assert(md_size <= EVP_MAX_MD_SIZE);
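
Lines 416-497 appear to be from ssl3_cbc_digest_record, which picks md_size (and the hash block size) from the negotiated digest before doing the analogous constant-time work on the MAC computation itself. A sketch of the mapping these fragments imply, written as a switch over OpenSSL digest NIDs; the 128-byte block size for SHA-384/512 comes from how those hashes are defined, not from the matched lines, so treat it as an assumption:

#include <openssl/evp.h>
#include <openssl/objects.h>

static int cbc_digest_sizes(const EVP_MD_CTX *ctx,
                            unsigned *md_size, unsigned *md_block_size)
{
        *md_block_size = 64;                    /* default, per line 416 */
        switch (EVP_MD_CTX_type(ctx)) {
        case NID_md5:    *md_size = 16;      break;
        case NID_sha1:   *md_size = 20;      break;
        case NID_sha224: *md_size = 224 / 8; break;
        case NID_sha256: *md_size = 32;      break;
        case NID_sha384: *md_size = 384 / 8; *md_block_size = 128; break;
        case NID_sha512: *md_size = 64;      *md_block_size = 128; break;
        default:
                return 0;   /* not a digest this constant-time path handles */
        }
        return 1;
}
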
533 max_mac_bytes = len - md_size - 1;
548 mac_end_offset = data_plus_mac_size + header_length - md_size;
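
As a worked example of line 548's formula (assuming HMAC-SHA-1, so md_size = 20, and the 13-byte TLS MAC pseudo-header, so header_length = 13): a record carrying 100 bytes of application data has data_plus_mac_size = 100 + 20 = 120, so mac_end_offset = 120 + 13 - 20 = 113 = header_length + 100, i.e. the index just past the last byte of the header-plus-plaintext stream that is actually MACed.
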
680 for (j = 0; j < md_size; j++)
693 EVP_DigestUpdate(&md_ctx, mac_out, md_size);
702 EVP_DigestUpdate(&md_ctx, mac_out, md_size);
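
Lines 680-702 close the constant-time digest: the j-loop at 680 ORs the output of whichever hash block really ended the MACed data into mac_out (every candidate block is hashed, only the right one survives the mask), and mac_out is then fed into a fresh digest to finish the MAC, presumably once in the SSLv3 path and once in the TLS/HMAC path, hence the two matches at 693 and 702. A sketch of the TLS finishing step, using the heap-allocated EVP_MD_CTX API rather than the stack context that the listing's &md_ctx implies; finish_hmac, hmac_pad, and the parameter layout are my own names, not OpenSSL's:

#include <openssl/evp.h>

/* Complete an HMAC whose inner hash (over pad_1 || header || data) has
 * already been collected, in constant time, into mac_out. */
static int finish_hmac(const EVP_MD *md,
                       const unsigned char *hmac_pad,   /* key ^ opad       */
                       unsigned md_block_size,          /* 64 or 128 bytes  */
                       const unsigned char *mac_out,    /* inner hash       */
                       unsigned md_size,
                       unsigned char *md_out,           /* >= md_size bytes */
                       unsigned *md_out_size)
{
        EVP_MD_CTX *ctx = EVP_MD_CTX_new();
        int ok = ctx != NULL
              && EVP_DigestInit_ex(ctx, md, NULL)
              && EVP_DigestUpdate(ctx, hmac_pad, md_block_size)
              && EVP_DigestUpdate(ctx, mac_out, md_size)
              && EVP_DigestFinal_ex(ctx, md_out, md_out_size);
        EVP_MD_CTX_free(ctx);
        return ok;
}
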