-/* $OpenBSD: md4.c,v 1.13 2024/03/26 07:12:52 jsing Exp $ */
+/* $OpenBSD: md4.c,v 1.14 2024/03/26 12:18:23 jsing Exp $ */
/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
* All rights reserved.
*
#include <string.h>
#include <openssl/opensslconf.h>
+
#include <openssl/md4.h>
+#include "crypto_internal.h"
+
+/* Ensure that MD4_LONG and uint32_t are equivalent size. */
+CTASSERT(sizeof(MD4_LONG) == sizeof(uint32_t));
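+/*
+ * CTASSERT() comes from crypto_internal.h and fails the build when the
+ * condition is false; a minimal sketch of such a compile-time assertion
+ * (not necessarily the actual definition) is:
+ *
+ *	#define CTASSERT(x) \
+ *	    extern char _ctassert[(x) ? 1 : -1] __attribute__((__unused__))
+ */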
+
__BEGIN_HIDDEN_DECLS
void md4_block_data_order (MD4_CTX *c, const void *p, size_t num);
#undef X
#endif
void
-md4_block_data_order(MD4_CTX *c, const void *data_, size_t num)
+md4_block_data_order(MD4_CTX *c, const void *_in, size_t num)
{
- const unsigned char *data = data_;
- unsigned int A, B, C, D, l;
+ const uint8_t *in = _in;
+ const MD4_LONG *in32;
+ unsigned int A, B, C, D;
unsigned int X0, X1, X2, X3, X4, X5, X6, X7,
X8, X9, X10, X11, X12, X13, X14, X15;
A = c->A;
B = c->B;
C = c->C;
D = c->D;
- for (; num--; ) {
- HOST_c2l(data, l);
- X0 = l;
- HOST_c2l(data, l);
- X1 = l;
+ while (num-- > 0) {
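+ /*
+ * Each MD4 block is sixteen 32-bit little-endian words. When the
+ * input happens to be 32 bit aligned it is read directly through an
+ * MD4_LONG pointer; otherwise byte-wise loads are used so that
+ * strict alignment architectures do not fault on unaligned access.
+ */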
+ if ((uintptr_t)in % 4 == 0) {
+ /* Input is 32 bit aligned. */
+ in32 = (const MD4_LONG *)in;
+ X0 = le32toh(in32[0]);
+ X1 = le32toh(in32[1]);
+ X2 = le32toh(in32[2]);
+ X3 = le32toh(in32[3]);
+ X4 = le32toh(in32[4]);
+ X5 = le32toh(in32[5]);
+ X6 = le32toh(in32[6]);
+ X7 = le32toh(in32[7]);
+ X8 = le32toh(in32[8]);
+ X9 = le32toh(in32[9]);
+ X10 = le32toh(in32[10]);
+ X11 = le32toh(in32[11]);
+ X12 = le32toh(in32[12]);
+ X13 = le32toh(in32[13]);
+ X14 = le32toh(in32[14]);
+ X15 = le32toh(in32[15]);
+ } else {
+ /* Input is not 32 bit aligned. */
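+ /*
+ * crypto_load_le32toh() comes from crypto_internal.h and performs a
+ * little-endian load from a possibly unaligned address, roughly a
+ * memcpy() into a uint32_t followed by le32toh().
+ */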
+ X0 = crypto_load_le32toh(&in[0 * 4]);
+ X1 = crypto_load_le32toh(&in[1 * 4]);
+ X2 = crypto_load_le32toh(&in[2 * 4]);
+ X3 = crypto_load_le32toh(&in[3 * 4]);
+ X4 = crypto_load_le32toh(&in[4 * 4]);
+ X5 = crypto_load_le32toh(&in[5 * 4]);
+ X6 = crypto_load_le32toh(&in[6 * 4]);
+ X7 = crypto_load_le32toh(&in[7 * 4]);
+ X8 = crypto_load_le32toh(&in[8 * 4]);
+ X9 = crypto_load_le32toh(&in[9 * 4]);
+ X10 = crypto_load_le32toh(&in[10 * 4]);
+ X11 = crypto_load_le32toh(&in[11 * 4]);
+ X12 = crypto_load_le32toh(&in[12 * 4]);
+ X13 = crypto_load_le32toh(&in[13 * 4]);
+ X14 = crypto_load_le32toh(&in[14 * 4]);
+ X15 = crypto_load_le32toh(&in[15 * 4]);
+ }
+ in += MD4_CBLOCK;
+
/* Round 0 */
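/* Round 0: F(b,c,d) = (b & c) | (~b & d), rotations 3, 7, 11, 19. */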
R0(A, B, C, D, X0, 3, 0);
- HOST_c2l(data, l);
- X2 = l;
R0(D, A, B, C, X1, 7, 0);
- HOST_c2l(data, l);
- X3 = l;
R0(C, D, A, B, X2, 11, 0);
- HOST_c2l(data, l);
- X4 = l;
R0(B, C, D, A, X3, 19, 0);
- HOST_c2l(data, l);
- X5 = l;
R0(A, B, C, D, X4, 3, 0);
- HOST_c2l(data, l);
- X6 = l;
R0(D, A, B, C, X5, 7, 0);
- HOST_c2l(data, l);
- X7 = l;
R0(C, D, A, B, X6, 11, 0);
- HOST_c2l(data, l);
- X8 = l;
R0(B, C, D, A, X7, 19, 0);
- HOST_c2l(data, l);
- X9 = l;
R0(A, B, C, D, X8, 3, 0);
- HOST_c2l(data, l);
- X10 = l;
R0(D, A, B, C, X9, 7, 0);
- HOST_c2l(data, l);
- X11 = l;
R0(C, D, A, B, X10, 11, 0);
- HOST_c2l(data, l);
- X12 = l;
R0(B, C, D, A, X11, 19, 0);
- HOST_c2l(data, l);
- X13 = l;
R0(A, B, C, D, X12, 3, 0);
- HOST_c2l(data, l);
- X14 = l;
R0(D, A, B, C, X13, 7, 0);
- HOST_c2l(data, l);
- X15 = l;
R0(C, D, A, B, X14, 11, 0);
R0(B, C, D, A, X15, 19, 0);
+
/* Round 1 */
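/* G(b,c,d) = (b & c) | (b & d) | (c & d); 0x5A827999 is floor(2^30 * sqrt(2)). */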
R1(A, B, C, D, X0, 3, 0x5A827999L);
R1(D, A, B, C, X4, 5, 0x5A827999L);
R1(D, A, B, C, X7, 5, 0x5A827999L);
R1(C, D, A, B, X11, 9, 0x5A827999L);
R1(B, C, D, A, X15, 13, 0x5A827999L);
+
/* Round 2 */
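/* H(b,c,d) = b ^ c ^ d; 0x6ED9EBA1 is floor(2^30 * sqrt(3)). */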
R2(A, B, C, D, X0, 3, 0x6ED9EBA1L);
R2(D, A, B, C, X8, 9, 0x6ED9EBA1L);
int
MD4_Final(unsigned char *md, MD4_CTX *c)
{
unsigned char *p = (unsigned char *)c->data;
size_t n = c->num;
- unsigned long ll;
p[n] = 0x80; /* there is always room for one */
n++;
if (n > (MD4_CBLOCK - 8)) {
memset(p + n, 0, MD4_CBLOCK - n);
n = 0;
md4_block_data_order(c, p, 1);
}
+
memset(p + n, 0, MD4_CBLOCK - 8 - n);
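+ /*
+ * The last 8 bytes of the final block carry the 64-bit message
+ * length in bits, little endian (Nl is the low word, Nh the high
+ * word), as required by RFC 1320.
+ */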
+ c->data[MD4_LBLOCK - 2] = htole32(c->Nl);
+ c->data[MD4_LBLOCK - 1] = htole32(c->Nh);
- p += MD4_CBLOCK - 8;
-#if defined(DATA_ORDER_IS_BIG_ENDIAN)
- HOST_l2c(c->Nh, p);
- HOST_l2c(c->Nl, p);
-#elif defined(DATA_ORDER_IS_LITTLE_ENDIAN)
- HOST_l2c(c->Nl, p);
- HOST_l2c(c->Nh, p);
-#endif
- p -= MD4_CBLOCK;
md4_block_data_order(c, p, 1);
c->num = 0;
memset(p, 0, MD4_CBLOCK);
- ll = c->A;
- HOST_l2c(ll, md);
- ll = c->B;
- HOST_l2c(ll, md);
- ll = c->C;
- HOST_l2c(ll, md);
- ll = c->D;
- HOST_l2c(ll, md);
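+ /*
+ * crypto_store_htole32() (also from crypto_internal.h) converts the
+ * word to little endian and stores it at a possibly unaligned
+ * address, typically htole32() followed by memcpy().
+ */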
+ crypto_store_htole32(&md[0 * 4], c->A);
+ crypto_store_htole32(&md[1 * 4], c->B);
+ crypto_store_htole32(&md[2 * 4], c->C);
+ crypto_store_htole32(&md[3 * 4], c->D);
return 1;
}