ucommon 7.0.0
About: GNU uCommon C++ is a portable and optimized class framework for writing C++ applications that need to use threads and support concurrent synchronization, and that use sockets, XML parsing, object serialization, thread-optimized string and data structure classes, etc.

sha2.cpp
1 /*
2  ---------------------------------------------------------------------------
3  Copyright (c) 2002, Dr Brian Gladman, Worcester, UK. All rights reserved.
4 
5  LICENSE TERMS
6 
7  The free distribution and use of this software in both source and binary
8  form is allowed (with or without changes) provided that:
9 
10  1. distributions of this source code include the above copyright
11  notice, this list of conditions and the following disclaimer;
12 
13  2. distributions in binary form include the above copyright
14  notice, this list of conditions and the following disclaimer
15  in the documentation and/or other associated materials;
16 
17  3. the copyright holder's name is not used to endorse products
18  built using this software without specific written permission.
19 
20  ALTERNATIVELY, provided that this notice is retained in full, this product
21  may be distributed under the terms of the GNU General Public License (GPL),
22  in which case the provisions of the GPL apply INSTEAD OF those given above.
23 
24  DISCLAIMER
25 
26  This software is provided 'as is' with no explicit or implied warranties
27  in respect of its properties, including, but not limited to, correctness
28  and/or fitness for purpose.
29  ---------------------------------------------------------------------------
30  Issue Date: 01/08/2005
31 
32  This is a byte-oriented version of SHA2 that operates on arrays of bytes
33  stored in memory. This code implements sha256, sha384 and sha512, but the
34  latter two functions rely on 64-bit integer operations that may not be
35  very efficient on 32-bit machines.
36 
37  The sha256 functions use a type 'sha256_ctx' to hold details of the
38  current hash state and use the following three calls:
39 
40  void sha256_begin(sha256_ctx ctx[1])
41  void sha256_hash(const unsigned char data[],
42  unsigned long len, sha256_ctx ctx[1])
43  void sha256_end(unsigned char hval[], sha256_ctx ctx[1])
44 
45  The first subroutine initialises a hash computation by setting up the
46  sha256_ctx context. The second subroutine hashes 8-bit bytes from the
47  array data[] into the hash state within the sha256_ctx context, the
48  number of bytes to be hashed being given by the unsigned long integer
49  len. The third subroutine completes the hash calculation and places
50  the resulting digest value in the array of 8-bit bytes hval[].
51 
52  The sha384 and sha512 functions are similar and use the interfaces:
53 
54  void sha384_begin(sha384_ctx ctx[1]);
55  void sha384_hash(const unsigned char data[],
56  unsigned long len, sha384_ctx ctx[1]);
57  void sha384_end(unsigned char hval[], sha384_ctx ctx[1]);
58 
59  void sha512_begin(sha512_ctx ctx[1]);
60  void sha512_hash(const unsigned char data[],
61  unsigned long len, sha512_ctx ctx[1]);
62  void sha512_end(unsigned char hval[], sha512_ctx ctx[1]);
63 
64  In addition there is a generic sha2 interface that can be used to call
65  any of these functions through calls that take a hash length parameter:
66 
67  int sha2_begin(unsigned long len, sha2_ctx ctx[1]);
68  void sha2_hash(const unsigned char data[],
69  unsigned long len, sha2_ctx ctx[1]);
70  void sha2_end(unsigned char hval[], sha2_ctx ctx[1]);
71 
72  My thanks to Erik Andersen <andersen@codepoet.org> for testing this code
73  on big-endian systems and for his assistance with corrections
74 */
75 
76 #if 0
77 #define UNROLL_SHA2 /* for SHA2 loop unroll */
78 #endif
79 
80 #include <string.h> /* for memcpy() etc. */
81 
82 #include "sha2.h"
83 
84 #include "brg_endian.h"
85 
86 #if defined(__cplusplus)
87 extern "C"
88 {
89 #endif
90 
91 #if defined( _MSC_VER ) && ( _MSC_VER > 800 )
92 #pragma intrinsic(memcpy)
93 #endif
94 
95 #if 0 && defined(_MSC_VER)
96 #define rotl32 _lrotl
97 #define rotr32 _lrotr
98 #else
99 #define rotl32(x,n) (((x) << n) | ((x) >> (32 - n)))
100 #define rotr32(x,n) (((x) >> n) | ((x) << (32 - n)))
101 #endif
102 
103 #if !defined(bswap_32)
104 #define bswap_32(x) ((rotr32((x), 24) & 0x00ff00ff) | (rotr32((x), 8) & 0xff00ff00))
105 #endif
106 
107 #if (PLATFORM_BYTE_ORDER == IS_LITTLE_ENDIAN)
108 #define SWAP_BYTES
109 #else
110 #undef SWAP_BYTES
111 #endif
112 
113 #if 0
114 
115 #define ch(x,y,z) (((x) & (y)) ^ (~(x) & (z)))
116 #define maj(x,y,z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
117 
118 #else /* Thanks to Rich Schroeppel and Colin Plumb for the following */
119 
120 #define ch(x,y,z) ((z) ^ ((x) & ((y) ^ (z))))
121 #define maj(x,y,z) (((x) & (y)) | ((z) & ((x) ^ (y))))
122 
123 #endif
124 
125 /* round transforms for SHA256 and SHA512 compression functions */
126 
127 #define vf(n,i) v[(n - i) & 7]
128 
129 #define hf(i) (p[i & 15] += \
130  g_1(p[(i + 14) & 15]) + p[(i + 9) & 15] + g_0(p[(i + 1) & 15]))
131 
132 #define v_cycle(i,j) \
133  vf(7,i) += (j ? hf(i) : p[i]) + k_0[i+j] \
134  + s_1(vf(4,i)) + ch(vf(4,i),vf(5,i),vf(6,i)); \
135  vf(3,i) += vf(7,i); \
136  vf(7,i) += s_0(vf(0,i))+ maj(vf(0,i),vf(1,i),vf(2,i))
137 
138 #if defined(SHA_224) || defined(SHA_256)
139 
140 #define SHA256_MASK (SHA256_BLOCK_SIZE - 1)
141 
142 #if defined(SWAP_BYTES)
143 #define bsw_32(p,n) \
144  { int _i = (n); while(_i--) ((uint_32t*)p)[_i] = bswap_32(((uint_32t*)p)[_i]); }
145 #else
146 #define bsw_32(p,n)
147 #endif
148 
149 #define s_0(x) (rotr32((x), 2) ^ rotr32((x), 13) ^ rotr32((x), 22))
150 #define s_1(x) (rotr32((x), 6) ^ rotr32((x), 11) ^ rotr32((x), 25))
151 #define g_0(x) (rotr32((x), 7) ^ rotr32((x), 18) ^ ((x) >> 3))
152 #define g_1(x) (rotr32((x), 17) ^ rotr32((x), 19) ^ ((x) >> 10))
153 #define k_0 k256
154 
155 /* rotated SHA256 round definition. Rather than swapping variables as in */
156 /* FIPS-180, different variables are 'rotated' on each round, returning */
157 /* to their starting positions every eight rounds */
158 
159 #define q(n) v##n
160 
161 #define one_cycle(a,b,c,d,e,f,g,h,k,w) \
162  q(h) += s_1(q(e)) + ch(q(e), q(f), q(g)) + k + w; \
163  q(d) += q(h); q(h) += s_0(q(a)) + maj(q(a), q(b), q(c))
164 
165 /* SHA256 mixing data */
166 
167 const uint_32t k256[64] =
168 { 0x428a2f98ul, 0x71374491ul, 0xb5c0fbcful, 0xe9b5dba5ul,
169  0x3956c25bul, 0x59f111f1ul, 0x923f82a4ul, 0xab1c5ed5ul,
170  0xd807aa98ul, 0x12835b01ul, 0x243185beul, 0x550c7dc3ul,
171  0x72be5d74ul, 0x80deb1feul, 0x9bdc06a7ul, 0xc19bf174ul,
172  0xe49b69c1ul, 0xefbe4786ul, 0x0fc19dc6ul, 0x240ca1ccul,
173  0x2de92c6ful, 0x4a7484aaul, 0x5cb0a9dcul, 0x76f988daul,
174  0x983e5152ul, 0xa831c66dul, 0xb00327c8ul, 0xbf597fc7ul,
175  0xc6e00bf3ul, 0xd5a79147ul, 0x06ca6351ul, 0x14292967ul,
176  0x27b70a85ul, 0x2e1b2138ul, 0x4d2c6dfcul, 0x53380d13ul,
177  0x650a7354ul, 0x766a0abbul, 0x81c2c92eul, 0x92722c85ul,
178  0xa2bfe8a1ul, 0xa81a664bul, 0xc24b8b70ul, 0xc76c51a3ul,
179  0xd192e819ul, 0xd6990624ul, 0xf40e3585ul, 0x106aa070ul,
180  0x19a4c116ul, 0x1e376c08ul, 0x2748774cul, 0x34b0bcb5ul,
181  0x391c0cb3ul, 0x4ed8aa4aul, 0x5b9cca4ful, 0x682e6ff3ul,
182  0x748f82eeul, 0x78a5636ful, 0x84c87814ul, 0x8cc70208ul,
183  0x90befffaul, 0xa4506cebul, 0xbef9a3f7ul, 0xc67178f2ul,
184 };
185 
186 /* Compile 64 bytes of hash data into SHA256 digest value */
187 /* NOTE: this routine assumes that the byte order in the */
188 /* ctx->wbuf[] at this point is such that low address bytes */
189 /* in the ORIGINAL byte stream will go into the high end of */
190 /* words on BOTH big and little endian systems */
191 
192 VOID_RETURN sha256_compile(sha256_ctx ctx[1])
193 {
194 #if !defined(UNROLL_SHA2)
195 
196  uint_32t j, *p = ctx->wbuf, v[8];
197 
198  memcpy(v, ctx->hash, 8 * sizeof(uint_32t));
199 
200  for(j = 0; j < 64; j += 16)
201  {
202  v_cycle( 0, j); v_cycle( 1, j);
203  v_cycle( 2, j); v_cycle( 3, j);
204  v_cycle( 4, j); v_cycle( 5, j);
205  v_cycle( 6, j); v_cycle( 7, j);
206  v_cycle( 8, j); v_cycle( 9, j);
207  v_cycle(10, j); v_cycle(11, j);
208  v_cycle(12, j); v_cycle(13, j);
209  v_cycle(14, j); v_cycle(15, j);
210  }
211 
212  ctx->hash[0] += v[0]; ctx->hash[1] += v[1];
213  ctx->hash[2] += v[2]; ctx->hash[3] += v[3];
214  ctx->hash[4] += v[4]; ctx->hash[5] += v[5];
215  ctx->hash[6] += v[6]; ctx->hash[7] += v[7];
216 
217 #else
218 
219  uint_32t *p = ctx->wbuf,v0,v1,v2,v3,v4,v5,v6,v7;
220 
221  v0 = ctx->hash[0]; v1 = ctx->hash[1];
222  v2 = ctx->hash[2]; v3 = ctx->hash[3];
223  v4 = ctx->hash[4]; v5 = ctx->hash[5];
224  v6 = ctx->hash[6]; v7 = ctx->hash[7];
225 
226  one_cycle(0,1,2,3,4,5,6,7,k256[ 0],p[ 0]);
227  one_cycle(7,0,1,2,3,4,5,6,k256[ 1],p[ 1]);
228  one_cycle(6,7,0,1,2,3,4,5,k256[ 2],p[ 2]);
229  one_cycle(5,6,7,0,1,2,3,4,k256[ 3],p[ 3]);
230  one_cycle(4,5,6,7,0,1,2,3,k256[ 4],p[ 4]);
231  one_cycle(3,4,5,6,7,0,1,2,k256[ 5],p[ 5]);
232  one_cycle(2,3,4,5,6,7,0,1,k256[ 6],p[ 6]);
233  one_cycle(1,2,3,4,5,6,7,0,k256[ 7],p[ 7]);
234  one_cycle(0,1,2,3,4,5,6,7,k256[ 8],p[ 8]);
235  one_cycle(7,0,1,2,3,4,5,6,k256[ 9],p[ 9]);
236  one_cycle(6,7,0,1,2,3,4,5,k256[10],p[10]);
237  one_cycle(5,6,7,0,1,2,3,4,k256[11],p[11]);
238  one_cycle(4,5,6,7,0,1,2,3,k256[12],p[12]);
239  one_cycle(3,4,5,6,7,0,1,2,k256[13],p[13]);
240  one_cycle(2,3,4,5,6,7,0,1,k256[14],p[14]);
241  one_cycle(1,2,3,4,5,6,7,0,k256[15],p[15]);
242 
243  one_cycle(0,1,2,3,4,5,6,7,k256[16],hf( 0));
244  one_cycle(7,0,1,2,3,4,5,6,k256[17],hf( 1));
245  one_cycle(6,7,0,1,2,3,4,5,k256[18],hf( 2));
246  one_cycle(5,6,7,0,1,2,3,4,k256[19],hf( 3));
247  one_cycle(4,5,6,7,0,1,2,3,k256[20],hf( 4));
248  one_cycle(3,4,5,6,7,0,1,2,k256[21],hf( 5));
249  one_cycle(2,3,4,5,6,7,0,1,k256[22],hf( 6));
250  one_cycle(1,2,3,4,5,6,7,0,k256[23],hf( 7));
251  one_cycle(0,1,2,3,4,5,6,7,k256[24],hf( 8));
252  one_cycle(7,0,1,2,3,4,5,6,k256[25],hf( 9));
253  one_cycle(6,7,0,1,2,3,4,5,k256[26],hf(10));
254  one_cycle(5,6,7,0,1,2,3,4,k256[27],hf(11));
255  one_cycle(4,5,6,7,0,1,2,3,k256[28],hf(12));
256  one_cycle(3,4,5,6,7,0,1,2,k256[29],hf(13));
257  one_cycle(2,3,4,5,6,7,0,1,k256[30],hf(14));
258  one_cycle(1,2,3,4,5,6,7,0,k256[31],hf(15));
259 
260  one_cycle(0,1,2,3,4,5,6,7,k256[32],hf( 0));
261  one_cycle(7,0,1,2,3,4,5,6,k256[33],hf( 1));
262  one_cycle(6,7,0,1,2,3,4,5,k256[34],hf( 2));
263  one_cycle(5,6,7,0,1,2,3,4,k256[35],hf( 3));
264  one_cycle(4,5,6,7,0,1,2,3,k256[36],hf( 4));
265  one_cycle(3,4,5,6,7,0,1,2,k256[37],hf( 5));
266  one_cycle(2,3,4,5,6,7,0,1,k256[38],hf( 6));
267  one_cycle(1,2,3,4,5,6,7,0,k256[39],hf( 7));
268  one_cycle(0,1,2,3,4,5,6,7,k256[40],hf( 8));
269  one_cycle(7,0,1,2,3,4,5,6,k256[41],hf( 9));
270  one_cycle(6,7,0,1,2,3,4,5,k256[42],hf(10));
271  one_cycle(5,6,7,0,1,2,3,4,k256[43],hf(11));
272  one_cycle(4,5,6,7,0,1,2,3,k256[44],hf(12));
273  one_cycle(3,4,5,6,7,0,1,2,k256[45],hf(13));
274  one_cycle(2,3,4,5,6,7,0,1,k256[46],hf(14));
275  one_cycle(1,2,3,4,5,6,7,0,k256[47],hf(15));
276 
277  one_cycle(0,1,2,3,4,5,6,7,k256[48],hf( 0));
278  one_cycle(7,0,1,2,3,4,5,6,k256[49],hf( 1));
279  one_cycle(6,7,0,1,2,3,4,5,k256[50],hf( 2));
280  one_cycle(5,6,7,0,1,2,3,4,k256[51],hf( 3));
281  one_cycle(4,5,6,7,0,1,2,3,k256[52],hf( 4));
282  one_cycle(3,4,5,6,7,0,1,2,k256[53],hf( 5));
283  one_cycle(2,3,4,5,6,7,0,1,k256[54],hf( 6));
284  one_cycle(1,2,3,4,5,6,7,0,k256[55],hf( 7));
285  one_cycle(0,1,2,3,4,5,6,7,k256[56],hf( 8));
286  one_cycle(7,0,1,2,3,4,5,6,k256[57],hf( 9));
287  one_cycle(6,7,0,1,2,3,4,5,k256[58],hf(10));
288  one_cycle(5,6,7,0,1,2,3,4,k256[59],hf(11));
289  one_cycle(4,5,6,7,0,1,2,3,k256[60],hf(12));
290  one_cycle(3,4,5,6,7,0,1,2,k256[61],hf(13));
291  one_cycle(2,3,4,5,6,7,0,1,k256[62],hf(14));
292  one_cycle(1,2,3,4,5,6,7,0,k256[63],hf(15));
293 
294  ctx->hash[0] += v0; ctx->hash[1] += v1;
295  ctx->hash[2] += v2; ctx->hash[3] += v3;
296  ctx->hash[4] += v4; ctx->hash[5] += v5;
297  ctx->hash[6] += v6; ctx->hash[7] += v7;
298 #endif
299 }
300 
301 /* SHA256 hash data in an array of bytes into hash buffer */
302 /* and call the hash_compile function as required. */
303 
304 VOID_RETURN sha256_hash(const unsigned char data[], unsigned long len, sha256_ctx ctx[1])
305 { uint_32t pos = (uint_32t)(ctx->count[0] & SHA256_MASK),
306  space = SHA256_BLOCK_SIZE - pos;
307  const unsigned char *sp = data;
308 
309  if((ctx->count[0] += len) < len)
310  ++(ctx->count[1]);
311 
312  while(len >= space) /* transfer whole blocks while possible */
313  {
314  memcpy(((unsigned char*)ctx->wbuf) + pos, sp, space);
315  sp += space; len -= space; space = SHA256_BLOCK_SIZE; pos = 0;
316  bsw_32(ctx->wbuf, SHA256_BLOCK_SIZE >> 2)
317  sha256_compile(ctx);
318  }
319 
320  memcpy(((unsigned char*)ctx->wbuf) + pos, sp, len);
321 }
322 
323 /* SHA256 Final padding and digest calculation */
324 
325 static void sha_end1(unsigned char hval[], sha256_ctx ctx[1], const unsigned int hlen)
326 { uint_32t i = (uint_32t)(ctx->count[0] & SHA256_MASK);
327 
328  /* put bytes in the buffer in an order in which references to */
329  /* 32-bit words will put bytes with lower addresses into the */
330  /* top of 32 bit words on BOTH big and little endian machines */
331  bsw_32(ctx->wbuf, (i + 3) >> 2)
332 
333  /* we now need to mask valid bytes and add the padding which is */
334  /* a single 1 bit and as many zero bits as necessary. Note that */
335  /* we can always add the first padding byte here because the */
336  /* buffer always has at least one empty slot */
337  ctx->wbuf[i >> 2] &= 0xffffff80 << 8 * (~i & 3);
338  ctx->wbuf[i >> 2] |= 0x00000080 << 8 * (~i & 3);
339 
340  /* we need 9 or more empty positions, one for the padding byte */
341  /* (above) and eight for the length count. If there is not */
342  /* enough space pad and empty the buffer */
343  if(i > SHA256_BLOCK_SIZE - 9)
344  {
345  if(i < 60) ctx->wbuf[15] = 0;
346  sha256_compile(ctx);
347  i = 0;
348  }
349  else /* compute a word index for the empty buffer positions */
350  i = (i >> 2) + 1;
351 
352  while(i < 14) /* and zero pad all but last two positions */
353  ctx->wbuf[i++] = 0;
354 
355  /* the following 32-bit length fields are assembled in the */
356  /* wrong byte order on little endian machines but this is */
357  /* corrected later since they are only ever used as 32-bit */
358  /* word values. */
359  ctx->wbuf[14] = (ctx->count[1] << 3) | (ctx->count[0] >> 29);
360  ctx->wbuf[15] = ctx->count[0] << 3;
361  sha256_compile(ctx);
362 
363  /* extract the hash value as bytes in case the hash buffer is */
364  /* misaligned for 32-bit words */
365  for(i = 0; i < hlen; ++i)
366  hval[i] = (unsigned char)(ctx->hash[i >> 2] >> (8 * (~i & 3)));
367 }
368 
369 #endif
370 
371 #if defined(SHA_224)
372 
373 const uint_32t i224[8] =
374 {
375  0xc1059ed8ul, 0x367cd507ul, 0x3070dd17ul, 0xf70e5939ul,
376  0xffc00b31ul, 0x68581511ul, 0x64f98fa7ul, 0xbefa4fa4ul
377 };
378 
379 VOID_RETURN sha224_begin(sha224_ctx ctx[1])
380 {
381  ctx->count[0] = ctx->count[1] = 0;
382  memcpy(ctx->hash, i224, 8 * sizeof(uint_32t));
383 }
384 
385 VOID_RETURN sha224_end(unsigned char hval[], sha224_ctx ctx[1])
386 {
387  sha_end1(hval, ctx, SHA224_DIGEST_SIZE);
388 }
389 
390 VOID_RETURN sha224(unsigned char hval[], const unsigned char data[], unsigned long len)
391 { sha224_ctx cx[1];
392 
393  sha224_begin(cx);
394  sha224_hash(data, len, cx);
395  sha_end1(hval, cx, SHA224_DIGEST_SIZE);
396 }
397 
398 #endif
399 
400 #if defined(SHA_256)
401 
402 const uint_32t i256[8] =
403 {
404  0x6a09e667ul, 0xbb67ae85ul, 0x3c6ef372ul, 0xa54ff53aul,
405  0x510e527ful, 0x9b05688cul, 0x1f83d9abul, 0x5be0cd19ul
406 };
407 
408 VOID_RETURN sha256_begin(sha256_ctx ctx[1])
409 {
410  ctx->count[0] = ctx->count[1] = 0;
411  memcpy(ctx->hash, i256, 8 * sizeof(uint_32t));
412 }
413 
414 VOID_RETURN sha256_end(unsigned char hval[], sha256_ctx ctx[1])
415 {
416  sha_end1(hval, ctx, SHA256_DIGEST_SIZE);
417 }
418 
419 VOID_RETURN sha256(unsigned char hval[], const unsigned char data[], unsigned long len)
420 { sha256_ctx cx[1];
421 
422  sha256_begin(cx);
423  sha256_hash(data, len, cx);
424  sha_end1(hval, cx, SHA256_DIGEST_SIZE);
425 }
426 
427 #endif
428 
429 #if defined(SHA_384) || defined(SHA_512)
430 
431 #define SHA512_MASK (SHA512_BLOCK_SIZE - 1)
432 
433 #define rotr64(x,n) (((x) >> n) | ((x) << (64 - n)))
434 
435 #if !defined(bswap_64)
436 #define bswap_64(x) (((uint_64t)(bswap_32((uint_32t)(x)))) << 32 | bswap_32((uint_32t)((x) >> 32)))
437 #endif
438 
439 #if defined(SWAP_BYTES)
440 #define bsw_64(p,n) \
441  { int _i = (n); while(_i--) ((uint_64t*)p)[_i] = bswap_64(((uint_64t*)p)[_i]); }
442 #else
443 #define bsw_64(p,n)
444 #endif
445 
446 /* SHA512 mixing function definitions */
447 
448 #ifdef s_0
449 # undef s_0
450 # undef s_1
451 # undef g_0
452 # undef g_1
453 # undef k_0
454 #endif
455 
456 #define s_0(x) (rotr64((x), 28) ^ rotr64((x), 34) ^ rotr64((x), 39))
457 #define s_1(x) (rotr64((x), 14) ^ rotr64((x), 18) ^ rotr64((x), 41))
458 #define g_0(x) (rotr64((x), 1) ^ rotr64((x), 8) ^ ((x) >> 7))
459 #define g_1(x) (rotr64((x), 19) ^ rotr64((x), 61) ^ ((x) >> 6))
460 #define k_0 k512
461 
462 /* SHA384/SHA512 mixing data */
463 
464 const uint_64t k512[80] =
465 {
466  li_64(428a2f98d728ae22), li_64(7137449123ef65cd),
467  li_64(b5c0fbcfec4d3b2f), li_64(e9b5dba58189dbbc),
468  li_64(3956c25bf348b538), li_64(59f111f1b605d019),
469  li_64(923f82a4af194f9b), li_64(ab1c5ed5da6d8118),
470  li_64(d807aa98a3030242), li_64(12835b0145706fbe),
471  li_64(243185be4ee4b28c), li_64(550c7dc3d5ffb4e2),
472  li_64(72be5d74f27b896f), li_64(80deb1fe3b1696b1),
473  li_64(9bdc06a725c71235), li_64(c19bf174cf692694),
474  li_64(e49b69c19ef14ad2), li_64(efbe4786384f25e3),
475  li_64(0fc19dc68b8cd5b5), li_64(240ca1cc77ac9c65),
476  li_64(2de92c6f592b0275), li_64(4a7484aa6ea6e483),
477  li_64(5cb0a9dcbd41fbd4), li_64(76f988da831153b5),
478  li_64(983e5152ee66dfab), li_64(a831c66d2db43210),
479  li_64(b00327c898fb213f), li_64(bf597fc7beef0ee4),
480  li_64(c6e00bf33da88fc2), li_64(d5a79147930aa725),
481  li_64(06ca6351e003826f), li_64(142929670a0e6e70),
482  li_64(27b70a8546d22ffc), li_64(2e1b21385c26c926),
483  li_64(4d2c6dfc5ac42aed), li_64(53380d139d95b3df),
484  li_64(650a73548baf63de), li_64(766a0abb3c77b2a8),
485  li_64(81c2c92e47edaee6), li_64(92722c851482353b),
486  li_64(a2bfe8a14cf10364), li_64(a81a664bbc423001),
487  li_64(c24b8b70d0f89791), li_64(c76c51a30654be30),
488  li_64(d192e819d6ef5218), li_64(d69906245565a910),
489  li_64(f40e35855771202a), li_64(106aa07032bbd1b8),
490  li_64(19a4c116b8d2d0c8), li_64(1e376c085141ab53),
491  li_64(2748774cdf8eeb99), li_64(34b0bcb5e19b48a8),
492  li_64(391c0cb3c5c95a63), li_64(4ed8aa4ae3418acb),
493  li_64(5b9cca4f7763e373), li_64(682e6ff3d6b2b8a3),
494  li_64(748f82ee5defb2fc), li_64(78a5636f43172f60),
495  li_64(84c87814a1f0ab72), li_64(8cc702081a6439ec),
496  li_64(90befffa23631e28), li_64(a4506cebde82bde9),
497  li_64(bef9a3f7b2c67915), li_64(c67178f2e372532b),
498  li_64(ca273eceea26619c), li_64(d186b8c721c0c207),
499  li_64(eada7dd6cde0eb1e), li_64(f57d4f7fee6ed178),
500  li_64(06f067aa72176fba), li_64(0a637dc5a2c898a6),
501  li_64(113f9804bef90dae), li_64(1b710b35131c471b),
502  li_64(28db77f523047d84), li_64(32caab7b40c72493),
503  li_64(3c9ebe0a15c9bebc), li_64(431d67c49c100d4c),
504  li_64(4cc5d4becb3e42b6), li_64(597f299cfc657e2a),
505  li_64(5fcb6fab3ad6faec), li_64(6c44198c4a475817)
506 };
507 
508 /* Compile 128 bytes of hash data into SHA384/512 digest */
509 /* NOTE: this routine assumes that the byte order in the */
510 /* ctx->wbuf[] at this point is such that low address bytes */
511 /* in the ORIGINAL byte stream will go into the high end of */
512 /* words on BOTH big and little endian systems */
513 
514 VOID_RETURN sha512_compile(sha512_ctx ctx[1])
515 { uint_64t v[8], *p = ctx->wbuf;
516  uint_32t j;
517 
518  memcpy(v, ctx->hash, 8 * sizeof(uint_64t));
519 
520  for(j = 0; j < 80; j += 16)
521  {
522  v_cycle( 0, j); v_cycle( 1, j);
523  v_cycle( 2, j); v_cycle( 3, j);
524  v_cycle( 4, j); v_cycle( 5, j);
525  v_cycle( 6, j); v_cycle( 7, j);
526  v_cycle( 8, j); v_cycle( 9, j);
527  v_cycle(10, j); v_cycle(11, j);
528  v_cycle(12, j); v_cycle(13, j);
529  v_cycle(14, j); v_cycle(15, j);
530  }
531 
532  ctx->hash[0] += v[0]; ctx->hash[1] += v[1];
533  ctx->hash[2] += v[2]; ctx->hash[3] += v[3];
534  ctx->hash[4] += v[4]; ctx->hash[5] += v[5];
535  ctx->hash[6] += v[6]; ctx->hash[7] += v[7];
536 }
537 
538 /* SHA384/512 hash data in an array of bytes into hash buffer */
539 /* and call the hash_compile function as required. */
540 /* NOTE: the bsw_64 swap below arranges ctx->wbuf[] so that low */
541 /* address bytes in the ORIGINAL byte stream placed in this */
542 /* buffer will go to the high end of 64-bit words on BOTH big */
543 /* and little endian systems */
544 
545 VOID_RETURN sha512_hash(const unsigned char data[], unsigned long len, sha512_ctx ctx[1])
546 { uint_32t pos = (uint_32t)(ctx->count[0] & SHA512_MASK),
547  space = SHA512_BLOCK_SIZE - pos;
548  const unsigned char *sp = data;
549 
550  if((ctx->count[0] += len) < len)
551  ++(ctx->count[1]);
552 
553  while(len >= space) /* transfer whole blocks while possible */
554  {
555  memcpy(((unsigned char*)ctx->wbuf) + pos, sp, space);
556  sp += space; len -= space; space = SHA512_BLOCK_SIZE; pos = 0;
557  bsw_64(ctx->wbuf, SHA512_BLOCK_SIZE >> 3);
558  sha512_compile(ctx);
559  }
560 
561  memcpy(((unsigned char*)ctx->wbuf) + pos, sp, len);
562 }
563 
564 /* SHA384/512 Final padding and digest calculation */
565 
566 static void sha_end2(unsigned char hval[], sha512_ctx ctx[1], const unsigned int hlen)
567 { uint_32t i = (uint_32t)(ctx->count[0] & SHA512_MASK);
568 
569  /* put bytes in the buffer in an order in which references to */
570  /* 64-bit words will put bytes with lower addresses into the */
571  /* top of 64-bit words on BOTH big and little endian machines */
572  bsw_64(ctx->wbuf, (i + 7) >> 3);
573 
574  /* we now need to mask valid bytes and add the padding which is */
575  /* a single 1 bit and as many zero bits as necessary. Note that */
576  /* we can always add the first padding byte here because the */
577  /* buffer always has at least one empty slot */
578  ctx->wbuf[i >> 3] &= li_64(ffffffffffffff00) << 8 * (~i & 7);
579  ctx->wbuf[i >> 3] |= li_64(0000000000000080) << 8 * (~i & 7);
580 
581  /* we need 17 or more empty byte positions, one for the padding */
582  /* byte (above) and sixteen for the length count. If there is */
583  /* not enough space pad and empty the buffer */
584  if(i > SHA512_BLOCK_SIZE - 17)
585  {
586  if(i < 120) ctx->wbuf[15] = 0;
587  sha512_compile(ctx);
588  i = 0;
589  }
590  else
591  i = (i >> 3) + 1;
592 
593  while(i < 14)
594  ctx->wbuf[i++] = 0;
595 
596  /* the following 64-bit length fields are assembled in the */
597  /* wrong byte order on little endian machines but this is */
598  /* corrected later since they are only ever used as 64-bit */
599  /* word values. */
600  ctx->wbuf[14] = (ctx->count[1] << 3) | (ctx->count[0] >> 61);
601  ctx->wbuf[15] = ctx->count[0] << 3;
602  sha512_compile(ctx);
603 
604  /* extract the hash value as bytes in case the hash buffer is */
605  /* misaligned for 64-bit words */
606  for(i = 0; i < hlen; ++i)
607  hval[i] = (unsigned char)(ctx->hash[i >> 3] >> (8 * (~i & 7)));
608 }
609 
610 #endif
611 
612 #if defined(SHA_384)
613 
614 /* SHA384 initialisation data */
615 
616 const uint_64t i384[80] =
617 {
618  li_64(cbbb9d5dc1059ed8), li_64(629a292a367cd507),
619  li_64(9159015a3070dd17), li_64(152fecd8f70e5939),
620  li_64(67332667ffc00b31), li_64(8eb44a8768581511),
621  li_64(db0c2e0d64f98fa7), li_64(47b5481dbefa4fa4)
622 };
623 
624 VOID_RETURN sha384_begin(sha384_ctx ctx[1])
625 {
626  ctx->count[0] = ctx->count[1] = 0;
627  memcpy(ctx->hash, i384, 8 * sizeof(uint_64t));
628 }
629 
630 VOID_RETURN sha384_end(unsigned char hval[], sha384_ctx ctx[1])
631 {
632  sha_end2(hval, ctx, SHA384_DIGEST_SIZE);
633 }
634 
635 VOID_RETURN sha384(unsigned char hval[], const unsigned char data[], unsigned long len)
636 { sha384_ctx cx[1];
637 
638  sha384_begin(cx);
639  sha384_hash(data, len, cx);
640  sha_end2(hval, cx, SHA384_DIGEST_SIZE);
641 }
642 
643 #endif
644 
645 #if defined(SHA_512)
646 
647 /* SHA512 initialisation data */
648 
649 const uint_64t i512[80] =
650 {
651  li_64(6a09e667f3bcc908), li_64(bb67ae8584caa73b),
652  li_64(3c6ef372fe94f82b), li_64(a54ff53a5f1d36f1),
653  li_64(510e527fade682d1), li_64(9b05688c2b3e6c1f),
654  li_64(1f83d9abfb41bd6b), li_64(5be0cd19137e2179)
655 };
656 
656 
657 VOID_RETURN sha512_begin(sha512_ctx ctx[1])
658 {
659  ctx->count[0] = ctx->count[1] = 0;
660  memcpy(ctx->hash, i512, 8 * sizeof(uint_64t));
661 }
662 
663 VOID_RETURN sha512_end(unsigned char hval[], sha512_ctx ctx[1])
664 {
665  sha_end2(hval, ctx, SHA512_DIGEST_SIZE);
666 }
667 
668 VOID_RETURN sha512(unsigned char hval[], const unsigned char data[], unsigned long len)
669 { sha512_ctx cx[1];
670 
671  sha512_begin(cx);
672  sha512_hash(data, len, cx);
673  sha_end2(hval, cx, SHA512_DIGEST_SIZE);
674 }
675 
676 #endif
677 
678 #if defined(SHA_2)
679 
680 #define CTX_224(x) ((x)->uu->ctx256)
681 #define CTX_256(x) ((x)->uu->ctx256)
682 #define CTX_384(x) ((x)->uu->ctx512)
683 #define CTX_512(x) ((x)->uu->ctx512)
684 
685 /* SHA2 initialisation */
686 
687 INT_RETURN sha2_begin(unsigned long len, sha2_ctx ctx[1])
688 {
689  switch(len)
690  {
691 #if defined(SHA_224)
692  case 224:
693  case 28: CTX_256(ctx)->count[0] = CTX_256(ctx)->count[1] = 0;
694  memcpy(CTX_256(ctx)->hash, i224, 32);
695  ctx->sha2_len = 28; return EXIT_SUCCESS;
696 #endif
697 #if defined(SHA_256)
698  case 256:
699  case 32: CTX_256(ctx)->count[0] = CTX_256(ctx)->count[1] = 0;
700  memcpy(CTX_256(ctx)->hash, i256, 32);
701  ctx->sha2_len = 32; return EXIT_SUCCESS;
702 #endif
703 #if defined(SHA_384)
704  case 384:
705  case 48: CTX_384(ctx)->count[0] = CTX_384(ctx)->count[1] = 0;
706  memcpy(CTX_384(ctx)->hash, i384, 64);
707  ctx->sha2_len = 48; return EXIT_SUCCESS;
708 #endif
709 #if defined(SHA_512)
710  case 512:
711  case 64: CTX_512(ctx)->count[0] = CTX_512(ctx)->count[1] = 0;
712  memcpy(CTX_512(ctx)->hash, i512, 64);
713  ctx->sha2_len = 64; return EXIT_SUCCESS;
714 #endif
715  default: return EXIT_FAILURE;
716  }
717 }
718 
719 VOID_RETURN sha2_hash(const unsigned char data[], unsigned long len, sha2_ctx ctx[1])
720 {
721  switch(ctx->sha2_len)
722  {
723 #if defined(SHA_224)
724  case 28: sha224_hash(data, len, CTX_224(ctx)); return;
725 #endif
726 #if defined(SHA_256)
727  case 32: sha256_hash(data, len, CTX_256(ctx)); return;
728 #endif
729 #if defined(SHA_384)
730  case 48: sha384_hash(data, len, CTX_384(ctx)); return;
731 #endif
732 #if defined(SHA_512)
733  case 64: sha512_hash(data, len, CTX_512(ctx)); return;
734 #endif
735  }
736 }
737 
738 VOID_RETURN sha2_end(unsigned char hval[], sha2_ctx ctx[1])
739 {
740  switch(ctx->sha2_len)
741  {
742 #if defined(SHA_224)
743  case 28: sha_end1(hval, CTX_224(ctx), SHA224_DIGEST_SIZE); return;
744 #endif
745 #if defined(SHA_256)
746  case 32: sha_end1(hval, CTX_256(ctx), SHA256_DIGEST_SIZE); return;
747 #endif
748 #if defined(SHA_384)
749  case 48: sha_end2(hval, CTX_384(ctx), SHA384_DIGEST_SIZE); return;
750 #endif
751 #if defined(SHA_512)
752  case 64: sha_end2(hval, CTX_512(ctx), SHA512_DIGEST_SIZE); return;
753 #endif
754  }
755 }
756 
757 INT_RETURN sha2(unsigned char hval[], unsigned long size,
758  const unsigned char data[], unsigned long len)
759 { sha2_ctx cx[1];
760 
761  if(sha2_begin(size, cx) == EXIT_SUCCESS)
762  {
763  sha2_hash(data, len, cx); sha2_end(hval, cx); return EXIT_SUCCESS;
764  }
765  else
766  return EXIT_FAILURE;
767 }
768 
769 #endif
770 
771 #if defined(__cplusplus)
772 }
773 #endif
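
Usage note (editor's sketch): the incremental SHA-256 interface described in the header comment above can be driven as shown below. This is illustrative only and not part of the original file; it assumes a build with SHA_256 enabled and sha2.h on the include path.

/* hash a message delivered in two pieces with the incremental API */
#include <stdio.h>
#include <string.h>
#include "sha2.h"

int main(void)
{   sha256_ctx ctx[1];
    unsigned char digest[SHA256_DIGEST_SIZE];
    const char *part1 = "hello, ", *part2 = "world";
    unsigned int i;

    sha256_begin(ctx);                                  /* set up the context          */
    sha256_hash((const unsigned char*)part1,
                (unsigned long)strlen(part1), ctx);     /* hash data as it arrives     */
    sha256_hash((const unsigned char*)part2,
                (unsigned long)strlen(part2), ctx);
    sha256_end(digest, ctx);                            /* pad, finish, extract digest */

    for(i = 0; i < SHA256_DIGEST_SIZE; ++i)
        printf("%02x", digest[i]);
    printf("\n");
    return 0;
}

When the whole message is already in memory, the one-shot helpers defined in this file (sha224, sha256, sha384 and sha512) perform the begin, hash and end steps in a single call, e.g. sha256(digest, data, len).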
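
A similar sketch for the length-parameterised sha2 interface: sha2_begin selects the algorithm from the requested digest size (224/256/384/512 bits, or the equivalent 28/32/48/64 bytes) and records it in ctx->sha2_len, so the subsequent sha2_hash and sha2_end calls need no length argument. Again this is illustrative only and assumes SHA_2 and SHA_384 are enabled in the build.

/* select SHA-384 at run time through the generic sha2 interface */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "sha2.h"

int main(void)
{   sha2_ctx cx[1];
    unsigned char hval[SHA384_DIGEST_SIZE];
    const char *msg = "abc";
    unsigned int i;

    if(sha2_begin(384, cx) != EXIT_SUCCESS)     /* 48 (bytes) is accepted as well    */
        return 1;
    sha2_hash((const unsigned char*)msg, (unsigned long)strlen(msg), cx);
    sha2_end(hval, cx);                         /* digest length fixed by sha2_begin */

    for(i = 0; i < SHA384_DIGEST_SIZE; ++i)
        printf("%02x", hval[i]);
    printf("\n");
    return 0;
}

The one-shot wrapper sha2(hval, size, data, len) performs the same three steps internally and returns EXIT_FAILURE if the requested size is not supported.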