/* $Id: sha2.c 227 2010-06-16 17:28:38Z tp $ */
/*
 * SHA-224 / SHA-256 implementation.
 *
 * ==========================(LICENSE BEGIN)============================
 *
 * Copyright (c) 2007-2010 Projet RNRT SAPHIR
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * ===========================(LICENSE END)=============================
 */

#include <stddef.h>
#include <string.h>

#include "sph_sha2.h"

#if SPH_SMALL_FOOTPRINT && !defined SPH_SMALL_FOOTPRINT_SHA2
#define SPH_SMALL_FOOTPRINT_SHA2   1
#endif
#define CH(X, Y, Z)    ((((Y) ^ (Z)) & (X)) ^ (Z))
#define MAJ(X, Y, Z)   (((Y) & (Z)) | (((Y) | (Z)) & (X)))

#define ROTR   SPH_ROTR32

#define BSG2_0(x)   (ROTR(x, 2) ^ ROTR(x, 13) ^ ROTR(x, 22))
#define BSG2_1(x)   (ROTR(x, 6) ^ ROTR(x, 11) ^ ROTR(x, 25))
#define SSG2_0(x)   (ROTR(x, 7) ^ ROTR(x, 18) ^ SPH_T32((x) >> 3))
#define SSG2_1(x)   (ROTR(x, 17) ^ ROTR(x, 19) ^ SPH_T32((x) >> 10))
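
/*
 * CH, MAJ, BSG2_0, BSG2_1, SSG2_0 and SSG2_1 are the Ch, Maj,
 * "big sigma" and "small sigma" functions from the SHA-2
 * specification (FIPS 180), specialized for 32-bit words.
 */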

static const sph_u32 H224[8] = {
    SPH_C32(0xC1059ED8), SPH_C32(0x367CD507), SPH_C32(0x3070DD17),
    SPH_C32(0xF70E5939), SPH_C32(0xFFC00B31), SPH_C32(0x68581511),
    SPH_C32(0x64F98FA7), SPH_C32(0xBEFA4FA4)
};

static const sph_u32 H256[8] = {
    SPH_C32(0x6A09E667), SPH_C32(0xBB67AE85), SPH_C32(0x3C6EF372),
    SPH_C32(0xA54FF53A), SPH_C32(0x510E527F), SPH_C32(0x9B05688C),
    SPH_C32(0x1F83D9AB), SPH_C32(0x5BE0CD19)
};
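
/*
 * H224 and H256 above are the standard SHA-224 and SHA-256 initial
 * state values defined in FIPS 180.
 */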
65 * The SHA2_ROUND_BODY defines the body for a SHA-224 / SHA-256
66 * compression function implementation. The "in" parameter should
67 * evaluate, when applied to a numerical input parameter from 0 to 15,
68 * to an expression which yields the corresponding input block. The "r"
69 * parameter should evaluate to an array or pointer expression
70 * designating the array of 8 words which contains the input and output
71 * of the compression function.
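
/*
 * Illustrative sketch (not part of the original code): a caller would
 * typically define a macro mapping indices 0..15 to big-endian 32-bit
 * message words and expand the body over an 8-word state, e.g.:
 *
 *     #define MY_IN(x)   sph_dec32be(buf + 4 * (x))
 *     SHA2_ROUND_BODY(MY_IN, state);
 *     #undef MY_IN
 *
 * where "MY_IN", "buf" and "state" are hypothetical names; the
 * sha2_round() and sph_sha224_comp() functions below use this exact
 * pattern.
 */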

#if SPH_SMALL_FOOTPRINT_SHA2

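/*
 * K holds the 64 SHA-256 round constants from FIPS 180; the
 * small-footprint code indexes it as K[pcount + pc] from within the
 * compression loop.
 */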
static const sph_u32 K[64] = {
    SPH_C32(0x428A2F98), SPH_C32(0x71374491),
    SPH_C32(0xB5C0FBCF), SPH_C32(0xE9B5DBA5),
    SPH_C32(0x3956C25B), SPH_C32(0x59F111F1),
    SPH_C32(0x923F82A4), SPH_C32(0xAB1C5ED5),
    SPH_C32(0xD807AA98), SPH_C32(0x12835B01),
    SPH_C32(0x243185BE), SPH_C32(0x550C7DC3),
    SPH_C32(0x72BE5D74), SPH_C32(0x80DEB1FE),
    SPH_C32(0x9BDC06A7), SPH_C32(0xC19BF174),
    SPH_C32(0xE49B69C1), SPH_C32(0xEFBE4786),
    SPH_C32(0x0FC19DC6), SPH_C32(0x240CA1CC),
    SPH_C32(0x2DE92C6F), SPH_C32(0x4A7484AA),
    SPH_C32(0x5CB0A9DC), SPH_C32(0x76F988DA),
    SPH_C32(0x983E5152), SPH_C32(0xA831C66D),
    SPH_C32(0xB00327C8), SPH_C32(0xBF597FC7),
    SPH_C32(0xC6E00BF3), SPH_C32(0xD5A79147),
    SPH_C32(0x06CA6351), SPH_C32(0x14292967),
    SPH_C32(0x27B70A85), SPH_C32(0x2E1B2138),
    SPH_C32(0x4D2C6DFC), SPH_C32(0x53380D13),
    SPH_C32(0x650A7354), SPH_C32(0x766A0ABB),
    SPH_C32(0x81C2C92E), SPH_C32(0x92722C85),
    SPH_C32(0xA2BFE8A1), SPH_C32(0xA81A664B),
    SPH_C32(0xC24B8B70), SPH_C32(0xC76C51A3),
    SPH_C32(0xD192E819), SPH_C32(0xD6990624),
    SPH_C32(0xF40E3585), SPH_C32(0x106AA070),
    SPH_C32(0x19A4C116), SPH_C32(0x1E376C08),
    SPH_C32(0x2748774C), SPH_C32(0x34B0BCB5),
    SPH_C32(0x391C0CB3), SPH_C32(0x4ED8AA4A),
    SPH_C32(0x5B9CCA4F), SPH_C32(0x682E6FF3),
    SPH_C32(0x748F82EE), SPH_C32(0x78A5636F),
    SPH_C32(0x84C87814), SPH_C32(0x8CC70208),
    SPH_C32(0x90BEFFFA), SPH_C32(0xA4506CEB),
    SPH_C32(0xBEF9A3F7), SPH_C32(0xC67178F2)
};

#define SHA2_MEXP1(in, pc) do { \
        W[pc] = in(pc); \
    } while (0)

#define SHA2_MEXP2(in, pc) do { \
        W[(pc) & 0x0F] = SPH_T32(SSG2_1(W[((pc) - 2) & 0x0F]) \
            + W[((pc) - 7) & 0x0F] \
            + SSG2_0(W[((pc) - 15) & 0x0F]) + W[(pc) & 0x0F]); \
    } while (0)
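
/*
 * The message schedule is kept in a 16-word circular buffer W[]:
 * SHA2_MEXP2 computes W[t] = SSG2_1(W[t - 2]) + W[t - 7]
 * + SSG2_0(W[t - 15]) + W[t - 16], with all indices reduced modulo 16
 * through the "& 0x0F" masks.
 */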

#define SHA2_STEPn(n, a, b, c, d, e, f, g, h, in, pc) do { \
        sph_u32 t1, t2; \
        SHA2_MEXP ## n(in, pc); \
        t1 = SPH_T32(h + BSG2_1(e) + CH(e, f, g) \
            + K[pcount + (pc)] + W[(pc) & 0x0F]); \
        t2 = SPH_T32(BSG2_0(a) + MAJ(a, b, c)); \
        d = SPH_T32(d + t1); \
        h = SPH_T32(t1 + t2); \
    } while (0)

#define SHA2_STEP1(a, b, c, d, e, f, g, h, in, pc) \
    SHA2_STEPn(1, a, b, c, d, e, f, g, h, in, pc)
#define SHA2_STEP2(a, b, c, d, e, f, g, h, in, pc) \
    SHA2_STEPn(2, a, b, c, d, e, f, g, h, in, pc)
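
/*
 * SHA2_STEP1 is used for the first 16 rounds, where the schedule words
 * come directly from the input block; SHA2_STEP2 is used for the
 * remaining rounds, where the schedule word is expanded on the fly by
 * SHA2_MEXP2.
 */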

#define SHA2_ROUND_BODY(in, r) do { \
        sph_u32 A, B, C, D, E, F, G, H; \
        sph_u32 W[16]; \
        unsigned pcount; \
 \
        A = (r)[0]; \
        B = (r)[1]; \
        C = (r)[2]; \
        D = (r)[3]; \
        E = (r)[4]; \
        F = (r)[5]; \
        G = (r)[6]; \
        H = (r)[7]; \
        pcount = 0; \
        SHA2_STEP1(A, B, C, D, E, F, G, H, in, 0); \
        SHA2_STEP1(H, A, B, C, D, E, F, G, in, 1); \
        SHA2_STEP1(G, H, A, B, C, D, E, F, in, 2); \
        SHA2_STEP1(F, G, H, A, B, C, D, E, in, 3); \
        SHA2_STEP1(E, F, G, H, A, B, C, D, in, 4); \
        SHA2_STEP1(D, E, F, G, H, A, B, C, in, 5); \
        SHA2_STEP1(C, D, E, F, G, H, A, B, in, 6); \
        SHA2_STEP1(B, C, D, E, F, G, H, A, in, 7); \
        SHA2_STEP1(A, B, C, D, E, F, G, H, in, 8); \
        SHA2_STEP1(H, A, B, C, D, E, F, G, in, 9); \
        SHA2_STEP1(G, H, A, B, C, D, E, F, in, 10); \
        SHA2_STEP1(F, G, H, A, B, C, D, E, in, 11); \
        SHA2_STEP1(E, F, G, H, A, B, C, D, in, 12); \
        SHA2_STEP1(D, E, F, G, H, A, B, C, in, 13); \
        SHA2_STEP1(C, D, E, F, G, H, A, B, in, 14); \
        SHA2_STEP1(B, C, D, E, F, G, H, A, in, 15); \
        for (pcount = 16; pcount < 64; pcount += 16) { \
            SHA2_STEP2(A, B, C, D, E, F, G, H, in, 0); \
            SHA2_STEP2(H, A, B, C, D, E, F, G, in, 1); \
            SHA2_STEP2(G, H, A, B, C, D, E, F, in, 2); \
            SHA2_STEP2(F, G, H, A, B, C, D, E, in, 3); \
            SHA2_STEP2(E, F, G, H, A, B, C, D, in, 4); \
            SHA2_STEP2(D, E, F, G, H, A, B, C, in, 5); \
            SHA2_STEP2(C, D, E, F, G, H, A, B, in, 6); \
            SHA2_STEP2(B, C, D, E, F, G, H, A, in, 7); \
            SHA2_STEP2(A, B, C, D, E, F, G, H, in, 8); \
            SHA2_STEP2(H, A, B, C, D, E, F, G, in, 9); \
            SHA2_STEP2(G, H, A, B, C, D, E, F, in, 10); \
            SHA2_STEP2(F, G, H, A, B, C, D, E, in, 11); \
            SHA2_STEP2(E, F, G, H, A, B, C, D, in, 12); \
            SHA2_STEP2(D, E, F, G, H, A, B, C, in, 13); \
            SHA2_STEP2(C, D, E, F, G, H, A, B, in, 14); \
            SHA2_STEP2(B, C, D, E, F, G, H, A, in, 15); \
        } \
        (r)[0] = SPH_T32((r)[0] + A); \
        (r)[1] = SPH_T32((r)[1] + B); \
        (r)[2] = SPH_T32((r)[2] + C); \
        (r)[3] = SPH_T32((r)[3] + D); \
        (r)[4] = SPH_T32((r)[4] + E); \
        (r)[5] = SPH_T32((r)[5] + F); \
        (r)[6] = SPH_T32((r)[6] + G); \
        (r)[7] = SPH_T32((r)[7] + H); \
    } while (0)

#else

#define SHA2_ROUND_BODY(in, r) do { \
        sph_u32 A, B, C, D, E, F, G, H, T1, T2; \
        sph_u32 W00, W01, W02, W03, W04, W05, W06, W07; \
        sph_u32 W08, W09, W10, W11, W12, W13, W14, W15; \
 \
        A = (r)[0]; \
        B = (r)[1]; \
        C = (r)[2]; \
        D = (r)[3]; \
        E = (r)[4]; \
        F = (r)[5]; \
        G = (r)[6]; \
        H = (r)[7]; \
        W00 = in(0); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0x428A2F98) + W00); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W01 = in(1); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0x71374491) + W01); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W02 = in(2); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0xB5C0FBCF) + W02); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W03 = in(3); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0xE9B5DBA5) + W03); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W04 = in(4); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x3956C25B) + W04); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W05 = in(5); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0x59F111F1) + W05); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W06 = in(6); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x923F82A4) + W06); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W07 = in(7); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0xAB1C5ED5) + W07); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W08 = in(8); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0xD807AA98) + W08); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W09 = in(9); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0x12835B01) + W09); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W10 = in(10); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0x243185BE) + W10); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W11 = in(11); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0x550C7DC3) + W11); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W12 = in(12); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x72BE5D74) + W12); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W13 = in(13); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0x80DEB1FE) + W13); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W14 = in(14); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x9BDC06A7) + W14); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W15 = in(15); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0xC19BF174) + W15); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0xE49B69C1) + W00); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0xEFBE4786) + W01); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0x0FC19DC6) + W02); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0x240CA1CC) + W03); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x2DE92C6F) + W04); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0x4A7484AA) + W05); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x5CB0A9DC) + W06); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0x76F988DA) + W07); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0x983E5152) + W08); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0xA831C66D) + W09); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0xB00327C8) + W10); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0xBF597FC7) + W11); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0xC6E00BF3) + W12); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0xD5A79147) + W13); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x06CA6351) + W14); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0x14292967) + W15); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0x27B70A85) + W00); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0x2E1B2138) + W01); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0x4D2C6DFC) + W02); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0x53380D13) + W03); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x650A7354) + W04); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0x766A0ABB) + W05); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x81C2C92E) + W06); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0x92722C85) + W07); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0xA2BFE8A1) + W08); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0xA81A664B) + W09); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0xC24B8B70) + W10); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0xC76C51A3) + W11); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0xD192E819) + W12); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0xD6990624) + W13); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0xF40E3585) + W14); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0x106AA070) + W15); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W00 = SPH_T32(SSG2_1(W14) + W09 + SSG2_0(W01) + W00); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0x19A4C116) + W00); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W01 = SPH_T32(SSG2_1(W15) + W10 + SSG2_0(W02) + W01); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0x1E376C08) + W01); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W02 = SPH_T32(SSG2_1(W00) + W11 + SSG2_0(W03) + W02); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0x2748774C) + W02); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W03 = SPH_T32(SSG2_1(W01) + W12 + SSG2_0(W04) + W03); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0x34B0BCB5) + W03); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W04 = SPH_T32(SSG2_1(W02) + W13 + SSG2_0(W05) + W04); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x391C0CB3) + W04); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W05 = SPH_T32(SSG2_1(W03) + W14 + SSG2_0(W06) + W05); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0x4ED8AA4A) + W05); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W06 = SPH_T32(SSG2_1(W04) + W15 + SSG2_0(W07) + W06); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0x5B9CCA4F) + W06); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W07 = SPH_T32(SSG2_1(W05) + W00 + SSG2_0(W08) + W07); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0x682E6FF3) + W07); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        W08 = SPH_T32(SSG2_1(W06) + W01 + SSG2_0(W09) + W08); \
        T1 = SPH_T32(H + BSG2_1(E) + CH(E, F, G) \
            + SPH_C32(0x748F82EE) + W08); \
        T2 = SPH_T32(BSG2_0(A) + MAJ(A, B, C)); \
        D = SPH_T32(D + T1); \
        H = SPH_T32(T1 + T2); \
        W09 = SPH_T32(SSG2_1(W07) + W02 + SSG2_0(W10) + W09); \
        T1 = SPH_T32(G + BSG2_1(D) + CH(D, E, F) \
            + SPH_C32(0x78A5636F) + W09); \
        T2 = SPH_T32(BSG2_0(H) + MAJ(H, A, B)); \
        C = SPH_T32(C + T1); \
        G = SPH_T32(T1 + T2); \
        W10 = SPH_T32(SSG2_1(W08) + W03 + SSG2_0(W11) + W10); \
        T1 = SPH_T32(F + BSG2_1(C) + CH(C, D, E) \
            + SPH_C32(0x84C87814) + W10); \
        T2 = SPH_T32(BSG2_0(G) + MAJ(G, H, A)); \
        B = SPH_T32(B + T1); \
        F = SPH_T32(T1 + T2); \
        W11 = SPH_T32(SSG2_1(W09) + W04 + SSG2_0(W12) + W11); \
        T1 = SPH_T32(E + BSG2_1(B) + CH(B, C, D) \
            + SPH_C32(0x8CC70208) + W11); \
        T2 = SPH_T32(BSG2_0(F) + MAJ(F, G, H)); \
        A = SPH_T32(A + T1); \
        E = SPH_T32(T1 + T2); \
        W12 = SPH_T32(SSG2_1(W10) + W05 + SSG2_0(W13) + W12); \
        T1 = SPH_T32(D + BSG2_1(A) + CH(A, B, C) \
            + SPH_C32(0x90BEFFFA) + W12); \
        T2 = SPH_T32(BSG2_0(E) + MAJ(E, F, G)); \
        H = SPH_T32(H + T1); \
        D = SPH_T32(T1 + T2); \
        W13 = SPH_T32(SSG2_1(W11) + W06 + SSG2_0(W14) + W13); \
        T1 = SPH_T32(C + BSG2_1(H) + CH(H, A, B) \
            + SPH_C32(0xA4506CEB) + W13); \
        T2 = SPH_T32(BSG2_0(D) + MAJ(D, E, F)); \
        G = SPH_T32(G + T1); \
        C = SPH_T32(T1 + T2); \
        W14 = SPH_T32(SSG2_1(W12) + W07 + SSG2_0(W15) + W14); \
        T1 = SPH_T32(B + BSG2_1(G) + CH(G, H, A) \
            + SPH_C32(0xBEF9A3F7) + W14); \
        T2 = SPH_T32(BSG2_0(C) + MAJ(C, D, E)); \
        F = SPH_T32(F + T1); \
        B = SPH_T32(T1 + T2); \
        W15 = SPH_T32(SSG2_1(W13) + W08 + SSG2_0(W00) + W15); \
        T1 = SPH_T32(A + BSG2_1(F) + CH(F, G, H) \
            + SPH_C32(0xC67178F2) + W15); \
        T2 = SPH_T32(BSG2_0(B) + MAJ(B, C, D)); \
        E = SPH_T32(E + T1); \
        A = SPH_T32(T1 + T2); \
        (r)[0] = SPH_T32((r)[0] + A); \
        (r)[1] = SPH_T32((r)[1] + B); \
        (r)[2] = SPH_T32((r)[2] + C); \
        (r)[3] = SPH_T32((r)[3] + D); \
        (r)[4] = SPH_T32((r)[4] + E); \
        (r)[5] = SPH_T32((r)[5] + F); \
        (r)[6] = SPH_T32((r)[6] + G); \
        (r)[7] = SPH_T32((r)[7] + H); \
    } while (0)

#endif

/*
 * One round of SHA-224 / SHA-256. The data must be aligned for 32-bit access.
 */
static void
sha2_round(const unsigned char *data, sph_u32 r[8])
{
#define SHA2_IN(x)   sph_dec32be_aligned(data + (4 * (x)))
    SHA2_ROUND_BODY(SHA2_IN, r);
#undef SHA2_IN
}

/* see sph_sha2.h */
void
sph_sha224_init(void *cc)
{
    sph_sha224_context *sc;

    sc = cc;
    memcpy(sc->val, H224, sizeof H224);
#if SPH_64
    sc->count = 0;
#else
    sc->count_high = sc->count_low = 0;
#endif
}

/* see sph_sha2.h */
void
sph_sha256_init(void *cc)
{
    sph_sha256_context *sc;

    sc = cc;
    memcpy(sc->val, H256, sizeof H256);
#if SPH_64
    sc->count = 0;
#else
    sc->count_high = sc->count_low = 0;
#endif
}
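
/*
 * SHA-224 and SHA-256 share the compression function and the padding /
 * length-counter handling pulled in below; they differ only in the
 * initial state (H224 vs H256) and in how many state words the close
 * functions copy to the output (7 vs 8).
 */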

#define RFUN   sha2_round
#define HASH   sha224
#define BE32   1
#include "md_helper.c"

/* see sph_sha2.h */
void
sph_sha224_close(void *cc, void *dst)
{
    sha224_close(cc, dst, 7);
    sph_sha224_init(cc);
}

/* see sph_sha2.h */
void
sph_sha224_addbits_and_close(void *cc, unsigned ub, unsigned n, void *dst)
{
    sha224_addbits_and_close(cc, ub, n, dst, 7);
    sph_sha224_init(cc);
}

/* see sph_sha2.h */
void
sph_sha256_close(void *cc, void *dst)
{
    sha224_close(cc, dst, 8);
    sph_sha256_init(cc);
}

/* see sph_sha2.h */
void
sph_sha256_addbits_and_close(void *cc, unsigned ub, unsigned n, void *dst)
{
    sha224_addbits_and_close(cc, ub, n, dst, 8);
    sph_sha256_init(cc);
}
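
/*
 * Minimal usage sketch (an assumption based on the streaming API
 * declared in sph_sha2.h, where the sph_sha256() update function is
 * generated through md_helper.c):
 *
 *     sph_sha256_context ctx;
 *     unsigned char digest[32];
 *
 *     sph_sha256_init(&ctx);
 *     sph_sha256(&ctx, data, data_len);   // data/data_len: caller-supplied buffer
 *     sph_sha256_close(&ctx, digest);     // writes 32 bytes, then reinitializes ctx
 */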

/* see sph_sha2.h */
void
sph_sha224_comp(const sph_u32 msg[16], sph_u32 val[8])
{
#define SHA2_IN(x)   msg[x]
    SHA2_ROUND_BODY(SHA2_IN, val);
#undef SHA2_IN
}