pq_crypto 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93) hide show
  1. checksums.yaml +7 -0
  2. data/.github/workflows/ci.yml +37 -0
  3. data/CHANGELOG.md +29 -0
  4. data/GET_STARTED.md +65 -0
  5. data/LICENSE.txt +21 -0
  6. data/README.md +135 -0
  7. data/SECURITY.md +57 -0
  8. data/ext/pqcrypto/extconf.rb +157 -0
  9. data/ext/pqcrypto/mldsa_api.h +51 -0
  10. data/ext/pqcrypto/mlkem_api.h +21 -0
  11. data/ext/pqcrypto/pqcrypto_ruby_secure.c +889 -0
  12. data/ext/pqcrypto/pqcrypto_secure.c +1178 -0
  13. data/ext/pqcrypto/pqcrypto_secure.h +135 -0
  14. data/ext/pqcrypto/vendor/.vendored +5 -0
  15. data/ext/pqcrypto/vendor/pqclean/common/aes.c +639 -0
  16. data/ext/pqcrypto/vendor/pqclean/common/aes.h +64 -0
  17. data/ext/pqcrypto/vendor/pqclean/common/compat.h +73 -0
  18. data/ext/pqcrypto/vendor/pqclean/common/crypto_declassify.h +7 -0
  19. data/ext/pqcrypto/vendor/pqclean/common/fips202.c +928 -0
  20. data/ext/pqcrypto/vendor/pqclean/common/fips202.h +166 -0
  21. data/ext/pqcrypto/vendor/pqclean/common/keccak2x/feat.S +168 -0
  22. data/ext/pqcrypto/vendor/pqclean/common/keccak2x/fips202x2.c +684 -0
  23. data/ext/pqcrypto/vendor/pqclean/common/keccak2x/fips202x2.h +60 -0
  24. data/ext/pqcrypto/vendor/pqclean/common/keccak4x/KeccakP-1600-times4-SIMD256.c +1028 -0
  25. data/ext/pqcrypto/vendor/pqclean/common/keccak4x/KeccakP-1600-times4-SnP.h +50 -0
  26. data/ext/pqcrypto/vendor/pqclean/common/keccak4x/KeccakP-1600-unrolling.macros +198 -0
  27. data/ext/pqcrypto/vendor/pqclean/common/keccak4x/Makefile +8 -0
  28. data/ext/pqcrypto/vendor/pqclean/common/keccak4x/Makefile.Microsoft_nmake +8 -0
  29. data/ext/pqcrypto/vendor/pqclean/common/keccak4x/SIMD256-config.h +3 -0
  30. data/ext/pqcrypto/vendor/pqclean/common/keccak4x/align.h +34 -0
  31. data/ext/pqcrypto/vendor/pqclean/common/keccak4x/brg_endian.h +142 -0
  32. data/ext/pqcrypto/vendor/pqclean/common/nistseedexpander.c +101 -0
  33. data/ext/pqcrypto/vendor/pqclean/common/nistseedexpander.h +39 -0
  34. data/ext/pqcrypto/vendor/pqclean/common/randombytes.c +355 -0
  35. data/ext/pqcrypto/vendor/pqclean/common/randombytes.h +27 -0
  36. data/ext/pqcrypto/vendor/pqclean/common/sha2.c +769 -0
  37. data/ext/pqcrypto/vendor/pqclean/common/sha2.h +173 -0
  38. data/ext/pqcrypto/vendor/pqclean/common/sp800-185.c +156 -0
  39. data/ext/pqcrypto/vendor/pqclean/common/sp800-185.h +27 -0
  40. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/LICENSE +5 -0
  41. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/Makefile +19 -0
  42. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/Makefile.Microsoft_nmake +23 -0
  43. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/api.h +18 -0
  44. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/cbd.c +83 -0
  45. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/cbd.h +11 -0
  46. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/indcpa.c +327 -0
  47. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/indcpa.h +22 -0
  48. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/kem.c +164 -0
  49. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/kem.h +23 -0
  50. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/ntt.c +146 -0
  51. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/ntt.h +14 -0
  52. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/params.h +36 -0
  53. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/poly.c +299 -0
  54. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/poly.h +37 -0
  55. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/polyvec.c +188 -0
  56. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/polyvec.h +26 -0
  57. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/reduce.c +41 -0
  58. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/reduce.h +13 -0
  59. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/symmetric-shake.c +71 -0
  60. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/symmetric.h +30 -0
  61. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/verify.c +67 -0
  62. data/ext/pqcrypto/vendor/pqclean/crypto_kem/ml-kem-768/clean/verify.h +13 -0
  63. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/LICENSE +5 -0
  64. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/Makefile +19 -0
  65. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/Makefile.Microsoft_nmake +23 -0
  66. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/api.h +50 -0
  67. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/ntt.c +98 -0
  68. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/ntt.h +10 -0
  69. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/packing.c +261 -0
  70. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/packing.h +31 -0
  71. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/params.h +44 -0
  72. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/poly.c +799 -0
  73. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/poly.h +52 -0
  74. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/polyvec.c +415 -0
  75. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/polyvec.h +65 -0
  76. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/reduce.c +69 -0
  77. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/reduce.h +17 -0
  78. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/rounding.c +92 -0
  79. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/rounding.h +14 -0
  80. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/sign.c +407 -0
  81. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/sign.h +47 -0
  82. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/symmetric-shake.c +26 -0
  83. data/ext/pqcrypto/vendor/pqclean/crypto_sign/ml-dsa-65/clean/symmetric.h +34 -0
  84. data/lib/pq_crypto/errors.rb +10 -0
  85. data/lib/pq_crypto/hybrid_kem.rb +106 -0
  86. data/lib/pq_crypto/kem.rb +199 -0
  87. data/lib/pq_crypto/serialization.rb +102 -0
  88. data/lib/pq_crypto/signature.rb +198 -0
  89. data/lib/pq_crypto/version.rb +5 -0
  90. data/lib/pq_crypto.rb +177 -0
  91. data/lib/pqcrypto.rb +3 -0
  92. data/script/vendor_libs.rb +199 -0
  93. metadata +195 -0
@@ -0,0 +1,769 @@
1
+ /* Based on the public domain implementation in
2
+ * crypto_hash/sha512/ref/ from http://bench.cr.yp.to/supercop.html
3
+ * by D. J. Bernstein */
4
+
5
+ #include <stddef.h>
6
+ #include <stdint.h>
7
+ #include <stdlib.h>
8
+ #include <string.h>
9
+
10
+ #include "sha2.h"
11
+
12
/* Read a 32-bit unsigned integer stored big-endian at x. */
static uint32_t load_bigendian_32(const uint8_t *x) {
    uint32_t result = 0;
    for (size_t i = 0; i < 4; i++) {
        result = (result << 8) | (uint32_t)x[i];
    }
    return result;
}
16
+
17
/* Read a 64-bit unsigned integer stored big-endian at x. */
static uint64_t load_bigendian_64(const uint8_t *x) {
    uint64_t result = 0;
    for (size_t i = 0; i < 8; i++) {
        result = (result << 8) | (uint64_t)x[i];
    }
    return result;
}
23
+
24
/* Write the low 32 bits of u to x in big-endian byte order.
 * (The parameter is uint64_t so callers can pass the byte counter
 * without a cast; only the low 4 bytes are stored.) */
static void store_bigendian_32(uint8_t *x, uint64_t u) {
    for (int i = 3; i >= 0; i--) {
        x[i] = (uint8_t)u;
        u >>= 8;
    }
}
33
+
34
/* Write u to x in big-endian byte order (most significant byte first). */
static void store_bigendian_64(uint8_t *x, uint64_t u) {
    for (int i = 7; i >= 0; i--) {
        x[i] = (uint8_t)u;
        u >>= 8;
    }
}
51
+
52
/* Logical shift right and rotate-right primitives (FIPS 180-4 notation). */
#define SHR(x, c) ((x) >> (c))
#define ROTR_32(x, c) (((x) >> (c)) | ((x) << (32 - (c))))
#define ROTR_64(x, c) (((x) >> (c)) | ((x) << (64 - (c))))

/* Choice and majority functions shared by SHA-256 and SHA-512. */
#define Ch(x, y, z) (((x) & (y)) ^ (~(x) & (z)))
#define Maj(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))

/* SHA-256 big-sigma (compression) and small-sigma (schedule) functions. */
#define Sigma0_32(x) (ROTR_32(x, 2) ^ ROTR_32(x,13) ^ ROTR_32(x,22))
#define Sigma1_32(x) (ROTR_32(x, 6) ^ ROTR_32(x,11) ^ ROTR_32(x,25))
#define sigma0_32(x) (ROTR_32(x, 7) ^ ROTR_32(x,18) ^ SHR(x, 3))
#define sigma1_32(x) (ROTR_32(x,17) ^ ROTR_32(x,19) ^ SHR(x,10))

/* SHA-512 big-sigma and small-sigma functions. */
#define Sigma0_64(x) (ROTR_64(x, 28) ^ ROTR_64(x, 34) ^ ROTR_64(x, 39))
#define Sigma1_64(x) (ROTR_64(x, 14) ^ ROTR_64(x, 18) ^ ROTR_64(x, 41))
#define sigma0_64(x) (ROTR_64(x, 1) ^ ROTR_64(x, 8) ^ SHR(x, 7))
#define sigma1_64(x) (ROTR_64(x, 19) ^ ROTR_64(x, 61) ^ SHR(x, 6))

/* One message-schedule update: w0 <- sigma1(w14) + w9 + sigma0(w1) + w0. */
#define M_32(w0, w14, w9, w1) w0 = sigma1_32(w14) + (w9) + sigma0_32(w1) + (w0);
#define M_64(w0, w14, w9, w1) w0 = sigma1_64(w14) + (w9) + sigma0_64(w1) + (w0);

/* Expand the 16-word window w0..w15 in place to the next 16 schedule
 * words; applied between each group of 16 rounds. */
#define EXPAND_32 \
    M_32(w0, w14, w9, w1) \
    M_32(w1, w15, w10, w2) \
    M_32(w2, w0, w11, w3) \
    M_32(w3, w1, w12, w4) \
    M_32(w4, w2, w13, w5) \
    M_32(w5, w3, w14, w6) \
    M_32(w6, w4, w15, w7) \
    M_32(w7, w5, w0, w8) \
    M_32(w8, w6, w1, w9) \
    M_32(w9, w7, w2, w10) \
    M_32(w10, w8, w3, w11) \
    M_32(w11, w9, w4, w12) \
    M_32(w12, w10, w5, w13) \
    M_32(w13, w11, w6, w14) \
    M_32(w14, w12, w7, w15) \
    M_32(w15, w13, w8, w0)

#define EXPAND_64 \
    M_64(w0, w14, w9, w1) \
    M_64(w1, w15, w10, w2) \
    M_64(w2, w0, w11, w3) \
    M_64(w3, w1, w12, w4) \
    M_64(w4, w2, w13, w5) \
    M_64(w5, w3, w14, w6) \
    M_64(w6, w4, w15, w7) \
    M_64(w7, w5, w0, w8) \
    M_64(w8, w6, w1, w9) \
    M_64(w9, w7, w2, w10) \
    M_64(w10, w8, w3, w11) \
    M_64(w11, w9, w4, w12) \
    M_64(w12, w10, w5, w13) \
    M_64(w13, w11, w6, w14) \
    M_64(w14, w12, w7, w15) \
    M_64(w15, w13, w8, w0)

/* One SHA-256 round: mixes schedule word w and round constant k into
 * the working variables a..h (which rotate down by one position). */
#define F_32(w, k) \
    T1 = h + Sigma1_32(e) + Ch(e, f, g) + (k) + (w); \
    T2 = Sigma0_32(a) + Maj(a, b, c); \
    h = g; \
    g = f; \
    f = e; \
    e = d + T1; \
    d = c; \
    c = b; \
    b = a; \
    a = T1 + T2;

/* One SHA-512 round; identical structure to F_32 on 64-bit words. */
#define F_64(w, k) \
    T1 = h + Sigma1_64(e) + Ch(e, f, g) + (k) + (w); \
    T2 = Sigma0_64(a) + Maj(a, b, c); \
    h = g; \
    g = f; \
    f = e; \
    e = d + T1; \
    d = c; \
    c = b; \
    b = a; \
    a = T1 + T2;
131
+
132
/* Compress as many whole 64-byte blocks of `in` as fit in `inlen` into
 * the SHA-256 chaining state, stored big-endian (8 x 32-bit words) at
 * statebytes[0..31].  Returns the number of leftover bytes (< 64) that
 * were not consumed; the caller buffers or pads them. */
static size_t crypto_hashblocks_sha256(uint8_t *statebytes,
                                       const uint8_t *in, size_t inlen) {
    uint32_t state[8];
    uint32_t a;
    uint32_t b;
    uint32_t c;
    uint32_t d;
    uint32_t e;
    uint32_t f;
    uint32_t g;
    uint32_t h;
    uint32_t T1;
    uint32_t T2;

    /* Load the chaining value into the working variables and keep a
     * copy in state[] for the feed-forward addition after each block. */
    a = load_bigendian_32(statebytes + 0);
    state[0] = a;
    b = load_bigendian_32(statebytes + 4);
    state[1] = b;
    c = load_bigendian_32(statebytes + 8);
    state[2] = c;
    d = load_bigendian_32(statebytes + 12);
    state[3] = d;
    e = load_bigendian_32(statebytes + 16);
    state[4] = e;
    f = load_bigendian_32(statebytes + 20);
    state[5] = f;
    g = load_bigendian_32(statebytes + 24);
    state[6] = g;
    h = load_bigendian_32(statebytes + 28);
    state[7] = h;

    while (inlen >= 64) {
        /* Message schedule window: 16 big-endian words of the block. */
        uint32_t w0 = load_bigendian_32(in + 0);
        uint32_t w1 = load_bigendian_32(in + 4);
        uint32_t w2 = load_bigendian_32(in + 8);
        uint32_t w3 = load_bigendian_32(in + 12);
        uint32_t w4 = load_bigendian_32(in + 16);
        uint32_t w5 = load_bigendian_32(in + 20);
        uint32_t w6 = load_bigendian_32(in + 24);
        uint32_t w7 = load_bigendian_32(in + 28);
        uint32_t w8 = load_bigendian_32(in + 32);
        uint32_t w9 = load_bigendian_32(in + 36);
        uint32_t w10 = load_bigendian_32(in + 40);
        uint32_t w11 = load_bigendian_32(in + 44);
        uint32_t w12 = load_bigendian_32(in + 48);
        uint32_t w13 = load_bigendian_32(in + 52);
        uint32_t w14 = load_bigendian_32(in + 56);
        uint32_t w15 = load_bigendian_32(in + 60);

        /* 64 rounds total: four groups of 16, with the schedule window
         * expanded in place (EXPAND_32) between groups.  The constants
         * are the SHA-256 round constants K[0..63] (FIPS 180-4). */
        F_32(w0, 0x428a2f98)
        F_32(w1, 0x71374491)
        F_32(w2, 0xb5c0fbcf)
        F_32(w3, 0xe9b5dba5)
        F_32(w4, 0x3956c25b)
        F_32(w5, 0x59f111f1)
        F_32(w6, 0x923f82a4)
        F_32(w7, 0xab1c5ed5)
        F_32(w8, 0xd807aa98)
        F_32(w9, 0x12835b01)
        F_32(w10, 0x243185be)
        F_32(w11, 0x550c7dc3)
        F_32(w12, 0x72be5d74)
        F_32(w13, 0x80deb1fe)
        F_32(w14, 0x9bdc06a7)
        F_32(w15, 0xc19bf174)

        EXPAND_32

        F_32(w0, 0xe49b69c1)
        F_32(w1, 0xefbe4786)
        F_32(w2, 0x0fc19dc6)
        F_32(w3, 0x240ca1cc)
        F_32(w4, 0x2de92c6f)
        F_32(w5, 0x4a7484aa)
        F_32(w6, 0x5cb0a9dc)
        F_32(w7, 0x76f988da)
        F_32(w8, 0x983e5152)
        F_32(w9, 0xa831c66d)
        F_32(w10, 0xb00327c8)
        F_32(w11, 0xbf597fc7)
        F_32(w12, 0xc6e00bf3)
        F_32(w13, 0xd5a79147)
        F_32(w14, 0x06ca6351)
        F_32(w15, 0x14292967)

        EXPAND_32

        F_32(w0, 0x27b70a85)
        F_32(w1, 0x2e1b2138)
        F_32(w2, 0x4d2c6dfc)
        F_32(w3, 0x53380d13)
        F_32(w4, 0x650a7354)
        F_32(w5, 0x766a0abb)
        F_32(w6, 0x81c2c92e)
        F_32(w7, 0x92722c85)
        F_32(w8, 0xa2bfe8a1)
        F_32(w9, 0xa81a664b)
        F_32(w10, 0xc24b8b70)
        F_32(w11, 0xc76c51a3)
        F_32(w12, 0xd192e819)
        F_32(w13, 0xd6990624)
        F_32(w14, 0xf40e3585)
        F_32(w15, 0x106aa070)

        EXPAND_32

        F_32(w0, 0x19a4c116)
        F_32(w1, 0x1e376c08)
        F_32(w2, 0x2748774c)
        F_32(w3, 0x34b0bcb5)
        F_32(w4, 0x391c0cb3)
        F_32(w5, 0x4ed8aa4a)
        F_32(w6, 0x5b9cca4f)
        F_32(w7, 0x682e6ff3)
        F_32(w8, 0x748f82ee)
        F_32(w9, 0x78a5636f)
        F_32(w10, 0x84c87814)
        F_32(w11, 0x8cc70208)
        F_32(w12, 0x90befffa)
        F_32(w13, 0xa4506ceb)
        F_32(w14, 0xbef9a3f7)
        F_32(w15, 0xc67178f2)

        /* Feed-forward: add the previous chaining value (Davies-Meyer). */
        a += state[0];
        b += state[1];
        c += state[2];
        d += state[3];
        e += state[4];
        f += state[5];
        g += state[6];
        h += state[7];

        state[0] = a;
        state[1] = b;
        state[2] = c;
        state[3] = d;
        state[4] = e;
        state[5] = f;
        state[6] = g;
        state[7] = h;

        in += 64;
        inlen -= 64;
    }

    /* Persist the updated chaining value back in big-endian form. */
    store_bigendian_32(statebytes + 0, state[0]);
    store_bigendian_32(statebytes + 4, state[1]);
    store_bigendian_32(statebytes + 8, state[2]);
    store_bigendian_32(statebytes + 12, state[3]);
    store_bigendian_32(statebytes + 16, state[4]);
    store_bigendian_32(statebytes + 20, state[5]);
    store_bigendian_32(statebytes + 24, state[6]);
    store_bigendian_32(statebytes + 28, state[7]);

    return inlen;
}
288
+
289
/* Compress as many whole 128-byte blocks of `in` as fit in `inlen`
 * into the SHA-512 chaining state, stored big-endian (8 x 64-bit
 * words) at statebytes[0..63].  Returns the number of leftover bytes
 * (< 128) that were not consumed. */
static size_t crypto_hashblocks_sha512(uint8_t *statebytes,
                                       const uint8_t *in, size_t inlen) {
    uint64_t state[8];
    uint64_t a;
    uint64_t b;
    uint64_t c;
    uint64_t d;
    uint64_t e;
    uint64_t f;
    uint64_t g;
    uint64_t h;
    uint64_t T1;
    uint64_t T2;

    /* Load the chaining value; state[] keeps the copy used by the
     * per-block feed-forward addition. */
    a = load_bigendian_64(statebytes + 0);
    state[0] = a;
    b = load_bigendian_64(statebytes + 8);
    state[1] = b;
    c = load_bigendian_64(statebytes + 16);
    state[2] = c;
    d = load_bigendian_64(statebytes + 24);
    state[3] = d;
    e = load_bigendian_64(statebytes + 32);
    state[4] = e;
    f = load_bigendian_64(statebytes + 40);
    state[5] = f;
    g = load_bigendian_64(statebytes + 48);
    state[6] = g;
    h = load_bigendian_64(statebytes + 56);
    state[7] = h;

    while (inlen >= 128) {
        /* Message schedule window: 16 big-endian 64-bit words. */
        uint64_t w0 = load_bigendian_64(in + 0);
        uint64_t w1 = load_bigendian_64(in + 8);
        uint64_t w2 = load_bigendian_64(in + 16);
        uint64_t w3 = load_bigendian_64(in + 24);
        uint64_t w4 = load_bigendian_64(in + 32);
        uint64_t w5 = load_bigendian_64(in + 40);
        uint64_t w6 = load_bigendian_64(in + 48);
        uint64_t w7 = load_bigendian_64(in + 56);
        uint64_t w8 = load_bigendian_64(in + 64);
        uint64_t w9 = load_bigendian_64(in + 72);
        uint64_t w10 = load_bigendian_64(in + 80);
        uint64_t w11 = load_bigendian_64(in + 88);
        uint64_t w12 = load_bigendian_64(in + 96);
        uint64_t w13 = load_bigendian_64(in + 104);
        uint64_t w14 = load_bigendian_64(in + 112);
        uint64_t w15 = load_bigendian_64(in + 120);

        /* 80 rounds total: five groups of 16, expanding the schedule
         * window (EXPAND_64) between groups.  Constants are the
         * SHA-512 round constants K[0..79] (FIPS 180-4). */
        F_64(w0, 0x428a2f98d728ae22ULL)
        F_64(w1, 0x7137449123ef65cdULL)
        F_64(w2, 0xb5c0fbcfec4d3b2fULL)
        F_64(w3, 0xe9b5dba58189dbbcULL)
        F_64(w4, 0x3956c25bf348b538ULL)
        F_64(w5, 0x59f111f1b605d019ULL)
        F_64(w6, 0x923f82a4af194f9bULL)
        F_64(w7, 0xab1c5ed5da6d8118ULL)
        F_64(w8, 0xd807aa98a3030242ULL)
        F_64(w9, 0x12835b0145706fbeULL)
        F_64(w10, 0x243185be4ee4b28cULL)
        F_64(w11, 0x550c7dc3d5ffb4e2ULL)
        F_64(w12, 0x72be5d74f27b896fULL)
        F_64(w13, 0x80deb1fe3b1696b1ULL)
        F_64(w14, 0x9bdc06a725c71235ULL)
        F_64(w15, 0xc19bf174cf692694ULL)

        EXPAND_64

        F_64(w0, 0xe49b69c19ef14ad2ULL)
        F_64(w1, 0xefbe4786384f25e3ULL)
        F_64(w2, 0x0fc19dc68b8cd5b5ULL)
        F_64(w3, 0x240ca1cc77ac9c65ULL)
        F_64(w4, 0x2de92c6f592b0275ULL)
        F_64(w5, 0x4a7484aa6ea6e483ULL)
        F_64(w6, 0x5cb0a9dcbd41fbd4ULL)
        F_64(w7, 0x76f988da831153b5ULL)
        F_64(w8, 0x983e5152ee66dfabULL)
        F_64(w9, 0xa831c66d2db43210ULL)
        F_64(w10, 0xb00327c898fb213fULL)
        F_64(w11, 0xbf597fc7beef0ee4ULL)
        F_64(w12, 0xc6e00bf33da88fc2ULL)
        F_64(w13, 0xd5a79147930aa725ULL)
        F_64(w14, 0x06ca6351e003826fULL)
        F_64(w15, 0x142929670a0e6e70ULL)

        EXPAND_64

        F_64(w0, 0x27b70a8546d22ffcULL)
        F_64(w1, 0x2e1b21385c26c926ULL)
        F_64(w2, 0x4d2c6dfc5ac42aedULL)
        F_64(w3, 0x53380d139d95b3dfULL)
        F_64(w4, 0x650a73548baf63deULL)
        F_64(w5, 0x766a0abb3c77b2a8ULL)
        F_64(w6, 0x81c2c92e47edaee6ULL)
        F_64(w7, 0x92722c851482353bULL)
        F_64(w8, 0xa2bfe8a14cf10364ULL)
        F_64(w9, 0xa81a664bbc423001ULL)
        F_64(w10, 0xc24b8b70d0f89791ULL)
        F_64(w11, 0xc76c51a30654be30ULL)
        F_64(w12, 0xd192e819d6ef5218ULL)
        F_64(w13, 0xd69906245565a910ULL)
        F_64(w14, 0xf40e35855771202aULL)
        F_64(w15, 0x106aa07032bbd1b8ULL)

        EXPAND_64

        F_64(w0, 0x19a4c116b8d2d0c8ULL)
        F_64(w1, 0x1e376c085141ab53ULL)
        F_64(w2, 0x2748774cdf8eeb99ULL)
        F_64(w3, 0x34b0bcb5e19b48a8ULL)
        F_64(w4, 0x391c0cb3c5c95a63ULL)
        F_64(w5, 0x4ed8aa4ae3418acbULL)
        F_64(w6, 0x5b9cca4f7763e373ULL)
        F_64(w7, 0x682e6ff3d6b2b8a3ULL)
        F_64(w8, 0x748f82ee5defb2fcULL)
        F_64(w9, 0x78a5636f43172f60ULL)
        F_64(w10, 0x84c87814a1f0ab72ULL)
        F_64(w11, 0x8cc702081a6439ecULL)
        F_64(w12, 0x90befffa23631e28ULL)
        F_64(w13, 0xa4506cebde82bde9ULL)
        F_64(w14, 0xbef9a3f7b2c67915ULL)
        F_64(w15, 0xc67178f2e372532bULL)

        EXPAND_64

        F_64(w0, 0xca273eceea26619cULL)
        F_64(w1, 0xd186b8c721c0c207ULL)
        F_64(w2, 0xeada7dd6cde0eb1eULL)
        F_64(w3, 0xf57d4f7fee6ed178ULL)
        F_64(w4, 0x06f067aa72176fbaULL)
        F_64(w5, 0x0a637dc5a2c898a6ULL)
        F_64(w6, 0x113f9804bef90daeULL)
        F_64(w7, 0x1b710b35131c471bULL)
        F_64(w8, 0x28db77f523047d84ULL)
        F_64(w9, 0x32caab7b40c72493ULL)
        F_64(w10, 0x3c9ebe0a15c9bebcULL)
        F_64(w11, 0x431d67c49c100d4cULL)
        F_64(w12, 0x4cc5d4becb3e42b6ULL)
        F_64(w13, 0x597f299cfc657e2aULL)
        F_64(w14, 0x5fcb6fab3ad6faecULL)
        F_64(w15, 0x6c44198c4a475817ULL)

        /* Feed-forward: add the previous chaining value (Davies-Meyer). */
        a += state[0];
        b += state[1];
        c += state[2];
        d += state[3];
        e += state[4];
        f += state[5];
        g += state[6];
        h += state[7];

        state[0] = a;
        state[1] = b;
        state[2] = c;
        state[3] = d;
        state[4] = e;
        state[5] = f;
        state[6] = g;
        state[7] = h;

        in += 128;
        inlen -= 128;
    }

    /* Persist the updated chaining value back in big-endian form. */
    store_bigendian_64(statebytes + 0, state[0]);
    store_bigendian_64(statebytes + 8, state[1]);
    store_bigendian_64(statebytes + 16, state[2]);
    store_bigendian_64(statebytes + 24, state[3]);
    store_bigendian_64(statebytes + 32, state[4]);
    store_bigendian_64(statebytes + 40, state[5]);
    store_bigendian_64(statebytes + 48, state[6]);
    store_bigendian_64(statebytes + 56, state[7]);

    return inlen;
}
464
+
465
/* Initial hash values (IVs) from FIPS 180-4, serialized big-endian so
 * they can be copied directly into the byte-oriented context buffers.
 * SHA-224/SHA-256 use 8 x 32-bit words; SHA-384/SHA-512 use 8 x 64-bit
 * words. */
static const uint8_t iv_224[32] = {
    0xc1, 0x05, 0x9e, 0xd8, 0x36, 0x7c, 0xd5, 0x07,
    0x30, 0x70, 0xdd, 0x17, 0xf7, 0x0e, 0x59, 0x39,
    0xff, 0xc0, 0x0b, 0x31, 0x68, 0x58, 0x15, 0x11,
    0x64, 0xf9, 0x8f, 0xa7, 0xbe, 0xfa, 0x4f, 0xa4
};

static const uint8_t iv_256[32] = {
    0x6a, 0x09, 0xe6, 0x67, 0xbb, 0x67, 0xae, 0x85,
    0x3c, 0x6e, 0xf3, 0x72, 0xa5, 0x4f, 0xf5, 0x3a,
    0x51, 0x0e, 0x52, 0x7f, 0x9b, 0x05, 0x68, 0x8c,
    0x1f, 0x83, 0xd9, 0xab, 0x5b, 0xe0, 0xcd, 0x19
};

static const uint8_t iv_384[64] = {
    0xcb, 0xbb, 0x9d, 0x5d, 0xc1, 0x05, 0x9e, 0xd8, 0x62, 0x9a, 0x29,
    0x2a, 0x36, 0x7c, 0xd5, 0x07, 0x91, 0x59, 0x01, 0x5a, 0x30, 0x70,
    0xdd, 0x17, 0x15, 0x2f, 0xec, 0xd8, 0xf7, 0x0e, 0x59, 0x39, 0x67,
    0x33, 0x26, 0x67, 0xff, 0xc0, 0x0b, 0x31, 0x8e, 0xb4, 0x4a, 0x87,
    0x68, 0x58, 0x15, 0x11, 0xdb, 0x0c, 0x2e, 0x0d, 0x64, 0xf9, 0x8f,
    0xa7, 0x47, 0xb5, 0x48, 0x1d, 0xbe, 0xfa, 0x4f, 0xa4
};

static const uint8_t iv_512[64] = {
    0x6a, 0x09, 0xe6, 0x67, 0xf3, 0xbc, 0xc9, 0x08, 0xbb, 0x67, 0xae,
    0x85, 0x84, 0xca, 0xa7, 0x3b, 0x3c, 0x6e, 0xf3, 0x72, 0xfe, 0x94,
    0xf8, 0x2b, 0xa5, 0x4f, 0xf5, 0x3a, 0x5f, 0x1d, 0x36, 0xf1, 0x51,
    0x0e, 0x52, 0x7f, 0xad, 0xe6, 0x82, 0xd1, 0x9b, 0x05, 0x68, 0x8c,
    0x2b, 0x3e, 0x6c, 0x1f, 0x1f, 0x83, 0xd9, 0xab, 0xfb, 0x41, 0xbd,
    0x6b, 0x5b, 0xe0, 0xcd, 0x19, 0x13, 0x7e, 0x21, 0x79
};
496
+
497
+ void sha224_inc_init(sha224ctx *state) {
498
+ state->ctx = malloc(PQC_SHA256CTX_BYTES);
499
+ if (state->ctx == NULL) {
500
+ exit(111);
501
+ }
502
+ for (size_t i = 0; i < 32; ++i) {
503
+ state->ctx[i] = iv_224[i];
504
+ }
505
+ for (size_t i = 32; i < 40; ++i) {
506
+ state->ctx[i] = 0;
507
+ }
508
+ }
509
+
510
+ void sha256_inc_init(sha256ctx *state) {
511
+ state->ctx = malloc(PQC_SHA256CTX_BYTES);
512
+ if (state->ctx == NULL) {
513
+ exit(111);
514
+ }
515
+ for (size_t i = 0; i < 32; ++i) {
516
+ state->ctx[i] = iv_256[i];
517
+ }
518
+ for (size_t i = 32; i < 40; ++i) {
519
+ state->ctx[i] = 0;
520
+ }
521
+ }
522
+
523
+ void sha384_inc_init(sha384ctx *state) {
524
+ state->ctx = malloc(PQC_SHA512CTX_BYTES);
525
+ if (state->ctx == NULL) {
526
+ exit(111);
527
+ }
528
+ for (size_t i = 0; i < 64; ++i) {
529
+ state->ctx[i] = iv_384[i];
530
+ }
531
+ for (size_t i = 64; i < 72; ++i) {
532
+ state->ctx[i] = 0;
533
+ }
534
+ }
535
+
536
+ void sha512_inc_init(sha512ctx *state) {
537
+ state->ctx = malloc(PQC_SHA512CTX_BYTES);
538
+ if (state->ctx == NULL) {
539
+ exit(111);
540
+ }
541
+ for (size_t i = 0; i < 64; ++i) {
542
+ state->ctx[i] = iv_512[i];
543
+ }
544
+ for (size_t i = 64; i < 72; ++i) {
545
+ state->ctx[i] = 0;
546
+ }
547
+ }
548
+
549
+ void sha224_inc_ctx_clone(sha224ctx *stateout, const sha224ctx *statein) {
550
+ stateout->ctx = malloc(PQC_SHA256CTX_BYTES);
551
+ if (stateout->ctx == NULL) {
552
+ exit(111);
553
+ }
554
+ memcpy(stateout->ctx, statein->ctx, PQC_SHA256CTX_BYTES);
555
+ }
556
+
557
+ void sha256_inc_ctx_clone(sha256ctx *stateout, const sha256ctx *statein) {
558
+ stateout->ctx = malloc(PQC_SHA256CTX_BYTES);
559
+ if (stateout->ctx == NULL) {
560
+ exit(111);
561
+ }
562
+ memcpy(stateout->ctx, statein->ctx, PQC_SHA256CTX_BYTES);
563
+ }
564
+
565
+ void sha384_inc_ctx_clone(sha384ctx *stateout, const sha384ctx *statein) {
566
+ stateout->ctx = malloc(PQC_SHA512CTX_BYTES);
567
+ if (stateout->ctx == NULL) {
568
+ exit(111);
569
+ }
570
+ memcpy(stateout->ctx, statein->ctx, PQC_SHA512CTX_BYTES);
571
+ }
572
+
573
+ void sha512_inc_ctx_clone(sha512ctx *stateout, const sha512ctx *statein) {
574
+ stateout->ctx = malloc(PQC_SHA512CTX_BYTES);
575
+ if (stateout->ctx == NULL) {
576
+ exit(111);
577
+ }
578
+ memcpy(stateout->ctx, statein->ctx, PQC_SHA512CTX_BYTES);
579
+ }
580
+
581
+ /* Destroy the hash state. */
582
+ void sha224_inc_ctx_release(sha224ctx *state) {
583
+ free(state->ctx);
584
+ }
585
+
586
+ /* Destroy the hash state. */
587
+ void sha256_inc_ctx_release(sha256ctx *state) {
588
+ free(state->ctx);
589
+ }
590
+
591
+ /* Destroy the hash state. */
592
+ void sha384_inc_ctx_release(sha384ctx *state) {
593
+ free(state->ctx);
594
+ }
595
+
596
+ /* Destroy the hash state. */
597
+ void sha512_inc_ctx_release(sha512ctx *state) {
598
+ free(state->ctx);
599
+ }
600
+
601
/* Absorb `inblocks` full 64-byte blocks from `in` and advance the
 * running byte counter stored big-endian at ctx[32..39].  Only whole
 * blocks are accepted; partial data goes through sha256_inc_finalize. */
void sha256_inc_blocks(sha256ctx *state, const uint8_t *in, size_t inblocks) {
    uint64_t bytes = load_bigendian_64(state->ctx + 32);

    /* Return value (leftover bytes) is always 0 here because the input
     * length is an exact multiple of 64. */
    crypto_hashblocks_sha256(state->ctx, in, 64 * inblocks);
    bytes += 64 * inblocks;

    store_bigendian_64(state->ctx + 32, bytes);
}
609
+
610
/* SHA-224 uses the SHA-256 compression unchanged (only the IV and the
 * output truncation differ), so delegate block absorption to the
 * SHA-256 path.  NOTE(review): the cast presumes sha224ctx and
 * sha256ctx share an identical layout — confirm against sha2.h. */
void sha224_inc_blocks(sha224ctx *state, const uint8_t *in, size_t inblocks) {
    sha256_inc_blocks((sha256ctx *) state, in, inblocks);
}
613
+
614
/* Absorb `inblocks` full 128-byte blocks from `in` and advance the
 * running byte counter stored big-endian at ctx[64..71].  Only whole
 * blocks are accepted; partial data goes through sha512_inc_finalize. */
void sha512_inc_blocks(sha512ctx *state, const uint8_t *in, size_t inblocks) {
    uint64_t bytes = load_bigendian_64(state->ctx + 64);

    /* Return value (leftover bytes) is always 0 here because the input
     * length is an exact multiple of 128. */
    crypto_hashblocks_sha512(state->ctx, in, 128 * inblocks);
    bytes += 128 * inblocks;

    store_bigendian_64(state->ctx + 64, bytes);
}
622
+
623
/* SHA-384 uses the SHA-512 compression unchanged (only the IV and the
 * output truncation differ), so delegate block absorption to the
 * SHA-512 path.  NOTE(review): the cast presumes sha384ctx and
 * sha512ctx share an identical layout — confirm against sha2.h. */
void sha384_inc_blocks(sha384ctx *state, const uint8_t *in, size_t inblocks) {
    sha512_inc_blocks((sha512ctx *) state, in, inblocks);
}
626
+
627
/* Absorb the final `inlen` bytes from `in`, apply SHA-256 padding
 * (0x80, zero fill, 64-bit big-endian bit count), and write the
 * 32-byte digest to `out`.  Releases the context, so `state` must not
 * be used or released again after this call. */
void sha256_inc_finalize(uint8_t *out, sha256ctx *state, const uint8_t *in, size_t inlen) {
    uint8_t padded[128];
    /* Total message length in bytes: running counter plus this tail. */
    uint64_t bytes = load_bigendian_64(state->ctx + 32) + inlen;

    /* Consume all whole 64-byte blocks of the tail, then point `in`
     * at the remaining (inlen & 63) unconsumed bytes. */
    crypto_hashblocks_sha256(state->ctx, in, inlen);
    in += inlen;
    inlen &= 63;
    in -= inlen;

    for (size_t i = 0; i < inlen; ++i) {
        padded[i] = in[i];
    }
    padded[inlen] = 0x80;  /* mandatory 1-bit terminator */

    if (inlen < 56) {
        /* Length field fits in this block: pad to 56, then store the
         * bit count.  bytes >> 53 == (bytes * 8) >> 56, i.e. the most
         * significant byte of the 64-bit bit length. */
        for (size_t i = inlen + 1; i < 56; ++i) {
            padded[i] = 0;
        }
        padded[56] = (uint8_t) (bytes >> 53);
        padded[57] = (uint8_t) (bytes >> 45);
        padded[58] = (uint8_t) (bytes >> 37);
        padded[59] = (uint8_t) (bytes >> 29);
        padded[60] = (uint8_t) (bytes >> 21);
        padded[61] = (uint8_t) (bytes >> 13);
        padded[62] = (uint8_t) (bytes >> 5);
        padded[63] = (uint8_t) (bytes << 3);
        crypto_hashblocks_sha256(state->ctx, padded, 64);
    } else {
        /* Tail too long for one block: spill into a second 64-byte
         * block whose last 8 bytes carry the bit count. */
        for (size_t i = inlen + 1; i < 120; ++i) {
            padded[i] = 0;
        }
        padded[120] = (uint8_t) (bytes >> 53);
        padded[121] = (uint8_t) (bytes >> 45);
        padded[122] = (uint8_t) (bytes >> 37);
        padded[123] = (uint8_t) (bytes >> 29);
        padded[124] = (uint8_t) (bytes >> 21);
        padded[125] = (uint8_t) (bytes >> 13);
        padded[126] = (uint8_t) (bytes >> 5);
        padded[127] = (uint8_t) (bytes << 3);
        crypto_hashblocks_sha256(state->ctx, padded, 128);
    }

    /* The first 32 context bytes are the big-endian digest. */
    for (size_t i = 0; i < 32; ++i) {
        out[i] = state->ctx[i];
    }
    sha256_inc_ctx_release(state);
}
674
+
675
+ void sha224_inc_finalize(uint8_t *out, sha224ctx *state, const uint8_t *in, size_t inlen) {
676
+ uint8_t tmp[32];
677
+ sha256_inc_finalize(tmp, (sha256ctx *)state, in, inlen);
678
+
679
+ for (size_t i = 0; i < 28; ++i) {
680
+ out[i] = tmp[i];
681
+ }
682
+ }
683
+
684
/* Absorb the final `inlen` bytes from `in`, apply SHA-512 padding
 * (0x80, zero fill, 128-bit big-endian bit count), and write the
 * 64-byte digest to `out`.  Releases the context, so `state` must not
 * be used or released again after this call. */
void sha512_inc_finalize(uint8_t *out, sha512ctx *state, const uint8_t *in, size_t inlen) {
    uint8_t padded[256];
    /* Total message length in bytes: running counter plus this tail. */
    uint64_t bytes = load_bigendian_64(state->ctx + 64) + inlen;

    /* Consume all whole 128-byte blocks of the tail, then point `in`
     * at the remaining (inlen & 127) unconsumed bytes. */
    crypto_hashblocks_sha512(state->ctx, in, inlen);
    in += inlen;
    inlen &= 127;
    in -= inlen;

    for (size_t i = 0; i < inlen; ++i) {
        padded[i] = in[i];
    }
    padded[inlen] = 0x80;  /* mandatory 1-bit terminator */

    if (inlen < 112) {
        /* The 16-byte length field occupies bytes 112..127; the zero
         * loop covers its high bytes (up to index 118) and the 9
         * stores below write the bit count.  bytes >> 61 is the top
         * byte of (bytes * 8), since the counter is held in bytes. */
        for (size_t i = inlen + 1; i < 119; ++i) {
            padded[i] = 0;
        }
        padded[119] = (uint8_t) (bytes >> 61);
        padded[120] = (uint8_t) (bytes >> 53);
        padded[121] = (uint8_t) (bytes >> 45);
        padded[122] = (uint8_t) (bytes >> 37);
        padded[123] = (uint8_t) (bytes >> 29);
        padded[124] = (uint8_t) (bytes >> 21);
        padded[125] = (uint8_t) (bytes >> 13);
        padded[126] = (uint8_t) (bytes >> 5);
        padded[127] = (uint8_t) (bytes << 3);
        crypto_hashblocks_sha512(state->ctx, padded, 128);
    } else {
        /* Tail too long for one block: spill into a second 128-byte
         * block; the length field occupies bytes 240..255. */
        for (size_t i = inlen + 1; i < 247; ++i) {
            padded[i] = 0;
        }
        padded[247] = (uint8_t) (bytes >> 61);
        padded[248] = (uint8_t) (bytes >> 53);
        padded[249] = (uint8_t) (bytes >> 45);
        padded[250] = (uint8_t) (bytes >> 37);
        padded[251] = (uint8_t) (bytes >> 29);
        padded[252] = (uint8_t) (bytes >> 21);
        padded[253] = (uint8_t) (bytes >> 13);
        padded[254] = (uint8_t) (bytes >> 5);
        padded[255] = (uint8_t) (bytes << 3);
        crypto_hashblocks_sha512(state->ctx, padded, 256);
    }

    /* The first 64 context bytes are the big-endian digest. */
    for (size_t i = 0; i < 64; ++i) {
        out[i] = state->ctx[i];
    }
    sha512_inc_ctx_release(state);
}
733
+
734
+ void sha384_inc_finalize(uint8_t *out, sha384ctx *state, const uint8_t *in, size_t inlen) {
735
+ uint8_t tmp[64];
736
+ sha512_inc_finalize(tmp, (sha512ctx *)state, in, inlen);
737
+
738
+ for (size_t i = 0; i < 48; ++i) {
739
+ out[i] = tmp[i];
740
+ }
741
+ }
742
+
743
+ void sha224(uint8_t *out, const uint8_t *in, size_t inlen) {
744
+ sha224ctx state;
745
+
746
+ sha224_inc_init(&state);
747
+ sha224_inc_finalize(out, &state, in, inlen);
748
+ }
749
+
750
+ void sha256(uint8_t *out, const uint8_t *in, size_t inlen) {
751
+ sha256ctx state;
752
+
753
+ sha256_inc_init(&state);
754
+ sha256_inc_finalize(out, &state, in, inlen);
755
+ }
756
+
757
+ void sha384(uint8_t *out, const uint8_t *in, size_t inlen) {
758
+ sha384ctx state;
759
+
760
+ sha384_inc_init(&state);
761
+ sha384_inc_finalize(out, &state, in, inlen);
762
+ }
763
+
764
+ void sha512(uint8_t *out, const uint8_t *in, size_t inlen) {
765
+ sha512ctx state;
766
+
767
+ sha512_inc_init(&state);
768
+ sha512_inc_finalize(out, &state, in, inlen);
769
+ }