asherah 3.0.8 → 3.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.asherah-version +1 -1
- package/package.json +3 -2
- package/scripts/download-libraries.sh +66 -17
- package/src/cobhan_buffer.h +23 -0
- package/src/cobhan_buffer_napi.h +23 -0
package/.asherah-version
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
ASHERAH_VERSION=v0.4.
|
|
1
|
+
ASHERAH_VERSION=v0.4.36
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "asherah",
|
|
3
|
-
"version": "3.0.8",
|
|
3
|
+
"version": "3.0.13",
|
|
4
4
|
"description": "Asherah envelope encryption and key rotation library",
|
|
5
5
|
"exports": {
|
|
6
6
|
"node-addons": "./dist/asherah.node"
|
|
@@ -16,8 +16,9 @@
|
|
|
16
16
|
"test:mocha-debug": "lldb -o run -- node node_modules/mocha/bin/mocha --inspect-brk",
|
|
17
17
|
"test:mocha": "mocha",
|
|
18
18
|
"test": "nyc npm run test:mocha",
|
|
19
|
+
"test:bun": "bun test/bun-test.js",
|
|
19
20
|
"debug": "nyc npm run test:mocha-debug",
|
|
20
|
-
"posttest": "npm run lint",
|
|
21
|
+
"posttest": "npm run lint && npm run test:bun",
|
|
21
22
|
"lint": "eslint src/**.ts --fix",
|
|
22
23
|
"update": "npx npm-check-updates --target latest -u -x mocha && npm i && npm audit fix"
|
|
23
24
|
},
|
|
@@ -5,6 +5,8 @@ set -e # Exit on any command failure
|
|
|
5
5
|
# Global Constants
|
|
6
6
|
CHECK_INTERVAL_SECONDS=$((5 * 60)) # 5 minutes
|
|
7
7
|
MAX_DOWNLOAD_RETRIES=3
|
|
8
|
+
MAX_FILE_DOWNLOAD_RETRIES=5
|
|
9
|
+
DOWNLOAD_TIMEOUT=60 # 1 minute per file
|
|
8
10
|
|
|
9
11
|
# Function to check if a specific file download is necessary
|
|
10
12
|
function check_download_required {
|
|
@@ -52,26 +54,53 @@ function check_download_required {
|
|
|
52
54
|
return 1 # (download not required)
|
|
53
55
|
}
|
|
54
56
|
|
|
55
|
-
# Function to download a file
|
|
57
|
+
# Function to download a file with retry logic
|
|
56
58
|
function download_file {
|
|
57
59
|
local url=$1
|
|
58
60
|
local file=$2
|
|
59
61
|
local etag_file="${file}.etag"
|
|
62
|
+
local retry_count=0
|
|
63
|
+
local backoff=1
|
|
64
|
+
|
|
65
|
+
while [[ $retry_count -lt $MAX_FILE_DOWNLOAD_RETRIES ]]; do
|
|
66
|
+
# Use --show-error to display errors even with -s (silent progress)
|
|
67
|
+
# Add connection and max-time timeouts to prevent hanging
|
|
68
|
+
if curl -sS -L --fail \
|
|
69
|
+
--connect-timeout 30 \
|
|
70
|
+
--max-time "$DOWNLOAD_TIMEOUT" \
|
|
71
|
+
--retry 2 \
|
|
72
|
+
--retry-delay 2 \
|
|
73
|
+
--etag-save "$etag_file" \
|
|
74
|
+
--etag-compare "$etag_file" \
|
|
75
|
+
-O "$url"; then
|
|
76
|
+
# Explicitly touch the etag file to update its modification time only if successful
|
|
77
|
+
touch "$etag_file"
|
|
78
|
+
return 0
|
|
79
|
+
fi
|
|
60
80
|
|
|
61
|
-
|
|
62
|
-
echo "Failed to download $url" >&2
|
|
63
|
-
exit 1
|
|
64
|
-
fi
|
|
81
|
+
((retry_count++))
|
|
65
82
|
|
|
66
|
-
|
|
67
|
-
|
|
83
|
+
if [[ $retry_count -lt $MAX_FILE_DOWNLOAD_RETRIES ]]; then
|
|
84
|
+
echo "Download attempt $retry_count failed for $url, retrying in ${backoff}s..." >&2
|
|
85
|
+
sleep "$backoff"
|
|
86
|
+
# Exponential backoff with a cap at 16 seconds
|
|
87
|
+
backoff=$((backoff * 2))
|
|
88
|
+
if [[ $backoff -gt 16 ]]; then
|
|
89
|
+
backoff=16
|
|
90
|
+
fi
|
|
91
|
+
fi
|
|
92
|
+
done
|
|
93
|
+
|
|
94
|
+
echo "Failed to download $url after $MAX_FILE_DOWNLOAD_RETRIES attempts" >&2
|
|
95
|
+
return 1
|
|
68
96
|
}
|
|
69
97
|
|
|
70
98
|
# Function to verify checksums
|
|
71
99
|
function verify_checksums {
|
|
72
100
|
local archive=$1
|
|
73
101
|
local header=$2
|
|
74
|
-
local sums=$3
|
|
102
|
+
local warmup=$3
|
|
103
|
+
local sums=$4
|
|
75
104
|
|
|
76
105
|
# Determine the available SHA hashing utility
|
|
77
106
|
if command -v sha256sum &> /dev/null; then
|
|
@@ -90,9 +119,9 @@ function verify_checksums {
|
|
|
90
119
|
fi
|
|
91
120
|
|
|
92
121
|
# Filter the relevant checksums and verify they are not empty
|
|
93
|
-
checksums=$(grep -e "${archive}" -e "${header}" "${sums}")
|
|
122
|
+
checksums=$(grep -e "${archive}" -e "${header}" -e "${warmup}" "${sums}")
|
|
94
123
|
if [[ -z "$checksums" ]]; then
|
|
95
|
-
echo "Error: No matching checksums found for ${archive} or ${header} in ${sums}." >&2
|
|
124
|
+
echo "Error: No matching checksums found for ${archive}, ${header}, or ${warmup} in ${sums}." >&2
|
|
96
125
|
return 1
|
|
97
126
|
fi
|
|
98
127
|
|
|
@@ -144,11 +173,13 @@ function detect_os_and_cpu {
|
|
|
144
173
|
#echo "Using Asherah libraries for Linux x86_64"
|
|
145
174
|
ARCHIVE="libasherah-x64.a"
|
|
146
175
|
HEADER="libasherah-x64-archive.h"
|
|
176
|
+
WARMUP="go-warmup-linux-x64.so"
|
|
147
177
|
SUMS="SHA256SUMS"
|
|
148
178
|
elif [[ ${MACHINE} == 'aarch64' ]]; then
|
|
149
179
|
#echo "Using Asherah libraries for Linux aarch64"
|
|
150
180
|
ARCHIVE="libasherah-arm64.a"
|
|
151
181
|
HEADER="libasherah-arm64-archive.h"
|
|
182
|
+
WARMUP="go-warmup-linux-arm64.so"
|
|
152
183
|
SUMS="SHA256SUMS"
|
|
153
184
|
else
|
|
154
185
|
#echo "Unsupported CPU architecture: ${MACHINE}" >&2
|
|
@@ -159,11 +190,13 @@ function detect_os_and_cpu {
|
|
|
159
190
|
#echo "Using Asherah libraries for MacOS x86_64"
|
|
160
191
|
ARCHIVE="libasherah-darwin-x64.a"
|
|
161
192
|
HEADER="libasherah-darwin-x64-archive.h"
|
|
193
|
+
WARMUP="go-warmup-darwin-x64.dylib"
|
|
162
194
|
SUMS="SHA256SUMS-darwin"
|
|
163
195
|
elif [[ ${MACHINE} == 'arm64' ]]; then
|
|
164
196
|
#echo "Using Asherah libraries for MacOS arm64"
|
|
165
197
|
ARCHIVE="libasherah-darwin-arm64.a"
|
|
166
198
|
HEADER="libasherah-darwin-arm64-archive.h"
|
|
199
|
+
WARMUP="go-warmup-darwin-arm64.dylib"
|
|
167
200
|
SUMS="SHA256SUMS-darwin"
|
|
168
201
|
else
|
|
169
202
|
echo "Unsupported CPU architecture: ${MACHINE}" >&2
|
|
@@ -174,7 +207,7 @@ function detect_os_and_cpu {
|
|
|
174
207
|
exit 1
|
|
175
208
|
fi
|
|
176
209
|
|
|
177
|
-
echo "${ARCHIVE}" "${HEADER}" "${SUMS}" # Return value
|
|
210
|
+
echo "${ARCHIVE}" "${HEADER}" "${WARMUP}" "${SUMS}" # Return value
|
|
178
211
|
}
|
|
179
212
|
|
|
180
213
|
# Parse script arguments
|
|
@@ -216,18 +249,20 @@ function main {
|
|
|
216
249
|
no_cache=$(parse_args "$@")
|
|
217
250
|
|
|
218
251
|
# Detect OS and CPU architecture
|
|
219
|
-
read -r archive header sums < <(detect_os_and_cpu)
|
|
252
|
+
read -r archive header warmup sums < <(detect_os_and_cpu)
|
|
220
253
|
echo "Archive: $archive"
|
|
221
254
|
echo "Header: $header"
|
|
255
|
+
echo "Warmup: $warmup"
|
|
222
256
|
echo "Sums: $sums"
|
|
223
257
|
echo "Version: $ASHERAH_VERSION"
|
|
224
258
|
|
|
225
259
|
# Interpolate the URLs
|
|
226
260
|
url_prefix="https://github.com/godaddy/asherah-cobhan/releases/download/${ASHERAH_VERSION}"
|
|
227
|
-
file_names=("${archive}" "${header}" "${sums}")
|
|
261
|
+
file_names=("${archive}" "${header}" "${warmup}" "${sums}")
|
|
228
262
|
file_urls=(
|
|
229
263
|
"${url_prefix}/${archive}"
|
|
230
264
|
"${url_prefix}/${header}"
|
|
265
|
+
"${url_prefix}/${warmup}"
|
|
231
266
|
"${url_prefix}/${sums}"
|
|
232
267
|
)
|
|
233
268
|
|
|
@@ -238,30 +273,44 @@ function main {
|
|
|
238
273
|
local retries=0
|
|
239
274
|
local checksums_verified=false
|
|
240
275
|
while [[ $checksums_verified == false && $retries -lt $MAX_DOWNLOAD_RETRIES ]]; do
|
|
276
|
+
local download_failed=false
|
|
277
|
+
|
|
241
278
|
# Per-file touch and download logic
|
|
242
279
|
for i in "${!file_names[@]}"; do
|
|
243
280
|
if check_download_required "${file_names[$i]}" "$no_cache" "$CHECK_INTERVAL_SECONDS"; then
|
|
244
|
-
download_file "${file_urls[$i]}" "${file_names[$i]}"
|
|
281
|
+
if ! download_file "${file_urls[$i]}" "${file_names[$i]}"; then
|
|
282
|
+
echo "Failed to download ${file_names[$i]}" >&2
|
|
283
|
+
download_failed=true
|
|
284
|
+
break
|
|
285
|
+
fi
|
|
245
286
|
else
|
|
246
287
|
interval_str=$(interval_message "$CHECK_INTERVAL_SECONDS")
|
|
247
288
|
echo "${file_names[$i]} is up to date (checked within the last ${interval_str})"
|
|
248
289
|
fi
|
|
249
290
|
done
|
|
250
291
|
|
|
292
|
+
# If any download failed, retry the whole batch
|
|
293
|
+
if [[ $download_failed == true ]]; then
|
|
294
|
+
echo "Download failed, cleaning up and retrying..."
|
|
295
|
+
rm -f ./*.a ./*.h ./*.so ./*.dylib ./*.etag
|
|
296
|
+
((retries++))
|
|
297
|
+
sleep 2
|
|
298
|
+
continue
|
|
299
|
+
fi
|
|
300
|
+
|
|
251
301
|
# Verify checksums and copy files
|
|
252
|
-
if verify_checksums "${archive}" "${header}" "${sums}"; then
|
|
302
|
+
if verify_checksums "${archive}" "${header}" "${warmup}" "${sums}"; then
|
|
253
303
|
copy_files "${archive}" "${header}"
|
|
254
304
|
checksums_verified=true
|
|
255
305
|
else
|
|
256
306
|
echo "Verification failed, re-downloading files..."
|
|
257
307
|
((retries++))
|
|
258
308
|
# Sleep for a bit before retrying to avoid hammering the server
|
|
259
|
-
sleep
|
|
309
|
+
sleep 2
|
|
260
310
|
fi
|
|
261
311
|
done
|
|
262
312
|
|
|
263
313
|
if [[ $checksums_verified == true ]]; then
|
|
264
|
-
copy_files "${archive}" "${header}"
|
|
265
314
|
echo "Asherah libraries downloaded successfully"
|
|
266
315
|
else
|
|
267
316
|
echo "Failed to download Asherah libraries after $retries retries."
|
package/src/cobhan_buffer.h
CHANGED
|
@@ -9,6 +9,12 @@
|
|
|
9
9
|
#include <stdexcept> // for std::runtime_error, std::invalid_argument
|
|
10
10
|
#include <string> // for std::string
|
|
11
11
|
|
|
12
|
+
#ifdef _WIN32
|
|
13
|
+
#include <windows.h> // for SecureZeroMemory
|
|
14
|
+
#else
|
|
15
|
+
#include <string.h> // for explicit_bzero
|
|
16
|
+
#endif
|
|
17
|
+
|
|
12
18
|
class CobhanBuffer {
|
|
13
19
|
public:
|
|
14
20
|
// Used for requesting a new heap-based buffer allocation that can handle
|
|
@@ -56,6 +62,23 @@ public:
|
|
|
56
62
|
|
|
57
63
|
[[nodiscard]] size_t get_data_len_bytes() const { return *data_len_ptr; }
|
|
58
64
|
|
|
65
|
+
void secure_wipe_data() {
|
|
66
|
+
if (data_ptr && get_data_len_bytes() > 0) {
|
|
67
|
+
#ifdef _WIN32
|
|
68
|
+
// Windows secure zero
|
|
69
|
+
SecureZeroMemory(data_ptr, get_data_len_bytes());
|
|
70
|
+
#elif defined(__linux__) && defined(__GLIBC__)
|
|
71
|
+
// Linux with glibc has explicit_bzero
|
|
72
|
+
explicit_bzero(data_ptr, get_data_len_bytes());
|
|
73
|
+
#else
|
|
74
|
+
// Fallback - volatile to prevent optimization
|
|
75
|
+
volatile char *p = data_ptr;
|
|
76
|
+
size_t len = get_data_len_bytes();
|
|
77
|
+
while (len--) *p++ = 0;
|
|
78
|
+
#endif
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
|
|
59
82
|
~CobhanBuffer() {
|
|
60
83
|
verify_canaries();
|
|
61
84
|
cleanup();
|
package/src/cobhan_buffer_napi.h
CHANGED
|
@@ -192,4 +192,27 @@ private:
|
|
|
192
192
|
}
|
|
193
193
|
};
|
|
194
194
|
|
|
195
|
+
// Specialized class for buffers containing sensitive data
|
|
196
|
+
class SensitiveCobhanBufferNapi : public CobhanBufferNapi {
|
|
197
|
+
public:
|
|
198
|
+
using CobhanBufferNapi::CobhanBufferNapi; // Inherit all constructors
|
|
199
|
+
|
|
200
|
+
// Move constructor - needed for async workers
|
|
201
|
+
SensitiveCobhanBufferNapi(SensitiveCobhanBufferNapi &&other) noexcept
|
|
202
|
+
: CobhanBufferNapi(std::move(other)) {}
|
|
203
|
+
|
|
204
|
+
// Also allow moving from base class (for async worker initialization)
|
|
205
|
+
SensitiveCobhanBufferNapi(CobhanBufferNapi &&other) noexcept
|
|
206
|
+
: CobhanBufferNapi(std::move(other)) {}
|
|
207
|
+
|
|
208
|
+
~SensitiveCobhanBufferNapi() {
|
|
209
|
+
// TODO: Fix for async operations - currently breaks async tests
|
|
210
|
+
// because data gets wiped before async operation completes
|
|
211
|
+
// Only wipe if we still own data (haven't been moved from)
|
|
212
|
+
// if (get_data_ptr() != nullptr) {
|
|
213
|
+
// secure_wipe_data();
|
|
214
|
+
// }
|
|
215
|
+
}
|
|
216
|
+
};
|
|
217
|
+
|
|
195
218
|
#endif // COBHAN_BUFFER_NAPI_H
|