withub-cli 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +74 -26
- package/dist/commands/account.js +262 -63
- package/dist/commands/chain.js +35 -0
- package/dist/commands/checkout.js +137 -16
- package/dist/commands/clone.js +64 -7
- package/dist/commands/commit.js +4 -16
- package/dist/commands/fetch.js +76 -4
- package/dist/commands/init.js +75 -13
- package/dist/commands/invite.js +68 -53
- package/dist/commands/ipfsCar.js +98 -0
- package/dist/commands/lighthouse.js +97 -0
- package/dist/commands/lighthouseDownload.js +58 -0
- package/dist/commands/lighthousePin.js +62 -0
- package/dist/commands/pull.js +2 -1
- package/dist/commands/push.js +224 -8
- package/dist/commands/registerCommands.js +108 -2
- package/dist/commands/remove-user.js +46 -0
- package/dist/commands/removeUser.js +30 -1
- package/dist/index.js +15 -0
- package/dist/lib/chain.js +72 -0
- package/dist/lib/crypto.js +62 -0
- package/dist/lib/evmClone.js +255 -0
- package/dist/lib/evmKeys.js +218 -0
- package/dist/lib/evmProvider.js +88 -0
- package/dist/lib/evmRepo.js +192 -0
- package/dist/lib/ipfsCar.js +132 -0
- package/dist/lib/keccak.js +125 -0
- package/dist/lib/keys.js +102 -37
- package/dist/lib/lighthouse.js +661 -0
- package/dist/lib/lit.js +165 -0
- package/dist/lib/manifest.js +22 -4
- package/dist/lib/repo.js +94 -0
- package/dist/lib/schema.js +26 -6
- package/dist/lib/walrus.js +11 -1
- package/package.json +17 -2
|
@@ -0,0 +1,661 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.resolveLighthouseApiKey = resolveLighthouseApiKey;
|
|
7
|
+
exports.resolveLighthouseGatewayUrl = resolveLighthouseGatewayUrl;
|
|
8
|
+
exports.resolveLighthouseUploadUrl = resolveLighthouseUploadUrl;
|
|
9
|
+
exports.resolveLighthousePinUrl = resolveLighthousePinUrl;
|
|
10
|
+
exports.requireLighthouseApiKey = requireLighthouseApiKey;
|
|
11
|
+
exports.uploadFileToLighthouse = uploadFileToLighthouse;
|
|
12
|
+
exports.uploadTextToLighthouse = uploadTextToLighthouse;
|
|
13
|
+
exports.uploadBufferToLighthouse = uploadBufferToLighthouse;
|
|
14
|
+
exports.pinCidWithLighthouse = pinCidWithLighthouse;
|
|
15
|
+
exports.downloadFromLighthouseGateway = downloadFromLighthouseGateway;
|
|
16
|
+
const crypto_1 = __importDefault(require("crypto"));
|
|
17
|
+
const fs_1 = __importDefault(require("fs"));
|
|
18
|
+
const promises_1 = __importDefault(require("fs/promises"));
|
|
19
|
+
const os_1 = __importDefault(require("os"));
|
|
20
|
+
const path_1 = __importDefault(require("path"));
|
|
21
|
+
const stream_1 = require("stream");
|
|
22
|
+
const sdk_1 = __importDefault(require("@lighthouse-web3/sdk"));
|
|
23
|
+
const dotenv_1 = __importDefault(require("dotenv"));
|
|
24
|
+
let dotenvLoaded = false;
|
|
25
|
+
let globalConfigLoaded = false;
|
|
26
|
+
let globalConfigCache = {};
|
|
27
|
+
const DEFAULT_LIGHTHOUSE_PIN_URL = 'https://api.lighthouse.storage/api/lighthouse/pin';
|
|
28
|
+
const DEFAULT_CACHE_FILE = 'lighthouse-uploads.json';
|
|
29
|
+
/**
 * Resolve the Lighthouse API key from the environment or ~/.witconfig.
 * Environment variables take precedence over the global config file.
 * @returns {string|null} the API key, or null when none is configured.
 */
function resolveLighthouseApiKey() {
    loadDotEnvOnce();
    const fromEnv = process.env.LIGHTHOUSE_API_KEY || process.env.WIT_LIGHTHOUSE_API_KEY;
    if (fromEnv) {
        return fromEnv;
    }
    const config = loadGlobalConfigOnce();
    const fromConfig = config.lighthouse_api_key || config.lighthouseApiKey;
    const isUsable = typeof fromConfig === 'string' && fromConfig.trim().length > 0;
    return isUsable ? fromConfig : null;
}
|
|
40
|
+
/**
 * Resolve the IPFS gateway base URL for downloads.
 * Environment variables win, then ~/.witconfig entries (including the first
 * element of gateway-list keys), then the public Lighthouse gateway.
 * @returns {string} gateway base URL without a trailing slash.
 */
function resolveLighthouseGatewayUrl() {
    loadDotEnvOnce();
    const fromEnv = process.env.WIT_LIGHTHOUSE_GATEWAY_URL ||
        process.env.LIGHTHOUSE_GATEWAY_URL ||
        process.env.WIT_IPFS_GATEWAY_URL ||
        process.env.IPFS_GATEWAY_URL;
    if (fromEnv) {
        return normalizeGatewayUrl(fromEnv);
    }
    const config = loadGlobalConfigOnce();
    const fromConfig = config.lighthouse_gateway_url ||
        config.lighthouseGatewayUrl ||
        config.ipfs_gateway_url ||
        config.ipfsGatewayUrl ||
        (Array.isArray(config.ipfs_gateway_urls) && config.ipfs_gateway_urls[0]) ||
        (Array.isArray(config.ipfsGatewayUrls) && config.ipfsGatewayUrls[0]);
    return normalizeGatewayUrl(fromConfig || 'https://gateway.lighthouse.storage');
}
|
|
57
|
+
/**
 * Resolve a custom Lighthouse upload endpoint from env or ~/.witconfig.
 * @returns {string|null} normalized endpoint URL, or null to use the SDK default.
 */
function resolveLighthouseUploadUrl() {
    loadDotEnvOnce();
    const fromEnv = process.env.WIT_LIGHTHOUSE_UPLOAD_URL || process.env.LIGHTHOUSE_UPLOAD_URL;
    if (fromEnv) {
        return normalizeEndpointUrl(fromEnv);
    }
    const config = loadGlobalConfigOnce();
    return normalizeEndpointUrl(config.lighthouse_upload_url || config.lighthouseUploadUrl);
}
|
|
66
|
+
/**
 * Resolve a custom Lighthouse pin endpoint from env or ~/.witconfig.
 * @returns {string|null} normalized endpoint URL, or null when unset.
 */
function resolveLighthousePinUrl() {
    loadDotEnvOnce();
    const fromEnv = process.env.WIT_LIGHTHOUSE_PIN_URL || process.env.LIGHTHOUSE_PIN_URL;
    if (fromEnv) {
        return normalizeEndpointUrl(fromEnv);
    }
    const config = loadGlobalConfigOnce();
    return normalizeEndpointUrl(config.lighthouse_pin_url || config.lighthousePinUrl);
}
|
|
75
|
+
/**
 * Like resolveLighthouseApiKey, but throws when no key is configured.
 * @returns {string} the resolved API key.
 * @throws {Error} when no API key can be found anywhere.
 */
function requireLighthouseApiKey() {
    const resolved = resolveLighthouseApiKey();
    if (resolved) {
        return resolved;
    }
    throw new Error('Missing LIGHTHOUSE_API_KEY. Set it in .env, ~/.witconfig, or export it before running this command.');
}
|
|
82
|
+
/**
 * Upload a file to Lighthouse, using a local content-addressed cache to skip
 * re-uploading unchanged content.
 * @param {string} filePath - path of the file to upload.
 * @param {object} [opts] - { apiKey, cidVersion, useCache, onProgress,
 *   retries, retryDelayMs, onRetry } — retry options are forwarded to
 *   uploadWithRetry.
 * @returns {Promise<{cid, name, size, raw, fromCache}>}
 */
async function uploadFileToLighthouse(filePath, opts = {}) {
    const apiKey = opts.apiKey || requireLighthouseApiKey();
    const cidVersion = normalizeCidVersion(opts.cidVersion);
    let cacheKey = null;
    let cache = null;
    let fileSize = null;
    // Cache is opt-out: only an explicit `useCache: false` disables it.
    const useCache = opts.useCache !== false;
    if (useCache) {
        const stat = await promises_1.default.stat(filePath);
        if (stat.isFile()) {
            fileSize = stat.size;
            // Key on content hash + CID version so the same bytes uploaded
            // with a different CID version are not conflated.
            const sha256 = await computeFileSha256(filePath);
            cacheKey = buildUploadCacheKey(sha256, cidVersion);
            cache = await readUploadCache();
            const hit = cache.entries[cacheKey];
            if (hit?.cid) {
                // Cache hit: skip the network entirely.
                return {
                    cid: hit.cid,
                    name: path_1.default.basename(filePath),
                    size: String(hit.size),
                    raw: { cache: true, key: cacheKey },
                    fromCache: true,
                };
            }
        }
    }
    const response = await uploadWithRetry(() => sdk_1.default.upload(filePath, apiKey, cidVersion, mapProgress(opts.onProgress)), opts);
    const result = mapUploadResponse(response);
    if (useCache && cacheKey) {
        await persistUploadCache(cacheKey, {
            cid: result.cid,
            // Prefer the stat() size; fall back to the SDK-reported size.
            size: fileSize ?? parseSize(result.size),
            cidVersion,
            source: 'file',
            createdAt: new Date().toISOString(),
        }, cache);
    }
    return result;
}
|
|
121
|
+
/**
 * Upload a UTF-8 text snippet to Lighthouse under a given name, with the same
 * content-hash cache used for file uploads.
 * @param {string} text - the content to upload.
 * @param {string} name - display name stored with the upload.
 * @param {object} [opts] - { apiKey, cidVersion, useCache, retries,
 *   retryDelayMs, onRetry }.
 * @returns {Promise<{cid, name, size, raw, fromCache}>}
 */
async function uploadTextToLighthouse(text, name, opts = {}) {
    const apiKey = opts.apiKey || requireLighthouseApiKey();
    const cidVersion = normalizeCidVersion(opts.cidVersion);
    // Cache is opt-out: only an explicit `useCache: false` disables it.
    const useCache = opts.useCache !== false;
    let cacheKey = null;
    let cache = null;
    const size = Buffer.byteLength(text, 'utf8');
    if (useCache) {
        const sha256 = computeBufferSha256(Buffer.from(text));
        cacheKey = buildUploadCacheKey(sha256, cidVersion);
        cache = await readUploadCache();
        const hit = cache.entries[cacheKey];
        if (hit?.cid) {
            // Same content + CID version already uploaded: reuse it.
            return {
                cid: hit.cid,
                name,
                size: String(hit.size),
                raw: { cache: true, key: cacheKey },
                fromCache: true,
            };
        }
    }
    const response = await uploadWithRetry(() => sdk_1.default.uploadText(text, apiKey, name, cidVersion), opts);
    const result = mapUploadResponse(response);
    if (useCache && cacheKey) {
        await persistUploadCache(cacheKey, {
            cid: result.cid,
            size,
            cidVersion,
            source: 'text',
            createdAt: new Date().toISOString(),
        }, cache);
    }
    return result;
}
|
|
156
|
+
/**
 * Upload a Buffer to Lighthouse, with the same content-hash cache used for
 * file and text uploads.
 * @param {Buffer} buffer - the bytes to upload.
 * @param {object} [opts] - { apiKey, cidVersion, useCache, retries,
 *   retryDelayMs, onRetry }.
 * @returns {Promise<{cid, size, raw, fromCache}>} note: no `name` on cache hits.
 */
async function uploadBufferToLighthouse(buffer, opts = {}) {
    const apiKey = opts.apiKey || requireLighthouseApiKey();
    const cidVersion = normalizeCidVersion(opts.cidVersion);
    // Cache is opt-out: only an explicit `useCache: false` disables it.
    const useCache = opts.useCache !== false;
    let cacheKey = null;
    let cache = null;
    if (useCache) {
        const sha256 = computeBufferSha256(buffer);
        cacheKey = buildUploadCacheKey(sha256, cidVersion);
        cache = await readUploadCache();
        const hit = cache.entries[cacheKey];
        if (hit?.cid) {
            // Same content + CID version already uploaded: reuse it.
            return {
                cid: hit.cid,
                size: String(hit.size),
                raw: { cache: true, key: cacheKey },
                fromCache: true,
            };
        }
    }
    const response = await uploadWithRetry(() => sdk_1.default.uploadBuffer(buffer, apiKey, cidVersion), opts);
    const result = mapUploadResponse(response);
    if (useCache && cacheKey) {
        await persistUploadCache(cacheKey, {
            cid: result.cid,
            size: buffer.length,
            cidVersion,
            source: 'buffer',
            createdAt: new Date().toISOString(),
        }, cache);
    }
    return result;
}
|
|
189
|
+
/**
 * Ask Lighthouse to pin an already-uploaded CID via its pin HTTP API.
 * @param {string} cid - CID to pin (required).
 * @param {object} [opts] - { apiKey, pinUrl, timeoutMs, retries,
 *   retryDelayMs, onRetry }.
 * @returns {Promise<{cid, pinUrl, raw}>}
 * @throws {Error} when cid is missing or the request ultimately fails.
 */
async function pinCidWithLighthouse(cid, opts = {}) {
    if (!cid) {
        throw new Error('CID is required to pin.');
    }
    const apiKey = opts.apiKey || requireLighthouseApiKey();
    // Precedence: explicit option > env/config > built-in default endpoint.
    const pinUrl = normalizeEndpointUrl(opts.pinUrl || resolveLighthousePinUrl()) || DEFAULT_LIGHTHOUSE_PIN_URL;
    const response = await postJsonWithRetry(pinUrl, { cid }, {
        apiKey,
        timeoutMs: opts.timeoutMs,
        retries: opts.retries,
        retryDelayMs: opts.retryDelayMs,
        onRetry: opts.onRetry,
    });
    return { cid, pinUrl, raw: response };
}
|
|
204
|
+
/**
 * Download a CID from the configured IPFS gateway, optionally as a CAR file,
 * and (by default) verify the returned bytes actually match the CID.
 * @param {string} cid
 * @param {object} [opts] - { format: 'raw'|'car', gatewayUrl, retries,
 *   retryDelayMs, timeoutMs, verify }.
 * @returns {Promise<{bytes: Uint8Array, gateway: string, url: string}>}
 */
async function downloadFromLighthouseGateway(cid, opts = {}) {
    // Anything other than an explicit 'car' request downloads raw bytes.
    const format = opts.format === 'car' ? 'car' : 'raw';
    const gateway = normalizeGatewayUrl(opts.gatewayUrl || resolveLighthouseGatewayUrl());
    const url = format === 'car' ? `${gateway}/ipfs/${cid}?format=car` : `${gateway}/ipfs/${cid}`;
    const bytes = await fetchWithRetry(url, {
        retries: opts.retries,
        retryDelayMs: opts.retryDelayMs,
        timeoutMs: opts.timeoutMs,
    });
    // Verification is opt-out: gateways are not trusted by default.
    if (opts.verify !== false) {
        await verifyCidBytes(cid, bytes, format);
    }
    return { bytes, gateway, url };
}
|
|
218
|
+
/**
 * Coerce a requested CID version to a supported value.
 * Only 0 is honored explicitly; every other input yields version 1.
 * @param {number|undefined} cidVersion
 * @returns {0|1}
 */
function normalizeCidVersion(cidVersion) {
    return cidVersion === 0 ? 0 : 1;
}
|
|
225
|
+
/**
 * Normalize a gateway base URL: trim whitespace and strip trailing slashes.
 * Falls back to the public Lighthouse gateway when no usable URL is given.
 * @param {string|null|undefined} url
 * @returns {string}
 */
function normalizeGatewayUrl(url) {
    if (!url) {
        return 'https://gateway.lighthouse.storage';
    }
    // Fix: trim first (matching normalizeEndpointUrl) so values copied from
    // config files with stray whitespace still produce a usable base URL.
    const trimmed = url.trim();
    if (!trimmed) {
        return 'https://gateway.lighthouse.storage';
    }
    return trimmed.replace(/\/+$/, '');
}
|
|
230
|
+
/**
 * Normalize an endpoint URL: trim whitespace and strip trailing slashes.
 * @param {string|null|undefined} url
 * @returns {string|null} normalized URL, or null when no URL was given.
 */
function normalizeEndpointUrl(url) {
    if (!url) {
        return null;
    }
    const trimmed = url.trim();
    return trimmed.replace(/\/+$/, '');
}
|
|
235
|
+
/**
 * Adapt an onProgress(number) callback to the SDK's progress event shape.
 * Events without a numeric `progress` field are ignored.
 * @param {(p: number) => void} [onProgress]
 * @returns {((data: {progress?: number}) => void)|undefined}
 */
function mapProgress(onProgress) {
    if (!onProgress) {
        return undefined;
    }
    return (event) => {
        const value = event?.progress;
        if (typeof value === 'number') {
            onProgress(value);
        }
    };
}
|
|
244
|
+
/**
 * Normalize an SDK upload response into { cid, name, size, raw, fromCache }.
 * Handles both wrapped ({ data: ... }) and bare payloads.
 * @throws {Error} when no CID can be found in the response.
 */
function mapUploadResponse(response) {
    const payload = response?.data ?? response;
    const cid = extractCid(payload);
    if (!cid) {
        throw new Error('Lighthouse upload response did not include a CID.');
    }
    return {
        cid,
        name: payload?.Name ?? payload?.name,
        size: payload?.Size ?? payload?.size,
        raw: payload,
        fromCache: false,
    };
}
|
|
258
|
+
/**
 * Pull a CID string out of the many shapes Lighthouse responses use.
 * Arrays delegate to their first element; objects are checked for the
 * Hash/hash/cid/Cid/CID/IpfsHash keys in that order.
 * @returns {string|null}
 */
function extractCid(data) {
    if (!data) {
        return null;
    }
    if (Array.isArray(data)) {
        return data.length > 0 ? extractCid(data[0]) : null;
    }
    const candidates = [data.Hash, data.hash, data.cid, data.Cid, data.CID, data.IpfsHash];
    for (const candidate of candidates) {
        if (typeof candidate === 'string') {
            return candidate;
        }
    }
    return null;
}
|
|
277
|
+
/** Cache key combining a content hash with the requested CID version. */
function buildUploadCacheKey(sha256, cidVersion) {
    return [sha256, `v${cidVersion}`].join(':');
}
|
|
280
|
+
/**
 * Read the upload cache file, returning a fresh empty cache when the file is
 * missing, unreadable, or has an unexpected shape.
 * @returns {Promise<{version: 1, entries: object}>}
 */
async function readUploadCache() {
    const cachePath = resolveUploadCachePath();
    try {
        const raw = await promises_1.default.readFile(cachePath, 'utf8');
        const parsed = JSON.parse(raw);
        // Only accept the exact schema we write; anything else is discarded.
        if (parsed && parsed.version === 1 && parsed.entries && typeof parsed.entries === 'object') {
            return parsed;
        }
    }
    catch (err) {
        // A missing cache file is the normal first-run case — not a warning.
        if (err?.code === 'ENOENT') {
            return { version: 1, entries: {} };
        }
        // eslint-disable-next-line no-console
        console.warn(`Warning: could not read Lighthouse cache ${cachePath}: ${err.message}`);
    }
    return { version: 1, entries: {} };
}
|
|
298
|
+
/**
 * Record an upload in the cache. Failures only warn — caching is best-effort
 * and must never fail the upload that just succeeded.
 * @param {string} cacheKey
 * @param {object} entry - { cid, size, cidVersion, source, createdAt }.
 * @param {object|null} cache - a previously-read cache to reuse, or null to
 *   re-read from disk.
 */
async function persistUploadCache(cacheKey, entry, cache) {
    try {
        const resolved = cache ?? (await readUploadCache());
        resolved.entries[cacheKey] = entry;
        await writeUploadCache(resolved);
    }
    catch (err) {
        // eslint-disable-next-line no-console
        console.warn(`Warning: could not update Lighthouse cache: ${err?.message || err}`);
    }
}
|
|
309
|
+
/**
 * Write the cache object to disk, creating parent directories as needed.
 * @param {{version: 1, entries: object}} cache
 */
async function writeUploadCache(cache) {
    const cachePath = resolveUploadCachePath();
    await promises_1.default.mkdir(path_1.default.dirname(cachePath), { recursive: true });
    // Pretty-printed with a trailing newline so the file is diff/editor friendly.
    await promises_1.default.writeFile(cachePath, JSON.stringify(cache, null, 2) + '\n', 'utf8');
}
|
|
314
|
+
/**
 * Path of the upload cache file: inside the nearest `.wit` directory when
 * running inside a repo, otherwise under ~/.wit/cache.
 * @returns {string}
 */
function resolveUploadCachePath() {
    const repoWitDir = findWitDir(process.cwd());
    const baseDir = repoWitDir ?? path_1.default.join(os_1.default.homedir(), '.wit');
    return path_1.default.join(baseDir, 'cache', DEFAULT_CACHE_FILE);
}
|
|
321
|
+
/**
 * Walk up from startDir looking for a `.wit` directory.
 * @param {string} startDir
 * @returns {string|null} absolute path of the .wit directory, or null when
 *   the filesystem root is reached without finding one.
 */
function findWitDir(startDir) {
    for (let dir = path_1.default.resolve(startDir);;) {
        const candidate = path_1.default.join(dir, '.wit');
        if (fs_1.default.existsSync(candidate)) {
            try {
                if (fs_1.default.statSync(candidate).isDirectory()) {
                    return candidate;
                }
            }
            catch {
                // ignore stat errors (e.g. races, permissions) and keep walking
            }
        }
        const parent = path_1.default.dirname(dir);
        if (parent === dir) {
            return null;
        }
        dir = parent;
    }
}
|
|
340
|
+
/**
 * SHA-256 hex digest of a file, computed by streaming so large files do not
 * have to fit in memory.
 * @param {string} filePath
 * @returns {Promise<string>} lowercase hex digest.
 */
async function computeFileSha256(filePath) {
    return new Promise((resolve, reject) => {
        const hash = crypto_1.default.createHash('sha256');
        const stream = fs_1.default.createReadStream(filePath);
        stream.on('data', (chunk) => hash.update(chunk));
        stream.on('error', reject);
        stream.on('end', () => resolve(hash.digest('hex')));
    });
}
|
|
349
|
+
/** SHA-256 hex digest of a Buffer. */
function computeBufferSha256(buffer) {
    const hash = crypto_1.default.createHash('sha256');
    hash.update(buffer);
    return hash.digest('hex');
}
|
|
352
|
+
/**
 * Parse a size reported by Lighthouse (usually a decimal string) into a
 * number, returning 0 for missing or unparseable values.
 * @param {string|number|undefined} value
 * @returns {number}
 */
function parseSize(value) {
    if (!value) {
        return 0;
    }
    const n = Number.parseInt(value, 10);
    return Number.isNaN(n) ? 0 : n;
}
|
|
358
|
+
/**
 * Load .env configuration exactly once per process.
 * Order: a plain dotenv.config() from the CWD first; if that did not yield an
 * API key, a second override pass is made from the CLI install directory's
 * .env (when one exists) or, failing that, from the CWD again with override.
 */
function loadDotEnvOnce() {
    if (dotenvLoaded)
        return;
    dotenvLoaded = true;
    tryLoadDotEnv({ quiet: true });
    // If the first pass already provided a key, do not let any fallback
    // .env override what the user has set.
    if (hasApiKey())
        return;
    const fallbackPath = resolveCliEnvPath();
    if (fallbackPath) {
        tryLoadDotEnv({ quiet: true, override: true, path: fallbackPath });
        return;
    }
    tryLoadDotEnv({ quiet: true, override: true });
}
|
|
372
|
+
/**
 * Run an upload thunk with exponential-backoff retries for transient errors.
 * @param {() => Promise<any>} fn - performs one upload attempt.
 * @param {{retries?: number, retryDelayMs?: number, onRetry?: Function}} opts
 * @returns {Promise<any>} the first successful result.
 * @throws the last error when all attempts fail or the error is classified
 *   as non-retryable by shouldRetryUploadError.
 */
async function uploadWithRetry(fn, opts) {
    const retries = normalizeRetryCount(opts.retries, 3);
    const retryDelayMs = normalizeRetryDelay(opts.retryDelayMs, 1000);
    let lastError = null;
    // `retries` is the number of extra attempts after the first (retries+1 total).
    for (let attempt = 0; attempt <= retries; attempt += 1) {
        try {
            return await fn();
        }
        catch (err) {
            lastError = err instanceof Error ? err : new Error(String(err));
            if (attempt >= retries || !shouldRetryUploadError(lastError)) {
                break;
            }
            const delay = computeBackoff(retryDelayMs, attempt);
            if (opts.onRetry) {
                opts.onRetry(attempt + 1, lastError, delay);
            }
            await sleep(delay);
        }
    }
    throw lastError ?? new Error('Lighthouse upload failed.');
}
|
|
394
|
+
/**
 * POST a JSON payload with the same exponential-backoff retry policy as
 * uploadWithRetry.
 * @param {string} url
 * @param {object} payload - serialized as the JSON request body.
 * @param {{apiKey: string, timeoutMs?: number, retries?: number,
 *   retryDelayMs?: number, onRetry?: Function}} opts
 * @returns {Promise<any>} parsed JSON (or raw text) of the first success.
 * @throws the last error when all attempts fail or the error is non-retryable.
 */
async function postJsonWithRetry(url, payload, opts) {
    const retries = normalizeRetryCount(opts.retries, 3);
    const retryDelayMs = normalizeRetryDelay(opts.retryDelayMs, 1000);
    let lastError = null;
    for (let attempt = 0; attempt <= retries; attempt += 1) {
        try {
            return await postJsonOnce(url, payload, opts.apiKey, opts.timeoutMs);
        }
        catch (err) {
            lastError = err instanceof Error ? err : new Error(String(err));
            if (attempt >= retries || !shouldRetryUploadError(lastError)) {
                break;
            }
            const delay = computeBackoff(retryDelayMs, attempt);
            if (opts.onRetry) {
                opts.onRetry(attempt + 1, lastError, delay);
            }
            await sleep(delay);
        }
    }
    throw lastError ?? new Error('Lighthouse request failed.');
}
|
|
416
|
+
/**
 * POST a JSON payload once, with bearer auth and an abort-based timeout.
 * @param {string} url
 * @param {object} payload
 * @param {string} apiKey - sent as `Authorization: Bearer <apiKey>`.
 * @param {number} [timeoutMs] - defaults to 30s.
 * @returns {Promise<any>} parsed JSON, raw text, or null for an empty body.
 * @throws {Error & {status?: number}} for non-2xx responses; the HTTP status
 *   is attached so retry logic can classify the failure.
 */
async function postJsonOnce(url, payload, apiKey, timeoutMs) {
    const controller = new AbortController();
    const timeout = timeoutMs ?? 30000;
    const timer = setTimeout(() => controller.abort(), timeout);
    try {
        const res = await fetch(url, {
            method: 'POST',
            headers: {
                Authorization: `Bearer ${apiKey}`,
                'Content-Type': 'application/json',
            },
            body: JSON.stringify(payload),
            signal: controller.signal,
        });
        if (!res.ok) {
            // Include the response body in the message when we can read it.
            const text = await safeReadText(res);
            const message = text
                ? `Pin request failed: ${res.status} ${res.statusText || ''} - ${text}`.trim()
                : `Pin request failed: ${res.status} ${res.statusText || ''}`.trim();
            const err = new Error(message);
            err.status = res.status;
            throw err;
        }
        return await readJsonOrText(res);
    }
    finally {
        // Always clear the timer so a fast response does not leave a pending abort.
        clearTimeout(timer);
    }
}
|
|
445
|
+
/**
 * GET `url` with a timeout and exponential-backoff retries for transient
 * failures (per shouldRetryError).
 * @param {string} url
 * @param {{retries?: number, retryDelayMs?: number, timeoutMs?: number,
 *   onRetry?: Function}} opts
 * @returns {Promise<Uint8Array>} the response body bytes.
 * @throws the last error when all attempts fail or the error is non-retryable.
 */
async function fetchWithRetry(url, opts) {
    // Consistency fix: clamp the retry knobs the same way uploadWithRetry and
    // postJsonWithRetry do, so negative or fractional values cannot produce
    // extra attempts or negative delays.
    const retries = normalizeRetryCount(opts.retries, 3);
    const retryDelayMs = normalizeRetryDelay(opts.retryDelayMs, 500);
    const timeoutMs = opts.timeoutMs ?? 30000;
    let lastError = null;
    for (let attempt = 0; attempt <= retries; attempt += 1) {
        try {
            return await fetchOnce(url, timeoutMs);
        }
        catch (err) {
            lastError = err instanceof Error ? err : new Error(String(err));
            if (attempt >= retries || !shouldRetryError(lastError))
                break;
            const delay = computeBackoff(retryDelayMs, attempt);
            // Consistency fix: surface retry attempts like the other retry
            // helpers (backward compatible — callers that pass no onRetry
            // see identical behavior).
            if (opts.onRetry) {
                opts.onRetry(attempt + 1, lastError, delay);
            }
            await sleep(delay);
        }
    }
    throw lastError ?? new Error('Gateway download failed.');
}
|
|
464
|
+
/**
 * GET `url` once with an abort-based timeout.
 * @param {string} url
 * @param {number} timeoutMs
 * @returns {Promise<Uint8Array>} the full response body.
 * @throws {Error & {status?: number}} for non-2xx responses; the HTTP status
 *   is attached so shouldRetryError can classify the failure.
 */
async function fetchOnce(url, timeoutMs) {
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeoutMs);
    try {
        const res = await fetch(url, { signal: controller.signal });
        if (!res.ok) {
            const message = `Gateway returned ${res.status} ${res.statusText || ''}`.trim();
            const err = new Error(message);
            err.status = res.status;
            throw err;
        }
        const buf = await res.arrayBuffer();
        return new Uint8Array(buf);
    }
    finally {
        // Always clear the timer so a fast response does not leave a pending abort.
        clearTimeout(timer);
    }
}
|
|
482
|
+
/**
 * Whether a gateway error is worth retrying: no status at all (network-level
 * failure), any 5xx, 429 (rate limited), or 408 (request timeout).
 * @param {Error & {status?: number}} err
 * @returns {boolean}
 */
function shouldRetryError(err) {
    const status = err?.status;
    if (!status) {
        return true;
    }
    return status >= 500 || status === 429 || status === 408;
}
|
|
492
|
+
/**
 * Decide whether an upload/pin error is transient and worth retrying.
 * Prefers an HTTP status when one can be extracted (5xx/429/408 are
 * retryable); otherwise sniffs the message for known transient network
 * failures. An empty message is treated as retryable.
 * @param {Error} err
 * @returns {boolean}
 */
function shouldRetryUploadError(err) {
    const status = extractStatusCode(err);
    if (status !== null) {
        if (status >= 500)
            return true;
        if (status === 429 || status === 408)
            return true;
        return false;
    }
    const message = err.message?.toLowerCase() || '';
    if (!message)
        return true;
    // Fix: our timeouts fire via AbortController, which surfaces as an
    // AbortError whose message ("This operation was aborted") matched none
    // of the previous patterns — so timeouts were never retried even though
    // "timed out"/"timeout" are meant to be retryable. 'abort' covers it.
    const transientFragments = [
        'timed out',
        'timeout',
        'abort',
        'network error',
        'socket hang up',
        'econnreset',
        'econnrefused',
        'enotfound',
        'eai_again',
        'epipe',
    ];
    return transientFragments.some((fragment) => message.includes(fragment));
}
|
|
522
|
+
/**
 * Best-effort extraction of an HTTP status code from an error: uses a finite
 * numeric `err.status` when present, otherwise scans the message for a
 * "status 503" / "status code 503" pattern.
 * @param {Error & {status?: number}} err
 * @returns {number|null} the status code, or null when none can be found.
 */
function extractStatusCode(err) {
    const direct = err?.status;
    if (typeof direct === 'number' && Number.isFinite(direct))
        return direct;
    // Fix: guard the message access with ?. — previously a null/undefined err
    // crashed here even though the status lookup above was already null-safe.
    const message = err?.message || '';
    const match = message.match(/status(?: code)?\s*(\d{3})/i);
    if (!match)
        return null;
    const parsed = Number.parseInt(match[1], 10);
    return Number.isNaN(parsed) ? null : parsed;
}
|
|
533
|
+
/**
 * Exponential backoff with jitter: baseMs * 2^attempt scaled by a random
 * factor in [0.7, 1.3), rounded to whole milliseconds.
 */
function computeBackoff(baseMs, attempt) {
    const scaled = baseMs * 2 ** attempt;
    const jitter = 0.7 + Math.random() * 0.6;
    return Math.round(scaled * jitter);
}
|
|
537
|
+
/** Promise that resolves after `ms` milliseconds. */
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
|
|
540
|
+
/**
 * Clamp a retry count to a non-negative integer; non-numeric or NaN input
 * yields the fallback.
 */
function normalizeRetryCount(value, fallback) {
    const isNumeric = typeof value === 'number' && !Number.isNaN(value);
    if (!isNumeric) {
        return fallback;
    }
    return Math.max(0, Math.floor(value));
}
|
|
545
|
+
/**
 * Clamp a retry delay (ms) to a non-negative integer; non-numeric or NaN
 * input yields the fallback.
 */
function normalizeRetryDelay(value, fallback) {
    if (typeof value !== 'number' || Number.isNaN(value)) {
        return fallback;
    }
    const floored = Math.floor(value);
    return floored > 0 ? floored : 0;
}
|
|
550
|
+
/**
 * Read a fetch Response body as JSON when possible, raw text otherwise, and
 * null for an empty body.
 * @param {Response} res
 * @returns {Promise<any|string|null>}
 */
async function readJsonOrText(res) {
    const body = await res.text();
    if (!body) {
        return null;
    }
    try {
        return JSON.parse(body);
    }
    catch {
        return body;
    }
}
|
|
561
|
+
/**
 * Read a response body as text, returning '' instead of throwing on failure.
 * @param {Response} res
 * @returns {Promise<string>}
 */
async function safeReadText(res) {
    try {
        const text = await res.text();
        return text;
    }
    catch {
        return '';
    }
}
|
|
569
|
+
/**
 * Verify downloaded bytes against the expected CID, dispatching on the
 * download format ('car' vs raw).
 * @throws {Error} when verification fails.
 */
async function verifyCidBytes(cid, bytes, format) {
    if (format === 'car') {
        await verifyCarBytes(cid, bytes);
    }
    else {
        await verifyRawBytes(cid, bytes);
    }
}
|
|
576
|
+
/**
 * Verify a downloaded CAR file: the expected CID must be among the CAR roots
 * and every block's bytes must validate against its own CID.
 * Dependencies are imported lazily (they are ESM-only packages).
 * @throws {Error} when the root is missing or a block fails validation.
 */
async function verifyCarBytes(cid, bytes) {
    const [{ CarReader }, { CID }, { validateBlock }] = await Promise.all([
        import('@ipld/car/reader'),
        import('multiformats/cid'),
        // @ts-ignore
        import('@web3-storage/car-block-validator'),
    ]);
    const reader = await CarReader.fromBytes(bytes);
    const roots = await reader.getRoots();
    const target = CID.parse(cid);
    // Compare with .equals when available; fall back to string comparison.
    const match = roots.some((root) => (root.equals ? root.equals(target) : String(root) === cid));
    if (!match) {
        throw new Error(`CID ${cid} not found in CAR roots.`);
    }
    for await (const block of reader.blocks()) {
        await validateBlock(block);
    }
}
|
|
594
|
+
/**
 * Verify raw downloaded bytes by re-encoding them with ipfs-car (UnixFS) and
 * comparing the resulting root CID to the expected one.
 * NOTE(review): this assumes the original content was encoded with ipfs-car's
 * default UnixFS settings — a different chunking would yield a different CID
 * and fail verification; confirm against the upload path.
 * @throws {Error} on CID mismatch or when no CID could be computed.
 */
async function verifyRawBytes(cid, bytes) {
    const [{ createFileEncoderStream }, { CID }] = await Promise.all([import('ipfs-car'), import('multiformats/cid')]);
    const fileLike = {
        stream: () => stream_1.Readable.toWeb(stream_1.Readable.from([Buffer.from(bytes)])),
    };
    const stream = createFileEncoderStream(fileLike);
    const reader = stream.getReader();
    let rootCid;
    // Drain the encoder; the last emitted block carries the root CID.
    while (true) {
        const { value, done } = await reader.read();
        if (done)
            break;
        if (value?.cid) {
            rootCid = value.cid;
        }
    }
    if (!rootCid) {
        throw new Error('Failed to compute CID for downloaded content.');
    }
    // Re-wrap in case the encoder's CID class differs from our import.
    const computed = CID.asCID(rootCid) ?? CID.decode(rootCid.bytes);
    const target = CID.parse(cid);
    if (!computed.equals(target)) {
        throw new Error(`CID mismatch: expected ${cid}, got ${computed.toString()}`);
    }
}
|
|
619
|
+
/** True when a non-blank Lighthouse API key is present in the environment. */
function hasApiKey() {
    const key = process.env.LIGHTHOUSE_API_KEY || process.env.WIT_LIGHTHOUSE_API_KEY;
    if (typeof key !== 'string') {
        return false;
    }
    return key.trim().length > 0;
}
|
|
623
|
+
/**
 * Load ~/.witconfig (a JSON object) once per process; later calls return the
 * cached object. Returns {} when the file is absent, unreadable, or not an
 * object.
 * @returns {object} the (possibly empty) global config.
 */
function loadGlobalConfigOnce() {
    if (globalConfigLoaded)
        return globalConfigCache;
    globalConfigLoaded = true;
    const configPath = path_1.default.join(os_1.default.homedir(), '.witconfig');
    if (!fs_1.default.existsSync(configPath))
        return globalConfigCache;
    try {
        const raw = fs_1.default.readFileSync(configPath, 'utf8');
        const parsed = JSON.parse(raw);
        if (parsed && typeof parsed === 'object') {
            globalConfigCache = parsed;
        }
    }
    catch (err) {
        // A broken config warns but never breaks the command.
        // eslint-disable-next-line no-console
        console.warn(`Warning: could not read ${configPath}: ${err.message}`);
        globalConfigCache = {};
    }
    return globalConfigCache;
}
|
|
644
|
+
/**
 * Invoke dotenv.config with the given options, swallowing any error so a
 * missing or malformed .env file never breaks the CLI.
 * @param {object} options - passed straight to dotenv.config (path, override, quiet).
 */
function tryLoadDotEnv(options) {
    try {
        dotenv_1.default.config(options);
    }
    catch {
        // ignore missing/invalid .env
    }
}
|
|
652
|
+
/**
 * Locate a .env (preferred) or .env.local file shipped with the CLI install,
 * two directories above this compiled file.
 * @returns {string|null} the first existing path, or null when neither exists.
 */
function resolveCliEnvPath() {
    const cliRoot = path_1.default.resolve(__dirname, '..', '..');
    for (const fileName of ['.env', '.env.local']) {
        const candidate = path_1.default.join(cliRoot, fileName);
        if (fs_1.default.existsSync(candidate)) {
            return candidate;
        }
    }
    return null;
}
|