@fileverse/api 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +142 -0
- package/dist/cli/index.js +1258 -0
- package/dist/cli/index.js.map +1 -0
- package/dist/commands/index.js +1548 -0
- package/dist/commands/index.js.map +1 -0
- package/dist/index.js +17802 -0
- package/dist/index.js.map +1 -0
- package/dist/mcp/index.js +14052 -0
- package/dist/mcp/index.js.map +1 -0
- package/dist/worker.js +2846 -0
- package/dist/worker.js.map +1 -0
- package/package.json +93 -0
- package/public/llm.txt +621 -0
- package/public/openapi.json +922 -0
package/dist/commands/index.js
@@ -0,0 +1,1548 @@
#!/usr/bin/env node

// src/commands/index.ts
import { Command as Command9 } from "commander";

// src/config/index.ts
import dotenv from "dotenv";
import path from "path";
import fs from "fs";
import os from "os";

// src/cli/constants.generated.ts
var STATIC_CONFIG = {
  API_URL: "https://prod-apps-storage-5cdacc06ff79.herokuapp.com/",
  SERVER_DID: "did:key:z6Mkroj9bxTin6Z5S9qwx2G2b87NPrCX7S85FhCpmBGPcDCz",
  PROXY_SERVER_DID: "did:key:z6MkrZSmq8D6vQG87YbjUQatXeptaCCXWdTx8fYaWxWbRUHB",
  NETWORK_NAME: "gnosis",
  DEFAULT_PORT: "8001",
  DEFAULT_RPC_URL: "https://rpc.gnosischain.com",
  PIMLICO_PROXY_URL: "https://pimlico-proxy-0a326da116f8.herokuapp.com/",
  SERVICE_NAME: "fileverse-api",
  LOG_LEVEL: "info",
  FRONTEND_URL: "https://docs.fileverse.io"
};

// src/config/index.ts
var projectEnvPath = path.join(process.cwd(), "config", ".env");
var userEnvPath = path.join(os.homedir(), ".fileverse", ".env");
function getEnvPath() {
  if (fs.existsSync(projectEnvPath)) {
    return projectEnvPath;
  }
  return userEnvPath;
}
function loadConfig(override = true) {
  const envPath = getEnvPath();
  dotenv.config({ path: envPath, override });
}
loadConfig(false);
function getRuntimeConfig() {
  return {
    get API_KEY() {
      return process.env.API_KEY;
    },
    get RPC_URL() {
      return process.env.RPC_URL || STATIC_CONFIG.DEFAULT_RPC_URL;
    },
    get DB_PATH() {
      return process.env.DB_PATH;
    },
    get PORT() {
      return process.env.PORT || STATIC_CONFIG.DEFAULT_PORT;
    },
    get NODE_ENV() {
      return process.env.NODE_ENV || "production";
    },
    get FRONTEND_URL() {
      return process.env.FRONTEND_URL || STATIC_CONFIG.FRONTEND_URL;
    }
  };
}
var config = {
  ...STATIC_CONFIG,
  get SERVICE_NAME() {
    return STATIC_CONFIG.SERVICE_NAME;
  },
  get LOG_LEVEL() {
    return STATIC_CONFIG.LOG_LEVEL;
  },
  get NETWORK_NAME() {
    return STATIC_CONFIG.NETWORK_NAME;
  },
  get UPLOAD_SERVER_URL() {
    return STATIC_CONFIG.API_URL;
  },
  get UPLOAD_SERVER_DID() {
    return STATIC_CONFIG.SERVER_DID;
  },
  get API_KEY() {
    return process.env.API_KEY;
  },
  get RPC_URL() {
    return process.env.RPC_URL || STATIC_CONFIG.DEFAULT_RPC_URL;
  },
  get DB_PATH() {
    return process.env.DB_PATH;
  },
  get PORT() {
    return process.env.PORT || STATIC_CONFIG.DEFAULT_PORT;
  },
  get NODE_ENV() {
    return process.env.NODE_ENV || "production";
  },
  get IP() {
    return process.env.IP || "0.0.0.0";
  },
  get FRONTEND_URL() {
    return process.env.FRONTEND_URL || STATIC_CONFIG.FRONTEND_URL;
  }
};

// src/infra/logger.ts
import pino from "pino";
var isProduction = config.NODE_ENV === "production";
var pinoInstance = pino({
  name: STATIC_CONFIG.SERVICE_NAME,
  level: STATIC_CONFIG.LOG_LEVEL,
  formatters: {
    bindings: (bindings) => ({ name: bindings.name }),
    level: (label) => ({ level: label })
  },
  serializers: {
    err(err) {
      if (!err) return err;
      if (isProduction) {
        return { type: err.name, message: err.message };
      }
      return {
        type: err.name,
        message: err.message,
        stack: err.stack
      };
    }
  },
  transport: config.NODE_ENV !== "production" ? {
    target: "pino-pretty",
    options: {
      colorize: true,
      translateTime: "SYS:standard",
      ignore: "pid,hostname",
      errorProps: "*",
      errorLikeObjectKeys: ["err", "error"]
    }
  } : void 0
});
var createLogMethod = (level) => {
  return (...args) => {
    const [first, ...rest] = args;
    const log = pinoInstance[level].bind(pinoInstance);
    if (typeof first === "object" && first !== null && !(first instanceof Error)) {
      log(first, ...rest);
      return;
    }
    if (rest.length > 0) {
      const last = rest[rest.length - 1];
      if (last instanceof Error) {
        log({ err: last }, first, ...rest.slice(0, -1));
        return;
      }
    }
    if (first instanceof Error) {
      log({ err: first }, first.message);
      return;
    }
    log(first, ...rest);
  };
};
var logger = {
  trace: createLogMethod("trace"),
  debug: createLogMethod("debug"),
  info: createLogMethod("info"),
  warn: createLogMethod("warn"),
  error: createLogMethod("error"),
  fatal: createLogMethod("fatal"),
  get level() {
    return pinoInstance.level;
  },
  set level(lvl) {
    pinoInstance.level = lvl;
  },
  child: pinoInstance.child.bind(pinoInstance)
};

// src/infra/database/connection.ts
import Database from "better-sqlite3";

// src/infra/database/models/files.model.ts
import { uuidv7 } from "uuidv7";
var FilesModel = class {
  static TABLE = "files";
  static parseFile(fileRaw) {
    let metadata = {};
    try {
      if (fileRaw.metadata) {
        metadata = typeof fileRaw.metadata === "string" ? JSON.parse(fileRaw.metadata) : fileRaw.metadata;
      }
    } catch (e) {
      metadata = {};
    }
    return {
      _id: fileRaw._id,
      ddocId: fileRaw.ddocId,
      title: fileRaw.title,
      content: fileRaw.content,
      localVersion: fileRaw.localVersion,
      onchainVersion: fileRaw.onchainVersion,
      syncStatus: fileRaw.syncStatus,
      isDeleted: fileRaw.isDeleted,
      onChainFileId: fileRaw.onChainFileId ?? null,
      portalAddress: fileRaw.portalAddress,
      metadata: metadata || {},
      createdAt: fileRaw.createdAt,
      updatedAt: fileRaw.updatedAt,
      linkKey: fileRaw.linkKey,
      linkKeyNonce: fileRaw.linkKeyNonce,
      commentKey: fileRaw.commentKey,
      link: fileRaw.link
    };
  }
  static findAll(portalAddress, limit, skip) {
    const whereClause = "isDeleted = 0 AND portalAddress = ?";
    const params = [portalAddress];
    const countSql = `
      SELECT COUNT(*) as count
      FROM ${this.TABLE}
      WHERE ${whereClause}
    `;
    const totalResult = QueryBuilder.selectOne(countSql, params);
    const total = totalResult?.count || 0;
    const sql = `
      SELECT *
      FROM ${this.TABLE}
      WHERE ${whereClause}
    `;
    const completeSql = QueryBuilder.paginate(sql, {
      limit,
      offset: skip,
      orderBy: "createdAt",
      orderDirection: "DESC"
    });
    const filesRaw = QueryBuilder.select(completeSql, params);
    const files = filesRaw.map(this.parseFile);
    const hasNext = skip !== void 0 && limit !== void 0 ? skip + limit < total : false;
    return { files, total, hasNext };
  }
  static findById(_id, portalAddress) {
    const sql = `
      SELECT *
      FROM ${this.TABLE}
      WHERE _id = ? AND isDeleted = 0 AND portalAddress = ?
    `;
    const result = QueryBuilder.selectOne(sql, [_id, portalAddress]);
    return result ? this.parseFile(result) : void 0;
  }
  static findByIdIncludingDeleted(_id) {
    const sql = `
      SELECT *
      FROM ${this.TABLE}
      WHERE _id = ?
    `;
    const result = QueryBuilder.selectOne(sql, [_id]);
    return result ? this.parseFile(result) : void 0;
  }
  static findByIdExcludingDeleted(_id) {
    const sql = `
      SELECT *
      FROM ${this.TABLE}
      WHERE _id = ? AND isDeleted = 0
    `;
    const result = QueryBuilder.selectOne(sql, [_id]);
    return result ? this.parseFile(result) : void 0;
  }
  static findByDDocId(ddocId, portalAddress) {
    const sql = `
      SELECT *
      FROM ${this.TABLE}
      WHERE ddocId = ? AND isDeleted = 0 AND portalAddress = ?
    `;
    const result = QueryBuilder.selectOne(sql, [ddocId, portalAddress]);
    return result ? this.parseFile(result) : void 0;
  }
  static searchByTitle(searchTerm, portalAddress, limit, skip) {
    const sql = `
      SELECT *
      FROM ${this.TABLE}
      WHERE LOWER(title) LIKE LOWER(?) AND isDeleted = 0 AND portalAddress = ?
    `;
    const completeSql = QueryBuilder.paginate(sql, {
      limit,
      offset: skip,
      orderBy: "createdAt",
      orderDirection: "DESC"
    });
    const filesRaw = QueryBuilder.select(completeSql, [`%${searchTerm}%`, portalAddress]);
    return filesRaw.map(this.parseFile);
  }
  static create(input) {
    const _id = uuidv7();
    const sql = `
      INSERT INTO ${this.TABLE}
      (_id, title, content, ddocId, portalAddress)
      VALUES (?, ?, ?, ?, ?)
    `;
    QueryBuilder.execute(sql, [_id, input.title, input.content, input.ddocId, input.portalAddress]);
    const created = this.findById(_id, input.portalAddress);
    if (!created) {
      throw new Error("Failed to create file");
    }
    return created;
  }
  static update(_id, payload, portalAddress) {
    const now = (/* @__PURE__ */ new Date()).toISOString();
    const keys = [];
    const values = [];
    for (const [k, v] of Object.entries(payload)) {
      if (v !== void 0) {
        if (k === "metadata" && typeof v === "object") {
          keys.push(`${k} = ?`);
          values.push(JSON.stringify(v));
        } else {
          keys.push(`${k} = ?`);
          values.push(v);
        }
      }
    }
    keys.push("updatedAt = ?");
    values.push(now, _id, portalAddress);
    const updateChain = keys.join(", ");
    const sql = `UPDATE ${this.TABLE} SET ${updateChain} WHERE _id = ? AND portalAddress = ?`;
    QueryBuilder.execute(sql, values);
    const updated = this.findById(_id, portalAddress);
    if (!updated) {
      throw new Error("Failed to update file");
    }
    return updated;
  }
  static softDelete(_id) {
    const now = (/* @__PURE__ */ new Date()).toISOString();
    const sql = `
      UPDATE ${this.TABLE}
      SET isDeleted = 1, syncStatus = 'pending', updatedAt = ?
      WHERE _id = ?
    `;
    QueryBuilder.execute(sql, [now, _id]);
    const deleted = this.findByIdIncludingDeleted(_id);
    if (!deleted) {
      throw new Error("Failed to delete file");
    }
    return deleted;
  }
};

// src/infra/database/models/portals.model.ts
import { uuidv7 as uuidv72 } from "uuidv7";

// src/infra/database/models/apikeys.model.ts
import { uuidv7 as uuidv73 } from "uuidv7";
var ApiKeysModel = class {
  static TABLE = "api_keys";
  static create(input) {
    const _id = uuidv73();
    const now = (/* @__PURE__ */ new Date()).toISOString();
    const sql = `INSERT INTO ${this.TABLE} (_id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt)
      VALUES (?, ?, ?, ?, ?, ?)`;
    const result = QueryBuilder.execute(sql, [
      _id,
      input.apiKeySeed,
      input.name,
      input.collaboratorAddress,
      input.portalAddress,
      now
    ]);
    if (result.changes === 0) {
      throw new Error("Failed to create API key");
    }
    const created = this.findById(_id);
    if (!created) {
      throw new Error("Failed to create API key");
    }
    return created;
  }
  static findById(_id) {
    const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE _id = ? AND isDeleted = 0`;
    return QueryBuilder.selectOne(sql, [_id]);
  }
  static findByCollaboratorAddress(collaboratorAddress) {
    const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE collaboratorAddress = ? AND isDeleted = 0 LIMIT 1`;
    return QueryBuilder.selectOne(sql, [collaboratorAddress]);
  }
  static delete(_id) {
    const sql = `UPDATE ${this.TABLE} SET isDeleted = 1 WHERE _id = ?`;
    QueryBuilder.execute(sql, [_id]);
  }
  static findByPortalAddress(portalAddress) {
    const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE portalAddress = ? AND isDeleted = 0`;
    return QueryBuilder.selectOne(sql, [portalAddress]);
  }
  static findByApiKey(apiKey) {
    const sql = `SELECT _id, apiKeySeed, name, collaboratorAddress, portalAddress, createdAt, isDeleted FROM ${this.TABLE} WHERE apiKeySeed = ? AND isDeleted = 0`;
    return QueryBuilder.selectOne(sql, [apiKey]);
  }
};

// src/infra/database/models/events.model.ts
import { uuidv7 as uuidv74 } from "uuidv7";

// src/infra/worker/workerSignal.ts
import { EventEmitter } from "events";
var WorkerSignal = class extends EventEmitter {
};
var workerSignal = new WorkerSignal();
workerSignal.setMaxListeners(20);
function notifyNewEvent() {
  workerSignal.emit("newEvent");
}

// src/infra/database/models/events.model.ts
var RETRY_DELAYS_MS = [5e3, 3e4, 12e4];
var EventsModel = class {
  static TABLE = "events";
  static create(input) {
    const _id = uuidv74();
    const timestamp = Date.now();
    const status = "pending";
    const sql = `
      INSERT INTO ${this.TABLE}
      (_id, type, timestamp, fileId, portalAddress, status, retryCount, lastError, lockedAt, nextRetryAt)
      VALUES (?, ?, ?, ?, ?, ?, 0, NULL, NULL, NULL)
    `;
    QueryBuilder.execute(sql, [_id, input.type, timestamp, input.fileId, input.portalAddress, status]);
    notifyNewEvent();
    return {
      _id,
      type: input.type,
      timestamp,
      fileId: input.fileId,
      portalAddress: input.portalAddress,
      status,
      retryCount: 0,
      lastError: null,
      lockedAt: null,
      nextRetryAt: null
    };
  }
  static findById(_id) {
    const sql = `SELECT * FROM ${this.TABLE} WHERE _id = ?`;
    const row = QueryBuilder.selectOne(sql, [_id]);
    return row ? this.parseEvent(row) : void 0;
  }
  static findNextPending() {
    const sql = `
      SELECT * FROM ${this.TABLE}
      WHERE status = 'pending'
      ORDER BY timestamp ASC
      LIMIT 1
    `;
    const row = QueryBuilder.selectOne(sql, []);
    return row ? this.parseEvent(row) : void 0;
  }
  static findNextEligible(lockedFileIds) {
    const now = Date.now();
    const exclusionClause = lockedFileIds.length > 0 ? `AND e1.fileId NOT IN (${lockedFileIds.map(() => "?").join(", ")})` : "";
    const sql = `
      SELECT e1.* FROM ${this.TABLE} e1
      WHERE e1.status = 'pending'
      AND (e1.nextRetryAt IS NULL OR e1.nextRetryAt <= ?)
      ${exclusionClause}
      AND NOT EXISTS (
        SELECT 1 FROM ${this.TABLE} e2
        WHERE e2.fileId = e1.fileId
        AND e2.status = 'pending'
        AND e2.timestamp < e1.timestamp
      )
      ORDER BY e1.timestamp ASC
      LIMIT 1
    `;
    const params = [now, ...lockedFileIds];
    const row = QueryBuilder.selectOne(sql, params);
    return row ? this.parseEvent(row) : void 0;
  }
  static markProcessing(_id) {
    const sql = `
      UPDATE ${this.TABLE}
      SET status = 'processing',
        lockedAt = ?
      WHERE _id = ?
    `;
    QueryBuilder.execute(sql, [Date.now(), _id]);
  }
  static markProcessed(_id) {
    const sql = `
      UPDATE ${this.TABLE}
      SET status = 'processed',
        lockedAt = NULL
      WHERE _id = ?
    `;
    QueryBuilder.execute(sql, [_id]);
  }
  static scheduleRetry(_id, errorMsg) {
    const event = this.findById(_id);
    if (!event) return;
    const delay = RETRY_DELAYS_MS[Math.min(event.retryCount, RETRY_DELAYS_MS.length - 1)];
    const nextRetryAt = Date.now() + delay;
    const sql = `
      UPDATE ${this.TABLE}
      SET status = 'pending',
        retryCount = retryCount + 1,
        lastError = ?,
        nextRetryAt = ?,
        lockedAt = NULL
      WHERE _id = ?
    `;
    QueryBuilder.execute(sql, [errorMsg, nextRetryAt, _id]);
  }
  static scheduleRetryAfter(_id, errorMsg, retryAfterMs) {
    const nextRetryAt = Date.now() + retryAfterMs;
    const sql = `
      UPDATE ${this.TABLE}
      SET status = 'pending',
        lastError = ?,
        nextRetryAt = ?,
        lockedAt = NULL
      WHERE _id = ?
    `;
    QueryBuilder.execute(sql, [errorMsg, nextRetryAt, _id]);
  }
  static markFailed(_id, errorMsg) {
    const sql = `
      UPDATE ${this.TABLE}
      SET status = 'failed',
        lastError = ?,
        lockedAt = NULL
      WHERE _id = ?
    `;
    QueryBuilder.execute(sql, [errorMsg, _id]);
  }
  static listFailed(portalAddress) {
    const portalClause = portalAddress != null ? "AND portalAddress = ?" : "";
    const sql = `
      SELECT * FROM ${this.TABLE}
      WHERE status = 'failed'
      ${portalClause}
      ORDER BY timestamp ASC
    `;
    const params = portalAddress != null ? [portalAddress] : [];
    const rows = QueryBuilder.select(sql, params);
    return rows.map((row) => this.parseEvent(row));
  }
  static resetFailedToPending(_id, portalAddress) {
    const portalClause = portalAddress != null ? "AND portalAddress = ?" : "";
    const sql = `
      UPDATE ${this.TABLE}
      SET status = 'pending',
        retryCount = 0,
        lastError = NULL,
        nextRetryAt = NULL,
        lockedAt = NULL
      WHERE _id = ?
      AND status = 'failed'
      ${portalClause}
    `;
    const params = portalAddress != null ? [_id, portalAddress] : [_id];
    const result = QueryBuilder.execute(sql, params);
    if (result.changes > 0) {
      notifyNewEvent();
    }
    return result.changes > 0;
  }
  static resetAllFailedToPending(portalAddress) {
    const portalClause = portalAddress != null ? "AND portalAddress = ?" : "";
    const sql = `
      UPDATE ${this.TABLE}
      SET status = 'pending',
        retryCount = 0,
        lastError = NULL,
        nextRetryAt = NULL,
        lockedAt = NULL
      WHERE status = 'failed'
      ${portalClause}
    `;
    const params = portalAddress != null ? [portalAddress] : [];
    const result = QueryBuilder.execute(sql, params);
    if (result.changes > 0) {
      notifyNewEvent();
    }
    return result.changes;
  }
  static resetStaleEvents(staleThreshold) {
    const sql = `
      UPDATE ${this.TABLE}
      SET status = 'pending',
        lockedAt = NULL,
        userOpHash = NULL,
        pendingPayload = NULL
      WHERE status = 'processing'
      AND lockedAt IS NOT NULL
      AND lockedAt < ?
    `;
    const result = QueryBuilder.execute(sql, [staleThreshold]);
    return result.changes;
  }
  static setEventPendingOp(_id, userOpHash, payload) {
    const sql = `UPDATE ${this.TABLE} SET userOpHash = ?, pendingPayload = ? WHERE _id = ?`;
    QueryBuilder.execute(sql, [userOpHash, JSON.stringify(payload), _id]);
  }
  static clearEventPendingOp(_id) {
    const sql = `UPDATE ${this.TABLE} SET userOpHash = NULL, pendingPayload = NULL WHERE _id = ?`;
    QueryBuilder.execute(sql, [_id]);
  }
  static parseEvent(row) {
    return {
      _id: row._id,
      type: row.type,
      timestamp: row.timestamp,
      fileId: row.fileId,
      portalAddress: row.portalAddress ?? "",
      status: row.status,
      retryCount: row.retryCount,
      lastError: row.lastError,
      lockedAt: row.lockedAt,
      nextRetryAt: row.nextRetryAt,
      userOpHash: row.userOpHash ?? null,
      pendingPayload: row.pendingPayload ?? null
    };
  }
};

// src/sdk/key-store.ts
import { eciesDecrypt, eciesEncrypt, generateECKeyPair } from "@fileverse/crypto/ecies";

// src/sdk/auth-token-provider.ts
import * as ucans from "@ucans/ucans";

// src/domain/portal/publish.ts
import { fromUint8Array as fromUint8Array3, toUint8Array as toUint8Array3 } from "js-base64";
import { stringToBytes } from "viem";
import { deriveHKDFKey } from "@fileverse/crypto/kdf";
import { generateKeyPairFromSeed } from "@stablelib/ed25519";
import * as ucans2 from "@ucans/ucans";

// src/sdk/smart-agent.ts
import { toHex as toHex2 } from "viem";
import { privateKeyToAccount } from "viem/accounts";

// src/sdk/pimlico-utils.ts
import { createPublicClient, http, hexToBigInt, toHex, toBytes } from "viem";
import { createPimlicoClient } from "permissionless/clients/pimlico";
import { createSmartAccountClient } from "permissionless";
import { toSafeSmartAccount } from "permissionless/accounts";
import { entryPoint07Address } from "viem/account-abstraction";

// src/constants/chains.ts
import { sepolia, gnosis } from "viem/chains";

// src/constants/index.ts
var NETWORK_NAME = STATIC_CONFIG.NETWORK_NAME;
var UPLOAD_SERVER_URL = STATIC_CONFIG.API_URL;
var CHAIN_MAP = {
  gnosis,
  sepolia
};
var CHAIN = CHAIN_MAP[NETWORK_NAME];

// src/sdk/pimlico-utils.ts
import { generatePrivateKey } from "viem/accounts";

// src/sdk/file-manager.ts
import { fromUint8Array as fromUint8Array2, toUint8Array as toUint8Array2 } from "js-base64";

// src/sdk/file-utils.ts
import { getArgon2idHash } from "@fileverse/crypto/argon";
import { bytesToBase64, generateRandomBytes as generateRandomBytes2 } from "@fileverse/crypto/utils";
import { derivePBKDF2Key, encryptAesCBC } from "@fileverse/crypto/kdf";
import { secretBoxEncrypt } from "@fileverse/crypto/nacl";
import hkdf from "futoin-hkdf";
import tweetnacl from "tweetnacl";
import { fromUint8Array, toUint8Array } from "js-base64";

// src/sdk/file-encryption.ts
import { generateRandomBytes } from "@fileverse/crypto/utils";

// src/sdk/file-utils.ts
import { toAESKey, aesEncrypt } from "@fileverse/crypto/webcrypto";
import axios from "axios";
import { encodeFunctionData, parseEventLogs } from "viem";

// src/sdk/file-manager.ts
import { generateAESKey, exportAESKey } from "@fileverse/crypto/webcrypto";
import { markdownToYjs } from "@fileverse/content-processor";

// src/errors/rate-limit.ts
import { HttpRequestError } from "viem";

// src/infra/worker/worker.ts
var STALE_THRESHOLD_MS = 5 * 60 * 1e3;

// src/appWorker.ts
var worker = null;
async function closeWorker() {
  if (worker) {
    await worker.close();
    worker = null;
  }
}

// src/infra/reporter.ts
var Reporter = class {
  async reportError(message) {
    console.error("Error reported:", message);
  }
};
var reporter_default = new Reporter();

// src/infra/database/connection.ts
var DatabaseConnectionManager = class _DatabaseConnectionManager {
  static instance;
  db = null;
  constructor() {
  }
  static getInstance() {
    if (!_DatabaseConnectionManager.instance) {
      _DatabaseConnectionManager.instance = new _DatabaseConnectionManager();
    }
    return _DatabaseConnectionManager.instance;
  }
  getConnection() {
    if (!this.db) {
      const dbPath = config.DB_PATH;
      this.db = new Database(dbPath, {
        verbose: config.NODE_ENV === "development" ? (msg) => logger.debug(String(msg)) : void 0
      });
      this.db.pragma("journal_mode = WAL");
      this.db.pragma("foreign_keys = ON");
      this.db.prepare("SELECT 1").get();
      logger.info(`SQLite database connected: ${dbPath}`);
    }
    return this.db;
  }
  async close() {
    if (this.db) {
      this.db.close();
      this.db = null;
      logger.info("Database connection closed");
    }
  }
  isConnected() {
    return this.db !== null && this.db.open;
  }
};
var databaseConnectionManager = DatabaseConnectionManager.getInstance();

// src/domain/file/constants.ts
var DEFAULT_LIST_LIMIT = 10;

// src/infra/database/query-builder.ts
function getDb() {
  return databaseConnectionManager.getConnection();
}
var QueryBuilder = class {
  static select(sql, params = []) {
    const stmt = getDb().prepare(sql);
    return stmt.all(params);
  }
  static selectOne(sql, params = []) {
    const stmt = getDb().prepare(sql);
    return stmt.get(params);
  }
  static execute(sql, params = []) {
    const stmt = getDb().prepare(sql);
    const result = stmt.run(params);
    return {
      changes: result.changes,
      lastInsertRowid: result.lastInsertRowid
    };
  }
  static transaction(callback) {
    return getDb().transaction(callback)();
  }
  static paginate(sql, options = {}) {
    let query = sql;
    if (options.orderBy) {
      query += ` ORDER BY ${options.orderBy} ${options.orderDirection || "ASC"}`;
    }
    const hasOffset = (options.offset ?? 0) > 0;
    const limit = options.limit ?? (hasOffset ? DEFAULT_LIST_LIMIT : void 0);
    if (limit) {
      query += ` LIMIT ${limit}`;
    }
    if (hasOffset) {
      query += ` OFFSET ${options.offset}`;
    }
    return query;
  }
};

// src/infra/database/index.ts
function getDb2() {
  return databaseConnectionManager.getConnection();
}
var closeDatabase = async () => {
  await databaseConnectionManager.close();
};
var database_default = getDb2;

// src/infra/database/migrations/index.ts
var STABLE_SCHEMA = `
CREATE TABLE IF NOT EXISTS files (
  _id TEXT PRIMARY KEY,
  ddocId TEXT NOT NULL,
  title TEXT NOT NULL,
  content TEXT NOT NULL,
  localVersion INTEGER NOT NULL DEFAULT 1,
  onchainVersion INTEGER NOT NULL DEFAULT 0,
  syncStatus TEXT NOT NULL DEFAULT 'pending',
  createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,
  updatedAt DATETIME DEFAULT CURRENT_TIMESTAMP,
  isDeleted INTEGER NOT NULL DEFAULT 0,
  portalAddress TEXT NOT NULL,
  metadata TEXT DEFAULT '{}',
  onChainFileId INTEGER,
  commentKey TEXT,
  linkKey TEXT,
  linkKeyNonce TEXT,
  link TEXT
);
CREATE INDEX IF NOT EXISTS idx_files_createdAt ON files(createdAt);
CREATE INDEX IF NOT EXISTS idx_files_syncStatus ON files(syncStatus);
CREATE INDEX IF NOT EXISTS idx_files_title ON files(title);
CREATE INDEX IF NOT EXISTS idx_files_portalAddress ON files(portalAddress);

CREATE TABLE IF NOT EXISTS portals (
  _id TEXT PRIMARY KEY,
  portalAddress TEXT NOT NULL UNIQUE,
  portalSeed TEXT NOT NULL UNIQUE,
  ownerAddress TEXT NOT NULL,
  createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,
  updatedAt DATETIME DEFAULT CURRENT_TIMESTAMP
);

CREATE TABLE IF NOT EXISTS api_keys (
  _id TEXT PRIMARY KEY,
  apiKeySeed TEXT NOT NULL UNIQUE,
  name TEXT NOT NULL,
  collaboratorAddress TEXT NOT NULL UNIQUE,
  portalAddress TEXT NOT NULL,
  createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,
  isDeleted INTEGER NOT NULL DEFAULT 0
);

CREATE TABLE IF NOT EXISTS events (
  _id TEXT PRIMARY KEY,
  type TEXT NOT NULL CHECK (type IN ('create', 'update', 'delete')),
  timestamp INTEGER NOT NULL,
  fileId TEXT NOT NULL,
  status TEXT NOT NULL DEFAULT 'pending' CHECK (status IN ('pending', 'processing', 'processed', 'failed')),
  retryCount INTEGER NOT NULL DEFAULT 0,
  lastError TEXT,
  lockedAt INTEGER,
  nextRetryAt INTEGER,
  userOpHash TEXT,
  pendingPayload TEXT,
  portalAddress TEXT
);
CREATE INDEX IF NOT EXISTS idx_events_pending_eligible ON events (status, nextRetryAt, timestamp) WHERE status = 'pending';
CREATE INDEX IF NOT EXISTS idx_events_file_pending_ts ON events (fileId, status, timestamp) WHERE status = 'pending';
CREATE INDEX IF NOT EXISTS idx_events_processing_locked ON events (status, lockedAt) WHERE status = 'processing';
CREATE INDEX IF NOT EXISTS idx_events_failed_portal ON events (portalAddress, status) WHERE status = 'failed';

CREATE TABLE IF NOT EXISTS folders (
  _id TEXT PRIMARY KEY,
  onchainFileId INTEGER NOT NULL,
  folderId TEXT NOT NULL,
  folderRef TEXT NOT NULL,
  folderName TEXT NOT NULL,
  portalAddress TEXT NOT NULL,
  metadataIPFSHash TEXT NOT NULL,
  contentIPFSHash TEXT NOT NULL,
  isDeleted INTEGER NOT NULL DEFAULT 0,
  lastTransactionHash TEXT,
  lastTransactionBlockNumber INTEGER NOT NULL,
  lastTransactionBlockTimestamp INTEGER NOT NULL,
  created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
  updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_folders_folderRef_folderId ON folders(folderRef, folderId);
CREATE INDEX IF NOT EXISTS idx_folders_folderRef ON folders(folderRef);
CREATE INDEX IF NOT EXISTS idx_folders_created_at ON folders(created_at);
`;
function runMigrations() {
  const db = database_default();
  db.exec(STABLE_SCHEMA);
  logger.debug("Database schema ready");
}

// src/commands/listCommand.ts
import { Command } from "commander";
import Table from "cli-table3";

// src/domain/file/index.ts
import { generate } from "short-uuid";
function listFiles(params) {
  const { limit, skip, portalAddress } = params;
  const effectiveLimit = limit || DEFAULT_LIST_LIMIT;
  const result = FilesModel.findAll(portalAddress, effectiveLimit, skip);
  const processedFiles = result.files.map((file) => ({
    ddocId: file.ddocId,
    link: file.link,
    title: file.title,
    content: file.content,
    localVersion: file.localVersion,
    onchainVersion: file.onchainVersion,
    syncStatus: file.syncStatus,
    isDeleted: file.isDeleted,
    onChainFileId: file.onChainFileId,
    portalAddress: file.portalAddress,
    createdAt: file.createdAt,
    updatedAt: file.updatedAt
  }));
  return {
    ddocs: processedFiles,
    total: result.total,
    hasNext: result.hasNext
  };
}
function getFile(ddocId, portalAddress) {
  if (!ddocId) {
    throw new Error("ddocId is required");
  }
  const file = FilesModel.findByDDocId(ddocId, portalAddress);
  if (!file) {
    return null;
  }
  return {
    ddocId: file.ddocId,
    link: file.link || "",
    title: file.title,
    content: file.content,
    localVersion: file.localVersion,
    onchainVersion: file.onchainVersion,
    syncStatus: file.syncStatus,
    isDeleted: file.isDeleted,
    onChainFileId: file.onChainFileId,
    portalAddress: file.portalAddress,
    createdAt: file.createdAt,
    updatedAt: file.updatedAt
  };
}
var createFile = async (input) => {
  if (!input.title || !input.content || !input.portalAddress) {
    throw new Error("title, content, and portalAddress are required");
  }
  const ddocId = generate();
  const file = FilesModel.create({
    title: input.title,
    content: input.content,
    ddocId,
    portalAddress: input.portalAddress
  });
  EventsModel.create({ type: "create", fileId: file._id, portalAddress: file.portalAddress });
  return file;
};
var updateFile = async (ddocId, payload, portalAddress) => {
  if (!ddocId) {
    throw new Error("ddocId is required");
  }
  if (!payload.title && !payload.content) {
    throw new Error("At least one field is required: Either provide title, content, or both");
  }
  const existingFile = FilesModel.findByDDocId(ddocId, portalAddress);
  if (!existingFile) {
    throw new Error(`File with ddocId ${ddocId} not found`);
  }
  const updatePayload = {
    ...payload,
    localVersion: existingFile.localVersion + 1,
    syncStatus: "pending"
    // since the update is done in local db, it's not on the chain yet. hence pending
  };
  const updatedFile = FilesModel.update(existingFile._id, updatePayload, portalAddress);
  EventsModel.create({ type: "update", fileId: updatedFile._id, portalAddress: updatedFile.portalAddress });
  return {
    ddocId: updatedFile.ddocId,
    link: updatedFile.link,
    title: updatedFile.title,
    content: updatedFile.content,
    localVersion: updatedFile.localVersion,
    onchainVersion: updatedFile.onchainVersion,
    syncStatus: updatedFile.syncStatus,
    isDeleted: updatedFile.isDeleted,
    onChainFileId: updatedFile.onChainFileId,
    portalAddress: updatedFile.portalAddress,
    createdAt: updatedFile.createdAt,
    updatedAt: updatedFile.updatedAt
  };
};
var deleteFile = async (ddocId, portalAddress) => {
  if (!ddocId) {
    throw new Error("ddocId is required");
  }
  const existingFile = FilesModel.findByDDocId(ddocId, portalAddress);
  if (!existingFile) {
    throw new Error(`File with ddocId ${ddocId} not found`);
  }
  const deletedFile = FilesModel.softDelete(existingFile._id);
  EventsModel.create({ type: "delete", fileId: deletedFile._id, portalAddress: deletedFile.portalAddress });
  return deletedFile;
};

// src/commands/utils/util.ts
var columnNames = {
  index: "#",
  ddocId: "DDoc ID",
  title: "Title",
  status: "Status",
  local: "Local version",
  onchain: "On-chain version",
  deleted: "Deleted",
  created: "Created",
  lastModified: "Last modified"
};
var columnWidth = {
  [columnNames.index]: 3,
  [columnNames.ddocId]: 25,
  [columnNames.title]: 25,
  [columnNames.status]: 10,
  [columnNames.local]: 16,
  [columnNames.onchain]: 18,
  [columnNames.deleted]: 10,
  [columnNames.created]: 12,
  [columnNames.lastModified]: 20
};
function formatDate(date) {
  const d = typeof date === "string" ? new Date(date) : date;
  const day = String(d.getDate()).padStart(2, "0");
  const month = String(d.getMonth() + 1).padStart(2, "0");
  const year = d.getFullYear();
  return `${day}-${month}-${year}`;
}
function getElapsedTime(date) {
  const now = /* @__PURE__ */ new Date();
  const past = typeof date === "string" ? new Date(date) : date;
  const diffMs = now.getTime() - past.getTime();
  if (diffMs < 0) {
    return "just now";
  }
  const diffSeconds = Math.floor(diffMs / 1e3);
  const diffMinutes = Math.floor(diffSeconds / 60);
  const diffHours = Math.floor(diffMinutes / 60);
  const diffDays = Math.floor(diffHours / 24);
  const diffWeeks = Math.floor(diffDays / 7);
  const diffMonths = Math.floor(diffDays / 30);
  const diffYears = Math.floor(diffDays / 365);
  const units = [
    { value: diffSeconds, max: 60, name: "second" },
    { value: diffMinutes, max: 60, name: "minute" },
    { value: diffHours, max: 24, name: "hour" },
    { value: diffDays, max: 7, name: "day" },
    { value: diffWeeks, max: 4, name: "week" },
    { value: diffMonths, max: 12, name: "month" },
    { value: diffYears, max: Infinity, name: "year" }
  ];
  const unit = units.find((u) => u.value < u.max);
  if (unit) {
    const label = unit.value === 1 ? unit.name : `${unit.name}s`;
    return `${unit.value} ${label} ago`;
  }
  return "just now";
}
function validateApiKey(apiKey) {
  if (!apiKey) {
    const API_KEY_SETUP_MESSAGE = `
API key is not configured.

To set up your API key, run:
fileverse-api --apiKey <your-api-key> --rpcUrl <rpc-url>

This will configure your Fileverse API instance and save your credentials.
After setup, you can use ddctl commands.
`;
    console.error(API_KEY_SETUP_MESSAGE);
    process.exit(1);
  }
}

// src/commands/listCommand.ts
var listCommand = new Command().name("list").description("List all ddocs").option("-l, --limit <number>", "Limit the number of results", parseInt).option("-s, --skip <number>", "Skip the first N results", parseInt).action(async (options) => {
  try {
    const runtimeConfig = getRuntimeConfig();
    const apiKey = runtimeConfig.API_KEY;
    validateApiKey(apiKey);
    const portalAddress = ApiKeysModel.findByApiKey(apiKey)?.portalAddress;
    if (!portalAddress) throw new Error("Portal address is required");
    const params = {
      limit: options.limit,
      skip: options.skip,
      portalAddress
    };
    const result = listFiles(params);
    if (result.ddocs.length === 0) {
      console.log("No ddocs found.");
      return;
    }
    const table = new Table({
      head: [
        columnNames.index,
        columnNames.ddocId,
        columnNames.title,
        columnNames.status,
        columnNames.local,
        columnNames.onchain,
        columnNames.created,
        columnNames.lastModified
      ],
      colWidths: [
        columnWidth[columnNames.index],
        columnWidth[columnNames.ddocId],
        columnWidth[columnNames.title],
        columnWidth[columnNames.status],
        columnWidth[columnNames.local],
        columnWidth[columnNames.onchain],
        columnWidth[columnNames.created],
        columnWidth[columnNames.lastModified]
      ],
      style: { head: [] }
    });
    result.ddocs.forEach((ddoc, index) => {
      const ddocId = ddoc.ddocId || "N/A";
      table.push([
        index + 1,
        ddocId,
        ddoc.title.length > 23 ? ddoc.title.substring(0, 20) + "..." : ddoc.title,
        ddoc.syncStatus,
        ddoc.localVersion,
        ddoc.onchainVersion,
        formatDate(ddoc.createdAt),
        getElapsedTime(ddoc.updatedAt)
      ]);
    });
    console.log(`
Found ${result.total} ddoc(s):
`);
    console.log(table.toString());
    if (result.hasNext) {
      console.log("\n(More results available. Use --skip and --limit for pagination)");
    }
  } catch (error) {
    console.error("Error listing ddocs:", error.message);
    throw error;
  }
});

// src/commands/getCommand.ts
import { Command as Command2 } from "commander";
import Table2 from "cli-table3";
var getCommand = new Command2().name("get").description("Get a ddoc by its ID").argument("<ddocId>", "The ddoc ID to retrieve").action(async (ddocId) => {
  try {
    const runtimeConfig = getRuntimeConfig();
    const apiKey = runtimeConfig.API_KEY;
    validateApiKey(apiKey);
    const portalAddress = ApiKeysModel.findByApiKey(apiKey)?.portalAddress;
    if (!portalAddress) throw new Error("Portal address is required");
    const file = getFile(ddocId, portalAddress);
    if (!file) {
      console.error(`Ddoc with ID "${ddocId}" not found.`);
      return;
    }
    const table = new Table2({
      head: [
        columnNames.ddocId,
        columnNames.title,
        columnNames.status,
        columnNames.local,
        columnNames.onchain,
        columnNames.deleted,
        columnNames.created,
        columnNames.lastModified
      ],
      colWidths: [
        columnWidth[columnNames.ddocId],
        columnWidth[columnNames.title],
        columnWidth[columnNames.status],
        columnWidth[columnNames.local],
        columnWidth[columnNames.onchain],
        columnWidth[columnNames.deleted],
        columnWidth[columnNames.created],
        columnWidth[columnNames.lastModified]
      ],
      style: { head: [] }
    });
    const fileDdocId = file.ddocId || "N/A";
    table.push([
      fileDdocId,
      file.title.length > 23 ? file.title.substring(0, 20) + "..." : file.title,
      file.syncStatus,
      file.localVersion,
      file.onchainVersion,
      file.isDeleted ? "True" : "False",
      formatDate(file.createdAt),
      getElapsedTime(file.updatedAt)
    ]);
    console.log("\nDdoc details:\n");
    console.log(table.toString());
    if (file.link) {
      console.log(`
Link: ${file.link}
`);
    }
  } catch (error) {
    console.error("Error getting ddoc:", error.message);
    throw error;
  }
});

// src/commands/createCommand.ts
import { Command as Command3 } from "commander";
import * as fs2 from "fs";
import * as path2 from "path";
import Table3 from "cli-table3";
var createCommand = new Command3().name("create").description("Create a new ddoc from a file").argument("<filepath>", "Path to the file to create ddoc from").action(async (filepath) => {
  try {
    if (!fs2.existsSync(filepath)) {
      throw new Error(`File not found: ${filepath}`);
    }
    const runtimeConfig = getRuntimeConfig();
    const apiKey = runtimeConfig.API_KEY;
    validateApiKey(apiKey);
    const portalAddress = ApiKeysModel.findByApiKey(apiKey)?.portalAddress;
    if (!portalAddress) throw new Error("Portal address is required");
    const content = fs2.readFileSync(filepath, "utf-8");
    if (!content || content.trim().length === 0) {
      console.error("Error creating ddoc: File content cannot be empty. Add some content to the file and try again.");
      process.exit(1);
    }
    const basename3 = path2.basename(filepath);
    const lastDotIndex = basename3.lastIndexOf(".");
    const title = lastDotIndex > 0 ? basename3.substring(0, lastDotIndex) : basename3;
    const file = await createFile({ title, content, portalAddress });
    console.log("\nDdoc created successfully!\n");
    const table = new Table3({
      head: [
        columnNames.ddocId,
        columnNames.title,
        columnNames.status,
        columnNames.local,
        columnNames.onchain,
        columnNames.created,
        columnNames.lastModified
      ],
      colWidths: [
        columnWidth[columnNames.ddocId],
        columnWidth[columnNames.title],
        columnWidth[columnNames.status],
        columnWidth[columnNames.local],
        columnWidth[columnNames.onchain],
        columnWidth[columnNames.created],
        columnWidth[columnNames.lastModified]
      ],
      style: { head: [] }
    });
    const ddocId = file.ddocId || "N/A";
    table.push([
      ddocId,
      file.title.length > 23 ? file.title.substring(0, 20) + "..." : file.title,
      file.syncStatus,
      file.localVersion,
      file.onchainVersion,
      formatDate(file.createdAt),
      getElapsedTime(file.updatedAt)
    ]);
    console.log(table.toString());
  } catch (error) {
    console.error("Error creating ddoc:", error.message);
    process.exit(1);
  }
});
|
|
1266
|
+
|
|
1267
|
+
// src/commands/updateCommand.ts
|
|
1268
|
+
import * as fs3 from "fs";
|
|
1269
|
+
import * as path3 from "path";
|
|
1270
|
+
import * as os2 from "os";
|
|
1271
|
+
import { Command as Command4 } from "commander";
|
|
1272
|
+
import { spawnSync } from "child_process";
|
|
1273
|
+
import Table4 from "cli-table3";
|
|
1274
|
+
function showTable(updatedFile) {
|
|
1275
|
+
const table = new Table4({
|
|
1276
|
+
head: [
|
|
1277
|
+
columnNames.ddocId,
|
|
1278
|
+
columnNames.title,
|
|
1279
|
+
columnNames.status,
|
|
1280
|
+
columnNames.local,
|
|
1281
|
+
columnNames.onchain,
|
|
1282
|
+
columnNames.created,
|
|
1283
|
+
columnNames.lastModified
|
|
1284
|
+
],
|
|
1285
|
+
colWidths: [
|
|
1286
|
+
columnWidth[columnNames.ddocId],
|
|
1287
|
+
columnWidth[columnNames.title],
|
|
1288
|
+
columnWidth[columnNames.status],
|
|
1289
|
+
columnWidth[columnNames.local],
|
|
1290
|
+
columnWidth[columnNames.onchain],
|
|
1291
|
+
columnWidth[columnNames.created],
|
|
1292
|
+
columnWidth[columnNames.lastModified]
|
|
1293
|
+
],
|
|
1294
|
+
style: { head: [] }
|
|
1295
|
+
});
|
|
1296
|
+
const fileDdocId = updatedFile.ddocId || "N/A";
|
|
1297
|
+
table.push([
|
|
1298
|
+
fileDdocId,
|
|
1299
|
+
updatedFile.title.length > 23 ? updatedFile.title.substring(0, 20) + "..." : updatedFile.title,
|
|
1300
|
+
updatedFile.syncStatus,
|
|
1301
|
+
updatedFile.localVersion,
|
|
1302
|
+
updatedFile.onchainVersion,
|
|
1303
|
+
formatDate(updatedFile.createdAt),
|
|
1304
|
+
getElapsedTime(updatedFile.updatedAt)
|
|
1305
|
+
]);
|
|
1306
|
+
console.log(table.toString());
|
|
1307
|
+
}
|
|
1308
|
+
var updateCommand = new Command4().name("update").description("Update an existing ddoc. Use -f to update from a file, or omit -f to edit in vi editor").argument("<ddocId>", "The ddoc ID to update").option("-f, --file <file_path>", "path to file to update ddoc from (if omitted, opens vi editor)").action(async (ddocId, options) => {
|
|
1309
|
+
try {
|
|
1310
|
+
const runtimeConfig = getRuntimeConfig();
|
|
1311
|
+
const apiKey = runtimeConfig.API_KEY;
|
|
1312
|
+
validateApiKey(apiKey);
|
|
1313
|
+
const portalAddress = ApiKeysModel.findByApiKey(apiKey)?.portalAddress;
|
|
1314
|
+
if (!portalAddress) throw new Error("Portal address is required");
|
|
1315
|
+
const file = getFile(ddocId, portalAddress);
|
|
1316
|
+
if (!file) {
|
|
1317
|
+
throw new Error(`ddoc with ${ddocId} not found.`);
|
|
1318
|
+
}
|
|
1319
|
+
const filePath = options?.file ?? "";
|
|
1320
|
+
if (filePath) {
|
|
1321
|
+
const content = fs3.readFileSync(filePath, "utf-8");
|
|
1322
|
+
if (!content || content.trim().length === 0) {
|
|
1323
|
+
throw new Error(`file content cannot be empty`);
|
|
1324
|
+
}
|
|
1325
|
+
const title = path3.basename(filePath);
|
|
1326
|
+
const payload = {
|
|
1327
|
+
title,
|
|
1328
|
+
content
|
|
1329
|
+
};
|
|
1330
|
+
const updatedFile = await updateFile(ddocId, payload, portalAddress);
|
|
1331
|
+
console.log("\n\u2713 Ddoc updated successfully!\n");
|
|
1332
|
+
showTable(updatedFile);
|
|
1333
|
+
return;
|
|
1334
|
+
}
|
|
1335
|
+
const tmpFilePath = path3.join(os2.tmpdir(), `tmp-${ddocId}-${Date.now()}.txt`);
|
|
1336
|
+
fs3.writeFileSync(tmpFilePath, file.content);
|
|
1337
|
+
const editor = process.env.EDITOR || "vi";
|
|
1338
|
+
const result = spawnSync(editor, [tmpFilePath], { stdio: "inherit" });
|
|
1339
|
+
if (result.status === 0) {
|
|
1340
|
+
const newContent = fs3.readFileSync(tmpFilePath, "utf-8");
|
|
1341
|
+
if (newContent === file.content) {
|
|
1342
|
+
console.log(`No changes made. Update cancelled.`);
|
|
1343
|
+
fs3.unlinkSync(tmpFilePath);
|
|
1344
|
+
return;
|
|
1345
|
+
}
|
|
1346
|
+
const payload = {
|
|
1347
|
+
title: file.title,
|
|
1348
|
+
// keeping same title as current
|
|
1349
|
+
content: newContent
|
|
1350
|
+
};
|
|
1351
|
+
const updatedFile = await updateFile(ddocId, payload, portalAddress);
|
|
1352
|
+
console.log("\n\u2713 Ddoc updated successfully!\n");
|
|
1353
|
+
showTable(updatedFile);
|
|
1354
|
+
}
|
|
1355
|
+
fs3.unlinkSync(tmpFilePath);
|
|
1356
|
+
} catch (error) {
|
|
1357
|
+
console.error("Error updating ddoc:", error.message);
|
|
1358
|
+
throw error;
|
|
1359
|
+
}
|
|
1360
|
+
});
|
|
1361
|
+
|
|
1362
|
+
// src/commands/deleteCommand.ts
import { Command as Command5 } from "commander";
var deleteCommand = new Command5().name("delete").description("Delete one or more ddocs by their IDs").argument("<ddocIds...>", "One or more ddoc IDs to delete (space-separated)").action(async (ddocIds) => {
  try {
    const runtimeConfig = getRuntimeConfig();
    const apiKey = runtimeConfig.API_KEY;
    validateApiKey(apiKey);
    const portalAddress = ApiKeysModel.findByApiKey(apiKey)?.portalAddress;
    if (!portalAddress) throw new Error("Portal address is required");
    for (const ddocId of ddocIds) {
      try {
        await deleteFile(ddocId, portalAddress);
        console.log(`ddoc ${ddocId} deleted successfully`);
      } catch (error) {
        console.error(`Error deleting ddoc ${ddocId}:`, error.message);
      }
    }
  } catch (error) {
    console.error("Error:", error.message);
    throw error;
  }
});

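// Example usage (illustrative; IDs are processed one by one, so a failure on one ID
// does not stop the remaining deletions):
//   ddctl delete <ddocId>
//   ddctl delete <ddocId1> <ddocId2> <ddocId3>
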
// src/commands/downloadCommand.ts
import { Command as Command6 } from "commander";
import * as fs4 from "fs";
var downloadCommand = new Command6().name("download").description("Download a ddoc to a local file").argument("<ddocId>", "The ddoc ID to download").option("-o, --output <filename>", "Output filename (only supports markdown)").action(async (ddocId, options) => {
  try {
    const runtimeConfig = getRuntimeConfig();
    const apiKey = runtimeConfig.API_KEY;
    validateApiKey(apiKey);
    const portalAddress = ApiKeysModel.findByApiKey(apiKey)?.portalAddress;
    if (!portalAddress) throw new Error("Portal address is required");
    const file = getFile(ddocId, portalAddress);
    if (!file) {
      console.error(`Ddoc with ID "${ddocId}" not found.`);
      return;
    }
    let outputFilename = file.title;
    if (options.output) {
      outputFilename = options.output.endsWith(".md") ? options.output : `${options.output}.md`;
    }
    fs4.writeFileSync(outputFilename, file.content, "utf-8");
    console.log(`
\u2713 Ddoc downloaded successfully to: ${outputFilename}
`);
  } catch (error) {
    console.error("Error downloading ddoc:", error.message);
    throw error;
  }
});

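// Example usage (illustrative; without -o the ddoc title is used as the filename, and a
// missing .md extension is appended to the -o value):
//   ddctl download <ddocId>
//   ddctl download <ddocId> -o my-doc.md
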
// src/commands/viewCommand.ts
import { Command as Command7 } from "commander";
var viewCommand = new Command7().name("view").description("View content preview of a ddoc").argument("<ddocId>", "The ddoc ID to view").option("-n, --lines <number>", "Number of lines to preview (default: 10)", "10").action(async (ddocId, options) => {
  try {
    const runtimeConfig = getRuntimeConfig();
    const apiKey = runtimeConfig.API_KEY;
    validateApiKey(apiKey);
    const portalAddress = ApiKeysModel.findByApiKey(apiKey)?.portalAddress;
    if (!portalAddress) throw new Error("Portal address is required");
    const file = getFile(ddocId, portalAddress);
    if (!file) {
      console.error(`Ddoc with ID "${ddocId}" not found.`);
      return;
    }
    const content = file.content || "";
    const contentLines = content.split("\n");
    const totalLines = contentLines.length;
    const previewLines = Math.max(1, parseInt(options.lines || "10", 10));
    const linesToShow = Math.min(previewLines, totalLines);
    if (content.trim().length === 0) {
      console.log("\nContent preview:\n");
      console.log("=".repeat(80));
      console.log("(empty)");
      console.log("=".repeat(80));
    } else {
      const preview = contentLines.slice(0, linesToShow).join("\n");
      console.log("\nContent preview:\n");
      console.log("=".repeat(80));
      console.log(preview);
      if (totalLines > linesToShow) {
        console.log(`
... (${totalLines - linesToShow} more line${totalLines - linesToShow === 1 ? "" : "s"})`);
      }
      console.log("=".repeat(80));
      console.log(`
Showing ${linesToShow} of ${totalLines} line${totalLines === 1 ? "" : "s"}
`);
    }
  } catch (error) {
    console.error("Error viewing ddoc:", error.message);
    throw error;
  }
});

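// Example usage (illustrative; -n controls how many lines are previewed, default 10):
//   ddctl view <ddocId>
//   ddctl view <ddocId> -n 25
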
// src/commands/eventsCommand.ts
import { Command as Command8 } from "commander";
import Table5 from "cli-table3";
var MAX_ERROR_LEN = 60;
var eventsCommand = new Command8().name("events").description("Worker event operations (list failed, retry)");
eventsCommand.command("list-failed").description("List all failed events").action(async () => {
  try {
    const events = EventsModel.listFailed();
    if (events.length === 0) {
      console.log("No failed events.");
      return;
    }
    const table = new Table5({
      head: ["ID", "File ID", "Portal", "Type", "Timestamp", "Last Error"],
      colWidths: [28, 12, 14, 10, 12, MAX_ERROR_LEN],
      style: { head: [] }
    });
    events.forEach((e) => {
      const err = e.lastError ?? "";
      table.push([
        e._id,
        e.fileId,
        e.portalAddress || "\u2014",
        e.type,
        formatDate(new Date(e.timestamp)),
        err.length > MAX_ERROR_LEN ? err.slice(0, MAX_ERROR_LEN - 3) + "..." : err
      ]);
    });
    console.log(`
Failed events (${events.length}):
`);
    console.log(table.toString());
  } catch (error) {
    const msg = error instanceof Error ? error.message : String(error);
    console.error("Error listing failed events:", msg);
    throw error;
  }
});
eventsCommand.command("retry <eventId>").description("Retry a single failed event by ID").action(async (eventId) => {
  try {
    const updated = EventsModel.resetFailedToPending(eventId);
    if (updated) {
      console.log(`Event ${eventId} reset to pending. Worker will pick it up.`);
    } else {
      console.error(`Event not found or not in failed state: ${eventId}`);
      process.exitCode = 1;
    }
  } catch (error) {
    const msg = error instanceof Error ? error.message : String(error);
    console.error("Error retrying event:", msg);
    throw error;
  }
});
eventsCommand.command("retry-all").description("Retry all failed events").action(async () => {
  try {
    const count = EventsModel.resetAllFailedToPending();
    console.log(`Reset ${count} failed event(s) to pending. Worker will pick them up.`);
  } catch (error) {
    const msg = error instanceof Error ? error.message : String(error);
    console.error("Error retrying failed events:", msg);
    throw error;
  }
});

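// Example usage (illustrative; retries only reset events to pending, and the background
// worker performs the actual re-processing):
//   ddctl events list-failed
//   ddctl events retry <eventId>
//   ddctl events retry-all
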
// src/commands/index.ts
logger.level = "error";
runMigrations();
var program = new Command9().name("ddctl").description("CLI tool to manage your ddocs").version("0.0.1").addHelpText("beforeAll", "\n").addHelpText("afterAll", "\n");
program.addCommand(listCommand);
program.addCommand(getCommand);
program.addCommand(createCommand);
program.addCommand(updateCommand);
program.addCommand(deleteCommand);
program.addCommand(downloadCommand);
program.addCommand(viewCommand);
program.addCommand(eventsCommand);
program.parseAsync().then(async () => {
  try {
    await closeWorker();
    await closeDatabase();
  } catch (error) {
  }
  process.exit(0);
}).catch((error) => {
  console.error("Error:", error);
  process.exit(1);
});
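// Top-level help and version (illustrative; the registered subcommands are list, get,
// create, update, delete, download, view and events):
//   ddctl --help
//   ddctl --version   # prints 0.0.1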
export {
  program
};
//# sourceMappingURL=index.js.map