@certik/skynet 0.22.1 → 0.22.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.vscode/settings.json +5 -0
- package/CHANGELOG.md +4 -0
- package/build.ts +23 -0
- package/bun.lockb +0 -0
- package/dist/abi.d.ts +1 -2
- package/dist/abi.js +569 -563
- package/dist/address.d.ts +0 -1
- package/dist/address.js +22 -21
- package/dist/api.d.ts +0 -1
- package/dist/api.js +235 -120
- package/dist/app.d.ts +1 -2
- package/dist/app.js +2030 -276
- package/dist/availability.d.ts +0 -1
- package/dist/availability.js +126 -56
- package/dist/cli.d.ts +0 -1
- package/dist/cli.js +28 -24
- package/dist/const.d.ts +0 -1
- package/dist/const.js +153 -132
- package/dist/databricks.d.ts +0 -1
- package/dist/databricks.js +198 -58
- package/dist/date.d.ts +0 -1
- package/dist/date.js +48 -21
- package/dist/deploy.d.ts +0 -1
- package/dist/deploy.js +427 -292
- package/dist/dynamodb.d.ts +3 -4
- package/dist/dynamodb.js +432 -281
- package/dist/env.d.ts +2 -3
- package/dist/env.js +16 -9
- package/dist/graphql.d.ts +0 -1
- package/dist/graphql.js +26 -23
- package/dist/indexer.d.ts +0 -1
- package/dist/indexer.js +1050 -441
- package/dist/log.d.ts +0 -1
- package/dist/log.js +53 -52
- package/dist/object-hash.d.ts +0 -1
- package/dist/object-hash.js +49 -59
- package/dist/opsgenie.d.ts +1 -1
- package/dist/opsgenie.js +31 -30
- package/dist/por.d.ts +0 -1
- package/dist/por.js +113 -123
- package/dist/s3.d.ts +7 -8
- package/dist/s3.js +103 -91
- package/dist/search.d.ts +0 -1
- package/dist/search.js +100 -25
- package/dist/selector.d.ts +0 -1
- package/dist/selector.js +34 -38
- package/dist/slack.d.ts +0 -1
- package/dist/slack.js +27 -21
- package/dist/util.d.ts +0 -1
- package/dist/util.js +21 -20
- package/examples/api.ts +1 -1
- package/examples/indexer.ts +1 -1
- package/examples/mode-indexer.ts +1 -1
- package/package.json +5 -4
- package/{graphql.ts → src/graphql.ts} +1 -1
- package/{opsgenie.ts → src/opsgenie.ts} +2 -1
- package/tsconfig.build.json +2 -5
- package/tsconfig.json +11 -20
- package/dist/abi.d.ts.map +0 -1
- package/dist/address.d.ts.map +0 -1
- package/dist/api.d.ts.map +0 -1
- package/dist/app.d.ts.map +0 -1
- package/dist/availability.d.ts.map +0 -1
- package/dist/cli.d.ts.map +0 -1
- package/dist/const.d.ts.map +0 -1
- package/dist/databricks.d.ts.map +0 -1
- package/dist/date.d.ts.map +0 -1
- package/dist/deploy.d.ts.map +0 -1
- package/dist/dynamodb.d.ts.map +0 -1
- package/dist/env.d.ts.map +0 -1
- package/dist/graphql.d.ts.map +0 -1
- package/dist/indexer.d.ts.map +0 -1
- package/dist/log.d.ts.map +0 -1
- package/dist/object-hash.d.ts.map +0 -1
- package/dist/opsgenie.d.ts.map +0 -1
- package/dist/por.d.ts.map +0 -1
- package/dist/s3.d.ts.map +0 -1
- package/dist/search.d.ts.map +0 -1
- package/dist/selector.d.ts.map +0 -1
- package/dist/slack.d.ts.map +0 -1
- package/dist/util.d.ts.map +0 -1
- /package/{abi.ts → src/abi.ts} +0 -0
- /package/{address.ts → src/address.ts} +0 -0
- /package/{api.ts → src/api.ts} +0 -0
- /package/{app.ts → src/app.ts} +0 -0
- /package/{availability.ts → src/availability.ts} +0 -0
- /package/{cli.ts → src/cli.ts} +0 -0
- /package/{const.ts → src/const.ts} +0 -0
- /package/{databricks.ts → src/databricks.ts} +0 -0
- /package/{date.ts → src/date.ts} +0 -0
- /package/{deploy.ts → src/deploy.ts} +0 -0
- /package/{dynamodb.ts → src/dynamodb.ts} +0 -0
- /package/{env.ts → src/env.ts} +0 -0
- /package/{indexer.ts → src/indexer.ts} +0 -0
- /package/{log.ts → src/log.ts} +0 -0
- /package/{object-hash.ts → src/object-hash.ts} +0 -0
- /package/{por.ts → src/por.ts} +0 -0
- /package/{s3.ts → src/s3.ts} +0 -0
- /package/{search.ts → src/search.ts} +0 -0
- /package/{selector.ts → src/selector.ts} +0 -0
- /package/{slack.ts → src/slack.ts} +0 -0
- /package/{util.ts → src/util.ts} +0 -0
package/dist/app.js
CHANGED
|
@@ -1,328 +1,2082 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
console.log(`
|
|
8
|
-
Usage
|
|
9
|
-
|
|
10
|
-
$ ${getBinaryName()} run <options>
|
|
11
|
-
$ ${getBinaryName()} deploy <options>
|
|
12
|
-
$ ${getBinaryName()} delete <options>
|
|
13
|
-
`);
|
|
1
|
+
// src/selector.ts
|
|
2
|
+
function getSelectorDesc(selector) {
|
|
3
|
+
return Object.keys(selector).map((name) => {
|
|
4
|
+
return ` --${name.padEnd(14)}${selector[name].desc || selector[name].description || ""}`;
|
|
5
|
+
}).join(`
|
|
6
|
+
`);
|
|
14
7
|
}
|
|
15
|
-
function
|
|
16
|
-
|
|
8
|
+
function getSelectorFlags(selector) {
|
|
9
|
+
return Object.keys(selector).reduce((acc, name) => {
|
|
10
|
+
const flag = {
|
|
11
|
+
type: selector[name].type || "string",
|
|
12
|
+
...selector[name]
|
|
13
|
+
};
|
|
14
|
+
if (!selector[name].optional && selector[name].isRequired !== false) {
|
|
15
|
+
flag.isRequired = true;
|
|
16
|
+
}
|
|
17
|
+
return { ...acc, [name]: flag };
|
|
18
|
+
}, {});
|
|
17
19
|
}
|
|
18
|
-
function
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
20
|
+
function toSelectorString(selectorFlags, delim = ",") {
|
|
21
|
+
return Object.keys(selectorFlags).sort().map((flag) => {
|
|
22
|
+
return `${flag}=${selectorFlags[flag]}`;
|
|
23
|
+
}).join(delim);
|
|
24
|
+
}
|
|
25
|
+
function normalizeSelectorValue(v) {
|
|
26
|
+
return v.replace(/[^A-Za-z0-9]+/g, "-");
|
|
27
|
+
}
|
|
28
|
+
function getJobName(name, selectorFlags, mode) {
|
|
29
|
+
const selectorNamePart = Object.keys(selectorFlags).sort().map((name2) => selectorFlags[name2]).join("-");
|
|
30
|
+
let jobName = name;
|
|
31
|
+
if (mode) {
|
|
32
|
+
jobName += `-${mode}`;
|
|
33
|
+
}
|
|
34
|
+
if (selectorNamePart.length > 0) {
|
|
35
|
+
jobName += `-${normalizeSelectorValue(selectorNamePart)}`;
|
|
36
|
+
}
|
|
37
|
+
return jobName;
|
|
38
|
+
}
|
|
39
|
+
// src/env.ts
|
|
40
|
+
function ensureAndGet(envName, defaultValue) {
|
|
41
|
+
return process.env[envName] || defaultValue;
|
|
42
|
+
}
|
|
43
|
+
function getEnvironment() {
|
|
44
|
+
return ensureAndGet("SKYNET_ENVIRONMENT", "dev");
|
|
45
|
+
}
|
|
46
|
+
function getEnvOrThrow(envName) {
|
|
47
|
+
if (!process.env[envName]) {
|
|
48
|
+
throw new Error(`Must set environment variable ${envName}`);
|
|
49
|
+
}
|
|
50
|
+
return process.env[envName];
|
|
51
|
+
}
|
|
52
|
+
function isProduction() {
|
|
53
|
+
return getEnvironment() === "prd";
|
|
54
|
+
}
|
|
55
|
+
function isDev() {
|
|
56
|
+
return getEnvironment() === "dev";
|
|
57
|
+
}
|
|
58
|
+
// src/log.ts
|
|
59
|
+
function isObject(a) {
|
|
60
|
+
return !!a && a.constructor === Object;
|
|
61
|
+
}
|
|
62
|
+
function print(o) {
|
|
63
|
+
if (Array.isArray(o)) {
|
|
64
|
+
return `[${o.map(print).join(", ")}]`;
|
|
65
|
+
}
|
|
66
|
+
if (isObject(o)) {
|
|
67
|
+
return `{${Object.keys(o).map((k) => `${k}: ${o[k]}`).join(", ")}}`;
|
|
68
|
+
}
|
|
69
|
+
return `${o}`;
|
|
70
|
+
}
|
|
71
|
+
function getLine(params) {
|
|
72
|
+
let line = "";
|
|
73
|
+
for (let i = 0, l = params.length;i < l; i++) {
|
|
74
|
+
line += `${print(params[i])} `.replace(/\n/gm, "\t");
|
|
75
|
+
}
|
|
76
|
+
return line.trim();
|
|
77
|
+
}
|
|
78
|
+
function timestamp() {
|
|
79
|
+
return new Date().toISOString();
|
|
80
|
+
}
|
|
81
|
+
var inline = {
|
|
82
|
+
debug: function(...args) {
|
|
83
|
+
if (true) {
|
|
84
|
+
console.log(`${timestamp()} ${getLine(args)}`);
|
|
27
85
|
}
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
86
|
+
},
|
|
87
|
+
log: function(...args) {
|
|
88
|
+
if (true) {
|
|
89
|
+
console.log(`${timestamp()} ${getLine(args)}`);
|
|
31
90
|
}
|
|
32
|
-
}
|
|
33
|
-
function
|
|
34
|
-
if (
|
|
35
|
-
|
|
36
|
-
|
|
91
|
+
},
|
|
92
|
+
error: function(...args) {
|
|
93
|
+
if (true) {
|
|
94
|
+
console.error(`${timestamp()} ${getLine(args)}`);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
};
|
|
98
|
+
var logger = {
|
|
99
|
+
debug: function(...args) {
|
|
100
|
+
if (true) {
|
|
101
|
+
console.log(`[${timestamp()}]`, ...args);
|
|
102
|
+
}
|
|
103
|
+
},
|
|
104
|
+
log: function(...args) {
|
|
105
|
+
if (true) {
|
|
106
|
+
console.log(`[${timestamp()}]`, ...args);
|
|
37
107
|
}
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
108
|
+
},
|
|
109
|
+
error: function(...args) {
|
|
110
|
+
if (true) {
|
|
111
|
+
console.error(`[${timestamp()}]`, ...args);
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
};
|
|
115
|
+
// src/api.ts
|
|
116
|
+
import osModule from "os";
|
|
117
|
+
import express from "express";
|
|
118
|
+
import meow from "meow";
|
|
119
|
+
async function logStartMiddleware(_, res, next) {
|
|
120
|
+
const start = new Date;
|
|
121
|
+
res.set("x-requested-at", start.toISOString());
|
|
122
|
+
next();
|
|
123
|
+
}
|
|
124
|
+
async function contextMiddleware(_, res, next) {
|
|
125
|
+
res.set("x-instance-id", osModule.hostname());
|
|
126
|
+
next();
|
|
127
|
+
}
|
|
128
|
+
async function logEndMiddleware(req, res, next) {
|
|
129
|
+
const requestedAt = res.get("x-requested-at");
|
|
130
|
+
if (!requestedAt) {
|
|
131
|
+
inline.log("missing x-requested-at header");
|
|
132
|
+
next();
|
|
133
|
+
return;
|
|
134
|
+
}
|
|
135
|
+
const start = new Date(requestedAt).getTime();
|
|
136
|
+
const end = new Date().getTime();
|
|
137
|
+
const logInfo = {
|
|
138
|
+
start,
|
|
139
|
+
end,
|
|
140
|
+
elapsed: `${end - start}ms`,
|
|
141
|
+
endpoint: req.path,
|
|
142
|
+
host: req.hostname,
|
|
143
|
+
status: res.statusCode
|
|
144
|
+
};
|
|
145
|
+
if (res.statusMessage) {
|
|
146
|
+
logInfo.errorMessage = res.statusMessage;
|
|
147
|
+
}
|
|
148
|
+
inline.log(JSON.stringify(logInfo));
|
|
149
|
+
next();
|
|
150
|
+
}
|
|
151
|
+
var apiKeyMiddleware = (key) => {
|
|
152
|
+
async function requireAPIKey(req, res, next) {
|
|
153
|
+
try {
|
|
154
|
+
const apiKey = req.get("x-api-key") || req.query["api-key"];
|
|
155
|
+
if (!apiKey) {
|
|
156
|
+
inline.log("request without api key");
|
|
157
|
+
res.status(400).send("require x-api-key header");
|
|
158
|
+
return;
|
|
159
|
+
}
|
|
160
|
+
if (typeof key === "string") {
|
|
161
|
+
if (apiKey !== key) {
|
|
162
|
+
inline.log("request has an invalid api key");
|
|
163
|
+
res.status(400).send("invalid api key");
|
|
164
|
+
return;
|
|
45
165
|
}
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
166
|
+
inline.log(`requested by valid key ${key.slice(0, 6)}`);
|
|
167
|
+
} else {
|
|
168
|
+
const name = key[apiKey];
|
|
169
|
+
if (!name) {
|
|
170
|
+
inline.log("request has an invalid api key");
|
|
171
|
+
res.status(400).send("invalid api key");
|
|
172
|
+
return;
|
|
51
173
|
}
|
|
52
|
-
|
|
53
|
-
|
|
174
|
+
inline.log(`requested by authorized user ${name}`);
|
|
175
|
+
}
|
|
176
|
+
next();
|
|
177
|
+
} catch (err) {
|
|
178
|
+
inline.log("check api key error", err);
|
|
179
|
+
res.status(500).send("internal error");
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
return requireAPIKey;
|
|
183
|
+
};
|
|
184
|
+
async function startApiApp({
|
|
185
|
+
binaryName,
|
|
186
|
+
name,
|
|
187
|
+
selector = {},
|
|
188
|
+
routes,
|
|
189
|
+
serve,
|
|
190
|
+
beforeListen
|
|
191
|
+
}) {
|
|
192
|
+
const app = express();
|
|
193
|
+
app.use(express.json({ limit: "20mb" }));
|
|
194
|
+
const cli = meow(`
|
|
195
|
+
Usage
|
|
196
|
+
$ ${binaryName} <options>
|
|
197
|
+
|
|
198
|
+
Options
|
|
199
|
+
${getSelectorDesc(selector)}
|
|
200
|
+
--verbose Output debug messages
|
|
201
|
+
`, {
|
|
202
|
+
importMeta: import.meta,
|
|
203
|
+
description: false,
|
|
204
|
+
version: false,
|
|
205
|
+
flags: {
|
|
206
|
+
...getSelectorFlags(selector),
|
|
207
|
+
verbose: {
|
|
208
|
+
type: "boolean",
|
|
209
|
+
default: false
|
|
210
|
+
}
|
|
211
|
+
}
|
|
212
|
+
});
|
|
213
|
+
const { verbose, ...selectorFlags } = cli.flags;
|
|
214
|
+
for (const route of routes) {
|
|
215
|
+
const method = route.method ? route.method.toLowerCase() : "get";
|
|
216
|
+
const middlewares = route.middlewares || [];
|
|
217
|
+
if (route.protected) {
|
|
218
|
+
if (!serve.apiKey) {
|
|
219
|
+
throw new Error("serve.apiKey is required for protected route");
|
|
220
|
+
}
|
|
221
|
+
middlewares.unshift(apiKeyMiddleware(serve.apiKey));
|
|
222
|
+
}
|
|
223
|
+
if (app[method]) {
|
|
224
|
+
if (verbose) {
|
|
225
|
+
inline.log(`registering ${method} ${route.path}`);
|
|
226
|
+
}
|
|
227
|
+
app[method](route.path, contextMiddleware, logStartMiddleware, ...middlewares, async (req, res, next) => {
|
|
228
|
+
try {
|
|
229
|
+
await route.handler({ req, res, ...selectorFlags });
|
|
230
|
+
} catch (routeErr) {
|
|
231
|
+
if (routeErr instanceof Error) {
|
|
232
|
+
inline.log("caught route err", routeErr, routeErr.stack);
|
|
233
|
+
res.status(500).send(`internal server error: ${routeErr.message}`);
|
|
234
|
+
} else {
|
|
235
|
+
inline.log("caught route err", routeErr);
|
|
236
|
+
res.status(500).send("internal server error");
|
|
237
|
+
}
|
|
54
238
|
}
|
|
55
|
-
|
|
239
|
+
next();
|
|
240
|
+
}, logEndMiddleware);
|
|
241
|
+
}
|
|
242
|
+
}
|
|
243
|
+
if (!routes.some((r) => r.path === "/" && r.method?.toUpperCase() === "GET")) {
|
|
244
|
+
app.get("/", (_, res) => {
|
|
245
|
+
res.send("ok");
|
|
246
|
+
});
|
|
247
|
+
}
|
|
248
|
+
if (beforeListen) {
|
|
249
|
+
await beforeListen({ app });
|
|
250
|
+
}
|
|
251
|
+
app.listen(serve.port, () => {
|
|
252
|
+
if (isProduction()) {
|
|
253
|
+
inline.log(`${name} listening at https://api.wf.corp.certik.com${serve.prefix}`);
|
|
254
|
+
} else {
|
|
255
|
+
inline.log(`${name} listening at http://localhost:${serve.port}`);
|
|
256
|
+
}
|
|
257
|
+
});
|
|
56
258
|
}
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
259
|
+
|
|
260
|
+
// src/object-hash.ts
|
|
261
|
+
import xh from "@node-rs/xxhash";
|
|
262
|
+
function getHash(obj) {
|
|
263
|
+
const xxh3 = xh.xxh3.Xxh3.withSeed();
|
|
264
|
+
hash(obj, xxh3);
|
|
265
|
+
return xxh3.digest().toString(16);
|
|
266
|
+
}
|
|
267
|
+
function hash(obj, xxh3) {
|
|
268
|
+
if (obj === null) {
|
|
269
|
+
xxh3.update("null");
|
|
270
|
+
} else if (obj === undefined) {
|
|
271
|
+
xxh3.update("undefined");
|
|
272
|
+
} else if (typeof obj === "string") {
|
|
273
|
+
xxh3.update(obj);
|
|
274
|
+
} else if (typeof obj === "number") {
|
|
275
|
+
xxh3.update(obj.toString());
|
|
276
|
+
} else if (typeof obj === "boolean") {
|
|
277
|
+
xxh3.update(obj.toString());
|
|
278
|
+
} else if (typeof obj === "bigint") {
|
|
279
|
+
xxh3.update(obj.toString());
|
|
280
|
+
} else if (obj instanceof Date) {
|
|
281
|
+
xxh3.update(obj.toISOString());
|
|
282
|
+
} else if (Array.isArray(obj)) {
|
|
283
|
+
arrayHash(obj, xxh3);
|
|
284
|
+
} else if (obj instanceof Set) {
|
|
285
|
+
setHash(obj, xxh3);
|
|
286
|
+
} else if (obj instanceof Map) {
|
|
287
|
+
mapHash(obj, xxh3);
|
|
288
|
+
} else if (typeof obj === "object") {
|
|
289
|
+
objectHash(obj, xxh3);
|
|
290
|
+
} else {
|
|
291
|
+
throw new Error(`Unsupported type: ${obj}`);
|
|
292
|
+
}
|
|
293
|
+
}
|
|
294
|
+
function arrayHash(array, xxh3) {
|
|
295
|
+
xxh3.update("[");
|
|
296
|
+
for (const obj of array) {
|
|
297
|
+
hash(obj, xxh3);
|
|
298
|
+
}
|
|
299
|
+
xxh3.update("]");
|
|
300
|
+
}
|
|
301
|
+
function setHash(_set, _xxh3) {
|
|
302
|
+
throw new Error("Set hashing not implemented");
|
|
303
|
+
}
|
|
304
|
+
function mapHash(map, xxh3) {
|
|
305
|
+
const array = Array.from(map.entries()).sort(([aKey], [bKey]) => aKey.localeCompare(bKey));
|
|
306
|
+
for (const [key, value] of array) {
|
|
307
|
+
hash(key, xxh3);
|
|
308
|
+
hash(value, xxh3);
|
|
309
|
+
}
|
|
310
|
+
}
|
|
311
|
+
function objectHash(obj, xxh3) {
|
|
312
|
+
const array = Object.entries(obj).sort(([aKey], [bKey]) => aKey.localeCompare(bKey));
|
|
313
|
+
for (const [key, value] of array) {
|
|
314
|
+
hash(key, xxh3);
|
|
315
|
+
hash(value, xxh3);
|
|
316
|
+
}
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
// src/availability.ts
|
|
320
|
+
import pThrottle from "p-throttle";
|
|
321
|
+
import pMemoize from "p-memoize";
|
|
322
|
+
import QuickLRU from "quick-lru";
|
|
323
|
+
async function wait(time) {
|
|
324
|
+
return new Promise((resolve) => {
|
|
325
|
+
setTimeout(resolve, time);
|
|
326
|
+
});
|
|
327
|
+
}
|
|
328
|
+
async function exponentialRetry(func, {
|
|
329
|
+
maxRetry,
|
|
330
|
+
initialDuration,
|
|
331
|
+
growFactor,
|
|
332
|
+
test,
|
|
333
|
+
verbose
|
|
334
|
+
}) {
|
|
335
|
+
let retries = maxRetry;
|
|
336
|
+
let duration = initialDuration || 5000;
|
|
337
|
+
const growFactorFinal = growFactor || 2;
|
|
338
|
+
let result = await func();
|
|
339
|
+
while (!test(result) && retries > 0) {
|
|
340
|
+
if (verbose) {
|
|
341
|
+
console.log("failed attempt result", result);
|
|
342
|
+
console.log(`sleep for ${duration}ms after failed attempt, remaining ${retries} attempts`);
|
|
343
|
+
}
|
|
344
|
+
retries = retries - 1;
|
|
345
|
+
await wait(duration);
|
|
346
|
+
result = await func();
|
|
347
|
+
duration = duration * growFactorFinal;
|
|
348
|
+
}
|
|
349
|
+
if (verbose) {
|
|
350
|
+
console.log(`function to retry ends with status ${test(result)}, number of retries done: ${maxRetry - retries}}`);
|
|
351
|
+
}
|
|
352
|
+
return result;
|
|
353
|
+
}
|
|
354
|
+
function withRetry(func, options) {
|
|
355
|
+
let retries = options?.maxRetry || 3;
|
|
356
|
+
let duration = options?.initialDuration || 500;
|
|
357
|
+
const growFactorFinal = options?.growFactor || 2;
|
|
358
|
+
return async (...args) => {
|
|
359
|
+
do {
|
|
360
|
+
try {
|
|
361
|
+
return await func(...args);
|
|
362
|
+
} catch (error) {
|
|
363
|
+
retries = retries - 1;
|
|
364
|
+
if (retries <= 0) {
|
|
365
|
+
throw error;
|
|
63
366
|
}
|
|
367
|
+
await wait(duration);
|
|
368
|
+
duration = duration * growFactorFinal;
|
|
369
|
+
}
|
|
370
|
+
} while (retries > 0);
|
|
371
|
+
throw new Error("unreachable");
|
|
372
|
+
};
|
|
373
|
+
}
|
|
374
|
+
function memoize(func, options) {
|
|
375
|
+
if (!options) {
|
|
376
|
+
options = {};
|
|
377
|
+
}
|
|
378
|
+
if (!options.cache) {
|
|
379
|
+
options.cache = new QuickLRU({ maxSize: options.lruMaxSize || 1e4 });
|
|
380
|
+
}
|
|
381
|
+
if (!options.cacheKey) {
|
|
382
|
+
options.cacheKey = (args) => getHash(args);
|
|
383
|
+
}
|
|
384
|
+
return pMemoize(func, options);
|
|
385
|
+
}
|
|
386
|
+
// src/util.ts
|
|
387
|
+
function range(startAt, endAt, step) {
|
|
388
|
+
const arr = [];
|
|
389
|
+
for (let i = startAt;i <= endAt; i += step) {
|
|
390
|
+
arr.push([i, Math.min(endAt, i + step - 1)]);
|
|
391
|
+
}
|
|
392
|
+
return arr;
|
|
393
|
+
}
|
|
394
|
+
function arrayGroup(array, groupSize) {
|
|
395
|
+
const groups = [];
|
|
396
|
+
for (let i = 0;i < array.length; i += groupSize) {
|
|
397
|
+
groups.push(array.slice(i, i + groupSize));
|
|
398
|
+
}
|
|
399
|
+
return groups;
|
|
400
|
+
}
|
|
401
|
+
function fillRange(start, end) {
|
|
402
|
+
const result = [];
|
|
403
|
+
for (let i = start;i <= end; i++) {
|
|
404
|
+
result.push(i);
|
|
405
|
+
}
|
|
406
|
+
return result;
|
|
407
|
+
}
|
|
408
|
+
// src/dynamodb.ts
|
|
409
|
+
import {
|
|
410
|
+
DynamoDBDocumentClient,
|
|
411
|
+
ScanCommand,
|
|
412
|
+
BatchWriteCommand,
|
|
413
|
+
GetCommand,
|
|
414
|
+
PutCommand,
|
|
415
|
+
QueryCommand,
|
|
416
|
+
UpdateCommand
|
|
417
|
+
} from "@aws-sdk/lib-dynamodb";
|
|
418
|
+
import { DynamoDBClient, DescribeTableCommand } from "@aws-sdk/client-dynamodb";
|
|
419
|
+
var _dynamoDB;
|
|
420
|
+
var _docClient;
|
|
421
|
+
function getDynamoDB(forceNew = false) {
|
|
422
|
+
if (!_dynamoDB || forceNew) {
|
|
423
|
+
_dynamoDB = new DynamoDBClient;
|
|
424
|
+
}
|
|
425
|
+
return _dynamoDB;
|
|
426
|
+
}
|
|
427
|
+
function getDocClient(forceNew = false) {
|
|
428
|
+
const marshallOptions = {
|
|
429
|
+
convertEmptyValues: true,
|
|
430
|
+
removeUndefinedValues: true,
|
|
431
|
+
convertClassInstanceToMap: true
|
|
432
|
+
};
|
|
433
|
+
const unmarshallOptions = {
|
|
434
|
+
wrapNumbers: false
|
|
435
|
+
};
|
|
436
|
+
if (!_docClient || forceNew) {
|
|
437
|
+
_docClient = DynamoDBDocumentClient.from(getDynamoDB(), {
|
|
438
|
+
marshallOptions,
|
|
439
|
+
unmarshallOptions
|
|
64
440
|
});
|
|
65
|
-
|
|
441
|
+
}
|
|
442
|
+
return _docClient;
|
|
66
443
|
}
|
|
67
|
-
function
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
444
|
+
async function scanWholeTable(options) {
|
|
445
|
+
const dynamodb = getDocClient();
|
|
446
|
+
let items = [];
|
|
447
|
+
let count = 0;
|
|
448
|
+
let scannedCount = 0;
|
|
449
|
+
let data = await dynamodb.send(new ScanCommand(options));
|
|
450
|
+
while (data.LastEvaluatedKey) {
|
|
451
|
+
if (data.Items) {
|
|
452
|
+
items = items.concat(data.Items);
|
|
453
|
+
}
|
|
454
|
+
count += data.Count || 0;
|
|
455
|
+
scannedCount += data.ScannedCount || 0;
|
|
456
|
+
data = await dynamodb.send(new ScanCommand({ ...options, ExclusiveStartKey: data.LastEvaluatedKey }));
|
|
457
|
+
}
|
|
458
|
+
if (data.Items) {
|
|
459
|
+
items = items.concat(data.Items);
|
|
460
|
+
}
|
|
461
|
+
count += data.Count || 0;
|
|
462
|
+
scannedCount += data.ScannedCount || 0;
|
|
463
|
+
return {
|
|
464
|
+
Items: items,
|
|
465
|
+
Count: count,
|
|
466
|
+
ScannedCount: scannedCount
|
|
467
|
+
};
|
|
468
|
+
}
|
|
469
|
+
async function batchCreateRecords(tableName, records, maxWritingCapacity, verbose = false) {
|
|
470
|
+
if (verbose) {
|
|
471
|
+
console.log(`creating ${records.length} items in ${tableName}`);
|
|
472
|
+
}
|
|
473
|
+
const docClient = getDocClient();
|
|
474
|
+
let remainingItems = records;
|
|
475
|
+
let prevRemainingCount = remainingItems.length + 1;
|
|
476
|
+
let factor = 1;
|
|
477
|
+
let rejection = undefined;
|
|
478
|
+
while (remainingItems.length > 0 && factor <= 128 && !rejection) {
|
|
479
|
+
if (prevRemainingCount === remainingItems.length) {
|
|
480
|
+
await wait(5000 * factor);
|
|
481
|
+
factor = factor * 2;
|
|
71
482
|
}
|
|
72
|
-
if (
|
|
73
|
-
|
|
483
|
+
if (factor >= 32) {
|
|
484
|
+
console.log(`WARNING: no progress for a long time for batchCreateRecords, please check`);
|
|
74
485
|
}
|
|
75
|
-
|
|
76
|
-
|
|
486
|
+
const slices = arrayGroup(remainingItems.slice(0, maxWritingCapacity), 25);
|
|
487
|
+
const results = await Promise.allSettled(slices.map((rs) => docClient.send(new BatchWriteCommand({
|
|
488
|
+
RequestItems: {
|
|
489
|
+
[tableName]: rs.map((record) => ({ PutRequest: { Item: record } }))
|
|
490
|
+
}
|
|
491
|
+
}))));
|
|
492
|
+
const isFulfilled = (p) => p.status === "fulfilled";
|
|
493
|
+
const isRejected = (p) => p.status === "rejected";
|
|
494
|
+
prevRemainingCount = remainingItems.length;
|
|
495
|
+
remainingItems = remainingItems.slice(maxWritingCapacity);
|
|
496
|
+
results.forEach((rs, idx) => {
|
|
497
|
+
if (isRejected(rs)) {
|
|
498
|
+
remainingItems = remainingItems.concat(slices[idx]);
|
|
499
|
+
rejection = rs;
|
|
500
|
+
} else if (isFulfilled(rs) && rs.value.UnprocessedItems && Object.keys(rs.value.UnprocessedItems).length > 0) {
|
|
501
|
+
const unprocessedItems = rs.value.UnprocessedItems[tableName].map((it) => it.PutRequest?.Item ?? []).flat();
|
|
502
|
+
remainingItems = remainingItems.concat(unprocessedItems);
|
|
503
|
+
}
|
|
504
|
+
});
|
|
505
|
+
if (verbose) {
|
|
506
|
+
console.log(`processed=${prevRemainingCount - remainingItems.length}, remaining=${remainingItems.length}`);
|
|
77
507
|
}
|
|
78
|
-
|
|
508
|
+
}
|
|
509
|
+
if (rejection) {
|
|
510
|
+
console.log("batchCreateRecords rejected", rejection);
|
|
511
|
+
throw new Error(`batchCreateRecords rejected, failed items=${remainingItems.length}`);
|
|
512
|
+
}
|
|
513
|
+
if (remainingItems.length > 0) {
|
|
514
|
+
console.log(`failed batchCreateRecords, failed items=${remainingItems.length}`);
|
|
515
|
+
throw new Error(`batchCreateRecords retry failed, failed items=${remainingItems.length}`);
|
|
516
|
+
}
|
|
79
517
|
}
|
|
80
|
-
function
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
518
|
+
async function createRecord(tableName, fields, verbose = false) {
|
|
519
|
+
if (verbose) {
|
|
520
|
+
console.log("creating", tableName, fields);
|
|
521
|
+
}
|
|
522
|
+
const docClient = getDocClient();
|
|
523
|
+
const params = {
|
|
524
|
+
TableName: tableName,
|
|
525
|
+
Item: fields
|
|
526
|
+
};
|
|
527
|
+
return docClient.send(new PutCommand(params));
|
|
528
|
+
}
|
|
529
|
+
async function readRecord(tableName, key, verbose = false) {
|
|
530
|
+
if (verbose) {
|
|
531
|
+
console.log("reading", tableName, key);
|
|
532
|
+
}
|
|
533
|
+
const docClient = getDocClient();
|
|
534
|
+
const record = await docClient.send(new GetCommand({
|
|
535
|
+
TableName: tableName,
|
|
536
|
+
Key: key
|
|
537
|
+
}));
|
|
538
|
+
return record.Item;
|
|
539
|
+
}
|
|
540
|
+
async function getRecordsByKey(tableName, keys, indexName) {
|
|
541
|
+
const docClient = getDocClient();
|
|
542
|
+
const keyNames = Object.keys(keys);
|
|
543
|
+
const conditionExpression = keyNames.map((key) => `#${key} = :${key}`).join(" and ");
|
|
544
|
+
const params = {
|
|
545
|
+
TableName: tableName,
|
|
546
|
+
KeyConditionExpression: conditionExpression,
|
|
547
|
+
ExpressionAttributeNames: generateExpressionNames(keyNames),
|
|
548
|
+
ExpressionAttributeValues: generateExpressionValues(keyNames, keys)
|
|
549
|
+
};
|
|
550
|
+
if (indexName) {
|
|
551
|
+
params.IndexName = indexName;
|
|
552
|
+
}
|
|
553
|
+
try {
|
|
554
|
+
let data = await docClient.send(new QueryCommand(params));
|
|
555
|
+
let items = data.Items ?? [];
|
|
556
|
+
while (data.LastEvaluatedKey) {
|
|
557
|
+
data = await docClient.send(new QueryCommand({
|
|
558
|
+
...params,
|
|
559
|
+
ExclusiveStartKey: data.LastEvaluatedKey
|
|
560
|
+
}));
|
|
561
|
+
if (data.Items) {
|
|
562
|
+
items = items.concat(data.Items);
|
|
563
|
+
}
|
|
84
564
|
}
|
|
85
|
-
|
|
86
|
-
|
|
565
|
+
return items;
|
|
566
|
+
} catch (err) {
|
|
567
|
+
console.log(err);
|
|
568
|
+
if (err instanceof Error && "statusCode" in err && err.statusCode === 400) {
|
|
569
|
+
return null;
|
|
87
570
|
}
|
|
88
|
-
|
|
571
|
+
throw err;
|
|
572
|
+
}
|
|
89
573
|
}
|
|
90
|
-
function
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
574
|
+
async function getRecordByKey(tableName, keys, indexName) {
|
|
575
|
+
if (indexName) {
|
|
576
|
+
const records = await getRecordsByKey(tableName, keys, indexName);
|
|
577
|
+
if (records) {
|
|
578
|
+
return records[0];
|
|
579
|
+
} else {
|
|
580
|
+
return null;
|
|
581
|
+
}
|
|
582
|
+
} else {
|
|
583
|
+
return readRecord(tableName, keys);
|
|
584
|
+
}
|
|
585
|
+
}
|
|
586
|
+
function generateExpressionNames(keys) {
|
|
587
|
+
return keys.reduce((acc, key) => ({ ...acc, [`#${key}`]: key }), {});
|
|
588
|
+
}
|
|
589
|
+
function generateExpressionValues(keys, fields) {
|
|
590
|
+
return keys.reduce((acc, key) => ({ ...acc, [`:${key}`]: fields[key] }), {});
|
|
591
|
+
}
|
|
592
|
+
async function updateRecordByKey(tableName, idKey, fields, conditionExpressions = null, verbose = false) {
|
|
593
|
+
if (verbose) {
|
|
594
|
+
console.log("update", tableName, idKey, fields);
|
|
595
|
+
}
|
|
596
|
+
const docClient = getDocClient();
|
|
597
|
+
const idKeyNames = Object.keys(idKey);
|
|
598
|
+
const fieldsToDelete = Object.keys(fields).filter((f) => fields[f] === undefined);
|
|
599
|
+
const fieldsToUpdate = Object.keys(fields).filter((k) => !idKeyNames.includes(k) && !fieldsToDelete.includes(k));
|
|
600
|
+
let data;
|
|
601
|
+
if (fieldsToDelete.length > 0) {
|
|
602
|
+
if (verbose) {
|
|
603
|
+
console.log("delete fields", tableName, fieldsToDelete);
|
|
604
|
+
}
|
|
605
|
+
const deleteParams = {
|
|
606
|
+
TableName: tableName,
|
|
607
|
+
Key: idKey,
|
|
608
|
+
ExpressionAttributeNames: generateExpressionNames(fieldsToDelete),
|
|
609
|
+
UpdateExpression: `REMOVE ${fieldsToDelete.map((f) => `#${f}`).join(", ")}`,
|
|
610
|
+
ReturnValues: "ALL_NEW"
|
|
611
|
+
};
|
|
612
|
+
if (conditionExpressions) {
|
|
613
|
+
deleteParams.ConditionExpression = conditionExpressions;
|
|
614
|
+
}
|
|
615
|
+
data = await docClient.send(new UpdateCommand(deleteParams));
|
|
616
|
+
}
|
|
617
|
+
if (fieldsToUpdate.length > 0) {
|
|
618
|
+
if (verbose) {
|
|
619
|
+
console.log("update fields", tableName, fieldsToUpdate);
|
|
620
|
+
}
|
|
621
|
+
const updateExpressions = fieldsToUpdate.map((key) => `#${key} = :${key}`);
|
|
622
|
+
const params = {
|
|
623
|
+
TableName: tableName,
|
|
624
|
+
Key: idKey,
|
|
625
|
+
ExpressionAttributeNames: generateExpressionNames(fieldsToUpdate),
|
|
626
|
+
ExpressionAttributeValues: generateExpressionValues(fieldsToUpdate, fields),
|
|
627
|
+
UpdateExpression: `SET ${updateExpressions.join(", ")}`,
|
|
628
|
+
ReturnValues: "ALL_NEW"
|
|
629
|
+
};
|
|
630
|
+
if (conditionExpressions) {
|
|
631
|
+
params.ConditionExpression = conditionExpressions;
|
|
632
|
+
}
|
|
633
|
+
data = await docClient.send(new UpdateCommand(params));
|
|
634
|
+
}
|
|
635
|
+
return data?.Attributes;
|
|
636
|
+
}
|
|
637
|
+
async function batchDeleteRecords(tableName, keys) {
|
|
638
|
+
const docClient = getDocClient();
|
|
639
|
+
for (let start = 0;start < keys.length; start += 25) {
|
|
640
|
+
const slice = keys.slice(start, start + 25);
|
|
641
|
+
await docClient.send(new BatchWriteCommand({
|
|
642
|
+
RequestItems: {
|
|
643
|
+
[tableName]: slice.map((key) => {
|
|
644
|
+
return { DeleteRequest: { Key: key } };
|
|
645
|
+
})
|
|
646
|
+
}
|
|
647
|
+
}));
|
|
648
|
+
}
|
|
649
|
+
}
|
|
650
|
+
function getKeyName(keySchema, type) {
|
|
651
|
+
const key = keySchema.find((k) => k.KeyType === type);
|
|
652
|
+
return key?.AttributeName;
|
|
653
|
+
}
|
|
654
|
+
function getIndexKeyName(globalSecondaryIndexes, indexName, type) {
|
|
655
|
+
const idx = globalSecondaryIndexes.find((i) => i.IndexName === indexName);
|
|
656
|
+
return idx?.KeySchema && getKeyName(idx.KeySchema, type);
|
|
657
|
+
}
|
|
658
|
+
/**
 * Delete every record whose hash key equals `hashKeyValue`, querying either
 * the main table or a named GSI, and deleting via the table's primary key.
 *
 * Fix/cleanup: the first-page query+delete logic was duplicated verbatim for
 * the pagination loop; it is now a single do/while that follows
 * LastEvaluatedKey, with the primary-key projection factored into `toKey`.
 *
 * @param {string} tableName - DynamoDB table to delete from.
 * @param {string|undefined} indexName - optional GSI to query by.
 * @param {*} hashKeyValue - value the (index or table) hash key must equal.
 * @param {boolean} [verbose=false] - log the number of deleted items.
 * @returns {Promise<number>} total number of items deleted.
 * @throws when the table, its key schema, or the requested key names are missing.
 */
async function deleteRecordsByHashKey(tableName, indexName, hashKeyValue, verbose = false) {
  const docClient = getDocClient();
  const meta = await getDynamoDB().send(new DescribeTableCommand({ TableName: tableName }));
  if (!meta.Table) {
    throw new Error(`cannot find table ${tableName}`);
  }
  if (indexName && !meta.Table.GlobalSecondaryIndexes) {
    throw new Error(`cannot find global secondary indexes for table ${tableName}`);
  }
  if (!meta.Table.KeySchema) {
    throw new Error(`cannot find key schema for table ${tableName}`);
  }
  // Hash key to query by: the GSI's when indexName is given, else the table's.
  const hashKeyName = indexName ? getIndexKeyName(meta.Table.GlobalSecondaryIndexes, indexName, "HASH") : getKeyName(meta.Table.KeySchema, "HASH");
  if (!hashKeyName) {
    throw new Error(`cannot find hash key name for table ${tableName}`);
  }
  // Deletes always target the table's own primary key, even for GSI queries.
  const mainHashKeyName = getKeyName(meta.Table.KeySchema, "HASH");
  if (!mainHashKeyName) {
    throw new Error(`cannot find main hash key name for table ${tableName}`);
  }
  const mainRangeKeyName = getKeyName(meta.Table.KeySchema, "RANGE");
  if (!mainRangeKeyName) {
    throw new Error(`cannot find main range key name for table ${tableName}`);
  }
  // Project an item down to the table's primary key for BatchWrite deletes.
  // (The hash-only branch is kept for safety even though the guard above
  // currently rejects tables without a range key.)
  const toKey = (item) => mainRangeKeyName ? {
    [mainHashKeyName]: item[mainHashKeyName],
    [mainRangeKeyName]: item[mainRangeKeyName]
  } : {
    [mainHashKeyName]: item[mainHashKeyName]
  };
  const params = {
    TableName: tableName,
    KeyConditionExpression: "#hashKeyName = :hashKeyValue",
    ExpressionAttributeNames: { "#hashKeyName": hashKeyName },
    ExpressionAttributeValues: { ":hashKeyValue": hashKeyValue }
  };
  if (indexName) {
    params.IndexName = indexName;
  }
  let totalDeleted = 0;
  let exclusiveStartKey;
  // Page through all matches, deleting each page before fetching the next.
  do {
    const data = await docClient.send(new QueryCommand(
      exclusiveStartKey ? { ...params, ExclusiveStartKey: exclusiveStartKey } : params
    ));
    if (data.Items) {
      await batchDeleteRecords(tableName, data.Items.map(toKey));
      totalDeleted += data.Items.length;
    }
    exclusiveStartKey = data.LastEvaluatedKey;
  } while (exclusiveStartKey);
  if (verbose) {
    console.log(`successfully delete ${totalDeleted} items`);
  }
  return totalDeleted;
}
|
|
722
|
+
// src/cli.ts
|
|
723
|
+
import path from "path";
|
|
724
|
+
import fs from "fs";
|
|
725
|
+
function getBinaryName() {
  // Basename of the script being executed: the last path.sep-delimited
  // segment of process.argv[1].
  return process.argv[1].split(path.sep).pop();
}
|
|
730
|
+
function detectSkynetDirectory() {
  // The skynet repository root is the nearest ancestor of the running
  // script that contains the SkynetAPIDefinitions.yml sentinel file.
  const sentinel = "SkynetAPIDefinitions.yml";
  return detectDirectory(process.argv[1], sentinel);
}
|
|
733
|
+
/**
 * Path of the current package directory relative to the skynet repo root,
 * normalized to forward slashes (e.g. "packages/foo").
 *
 * Fix: the original used `.replace(path.sep, "/")`, which — with a string
 * pattern — replaces only the FIRST separator, leaving later ones intact
 * (visible on Windows or for nested package paths). split/join replaces all.
 *
 * @returns {string} repo-relative package path with "/" separators.
 */
function detectWorkingDirectory() {
  const wd = detectDirectory(process.argv[1], "package.json");
  const skynetd = detectDirectory(process.argv[1], "SkynetAPIDefinitions.yml");
  return wd.slice(skynetd.length + path.sep.length).split(path.sep).join("/");
}
|
|
738
|
+
/**
 * Walk upward from the directory containing `fullBinPath` until a directory
 * holding the `sentinel` file is found; throw once the filesystem root is
 * reached without a match.
 *
 * @param {string} fullBinPath - path of the running script.
 * @param {string} [sentinel="package.json"] - marker file to look for.
 * @returns {string} the first ancestor directory containing the sentinel.
 * @throws when no ancestor contains the sentinel.
 */
function detectDirectory(fullBinPath, sentinel = "package.json") {
  let dir = path.dirname(fullBinPath);
  while (dir) {
    if (fs.existsSync(path.join(dir, sentinel))) {
      return dir;
    }
    const parent = path.dirname(dir);
    if (parent === dir) {
      // dirname is a fixed point at the filesystem root — stop climbing.
      break;
    }
    dir = parent;
  }
  throw new Error("Cannot detect current working directory");
}
|
|
753
|
+
/**
 * Path of the running script relative to its package root, normalized to
 * forward slashes (e.g. "bin/indexer").
 *
 * Fix: `.replace(path.sep, "/")` with a string pattern replaces only the
 * first separator; split/join normalizes every separator.
 *
 * @returns {string} package-relative script path with "/" separators.
 */
function detectBin() {
  const wd = detectDirectory(process.argv[1], "package.json");
  return process.argv[1].slice(wd.length + path.sep.length).split(path.sep).join("/");
}
|
|
757
|
+
// src/date.ts
|
|
758
|
+
// Milliseconds in one day (86,400,000); used for all date-window arithmetic below.
var MS_IN_A_DAY = 3600 * 24 * 1000;
|
|
759
|
+
function getDateOnly(date) {
  // ISO-8601 date part ("YYYY-MM-DD") of the given date/timestamp, in UTC.
  const [dateOnly] = new Date(date).toISOString().split("T");
  return dateOnly;
}
|
|
762
|
+
function findDateAfter(date, n) {
  // "YYYY-MM-DD" (UTC) of the moment n * 24h after `date`.
  const baseTime = new Date(date).getTime();
  return getDateOnly(new Date(baseTime + n * MS_IN_A_DAY));
}
|
|
767
|
+
/**
 * Every calendar day from `from` to `to` inclusive, as "YYYY-MM-DD" strings
 * stepped in whole 24h increments from `from`'s timestamp.
 *
 * @throws when `to` is earlier than `from`.
 */
function daysInRange(from, to) {
  const start = new Date(from).getTime();
  const end = new Date(to).getTime();
  if (start > end) {
    throw new Error(`range to date couldn't be earlier than range from date`);
  }
  const wholeDays = Math.floor((end - start) / MS_IN_A_DAY);
  return Array.from(
    { length: wholeDays + 1 },
    (_, offset) => getDateOnly(new Date(start + offset * MS_IN_A_DAY))
  );
}
|
|
780
|
+
function dateRange(from, to, step) {
  // Split the inclusive day range into windows of `step` days and return
  // each window as a [firstDay, lastDay] pair.
  const groups = arrayGroup(daysInRange(from, to), step);
  return groups.map((group) => [group[0], group[group.length - 1]]);
}
|
|
785
|
+
// src/indexer.ts
|
|
786
|
+
import meow2 from "meow";
|
|
787
|
+
// Per-environment DynamoDB table holding indexer progress/state records
// (Since/Validate/RebuildState entries used by the run modes below).
var STATE_TABLE_NAME = "skynet-" + getEnvironment() + "-indexer-state";
|
|
788
|
+
async function getIndexerLatestId(name, selectorFlags) {
  // Last id this indexer recorded as processed: the value of the
  // "<name>Since(<selector>)" state record, or undefined when never set.
  const key = { name: `${name}Since(${toSelectorString(selectorFlags)})` };
  const record = await getRecordByKey(STATE_TABLE_NAME, key);
  return record?.value;
}
|
|
794
|
+
async function getIndexerValidatedId(name, selectorFlags) {
  // Last id recorded as validated ("<name>Validate(<selector>)" record),
  // or undefined when no validation has completed.
  const key = { name: `${name}Validate(${toSelectorString(selectorFlags)})` };
  const record = await getRecordByKey(STATE_TABLE_NAME, key);
  return record ? record.value : undefined;
}
|
|
803
|
+
/**
 * Advance an indexer id by n: "date" ids (strings) move forward n days,
 * every other id kind is numeric and incremented arithmetically.
 *
 * @throws on a type mismatch between `type` and `currentId`.
 */
function increaseId(type, currentId, n) {
  if (type === "date") {
    if (typeof currentId === "string") {
      return findDateAfter(currentId, n);
    }
    throw new Error("invalid type for date id");
  }
  if (typeof currentId === "number") {
    return currentId + n;
  }
  throw new Error("Invalid type for numeric id");
}
|
|
815
|
+
/**
 * Factory for a mode-driven indexer CLI. Returns `{ run }`; `run()` parses
 * flags with meow and dispatches on --mode:
 *   reset | rebuild | resume-rebuild | validate/validation | one | range |
 *   (default) delta.
 * Progress is persisted in STATE_TABLE_NAME as "<name>Since(...)",
 * "<name>Validate(...)", "<name>RebuildState(...)" and "<name>DeltaState(...)"
 * records. Ids are either numeric ("block"-style) or "YYYY-MM-DD" dates,
 * chosen by state.type.
 */
function createModeIndexerApp({
  binaryName,
  name,
  selector = {},
  build,
  buildBatchSize = 1,
  buildConcurrency = 1,
  validate,
  validateBatchSize = 1,
  validateConcurrency = 1,
  maxRetry = 2,
  state
}) {
  // Defaults: numeric "block" ids starting at 1; getMaxId must be supplied.
  const defaultState = {
    type: "block",
    getMinId: async () => 1,
    getMaxId: async () => {
      throw new Error("must implement getMaxId");
    }
  };
  const finalState = {
    ...defaultState,
    ...state
  };
  // Type-aware window builder: date ranges for "date" ids, numeric otherwise.
  function range2(from, to, step) {
    if (typeof from === "string" && typeof to === "string") {
      if (finalState.type === "date") {
        return dateRange(from, to, step);
      }
      throw new Error("Invalid type for numeric range");
    }
    if (typeof from === "number" && typeof to === "number") {
      return range(from, to, step);
    }
    throw new Error("Invalid type for range");
  }
  // Type-aware enumeration of every id between from and to (inclusive).
  function fillRange2(from, to) {
    if (typeof from === "string" && typeof to === "string") {
      if (finalState.type === "date") {
        return daysInRange(from, to);
      }
      throw new Error("Invalid type for numeric range");
    }
    if (typeof from === "number" && typeof to === "number") {
      return fillRange(from, to);
    }
    throw new Error("Invalid type for range");
  }
  // Count of ids in [from, to].
  function offsetRange(from, to) {
    return fillRange2(from, to).length;
  }
  // Dispatch a parsed flag set to the matching run* routine.
  async function runMode(flags) {
    const { mode, from: fromUntyped, to: toUntyped, status, verbose: verboseUntyped, ...untypeSelectorFlags } = flags;
    const from = fromUntyped;
    const to = toUntyped;
    const verbose = verboseUntyped;
    const selectorFlags = untypeSelectorFlags;
    if (status) {
      // --status: print the three state records and exit without indexing.
      const stateItem = await getRecordByKey(STATE_TABLE_NAME, {
        name: `${name}RebuildState(${toSelectorString(selectorFlags)})`
      });
      const fromItem = await getRecordByKey(STATE_TABLE_NAME, {
        name: `${name}Since(${toSelectorString(selectorFlags)})`
      });
      const validateItem = await getRecordByKey(STATE_TABLE_NAME, {
        name: `${name}Validate(${toSelectorString(selectorFlags)})`
      });
      inline.log(`RebuildState=${stateItem?.value} Since=${fromItem?.value} Validated=${validateItem?.value}`);
      process.exit(0);
    }
    inline.log(`[MODE INDEXER] mode=${mode}, env=${getEnvironment()}, ${toSelectorString(selectorFlags, ", ")}`);
    if (mode === "reset") {
      await runReset(selectorFlags);
    } else if (mode === "rebuild") {
      const rebuildFrom = from || await finalState.getMinId(selectorFlags);
      const rebuildTo = to || await finalState.getMaxId(selectorFlags);
      await runReset(selectorFlags);
      await runRebuild(selectorFlags, rebuildFrom, rebuildTo, verbose);
    } else if (mode === "resume-rebuild") {
      // Continue from one past the last recorded id, falling back to min.
      const previousRebuildEnds = await getIndexerLatestId(name, selectorFlags);
      const rebuildFrom = from || previousRebuildEnds !== undefined && increaseId(finalState.type, previousRebuildEnds, 1) || await finalState.getMinId(selectorFlags);
      const rebuildTo = to || await finalState.getMaxId(selectorFlags);
      await runRebuild(selectorFlags, rebuildFrom, rebuildTo, verbose);
    } else if (mode === "validate" || mode === "validation") {
      const previousRebuildEnds = await getIndexerLatestId(name, selectorFlags);
      if (!previousRebuildEnds) {
        inline.log(`[MODE INDEXER] cannot validate without a successful rebuild`);
        process.exit(0);
      }
      const previousValidatedTo = await getIndexerValidatedId(name, selectorFlags);
      const validateFrom = from || previousValidatedTo || await finalState.getMinId(selectorFlags);
      const validateTo = to || previousRebuildEnds;
      // Only persist validation progress when --to was not forced by the user.
      const shouldSaveState = !to;
      await runValidate(selectorFlags, validateFrom, validateTo, shouldSaveState, verbose);
    } else if (mode === "one") {
      if (to) {
        inline.log("[MODE INDEXER] one mode ignores --to option. you may want to use range mode instead");
      }
      if (!from) {
        inline.log(`[MODE INDEXER] must provide --from option for one mode`);
        process.exit(1);
      }
      await runRange(selectorFlags, from, from, verbose);
    } else if (mode === "range") {
      if (!from || !to) {
        inline.log(`[MODE INDEXER] must provide --from and --to option for range mode`);
        process.exit(1);
      }
      await runRange(selectorFlags, from, to, verbose);
    } else {
      // Default: delta mode — only runs after a successful rebuild.
      const stateItem = await getRecordByKey(STATE_TABLE_NAME, {
        name: `${name}RebuildState(${toSelectorString(selectorFlags)})`
      });
      if (!stateItem || stateItem.value !== "succeed") {
        inline.log("[MODE INDEXER] skip because rebuild hasn't done yet");
        process.exit(0);
      }
      const latestId = await getIndexerLatestId(name, selectorFlags);
      if (!latestId) {
        throw new Error(`[MODE INDEXER] cannot find the latest ${finalState.type}`);
      }
      const deltaFrom = increaseId(finalState.type, latestId, 1);
      // NOTE(review): uses the raw `state` param (not finalState), so delta
      // mode throws if `state` was omitted — confirm this is intended.
      const deltaTo = await state.getMaxId(selectorFlags);
      await runDelta(selectorFlags, deltaFrom, deltaTo, verbose);
    }
  }
  // Build [from, to] without touching persisted state; exits the process.
  async function runRange(selectorFlags, from, to, verbose) {
    const startTime = Date.now();
    inline.log(`[MODE INDEXER] building range, from=${from}, to=${to}, ${toSelectorString(selectorFlags, ", ")}, batchSize=${buildBatchSize}, concurrency=${buildConcurrency}`);
    const failedIds = await execBuild(selectorFlags, from, to, verbose, false);
    if (failedIds.length > 0) {
      inline.log(`[MODE INDEXER] built with some failed ${finalState.type}`, failedIds);
      process.exit(1);
    } else {
      inline.log(`[MODE INDEXER] built successfully in ${Date.now() - startTime}ms`);
      process.exit(0);
    }
  }
  // Validate [from, to] in concurrent batches; any batch that still fails
  // after retries aborts the run.
  async function runValidate(selectorFlags, from, to, shouldSaveState, verbose) {
    if (!validate) {
      inline.log(`[MODE INDEXER] the indexer doesn't support validate mode, validate function not implemented`);
      process.exit(1);
    }
    const startTime = Date.now();
    inline.log(`[MODE INDEXER] validating, from=${from}, to=${to}, ${toSelectorString(selectorFlags, ", ")}, batchSize=${validateBatchSize}, concurrency=${validateConcurrency}`);
    const windows = range2(from, to, validateBatchSize * validateConcurrency);
    inline.log(`[MODE INDEXER] from=${from}, to=${to}, batchSize=${validateBatchSize}, concurrency=${validateConcurrency}`);
    for (const [windowStart, windowEnd] of windows) {
      inline.log(`[MODE INDEXER] validating window ${windowStart}~${windowEnd}, concurrency=${validateConcurrency}`);
      const batches = range2(windowStart, windowEnd, validateBatchSize);
      await Promise.all(batches.map(async ([batchStart, batchEnd]) => {
        const result = await exponentialRetry(async () => {
          try {
            await validate({
              ...selectorFlags,
              from: batchStart,
              to: batchEnd,
              verbose
            });
            return true;
          } catch (err) {
            inline.error(`got error in validation`, err);
            return false;
          }
        }, {
          maxRetry,
          test: (r) => r,
          verbose
        });
        if (!result) {
          throw new Error(`Terminate validation due to critical errors, from=${batchStart}, to=${batchEnd}`);
        }
      }));
      if (shouldSaveState) {
        // NOTE(review): persists the final target `to` after every window
        // while the log reports windowEnd — confirm that is intended.
        await createRecord(STATE_TABLE_NAME, {
          name: `${name}Validate(${toSelectorString(selectorFlags)})`,
          value: to
        });
        if (verbose) {
          inline.log(`[MODE INDEXER] updated processed ${finalState.type} to ${windowEnd}`);
        }
      }
    }
    inline.log(`[MODE INDEXER] validated ${offsetRange(from, to)} ${finalState.type} successfully in ${Date.now() - startTime}ms`);
  }
  // Core build loop: window -> concurrent batches -> build() with retries.
  // Returns the sorted list of ids reported or presumed failed.
  async function execBuild(selectorFlags, from, to, verbose, shouldSaveState = false) {
    let failedIds = [];
    const windows = range2(from, to, buildBatchSize * buildConcurrency);
    for (const [windowStart, windowEnd] of windows) {
      inline.log(`[MODE INDEXER] building window ${windowStart}~${windowEnd}, concurrency = ${buildConcurrency}`);
      const batches = range2(windowStart, windowEnd, buildBatchSize);
      const batchResults = await Promise.all(batches.map(async ([batchStart, batchEnd]) => await exponentialRetry(async () => {
        try {
          // build() may return the ids it failed on; falsy/empty means success.
          const ids = await build({
            ...selectorFlags,
            from: batchStart,
            to: batchEnd,
            verbose
          });
          if (ids && ids.length > 0) {
            return ids;
          } else {
            return false;
          }
        } catch (err) {
          // A thrown error marks the whole batch as failed.
          inline.error(`[MODE INDEXER] got error in build`, err);
          return fillRange2(batchStart, batchEnd);
        }
      }, {
        maxRetry,
        test: (r) => !r,
        verbose
      })));
      if (shouldSaveState) {
        await createRecord(STATE_TABLE_NAME, {
          name: `${name}Since(${toSelectorString(selectorFlags)})`,
          value: windowEnd
        });
        if (verbose) {
          inline.log(`[MODE INDEXER] updated processed ${finalState.type} to ${windowEnd}`);
        }
      }
      batchResults.forEach((ids) => {
        if (ids) {
          failedIds = failedIds.concat(ids);
        }
      });
    }
    failedIds.sort();
    return failedIds;
  }
  // Full rebuild of [from, to], tracking RebuildState around the build.
  async function runRebuild(selectorFlags, from, to, verbose) {
    const startTime = Date.now();
    inline.log(`[MODE INDEXER] rebuilding, from=${from}, to=${to}, ${toSelectorString(selectorFlags, ", ")}, batchSize=${buildBatchSize}, concurrency=${buildConcurrency}`);
    await createRecord(STATE_TABLE_NAME, {
      name: `${name}RebuildState(${toSelectorString(selectorFlags)})`,
      value: "running"
    });
    const failedIds = await execBuild(selectorFlags, from, to, verbose, true);
    await createRecord(STATE_TABLE_NAME, {
      name: `${name}Since(${toSelectorString(selectorFlags)})`,
      value: to
    });
    await createRecord(STATE_TABLE_NAME, {
      name: `${name}RebuildState(${toSelectorString(selectorFlags)})`,
      value: "succeed"
    });
    if (failedIds.length > 0) {
      inline.log(`[MODE INDEXER] built ${offsetRange(from, to)} ${finalState.type}(s) with some failed ${finalState.type}`, failedIds);
      process.exit(1);
    } else {
      inline.log(`[MODE INDEXER] built ${offsetRange(from, to)} ${finalState.type}(s) successfully in ${Date.now() - startTime}ms`);
      process.exit(0);
    }
  }
  // Incremental build; on failure rewinds Since to the first failed id.
  async function runDelta(selectorFlags, from, to, verbose) {
    const startTime = Date.now();
    if (to < from) {
      inline.log(`[MODE INDEXER] skip delta, there're no more items need to be processed, from=${from}, to=${to}, ${toSelectorString(selectorFlags, ", ")}`);
      return;
    }
    inline.log(`[MODE INDEXER] starting delta, from=${from}, to=${to}, ${toSelectorString(selectorFlags, ", ")}, batchSize=${buildBatchSize}, concurrency=${buildConcurrency}`);
    try {
      const failedIds = await execBuild(selectorFlags, from, to, verbose, true);
      if (failedIds.length > 0) {
        inline.log("[MODE INDEXER] built with some failed txs", failedIds);
        await createRecord(STATE_TABLE_NAME, {
          name: `${name}DeltaState(${toSelectorString(selectorFlags)})`,
          value: "failed"
        });
        await createRecord(STATE_TABLE_NAME, {
          name: `${name}Since(${toSelectorString(selectorFlags)})`,
          value: to < failedIds[0] ? to : failedIds[0]
        });
        process.exit(1);
      } else {
        await createRecord(STATE_TABLE_NAME, {
          name: `${name}DeltaState(${toSelectorString(selectorFlags)})`,
          value: "succeed"
        });
        await createRecord(STATE_TABLE_NAME, {
          name: `${name}Since(${toSelectorString(selectorFlags)})`,
          value: to
        });
        inline.log(`[MODE INDEXER] built successfully in ${Date.now() - startTime}ms`);
        process.exit(0);
      }
    } catch (err) {
      inline.error("[MODE INDEXER] delta build failed", from, to, err);
      process.exit(1);
    }
  }
  // Reset all persisted progress for this indexer + selector combination.
  async function runReset(selectorFlags) {
    const startTime = Date.now();
    inline.log(`[MODE INDEXER] starting reset, ${toSelectorString(selectorFlags, ", ")}`);
    inline.log("[MODE INDEXER] reset state", STATE_TABLE_NAME);
    await createRecord(STATE_TABLE_NAME, {
      name: `${name}Since(${toSelectorString(selectorFlags)})`,
      value: 0
    });
    await createRecord(STATE_TABLE_NAME, {
      name: `${name}Validate(${toSelectorString(selectorFlags)})`,
      value: 0
    });
    await createRecord(STATE_TABLE_NAME, {
      name: `${name}RebuildState(${toSelectorString(selectorFlags)})`,
      value: "init"
    });
    inline.log(`[MODE INDEXER] reset successfully in ${Date.now() - startTime}ms`);
  }
  // CLI entry point: parse flags with meow and dispatch to runMode.
  async function run() {
    if (!binaryName) {
      binaryName = getBinaryName();
    }
    const cli = meow2(`
  Usage

    $ ${binaryName} <options>

  Options
    ${selector ? getSelectorDesc(selector) : ""}
    --mode could be delta/rebuild/resume-rebuild/validate/one/range/reset
    --from min ${finalState.type} to build
    --to max ${finalState.type} to build
    --status print status of indexer and exit
    --verbose Output debug messages
`, {
      importMeta: import.meta,
      description: false,
      version: false,
      flags: {
        ...getSelectorFlags(selector),
        mode: {
          type: "string",
          default: "delta"
        },
        from: {
          aliases: ["since"],
          type: "string"
        },
        to: {
          aliases: ["until"],
          type: "string"
        },
        status: {
          type: "boolean",
          default: false
        },
        verbose: {
          type: "boolean",
          default: false
        }
      }
    });
    try {
      return runMode(cli.flags);
    } catch (err) {
      inline.error(err);
      process.exit(1);
    }
  }
  return { run };
}
|
|
125
|
-
function
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
1178
|
+
/**
 * Factory for a simple (mode-less) indexer CLI. Returns `{ run }`; `run()`
 * parses selector + --verbose flags with meow, then invokes `build(flags)`
 * once, retrying with exponentialRetry up to `maxRetry` times. No state is
 * persisted; failure exits the process with code 1.
 */
function createIndexerApp({
  binaryName,
  selector = {},
  build,
  maxRetry = 2
}) {
  async function run() {
    if (!binaryName) {
      binaryName = getBinaryName();
    }
    const cli = meow2(`
  Usage
    $ ${binaryName} <options>

  Options
    ${selector ? getSelectorDesc(selector) : ""}
    --verbose Output debug messages
`, {
      importMeta: import.meta,
      description: false,
      version: false,
      flags: {
        ...getSelectorFlags(selector),
        verbose: {
          type: "boolean",
          default: false
        }
      }
    });
    // Run build() once under retry; throws if it still fails after retries.
    async function runBuild(flags) {
      const { verbose: untypedVerbose, ...untypedSelectorFlags } = flags;
      const verbose = untypedVerbose;
      const selectorFlags = untypedSelectorFlags;
      const startTime = Date.now();
      if (Object.keys(selectorFlags).length > 0) {
        inline.log(`[INDEXER] starting build, ${toSelectorString(selectorFlags, ", ")}`);
      } else {
        inline.log(`[INDEXER] starting build`);
      }
      const result = await exponentialRetry(async () => {
        try {
          await build(flags);
          return true;
        } catch (err) {
          inline.log(`[INDEXER] got error in build`, err);
          return false;
        }
      }, {
        maxRetry,
        test: (r) => r,
        verbose
      });
      if (!result) {
        throw new Error(`[INDEXER] Build failed due to critical errors`);
      }
      inline.log(`[INDEXER] build successfully in ${Date.now() - startTime}ms`);
    }
    return runBuild(cli.flags).catch((err) => {
      inline.error(err);
      process.exit(1);
    });
  }
  return { run };
}
|
|
1242
|
+
// src/deploy.ts
|
|
1243
|
+
import fs2 from "fs/promises";
|
|
1244
|
+
import fso from "fs";
|
|
1245
|
+
import { execa } from "execa";
|
|
1246
|
+
import meow3 from "meow";
|
|
1247
|
+
import chalk from "chalk";
|
|
1248
|
+
import which from "which";
|
|
1249
|
+
// Human-friendly schedule names mapped to 7-field nomad cron expressions
// (second minute hour day-of-month month day-of-week year). Each alias is
// accepted both with and without a leading "@".
var INTERVAL_ALIASES = {
  secondly: "*/1 * * * * * *",
  "@secondly": "*/1 * * * * * *",
  minutely: "0 * * * * * *",
  "@minutely": "0 * * * * * *",
  hourly: "0 0 * * * * *",
  "@hourly": "0 0 * * * * *",
  daily: "0 0 0 * * * *",
  "@daily": "0 0 0 * * * *",
  weekly: "0 0 0 * * 0 *",
  "@weekly": "0 0 0 * * 0 *"
};
|
|
1261
|
+
/**
 * Render a Nomad job specification (HCL) for a skynet task.
 * The job runs under the raw_exec driver via `sh -c`, installing deps with
 * bun (when bun.lockb exists) or yarn before exec-ing `cmd`. Optional parts:
 * a periodic{} stanza when `cron` is set, rolling-update + count when
 * `count > 1`, and network/service/check stanzas when `service` is given.
 * Doppler env injection is always templated in; SKYNET_DEPLOYED_AT changes
 * every render so a new deployment always triggers.
 */
var genConfig = ({
  jobName,
  workingDirectory,
  cmd,
  cron,
  count,
  restart,
  killTimeout,
  cpu,
  mem,
  service,
  additionalEnv = {},
  type = "batch",
  region = "skynet-dc1",
  isProduction: isProduction2
}) => `job "${jobName}" {
  datacenters = ["${region}"]

  type = "${type}"

  ${cron ? `# Triggers periodically
  periodic {
    crons = ["${cron}"]
    prohibit_overlap = true
  }` : ""}

  constraint {
    attribute = "\${meta.has_nodejs}"
    value = "true"
  }

  constraint {
    attribute = "\${meta.has_skynet}"
    value = "true"
  }

  group "default" {
    ${count && count > 1 ? `count = ${count}` : ""}
    ${count && count > 1 ? `# Rolling Update
    update {
      max_parallel = 1
      min_healthy_time = "10s"
    }` : ""}

    reschedule {
      attempts = 0
      unlimited = false
    }

    ${service ? `# Setup Service Network
    network {
      port "http" {
        static = ${service.port}
      }
    }` : ""}

    task "main" {
      driver = "raw_exec"

      config {
        command = "sh"
        args = [
          "-c",
          "cd \${meta.skynet_code_path}/${workingDirectory} && if [ -e bun.lockb ]; then bun install --silent; else yarn install --silent; fi && exec ${cmd}"
        ]
      }

      ${service ? `# Setup API Routes
      service {
        name = "${jobName}"
        port = "http"

        tags = [
          "urlprefix-${service.prefix} strip=${service.prefix}",
        ]

        check {
          type = "http"
          path = "/"
          interval = "10s"
          timeout = "2s"
        }
      }
      ` : ""}

      # doppler integration support
      # it is always there but a project can decide to not use it
      template {
        change_mode = "restart"
        destination = "secrets/context.env"
        env = true
        data = "DOPPLER_TOKEN={{key \\"infra-nomad/doppler-token\\"}}"
      }

      # always update SKYNET_DEPLOYED_AT so that new deployment always triggers
      env {
        SKYNET_DEPLOYED_AT="${new Date().toISOString()}"
        HOME="/root"
        DOPPLER_PROJECT="${workingDirectory}"
        DOPPLER_CONFIG="${isProduction2 ? "prd" : "dev"}"
        SKYNET_ENVIRONMENT="${isProduction2 ? "prd" : "dev"}"
        ${Object.entries(additionalEnv).filter((kv) => !!kv[1]).map(([key, value]) => `${key}="${value}"`).join(`
`)}
      }

      kill_timeout = "${killTimeout || "60s"}"

      # Specify the maximum resources required to run the task,
      # include CPU and memory.
      resources {
        cpu = ${cpu} # MHz
        memory = ${mem} # MB
      }

      # Setting the server task as the leader of the task group allows Nomad to
      # signal the log shipper task to gracefully shutdown when the server exits.
      leader = true

      ${restart ? `
      # Restart the job if it fails
      restart {
        attempts = ${restart.attempts ?? 2}
        mode = "${restart.mode ?? "fail"}"
        interval = "${restart.interval ?? "30m"}"
        delay = "${restart.delay ?? "15s"}"
      }
      ` : `
      # do not retry from the periodical job will reschedule anyway
      restart {
        attempts = 0
        mode = "fail"
      }`}
    }
  }
}`;
|
|
1396
|
+
/**
 * Pre-deployment setup. Production deploys need none; dev deploys expect a
 * local nomad server and a /tmp/skynet symlink pointing at the repo root,
 * which is created here when absent.
 */
async function prepareNomad(isProduction2) {
  if (isProduction2) {
    console.log("Deploy to Production");
    return;
  }
  const skynetDir = detectSkynetDirectory();
  if (!fso.existsSync("/tmp/skynet")) {
    await execa("ln", ["-s", skynetDir, "/tmp/skynet"]);
  }
  console.log("Deploy locally, please start nomad server in a separate terminal");
  console.log(`You can start nomad server by running ${chalk.inverse(`${skynetDir}/infra-nomad/dev/start.sh`)}`);
  console.log(`Then you can visit ${chalk.underline("http://localhost:4646/ui/jobs")} to check submitted dev jobs.
`);
}
|
|
138
|
-
function
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
1410
|
+
function getNomadAddr(isProduction2) {
  // Production targets the address from the NOMAD_ADDR environment
  // variable; development always talks to the local agent.
  if (isProduction2) {
    return getEnvOrThrow("NOMAD_ADDR");
  }
  return "http://127.0.0.1:4646";
}
|
|
1413
|
+
/**
 * Locate the nomad binary on PATH; print installation guidance and throw
 * when it is not installed.
 */
async function getNomadPath() {
  try {
    const nomadPath = await which("nomad");
    return nomadPath;
  } catch (notFoundErr) {
    console.log(`Deploy requires ${chalk.bold("nomad")} binary, please follow ${chalk.underline("https://learn.hashicorp.com/tutorials/nomad/get-started-install")} for installation`, notFoundErr);
    throw new Error("missing nomad binary");
  }
}
|
|
1421
|
+
/**
 * Execute a nomad job action: stop the job, print its definition (dry run),
 * or write the definition to /tmp and submit it.
 *
 * Cleanup: the execa spawn / stdout-pipe / await sequence was duplicated in
 * the stop and run branches; it is factored into the local `execNomad`.
 *
 * @param {string} nomadPath - path to the nomad binary.
 * @param {string} nomadAddr - nomad server address (exported as NOMAD_ADDR).
 * @param {string} jobName - job identifier.
 * @param {string} nomadJobDefinition - rendered HCL job spec.
 * @param {boolean} isStop - stop the job instead of deploying.
 * @param {boolean} isDryRun - print the definition without contacting nomad.
 * @throws wraps any nomad/filesystem failure as "nomad execution error".
 */
async function runNomadJob(nomadPath, nomadAddr, jobName, nomadJobDefinition, isStop, isDryRun) {
  // Spawn nomad with NOMAD_ADDR set, streaming its stdout to ours.
  const execNomad = async (args) => {
    const nomad = execa(nomadPath, args, {
      env: {
        NOMAD_ADDR: nomadAddr
      }
    });
    nomad.stdout.pipe(process.stdout);
    await nomad;
  };
  try {
    if (isStop) {
      await execNomad(["job", "stop", jobName]);
      console.log(chalk.green(`Stopped nomad job ${jobName} in ${nomadAddr}`));
    } else if (isDryRun) {
      console.log("Definition for", jobName);
      console.log("========================================");
      console.log(nomadJobDefinition);
    } else {
      const jobFileName = `/tmp/job-${jobName}`;
      await fs2.writeFile(jobFileName, nomadJobDefinition);
      await execNomad(["job", "run", jobFileName]);
      console.log(chalk.green(`Deployed nomad job ${jobName} to ${nomadAddr}`));
    }
  } catch (nomadExecErr) {
    if (nomadExecErr instanceof Error) {
      console.log("Nomad Execution Error:");
      console.log(nomadExecErr.message);
      console.log("");
    }
    console.log(`Failed to run ${chalk.bold("nomad")} commands, please ensure nomad server is accessible at ${chalk.bold(nomadAddr)}`);
    throw new Error("nomad execution error");
  }
}
|
|
1458
|
+
// Factory for the deploy CLI of a mode-based indexer. The returned
// `deploy` parses flags via meow, renders a nomad job definition for the
// requested mode (delta/rebuild/resume-rebuild/validate/one/range/reset)
// and submits it through runNomadJob. Per-mode resources fall back to the
// delta settings when not explicitly configured.
function createModeDeploy({
  binaryName,
  name,
  workingDirectory,
  bin = "bin/indexer",
  selector = {},
  env = {},
  region = "skynet-dc1",
  deltaSchedule,
  validateSchedule,
  deltaKillTimeout,
  deltaCpu,
  deltaMem,
  rebuildKillTimeout,
  rebuildCpu,
  rebuildMem,
  validateKillTimeout,
  validateCpu,
  validateMem
}) {
  // Renders and submits (or stops / dry-run prints) the nomad job for one
  // CLI invocation.
  async function deployMode({
    mode,
    from,
    to,
    stop,
    production,
    dryRun,
    verbose,
    schedule: cmdSchedule,
    ...selectorFlags
  }) {
    if (mode === "delta") {
      // delta always covers the full range; ignore any --from/--to.
      from = 0;
      to = 0;
    }
    // Only unbounded delta/validate jobs run on a cron schedule.
    const isPeriodic = from === 0 && to === 0 && ["delta", "validate"].includes(mode);
    const jobName = getJobName(name, selectorFlags, mode);
    // Sort selector flags so the generated command line is deterministic.
    const selectorCmdPart = Object.entries(selectorFlags).sort().map(([name2, value]) => `--${name2} ${value}`).join(" ");
    let args = `--mode ${mode} ${selectorCmdPart}`;
    if (verbose) {
      args += ` --verbose`;
    }
    let rangeArgs = "";
    if (from > 0) {
      rangeArgs += ` --from ${from}`;
    }
    if (to > 0) {
      rangeArgs += ` --to ${to}`;
    }
    // Per-mode resource table (typo fix: was "modeResouces"; local only).
    // validate falls back to the rebuild values; modes not listed here
    // (one/range/reset) fall back to the delta values below.
    const modeResources = {
      rebuild: { cpu: rebuildCpu, mem: rebuildMem, killTimeout: rebuildKillTimeout },
      "resume-rebuild": { cpu: rebuildCpu, mem: rebuildMem, killTimeout: rebuildKillTimeout },
      validate: {
        cpu: validateCpu || rebuildCpu,
        mem: validateMem || rebuildMem,
        killTimeout: validateKillTimeout || rebuildKillTimeout
      },
      delta: { cpu: deltaCpu, mem: deltaMem, killTimeout: deltaKillTimeout }
    };
    const cpu = modeResources[mode]?.cpu || deltaCpu;
    const mem = modeResources[mode]?.mem || deltaMem;
    const killTimeout = modeResources[mode]?.killTimeout || deltaKillTimeout;
    // Schedules may be strings or per-job factories; an explicit
    // --schedule flag overrides a configured default.
    let deltaCron = typeof deltaSchedule === "function" ? deltaSchedule(jobName) : deltaSchedule;
    if (deltaSchedule && cmdSchedule) {
      deltaCron = cmdSchedule;
    }
    let validateCron = typeof validateSchedule === "function" ? validateSchedule(jobName) : validateSchedule;
    if (validateSchedule && cmdSchedule) {
      validateCron = cmdSchedule;
    }
    const modeIntervals = {
      delta: deltaCron ? INTERVAL_ALIASES[deltaCron] || deltaCron : undefined,
      validate: validateCron ? INTERVAL_ALIASES[validateCron] || validateCron : undefined
    };
    const mainJobDefinition = genConfig({
      jobName,
      cron: isPeriodic ? modeIntervals[mode] : undefined,
      workingDirectory,
      additionalEnv: env,
      region,
      cmd: `${bin} ${args} ${rangeArgs}`,
      killTimeout,
      cpu,
      mem,
      isProduction: production
    });
    const nomadPath = await getNomadPath();
    await prepareNomad(production);
    const nomadAddr = getNomadAddr(production);
    await runNomadJob(nomadPath, nomadAddr, jobName, mainJobDefinition, stop, dryRun);
  }
  // CLI wrapper: parses flags and reports failures with exit code 1.
  async function deploy() {
    if (!binaryName) {
      binaryName = getBinaryName();
    }
    const cli = meow3(`
  Usage

    $ ${binaryName} <options>

  Options
    ${getSelectorDesc(selector)}
    --mode could be delta/rebuild/resume-rebuild/validate/one/range/reset
    --from min id to build
    --to max id to build
    --stop stop job instead of running the job
    --production deploy to production, default is development
    --schedule override default schedule, support aliases: secondly, minutely, hourly, daily, weekly
    --verbose Output debug messages
    --dry-run print nomad job file but do not really execute it

  Examples
    ${binaryName} --mode delta
    ${binaryName} --mode rebuild
    ${binaryName} --mode validate
`, {
      importMeta: import.meta,
      description: false,
      version: false,
      flags: {
        ...getSelectorFlags(selector),
        mode: {
          type: "string",
          default: "delta"
        },
        from: {
          aliases: ["since"],
          type: "number",
          default: 0
        },
        to: {
          aliases: ["until"],
          type: "number",
          default: 0
        },
        schedule: {
          type: "string"
        },
        verbose: {
          type: "boolean",
          default: false
        },
        production: {
          aliases: ["prd"],
          type: "boolean",
          default: false
        },
        dryRun: {
          type: "boolean",
          default: false
        },
        stop: {
          type: "boolean",
          default: false
        }
      }
    });
    try {
      // FIX: deployMode is async — without `await`, a rejected promise
      // escaped this try/catch as an unhandled rejection and the
      // console.error/exit(1) path below was never reached.
      return await deployMode(cli.flags);
    } catch (err) {
      console.error(err);
      process.exit(1);
    }
  }
  return { deploy };
}
|
|
153
|
-
function
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
1624
|
+
// Factory for the deploy CLI of a modeless app (plain indexer or service).
// The returned `deploy` parses flags via meow, renders one nomad job
// definition and submits it through runNomadJob.
function createDeploy({
  binaryName,
  name,
  workingDirectory,
  bin = "bin/indexer",
  selector = {},
  region = "skynet-dc1",
  type = "batch",
  env = {},
  count,
  schedule,
  restart,
  killTimeout,
  cpu,
  mem,
  service
}) {
  // Renders and submits (or stops / dry-run prints) the nomad job for one
  // CLI invocation.
  async function deployModeless({
    production,
    stop,
    dryRun,
    verbose,
    schedule: cmdSchedule,
    ...selectorFlags
  }) {
    const jobName = getJobName(name, selectorFlags);
    // Sort selector flags so the generated command line is deterministic.
    const selectorCmdPart = Object.entries(selectorFlags).sort().map(([name2, value]) => `--${name2} ${value}`).join(" ");
    let args = `${selectorCmdPart}`;
    if (verbose) {
      args += ` --verbose`;
    }
    // schedule may be a string or a per-job factory; an explicit
    // --schedule flag overrides a configured default.
    let cron = typeof schedule === "function" ? schedule(jobName) : schedule;
    if (schedule && cmdSchedule) {
      cron = cmdSchedule;
    }
    const nomadJobDefinition = genConfig({
      jobName,
      cron: cron ? INTERVAL_ALIASES[cron] || cron : undefined,
      count,
      restart,
      workingDirectory,
      additionalEnv: env,
      region,
      type,
      cmd: `${bin} ${args}`,
      killTimeout,
      cpu,
      mem,
      service,
      isProduction: production
    });
    const nomadPath = await getNomadPath();
    await prepareNomad(production);
    const nomadAddr = getNomadAddr(production);
    await runNomadJob(nomadPath, nomadAddr, jobName, nomadJobDefinition, stop, dryRun);
  }
  // CLI wrapper: parses flags and reports failures with exit code 1.
  async function deploy() {
    if (!binaryName) {
      binaryName = getBinaryName();
    }
    const cli = meow3(`
  Usage

    $ ${binaryName} <options>

  Options
    ${getSelectorDesc(selector)}
    --stop stop job instead of running the job
    --production deploy to production, default is development
    --schedule override default schedule, support aliases: secondly, minutely, hourly, daily, weekly
    --verbose Output debug messages
    --dry-run print nomad job file but do not really execute it
`, {
      importMeta: import.meta,
      description: false,
      version: false,
      flags: {
        ...getSelectorFlags(selector),
        schedule: {
          type: "string"
        },
        verbose: {
          type: "boolean",
          default: false
        },
        production: {
          aliases: ["prd"],
          type: "boolean",
          default: false
        },
        dryRun: {
          type: "boolean",
          default: false
        },
        stop: {
          type: "boolean",
          default: false
        }
      }
    });
    try {
      // FIX: deployModeless is async — without `await`, a rejected promise
      // escaped this try/catch as an unhandled rejection and the
      // console.error/exit(1) path below was never reached.
      return await deployModeless(cli.flags);
    } catch (err) {
      console.error(err);
      process.exit(1);
    }
  }
  return { deploy };
}
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
1733
|
+
// src/app.ts
|
|
1734
|
+
import { EOL } from "os";
|
|
1735
|
+
// Prints the top-level usage banner listing the three sub-commands
// (run / deploy / delete) dispatched by createApp.
// NOTE(review): exact template whitespace assumed from the bundle; the
// binary name is resolved at call time via getBinaryName().
function printAppHelp() {
  console.log(`
  Usage

    $ ${getBinaryName()} run <options>
    $ ${getBinaryName()} deploy <options>
    $ ${getBinaryName()} delete <options>
`);
}
|
|
1744
|
+
// True when `command` is one of the synonyms for the delete sub-command
// (delete / stop / remove), all of which map to deploy --stop.
function isDeleteCommand(command) {
  switch (command) {
    case "delete":
    case "stop":
    case "remove":
      return true;
    default:
      return false;
  }
}
|
|
1747
|
+
// Applies declared env values onto process.env. An entry that is falsy in
// the declaration AND absent from the real environment is fatal: the full
// list of missing keys is printed and the process exits with code 1.
function checkAndSetEnv(env) {
  const missingEnvs = [];
  for (const [key, value] of Object.entries(env)) {
    if (value) {
      process.env[key] = value;
    } else if (!process.env[key]) {
      missingEnvs.push(key);
    }
  }
  if (missingEnvs.length === 0) {
    return;
  }
  console.log(`The following environment value shouldn't be empty:${EOL}- ${missingEnvs.join(EOL + "- ")}`);
  process.exit(1);
}
|
|
1761
|
+
// Shared app shell. Validates parameters up front (fatal on any error),
// then returns an async entry point that dispatches on the first CLI
// argument: "run" applies env and calls onRun; "deploy" (and the delete
// synonyms, which imply --stop) calls onDeploy; anything else prints help.
function createApp({
  parameterErrors,
  env,
  onRun,
  onDeploy
}) {
  if (parameterErrors.length > 0) {
    console.log(`Parameter Validation Failed:${EOL}- ${parameterErrors.join(EOL + "- ")}`);
    process.exit(1);
  }
  return async () => {
    // Peel the sub-command off argv so downstream CLI parsers (meow) see
    // only their own flags.
    const [nodeBin, script, subCommand, ...restArgs] = process.argv;
    process.argv = [nodeBin, script, ...restArgs];
    if (subCommand === "run") {
      checkAndSetEnv(env);
      await onRun();
      return;
    }
    const isDelete = isDeleteCommand(subCommand);
    if (subCommand === "deploy" || isDelete) {
      if (isDelete) {
        // delete/stop/remove behave as deploy with an implicit --stop.
        process.argv.push("--stop");
      }
      await onDeploy();
      return;
    }
    printAppHelp();
  };
}
|
|
1787
|
+
// Validates the declared env map: every entry must either carry a truthy
// value or equal the SENSITIVE_VALUE marker (meaning "injected at runtime",
// e.g. via doppler). Returns a list of human-readable error strings.
function checkEnvParameter(env) {
  const errors = [];
  for (const [key, value] of Object.entries(env)) {
    if (!value && value !== SENSITIVE_VALUE) {
      errors.push(`must have valid non-empty value for env.${key}`);
    }
  }
  return errors;
}
|
|
1797
|
+
// Validates an indexer's build config: func, cpu and mem are all required.
// Returns one error string per missing field, in declaration order.
function checkIndexerBuildParameter(build) {
  const errors = [];
  for (const field of ["func", "cpu", "mem"]) {
    if (!build?.[field]) {
      errors.push(`must define build.${field}`);
    }
  }
  return errors;
}
|
|
1810
|
+
// Validates the indexer state config: type and getMaxId are mandatory.
// Returns one error string per missing field, in declaration order.
function checkStateParameter(state) {
  const errors = [];
  for (const field of ["type", "getMaxId"]) {
    if (!state?.[field]) {
      errors.push(`must define state.${field}`);
    }
  }
  return errors;
}
|
|
1820
|
+
// Entry-point factory for a plain (non-mode) indexer app. Validates the
// build/env parameters, then wires "run" to createIndexerApp and "deploy"
// to createDeploy via the shared createApp shell.
function indexer({
  name,
  selector,
  build,
  env = {},
  region = "skynet-dc1"
}) {
  return createApp({
    parameterErrors: [...checkIndexerBuildParameter(build), ...checkEnvParameter(env)],
    env,
    onRun: () => {
      const { run } = createIndexerApp({
        binaryName: `${getBinaryName()} run`,
        selector,
        build: build.func,
        maxRetry: build.maxRetry
      });
      // Make the process identifiable in ps/top output.
      process.title = name;
      return run();
    },
    onDeploy: () => {
      const bin = detectBin();
      // Any SENSITIVE_VALUE (null) env entry means the secret is supplied
      // at runtime, so the run command is wrapped with doppler.
      const needDoppler = Object.values(env).some((v) => v === SENSITIVE_VALUE);
      const { deploy } = createDeploy({
        binaryName: `${getBinaryName()} deploy`,
        name,
        workingDirectory: detectWorkingDirectory(),
        bin: needDoppler ? `doppler run -- ${bin} run` : `${bin} run`,
        selector,
        region,
        env,
        schedule: build.schedule,
        restart: build.restart,
        killTimeout: build.killTimeout,
        cpu: build.cpu,
        mem: build.mem
      });
      return deploy();
    }
  });
}
|
|
1861
|
+
// Validates a mode-indexer's build config: func, cpu and mem are required.
// Returns one error string per missing field, in declaration order.
// NOTE: intentionally mirrors checkIndexerBuildParameter's messages.
function checkModeIndexerBuildParameter(build) {
  return ["func", "cpu", "mem"]
    .filter((field) => !build?.[field])
    .map((field) => `must define build.${field}`);
}
|
|
1874
|
+
// The validate config is optional; when present, func/cpu/mem must all be
// set. Returns one error string per missing field, in declaration order.
function checkModeIndexerValidateParameter(validate) {
  if (!validate) {
    return [];
  }
  return ["func", "cpu", "mem"]
    .filter((field) => !validate[field])
    .map((field) => `must define validate.${field}`);
}
|
|
1889
|
+
// Entry-point factory for a mode-based indexer app (delta/rebuild/validate
// modes). Validates build/validate/state/env parameters, then wires "run"
// to createModeIndexerApp and "deploy" to createModeDeploy via the shared
// createApp shell. Rebuild resources deliberately reuse the build config;
// validate resources come from the optional validate config.
function modeIndexer({
  name,
  selector,
  state,
  build,
  validate,
  env = {},
  region = "skynet-dc1"
}) {
  return createApp({
    parameterErrors: [
      ...checkModeIndexerBuildParameter(build),
      ...checkModeIndexerValidateParameter(validate),
      ...checkStateParameter(state),
      ...checkEnvParameter(env)
    ],
    env,
    onRun: () => {
      const { run } = createModeIndexerApp({
        binaryName: `${getBinaryName()} run`,
        name,
        selector,
        build: build.func,
        maxRetry: build.maxRetry,
        buildBatchSize: build.batchSize,
        buildConcurrency: build.concurrency,
        // validate is optional — forward its pieces only when configured.
        validate: validate && validate.func,
        validateBatchSize: validate && validate.batchSize,
        validateConcurrency: validate && validate.concurrency,
        state
      });
      // Make the process identifiable in ps/top output.
      process.title = name;
      return run();
    },
    onDeploy: () => {
      const bin = detectBin();
      // Any SENSITIVE_VALUE (null) env entry means the secret is supplied
      // at runtime, so the run command is wrapped with doppler.
      const needDoppler = Object.values(env).some((v) => v === SENSITIVE_VALUE);
      const { deploy } = createModeDeploy({
        binaryName: `${getBinaryName()} deploy`,
        name,
        workingDirectory: detectWorkingDirectory(),
        bin: needDoppler ? `doppler run -- ${bin} run` : `${bin} run`,
        selector,
        region,
        env,
        deltaSchedule: build.schedule,
        deltaKillTimeout: build.killTimeout,
        deltaCpu: build.cpu,
        deltaMem: build.mem,
        // rebuild shares the build resources by design.
        rebuildKillTimeout: build.killTimeout,
        rebuildCpu: build.cpu,
        rebuildMem: build.mem,
        validateSchedule: validate && validate.schedule,
        validateKillTimeout: validate && validate.killTimeout,
        validateCpu: validate && validate.cpu,
        validateMem: validate && validate.mem
      });
      return deploy();
    }
  });
}
|
|
1950
|
+
// Validates the `serve` parameter of api(): prefix (leading "/"), port,
// cpu and mem are required, and apiKey is required whenever any route is
// marked protected. Returns a list of human-readable error strings.
function checkApiServeParameter(serve, routes) {
  const errors = [];
  if (!serve?.prefix) {
    errors.push("must define serve.prefix");
  } else if (!serve.prefix.startsWith("/")) {
    // FIX: message previously said "server.prefix"; the parameter is
    // serve.prefix, matching every other message in this validator.
    errors.push("serve.prefix must start with /, e.g. /my-api");
  }
  if (!serve?.port) {
    errors.push("must define serve.port");
  }
  if (!serve?.cpu) {
    errors.push("must define serve.cpu");
  }
  if (!serve?.mem) {
    errors.push("must define serve.mem");
  }
  // FIX: guard both accesses — the old code crashed with a TypeError when
  // serve was undefined (serve.apiKey) or routes was not an array, instead
  // of reporting the validation errors collected above.
  if (Array.isArray(routes) && routes.some((r) => r.protected) && !serve?.apiKey) {
    errors.push("must define serve.apiKey since some routes are protected");
  }
  return errors;
}
|
|
228
1971
|
// Validates the `routes` parameter of api(): must be an array, and every
// route needs a path and a handler; middlewares, when present, must be an
// array. Returns a list of human-readable error strings.
function checkApiRoutesParameter(routes) {
  if (!Array.isArray(routes)) {
    return ["routes must be an array"];
  }
  const errors = [];
  routes.forEach((route, i) => {
    if (!route.path) {
      errors.push(`routes[${i}] must define path`);
    }
    if (!route.handler) {
      errors.push(`routes[${i}] must define handler`);
    }
    if (route.middlewares && !Array.isArray(route.middlewares)) {
      errors.push(`routes[${i}].middlewares must be an array`);
    }
  });
  return errors;
}
|
|
1991
|
+
// Entry-point factory for an API service app. Validates routes/serve/env
// parameters, then wires "run" to startApiApp and "deploy" to createDeploy
// (as a nomad "service" job with a fixed restart policy) via the shared
// createApp shell.
function api({
  name,
  routes,
  serve,
  beforeListen,
  env = {},
  region = "skynet-dc1"
}) {
  // APIs take no selector flags; pass an empty selector everywhere.
  const selector = {};
  return createApp({
    parameterErrors: [
      ...checkApiRoutesParameter(routes),
      ...checkApiServeParameter(serve, routes),
      ...checkEnvParameter(env)
    ],
    env,
    onRun: () => {
      // Make the process identifiable in ps/top output.
      process.title = name;
      return startApiApp({
        binaryName: `${getBinaryName()} run`,
        name,
        selector,
        routes,
        serve,
        beforeListen
      });
    },
    onDeploy: () => {
      const bin = detectBin();
      // Any SENSITIVE_VALUE (null) env entry means the secret is supplied
      // at runtime, so the run command is wrapped with doppler.
      const needDoppler = Object.values(env).some((v) => v === SENSITIVE_VALUE);
      const { deploy } = createDeploy({
        binaryName: `${getBinaryName()} deploy`,
        name,
        workingDirectory: detectWorkingDirectory(),
        bin: needDoppler ? `doppler run -- ${bin} run` : `${bin} run`,
        selector,
        region,
        env,
        // Long-running nomad service with delayed-restart policy (3
        // attempts per 2-minute window, 15s apart).
        type: "service",
        restart: {
          attempts: 3,
          delay: "15s",
          mode: "delay",
          interval: "2m"
        },
        count: serve.instances,
        killTimeout: serve.killTimeout,
        cpu: serve.cpu,
        mem: serve.mem,
        service: {
          prefix: serve.prefix,
          port: serve.port
        }
      });
      return deploy();
    }
  });
}
|
|
2049
|
+
// Marker for env entries whose value is injected at runtime (e.g. via
// doppler) rather than declared in code.
var SENSITIVE_VALUE = null;
// Returns cron expressions keyed by unit name (singular and plural forms
// share the same expression). every(1) uses the simpler fixed-point forms
// and additionally offers week/weeks; every(n > 1) uses */n step forms and
// intentionally has no weekly entry.
var every = (n = 1) => {
  const withPlurals = (pairs) => {
    const table = {};
    for (const [unit, cron] of pairs) {
      table[unit] = cron;
      table[`${unit}s`] = cron;
    }
    return table;
  };
  if (n === 1) {
    return withPlurals([
      ["second", "*/1 * * * * * *"],
      ["minute", "0 * * * * * *"],
      ["hour", "0 0 * * * * *"],
      ["day", "0 0 0 * * * *"],
      ["week", "0 0 0 * * 0 *"]
    ]);
  }
  return withPlurals([
    ["second", `*/${n} * * * * * *`],
    ["minute", `0 */${n} * * * * *`],
    ["hour", `0 0 */${n} * * * *`],
    ["day", `0 0 0 */${n} * * *`]
  ]);
};
|
|
2076
|
+
export {
|
|
2077
|
+
modeIndexer,
|
|
2078
|
+
indexer,
|
|
2079
|
+
every,
|
|
2080
|
+
api,
|
|
2081
|
+
SENSITIVE_VALUE
|
|
327
2082
|
};
|
|
328
|
-
export { indexer, modeIndexer, api, every, SENSITIVE_VALUE };
|