@shopify/cli 3.84.2 → 3.85.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/{chokidar-KCZH6DIC.js → chokidar-TTCYG5AA.js} +5 -5
- package/dist/{chokidar-XPSJ2FUJ.js → chokidar-XUA2BN3J.js} +5 -5
- package/dist/{chunk-OCEWRJZI.js → chunk-62H2KYOJ.js} +17 -21
- package/dist/{chunk-6A27UWO7.js → chunk-6YB46YWH.js} +4 -4
- package/dist/{chunk-L5OKA4W4.js → chunk-7JE3EHN3.js} +7 -7
- package/dist/chunk-7LEPNDDJ.js +87 -0
- package/dist/{chunk-MXDBI4PG.js → chunk-7N5JGLLX.js} +5 -5
- package/dist/{chunk-HMDWNGIV.js → chunk-AM4QB5OM.js} +206 -100
- package/dist/{chunk-W5G2YPO2.js → chunk-BQ3PZIHZ.js} +7 -7
- package/dist/{chunk-TFVMB62A.js → chunk-CKYPRLQQ.js} +19 -18
- package/dist/{chunk-AQVXNWM2.js → chunk-CNY4KRKG.js} +2 -2
- package/dist/{del-SAZHGAZZ.js → chunk-EFOOQV72.js} +17 -378
- package/dist/{chunk-3FBDJEGD.js → chunk-F7F4BQYW.js} +1824 -762
- package/dist/{chunk-66VZYBID.js → chunk-F7J5CUMZ.js} +4 -4
- package/dist/{chunk-2TBGVDDK.js → chunk-FLHWGFGH.js} +3 -3
- package/dist/{chunk-JHLYV5K7.js → chunk-G6OERDCU.js} +7 -7
- package/dist/{chunk-6JFZSZKE.js → chunk-GGNBWCCE.js} +16380 -5793
- package/dist/chunk-IWFYXDPH.js +109 -0
- package/dist/{chunk-7UHRC35E.js → chunk-J3DHMVKN.js} +2 -2
- package/dist/{chunk-Z6MEYGAG.js → chunk-JDI5KJ6D.js} +3 -3
- package/dist/chunk-KIC7OBUL.js +4654 -0
- package/dist/{chunk-YUI23PKV.js → chunk-KJT4XRJY.js} +2 -2
- package/dist/{chunk-46YOOMEM.js → chunk-LOJKECKW.js} +2 -2
- package/dist/{chunk-TAA5HGRT.js → chunk-MHFD5A4P.js} +666 -589
- package/dist/{chunk-QS5WPZTS.js → chunk-O47I6ERR.js} +10 -9
- package/dist/{chunk-KHBZ52PT.js → chunk-OXCV2R7K.js} +43 -19
- package/dist/{chunk-XD5EFQE2.js → chunk-OXVB7GS3.js} +5 -5
- package/dist/{chunk-UOLT7THS.js → chunk-PYZUPEOA.js} +3 -3
- package/dist/{chunk-G74IQQ6M.js → chunk-Q64LTCDT.js} +3 -3
- package/dist/{chunk-DCU33ZNP.js → chunk-R7QUG5YH.js} +3 -3
- package/dist/{chunk-YPRC4TK6.js → chunk-S3MBABAK.js} +187 -43
- package/dist/{chunk-ZRU5CKDG.js → chunk-TRKSH2XY.js} +4 -4
- package/dist/{chunk-BSKQ4RKV.js → chunk-TVWMGUCW.js} +3 -3
- package/dist/{chunk-G5R6YD27.js → chunk-UATXMR5F.js} +2 -103
- package/dist/{chunk-DZCCH5OM.js → chunk-UYWDOH6Y.js} +8 -8
- package/dist/{chunk-BX2C6EVM.js → chunk-VCGC2OW6.js} +2 -2
- package/dist/{chunk-YNNUWX3I.js → chunk-VMPHJR5V.js} +4 -4
- package/dist/{chunk-7YVG6N4O.js → chunk-W54BTT4L.js} +2 -2
- package/dist/{chunk-RTTTZZGY.js → chunk-XKHE52EY.js} +4 -4
- package/dist/{chunk-MRQXBTNU.js → chunk-XNKJ52IR.js} +3 -3
- package/dist/{chunk-EG6MBBEN.js → chunk-Y2JP6WFP.js} +2 -2
- package/dist/{chunk-M6EH6UKK.js → chunk-YIQ4GHA2.js} +4 -4
- package/dist/{chunk-ZBPY6YL5.js → chunk-YJZEHV2H.js} +8 -8
- package/dist/{chunk-T2Z26YN7.js → chunk-ZZACM6JY.js} +3 -3
- package/dist/cli/commands/auth/login.d.ts +8 -0
- package/dist/cli/commands/auth/login.js +28 -0
- package/dist/cli/commands/auth/login.test.js +53 -0
- package/dist/cli/commands/auth/logout.js +15 -17
- package/dist/cli/commands/auth/logout.test.js +20 -22
- package/dist/cli/commands/cache/clear.js +15 -16
- package/dist/cli/commands/debug/command-flags.js +15 -16
- package/dist/cli/commands/docs/generate.js +15 -16
- package/dist/cli/commands/docs/generate.test.js +21 -22
- package/dist/cli/commands/help.js +15 -16
- package/dist/cli/commands/kitchen-sink/async.js +16 -17
- package/dist/cli/commands/kitchen-sink/async.test.js +17 -18
- package/dist/cli/commands/kitchen-sink/index.js +18 -19
- package/dist/cli/commands/kitchen-sink/index.test.js +21 -22
- package/dist/cli/commands/kitchen-sink/prompts.js +16 -17
- package/dist/cli/commands/kitchen-sink/prompts.test.js +17 -18
- package/dist/cli/commands/kitchen-sink/static.js +16 -17
- package/dist/cli/commands/kitchen-sink/static.test.js +17 -18
- package/dist/cli/commands/notifications/generate.js +16 -17
- package/dist/cli/commands/notifications/list.js +16 -17
- package/dist/cli/commands/search.js +16 -17
- package/dist/cli/commands/upgrade.js +16 -17
- package/dist/cli/commands/upgrade.test.js +1 -1
- package/dist/cli/commands/version.js +16 -17
- package/dist/cli/commands/version.test.js +17 -18
- package/dist/cli/services/commands/notifications.js +11 -12
- package/dist/cli/services/commands/search.js +7 -8
- package/dist/cli/services/commands/search.test.js +8 -9
- package/dist/cli/services/commands/version.js +8 -9
- package/dist/cli/services/commands/version.test.js +10 -11
- package/dist/cli/services/kitchen-sink/async.js +7 -8
- package/dist/cli/services/kitchen-sink/prompts.js +7 -8
- package/dist/cli/services/kitchen-sink/static.js +7 -8
- package/dist/cli/services/upgrade.js +8 -9
- package/dist/cli/services/upgrade.test.js +13 -14
- package/dist/configs/all.yml +3 -3
- package/dist/configs/recommended.yml +3 -3
- package/dist/{custom-oclif-loader-BBMEEF2H.js → custom-oclif-loader-MVTJFCUD.js} +11 -12
- package/dist/data/latest.json +1 -1
- package/dist/data/objects.json +2 -2
- package/dist/data/shopify_system_translations.json +82 -24
- package/dist/data/tags.json +10 -10
- package/dist/del-DNZ7X2HW.js +377 -0
- package/dist/{error-handler-734CUCMD.js → error-handler-OEZ5DBZB.js} +13 -14
- package/dist/hooks/postrun.js +13 -15
- package/dist/hooks/prerun.js +14 -15
- package/dist/index.d.ts +1 -1
- package/dist/index.js +7792 -5700
- package/dist/{lib-OSXFX3I2.js → lib-DEEC6IKU.js} +3 -4
- package/dist/{local-5AKZWHSS.js → local-72XWEFWL.js} +7 -8
- package/dist/{morph-TYIH255V.js → morph-5D7H6MU2.js} +13 -11
- package/dist/{node-package-manager-DMOU4DTB.js → node-package-manager-TTBSTZXN.js} +8 -9
- package/dist/{npa-E675GQOI.js → npa-LHT53SWR.js} +3 -5
- package/dist/{path-GB4VIEM6.js → path-COZT77T2.js} +2 -2
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/dist/{ui-JQXGLHMO.js → ui-XUA267UA.js} +9 -8
- package/dist/{workerd-FTKMK3NR.js → workerd-KJQYQH4A.js} +23 -23
- package/oclif.manifest.json +79 -9
- package/package.json +10 -10
- package/dist/chunk-CP3BRHWK.js +0 -33
- package/dist/chunk-EZQWZ57B.js +0 -53
- package/dist/chunk-SJMHVGQ5.js +0 -119
- package/dist/chunk-SM7O6ZFJ.js +0 -1216
|
@@ -0,0 +1,4654 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ClientError,
|
|
3
|
+
GraphQLClient,
|
|
4
|
+
GraphQLClientError,
|
|
5
|
+
abortSignalFromRequestBehaviour,
|
|
6
|
+
allAPIs,
|
|
7
|
+
blockPartnersAccess,
|
|
8
|
+
buildHeaders,
|
|
9
|
+
getIdentityTokenInformation,
|
|
10
|
+
getPartnersToken,
|
|
11
|
+
hashString,
|
|
12
|
+
httpsAgent,
|
|
13
|
+
nonRandomUUID,
|
|
14
|
+
requestMode,
|
|
15
|
+
resolveRequestDocument,
|
|
16
|
+
retryAwareRequest,
|
|
17
|
+
sanitizeURL,
|
|
18
|
+
sanitizedHeadersOutput,
|
|
19
|
+
shopifyFetch,
|
|
20
|
+
z
|
|
21
|
+
} from "./chunk-62H2KYOJ.js";
|
|
22
|
+
import {
|
|
23
|
+
cacheRetrieveOrRepopulate,
|
|
24
|
+
getCurrentSessionId,
|
|
25
|
+
getPackageManager,
|
|
26
|
+
getSessions,
|
|
27
|
+
packageManagerFromUserAgent,
|
|
28
|
+
removeCurrentSessionId,
|
|
29
|
+
removeSessions,
|
|
30
|
+
setCurrentSessionId,
|
|
31
|
+
setSessions,
|
|
32
|
+
timeIntervalToMilliseconds
|
|
33
|
+
} from "./chunk-OXCV2R7K.js";
|
|
34
|
+
import {
|
|
35
|
+
CLI_KIT_VERSION
|
|
36
|
+
} from "./chunk-KJT4XRJY.js";
|
|
37
|
+
import {
|
|
38
|
+
AbortError,
|
|
39
|
+
BugError,
|
|
40
|
+
FatalError,
|
|
41
|
+
addPublicMetadata,
|
|
42
|
+
addSensitiveMetadata,
|
|
43
|
+
ciPlatform,
|
|
44
|
+
cloudEnvironment,
|
|
45
|
+
currentProcessIsGlobal,
|
|
46
|
+
environmentVariables,
|
|
47
|
+
firstPartyDev,
|
|
48
|
+
formatPackageManagerCommand,
|
|
49
|
+
import_ts_error,
|
|
50
|
+
isCI,
|
|
51
|
+
isCloudEnvironment,
|
|
52
|
+
isTTY,
|
|
53
|
+
isWsl,
|
|
54
|
+
keypress,
|
|
55
|
+
macAddress,
|
|
56
|
+
openURL,
|
|
57
|
+
outputCompleted,
|
|
58
|
+
outputContent,
|
|
59
|
+
outputDebug,
|
|
60
|
+
outputInfo,
|
|
61
|
+
outputToken,
|
|
62
|
+
platformAndArch,
|
|
63
|
+
require_arrayLikeKeys,
|
|
64
|
+
require_arrayMap,
|
|
65
|
+
require_arrayPush,
|
|
66
|
+
require_baseDifference,
|
|
67
|
+
require_baseFlatten,
|
|
68
|
+
require_baseForOwn,
|
|
69
|
+
require_baseGet,
|
|
70
|
+
require_baseGetAllKeys,
|
|
71
|
+
require_baseIsEqual,
|
|
72
|
+
require_baseIteratee,
|
|
73
|
+
require_baseKeys,
|
|
74
|
+
require_baseRest,
|
|
75
|
+
require_castPath,
|
|
76
|
+
require_defineProperty,
|
|
77
|
+
require_eq,
|
|
78
|
+
require_get,
|
|
79
|
+
require_getSymbols,
|
|
80
|
+
require_getTag,
|
|
81
|
+
require_isArguments,
|
|
82
|
+
require_isArray,
|
|
83
|
+
require_isArrayLike,
|
|
84
|
+
require_isArrayLikeObject,
|
|
85
|
+
require_isBuffer,
|
|
86
|
+
require_isIndex,
|
|
87
|
+
require_isObject,
|
|
88
|
+
require_isPrototype,
|
|
89
|
+
require_isTypedArray,
|
|
90
|
+
require_keys,
|
|
91
|
+
require_mapToArray,
|
|
92
|
+
require_overArg,
|
|
93
|
+
require_stubArray,
|
|
94
|
+
require_toKey,
|
|
95
|
+
runWithTimer,
|
|
96
|
+
sessionConstants,
|
|
97
|
+
stringifyMessage,
|
|
98
|
+
themeToken
|
|
99
|
+
} from "./chunk-MHFD5A4P.js";
|
|
100
|
+
import {
|
|
101
|
+
cwd
|
|
102
|
+
} from "./chunk-Y2JP6WFP.js";
|
|
103
|
+
import {
|
|
104
|
+
__commonJS,
|
|
105
|
+
__require,
|
|
106
|
+
__toESM,
|
|
107
|
+
init_cjs_shims
|
|
108
|
+
} from "./chunk-PKR7KJ6P.js";
|
|
109
|
+
|
|
110
|
+
// ../../node_modules/.pnpm/network-interfaces@1.1.0/node_modules/network-interfaces/index.js
|
|
111
|
+
var require_network_interfaces = __commonJS({
|
|
112
|
+
"../../node_modules/.pnpm/network-interfaces@1.1.0/node_modules/network-interfaces/index.js"(exports2) {
|
|
113
|
+
"use strict";
|
|
114
|
+
init_cjs_shims();
|
|
115
|
+
var os2 = __require("os");
|
|
116
|
+
function isValid(address, options2) {
|
|
117
|
+
return !(typeof options2.internal == "boolean" && address.internal !== options2.internal || options2.ipVersion === 4 && address.family !== "IPv4" || options2.ipVersion === 6 && address.family !== "IPv6");
|
|
118
|
+
}
|
|
119
|
+
function findAddresses(interfaceName, options2 = {}) {
|
|
120
|
+
let addresses = os2.networkInterfaces()[interfaceName];
|
|
121
|
+
if (!addresses)
|
|
122
|
+
throw new Error(`Network interface "${interfaceName}" does not exist`);
|
|
123
|
+
let result = [];
|
|
124
|
+
for (let address of addresses)
|
|
125
|
+
isValid(address, options2) && result.push(address);
|
|
126
|
+
return result;
|
|
127
|
+
}
|
|
128
|
+
exports2.toIp = function(interfaceName, options2) {
|
|
129
|
+
let addresses = findAddresses(interfaceName, options2);
|
|
130
|
+
if (addresses.length === 0)
|
|
131
|
+
throw new Error(`No suitable IP address found on interface "${interfaceName}"`);
|
|
132
|
+
return addresses[0].address;
|
|
133
|
+
};
|
|
134
|
+
exports2.toIps = function(interfaceName, options2) {
|
|
135
|
+
return findAddresses(interfaceName, options2).map((address) => address.address);
|
|
136
|
+
};
|
|
137
|
+
exports2.fromIp = function(ip, options2) {
|
|
138
|
+
let interfaces = os2.networkInterfaces(), interfaceNames = Object.keys(interfaces);
|
|
139
|
+
for (let interfaceName of interfaceNames)
|
|
140
|
+
for (let address of interfaces[interfaceName])
|
|
141
|
+
if (address.address === ip && isValid(address, options2))
|
|
142
|
+
return interfaceName;
|
|
143
|
+
throw new Error(`No suitable interfaces were found with IP address "${ip}"`);
|
|
144
|
+
};
|
|
145
|
+
exports2.getInterface = function(options2) {
|
|
146
|
+
let interfaces = os2.networkInterfaces(), interfaceNames = Object.keys(interfaces);
|
|
147
|
+
for (let interfaceName of interfaceNames)
|
|
148
|
+
if (findAddresses(interfaceName, options2).length > 0)
|
|
149
|
+
return interfaceName;
|
|
150
|
+
throw new Error("No suitable interfaces were found");
|
|
151
|
+
};
|
|
152
|
+
exports2.getInterfaces = function(options2) {
|
|
153
|
+
let interfaces = os2.networkInterfaces(), interfaceNames = Object.keys(interfaces), result = [];
|
|
154
|
+
for (let interfaceName of interfaceNames)
|
|
155
|
+
findAddresses(interfaceName, options2).length > 0 && result.push(interfaceName);
|
|
156
|
+
return result;
|
|
157
|
+
};
|
|
158
|
+
}
|
|
159
|
+
});
|
|
160
|
+
|
|
161
|
+
// ../../node_modules/.pnpm/deepmerge@4.3.1/node_modules/deepmerge/dist/cjs.js
|
|
162
|
+
var require_cjs = __commonJS({
|
|
163
|
+
"../../node_modules/.pnpm/deepmerge@4.3.1/node_modules/deepmerge/dist/cjs.js"(exports2, module2) {
|
|
164
|
+
"use strict";
|
|
165
|
+
init_cjs_shims();
|
|
166
|
+
var isMergeableObject = function(value) {
|
|
167
|
+
return isNonNullObject(value) && !isSpecial(value);
|
|
168
|
+
};
|
|
169
|
+
function isNonNullObject(value) {
|
|
170
|
+
return !!value && typeof value == "object";
|
|
171
|
+
}
|
|
172
|
+
function isSpecial(value) {
|
|
173
|
+
var stringValue = Object.prototype.toString.call(value);
|
|
174
|
+
return stringValue === "[object RegExp]" || stringValue === "[object Date]" || isReactElement(value);
|
|
175
|
+
}
|
|
176
|
+
var canUseSymbol = typeof Symbol == "function" && Symbol.for, REACT_ELEMENT_TYPE = canUseSymbol ? Symbol.for("react.element") : 60103;
|
|
177
|
+
function isReactElement(value) {
|
|
178
|
+
return value.$$typeof === REACT_ELEMENT_TYPE;
|
|
179
|
+
}
|
|
180
|
+
function emptyTarget(val) {
|
|
181
|
+
return Array.isArray(val) ? [] : {};
|
|
182
|
+
}
|
|
183
|
+
function cloneUnlessOtherwiseSpecified(value, options2) {
|
|
184
|
+
return options2.clone !== !1 && options2.isMergeableObject(value) ? deepmerge(emptyTarget(value), value, options2) : value;
|
|
185
|
+
}
|
|
186
|
+
function defaultArrayMerge(target, source, options2) {
|
|
187
|
+
return target.concat(source).map(function(element) {
|
|
188
|
+
return cloneUnlessOtherwiseSpecified(element, options2);
|
|
189
|
+
});
|
|
190
|
+
}
|
|
191
|
+
function getMergeFunction(key, options2) {
|
|
192
|
+
if (!options2.customMerge)
|
|
193
|
+
return deepmerge;
|
|
194
|
+
var customMerge = options2.customMerge(key);
|
|
195
|
+
return typeof customMerge == "function" ? customMerge : deepmerge;
|
|
196
|
+
}
|
|
197
|
+
function getEnumerableOwnPropertySymbols(target) {
|
|
198
|
+
return Object.getOwnPropertySymbols ? Object.getOwnPropertySymbols(target).filter(function(symbol) {
|
|
199
|
+
return Object.propertyIsEnumerable.call(target, symbol);
|
|
200
|
+
}) : [];
|
|
201
|
+
}
|
|
202
|
+
function getKeys(target) {
|
|
203
|
+
return Object.keys(target).concat(getEnumerableOwnPropertySymbols(target));
|
|
204
|
+
}
|
|
205
|
+
function propertyIsOnObject(object, property) {
|
|
206
|
+
try {
|
|
207
|
+
return property in object;
|
|
208
|
+
} catch {
|
|
209
|
+
return !1;
|
|
210
|
+
}
|
|
211
|
+
}
|
|
212
|
+
function propertyIsUnsafe(target, key) {
|
|
213
|
+
return propertyIsOnObject(target, key) && !(Object.hasOwnProperty.call(target, key) && Object.propertyIsEnumerable.call(target, key));
|
|
214
|
+
}
|
|
215
|
+
function mergeObject(target, source, options2) {
|
|
216
|
+
var destination = {};
|
|
217
|
+
return options2.isMergeableObject(target) && getKeys(target).forEach(function(key) {
|
|
218
|
+
destination[key] = cloneUnlessOtherwiseSpecified(target[key], options2);
|
|
219
|
+
}), getKeys(source).forEach(function(key) {
|
|
220
|
+
propertyIsUnsafe(target, key) || (propertyIsOnObject(target, key) && options2.isMergeableObject(source[key]) ? destination[key] = getMergeFunction(key, options2)(target[key], source[key], options2) : destination[key] = cloneUnlessOtherwiseSpecified(source[key], options2));
|
|
221
|
+
}), destination;
|
|
222
|
+
}
|
|
223
|
+
function deepmerge(target, source, options2) {
|
|
224
|
+
options2 = options2 || {}, options2.arrayMerge = options2.arrayMerge || defaultArrayMerge, options2.isMergeableObject = options2.isMergeableObject || isMergeableObject, options2.cloneUnlessOtherwiseSpecified = cloneUnlessOtherwiseSpecified;
|
|
225
|
+
var sourceIsArray = Array.isArray(source), targetIsArray = Array.isArray(target), sourceAndTargetTypesMatch = sourceIsArray === targetIsArray;
|
|
226
|
+
return sourceAndTargetTypesMatch ? sourceIsArray ? options2.arrayMerge(target, source, options2) : mergeObject(target, source, options2) : cloneUnlessOtherwiseSpecified(source, options2);
|
|
227
|
+
}
|
|
228
|
+
deepmerge.all = function(array, options2) {
|
|
229
|
+
if (!Array.isArray(array))
|
|
230
|
+
throw new Error("first argument should be an array");
|
|
231
|
+
return array.reduce(function(prev, next) {
|
|
232
|
+
return deepmerge(prev, next, options2);
|
|
233
|
+
}, {});
|
|
234
|
+
};
|
|
235
|
+
var deepmerge_1 = deepmerge;
|
|
236
|
+
module2.exports = deepmerge_1;
|
|
237
|
+
}
|
|
238
|
+
});
|
|
239
|
+
|
|
240
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseAssignValue.js
|
|
241
|
+
var require_baseAssignValue = __commonJS({
|
|
242
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseAssignValue.js"(exports2, module2) {
|
|
243
|
+
init_cjs_shims();
|
|
244
|
+
var defineProperty = require_defineProperty();
|
|
245
|
+
function baseAssignValue(object, key, value) {
|
|
246
|
+
key == "__proto__" && defineProperty ? defineProperty(object, key, {
|
|
247
|
+
configurable: !0,
|
|
248
|
+
enumerable: !0,
|
|
249
|
+
value,
|
|
250
|
+
writable: !0
|
|
251
|
+
}) : object[key] = value;
|
|
252
|
+
}
|
|
253
|
+
module2.exports = baseAssignValue;
|
|
254
|
+
}
|
|
255
|
+
});
|
|
256
|
+
|
|
257
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_assignValue.js
|
|
258
|
+
var require_assignValue = __commonJS({
|
|
259
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_assignValue.js"(exports2, module2) {
|
|
260
|
+
init_cjs_shims();
|
|
261
|
+
var baseAssignValue = require_baseAssignValue(), eq = require_eq(), objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
|
|
262
|
+
function assignValue(object, key, value) {
|
|
263
|
+
var objValue = object[key];
|
|
264
|
+
(!(hasOwnProperty.call(object, key) && eq(objValue, value)) || value === void 0 && !(key in object)) && baseAssignValue(object, key, value);
|
|
265
|
+
}
|
|
266
|
+
module2.exports = assignValue;
|
|
267
|
+
}
|
|
268
|
+
});
|
|
269
|
+
|
|
270
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseSet.js
|
|
271
|
+
var require_baseSet = __commonJS({
|
|
272
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseSet.js"(exports2, module2) {
|
|
273
|
+
init_cjs_shims();
|
|
274
|
+
var assignValue = require_assignValue(), castPath = require_castPath(), isIndex = require_isIndex(), isObject2 = require_isObject(), toKey = require_toKey();
|
|
275
|
+
function baseSet(object, path, value, customizer) {
|
|
276
|
+
if (!isObject2(object))
|
|
277
|
+
return object;
|
|
278
|
+
path = castPath(path, object);
|
|
279
|
+
for (var index = -1, length = path.length, lastIndex = length - 1, nested = object; nested != null && ++index < length; ) {
|
|
280
|
+
var key = toKey(path[index]), newValue = value;
|
|
281
|
+
if (key === "__proto__" || key === "constructor" || key === "prototype")
|
|
282
|
+
return object;
|
|
283
|
+
if (index != lastIndex) {
|
|
284
|
+
var objValue = nested[key];
|
|
285
|
+
newValue = customizer ? customizer(objValue, key, nested) : void 0, newValue === void 0 && (newValue = isObject2(objValue) ? objValue : isIndex(path[index + 1]) ? [] : {});
|
|
286
|
+
}
|
|
287
|
+
assignValue(nested, key, newValue), nested = nested[key];
|
|
288
|
+
}
|
|
289
|
+
return object;
|
|
290
|
+
}
|
|
291
|
+
module2.exports = baseSet;
|
|
292
|
+
}
|
|
293
|
+
});
|
|
294
|
+
|
|
295
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_basePickBy.js
|
|
296
|
+
var require_basePickBy = __commonJS({
|
|
297
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_basePickBy.js"(exports2, module2) {
|
|
298
|
+
init_cjs_shims();
|
|
299
|
+
var baseGet = require_baseGet(), baseSet = require_baseSet(), castPath = require_castPath();
|
|
300
|
+
function basePickBy(object, paths, predicate) {
|
|
301
|
+
for (var index = -1, length = paths.length, result = {}; ++index < length; ) {
|
|
302
|
+
var path = paths[index], value = baseGet(object, path);
|
|
303
|
+
predicate(value, path) && baseSet(result, castPath(path, object), value);
|
|
304
|
+
}
|
|
305
|
+
return result;
|
|
306
|
+
}
|
|
307
|
+
module2.exports = basePickBy;
|
|
308
|
+
}
|
|
309
|
+
});
|
|
310
|
+
|
|
311
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_getPrototype.js
|
|
312
|
+
var require_getPrototype = __commonJS({
|
|
313
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_getPrototype.js"(exports2, module2) {
|
|
314
|
+
init_cjs_shims();
|
|
315
|
+
var overArg = require_overArg(), getPrototype = overArg(Object.getPrototypeOf, Object);
|
|
316
|
+
module2.exports = getPrototype;
|
|
317
|
+
}
|
|
318
|
+
});
|
|
319
|
+
|
|
320
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_getSymbolsIn.js
|
|
321
|
+
var require_getSymbolsIn = __commonJS({
|
|
322
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_getSymbolsIn.js"(exports2, module2) {
|
|
323
|
+
init_cjs_shims();
|
|
324
|
+
var arrayPush = require_arrayPush(), getPrototype = require_getPrototype(), getSymbols = require_getSymbols(), stubArray = require_stubArray(), nativeGetSymbols = Object.getOwnPropertySymbols, getSymbolsIn = nativeGetSymbols ? function(object) {
|
|
325
|
+
for (var result = []; object; )
|
|
326
|
+
arrayPush(result, getSymbols(object)), object = getPrototype(object);
|
|
327
|
+
return result;
|
|
328
|
+
} : stubArray;
|
|
329
|
+
module2.exports = getSymbolsIn;
|
|
330
|
+
}
|
|
331
|
+
});
|
|
332
|
+
|
|
333
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_nativeKeysIn.js
|
|
334
|
+
var require_nativeKeysIn = __commonJS({
|
|
335
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_nativeKeysIn.js"(exports2, module2) {
|
|
336
|
+
init_cjs_shims();
|
|
337
|
+
function nativeKeysIn(object) {
|
|
338
|
+
var result = [];
|
|
339
|
+
if (object != null)
|
|
340
|
+
for (var key in Object(object))
|
|
341
|
+
result.push(key);
|
|
342
|
+
return result;
|
|
343
|
+
}
|
|
344
|
+
module2.exports = nativeKeysIn;
|
|
345
|
+
}
|
|
346
|
+
});
|
|
347
|
+
|
|
348
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseKeysIn.js
|
|
349
|
+
var require_baseKeysIn = __commonJS({
|
|
350
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseKeysIn.js"(exports2, module2) {
|
|
351
|
+
init_cjs_shims();
|
|
352
|
+
var isObject2 = require_isObject(), isPrototype = require_isPrototype(), nativeKeysIn = require_nativeKeysIn(), objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
|
|
353
|
+
function baseKeysIn(object) {
|
|
354
|
+
if (!isObject2(object))
|
|
355
|
+
return nativeKeysIn(object);
|
|
356
|
+
var isProto = isPrototype(object), result = [];
|
|
357
|
+
for (var key in object)
|
|
358
|
+
key == "constructor" && (isProto || !hasOwnProperty.call(object, key)) || result.push(key);
|
|
359
|
+
return result;
|
|
360
|
+
}
|
|
361
|
+
module2.exports = baseKeysIn;
|
|
362
|
+
}
|
|
363
|
+
});
|
|
364
|
+
|
|
365
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/keysIn.js
|
|
366
|
+
var require_keysIn = __commonJS({
|
|
367
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/keysIn.js"(exports2, module2) {
|
|
368
|
+
init_cjs_shims();
|
|
369
|
+
var arrayLikeKeys = require_arrayLikeKeys(), baseKeysIn = require_baseKeysIn(), isArrayLike = require_isArrayLike();
|
|
370
|
+
function keysIn(object) {
|
|
371
|
+
return isArrayLike(object) ? arrayLikeKeys(object, !0) : baseKeysIn(object);
|
|
372
|
+
}
|
|
373
|
+
module2.exports = keysIn;
|
|
374
|
+
}
|
|
375
|
+
});
|
|
376
|
+
|
|
377
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_getAllKeysIn.js
|
|
378
|
+
var require_getAllKeysIn = __commonJS({
|
|
379
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_getAllKeysIn.js"(exports2, module2) {
|
|
380
|
+
init_cjs_shims();
|
|
381
|
+
var baseGetAllKeys = require_baseGetAllKeys(), getSymbolsIn = require_getSymbolsIn(), keysIn = require_keysIn();
|
|
382
|
+
function getAllKeysIn(object) {
|
|
383
|
+
return baseGetAllKeys(object, keysIn, getSymbolsIn);
|
|
384
|
+
}
|
|
385
|
+
module2.exports = getAllKeysIn;
|
|
386
|
+
}
|
|
387
|
+
});
|
|
388
|
+
|
|
389
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/pickBy.js
|
|
390
|
+
var require_pickBy = __commonJS({
|
|
391
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/pickBy.js"(exports2, module2) {
|
|
392
|
+
init_cjs_shims();
|
|
393
|
+
var arrayMap = require_arrayMap(), baseIteratee = require_baseIteratee(), basePickBy = require_basePickBy(), getAllKeysIn = require_getAllKeysIn();
|
|
394
|
+
function pickBy2(object, predicate) {
|
|
395
|
+
if (object == null)
|
|
396
|
+
return {};
|
|
397
|
+
var props = arrayMap(getAllKeysIn(object), function(prop) {
|
|
398
|
+
return [prop];
|
|
399
|
+
});
|
|
400
|
+
return predicate = baseIteratee(predicate), basePickBy(object, props, function(value, path) {
|
|
401
|
+
return predicate(value, path[0]);
|
|
402
|
+
});
|
|
403
|
+
}
|
|
404
|
+
module2.exports = pickBy2;
|
|
405
|
+
}
|
|
406
|
+
});
|
|
407
|
+
|
|
408
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/mapValues.js
|
|
409
|
+
var require_mapValues = __commonJS({
|
|
410
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/mapValues.js"(exports2, module2) {
|
|
411
|
+
init_cjs_shims();
|
|
412
|
+
var baseAssignValue = require_baseAssignValue(), baseForOwn = require_baseForOwn(), baseIteratee = require_baseIteratee();
|
|
413
|
+
function mapValues(object, iteratee) {
|
|
414
|
+
var result = {};
|
|
415
|
+
return iteratee = baseIteratee(iteratee, 3), baseForOwn(object, function(value, key, object2) {
|
|
416
|
+
baseAssignValue(result, key, iteratee(value, key, object2));
|
|
417
|
+
}), result;
|
|
418
|
+
}
|
|
419
|
+
module2.exports = mapValues;
|
|
420
|
+
}
|
|
421
|
+
});
|
|
422
|
+
|
|
423
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/isEqual.js
|
|
424
|
+
var require_isEqual = __commonJS({
|
|
425
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/isEqual.js"(exports2, module2) {
|
|
426
|
+
init_cjs_shims();
|
|
427
|
+
var baseIsEqual = require_baseIsEqual();
|
|
428
|
+
function isEqual(value, other) {
|
|
429
|
+
return baseIsEqual(value, other);
|
|
430
|
+
}
|
|
431
|
+
module2.exports = isEqual;
|
|
432
|
+
}
|
|
433
|
+
});
|
|
434
|
+
|
|
435
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/last.js
|
|
436
|
+
var require_last = __commonJS({
|
|
437
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/last.js"(exports2, module2) {
|
|
438
|
+
init_cjs_shims();
|
|
439
|
+
function last(array) {
|
|
440
|
+
var length = array == null ? 0 : array.length;
|
|
441
|
+
return length ? array[length - 1] : void 0;
|
|
442
|
+
}
|
|
443
|
+
module2.exports = last;
|
|
444
|
+
}
|
|
445
|
+
});
|
|
446
|
+
|
|
447
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/differenceWith.js
|
|
448
|
+
var require_differenceWith = __commonJS({
|
|
449
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/differenceWith.js"(exports2, module2) {
|
|
450
|
+
init_cjs_shims();
|
|
451
|
+
var baseDifference = require_baseDifference(), baseFlatten = require_baseFlatten(), baseRest = require_baseRest(), isArrayLikeObject = require_isArrayLikeObject(), last = require_last(), differenceWith2 = baseRest(function(array, values) {
|
|
452
|
+
var comparator = last(values);
|
|
453
|
+
return isArrayLikeObject(comparator) && (comparator = void 0), isArrayLikeObject(array) ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, !0), void 0, comparator) : [];
|
|
454
|
+
});
|
|
455
|
+
module2.exports = differenceWith2;
|
|
456
|
+
}
|
|
457
|
+
});
|
|
458
|
+
|
|
459
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/fromPairs.js
|
|
460
|
+
var require_fromPairs = __commonJS({
|
|
461
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/fromPairs.js"(exports2, module2) {
|
|
462
|
+
init_cjs_shims();
|
|
463
|
+
function fromPairs2(pairs) {
|
|
464
|
+
for (var index = -1, length = pairs == null ? 0 : pairs.length, result = {}; ++index < length; ) {
|
|
465
|
+
var pair = pairs[index];
|
|
466
|
+
result[pair[0]] = pair[1];
|
|
467
|
+
}
|
|
468
|
+
return result;
|
|
469
|
+
}
|
|
470
|
+
module2.exports = fromPairs2;
|
|
471
|
+
}
|
|
472
|
+
});
|
|
473
|
+
|
|
474
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseToPairs.js
|
|
475
|
+
var require_baseToPairs = __commonJS({
|
|
476
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseToPairs.js"(exports2, module2) {
|
|
477
|
+
init_cjs_shims();
|
|
478
|
+
var arrayMap = require_arrayMap();
|
|
479
|
+
function baseToPairs(object, props) {
|
|
480
|
+
return arrayMap(props, function(key) {
|
|
481
|
+
return [key, object[key]];
|
|
482
|
+
});
|
|
483
|
+
}
|
|
484
|
+
module2.exports = baseToPairs;
|
|
485
|
+
}
|
|
486
|
+
});
|
|
487
|
+
|
|
488
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_setToPairs.js
|
|
489
|
+
var require_setToPairs = __commonJS({
|
|
490
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_setToPairs.js"(exports2, module2) {
|
|
491
|
+
init_cjs_shims();
|
|
492
|
+
function setToPairs(set2) {
|
|
493
|
+
var index = -1, result = Array(set2.size);
|
|
494
|
+
return set2.forEach(function(value) {
|
|
495
|
+
result[++index] = [value, value];
|
|
496
|
+
}), result;
|
|
497
|
+
}
|
|
498
|
+
module2.exports = setToPairs;
|
|
499
|
+
}
|
|
500
|
+
});
|
|
501
|
+
|
|
502
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_createToPairs.js
|
|
503
|
+
var require_createToPairs = __commonJS({
|
|
504
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_createToPairs.js"(exports2, module2) {
|
|
505
|
+
init_cjs_shims();
|
|
506
|
+
var baseToPairs = require_baseToPairs(), getTag = require_getTag(), mapToArray = require_mapToArray(), setToPairs = require_setToPairs(), mapTag = "[object Map]", setTag = "[object Set]";
|
|
507
|
+
function createToPairs(keysFunc) {
|
|
508
|
+
return function(object) {
|
|
509
|
+
var tag = getTag(object);
|
|
510
|
+
return tag == mapTag ? mapToArray(object) : tag == setTag ? setToPairs(object) : baseToPairs(object, keysFunc(object));
|
|
511
|
+
};
|
|
512
|
+
}
|
|
513
|
+
module2.exports = createToPairs;
|
|
514
|
+
}
|
|
515
|
+
});
|
|
516
|
+
|
|
517
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/toPairs.js
|
|
518
|
+
var require_toPairs = __commonJS({
|
|
519
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/toPairs.js"(exports2, module2) {
|
|
520
|
+
init_cjs_shims();
|
|
521
|
+
var createToPairs = require_createToPairs(), keys = require_keys(), toPairs2 = createToPairs(keys);
|
|
522
|
+
module2.exports = toPairs2;
|
|
523
|
+
}
|
|
524
|
+
});
|
|
525
|
+
|
|
526
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/set.js
|
|
527
|
+
var require_set = __commonJS({
|
|
528
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/set.js"(exports2, module2) {
|
|
529
|
+
init_cjs_shims();
|
|
530
|
+
var baseSet = require_baseSet();
|
|
531
|
+
function set2(object, path, value) {
|
|
532
|
+
return object == null ? object : baseSet(object, path, value);
|
|
533
|
+
}
|
|
534
|
+
module2.exports = set2;
|
|
535
|
+
}
|
|
536
|
+
});
|
|
537
|
+
|
|
538
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseSlice.js
|
|
539
|
+
var require_baseSlice = __commonJS({
|
|
540
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseSlice.js"(exports2, module2) {
|
|
541
|
+
init_cjs_shims();
|
|
542
|
+
function baseSlice(array, start, end) {
|
|
543
|
+
var index = -1, length = array.length;
|
|
544
|
+
start < 0 && (start = -start > length ? 0 : length + start), end = end > length ? length : end, end < 0 && (end += length), length = start > end ? 0 : end - start >>> 0, start >>>= 0;
|
|
545
|
+
for (var result = Array(length); ++index < length; )
|
|
546
|
+
result[index] = array[index + start];
|
|
547
|
+
return result;
|
|
548
|
+
}
|
|
549
|
+
module2.exports = baseSlice;
|
|
550
|
+
}
|
|
551
|
+
});
|
|
552
|
+
|
|
553
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_parent.js
|
|
554
|
+
var require_parent = __commonJS({
|
|
555
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_parent.js"(exports2, module2) {
|
|
556
|
+
init_cjs_shims();
|
|
557
|
+
var baseGet = require_baseGet(), baseSlice = require_baseSlice();
|
|
558
|
+
function parent(object, path) {
|
|
559
|
+
return path.length < 2 ? object : baseGet(object, baseSlice(path, 0, -1));
|
|
560
|
+
}
|
|
561
|
+
module2.exports = parent;
|
|
562
|
+
}
|
|
563
|
+
});
|
|
564
|
+
|
|
565
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseUnset.js
|
|
566
|
+
var require_baseUnset = __commonJS({
|
|
567
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/_baseUnset.js"(exports2, module2) {
|
|
568
|
+
init_cjs_shims();
|
|
569
|
+
var castPath = require_castPath(), last = require_last(), parent = require_parent(), toKey = require_toKey();
|
|
570
|
+
function baseUnset(object, path) {
|
|
571
|
+
return path = castPath(path, object), object = parent(object, path), object == null || delete object[toKey(last(path))];
|
|
572
|
+
}
|
|
573
|
+
module2.exports = baseUnset;
|
|
574
|
+
}
|
|
575
|
+
});
|
|
576
|
+
|
|
577
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/unset.js
|
|
578
|
+
var require_unset = __commonJS({
|
|
579
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/unset.js"(exports2, module2) {
|
|
580
|
+
init_cjs_shims();
|
|
581
|
+
var baseUnset = require_baseUnset();
|
|
582
|
+
function unset2(object, path) {
|
|
583
|
+
return object == null ? !0 : baseUnset(object, path);
|
|
584
|
+
}
|
|
585
|
+
module2.exports = unset2;
|
|
586
|
+
}
|
|
587
|
+
});
|
|
588
|
+
|
|
589
|
+
// ../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/isEmpty.js
|
|
590
|
+
var require_isEmpty = __commonJS({
|
|
591
|
+
"../../node_modules/.pnpm/lodash@4.17.21/node_modules/lodash/isEmpty.js"(exports2, module2) {
|
|
592
|
+
init_cjs_shims();
|
|
593
|
+
var baseKeys = require_baseKeys(), getTag = require_getTag(), isArguments = require_isArguments(), isArray = require_isArray(), isArrayLike = require_isArrayLike(), isBuffer = require_isBuffer(), isPrototype = require_isPrototype(), isTypedArray = require_isTypedArray(), mapTag = "[object Map]", setTag = "[object Set]", objectProto = Object.prototype, hasOwnProperty = objectProto.hasOwnProperty;
|
|
594
|
+
function isEmpty2(value) {
|
|
595
|
+
if (value == null)
|
|
596
|
+
return !0;
|
|
597
|
+
if (isArrayLike(value) && (isArray(value) || typeof value == "string" || typeof value.splice == "function" || isBuffer(value) || isTypedArray(value) || isArguments(value)))
|
|
598
|
+
return !value.length;
|
|
599
|
+
var tag = getTag(value);
|
|
600
|
+
if (tag == mapTag || tag == setTag)
|
|
601
|
+
return !value.size;
|
|
602
|
+
if (isPrototype(value))
|
|
603
|
+
return !baseKeys(value).length;
|
|
604
|
+
for (var key in value)
|
|
605
|
+
if (hasOwnProperty.call(value, key))
|
|
606
|
+
return !1;
|
|
607
|
+
return !0;
|
|
608
|
+
}
|
|
609
|
+
module2.exports = isEmpty2;
|
|
610
|
+
}
|
|
611
|
+
});
|
|
612
|
+
|
|
613
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/parser.js
|
|
614
|
+
var require_parser = __commonJS({
|
|
615
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/parser.js"(exports2) {
|
|
616
|
+
"use strict";
|
|
617
|
+
init_cjs_shims();
|
|
618
|
+
exports2.load = function(received, defaults, onto = {}) {
|
|
619
|
+
var k, ref, v;
|
|
620
|
+
for (k in defaults)
|
|
621
|
+
v = defaults[k], onto[k] = (ref = received[k]) != null ? ref : v;
|
|
622
|
+
return onto;
|
|
623
|
+
};
|
|
624
|
+
exports2.overwrite = function(received, defaults, onto = {}) {
|
|
625
|
+
var k, v;
|
|
626
|
+
for (k in received)
|
|
627
|
+
v = received[k], defaults[k] !== void 0 && (onto[k] = v);
|
|
628
|
+
return onto;
|
|
629
|
+
};
|
|
630
|
+
}
|
|
631
|
+
});
|
|
632
|
+
|
|
633
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/DLList.js
|
|
634
|
+
var require_DLList = __commonJS({
|
|
635
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/DLList.js"(exports2, module2) {
|
|
636
|
+
"use strict";
|
|
637
|
+
init_cjs_shims();
|
|
638
|
+
var DLList;
|
|
639
|
+
DLList = class {
|
|
640
|
+
constructor(incr, decr) {
|
|
641
|
+
this.incr = incr, this.decr = decr, this._first = null, this._last = null, this.length = 0;
|
|
642
|
+
}
|
|
643
|
+
push(value) {
|
|
644
|
+
var node;
|
|
645
|
+
this.length++, typeof this.incr == "function" && this.incr(), node = {
|
|
646
|
+
value,
|
|
647
|
+
prev: this._last,
|
|
648
|
+
next: null
|
|
649
|
+
}, this._last != null ? (this._last.next = node, this._last = node) : this._first = this._last = node;
|
|
650
|
+
}
|
|
651
|
+
shift() {
|
|
652
|
+
var value;
|
|
653
|
+
if (this._first != null)
|
|
654
|
+
return this.length--, typeof this.decr == "function" && this.decr(), value = this._first.value, (this._first = this._first.next) != null ? this._first.prev = null : this._last = null, value;
|
|
655
|
+
}
|
|
656
|
+
first() {
|
|
657
|
+
if (this._first != null)
|
|
658
|
+
return this._first.value;
|
|
659
|
+
}
|
|
660
|
+
getArray() {
|
|
661
|
+
var node, ref, results;
|
|
662
|
+
for (node = this._first, results = []; node != null; )
|
|
663
|
+
results.push((ref = node, node = node.next, ref.value));
|
|
664
|
+
return results;
|
|
665
|
+
}
|
|
666
|
+
forEachShift(cb) {
|
|
667
|
+
var node;
|
|
668
|
+
for (node = this.shift(); node != null; )
|
|
669
|
+
cb(node), node = this.shift();
|
|
670
|
+
}
|
|
671
|
+
debug() {
|
|
672
|
+
var node, ref, ref1, ref2, results;
|
|
673
|
+
for (node = this._first, results = []; node != null; )
|
|
674
|
+
results.push((ref = node, node = node.next, {
|
|
675
|
+
value: ref.value,
|
|
676
|
+
prev: (ref1 = ref.prev) != null ? ref1.value : void 0,
|
|
677
|
+
next: (ref2 = ref.next) != null ? ref2.value : void 0
|
|
678
|
+
}));
|
|
679
|
+
return results;
|
|
680
|
+
}
|
|
681
|
+
};
|
|
682
|
+
module2.exports = DLList;
|
|
683
|
+
}
|
|
684
|
+
});
|
|
685
|
+
|
|
686
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Events.js
|
|
687
|
+
var require_Events = __commonJS({
|
|
688
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Events.js"(exports2, module2) {
|
|
689
|
+
"use strict";
|
|
690
|
+
init_cjs_shims();
|
|
691
|
+
function asyncGeneratorStep2(gen, resolve, reject, _next, _throw, key, arg) {
|
|
692
|
+
try {
|
|
693
|
+
var info = gen[key](arg), value = info.value;
|
|
694
|
+
} catch (error) {
|
|
695
|
+
reject(error);
|
|
696
|
+
return;
|
|
697
|
+
}
|
|
698
|
+
info.done ? resolve(value) : Promise.resolve(value).then(_next, _throw);
|
|
699
|
+
}
|
|
700
|
+
function _asyncToGenerator2(fn) {
|
|
701
|
+
return function() {
|
|
702
|
+
var self = this, args = arguments;
|
|
703
|
+
return new Promise(function(resolve, reject) {
|
|
704
|
+
var gen = fn.apply(self, args);
|
|
705
|
+
function _next(value) {
|
|
706
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "next", value);
|
|
707
|
+
}
|
|
708
|
+
function _throw(err2) {
|
|
709
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "throw", err2);
|
|
710
|
+
}
|
|
711
|
+
_next(void 0);
|
|
712
|
+
});
|
|
713
|
+
};
|
|
714
|
+
}
|
|
715
|
+
var Events2;
|
|
716
|
+
Events2 = class {
|
|
717
|
+
constructor(instance) {
|
|
718
|
+
if (this.instance = instance, this._events = {}, this.instance.on != null || this.instance.once != null || this.instance.removeAllListeners != null)
|
|
719
|
+
throw new Error("An Emitter already exists for this object");
|
|
720
|
+
this.instance.on = (name, cb) => this._addListener(name, "many", cb), this.instance.once = (name, cb) => this._addListener(name, "once", cb), this.instance.removeAllListeners = (name = null) => name != null ? delete this._events[name] : this._events = {};
|
|
721
|
+
}
|
|
722
|
+
_addListener(name, status, cb) {
|
|
723
|
+
var base;
|
|
724
|
+
return (base = this._events)[name] == null && (base[name] = []), this._events[name].push({
|
|
725
|
+
cb,
|
|
726
|
+
status
|
|
727
|
+
}), this.instance;
|
|
728
|
+
}
|
|
729
|
+
listenerCount(name) {
|
|
730
|
+
return this._events[name] != null ? this._events[name].length : 0;
|
|
731
|
+
}
|
|
732
|
+
trigger(name, ...args) {
|
|
733
|
+
var _this = this;
|
|
734
|
+
return _asyncToGenerator2(function* () {
|
|
735
|
+
var e, promises;
|
|
736
|
+
try {
|
|
737
|
+
return name !== "debug" && _this.trigger("debug", `Event triggered: ${name}`, args), _this._events[name] == null ? void 0 : (_this._events[name] = _this._events[name].filter(function(listener) {
|
|
738
|
+
return listener.status !== "none";
|
|
739
|
+
}), promises = _this._events[name].map(
|
|
740
|
+
/* @__PURE__ */ function() {
|
|
741
|
+
var _ref = _asyncToGenerator2(function* (listener) {
|
|
742
|
+
var e2, returned;
|
|
743
|
+
if (listener.status !== "none") {
|
|
744
|
+
listener.status === "once" && (listener.status = "none");
|
|
745
|
+
try {
|
|
746
|
+
return returned = typeof listener.cb == "function" ? listener.cb(...args) : void 0, typeof returned?.then == "function" ? yield returned : returned;
|
|
747
|
+
} catch (error) {
|
|
748
|
+
return e2 = error, _this.trigger("error", e2), null;
|
|
749
|
+
}
|
|
750
|
+
}
|
|
751
|
+
});
|
|
752
|
+
return function(_x) {
|
|
753
|
+
return _ref.apply(this, arguments);
|
|
754
|
+
};
|
|
755
|
+
}()
|
|
756
|
+
), (yield Promise.all(promises)).find(function(x) {
|
|
757
|
+
return x != null;
|
|
758
|
+
}));
|
|
759
|
+
} catch (error) {
|
|
760
|
+
return e = error, _this.trigger("error", e), null;
|
|
761
|
+
}
|
|
762
|
+
})();
|
|
763
|
+
}
|
|
764
|
+
};
|
|
765
|
+
module2.exports = Events2;
|
|
766
|
+
}
|
|
767
|
+
});
|
|
768
|
+
|
|
769
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Queues.js
|
|
770
|
+
var require_Queues = __commonJS({
|
|
771
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Queues.js"(exports2, module2) {
|
|
772
|
+
"use strict";
|
|
773
|
+
init_cjs_shims();
|
|
774
|
+
var DLList, Events2, Queues;
|
|
775
|
+
DLList = require_DLList();
|
|
776
|
+
Events2 = require_Events();
|
|
777
|
+
Queues = class {
|
|
778
|
+
constructor(num_priorities) {
|
|
779
|
+
var i;
|
|
780
|
+
this.Events = new Events2(this), this._length = 0, this._lists = function() {
|
|
781
|
+
var j, ref, results;
|
|
782
|
+
for (results = [], i = j = 1, ref = num_priorities; 1 <= ref ? j <= ref : j >= ref; i = 1 <= ref ? ++j : --j)
|
|
783
|
+
results.push(new DLList(() => this.incr(), () => this.decr()));
|
|
784
|
+
return results;
|
|
785
|
+
}.call(this);
|
|
786
|
+
}
|
|
787
|
+
incr() {
|
|
788
|
+
if (this._length++ === 0)
|
|
789
|
+
return this.Events.trigger("leftzero");
|
|
790
|
+
}
|
|
791
|
+
decr() {
|
|
792
|
+
if (--this._length === 0)
|
|
793
|
+
return this.Events.trigger("zero");
|
|
794
|
+
}
|
|
795
|
+
push(job) {
|
|
796
|
+
return this._lists[job.options.priority].push(job);
|
|
797
|
+
}
|
|
798
|
+
queued(priority) {
|
|
799
|
+
return priority != null ? this._lists[priority].length : this._length;
|
|
800
|
+
}
|
|
801
|
+
shiftAll(fn) {
|
|
802
|
+
return this._lists.forEach(function(list) {
|
|
803
|
+
return list.forEachShift(fn);
|
|
804
|
+
});
|
|
805
|
+
}
|
|
806
|
+
getFirst(arr = this._lists) {
|
|
807
|
+
var j, len, list;
|
|
808
|
+
for (j = 0, len = arr.length; j < len; j++)
|
|
809
|
+
if (list = arr[j], list.length > 0)
|
|
810
|
+
return list;
|
|
811
|
+
return [];
|
|
812
|
+
}
|
|
813
|
+
shiftLastFrom(priority) {
|
|
814
|
+
return this.getFirst(this._lists.slice(priority).reverse()).shift();
|
|
815
|
+
}
|
|
816
|
+
};
|
|
817
|
+
module2.exports = Queues;
|
|
818
|
+
}
|
|
819
|
+
});
|
|
820
|
+
|
|
821
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/BottleneckError.js
|
|
822
|
+
var require_BottleneckError = __commonJS({
|
|
823
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/BottleneckError.js"(exports2, module2) {
|
|
824
|
+
"use strict";
|
|
825
|
+
init_cjs_shims();
|
|
826
|
+
var BottleneckError;
|
|
827
|
+
BottleneckError = class extends Error {
|
|
828
|
+
};
|
|
829
|
+
module2.exports = BottleneckError;
|
|
830
|
+
}
|
|
831
|
+
});
|
|
832
|
+
|
|
833
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Job.js
|
|
834
|
+
var require_Job = __commonJS({
|
|
835
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Job.js"(exports2, module2) {
|
|
836
|
+
"use strict";
|
|
837
|
+
init_cjs_shims();
|
|
838
|
+
function asyncGeneratorStep2(gen, resolve, reject, _next, _throw, key, arg) {
|
|
839
|
+
try {
|
|
840
|
+
var info = gen[key](arg), value = info.value;
|
|
841
|
+
} catch (error) {
|
|
842
|
+
reject(error);
|
|
843
|
+
return;
|
|
844
|
+
}
|
|
845
|
+
info.done ? resolve(value) : Promise.resolve(value).then(_next, _throw);
|
|
846
|
+
}
|
|
847
|
+
function _asyncToGenerator2(fn) {
|
|
848
|
+
return function() {
|
|
849
|
+
var self = this, args = arguments;
|
|
850
|
+
return new Promise(function(resolve, reject) {
|
|
851
|
+
var gen = fn.apply(self, args);
|
|
852
|
+
function _next(value) {
|
|
853
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "next", value);
|
|
854
|
+
}
|
|
855
|
+
function _throw(err2) {
|
|
856
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "throw", err2);
|
|
857
|
+
}
|
|
858
|
+
_next(void 0);
|
|
859
|
+
});
|
|
860
|
+
};
|
|
861
|
+
}
|
|
862
|
+
var BottleneckError, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser2;
|
|
863
|
+
NUM_PRIORITIES = 10;
|
|
864
|
+
DEFAULT_PRIORITY = 5;
|
|
865
|
+
parser2 = require_parser();
|
|
866
|
+
BottleneckError = require_BottleneckError();
|
|
867
|
+
Job = class {
|
|
868
|
+
constructor(task, args, options2, jobDefaults, rejectOnDrop, Events2, _states, Promise2) {
|
|
869
|
+
this.task = task, this.args = args, this.rejectOnDrop = rejectOnDrop, this.Events = Events2, this._states = _states, this.Promise = Promise2, this.options = parser2.load(options2, jobDefaults), this.options.priority = this._sanitizePriority(this.options.priority), this.options.id === jobDefaults.id && (this.options.id = `${this.options.id}-${this._randomIndex()}`), this.promise = new this.Promise((_resolve, _reject) => {
|
|
870
|
+
this._resolve = _resolve, this._reject = _reject;
|
|
871
|
+
}), this.retryCount = 0;
|
|
872
|
+
}
|
|
873
|
+
_sanitizePriority(priority) {
|
|
874
|
+
var sProperty;
|
|
875
|
+
return sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority, sProperty < 0 ? 0 : sProperty > NUM_PRIORITIES - 1 ? NUM_PRIORITIES - 1 : sProperty;
|
|
876
|
+
}
|
|
877
|
+
_randomIndex() {
|
|
878
|
+
return Math.random().toString(36).slice(2);
|
|
879
|
+
}
|
|
880
|
+
doDrop({
|
|
881
|
+
error,
|
|
882
|
+
message = "This job has been dropped by Bottleneck"
|
|
883
|
+
} = {}) {
|
|
884
|
+
return this._states.remove(this.options.id) ? (this.rejectOnDrop && this._reject(error ?? new BottleneckError(message)), this.Events.trigger("dropped", {
|
|
885
|
+
args: this.args,
|
|
886
|
+
options: this.options,
|
|
887
|
+
task: this.task,
|
|
888
|
+
promise: this.promise
|
|
889
|
+
}), !0) : !1;
|
|
890
|
+
}
|
|
891
|
+
_assertStatus(expected) {
|
|
892
|
+
var status;
|
|
893
|
+
if (status = this._states.jobStatus(this.options.id), !(status === expected || expected === "DONE" && status === null))
|
|
894
|
+
throw new BottleneckError(`Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`);
|
|
895
|
+
}
|
|
896
|
+
doReceive() {
|
|
897
|
+
return this._states.start(this.options.id), this.Events.trigger("received", {
|
|
898
|
+
args: this.args,
|
|
899
|
+
options: this.options
|
|
900
|
+
});
|
|
901
|
+
}
|
|
902
|
+
doQueue(reachedHWM, blocked) {
|
|
903
|
+
return this._assertStatus("RECEIVED"), this._states.next(this.options.id), this.Events.trigger("queued", {
|
|
904
|
+
args: this.args,
|
|
905
|
+
options: this.options,
|
|
906
|
+
reachedHWM,
|
|
907
|
+
blocked
|
|
908
|
+
});
|
|
909
|
+
}
|
|
910
|
+
doRun() {
|
|
911
|
+
return this.retryCount === 0 ? (this._assertStatus("QUEUED"), this._states.next(this.options.id)) : this._assertStatus("EXECUTING"), this.Events.trigger("scheduled", {
|
|
912
|
+
args: this.args,
|
|
913
|
+
options: this.options
|
|
914
|
+
});
|
|
915
|
+
}
|
|
916
|
+
doExecute(chained, clearGlobalState, run, free) {
|
|
917
|
+
var _this = this;
|
|
918
|
+
return _asyncToGenerator2(function* () {
|
|
919
|
+
var error, eventInfo, passed;
|
|
920
|
+
_this.retryCount === 0 ? (_this._assertStatus("RUNNING"), _this._states.next(_this.options.id)) : _this._assertStatus("EXECUTING"), eventInfo = {
|
|
921
|
+
args: _this.args,
|
|
922
|
+
options: _this.options,
|
|
923
|
+
retryCount: _this.retryCount
|
|
924
|
+
}, _this.Events.trigger("executing", eventInfo);
|
|
925
|
+
try {
|
|
926
|
+
if (passed = yield chained != null ? chained.schedule(_this.options, _this.task, ..._this.args) : _this.task(..._this.args), clearGlobalState())
|
|
927
|
+
return _this.doDone(eventInfo), yield free(_this.options, eventInfo), _this._assertStatus("DONE"), _this._resolve(passed);
|
|
928
|
+
} catch (error1) {
|
|
929
|
+
return error = error1, _this._onFailure(error, eventInfo, clearGlobalState, run, free);
|
|
930
|
+
}
|
|
931
|
+
})();
|
|
932
|
+
}
|
|
933
|
+
doExpire(clearGlobalState, run, free) {
|
|
934
|
+
var error, eventInfo;
|
|
935
|
+
return this._states.jobStatus(this.options.id === "RUNNING") && this._states.next(this.options.id), this._assertStatus("EXECUTING"), eventInfo = {
|
|
936
|
+
args: this.args,
|
|
937
|
+
options: this.options,
|
|
938
|
+
retryCount: this.retryCount
|
|
939
|
+
}, error = new BottleneckError(`This job timed out after ${this.options.expiration} ms.`), this._onFailure(error, eventInfo, clearGlobalState, run, free);
|
|
940
|
+
}
|
|
941
|
+
_onFailure(error, eventInfo, clearGlobalState, run, free) {
|
|
942
|
+
var _this2 = this;
|
|
943
|
+
return _asyncToGenerator2(function* () {
|
|
944
|
+
var retry, retryAfter;
|
|
945
|
+
if (clearGlobalState())
|
|
946
|
+
return retry = yield _this2.Events.trigger("failed", error, eventInfo), retry != null ? (retryAfter = ~~retry, _this2.Events.trigger("retry", `Retrying ${_this2.options.id} after ${retryAfter} ms`, eventInfo), _this2.retryCount++, run(retryAfter)) : (_this2.doDone(eventInfo), yield free(_this2.options, eventInfo), _this2._assertStatus("DONE"), _this2._reject(error));
|
|
947
|
+
})();
|
|
948
|
+
}
|
|
949
|
+
doDone(eventInfo) {
|
|
950
|
+
return this._assertStatus("EXECUTING"), this._states.next(this.options.id), this.Events.trigger("done", eventInfo);
|
|
951
|
+
}
|
|
952
|
+
};
|
|
953
|
+
module2.exports = Job;
|
|
954
|
+
}
|
|
955
|
+
});
|
|
956
|
+
|
|
957
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/LocalDatastore.js
|
|
958
|
+
var require_LocalDatastore = __commonJS({
|
|
959
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/LocalDatastore.js"(exports2, module2) {
|
|
960
|
+
"use strict";
|
|
961
|
+
init_cjs_shims();
|
|
962
|
+
function asyncGeneratorStep2(gen, resolve, reject, _next, _throw, key, arg) {
|
|
963
|
+
try {
|
|
964
|
+
var info = gen[key](arg), value = info.value;
|
|
965
|
+
} catch (error) {
|
|
966
|
+
reject(error);
|
|
967
|
+
return;
|
|
968
|
+
}
|
|
969
|
+
info.done ? resolve(value) : Promise.resolve(value).then(_next, _throw);
|
|
970
|
+
}
|
|
971
|
+
function _asyncToGenerator2(fn) {
|
|
972
|
+
return function() {
|
|
973
|
+
var self = this, args = arguments;
|
|
974
|
+
return new Promise(function(resolve, reject) {
|
|
975
|
+
var gen = fn.apply(self, args);
|
|
976
|
+
function _next(value) {
|
|
977
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "next", value);
|
|
978
|
+
}
|
|
979
|
+
function _throw(err2) {
|
|
980
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "throw", err2);
|
|
981
|
+
}
|
|
982
|
+
_next(void 0);
|
|
983
|
+
});
|
|
984
|
+
};
|
|
985
|
+
}
|
|
986
|
+
var BottleneckError, LocalDatastore, parser2;
|
|
987
|
+
parser2 = require_parser();
|
|
988
|
+
BottleneckError = require_BottleneckError();
|
|
989
|
+
LocalDatastore = class {
|
|
990
|
+
constructor(instance, storeOptions, storeInstanceOptions) {
|
|
991
|
+
this.instance = instance, this.storeOptions = storeOptions, this.clientId = this.instance._randomIndex(), parser2.load(storeInstanceOptions, storeInstanceOptions, this), this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(), this._running = 0, this._done = 0, this._unblockTime = 0, this.ready = this.Promise.resolve(), this.clients = {}, this._startHeartbeat();
|
|
992
|
+
}
|
|
993
|
+
_startHeartbeat() {
|
|
994
|
+
var base;
|
|
995
|
+
return this.heartbeat == null && (this.storeOptions.reservoirRefreshInterval != null && this.storeOptions.reservoirRefreshAmount != null || this.storeOptions.reservoirIncreaseInterval != null && this.storeOptions.reservoirIncreaseAmount != null) ? typeof (base = this.heartbeat = setInterval(() => {
|
|
996
|
+
var amount, incr, maximum, now, reservoir;
|
|
997
|
+
if (now = Date.now(), this.storeOptions.reservoirRefreshInterval != null && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval && (this._lastReservoirRefresh = now, this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount, this.instance._drainAll(this.computeCapacity())), this.storeOptions.reservoirIncreaseInterval != null && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) {
|
|
998
|
+
var _this$storeOptions = this.storeOptions;
|
|
999
|
+
if (amount = _this$storeOptions.reservoirIncreaseAmount, maximum = _this$storeOptions.reservoirIncreaseMaximum, reservoir = _this$storeOptions.reservoir, this._lastReservoirIncrease = now, incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount, incr > 0)
|
|
1000
|
+
return this.storeOptions.reservoir += incr, this.instance._drainAll(this.computeCapacity());
|
|
1001
|
+
}
|
|
1002
|
+
}, this.heartbeatInterval)).unref == "function" ? base.unref() : void 0 : clearInterval(this.heartbeat);
|
|
1003
|
+
}
|
|
1004
|
+
__publish__(message) {
|
|
1005
|
+
var _this = this;
|
|
1006
|
+
return _asyncToGenerator2(function* () {
|
|
1007
|
+
return yield _this.yieldLoop(), _this.instance.Events.trigger("message", message.toString());
|
|
1008
|
+
})();
|
|
1009
|
+
}
|
|
1010
|
+
__disconnect__(flush) {
|
|
1011
|
+
var _this2 = this;
|
|
1012
|
+
return _asyncToGenerator2(function* () {
|
|
1013
|
+
return yield _this2.yieldLoop(), clearInterval(_this2.heartbeat), _this2.Promise.resolve();
|
|
1014
|
+
})();
|
|
1015
|
+
}
|
|
1016
|
+
yieldLoop(t = 0) {
|
|
1017
|
+
return new this.Promise(function(resolve, reject) {
|
|
1018
|
+
return setTimeout(resolve, t);
|
|
1019
|
+
});
|
|
1020
|
+
}
|
|
1021
|
+
computePenalty() {
|
|
1022
|
+
var ref;
|
|
1023
|
+
return (ref = this.storeOptions.penalty) != null ? ref : 15 * this.storeOptions.minTime || 5e3;
|
|
1024
|
+
}
|
|
1025
|
+
__updateSettings__(options2) {
|
|
1026
|
+
var _this3 = this;
|
|
1027
|
+
return _asyncToGenerator2(function* () {
|
|
1028
|
+
return yield _this3.yieldLoop(), parser2.overwrite(options2, options2, _this3.storeOptions), _this3._startHeartbeat(), _this3.instance._drainAll(_this3.computeCapacity()), !0;
|
|
1029
|
+
})();
|
|
1030
|
+
}
|
|
1031
|
+
__running__() {
|
|
1032
|
+
var _this4 = this;
|
|
1033
|
+
return _asyncToGenerator2(function* () {
|
|
1034
|
+
return yield _this4.yieldLoop(), _this4._running;
|
|
1035
|
+
})();
|
|
1036
|
+
}
|
|
1037
|
+
__queued__() {
var _this5 = this;
return _asyncToGenerator2(function* () {
return yield _this5.yieldLoop(), _this5.instance.queued();
})();
}
__done__() {
var _this6 = this;
return _asyncToGenerator2(function* () {
return yield _this6.yieldLoop(), _this6._done;
})();
}
__groupCheck__(time) {
var _this7 = this;
return _asyncToGenerator2(function* () {
return yield _this7.yieldLoop(), _this7._nextRequest + _this7.timeout < time;
})();
}
computeCapacity() {
var maxConcurrent, reservoir, _this$storeOptions2 = this.storeOptions;
return maxConcurrent = _this$storeOptions2.maxConcurrent, reservoir = _this$storeOptions2.reservoir, maxConcurrent != null && reservoir != null ? Math.min(maxConcurrent - this._running, reservoir) : maxConcurrent != null ? maxConcurrent - this._running : reservoir ?? null;
}
conditionsCheck(weight) {
var capacity;
return capacity = this.computeCapacity(), capacity == null || weight <= capacity;
}
__incrementReservoir__(incr) {
var _this8 = this;
return _asyncToGenerator2(function* () {
var reservoir;
return yield _this8.yieldLoop(), reservoir = _this8.storeOptions.reservoir += incr, _this8.instance._drainAll(_this8.computeCapacity()), reservoir;
})();
}
__currentReservoir__() {
var _this9 = this;
return _asyncToGenerator2(function* () {
return yield _this9.yieldLoop(), _this9.storeOptions.reservoir;
})();
}
isBlocked(now) {
return this._unblockTime >= now;
}
check(weight, now) {
return this.conditionsCheck(weight) && this._nextRequest - now <= 0;
}
__check__(weight) {
var _this10 = this;
return _asyncToGenerator2(function* () {
var now;
return yield _this10.yieldLoop(), now = Date.now(), _this10.check(weight, now);
})();
}
__register__(index, weight, expiration) {
var _this11 = this;
return _asyncToGenerator2(function* () {
var now, wait;
return yield _this11.yieldLoop(), now = Date.now(), _this11.conditionsCheck(weight) ? (_this11._running += weight, _this11.storeOptions.reservoir != null && (_this11.storeOptions.reservoir -= weight), wait = Math.max(_this11._nextRequest - now, 0), _this11._nextRequest = now + wait + _this11.storeOptions.minTime, {
success: !0,
wait,
reservoir: _this11.storeOptions.reservoir
}) : {
success: !1
};
})();
}
strategyIsBlock() {
return this.storeOptions.strategy === 3;
}
__submit__(queueLength, weight) {
var _this12 = this;
return _asyncToGenerator2(function* () {
var blocked, now, reachedHWM;
if (yield _this12.yieldLoop(), _this12.storeOptions.maxConcurrent != null && weight > _this12.storeOptions.maxConcurrent)
throw new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${_this12.storeOptions.maxConcurrent}`);
return now = Date.now(), reachedHWM = _this12.storeOptions.highWater != null && queueLength === _this12.storeOptions.highWater && !_this12.check(weight, now), blocked = _this12.strategyIsBlock() && (reachedHWM || _this12.isBlocked(now)), blocked && (_this12._unblockTime = now + _this12.computePenalty(), _this12._nextRequest = _this12._unblockTime + _this12.storeOptions.minTime, _this12.instance._dropAllQueued()), {
reachedHWM,
blocked,
strategy: _this12.storeOptions.strategy
};
})();
}
__free__(index, weight) {
var _this13 = this;
return _asyncToGenerator2(function* () {
return yield _this13.yieldLoop(), _this13._running -= weight, _this13._done += weight, _this13.instance._drainAll(_this13.computeCapacity()), {
running: _this13._running
};
})();
}
};
module2.exports = LocalDatastore;
}
});

// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/lua.json
var require_lua = __commonJS({
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/lua.json"(exports2, module2) {
module2.exports = {
"blacklist_client.lua": `local blacklist = ARGV[num_static_argv + 1]
|
|
1136
|
+
|
|
1137
|
+
if redis.call('zscore', client_last_seen_key, blacklist) then
|
|
1138
|
+
redis.call('zadd', client_last_seen_key, 0, blacklist)
|
|
1139
|
+
end
|
|
1140
|
+
|
|
1141
|
+
|
|
1142
|
+
return {}
|
|
1143
|
+
`,
|
|
1144
|
+
"check.lua": `local weight = tonumber(ARGV[num_static_argv + 1])
|
|
1145
|
+
|
|
1146
|
+
local capacity = process_tick(now, false)['capacity']
|
|
1147
|
+
local nextRequest = tonumber(redis.call('hget', settings_key, 'nextRequest'))
|
|
1148
|
+
|
|
1149
|
+
return conditions_check(capacity, weight) and nextRequest - now <= 0
|
|
1150
|
+
`,
|
|
1151
|
+
"conditions_check.lua": `local conditions_check = function (capacity, weight)
|
|
1152
|
+
return capacity == nil or weight <= capacity
|
|
1153
|
+
end
|
|
1154
|
+
`,
|
|
1155
|
+
"current_reservoir.lua": `return process_tick(now, false)['reservoir']
|
|
1156
|
+
`,
|
|
1157
|
+
"done.lua": `process_tick(now, false)
|
|
1158
|
+
|
|
1159
|
+
return tonumber(redis.call('hget', settings_key, 'done'))
|
|
1160
|
+
`,
|
|
1161
|
+
"free.lua": `local index = ARGV[num_static_argv + 1]
|
|
1162
|
+
|
|
1163
|
+
redis.call('zadd', job_expirations_key, 0, index)
|
|
1164
|
+
|
|
1165
|
+
return process_tick(now, false)['running']
|
|
1166
|
+
`,
|
|
1167
|
+
"get_time.lua": `redis.replicate_commands()
|
|
1168
|
+
|
|
1169
|
+
local get_time = function ()
|
|
1170
|
+
local time = redis.call('time')
|
|
1171
|
+
|
|
1172
|
+
return tonumber(time[1]..string.sub(time[2], 1, 3))
|
|
1173
|
+
end
|
|
1174
|
+
`,
|
|
1175
|
+
"group_check.lua": `return not (redis.call('exists', settings_key) == 1)
|
|
1176
|
+
`,
|
|
1177
|
+
"heartbeat.lua": `process_tick(now, true)
|
|
1178
|
+
`,
|
|
1179
|
+
"increment_reservoir.lua": `local incr = tonumber(ARGV[num_static_argv + 1])
|
|
1180
|
+
|
|
1181
|
+
redis.call('hincrby', settings_key, 'reservoir', incr)
|
|
1182
|
+
|
|
1183
|
+
local reservoir = process_tick(now, true)['reservoir']
|
|
1184
|
+
|
|
1185
|
+
local groupTimeout = tonumber(redis.call('hget', settings_key, 'groupTimeout'))
|
|
1186
|
+
refresh_expiration(0, 0, groupTimeout)
|
|
1187
|
+
|
|
1188
|
+
return reservoir
|
|
1189
|
+
`,
|
|
1190
|
+
"init.lua": `local clear = tonumber(ARGV[num_static_argv + 1])
|
|
1191
|
+
local limiter_version = ARGV[num_static_argv + 2]
|
|
1192
|
+
local num_local_argv = num_static_argv + 2
|
|
1193
|
+
|
|
1194
|
+
if clear == 1 then
|
|
1195
|
+
redis.call('del', unpack(KEYS))
|
|
1196
|
+
end
|
|
1197
|
+
|
|
1198
|
+
if redis.call('exists', settings_key) == 0 then
|
|
1199
|
+
-- Create
|
|
1200
|
+
local args = {'hmset', settings_key}
|
|
1201
|
+
|
|
1202
|
+
for i = num_local_argv + 1, #ARGV do
|
|
1203
|
+
table.insert(args, ARGV[i])
|
|
1204
|
+
end
|
|
1205
|
+
|
|
1206
|
+
redis.call(unpack(args))
|
|
1207
|
+
redis.call('hmset', settings_key,
|
|
1208
|
+
'nextRequest', now,
|
|
1209
|
+
'lastReservoirRefresh', now,
|
|
1210
|
+
'lastReservoirIncrease', now,
|
|
1211
|
+
'running', 0,
|
|
1212
|
+
'done', 0,
|
|
1213
|
+
'unblockTime', 0,
|
|
1214
|
+
'capacityPriorityCounter', 0
|
|
1215
|
+
)
|
|
1216
|
+
|
|
1217
|
+
else
-- Apply migrations
local settings = redis.call('hmget', settings_key,
'id',
'version'
)
local id = settings[1]
local current_version = settings[2]

if current_version ~= limiter_version then
local version_digits = {}
for k, v in string.gmatch(current_version, "([^.]+)") do
table.insert(version_digits, tonumber(k))
end

-- 2.10.0
if version_digits[2] < 10 then
redis.call('hsetnx', settings_key, 'reservoirRefreshInterval', '')
redis.call('hsetnx', settings_key, 'reservoirRefreshAmount', '')
redis.call('hsetnx', settings_key, 'lastReservoirRefresh', '')
redis.call('hsetnx', settings_key, 'done', 0)
redis.call('hset', settings_key, 'version', '2.10.0')
end

-- 2.11.1
if version_digits[2] < 11 or (version_digits[2] == 11 and version_digits[3] < 1) then
if redis.call('hstrlen', settings_key, 'lastReservoirRefresh') == 0 then
redis.call('hmset', settings_key,
'lastReservoirRefresh', now,
'version', '2.11.1'
)
end
end

-- 2.14.0
if version_digits[2] < 14 then
local old_running_key = 'b_'..id..'_running'
local old_executing_key = 'b_'..id..'_executing'

if redis.call('exists', old_running_key) == 1 then
redis.call('rename', old_running_key, job_weights_key)
end
if redis.call('exists', old_executing_key) == 1 then
redis.call('rename', old_executing_key, job_expirations_key)
end
redis.call('hset', settings_key, 'version', '2.14.0')
end

-- 2.15.2
if version_digits[2] < 15 or (version_digits[2] == 15 and version_digits[3] < 2) then
redis.call('hsetnx', settings_key, 'capacityPriorityCounter', 0)
redis.call('hset', settings_key, 'version', '2.15.2')
end

-- 2.17.0
if version_digits[2] < 17 then
redis.call('hsetnx', settings_key, 'clientTimeout', 10000)
redis.call('hset', settings_key, 'version', '2.17.0')
end

-- 2.18.0
if version_digits[2] < 18 then
redis.call('hsetnx', settings_key, 'reservoirIncreaseInterval', '')
redis.call('hsetnx', settings_key, 'reservoirIncreaseAmount', '')
redis.call('hsetnx', settings_key, 'reservoirIncreaseMaximum', '')
redis.call('hsetnx', settings_key, 'lastReservoirIncrease', now)
redis.call('hset', settings_key, 'version', '2.18.0')
end

end

process_tick(now, false)
end

local groupTimeout = tonumber(redis.call('hget', settings_key, 'groupTimeout'))
refresh_expiration(0, 0, groupTimeout)

return {}
`,
"process_tick.lua": `local process_tick = function (now, always_publish)
|
|
1297
|
+
|
|
1298
|
+
local compute_capacity = function (maxConcurrent, running, reservoir)
|
|
1299
|
+
if maxConcurrent ~= nil and reservoir ~= nil then
|
|
1300
|
+
return math.min((maxConcurrent - running), reservoir)
|
|
1301
|
+
elseif maxConcurrent ~= nil then
|
|
1302
|
+
return maxConcurrent - running
|
|
1303
|
+
elseif reservoir ~= nil then
|
|
1304
|
+
return reservoir
|
|
1305
|
+
else
|
|
1306
|
+
return nil
|
|
1307
|
+
end
|
|
1308
|
+
end
|
|
1309
|
+
|
|
1310
|
+
local settings = redis.call('hmget', settings_key,
|
|
1311
|
+
'id',
|
|
1312
|
+
'maxConcurrent',
|
|
1313
|
+
'running',
|
|
1314
|
+
'reservoir',
|
|
1315
|
+
'reservoirRefreshInterval',
|
|
1316
|
+
'reservoirRefreshAmount',
|
|
1317
|
+
'lastReservoirRefresh',
|
|
1318
|
+
'reservoirIncreaseInterval',
|
|
1319
|
+
'reservoirIncreaseAmount',
|
|
1320
|
+
'reservoirIncreaseMaximum',
|
|
1321
|
+
'lastReservoirIncrease',
|
|
1322
|
+
'capacityPriorityCounter',
|
|
1323
|
+
'clientTimeout'
|
|
1324
|
+
)
|
|
1325
|
+
local id = settings[1]
|
|
1326
|
+
local maxConcurrent = tonumber(settings[2])
|
|
1327
|
+
local running = tonumber(settings[3])
|
|
1328
|
+
local reservoir = tonumber(settings[4])
|
|
1329
|
+
local reservoirRefreshInterval = tonumber(settings[5])
|
|
1330
|
+
local reservoirRefreshAmount = tonumber(settings[6])
|
|
1331
|
+
local lastReservoirRefresh = tonumber(settings[7])
|
|
1332
|
+
local reservoirIncreaseInterval = tonumber(settings[8])
|
|
1333
|
+
local reservoirIncreaseAmount = tonumber(settings[9])
|
|
1334
|
+
local reservoirIncreaseMaximum = tonumber(settings[10])
|
|
1335
|
+
local lastReservoirIncrease = tonumber(settings[11])
|
|
1336
|
+
local capacityPriorityCounter = tonumber(settings[12])
|
|
1337
|
+
local clientTimeout = tonumber(settings[13])
|
|
1338
|
+
|
|
1339
|
+
local initial_capacity = compute_capacity(maxConcurrent, running, reservoir)
|
|
1340
|
+
|
|
1341
|
+
--
|
|
1342
|
+
-- Process 'running' changes
|
|
1343
|
+
--
|
|
1344
|
+
local expired = redis.call('zrangebyscore', job_expirations_key, '-inf', '('..now)
|
|
1345
|
+
|
|
1346
|
+
if #expired > 0 then
|
|
1347
|
+
redis.call('zremrangebyscore', job_expirations_key, '-inf', '('..now)
|
|
1348
|
+
|
|
1349
|
+
local flush_batch = function (batch, acc)
|
|
1350
|
+
local weights = redis.call('hmget', job_weights_key, unpack(batch))
|
|
1351
|
+
redis.call('hdel', job_weights_key, unpack(batch))
|
|
1352
|
+
local clients = redis.call('hmget', job_clients_key, unpack(batch))
|
|
1353
|
+
redis.call('hdel', job_clients_key, unpack(batch))
|
|
1354
|
+
|
|
1355
|
+
-- Calculate sum of removed weights
|
|
1356
|
+
for i = 1, #weights do
|
|
1357
|
+
acc['total'] = acc['total'] + (tonumber(weights[i]) or 0)
|
|
1358
|
+
end
|
|
1359
|
+
|
|
1360
|
+
-- Calculate sum of removed weights by client
|
|
1361
|
+
local client_weights = {}
|
|
1362
|
+
for i = 1, #clients do
|
|
1363
|
+
local removed = tonumber(weights[i]) or 0
|
|
1364
|
+
if removed > 0 then
|
|
1365
|
+
acc['client_weights'][clients[i]] = (acc['client_weights'][clients[i]] or 0) + removed
|
|
1366
|
+
end
|
|
1367
|
+
end
|
|
1368
|
+
end
|
|
1369
|
+
|
|
1370
|
+
local acc = {
|
|
1371
|
+
['total'] = 0,
|
|
1372
|
+
['client_weights'] = {}
|
|
1373
|
+
}
|
|
1374
|
+
local batch_size = 1000
|
|
1375
|
+
|
|
1376
|
+
-- Compute changes to Zsets and apply changes to Hashes
|
|
1377
|
+
for i = 1, #expired, batch_size do
|
|
1378
|
+
local batch = {}
|
|
1379
|
+
for j = i, math.min(i + batch_size - 1, #expired) do
|
|
1380
|
+
table.insert(batch, expired[j])
|
|
1381
|
+
end
|
|
1382
|
+
|
|
1383
|
+
flush_batch(batch, acc)
|
|
1384
|
+
end
|
|
1385
|
+
|
|
1386
|
+
-- Apply changes to Zsets
|
|
1387
|
+
if acc['total'] > 0 then
|
|
1388
|
+
redis.call('hincrby', settings_key, 'done', acc['total'])
|
|
1389
|
+
running = tonumber(redis.call('hincrby', settings_key, 'running', -acc['total']))
|
|
1390
|
+
end
|
|
1391
|
+
|
|
1392
|
+
for client, weight in pairs(acc['client_weights']) do
|
|
1393
|
+
redis.call('zincrby', client_running_key, -weight, client)
|
|
1394
|
+
end
|
|
1395
|
+
end
|
|
1396
|
+
|
|
1397
|
+
--
|
|
1398
|
+
-- Process 'reservoir' changes
|
|
1399
|
+
--
|
|
1400
|
+
local reservoirRefreshActive = reservoirRefreshInterval ~= nil and reservoirRefreshAmount ~= nil
|
|
1401
|
+
if reservoirRefreshActive and now >= lastReservoirRefresh + reservoirRefreshInterval then
|
|
1402
|
+
reservoir = reservoirRefreshAmount
|
|
1403
|
+
redis.call('hmset', settings_key,
|
|
1404
|
+
'reservoir', reservoir,
|
|
1405
|
+
'lastReservoirRefresh', now
|
|
1406
|
+
)
|
|
1407
|
+
end
|
|
1408
|
+
|
|
1409
|
+
local reservoirIncreaseActive = reservoirIncreaseInterval ~= nil and reservoirIncreaseAmount ~= nil
|
|
1410
|
+
if reservoirIncreaseActive and now >= lastReservoirIncrease + reservoirIncreaseInterval then
|
|
1411
|
+
local num_intervals = math.floor((now - lastReservoirIncrease) / reservoirIncreaseInterval)
|
|
1412
|
+
local incr = reservoirIncreaseAmount * num_intervals
|
|
1413
|
+
if reservoirIncreaseMaximum ~= nil then
|
|
1414
|
+
incr = math.min(incr, reservoirIncreaseMaximum - (reservoir or 0))
|
|
1415
|
+
end
|
|
1416
|
+
if incr > 0 then
|
|
1417
|
+
reservoir = (reservoir or 0) + incr
|
|
1418
|
+
end
|
|
1419
|
+
redis.call('hmset', settings_key,
|
|
1420
|
+
'reservoir', reservoir,
|
|
1421
|
+
'lastReservoirIncrease', lastReservoirIncrease + (num_intervals * reservoirIncreaseInterval)
|
|
1422
|
+
)
|
|
1423
|
+
end
|
|
1424
|
+
|
|
1425
|
+
--
|
|
1426
|
+
-- Clear unresponsive clients
|
|
1427
|
+
--
|
|
1428
|
+
local unresponsive = redis.call('zrangebyscore', client_last_seen_key, '-inf', (now - clientTimeout))
|
|
1429
|
+
local unresponsive_lookup = {}
|
|
1430
|
+
local terminated_clients = {}
|
|
1431
|
+
for i = 1, #unresponsive do
|
|
1432
|
+
unresponsive_lookup[unresponsive[i]] = true
|
|
1433
|
+
if tonumber(redis.call('zscore', client_running_key, unresponsive[i])) == 0 then
|
|
1434
|
+
table.insert(terminated_clients, unresponsive[i])
|
|
1435
|
+
end
|
|
1436
|
+
end
|
|
1437
|
+
if #terminated_clients > 0 then
|
|
1438
|
+
redis.call('zrem', client_running_key, unpack(terminated_clients))
|
|
1439
|
+
redis.call('hdel', client_num_queued_key, unpack(terminated_clients))
|
|
1440
|
+
redis.call('zrem', client_last_registered_key, unpack(terminated_clients))
|
|
1441
|
+
redis.call('zrem', client_last_seen_key, unpack(terminated_clients))
|
|
1442
|
+
end
|
|
1443
|
+
|
|
1444
|
+
--
|
|
1445
|
+
-- Broadcast capacity changes
|
|
1446
|
+
--
|
|
1447
|
+
local final_capacity = compute_capacity(maxConcurrent, running, reservoir)
|
|
1448
|
+
|
|
1449
|
+
if always_publish or (initial_capacity ~= nil and final_capacity == nil) then
|
|
1450
|
+
-- always_publish or was not unlimited, now unlimited
|
|
1451
|
+
redis.call('publish', 'b_'..id, 'capacity:'..(final_capacity or ''))
|
|
1452
|
+
|
|
1453
|
+
elseif initial_capacity ~= nil and final_capacity ~= nil and final_capacity > initial_capacity then
|
|
1454
|
+
-- capacity was increased
|
|
1455
|
+
-- send the capacity message to the limiter having the lowest number of running jobs
|
|
1456
|
+
-- the tiebreaker is the limiter having not registered a job in the longest time
|
|
1457
|
+
|
|
1458
|
+
local lowest_concurrency_value = nil
|
|
1459
|
+
local lowest_concurrency_clients = {}
|
|
1460
|
+
local lowest_concurrency_last_registered = {}
|
|
1461
|
+
local client_concurrencies = redis.call('zrange', client_running_key, 0, -1, 'withscores')
|
|
1462
|
+
|
|
1463
|
+
for i = 1, #client_concurrencies, 2 do
|
|
1464
|
+
local client = client_concurrencies[i]
|
|
1465
|
+
local concurrency = tonumber(client_concurrencies[i+1])
|
|
1466
|
+
|
|
1467
|
+
if (
|
|
1468
|
+
lowest_concurrency_value == nil or lowest_concurrency_value == concurrency
|
|
1469
|
+
) and (
|
|
1470
|
+
not unresponsive_lookup[client]
|
|
1471
|
+
) and (
|
|
1472
|
+
tonumber(redis.call('hget', client_num_queued_key, client)) > 0
|
|
1473
|
+
) then
|
|
1474
|
+
lowest_concurrency_value = concurrency
|
|
1475
|
+
table.insert(lowest_concurrency_clients, client)
|
|
1476
|
+
local last_registered = tonumber(redis.call('zscore', client_last_registered_key, client))
|
|
1477
|
+
table.insert(lowest_concurrency_last_registered, last_registered)
|
|
1478
|
+
end
|
|
1479
|
+
end
|
|
1480
|
+
|
|
1481
|
+
if #lowest_concurrency_clients > 0 then
|
|
1482
|
+
local position = 1
|
|
1483
|
+
local earliest = lowest_concurrency_last_registered[1]
|
|
1484
|
+
|
|
1485
|
+
for i,v in ipairs(lowest_concurrency_last_registered) do
|
|
1486
|
+
if v < earliest then
|
|
1487
|
+
position = i
|
|
1488
|
+
earliest = v
|
|
1489
|
+
end
|
|
1490
|
+
end
|
|
1491
|
+
|
|
1492
|
+
local next_client = lowest_concurrency_clients[position]
|
|
1493
|
+
redis.call('publish', 'b_'..id,
|
|
1494
|
+
'capacity-priority:'..(final_capacity or '')..
|
|
1495
|
+
':'..next_client..
|
|
1496
|
+
':'..capacityPriorityCounter
|
|
1497
|
+
)
|
|
1498
|
+
redis.call('hincrby', settings_key, 'capacityPriorityCounter', '1')
|
|
1499
|
+
else
|
|
1500
|
+
redis.call('publish', 'b_'..id, 'capacity:'..(final_capacity or ''))
|
|
1501
|
+
end
|
|
1502
|
+
end
|
|
1503
|
+
|
|
1504
|
+
return {
|
|
1505
|
+
['capacity'] = final_capacity,
|
|
1506
|
+
['running'] = running,
|
|
1507
|
+
['reservoir'] = reservoir
|
|
1508
|
+
}
|
|
1509
|
+
end
|
|
1510
|
+
`,
|
|
1511
|
+
"queued.lua": `local clientTimeout = tonumber(redis.call('hget', settings_key, 'clientTimeout'))
|
|
1512
|
+
local valid_clients = redis.call('zrangebyscore', client_last_seen_key, (now - clientTimeout), 'inf')
|
|
1513
|
+
local client_queued = redis.call('hmget', client_num_queued_key, unpack(valid_clients))
|
|
1514
|
+
|
|
1515
|
+
local sum = 0
|
|
1516
|
+
for i = 1, #client_queued do
|
|
1517
|
+
sum = sum + tonumber(client_queued[i])
|
|
1518
|
+
end
|
|
1519
|
+
|
|
1520
|
+
return sum
|
|
1521
|
+
`,
|
|
1522
|
+
"refresh_expiration.lua": `local refresh_expiration = function (now, nextRequest, groupTimeout)
|
|
1523
|
+
|
|
1524
|
+
if groupTimeout ~= nil then
|
|
1525
|
+
local ttl = (nextRequest + groupTimeout) - now
|
|
1526
|
+
|
|
1527
|
+
for i = 1, #KEYS do
|
|
1528
|
+
redis.call('pexpire', KEYS[i], ttl)
|
|
1529
|
+
end
|
|
1530
|
+
end
|
|
1531
|
+
|
|
1532
|
+
end
|
|
1533
|
+
`,
|
|
1534
|
+
"refs.lua": `local settings_key = KEYS[1]
|
|
1535
|
+
local job_weights_key = KEYS[2]
|
|
1536
|
+
local job_expirations_key = KEYS[3]
|
|
1537
|
+
local job_clients_key = KEYS[4]
|
|
1538
|
+
local client_running_key = KEYS[5]
|
|
1539
|
+
local client_num_queued_key = KEYS[6]
|
|
1540
|
+
local client_last_registered_key = KEYS[7]
|
|
1541
|
+
local client_last_seen_key = KEYS[8]
|
|
1542
|
+
|
|
1543
|
+
local now = tonumber(ARGV[1])
|
|
1544
|
+
local client = ARGV[2]
|
|
1545
|
+
|
|
1546
|
+
local num_static_argv = 2
|
|
1547
|
+
`,
|
|
1548
|
+
"register.lua": `local index = ARGV[num_static_argv + 1]
|
|
1549
|
+
local weight = tonumber(ARGV[num_static_argv + 2])
|
|
1550
|
+
local expiration = tonumber(ARGV[num_static_argv + 3])
|
|
1551
|
+
|
|
1552
|
+
local state = process_tick(now, false)
|
|
1553
|
+
local capacity = state['capacity']
|
|
1554
|
+
local reservoir = state['reservoir']
|
|
1555
|
+
|
|
1556
|
+
local settings = redis.call('hmget', settings_key,
|
|
1557
|
+
'nextRequest',
|
|
1558
|
+
'minTime',
|
|
1559
|
+
'groupTimeout'
|
|
1560
|
+
)
|
|
1561
|
+
local nextRequest = tonumber(settings[1])
|
|
1562
|
+
local minTime = tonumber(settings[2])
|
|
1563
|
+
local groupTimeout = tonumber(settings[3])
|
|
1564
|
+
|
|
1565
|
+
if conditions_check(capacity, weight) then
|
|
1566
|
+
|
|
1567
|
+
redis.call('hincrby', settings_key, 'running', weight)
|
|
1568
|
+
redis.call('hset', job_weights_key, index, weight)
|
|
1569
|
+
if expiration ~= nil then
|
|
1570
|
+
redis.call('zadd', job_expirations_key, now + expiration, index)
|
|
1571
|
+
end
|
|
1572
|
+
redis.call('hset', job_clients_key, index, client)
|
|
1573
|
+
redis.call('zincrby', client_running_key, weight, client)
|
|
1574
|
+
redis.call('hincrby', client_num_queued_key, client, -1)
|
|
1575
|
+
redis.call('zadd', client_last_registered_key, now, client)
|
|
1576
|
+
|
|
1577
|
+
local wait = math.max(nextRequest - now, 0)
|
|
1578
|
+
local newNextRequest = now + wait + minTime
|
|
1579
|
+
|
|
1580
|
+
if reservoir == nil then
|
|
1581
|
+
redis.call('hset', settings_key,
|
|
1582
|
+
'nextRequest', newNextRequest
|
|
1583
|
+
)
|
|
1584
|
+
else
|
|
1585
|
+
reservoir = reservoir - weight
|
|
1586
|
+
redis.call('hmset', settings_key,
|
|
1587
|
+
'reservoir', reservoir,
|
|
1588
|
+
'nextRequest', newNextRequest
|
|
1589
|
+
)
|
|
1590
|
+
end
|
|
1591
|
+
|
|
1592
|
+
refresh_expiration(now, newNextRequest, groupTimeout)
|
|
1593
|
+
|
|
1594
|
+
return {true, wait, reservoir}
|
|
1595
|
+
|
|
1596
|
+
else
|
|
1597
|
+
return {false}
|
|
1598
|
+
end
|
|
1599
|
+
`,
|
|
1600
|
+
"register_client.lua": `local queued = tonumber(ARGV[num_static_argv + 1])
|
|
1601
|
+
|
|
1602
|
+
-- Could have been re-registered concurrently
|
|
1603
|
+
if not redis.call('zscore', client_last_seen_key, client) then
|
|
1604
|
+
redis.call('zadd', client_running_key, 0, client)
|
|
1605
|
+
redis.call('hset', client_num_queued_key, client, queued)
|
|
1606
|
+
redis.call('zadd', client_last_registered_key, 0, client)
|
|
1607
|
+
end
|
|
1608
|
+
|
|
1609
|
+
redis.call('zadd', client_last_seen_key, now, client)
|
|
1610
|
+
|
|
1611
|
+
return {}
|
|
1612
|
+
`,
|
|
1613
|
+
"running.lua": `return process_tick(now, false)['running']
|
|
1614
|
+
`,
|
|
1615
|
+
"submit.lua": `local queueLength = tonumber(ARGV[num_static_argv + 1])
|
|
1616
|
+
local weight = tonumber(ARGV[num_static_argv + 2])
|
|
1617
|
+
|
|
1618
|
+
local capacity = process_tick(now, false)['capacity']
|
|
1619
|
+
|
|
1620
|
+
local settings = redis.call('hmget', settings_key,
|
|
1621
|
+
'id',
|
|
1622
|
+
'maxConcurrent',
|
|
1623
|
+
'highWater',
|
|
1624
|
+
'nextRequest',
|
|
1625
|
+
'strategy',
|
|
1626
|
+
'unblockTime',
|
|
1627
|
+
'penalty',
|
|
1628
|
+
'minTime',
|
|
1629
|
+
'groupTimeout'
|
|
1630
|
+
)
|
|
1631
|
+
local id = settings[1]
|
|
1632
|
+
local maxConcurrent = tonumber(settings[2])
|
|
1633
|
+
local highWater = tonumber(settings[3])
|
|
1634
|
+
local nextRequest = tonumber(settings[4])
|
|
1635
|
+
local strategy = tonumber(settings[5])
|
|
1636
|
+
local unblockTime = tonumber(settings[6])
|
|
1637
|
+
local penalty = tonumber(settings[7])
|
|
1638
|
+
local minTime = tonumber(settings[8])
|
|
1639
|
+
local groupTimeout = tonumber(settings[9])
|
|
1640
|
+
|
|
1641
|
+
if maxConcurrent ~= nil and weight > maxConcurrent then
|
|
1642
|
+
return redis.error_reply('OVERWEIGHT:'..weight..':'..maxConcurrent)
|
|
1643
|
+
end
|
|
1644
|
+
|
|
1645
|
+
local reachedHWM = (highWater ~= nil and queueLength == highWater
|
|
1646
|
+
and not (
|
|
1647
|
+
conditions_check(capacity, weight)
|
|
1648
|
+
and nextRequest - now <= 0
|
|
1649
|
+
)
|
|
1650
|
+
)
|
|
1651
|
+
|
|
1652
|
+
local blocked = strategy == 3 and (reachedHWM or unblockTime >= now)
|
|
1653
|
+
|
|
1654
|
+
if blocked then
|
|
1655
|
+
local computedPenalty = penalty
|
|
1656
|
+
if computedPenalty == nil then
|
|
1657
|
+
if minTime == 0 then
|
|
1658
|
+
computedPenalty = 5000
|
|
1659
|
+
else
|
|
1660
|
+
computedPenalty = 15 * minTime
|
|
1661
|
+
end
|
|
1662
|
+
end
|
|
1663
|
+
|
|
1664
|
+
local newNextRequest = now + computedPenalty + minTime
|
|
1665
|
+
|
|
1666
|
+
redis.call('hmset', settings_key,
|
|
1667
|
+
'unblockTime', now + computedPenalty,
|
|
1668
|
+
'nextRequest', newNextRequest
|
|
1669
|
+
)
|
|
1670
|
+
|
|
1671
|
+
local clients_queued_reset = redis.call('hkeys', client_num_queued_key)
|
|
1672
|
+
local queued_reset = {}
|
|
1673
|
+
for i = 1, #clients_queued_reset do
|
|
1674
|
+
table.insert(queued_reset, clients_queued_reset[i])
|
|
1675
|
+
table.insert(queued_reset, 0)
|
|
1676
|
+
end
|
|
1677
|
+
redis.call('hmset', client_num_queued_key, unpack(queued_reset))
|
|
1678
|
+
|
|
1679
|
+
redis.call('publish', 'b_'..id, 'blocked:')
|
|
1680
|
+
|
|
1681
|
+
refresh_expiration(now, newNextRequest, groupTimeout)
|
|
1682
|
+
end
|
|
1683
|
+
|
|
1684
|
+
if not blocked and not reachedHWM then
|
|
1685
|
+
redis.call('hincrby', client_num_queued_key, client, 1)
|
|
1686
|
+
end
|
|
1687
|
+
|
|
1688
|
+
return {reachedHWM, blocked, strategy}
|
|
1689
|
+
`,
|
|
1690
|
+
"update_settings.lua": `local args = {'hmset', settings_key}
|
|
1691
|
+
|
|
1692
|
+
for i = num_static_argv + 1, #ARGV do
|
|
1693
|
+
table.insert(args, ARGV[i])
|
|
1694
|
+
end
|
|
1695
|
+
|
|
1696
|
+
redis.call(unpack(args))
|
|
1697
|
+
|
|
1698
|
+
process_tick(now, true)
|
|
1699
|
+
|
|
1700
|
+
local groupTimeout = tonumber(redis.call('hget', settings_key, 'groupTimeout'))
|
|
1701
|
+
refresh_expiration(0, 0, groupTimeout)
|
|
1702
|
+
|
|
1703
|
+
return {}
|
|
1704
|
+
`,
|
|
1705
|
+
"validate_client.lua": `if not redis.call('zscore', client_last_seen_key, client) then
|
|
1706
|
+
return redis.error_reply('UNKNOWN_CLIENT')
|
|
1707
|
+
end
|
|
1708
|
+
|
|
1709
|
+
redis.call('zadd', client_last_seen_key, now, client)
|
|
1710
|
+
`,
|
|
1711
|
+
"validate_keys.lua": `if not (redis.call('exists', settings_key) == 1) then
|
|
1712
|
+
return redis.error_reply('SETTINGS_KEY_NOT_FOUND')
|
|
1713
|
+
end
|
|
1714
|
+
`
|
|
1715
|
+
};
|
|
1716
|
+
}
|
|
1717
|
+
});
|
|
1718
|
+
|
|
1719
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Scripts.js
|
|
1720
|
+
var require_Scripts = __commonJS({
|
|
1721
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Scripts.js"(exports2) {
|
|
1722
|
+
"use strict";
|
|
1723
|
+
init_cjs_shims();
|
|
1724
|
+
var headers, lua, templates;
|
|
1725
|
+
lua = require_lua();
|
|
1726
|
+
headers = {
|
|
1727
|
+
refs: lua["refs.lua"],
|
|
1728
|
+
validate_keys: lua["validate_keys.lua"],
|
|
1729
|
+
validate_client: lua["validate_client.lua"],
|
|
1730
|
+
refresh_expiration: lua["refresh_expiration.lua"],
|
|
1731
|
+
process_tick: lua["process_tick.lua"],
|
|
1732
|
+
conditions_check: lua["conditions_check.lua"],
|
|
1733
|
+
get_time: lua["get_time.lua"]
|
|
1734
|
+
};
|
|
1735
|
+
exports2.allKeys = function(id) {
|
|
1736
|
+
return [
|
|
1737
|
+
/*
|
|
1738
|
+
HASH
|
|
1739
|
+
*/
|
|
1740
|
+
`b_${id}_settings`,
|
|
1741
|
+
/*
|
|
1742
|
+
HASH
|
|
1743
|
+
job index -> weight
|
|
1744
|
+
*/
|
|
1745
|
+
`b_${id}_job_weights`,
|
|
1746
|
+
/*
|
|
1747
|
+
ZSET
|
|
1748
|
+
job index -> expiration
|
|
1749
|
+
*/
|
|
1750
|
+
`b_${id}_job_expirations`,
|
|
1751
|
+
/*
|
|
1752
|
+
HASH
|
|
1753
|
+
job index -> client
|
|
1754
|
+
*/
|
|
1755
|
+
`b_${id}_job_clients`,
|
|
1756
|
+
/*
|
|
1757
|
+
ZSET
|
|
1758
|
+
client -> sum running
|
|
1759
|
+
*/
|
|
1760
|
+
`b_${id}_client_running`,
|
|
1761
|
+
/*
|
|
1762
|
+
HASH
|
|
1763
|
+
client -> num queued
|
|
1764
|
+
*/
|
|
1765
|
+
`b_${id}_client_num_queued`,
|
|
1766
|
+
/*
|
|
1767
|
+
ZSET
|
|
1768
|
+
client -> last job registered
|
|
1769
|
+
*/
|
|
1770
|
+
`b_${id}_client_last_registered`,
|
|
1771
|
+
/*
|
|
1772
|
+
ZSET
|
|
1773
|
+
client -> last seen
|
|
1774
|
+
*/
|
|
1775
|
+
`b_${id}_client_last_seen`
|
|
1776
|
+
];
|
|
1777
|
+
};
|
|
1778
|
+
templates = {
|
|
1779
|
+
init: {
|
|
1780
|
+
keys: exports2.allKeys,
|
|
1781
|
+
headers: ["process_tick"],
|
|
1782
|
+
refresh_expiration: !0,
|
|
1783
|
+
code: lua["init.lua"]
|
|
1784
|
+
},
|
|
1785
|
+
group_check: {
|
|
1786
|
+
keys: exports2.allKeys,
|
|
1787
|
+
headers: [],
|
|
1788
|
+
refresh_expiration: !1,
|
|
1789
|
+
code: lua["group_check.lua"]
|
|
1790
|
+
},
|
|
1791
|
+
register_client: {
|
|
1792
|
+
keys: exports2.allKeys,
|
|
1793
|
+
headers: ["validate_keys"],
|
|
1794
|
+
refresh_expiration: !1,
|
|
1795
|
+
code: lua["register_client.lua"]
|
|
1796
|
+
},
|
|
1797
|
+
blacklist_client: {
|
|
1798
|
+
keys: exports2.allKeys,
|
|
1799
|
+
headers: ["validate_keys", "validate_client"],
|
|
1800
|
+
refresh_expiration: !1,
|
|
1801
|
+
code: lua["blacklist_client.lua"]
|
|
1802
|
+
},
|
|
1803
|
+
heartbeat: {
|
|
1804
|
+
keys: exports2.allKeys,
|
|
1805
|
+
headers: ["validate_keys", "validate_client", "process_tick"],
|
|
1806
|
+
refresh_expiration: !1,
|
|
1807
|
+
code: lua["heartbeat.lua"]
|
|
1808
|
+
},
|
|
1809
|
+
update_settings: {
|
|
1810
|
+
keys: exports2.allKeys,
|
|
1811
|
+
headers: ["validate_keys", "validate_client", "process_tick"],
|
|
1812
|
+
refresh_expiration: !0,
|
|
1813
|
+
code: lua["update_settings.lua"]
|
|
1814
|
+
},
|
|
1815
|
+
running: {
|
|
1816
|
+
keys: exports2.allKeys,
|
|
1817
|
+
headers: ["validate_keys", "validate_client", "process_tick"],
|
|
1818
|
+
refresh_expiration: !1,
|
|
1819
|
+
code: lua["running.lua"]
|
|
1820
|
+
},
|
|
1821
|
+
queued: {
|
|
1822
|
+
keys: exports2.allKeys,
|
|
1823
|
+
headers: ["validate_keys", "validate_client"],
|
|
1824
|
+
refresh_expiration: !1,
|
|
1825
|
+
code: lua["queued.lua"]
|
|
1826
|
+
},
|
|
1827
|
+
done: {
|
|
1828
|
+
keys: exports2.allKeys,
|
|
1829
|
+
headers: ["validate_keys", "validate_client", "process_tick"],
|
|
1830
|
+
refresh_expiration: !1,
|
|
1831
|
+
code: lua["done.lua"]
|
|
1832
|
+
},
|
|
1833
|
+
check: {
|
|
1834
|
+
keys: exports2.allKeys,
|
|
1835
|
+
headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"],
|
|
1836
|
+
refresh_expiration: !1,
|
|
1837
|
+
code: lua["check.lua"]
|
|
1838
|
+
},
|
|
1839
|
+
submit: {
|
|
1840
|
+
keys: exports2.allKeys,
|
|
1841
|
+
headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"],
|
|
1842
|
+
refresh_expiration: !0,
|
|
1843
|
+
code: lua["submit.lua"]
|
|
1844
|
+
},
|
|
1845
|
+
register: {
|
|
1846
|
+
keys: exports2.allKeys,
|
|
1847
|
+
headers: ["validate_keys", "validate_client", "process_tick", "conditions_check"],
|
|
1848
|
+
refresh_expiration: !0,
|
|
1849
|
+
code: lua["register.lua"]
|
|
1850
|
+
},
|
|
1851
|
+
free: {
|
|
1852
|
+
keys: exports2.allKeys,
|
|
1853
|
+
headers: ["validate_keys", "validate_client", "process_tick"],
|
|
1854
|
+
refresh_expiration: !0,
|
|
1855
|
+
code: lua["free.lua"]
|
|
1856
|
+
},
|
|
1857
|
+
current_reservoir: {
|
|
1858
|
+
keys: exports2.allKeys,
|
|
1859
|
+
headers: ["validate_keys", "validate_client", "process_tick"],
|
|
1860
|
+
refresh_expiration: !1,
|
|
1861
|
+
code: lua["current_reservoir.lua"]
|
|
1862
|
+
},
|
|
1863
|
+
increment_reservoir: {
|
|
1864
|
+
keys: exports2.allKeys,
|
|
1865
|
+
headers: ["validate_keys", "validate_client", "process_tick"],
|
|
1866
|
+
refresh_expiration: !0,
|
|
1867
|
+
code: lua["increment_reservoir.lua"]
|
|
1868
|
+
}
|
|
1869
|
+
};
|
|
1870
|
+
exports2.names = Object.keys(templates);
|
|
1871
|
+
exports2.keys = function(name, id) {
|
|
1872
|
+
return templates[name].keys(id);
|
|
1873
|
+
};
|
|
1874
|
+
exports2.payload = function(name) {
|
|
1875
|
+
var template;
|
|
1876
|
+
return template = templates[name], Array.prototype.concat(headers.refs, template.headers.map(function(h) {
|
|
1877
|
+
return headers[h];
|
|
1878
|
+
}), template.refresh_expiration ? headers.refresh_expiration : "", template.code).join(`
|
|
1879
|
+
`);
|
|
1880
|
+
};
|
|
1881
|
+
}
|
|
1882
|
+
});
|
|
1883
|
+
|
|
1884
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/RedisConnection.js
|
|
1885
|
+
var require_RedisConnection = __commonJS({
|
|
1886
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/RedisConnection.js"(exports, module) {
|
|
1887
|
+
"use strict";
|
|
1888
|
+
init_cjs_shims();
|
|
1889
|
+
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
|
|
1890
|
+
try {
|
|
1891
|
+
var info = gen[key](arg), value = info.value;
|
|
1892
|
+
} catch (error) {
|
|
1893
|
+
reject(error);
|
|
1894
|
+
return;
|
|
1895
|
+
}
|
|
1896
|
+
info.done ? resolve(value) : Promise.resolve(value).then(_next, _throw);
|
|
1897
|
+
}
|
|
1898
|
+
function _asyncToGenerator(fn) {
|
|
1899
|
+
return function() {
|
|
1900
|
+
var self = this, args = arguments;
|
|
1901
|
+
return new Promise(function(resolve, reject) {
|
|
1902
|
+
var gen = fn.apply(self, args);
|
|
1903
|
+
function _next(value) {
|
|
1904
|
+
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
|
|
1905
|
+
}
|
|
1906
|
+
function _throw(err2) {
|
|
1907
|
+
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err2);
|
|
1908
|
+
}
|
|
1909
|
+
_next(void 0);
|
|
1910
|
+
});
|
|
1911
|
+
};
|
|
1912
|
+
}
|
|
1913
|
+
var Events, RedisConnection, Scripts, parser;
|
|
1914
|
+
parser = require_parser();
|
|
1915
|
+
Events = require_Events();
|
|
1916
|
+
Scripts = require_Scripts();
|
|
1917
|
+
RedisConnection = function() {
|
|
1918
|
+
class RedisConnection {
|
|
1919
|
+
constructor(options = {}) {
|
|
1920
|
+
parser.load(options, this.defaults, this), this.Redis == null && (this.Redis = eval("require")("redis")), this.Events == null && (this.Events = new Events(this)), this.terminated = !1, this.client == null && (this.client = this.Redis.createClient(this.clientOptions)), this.subscriber = this.client.duplicate(), this.limiters = {}, this.shas = {}, this.ready = this.Promise.all([this._setup(this.client, !1), this._setup(this.subscriber, !0)]).then(() => this._loadScripts()).then(() => ({
|
|
1921
|
+
client: this.client,
|
|
1922
|
+
subscriber: this.subscriber
|
|
1923
|
+
}));
|
|
1924
|
+
}
|
|
1925
|
+
_setup(client, sub) {
|
|
1926
|
+
return client.setMaxListeners(0), new this.Promise((resolve, reject) => (client.on("error", (e) => this.Events.trigger("error", e)), sub && client.on("message", (channel, message) => {
|
|
1927
|
+
var ref;
|
|
1928
|
+
return (ref = this.limiters[channel]) != null ? ref._store.onMessage(channel, message) : void 0;
|
|
1929
|
+
}), client.ready ? resolve() : client.once("ready", resolve)));
|
|
1930
|
+
}
|
|
1931
|
+
_loadScript(name) {
|
|
1932
|
+
return new this.Promise((resolve, reject) => {
|
|
1933
|
+
var payload;
|
|
1934
|
+
return payload = Scripts.payload(name), this.client.multi([["script", "load", payload]]).exec((err2, replies) => err2 != null ? reject(err2) : (this.shas[name] = replies[0], resolve(replies[0])));
|
|
1935
|
+
});
|
|
1936
|
+
}
|
|
1937
|
+
_loadScripts() {
|
|
1938
|
+
return this.Promise.all(Scripts.names.map((k) => this._loadScript(k)));
|
|
1939
|
+
}
|
|
1940
|
+
__runCommand__(cmd) {
|
|
1941
|
+
var _this = this;
|
|
1942
|
+
return _asyncToGenerator(function* () {
|
|
1943
|
+
return yield _this.ready, new _this.Promise((resolve, reject) => _this.client.multi([cmd]).exec_atomic(function(err2, replies) {
|
|
1944
|
+
return err2 != null ? reject(err2) : resolve(replies[0]);
|
|
1945
|
+
}));
|
|
1946
|
+
})();
|
|
1947
|
+
}
|
|
1948
|
+
__addLimiter__(instance) {
|
|
1949
|
+
return this.Promise.all([instance.channel(), instance.channel_client()].map((channel) => new this.Promise((resolve, reject) => {
|
|
1950
|
+
var handler;
|
|
1951
|
+
return handler = (chan) => {
|
|
1952
|
+
if (chan === channel)
|
|
1953
|
+
return this.subscriber.removeListener("subscribe", handler), this.limiters[channel] = instance, resolve();
|
|
1954
|
+
}, this.subscriber.on("subscribe", handler), this.subscriber.subscribe(channel);
|
|
1955
|
+
})));
|
|
1956
|
+
}
|
|
1957
|
+
__removeLimiter__(instance) {
|
|
1958
|
+
var _this2 = this;
|
|
1959
|
+
return this.Promise.all([instance.channel(), instance.channel_client()].map(
|
|
1960
|
+
/* @__PURE__ */ function() {
|
|
1961
|
+
var _ref = _asyncToGenerator(function* (channel) {
|
|
1962
|
+
return _this2.terminated || (yield new _this2.Promise((resolve, reject) => _this2.subscriber.unsubscribe(channel, function(err2, chan) {
|
|
1963
|
+
if (err2 != null)
|
|
1964
|
+
return reject(err2);
|
|
1965
|
+
if (chan === channel)
|
|
1966
|
+
return resolve();
|
|
1967
|
+
}))), delete _this2.limiters[channel];
|
|
1968
|
+
});
|
|
1969
|
+
return function(_x) {
|
|
1970
|
+
return _ref.apply(this, arguments);
|
|
1971
|
+
};
|
|
1972
|
+
}()
|
|
1973
|
+
));
|
|
1974
|
+
}
|
|
1975
|
+
__scriptArgs__(name, id, args, cb) {
|
|
1976
|
+
var keys;
|
|
1977
|
+
return keys = Scripts.keys(name, id), [this.shas[name], keys.length].concat(keys, args, cb);
|
|
1978
|
+
}
|
|
1979
|
+
__scriptFn__(name) {
|
|
1980
|
+
return this.client.evalsha.bind(this.client);
|
|
1981
|
+
}
|
|
1982
|
+
disconnect(flush = !0) {
|
|
1983
|
+
var i, k, len, ref;
|
|
1984
|
+
for (ref = Object.keys(this.limiters), i = 0, len = ref.length; i < len; i++)
|
|
1985
|
+
k = ref[i], clearInterval(this.limiters[k]._store.heartbeat);
|
|
1986
|
+
return this.limiters = {}, this.terminated = !0, this.client.end(flush), this.subscriber.end(flush), this.Promise.resolve();
|
|
1987
|
+
}
|
|
1988
|
+
}
|
|
1989
|
+
return RedisConnection.prototype.datastore = "redis", RedisConnection.prototype.defaults = {
|
|
1990
|
+
Redis: null,
|
|
1991
|
+
clientOptions: {},
|
|
1992
|
+
client: null,
|
|
1993
|
+
Promise,
|
|
1994
|
+
Events: null
|
|
1995
|
+
}, RedisConnection;
|
|
1996
|
+
}.call(void 0);
|
|
1997
|
+
module.exports = RedisConnection;
|
|
1998
|
+
}
|
|
1999
|
+
});
|
|
2000
|
+
|
|
2001
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/IORedisConnection.js
|
|
2002
|
+
var require_IORedisConnection = __commonJS({
|
|
2003
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/IORedisConnection.js"(exports, module) {
|
|
2004
|
+
"use strict";
|
|
2005
|
+
init_cjs_shims();
|
|
2006
|
+
function _slicedToArray(arr, i) {
|
|
2007
|
+
return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest();
|
|
2008
|
+
}
|
|
2009
|
+
function _nonIterableRest() {
|
|
2010
|
+
throw new TypeError("Invalid attempt to destructure non-iterable instance");
|
|
2011
|
+
}
|
|
2012
|
+
function _iterableToArrayLimit(arr, i) {
|
|
2013
|
+
var _arr = [], _n = !0, _d = !1, _e = void 0;
|
|
2014
|
+
try {
|
|
2015
|
+
for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done) && (_arr.push(_s.value), !(i && _arr.length === i)); _n = !0)
|
|
2016
|
+
;
|
|
2017
|
+
} catch (err2) {
|
|
2018
|
+
_d = !0, _e = err2;
|
|
2019
|
+
} finally {
|
|
2020
|
+
try {
|
|
2021
|
+
!_n && _i.return != null && _i.return();
|
|
2022
|
+
} finally {
|
|
2023
|
+
if (_d) throw _e;
|
|
2024
|
+
}
|
|
2025
|
+
}
|
|
2026
|
+
return _arr;
|
|
2027
|
+
}
|
|
2028
|
+
function _arrayWithHoles(arr) {
|
|
2029
|
+
if (Array.isArray(arr)) return arr;
|
|
2030
|
+
}
|
|
2031
|
+
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) {
|
|
2032
|
+
try {
|
|
2033
|
+
var info = gen[key](arg), value = info.value;
|
|
2034
|
+
} catch (error) {
|
|
2035
|
+
reject(error);
|
|
2036
|
+
return;
|
|
2037
|
+
}
|
|
2038
|
+
info.done ? resolve(value) : Promise.resolve(value).then(_next, _throw);
|
|
2039
|
+
}
|
|
2040
|
+
function _asyncToGenerator(fn) {
|
|
2041
|
+
return function() {
|
|
2042
|
+
var self = this, args = arguments;
|
|
2043
|
+
return new Promise(function(resolve, reject) {
|
|
2044
|
+
var gen = fn.apply(self, args);
|
|
2045
|
+
function _next(value) {
|
|
2046
|
+
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value);
|
|
2047
|
+
}
|
|
2048
|
+
function _throw(err2) {
|
|
2049
|
+
asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err2);
|
|
2050
|
+
}
|
|
2051
|
+
_next(void 0);
|
|
2052
|
+
});
|
|
2053
|
+
};
|
|
2054
|
+
}
|
|
2055
|
+
var Events, IORedisConnection, Scripts, parser;
|
|
2056
|
+
parser = require_parser();
|
|
2057
|
+
Events = require_Events();
|
|
2058
|
+
Scripts = require_Scripts();
|
|
2059
|
+
IORedisConnection = function() {
|
|
2060
|
+
class IORedisConnection {
|
|
2061
|
+
constructor(options = {}) {
|
|
2062
|
+
parser.load(options, this.defaults, this), this.Redis == null && (this.Redis = eval("require")("ioredis")), this.Events == null && (this.Events = new Events(this)), this.terminated = !1, this.clusterNodes != null ? (this.client = new this.Redis.Cluster(this.clusterNodes, this.clientOptions), this.subscriber = new this.Redis.Cluster(this.clusterNodes, this.clientOptions)) : this.client != null && this.client.duplicate == null ? this.subscriber = new this.Redis.Cluster(this.client.startupNodes, this.client.options) : (this.client == null && (this.client = new this.Redis(this.clientOptions)), this.subscriber = this.client.duplicate()), this.limiters = {}, this.ready = this.Promise.all([this._setup(this.client, !1), this._setup(this.subscriber, !0)]).then(() => (this._loadScripts(), {
|
|
2063
|
+
client: this.client,
|
|
2064
|
+
subscriber: this.subscriber
|
|
2065
|
+
}));
|
|
2066
|
+
}
|
|
2067
|
+
_setup(client, sub) {
|
|
2068
|
+
return client.setMaxListeners(0), new this.Promise((resolve, reject) => (client.on("error", (e) => this.Events.trigger("error", e)), sub && client.on("message", (channel, message) => {
|
|
2069
|
+
var ref;
|
|
2070
|
+
return (ref = this.limiters[channel]) != null ? ref._store.onMessage(channel, message) : void 0;
|
|
2071
|
+
}), client.status === "ready" ? resolve() : client.once("ready", resolve)));
|
|
2072
|
+
}
|
|
2073
|
+
_loadScripts() {
|
|
2074
|
+
return Scripts.names.forEach((name) => this.client.defineCommand(name, {
|
|
2075
|
+
lua: Scripts.payload(name)
|
|
2076
|
+
}));
|
|
2077
|
+
}
|
|
2078
|
+
__runCommand__(cmd) {
|
|
2079
|
+
var _this = this;
|
|
2080
|
+
return _asyncToGenerator(function* () {
|
|
2081
|
+
var _, deleted;
|
|
2082
|
+
yield _this.ready;
|
|
2083
|
+
var _ref = yield _this.client.pipeline([cmd]).exec(), _ref2 = _slicedToArray(_ref, 1), _ref2$ = _slicedToArray(_ref2[0], 2);
|
|
2084
|
+
return _ = _ref2$[0], deleted = _ref2$[1], deleted;
|
|
2085
|
+
})();
|
|
2086
|
+
}
|
|
2087
|
+
__addLimiter__(instance) {
|
|
2088
|
+
return this.Promise.all([instance.channel(), instance.channel_client()].map((channel) => new this.Promise((resolve, reject) => this.subscriber.subscribe(channel, () => (this.limiters[channel] = instance, resolve())))));
|
|
2089
|
+
}
|
|
2090
|
+
__removeLimiter__(instance) {
|
|
2091
|
+
var _this2 = this;
|
|
2092
|
+
return [instance.channel(), instance.channel_client()].forEach(
|
|
2093
|
+
/* @__PURE__ */ function() {
|
|
2094
|
+
var _ref3 = _asyncToGenerator(function* (channel) {
|
|
2095
|
+
return _this2.terminated || (yield _this2.subscriber.unsubscribe(channel)), delete _this2.limiters[channel];
|
|
2096
|
+
});
|
|
2097
|
+
return function(_x) {
|
|
2098
|
+
return _ref3.apply(this, arguments);
|
|
2099
|
+
};
|
|
2100
|
+
}()
|
|
2101
|
+
);
|
|
2102
|
+
}
|
|
2103
|
+
__scriptArgs__(name, id, args, cb) {
|
|
2104
|
+
var keys;
|
|
2105
|
+
return keys = Scripts.keys(name, id), [keys.length].concat(keys, args, cb);
|
|
2106
|
+
}
|
|
2107
|
+
__scriptFn__(name) {
|
|
2108
|
+
return this.client[name].bind(this.client);
|
|
2109
|
+
}
|
|
2110
|
+
disconnect(flush = !0) {
|
|
2111
|
+
var i, k, len, ref;
|
|
2112
|
+
for (ref = Object.keys(this.limiters), i = 0, len = ref.length; i < len; i++)
|
|
2113
|
+
k = ref[i], clearInterval(this.limiters[k]._store.heartbeat);
|
|
2114
|
+
return this.limiters = {}, this.terminated = !0, flush ? this.Promise.all([this.client.quit(), this.subscriber.quit()]) : (this.client.disconnect(), this.subscriber.disconnect(), this.Promise.resolve());
|
|
2115
|
+
}
|
|
2116
|
+
}
|
|
2117
|
+
return IORedisConnection.prototype.datastore = "ioredis", IORedisConnection.prototype.defaults = {
|
|
2118
|
+
Redis: null,
|
|
2119
|
+
clientOptions: {},
|
|
2120
|
+
clusterNodes: null,
|
|
2121
|
+
client: null,
|
|
2122
|
+
Promise,
|
|
2123
|
+
Events: null
|
|
2124
|
+
}, IORedisConnection;
|
|
2125
|
+
}.call(void 0);
|
|
2126
|
+
module.exports = IORedisConnection;
|
|
2127
|
+
}
|
|
2128
|
+
});
|
|
2129
|
+
|
|
2130
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/RedisDatastore.js
|
|
2131
|
+
var require_RedisDatastore = __commonJS({
|
|
2132
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/RedisDatastore.js"(exports2, module2) {
|
|
2133
|
+
"use strict";
|
|
2134
|
+
init_cjs_shims();
|
|
2135
|
+
function _slicedToArray2(arr, i) {
|
|
2136
|
+
return _arrayWithHoles2(arr) || _iterableToArrayLimit2(arr, i) || _nonIterableRest2();
|
|
2137
|
+
}
|
|
2138
|
+
function _nonIterableRest2() {
|
|
2139
|
+
throw new TypeError("Invalid attempt to destructure non-iterable instance");
|
|
2140
|
+
}
|
|
2141
|
+
function _iterableToArrayLimit2(arr, i) {
|
|
2142
|
+
var _arr = [], _n = !0, _d = !1, _e = void 0;
|
|
2143
|
+
try {
|
|
2144
|
+
for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done) && (_arr.push(_s.value), !(i && _arr.length === i)); _n = !0)
|
|
2145
|
+
;
|
|
2146
|
+
} catch (err2) {
|
|
2147
|
+
_d = !0, _e = err2;
|
|
2148
|
+
} finally {
|
|
2149
|
+
try {
|
|
2150
|
+
!_n && _i.return != null && _i.return();
|
|
2151
|
+
} finally {
|
|
2152
|
+
if (_d) throw _e;
|
|
2153
|
+
}
|
|
2154
|
+
}
|
|
2155
|
+
return _arr;
|
|
2156
|
+
}
|
|
2157
|
+
function _arrayWithHoles2(arr) {
|
|
2158
|
+
if (Array.isArray(arr)) return arr;
|
|
2159
|
+
}
|
|
2160
|
+
function asyncGeneratorStep2(gen, resolve, reject, _next, _throw, key, arg) {
|
|
2161
|
+
try {
|
|
2162
|
+
var info = gen[key](arg), value = info.value;
|
|
2163
|
+
} catch (error) {
|
|
2164
|
+
reject(error);
|
|
2165
|
+
return;
|
|
2166
|
+
}
|
|
2167
|
+
info.done ? resolve(value) : Promise.resolve(value).then(_next, _throw);
|
|
2168
|
+
}
|
|
2169
|
+
function _asyncToGenerator2(fn) {
|
|
2170
|
+
return function() {
|
|
2171
|
+
var self = this, args = arguments;
|
|
2172
|
+
return new Promise(function(resolve, reject) {
|
|
2173
|
+
var gen = fn.apply(self, args);
|
|
2174
|
+
function _next(value) {
|
|
2175
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "next", value);
|
|
2176
|
+
}
|
|
2177
|
+
function _throw(err2) {
|
|
2178
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "throw", err2);
|
|
2179
|
+
}
|
|
2180
|
+
_next(void 0);
|
|
2181
|
+
});
|
|
2182
|
+
};
|
|
2183
|
+
}
|
|
2184
|
+
var BottleneckError, IORedisConnection2, RedisConnection2, RedisDatastore, parser2;
|
|
2185
|
+
parser2 = require_parser();
|
|
2186
|
+
BottleneckError = require_BottleneckError();
|
|
2187
|
+
RedisConnection2 = require_RedisConnection();
|
|
2188
|
+
IORedisConnection2 = require_IORedisConnection();
|
|
2189
|
+
RedisDatastore = class {
|
|
2190
|
+
constructor(instance, storeOptions, storeInstanceOptions) {
|
|
2191
|
+
this.instance = instance, this.storeOptions = storeOptions, this.originalId = this.instance.id, this.clientId = this.instance._randomIndex(), parser2.load(storeInstanceOptions, storeInstanceOptions, this), this.clients = {}, this.capacityPriorityCounters = {}, this.sharedConnection = this.connection != null, this.connection == null && (this.connection = this.instance.datastore === "redis" ? new RedisConnection2({
|
|
2192
|
+
Redis: this.Redis,
|
|
2193
|
+
clientOptions: this.clientOptions,
|
|
2194
|
+
Promise: this.Promise,
|
|
2195
|
+
Events: this.instance.Events
|
|
2196
|
+
}) : this.instance.datastore === "ioredis" ? new IORedisConnection2({
|
|
2197
|
+
Redis: this.Redis,
|
|
2198
|
+
clientOptions: this.clientOptions,
|
|
2199
|
+
clusterNodes: this.clusterNodes,
|
|
2200
|
+
Promise: this.Promise,
|
|
2201
|
+
Events: this.instance.Events
|
|
2202
|
+
}) : void 0), this.instance.connection = this.connection, this.instance.datastore = this.connection.datastore, this.ready = this.connection.ready.then((clients) => (this.clients = clients, this.runScript("init", this.prepareInitSettings(this.clearDatastore)))).then(() => this.connection.__addLimiter__(this.instance)).then(() => this.runScript("register_client", [this.instance.queued()])).then(() => {
|
|
2203
|
+
var base;
|
|
2204
|
+
return typeof (base = this.heartbeat = setInterval(() => this.runScript("heartbeat", []).catch((e) => this.instance.Events.trigger("error", e)), this.heartbeatInterval)).unref == "function" && base.unref(), this.clients;
|
|
2205
|
+
});
|
|
2206
|
+
}
|
|
2207
|
+
__publish__(message) {
|
|
2208
|
+
var _this = this;
|
|
2209
|
+
return _asyncToGenerator2(function* () {
|
|
2210
|
+
var client, _ref = yield _this.ready;
|
|
2211
|
+
return client = _ref.client, client.publish(_this.instance.channel(), `message:${message.toString()}`);
|
|
2212
|
+
})();
|
|
2213
|
+
}
|
|
2214
|
+
onMessage(channel, message) {
|
|
2215
|
+
var _this2 = this;
|
|
2216
|
+
return _asyncToGenerator2(function* () {
|
|
2217
|
+
var capacity, counter, data, drained, e, newCapacity, pos, priorityClient, rawCapacity, type;
|
|
2218
|
+
try {
|
|
2219
|
+
pos = message.indexOf(":");
|
|
2220
|
+
var _ref2 = [message.slice(0, pos), message.slice(pos + 1)];
|
|
2221
|
+
if (type = _ref2[0], data = _ref2[1], type === "capacity")
|
|
2222
|
+
return yield _this2.instance._drainAll(data.length > 0 ? ~~data : void 0);
|
|
2223
|
+
if (type === "capacity-priority") {
|
|
2224
|
+
var _data$split = data.split(":"), _data$split2 = _slicedToArray2(_data$split, 3);
|
|
2225
|
+
return rawCapacity = _data$split2[0], priorityClient = _data$split2[1], counter = _data$split2[2], capacity = rawCapacity.length > 0 ? ~~rawCapacity : void 0, priorityClient === _this2.clientId ? (drained = yield _this2.instance._drainAll(capacity), newCapacity = capacity != null ? capacity - (drained || 0) : "", yield _this2.clients.client.publish(_this2.instance.channel(), `capacity-priority:${newCapacity}::${counter}`)) : priorityClient === "" ? (clearTimeout(_this2.capacityPriorityCounters[counter]), delete _this2.capacityPriorityCounters[counter], _this2.instance._drainAll(capacity)) : _this2.capacityPriorityCounters[counter] = setTimeout(
|
|
2226
|
+
/* @__PURE__ */ _asyncToGenerator2(function* () {
|
|
2227
|
+
var e2;
|
|
2228
|
+
try {
|
|
2229
|
+
return delete _this2.capacityPriorityCounters[counter], yield _this2.runScript("blacklist_client", [priorityClient]), yield _this2.instance._drainAll(capacity);
|
|
2230
|
+
} catch (error) {
|
|
2231
|
+
return e2 = error, _this2.instance.Events.trigger("error", e2);
|
|
2232
|
+
}
|
|
2233
|
+
}),
|
|
2234
|
+
1e3
|
|
2235
|
+
);
|
|
2236
|
+
} else {
|
|
2237
|
+
if (type === "message")
|
|
2238
|
+
return _this2.instance.Events.trigger("message", data);
|
|
2239
|
+
if (type === "blocked")
|
|
2240
|
+
return yield _this2.instance._dropAllQueued();
|
|
2241
|
+
}
|
|
2242
|
+
} catch (error) {
|
|
2243
|
+
return e = error, _this2.instance.Events.trigger("error", e);
|
|
2244
|
+
}
|
|
2245
|
+
})();
|
|
2246
|
+
}
|
|
2247
|
+
__disconnect__(flush) {
|
|
2248
|
+
return clearInterval(this.heartbeat), this.sharedConnection ? this.connection.__removeLimiter__(this.instance) : this.connection.disconnect(flush);
|
|
2249
|
+
}
|
|
2250
|
+
runScript(name, args) {
|
|
2251
|
+
var _this3 = this;
|
|
2252
|
+
return _asyncToGenerator2(function* () {
|
|
2253
|
+
return name === "init" || name === "register_client" || (yield _this3.ready), new _this3.Promise((resolve, reject) => {
|
|
2254
|
+
var all_args, arr;
|
|
2255
|
+
return all_args = [Date.now(), _this3.clientId].concat(args), _this3.instance.Events.trigger("debug", `Calling Redis script: ${name}.lua`, all_args), arr = _this3.connection.__scriptArgs__(name, _this3.originalId, all_args, function(err2, replies) {
|
|
2256
|
+
return err2 != null ? reject(err2) : resolve(replies);
|
|
2257
|
+
}), _this3.connection.__scriptFn__(name)(...arr);
|
|
2258
|
+
}).catch((e) => e.message === "SETTINGS_KEY_NOT_FOUND" ? name === "heartbeat" ? _this3.Promise.resolve() : _this3.runScript("init", _this3.prepareInitSettings(!1)).then(() => _this3.runScript(name, args)) : e.message === "UNKNOWN_CLIENT" ? _this3.runScript("register_client", [_this3.instance.queued()]).then(() => _this3.runScript(name, args)) : _this3.Promise.reject(e));
|
|
2259
|
+
})();
|
|
2260
|
+
}
|
|
2261
|
+
prepareArray(arr) {
|
|
2262
|
+
var i, len, results, x;
|
|
2263
|
+
for (results = [], i = 0, len = arr.length; i < len; i++)
|
|
2264
|
+
x = arr[i], results.push(x != null ? x.toString() : "");
|
|
2265
|
+
return results;
|
|
2266
|
+
}
|
|
2267
|
+
prepareObject(obj) {
|
|
2268
|
+
var arr, k, v;
|
|
2269
|
+
arr = [];
|
|
2270
|
+
for (k in obj)
|
|
2271
|
+
v = obj[k], arr.push(k, v != null ? v.toString() : "");
|
|
2272
|
+
return arr;
|
|
2273
|
+
}
|
|
2274
|
+
prepareInitSettings(clear) {
|
|
2275
|
+
var args;
|
|
2276
|
+
return args = this.prepareObject(Object.assign({}, this.storeOptions, {
|
|
2277
|
+
id: this.originalId,
|
|
2278
|
+
version: this.instance.version,
|
|
2279
|
+
groupTimeout: this.timeout,
|
|
2280
|
+
clientTimeout: this.clientTimeout
|
|
2281
|
+
})), args.unshift(clear ? 1 : 0, this.instance.version), args;
|
|
2282
|
+
}
|
|
2283
|
+
convertBool(b) {
|
|
2284
|
+
return !!b;
|
|
2285
|
+
}
|
|
2286
|
+
__updateSettings__(options2) {
|
|
2287
|
+
var _this4 = this;
|
|
2288
|
+
return _asyncToGenerator2(function* () {
|
|
2289
|
+
return yield _this4.runScript("update_settings", _this4.prepareObject(options2)), parser2.overwrite(options2, options2, _this4.storeOptions);
|
|
2290
|
+
})();
|
|
2291
|
+
}
|
|
2292
|
+
__running__() {
|
|
2293
|
+
return this.runScript("running", []);
|
|
2294
|
+
}
|
|
2295
|
+
__queued__() {
|
|
2296
|
+
return this.runScript("queued", []);
|
|
2297
|
+
}
2298 + __done__() {
2299 + return this.runScript("done", []);
2300 + }
2301 + __groupCheck__() {
2302 + var _this5 = this;
2303 + return _asyncToGenerator2(function* () {
2304 + return _this5.convertBool(yield _this5.runScript("group_check", []));
2305 + })();
2306 + }
2307 + __incrementReservoir__(incr) {
2308 + return this.runScript("increment_reservoir", [incr]);
2309 + }
2310 + __currentReservoir__() {
2311 + return this.runScript("current_reservoir", []);
2312 + }
2313 + __check__(weight) {
2314 + var _this6 = this;
2315 + return _asyncToGenerator2(function* () {
2316 + return _this6.convertBool(yield _this6.runScript("check", _this6.prepareArray([weight])));
2317 + })();
2318 + }
2319 + __register__(index, weight, expiration) {
2320 + var _this7 = this;
2321 + return _asyncToGenerator2(function* () {
2322 + var reservoir, success, wait, _ref4 = yield _this7.runScript("register", _this7.prepareArray([index, weight, expiration])), _ref5 = _slicedToArray2(_ref4, 3);
2323 + return success = _ref5[0], wait = _ref5[1], reservoir = _ref5[2], {
2324 + success: _this7.convertBool(success),
2325 + wait,
2326 + reservoir
2327 + };
2328 + })();
2329 + }
2330 + __submit__(queueLength, weight) {
2331 + var _this8 = this;
2332 + return _asyncToGenerator2(function* () {
2333 + var blocked, e, maxConcurrent, overweight, reachedHWM, strategy;
2334 + try {
2335 + var _ref6 = yield _this8.runScript("submit", _this8.prepareArray([queueLength, weight])), _ref7 = _slicedToArray2(_ref6, 3);
2336 + return reachedHWM = _ref7[0], blocked = _ref7[1], strategy = _ref7[2], {
2337 + reachedHWM: _this8.convertBool(reachedHWM),
2338 + blocked: _this8.convertBool(blocked),
2339 + strategy
2340 + };
2341 + } catch (error) {
2342 + if (e = error, e.message.indexOf("OVERWEIGHT") === 0) {
2343 + var _e$message$split = e.message.split(":"), _e$message$split2 = _slicedToArray2(_e$message$split, 3);
2344 + throw overweight = _e$message$split2[0], weight = _e$message$split2[1], maxConcurrent = _e$message$split2[2], new BottleneckError(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${maxConcurrent}`);
2345 + } else
2346 + throw e;
2347 + }
2348 + })();
2349 + }
2350 + __free__(index, weight) {
2351 + var _this9 = this;
2352 + return _asyncToGenerator2(function* () {
2353 + var running;
2354 + return running = yield _this9.runScript("free", _this9.prepareArray([index])), {
2355 + running
2356 + };
2357 + })();
2358 + }
2359 + };
2360 + module2.exports = RedisDatastore;
2361 + }
2362 + });
2363 +
2364 + // ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/States.js
2365 + var require_States = __commonJS({
2366 + "../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/States.js"(exports2, module2) {
2367 + "use strict";
2368 + init_cjs_shims();
2369 + var BottleneckError, States;
2370 + BottleneckError = require_BottleneckError();
2371 + States = class {
2372 + constructor(status1) {
2373 + this.status = status1, this._jobs = {}, this.counts = this.status.map(function() {
2374 + return 0;
2375 + });
2376 + }
2377 + next(id) {
2378 + var current, next;
2379 + if (current = this._jobs[id], next = current + 1, current != null && next < this.status.length)
2380 + return this.counts[current]--, this.counts[next]++, this._jobs[id]++;
2381 + if (current != null)
2382 + return this.counts[current]--, delete this._jobs[id];
2383 + }
2384 + start(id) {
2385 + var initial;
2386 + return initial = 0, this._jobs[id] = initial, this.counts[initial]++;
2387 + }
2388 + remove(id) {
2389 + var current;
2390 + return current = this._jobs[id], current != null && (this.counts[current]--, delete this._jobs[id]), current != null;
2391 + }
2392 + jobStatus(id) {
2393 + var ref;
2394 + return (ref = this.status[this._jobs[id]]) != null ? ref : null;
2395 + }
2396 + statusJobs(status) {
2397 + var k, pos, ref, results, v;
2398 + if (status != null) {
2399 + if (pos = this.status.indexOf(status), pos < 0)
2400 + throw new BottleneckError(`status must be one of ${this.status.join(", ")}`);
2401 + ref = this._jobs, results = [];
2402 + for (k in ref)
2403 + v = ref[k], v === pos && results.push(k);
2404 + return results;
2405 + } else
2406 + return Object.keys(this._jobs);
2407 + }
2408 + statusCounts() {
2409 + return this.counts.reduce((acc, v, i) => (acc[this.status[i]] = v, acc), {});
2410 + }
2411 + };
2412 + module2.exports = States;
2413 + }
2414 + });
2415 +
2416 + // ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Sync.js
2417 + var require_Sync = __commonJS({
2418 + "../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Sync.js"(exports2, module2) {
2419 + "use strict";
2420 + init_cjs_shims();
2421 + function asyncGeneratorStep2(gen, resolve, reject, _next, _throw, key, arg) {
2422 + try {
2423 + var info = gen[key](arg), value = info.value;
2424 + } catch (error) {
2425 + reject(error);
2426 + return;
2427 + }
2428 + info.done ? resolve(value) : Promise.resolve(value).then(_next, _throw);
2429 + }
2430 + function _asyncToGenerator2(fn) {
2431 + return function() {
2432 + var self = this, args = arguments;
2433 + return new Promise(function(resolve, reject) {
2434 + var gen = fn.apply(self, args);
2435 + function _next(value) {
2436 + asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "next", value);
2437 + }
2438 + function _throw(err2) {
2439 + asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "throw", err2);
2440 + }
2441 + _next(void 0);
2442 + });
2443 + };
2444 + }
2445 + var DLList, Sync;
2446 + DLList = require_DLList();
2447 + Sync = class {
2448 + constructor(name, Promise2) {
2449 + this.schedule = this.schedule.bind(this), this.name = name, this.Promise = Promise2, this._running = 0, this._queue = new DLList();
2450 + }
2451 + isEmpty() {
2452 + return this._queue.length === 0;
2453 + }
2454 + _tryToRun() {
2455 + var _this = this;
2456 + return _asyncToGenerator2(function* () {
2457 + var args, cb, error, reject, resolve, returned, task;
2458 + if (_this._running < 1 && _this._queue.length > 0) {
2459 + _this._running++;
2460 + var _this$_queue$shift = _this._queue.shift();
2461 + return task = _this$_queue$shift.task, args = _this$_queue$shift.args, resolve = _this$_queue$shift.resolve, reject = _this$_queue$shift.reject, cb = yield _asyncToGenerator2(function* () {
2462 + try {
2463 + return returned = yield task(...args), function() {
2464 + return resolve(returned);
2465 + };
2466 + } catch (error1) {
2467 + return error = error1, function() {
2468 + return reject(error);
2469 + };
2470 + }
2471 + })(), _this._running--, _this._tryToRun(), cb();
2472 + }
2473 + })();
2474 + }
2475 + schedule(task, ...args) {
2476 + var promise, reject, resolve;
2477 + return resolve = reject = null, promise = new this.Promise(function(_resolve, _reject) {
2478 + return resolve = _resolve, reject = _reject;
2479 + }), this._queue.push({
2480 + task,
2481 + args,
2482 + resolve,
2483 + reject
2484 + }), this._tryToRun(), promise;
2485 + }
2486 + };
2487 + module2.exports = Sync;
2488 + }
2489 + });
2490 +
2491 + // ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/version.json
2492 + var require_version = __commonJS({
2493 + "../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/version.json"(exports2, module2) {
2494 + module2.exports = { version: "2.19.5" };
2495 + }
2496 + });
|
|
2497
|
+
|
|
2498
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Group.js
|
|
2499
|
+
var require_Group = __commonJS({
|
|
2500
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Group.js"(exports2, module2) {
|
|
2501
|
+
"use strict";
|
|
2502
|
+
init_cjs_shims();
|
|
2503
|
+
function _slicedToArray2(arr, i) {
|
|
2504
|
+
return _arrayWithHoles2(arr) || _iterableToArrayLimit2(arr, i) || _nonIterableRest2();
|
|
2505
|
+
}
|
|
2506
|
+
function _nonIterableRest2() {
|
|
2507
|
+
throw new TypeError("Invalid attempt to destructure non-iterable instance");
|
|
2508
|
+
}
|
|
2509
|
+
function _iterableToArrayLimit2(arr, i) {
|
|
2510
|
+
var _arr = [], _n = !0, _d = !1, _e = void 0;
|
|
2511
|
+
try {
|
|
2512
|
+
for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done) && (_arr.push(_s.value), !(i && _arr.length === i)); _n = !0)
|
|
2513
|
+
;
|
|
2514
|
+
} catch (err2) {
|
|
2515
|
+
_d = !0, _e = err2;
|
|
2516
|
+
} finally {
|
|
2517
|
+
try {
|
|
2518
|
+
!_n && _i.return != null && _i.return();
|
|
2519
|
+
} finally {
|
|
2520
|
+
if (_d) throw _e;
|
|
2521
|
+
}
|
|
2522
|
+
}
|
|
2523
|
+
return _arr;
|
|
2524
|
+
}
|
|
2525
|
+
function _arrayWithHoles2(arr) {
|
|
2526
|
+
if (Array.isArray(arr)) return arr;
|
|
2527
|
+
}
|
|
2528
|
+
function asyncGeneratorStep2(gen, resolve, reject, _next, _throw, key, arg) {
|
|
2529
|
+
try {
|
|
2530
|
+
var info = gen[key](arg), value = info.value;
|
|
2531
|
+
} catch (error) {
|
|
2532
|
+
reject(error);
|
|
2533
|
+
return;
|
|
2534
|
+
}
|
|
2535
|
+
info.done ? resolve(value) : Promise.resolve(value).then(_next, _throw);
|
|
2536
|
+
}
|
|
2537
|
+
function _asyncToGenerator2(fn) {
|
|
2538
|
+
return function() {
|
|
2539
|
+
var self = this, args = arguments;
|
|
2540
|
+
return new Promise(function(resolve, reject) {
|
|
2541
|
+
var gen = fn.apply(self, args);
|
|
2542
|
+
function _next(value) {
|
|
2543
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "next", value);
|
|
2544
|
+
}
|
|
2545
|
+
function _throw(err2) {
|
|
2546
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "throw", err2);
|
|
2547
|
+
}
|
|
2548
|
+
_next(void 0);
|
|
2549
|
+
});
|
|
2550
|
+
};
|
|
2551
|
+
}
|
|
2552
|
+
var Events2, Group, IORedisConnection2, RedisConnection2, Scripts2, parser2;
|
|
2553
|
+
parser2 = require_parser();
|
|
2554
|
+
Events2 = require_Events();
|
|
2555
|
+
RedisConnection2 = require_RedisConnection();
|
|
2556
|
+
IORedisConnection2 = require_IORedisConnection();
|
|
2557
|
+
Scripts2 = require_Scripts();
|
|
2558
|
+
Group = function() {
|
|
2559
|
+
class Group2 {
|
|
2560
|
+
constructor(limiterOptions = {}) {
|
|
2561
|
+
this.deleteKey = this.deleteKey.bind(this), this.limiterOptions = limiterOptions, parser2.load(this.limiterOptions, this.defaults, this), this.Events = new Events2(this), this.instances = {}, this.Bottleneck = require_Bottleneck(), this._startAutoCleanup(), this.sharedConnection = this.connection != null, this.connection == null && (this.limiterOptions.datastore === "redis" ? this.connection = new RedisConnection2(Object.assign({}, this.limiterOptions, {
|
|
2562
|
+
Events: this.Events
|
|
2563
|
+
})) : this.limiterOptions.datastore === "ioredis" && (this.connection = new IORedisConnection2(Object.assign({}, this.limiterOptions, {
|
|
2564
|
+
Events: this.Events
|
|
2565
|
+
}))));
|
|
2566
|
+
}
|
|
2567
|
+
key(key = "") {
|
|
2568
|
+
var ref;
|
|
2569
|
+
return (ref = this.instances[key]) != null ? ref : (() => {
|
|
2570
|
+
var limiter2;
|
|
2571
|
+
return limiter2 = this.instances[key] = new this.Bottleneck(Object.assign(this.limiterOptions, {
|
|
2572
|
+
id: `${this.id}-${key}`,
|
|
2573
|
+
timeout: this.timeout,
|
|
2574
|
+
connection: this.connection
|
|
2575
|
+
})), this.Events.trigger("created", limiter2, key), limiter2;
|
|
2576
|
+
})();
|
|
2577
|
+
}
|
|
2578
|
+
deleteKey(key = "") {
|
|
2579
|
+
var _this = this;
|
|
2580
|
+
return _asyncToGenerator2(function* () {
|
|
2581
|
+
var deleted, instance;
|
|
2582
|
+
return instance = _this.instances[key], _this.connection && (deleted = yield _this.connection.__runCommand__(["del", ...Scripts2.allKeys(`${_this.id}-${key}`)])), instance != null && (delete _this.instances[key], yield instance.disconnect()), instance != null || deleted > 0;
|
|
2583
|
+
})();
|
|
2584
|
+
}
|
|
2585
|
+
limiters() {
|
|
2586
|
+
var k, ref, results, v;
|
|
2587
|
+
ref = this.instances, results = [];
|
|
2588
|
+
for (k in ref)
|
|
2589
|
+
v = ref[k], results.push({
|
|
2590
|
+
key: k,
|
|
2591
|
+
limiter: v
|
|
2592
|
+
});
|
|
2593
|
+
return results;
|
|
2594
|
+
}
|
|
2595
|
+
keys() {
|
|
2596
|
+
return Object.keys(this.instances);
|
|
2597
|
+
}
|
|
2598
|
+
clusterKeys() {
|
|
2599
|
+
var _this2 = this;
|
|
2600
|
+
return _asyncToGenerator2(function* () {
|
|
2601
|
+
var cursor, end, found, i, k, keys, len, next, start;
|
|
2602
|
+
if (_this2.connection == null)
|
|
2603
|
+
return _this2.Promise.resolve(_this2.keys());
|
|
2604
|
+
for (keys = [], cursor = null, start = `b_${_this2.id}-`.length, end = 9; cursor !== 0; ) {
|
|
2605
|
+
var _ref = yield _this2.connection.__runCommand__(["scan", cursor ?? 0, "match", `b_${_this2.id}-*_settings`, "count", 1e4]), _ref2 = _slicedToArray2(_ref, 2);
|
|
2606
|
+
for (next = _ref2[0], found = _ref2[1], cursor = ~~next, i = 0, len = found.length; i < len; i++)
|
|
2607
|
+
k = found[i], keys.push(k.slice(start, -end));
|
|
2608
|
+
}
|
|
2609
|
+
return keys;
|
|
2610
|
+
})();
|
|
2611
|
+
}
|
|
2612
|
+
_startAutoCleanup() {
|
|
2613
|
+
var _this3 = this, base;
|
|
2614
|
+
return clearInterval(this.interval), typeof (base = this.interval = setInterval(
|
|
2615
|
+
/* @__PURE__ */ _asyncToGenerator2(function* () {
|
|
2616
|
+
var e, k, ref, results, time, v;
|
|
2617
|
+
time = Date.now(), ref = _this3.instances, results = [];
|
|
2618
|
+
for (k in ref) {
|
|
2619
|
+
v = ref[k];
|
|
2620
|
+
try {
|
|
2621
|
+
(yield v._store.__groupCheck__(time)) ? results.push(_this3.deleteKey(k)) : results.push(void 0);
|
|
2622
|
+
} catch (error) {
|
|
2623
|
+
e = error, results.push(v.Events.trigger("error", e));
|
|
2624
|
+
}
|
|
2625
|
+
}
|
|
2626
|
+
return results;
|
|
2627
|
+
}),
|
|
2628
|
+
this.timeout / 2
|
|
2629
|
+
)).unref == "function" ? base.unref() : void 0;
|
|
2630
|
+
}
|
|
2631
|
+
updateSettings(options2 = {}) {
|
|
2632
|
+
if (parser2.overwrite(options2, this.defaults, this), parser2.overwrite(options2, options2, this.limiterOptions), options2.timeout != null)
|
|
2633
|
+
return this._startAutoCleanup();
|
|
2634
|
+
}
|
|
2635
|
+
disconnect(flush = !0) {
|
|
2636
|
+
var ref;
|
|
2637
|
+
if (!this.sharedConnection)
|
|
2638
|
+
return (ref = this.connection) != null ? ref.disconnect(flush) : void 0;
|
|
2639
|
+
}
|
|
2640
|
+
}
|
|
2641
|
+
return Group2.prototype.defaults = {
|
|
2642
|
+
timeout: 1e3 * 60 * 5,
|
|
2643
|
+
connection: null,
|
|
2644
|
+
Promise,
|
|
2645
|
+
id: "group-key"
|
|
2646
|
+
}, Group2;
|
|
2647
|
+
}.call(void 0);
|
|
2648
|
+
module2.exports = Group;
|
|
2649
|
+
}
|
|
2650
|
+
});
|
|
2651
|
+
|
|
2652
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Batcher.js
|
|
2653
|
+
var require_Batcher = __commonJS({
|
|
2654
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Batcher.js"(exports2, module2) {
|
|
2655
|
+
"use strict";
|
|
2656
|
+
init_cjs_shims();
|
|
2657
|
+
var Batcher, Events2, parser2;
|
|
2658
|
+
parser2 = require_parser();
|
|
2659
|
+
Events2 = require_Events();
|
|
2660
|
+
Batcher = function() {
|
|
2661
|
+
class Batcher2 {
|
|
2662
|
+
constructor(options2 = {}) {
|
|
2663
|
+
this.options = options2, parser2.load(this.options, this.defaults, this), this.Events = new Events2(this), this._arr = [], this._resetPromise(), this._lastFlush = Date.now();
|
|
2664
|
+
}
|
|
2665
|
+
_resetPromise() {
|
|
2666
|
+
return this._promise = new this.Promise((res, rej) => this._resolve = res);
|
|
2667
|
+
}
|
|
2668
|
+
_flush() {
|
|
2669
|
+
return clearTimeout(this._timeout), this._lastFlush = Date.now(), this._resolve(), this.Events.trigger("batch", this._arr), this._arr = [], this._resetPromise();
|
|
2670
|
+
}
|
|
2671
|
+
add(data) {
|
|
2672
|
+
var ret;
|
|
2673
|
+
return this._arr.push(data), ret = this._promise, this._arr.length === this.maxSize ? this._flush() : this.maxTime != null && this._arr.length === 1 && (this._timeout = setTimeout(() => this._flush(), this.maxTime)), ret;
|
|
2674
|
+
}
|
|
2675
|
+
}
|
|
2676
|
+
return Batcher2.prototype.defaults = {
|
|
2677
|
+
maxTime: null,
|
|
2678
|
+
maxSize: null,
|
|
2679
|
+
Promise
|
|
2680
|
+
}, Batcher2;
|
|
2681
|
+
}.call(void 0);
|
|
2682
|
+
module2.exports = Batcher;
|
|
2683
|
+
}
|
|
2684
|
+
});
|
|
2685
|
+
|
|
2686
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Bottleneck.js
|
|
2687
|
+
var require_Bottleneck = __commonJS({
|
|
2688
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/Bottleneck.js"(exports2, module2) {
|
|
2689
|
+
"use strict";
|
|
2690
|
+
init_cjs_shims();
|
|
2691
|
+
function _slicedToArray2(arr, i) {
|
|
2692
|
+
return _arrayWithHoles2(arr) || _iterableToArrayLimit2(arr, i) || _nonIterableRest2();
|
|
2693
|
+
}
|
|
2694
|
+
function _iterableToArrayLimit2(arr, i) {
|
|
2695
|
+
var _arr = [], _n = !0, _d = !1, _e = void 0;
|
|
2696
|
+
try {
|
|
2697
|
+
for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done) && (_arr.push(_s.value), !(i && _arr.length === i)); _n = !0)
|
|
2698
|
+
;
|
|
2699
|
+
} catch (err2) {
|
|
2700
|
+
_d = !0, _e = err2;
|
|
2701
|
+
} finally {
|
|
2702
|
+
try {
|
|
2703
|
+
!_n && _i.return != null && _i.return();
|
|
2704
|
+
} finally {
|
|
2705
|
+
if (_d) throw _e;
|
|
2706
|
+
}
|
|
2707
|
+
}
|
|
2708
|
+
return _arr;
|
|
2709
|
+
}
|
|
2710
|
+
function _toArray(arr) {
|
|
2711
|
+
return _arrayWithHoles2(arr) || _iterableToArray(arr) || _nonIterableRest2();
|
|
2712
|
+
}
|
|
2713
|
+
function _nonIterableRest2() {
|
|
2714
|
+
throw new TypeError("Invalid attempt to destructure non-iterable instance");
|
|
2715
|
+
}
|
|
2716
|
+
function _iterableToArray(iter) {
|
|
2717
|
+
if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter);
|
|
2718
|
+
}
|
|
2719
|
+
function _arrayWithHoles2(arr) {
|
|
2720
|
+
if (Array.isArray(arr)) return arr;
|
|
2721
|
+
}
|
|
2722
|
+
function asyncGeneratorStep2(gen, resolve, reject, _next, _throw, key, arg) {
|
|
2723
|
+
try {
|
|
2724
|
+
var info = gen[key](arg), value = info.value;
|
|
2725
|
+
} catch (error) {
|
|
2726
|
+
reject(error);
|
|
2727
|
+
return;
|
|
2728
|
+
}
|
|
2729
|
+
info.done ? resolve(value) : Promise.resolve(value).then(_next, _throw);
|
|
2730
|
+
}
|
|
2731
|
+
function _asyncToGenerator2(fn) {
|
|
2732
|
+
return function() {
|
|
2733
|
+
var self = this, args = arguments;
|
|
2734
|
+
return new Promise(function(resolve, reject) {
|
|
2735
|
+
var gen = fn.apply(self, args);
|
|
2736
|
+
function _next(value) {
|
|
2737
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "next", value);
|
|
2738
|
+
}
|
|
2739
|
+
function _throw(err2) {
|
|
2740
|
+
asyncGeneratorStep2(gen, resolve, reject, _next, _throw, "throw", err2);
|
|
2741
|
+
}
|
|
2742
|
+
_next(void 0);
|
|
2743
|
+
});
|
|
2744
|
+
};
|
|
2745
|
+
}
|
|
2746
|
+
var Bottleneck2, DEFAULT_PRIORITY, Events2, Job, LocalDatastore, NUM_PRIORITIES, Queues, RedisDatastore, States, Sync, parser2, splice = [].splice;
|
|
2747
|
+
NUM_PRIORITIES = 10;
|
|
2748
|
+
DEFAULT_PRIORITY = 5;
|
|
2749
|
+
parser2 = require_parser();
|
|
2750
|
+
Queues = require_Queues();
|
|
2751
|
+
Job = require_Job();
|
|
2752
|
+
LocalDatastore = require_LocalDatastore();
|
|
2753
|
+
RedisDatastore = require_RedisDatastore();
|
|
2754
|
+
Events2 = require_Events();
|
|
2755
|
+
States = require_States();
|
|
2756
|
+
Sync = require_Sync();
|
|
2757
|
+
Bottleneck2 = function() {
|
|
2758
|
+
class Bottleneck3 {
|
|
2759
|
+
constructor(options2 = {}, ...invalid) {
|
|
2760
|
+
var storeInstanceOptions, storeOptions;
|
|
2761
|
+
this._addToQueue = this._addToQueue.bind(this), this._validateOptions(options2, invalid), parser2.load(options2, this.instanceDefaults, this), this._queues = new Queues(NUM_PRIORITIES), this._scheduled = {}, this._states = new States(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : [])), this._limiter = null, this.Events = new Events2(this), this._submitLock = new Sync("submit", this.Promise), this._registerLock = new Sync("register", this.Promise), storeOptions = parser2.load(options2, this.storeDefaults, {}), this._store = function() {
|
|
2762
|
+
if (this.datastore === "redis" || this.datastore === "ioredis" || this.connection != null)
|
|
2763
|
+
return storeInstanceOptions = parser2.load(options2, this.redisStoreDefaults, {}), new RedisDatastore(this, storeOptions, storeInstanceOptions);
|
|
2764
|
+
if (this.datastore === "local")
|
|
2765
|
+
return storeInstanceOptions = parser2.load(options2, this.localStoreDefaults, {}), new LocalDatastore(this, storeOptions, storeInstanceOptions);
|
|
2766
|
+
throw new Bottleneck3.prototype.BottleneckError(`Invalid datastore type: ${this.datastore}`);
|
|
2767
|
+
}.call(this), this._queues.on("leftzero", () => {
|
|
2768
|
+
var ref;
|
|
2769
|
+
return (ref = this._store.heartbeat) != null && typeof ref.ref == "function" ? ref.ref() : void 0;
|
|
2770
|
+
}), this._queues.on("zero", () => {
|
|
2771
|
+
var ref;
|
|
2772
|
+
return (ref = this._store.heartbeat) != null && typeof ref.unref == "function" ? ref.unref() : void 0;
|
|
2773
|
+
});
|
|
2774
|
+
}
|
|
2775
|
+
_validateOptions(options2, invalid) {
|
|
2776
|
+
if (!(options2 != null && typeof options2 == "object" && invalid.length === 0))
|
|
2777
|
+
throw new Bottleneck3.prototype.BottleneckError("Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1.");
|
|
2778
|
+
}
|
|
2779
|
+
ready() {
|
|
2780
|
+
return this._store.ready;
|
|
2781
|
+
}
|
|
2782
|
+
clients() {
|
|
2783
|
+
return this._store.clients;
|
|
2784
|
+
}
|
|
2785
|
+
channel() {
|
|
2786
|
+
return `b_${this.id}`;
|
|
2787
|
+
}
|
|
2788
|
+
channel_client() {
|
|
2789
|
+
return `b_${this.id}_${this._store.clientId}`;
|
|
2790
|
+
}
|
|
2791
|
+
publish(message) {
|
|
2792
|
+
return this._store.__publish__(message);
|
|
2793
|
+
}
|
|
2794
|
+
disconnect(flush = !0) {
|
|
2795
|
+
return this._store.__disconnect__(flush);
|
|
2796
|
+
}
|
|
2797
|
+
chain(_limiter) {
|
|
2798
|
+
return this._limiter = _limiter, this;
|
|
2799
|
+
}
|
|
2800
|
+
queued(priority) {
|
|
2801
|
+
return this._queues.queued(priority);
|
|
2802
|
+
}
|
|
2803
|
+
clusterQueued() {
|
|
2804
|
+
return this._store.__queued__();
|
|
2805
|
+
}
|
|
2806
|
+
empty() {
|
|
2807
|
+
return this.queued() === 0 && this._submitLock.isEmpty();
|
|
2808
|
+
}
|
|
2809
|
+
running() {
|
|
2810
|
+
return this._store.__running__();
|
|
2811
|
+
}
|
|
2812
|
+
done() {
|
|
2813
|
+
return this._store.__done__();
|
|
2814
|
+
}
|
|
2815
|
+
jobStatus(id) {
|
|
2816
|
+
return this._states.jobStatus(id);
|
|
2817
|
+
}
|
|
2818
|
+
jobs(status) {
|
|
2819
|
+
return this._states.statusJobs(status);
|
|
2820
|
+
}
|
|
2821
|
+
counts() {
|
|
2822
|
+
return this._states.statusCounts();
|
|
2823
|
+
}
|
|
2824
|
+
_randomIndex() {
|
|
2825
|
+
return Math.random().toString(36).slice(2);
|
|
2826
|
+
}
|
|
2827
|
+
check(weight = 1) {
|
|
2828
|
+
return this._store.__check__(weight);
|
|
2829
|
+
}
|
|
2830
|
+
_clearGlobalState(index) {
|
|
2831
|
+
return this._scheduled[index] != null ? (clearTimeout(this._scheduled[index].expiration), delete this._scheduled[index], !0) : !1;
|
|
2832
|
+
}
|
|
2833
|
+
_free(index, job, options2, eventInfo) {
|
|
2834
|
+
var _this = this;
|
|
2835
|
+
return _asyncToGenerator2(function* () {
|
|
2836
|
+
var e, running;
|
|
2837
|
+
try {
|
|
2838
|
+
var _ref = yield _this._store.__free__(index, options2.weight);
|
|
2839
|
+
if (running = _ref.running, _this.Events.trigger("debug", `Freed ${options2.id}`, eventInfo), running === 0 && _this.empty())
|
|
2840
|
+
return _this.Events.trigger("idle");
|
|
2841
|
+
} catch (error1) {
|
|
2842
|
+
return e = error1, _this.Events.trigger("error", e);
|
|
2843
|
+
}
|
|
2844
|
+
})();
|
|
2845
|
+
}
|
|
2846
|
+
_run(index, job, wait) {
|
|
2847
|
+
var clearGlobalState, free, run;
|
|
2848
|
+
return job.doRun(), clearGlobalState = this._clearGlobalState.bind(this, index), run = this._run.bind(this, index, job), free = this._free.bind(this, index, job), this._scheduled[index] = {
|
|
2849
|
+
timeout: setTimeout(() => job.doExecute(this._limiter, clearGlobalState, run, free), wait),
|
|
2850
|
+
expiration: job.options.expiration != null ? setTimeout(function() {
|
|
2851
|
+
return job.doExpire(clearGlobalState, run, free);
|
|
2852
|
+
}, wait + job.options.expiration) : void 0,
|
|
2853
|
+
job
|
|
2854
|
+
};
|
|
2855
|
+
}
|
|
2856
|
+
_drainOne(capacity) {
|
|
2857
|
+
return this._registerLock.schedule(() => {
|
|
2858
|
+
var args, index, next, options2, queue;
|
|
2859
|
+
if (this.queued() === 0)
|
|
2860
|
+
return this.Promise.resolve(null);
|
|
2861
|
+
queue = this._queues.getFirst();
|
|
2862
|
+
var _next2 = next = queue.first();
|
|
2863
|
+
return options2 = _next2.options, args = _next2.args, capacity != null && options2.weight > capacity ? this.Promise.resolve(null) : (this.Events.trigger("debug", `Draining ${options2.id}`, {
|
|
2864
|
+
args,
|
|
2865
|
+
options: options2
|
|
2866
|
+
}), index = this._randomIndex(), this._store.__register__(index, options2.weight, options2.expiration).then(({
|
|
2867
|
+
success,
|
|
2868
|
+
wait,
|
|
2869
|
+
reservoir
|
|
2870
|
+
}) => {
|
|
2871
|
+
var empty;
|
|
2872
|
+
return this.Events.trigger("debug", `Drained ${options2.id}`, {
|
|
2873
|
+
success,
|
|
2874
|
+
args,
|
|
2875
|
+
options: options2
|
|
2876
|
+
}), success ? (queue.shift(), empty = this.empty(), empty && this.Events.trigger("empty"), reservoir === 0 && this.Events.trigger("depleted", empty), this._run(index, next, wait), this.Promise.resolve(options2.weight)) : this.Promise.resolve(null);
|
|
2877
|
+
}));
|
|
2878
|
+
});
|
|
2879
|
+
}
|
|
2880
|
+
_drainAll(capacity, total = 0) {
|
|
2881
|
+
return this._drainOne(capacity).then((drained) => {
|
|
2882
|
+
var newCapacity;
|
|
2883
|
+
return drained != null ? (newCapacity = capacity != null ? capacity - drained : capacity, this._drainAll(newCapacity, total + drained)) : this.Promise.resolve(total);
|
|
2884
|
+
}).catch((e) => this.Events.trigger("error", e));
|
|
2885
|
+
}
|
|
2886
|
+
_dropAllQueued(message) {
|
|
2887
|
+
return this._queues.shiftAll(function(job) {
|
|
2888
|
+
return job.doDrop({
|
|
2889
|
+
message
|
|
2890
|
+
});
|
|
2891
|
+
});
|
|
2892
|
+
}
|
|
2893
|
+
stop(options2 = {}) {
|
|
2894
|
+
var done, waitForExecuting;
|
|
2895
|
+
return options2 = parser2.load(options2, this.stopDefaults), waitForExecuting = (at) => {
|
|
2896
|
+
var finished;
|
|
2897
|
+
return finished = () => {
|
|
2898
|
+
var counts;
|
|
2899
|
+
return counts = this._states.counts, counts[0] + counts[1] + counts[2] + counts[3] === at;
|
|
2900
|
+
}, new this.Promise((resolve, reject) => finished() ? resolve() : this.on("done", () => {
|
|
2901
|
+
if (finished())
|
|
2902
|
+
return this.removeAllListeners("done"), resolve();
|
|
2903
|
+
}));
|
|
2904
|
+
}, done = options2.dropWaitingJobs ? (this._run = function(index, next) {
|
|
2905
|
+
return next.doDrop({
|
|
2906
|
+
message: options2.dropErrorMessage
|
|
2907
|
+
});
|
|
2908
|
+
}, this._drainOne = () => this.Promise.resolve(null), this._registerLock.schedule(() => this._submitLock.schedule(() => {
|
|
2909
|
+
var k, ref, v;
|
|
2910
|
+
ref = this._scheduled;
|
|
2911
|
+
for (k in ref)
|
|
2912
|
+
v = ref[k], this.jobStatus(v.job.options.id) === "RUNNING" && (clearTimeout(v.timeout), clearTimeout(v.expiration), v.job.doDrop({
|
|
2913
|
+
message: options2.dropErrorMessage
|
|
2914
|
+
}));
|
|
2915
|
+
return this._dropAllQueued(options2.dropErrorMessage), waitForExecuting(0);
|
|
2916
|
+
}))) : this.schedule({
|
|
2917
|
+
priority: NUM_PRIORITIES - 1,
|
|
2918
|
+
weight: 0
|
|
2919
|
+
}, () => waitForExecuting(1)), this._receive = function(job) {
|
|
2920
|
+
return job._reject(new Bottleneck3.prototype.BottleneckError(options2.enqueueErrorMessage));
|
|
2921
|
+
}, this.stop = () => this.Promise.reject(new Bottleneck3.prototype.BottleneckError("stop() has already been called")), done;
|
|
2922
|
+
}
|
|
2923
|
+
_addToQueue(job) {
|
|
2924
|
+
var _this2 = this;
|
|
2925
|
+
return _asyncToGenerator2(function* () {
|
|
2926
|
+
var args, blocked, error, options2, reachedHWM, shifted, strategy;
|
|
2927
|
+
args = job.args, options2 = job.options;
|
|
2928
|
+
try {
|
|
2929
|
+
var _ref2 = yield _this2._store.__submit__(_this2.queued(), options2.weight);
|
|
2930
|
+
reachedHWM = _ref2.reachedHWM, blocked = _ref2.blocked, strategy = _ref2.strategy;
|
|
2931
|
+
} catch (error1) {
|
|
2932
|
+
return error = error1, _this2.Events.trigger("debug", `Could not queue ${options2.id}`, {
|
|
2933
|
+
args,
|
|
2934
|
+
options: options2,
|
|
2935
|
+
error
|
|
2936
|
+
}), job.doDrop({
|
|
2937
|
+
error
|
|
2938
|
+
}), !1;
|
|
2939
|
+
}
|
|
2940
|
+
return blocked ? (job.doDrop(), !0) : reachedHWM && (shifted = strategy === Bottleneck3.prototype.strategy.LEAK ? _this2._queues.shiftLastFrom(options2.priority) : strategy === Bottleneck3.prototype.strategy.OVERFLOW_PRIORITY ? _this2._queues.shiftLastFrom(options2.priority + 1) : strategy === Bottleneck3.prototype.strategy.OVERFLOW ? job : void 0, shifted?.doDrop(), shifted == null || strategy === Bottleneck3.prototype.strategy.OVERFLOW) ? (shifted == null && job.doDrop(), reachedHWM) : (job.doQueue(reachedHWM, blocked), _this2._queues.push(job), yield _this2._drainAll(), reachedHWM);
|
|
2941
|
+
})();
|
|
2942
|
+
}
|
|
2943
|
+
_receive(job) {
|
|
2944
|
+
return this._states.jobStatus(job.options.id) != null ? (job._reject(new Bottleneck3.prototype.BottleneckError(`A job with the same id already exists (id=${job.options.id})`)), !1) : (job.doReceive(), this._submitLock.schedule(this._addToQueue, job));
|
|
2945
|
+
}
|
|
2946
|
+
submit(...args) {
|
|
2947
|
+
var cb, fn, job, options2, ref, ref1, task;
|
|
2948
|
+
if (typeof args[0] == "function") {
|
|
2949
|
+
var _ref3, _ref4, _splice$call, _splice$call2;
|
|
2950
|
+
ref = args, _ref3 = ref, _ref4 = _toArray(_ref3), fn = _ref4[0], args = _ref4.slice(1), _splice$call = splice.call(args, -1), _splice$call2 = _slicedToArray2(_splice$call, 1), cb = _splice$call2[0], options2 = parser2.load({}, this.jobDefaults);
|
|
2951
|
+
} else {
|
|
2952
|
+
var _ref5, _ref6, _splice$call3, _splice$call4;
|
|
2953
|
+
ref1 = args, _ref5 = ref1, _ref6 = _toArray(_ref5), options2 = _ref6[0], fn = _ref6[1], args = _ref6.slice(2), _splice$call3 = splice.call(args, -1), _splice$call4 = _slicedToArray2(_splice$call3, 1), cb = _splice$call4[0], options2 = parser2.load(options2, this.jobDefaults);
|
|
2954
|
+
}
|
|
2955
|
+
return task = (...args2) => new this.Promise(function(resolve, reject) {
|
|
2956
|
+
return fn(...args2, function(...args3) {
|
|
2957
|
+
return (args3[0] != null ? reject : resolve)(args3);
|
|
2958
|
+
});
|
|
2959
|
+
}), job = new Job(task, args, options2, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise), job.promise.then(function(args2) {
|
|
2960
|
+
return typeof cb == "function" ? cb(...args2) : void 0;
|
|
2961
|
+
}).catch(function(args2) {
|
|
2962
|
+
return Array.isArray(args2) ? typeof cb == "function" ? cb(...args2) : void 0 : typeof cb == "function" ? cb(args2) : void 0;
|
|
2963
|
+
}), this._receive(job);
|
|
2964
|
+
}
|
|
2965
|
+
schedule(...args) {
|
|
2966
|
+
var job, options2, task;
|
|
2967
|
+
if (typeof args[0] == "function") {
|
|
2968
|
+
var _args = args, _args2 = _toArray(_args);
|
|
2969
|
+
task = _args2[0], args = _args2.slice(1), options2 = {};
|
|
2970
|
+
} else {
|
|
2971
|
+
var _args3 = args, _args4 = _toArray(_args3);
|
|
2972
|
+
options2 = _args4[0], task = _args4[1], args = _args4.slice(2);
|
|
2973
|
+
}
|
|
2974
|
+
return job = new Job(task, args, options2, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise), this._receive(job), job.promise;
|
|
2975
|
+
}
|
|
2976
|
+
wrap(fn) {
|
|
2977
|
+
var schedule, wrapped;
|
|
2978
|
+
return schedule = this.schedule.bind(this), wrapped = function(...args) {
|
|
2979
|
+
return schedule(fn.bind(this), ...args);
|
|
2980
|
+
}, wrapped.withOptions = function(options2, ...args) {
|
|
2981
|
+
return schedule(options2, fn, ...args);
|
|
2982
|
+
}, wrapped;
|
|
2983
|
+
}
|
|
2984
|
+
updateSettings(options2 = {}) {
|
|
2985
|
+
var _this3 = this;
|
|
2986
|
+
return _asyncToGenerator2(function* () {
|
|
2987
|
+
return yield _this3._store.__updateSettings__(parser2.overwrite(options2, _this3.storeDefaults)), parser2.overwrite(options2, _this3.instanceDefaults, _this3), _this3;
|
|
2988
|
+
})();
|
|
2989
|
+
}
|
|
2990
|
+
currentReservoir() {
|
|
2991
|
+
return this._store.__currentReservoir__();
|
|
2992
|
+
}
|
|
2993
|
+
incrementReservoir(incr = 0) {
|
|
2994
|
+
return this._store.__incrementReservoir__(incr);
|
|
2995
|
+
}
|
|
2996
|
+
}
|
|
2997
|
+
return Bottleneck3.default = Bottleneck3, Bottleneck3.Events = Events2, Bottleneck3.version = Bottleneck3.prototype.version = require_version().version, Bottleneck3.strategy = Bottleneck3.prototype.strategy = {
|
|
2998
|
+
LEAK: 1,
|
|
2999
|
+
OVERFLOW: 2,
|
|
3000
|
+
OVERFLOW_PRIORITY: 4,
|
|
3001
|
+
BLOCK: 3
|
|
3002
|
+
}, Bottleneck3.BottleneckError = Bottleneck3.prototype.BottleneckError = require_BottleneckError(), Bottleneck3.Group = Bottleneck3.prototype.Group = require_Group(), Bottleneck3.RedisConnection = Bottleneck3.prototype.RedisConnection = require_RedisConnection(), Bottleneck3.IORedisConnection = Bottleneck3.prototype.IORedisConnection = require_IORedisConnection(), Bottleneck3.Batcher = Bottleneck3.prototype.Batcher = require_Batcher(), Bottleneck3.prototype.jobDefaults = {
|
|
3003
|
+
priority: DEFAULT_PRIORITY,
|
|
3004
|
+
weight: 1,
|
|
3005
|
+
expiration: null,
|
|
3006
|
+
id: "<no-id>"
|
|
3007
|
+
}, Bottleneck3.prototype.storeDefaults = {
|
|
3008
|
+
maxConcurrent: null,
|
|
3009
|
+
minTime: 0,
|
|
3010
|
+
highWater: null,
|
|
3011
|
+
strategy: Bottleneck3.prototype.strategy.LEAK,
|
|
3012
|
+
penalty: null,
|
|
3013
|
+
reservoir: null,
|
|
3014
|
+
reservoirRefreshInterval: null,
|
|
3015
|
+
reservoirRefreshAmount: null,
|
|
3016
|
+
reservoirIncreaseInterval: null,
|
|
3017
|
+
reservoirIncreaseAmount: null,
|
|
3018
|
+
reservoirIncreaseMaximum: null
|
|
3019
|
+
}, Bottleneck3.prototype.localStoreDefaults = {
|
|
3020
|
+
Promise,
|
|
3021
|
+
timeout: null,
|
|
3022
|
+
heartbeatInterval: 250
|
|
3023
|
+
}, Bottleneck3.prototype.redisStoreDefaults = {
|
|
3024
|
+
Promise,
|
|
3025
|
+
timeout: null,
|
|
3026
|
+
heartbeatInterval: 5e3,
|
|
3027
|
+
clientTimeout: 1e4,
|
|
3028
|
+
Redis: null,
|
|
3029
|
+
clientOptions: {},
|
|
3030
|
+
clusterNodes: null,
|
|
3031
|
+
clearDatastore: !1,
|
|
3032
|
+
connection: null
|
|
3033
|
+
}, Bottleneck3.prototype.instanceDefaults = {
|
|
3034
|
+
datastore: "local",
|
|
3035
|
+
connection: null,
|
|
3036
|
+
id: "<no-id>",
|
|
3037
|
+
rejectOnDrop: !0,
|
|
3038
|
+
trackDoneStatus: !1,
|
|
3039
|
+
Promise
|
|
3040
|
+
}, Bottleneck3.prototype.stopDefaults = {
|
|
3041
|
+
enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.",
|
|
3042
|
+
dropWaitingJobs: !0,
|
|
3043
|
+
dropErrorMessage: "This limiter has been stopped."
|
|
3044
|
+
}, Bottleneck3;
|
|
3045
|
+
}.call(void 0);
|
|
3046
|
+
module2.exports = Bottleneck2;
|
|
3047
|
+
}
|
|
3048
|
+
});
|
|
3049
|
+
|
|
3050
|
+
// ../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/index.js
|
|
3051
|
+
var require_lib = __commonJS({
|
|
3052
|
+
"../../node_modules/.pnpm/bottleneck@2.19.5/node_modules/bottleneck/lib/index.js"(exports2, module2) {
|
|
3053
|
+
"use strict";
|
|
3054
|
+
init_cjs_shims();
|
|
3055
|
+
module2.exports = require_Bottleneck();
|
|
3056
|
+
}
|
|
3057
|
+
});
|
|
3058
|
+
|
|
3059
|
+
// ../cli-kit/dist/private/node/context/service.js
|
|
3060
|
+
init_cjs_shims();
|
|
3061
|
+
var Environment;
|
|
3062
|
+
(function(Environment2) {
|
|
3063
|
+
Environment2.Local = "local", Environment2.Production = "production";
|
|
3064
|
+
})(Environment || (Environment = {}));
|
|
3065
|
+
function serviceEnvironment(env = process.env) {
|
|
3066
|
+
return env[environmentVariables.serviceEnv] === "local" ? Environment.Local : Environment.Production;
|
|
3067
|
+
}
|
|
3068
|
+
function isLocalEnvironment(env = process.env) {
|
|
3069
|
+
return serviceEnvironment(env) === Environment.Local;
|
|
3070
|
+
}
|
|
3071
|
+
|
|
3072
|
+
// ../cli-kit/dist/public/node/session.js
|
|
3073
|
+
init_cjs_shims();
|
|
3074
|
+
|
|
3075
|
+
// ../cli-kit/dist/private/node/session/store.js
|
|
3076
|
+
init_cjs_shims();
|
|
3077
|
+
|
|
3078
|
+
// ../cli-kit/dist/private/node/session/schema.js
|
|
3079
|
+
init_cjs_shims();
|
|
3080
|
+
var DateSchema = z.preprocess((arg) => typeof arg == "string" || arg instanceof Date ? new Date(arg) : null, z.date()), IdentityTokenSchema = z.object({
|
|
3081
|
+
accessToken: z.string(),
|
|
3082
|
+
refreshToken: z.string(),
|
|
3083
|
+
expiresAt: DateSchema,
|
|
3084
|
+
scopes: z.array(z.string()),
|
|
3085
|
+
userId: z.string(),
|
|
3086
|
+
alias: z.string().optional()
|
|
3087
|
+
}), ApplicationTokenSchema = z.object({
|
|
3088
|
+
accessToken: z.string(),
|
|
3089
|
+
expiresAt: DateSchema,
|
|
3090
|
+
scopes: z.array(z.string()),
|
|
3091
|
+
storeFqdn: z.string().optional()
|
|
3092
|
+
}), SessionSchema = z.object({
|
|
3093
|
+
identity: IdentityTokenSchema,
|
|
3094
|
+
applications: z.object({}).catchall(ApplicationTokenSchema)
|
|
3095
|
+
}), SessionsSchema = z.object({}).catchall(z.object({}).catchall(SessionSchema));
|
|
3096
|
+
function validateCachedIdentityTokenStructure(identityToken) {
|
|
3097
|
+
return IdentityTokenSchema.safeParse(identityToken).success;
|
|
3098
|
+
}
|
|
3099
|
+
|
|
3100
|
+
// ../cli-kit/dist/public/node/context/fqdn.js
|
|
3101
|
+
init_cjs_shims();
|
|
3102
|
+
|
|
3103
|
+
// ../cli-kit/dist/public/node/vendor/dev_server/index.js
|
|
3104
|
+
init_cjs_shims();
|
|
3105
|
+
|
|
3106
|
+
// ../cli-kit/dist/public/node/vendor/dev_server/env.js
|
|
3107
|
+
init_cjs_shims();
|
|
3108
|
+
var isDevServerEnvironment = process.env.USING_DEV === "1";
|
|
3109
|
+
function assertCompatibleEnvironment() {
|
|
3110
|
+
if (!isDevServerEnvironment)
|
|
3111
|
+
throw new Error("DevServer is not supported in this environment");
|
|
3112
|
+
}
|
|
3113
|
+
|
|
3114
|
+
// ../cli-kit/dist/public/node/vendor/dev_server/dev-server.js
|
|
3115
|
+
init_cjs_shims();
|
|
3116
|
+
import fs4 from "node:fs";
|
|
3117
|
+
|
|
3118
|
+
// ../cli-kit/dist/public/node/vendor/dev_server/dev-server-2024.js
|
|
3119
|
+
init_cjs_shims();
|
|
3120
|
+
var ni = __toESM(require_network_interfaces(), 1);
|
|
3121
|
+
import fs2 from "node:fs";
|
|
3122
|
+
|
|
3123
|
+
// ../cli-kit/dist/public/node/vendor/dev_server/network/index.js
|
|
3124
|
+
init_cjs_shims();
|
|
3125
|
+
import { spawnSync } from "node:child_process";
|
|
3126
|
+
|
|
3127
|
+
// ../cli-kit/dist/public/node/vendor/dev_server/network/host.js
|
|
3128
|
+
init_cjs_shims();
|
|
3129
|
+
import fs from "node:fs";
|
|
3130
|
+
var HOSTS_FILE = "/etc/hosts", hostToIpCache = {}, lastModifiedTime = 0;
|
|
3131
|
+
function loadHostsFile() {
|
|
3132
|
+
try {
|
|
3133
|
+
let modifiedTime = fs.statSync(HOSTS_FILE).mtimeMs;
|
|
3134
|
+
if (modifiedTime === lastModifiedTime)
|
|
3135
|
+
return;
|
|
3136
|
+
let lines = fs.readFileSync(HOSTS_FILE, "utf8").split(/\r?\n/);
|
|
3137
|
+
hostToIpCache = {};
|
|
3138
|
+
for (let line of lines) {
|
|
3139
|
+
if (line.trim().startsWith("#") || line.trim() === "")
|
|
3140
|
+
continue;
|
|
3141
|
+
let matches = /^\s*(?<ipAddress>[^\s#]+)\s+(?<matchedHostName>[^\s#]+)\s*(#.*)?$/.exec(line);
|
|
3142
|
+
if (matches && matches.groups) {
|
|
3143
|
+
let { ipAddress, matchedHostName } = matches.groups;
|
|
3144
|
+
matchedHostName && ipAddress && (hostToIpCache[matchedHostName] = ipAddress);
|
|
3145
|
+
}
|
|
3146
|
+
}
|
|
3147
|
+
lastModifiedTime = modifiedTime;
|
|
3148
|
+
} catch (error) {
|
|
3149
|
+
console.log("Error reading hosts file:", error);
|
|
3150
|
+
}
|
|
3151
|
+
}
|
|
3152
|
+
function getIpFromHosts(hostname) {
|
|
3153
|
+
loadHostsFile();
|
|
3154
|
+
let ipAddress = hostToIpCache[hostname];
|
|
3155
|
+
if (ipAddress)
|
|
3156
|
+
return ipAddress;
|
|
3157
|
+
throw new Error(`No IP found for hostname: ${hostname}`);
|
|
3158
|
+
}
|
|
3159
|
+
|
|
3160
|
+
// ../cli-kit/dist/public/node/vendor/dev_server/network/index.js
|
|
3161
|
+
var DEFAULT_CONNECT_TIMEOUT = 100, checkPort;
|
|
3162
|
+
function assertConnectable(options2) {
|
|
3163
|
+
checkPort || (checkPort = getCheckPortHelper());
|
|
3164
|
+
let { port, addr, timeout = DEFAULT_CONNECT_TIMEOUT } = options2;
|
|
3165
|
+
try {
|
|
3166
|
+
if (!checkPort(addr === "localhost" ? "127.0.0.1" : addr, port, timeout))
|
|
3167
|
+
throw new Error(`DevServer for '${options2.projectName}' is not running on ${port} / ${addr}: \`dev up ${options2.projectName}\` to start it.`);
|
|
3168
|
+
} catch (err2) {
|
|
3169
|
+
throw new Error(`DevServer check for '${options2.projectName}' on ${port} / ${addr} failed (${err2})`);
|
|
3170
|
+
}
|
|
3171
|
+
}
|
|
3172
|
+
function getCheckPortHelper() {
|
|
3173
|
+
return fallbackCheckPort;
|
|
3174
|
+
}
|
|
3175
|
+
function fallbackCheckPort(address, port, timeout) {
|
|
3176
|
+
return spawnSync("nc", ["-z", "-w", "1", address, port.toString()], {
|
|
3177
|
+
timeout,
|
|
3178
|
+
stdio: "ignore"
|
|
3179
|
+
}).status === 0;
|
|
3180
|
+
}
|
|
3181
|
+
|
|
3182
|
+
// ../cli-kit/dist/public/node/vendor/dev_server/dev-server-2024.js
|
|
3183
|
+
var NON_SHOP_PREFIXES = ["app", "dev", "shopify"], BACKEND_PORT = 8080;
|
|
3184
|
+
function createServer(projectName) {
|
|
3185
|
+
return {
|
|
3186
|
+
host: (options2 = {}) => host(projectName, options2),
|
|
3187
|
+
url: (options2 = {}) => url(projectName, options2)
|
|
3188
|
+
};
|
|
3189
|
+
}
|
|
3190
|
+
function host(projectName, options2 = {}) {
|
|
3191
|
+
assertCompatibleEnvironment(), (assertRunningOverride || assertRunning2024)(projectName);
|
|
3192
|
+
let prefix = (options2.nonstandardHostPrefix || projectName).replace(/_/g, "-");
|
|
3193
|
+
if (projectName === "shopify") {
|
|
3194
|
+
if (prefix.endsWith("-dev-api"))
|
|
3195
|
+
return `${prefix.replace("-dev-api", "")}.dev-api.shop.dev`;
|
|
3196
|
+
if (!NON_SHOP_PREFIXES.includes(prefix))
|
|
3197
|
+
return `${prefix}.my.shop.dev`;
|
|
3198
|
+
}
|
|
3199
|
+
return `${prefix}.shop.dev`;
|
|
3200
|
+
}
|
|
3201
|
+
function url(projectName, options2 = {}) {
|
|
3202
|
+
return `https://${host(projectName, options2)}`;
|
|
3203
|
+
}
|
|
3204
|
+
function assertRunning2024(projectName) {
|
|
3205
|
+
assertConnectable({
|
|
3206
|
+
projectName,
|
|
3207
|
+
addr: getBackendIp(projectName),
|
|
3208
|
+
port: BACKEND_PORT
|
|
3209
|
+
});
|
|
3210
|
+
}
|
|
3211
|
+
function getBackendIp(projectName) {
|
|
3212
|
+
try {
|
|
3213
|
+
let backendIp = resolveBackendHost(projectName);
|
|
3214
|
+
return ni.fromIp(backendIp, { internal: !0, ipVersion: 4 }), backendIp;
|
|
3215
|
+
} catch {
|
|
3216
|
+
throw new Error(`DevServer for '${projectName}' is not running: \`dev up ${projectName}\` to start it.`);
|
|
3217
|
+
}
|
|
3218
|
+
}
|
|
3219
|
+
function resolveBackendHost(name) {
|
|
3220
|
+
let host3;
|
|
3221
|
+
try {
|
|
3222
|
+
host3 = fs2.readlinkSync(`/opt/nginx/etc/manifest/${name}/current`);
|
|
3223
|
+
} catch {
|
|
3224
|
+
host3 = `${name}.root.shopify.dev.internal`;
|
|
3225
|
+
}
|
|
3226
|
+
try {
|
|
3227
|
+
return getIpFromHosts(host3);
|
|
3228
|
+
} catch {
|
|
3229
|
+
return host3;
|
|
3230
|
+
}
|
|
3231
|
+
}
|
|
3232
|
+
var assertRunningOverride;
|
|
3233
|
+
|
|
3234
|
+
// ../cli-kit/dist/public/node/vendor/dev_server/dev-server-2016.js
|
|
3235
|
+
init_cjs_shims();
|
|
3236
|
+
import fs3 from "fs";
|
|
3237
|
+
import * as os from "node:os";
|
|
3238
|
+
function createServer2(projectName) {
|
|
3239
|
+
return {
|
|
3240
|
+
host: (options2 = {}) => host2(projectName, options2),
|
|
3241
|
+
url: (options2 = {}) => url2(projectName, options2)
|
|
3242
|
+
};
|
|
3243
|
+
}
|
|
3244
|
+
function host2(projectName, options2 = {}) {
|
|
3245
|
+
return assertCompatibleEnvironment(), (assertRunningOverride2 || assertRunning2016)(projectName), `${options2.nonstandardHostPrefix || projectName}.myshopify.io`;
|
|
3246
|
+
}
|
|
3247
|
+
function url2(projectName, options2 = {}) {
|
|
3248
|
+
return `https://${host2(projectName, options2)}`;
|
|
3249
|
+
}
|
|
3250
|
+
function assertRunning2016(projectName) {
|
|
3251
|
+
let [addr, port] = getAddrPort(projectName);
|
|
3252
|
+
assertConnectable({ projectName, addr, port });
|
|
3253
|
+
}
|
|
3254
|
+
function getAddrPort(name) {
|
|
3255
|
+
try {
|
|
3256
|
+
let portContent = fs3.readFileSync(`${os.homedir()}/.local/run/services/${name}/server/port`, "utf-8");
|
|
3257
|
+
return ["localhost", parseInt(portContent, 10)];
|
|
3258
|
+
} catch {
|
|
3259
|
+
throw new Error(`DevServer for '${name}' is not running: \`dev up ${name}\` to start it.`);
|
|
3260
|
+
}
|
|
3261
|
+
}
|
|
3262
|
+
var assertRunningOverride2;
|
|
3263
|
+
|
|
3264
|
+
// ../cli-kit/dist/public/node/vendor/dev_server/dev-server.js
|
|
3265
|
+
var DevServer = class {
|
|
3266
|
+
constructor(projectName) {
|
|
3267
|
+
if (this.projectName = projectName, projectName === "shopify")
|
|
3268
|
+
throw new Error("Use `import {DevServerCore}` for the 'shopify' project");
|
|
3269
|
+
this.serverImpl = inferProjectServer(projectName);
|
|
3270
|
+
}
|
|
3271
|
+
host(options2) {
|
|
3272
|
+
return this.serverImpl.host(options2);
|
|
3273
|
+
}
|
|
3274
|
+
url(options2) {
|
|
3275
|
+
return this.serverImpl.url(options2);
|
|
3276
|
+
}
|
|
3277
|
+
}, DevServerCore = class {
|
|
3278
|
+
constructor() {
|
|
3279
|
+
this.serverImpl = inferProjectServer("shopify");
|
|
3280
|
+
}
|
|
3281
|
+
host(prefix) {
|
|
3282
|
+
return this.serverImpl.host({ nonstandardHostPrefix: prefix });
|
|
3283
|
+
}
|
|
3284
|
+
url(prefix) {
|
|
3285
|
+
return this.serverImpl.url({ nonstandardHostPrefix: prefix });
|
|
3286
|
+
}
|
|
3287
|
+
}, INFERENCE_MODE_SENTINEL = "/opt/dev/misc/dev-server-inference-mode";
|
|
3288
|
+
function inferProjectServer(projectName) {
|
|
3289
|
+
return inferenceModeAndProjectIsEdition2016(projectName) ? createServer2(projectName) : createServer(projectName);
|
|
3290
|
+
}
|
|
3291
|
+
function inferenceModeAndProjectIsEdition2016(projectName) {
|
|
3292
|
+
try {
|
|
3293
|
+
fs4.accessSync(INFERENCE_MODE_SENTINEL);
|
|
3294
|
+
try {
|
|
3295
|
+
return fs4.accessSync(`/opt/nginx/etc/manifest/${projectName}/current/edition-2024`), !1;
|
|
3296
|
+
} catch {
|
|
3297
|
+
return !0;
|
|
3298
|
+
}
|
|
3299
|
+
} catch {
|
|
3300
|
+
return !1;
|
|
3301
|
+
}
|
|
3302
|
+
}
|
|
3303
|
+
|
|
3304
|
+
// ../cli-kit/dist/public/node/context/fqdn.js
|
|
3305
|
+
var NotProvidedStoreFQDNError = new AbortError("Couldn't obtain the Shopify FQDN because the store FQDN was not provided.");
|
|
3306
|
+
async function partnersFqdn() {
|
|
3307
|
+
if (blockPartnersAccess())
|
|
3308
|
+
throw new BugError("Partners API is blocked by the SHOPIFY_CLI_NEVER_USE_PARTNERS_API environment variable.");
|
|
3309
|
+
let environment = serviceEnvironment(), productionFqdn = "partners.shopify.com";
|
|
3310
|
+
switch (environment) {
|
|
3311
|
+
case "local":
|
|
3312
|
+
return new DevServer("partners").host();
|
|
3313
|
+
default:
|
|
3314
|
+
return productionFqdn;
|
|
3315
|
+
}
|
|
3316
|
+
}
|
|
3317
|
+
async function adminFqdn() {
|
|
3318
|
+
let environment = serviceEnvironment(), productionFqdn = "admin.shopify.com";
|
|
3319
|
+
switch (environment) {
|
|
3320
|
+
case "local":
|
|
3321
|
+
return new DevServerCore().host("admin");
|
|
3322
|
+
default:
|
|
3323
|
+
return productionFqdn;
|
|
3324
|
+
}
|
|
3325
|
+
}
|
|
3326
|
+
async function appManagementFqdn() {
|
|
3327
|
+
let environment = serviceEnvironment(), productionFqdn = "app.shopify.com";
|
|
3328
|
+
switch (environment) {
|
|
3329
|
+
case "local":
|
|
3330
|
+
return new DevServerCore().host("app");
|
|
3331
|
+
default:
|
|
3332
|
+
return productionFqdn;
|
|
3333
|
+
}
|
|
3334
|
+
}
|
|
3335
|
+
async function appDevFqdn(storeFqdn) {
|
|
3336
|
+
switch (serviceEnvironment()) {
|
|
3337
|
+
case "local":
|
|
3338
|
+
return new DevServerCore().host("app");
|
|
3339
|
+
default:
|
|
3340
|
+
return storeFqdn;
|
|
3341
|
+
}
|
|
3342
|
+
}
|
|
3343
|
+
async function developerDashboardFqdn() {
|
|
3344
|
+
let environment = serviceEnvironment(), productionFqdn = "dev.shopify.com";
|
|
3345
|
+
switch (environment) {
|
|
3346
|
+
case "local":
|
|
3347
|
+
return new DevServerCore().host("dev");
|
|
3348
|
+
default:
|
|
3349
|
+
return productionFqdn;
|
|
3350
|
+
}
|
|
3351
|
+
}
|
|
3352
|
+
async function businessPlatformFqdn() {
|
|
3353
|
+
let environment = serviceEnvironment(), productionFqdn = "destinations.shopifysvc.com";
|
|
3354
|
+
switch (environment) {
|
|
3355
|
+
case "local":
|
|
3356
|
+
return new DevServer("business-platform").host();
|
|
3357
|
+
default:
|
|
3358
|
+
return productionFqdn;
|
|
3359
|
+
}
|
|
3360
|
+
}
|
|
3361
|
+
async function identityFqdn() {
|
|
3362
|
+
let environment = serviceEnvironment(), productionFqdn = "accounts.shopify.com";
|
|
3363
|
+
switch (environment) {
|
|
3364
|
+
case "local":
|
|
3365
|
+
return new DevServer("identity").host();
|
|
3366
|
+
default:
|
|
3367
|
+
return productionFqdn;
|
|
3368
|
+
}
|
|
3369
|
+
}
|
|
3370
|
+
async function normalizeStoreFqdn(store2) {
|
|
3371
|
+
let storeFqdn = store2.replace(/^https?:\/\//, "").replace(/\/$/, ""), addDomain = async (storeFqdn2) => {
|
|
3372
|
+
switch (serviceEnvironment()) {
|
|
3373
|
+
case "local":
|
|
3374
|
+
return new DevServerCore().host(storeFqdn2);
|
|
3375
|
+
default:
|
|
3376
|
+
return `${storeFqdn2}.myshopify.com`;
|
|
3377
|
+
}
|
|
3378
|
+
};
|
|
3379
|
+
return ((storeFqdn2) => storeFqdn2.endsWith(".myshopify.com") || storeFqdn2.endsWith("shopify.io") || storeFqdn2.endsWith(".shop.dev"))(storeFqdn) ? storeFqdn : addDomain(storeFqdn);
|
|
3380
|
+
}
|
|
3381
|
+
|
|
3382
|
+
// ../cli-kit/dist/private/node/session/store.js
|
|
3383
|
+
async function store(sessions) {
|
|
3384
|
+
let jsonSessions = JSON.stringify(sessions);
|
|
3385
|
+
setSessions(jsonSessions);
|
|
3386
|
+
}
|
|
3387
|
+
async function fetch() {
|
|
3388
|
+
let content = getSessions();
|
|
3389
|
+
if (!content)
|
|
3390
|
+
return;
|
|
3391
|
+
let contentJson = JSON.parse(content), parsedSessions = await SessionsSchema.safeParseAsync(contentJson);
|
|
3392
|
+
if (parsedSessions.success)
|
|
3393
|
+
return parsedSessions.data;
|
|
3394
|
+
await remove();
|
|
3395
|
+
}
|
|
3396
|
+
async function remove() {
|
|
3397
|
+
removeSessions(), removeCurrentSessionId();
|
|
3398
|
+
}
|
|
3399
|
+
async function getSessionAlias(userId2) {
|
|
3400
|
+
let sessions = await fetch();
|
|
3401
|
+
if (!sessions)
|
|
3402
|
+
return;
|
|
3403
|
+
let fqdn = await identityFqdn();
|
|
3404
|
+
if (!(!sessions[fqdn] || !sessions[fqdn][userId2]))
|
|
3405
|
+
return sessions[fqdn][userId2].identity.alias;
|
|
3406
|
+
}
|
|
3407
|
+
async function findSessionByAlias(alias) {
|
|
3408
|
+
let sessions = await fetch();
|
|
3409
|
+
if (!sessions)
|
|
3410
|
+
return;
|
|
3411
|
+
let fqdn = await identityFqdn(), fqdnSessions = sessions[fqdn];
|
|
3412
|
+
if (fqdnSessions) {
|
|
3413
|
+
for (let [userId2, session] of Object.entries(fqdnSessions))
|
|
3414
|
+
if (session.identity.alias === alias)
|
|
3415
|
+
return userId2;
|
|
3416
|
+
}
|
|
3417
|
+
}
|
|
3418
|
+
|
|
3419
|
+
// ../cli-kit/dist/private/node/session/exchange.js
|
|
3420
|
+
init_cjs_shims();
|
|
3421
|
+
|
|
3422
|
+
// ../cli-kit/dist/private/node/session/identity.js
|
|
3423
|
+
init_cjs_shims();
|
|
3424
|
+
function clientId() {
|
|
3425
|
+
let environment = serviceEnvironment();
|
|
3426
|
+
return environment === Environment.Local ? "e5380e02-312a-7408-5718-e07017e9cf52" : environment === Environment.Production ? "fbdb2649-e327-4907-8f67-908d24cfd7e3" : "e5380e02-312a-7408-5718-e07017e9cf52";
|
|
3427
|
+
}
|
|
3428
|
+
function applicationId(api) {
|
|
3429
|
+
switch (api) {
|
|
3430
|
+
case "admin": {
|
|
3431
|
+
let environment = serviceEnvironment();
|
|
3432
|
+
return environment === Environment.Local ? "e92482cebb9bfb9fb5a0199cc770fde3de6c8d16b798ee73e36c9d815e070e52" : environment === Environment.Production ? "7ee65a63608843c577db8b23c4d7316ea0a01bd2f7594f8a9c06ea668c1b775c" : "e92482cebb9bfb9fb5a0199cc770fde3de6c8d16b798ee73e36c9d815e070e52";
|
|
3433
|
+
}
|
|
3434
|
+
case "partners": {
|
|
3435
|
+
let environment = serviceEnvironment();
|
|
3436
|
+
return environment === Environment.Local ? "df89d73339ac3c6c5f0a98d9ca93260763e384d51d6038da129889c308973978" : environment === Environment.Production ? "271e16d403dfa18082ffb3d197bd2b5f4479c3fc32736d69296829cbb28d41a6" : "df89d73339ac3c6c5f0a98d9ca93260763e384d51d6038da129889c308973978";
|
|
3437
|
+
}
|
|
3438
|
+
case "storefront-renderer": {
|
|
3439
|
+
let environment = serviceEnvironment();
|
|
3440
|
+
return environment === Environment.Local ? "46f603de-894f-488d-9471-5b721280ff49" : environment === Environment.Production ? "ee139b3d-5861-4d45-b387-1bc3ada7811c" : "46f603de-894f-488d-9471-5b721280ff49";
|
|
3441
|
+
}
|
|
3442
|
+
case "business-platform": {
|
|
3443
|
+
let environment = serviceEnvironment();
|
|
3444
|
+
return environment === Environment.Local ? "ace6dc89-b526-456d-a942-4b8ef6acda4b" : environment === Environment.Production ? "32ff8ee5-82b8-4d93-9f8a-c6997cefb7dc" : "ace6dc89-b526-456d-a942-4b8ef6acda4b";
|
|
3445
|
+
}
|
|
3446
|
+
case "app-management":
|
|
3447
|
+
return serviceEnvironment() === Environment.Production ? "7ee65a63608843c577db8b23c4d7316ea0a01bd2f7594f8a9c06ea668c1b775c" : "e92482cebb9bfb9fb5a0199cc770fde3de6c8d16b798ee73e36c9d815e070e52";
|
|
3448
|
+
default:
|
|
3449
|
+
throw new BugError(`Application id for API of type: ${api}`);
|
|
3450
|
+
}
|
|
3451
|
+
}
|
|
3452
|
+
|
|
3453
|
+
// ../cli-kit/dist/private/node/session/scopes.js
|
|
3454
|
+
init_cjs_shims();
|
|
3455
|
+
function allDefaultScopes(extraScopes = []) {
|
|
3456
|
+
let scopes = allAPIs.map((api) => defaultApiScopes(api)).flat();
|
|
3457
|
+
return scopes = ["openid", ...scopes, ...extraScopes].map(scopeTransform), Array.from(new Set(scopes));
|
|
3458
|
+
}
|
|
3459
|
+
function apiScopes(api, extraScopes = []) {
|
|
3460
|
+
let scopes = [...defaultApiScopes(api), ...extraScopes.map(scopeTransform)].map(scopeTransform);
|
|
3461
|
+
return Array.from(new Set(scopes));
|
|
3462
|
+
}
|
|
3463
|
+
function tokenExchangeScopes(api) {
|
|
3464
|
+
switch (api) {
|
|
3465
|
+
case "partners":
|
|
3466
|
+
return [scopeTransform("cli")];
|
|
3467
|
+
case "app-management":
|
|
3468
|
+
return [scopeTransform("app-management")];
|
|
3469
|
+
case "business-platform":
|
|
3470
|
+
return [scopeTransform("destinations")];
|
|
3471
|
+
default:
|
|
3472
|
+
throw new BugError(`API not supported for token exchange: ${api}`);
|
|
3473
|
+
}
|
|
3474
|
+
}
|
|
3475
|
+
function defaultApiScopes(api) {
|
|
3476
|
+
switch (api) {
|
|
3477
|
+
case "admin":
|
|
3478
|
+
return ["graphql", "themes", "collaborator"];
|
|
3479
|
+
case "storefront-renderer":
|
|
3480
|
+
return ["devtools"];
|
|
3481
|
+
case "partners":
|
|
3482
|
+
return ["cli"];
|
|
3483
|
+
case "business-platform":
|
|
3484
|
+
return ["destinations", "store-management", "on-demand-user-access"];
|
|
3485
|
+
case "app-management":
|
|
3486
|
+
return ["app-management"];
|
|
3487
|
+
default:
|
|
3488
|
+
throw new BugError(`Unknown API: ${api}`);
|
|
3489
|
+
}
|
|
3490
|
+
}
|
|
3491
|
+
function scopeTransform(scope) {
|
|
3492
|
+
switch (scope) {
|
|
3493
|
+
case "graphql":
|
|
3494
|
+
return "https://api.shopify.com/auth/shop.admin.graphql";
|
|
3495
|
+
case "themes":
|
|
3496
|
+
return "https://api.shopify.com/auth/shop.admin.themes";
|
|
3497
|
+
case "collaborator":
|
|
3498
|
+
return "https://api.shopify.com/auth/partners.collaborator-relationships.readonly";
|
|
3499
|
+
case "cli":
|
|
3500
|
+
return "https://api.shopify.com/auth/partners.app.cli.access";
|
|
3501
|
+
case "devtools":
|
|
3502
|
+
return "https://api.shopify.com/auth/shop.storefront-renderer.devtools";
|
|
3503
|
+
case "destinations":
|
|
3504
|
+
return "https://api.shopify.com/auth/destinations.readonly";
|
|
3505
|
+
case "store-management":
|
|
3506
|
+
return "https://api.shopify.com/auth/organization.store-management";
|
|
3507
|
+
case "on-demand-user-access":
|
|
3508
|
+
return "https://api.shopify.com/auth/organization.on-demand-user-access";
|
|
3509
|
+
case "app-management":
|
|
3510
|
+
return "https://api.shopify.com/auth/organization.apps.manage";
|
|
3511
|
+
default:
|
|
3512
|
+
return scope;
|
|
3513
|
+
}
|
|
3514
|
+
}
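For orientation, here is a minimal, self-contained sketch of how the scope expansion above behaves: short scope names are mapped to full `https://api.shopify.com/auth/...` URLs and duplicates are dropped. The mapping is copied from `scopeTransform` above; the sample input list is made up.

```js
// Sketch of apiScopes()/scopeTransform() above: map short names to scope URLs, dedupe.
const transform = (scope) =>
  ({
    graphql: "https://api.shopify.com/auth/shop.admin.graphql",
    themes: "https://api.shopify.com/auth/shop.admin.themes",
    collaborator: "https://api.shopify.com/auth/partners.collaborator-relationships.readonly",
  }[scope] ?? scope);

const adminScopes = ["graphql", "themes", "collaborator", "themes"].map(transform);
console.log(Array.from(new Set(adminScopes)));
// -> three unique https://api.shopify.com/auth/... URLs; the duplicate "themes" is dropped
```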
|
|
3515
|
+
|
|
3516
|
+
// ../cli-kit/dist/public/node/result.js
|
|
3517
|
+
init_cjs_shims();
|
|
3518
|
+
var ok = (value) => new Ok(value), err = (err2) => new Err(err2), Ok = class {
|
|
3519
|
+
constructor(value) {
|
|
3520
|
+
this.value = value;
|
|
3521
|
+
}
|
|
3522
|
+
/**
|
|
3523
|
+
* Check if a `Result` is an `Err` inferring its type. `!isErr()` should be used before accessing the `value`
|
|
3524
|
+
*
|
|
3525
|
+
* @returns `false` as the `Result` is `Ok`
|
|
3526
|
+
*/
|
|
3527
|
+
isErr() {
|
|
3528
|
+
return !1;
|
|
3529
|
+
}
|
|
3530
|
+
/**
|
|
3531
|
+
* Runs the `handler` method and returns an unaltered copy of the `Result`. It can be used to log an
|
|
3532
|
+
* output when the result is `Ok` without breaking the flow
|
|
3533
|
+
*
|
|
3534
|
+
* @param handler - method to be run when the result is `Ok`
|
|
3535
|
+
* @returns a copy of the same `Result`
|
|
3536
|
+
*/
|
|
3537
|
+
doOnOk(handler) {
|
|
3538
|
+
return handler(this.value), ok(this.value);
|
|
3539
|
+
}
|
|
3540
|
+
/**
|
|
3541
|
+
* A safe way to throw the `error` of the `Result`
|
|
3542
|
+
*/
|
|
3543
|
+
valueOrBug() {
|
|
3544
|
+
return this.value;
|
|
3545
|
+
}
|
|
3546
|
+
/**
|
|
3547
|
+
* Throws an abort error if the result doesn't represent a value.
|
|
3548
|
+
*/
|
|
3549
|
+
valueOrAbort() {
|
|
3550
|
+
return this.value;
|
|
3551
|
+
}
|
|
3552
|
+
/**
|
|
3553
|
+
* Maps the value to another one with a different type. It leaves the `Error` type unaltered
|
|
3554
|
+
*
|
|
3555
|
+
* @param mapper - The mapper method to apply to an `Ok` value
|
|
3556
|
+
* @returns a new result with the new mapped value
|
|
3557
|
+
*/
|
|
3558
|
+
map(mapper) {
|
|
3559
|
+
return ok(mapper(this.value));
|
|
3560
|
+
}
|
|
3561
|
+
/**
|
|
3562
|
+
* Maps the error type to another one. It leaves the `Ok` type and value unaltered
|
|
3563
|
+
*
|
|
3564
|
+
* @param _mapper - This mapper method is not used for an `Ok` value
|
|
3565
|
+
* @returns a new result with the new mapped error type and an unaltered value
|
|
3566
|
+
*/
|
|
3567
|
+
mapError(_mapper) {
|
|
3568
|
+
return ok(this.value);
|
|
3569
|
+
}
|
|
3570
|
+
}, Err = class {
|
|
3571
|
+
// eslint-disable-next-line node/handle-callback-err
|
|
3572
|
+
constructor(error) {
|
|
3573
|
+
this.error = error;
|
|
3574
|
+
}
|
|
3575
|
+
/**
|
|
3576
|
+
* Check if a `Result` is an `Err` inferring its type. `!isErr()` should be used before accessing the `value`
|
|
3577
|
+
*
|
|
3578
|
+
* @returns `true` as the `Result` is an `Err`
|
|
3579
|
+
*/
|
|
3580
|
+
isErr() {
|
|
3581
|
+
return !0;
|
|
3582
|
+
}
|
|
3583
|
+
/**
|
|
3584
|
+
* Returns an unaltered copy of the `Err` without running the handler.
|
|
3585
|
+
*
|
|
3586
|
+
* @param _handler - This handler method is not used for an `Error`
|
|
3587
|
+
* @returns a copy of the same `Error`
|
|
3588
|
+
*/
|
|
3589
|
+
doOnOk(_handler) {
|
|
3590
|
+
return err(this.error);
|
|
3591
|
+
}
|
|
3592
|
+
/**
|
|
3593
|
+
* A safe way to throw the `error` of the `Result`
|
|
3594
|
+
*/
|
|
3595
|
+
valueOrBug() {
|
|
3596
|
+
throw this.error;
|
|
3597
|
+
}
|
|
3598
|
+
/**
|
|
3599
|
+
* Throws an abort error if the result doesn't represent a value.
|
|
3600
|
+
*/
|
|
3601
|
+
valueOrAbort() {
|
|
3602
|
+
if (this.error instanceof FatalError)
|
|
3603
|
+
throw this.error;
|
|
3604
|
+
if (this.error instanceof import_ts_error.ExtendableError || this.error instanceof Error) {
|
|
3605
|
+
let error = new AbortError(this.error.message);
|
|
3606
|
+
throw error.stack = this.error.stack, error;
|
|
3607
|
+
} else
|
|
3608
|
+
throw new AbortError(`${this.error}`);
|
|
3609
|
+
}
|
|
3610
|
+
/**
|
|
3611
|
+
* Maps the value type to another one. It leaves the `Error` unaltered
|
|
3612
|
+
*
|
|
3613
|
+
* @param _mapper - This mapper method is not used for an `Error` value
|
|
3614
|
+
* @returns a new result with the new value type and an unaltered error
|
|
3615
|
+
*/
|
|
3616
|
+
map(_mapper) {
|
|
3617
|
+
return err(this.error);
|
|
3618
|
+
}
|
|
3619
|
+
/**
|
|
3620
|
+
* Maps the error to another one with a different type. It leaves the value type unaltered
|
|
3621
|
+
*
|
|
3622
|
+
* @param mapper - The mapper method to apply to the `Error` value
|
|
3623
|
+
* @returns a new result with the new mapped error
|
|
3624
|
+
*/
|
|
3625
|
+
mapError(mapper) {
|
|
3626
|
+
return err(mapper(this.error));
|
|
3627
|
+
}
|
|
3628
|
+
};
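A short usage sketch of the `Result` helpers defined above, assuming the `ok`/`err` constructors are in scope as they are in this module; the `parsePort` function is hypothetical and only illustrates the API.

```js
// Hypothetical example using the ok/err Result helpers defined above.
const parsePort = (raw) => {
  const port = Number(raw);
  return Number.isInteger(port) && port > 0 ? ok(port) : err(new Error(`Invalid port: ${raw}`));
};

const result = parsePort("8080").map((port) => port + 1);
if (!result.isErr()) {
  console.log(result.value); // 8081
}
parsePort("nope").mapError((error) => new Error(`Config problem: ${error.message}`));
// Calling .valueOrAbort() on an Err would throw an AbortError wrapping the error message.
```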
|
|
3629
|
+
|
|
3630
|
+
// ../cli-kit/dist/private/node/session.js
|
|
3631
|
+
init_cjs_shims();
|
|
3632
|
+
|
|
3633
|
+
// ../cli-kit/dist/private/node/session/validate.js
|
|
3634
|
+
init_cjs_shims();
|
|
3635
|
+
function validateScopes(requestedScopes, identity) {
|
|
3636
|
+
let currentScopes = identity.scopes;
|
|
3637
|
+
return firstPartyDev() !== currentScopes.includes("employee") ? !1 : requestedScopes.every((scope) => currentScopes.includes(scope));
|
|
3638
|
+
}
|
|
3639
|
+
async function validateSession(scopes, applications, session) {
|
|
3640
|
+
if (!session || !validateScopes(scopes, session.identity))
|
|
3641
|
+
return "needs_full_auth";
|
|
3642
|
+
let tokensAreExpired = isTokenExpired(session.identity);
|
|
3643
|
+
if (applications.partnersApi) {
|
|
3644
|
+
let appId = applicationId("partners"), token = session.applications[appId];
|
|
3645
|
+
tokensAreExpired = tokensAreExpired || isTokenExpired(token);
|
|
3646
|
+
}
|
|
3647
|
+
if (applications.appManagementApi) {
|
|
3648
|
+
let appId = applicationId("app-management"), token = session.applications[appId];
|
|
3649
|
+
tokensAreExpired = tokensAreExpired || isTokenExpired(token);
|
|
3650
|
+
}
|
|
3651
|
+
if (applications.storefrontRendererApi) {
|
|
3652
|
+
let appId = applicationId("storefront-renderer"), token = session.applications[appId];
|
|
3653
|
+
tokensAreExpired = tokensAreExpired || isTokenExpired(token);
|
|
3654
|
+
}
|
|
3655
|
+
if (applications.adminApi) {
|
|
3656
|
+
let appId = applicationId("admin"), realAppId = `${applications.adminApi.storeFqdn}-${appId}`, token = session.applications[realAppId];
|
|
3657
|
+
tokensAreExpired = tokensAreExpired || isTokenExpired(token);
|
|
3658
|
+
}
|
|
3659
|
+
return outputDebug(`- Token validation -> It's expired: ${tokensAreExpired}`), validateCachedIdentityTokenStructure(session.identity) ? tokensAreExpired ? "needs_refresh" : "ok" : "needs_full_auth";
|
|
3660
|
+
}
|
|
3661
|
+
function isTokenExpired(token) {
|
|
3662
|
+
return token ? token.expiresAt < expireThreshold() : !0;
|
|
3663
|
+
}
|
|
3664
|
+
function expireThreshold() {
|
|
3665
|
+
return new Date(Date.now() + sessionConstants.expirationTimeMarginInMinutes * 60 * 1e3);
|
|
3666
|
+
}
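The expiry check above treats a token as expired once it will lapse within the configured safety margin, not only once it has actually lapsed. A self-contained sketch follows; the 4-minute margin is an assumption standing in for `sessionConstants.expirationTimeMarginInMinutes`.

```js
// Sketch of isTokenExpired()/expireThreshold() above. The margin value is assumed.
const expirationTimeMarginInMinutes = 4; // hypothetical; the real value comes from sessionConstants
const isTokenExpired = (token) =>
  !token || token.expiresAt < new Date(Date.now() + expirationTimeMarginInMinutes * 60 * 1000);

console.log(isTokenExpired({ expiresAt: new Date(Date.now() + 60 * 1000) })); // true: lapses in 1 minute
console.log(isTokenExpired({ expiresAt: new Date(Date.now() + 60 * 60 * 1000) })); // false: lapses in 1 hour
```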
|
|
3667
|
+
|
|
3668
|
+
// ../cli-kit/dist/private/node/session/device-authorization.js
|
|
3669
|
+
init_cjs_shims();
|
|
3670
|
+
async function requestDeviceAuthorization(scopes) {
|
|
3671
|
+
let fqdn = await identityFqdn(), queryParams = { client_id: clientId(), scope: scopes.join(" ") }, url3 = `https://${fqdn}/oauth/device_authorization`, response = await shopifyFetch(url3, {
|
|
3672
|
+
method: "POST",
|
|
3673
|
+
headers: { "Content-type": "application/x-www-form-urlencoded" },
|
|
3674
|
+
body: convertRequestToParams(queryParams)
|
|
3675
|
+
}), jsonResult;
|
|
3676
|
+
try {
|
|
3677
|
+
jsonResult = await response.json();
|
|
3678
|
+
} catch {
|
|
3679
|
+
throw new BugError("Received unexpected response from the authorization service. If this issue persists, please contact support at https://help.shopify.com");
|
|
3680
|
+
}
|
|
3681
|
+
if (outputDebug(outputContent`Received device authorization code: ${outputToken.json(jsonResult)}`), !jsonResult.device_code || !jsonResult.verification_uri_complete)
|
|
3682
|
+
throw new BugError("Failed to start authorization process");
|
|
3683
|
+
if (outputInfo(`
|
|
3684
|
+
To run this command, log in to Shopify.`), isCI())
|
|
3685
|
+
throw new AbortError("Authorization is required to continue, but the current environment does not support interactive prompts.", "To resolve this, specify credentials in your environment, or run the command in an interactive environment such as your local terminal.");
|
|
3686
|
+
outputInfo(outputContent`User verification code: ${jsonResult.user_code}`);
|
|
3687
|
+
let linkToken = outputToken.link(jsonResult.verification_uri_complete), cloudMessage = () => {
|
|
3688
|
+
outputInfo(outputContent`👉 Open this link to start the auth process: ${linkToken}`);
|
|
3689
|
+
};
|
|
3690
|
+
return isCloudEnvironment() || !isTTY() ? cloudMessage() : (outputInfo("\u{1F449} Press any key to open the login page on your browser"), await keypress(), await openURL(jsonResult.verification_uri_complete) ? outputInfo(outputContent`Opened link to start the auth process: ${linkToken}`) : cloudMessage()), {
|
|
3691
|
+
deviceCode: jsonResult.device_code,
|
|
3692
|
+
userCode: jsonResult.user_code,
|
|
3693
|
+
verificationUri: jsonResult.verification_uri,
|
|
3694
|
+
expiresIn: jsonResult.expires_in,
|
|
3695
|
+
verificationUriComplete: jsonResult.verification_uri_complete,
|
|
3696
|
+
interval: jsonResult.interval
|
|
3697
|
+
};
|
|
3698
|
+
}
|
|
3699
|
+
async function pollForDeviceAuthorization(code, interval = 5) {
|
|
3700
|
+
let currentIntervalInSeconds = interval;
|
|
3701
|
+
return new Promise((resolve, reject) => {
|
|
3702
|
+
let onPoll = async () => {
|
|
3703
|
+
let result = await exchangeDeviceCodeForAccessToken(code);
|
|
3704
|
+
if (!result.isErr()) {
|
|
3705
|
+
resolve(result.value);
|
|
3706
|
+
return;
|
|
3707
|
+
}
|
|
3708
|
+
let error = result.error ?? "unknown_failure";
|
|
3709
|
+
switch (outputDebug(outputContent`Polling for device authorization... status: ${error}`), error) {
|
|
3710
|
+
case "authorization_pending": {
|
|
3711
|
+
startPolling();
|
|
3712
|
+
return;
|
|
3713
|
+
}
|
|
3714
|
+
case "slow_down":
|
|
3715
|
+
currentIntervalInSeconds += 5;
|
|
3716
|
+
{
|
|
3717
|
+
startPolling();
|
|
3718
|
+
return;
|
|
3719
|
+
}
|
|
3720
|
+
case "access_denied":
|
|
3721
|
+
case "expired_token":
|
|
3722
|
+
case "unknown_failure":
|
|
3723
|
+
reject(result);
|
|
3724
|
+
}
|
|
3725
|
+
}, startPolling = () => {
|
|
3726
|
+
setTimeout(onPoll, currentIntervalInSeconds * 1e3);
|
|
3727
|
+
};
|
|
3728
|
+
startPolling();
|
|
3729
|
+
});
|
|
3730
|
+
}
|
|
3731
|
+
function convertRequestToParams(queryParams) {
|
|
3732
|
+
return Object.entries(queryParams).map(([key, value]) => value && `${key}=${value}`).filter((hasValue) => !!hasValue).join("&");
|
|
3733
|
+
}
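The device-authorization request above is posted as an `application/x-www-form-urlencoded` body built by `convertRequestToParams`. A minimal re-implementation with placeholder values shows the shape of that body; keys with falsy values are dropped.

```js
// Sketch of convertRequestToParams() above; the client_id value is a placeholder.
const convertRequestToParams = (queryParams) =>
  Object.entries(queryParams)
    .map(([key, value]) => value && `${key}=${value}`)
    .filter(Boolean)
    .join("&");

console.log(convertRequestToParams({ client_id: "example-client-id", scope: "openid", code: undefined }));
// -> "client_id=example-client-id&scope=openid"
```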
|
|
3734
|
+
|
|
3735
|
+
// ../cli-kit/dist/private/node/api/rest.js
|
|
3736
|
+
init_cjs_shims();
|
|
3737
|
+
function isThemeAccessSession(session) {
|
|
3738
|
+
return session.token.startsWith("shptka_");
|
|
3739
|
+
}
|
|
3740
|
+
|
|
3741
|
+
// ../cli-kit/dist/private/node/api/graphql/business-platform-destinations/user-email.js
|
|
3742
|
+
init_cjs_shims();
|
|
3743
|
+
var UserEmailQueryString = `
|
|
3744
|
+
query UserEmail {
|
|
3745
|
+
currentUserAccount {
|
|
3746
|
+
email
|
|
3747
|
+
}
|
|
3748
|
+
}
|
|
3749
|
+
`;
|
|
3750
|
+
|
|
3751
|
+
// ../cli-kit/dist/public/common/object.js
|
|
3752
|
+
init_cjs_shims();
|
|
3753
|
+
|
|
3754
|
+
// ../cli-kit/dist/private/common/array.js
|
|
3755
|
+
init_cjs_shims();
|
|
3756
|
+
function unionArrayStrategy(destinationArray, sourceArray) {
|
|
3757
|
+
return Array.from(/* @__PURE__ */ new Set([...destinationArray, ...sourceArray]));
|
|
3758
|
+
}
|
|
3759
|
+
|
|
3760
|
+
// ../cli-kit/dist/public/common/object.js
|
|
3761
|
+
var import_deepmerge = __toESM(require_cjs(), 1), import_pickBy = __toESM(require_pickBy(), 1), import_mapValues = __toESM(require_mapValues(), 1), import_isEqual = __toESM(require_isEqual(), 1), import_differenceWith = __toESM(require_differenceWith(), 1), import_fromPairs = __toESM(require_fromPairs(), 1), import_toPairs = __toESM(require_toPairs(), 1), import_get = __toESM(require_get(), 1), import_set = __toESM(require_set(), 1), import_unset = __toESM(require_unset(), 1), import_isEmpty = __toESM(require_isEmpty(), 1);
|
|
3762
|
+
function deepMergeObjects(lhs, rhs, arrayMergeStrategy = unionArrayStrategy) {
|
|
3763
|
+
return (0, import_deepmerge.default)(lhs, rhs, { arrayMerge: arrayMergeStrategy });
|
|
3764
|
+
}
|
|
3765
|
+
function pickBy(object, predicate) {
|
|
3766
|
+
return (0, import_pickBy.default)(object, predicate);
|
|
3767
|
+
}
|
|
3768
|
+
function deepCompare(one, two) {
|
|
3769
|
+
return (0, import_isEqual.default)(one, two);
|
|
3770
|
+
}
|
|
3771
|
+
function deepDifference(one, two) {
|
|
3772
|
+
let changes = (0, import_differenceWith.default)((0, import_toPairs.default)(one), (0, import_toPairs.default)(two), deepCompare), changes2 = (0, import_differenceWith.default)((0, import_toPairs.default)(two), (0, import_toPairs.default)(one), deepCompare);
|
|
3773
|
+
return [(0, import_fromPairs.default)(changes), (0, import_fromPairs.default)(changes2)];
|
|
3774
|
+
}
|
|
3775
|
+
function getPathValue(object, path) {
|
|
3776
|
+
return (0, import_get.default)(object, path) === void 0 ? void 0 : (0, import_get.default)(object, path);
|
|
3777
|
+
}
|
|
3778
|
+
function setPathValue(object, path, value) {
|
|
3779
|
+
return (0, import_set.default)(object, path, value);
|
|
3780
|
+
}
|
|
3781
|
+
function isEmpty(object) {
|
|
3782
|
+
return (0, import_isEmpty.default)(object);
|
|
3783
|
+
}
|
|
3784
|
+
function compact(object) {
|
|
3785
|
+
return Object.fromEntries(Object.entries(object).filter(([_, value]) => value != null));
|
|
3786
|
+
}
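A brief usage sketch of the object helpers above, assuming `deepMergeObjects` and `compact` are in scope as defined in this module: arrays are merged as a union by default, and `compact` drops `null`/`undefined` entries. The inputs are made up.

```js
// Usage sketch for deepMergeObjects() and compact() above with illustrative inputs.
const merged = deepMergeObjects(
  { scopes: ["themes"], flags: { verbose: true } },
  { scopes: ["themes", "graphql"], flags: { json: false } },
);
console.log(merged); // { scopes: ["themes", "graphql"], flags: { verbose: true, json: false } }

console.log(compact({ store: "example.myshopify.com", password: undefined, theme: null }));
// -> { store: "example.myshopify.com" }
```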
|
|
3787
|
+
|
|
3788
|
+
// ../cli-kit/dist/public/node/api/business-platform.js
|
|
3789
|
+
init_cjs_shims();
|
|
3790
|
+
|
|
3791
|
+
// ../cli-kit/dist/public/node/api/graphql.js
|
|
3792
|
+
init_cjs_shims();
|
|
3793
|
+
|
|
3794
|
+
// ../cli-kit/dist/private/node/api/graphql.js
|
|
3795
|
+
init_cjs_shims();
|
|
3796
|
+
function debugLogRequestInfo(api, query, url3, variables, headers = {}) {
|
|
3797
|
+
outputDebug(outputContent`Sending ${outputToken.json(api)} GraphQL request:
|
|
3798
|
+
${outputToken.raw(query.toString().trim())}
|
|
3799
|
+
${variables ? `
|
|
3800
|
+
With variables:
|
|
3801
|
+
${sanitizeVariables(variables)}
|
|
3802
|
+
` : ""}
|
|
3803
|
+
With request headers:
|
|
3804
|
+
${sanitizedHeadersOutput(headers)}\n
|
|
3805
|
+
to ${sanitizeURL(url3)}`);
|
|
3806
|
+
}
|
|
3807
|
+
function sanitizeVariables(variables) {
|
|
3808
|
+
let result = { ...variables }, sanitizedResult = sanitizeDeepVariables(result, ["apiKey", "serialized_script"]);
|
|
3809
|
+
return JSON.stringify(sanitizedResult, null, 2);
|
|
3810
|
+
}
|
|
3811
|
+
function sanitizeDeepVariables(value, sensitiveKeys) {
|
|
3812
|
+
if (typeof value == "string")
|
|
3813
|
+
try {
|
|
3814
|
+
let parsed = JSON.parse(value);
|
|
3815
|
+
if (typeof parsed == "object" && parsed !== null) {
|
|
3816
|
+
let sanitized = sanitizeDeepVariables(parsed, sensitiveKeys);
|
|
3817
|
+
return JSON.stringify(sanitized, null);
|
|
3818
|
+
}
|
|
3819
|
+
} catch {
|
|
3820
|
+
return value;
|
|
3821
|
+
}
|
|
3822
|
+
if (typeof value != "object" || value === null)
|
|
3823
|
+
return value;
|
|
3824
|
+
if (Array.isArray(value))
|
|
3825
|
+
return value.map((item) => sanitizeDeepVariables(item, sensitiveKeys));
|
|
3826
|
+
let result = {};
|
|
3827
|
+
for (let [key, val] of Object.entries(value)) {
|
|
3828
|
+
if (sensitiveKeys.includes(key) && typeof val == "string") {
|
|
3829
|
+
result[key] = "*****";
|
|
3830
|
+
continue;
|
|
3831
|
+
}
|
|
3832
|
+
result[key] = sanitizeDeepVariables(val, sensitiveKeys);
|
|
3833
|
+
}
|
|
3834
|
+
return result;
|
|
3835
|
+
}
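A usage sketch of the sanitisation above, assuming `sanitizeVariables` is in scope: sensitive keys such as `apiKey` are masked, including when they appear inside JSON-encoded string values. The input values are placeholders.

```js
// Usage sketch for sanitizeVariables() above with placeholder secrets.
console.log(
  sanitizeVariables({
    title: "My app",
    apiKey: "shpat_super_secret",
    config: JSON.stringify({ apiKey: "also_secret", name: "demo" }),
  }),
);
// -> pretty-printed JSON in which both apiKey values are replaced with "*****"
```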
|
|
3836
|
+
function errorHandler(api) {
|
|
3837
|
+
return (error, requestId) => {
|
|
3838
|
+
if (error instanceof ClientError) {
|
|
3839
|
+
let { status } = error.response, errorMessage = stringifyMessage(outputContent`
|
|
3840
|
+
The ${outputToken.raw(api)} GraphQL API responded unsuccessfully with${status === 200 ? "" : ` the HTTP status ${status} and`} errors:
|
|
3841
|
+
|
|
3842
|
+
${outputToken.json(error.response.errors)}
|
|
3843
|
+
`);
|
|
3844
|
+
requestId && (errorMessage += `
|
|
3845
|
+
Request ID: ${requestId}
|
|
3846
|
+
`);
|
|
3847
|
+
let mappedError;
|
|
3848
|
+
return status < 500 ? mappedError = new GraphQLClientError(errorMessage, status, error.response.errors) : mappedError = new AbortError(errorMessage), mappedError.stack = error.stack, mappedError;
|
|
3849
|
+
} else
|
|
3850
|
+
return error;
|
|
3851
|
+
};
|
|
3852
|
+
}
|
|
3853
|
+
|
|
3854
|
+
// ../cli-kit/dist/private/node/request-ids.js
|
|
3855
|
+
init_cjs_shims();
|
|
3856
|
+
var RequestIDCollection = class _RequestIDCollection {
|
|
3857
|
+
constructor() {
|
|
3858
|
+
this.requestIds = [];
|
|
3859
|
+
}
|
|
3860
|
+
static getInstance() {
|
|
3861
|
+
return _RequestIDCollection.instance || (_RequestIDCollection.instance = new _RequestIDCollection()), _RequestIDCollection.instance;
|
|
3862
|
+
}
|
|
3863
|
+
/**
|
|
3864
|
+
* Add a request ID to the collection
|
|
3865
|
+
* We only report the first MAX_REQUEST_IDS (100 in this build) request IDs.
|
|
3866
|
+
*/
|
|
3867
|
+
addRequestId(requestId) {
|
|
3868
|
+
requestId && this.requestIds.length < 100 && this.requestIds.push(requestId);
|
|
3869
|
+
}
|
|
3870
|
+
/**
|
|
3871
|
+
* Get all collected request IDs
|
|
3872
|
+
*/
|
|
3873
|
+
getRequestIds() {
|
|
3874
|
+
return this.requestIds;
|
|
3875
|
+
}
|
|
3876
|
+
/**
|
|
3877
|
+
* Clear all stored request IDs
|
|
3878
|
+
*/
|
|
3879
|
+
clear() {
|
|
3880
|
+
this.requestIds = [];
|
|
3881
|
+
}
|
|
3882
|
+
}, requestIdsCollection = RequestIDCollection.getInstance();
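A small usage sketch of the singleton above: request IDs are collected (capped at 100) so they can be attached to error reports later. The IDs here are placeholders.

```js
// Usage sketch for the requestIdsCollection singleton defined above.
requestIdsCollection.clear();
requestIdsCollection.addRequestId("req-123");
requestIdsCollection.addRequestId(undefined); // ignored: falsy IDs are skipped
console.log(requestIdsCollection.getRequestIds()); // ["req-123"]
```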
|
|
3883
|
+
|
|
3884
|
+
// ../cli-kit/dist/public/node/api/graphql.js
|
|
3885
|
+
async function createGraphQLClient({ url: url3, addedHeaders, token }) {
|
|
3886
|
+
let headers = {
|
|
3887
|
+
...addedHeaders,
|
|
3888
|
+
...buildHeaders(token)
|
|
3889
|
+
}, clientOptions = { agent: await httpsAgent(), headers };
|
|
3890
|
+
return {
|
|
3891
|
+
client: new GraphQLClient(url3, clientOptions),
|
|
3892
|
+
headers
|
|
3893
|
+
};
|
|
3894
|
+
}
|
|
3895
|
+
async function performGraphQLRequest(options2) {
|
|
3896
|
+
let { token, addedHeaders, queryAsString, variables, api, url: url3, responseOptions, unauthorizedHandler, cacheOptions } = options2, behaviour = requestMode(options2.preferredBehaviour ?? "default"), { headers, client } = await createGraphQLClient({ url: url3, addedHeaders, token });
|
|
3897
|
+
debugLogRequestInfo(api, queryAsString, url3, variables, headers);
|
|
3898
|
+
let rawGraphQLRequest = async () => {
|
|
3899
|
+
let fullResponse;
|
|
3900
|
+
try {
|
|
3901
|
+
return client.requestConfig.signal = abortSignalFromRequestBehaviour(behaviour), fullResponse = await client.rawRequest(queryAsString, variables), await logLastRequestIdFromResponse(fullResponse), fullResponse;
|
|
3902
|
+
} catch (error) {
|
|
3903
|
+
throw error instanceof ClientError && await logLastRequestIdFromResponse(error.response), error;
|
|
3904
|
+
}
|
|
3905
|
+
}, tokenRefreshHandler = unauthorizedHandler?.handler, tokenRefreshUnauthorizedHandlerFunction = tokenRefreshHandler ? async () => {
|
|
3906
|
+
let refreshTokenResult = await tokenRefreshHandler();
|
|
3907
|
+
if (refreshTokenResult.token) {
|
|
3908
|
+
let { client: newClient, headers: newHeaders } = await createGraphQLClient({
|
|
3909
|
+
url: url3,
|
|
3910
|
+
addedHeaders,
|
|
3911
|
+
token: refreshTokenResult.token
|
|
3912
|
+
});
|
|
3913
|
+
return client = newClient, headers = newHeaders, !0;
|
|
3914
|
+
} else
|
|
3915
|
+
return !1;
|
|
3916
|
+
} : void 0, request = () => retryAwareRequest({ request: rawGraphQLRequest, url: url3, ...behaviour }, responseOptions?.handleErrors === !1 ? void 0 : errorHandler(api)), executeWithTimer = () => runWithTimer("cmd_all_timing_network_ms")(async () => {
|
|
3917
|
+
let response;
|
|
3918
|
+
try {
|
|
3919
|
+
response = await request();
|
|
3920
|
+
} catch (error) {
|
|
3921
|
+
if (error instanceof ClientError && error.response.status === 401 && tokenRefreshUnauthorizedHandlerFunction)
|
|
3922
|
+
if (await tokenRefreshUnauthorizedHandlerFunction())
|
|
3923
|
+
response = await request();
|
|
3924
|
+
else
|
|
3925
|
+
throw error;
|
|
3926
|
+
else
|
|
3927
|
+
throw error;
|
|
3928
|
+
}
|
|
3929
|
+
return responseOptions?.onResponse && responseOptions.onResponse(response), response.data;
|
|
3930
|
+
});
|
|
3931
|
+
if (cacheOptions === void 0)
|
|
3932
|
+
return executeWithTimer();
|
|
3933
|
+
let { cacheTTL, cacheExtraKey, cacheStore } = cacheOptions, queryHash = nonRandomUUID(queryAsString), variablesHash = nonRandomUUID(JSON.stringify(variables ?? {})), cacheKey = `q-${queryHash}-${variablesHash}-${CLI_KIT_VERSION}-${cacheExtraKey ?? ""}`, result = await cacheRetrieveOrRepopulate(cacheKey, async () => {
|
|
3934
|
+
let result2 = await executeWithTimer();
|
|
3935
|
+
return JSON.stringify(result2);
|
|
3936
|
+
}, timeIntervalToMilliseconds(cacheTTL), cacheStore);
|
|
3937
|
+
return JSON.parse(result);
|
|
3938
|
+
}
|
|
3939
|
+
async function logLastRequestIdFromResponse(response) {
|
|
3940
|
+
try {
|
|
3941
|
+
let requestId = response.headers.get("x-request-id");
|
|
3942
|
+
requestIdsCollection.addRequestId(requestId), await addPublicMetadata(() => ({
|
|
3943
|
+
cmd_all_last_graphql_request_id: requestId ?? void 0
|
|
3944
|
+
}));
|
|
3945
|
+
} catch {
|
|
3946
|
+
}
|
|
3947
|
+
}
|
|
3948
|
+
async function graphqlRequest(options2) {
|
|
3949
|
+
return performGraphQLRequest({
|
|
3950
|
+
...options2,
|
|
3951
|
+
queryAsString: options2.query
|
|
3952
|
+
});
|
|
3953
|
+
}
|
|
3954
|
+
async function graphqlRequestDoc(options2) {
|
|
3955
|
+
return performGraphQLRequest({
|
|
3956
|
+
...options2,
|
|
3957
|
+
queryAsString: resolveRequestDocument(options2.query).query
|
|
3958
|
+
});
|
|
3959
|
+
}
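When `cacheOptions` are provided, `performGraphQLRequest` above derives a deterministic cache key from the query, the variables, the CLI kit version, and an optional extra key. The sketch below mirrors that construction; `nonRandomUUID` is replaced with a plain SHA-256 stand-in and the version string is an assumption.

```js
// Sketch of the cache-key construction in performGraphQLRequest() above.
import { createHash } from "node:crypto";

const nonRandomUUID = (value) => createHash("sha256").update(value).digest("hex").slice(0, 32); // stand-in hash
const CLI_KIT_VERSION = "3.85.1"; // assumed to match the packaged cli-kit version

const cacheKeyFor = (query, variables, extraKey = "") =>
  `q-${nonRandomUUID(query)}-${nonRandomUUID(JSON.stringify(variables ?? {}))}-${CLI_KIT_VERSION}-${extraKey}`;

console.log(cacheKeyFor("query { shop { name } }", { first: 10 }));
```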
|
|
3960
|
+
|
|
3961
|
+
// ../cli-kit/dist/public/node/api/partners.js
|
|
3962
|
+
init_cjs_shims();
|
|
3963
|
+
|
|
3964
|
+
// ../cli-kit/dist/public/node/api/utilities.js
|
|
3965
|
+
init_cjs_shims();
|
|
3966
|
+
var addCursorAndFiltersToAppLogsUrl = (baseUrl, cursor, filters) => {
|
|
3967
|
+
let url3 = new URL(baseUrl);
|
|
3968
|
+
return cursor && url3.searchParams.append("cursor", cursor), filters?.status && url3.searchParams.append("status", filters.status), filters?.source && url3.searchParams.append("source", filters.source), url3.toString();
|
|
3969
|
+
};
|
|
3970
|
+
|
|
3971
|
+
// ../cli-kit/dist/private/node/context/deprecations-store.js
|
|
3972
|
+
init_cjs_shims();
|
|
3973
|
+
var globalWithDeprecationsStore = {
|
|
3974
|
+
...globalThis,
|
|
3975
|
+
deprecationsStore: {
|
|
3976
|
+
nextDeprecationDate: void 0
|
|
3977
|
+
}
|
|
3978
|
+
};
|
|
3979
|
+
function getNextDeprecationDate() {
|
|
3980
|
+
return globalWithDeprecationsStore.deprecationsStore.nextDeprecationDate;
|
|
3981
|
+
}
|
|
3982
|
+
function setNextDeprecationDate(dates) {
|
|
3983
|
+
if (dates.length < 1)
|
|
3984
|
+
return;
|
|
3985
|
+
let earliestFutureDateTime = earliestDateTimeAfter(Date.now(), dates);
|
|
3986
|
+
if (!earliestFutureDateTime)
|
|
3987
|
+
return;
|
|
3988
|
+
let nextDeprecationDate = getNextDeprecationDate();
|
|
3989
|
+
(!nextDeprecationDate || earliestFutureDateTime < nextDeprecationDate.getTime()) && (globalWithDeprecationsStore.deprecationsStore.nextDeprecationDate = new Date(earliestFutureDateTime));
|
|
3990
|
+
}
|
|
3991
|
+
function earliestDateTimeAfter(afterTime, dates) {
|
|
3992
|
+
return dates.map((date) => date.getTime()).sort().find((time) => time > afterTime);
|
|
3993
|
+
}
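A usage sketch of the deprecation-date bookkeeping above, assuming `setNextDeprecationDate` and `getNextDeprecationDate` are in scope: only dates in the future are considered, and the store keeps the earliest of them. The dates are illustrative.

```js
// Usage sketch for setNextDeprecationDate()/getNextDeprecationDate() above.
const past = new Date(Date.now() - 24 * 60 * 60 * 1000);
const soon = new Date(Date.now() + 7 * 24 * 60 * 60 * 1000);
const later = new Date(Date.now() + 30 * 24 * 60 * 60 * 1000);

setNextDeprecationDate([past, later, soon]);
console.log(getNextDeprecationDate()); // the "soon" date: the earliest one still in the future
```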
|
|
3994
|
+
|
|
3995
|
+
// ../cli-kit/dist/public/node/api/partners.js
|
|
3996
|
+
var import_bottleneck = __toESM(require_lib(), 1), limiter = new import_bottleneck.default({
|
|
3997
|
+
minTime: 150,
|
|
3998
|
+
maxConcurrent: 10
|
|
3999
|
+
});
|
|
4000
|
+
async function setupRequest(token) {
|
|
4001
|
+
let api = "Partners", url3 = `https://${await partnersFqdn()}/api/cli/graphql`;
|
|
4002
|
+
return {
|
|
4003
|
+
token,
|
|
4004
|
+
api,
|
|
4005
|
+
url: url3,
|
|
4006
|
+
responseOptions: { onResponse: handleDeprecations }
|
|
4007
|
+
};
|
|
4008
|
+
}
|
|
4009
|
+
async function partnersRequest(query, token, variables, cacheOptions, preferredBehaviour, unauthorizedHandler) {
|
|
4010
|
+
let opts = await setupRequest(token);
|
|
4011
|
+
return limiter.schedule(() => graphqlRequest({
|
|
4012
|
+
...opts,
|
|
4013
|
+
query,
|
|
4014
|
+
variables,
|
|
4015
|
+
cacheOptions,
|
|
4016
|
+
preferredBehaviour,
|
|
4017
|
+
unauthorizedHandler
|
|
4018
|
+
}));
|
|
4019
|
+
}
|
|
4020
|
+
var generateFetchAppLogUrl = async (cursor, filters) => {
|
|
4021
|
+
let url3 = `https://${await partnersFqdn()}/app_logs/poll`;
|
|
4022
|
+
return addCursorAndFiltersToAppLogsUrl(url3, cursor, filters);
|
|
4023
|
+
};
|
|
4024
|
+
async function partnersRequestDoc(query, token, variables, preferredBehaviour, unauthorizedHandler) {
|
|
4025
|
+
try {
|
|
4026
|
+
let opts = await setupRequest(token);
|
|
4027
|
+
return limiter.schedule(() => graphqlRequestDoc({
|
|
4028
|
+
...opts,
|
|
4029
|
+
query,
|
|
4030
|
+
variables,
|
|
4031
|
+
preferredBehaviour,
|
|
4032
|
+
unauthorizedHandler
|
|
4033
|
+
}));
|
|
4034
|
+
} catch (error) {
|
|
4035
|
+
if (error.errors?.[0]?.extensions?.type === "unsupported_client_version") {
|
|
4036
|
+
let packageManager = await getPackageManager(cwd());
|
|
4037
|
+
throw new AbortError(["Upgrade your CLI version to run this command."], null, [
|
|
4038
|
+
["Run", { command: formatPackageManagerCommand(packageManager, "shopify upgrade") }]
|
|
4039
|
+
]);
|
|
4040
|
+
}
|
|
4041
|
+
throw error;
|
|
4042
|
+
}
|
|
4043
|
+
}
|
|
4044
|
+
function handleDeprecations(response) {
|
|
4045
|
+
if (!response.extensions)
|
|
4046
|
+
return;
|
|
4047
|
+
let deprecationDates = [];
|
|
4048
|
+
for (let deprecation of response.extensions.deprecations)
|
|
4049
|
+
deprecation.supportedUntilDate && deprecationDates.push(new Date(deprecation.supportedUntilDate));
|
|
4050
|
+
setNextDeprecationDate(deprecationDates);
|
|
4051
|
+
}
|
|
4052
|
+
|
|
4053
|
+
// ../cli-kit/dist/public/node/api/business-platform.js
|
|
4054
|
+
async function setupRequest2(token) {
|
|
4055
|
+
let api = "BusinessPlatform", url3 = `https://${await businessPlatformFqdn()}/destinations/api/2020-07/graphql`;
|
|
4056
|
+
return {
|
|
4057
|
+
token,
|
|
4058
|
+
api,
|
|
4059
|
+
url: url3,
|
|
4060
|
+
responseOptions: { onResponse: handleDeprecations }
|
|
4061
|
+
};
|
|
4062
|
+
}
|
|
4063
|
+
async function businessPlatformRequest(query, token, variables, cacheOptions) {
|
|
4064
|
+
return graphqlRequest({
|
|
4065
|
+
...await setupRequest2(token),
|
|
4066
|
+
query,
|
|
4067
|
+
variables,
|
|
4068
|
+
cacheOptions
|
|
4069
|
+
});
|
|
4070
|
+
}
|
|
4071
|
+
async function businessPlatformRequestDoc(options2) {
|
|
4072
|
+
return graphqlRequestDoc({
|
|
4073
|
+
...await setupRequest2(options2.token),
|
|
4074
|
+
query: options2.query,
|
|
4075
|
+
variables: options2.variables,
|
|
4076
|
+
cacheOptions: options2.cacheOptions,
|
|
4077
|
+
unauthorizedHandler: options2.unauthorizedHandler
|
|
4078
|
+
});
|
|
4079
|
+
}
|
|
4080
|
+
async function setupOrganizationsRequest(token, organizationId) {
|
|
4081
|
+
let api = "BusinessPlatform", url3 = `https://${await businessPlatformFqdn()}/organizations/api/unstable/organization/${organizationId}/graphql`;
|
|
4082
|
+
return {
|
|
4083
|
+
token,
|
|
4084
|
+
api,
|
|
4085
|
+
url: url3,
|
|
4086
|
+
responseOptions: { onResponse: handleDeprecations }
|
|
4087
|
+
};
|
|
4088
|
+
}
|
|
4089
|
+
async function businessPlatformOrganizationsRequest(options2) {
|
|
4090
|
+
return graphqlRequest({
|
|
4091
|
+
query: options2.query,
|
|
4092
|
+
...await setupOrganizationsRequest(options2.token, options2.organizationId),
|
|
4093
|
+
variables: options2.variables,
|
|
4094
|
+
unauthorizedHandler: options2.unauthorizedHandler
|
|
4095
|
+
});
|
|
4096
|
+
}
|
|
4097
|
+
async function businessPlatformOrganizationsRequestDoc(options2) {
|
|
4098
|
+
return graphqlRequestDoc({
|
|
4099
|
+
query: options2.query,
|
|
4100
|
+
...await setupOrganizationsRequest(options2.token, options2.organizationId),
|
|
4101
|
+
variables: options2.variables,
|
|
4102
|
+
unauthorizedHandler: options2.unauthorizedHandler
|
|
4103
|
+
});
|
|
4104
|
+
}
|
|
4105
|
+
|
|
4106
|
+
// ../cli-kit/dist/private/node/session.js
|
|
4107
|
+
async function fetchEmail(businessPlatformToken) {
|
|
4108
|
+
if (businessPlatformToken)
|
|
4109
|
+
try {
|
|
4110
|
+
return (await businessPlatformRequest(UserEmailQueryString, businessPlatformToken)).currentUserAccount?.email;
|
|
4111
|
+
} catch (error) {
|
|
4112
|
+
outputDebug(outputContent`Failed to fetch user email: ${error.message ?? String(error)}`);
|
|
4113
|
+
return;
|
|
4114
|
+
}
|
|
4115
|
+
}
|
|
4116
|
+
var userId, authMethod = "none";
|
|
4117
|
+
async function getLastSeenUserIdAfterAuth() {
|
|
4118
|
+
if (userId)
|
|
4119
|
+
return userId;
|
|
4120
|
+
let currentSessionId = getCurrentSessionId();
|
|
4121
|
+
if (currentSessionId)
|
|
4122
|
+
return currentSessionId;
|
|
4123
|
+
let customToken = getPartnersToken() ?? themeToken();
|
|
4124
|
+
return customToken ? nonRandomUUID(customToken) : "unknown";
|
|
4125
|
+
}
|
|
4126
|
+
function setLastSeenUserIdAfterAuth(id) {
|
|
4127
|
+
userId = id;
|
|
4128
|
+
}
|
|
4129
|
+
async function getLastSeenAuthMethod() {
|
|
4130
|
+
if (authMethod !== "none")
|
|
4131
|
+
return authMethod;
|
|
4132
|
+
if (getCurrentSessionId())
|
|
4133
|
+
return "device_auth";
|
|
4134
|
+
if (getPartnersToken())
|
|
4135
|
+
return "partners_token";
|
|
4136
|
+
let themePassword = themeToken();
|
|
4137
|
+
return themePassword ? isThemeAccessSession({ token: themePassword, storeFqdn: "" }) ? "theme_access_token" : "custom_app_token" : "none";
|
|
4138
|
+
}
|
|
4139
|
+
function setLastSeenAuthMethod(method) {
|
|
4140
|
+
authMethod = method;
|
|
4141
|
+
}
|
|
4142
|
+
async function ensureAuthenticated(applications, _env, { forceRefresh = !1, noPrompt = !1, forceNewSession = !1 } = {}) {
|
|
4143
|
+
let fqdn = await identityFqdn(), previousStoreFqdn = applications.adminApi?.storeFqdn;
|
|
4144
|
+
if (previousStoreFqdn) {
|
|
4145
|
+
let normalizedStoreName = await normalizeStoreFqdn(previousStoreFqdn);
|
|
4146
|
+
previousStoreFqdn === applications.adminApi?.storeFqdn && (applications.adminApi.storeFqdn = normalizedStoreName);
|
|
4147
|
+
}
|
|
4148
|
+
let sessions = await fetch() ?? {}, currentSessionId = getCurrentSessionId();
|
|
4149
|
+
if (!currentSessionId) {
|
|
4150
|
+
let userIds = Object.keys(sessions[fqdn] ?? {});
|
|
4151
|
+
userIds.length > 0 && (currentSessionId = userIds[0]);
|
|
4152
|
+
}
|
|
4153
|
+
let currentSession = currentSessionId && !forceNewSession ? sessions[fqdn]?.[currentSessionId] : void 0, scopes = getFlattenScopes(applications);
|
|
4154
|
+
outputDebug(outputContent`Validating existing session against the scopes:
|
|
4155
|
+
${outputToken.json(scopes)}
|
|
4156
|
+
For applications:
|
|
4157
|
+
${outputToken.json(applications)}
|
|
4158
|
+
`);
|
|
4159
|
+
let validationResult = await validateSession(scopes, applications, currentSession), newSession = {};
|
|
4160
|
+
if (validationResult === "needs_full_auth")
|
|
4161
|
+
await throwOnNoPrompt(noPrompt), outputDebug(outputContent`Initiating the full authentication flow...`), newSession = await executeCompleteFlow(applications);
|
|
4162
|
+
else if (validationResult === "needs_refresh" || forceRefresh) {
|
|
4163
|
+
outputDebug(outputContent`The current session is valid but needs refresh. Refreshing...`);
|
|
4164
|
+
try {
|
|
4165
|
+
newSession = await refreshTokens(currentSession, applications);
|
|
4166
|
+
} catch (error) {
|
|
4167
|
+
if (error instanceof InvalidGrantError)
|
|
4168
|
+
await throwOnNoPrompt(noPrompt), newSession = await executeCompleteFlow(applications);
|
|
4169
|
+
else throw error instanceof InvalidRequestError ? (await remove(), new AbortError(`
|
|
4170
|
+
Error validating auth session`, "We've cleared the current session, please try again")) : error;
|
|
4171
|
+
}
|
|
4172
|
+
}
|
|
4173
|
+
let completeSession = { ...currentSession, ...newSession }, newSessionId = completeSession.identity.userId, updatedSessions = {
|
|
4174
|
+
...sessions,
|
|
4175
|
+
[fqdn]: { ...sessions[fqdn], [newSessionId]: completeSession }
|
|
4176
|
+
};
|
|
4177
|
+
isEmpty(newSession) || (await store(updatedSessions), setCurrentSessionId(newSessionId));
|
|
4178
|
+
let tokens = await tokensFor(applications, completeSession), envToken = getPartnersToken();
|
|
4179
|
+
return envToken && applications.partnersApi && (tokens.partners = (await exchangeCustomPartnerToken(envToken)).accessToken), setLastSeenAuthMethod(envToken ? "partners_token" : "device_auth"), setLastSeenUserIdAfterAuth(tokens.userId), tokens;
|
|
4180
|
+
}
|
|
4181
|
+
async function throwOnNoPrompt(noPrompt) {
|
|
4182
|
+
if (noPrompt)
|
|
4183
|
+
throw await logout(), new AbortError(`The currently available CLI credentials are invalid.
|
|
4184
|
+
|
|
4185
|
+
The CLI is currently unable to prompt for reauthentication.`, "Restart the CLI process you were running. If in an interactive terminal, you will be prompted to reauthenticate. If in a non-interactive terminal, ensure the correct credentials are available in the program environment.");
|
|
4186
|
+
}
|
|
4187
|
+
async function executeCompleteFlow(applications) {
|
|
4188
|
+
let scopes = getFlattenScopes(applications), exchangeScopes = getExchangeScopes(applications), store2 = applications.adminApi?.storeFqdn;
|
|
4189
|
+
firstPartyDev() && (outputDebug(outputContent`Authenticating as Shopify Employee...`), scopes.push("employee"));
|
|
4190
|
+
let identityToken, identityTokenInformation = getIdentityTokenInformation();
|
|
4191
|
+
if (identityTokenInformation)
|
|
4192
|
+
identityToken = buildIdentityTokenFromEnv(scopes, identityTokenInformation);
|
|
4193
|
+
else {
|
|
4194
|
+
outputDebug(outputContent`Requesting device authorization code...`);
|
|
4195
|
+
let deviceAuth = await requestDeviceAuthorization(scopes);
|
|
4196
|
+
outputDebug(outputContent`Starting polling for the identity token...`), identityToken = await pollForDeviceAuthorization(deviceAuth.deviceCode, deviceAuth.interval);
|
|
4197
|
+
}
|
|
4198
|
+
outputDebug(outputContent`CLI token received. Exchanging it for application tokens...`);
|
|
4199
|
+
let result = await exchangeAccessForApplicationTokens(identityToken, exchangeScopes, store2), businessPlatformToken = result[applicationId("business-platform")]?.accessToken, alias = await fetchEmail(businessPlatformToken) ?? identityToken.userId, session = {
|
|
4200
|
+
identity: {
|
|
4201
|
+
...identityToken,
|
|
4202
|
+
alias
|
|
4203
|
+
},
|
|
4204
|
+
applications: result
|
|
4205
|
+
};
|
|
4206
|
+
return outputCompleted("Logged in."), session;
|
|
4207
|
+
}
|
|
4208
|
+
async function refreshTokens(session, applications) {
|
|
4209
|
+
let identityToken = await refreshAccessToken(session.identity), exchangeScopes = getExchangeScopes(applications), applicationTokens = await exchangeAccessForApplicationTokens(identityToken, exchangeScopes, applications.adminApi?.storeFqdn);
|
|
4210
|
+
return {
|
|
4211
|
+
identity: identityToken,
|
|
4212
|
+
applications: applicationTokens
|
|
4213
|
+
};
|
|
4214
|
+
}
|
|
4215
|
+
async function tokensFor(applications, session) {
|
|
4216
|
+
let tokens = {
|
|
4217
|
+
userId: session.identity.userId
|
|
4218
|
+
};
|
|
4219
|
+
if (applications.adminApi) {
|
|
4220
|
+
let appId = applicationId("admin"), realAppId = `${applications.adminApi.storeFqdn}-${appId}`, token = session.applications[realAppId]?.accessToken;
|
|
4221
|
+
token && (tokens.admin = { token, storeFqdn: applications.adminApi.storeFqdn });
|
|
4222
|
+
}
|
|
4223
|
+
if (applications.partnersApi) {
|
|
4224
|
+
let appId = applicationId("partners");
|
|
4225
|
+
tokens.partners = session.applications[appId]?.accessToken;
|
|
4226
|
+
}
|
|
4227
|
+
if (applications.storefrontRendererApi) {
|
|
4228
|
+
let appId = applicationId("storefront-renderer");
|
|
4229
|
+
tokens.storefront = session.applications[appId]?.accessToken;
|
|
4230
|
+
}
|
|
4231
|
+
if (applications.businessPlatformApi) {
|
|
4232
|
+
let appId = applicationId("business-platform");
|
|
4233
|
+
tokens.businessPlatform = session.applications[appId]?.accessToken;
|
|
4234
|
+
}
|
|
4235
|
+
if (applications.appManagementApi) {
|
|
4236
|
+
let appId = applicationId("app-management");
|
|
4237
|
+
tokens.appManagement = session.applications[appId]?.accessToken;
|
|
4238
|
+
}
|
|
4239
|
+
return tokens;
|
|
4240
|
+
}
|
|
4241
|
+
function getFlattenScopes(apps) {
|
|
4242
|
+
let admin = apps.adminApi?.scopes ?? [], partner = apps.partnersApi?.scopes ?? [], storefront = apps.storefrontRendererApi?.scopes ?? [], businessPlatform = apps.businessPlatformApi?.scopes ?? [], appManagement = apps.appManagementApi?.scopes ?? [], requestedScopes = [...admin, ...partner, ...storefront, ...businessPlatform, ...appManagement];
|
|
4243
|
+
return allDefaultScopes(requestedScopes);
|
|
4244
|
+
}
|
|
4245
|
+
function getExchangeScopes(apps) {
|
|
4246
|
+
let adminScope = apps.adminApi?.scopes ?? [], partnerScope = apps.partnersApi?.scopes ?? [], storefrontScopes = apps.storefrontRendererApi?.scopes ?? [], businessPlatformScopes = apps.businessPlatformApi?.scopes ?? [], appManagementScopes = apps.appManagementApi?.scopes ?? [];
|
|
4247
|
+
return {
|
|
4248
|
+
admin: apiScopes("admin", adminScope),
|
|
4249
|
+
partners: apiScopes("partners", partnerScope),
|
|
4250
|
+
storefront: apiScopes("storefront-renderer", storefrontScopes),
|
|
4251
|
+
businessPlatform: apiScopes("business-platform", businessPlatformScopes),
|
|
4252
|
+
appManagement: apiScopes("app-management", appManagementScopes)
|
|
4253
|
+
};
|
|
4254
|
+
}
|
|
4255
|
+
function buildIdentityTokenFromEnv(scopes, identityTokenInformation) {
|
|
4256
|
+
return {
|
|
4257
|
+
...identityTokenInformation,
|
|
4258
|
+
expiresAt: new Date(Date.now() + 30 * 24 * 60 * 60 * 1e3),
|
|
4259
|
+
scopes,
|
|
4260
|
+
alias: identityTokenInformation.userId
|
|
4261
|
+
};
|
|
4262
|
+
}
|
|
4263
|
+
|
|
4264
|
+
// ../../node_modules/.pnpm/jose@5.9.6/node_modules/jose/dist/node/esm/index.js
|
|
4265
|
+
init_cjs_shims();
|
|
4266
|
+
|
|
4267
|
+
// ../../node_modules/.pnpm/jose@5.9.6/node_modules/jose/dist/node/esm/runtime/base64url.js
|
|
4268
|
+
init_cjs_shims();
|
|
4269
|
+
import { Buffer } from "node:buffer";
|
|
4270
|
+
|
|
4271
|
+
// ../../node_modules/.pnpm/jose@5.9.6/node_modules/jose/dist/node/esm/lib/buffer_utils.js
|
|
4272
|
+
init_cjs_shims();
|
|
4273
|
+
var encoder = new TextEncoder(), decoder = new TextDecoder(), MAX_INT32 = 2 ** 32;
|
|
4274
|
+
|
|
4275
|
+
// ../../node_modules/.pnpm/jose@5.9.6/node_modules/jose/dist/node/esm/runtime/base64url.js
|
|
4276
|
+
function normalize(input) {
|
|
4277
|
+
let encoded = input;
|
|
4278
|
+
return encoded instanceof Uint8Array && (encoded = decoder.decode(encoded)), encoded;
|
|
4279
|
+
}
|
|
4280
|
+
var decode = (input) => new Uint8Array(Buffer.from(normalize(input), "base64url"));
|
|
4281
|
+
|
|
4282
|
+
// ../../node_modules/.pnpm/jose@5.9.6/node_modules/jose/dist/node/esm/util/errors.js
|
|
4283
|
+
init_cjs_shims();
|
|
4284
|
+
var JOSEError = class extends Error {
|
|
4285
|
+
static code = "ERR_JOSE_GENERIC";
|
|
4286
|
+
code = "ERR_JOSE_GENERIC";
|
|
4287
|
+
constructor(message, options2) {
|
|
4288
|
+
super(message, options2), this.name = this.constructor.name, Error.captureStackTrace?.(this, this.constructor);
|
|
4289
|
+
}
|
|
4290
|
+
};
|
|
4291
|
+
var JWTInvalid = class extends JOSEError {
|
|
4292
|
+
static code = "ERR_JWT_INVALID";
|
|
4293
|
+
code = "ERR_JWT_INVALID";
|
|
4294
|
+
};
|
|
4295
|
+
|
|
4296
|
+
// ../../node_modules/.pnpm/jose@5.9.6/node_modules/jose/dist/node/esm/lib/is_object.js
|
|
4297
|
+
init_cjs_shims();
|
|
4298
|
+
function isObjectLike(value) {
|
|
4299
|
+
return typeof value == "object" && value !== null;
|
|
4300
|
+
}
|
|
4301
|
+
function isObject(input) {
|
|
4302
|
+
if (!isObjectLike(input) || Object.prototype.toString.call(input) !== "[object Object]")
|
|
4303
|
+
return !1;
|
|
4304
|
+
if (Object.getPrototypeOf(input) === null)
|
|
4305
|
+
return !0;
|
|
4306
|
+
let proto = input;
|
|
4307
|
+
for (; Object.getPrototypeOf(proto) !== null; )
|
|
4308
|
+
proto = Object.getPrototypeOf(proto);
|
|
4309
|
+
return Object.getPrototypeOf(input) === proto;
|
|
4310
|
+
}
|
|
4311
|
+
|
|
4312
|
+
// ../../node_modules/.pnpm/jose@5.9.6/node_modules/jose/dist/node/esm/util/base64url.js
|
|
4313
|
+
init_cjs_shims();
|
|
4314
|
+
var decode2 = decode;
|
|
4315
|
+
|
|
4316
|
+
// ../../node_modules/.pnpm/jose@5.9.6/node_modules/jose/dist/node/esm/util/decode_jwt.js
|
|
4317
|
+
init_cjs_shims();
|
|
4318
|
+
function decodeJwt(jwt) {
|
|
4319
|
+
if (typeof jwt != "string")
|
|
4320
|
+
throw new JWTInvalid("JWTs must use Compact JWS serialization, JWT must be a string");
|
|
4321
|
+
let { 1: payload, length } = jwt.split(".");
|
|
4322
|
+
if (length === 5)
|
|
4323
|
+
throw new JWTInvalid("Only JWTs using Compact JWS serialization can be decoded");
|
|
4324
|
+
if (length !== 3)
|
|
4325
|
+
throw new JWTInvalid("Invalid JWT");
|
|
4326
|
+
if (!payload)
|
|
4327
|
+
throw new JWTInvalid("JWTs must contain a payload");
|
|
4328
|
+
let decoded;
|
|
4329
|
+
try {
|
|
4330
|
+
decoded = decode2(payload);
|
|
4331
|
+
} catch {
|
|
4332
|
+
throw new JWTInvalid("Failed to base64url decode the payload");
|
|
4333
|
+
}
|
|
4334
|
+
let result;
|
|
4335
|
+
try {
|
|
4336
|
+
result = JSON.parse(decoder.decode(decoded));
|
|
4337
|
+
} catch {
|
|
4338
|
+
throw new JWTInvalid("Failed to parse the decoded payload as JSON");
|
|
4339
|
+
}
|
|
4340
|
+
if (!isObject(result))
|
|
4341
|
+
throw new JWTInvalid("Invalid JWT Claims Set");
|
|
4342
|
+
return result;
|
|
4343
|
+
}
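A usage sketch of `decodeJwt` above with a hand-built, unsigned token: the middle segment of a compact JWT is base64url-encoded JSON, and its `sub` claim is what `buildIdentityToken` later reads as the user ID. The payload values are placeholders and no signature verification happens here.

```js
// Usage sketch for decodeJwt() above with a placeholder, unsigned JWT.
import { Buffer } from "node:buffer";

const payload = Buffer.from(JSON.stringify({ sub: "user-123", exp: 1924992000 })).toString("base64url");
const jwt = `eyJhbGciOiJub25lIn0.${payload}.signature`; // header is {"alg":"none"}, signature is a dummy segment

console.log(decodeJwt(jwt).sub); // "user-123"
```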
|
|
4344
|
+
|
|
4345
|
+
// ../cli-kit/dist/private/node/session/exchange.js
|
|
4346
|
+
var InvalidGrantError = class extends import_ts_error.ExtendableError {
|
|
4347
|
+
}, InvalidRequestError = class extends import_ts_error.ExtendableError {
|
|
4348
|
+
}, InvalidTargetError = class extends AbortError {
|
|
4349
|
+
};
|
|
4350
|
+
async function exchangeAccessForApplicationTokens(identityToken, scopes, store2) {
|
|
4351
|
+
let token = identityToken.accessToken, [partners, storefront, businessPlatform, admin, appManagement] = await Promise.all([
|
|
4352
|
+
requestAppToken("partners", token, scopes.partners),
|
|
4353
|
+
requestAppToken("storefront-renderer", token, scopes.storefront),
|
|
4354
|
+
requestAppToken("business-platform", token, scopes.businessPlatform),
|
|
4355
|
+
store2 ? requestAppToken("admin", token, scopes.admin, store2) : {},
|
|
4356
|
+
requestAppToken("app-management", token, scopes.appManagement)
|
|
4357
|
+
]);
|
|
4358
|
+
return {
|
|
4359
|
+
...partners,
|
|
4360
|
+
...storefront,
|
|
4361
|
+
...businessPlatform,
|
|
4362
|
+
...admin,
|
|
4363
|
+
...appManagement
|
|
4364
|
+
};
|
|
4365
|
+
}
|
|
4366
|
+
async function refreshAccessToken(currentToken) {
|
|
4367
|
+
let clientId2 = clientId(), params = {
|
|
4368
|
+
grant_type: "refresh_token",
|
|
4369
|
+
access_token: currentToken.accessToken,
|
|
4370
|
+
refresh_token: currentToken.refreshToken,
|
|
4371
|
+
client_id: clientId2
|
|
4372
|
+
}, value = (await tokenRequest(params)).mapError(tokenRequestErrorHandler).valueOrBug();
|
|
4373
|
+
return buildIdentityToken(value, currentToken.userId);
|
|
4374
|
+
}
|
|
4375
|
+
async function exchangeCliTokenForAccessToken(apiName, token, scopes) {
|
|
4376
|
+
let appId = applicationId(apiName);
|
|
4377
|
+
try {
|
|
4378
|
+
let accessToken = (await requestAppToken(apiName, token, scopes))[appId].accessToken, userId2 = nonRandomUUID(token);
|
|
4379
|
+
return setLastSeenUserIdAfterAuth(userId2), setLastSeenAuthMethod("partners_token"), { accessToken, userId: userId2 };
|
|
4380
|
+
} catch {
|
|
4381
|
+
let prettyName = apiName.replace(/-/g, " ").replace(/\b\w/g, (char) => char.toUpperCase());
|
|
4382
|
+
throw new AbortError(`The custom token provided can't be used for the ${prettyName} API.`, "Ensure the token is correct and not expired.");
|
|
4383
|
+
}
|
|
4384
|
+
}
|
|
4385
|
+
async function exchangeCustomPartnerToken(token) {
|
|
4386
|
+
return exchangeCliTokenForAccessToken("partners", token, tokenExchangeScopes("partners"));
|
|
4387
|
+
}
|
|
4388
|
+
async function exchangeCliTokenForAppManagementAccessToken(token) {
|
|
4389
|
+
return exchangeCliTokenForAccessToken("app-management", token, tokenExchangeScopes("app-management"));
|
|
4390
|
+
}
|
|
4391
|
+
async function exchangeCliTokenForBusinessPlatformAccessToken(token) {
|
|
4392
|
+
return exchangeCliTokenForAccessToken("business-platform", token, tokenExchangeScopes("business-platform"));
|
|
4393
|
+
}
|
|
4394
|
+
async function exchangeDeviceCodeForAccessToken(deviceCode) {
|
|
4395
|
+
let clientId2 = await clientId(), tokenResult = await tokenRequest({
|
|
4396
|
+
grant_type: "urn:ietf:params:oauth:grant-type:device_code",
|
|
4397
|
+
device_code: deviceCode,
|
|
4398
|
+
client_id: clientId2
|
|
4399
|
+
});
|
|
4400
|
+
if (tokenResult.isErr())
|
|
4401
|
+
return err(tokenResult.error.error);
|
|
4402
|
+
let identityToken = buildIdentityToken(tokenResult.value);
|
|
4403
|
+
return ok(identityToken);
|
|
4404
|
+
}
|
|
4405
|
+
async function requestAppToken(api, token, scopes = [], store2) {
|
|
4406
|
+
let appId = applicationId(api), params = {
|
|
4407
|
+
grant_type: "urn:ietf:params:oauth:grant-type:token-exchange",
|
|
4408
|
+
requested_token_type: "urn:ietf:params:oauth:token-type:access_token",
|
|
4409
|
+
subject_token_type: "urn:ietf:params:oauth:token-type:access_token",
|
|
4410
|
+
client_id: await clientId(),
|
|
4411
|
+
audience: appId,
|
|
4412
|
+
scope: scopes.join(" "),
|
|
4413
|
+
subject_token: token,
|
|
4414
|
+
...api === "admin" && { destination: `https://${store2}/admin`, store: store2 }
|
|
4415
|
+
}, identifier = appId;
|
|
4416
|
+
api === "admin" && store2 && (identifier = `${store2}-${appId}`);
|
|
4417
|
+
let value = (await tokenRequest(params)).mapError(tokenRequestErrorHandler).valueOrBug(), appToken = buildApplicationToken(value);
|
|
4418
|
+
return { [identifier]: appToken };
|
|
4419
|
+
}
|
|
4420
|
+
function tokenRequestErrorHandler({ error, store: store2 }) {
|
|
4421
|
+
let invalidTargetErrorMessage = `You are not authorized to use the CLI to develop in the provided store${store2 ? `: ${store2}` : "."}`;
|
|
4422
|
+
return error === "invalid_grant" ? new InvalidGrantError() : error === "invalid_request" ? new InvalidRequestError() : error === "invalid_target" ? new InvalidTargetError(invalidTargetErrorMessage, "", [
|
|
4423
|
+
"Ensure you have logged in to the store using the Shopify admin at least once.",
|
|
4424
|
+
"Ensure you are the store owner, or have a staff account if you are attempting to log in to a development store.",
|
|
4425
|
+
"Ensure you are using the permanent store domain, not a vanity domain."
|
|
4426
|
+
]) : new AbortError(error);
|
|
4427
|
+
}
|
|
4428
|
+
async function tokenRequest(params) {
|
|
4429
|
+
let fqdn = await identityFqdn(), url3 = new URL(`https://${fqdn}/oauth/token`);
|
|
4430
|
+
url3.search = new URLSearchParams(Object.entries(params)).toString();
|
|
4431
|
+
let res = await shopifyFetch(url3.href, { method: "POST" }), payload = await res.json();
|
|
4432
|
+
return res.ok ? ok(payload) : err({ error: payload.error, store: params.store });
|
|
4433
|
+
}
|
|
4434
|
+
function buildIdentityToken(result, existingUserId) {
|
|
4435
|
+
let userId2 = existingUserId ?? (result.id_token ? decodeJwt(result.id_token).sub : void 0);
|
|
4436
|
+
if (!userId2)
|
|
4437
|
+
throw new BugError("Error setting userId for session. No id_token or pre-existing user ID provided.");
|
|
4438
|
+
return {
|
|
4439
|
+
accessToken: result.access_token,
|
|
4440
|
+
refreshToken: result.refresh_token,
|
|
4441
|
+
expiresAt: new Date(Date.now() + result.expires_in * 1e3),
|
|
4442
|
+
scopes: result.scope.split(" "),
|
|
4443
|
+
userId: userId2
|
|
4444
|
+
};
|
|
4445
|
+
}
|
|
4446
|
+
function buildApplicationToken(result) {
|
|
4447
|
+
return {
|
|
4448
|
+
accessToken: result.access_token,
|
|
4449
|
+
expiresAt: new Date(Date.now() + result.expires_in * 1e3),
|
|
4450
|
+
scopes: result.scope.split(" ")
|
|
4451
|
+
};
|
|
4452
|
+
}
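The token shaping above converts the OAuth response fields into the session's representation: `expires_in` (seconds) becomes an absolute `expiresAt` date and the space-separated `scope` string becomes an array. A self-contained sketch with placeholder response values:

```js
// Sketch of buildApplicationToken() above; the response values are placeholders.
const response = { access_token: "atkn_example", expires_in: 7200, scope: "openid employee" };
const appToken = {
  accessToken: response.access_token,
  expiresAt: new Date(Date.now() + response.expires_in * 1000), // 2 hours from now
  scopes: response.scope.split(" "),
};
console.log(appToken.scopes); // ["openid", "employee"]
```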
|
|
4453
|
+
|
|
4454
|
+
// ../cli-kit/dist/public/node/session.js
|
|
4455
|
+
async function ensureAuthenticatedUser(env = process.env, options2 = {}) {
|
|
4456
|
+
return outputDebug(outputContent`Ensuring that the user is authenticated with no particular scopes`), { userId: (await ensureAuthenticated({}, env, options2)).userId };
|
|
4457
|
+
}
|
|
4458
|
+
async function ensureAuthenticatedPartners(scopes = [], env = process.env, options2 = {}) {
|
|
4459
|
+
outputDebug(outputContent`Ensuring that the user is authenticated with the Partners API with the following scopes:
|
|
4460
|
+
${outputToken.json(scopes)}
|
|
4461
|
+
`);
|
|
4462
|
+
let envToken = getPartnersToken();
|
|
4463
|
+
if (envToken) {
|
|
4464
|
+
let result = await exchangeCustomPartnerToken(envToken);
|
|
4465
|
+
return { token: result.accessToken, userId: result.userId };
|
|
4466
|
+
}
|
|
4467
|
+
let tokens = await ensureAuthenticated({ partnersApi: { scopes } }, env, options2);
|
|
4468
|
+
if (!tokens.partners)
|
|
4469
|
+
throw new BugError("No partners token found after ensuring authenticated");
|
|
4470
|
+
return { token: tokens.partners, userId: tokens.userId };
|
|
4471
|
+
}
|
|
4472
|
+
async function ensureAuthenticatedAppManagementAndBusinessPlatform(options2 = {}, appManagementScopes = [], businessPlatformScopes = [], env = process.env) {
|
|
4473
|
+
outputDebug(outputContent`Ensuring that the user is authenticated with the App Management API with the following scopes:
|
|
4474
|
+
${outputToken.json(appManagementScopes)}
|
|
4475
|
+
`);
|
|
4476
|
+
let envToken = getPartnersToken();
|
|
4477
|
+
if (envToken) {
|
|
4478
|
+
let appManagmentToken = await exchangeCliTokenForAppManagementAccessToken(envToken), businessPlatformToken = await exchangeCliTokenForBusinessPlatformAccessToken(envToken);
|
|
4479
|
+
return {
|
|
4480
|
+
appManagementToken: appManagmentToken.accessToken,
|
|
4481
|
+
userId: appManagmentToken.userId,
|
|
4482
|
+
businessPlatformToken: businessPlatformToken.accessToken
|
|
4483
|
+
};
|
|
4484
|
+
}
|
|
4485
|
+
let tokens = await ensureAuthenticated({ appManagementApi: { scopes: appManagementScopes }, businessPlatformApi: { scopes: businessPlatformScopes } }, env, options2);
|
|
4486
|
+
if (!tokens.appManagement || !tokens.businessPlatform)
|
|
4487
|
+
throw new BugError("No App Management or Business Platform token found after ensuring authenticated");
|
|
4488
|
+
return {
|
|
4489
|
+
appManagementToken: tokens.appManagement,
|
|
4490
|
+
userId: tokens.userId,
|
|
4491
|
+
businessPlatformToken: tokens.businessPlatform
|
|
4492
|
+
};
|
|
4493
|
+
}
|
|
4494
|
+
async function ensureAuthenticatedStorefront(scopes = [], password = void 0, options2 = {}) {
|
|
4495
|
+
if (password) {
|
|
4496
|
+
let authMethod2 = isThemeAccessSession({ token: password, storeFqdn: "" }) ? "theme_access_token" : "custom_app_token";
|
|
4497
|
+
return setLastSeenAuthMethod(authMethod2), setLastSeenUserIdAfterAuth(nonRandomUUID(password)), password;
|
|
4498
|
+
}
|
|
4499
|
+
outputDebug(outputContent`Ensuring that the user is authenticated with the Storefront API with the following scopes:
|
|
4500
|
+
${outputToken.json(scopes)}
|
|
4501
|
+
`);
|
|
4502
|
+
let tokens = await ensureAuthenticated({ storefrontRendererApi: { scopes } }, process.env, options2);
|
|
4503
|
+
if (!tokens.storefront)
|
|
4504
|
+
throw new BugError("No storefront token found after ensuring authenticated");
|
|
4505
|
+
return tokens.storefront;
|
|
4506
|
+
}
|
|
4507
|
+
async function ensureAuthenticatedAdmin(store2, scopes = [], options2 = {}) {
|
|
4508
|
+
outputDebug(outputContent`Ensuring that the user is authenticated with the Admin API with the following scopes for the store ${outputToken.raw(store2)}:
|
|
4509
|
+
${outputToken.json(scopes)}
|
|
4510
|
+
`);
|
|
4511
|
+
let tokens = await ensureAuthenticated({ adminApi: { scopes, storeFqdn: store2 } }, process.env, {
|
|
4512
|
+
...options2
|
|
4513
|
+
});
|
|
4514
|
+
if (!tokens.admin)
|
|
4515
|
+
throw new BugError("No admin token found after ensuring authenticated");
|
|
4516
|
+
return tokens.admin;
|
|
4517
|
+
}
|
|
4518
|
+
async function ensureAuthenticatedThemes(store2, password, scopes = [], options2 = {}) {
|
|
4519
|
+
if (outputDebug(outputContent`Ensuring that the user is authenticated with the Theme API with the following scopes:
|
|
4520
|
+
${outputToken.json(scopes)}
|
|
4521
|
+
`), password) {
|
|
4522
|
+
let session = { token: password, storeFqdn: store2 }, authMethod2 = isThemeAccessSession(session) ? "theme_access_token" : "custom_app_token";
|
|
4523
|
+
return setLastSeenAuthMethod(authMethod2), setLastSeenUserIdAfterAuth(nonRandomUUID(password)), session;
|
|
4524
|
+
}
|
|
4525
|
+
return ensureAuthenticatedAdmin(store2, scopes, options2);
|
|
4526
|
+
}
|
|
4527
|
+
async function ensureAuthenticatedBusinessPlatform(scopes = []) {
|
|
4528
|
+
outputDebug(outputContent`Ensuring that the user is authenticated with the Business Platform API with the following scopes:
|
|
4529
|
+
${outputToken.json(scopes)}
|
|
4530
|
+
`);
|
|
4531
|
+
let tokens = await ensureAuthenticated({ businessPlatformApi: { scopes } }, process.env);
|
|
4532
|
+
if (!tokens.businessPlatform)
|
|
4533
|
+
throw new BugError("No business-platform token found after ensuring authenticated");
|
|
4534
|
+
return tokens.businessPlatform;
|
|
4535
|
+
}
|
|
4536
|
+
function logout() {
|
|
4537
|
+
return remove();
|
|
4538
|
+
}
|
|
4539
|
+
|
|
4540
|
+
+// ../cli-kit/dist/private/node/analytics.js
+init_cjs_shims();
+async function startAnalytics({ commandContent, args, currentTime = (/* @__PURE__ */ new Date()).getTime(), commandClass }) {
+  let startCommand = commandContent.command;
+  commandClass && Object.prototype.hasOwnProperty.call(commandClass, "analyticsNameOverride") && (startCommand = commandClass.analyticsNameOverride() ?? commandContent.command);
+  let pluginName = commandClass?.plugin?.name;
+  commandClass && "customPluginName" in commandClass && (pluginName = commandClass.customPluginName), await addSensitiveMetadata(() => ({
+    commandStartOptions: {
+      startTime: currentTime,
+      startCommand,
+      startArgs: args
+    }
+  })), await addPublicMetadata(() => ({
+    cmd_all_launcher: packageManagerFromUserAgent(),
+    cmd_all_alias_used: commandContent.alias,
+    cmd_all_topic: commandContent.topic,
+    cmd_all_plugin: pluginName,
+    cmd_all_force: flagIncluded("force", commandClass) ? args.includes("--force") : void 0
+  }));
+}
+async function getEnvironmentData(config) {
+  let ciplatform = ciPlatform(), pluginNames = getPluginNames(config), shopifyPlugins = pluginNames.filter((plugin) => plugin.startsWith("@shopify/")), { platform, arch } = platformAndArch();
+  return {
+    uname: `${platform} ${arch}`,
+    env_ci: ciplatform.isCI,
+    env_ci_platform: ciplatform.name,
+    env_plugin_installed_any_custom: pluginNames.length !== shopifyPlugins.length,
+    env_plugin_installed_shopify: JSON.stringify(shopifyPlugins),
+    env_shell: config.shell,
+    env_web_ide: cloudEnvironment().editor ? cloudEnvironment().platform : void 0,
+    env_device_id: hashString(await macAddress()),
+    env_cloud: cloudEnvironment().platform,
+    env_package_manager: await getPackageManager(cwd()),
+    env_is_global: currentProcessIsGlobal(),
+    env_auth_method: await getLastSeenAuthMethod(),
+    env_is_wsl: await isWsl(),
+    env_build_repository: "Shopify/cli"
+  };
+}
+async function getSensitiveEnvironmentData(config) {
+  return {
+    env_plugin_installed_all: JSON.stringify(getPluginNames(config))
+  };
+}
+function getPluginNames(config) {
+  return [...config.plugins.keys()].sort().filter((plugin) => !plugin.startsWith("@oclif/"));
+}
+function flagIncluded(flag, commandClass) {
+  if (!commandClass)
+    return !1;
+  let commandFlags = commandClass.flags ?? {};
+  return Object.keys(commandFlags).includes(flag);
+}
+
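For readers skimming the analytics changes, the interface below is a reader's sketch of the object getEnvironmentData assembles; field names are taken directly from the bundled code above, while the optionality and comments are inferred and not a type shipped by the package.

// Illustrative only: approximate shape of the environment payload built by getEnvironmentData.
interface EnvironmentData {
  uname: string                              // `${platform} ${arch}`, e.g. "darwin arm64"
  env_ci: boolean                            // ciPlatform().isCI
  env_ci_platform?: string                   // ciPlatform().name
  env_plugin_installed_any_custom: boolean   // true when any non-@shopify/ plugin is installed
  env_plugin_installed_shopify: string       // JSON-stringified list of @shopify/ plugin names
  env_shell?: string                         // oclif config.shell
  env_web_ide?: string                       // only set when running in a cloud editor
  env_device_id: string                      // hash of the machine's MAC address
  env_cloud?: string                         // cloudEnvironment().platform
  env_package_manager: string
  env_is_global: boolean
  env_auth_method: string
  env_is_wsl: boolean
  env_build_repository: string               // "Shopify/cli"
}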
+export {
+  serviceEnvironment,
+  isLocalEnvironment,
+  DevServerCore,
+  partnersFqdn,
+  adminFqdn,
+  appManagementFqdn,
+  appDevFqdn,
+  developerDashboardFqdn,
+  businessPlatformFqdn,
+  identityFqdn,
+  normalizeStoreFqdn,
+  ok,
+  err,
+  fetch,
+  getSessionAlias,
+  findSessionByAlias,
+  isThemeAccessSession,
+  ensureAuthenticatedUser,
+  ensureAuthenticatedPartners,
+  ensureAuthenticatedAppManagementAndBusinessPlatform,
+  ensureAuthenticatedStorefront,
+  ensureAuthenticatedAdmin,
+  ensureAuthenticatedThemes,
+  ensureAuthenticatedBusinessPlatform,
+  logout,
+  require_cjs,
+  require_baseAssignValue,
+  require_assignValue,
+  require_getPrototype,
+  require_getSymbolsIn,
+  require_keysIn,
+  require_getAllKeysIn,
+  deepMergeObjects,
+  pickBy,
+  deepCompare,
+  deepDifference,
+  getPathValue,
+  setPathValue,
+  isEmpty,
+  compact,
+  requestIdsCollection,
+  graphqlRequest,
+  graphqlRequestDoc,
+  addCursorAndFiltersToAppLogsUrl,
+  getNextDeprecationDate,
+  setNextDeprecationDate,
+  require_lib,
+  partnersRequest,
+  generateFetchAppLogUrl,
+  partnersRequestDoc,
+  businessPlatformRequest,
+  businessPlatformRequestDoc,
+  businessPlatformOrganizationsRequest,
+  businessPlatformOrganizationsRequestDoc,
+  getLastSeenUserIdAfterAuth,
+  startAnalytics,
+  getEnvironmentData,
+  getSensitiveEnvironmentData
+};
+//# sourceMappingURL=chunk-KIC7OBUL.js.map