@sanity/cli 3.36.4 → 3.36.5-canary.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/_chunks-cjs/cli.js +1779 -1317
- package/lib/_chunks-cjs/cli.js.map +1 -1
- package/lib/_chunks-cjs/journeyConfig.js +11 -11
- package/lib/_chunks-cjs/journeyConfig.js.map +1 -1
- package/lib/_chunks-cjs/loadEnv.js +105 -2719
- package/lib/_chunks-cjs/loadEnv.js.map +1 -1
- package/lib/cli.d.ts +10 -0
- package/lib/index.d.ts +448 -0
- package/lib/index.esm.js +449 -5
- package/lib/index.esm.js.map +1 -1
- package/lib/index.js +3 -3
- package/lib/index.js.map +1 -1
- package/lib/index.mjs +452 -0
- package/lib/index.mjs.map +1 -0
- package/lib/run.d.ts +1 -0
- package/lib/workers/getAndWriteJourneySchema.d.ts +1 -0
- package/lib/workers/getCliConfig.d.ts +1 -0
- package/lib/workers/typegenGenerate.d.ts +37 -0
- package/lib/workers/typegenGenerate.js +6 -2
- package/lib/workers/typegenGenerate.js.map +1 -1
- package/package.json +18 -21
- package/src/actions/init-project/templates/shopify.ts +7 -0
- package/src/util/journeyConfig.ts +5 -5
- package/templates/ecommerce/plugins/barcode-input/BarcodeInput.js +1 -1
- package/templates/get-started/plugins/sanity-plugin-tutorial/GetStartedTutorial.tsx +1 -1
- package/templates/shared/tsconfig.json +2 -4
- package/templates/shopify/README.md +3 -3
- package/templates/shopify/components/hotspots/ProductTooltip.tsx +1 -1
- package/templates/shopify/components/icons/Shopify.tsx +1 -1
- package/templates/shopify/components/inputs/CollectionHidden.tsx +1 -1
- package/templates/shopify/components/media/ColorTheme.tsx +1 -1
- package/templates/shopify/components/studio/Navbar.tsx +29 -0
- package/templates/shopify/constants.ts +31 -1
- package/templates/shopify/docs/features.md +6 -7
- package/templates/shopify/schemaTypes/documents/collection.tsx +11 -47
- package/templates/shopify/schemaTypes/documents/colorTheme.tsx +2 -17
- package/templates/shopify/schemaTypes/documents/page.ts +10 -37
- package/templates/shopify/schemaTypes/documents/product.tsx +4 -27
- package/templates/shopify/schemaTypes/documents/productVariant.tsx +4 -13
- package/templates/shopify/schemaTypes/index.ts +97 -126
- package/templates/shopify/schemaTypes/objects/collection/{group.ts → collectionGroupType.ts} +7 -10
- package/templates/shopify/schemaTypes/objects/collection/{links.ts → collectionLinksType.ts} +4 -4
- package/templates/shopify/schemaTypes/objects/customProductOption/{colorObject.tsx → customProductOptionColorObjectType.tsx} +3 -6
- package/templates/shopify/schemaTypes/objects/customProductOption/{color.tsx → customProductOptionColorType.tsx} +2 -10
- package/templates/shopify/schemaTypes/objects/customProductOption/{sizeObject.ts → customProductOptionSizeObjectType.ts} +3 -7
- package/templates/shopify/schemaTypes/objects/customProductOption/{size.ts → customProductOptionSizeType.ts} +3 -12
- package/templates/shopify/schemaTypes/objects/global/footerType.ts +22 -0
- package/templates/shopify/schemaTypes/objects/global/menuLinksType.ts +21 -0
- package/templates/shopify/schemaTypes/objects/global/{menu.ts → menuType.ts} +2 -4
- package/templates/shopify/schemaTypes/objects/global/{notFoundPage.ts → notFoundPageType.ts} +1 -6
- package/templates/shopify/schemaTypes/objects/hotspot/{imageWithProductHotspots.ts → imageWithProductHotspotsType.ts} +4 -6
- package/templates/shopify/schemaTypes/objects/hotspot/{productHotspots.tsx → productHotspotsType.tsx} +3 -5
- package/templates/shopify/schemaTypes/objects/hotspot/{spot.tsx → spotType.tsx} +1 -3
- package/templates/shopify/schemaTypes/{annotations/linkEmail.tsx → objects/link/linkEmailType.tsx} +5 -13
- package/templates/shopify/schemaTypes/{annotations/linkExternal.tsx → objects/link/linkExternalType.tsx} +6 -13
- package/templates/shopify/schemaTypes/{annotations/linkInternal.tsx → objects/link/linkInternalType.tsx} +5 -12
- package/templates/shopify/schemaTypes/{annotations/product.tsx → objects/link/linkProductType.tsx} +4 -16
- package/templates/shopify/schemaTypes/objects/module/{accordionGroup.ts → accordionGroupType.ts} +6 -9
- package/templates/shopify/schemaTypes/objects/module/{accordion.ts → accordionType.ts} +4 -11
- package/templates/shopify/schemaTypes/objects/module/{callToAction.tsx → callToActionType.tsx} +10 -23
- package/templates/shopify/schemaTypes/objects/module/{callout.ts → calloutType.ts} +4 -10
- package/templates/shopify/schemaTypes/objects/module/{collection.tsx → collectionReferenceType.tsx} +4 -9
- package/templates/shopify/schemaTypes/objects/module/gridItemType.ts +41 -0
- package/templates/shopify/schemaTypes/objects/module/{grid.ts → gridType.ts} +7 -15
- package/templates/shopify/schemaTypes/objects/{hero/home.tsx → module/heroType.tsx} +14 -17
- package/templates/shopify/schemaTypes/objects/module/{imageCallToAction.tsx → imageCallToActionType.tsx} +6 -10
- package/templates/shopify/schemaTypes/objects/module/{image.ts → imageFeatureType.ts} +6 -18
- package/templates/shopify/schemaTypes/objects/module/{images.tsx → imageFeaturesType.tsx} +7 -25
- package/templates/shopify/schemaTypes/objects/module/{instagram.ts → instagramType.ts} +2 -2
- package/templates/shopify/schemaTypes/objects/module/{products.tsx → productFeaturesType.tsx} +6 -11
- package/templates/shopify/schemaTypes/objects/module/{product.tsx → productReferenceType.tsx} +6 -8
- package/templates/shopify/schemaTypes/objects/{seo/seo.ts → seoType.ts} +7 -6
- package/templates/shopify/schemaTypes/objects/shopify/{shopifyCollectionRule.tsx → collectionRuleType.tsx} +2 -10
- package/templates/shopify/schemaTypes/objects/shopify/{inventory.ts → inventoryType.ts} +7 -12
- package/templates/shopify/schemaTypes/objects/shopify/{option.tsx → optionType.tsx} +6 -12
- package/templates/shopify/schemaTypes/objects/shopify/{placeholderString.ts → placeholderStringType.ts} +3 -1
- package/templates/shopify/schemaTypes/objects/shopify/{priceRange.ts → priceRangeType.ts} +5 -7
- package/templates/shopify/schemaTypes/objects/shopify/{productWithVariant.tsx → productWithVariantType.tsx} +2 -2
- package/templates/shopify/schemaTypes/objects/shopify/{proxyString.ts → proxyStringType.ts} +1 -1
- package/templates/shopify/schemaTypes/objects/shopify/{shopifyCollection.ts → shopifyCollectionType.ts} +4 -25
- package/templates/shopify/schemaTypes/objects/shopify/{shopifyProduct.ts → shopifyProductType.ts} +5 -35
- package/templates/shopify/schemaTypes/objects/shopify/{shopifyProductVariant.ts → shopifyProductVariantType.ts} +1 -27
- package/templates/shopify/schemaTypes/portableText/portableTextSimpleType.tsx +45 -0
- package/templates/shopify/schemaTypes/portableText/portableTextType.tsx +52 -0
- package/templates/shopify/schemaTypes/singletons/homeType.ts +49 -0
- package/templates/shopify/schemaTypes/singletons/{settings.ts → settingsType.ts} +7 -9
- package/templates/shopify/utils/shopifyUrls.ts +3 -3
- package/templates/shopify/utils/validateSlug.ts +3 -7
- package/lib/_chunks-cjs/index.js +0 -547
- package/lib/_chunks-cjs/index.js.map +0 -1
- package/lib/_chunks-cjs/node.js +0 -213
- package/lib/_chunks-cjs/node.js.map +0 -1
- package/lib/_chunks-cjs/stegaEncodeSourceMap.js +0 -357
- package/lib/_chunks-cjs/stegaEncodeSourceMap.js.map +0 -1
- package/lib/_chunks-es/index.js +0 -3336
- package/lib/_chunks-es/index.js.map +0 -1
- package/lib/_chunks-es/node.js +0 -216
- package/lib/_chunks-es/node.js.map +0 -1
- package/lib/_chunks-es/stegaEncodeSourceMap.js +0 -358
- package/lib/_chunks-es/stegaEncodeSourceMap.js.map +0 -1
- package/lib/index.cjs.mjs +0 -7
- package/templates/shopify/schemaTypes/blocks/body.tsx +0 -70
- package/templates/shopify/schemaTypes/objects/global/footer.ts +0 -57
- package/templates/shopify/schemaTypes/objects/global/linkExternal.ts +0 -52
- package/templates/shopify/schemaTypes/objects/global/linkInternal.ts +0 -65
- package/templates/shopify/schemaTypes/objects/global/links.ts +0 -16
- package/templates/shopify/schemaTypes/objects/hero/collection.tsx +0 -42
- package/templates/shopify/schemaTypes/objects/hero/page.tsx +0 -35
- package/templates/shopify/schemaTypes/objects/module/accordionBody.ts +0 -45
- package/templates/shopify/schemaTypes/objects/module/gridItem.ts +0 -91
- package/templates/shopify/schemaTypes/objects/seo/description.tsx +0 -10
- package/templates/shopify/schemaTypes/objects/seo/home.tsx +0 -31
- package/templates/shopify/schemaTypes/objects/seo/page.tsx +0 -37
- package/templates/shopify/schemaTypes/objects/seo/shopify.tsx +0 -40
- package/templates/shopify/schemaTypes/singletons/home.ts +0 -62
- /package/lib/{dts/src/index.d.ts → index.d.mts} +0 -0
package/lib/index.mjs
ADDED
@@ -0,0 +1,452 @@
+import { createClient } from "@sanity/client";
+import fs$1 from "fs";
+import path$1 from "path";
+import "worker_threads";
+import "pkg-dir";
+import debugIt from "debug";
+import fs$2 from "node:fs";
+import path$2 from "node:path";
+import require$$2 from "os";
+import require$$3 from "crypto";
+const requireFunc = typeof __webpack_require__ == "function" ? __non_webpack_require__ : require;
+function dynamicRequire(request) {
+  const mod = requireFunc(request);
+  return mod.__esModule && mod.default ? mod.default : mod;
+}
+dynamicRequire.resolve = requireFunc.resolve;
+function getCliConfigSync(cwd) {
+  return getSanityCliConfig(cwd) || getSanityJsonConfig(cwd);
+}
+function getSanityJsonConfig(cwd) {
+  const configPath = path$1.join(cwd, "sanity.json");
+  return fs$1.existsSync(configPath) ? {
+    config: loadJsonConfig(configPath),
+    path: configPath,
+    version: 2
+  } : null;
+}
+function getSanityCliConfig(cwd) {
+  const jsConfigPath = path$1.join(cwd, "sanity.cli.js"), tsConfigPath = path$1.join(cwd, "sanity.cli.ts"), [js, ts] = [fs$1.existsSync(jsConfigPath), fs$1.existsSync(tsConfigPath)];
+  return !js && !ts ? null : !js && ts ? {
+    config: importConfig(tsConfigPath),
+    path: tsConfigPath,
+    version: 3
+  } : (js && ts && warn("Found both `sanity.cli.js` and `sanity.cli.ts` - using sanity.cli.js"), {
+    config: importConfig(jsConfigPath),
+    path: jsConfigPath,
+    version: 3
+  });
+}
+function loadJsonConfig(filePath) {
+  try {
+    const content = fs$1.readFileSync(filePath, "utf8");
+    return JSON.parse(content);
+  } catch (err) {
+    return console.error(`Error reading "${filePath}": ${err.message}`), null;
+  }
+}
+function importConfig(filePath) {
+  try {
+    const config2 = dynamicRequire(filePath);
+    if (config2 === null || typeof config2 != "object")
+      throw new Error("Module export is not a configuration object");
+    return "default" in config2 ? config2.default : config2;
+  } catch (err) {
+    return err.code === "MODULE_NOT_FOUND" && err.message.includes("sanity/cli") || console.error(`Error reading "${filePath}": ${err.message}`), null;
+  }
+}
+function warn(warning) {
+  typeof process.send == "function" ? process.send({ type: "warning", warning }) : console.warn(warning);
+}
+const debug = debugIt("sanity:cli");
+function resolveRootDir(cwd) {
+  try {
+    return resolveProjectRoot(cwd) || cwd;
+  } catch (err) {
+    throw new Error(`Error occurred trying to resolve project root:
+${err.message}`);
+  }
+}
+function hasStudioConfig(basePath) {
+  return [
+    fileExists(path$1.join(basePath, "sanity.config.js")),
+    fileExists(path$1.join(basePath, "sanity.config.ts")),
+    isSanityV2StudioRoot(basePath)
+  ].some(Boolean);
+}
+function resolveProjectRoot(basePath, iterations = 0) {
+  if (hasStudioConfig(basePath))
+    return basePath;
+  const parentDir = path$1.resolve(basePath, "..");
+  return parentDir === basePath || iterations > 30 ? !1 : resolveProjectRoot(parentDir, iterations + 1);
+}
+function isSanityV2StudioRoot(basePath) {
+  try {
+    const content = fs$1.readFileSync(path$1.join(basePath, "sanity.json"), "utf8"), isRoot = !!JSON.parse(content)?.root;
+    return isRoot && debug("Found Sanity v2 studio root at %s", basePath), isRoot;
+  } catch {
+    return !1;
+  }
+}
+function fileExists(filePath) {
+  return fs$1.existsSync(filePath);
+}
+function getCliClient(options = {}) {
+  if (typeof process != "object")
+    throw new Error("getCliClient() should only be called from node.js scripts");
+  const {
+    // eslint-disable-next-line no-process-env
+    cwd = process.env.SANITY_BASE_PATH || process.cwd(),
+    useCdn = !1,
+    apiVersion = "2022-06-06",
+    projectId,
+    dataset,
+    token = getCliClient.__internal__getToken()
+  } = options;
+  if (projectId && dataset)
+    return createClient({ projectId, dataset, apiVersion, useCdn, token });
+  const rootDir = resolveRootDir(cwd), { config: config2 } = getCliConfigSync(rootDir) || {};
+  if (!config2)
+    throw new Error("Unable to resolve CLI configuration");
+  const apiConfig = config2?.api || {};
+  if (!apiConfig.projectId || !apiConfig.dataset)
+    throw new Error("Unable to resolve project ID/dataset from CLI configuration");
+  return createClient({
+    projectId: apiConfig.projectId,
+    dataset: apiConfig.dataset,
+    apiVersion,
+    useCdn,
+    token
+  });
+}
+getCliClient.__internal__getToken = () => {
+};
+function defineCliConfig(config2) {
+  return config2;
+}
+function createCliConfig(config2) {
+  return config2;
+}
+var main$1 = { exports: {} }, name = "dotenv", version$1 = "16.4.5", description = "Loads environment variables from .env file", main = "lib/main.js", types = "lib/main.d.ts", exports = {
+  ".": {
+    types: "./lib/main.d.ts",
+    require: "./lib/main.js",
+    default: "./lib/main.js"
+  },
+  "./config": "./config.js",
+  "./config.js": "./config.js",
+  "./lib/env-options": "./lib/env-options.js",
+  "./lib/env-options.js": "./lib/env-options.js",
+  "./lib/cli-options": "./lib/cli-options.js",
+  "./lib/cli-options.js": "./lib/cli-options.js",
+  "./package.json": "./package.json"
+}, scripts = {
+  "dts-check": "tsc --project tests/types/tsconfig.json",
+  lint: "standard",
+  "lint-readme": "standard-markdown",
+  pretest: "npm run lint && npm run dts-check",
+  test: "tap tests/*.js --100 -Rspec",
+  "test:coverage": "tap --coverage-report=lcov",
+  prerelease: "npm test",
+  release: "standard-version"
+}, repository = {
+  type: "git",
+  url: "git://github.com/motdotla/dotenv.git"
+}, funding = "https://dotenvx.com", keywords = [
+  "dotenv",
+  "env",
+  ".env",
+  "environment",
+  "variables",
+  "config",
+  "settings"
+], readmeFilename = "README.md", license = "BSD-2-Clause", devDependencies = {
+  "@definitelytyped/dtslint": "^0.0.133",
+  "@types/node": "^18.11.3",
+  decache: "^4.6.1",
+  sinon: "^14.0.1",
+  standard: "^17.0.0",
+  "standard-markdown": "^7.1.0",
+  "standard-version": "^9.5.0",
+  tap: "^16.3.0",
+  tar: "^6.1.11",
+  typescript: "^4.8.4"
+}, engines = {
+  node: ">=12"
+}, browser = {
+  fs: !1
+}, require$$4 = {
+  name,
+  version: version$1,
+  description,
+  main,
+  types,
+  exports,
+  scripts,
+  repository,
+  funding,
+  keywords,
+  readmeFilename,
+  license,
+  devDependencies,
+  engines,
+  browser
+};
+const fs = fs$1, path = path$1, os = require$$2, crypto = require$$3, packageJson = require$$4, version = packageJson.version, LINE = /(?:^|^)\s*(?:export\s+)?([\w.-]+)(?:\s*=\s*?|:\s+?)(\s*'(?:\\'|[^'])*'|\s*"(?:\\"|[^"])*"|\s*`(?:\\`|[^`])*`|[^#\r\n]+)?\s*(?:#.*)?(?:$|$)/mg;
+function parse(src) {
+  const obj = {};
+  let lines = src.toString();
+  lines = lines.replace(/\r\n?/mg, `
+`);
+  let match;
+  for (; (match = LINE.exec(lines)) != null; ) {
+    const key = match[1];
+    let value = match[2] || "";
+    value = value.trim();
+    const maybeQuote = value[0];
+    value = value.replace(/^(['"`])([\s\S]*)\1$/mg, "$2"), maybeQuote === '"' && (value = value.replace(/\\n/g, `
+`), value = value.replace(/\\r/g, "\r")), obj[key] = value;
+  }
+  return obj;
+}
+function _parseVault(options) {
+  const vaultPath = _vaultPath(options), result = DotenvModule.configDotenv({ path: vaultPath });
+  if (!result.parsed) {
+    const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`);
+    throw err.code = "MISSING_DATA", err;
+  }
+  const keys = _dotenvKey(options).split(","), length = keys.length;
+  let decrypted;
+  for (let i = 0; i < length; i++)
+    try {
+      const key = keys[i].trim(), attrs = _instructions(result, key);
+      decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key);
+      break;
+    } catch (error) {
+      if (i + 1 >= length)
+        throw error;
+    }
+  return DotenvModule.parse(decrypted);
+}
+function _log(message) {
+  console.log(`[dotenv@${version}][INFO] ${message}`);
+}
+function _warn(message) {
+  console.log(`[dotenv@${version}][WARN] ${message}`);
+}
+function _debug(message) {
+  console.log(`[dotenv@${version}][DEBUG] ${message}`);
+}
+function _dotenvKey(options) {
+  return options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0 ? options.DOTENV_KEY : process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0 ? process.env.DOTENV_KEY : "";
+}
+function _instructions(result, dotenvKey) {
+  let uri;
+  try {
+    uri = new URL(dotenvKey);
+  } catch (error) {
+    if (error.code === "ERR_INVALID_URL") {
+      const err = new Error("INVALID_DOTENV_KEY: Wrong format. Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development");
+      throw err.code = "INVALID_DOTENV_KEY", err;
+    }
+    throw error;
+  }
+  const key = uri.password;
+  if (!key) {
+    const err = new Error("INVALID_DOTENV_KEY: Missing key part");
+    throw err.code = "INVALID_DOTENV_KEY", err;
+  }
+  const environment = uri.searchParams.get("environment");
+  if (!environment) {
+    const err = new Error("INVALID_DOTENV_KEY: Missing environment part");
+    throw err.code = "INVALID_DOTENV_KEY", err;
+  }
+  const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`, ciphertext = result.parsed[environmentKey];
+  if (!ciphertext) {
+    const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`);
+    throw err.code = "NOT_FOUND_DOTENV_ENVIRONMENT", err;
+  }
+  return { ciphertext, key };
+}
+function _vaultPath(options) {
+  let possibleVaultPath = null;
+  if (options && options.path && options.path.length > 0)
+    if (Array.isArray(options.path))
+      for (const filepath of options.path)
+        fs.existsSync(filepath) && (possibleVaultPath = filepath.endsWith(".vault") ? filepath : `${filepath}.vault`);
+    else
+      possibleVaultPath = options.path.endsWith(".vault") ? options.path : `${options.path}.vault`;
+  else
+    possibleVaultPath = path.resolve(process.cwd(), ".env.vault");
+  return fs.existsSync(possibleVaultPath) ? possibleVaultPath : null;
+}
+function _resolveHome(envPath) {
+  return envPath[0] === "~" ? path.join(os.homedir(), envPath.slice(1)) : envPath;
+}
+function _configVault(options) {
+  _log("Loading env from encrypted .env.vault");
+  const parsed = DotenvModule._parseVault(options);
+  let processEnv = process.env;
+  return options && options.processEnv != null && (processEnv = options.processEnv), DotenvModule.populate(processEnv, parsed, options), { parsed };
+}
+function configDotenv(options) {
+  const dotenvPath = path.resolve(process.cwd(), ".env");
+  let encoding = "utf8";
+  const debug2 = !!(options && options.debug);
+  options && options.encoding ? encoding = options.encoding : debug2 && _debug("No encoding is specified. UTF-8 is used by default");
+  let optionPaths = [dotenvPath];
+  if (options && options.path)
+    if (!Array.isArray(options.path))
+      optionPaths = [_resolveHome(options.path)];
+    else {
+      optionPaths = [];
+      for (const filepath of options.path)
+        optionPaths.push(_resolveHome(filepath));
+    }
+  let lastError;
+  const parsedAll = {};
+  for (const path2 of optionPaths)
+    try {
+      const parsed = DotenvModule.parse(fs.readFileSync(path2, { encoding }));
+      DotenvModule.populate(parsedAll, parsed, options);
+    } catch (e) {
+      debug2 && _debug(`Failed to load ${path2} ${e.message}`), lastError = e;
+    }
+  let processEnv = process.env;
+  return options && options.processEnv != null && (processEnv = options.processEnv), DotenvModule.populate(processEnv, parsedAll, options), lastError ? { parsed: parsedAll, error: lastError } : { parsed: parsedAll };
+}
+function config(options) {
+  if (_dotenvKey(options).length === 0)
+    return DotenvModule.configDotenv(options);
+  const vaultPath = _vaultPath(options);
+  return vaultPath ? DotenvModule._configVault(options) : (_warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. Did you forget to build it?`), DotenvModule.configDotenv(options));
+}
+function decrypt(encrypted, keyStr) {
+  const key = Buffer.from(keyStr.slice(-64), "hex");
+  let ciphertext = Buffer.from(encrypted, "base64");
+  const nonce = ciphertext.subarray(0, 12), authTag = ciphertext.subarray(-16);
+  ciphertext = ciphertext.subarray(12, -16);
+  try {
+    const aesgcm = crypto.createDecipheriv("aes-256-gcm", key, nonce);
+    return aesgcm.setAuthTag(authTag), `${aesgcm.update(ciphertext)}${aesgcm.final()}`;
+  } catch (error) {
+    const isRange = error instanceof RangeError, invalidKeyLength = error.message === "Invalid key length", decryptionFailed = error.message === "Unsupported state or unable to authenticate data";
+    if (isRange || invalidKeyLength) {
+      const err = new Error("INVALID_DOTENV_KEY: It must be 64 characters long (or more)");
+      throw err.code = "INVALID_DOTENV_KEY", err;
+    } else if (decryptionFailed) {
+      const err = new Error("DECRYPTION_FAILED: Please check your DOTENV_KEY");
+      throw err.code = "DECRYPTION_FAILED", err;
+    } else
+      throw error;
+  }
+}
+function populate(processEnv, parsed, options = {}) {
+  const debug2 = !!(options && options.debug), override = !!(options && options.override);
+  if (typeof parsed != "object") {
+    const err = new Error("OBJECT_REQUIRED: Please check the processEnv argument being passed to populate");
+    throw err.code = "OBJECT_REQUIRED", err;
+  }
+  for (const key of Object.keys(parsed))
+    Object.prototype.hasOwnProperty.call(processEnv, key) ? (override === !0 && (processEnv[key] = parsed[key]), debug2 && _debug(override === !0 ? `"${key}" is already defined and WAS overwritten` : `"${key}" is already defined and was NOT overwritten`)) : processEnv[key] = parsed[key];
+}
+const DotenvModule = {
+  configDotenv,
+  _configVault,
+  _parseVault,
+  config,
+  decrypt,
+  parse,
+  populate
+};
+main$1.exports.configDotenv = DotenvModule.configDotenv;
+main$1.exports._configVault = DotenvModule._configVault;
+main$1.exports._parseVault = DotenvModule._parseVault;
+main$1.exports.config = DotenvModule.config;
+main$1.exports.decrypt = DotenvModule.decrypt;
+var parse_1 = main$1.exports.parse = DotenvModule.parse;
+main$1.exports.populate = DotenvModule.populate;
+main$1.exports = DotenvModule;
+function _interpolate(envValue, environment, config2) {
+  const matches = envValue.match(/(.?\${*[\w]*(?::-[\w/]*)?}*)/g) || [];
+  return matches.reduce(function(newEnv, match, index) {
+    const parts = /(.?)\${*([\w]*(?::-[\w/]*)?)?}*/g.exec(match);
+    if (!parts || parts.length === 0)
+      return newEnv;
+    const prefix = parts[1];
+    let value, replacePart;
+    if (prefix === "\\")
+      replacePart = parts[0], value = replacePart.replace("\\$", "$");
+    else {
+      const keyParts = parts[2].split(":-"), key = keyParts[0];
+      if (replacePart = parts[0].substring(prefix.length), value = Object.prototype.hasOwnProperty.call(environment, key) ? environment[key] : config2.parsed[key] || keyParts[1] || "", keyParts.length > 1 && value) {
+        const replaceNested = matches[index + 1];
+        matches[index + 1] = "", newEnv = newEnv.replace(replaceNested, "");
+      }
+      value = _interpolate(value, environment, config2);
+    }
+    return newEnv.replace(replacePart, value);
+  }, envValue);
+}
+function expand(config2) {
+  const environment = config2.ignoreProcessEnv ? {} : process.env;
+  for (const configKey in config2.parsed) {
+    const value = Object.prototype.hasOwnProperty.call(environment, configKey) ? environment[configKey] : config2.parsed[configKey];
+    config2.parsed[configKey] = _interpolate(value, environment, config2);
+  }
+  for (const processKey in config2.parsed)
+    environment[processKey] = config2.parsed[processKey];
+  return config2;
+}
+var expand_1 = expand;
+function loadEnv(mode, envDir, prefixes = ["VITE_"]) {
+  if (mode === "local")
+    throw new Error(
+      '"local" cannot be used as a mode name because it conflicts with the .local postfix for .env files.'
+    );
+  const env = {}, envFiles = [
+    /** default file */
+    ".env",
+    /** local file */
+    ".env.local",
+    /** mode file */
+    `.env.${mode}`,
+    /** mode local file */
+    `.env.${mode}.local`
+  ], parsed = Object.fromEntries(
+    envFiles.flatMap((file) => {
+      const envPath = lookupFile(envDir, [file], {
+        rootDir: envDir
+      });
+      return envPath ? Object.entries(parse_1(fs$2.readFileSync(envPath))) : [];
+    })
+  );
+  parsed.NODE_ENV && process.env.VITE_USER_NODE_ENV === void 0 && (process.env.VITE_USER_NODE_ENV = parsed.NODE_ENV), parsed.BROWSER && process.env.BROWSER === void 0 && (process.env.BROWSER = parsed.BROWSER), parsed.BROWSER_ARGS && process.env.BROWSER_ARGS === void 0 && (process.env.BROWSER_ARGS = parsed.BROWSER_ARGS);
+  try {
+    expand_1({ parsed });
+  } catch (e) {
+    throw e.message.includes("split") ? new Error("dotenv-expand failed to expand env vars. Maybe you need to escape `$`?") : e;
+  }
+  for (const [key, value] of Object.entries(parsed))
+    prefixes.some((prefix) => key.startsWith(prefix)) && (env[key] = value);
+  for (const key in process.env)
+    prefixes.some((prefix) => key.startsWith(prefix)) && (env[key] = process.env[key]);
+  return env;
+}
+function lookupFile(dir, formats, options) {
+  for (const format of formats) {
+    const fullPath = path$2.join(dir, format);
+    if (fs$2.existsSync(fullPath) && fs$2.statSync(fullPath).isFile())
+      return fullPath;
+  }
+  const parentDir = path$2.dirname(dir);
+  if (parentDir !== dir && (!options?.rootDir || parentDir.startsWith(options?.rootDir)))
+    return lookupFile(parentDir, formats, options);
+}
+export {
+  createCliConfig,
+  defineCliConfig,
+  getCliClient,
+  loadEnv
+};
+//# sourceMappingURL=index.mjs.map
package/lib/index.mjs.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.mjs","sources":["../src/util/dynamicRequire.ts","../src/util/getCliConfig.ts","../src/debug.ts","../src/util/resolveRootDir.ts","../src/cliClient.ts","../src/config.ts","../../../../node_modules/.pnpm/dotenv@16.4.5/node_modules/dotenv/lib/main.js","../../../../node_modules/.pnpm/dotenv-expand@9.0.0/node_modules/dotenv-expand/lib/main.js","../src/util/loadEnv.ts"],"sourcesContent":[…],"names":["path","fs","config","require$$0","require$$1","debug","mainModule","parse","expand"],"mappings":"…"}
,OAAO,GACL;AACxB,MAAI,SAAS;AACX,UAAM,IAAI;AAAA,MACR;AAAA,IAAA;AAKE,QAAA,MAA8B,CAAC,GAC/B,WAAW;AAAA;AAAA,IACK;AAAA;AAAA,IACF;AAAA;AAAA,IACD,QAAQ,IAAI;AAAA;AAAA,IACN,QAAQ,IAAI;AAAA,EAAA,GAG/B,SAAS,OAAO;AAAA,IACpB,SAAS,QAAQ,CAAC,SAAS;AACzB,YAAM,UAAU,WAAW,QAAQ,CAAC,IAAI,GAAG;AAAA,QACzC,SAAS;AAAA,MAAA,CACV;AACI,aAAA,UACE,OAAO,QAAQK,QAAMN,KAAG,aAAa,OAAO,CAAC,CAAC,IADhC;IAAC,CAEvB;AAAA,EAAA;AAIC,SAAO,YAAY,QAAQ,IAAI,uBAAuB,WACxD,QAAQ,IAAI,qBAAqB,OAAO,WAGtC,OAAO,WAAW,QAAQ,IAAI,YAAY,WAC5C,QAAQ,IAAI,UAAU,OAAO,UAE3B,OAAO,gBAAgB,QAAQ,IAAI,iBAAiB,WACtD,QAAQ,IAAI,eAAe,OAAO;AAGhC,MAAA;AAEKO,aAAA,EAAC,QAAO;AAAA,WACR,GAAG;AAGN,UAAA,EAAE,QAAQ,SAAS,OAAO,IACtB,IAAI,MAAM,wEAAwE,IAEpF;AAAA,EACR;AAGA,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM;AAC1C,aAAS,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,MAClD,IAAI,GAAG,IAAI;AAMf,aAAW,OAAO,QAAQ;AACpB,aAAS,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,MAClD,IAAI,GAAG,IAAI,QAAQ,IAAI,GAAG;AAIvB,SAAA;AACT;AAEA,SAAS,WACP,KACA,SACA,SAGoB;AACpB,aAAW,UAAU,SAAS;AAC5B,UAAM,WAAWR,OAAK,KAAK,KAAK,MAAM;AAElC,QAAAC,KAAG,WAAW,QAAQ,KAAKA,KAAG,SAAS,QAAQ,EAAE,OAAO;AACnD,aAAA;AAAA,EAEX;AACM,QAAA,YAAYD,OAAK,QAAQ,GAAG;AAC9B,MAAA,cAAc,QAAQ,CAAC,SAAS,WAAW,UAAU,WAAW,SAAS,OAAO;AAC3E,WAAA,WAAW,WAAW,SAAS,OAAO;AAIjD;","x_google_ignoreList":[6,7]}
package/lib/run.d.ts
ADDED
@@ -0,0 +1 @@
+export {}
package/lib/workers/getAndWriteJourneySchema.d.ts
ADDED
@@ -0,0 +1 @@
+export {}
package/lib/workers/getCliConfig.d.ts
ADDED
@@ -0,0 +1 @@
+export {}
package/lib/workers/typegenGenerate.d.ts
ADDED
@@ -0,0 +1,37 @@
+export declare interface TypegenGenerateTypesWorkerData {
+  workDir: string
+  workspaceName?: string
+  schemaPath: string
+  searchPath: string | string[]
+}
+
+export declare type TypegenGenerateTypesWorkerMessage =
+  | {
+      type: 'error'
+      error: Error
+      fatal: boolean
+      query?: string
+      filename?: string
+    }
+  | {
+      type: 'types'
+      filename: string
+      types: {
+        queryName: string
+        query: string
+        type: string
+        unknownTypeNodesGenerated: number
+        typeNodesGenerated: number
+      }[]
+    }
+  | {
+      type: 'schema'
+      filename: string
+      schema: string
+      length: number
+    }
+  | {
+      type: 'complete'
+    }
+
+export {}
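The new typegenGenerate.d.ts declarations describe the message protocol between the typegen worker and the parent thread that spawns it. As a rough illustration only (not the CLI's actual wiring), a parent could drive the worker and switch on these messages as sketched below; the worker path, import path, and handler bodies are assumptions:

```ts
import {Worker} from 'node:worker_threads'
// Assumed import path; the declarations themselves are shown in the hunk above.
import type {
  TypegenGenerateTypesWorkerData,
  TypegenGenerateTypesWorkerMessage,
} from './lib/workers/typegenGenerate'

// Illustrative values only.
const workerData: TypegenGenerateTypesWorkerData = {
  workDir: process.cwd(),
  schemaPath: './schema.json',
  searchPath: ['./src'],
}

const worker = new Worker('./lib/workers/typegenGenerate.js', {workerData})

worker.on('message', (msg: TypegenGenerateTypesWorkerMessage) => {
  switch (msg.type) {
    case 'schema':
      // msg.schema holds the generated schema type source for msg.filename
      break
    case 'types':
      // per-query results, including unknownTypeNodesGenerated / typeNodesGenerated stats
      break
    case 'error':
      if (msg.fatal) throw msg.error
      break
    case 'complete':
      void worker.terminate()
      break
  }
})
```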
package/lib/workers/typegenGenerate.js
@@ -1,6 +1,10 @@
 "use strict";
-var node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"),
-
+var node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"), debugIt = require("debug"), groqJs = require("groq-js");
+function _interopDefaultCompat(e) {
+  return e && typeof e == "object" && "default" in e ? e : { default: e };
+}
+var debugIt__default = /* @__PURE__ */ _interopDefaultCompat(debugIt);
+const $info = debugIt__default.default("sanity:codegen:generate:info");
 if (node_worker_threads.isMainThread || !node_worker_threads.parentPort)
   throw new Error("This module must be run as a worker thread");
 const opts = node_worker_threads.workerData;
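The removed require line is replaced by one that also pulls in debug and groq-js, plus a default-import interop helper. Per the sourcesContent embedded in the updated source map below, the added bundle lines correspond to this TypeScript source (shown here only for orientation):

```ts
// debug's default export becomes debugIt__default.default after CJS interop.
import createDebug from 'debug'

const $info = createDebug('sanity:codegen:generate:info')
// groq-js is imported in the same source for typeEvaluate / TypeNode.
```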
package/lib/workers/typegenGenerate.js.map
@@ -1 +1 @@
-
{"version":3,"file":"typegenGenerate.js","sources":["../../src/workers/typegenGenerate.ts"],"sourcesContent":["import {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'\n\nimport {\n findQueriesInPath,\n getResolver,\n readSchema,\n registerBabel,\n safeParseQuery,\n TypeGenerator,\n} from '@sanity/codegen'\nimport createDebug from 'debug'\nimport {typeEvaluate, type TypeNode} from 'groq-js'\n\nconst $info = createDebug('sanity:codegen:generate:info')\n\nexport interface TypegenGenerateTypesWorkerData {\n workDir: string\n workspaceName?: string\n schemaPath: string\n searchPath: string | string[]\n}\n\nexport type TypegenGenerateTypesWorkerMessage =\n | {\n type: 'error'\n error: Error\n fatal: boolean\n query?: string\n filename?: string\n }\n | {\n type: 'types'\n filename: string\n types: {\n queryName: string\n query: string\n type: string\n unknownTypeNodesGenerated: number\n typeNodesGenerated: number\n }[]\n }\n | {\n type: 'schema'\n filename: string\n schema: string\n length: number\n }\n | {\n type: 'complete'\n }\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nconst opts = _workerData as TypegenGenerateTypesWorkerData\n\nregisterBabel()\n\nasync function main() {\n const schema = await readSchema(opts.schemaPath)\n\n const typeGenerator = new TypeGenerator(schema)\n const schemaTypes = [\n typeGenerator.generateSchemaTypes(),\n TypeGenerator.generateKnownTypes(),\n ].join('\\n')\n const resolver = getResolver()\n\n parentPort?.postMessage({\n type: 'schema',\n schema: schemaTypes,\n filename: 'schema.json',\n length: schema.length,\n } satisfies TypegenGenerateTypesWorkerMessage)\n\n const queries = findQueriesInPath({\n path: opts.searchPath,\n resolver,\n })\n\n for await (const result of queries) {\n if (result.type === 'error') {\n parentPort?.postMessage({\n type: 'error',\n error: result.error,\n fatal: false,\n filename: result.filename,\n } satisfies TypegenGenerateTypesWorkerMessage)\n continue\n }\n $info(`Processing ${result.queries.length} queries in \"${result.filename}\"...`)\n\n const fileQueryTypes: {\n queryName: string\n query: string\n type: string\n unknownTypeNodesGenerated: number\n typeNodesGenerated: number\n }[] = []\n for (const {name: queryName, result: query} of result.queries) {\n try {\n const ast = safeParseQuery(query)\n const queryTypes = typeEvaluate(ast, schema)\n\n const type = typeGenerator.generateTypeNodeTypes(`${queryName}Result`, queryTypes)\n\n const queryTypeStats = walkAndCountQueryTypeNodeStats(queryTypes)\n fileQueryTypes.push({\n queryName,\n query,\n type,\n unknownTypeNodesGenerated: queryTypeStats.unknownTypes,\n typeNodesGenerated: queryTypeStats.allTypes,\n })\n } catch (err) {\n parentPort?.postMessage({\n type: 'error',\n error: new Error(\n `Error generating types for query \"${queryName}\" in \"${result.filename}\": ${err.message}`,\n {cause: err},\n ),\n fatal: false,\n query,\n } satisfies TypegenGenerateTypesWorkerMessage)\n }\n }\n\n if (fileQueryTypes.length > 0) {\n $info(`Generated types for ${fileQueryTypes.length} queries in \"${result.filename}\"\\n`)\n parentPort?.postMessage({\n type: 'types',\n types: fileQueryTypes,\n filename: result.filename,\n } satisfies TypegenGenerateTypesWorkerMessage)\n }\n }\n\n parentPort?.postMessage({\n type: 'complete',\n } satisfies TypegenGenerateTypesWorkerMessage)\n}\n\nfunction walkAndCountQueryTypeNodeStats(typeNode: TypeNode): {\n allTypes: number\n unknownTypes: number\n} {\n switch 
(typeNode.type) {\n case 'unknown': {\n return {allTypes: 1, unknownTypes: 1}\n }\n case 'array': {\n const acc = walkAndCountQueryTypeNodeStats(typeNode.of)\n acc.allTypes += 1 // count the array type itself\n return acc\n }\n case 'object': {\n // if the rest is unknown, we count it as one unknown type\n if (typeNode.rest && typeNode.rest.type === 'unknown') {\n return {allTypes: 2, unknownTypes: 1} // count the object type itself as well\n }\n\n const restStats = typeNode.rest\n ? walkAndCountQueryTypeNodeStats(typeNode.rest)\n : {allTypes: 1, unknownTypes: 0} // count the object type itself\n\n return Object.values(typeNode.attributes).reduce((acc, attribute) => {\n const {allTypes, unknownTypes} = walkAndCountQueryTypeNodeStats(attribute.value)\n return {allTypes: acc.allTypes + allTypes, unknownTypes: acc.unknownTypes + unknownTypes}\n }, restStats)\n }\n case 'union': {\n return typeNode.of.reduce(\n (acc, type) => {\n const {allTypes, unknownTypes} = walkAndCountQueryTypeNodeStats(type)\n return {allTypes: acc.allTypes + allTypes, unknownTypes: acc.unknownTypes + unknownTypes}\n },\n {allTypes: 1, unknownTypes: 0}, // count the union type itself\n )\n }\n default: {\n return {allTypes: 1, unknownTypes: 0}\n }\n }\n}\n\nmain()\n"],"names":["createDebug","isMainThread","parentPort","_workerData","registerBabel","readSchema","TypeGenerator","getResolver","findQueriesInPath","safeParseQuery","typeEvaluate"],"mappings":"
+
{"version":3,"file":"typegenGenerate.js","sources":["../../src/workers/typegenGenerate.ts"],"sourcesContent":["import {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'\n\nimport {\n findQueriesInPath,\n getResolver,\n readSchema,\n registerBabel,\n safeParseQuery,\n TypeGenerator,\n} from '@sanity/codegen'\nimport createDebug from 'debug'\nimport {typeEvaluate, type TypeNode} from 'groq-js'\n\nconst $info = createDebug('sanity:codegen:generate:info')\n\nexport interface TypegenGenerateTypesWorkerData {\n workDir: string\n workspaceName?: string\n schemaPath: string\n searchPath: string | string[]\n}\n\nexport type TypegenGenerateTypesWorkerMessage =\n | {\n type: 'error'\n error: Error\n fatal: boolean\n query?: string\n filename?: string\n }\n | {\n type: 'types'\n filename: string\n types: {\n queryName: string\n query: string\n type: string\n unknownTypeNodesGenerated: number\n typeNodesGenerated: number\n }[]\n }\n | {\n type: 'schema'\n filename: string\n schema: string\n length: number\n }\n | {\n type: 'complete'\n }\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nconst opts = _workerData as TypegenGenerateTypesWorkerData\n\nregisterBabel()\n\nasync function main() {\n const schema = await readSchema(opts.schemaPath)\n\n const typeGenerator = new TypeGenerator(schema)\n const schemaTypes = [\n typeGenerator.generateSchemaTypes(),\n TypeGenerator.generateKnownTypes(),\n ].join('\\n')\n const resolver = getResolver()\n\n parentPort?.postMessage({\n type: 'schema',\n schema: schemaTypes,\n filename: 'schema.json',\n length: schema.length,\n } satisfies TypegenGenerateTypesWorkerMessage)\n\n const queries = findQueriesInPath({\n path: opts.searchPath,\n resolver,\n })\n\n for await (const result of queries) {\n if (result.type === 'error') {\n parentPort?.postMessage({\n type: 'error',\n error: result.error,\n fatal: false,\n filename: result.filename,\n } satisfies TypegenGenerateTypesWorkerMessage)\n continue\n }\n $info(`Processing ${result.queries.length} queries in \"${result.filename}\"...`)\n\n const fileQueryTypes: {\n queryName: string\n query: string\n type: string\n unknownTypeNodesGenerated: number\n typeNodesGenerated: number\n }[] = []\n for (const {name: queryName, result: query} of result.queries) {\n try {\n const ast = safeParseQuery(query)\n const queryTypes = typeEvaluate(ast, schema)\n\n const type = typeGenerator.generateTypeNodeTypes(`${queryName}Result`, queryTypes)\n\n const queryTypeStats = walkAndCountQueryTypeNodeStats(queryTypes)\n fileQueryTypes.push({\n queryName,\n query,\n type,\n unknownTypeNodesGenerated: queryTypeStats.unknownTypes,\n typeNodesGenerated: queryTypeStats.allTypes,\n })\n } catch (err) {\n parentPort?.postMessage({\n type: 'error',\n error: new Error(\n `Error generating types for query \"${queryName}\" in \"${result.filename}\": ${err.message}`,\n {cause: err},\n ),\n fatal: false,\n query,\n } satisfies TypegenGenerateTypesWorkerMessage)\n }\n }\n\n if (fileQueryTypes.length > 0) {\n $info(`Generated types for ${fileQueryTypes.length} queries in \"${result.filename}\"\\n`)\n parentPort?.postMessage({\n type: 'types',\n types: fileQueryTypes,\n filename: result.filename,\n } satisfies TypegenGenerateTypesWorkerMessage)\n }\n }\n\n parentPort?.postMessage({\n type: 'complete',\n } satisfies TypegenGenerateTypesWorkerMessage)\n}\n\nfunction walkAndCountQueryTypeNodeStats(typeNode: TypeNode): {\n allTypes: number\n unknownTypes: number\n} {\n switch 
(typeNode.type) {\n case 'unknown': {\n return {allTypes: 1, unknownTypes: 1}\n }\n case 'array': {\n const acc = walkAndCountQueryTypeNodeStats(typeNode.of)\n acc.allTypes += 1 // count the array type itself\n return acc\n }\n case 'object': {\n // if the rest is unknown, we count it as one unknown type\n if (typeNode.rest && typeNode.rest.type === 'unknown') {\n return {allTypes: 2, unknownTypes: 1} // count the object type itself as well\n }\n\n const restStats = typeNode.rest\n ? walkAndCountQueryTypeNodeStats(typeNode.rest)\n : {allTypes: 1, unknownTypes: 0} // count the object type itself\n\n return Object.values(typeNode.attributes).reduce((acc, attribute) => {\n const {allTypes, unknownTypes} = walkAndCountQueryTypeNodeStats(attribute.value)\n return {allTypes: acc.allTypes + allTypes, unknownTypes: acc.unknownTypes + unknownTypes}\n }, restStats)\n }\n case 'union': {\n return typeNode.of.reduce(\n (acc, type) => {\n const {allTypes, unknownTypes} = walkAndCountQueryTypeNodeStats(type)\n return {allTypes: acc.allTypes + allTypes, unknownTypes: acc.unknownTypes + unknownTypes}\n },\n {allTypes: 1, unknownTypes: 0}, // count the union type itself\n )\n }\n default: {\n return {allTypes: 1, unknownTypes: 0}\n }\n }\n}\n\nmain()\n"],"names":["createDebug","isMainThread","parentPort","_workerData","registerBabel","readSchema","TypeGenerator","getResolver","findQueriesInPath","safeParseQuery","typeEvaluate"],"mappings":";;;;;;AAaA,MAAM,QAAQA,iBAAAA,QAAY,8BAA8B;AAsCxD,IAAIC,oBAAAA,gBAAgB,CAACC,oBAAA;AACb,QAAA,IAAI,MAAM,4CAA4C;AAG9D,MAAM,OAAOC,oBAAAA;AAEbC,QAAAA;AAEA,eAAe,OAAO;AACd,QAAA,SAAS,MAAMC,mBAAW,KAAK,UAAU,GAEzC,gBAAgB,IAAIC,QAAA,cAAc,MAAM,GACxC,cAAc;AAAA,IAClB,cAAc,oBAAoB;AAAA,IAClCA,QAAAA,cAAc,mBAAmB;AAAA,IACjC,KAAK;AAAA,CAAI,GACL,WAAWC,QAAAA;AAEjBL,sBAAAA,YAAY,YAAY;AAAA,IACtB,MAAM;AAAA,IACN,QAAQ;AAAA,IACR,UAAU;AAAA,IACV,QAAQ,OAAO;AAAA,EAAA,CAC4B;AAE7C,QAAM,UAAUM,QAAAA,kBAAkB;AAAA,IAChC,MAAM,KAAK;AAAA,IACX;AAAA,EAAA,CACD;AAED,mBAAiB,UAAU,SAAS;AAC9B,QAAA,OAAO,SAAS,SAAS;AAC3BN,0BAAAA,YAAY,YAAY;AAAA,QACtB,MAAM;AAAA,QACN,OAAO,OAAO;AAAA,QACd,OAAO;AAAA,QACP,UAAU,OAAO;AAAA,MAAA,CAC0B;AAC7C;AAAA,IACF;AACA,UAAM,cAAc,OAAO,QAAQ,MAAM,gBAAgB,OAAO,QAAQ,MAAM;AAE9E,UAAM,iBAMA,CAAA;AACN,eAAW,EAAC,MAAM,WAAW,QAAQ,MAAA,KAAU,OAAO;AAChD,UAAA;AACF,cAAM,MAAMO,QAAe,eAAA,KAAK,GAC1B,aAAaC,oBAAa,KAAK,MAAM,GAErC,OAAO,cAAc,sBAAsB,GAAG,SAAS,UAAU,UAAU,GAE3E,iBAAiB,+BAA+B,UAAU;AAChE,uBAAe,KAAK;AAAA,UAClB;AAAA,UACA;AAAA,UACA;AAAA,UACA,2BAA2B,eAAe;AAAA,UAC1C,oBAAoB,eAAe;AAAA,QAAA,CACpC;AAAA,eACM,KAAK;AACZR,4BAAAA,YAAY,YAAY;AAAA,UACtB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,YACT,qCAAqC,SAAS,SAAS,OAAO,QAAQ,MAAM,IAAI,OAAO;AAAA,YACvF,EAAC,OAAO,IAAG;AAAA,UACb;AAAA,UACA,OAAO;AAAA,UACP;AAAA,QAAA,CAC2C;AAAA,MAC/C;AAGE,mBAAe,SAAS,MAC1B,MAAM,uBAAuB,eAAe,MAAM,gBAAgB,OAAO,QAAQ;AAAA,CAAK,GACtFA,gCAAY,YAAY;AAAA,MACtB,MAAM;AAAA,MACN,OAAO;AAAA,MACP,UAAU,OAAO;AAAA,IAC0B,CAAA;AAAA,EAEjD;AAEAA,sBAAAA,YAAY,YAAY;AAAA,IACtB,MAAM;AAAA,EAAA,CACqC;AAC/C;AAEA,SAAS,+BAA+B,UAGtC;AACA,UAAQ,SAAS,MAAM;AAAA,IACrB,KAAK;AACH,aAAO,EAAC,UAAU,GAAG,cAAc,EAAC;AAAA,IAEtC,KAAK,SAAS;AACN,YAAA,MAAM,+BAA+B,SAAS,EAAE;AACtD,aAAA,IAAI,YAAY,GACT;AAAA,IACT;AAAA,IACA,KAAK,UAAU;AAEb,UAAI,SAAS,QAAQ,SAAS,KAAK,SAAS;AAC1C,eAAO,EAAC,UAAU,GAAG,cAAc,EAAC;AAGhC,YAAA,YAAY,SAAS,OACvB,+BAA+B,SAAS,IAAI,IAC5C,EAAC,UAAU,GAAG,cAAc,EAAC;AAE1B,aAAA,OAAO,OAAO,SAAS,UAAU,EAAE,OAAO,CAAC,KAAK,cAAc;AACnE,cAAM,EAAC,UAAU,aAAA,IAAgB,+BAA+B,UAAU,KAAK;AACxE,eAAA,EAAC,UAAU,IAAI,WAAW,UAAU,cAAc,IAAI,eAAe;SAC3E,SAAS;AAAA,IACd;AAAA,IACA,KAAK;AACH,aAAO,SAAS,GAAG;AAAA,QACjB,CAAC,KAAK,SAAS;AACb,gBAAM,EAAC,UAAU
,aAAY,IAAI,+BAA+B,IAAI;AAC7D,iBAAA,EAAC,UAAU,IAAI,WAAW,UAAU,cAAc,IAAI,eAAe;QAC9E;AAAA,QACA,EAAC,UAAU,GAAG,cAAc,EAAC;AAAA;AAAA,MAAA;AAAA,IAGjC;AACE,aAAO,EAAC,UAAU,GAAG,cAAc,EAAC;AAAA,EAExC;AACF;AAEA,KAAK;"}
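The source embedded in the new map shows how the worker scores each evaluated query: walkAndCountQueryTypeNodeStats recursively walks the groq-js type tree and counts total versus unknown nodes, which populate the typeNodesGenerated and unknownTypeNodesGenerated fields of the 'types' message. A simplified, self-contained restatement of that counting (using a pared-down node shape instead of the real groq-js TypeNode union) looks like this:

```ts
// Sketch of the counting logic from the worker source above; MiniTypeNode is a
// stand-in for groq-js's TypeNode union, not the real type.
type MiniTypeNode =
  | {type: 'unknown'}
  | {type: 'string'}
  | {type: 'array'; of: MiniTypeNode}
  | {type: 'object'; attributes: Record<string, {value: MiniTypeNode}>; rest?: MiniTypeNode}
  | {type: 'union'; of: MiniTypeNode[]}

function countStats(node: MiniTypeNode): {allTypes: number; unknownTypes: number} {
  switch (node.type) {
    case 'unknown':
      return {allTypes: 1, unknownTypes: 1}
    case 'array': {
      const acc = countStats(node.of)
      return {allTypes: acc.allTypes + 1, unknownTypes: acc.unknownTypes} // count the array itself
    }
    case 'object': {
      if (node.rest && node.rest.type === 'unknown') {
        return {allTypes: 2, unknownTypes: 1} // the object itself plus its unknown rest
      }
      const start = node.rest ? countStats(node.rest) : {allTypes: 1, unknownTypes: 0}
      return Object.values(node.attributes).reduce((acc, attr) => {
        const s = countStats(attr.value)
        return {allTypes: acc.allTypes + s.allTypes, unknownTypes: acc.unknownTypes + s.unknownTypes}
      }, start)
    }
    case 'union':
      return node.of.reduce(
        (acc, member) => {
          const s = countStats(member)
          return {allTypes: acc.allTypes + s.allTypes, unknownTypes: acc.unknownTypes + s.unknownTypes}
        },
        {allTypes: 1, unknownTypes: 0}, // count the union itself
      )
    default:
      return {allTypes: 1, unknownTypes: 0}
  }
}

// Example: an array of objects with one string and one unknown attribute
// yields {allTypes: 4, unknownTypes: 1}.
```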