@sanity/cli 3.87.1 → 3.88.1-typegen-experimental.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39) hide show
  1. package/lib/_chunks-cjs/cli.js +58780 -56791
  2. package/lib/_chunks-cjs/cli.js.map +1 -1
  3. package/lib/_chunks-cjs/generateAction.js +113 -111
  4. package/lib/_chunks-cjs/generateAction.js.map +1 -1
  5. package/lib/_chunks-cjs/loadEnv.js +3 -3
  6. package/lib/_chunks-cjs/loadEnv.js.map +1 -1
  7. package/lib/_chunks-cjs/workerChannel.js +84 -0
  8. package/lib/_chunks-cjs/workerChannel.js.map +1 -0
  9. package/lib/workers/typegenGenerate.d.ts +144 -33
  10. package/lib/workers/typegenGenerate.js +83 -112
  11. package/lib/workers/typegenGenerate.js.map +1 -1
  12. package/package.json +20 -22
  13. package/src/actions/init-project/templates/appQuickstart.ts +2 -2
  14. package/src/actions/init-project/templates/appSanityUi.ts +2 -2
  15. package/src/actions/typegen/generate.telemetry.ts +9 -3
  16. package/src/actions/typegen/generateAction.ts +159 -152
  17. package/src/cli.ts +0 -0
  18. package/src/commands/blueprints/addBlueprintsCommand.ts +52 -56
  19. package/src/commands/blueprints/blueprintsGroup.ts +0 -1
  20. package/src/commands/blueprints/configBlueprintsCommand.ts +50 -74
  21. package/src/commands/blueprints/deployBlueprintsCommand.ts +41 -133
  22. package/src/commands/blueprints/destroyBlueprintsCommand.ts +76 -0
  23. package/src/commands/blueprints/infoBlueprintsCommand.ts +29 -51
  24. package/src/commands/blueprints/initBlueprintsCommand.ts +55 -73
  25. package/src/commands/blueprints/logsBlueprintsCommand.ts +43 -81
  26. package/src/commands/blueprints/planBlueprintsCommand.ts +26 -36
  27. package/src/commands/blueprints/stacksBlueprintsCommand.ts +43 -51
  28. package/src/commands/functions/devFunctionsCommand.ts +1 -2
  29. package/src/commands/functions/envFunctionsCommand.ts +55 -46
  30. package/src/commands/functions/functionsGroup.ts +1 -2
  31. package/src/commands/functions/logsFunctionsCommand.ts +101 -58
  32. package/src/commands/functions/testFunctionsCommand.ts +56 -36
  33. package/src/commands/index.ts +6 -4
  34. package/src/commands/projects/listProjectsCommand.ts +0 -0
  35. package/src/commands/projects/projectsGroup.ts +0 -0
  36. package/src/util/__tests__/workerChannel.test.ts +222 -0
  37. package/src/util/workerChannel.ts +312 -0
  38. package/src/workers/typegenGenerate.ts +181 -183
  39. package/templates/app-sanity-ui/src/ExampleComponent.tsx +1 -1
@@ -1,10 +1,33 @@
1
1
  "use strict";
2
- var fs = require("node:fs/promises"), path = require("node:path"), node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"), prettier = require("prettier"), cliWorker = require("./cliWorker.js"), telemetry = require("@sanity/telemetry");
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf, __hasOwnProp = Object.prototype.hasOwnProperty;
7
+ var __copyProps = (to, from, except, desc) => {
8
+ if (from && typeof from == "object" || typeof from == "function")
9
+ for (let key of __getOwnPropNames(from))
10
+ !__hasOwnProp.call(to, key) && key !== except && __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
11
+ return to;
12
+ };
13
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
14
+ // If the importer is in node compatibility mode or this is not an ESM
15
+ // file that has been converted to a CommonJS file using a Babel-
16
+ // compatible transform (i.e. "__esModule" has not been set), then set
17
+ // "default" to the CommonJS "module.exports" for node compatibility.
18
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: !0 }) : target,
19
+ mod
20
+ ));
21
+ var fs = require("node:fs/promises"), path = require("node:path"), process = require("node:process"), node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"), cliWorker = require("./cliWorker.js"), workerChannel = require("./workerChannel.js"), telemetry = require("@sanity/telemetry");
22
+ function _interopDefaultCompat(e) {
23
+ return e && typeof e == "object" && "default" in e ? e : { default: e };
24
+ }
25
+ var process__default = /* @__PURE__ */ _interopDefaultCompat(process);
3
26
  const TypesGeneratedTrace = telemetry.defineTrace({
4
27
  name: "Types Generated",
5
- version: 0,
28
+ version: 1,
6
29
  description: "Trace emitted when generating TypeScript types for queries"
7
- }), generatedFileWarning = `/**
30
+ }), DEFAULT_CONFIG_PATH = "sanity-typegen.json", GENERATED_FILE_WARNING = `/**
8
31
  * ---------------------------------------------------------------------------------
9
32
  * This file has been generated by Sanity TypeGen.
10
33
  * Command: \`sanity typegen generate\`
@@ -18,122 +41,101 @@ const TypesGeneratedTrace = telemetry.defineTrace({
18
41
  * ---------------------------------------------------------------------------------
19
42
  */
20
43
 
21
- `;
44
+ `, percentageFormatter = new Intl.NumberFormat("en-US", {
45
+ style: "percent",
46
+ minimumFractionDigits: 1,
47
+ maximumFractionDigits: 1
48
+ }), percent = (value) => percentageFormatter.format(Math.min(value, 1)), count = (amount, plural, singular = plural.slice(0, Math.max(0, plural.length - 1))) => `${amount.toLocaleString("en-US")} ${amount === 1 ? singular : plural}`;
22
49
  async function typegenGenerateAction(args, context) {
23
50
  const flags = args.extOptions, { output, workDir, telemetry: telemetry2 } = context, trace = telemetry2.trace(TypesGeneratedTrace);
24
51
  trace.start();
25
- const codegenConfig = await codegen.readConfig(flags["config-path"] || "sanity-typegen.json");
52
+ const spinner = output.spinner("Generating types\u2026");
53
+ spinner.start();
54
+ let codegenConfig;
55
+ const configPath = flags["config-path"] ?? DEFAULT_CONFIG_PATH;
26
56
  try {
27
- if (!(await fs.stat(codegenConfig.schema)).isFile())
28
- throw new Error(`Schema path is not a file: ${codegenConfig.schema}`);
29
- } catch (err) {
30
- if (err.code === "ENOENT") {
31
- const hint = codegenConfig.schema === "./schema.json" ? ' - did you run "sanity schema extract"?' : "";
32
- throw new Error(`Schema file not found: ${codegenConfig.schema}${hint}`);
33
- }
34
- throw err;
57
+ codegenConfig = await codegen.readConfig(configPath), spinner.info(`Using typegen configuration found at "${configPath}"`);
58
+ } catch (error) {
59
+ if (error?.code !== "ENOENT") throw error;
60
+ codegenConfig = codegen.DEFAULT_CONFIG, configPath !== DEFAULT_CONFIG_PATH && spinner.warn(
61
+ `Configuration file not found at specified path "${configPath}". Falling back to default settings.`
62
+ );
35
63
  }
36
- const outputPath = path.join(process.cwd(), codegenConfig.generates), outputDir = path.dirname(outputPath);
37
- await fs.mkdir(outputDir, { recursive: !0 });
38
- const workerPath = await cliWorker.getCliWorkerPath("typegenGenerate"), spinner = output.spinner({}).start("Generating types"), worker = new node_worker_threads.Worker(workerPath, {
39
- workerData: {
40
- workDir,
41
- schemaPath: codegenConfig.schema,
42
- searchPath: codegenConfig.path,
43
- overloadClientMethods: codegenConfig.overloadClientMethods
44
- },
45
- // eslint-disable-next-line no-process-env
46
- env: process.env
47
- }), typeFile = await fs.open(
48
- outputPath,
49
- // eslint-disable-next-line no-bitwise
50
- fs.constants.O_TRUNC | fs.constants.O_CREAT | fs.constants.O_WRONLY
51
- );
52
- typeFile.write(generatedFileWarning);
53
- const stats = {
54
- queryFilesCount: 0,
55
- errors: 0,
56
- queriesCount: 0,
57
- schemaTypesCount: 0,
58
- unknownTypeNodesGenerated: 0,
59
- typeNodesGenerated: 0,
60
- emptyUnionTypeNodesGenerated: 0,
61
- size: 0
62
- };
63
- await new Promise((resolve, reject) => {
64
- worker.addListener("message", (msg) => {
65
- if (msg.type === "error") {
66
- if (msg.fatal) {
67
- trace.error(msg.error), reject(msg.error);
68
- return;
69
- }
70
- const errorMessage = msg.filename ? `${msg.error.message} in "${msg.filename}"` : msg.error.message;
71
- spinner.fail(errorMessage), stats.errors++;
72
- return;
73
- }
74
- if (msg.type === "complete") {
75
- resolve();
76
- return;
77
- }
78
- if (msg.type === "typemap") {
79
- let typeMapStr = `// Query TypeMap
80
- `;
81
- typeMapStr += msg.typeMap, typeFile.write(typeMapStr), stats.size += Buffer.byteLength(typeMapStr);
82
- return;
83
- }
84
- let fileTypeString = `// Source: ${msg.filename}
85
- `;
86
- if (msg.type === "schema") {
87
- stats.schemaTypesCount += msg.length, fileTypeString += msg.schema, typeFile.write(fileTypeString);
88
- return;
89
- }
90
- if (msg.type === "types") {
91
- stats.queryFilesCount++;
92
- for (const {
93
- queryName,
94
- query,
95
- type,
96
- typeNodesGenerated,
97
- unknownTypeNodesGenerated,
98
- emptyUnionTypeNodesGenerated
99
- } of msg.types)
100
- fileTypeString += `// Variable: ${queryName}
101
- `, fileTypeString += `// Query: ${query.replace(/(\r\n|\n|\r)/gm, "").trim()}
102
- `, fileTypeString += type, stats.queriesCount++, stats.typeNodesGenerated += typeNodesGenerated, stats.unknownTypeNodesGenerated += unknownTypeNodesGenerated, stats.emptyUnionTypeNodesGenerated += emptyUnionTypeNodesGenerated;
103
- typeFile.write(`${fileTypeString}
104
- `), stats.size += Buffer.byteLength(fileTypeString);
64
+ const {
65
+ schemas,
66
+ augmentGroqModule,
67
+ formatGeneratedCode,
68
+ generates,
69
+ overloadClientMethods,
70
+ path: searchPath
71
+ } = codegenConfig;
72
+ spinner.start(`Loading schema${schemas.length === 1 ? "" : "s"}\u2026`);
73
+ const outputPath = path.join(process__default.default.cwd(), generates);
74
+ await fs.mkdir(path.dirname(outputPath), { recursive: !0 });
75
+ const workerData = {
76
+ workDir,
77
+ schemas,
78
+ searchPath,
79
+ overloadClientMethods,
80
+ augmentGroqModule
81
+ }, worker = new node_worker_threads.Worker(await cliWorker.getCliWorkerPath("typegenGenerate"), {
82
+ workerData,
83
+ env: process__default.default.env
84
+ }), receiver = workerChannel.createReceiver(worker);
85
+ let fileHandle, schemaStats, queryStats;
86
+ try {
87
+ await receiver.event.loadedSchemas(), spinner.succeed(
88
+ schemas.length === 1 ? `Loaded schema from ${schemas[0].schemaPath}` : `Loaded ${count(schemas.length, "schemas")}`
89
+ ), spinner.start("Generating schema types\u2026"), fileHandle = await fs.open(outputPath, "w"), await fileHandle.write(GENERATED_FILE_WARNING);
90
+ const schemaResult = await receiver.event.generatedSchemaDeclarations();
91
+ schemaStats = schemaResult.schemaStats, await fileHandle.write(schemaResult.code);
92
+ const schemaTypesCount = count(schemaStats.schemaTypesCount, "schema types"), schemaCount = count(schemaStats.schemaCount, "schemas");
93
+ spinner.succeed(
94
+ `Generated ${schemaTypesCount}${schemas.length > 1 ? ` from ${schemaCount}` : ""}`
95
+ ), spinner.start("Generating query types\u2026");
96
+ const expectedFiles = (await receiver.event.fileCount()).fileCount, expectedFileCount = count(expectedFiles, "files");
97
+ for await (const {
98
+ progress,
99
+ ...queryResult
100
+ } of receiver.stream.generatedQueryResultDeclaration()) {
101
+ const queryCount = count(progress.queriesCount, "queries", "query"), projectionCount = count(progress.projectionsCount, "projections");
102
+ spinner.text = `Generating query types\u2026 (${percent(progress.filesCount / expectedFiles)})
103
+ \u2514\u2500 Processed ${progress.filesCount} of ${expectedFileCount}. Found ${queryCount}, ${projectionCount}.`, queryResult.type === "error" && spinner.fail(queryResult.message), queryResult.type === "declaration" && await fileHandle.write(queryResult.code);
104
+ }
105
+ const result = await receiver.event.generationComplete();
106
+ queryStats = result.queryStats, await fileHandle.write(result.augmentedQueryResultDeclarations.code), await fileHandle.close(), fileHandle = null;
107
+ const queryTypesCount = count(queryStats.queriesCount, "query types"), projectionTypesCount = count(queryStats.projectionsCount, "projection types"), scannedFilesCount = count(queryStats.totalScannedFilesCount, "scanned files");
108
+ if (spinner.succeed(
109
+ `Generated ${queryTypesCount} and ${projectionTypesCount} from ${scannedFilesCount}`
110
+ ), formatGeneratedCode) {
111
+ spinner.start("Formatting generated types with prettier\u2026");
112
+ try {
113
+ const prettier = await import("prettier"), prettierConfig = await prettier.resolveConfig(outputPath);
114
+ fileHandle = await fs.open(outputPath, fs.constants.O_RDWR);
115
+ const code = await fileHandle.readFile({ encoding: "utf-8" }), formattedCode = await prettier.format(code, {
116
+ ...prettierConfig,
117
+ parser: "typescript"
118
+ });
119
+ await fileHandle.truncate(), await fileHandle.write(formattedCode, 0), await fileHandle.close(), fileHandle = null, spinner.succeed("Formatted generated types with prettier");
120
+ } catch (err) {
121
+ spinner.warn(`Failed to format generated types with prettier: ${err.message}`);
105
122
  }
106
- }), worker.addListener("error", reject);
107
- }), await typeFile.close();
108
- const prettierConfig = codegenConfig.formatGeneratedCode ? await prettier.resolveConfig(outputPath).catch((err) => (output.warn(`Failed to load prettier config: ${err.message}`), null)) : null;
109
- if (prettierConfig) {
110
- const formatFile = await fs.open(outputPath, fs.constants.O_RDWR);
111
- try {
112
- const code = await formatFile.readFile(), formattedCode = await prettier.format(code.toString(), {
113
- ...prettierConfig,
114
- parser: "typescript"
115
- });
116
- await formatFile.truncate(), await formatFile.write(formattedCode, 0), spinner.info("Formatted generated types with Prettier");
117
- } catch (err) {
118
- output.warn(`Failed to format generated types with Prettier: ${err.message}`);
119
- } finally {
120
- await formatFile.close();
121
123
  }
124
+ const outputStat = await fs.stat(outputPath);
125
+ trace.log({
126
+ outputSize: outputStat.size,
127
+ ...schemaStats,
128
+ ...queryStats,
129
+ configOverloadClientMethods: overloadClientMethods,
130
+ configAugmentGroqModule: augmentGroqModule
131
+ }), queryStats.errorCount > 0 ? spinner.warn(
132
+ `Encountered ${count(queryStats.errorCount, "errors")} in ${count(queryStats.filesWithErrors, "files")} while generating types to ${generates}`
133
+ ) : spinner.succeed(`Successfully generated types to ${generates}`);
134
+ } catch (err) {
135
+ throw trace.error(err), err;
136
+ } finally {
137
+ await fileHandle?.close(), await receiver.dispose(), trace.complete();
122
138
  }
123
- trace.log({
124
- outputSize: stats.size,
125
- queriesCount: stats.queriesCount,
126
- schemaTypesCount: stats.schemaTypesCount,
127
- queryFilesCount: stats.queryFilesCount,
128
- filesWithErrors: stats.errors,
129
- typeNodesGenerated: stats.typeNodesGenerated,
130
- unknownTypeNodesGenerated: stats.unknownTypeNodesGenerated,
131
- unknownTypeNodesRatio: stats.typeNodesGenerated > 0 ? stats.unknownTypeNodesGenerated / stats.typeNodesGenerated : 0,
132
- emptyUnionTypeNodesGenerated: stats.emptyUnionTypeNodesGenerated,
133
- configOverloadClientMethods: codegenConfig.overloadClientMethods
134
- }), trace.complete(), stats.errors > 0 && spinner.warn(`Encountered errors in ${stats.errors} files while generating types`), spinner.succeed(
135
- `Generated TypeScript types for ${stats.schemaTypesCount} schema types and ${stats.queriesCount} GROQ queries in ${stats.queryFilesCount} files into: ${codegenConfig.generates}`
136
- );
137
139
  }
138
140
  exports.default = typegenGenerateAction;
139
141
  //# sourceMappingURL=generateAction.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"generateAction.js","sources":["../../src/actions/typegen/generate.telemetry.ts","../../src/actions/typegen/generateAction.ts"],"sourcesContent":["import {defineTrace} from '@sanity/telemetry'\n\ninterface TypesGeneratedTraceAttrubutes {\n outputSize: number\n queriesCount: number\n schemaTypesCount: number\n queryFilesCount: number\n filesWithErrors: number\n typeNodesGenerated: number\n unknownTypeNodesGenerated: number\n unknownTypeNodesRatio: number\n emptyUnionTypeNodesGenerated: number\n configOverloadClientMethods: boolean\n}\n\nexport const TypesGeneratedTrace = defineTrace<TypesGeneratedTraceAttrubutes>({\n name: 'Types Generated',\n version: 0,\n description: 'Trace emitted when generating TypeScript types for queries',\n})\n","import {constants, mkdir, open, stat} from 'node:fs/promises'\nimport {dirname, join} from 'node:path'\nimport {Worker} from 'node:worker_threads'\n\nimport {readConfig} from '@sanity/codegen'\nimport {format as prettierFormat, resolveConfig as resolvePrettierConfig} from 'prettier'\n\nimport {type CliCommandArguments, type CliCommandContext} from '../../types'\nimport {getCliWorkerPath} from '../../util/cliWorker'\nimport {\n type TypegenGenerateTypesWorkerData,\n type TypegenGenerateTypesWorkerMessage,\n} from '../../workers/typegenGenerate'\nimport {TypesGeneratedTrace} from './generate.telemetry'\n\nexport interface TypegenGenerateTypesCommandFlags {\n 'config-path'?: string\n}\n\nconst generatedFileWarning = `/**\n * ---------------------------------------------------------------------------------\n * This file has been generated by Sanity TypeGen.\n * Command: \\`sanity typegen generate\\`\n *\n * Any modifications made directly to this file will be overwritten the next time\n * the TypeScript definitions are generated. 
Please make changes to the Sanity\n * schema definitions and/or GROQ queries if you need to update these types.\n *\n * For more information on how to use Sanity TypeGen, visit the official documentation:\n * https://www.sanity.io/docs/sanity-typegen\n * ---------------------------------------------------------------------------------\n */\\n\\n`\n\nexport default async function typegenGenerateAction(\n args: CliCommandArguments<TypegenGenerateTypesCommandFlags>,\n context: CliCommandContext,\n): Promise<void> {\n const flags = args.extOptions\n const {output, workDir, telemetry} = context\n\n const trace = telemetry.trace(TypesGeneratedTrace)\n trace.start()\n\n const codegenConfig = await readConfig(flags['config-path'] || 'sanity-typegen.json')\n\n try {\n const schemaStats = await stat(codegenConfig.schema)\n if (!schemaStats.isFile()) {\n throw new Error(`Schema path is not a file: ${codegenConfig.schema}`)\n }\n } catch (err) {\n if (err.code === 'ENOENT') {\n // If the user has not provided a specific schema path (eg we're using the default), give some help\n const hint =\n codegenConfig.schema === './schema.json' ? 
` - did you run \"sanity schema extract\"?` : ''\n throw new Error(`Schema file not found: ${codegenConfig.schema}${hint}`)\n }\n throw err\n }\n\n const outputPath = join(process.cwd(), codegenConfig.generates)\n const outputDir = dirname(outputPath)\n await mkdir(outputDir, {recursive: true})\n const workerPath = await getCliWorkerPath('typegenGenerate')\n\n const spinner = output.spinner({}).start('Generating types')\n\n const worker = new Worker(workerPath, {\n workerData: {\n workDir,\n schemaPath: codegenConfig.schema,\n searchPath: codegenConfig.path,\n overloadClientMethods: codegenConfig.overloadClientMethods,\n } satisfies TypegenGenerateTypesWorkerData,\n // eslint-disable-next-line no-process-env\n env: process.env,\n })\n\n const typeFile = await open(\n outputPath,\n // eslint-disable-next-line no-bitwise\n constants.O_TRUNC | constants.O_CREAT | constants.O_WRONLY,\n )\n\n typeFile.write(generatedFileWarning)\n\n const stats = {\n queryFilesCount: 0,\n errors: 0,\n queriesCount: 0,\n schemaTypesCount: 0,\n unknownTypeNodesGenerated: 0,\n typeNodesGenerated: 0,\n emptyUnionTypeNodesGenerated: 0,\n size: 0,\n }\n\n await new Promise<void>((resolve, reject) => {\n worker.addListener('message', (msg: TypegenGenerateTypesWorkerMessage) => {\n if (msg.type === 'error') {\n if (msg.fatal) {\n trace.error(msg.error)\n reject(msg.error)\n return\n }\n const errorMessage = msg.filename\n ? 
`${msg.error.message} in \"${msg.filename}\"`\n : msg.error.message\n spinner.fail(errorMessage)\n stats.errors++\n return\n }\n if (msg.type === 'complete') {\n resolve()\n return\n }\n\n if (msg.type === 'typemap') {\n let typeMapStr = `// Query TypeMap\\n`\n typeMapStr += msg.typeMap\n typeFile.write(typeMapStr)\n stats.size += Buffer.byteLength(typeMapStr)\n return\n }\n\n let fileTypeString = `// Source: ${msg.filename}\\n`\n\n if (msg.type === 'schema') {\n stats.schemaTypesCount += msg.length\n fileTypeString += msg.schema\n typeFile.write(fileTypeString)\n return\n }\n\n if (msg.type === 'types') {\n stats.queryFilesCount++\n for (const {\n queryName,\n query,\n type,\n typeNodesGenerated,\n unknownTypeNodesGenerated,\n emptyUnionTypeNodesGenerated,\n } of msg.types) {\n fileTypeString += `// Variable: ${queryName}\\n`\n fileTypeString += `// Query: ${query.replace(/(\\r\\n|\\n|\\r)/gm, '').trim()}\\n`\n fileTypeString += type\n stats.queriesCount++\n stats.typeNodesGenerated += typeNodesGenerated\n stats.unknownTypeNodesGenerated += unknownTypeNodesGenerated\n stats.emptyUnionTypeNodesGenerated += emptyUnionTypeNodesGenerated\n }\n typeFile.write(`${fileTypeString}\\n`)\n stats.size += Buffer.byteLength(fileTypeString)\n }\n })\n worker.addListener('error', reject)\n })\n\n await typeFile.close()\n\n const prettierConfig = codegenConfig.formatGeneratedCode\n ? 
await resolvePrettierConfig(outputPath).catch((err) => {\n output.warn(`Failed to load prettier config: ${err.message}`)\n return null\n })\n : null\n\n if (prettierConfig) {\n const formatFile = await open(outputPath, constants.O_RDWR)\n try {\n const code = await formatFile.readFile()\n const formattedCode = await prettierFormat(code.toString(), {\n ...prettierConfig,\n parser: 'typescript' as const,\n })\n await formatFile.truncate()\n await formatFile.write(formattedCode, 0)\n\n spinner.info('Formatted generated types with Prettier')\n } catch (err) {\n output.warn(`Failed to format generated types with Prettier: ${err.message}`)\n } finally {\n await formatFile.close()\n }\n }\n\n trace.log({\n outputSize: stats.size,\n queriesCount: stats.queriesCount,\n schemaTypesCount: stats.schemaTypesCount,\n queryFilesCount: stats.queryFilesCount,\n filesWithErrors: stats.errors,\n typeNodesGenerated: stats.typeNodesGenerated,\n unknownTypeNodesGenerated: stats.unknownTypeNodesGenerated,\n unknownTypeNodesRatio:\n stats.typeNodesGenerated > 0 ? 
stats.unknownTypeNodesGenerated / stats.typeNodesGenerated : 0,\n emptyUnionTypeNodesGenerated: stats.emptyUnionTypeNodesGenerated,\n configOverloadClientMethods: codegenConfig.overloadClientMethods,\n })\n\n trace.complete()\n if (stats.errors > 0) {\n spinner.warn(`Encountered errors in ${stats.errors} files while generating types`)\n }\n\n spinner.succeed(\n `Generated TypeScript types for ${stats.schemaTypesCount} schema types and ${stats.queriesCount} GROQ queries in ${stats.queryFilesCount} files into: ${codegenConfig.generates}`,\n )\n}\n"],"names":["defineTrace","telemetry","readConfig","stat","join","dirname","mkdir","getCliWorkerPath","Worker","open","constants","resolvePrettierConfig","prettierFormat"],"mappings":";;AAeO,MAAM,sBAAsBA,UAAAA,YAA2C;AAAA,EAC5E,MAAM;AAAA,EACN,SAAS;AAAA,EACT,aAAa;AACf,CAAC,GCAK,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAcC,eAAA,sBAC5B,MACA,SACe;AACf,QAAM,QAAQ,KAAK,YACb,EAAC,QAAQ,SAAS,WAAAC,WAAS,IAAI,SAE/B,QAAQA,WAAU,MAAM,mBAAmB;AACjD,QAAM,MAAM;AAEZ,QAAM,gBAAgB,MAAMC,QAAA,WAAW,MAAM,aAAa,KAAK,qBAAqB;AAEhF,MAAA;AAEF,QAAI,EADgB,MAAMC,GAAA,KAAK,cAAc,MAAM,GAClC,OAAO;AACtB,YAAM,IAAI,MAAM,8BAA8B,cAAc,MAAM,EAAE;AAAA,WAE/D,KAAK;AACR,QAAA,IAAI,SAAS,UAAU;AAEzB,YAAM,OACJ,cAAc,WAAW,kBAAkB,4CAA4C;AACzF,YAAM,IAAI,MAAM,0BAA0B,cAAc,MAAM,GAAG,IAAI,EAAE;AAAA,IAAA;AAEnE,UAAA;AAAA,EAAA;AAGF,QAAA,aAAaC,KAAAA,KAAK,QAAQ,IAAI,GAAG,cAAc,SAAS,GACxD,YAAYC,KAAA,QAAQ,UAAU;AACpC,QAAMC,GAAM,MAAA,WAAW,EAAC,WAAW,IAAK;AACxC,QAAM,aAAa,MAAMC,UAAA,iBAAiB,iBAAiB,GAErD,UAAU,OAAO,QAAQ,CAAA,CAAE,EAAE,MAAM,kBAAkB,GAErD,SAAS,IAAIC,2BAAO,YAAY;AAAA,IACpC,YAAY;AAAA,MACV;AAAA,MACA,YAAY,cAAc;AAAA,MAC1B,YAAY,cAAc;AAAA,MAC1B,uBAAuB,cAAc;AAAA,IACvC;AAAA;AAAA,IAEA,KAAK,QAAQ;AAAA,EAAA,CACd,GAEK,WAAW,MAAMC,GAAA;AAAA,IACrB;AAAA;AAAA,IAEAC,GAAAA,UAAU,UAAUA,aAAU,UAAUA,GAAAA,UAAU;AAAA,EACpD;AAEA,WAAS,MAAM,oBAAoB;AAEnC,QAAM,QAAQ;AAAA,IACZ,iBAAiB;AAAA,IACjB,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,kBAAkB;AAAA,IAClB,2BAA2B;AAAA,IAC3B,oBAAoB;AAAA,IACpB,8BAA8B;AAAA,I
AC9B,MAAM;AAAA,EACR;AAEA,QAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AACpC,WAAA,YAAY,WAAW,CAAC,QAA2C;AACpE,UAAA,IAAI,SAAS,SAAS;AACxB,YAAI,IAAI,OAAO;AACb,gBAAM,MAAM,IAAI,KAAK,GACrB,OAAO,IAAI,KAAK;AAChB;AAAA,QAAA;AAEF,cAAM,eAAe,IAAI,WACrB,GAAG,IAAI,MAAM,OAAO,QAAQ,IAAI,QAAQ,MACxC,IAAI,MAAM;AACN,gBAAA,KAAK,YAAY,GACzB,MAAM;AACN;AAAA,MAAA;AAEE,UAAA,IAAI,SAAS,YAAY;AACnB,gBAAA;AACR;AAAA,MAAA;AAGE,UAAA,IAAI,SAAS,WAAW;AAC1B,YAAI,aAAa;AAAA;AACH,sBAAA,IAAI,SAClB,SAAS,MAAM,UAAU,GACzB,MAAM,QAAQ,OAAO,WAAW,UAAU;AAC1C;AAAA,MAAA;AAGE,UAAA,iBAAiB,cAAc,IAAI,QAAQ;AAAA;AAE3C,UAAA,IAAI,SAAS,UAAU;AACnB,cAAA,oBAAoB,IAAI,QAC9B,kBAAkB,IAAI,QACtB,SAAS,MAAM,cAAc;AAC7B;AAAA,MAAA;AAGE,UAAA,IAAI,SAAS,SAAS;AAClB,cAAA;AACK,mBAAA;AAAA,UACT;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,aACG,IAAI;AACP,4BAAkB,gBAAgB,SAAS;AAAA,GAC3C,kBAAkB,aAAa,MAAM,QAAQ,kBAAkB,EAAE,EAAE,KAAM,CAAA;AAAA,GACzE,kBAAkB,MAClB,MAAM,gBACN,MAAM,sBAAsB,oBAC5B,MAAM,6BAA6B,2BACnC,MAAM,gCAAgC;AAE/B,iBAAA,MAAM,GAAG,cAAc;AAAA,CAAI,GACpC,MAAM,QAAQ,OAAO,WAAW,cAAc;AAAA,MAAA;AAAA,IAEjD,CAAA,GACD,OAAO,YAAY,SAAS,MAAM;AAAA,EAAA,CACnC,GAED,MAAM,SAAS,MAAM;AAErB,QAAM,iBAAiB,cAAc,sBACjC,MAAMC,SAAsB,cAAA,UAAU,EAAE,MAAM,CAAC,SAC7C,OAAO,KAAK,mCAAmC,IAAI,OAAO,EAAE,GACrD,KACR,IACD;AAEJ,MAAI,gBAAgB;AAClB,UAAM,aAAa,MAAMF,GAAAA,KAAK,YAAYC,GAAAA,UAAU,MAAM;AACtD,QAAA;AACI,YAAA,OAAO,MAAM,WAAW,SAAS,GACjC,gBAAgB,MAAME,SAAA,OAAe,KAAK,YAAY;AAAA,QAC1D,GAAG;AAAA,QACH,QAAQ;AAAA,MAAA,CACT;AACK,YAAA,WAAW,YACjB,MAAM,WAAW,MAAM,eAAe,CAAC,GAEvC,QAAQ,KAAK,yCAAyC;AAAA,aAC/C,KAAK;AACZ,aAAO,KAAK,mDAAmD,IAAI,OAAO,EAAE;AAAA,IAAA,UAC5E;AACA,YAAM,WAAW,MAAM;AAAA,IAAA;AAAA,EACzB;AAGF,QAAM,IAAI;AAAA,IACR,YAAY,MAAM;AAAA,IAClB,cAAc,MAAM;AAAA,IACpB,kBAAkB,MAAM;AAAA,IACxB,iBAAiB,MAAM;AAAA,IACvB,iBAAiB,MAAM;AAAA,IACvB,oBAAoB,MAAM;AAAA,IAC1B,2BAA2B,MAAM;AAAA,IACjC,uBACE,MAAM,qBAAqB,IAAI,MAAM,4BAA4B,MAAM,qBAAqB;AAAA,IAC9F,8BAA8B,MAAM;AAAA,IACpC,6BAA6B,cAAc;AAAA,EAAA,CAC5C,GAED,MAAM,SAAA,GACF,MAAM,SAAS,KACjB,QAAQ,KAAK,yBAAyB,MAAM,MAAM,+BAA+B,GAGnF,QAAQ;AAAA,IACN,kCAAkC,MAAM,gBAAgB,qBAAqB,MAAM
,YAAY,oBAAoB,MAAM,eAAe,gBAAgB,cAAc,SAAS;AAAA,EACjL;AACF;;"}
1
+ {"version":3,"file":"generateAction.js","sources":["../../src/actions/typegen/generate.telemetry.ts","../../src/actions/typegen/generateAction.ts"],"sourcesContent":["import {defineTrace} from '@sanity/telemetry'\n\ninterface TypesGeneratedTraceAttributes {\n outputSize: number\n queriesCount: number\n projectionsCount: number\n schemaTypesCount: number\n schemaCount: number\n totalScannedFilesCount: number\n queryFilesCount: number\n projectionFilesCount: number\n filesWithErrors: number\n errorCount: number\n typeNodesGenerated: number\n unknownTypeNodesGenerated: number\n unknownTypeNodesRatio: number\n emptyUnionTypeNodesGenerated: number\n configOverloadClientMethods: boolean\n configAugmentGroqModule: boolean\n}\n\nexport const TypesGeneratedTrace = defineTrace<TypesGeneratedTraceAttributes>({\n name: 'Types Generated',\n version: 1,\n description: 'Trace emitted when generating TypeScript types for queries',\n})\n","/* eslint-disable max-statements */\nimport {constants, mkdir, open, stat} from 'node:fs/promises'\nimport {dirname, join} from 'node:path'\nimport process from 'node:process'\nimport {Worker} from 'node:worker_threads'\n\nimport {DEFAULT_CONFIG, readConfig} from '@sanity/codegen'\n\nimport {type CliCommandArguments, type CliCommandContext} from '../../types'\nimport {getCliWorkerPath} from '../../util/cliWorker'\nimport {createReceiver} from '../../util/workerChannel'\nimport {\n type TypegenGenerateTypesWorkerData,\n type TypegenWorkerChannel,\n} from '../../workers/typegenGenerate'\nimport {TypesGeneratedTrace} from './generate.telemetry'\n\nexport interface TypegenGenerateTypesCommandFlags {\n 'config-path'?: string\n}\n\nconst DEFAULT_CONFIG_PATH = 'sanity-typegen.json'\nconst GENERATED_FILE_WARNING = `/**\n * ---------------------------------------------------------------------------------\n * This file has been generated by Sanity TypeGen.\n * Command: \\`sanity typegen generate\\`\n *\n * Any modifications made directly to this file 
will be overwritten the next time\n * the TypeScript definitions are generated. Please make changes to the Sanity\n * schema definitions and/or GROQ queries if you need to update these types.\n *\n * For more information on how to use Sanity TypeGen, visit the official documentation:\n * https://www.sanity.io/docs/sanity-typegen\n * ---------------------------------------------------------------------------------\n */\\n\\n`\n\nconst percentageFormatter = new Intl.NumberFormat('en-US', {\n style: 'percent',\n minimumFractionDigits: 1,\n maximumFractionDigits: 1,\n})\n\nconst percent = (value: number): string => percentageFormatter.format(Math.min(value, 1))\n\nconst count = (\n amount: number,\n plural: string,\n singular: string = plural.slice(0, Math.max(0, plural.length - 1)),\n): string => `${amount.toLocaleString('en-US')} ${amount === 1 ? singular : plural}`\n\nexport default async function typegenGenerateAction(\n args: CliCommandArguments<TypegenGenerateTypesCommandFlags>,\n context: CliCommandContext,\n): Promise<void> {\n const flags = args.extOptions\n const {output, workDir, telemetry} = context\n\n const trace = telemetry.trace(TypesGeneratedTrace)\n trace.start()\n\n const spinner = output.spinner('Generating types…')\n spinner.start()\n\n let codegenConfig\n const configPath = flags['config-path'] ?? DEFAULT_CONFIG_PATH\n try {\n codegenConfig = await readConfig(configPath)\n spinner.info(`Using typegen configuration found at \"${configPath}\"`)\n } catch (error) {\n if (error?.code !== 'ENOENT') throw error\n codegenConfig = DEFAULT_CONFIG\n\n if (configPath !== DEFAULT_CONFIG_PATH) {\n spinner.warn(\n `Configuration file not found at specified path \"${configPath}\". Falling back to default settings.`,\n )\n }\n }\n const {\n schemas,\n augmentGroqModule,\n formatGeneratedCode,\n generates,\n overloadClientMethods,\n path: searchPath,\n } = codegenConfig\n\n spinner.start(`Loading schema${schemas.length === 1 ? 
'' : 's'}…`)\n\n const outputPath = join(process.cwd(), generates)\n await mkdir(dirname(outputPath), {recursive: true})\n\n const workerData: TypegenGenerateTypesWorkerData = {\n workDir,\n schemas,\n searchPath,\n overloadClientMethods,\n augmentGroqModule,\n }\n const worker = new Worker(await getCliWorkerPath('typegenGenerate'), {\n workerData,\n env: process.env,\n })\n const receiver = createReceiver<TypegenWorkerChannel>(worker)\n\n let fileHandle\n let schemaStats\n let queryStats\n\n try {\n await receiver.event.loadedSchemas()\n spinner.succeed(\n schemas.length === 1\n ? `Loaded schema from ${schemas[0].schemaPath}`\n : `Loaded ${count(schemas.length, 'schemas')}`,\n )\n\n spinner.start('Generating schema types…')\n fileHandle = await open(outputPath, 'w')\n await fileHandle.write(GENERATED_FILE_WARNING)\n const schemaResult = await receiver.event.generatedSchemaDeclarations()\n schemaStats = schemaResult.schemaStats\n await fileHandle.write(schemaResult.code)\n\n const schemaTypesCount = count(schemaStats.schemaTypesCount, 'schema types')\n const schemaCount = count(schemaStats.schemaCount, 'schemas')\n spinner.succeed(\n `Generated ${schemaTypesCount}${schemas.length > 1 ? ` from ${schemaCount}` : ''}`,\n )\n\n spinner.start('Generating query types…')\n const expectedFiles = (await receiver.event.fileCount()).fileCount\n const expectedFileCount = count(expectedFiles, 'files')\n\n for await (const {\n progress,\n ...queryResult\n } of receiver.stream.generatedQueryResultDeclaration()) {\n const queryCount = count(progress.queriesCount, 'queries', 'query')\n const projectionCount = count(progress.projectionsCount, 'projections')\n spinner.text =\n `Generating query types… (${percent(progress.filesCount / expectedFiles)})\\n` +\n ` └─ Processed ${progress.filesCount} of ${expectedFileCount}. 
Found ${queryCount}, ${projectionCount}.`\n\n if (queryResult.type === 'error') {\n spinner.fail(queryResult.message)\n }\n\n if (queryResult.type === 'declaration') {\n await fileHandle.write(queryResult.code)\n }\n }\n\n const result = await receiver.event.generationComplete()\n queryStats = result.queryStats\n await fileHandle.write(result.augmentedQueryResultDeclarations.code)\n await fileHandle.close()\n fileHandle = null\n\n const queryTypesCount = count(queryStats.queriesCount, 'query types')\n const projectionTypesCount = count(queryStats.projectionsCount, 'projection types')\n const scannedFilesCount = count(queryStats.totalScannedFilesCount, 'scanned files')\n spinner.succeed(\n `Generated ${queryTypesCount} and ${projectionTypesCount} from ${scannedFilesCount}`,\n )\n\n if (formatGeneratedCode) {\n spinner.start(`Formatting generated types with prettier…`)\n\n try {\n const prettier = await import('prettier')\n const prettierConfig = await prettier.resolveConfig(outputPath)\n\n fileHandle = await open(outputPath, constants.O_RDWR)\n const code = await fileHandle.readFile({encoding: 'utf-8'})\n const formattedCode = await prettier.format(code, {\n ...prettierConfig,\n parser: 'typescript' as const,\n })\n await fileHandle.truncate()\n await fileHandle.write(formattedCode, 0)\n await fileHandle.close()\n fileHandle = null\n\n spinner.succeed('Formatted generated types with prettier')\n } catch (err) {\n spinner.warn(`Failed to format generated types with prettier: ${err.message}`)\n }\n }\n\n // Gather final stats and report success\n const outputStat = await stat(outputPath)\n\n trace.log({\n outputSize: outputStat.size,\n ...schemaStats,\n ...queryStats,\n configOverloadClientMethods: overloadClientMethods,\n configAugmentGroqModule: augmentGroqModule,\n })\n\n if (queryStats.errorCount > 0) {\n spinner.warn(\n `Encountered ${count(queryStats.errorCount, 'errors')} in ${count(queryStats.filesWithErrors, 'files')} while generating types to 
${generates}`,\n )\n } else {\n spinner.succeed(`Successfully generated types to ${generates}`)\n }\n } catch (err) {\n trace.error(err)\n throw err\n } finally {\n await fileHandle?.close()\n await receiver.dispose()\n trace.complete()\n }\n}\n"],"names":["defineTrace","telemetry","readConfig","DEFAULT_CONFIG","join","process","mkdir","dirname","Worker","getCliWorkerPath","createReceiver","open","constants","stat"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAqBO,MAAM,sBAAsBA,UAAAA,YAA2C;AAAA,EAC5E,MAAM;AAAA,EACN,SAAS;AAAA,EACT,aAAa;AACf,CAAC,GCJK,sBAAsB,uBACtB,yBAAyB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAczB,sBAAsB,IAAI,KAAK,aAAa,SAAS;AAAA,EACzD,OAAO;AAAA,EACP,uBAAuB;AAAA,EACvB,uBAAuB;AACzB,CAAC,GAEK,UAAU,CAAC,UAA0B,oBAAoB,OAAO,KAAK,IAAI,OAAO,CAAC,CAAC,GAElF,QAAQ,CACZ,QACA,QACA,WAAmB,OAAO,MAAM,GAAG,KAAK,IAAI,GAAG,OAAO,SAAS,CAAC,CAAC,MACtD,GAAG,OAAO,eAAe,OAAO,CAAC,IAAI,WAAW,IAAI,WAAW,MAAM;AAEpD,eAAA,sBAC5B,MACA,SACe;AACf,QAAM,QAAQ,KAAK,YACb,EAAC,QAAQ,SAAS,WAAAC,WAAS,IAAI,SAE/B,QAAQA,WAAU,MAAM,mBAAmB;AACjD,QAAM,MAAM;AAEN,QAAA,UAAU,OAAO,QAAQ,wBAAmB;AAClD,UAAQ,MAAM;AAEV,MAAA;AACE,QAAA,aAAa,MAAM,aAAa,KAAK;AACvC,MAAA;AACc,oBAAA,MAAMC,mBAAW,UAAU,GAC3C,QAAQ,KAAK,yCAAyC,UAAU,GAAG;AAAA,WAC5D,OAAO;AACV,QAAA,OAAO,SAAS,SAAgB,OAAA;AACpB,oBAAAC,QAAAA,gBAEZ,eAAe,uBACjB,QAAQ;AAAA,MACN,mDAAmD,UAAU;AAAA,IAC/D;AAAA,EAAA;AAGE,QAAA;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,MAAM;AAAA,EAAA,IACJ;AAEJ,UAAQ,MAAM,iBAAiB,QAAQ,WAAW,IAAI,KAAK,GAAG,QAAG;AAEjE,QAAM,aAAaC,KAAA,KAAKC,iBAAQ,QAAA,IAAA,GAAO,SAAS;AAChD,QAAMC,GAAAA,MAAMC,KAAAA,QAAQ,UAAU,GAAG,EAAC,WAAW,IAAK;AAElD,QAAM,aAA6C;AAAA,IACjD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,KAEI,SAAS,IAAIC,oBAAAA,OAAO,MAAMC,UAAA,iBAAiB,iBAAiB,GAAG;AAAA,IACnE;AAAA,IACA,KAAKJ,iBAAAA,QAAQ;AAAA,EAAA,CACd,GACK,WAAWK,cAAA,eAAqC,MAAM;AAE5D,MAAI,YACA,aACA;AAEA,MAAA;AACF,UAAM,SAAS,MAAM,cAAc,GACnC,QAAQ;AAAA,MACN,QAAQ,WAAW,IACf,sBAAsB,QAAQ,CAAC,EAAE,UAAU,KAC3C,UAAU,MAAM,QAAQ,QAAQ,SAAS,CAAC;AAAA,IAGhD,GAAA,QAAQ,MAAM
,+BAA0B,GACxC,aAAa,MAAMC,GAAAA,KAAK,YAAY,GAAG,GACvC,MAAM,WAAW,MAAM,sBAAsB;AAC7C,UAAM,eAAe,MAAM,SAAS,MAAM,4BAA4B;AACtE,kBAAc,aAAa,aAC3B,MAAM,WAAW,MAAM,aAAa,IAAI;AAElC,UAAA,mBAAmB,MAAM,YAAY,kBAAkB,cAAc,GACrE,cAAc,MAAM,YAAY,aAAa,SAAS;AACpD,YAAA;AAAA,MACN,aAAa,gBAAgB,GAAG,QAAQ,SAAS,IAAI,SAAS,WAAW,KAAK,EAAE;AAAA,IAAA,GAGlF,QAAQ,MAAM,8BAAyB;AACjC,UAAA,iBAAiB,MAAM,SAAS,MAAM,aAAa,WACnD,oBAAoB,MAAM,eAAe,OAAO;AAErC,qBAAA;AAAA,MACf;AAAA,MACA,GAAG;AAAA,IAAA,KACA,SAAS,OAAO,mCAAmC;AAChD,YAAA,aAAa,MAAM,SAAS,cAAc,WAAW,OAAO,GAC5D,kBAAkB,MAAM,SAAS,kBAAkB,aAAa;AACtE,cAAQ,OACN,iCAA4B,QAAQ,SAAS,aAAa,aAAa,CAAC;AAAA,2BACtD,SAAS,UAAU,OAAO,iBAAiB,WAAW,UAAU,KAAK,eAAe,KAEpG,YAAY,SAAS,WACvB,QAAQ,KAAK,YAAY,OAAO,GAG9B,YAAY,SAAS,iBACvB,MAAM,WAAW,MAAM,YAAY,IAAI;AAAA,IAAA;AAI3C,UAAM,SAAS,MAAM,SAAS,MAAM,mBAAmB;AACvD,iBAAa,OAAO,YACpB,MAAM,WAAW,MAAM,OAAO,iCAAiC,IAAI,GACnE,MAAM,WAAW,SACjB,aAAa;AAEb,UAAM,kBAAkB,MAAM,WAAW,cAAc,aAAa,GAC9D,uBAAuB,MAAM,WAAW,kBAAkB,kBAAkB,GAC5E,oBAAoB,MAAM,WAAW,wBAAwB,eAAe;AAKlF,QAJA,QAAQ;AAAA,MACN,aAAa,eAAe,QAAQ,oBAAoB,SAAS,iBAAiB;AAAA,OAGhF,qBAAqB;AACvB,cAAQ,MAAM,gDAA2C;AAErD,UAAA;AACI,cAAA,WAAW,MAAM,OAAO,UAAU,GAClC,iBAAiB,MAAM,SAAS,cAAc,UAAU;AAE9D,qBAAa,MAAMA,GAAA,KAAK,YAAYC,GAAAA,UAAU,MAAM;AACpD,cAAM,OAAO,MAAM,WAAW,SAAS,EAAC,UAAU,QAAQ,CAAA,GACpD,gBAAgB,MAAM,SAAS,OAAO,MAAM;AAAA,UAChD,GAAG;AAAA,UACH,QAAQ;AAAA,QAAA,CACT;AACD,cAAM,WAAW,SAAS,GAC1B,MAAM,WAAW,MAAM,eAAe,CAAC,GACvC,MAAM,WAAW,MAAM,GACvB,aAAa,MAEb,QAAQ,QAAQ,yCAAyC;AAAA,eAClD,KAAK;AACZ,gBAAQ,KAAK,mDAAmD,IAAI,OAAO,EAAE;AAAA,MAAA;AAAA,IAC/E;AAII,UAAA,aAAa,MAAMC,GAAA,KAAK,UAAU;AAExC,UAAM,IAAI;AAAA,MACR,YAAY,WAAW;AAAA,MACvB,GAAG;AAAA,MACH,GAAG;AAAA,MACH,6BAA6B;AAAA,MAC7B,yBAAyB;AAAA,IAC1B,CAAA,GAEG,WAAW,aAAa,IAC1B,QAAQ;AAAA,MACN,eAAe,MAAM,WAAW,YAAY,QAAQ,CAAC,OAAO,MAAM,WAAW,iBAAiB,OAAO,CAAC,8BAA8B,SAAS;AAAA,IAG/I,IAAA,QAAQ,QAAQ,mCAAmC,SAAS,EAAE;AAAA,WAEzD,KAAK;AACN,UAAA,MAAA,MAAM,GAAG,GACT;AAAA,EAAA,UACN;AACM,UAAA,YAAY,SAClB,MAAM,SAAS,QAAQ,GACvB,MAAM,SAAS;AAAA,EAAA;AAEnB;;"}
@@ -1,10 +1,10 @@
1
1
  "use strict";
2
- var fs = require("node:fs"), path = require("node:path"), createDebug = require("debug"), require$$0$1 = require("fs"), require$$0 = require("path"), require$$0$2 = require("os"), require$$3 = require("crypto");
2
+ var fs = require("node:fs"), path = require("node:path"), debugIt = require("debug"), require$$0$1 = require("fs"), require$$0 = require("path"), require$$0$2 = require("os"), require$$3 = require("crypto");
3
3
  function _interopDefaultCompat(e) {
4
4
  return e && typeof e == "object" && "default" in e ? e : { default: e };
5
5
  }
6
- var fs__default = /* @__PURE__ */ _interopDefaultCompat(fs), path__default = /* @__PURE__ */ _interopDefaultCompat(path), createDebug__default = /* @__PURE__ */ _interopDefaultCompat(createDebug), require$$0__default$1 = /* @__PURE__ */ _interopDefaultCompat(require$$0$1), require$$0__default = /* @__PURE__ */ _interopDefaultCompat(require$$0), require$$0__default$2 = /* @__PURE__ */ _interopDefaultCompat(require$$0$2), require$$3__default = /* @__PURE__ */ _interopDefaultCompat(require$$3);
7
- const debug = createDebug__default.default("sanity:cli");
6
+ var fs__default = /* @__PURE__ */ _interopDefaultCompat(fs), path__default = /* @__PURE__ */ _interopDefaultCompat(path), debugIt__default = /* @__PURE__ */ _interopDefaultCompat(debugIt), require$$0__default$1 = /* @__PURE__ */ _interopDefaultCompat(require$$0$1), require$$0__default = /* @__PURE__ */ _interopDefaultCompat(require$$0), require$$0__default$2 = /* @__PURE__ */ _interopDefaultCompat(require$$0$2), require$$3__default = /* @__PURE__ */ _interopDefaultCompat(require$$3);
7
+ const debug = debugIt__default.default("sanity:cli");
8
8
  function resolveRootDir(cwd) {
9
9
  try {
10
10
  return resolveProjectRoot(cwd) || cwd;
@@ -1 +1 @@
1
- {"version":3,"file":"loadEnv.js","sources":["../../src/debug.ts","../../src/util/resolveRootDir.ts","../../../../../node_modules/.pnpm/dotenv@16.4.7/node_modules/dotenv/lib/main.js","../../../../../node_modules/.pnpm/dotenv-expand@9.0.0/node_modules/dotenv-expand/lib/main.js","../../src/util/loadEnv.ts"],"sourcesContent":["import debugIt from 'debug'\n\nexport const debug = debugIt('sanity:cli')\n","/* eslint-disable no-sync */\nimport fs from 'node:fs'\nimport path from 'node:path'\n\nimport {debug} from '../debug'\n\n/**\n * Resolve project root directory, falling back to cwd if it cannot be found\n */\nexport function resolveRootDir(cwd: string): string {\n try {\n return resolveProjectRoot(cwd) || cwd\n } catch (err) {\n throw new Error(`Error occurred trying to resolve project root:\\n${err.message}`)\n }\n}\n\nfunction hasSanityConfig(basePath: string, configName: string): boolean {\n const buildConfigs = [\n fileExists(path.join(basePath, `${configName}.js`)),\n fileExists(path.join(basePath, `${configName}.ts`)),\n isSanityV2StudioRoot(basePath),\n ]\n\n return buildConfigs.some(Boolean)\n}\n\nfunction resolveProjectRoot(basePath: string, iterations = 0): string | false {\n const configName = 'sanity.config'\n if (hasSanityConfig(basePath, configName)) {\n return basePath\n }\n\n const parentDir = path.resolve(basePath, '..')\n if (parentDir === basePath || iterations > 30) {\n // Reached root (or max depth), give up\n return false\n }\n\n return resolveProjectRoot(parentDir, iterations + 1)\n}\n\nfunction isSanityV2StudioRoot(basePath: string): boolean {\n try {\n const content = fs.readFileSync(path.join(basePath, 'sanity.json'), 'utf8')\n const sanityJson = JSON.parse(content)\n const isRoot = Boolean(sanityJson?.root)\n if (isRoot) {\n debug('Found Sanity v2 studio root at %s', basePath)\n }\n return isRoot\n } catch (err) {\n return false\n }\n}\n\nfunction fileExists(filePath: string): boolean {\n return fs.existsSync(filePath)\n}\n","const fs = 
require('fs')\nconst path = require('path')\nconst os = require('os')\nconst crypto = require('crypto')\nconst packageJson = require('../package.json')\n\nconst version = packageJson.version\n\nconst LINE = /(?:^|^)\\s*(?:export\\s+)?([\\w.-]+)(?:\\s*=\\s*?|:\\s+?)(\\s*'(?:\\\\'|[^'])*'|\\s*\"(?:\\\\\"|[^\"])*\"|\\s*`(?:\\\\`|[^`])*`|[^#\\r\\n]+)?\\s*(?:#.*)?(?:$|$)/mg\n\n// Parse src into an Object\nfunction parse (src) {\n const obj = {}\n\n // Convert buffer to string\n let lines = src.toString()\n\n // Convert line breaks to same format\n lines = lines.replace(/\\r\\n?/mg, '\\n')\n\n let match\n while ((match = LINE.exec(lines)) != null) {\n const key = match[1]\n\n // Default undefined or null to empty string\n let value = (match[2] || '')\n\n // Remove whitespace\n value = value.trim()\n\n // Check if double quoted\n const maybeQuote = value[0]\n\n // Remove surrounding quotes\n value = value.replace(/^(['\"`])([\\s\\S]*)\\1$/mg, '$2')\n\n // Expand newlines if double quoted\n if (maybeQuote === '\"') {\n value = value.replace(/\\\\n/g, '\\n')\n value = value.replace(/\\\\r/g, '\\r')\n }\n\n // Add to object\n obj[key] = value\n }\n\n return obj\n}\n\nfunction _parseVault (options) {\n const vaultPath = _vaultPath(options)\n\n // Parse .env.vault\n const result = DotenvModule.configDotenv({ path: vaultPath })\n if (!result.parsed) {\n const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`)\n err.code = 'MISSING_DATA'\n throw err\n }\n\n // handle scenario for comma separated keys - for use with key rotation\n // example: DOTENV_KEY=\"dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=prod,dotenv://:key_7890@dotenvx.com/vault/.env.vault?environment=prod\"\n const keys = _dotenvKey(options).split(',')\n const length = keys.length\n\n let decrypted\n for (let i = 0; i < length; i++) {\n try {\n // Get full key\n const key = keys[i].trim()\n\n // Get instructions for decrypt\n const attrs = _instructions(result, key)\n\n // 
Decrypt\n decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key)\n\n break\n } catch (error) {\n // last key\n if (i + 1 >= length) {\n throw error\n }\n // try next key\n }\n }\n\n // Parse decrypted .env string\n return DotenvModule.parse(decrypted)\n}\n\nfunction _log (message) {\n console.log(`[dotenv@${version}][INFO] ${message}`)\n}\n\nfunction _warn (message) {\n console.log(`[dotenv@${version}][WARN] ${message}`)\n}\n\nfunction _debug (message) {\n console.log(`[dotenv@${version}][DEBUG] ${message}`)\n}\n\nfunction _dotenvKey (options) {\n // prioritize developer directly setting options.DOTENV_KEY\n if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {\n return options.DOTENV_KEY\n }\n\n // secondary infra already contains a DOTENV_KEY environment variable\n if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {\n return process.env.DOTENV_KEY\n }\n\n // fallback to empty string\n return ''\n}\n\nfunction _instructions (result, dotenvKey) {\n // Parse DOTENV_KEY. Format is a URI\n let uri\n try {\n uri = new URL(dotenvKey)\n } catch (error) {\n if (error.code === 'ERR_INVALID_URL') {\n const err = new Error('INVALID_DOTENV_KEY: Wrong format. 
Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n throw error\n }\n\n // Get decrypt key\n const key = uri.password\n if (!key) {\n const err = new Error('INVALID_DOTENV_KEY: Missing key part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get environment\n const environment = uri.searchParams.get('environment')\n if (!environment) {\n const err = new Error('INVALID_DOTENV_KEY: Missing environment part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get ciphertext payload\n const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`\n const ciphertext = result.parsed[environmentKey] // DOTENV_VAULT_PRODUCTION\n if (!ciphertext) {\n const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`)\n err.code = 'NOT_FOUND_DOTENV_ENVIRONMENT'\n throw err\n }\n\n return { ciphertext, key }\n}\n\nfunction _vaultPath (options) {\n let possibleVaultPath = null\n\n if (options && options.path && options.path.length > 0) {\n if (Array.isArray(options.path)) {\n for (const filepath of options.path) {\n if (fs.existsSync(filepath)) {\n possibleVaultPath = filepath.endsWith('.vault') ? filepath : `${filepath}.vault`\n }\n }\n } else {\n possibleVaultPath = options.path.endsWith('.vault') ? options.path : `${options.path}.vault`\n }\n } else {\n possibleVaultPath = path.resolve(process.cwd(), '.env.vault')\n }\n\n if (fs.existsSync(possibleVaultPath)) {\n return possibleVaultPath\n }\n\n return null\n}\n\nfunction _resolveHome (envPath) {\n return envPath[0] === '~' ? 
path.join(os.homedir(), envPath.slice(1)) : envPath\n}\n\nfunction _configVault (options) {\n _log('Loading env from encrypted .env.vault')\n\n const parsed = DotenvModule._parseVault(options)\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsed, options)\n\n return { parsed }\n}\n\nfunction configDotenv (options) {\n const dotenvPath = path.resolve(process.cwd(), '.env')\n let encoding = 'utf8'\n const debug = Boolean(options && options.debug)\n\n if (options && options.encoding) {\n encoding = options.encoding\n } else {\n if (debug) {\n _debug('No encoding is specified. UTF-8 is used by default')\n }\n }\n\n let optionPaths = [dotenvPath] // default, look for .env\n if (options && options.path) {\n if (!Array.isArray(options.path)) {\n optionPaths = [_resolveHome(options.path)]\n } else {\n optionPaths = [] // reset default\n for (const filepath of options.path) {\n optionPaths.push(_resolveHome(filepath))\n }\n }\n }\n\n // Build the parsed data in a temporary object (because we need to return it). 
Once we have the final\n // parsed data, we will combine it with process.env (or options.processEnv if provided).\n let lastError\n const parsedAll = {}\n for (const path of optionPaths) {\n try {\n // Specifying an encoding returns a string instead of a buffer\n const parsed = DotenvModule.parse(fs.readFileSync(path, { encoding }))\n\n DotenvModule.populate(parsedAll, parsed, options)\n } catch (e) {\n if (debug) {\n _debug(`Failed to load ${path} ${e.message}`)\n }\n lastError = e\n }\n }\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsedAll, options)\n\n if (lastError) {\n return { parsed: parsedAll, error: lastError }\n } else {\n return { parsed: parsedAll }\n }\n}\n\n// Populates process.env from .env file\nfunction config (options) {\n // fallback to original dotenv if DOTENV_KEY is not set\n if (_dotenvKey(options).length === 0) {\n return DotenvModule.configDotenv(options)\n }\n\n const vaultPath = _vaultPath(options)\n\n // dotenvKey exists but .env.vault file does not exist\n if (!vaultPath) {\n _warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. 
Did you forget to build it?`)\n\n return DotenvModule.configDotenv(options)\n }\n\n return DotenvModule._configVault(options)\n}\n\nfunction decrypt (encrypted, keyStr) {\n const key = Buffer.from(keyStr.slice(-64), 'hex')\n let ciphertext = Buffer.from(encrypted, 'base64')\n\n const nonce = ciphertext.subarray(0, 12)\n const authTag = ciphertext.subarray(-16)\n ciphertext = ciphertext.subarray(12, -16)\n\n try {\n const aesgcm = crypto.createDecipheriv('aes-256-gcm', key, nonce)\n aesgcm.setAuthTag(authTag)\n return `${aesgcm.update(ciphertext)}${aesgcm.final()}`\n } catch (error) {\n const isRange = error instanceof RangeError\n const invalidKeyLength = error.message === 'Invalid key length'\n const decryptionFailed = error.message === 'Unsupported state or unable to authenticate data'\n\n if (isRange || invalidKeyLength) {\n const err = new Error('INVALID_DOTENV_KEY: It must be 64 characters long (or more)')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n } else if (decryptionFailed) {\n const err = new Error('DECRYPTION_FAILED: Please check your DOTENV_KEY')\n err.code = 'DECRYPTION_FAILED'\n throw err\n } else {\n throw error\n }\n }\n}\n\n// Populate process.env with parsed values\nfunction populate (processEnv, parsed, options = {}) {\n const debug = Boolean(options && options.debug)\n const override = Boolean(options && options.override)\n\n if (typeof parsed !== 'object') {\n const err = new Error('OBJECT_REQUIRED: Please check the processEnv argument being passed to populate')\n err.code = 'OBJECT_REQUIRED'\n throw err\n }\n\n // Set process.env\n for (const key of Object.keys(parsed)) {\n if (Object.prototype.hasOwnProperty.call(processEnv, key)) {\n if (override === true) {\n processEnv[key] = parsed[key]\n }\n\n if (debug) {\n if (override === true) {\n _debug(`\"${key}\" is already defined and WAS overwritten`)\n } else {\n _debug(`\"${key}\" is already defined and was NOT overwritten`)\n }\n }\n } else {\n processEnv[key] = parsed[key]\n }\n 
}\n}\n\nconst DotenvModule = {\n configDotenv,\n _configVault,\n _parseVault,\n config,\n decrypt,\n parse,\n populate\n}\n\nmodule.exports.configDotenv = DotenvModule.configDotenv\nmodule.exports._configVault = DotenvModule._configVault\nmodule.exports._parseVault = DotenvModule._parseVault\nmodule.exports.config = DotenvModule.config\nmodule.exports.decrypt = DotenvModule.decrypt\nmodule.exports.parse = DotenvModule.parse\nmodule.exports.populate = DotenvModule.populate\n\nmodule.exports = DotenvModule\n","'use strict'\n\nfunction _interpolate (envValue, environment, config) {\n const matches = envValue.match(/(.?\\${*[\\w]*(?::-[\\w/]*)?}*)/g) || []\n\n return matches.reduce(function (newEnv, match, index) {\n const parts = /(.?)\\${*([\\w]*(?::-[\\w/]*)?)?}*/g.exec(match)\n if (!parts || parts.length === 0) {\n return newEnv\n }\n\n const prefix = parts[1]\n\n let value, replacePart\n\n if (prefix === '\\\\') {\n replacePart = parts[0]\n value = replacePart.replace('\\\\$', '$')\n } else {\n const keyParts = parts[2].split(':-')\n const key = keyParts[0]\n replacePart = parts[0].substring(prefix.length)\n // process.env value 'wins' over .env file's value\n value = Object.prototype.hasOwnProperty.call(environment, key)\n ? environment[key]\n : (config.parsed[key] || keyParts[1] || '')\n\n // If the value is found, remove nested expansions.\n if (keyParts.length > 1 && value) {\n const replaceNested = matches[index + 1]\n matches[index + 1] = ''\n\n newEnv = newEnv.replace(replaceNested, '')\n }\n // Resolve recursive interpolations\n value = _interpolate(value, environment, config)\n }\n\n return newEnv.replace(replacePart, value)\n }, envValue)\n}\n\nfunction expand (config) {\n // if ignoring process.env, use a blank object\n const environment = config.ignoreProcessEnv ? {} : process.env\n\n for (const configKey in config.parsed) {\n const value = Object.prototype.hasOwnProperty.call(environment, configKey) ? 
environment[configKey] : config.parsed[configKey]\n\n config.parsed[configKey] = _interpolate(value, environment, config)\n }\n\n for (const processKey in config.parsed) {\n environment[processKey] = config.parsed[processKey]\n }\n\n return config\n}\n\nmodule.exports.expand = expand\n","/**\n * This is an \"inlined\" version of Vite's `loadEnv` function,\n * simplified somewhat to only support our use case.\n *\n * Ideally we'd just use `loadEnv` from Vite, but importing it\n * causes bundling issues due to node APIs and downstream dependencies.\n *\n * Vite is MIT licensed, copyright (c) Yuxi (Evan) You and Vite contributors.\n */\n\n/* eslint-disable no-process-env */\nimport fs from 'node:fs'\nimport path from 'node:path'\n\nimport {parse} from 'dotenv'\nimport {expand} from 'dotenv-expand'\n\nexport function loadEnv(\n mode: string,\n envDir: string,\n prefixes: string[] = ['VITE_'],\n): Record<string, string> {\n if (mode === 'local') {\n throw new Error(\n `\"local\" cannot be used as a mode name because it conflicts with ` +\n `the .local postfix for .env files.`,\n )\n }\n\n const env: Record<string, string> = {}\n const envFiles = [\n /** default file */ `.env`,\n /** local file */ `.env.local`,\n /** mode file */ `.env.${mode}`,\n /** mode local file */ `.env.${mode}.local`,\n ]\n\n const parsed = Object.fromEntries(\n envFiles.flatMap((file) => {\n const envPath = lookupFile(envDir, [file], {\n rootDir: envDir,\n })\n if (!envPath) return []\n return Object.entries(parse(fs.readFileSync(envPath)))\n }),\n )\n\n // test NODE_ENV override before expand as otherwise process.env.NODE_ENV would override this\n if (parsed.NODE_ENV && process.env.VITE_USER_NODE_ENV === undefined) {\n process.env.VITE_USER_NODE_ENV = parsed.NODE_ENV\n }\n // support BROWSER and BROWSER_ARGS env variables\n if (parsed.BROWSER && process.env.BROWSER === undefined) {\n process.env.BROWSER = parsed.BROWSER\n }\n if (parsed.BROWSER_ARGS && process.env.BROWSER_ARGS === undefined) {\n 
process.env.BROWSER_ARGS = parsed.BROWSER_ARGS\n }\n\n try {\n // let environment variables use each other\n expand({parsed})\n } catch (e) {\n // custom error handling until https://github.com/motdotla/dotenv-expand/issues/65 is fixed upstream\n // check for message \"TypeError: Cannot read properties of undefined (reading 'split')\"\n if (e.message.includes('split')) {\n throw new Error('dotenv-expand failed to expand env vars. Maybe you need to escape `$`?')\n }\n throw e\n }\n\n // only keys that start with prefix are exposed to client\n for (const [key, value] of Object.entries(parsed)) {\n if (prefixes.some((prefix) => key.startsWith(prefix))) {\n env[key] = value\n }\n }\n\n // check if there are actual env variables starting with VITE_*\n // these are typically provided inline and should be prioritized\n for (const key in process.env) {\n if (prefixes.some((prefix) => key.startsWith(prefix))) {\n env[key] = process.env[key] as string\n }\n }\n\n return env\n}\n\nfunction lookupFile(\n dir: string,\n formats: string[],\n options?: {\n rootDir?: string\n },\n): string | undefined {\n for (const format of formats) {\n const fullPath = path.join(dir, format)\n // eslint-disable-next-line no-sync\n if (fs.existsSync(fullPath) && fs.statSync(fullPath).isFile()) {\n return fullPath\n }\n }\n const parentDir = path.dirname(dir)\n if (parentDir !== dir && (!options?.rootDir || parentDir.startsWith(options?.rootDir))) {\n return lookupFile(parentDir, formats, options)\n }\n\n return 
undefined\n}\n"],"names":["debugIt","path","fs","require$$0","require$$1","require$$2","require$$3","version","debug","mainModule","parse","expand"],"mappings":";;;;;;AAEa,MAAA,QAAQA,6BAAQ,YAAY;ACOlC,SAAS,eAAe,KAAqB;AAC9C,MAAA;AACK,WAAA,mBAAmB,GAAG,KAAK;AAAA,WAC3B,KAAK;AACZ,UAAM,IAAI,MAAM;AAAA,EAAmD,IAAI,OAAO,EAAE;AAAA,EAAA;AAEpF;AAEA,SAAS,gBAAgB,UAAkB,YAA6B;AACjD,SAAA;AAAA,IACnB,WAAWC,cAAK,QAAA,KAAK,UAAU,GAAG,UAAU,KAAK,CAAC;AAAA,IAClD,WAAWA,cAAK,QAAA,KAAK,UAAU,GAAG,UAAU,KAAK,CAAC;AAAA,IAClD,qBAAqB,QAAQ;AAAA,EAAA,EAGX,KAAK,OAAO;AAClC;AAEA,SAAS,mBAAmB,UAAkB,aAAa,GAAmB;AAExE,MAAA,gBAAgB,UADD,eACqB;AAC/B,WAAA;AAGT,QAAM,YAAYA,cAAA,QAAK,QAAQ,UAAU,IAAI;AACzC,SAAA,cAAc,YAAY,aAAa,KAElC,KAGF,mBAAmB,WAAW,aAAa,CAAC;AACrD;AAEA,SAAS,qBAAqB,UAA2B;AACnD,MAAA;AACF,UAAM,UAAUC,YAAAA,QAAG,aAAaD,cAAAA,QAAK,KAAK,UAAU,aAAa,GAAG,MAAM,GAEpE,SAAS,CAAA,CADI,KAAK,MAAM,OAAO,GACF;AACnC,WAAI,UACF,MAAM,qCAAqC,QAAQ,GAE9C;AAAA,EAAA,QACK;AACL,WAAA;AAAA,EAAA;AAEX;AAEA,SAAS,WAAW,UAA2B;AACtC,SAAAC,YAAA,QAAG,WAAW,QAAQ;AAC/B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC1DA,QAAMA,MAAKC,sBAAAA,SACLF,QAAOG,oBAAAA,SACP,KAAKC,sBAAAA,SACL,SAASC,oBAAAA,SAGTC,WAFc,WAEQ,SAEtB,OAAO;AAGb,WAAS,MAAO,KAAK;AACnB,UAAM,MAAM,CAAA;AAGZ,QAAI,QAAQ,IAAI,SAAQ;AAGxB,YAAQ,MAAM,QAAQ,WAAW;AAAA,CAAI;AAErC,QAAI;AACJ,YAAQ,QAAQ,KAAK,KAAK,KAAK,MAAM,QAAM;AACzC,YAAM,MAAM,MAAM,CAAC;AAGnB,UAAI,QAAS,MAAM,CAAC,KAAK;AAGzB,cAAQ,MAAM,KAAI;AAGlB,YAAM,aAAa,MAAM,CAAC;AAG1B,cAAQ,MAAM,QAAQ,0BAA0B,IAAI,GAGhD,eAAe,QACjB,QAAQ,MAAM,QAAQ,QAAQ;AAAA,CAAI,GAClC,QAAQ,MAAM,QAAQ,QAAQ,IAAI,IAIpC,IAAI,GAAG,IAAI;AAAA,IACf;AAEE,WAAO;AAAA,EACT;AAEA,WAAS,YAAa,SAAS;AAC7B,UAAM,YAAY,WAAW,OAAO,GAG9B,SAAS,aAAa,aAAa,EAAE,MAAM,UAAW,CAAA;AAC5D,QAAI,CAAC,OAAO,QAAQ;AAClB,YAAM,MAAM,IAAI,MAAM,8BAA8B,SAAS,wBAAwB;AACrF,gBAAI,OAAO,gBACL;AAAA,IACV;AAIE,UAAM,OAAO,WAAW,OAAO,EAAE,MAAM,GAAG,GACpC,SAAS,KAAK;AAEpB,QAAI;AACJ,aAAS,IAAI,GAAG,IAAI,QAAQ;AAC1B,UAAI;AAEF,cAAM,MAAM,KAAK,CAAC,EAAE,KAAI,GAGlB,QAAQ,cAAc,QAAQ,GAAG;AAGvC,oBAAY,aAAa,QAAQ,MAAM,YAAY,MAAM,GAAG;AAE5D;AAAA,MACD,SAAQ,OAAO;AAEd,YAAI,IAAI,
KAAK;AACX,gBAAM;AAAA,MAGd;AAIE,WAAO,aAAa,MAAM,SAAS;AAAA,EACrC;AAEA,WAAS,KAAM,SAAS;AACtB,YAAQ,IAAI,WAAWA,QAAO,WAAW,OAAO,EAAE;AAAA,EACpD;AAEA,WAAS,MAAO,SAAS;AACvB,YAAQ,IAAI,WAAWA,QAAO,WAAW,OAAO,EAAE;AAAA,EACpD;AAEA,WAAS,OAAQ,SAAS;AACxB,YAAQ,IAAI,WAAWA,QAAO,YAAY,OAAO,EAAE;AAAA,EACrD;AAEA,WAAS,WAAY,SAAS;AAE5B,WAAI,WAAW,QAAQ,cAAc,QAAQ,WAAW,SAAS,IACxD,QAAQ,aAIb,QAAQ,IAAI,cAAc,QAAQ,IAAI,WAAW,SAAS,IACrD,QAAQ,IAAI,aAId;AAAA,EACT;AAEA,WAAS,cAAe,QAAQ,WAAW;AAEzC,QAAI;AACJ,QAAI;AACF,YAAM,IAAI,IAAI,SAAS;AAAA,IACxB,SAAQ,OAAO;AACd,UAAI,MAAM,SAAS,mBAAmB;AACpC,cAAM,MAAM,IAAI,MAAM,4IAA4I;AAClK,kBAAI,OAAO,sBACL;AAAA,MACZ;AAEI,YAAM;AAAA,IACV;AAGE,UAAM,MAAM,IAAI;AAChB,QAAI,CAAC,KAAK;AACR,YAAM,MAAM,IAAI,MAAM,sCAAsC;AAC5D,gBAAI,OAAO,sBACL;AAAA,IACV;AAGE,UAAM,cAAc,IAAI,aAAa,IAAI,aAAa;AACtD,QAAI,CAAC,aAAa;AAChB,YAAM,MAAM,IAAI,MAAM,8CAA8C;AACpE,gBAAI,OAAO,sBACL;AAAA,IACV;AAGE,UAAM,iBAAiB,gBAAgB,YAAY,YAAW,CAAE,IAC1D,aAAa,OAAO,OAAO,cAAc;AAC/C,QAAI,CAAC,YAAY;AACf,YAAM,MAAM,IAAI,MAAM,2DAA2D,cAAc,2BAA2B;AAC1H,gBAAI,OAAO,gCACL;AAAA,IACV;AAEE,WAAO,EAAE,YAAY,IAAG;AAAA,EAC1B;AAEA,WAAS,WAAY,SAAS;AAC5B,QAAI,oBAAoB;AAExB,QAAI,WAAW,QAAQ,QAAQ,QAAQ,KAAK,SAAS;AACnD,UAAI,MAAM,QAAQ,QAAQ,IAAI;AAC5B,mBAAW,YAAY,QAAQ;AAC7B,UAAIL,IAAG,WAAW,QAAQ,MACxB,oBAAoB,SAAS,SAAS,QAAQ,IAAI,WAAW,GAAG,QAAQ;AAAA;AAI5E,4BAAoB,QAAQ,KAAK,SAAS,QAAQ,IAAI,QAAQ,OAAO,GAAG,QAAQ,IAAI;AAAA;AAGtF,0BAAoBD,MAAK,QAAQ,QAAQ,IAAK,GAAE,YAAY;AAG9D,WAAIC,IAAG,WAAW,iBAAiB,IAC1B,oBAGF;AAAA,EACT;AAEA,WAAS,aAAc,SAAS;AAC9B,WAAO,QAAQ,CAAC,MAAM,MAAMD,MAAK,KAAK,GAAG,QAAS,GAAE,QAAQ,MAAM,CAAC,CAAC,IAAI;AAAA,EAC1E;AAEA,WAAS,aAAc,SAAS;AAC9B,SAAK,uCAAuC;AAE5C,UAAM,SAAS,aAAa,YAAY,OAAO;AAE/C,QAAI,aAAa,QAAQ;AACzB,WAAI,WAAW,QAAQ,cAAc,SACnC,aAAa,QAAQ,aAGvB,aAAa,SAAS,YAAY,QAAQ,OAAO,GAE1C,EAAE,OAAM;AAAA,EACjB;AAEA,WAAS,aAAc,SAAS;AAC9B,UAAM,aAAaA,MAAK,QAAQ,QAAQ,IAAK,GAAE,MAAM;AACrD,QAAI,WAAW;AACf,UAAMO,SAAQ,GAAQ,WAAW,QAAQ;AAEzC,IAAI,WAAW,QAAQ,WACrB,WAAW,QAAQ,WAEfA,UACF,OAAO,oDAAoD;AAI/D,QAAI,cAAc,CAAC,UAAU;AAC7B,QAAI,WAAW,QAAQ;AACrB,UAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI;AAC7B,sBA
Ac,CAAC,aAAa,QAAQ,IAAI,CAAC;AAAA,WACpC;AACL,sBAAc,CAAE;AAChB,mBAAW,YAAY,QAAQ;AAC7B,sBAAY,KAAK,aAAa,QAAQ,CAAC;AAAA,MAE/C;AAKE,QAAI;AACJ,UAAM,YAAY,CAAA;AAClB,eAAWP,SAAQ;AACjB,UAAI;AAEF,cAAM,SAAS,aAAa,MAAMC,IAAG,aAAaD,OAAM,EAAE,UAAU,CAAC;AAErE,qBAAa,SAAS,WAAW,QAAQ,OAAO;AAAA,MACjD,SAAQ,GAAG;AACV,QAAIO,UACF,OAAO,kBAAkBP,KAAI,IAAI,EAAE,OAAO,EAAE,GAE9C,YAAY;AAAA,MAClB;AAGE,QAAI,aAAa,QAAQ;AAOzB,WANI,WAAW,QAAQ,cAAc,SACnC,aAAa,QAAQ,aAGvB,aAAa,SAAS,YAAY,WAAW,OAAO,GAEhD,YACK,EAAE,QAAQ,WAAW,OAAO,UAAS,IAErC,EAAE,QAAQ,UAAS;AAAA,EAE9B;AAGA,WAAS,OAAQ,SAAS;AAExB,QAAI,WAAW,OAAO,EAAE,WAAW;AACjC,aAAO,aAAa,aAAa,OAAO;AAG1C,UAAM,YAAY,WAAW,OAAO;AAGpC,WAAK,YAME,aAAa,aAAa,OAAO,KALtC,MAAM,+DAA+D,SAAS,+BAA+B,GAEtG,aAAa,aAAa,OAAO;AAAA,EAI5C;AAEA,WAAS,QAAS,WAAW,QAAQ;AACnC,UAAM,MAAM,OAAO,KAAK,OAAO,MAAM,GAAG,GAAG,KAAK;AAChD,QAAI,aAAa,OAAO,KAAK,WAAW,QAAQ;AAEhD,UAAM,QAAQ,WAAW,SAAS,GAAG,EAAE,GACjC,UAAU,WAAW,SAAS,GAAG;AACvC,iBAAa,WAAW,SAAS,IAAI,GAAG;AAExC,QAAI;AACF,YAAM,SAAS,OAAO,iBAAiB,eAAe,KAAK,KAAK;AAChE,oBAAO,WAAW,OAAO,GAClB,GAAG,OAAO,OAAO,UAAU,CAAC,GAAG,OAAO,OAAO;AAAA,IACrD,SAAQ,OAAO;AACd,YAAM,UAAU,iBAAiB,YAC3B,mBAAmB,MAAM,YAAY,sBACrC,mBAAmB,MAAM,YAAY;AAE3C,UAAI,WAAW,kBAAkB;AAC/B,cAAM,MAAM,IAAI,MAAM,6DAA6D;AACnF,kBAAI,OAAO,sBACL;AAAA,MACP,WAAU,kBAAkB;AAC3B,cAAM,MAAM,IAAI,MAAM,iDAAiD;AACvE,kBAAI,OAAO,qBACL;AAAA,MACZ;AACM,cAAM;AAAA,IAEZ;AAAA,EACA;AAGA,WAAS,SAAU,YAAY,QAAQ,UAAU,CAAA,GAAI;AACnD,UAAMO,SAAQ,GAAQ,WAAW,QAAQ,QACnC,WAAW,GAAQ,WAAW,QAAQ;AAE5C,QAAI,OAAO,UAAW,UAAU;AAC9B,YAAM,MAAM,IAAI,MAAM,gFAAgF;AACtG,gBAAI,OAAO,mBACL;AAAA,IACV;AAGE,eAAW,OAAO,OAAO,KAAK,MAAM;AAClC,MAAI,OAAO,UAAU,eAAe,KAAK,YAAY,GAAG,KAClD,aAAa,OACf,WAAW,GAAG,IAAI,OAAO,GAAG,IAG1BA,UAEA,OADE,aAAa,KACR,IAAI,GAAG,6CAEP,IAAI,GAAG,8CAF0C,KAM5D,WAAW,GAAG,IAAI,OAAO,GAAG;AAAA,EAGlC;AAEA,QAAM,eAAe;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAE2BC,gBAAA,QAAA,eAAG,aAAa,cAChBA,OAAA,QAAA,eAAG,aAAa,cACjBA,OAAA,QAAA,cAAG,aAAa,aACrBA,OAAA,QAAA,SAAG,aAAa,QACfA,OAAA,QAAA,UAAG,aAAa,SAClBA,OAAA,QAAA,QAAG,aAAa,OAC
bA,OAAA,QAAA,WAAG,aAAa,UAEvCA,OAAA,UAAiB;;;;;;ACtWjB,WAAS,aAAc,UAAU,aAAa,QAAQ;AACpD,UAAM,UAAU,SAAS,MAAM,+BAA+B,KAAK,CAAA;AAEnE,WAAO,QAAQ,OAAO,SAAU,QAAQ,OAAO,OAAO;AACpD,YAAM,QAAQ,mCAAmC,KAAK,KAAK;AAC3D,UAAI,CAAC,SAAS,MAAM,WAAW;AAC7B,eAAO;AAGT,YAAM,SAAS,MAAM,CAAC;AAEtB,UAAI,OAAO;AAEX,UAAI,WAAW;AACb,sBAAc,MAAM,CAAC,GACrB,QAAQ,YAAY,QAAQ,OAAO,GAAG;AAAA,WACjC;AACL,cAAM,WAAW,MAAM,CAAC,EAAE,MAAM,IAAI,GAC9B,MAAM,SAAS,CAAC;AAQtB,YAPA,cAAc,MAAM,CAAC,EAAE,UAAU,OAAO,MAAM,GAE9C,QAAQ,OAAO,UAAU,eAAe,KAAK,aAAa,GAAG,IACzD,YAAY,GAAG,IACd,OAAO,OAAO,GAAG,KAAK,SAAS,CAAC,KAAK,IAGtC,SAAS,SAAS,KAAK,OAAO;AAChC,gBAAM,gBAAgB,QAAQ,QAAQ,CAAC;AACvC,kBAAQ,QAAQ,CAAC,IAAI,IAErB,SAAS,OAAO,QAAQ,eAAe,EAAE;AAAA,QACjD;AAEM,gBAAQ,aAAa,OAAO,aAAa,MAAM;AAAA,MACrD;AAEI,aAAO,OAAO,QAAQ,aAAa,KAAK;AAAA,IAC5C,GAAK,QAAQ;AAAA,EACb;AAEA,WAAS,OAAQ,QAAQ;AAEvB,UAAM,cAAc,OAAO,mBAAmB,CAAA,IAAK,QAAQ;AAE3D,eAAW,aAAa,OAAO,QAAQ;AACrC,YAAM,QAAQ,OAAO,UAAU,eAAe,KAAK,aAAa,SAAS,IAAI,YAAY,SAAS,IAAI,OAAO,OAAO,SAAS;AAE7H,aAAO,OAAO,SAAS,IAAI,aAAa,OAAO,aAAa,MAAM;AAAA,IACtE;AAEE,eAAW,cAAc,OAAO;AAC9B,kBAAY,UAAU,IAAI,OAAO,OAAO,UAAU;AAGpD,WAAO;AAAA,EACT;AAEA,cAAA,SAAwB;;;AC1CjB,SAAS,QACd,MACA,QACA,WAAqB,CAAC,OAAO,GACL;AACxB,MAAI,SAAS;AACX,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAGI,QAAA,MAA8B,CAAC,GAC/B,WAAW;AAAA;AAAA,IACK;AAAA;AAAA,IACF;AAAA;AAAA,IACD,QAAQ,IAAI;AAAA;AAAA,IACN,QAAQ,IAAI;AAAA,EAAA,GAG/B,SAAS,OAAO;AAAA,IACpB,SAAS,QAAQ,CAAC,SAAS;AACzB,YAAM,UAAU,WAAW,QAAQ,CAAC,IAAI,GAAG;AAAA,QACzC,SAAS;AAAA,MAAA,CACV;AACI,aAAA,UACE,OAAO,QAAQC,cAAM,MAAAR,YAAA,QAAG,aAAa,OAAO,CAAC,CAAC,IADhC,CAAC;AAAA,IAEvB,CAAA;AAAA,EACH;AAGI,SAAO,YAAY,QAAQ,IAAI,uBAAuB,WACxD,QAAQ,IAAI,qBAAqB,OAAO,WAGtC,OAAO,WAAW,QAAQ,IAAI,YAAY,WAC5C,QAAQ,IAAI,UAAU,OAAO,UAE3B,OAAO,gBAAgB,QAAQ,IAAI,iBAAiB,WACtD,QAAQ,IAAI,eAAe,OAAO;AAGhC,MAAA;AAEKS,gBAAA,OAAA,EAAC,QAAO;AAAA,WACR,GAAG;AAGN,UAAA,EAAE,QAAQ,SAAS,OAAO,IACtB,IAAI,MAAM,wEAAwE,IAEpF;AAAA,EAAA;AAIR,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM;AAC1C,aAAS,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,MAClD,IAAI,GAAG,IAAI;AAMf,aAAW,OAAO,QAAQ;AACpB,aAA
S,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,MAClD,IAAI,GAAG,IAAI,QAAQ,IAAI,GAAG;AAIvB,SAAA;AACT;AAEA,SAAS,WACP,KACA,SACA,SAGoB;AACpB,aAAW,UAAU,SAAS;AAC5B,UAAM,WAAWV,cAAA,QAAK,KAAK,KAAK,MAAM;AAElC,QAAAC,YAAA,QAAG,WAAW,QAAQ,KAAKA,oBAAG,SAAS,QAAQ,EAAE,OAAO;AACnD,aAAA;AAAA,EAAA;AAGL,QAAA,YAAYD,cAAAA,QAAK,QAAQ,GAAG;AAC9B,MAAA,cAAc,QAAQ,CAAC,SAAS,WAAW,UAAU,WAAW,SAAS,OAAO;AAC3E,WAAA,WAAW,WAAW,SAAS,OAAO;AAIjD;;;;;;;;","x_google_ignoreList":[2,3]}
1
+ {"version":3,"file":"loadEnv.js","sources":["../../src/debug.ts","../../src/util/resolveRootDir.ts","../../../../../node_modules/.pnpm/dotenv@16.4.7/node_modules/dotenv/lib/main.js","../../../../../node_modules/.pnpm/dotenv-expand@9.0.0/node_modules/dotenv-expand/lib/main.js","../../src/util/loadEnv.ts"],"sourcesContent":["import debugIt from 'debug'\n\nexport const debug = debugIt('sanity:cli')\n","/* eslint-disable no-sync */\nimport fs from 'node:fs'\nimport path from 'node:path'\n\nimport {debug} from '../debug'\n\n/**\n * Resolve project root directory, falling back to cwd if it cannot be found\n */\nexport function resolveRootDir(cwd: string): string {\n try {\n return resolveProjectRoot(cwd) || cwd\n } catch (err) {\n throw new Error(`Error occurred trying to resolve project root:\\n${err.message}`)\n }\n}\n\nfunction hasSanityConfig(basePath: string, configName: string): boolean {\n const buildConfigs = [\n fileExists(path.join(basePath, `${configName}.js`)),\n fileExists(path.join(basePath, `${configName}.ts`)),\n isSanityV2StudioRoot(basePath),\n ]\n\n return buildConfigs.some(Boolean)\n}\n\nfunction resolveProjectRoot(basePath: string, iterations = 0): string | false {\n const configName = 'sanity.config'\n if (hasSanityConfig(basePath, configName)) {\n return basePath\n }\n\n const parentDir = path.resolve(basePath, '..')\n if (parentDir === basePath || iterations > 30) {\n // Reached root (or max depth), give up\n return false\n }\n\n return resolveProjectRoot(parentDir, iterations + 1)\n}\n\nfunction isSanityV2StudioRoot(basePath: string): boolean {\n try {\n const content = fs.readFileSync(path.join(basePath, 'sanity.json'), 'utf8')\n const sanityJson = JSON.parse(content)\n const isRoot = Boolean(sanityJson?.root)\n if (isRoot) {\n debug('Found Sanity v2 studio root at %s', basePath)\n }\n return isRoot\n } catch (err) {\n return false\n }\n}\n\nfunction fileExists(filePath: string): boolean {\n return fs.existsSync(filePath)\n}\n","const fs = 
require('fs')\nconst path = require('path')\nconst os = require('os')\nconst crypto = require('crypto')\nconst packageJson = require('../package.json')\n\nconst version = packageJson.version\n\nconst LINE = /(?:^|^)\\s*(?:export\\s+)?([\\w.-]+)(?:\\s*=\\s*?|:\\s+?)(\\s*'(?:\\\\'|[^'])*'|\\s*\"(?:\\\\\"|[^\"])*\"|\\s*`(?:\\\\`|[^`])*`|[^#\\r\\n]+)?\\s*(?:#.*)?(?:$|$)/mg\n\n// Parse src into an Object\nfunction parse (src) {\n const obj = {}\n\n // Convert buffer to string\n let lines = src.toString()\n\n // Convert line breaks to same format\n lines = lines.replace(/\\r\\n?/mg, '\\n')\n\n let match\n while ((match = LINE.exec(lines)) != null) {\n const key = match[1]\n\n // Default undefined or null to empty string\n let value = (match[2] || '')\n\n // Remove whitespace\n value = value.trim()\n\n // Check if double quoted\n const maybeQuote = value[0]\n\n // Remove surrounding quotes\n value = value.replace(/^(['\"`])([\\s\\S]*)\\1$/mg, '$2')\n\n // Expand newlines if double quoted\n if (maybeQuote === '\"') {\n value = value.replace(/\\\\n/g, '\\n')\n value = value.replace(/\\\\r/g, '\\r')\n }\n\n // Add to object\n obj[key] = value\n }\n\n return obj\n}\n\nfunction _parseVault (options) {\n const vaultPath = _vaultPath(options)\n\n // Parse .env.vault\n const result = DotenvModule.configDotenv({ path: vaultPath })\n if (!result.parsed) {\n const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`)\n err.code = 'MISSING_DATA'\n throw err\n }\n\n // handle scenario for comma separated keys - for use with key rotation\n // example: DOTENV_KEY=\"dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=prod,dotenv://:key_7890@dotenvx.com/vault/.env.vault?environment=prod\"\n const keys = _dotenvKey(options).split(',')\n const length = keys.length\n\n let decrypted\n for (let i = 0; i < length; i++) {\n try {\n // Get full key\n const key = keys[i].trim()\n\n // Get instructions for decrypt\n const attrs = _instructions(result, key)\n\n // 
Decrypt\n decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key)\n\n break\n } catch (error) {\n // last key\n if (i + 1 >= length) {\n throw error\n }\n // try next key\n }\n }\n\n // Parse decrypted .env string\n return DotenvModule.parse(decrypted)\n}\n\nfunction _log (message) {\n console.log(`[dotenv@${version}][INFO] ${message}`)\n}\n\nfunction _warn (message) {\n console.log(`[dotenv@${version}][WARN] ${message}`)\n}\n\nfunction _debug (message) {\n console.log(`[dotenv@${version}][DEBUG] ${message}`)\n}\n\nfunction _dotenvKey (options) {\n // prioritize developer directly setting options.DOTENV_KEY\n if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {\n return options.DOTENV_KEY\n }\n\n // secondary infra already contains a DOTENV_KEY environment variable\n if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {\n return process.env.DOTENV_KEY\n }\n\n // fallback to empty string\n return ''\n}\n\nfunction _instructions (result, dotenvKey) {\n // Parse DOTENV_KEY. Format is a URI\n let uri\n try {\n uri = new URL(dotenvKey)\n } catch (error) {\n if (error.code === 'ERR_INVALID_URL') {\n const err = new Error('INVALID_DOTENV_KEY: Wrong format. 
Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n throw error\n }\n\n // Get decrypt key\n const key = uri.password\n if (!key) {\n const err = new Error('INVALID_DOTENV_KEY: Missing key part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get environment\n const environment = uri.searchParams.get('environment')\n if (!environment) {\n const err = new Error('INVALID_DOTENV_KEY: Missing environment part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get ciphertext payload\n const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`\n const ciphertext = result.parsed[environmentKey] // DOTENV_VAULT_PRODUCTION\n if (!ciphertext) {\n const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`)\n err.code = 'NOT_FOUND_DOTENV_ENVIRONMENT'\n throw err\n }\n\n return { ciphertext, key }\n}\n\nfunction _vaultPath (options) {\n let possibleVaultPath = null\n\n if (options && options.path && options.path.length > 0) {\n if (Array.isArray(options.path)) {\n for (const filepath of options.path) {\n if (fs.existsSync(filepath)) {\n possibleVaultPath = filepath.endsWith('.vault') ? filepath : `${filepath}.vault`\n }\n }\n } else {\n possibleVaultPath = options.path.endsWith('.vault') ? options.path : `${options.path}.vault`\n }\n } else {\n possibleVaultPath = path.resolve(process.cwd(), '.env.vault')\n }\n\n if (fs.existsSync(possibleVaultPath)) {\n return possibleVaultPath\n }\n\n return null\n}\n\nfunction _resolveHome (envPath) {\n return envPath[0] === '~' ? 
path.join(os.homedir(), envPath.slice(1)) : envPath\n}\n\nfunction _configVault (options) {\n _log('Loading env from encrypted .env.vault')\n\n const parsed = DotenvModule._parseVault(options)\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsed, options)\n\n return { parsed }\n}\n\nfunction configDotenv (options) {\n const dotenvPath = path.resolve(process.cwd(), '.env')\n let encoding = 'utf8'\n const debug = Boolean(options && options.debug)\n\n if (options && options.encoding) {\n encoding = options.encoding\n } else {\n if (debug) {\n _debug('No encoding is specified. UTF-8 is used by default')\n }\n }\n\n let optionPaths = [dotenvPath] // default, look for .env\n if (options && options.path) {\n if (!Array.isArray(options.path)) {\n optionPaths = [_resolveHome(options.path)]\n } else {\n optionPaths = [] // reset default\n for (const filepath of options.path) {\n optionPaths.push(_resolveHome(filepath))\n }\n }\n }\n\n // Build the parsed data in a temporary object (because we need to return it). 
Once we have the final\n // parsed data, we will combine it with process.env (or options.processEnv if provided).\n let lastError\n const parsedAll = {}\n for (const path of optionPaths) {\n try {\n // Specifying an encoding returns a string instead of a buffer\n const parsed = DotenvModule.parse(fs.readFileSync(path, { encoding }))\n\n DotenvModule.populate(parsedAll, parsed, options)\n } catch (e) {\n if (debug) {\n _debug(`Failed to load ${path} ${e.message}`)\n }\n lastError = e\n }\n }\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsedAll, options)\n\n if (lastError) {\n return { parsed: parsedAll, error: lastError }\n } else {\n return { parsed: parsedAll }\n }\n}\n\n// Populates process.env from .env file\nfunction config (options) {\n // fallback to original dotenv if DOTENV_KEY is not set\n if (_dotenvKey(options).length === 0) {\n return DotenvModule.configDotenv(options)\n }\n\n const vaultPath = _vaultPath(options)\n\n // dotenvKey exists but .env.vault file does not exist\n if (!vaultPath) {\n _warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. 
Did you forget to build it?`)\n\n return DotenvModule.configDotenv(options)\n }\n\n return DotenvModule._configVault(options)\n}\n\nfunction decrypt (encrypted, keyStr) {\n const key = Buffer.from(keyStr.slice(-64), 'hex')\n let ciphertext = Buffer.from(encrypted, 'base64')\n\n const nonce = ciphertext.subarray(0, 12)\n const authTag = ciphertext.subarray(-16)\n ciphertext = ciphertext.subarray(12, -16)\n\n try {\n const aesgcm = crypto.createDecipheriv('aes-256-gcm', key, nonce)\n aesgcm.setAuthTag(authTag)\n return `${aesgcm.update(ciphertext)}${aesgcm.final()}`\n } catch (error) {\n const isRange = error instanceof RangeError\n const invalidKeyLength = error.message === 'Invalid key length'\n const decryptionFailed = error.message === 'Unsupported state or unable to authenticate data'\n\n if (isRange || invalidKeyLength) {\n const err = new Error('INVALID_DOTENV_KEY: It must be 64 characters long (or more)')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n } else if (decryptionFailed) {\n const err = new Error('DECRYPTION_FAILED: Please check your DOTENV_KEY')\n err.code = 'DECRYPTION_FAILED'\n throw err\n } else {\n throw error\n }\n }\n}\n\n// Populate process.env with parsed values\nfunction populate (processEnv, parsed, options = {}) {\n const debug = Boolean(options && options.debug)\n const override = Boolean(options && options.override)\n\n if (typeof parsed !== 'object') {\n const err = new Error('OBJECT_REQUIRED: Please check the processEnv argument being passed to populate')\n err.code = 'OBJECT_REQUIRED'\n throw err\n }\n\n // Set process.env\n for (const key of Object.keys(parsed)) {\n if (Object.prototype.hasOwnProperty.call(processEnv, key)) {\n if (override === true) {\n processEnv[key] = parsed[key]\n }\n\n if (debug) {\n if (override === true) {\n _debug(`\"${key}\" is already defined and WAS overwritten`)\n } else {\n _debug(`\"${key}\" is already defined and was NOT overwritten`)\n }\n }\n } else {\n processEnv[key] = parsed[key]\n }\n 
}\n}\n\nconst DotenvModule = {\n configDotenv,\n _configVault,\n _parseVault,\n config,\n decrypt,\n parse,\n populate\n}\n\nmodule.exports.configDotenv = DotenvModule.configDotenv\nmodule.exports._configVault = DotenvModule._configVault\nmodule.exports._parseVault = DotenvModule._parseVault\nmodule.exports.config = DotenvModule.config\nmodule.exports.decrypt = DotenvModule.decrypt\nmodule.exports.parse = DotenvModule.parse\nmodule.exports.populate = DotenvModule.populate\n\nmodule.exports = DotenvModule\n","'use strict'\n\nfunction _interpolate (envValue, environment, config) {\n const matches = envValue.match(/(.?\\${*[\\w]*(?::-[\\w/]*)?}*)/g) || []\n\n return matches.reduce(function (newEnv, match, index) {\n const parts = /(.?)\\${*([\\w]*(?::-[\\w/]*)?)?}*/g.exec(match)\n if (!parts || parts.length === 0) {\n return newEnv\n }\n\n const prefix = parts[1]\n\n let value, replacePart\n\n if (prefix === '\\\\') {\n replacePart = parts[0]\n value = replacePart.replace('\\\\$', '$')\n } else {\n const keyParts = parts[2].split(':-')\n const key = keyParts[0]\n replacePart = parts[0].substring(prefix.length)\n // process.env value 'wins' over .env file's value\n value = Object.prototype.hasOwnProperty.call(environment, key)\n ? environment[key]\n : (config.parsed[key] || keyParts[1] || '')\n\n // If the value is found, remove nested expansions.\n if (keyParts.length > 1 && value) {\n const replaceNested = matches[index + 1]\n matches[index + 1] = ''\n\n newEnv = newEnv.replace(replaceNested, '')\n }\n // Resolve recursive interpolations\n value = _interpolate(value, environment, config)\n }\n\n return newEnv.replace(replacePart, value)\n }, envValue)\n}\n\nfunction expand (config) {\n // if ignoring process.env, use a blank object\n const environment = config.ignoreProcessEnv ? {} : process.env\n\n for (const configKey in config.parsed) {\n const value = Object.prototype.hasOwnProperty.call(environment, configKey) ? 
environment[configKey] : config.parsed[configKey]\n\n config.parsed[configKey] = _interpolate(value, environment, config)\n }\n\n for (const processKey in config.parsed) {\n environment[processKey] = config.parsed[processKey]\n }\n\n return config\n}\n\nmodule.exports.expand = expand\n","/**\n * This is an \"inlined\" version of Vite's `loadEnv` function,\n * simplified somewhat to only support our use case.\n *\n * Ideally we'd just use `loadEnv` from Vite, but importing it\n * causes bundling issues due to node APIs and downstream dependencies.\n *\n * Vite is MIT licensed, copyright (c) Yuxi (Evan) You and Vite contributors.\n */\n\n/* eslint-disable no-process-env */\nimport fs from 'node:fs'\nimport path from 'node:path'\n\nimport {parse} from 'dotenv'\nimport {expand} from 'dotenv-expand'\n\nexport function loadEnv(\n mode: string,\n envDir: string,\n prefixes: string[] = ['VITE_'],\n): Record<string, string> {\n if (mode === 'local') {\n throw new Error(\n `\"local\" cannot be used as a mode name because it conflicts with ` +\n `the .local postfix for .env files.`,\n )\n }\n\n const env: Record<string, string> = {}\n const envFiles = [\n /** default file */ `.env`,\n /** local file */ `.env.local`,\n /** mode file */ `.env.${mode}`,\n /** mode local file */ `.env.${mode}.local`,\n ]\n\n const parsed = Object.fromEntries(\n envFiles.flatMap((file) => {\n const envPath = lookupFile(envDir, [file], {\n rootDir: envDir,\n })\n if (!envPath) return []\n return Object.entries(parse(fs.readFileSync(envPath)))\n }),\n )\n\n // test NODE_ENV override before expand as otherwise process.env.NODE_ENV would override this\n if (parsed.NODE_ENV && process.env.VITE_USER_NODE_ENV === undefined) {\n process.env.VITE_USER_NODE_ENV = parsed.NODE_ENV\n }\n // support BROWSER and BROWSER_ARGS env variables\n if (parsed.BROWSER && process.env.BROWSER === undefined) {\n process.env.BROWSER = parsed.BROWSER\n }\n if (parsed.BROWSER_ARGS && process.env.BROWSER_ARGS === undefined) {\n 
process.env.BROWSER_ARGS = parsed.BROWSER_ARGS\n }\n\n try {\n // let environment variables use each other\n expand({parsed})\n } catch (e) {\n // custom error handling until https://github.com/motdotla/dotenv-expand/issues/65 is fixed upstream\n // check for message \"TypeError: Cannot read properties of undefined (reading 'split')\"\n if (e.message.includes('split')) {\n throw new Error('dotenv-expand failed to expand env vars. Maybe you need to escape `$`?')\n }\n throw e\n }\n\n // only keys that start with prefix are exposed to client\n for (const [key, value] of Object.entries(parsed)) {\n if (prefixes.some((prefix) => key.startsWith(prefix))) {\n env[key] = value\n }\n }\n\n // check if there are actual env variables starting with VITE_*\n // these are typically provided inline and should be prioritized\n for (const key in process.env) {\n if (prefixes.some((prefix) => key.startsWith(prefix))) {\n env[key] = process.env[key] as string\n }\n }\n\n return env\n}\n\nfunction lookupFile(\n dir: string,\n formats: string[],\n options?: {\n rootDir?: string\n },\n): string | undefined {\n for (const format of formats) {\n const fullPath = path.join(dir, format)\n // eslint-disable-next-line no-sync\n if (fs.existsSync(fullPath) && fs.statSync(fullPath).isFile()) {\n return fullPath\n }\n }\n const parentDir = path.dirname(dir)\n if (parentDir !== dir && (!options?.rootDir || parentDir.startsWith(options?.rootDir))) {\n return lookupFile(parentDir, formats, options)\n }\n\n return 
undefined\n}\n"],"names":["debugIt","path","fs","require$$0","require$$1","require$$2","require$$3","version","debug","mainModule","parse","expand"],"mappings":";;;;;;AAEa,MAAA,QAAQA,yBAAQ,YAAY;ACOlC,SAAS,eAAe,KAAqB;AAC9C,MAAA;AACK,WAAA,mBAAmB,GAAG,KAAK;AAAA,WAC3B,KAAK;AACZ,UAAM,IAAI,MAAM;AAAA,EAAmD,IAAI,OAAO,EAAE;AAAA,EAAA;AAEpF;AAEA,SAAS,gBAAgB,UAAkB,YAA6B;AACjD,SAAA;AAAA,IACnB,WAAWC,cAAK,QAAA,KAAK,UAAU,GAAG,UAAU,KAAK,CAAC;AAAA,IAClD,WAAWA,cAAK,QAAA,KAAK,UAAU,GAAG,UAAU,KAAK,CAAC;AAAA,IAClD,qBAAqB,QAAQ;AAAA,EAAA,EAGX,KAAK,OAAO;AAClC;AAEA,SAAS,mBAAmB,UAAkB,aAAa,GAAmB;AAExE,MAAA,gBAAgB,UADD,eACqB;AAC/B,WAAA;AAGT,QAAM,YAAYA,cAAA,QAAK,QAAQ,UAAU,IAAI;AACzC,SAAA,cAAc,YAAY,aAAa,KAElC,KAGF,mBAAmB,WAAW,aAAa,CAAC;AACrD;AAEA,SAAS,qBAAqB,UAA2B;AACnD,MAAA;AACF,UAAM,UAAUC,YAAAA,QAAG,aAAaD,cAAAA,QAAK,KAAK,UAAU,aAAa,GAAG,MAAM,GAEpE,SAAS,CAAA,CADI,KAAK,MAAM,OAAO,GACF;AACnC,WAAI,UACF,MAAM,qCAAqC,QAAQ,GAE9C;AAAA,EAAA,QACK;AACL,WAAA;AAAA,EAAA;AAEX;AAEA,SAAS,WAAW,UAA2B;AACtC,SAAAC,YAAA,QAAG,WAAW,QAAQ;AAC/B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC1DA,QAAMA,MAAKC,sBAAAA,SACLF,QAAOG,oBAAAA,SACP,KAAKC,sBAAAA,SACL,SAASC,oBAAAA,SAGTC,WAFc,WAEQ,SAEtB,OAAO;AAGb,WAAS,MAAO,KAAK;AACnB,UAAM,MAAM,CAAA;AAGZ,QAAI,QAAQ,IAAI,SAAQ;AAGxB,YAAQ,MAAM,QAAQ,WAAW;AAAA,CAAI;AAErC,QAAI;AACJ,YAAQ,QAAQ,KAAK,KAAK,KAAK,MAAM,QAAM;AACzC,YAAM,MAAM,MAAM,CAAC;AAGnB,UAAI,QAAS,MAAM,CAAC,KAAK;AAGzB,cAAQ,MAAM,KAAI;AAGlB,YAAM,aAAa,MAAM,CAAC;AAG1B,cAAQ,MAAM,QAAQ,0BAA0B,IAAI,GAGhD,eAAe,QACjB,QAAQ,MAAM,QAAQ,QAAQ;AAAA,CAAI,GAClC,QAAQ,MAAM,QAAQ,QAAQ,IAAI,IAIpC,IAAI,GAAG,IAAI;AAAA,IACf;AAEE,WAAO;AAAA,EACT;AAEA,WAAS,YAAa,SAAS;AAC7B,UAAM,YAAY,WAAW,OAAO,GAG9B,SAAS,aAAa,aAAa,EAAE,MAAM,UAAW,CAAA;AAC5D,QAAI,CAAC,OAAO,QAAQ;AAClB,YAAM,MAAM,IAAI,MAAM,8BAA8B,SAAS,wBAAwB;AACrF,gBAAI,OAAO,gBACL;AAAA,IACV;AAIE,UAAM,OAAO,WAAW,OAAO,EAAE,MAAM,GAAG,GACpC,SAAS,KAAK;AAEpB,QAAI;AACJ,aAAS,IAAI,GAAG,IAAI,QAAQ;AAC1B,UAAI;AAEF,cAAM,MAAM,KAAK,CAAC,EAAE,KAAI,GAGlB,QAAQ,cAAc,QAAQ,GAAG;AAGvC,oBAAY,aAAa,QAAQ,MAAM,YAAY,MAAM,GAAG;AAE5D;AAAA,MACD,SAAQ,OAAO;AAEd,YAAI,IAAI,
KAAK;AACX,gBAAM;AAAA,MAGd;AAIE,WAAO,aAAa,MAAM,SAAS;AAAA,EACrC;AAEA,WAAS,KAAM,SAAS;AACtB,YAAQ,IAAI,WAAWA,QAAO,WAAW,OAAO,EAAE;AAAA,EACpD;AAEA,WAAS,MAAO,SAAS;AACvB,YAAQ,IAAI,WAAWA,QAAO,WAAW,OAAO,EAAE;AAAA,EACpD;AAEA,WAAS,OAAQ,SAAS;AACxB,YAAQ,IAAI,WAAWA,QAAO,YAAY,OAAO,EAAE;AAAA,EACrD;AAEA,WAAS,WAAY,SAAS;AAE5B,WAAI,WAAW,QAAQ,cAAc,QAAQ,WAAW,SAAS,IACxD,QAAQ,aAIb,QAAQ,IAAI,cAAc,QAAQ,IAAI,WAAW,SAAS,IACrD,QAAQ,IAAI,aAId;AAAA,EACT;AAEA,WAAS,cAAe,QAAQ,WAAW;AAEzC,QAAI;AACJ,QAAI;AACF,YAAM,IAAI,IAAI,SAAS;AAAA,IACxB,SAAQ,OAAO;AACd,UAAI,MAAM,SAAS,mBAAmB;AACpC,cAAM,MAAM,IAAI,MAAM,4IAA4I;AAClK,kBAAI,OAAO,sBACL;AAAA,MACZ;AAEI,YAAM;AAAA,IACV;AAGE,UAAM,MAAM,IAAI;AAChB,QAAI,CAAC,KAAK;AACR,YAAM,MAAM,IAAI,MAAM,sCAAsC;AAC5D,gBAAI,OAAO,sBACL;AAAA,IACV;AAGE,UAAM,cAAc,IAAI,aAAa,IAAI,aAAa;AACtD,QAAI,CAAC,aAAa;AAChB,YAAM,MAAM,IAAI,MAAM,8CAA8C;AACpE,gBAAI,OAAO,sBACL;AAAA,IACV;AAGE,UAAM,iBAAiB,gBAAgB,YAAY,YAAW,CAAE,IAC1D,aAAa,OAAO,OAAO,cAAc;AAC/C,QAAI,CAAC,YAAY;AACf,YAAM,MAAM,IAAI,MAAM,2DAA2D,cAAc,2BAA2B;AAC1H,gBAAI,OAAO,gCACL;AAAA,IACV;AAEE,WAAO,EAAE,YAAY,IAAG;AAAA,EAC1B;AAEA,WAAS,WAAY,SAAS;AAC5B,QAAI,oBAAoB;AAExB,QAAI,WAAW,QAAQ,QAAQ,QAAQ,KAAK,SAAS;AACnD,UAAI,MAAM,QAAQ,QAAQ,IAAI;AAC5B,mBAAW,YAAY,QAAQ;AAC7B,UAAIL,IAAG,WAAW,QAAQ,MACxB,oBAAoB,SAAS,SAAS,QAAQ,IAAI,WAAW,GAAG,QAAQ;AAAA;AAI5E,4BAAoB,QAAQ,KAAK,SAAS,QAAQ,IAAI,QAAQ,OAAO,GAAG,QAAQ,IAAI;AAAA;AAGtF,0BAAoBD,MAAK,QAAQ,QAAQ,IAAK,GAAE,YAAY;AAG9D,WAAIC,IAAG,WAAW,iBAAiB,IAC1B,oBAGF;AAAA,EACT;AAEA,WAAS,aAAc,SAAS;AAC9B,WAAO,QAAQ,CAAC,MAAM,MAAMD,MAAK,KAAK,GAAG,QAAS,GAAE,QAAQ,MAAM,CAAC,CAAC,IAAI;AAAA,EAC1E;AAEA,WAAS,aAAc,SAAS;AAC9B,SAAK,uCAAuC;AAE5C,UAAM,SAAS,aAAa,YAAY,OAAO;AAE/C,QAAI,aAAa,QAAQ;AACzB,WAAI,WAAW,QAAQ,cAAc,SACnC,aAAa,QAAQ,aAGvB,aAAa,SAAS,YAAY,QAAQ,OAAO,GAE1C,EAAE,OAAM;AAAA,EACjB;AAEA,WAAS,aAAc,SAAS;AAC9B,UAAM,aAAaA,MAAK,QAAQ,QAAQ,IAAK,GAAE,MAAM;AACrD,QAAI,WAAW;AACf,UAAMO,SAAQ,GAAQ,WAAW,QAAQ;AAEzC,IAAI,WAAW,QAAQ,WACrB,WAAW,QAAQ,WAEfA,UACF,OAAO,oDAAoD;AAI/D,QAAI,cAAc,CAAC,UAAU;AAC7B,QAAI,WAAW,QAAQ;AACrB,UAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI;AAC7B,sBA
Ac,CAAC,aAAa,QAAQ,IAAI,CAAC;AAAA,WACpC;AACL,sBAAc,CAAE;AAChB,mBAAW,YAAY,QAAQ;AAC7B,sBAAY,KAAK,aAAa,QAAQ,CAAC;AAAA,MAE/C;AAKE,QAAI;AACJ,UAAM,YAAY,CAAA;AAClB,eAAWP,SAAQ;AACjB,UAAI;AAEF,cAAM,SAAS,aAAa,MAAMC,IAAG,aAAaD,OAAM,EAAE,UAAU,CAAC;AAErE,qBAAa,SAAS,WAAW,QAAQ,OAAO;AAAA,MACjD,SAAQ,GAAG;AACV,QAAIO,UACF,OAAO,kBAAkBP,KAAI,IAAI,EAAE,OAAO,EAAE,GAE9C,YAAY;AAAA,MAClB;AAGE,QAAI,aAAa,QAAQ;AAOzB,WANI,WAAW,QAAQ,cAAc,SACnC,aAAa,QAAQ,aAGvB,aAAa,SAAS,YAAY,WAAW,OAAO,GAEhD,YACK,EAAE,QAAQ,WAAW,OAAO,UAAS,IAErC,EAAE,QAAQ,UAAS;AAAA,EAE9B;AAGA,WAAS,OAAQ,SAAS;AAExB,QAAI,WAAW,OAAO,EAAE,WAAW;AACjC,aAAO,aAAa,aAAa,OAAO;AAG1C,UAAM,YAAY,WAAW,OAAO;AAGpC,WAAK,YAME,aAAa,aAAa,OAAO,KALtC,MAAM,+DAA+D,SAAS,+BAA+B,GAEtG,aAAa,aAAa,OAAO;AAAA,EAI5C;AAEA,WAAS,QAAS,WAAW,QAAQ;AACnC,UAAM,MAAM,OAAO,KAAK,OAAO,MAAM,GAAG,GAAG,KAAK;AAChD,QAAI,aAAa,OAAO,KAAK,WAAW,QAAQ;AAEhD,UAAM,QAAQ,WAAW,SAAS,GAAG,EAAE,GACjC,UAAU,WAAW,SAAS,GAAG;AACvC,iBAAa,WAAW,SAAS,IAAI,GAAG;AAExC,QAAI;AACF,YAAM,SAAS,OAAO,iBAAiB,eAAe,KAAK,KAAK;AAChE,oBAAO,WAAW,OAAO,GAClB,GAAG,OAAO,OAAO,UAAU,CAAC,GAAG,OAAO,OAAO;AAAA,IACrD,SAAQ,OAAO;AACd,YAAM,UAAU,iBAAiB,YAC3B,mBAAmB,MAAM,YAAY,sBACrC,mBAAmB,MAAM,YAAY;AAE3C,UAAI,WAAW,kBAAkB;AAC/B,cAAM,MAAM,IAAI,MAAM,6DAA6D;AACnF,kBAAI,OAAO,sBACL;AAAA,MACP,WAAU,kBAAkB;AAC3B,cAAM,MAAM,IAAI,MAAM,iDAAiD;AACvE,kBAAI,OAAO,qBACL;AAAA,MACZ;AACM,cAAM;AAAA,IAEZ;AAAA,EACA;AAGA,WAAS,SAAU,YAAY,QAAQ,UAAU,CAAA,GAAI;AACnD,UAAMO,SAAQ,GAAQ,WAAW,QAAQ,QACnC,WAAW,GAAQ,WAAW,QAAQ;AAE5C,QAAI,OAAO,UAAW,UAAU;AAC9B,YAAM,MAAM,IAAI,MAAM,gFAAgF;AACtG,gBAAI,OAAO,mBACL;AAAA,IACV;AAGE,eAAW,OAAO,OAAO,KAAK,MAAM;AAClC,MAAI,OAAO,UAAU,eAAe,KAAK,YAAY,GAAG,KAClD,aAAa,OACf,WAAW,GAAG,IAAI,OAAO,GAAG,IAG1BA,UAEA,OADE,aAAa,KACR,IAAI,GAAG,6CAEP,IAAI,GAAG,8CAF0C,KAM5D,WAAW,GAAG,IAAI,OAAO,GAAG;AAAA,EAGlC;AAEA,QAAM,eAAe;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAE2BC,gBAAA,QAAA,eAAG,aAAa,cAChBA,OAAA,QAAA,eAAG,aAAa,cACjBA,OAAA,QAAA,cAAG,aAAa,aACrBA,OAAA,QAAA,SAAG,aAAa,QACfA,OAAA,QAAA,UAAG,aAAa,SAClBA,OAAA,QAAA,QAAG,aAAa,OAC
bA,OAAA,QAAA,WAAG,aAAa,UAEvCA,OAAA,UAAiB;;;;;;ACtWjB,WAAS,aAAc,UAAU,aAAa,QAAQ;AACpD,UAAM,UAAU,SAAS,MAAM,+BAA+B,KAAK,CAAA;AAEnE,WAAO,QAAQ,OAAO,SAAU,QAAQ,OAAO,OAAO;AACpD,YAAM,QAAQ,mCAAmC,KAAK,KAAK;AAC3D,UAAI,CAAC,SAAS,MAAM,WAAW;AAC7B,eAAO;AAGT,YAAM,SAAS,MAAM,CAAC;AAEtB,UAAI,OAAO;AAEX,UAAI,WAAW;AACb,sBAAc,MAAM,CAAC,GACrB,QAAQ,YAAY,QAAQ,OAAO,GAAG;AAAA,WACjC;AACL,cAAM,WAAW,MAAM,CAAC,EAAE,MAAM,IAAI,GAC9B,MAAM,SAAS,CAAC;AAQtB,YAPA,cAAc,MAAM,CAAC,EAAE,UAAU,OAAO,MAAM,GAE9C,QAAQ,OAAO,UAAU,eAAe,KAAK,aAAa,GAAG,IACzD,YAAY,GAAG,IACd,OAAO,OAAO,GAAG,KAAK,SAAS,CAAC,KAAK,IAGtC,SAAS,SAAS,KAAK,OAAO;AAChC,gBAAM,gBAAgB,QAAQ,QAAQ,CAAC;AACvC,kBAAQ,QAAQ,CAAC,IAAI,IAErB,SAAS,OAAO,QAAQ,eAAe,EAAE;AAAA,QACjD;AAEM,gBAAQ,aAAa,OAAO,aAAa,MAAM;AAAA,MACrD;AAEI,aAAO,OAAO,QAAQ,aAAa,KAAK;AAAA,IAC5C,GAAK,QAAQ;AAAA,EACb;AAEA,WAAS,OAAQ,QAAQ;AAEvB,UAAM,cAAc,OAAO,mBAAmB,CAAA,IAAK,QAAQ;AAE3D,eAAW,aAAa,OAAO,QAAQ;AACrC,YAAM,QAAQ,OAAO,UAAU,eAAe,KAAK,aAAa,SAAS,IAAI,YAAY,SAAS,IAAI,OAAO,OAAO,SAAS;AAE7H,aAAO,OAAO,SAAS,IAAI,aAAa,OAAO,aAAa,MAAM;AAAA,IACtE;AAEE,eAAW,cAAc,OAAO;AAC9B,kBAAY,UAAU,IAAI,OAAO,OAAO,UAAU;AAGpD,WAAO;AAAA,EACT;AAEA,cAAA,SAAwB;;;AC1CjB,SAAS,QACd,MACA,QACA,WAAqB,CAAC,OAAO,GACL;AACxB,MAAI,SAAS;AACX,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAGI,QAAA,MAA8B,CAAC,GAC/B,WAAW;AAAA;AAAA,IACK;AAAA;AAAA,IACF;AAAA;AAAA,IACD,QAAQ,IAAI;AAAA;AAAA,IACN,QAAQ,IAAI;AAAA,EAAA,GAG/B,SAAS,OAAO;AAAA,IACpB,SAAS,QAAQ,CAAC,SAAS;AACzB,YAAM,UAAU,WAAW,QAAQ,CAAC,IAAI,GAAG;AAAA,QACzC,SAAS;AAAA,MAAA,CACV;AACI,aAAA,UACE,OAAO,QAAQC,cAAM,MAAAR,YAAA,QAAG,aAAa,OAAO,CAAC,CAAC,IADhC,CAAC;AAAA,IAEvB,CAAA;AAAA,EACH;AAGI,SAAO,YAAY,QAAQ,IAAI,uBAAuB,WACxD,QAAQ,IAAI,qBAAqB,OAAO,WAGtC,OAAO,WAAW,QAAQ,IAAI,YAAY,WAC5C,QAAQ,IAAI,UAAU,OAAO,UAE3B,OAAO,gBAAgB,QAAQ,IAAI,iBAAiB,WACtD,QAAQ,IAAI,eAAe,OAAO;AAGhC,MAAA;AAEKS,gBAAA,OAAA,EAAC,QAAO;AAAA,WACR,GAAG;AAGN,UAAA,EAAE,QAAQ,SAAS,OAAO,IACtB,IAAI,MAAM,wEAAwE,IAEpF;AAAA,EAAA;AAIR,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM;AAC1C,aAAS,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,MAClD,IAAI,GAAG,IAAI;AAMf,aAAW,OAAO,QAAQ;AACpB,aAA
S,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,MAClD,IAAI,GAAG,IAAI,QAAQ,IAAI,GAAG;AAIvB,SAAA;AACT;AAEA,SAAS,WACP,KACA,SACA,SAGoB;AACpB,aAAW,UAAU,SAAS;AAC5B,UAAM,WAAWV,cAAA,QAAK,KAAK,KAAK,MAAM;AAElC,QAAAC,YAAA,QAAG,WAAW,QAAQ,KAAKA,oBAAG,SAAS,QAAQ,EAAE,OAAO;AACnD,aAAA;AAAA,EAAA;AAGL,QAAA,YAAYD,cAAAA,QAAK,QAAQ,GAAG;AAC9B,MAAA,cAAc,QAAQ,CAAC,SAAS,WAAW,UAAU,WAAW,SAAS,OAAO;AAC3E,WAAA,WAAW,WAAW,SAAS,OAAO;AAIjD;;;;;;;;","x_google_ignoreList":[2,3]}
@@ -0,0 +1,84 @@
1
+ "use strict";
2
/**
 * Minimal async message queue that bridges a push-based producer (worker
 * messages) with a pull-based, promise-returning consumer.
 *
 * Supports at most ONE pending consumer at a time: a second concurrent
 * `next()` call while the queue is empty overwrites the first waiter's
 * resolver (matches the original single-consumer usage in createReceiver).
 */
class MessageQueue {
  // Resolver of the single consumer currently awaiting a message, if any.
  resolver = null;
  // Messages pushed while no consumer was waiting.
  queue = [];
  // Set once end() is called; afterwards push() is a no-op and next()
  // reports `done` after the buffered messages are drained.
  ended = false;

  /** Deliver a message to a waiting consumer, or buffer it. Ignored after end(). */
  push(message) {
    if (this.ended) return;
    if (this.resolver) {
      this.resolver({ value: message, done: false });
      this.resolver = null;
    } else {
      this.queue.push(message);
    }
  }

  /**
   * Async-iterator-style pull.
   * Order matters: buffered messages are drained even after end(), and only
   * then does the queue report `{value: undefined, done: true}`.
   */
  next() {
    if (this.queue.length > 0) {
      return Promise.resolve({ value: this.queue.shift(), done: false });
    }
    if (this.ended) {
      return Promise.resolve({ value: undefined, done: true });
    }
    return new Promise((resolve) => {
      this.resolver = resolve;
    });
  }

  /**
   * Signal that no further messages will arrive.
   *
   * Fix over the original: `ended` is now set unconditionally. Previously,
   * when a consumer was already waiting (`resolver` set), only that consumer
   * was resolved and `ended` stayed false — so a later push() was still
   * accepted and a later next() would hang forever instead of resolving
   * with `done`.
   */
  end() {
    this.ended = true;
    if (this.resolver) {
      this.resolver({ value: undefined, done: true });
      this.resolver = null;
    }
  }
}
17
/**
 * Type guard: true only for well-formed worker-channel messages, i.e. a
 * non-null object whose string `type` is one of "event", "emission", "end".
 */
function isWorkerChannelMessage(message) {
  if (typeof message !== "object" || message === null) return false;
  if (!("type" in message) || typeof message.type !== "string") return false;
  const knownTypes = ["event", "emission", "end"];
  return knownTypes.includes(message.type);
}
20
/**
 * Parent-side of the worker channel: listens to `message`/`error` events on
 * the given worker and exposes them as awaitable named events and async
 * iterable named streams.
 *
 * Every await races the relevant queue against the shared error queue, so a
 * worker `error` event rejects whichever receiver is currently waiting.
 */
function createReceiver(worker) {
  const eventQueues = new Map();
  const streamQueues = new Map();
  const errors = new MessageQueue();

  // Lazily create (and memoize) the queue for a given name.
  function queueFor(map, name) {
    let queue = map.get(name);
    if (!queue) {
      queue = new MessageQueue();
      map.set(name, queue);
    }
    return queue;
  }

  const handleMessage = (message) => {
    if (!isWorkerChannelMessage(message)) return;
    if (message.type === "event") {
      queueFor(eventQueues, message.name).push(message);
    } else if (message.type === "emission") {
      queueFor(streamQueues, message.name).push(message);
    } else if (message.type === "end") {
      queueFor(streamQueues, message.name).end();
    }
  };

  const handleError = (error) => {
    errors.push({ type: "error", error });
  };

  worker.addListener("message", handleMessage);
  worker.addListener("error", handleError);

  const event = new Proxy({}, {
    get(target, name) {
      if (typeof name !== "string") return target[name];
      // Awaiting receiver.event.<name>() resolves with the event payload,
      // or rethrows a worker error if one arrives first.
      return async () => {
        const raced = await Promise.race([queueFor(eventQueues, name).next(), errors.next()]);
        const { value } = raced;
        if (value.type === "error") throw value.error;
        return value.payload;
      };
    },
  });

  const stream = new Proxy({}, {
    get(target, prop) {
      if (typeof prop !== "string") return target[prop];
      const name = prop;
      // receiver.stream.<name>() yields payloads until the worker reports
      // `end` for this stream; worker errors are rethrown to the iterator.
      return async function* () {
        for (;;) {
          const raced = await Promise.race([queueFor(streamQueues, name).next(), errors.next()]);
          if (raced.done) return;
          if (raced.value.type === "error") throw raced.value.error;
          yield raced.value.payload;
        }
      };
    },
  });

  return {
    event,
    stream,
    // Detach listeners and terminate the worker.
    dispose: () => {
      worker.removeListener("message", handleMessage);
      worker.removeListener("error", handleError);
      return worker.terminate();
    },
  };
}
58
/**
 * Worker-side of the worker channel: returns proxies that post typed
 * messages over `parentPort` for named events and streams.
 *
 * - `report.event.<name>(payload)` posts `{type: 'event', name, payload}`.
 * - `report.stream.<name>.emit(payload)` posts `{type: 'emission', name, payload}`.
 * - `report.stream.<name>.end()` posts `{type: 'end', name}`.
 *
 * @throws {Error} if `parentPort` is falsy (e.g. not running in a worker).
 */
function createReporter(parentPort) {
  if (!parentPort) {
    // Fix over the original: the message said "parentPart" (typo) instead
    // of "parentPort".
    throw new Error("parentPort was falsy");
  }
  return {
    event: new Proxy({}, {
      get: (target, name) =>
        typeof name !== "string"
          ? target[name]
          : (payload) => {
              const message = { type: "event", name, payload };
              parentPort.postMessage(message);
            },
    }),
    stream: new Proxy({}, {
      get: (target, name) =>
        typeof name !== "string"
          ? target[name]
          : {
              emit: (payload) => {
                const message = { type: "emission", name, payload };
                parentPort.postMessage(message);
              },
              end: () => {
                const message = { type: "end", name };
                parentPort.postMessage(message);
              },
            },
    }),
  };
}
82
// Public API of the worker-channel module (CommonJS exports).
Object.assign(exports, { createReceiver, createReporter });
//# sourceMappingURL=workerChannel.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"workerChannel.js","sources":["../../src/util/workerChannel.ts"],"sourcesContent":["// NOTE: this file was originally copied from\n// https://github.com/sanity-io/sanity/blob/4c4e03d407106dbda12f52cfd9511fbfe75a9696/packages/sanity/src/_internal/cli/util/workerChannels.ts\nimport {type MessagePort, type Worker} from 'node:worker_threads'\n\ntype StreamReporter<TPayload = unknown> = {emit: (payload: TPayload) => void; end: () => void}\ntype EventReporter<TPayload = unknown> = (payload: TPayload) => void\ntype EventReceiver<TPayload = unknown> = () => Promise<TPayload>\ntype StreamReceiver<TPayload = unknown> = () => AsyncIterable<TPayload>\n\ntype EventKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelEvent<any> ? K : never\n}[keyof TWorkerChannel]\ntype StreamKeys<TWorkerChannel extends WorkerChannel> = {\n [K in keyof TWorkerChannel]: TWorkerChannel[K] extends WorkerChannelStream<any> ? K : never\n}[keyof TWorkerChannel]\n\ntype EventMessage<TPayload = unknown> = {type: 'event'; name: string; payload: TPayload}\ntype StreamEmissionMessage<TPayload = unknown> = {type: 'emission'; name: string; payload: TPayload}\ntype StreamEndMessage = {type: 'end'; name: string}\ntype WorkerChannelMessage = EventMessage | StreamEmissionMessage | StreamEndMessage\n\n/**\n * Represents the definition of a \"worker channel\" to report progress from the\n * worker to the parent. 
Worker channels can define named events or streams and\n * the worker will report events and streams while the parent will await them.\n * This allows the control flow of the parent to follow the control flow of the\n * worker 1-to-1.\n *\n * @example\n *\n * ```ts\n * // Define the channel interface (shared between parent and worker)\n * type MyWorkerChannel = WorkerChannel<{\n * compileStart: WorkerChannelEvent<void>\n * compileProgress: WorkerChannelStream<{ file: string; progress: number }>\n * compileEnd: WorkerChannelEvent<{ duration: number }>\n * }>;\n *\n * // --- In the worker file (e.g., worker.ts) ---\n * import { parentPort } from 'node:worker_threads';\n * import { createReporter } from './workerChannels';\n *\n * const report = createReporter<MyWorkerChannel>(parentPort);\n *\n * async function runCompilation() {\n * report.event.compileStart(); // Signal start\n *\n * const files = ['a.js', 'b.js', 'c.js'];\n * for (const file of files) {\n * // Simulate work and report progress\n * await new Promise(resolve => setTimeout(resolve, 100));\n * report.stream.compileProgress.emit({ file, progress: 100 });\n * }\n * report.stream.compileProgress.end(); // Signal end of progress stream\n *\n * report.event.compileEnd({ duration: 300 }); // Signal end with result\n * }\n *\n * runCompilation();\n *\n * // --- In the parent file (e.g., main.ts) ---\n * import { Worker } from 'node:worker_threads';\n * import { createReceiver } from './workerChannels';\n *\n * const worker = new Worker('./worker.js');\n * const receiver = createReceiver<MyWorkerChannel>(worker);\n *\n * async function monitorCompilation() {\n * console.log('Waiting for compilation to start...');\n * await receiver.event.compileStart();\n * console.log('Compilation started.');\n *\n * console.log('Receiving progress:');\n * for await (const progress of receiver.stream.compileProgress()) {\n * console.log(` - ${progress.file}: ${progress.progress}%`);\n * }\n *\n * console.log('Waiting for 
compilation to end...');\n * const { duration } = await receiver.event.compileEnd();\n * console.log(`Compilation finished in ${duration}ms.`);\n *\n * await receiver.dispose(); // Clean up listeners and terminate worker\n * }\n *\n * monitorCompilation();\n * ```\n *\n * @internal\n */\nexport type WorkerChannel<\n TWorkerChannel extends Record<\n string,\n WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>\n > = Record<string, WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>>,\n> = TWorkerChannel\n\n/** @internal */\nexport type WorkerChannelEvent<TPayload = void> = {type: 'event'; payload: TPayload}\n/** @internal */\nexport type WorkerChannelStream<TPayload = void> = {type: 'stream'; payload: TPayload}\n\nexport interface WorkerChannelReporter<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReporter<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReporter<TPayload>\n : void\n }\n}\n\nexport interface WorkerChannelReceiver<TWorkerChannel extends WorkerChannel> {\n event: {\n [K in EventKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelEvent<infer TPayload>\n ? EventReceiver<TPayload>\n : void\n }\n stream: {\n [K in StreamKeys<TWorkerChannel>]: TWorkerChannel[K] extends WorkerChannelStream<infer TPayload>\n ? StreamReceiver<TPayload>\n : void\n }\n // TODO: good candidate for [Symbol.asyncDispose] when our tooling better supports it\n dispose: () => Promise<number>\n}\n\n/**\n * A simple queue that has two primary methods: `push(message)` and\n * `await next()`. 
This message queue is used by the \"receiver\" of the worker\n * channel and this class handles buffering incoming messages if the worker is\n * producing faster than the parent as well as returning a promise if there is\n * no message yet in the queue when the parent awaits `next()`.\n */\nclass MessageQueue<T> {\n resolver: ((result: IteratorResult<T>) => void) | null = null\n queue: T[] = []\n private ended = false // Flag to indicate if end() was called\n\n push(message: T) {\n if (this.ended) {\n // Don't push messages after the queue has ended\n return\n }\n if (this.resolver) {\n this.resolver({value: message, done: false})\n this.resolver = null\n } else {\n this.queue.push(message)\n }\n }\n\n next(): Promise<IteratorResult<T>> {\n if (this.queue.length) {\n return Promise.resolve({value: this.queue.shift()!, done: false})\n }\n\n if (this.ended) {\n // If end() was called before and queue is empty, resolve immediately as done\n return Promise.resolve({value: undefined, done: true})\n }\n\n return new Promise((resolve) => (this.resolver = resolve))\n }\n\n end() {\n if (this.resolver) {\n this.resolver({value: undefined, done: true})\n this.resolver = null // Clear resolver after ending\n } else {\n // If resolver is null, it means next() hasn't been called yet or\n // previous next() was resolved by a push(). 
Mark as ended so the\n // *next* call to next() resolves immediately as done.\n this.ended = true\n }\n }\n}\n\nfunction isWorkerChannelMessage(message: unknown): message is WorkerChannelMessage {\n if (typeof message !== 'object') return false\n if (!message) return false\n if (!('type' in message)) return false\n if (typeof message.type !== 'string') return false\n const types: string[] = ['event', 'emission', 'end'] satisfies WorkerChannelMessage['type'][]\n return types.includes(message.type)\n}\n\n/**\n * Creates a \"worker channel receiver\" that subscribes to incoming messages\n * from the given worker and returns promises for worker channel events and\n * async iterators for worker channel streams.\n */\nexport function createReceiver<TWorkerChannel extends WorkerChannel>(\n worker: Worker,\n): WorkerChannelReceiver<TWorkerChannel> {\n const _events = new Map<string, MessageQueue<EventMessage>>()\n const _streams = new Map<string, MessageQueue<StreamEmissionMessage>>()\n const errors = new MessageQueue<{type: 'error'; error: unknown}>()\n\n const eventQueue = (name: string) => {\n const queue = _events.get(name) ?? new MessageQueue()\n if (!_events.has(name)) _events.set(name, queue)\n return queue\n }\n\n const streamQueue = (name: string) => {\n const queue = _streams.get(name) ?? 
new MessageQueue()\n if (!_streams.has(name)) _streams.set(name, queue)\n return queue\n }\n\n const handleMessage = (message: unknown) => {\n if (!isWorkerChannelMessage(message)) return\n if (message.type === 'event') eventQueue(message.name).push(message)\n if (message.type === 'emission') streamQueue(message.name).push(message)\n if (message.type === 'end') streamQueue(message.name).end()\n }\n\n const handleError = (error: unknown) => {\n errors.push({type: 'error', error})\n }\n\n worker.addListener('message', handleMessage)\n worker.addListener('error', handleError)\n\n return {\n event: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReceiver: EventReceiver = async () => {\n const {value} = await Promise.race([eventQueue(name).next(), errors.next()])\n if (value.type === 'error') throw value.error\n return value.payload\n }\n\n return eventReceiver\n },\n }),\n stream: new Proxy({} as WorkerChannelReceiver<TWorkerChannel>['stream'], {\n get: (target, prop) => {\n if (typeof prop !== 'string') return target[prop as keyof typeof target]\n const name = prop // alias for better typescript narrowing\n\n async function* streamReceiver() {\n while (true) {\n const {value, done} = await Promise.race([streamQueue(name).next(), errors.next()])\n if (done) return\n if (value.type === 'error') throw value.error\n yield value.payload\n }\n }\n\n return streamReceiver satisfies StreamReceiver\n },\n }),\n dispose: () => {\n worker.removeListener('message', handleMessage)\n worker.removeListener('error', handleError)\n return worker.terminate()\n },\n }\n}\n\n/**\n * Creates a \"worker channel reporter\" that sends messages to the given\n * `parentPort` to be received by a worker channel receiver.\n *\n * @internal\n */\nexport function createReporter<TWorkerChannel extends WorkerChannel>(\n parentPort: MessagePort | null,\n): 
WorkerChannelReporter<TWorkerChannel> {\n if (!parentPort) {\n throw new Error('parentPart was falsy')\n }\n\n return {\n event: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['event'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const eventReporter: EventReporter = (payload) => {\n const message: EventMessage = {type: 'event', name, payload}\n parentPort.postMessage(message)\n }\n\n return eventReporter\n },\n }),\n stream: new Proxy({} as WorkerChannelReporter<TWorkerChannel>['stream'], {\n get: (target, name) => {\n if (typeof name !== 'string') return target[name as keyof typeof target]\n\n const streamReporter: StreamReporter = {\n emit: (payload) => {\n const message: StreamEmissionMessage = {type: 'emission', name, payload}\n parentPort.postMessage(message)\n },\n end: () => {\n const message: StreamEndMessage = {type: 'end', name}\n parentPort.postMessage(message)\n },\n }\n\n return streamReporter\n },\n }),\n 
}\n}\n"],"names":[],"mappings":";AAwIA,MAAM,aAAgB;AAAA,EACpB,WAAyD;AAAA,EACzD,QAAa,CAAC;AAAA,EACN,QAAQ;AAAA;AAAA,EAEhB,KAAK,SAAY;AACX,SAAK,UAIL,KAAK,YACP,KAAK,SAAS,EAAC,OAAO,SAAS,MAAM,IAAM,GAC3C,KAAK,WAAW,QAEhB,KAAK,MAAM,KAAK,OAAO;AAAA,EAAA;AAAA,EAI3B,OAAmC;AACjC,WAAI,KAAK,MAAM,SACN,QAAQ,QAAQ,EAAC,OAAO,KAAK,MAAM,MAAM,GAAI,MAAM,GAAM,CAAA,IAG9D,KAAK,QAEA,QAAQ,QAAQ,EAAC,OAAO,QAAW,MAAM,GAAK,CAAA,IAGhD,IAAI,QAAQ,CAAC,YAAa,KAAK,WAAW,OAAQ;AAAA,EAAA;AAAA,EAG3D,MAAM;AACA,SAAK,YACP,KAAK,SAAS,EAAC,OAAO,QAAW,MAAM,GAAK,CAAA,GAC5C,KAAK,WAAW,QAKhB,KAAK,QAAQ;AAAA,EAAA;AAGnB;AAEA,SAAS,uBAAuB,SAAmD;AAC7E,SAAA,OAAO,WAAY,YACnB,CAAC,WACD,EAAE,UAAU,YACZ,OAAO,QAAQ,QAAS,WAAiB,KACrB,CAAC,SAAS,YAAY,KAAK,EACtC,SAAS,QAAQ,IAAI;AACpC;AAOO,SAAS,eACd,QACuC;AACvC,QAAM,UAAU,oBAAI,IAAwC,GACtD,WAAe,oBAAA,OACf,SAAS,IAAI,aAAA,GAEb,aAAa,CAAC,SAAiB;AACnC,UAAM,QAAQ,QAAQ,IAAI,IAAI,KAAK,IAAI,aAAa;AAC/C,WAAA,QAAQ,IAAI,IAAI,KAAG,QAAQ,IAAI,MAAM,KAAK,GACxC;AAAA,EAAA,GAGH,cAAc,CAAC,SAAiB;AACpC,UAAM,QAAQ,SAAS,IAAI,IAAI,KAAK,IAAI,aAAa;AAChD,WAAA,SAAS,IAAI,IAAI,KAAG,SAAS,IAAI,MAAM,KAAK,GAC1C;AAAA,EAAA,GAGH,gBAAgB,CAAC,YAAqB;AACrC,2BAAuB,OAAO,MAC/B,QAAQ,SAAS,WAAS,WAAW,QAAQ,IAAI,EAAE,KAAK,OAAO,GAC/D,QAAQ,SAAS,cAAY,YAAY,QAAQ,IAAI,EAAE,KAAK,OAAO,GACnE,QAAQ,SAAS,SAAO,YAAY,QAAQ,IAAI,EAAE,IAAI;AAAA,EAAA,GAGtD,cAAc,CAAC,UAAmB;AACtC,WAAO,KAAK,EAAC,MAAM,SAAS,OAAM;AAAA,EACpC;AAEO,SAAA,OAAA,YAAY,WAAW,aAAa,GAC3C,OAAO,YAAY,SAAS,WAAW,GAEhC;AAAA,IACL,OAAO,IAAI,MAAM,IAAsD;AAAA,MACrE,KAAK,CAAC,QAAQ,SACR,OAAO,QAAS,WAAiB,OAAO,IAA2B,IAElC,YAAY;AAC/C,cAAM,EAAC,MAAS,IAAA,MAAM,QAAQ,KAAK,CAAC,WAAW,IAAI,EAAE,KAAK,GAAG,OAAO,KAAM,CAAA,CAAC;AAC3E,YAAI,MAAM,SAAS,QAAS,OAAM,MAAM;AACxC,eAAO,MAAM;AAAA,MAAA;AAAA,IACf,CAIH;AAAA,IACD,QAAQ,IAAI,MAAM,IAAuD;AAAA,MACvE,KAAK,CAAC,QAAQ,SAAS;AACrB,YAAI,OAAO,QAAS,SAAU,QAAO,OAAO,IAA2B;AACvE,cAAM,OAAO;AAEb,wBAAgB,iBAAiB;AAClB,qBAAA;AACX,kBAAM,EAAC,OAAO,KAAQ,IAAA,MAAM,QAAQ,KAAK,CAAC,YAAY,IAAI,EAAE,KAAK,GAAG,OAAO,KAAM,CAAA,CAAC;AAClF,gBAAI,KAAM;AACV,gBAAI,MAAM,SAAS,QAAS,OAAM,MAAM;AACxC,kBAAM,MAAM;AAAA,UAAA;AAAA,QA
Cd;AAGK,eAAA;AAAA,MAAA;AAAA,IACT,CACD;AAAA,IACD,SAAS,OACP,OAAO,eAAe,WAAW,aAAa,GAC9C,OAAO,eAAe,SAAS,WAAW,GACnC,OAAO,UAAU;AAAA,EAE5B;AACF;AAQO,SAAS,eACd,YACuC;AACvC,MAAI,CAAC;AACG,UAAA,IAAI,MAAM,sBAAsB;AAGjC,SAAA;AAAA,IACL,OAAO,IAAI,MAAM,IAAsD;AAAA,MACrE,KAAK,CAAC,QAAQ,SACR,OAAO,QAAS,WAAiB,OAAO,IAA2B,IAElC,CAAC,YAAY;AAChD,cAAM,UAAwB,EAAC,MAAM,SAAS,MAAM,QAAO;AAC3D,mBAAW,YAAY,OAAO;AAAA,MAAA;AAAA,IAChC,CAIH;AAAA,IACD,QAAQ,IAAI,MAAM,IAAuD;AAAA,MACvE,KAAK,CAAC,QAAQ,SACR,OAAO,QAAS,WAAiB,OAAO,IAA2B,IAEhC;AAAA,QACrC,MAAM,CAAC,YAAY;AACjB,gBAAM,UAAiC,EAAC,MAAM,YAAY,MAAM,QAAO;AACvE,qBAAW,YAAY,OAAO;AAAA,QAChC;AAAA,QACA,KAAK,MAAM;AACT,gBAAM,UAA4B,EAAC,MAAM,OAAO,KAAI;AACpD,qBAAW,YAAY,OAAO;AAAA,QAAA;AAAA,MAChC;AAAA,IAKL,CAAA;AAAA,EACH;AACF;;;"}