@sanity/cli 3.86.1 → 3.86.2-experimental.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/_chunks-cjs/generateAction.js +118 -96
- package/lib/_chunks-cjs/generateAction.js.map +1 -1
- package/lib/_chunks-cjs/journeyConfig.js +24 -5
- package/lib/_chunks-cjs/journeyConfig.js.map +1 -1
- package/lib/_chunks-cjs/loadEnv.js +3 -3
- package/lib/_chunks-cjs/loadEnv.js.map +1 -1
- package/lib/_chunks-cjs/workerChannel.js +84 -0
- package/lib/_chunks-cjs/workerChannel.js.map +1 -0
- package/lib/workers/typegenGenerate.d.ts +104 -36
- package/lib/workers/typegenGenerate.js +24 -111
- package/lib/workers/typegenGenerate.js.map +1 -1
- package/package.json +18 -20
- package/src/actions/typegen/generate.telemetry.ts +5 -3
- package/src/actions/typegen/generateAction.ts +165 -130
- package/src/cli.ts +0 -0
- package/src/commands/projects/listProjectsCommand.ts +0 -0
- package/src/commands/projects/projectsGroup.ts +0 -0
- package/src/util/__tests__/workerChannel.test.ts +222 -0
- package/src/util/journeyConfig.ts +3 -2
- package/src/util/workerChannel.ts +312 -0
- package/src/workers/typegenGenerate.ts +55 -193
package/lib/_chunks-cjs/generateAction.js
@@ -1,8 +1,27 @@
 "use strict";
-var
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf, __hasOwnProp = Object.prototype.hasOwnProperty;
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from == "object" || typeof from == "function")
+    for (let key of __getOwnPropNames(from))
+      !__hasOwnProp.call(to, key) && key !== except && __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: !0 }) : target,
+  mod
+));
+var fs = require("node:fs/promises"), path = require("node:path"), node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"), cliWorker = require("./cliWorker.js"), workerChannel = require("./workerChannel.js"), telemetry = require("@sanity/telemetry");
 const TypesGeneratedTrace = telemetry.defineTrace({
   name: "Types Generated",
-  version:
+  version: 1,
   description: "Trace emitted when generating TypeScript types for queries"
 }), generatedFileWarning = `/**
  * ---------------------------------------------------------------------------------
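Both compiled chunks in this diff gain esbuild's `__create`/`__copyProps`/`__toESM` interop helpers, shown in the hunk above. They are a by-product of one source change: prettier is no longer required at module load, but pulled in lazily with a dynamic `import()` (the new `generateAction.ts` source annotates this with "this is here because this is an ESM-only import"). A minimal sketch of the pattern, assuming esbuild's usual lowering of dynamic imports in CommonJS output; `loadPrettier` is an illustrative name, not part of the package:

```ts
// Defer loading prettier until first use instead of at require() time.
async function loadPrettier() {
  const {format, resolveConfig} = await import('prettier')
  return {format, resolveConfig}
}

// In CJS output, esbuild lowers the dynamic import to roughly:
//   Promise.resolve().then(() => __toESM(require('prettier')))
// which is why the __create/__copyProps/__toESM helpers now appear at the
// top of this chunk.
```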
@@ -19,121 +38,124 @@ const TypesGeneratedTrace = telemetry.defineTrace({
  */
 
 `;
+async function formatGeneratedFile(outputPath, output, spinner) {
+  const { format: prettierFormat, resolveConfig: resolvePrettierConfig } = await import("prettier");
+  let formatFile;
+  try {
+    const prettierConfig = await resolvePrettierConfig(outputPath).catch((err) => (output.warn(`Failed to load prettier config: ${err.message}`), null));
+    if (prettierConfig) {
+      spinner.text = "Formatting generated types with Prettier...", formatFile = await fs.open(outputPath, fs.constants.O_RDWR);
+      try {
+        const code = await formatFile.readFile(), formattedCode = await prettierFormat(code.toString(), {
+          ...prettierConfig,
+          parser: "typescript"
+        });
+        await formatFile.truncate(), await formatFile.write(formattedCode, 0), spinner.info("Formatted generated types with Prettier");
+      } catch (err) {
+        output.warn(`Failed to format generated types with Prettier: ${err.message}`);
+      } finally {
+        await formatFile?.close();
+      }
+    }
+  } catch (err) {
+    output.warn(`Error during formatting setup: ${err.message}`);
+  }
+}
 async function typegenGenerateAction(args, context) {
   const flags = args.extOptions, { output, workDir, telemetry: telemetry2 } = context, trace = telemetry2.trace(TypesGeneratedTrace);
   trace.start();
-  const
-
-
-
-
-
-
-
+  const typegenConfig = await codegen.readConfig(flags["config-path"] ?? "sanity-typegen.json"), missingSchemas = [], invalidSchemas = [];
+  for (const schemaPath of typegenConfig.schemas.map((i) => i.schemaPath))
+    try {
+      (await fs.stat(schemaPath)).isFile() || invalidSchemas.push(schemaPath);
+    } catch (err) {
+      if (err.code === "ENOENT")
+        missingSchemas.push(schemaPath);
+      else
+        throw err;
+    }
+  if (missingSchemas.length > 0 || invalidSchemas.length > 0) {
+    const errors = [];
+    if (missingSchemas.length > 0) {
+      const hint = missingSchemas.length === 1 && missingSchemas[0] === "./schema.json" ? ' - did you run "sanity schema extract"?' : "", schemaList = missingSchemas.map((path2) => ` - ${path2}`).join(`
+`);
+      errors.push(`The following schema files were not found:
+${schemaList}${hint}`);
+    }
+    if (invalidSchemas.length > 0) {
+      const schemaList = invalidSchemas.map((path2) => ` - ${path2}`).join(`
+`);
+      errors.push(`The following schema paths are not files:
+${schemaList}`);
     }
-    throw
+    throw new Error(errors.join(`
+
+`));
   }
-  const outputPath = path.join(process.cwd(),
+  const outputPath = path.join(process.cwd(), typegenConfig.generates), outputDir = path.dirname(outputPath);
   await fs.mkdir(outputDir, { recursive: !0 });
-  const workerPath = await cliWorker.getCliWorkerPath("typegenGenerate"), spinner = output.spinner(
+  const workerPath = await cliWorker.getCliWorkerPath("typegenGenerate"), spinner = output.spinner("Generating types"), worker = new node_worker_threads.Worker(workerPath, {
     workerData: {
       workDir,
-
-      searchPath:
-      overloadClientMethods:
+      schemas: typegenConfig.schemas,
+      searchPath: typegenConfig.path,
+      overloadClientMethods: typegenConfig.overloadClientMethods,
+      augmentGroqModule: typegenConfig.augmentGroqModule
     },
     // eslint-disable-next-line no-process-env
     env: process.env
-  }),
-
-    // eslint-disable-next-line no-bitwise
-    fs.constants.O_TRUNC | fs.constants.O_CREAT | fs.constants.O_WRONLY
-  );
-  typeFile.write(generatedFileWarning);
+  }), receiver = workerChannel.createReceiver(worker);
+  let fileHandle;
   const stats = {
     queryFilesCount: 0,
-    errors: 0,
     queriesCount: 0,
+    projectionsCount: 0,
     schemaTypesCount: 0,
-
-
-
-    size: 0
+    typeEvaluationStats: null,
+    outputSize: 0,
+    filesWithErrors: 0
   };
-
-
-
-
-
-
-      }
-      const errorMessage = msg.filename ? `${msg.error.message} in "${msg.filename}"` : msg.error.message;
-      spinner.fail(errorMessage), stats.errors++;
-      return;
-    }
-    if (msg.type === "complete") {
-      resolve();
-      return;
-    }
-    if (msg.type === "typemap") {
-      let typeMapStr = `// Query TypeMap
-`;
-      typeMapStr += msg.typeMap, typeFile.write(typeMapStr), stats.size += Buffer.byteLength(typeMapStr);
-      return;
-    }
-    let fileTypeString = `// Source: ${msg.filename}
-`;
-    if (msg.type === "schema") {
-      stats.schemaTypesCount += msg.length, fileTypeString += msg.schema, typeFile.write(fileTypeString);
-      return;
-    }
-    if (msg.type === "types") {
+  try {
+    try {
+      spinner.start(), fileHandle = await fs.open(outputPath, "w"), await fileHandle.write(generatedFileWarning), spinner.text = "Generating schema types...";
+      const { code: schemaCode, schemas } = await receiver.event.schema();
+      stats.schemaTypesCount = schemas.reduce((total, schema) => total + schema.typeCount, 0), await fileHandle.write(schemaCode), spinner.text = "Generating query types...";
+      for await (const queryResult of receiver.stream.queries()) {
         stats.queryFilesCount++;
-
-
-
-        type
-        typeNodesGenerated,
-        unknownTypeNodesGenerated,
-        emptyUnionTypeNodesGenerated
-      } of msg.types)
-        fileTypeString += `// Variable: ${queryName}
-`, fileTypeString += `// Query: ${query.replace(/(\r\n|\n|\r)/gm, "").trim()}
-`, fileTypeString += type, stats.queriesCount++, stats.typeNodesGenerated += typeNodesGenerated, stats.unknownTypeNodesGenerated += unknownTypeNodesGenerated, stats.emptyUnionTypeNodesGenerated += emptyUnionTypeNodesGenerated;
-      typeFile.write(`${fileTypeString}
-`), stats.size += Buffer.byteLength(fileTypeString);
+        const { error, results } = queryResult;
+        error && stats.filesWithErrors++;
+        for (const result of results)
+          await fileHandle.write(result.code), result.type === "projection" ? stats.projectionsCount++ : stats.queriesCount++;
       }
-
-
-
-  if (prettierConfig) {
-    const formatFile = await fs.open(outputPath, fs.constants.O_RDWR);
-    try {
-      const code = await formatFile.readFile(), formattedCode = await prettier.format(code.toString(), {
-        ...prettierConfig,
-        parser: "typescript"
-      });
-      await formatFile.truncate(), await formatFile.write(formattedCode, 0), spinner.info("Formatted generated types with Prettier");
-    } catch (err) {
-      output.warn(`Failed to format generated types with Prettier: ${err.message}`);
+      spinner.text = "Generating typemap...";
+      const { code: typemapCode, stats: finalStats } = await receiver.event.typemap();
+      stats.typeEvaluationStats = finalStats, await fileHandle.write(typemapCode);
     } finally {
-      await
+      await fileHandle?.close();
     }
+    typegenConfig.formatGeneratedCode && await formatGeneratedFile(outputPath, output, spinner);
+    const finalStat = await fs.stat(outputPath);
+    stats.outputSize = finalStat.size, trace.log({
+      outputSize: stats.outputSize,
+      queriesCount: stats.queriesCount,
+      projectionsCount: stats.projectionsCount,
+      schemaTypesCount: stats.schemaTypesCount,
+      queryFilesCount: stats.queryFilesCount,
+      filesWithErrors: stats.filesWithErrors,
+      typeNodesGenerated: stats.typeEvaluationStats?.totalTypeNodes,
+      unknownTypeNodesGenerated: stats.typeEvaluationStats?.unknownTypeCount,
+      unknownTypeNodesRatio: stats.typeEvaluationStats && stats.typeEvaluationStats.totalTypeNodes > 0 ? stats.typeEvaluationStats.unknownTypeCount / stats.typeEvaluationStats.totalTypeNodes : 0,
+      emptyUnionTypeNodesGenerated: stats.typeEvaluationStats?.emptyUnionCount,
+      configOverloadClientMethods: typegenConfig.overloadClientMethods,
+      configAugmentGroqModule: typegenConfig.augmentGroqModule
+    }), trace.complete(), stats.filesWithErrors > 0 && spinner.warn(`Encountered errors in ${stats.filesWithErrors} files while generating types`), spinner.succeed(
+      `Generated TypeScript types for ${stats.schemaTypesCount} schema types, ${stats.queriesCount} GROQ queries, ${stats.projectionsCount} GROQ projections, in ${stats.queryFilesCount} files into: ${typegenConfig.generates}`
+    );
+  } catch (err) {
+    throw spinner.fail("Type generation failed"), trace.error(err instanceof Error ? err : new Error(String(err))), err;
+  } finally {
+    await receiver.dispose();
   }
-  trace.log({
-    outputSize: stats.size,
-    queriesCount: stats.queriesCount,
-    schemaTypesCount: stats.schemaTypesCount,
-    queryFilesCount: stats.queryFilesCount,
-    filesWithErrors: stats.errors,
-    typeNodesGenerated: stats.typeNodesGenerated,
-    unknownTypeNodesGenerated: stats.unknownTypeNodesGenerated,
-    unknownTypeNodesRatio: stats.typeNodesGenerated > 0 ? stats.unknownTypeNodesGenerated / stats.typeNodesGenerated : 0,
-    emptyUnionTypeNodesGenerated: stats.emptyUnionTypeNodesGenerated,
-    configOverloadClientMethods: codegenConfig.overloadClientMethods
-  }), trace.complete(), stats.errors > 0 && spinner.warn(`Encountered errors in ${stats.errors} files while generating types`), spinner.succeed(
-    `Generated TypeScript types for ${stats.schemaTypesCount} schema types and ${stats.queriesCount} GROQ queries in ${stats.queryFilesCount} files into: ${codegenConfig.generates}`
-  );
 }
 exports.default = typegenGenerateAction;
 //# sourceMappingURL=generateAction.js.map
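The structural change in `generateAction.js` is the replacement of the old ad-hoc `worker.addListener("message", ...)` switch with a typed worker channel: the action now awaits `receiver.event.schema()`, consumes `receiver.stream.queries()` with `for await`, awaits `receiver.event.typemap()`, and always calls `receiver.dispose()` in a `finally` block. The real implementation is the newly added `package/src/util/workerChannel.ts` (312 lines, with tests in `workerChannel.test.ts`); the sketch below is only an assumed reduction of the idea — the message shape (`kind`/`name`/`payload`) and the Proxy-based accessors are illustrative, not the package's actual API:

```ts
import {EventEmitter} from 'node:events'
import type {Worker} from 'node:worker_threads'

// One-shot events resolve a promise; streams yield until an `end` marker.
// Assumes each name is used for exactly one kind of message.
type ChannelMessage =
  | {kind: 'event'; name: string; payload: unknown}
  | {kind: 'emission'; name: string; payload: unknown}
  | {kind: 'end'; name: string}

export function createReceiver(worker: Worker) {
  const bus = new EventEmitter()
  const forward = (message: ChannelMessage) => bus.emit(message.name, message)
  worker.on('message', forward)

  return {
    // `receiver.event.schema()` resolves with the `schema` event payload.
    event: new Proxy({} as Record<string, () => Promise<unknown>>, {
      get: (_target, name) => () =>
        new Promise((resolve) =>
          bus.once(String(name), (msg: ChannelMessage) =>
            resolve((msg as {payload?: unknown}).payload),
          ),
        ),
    }),
    // `receiver.stream.queries()` yields each `queries` emission in order.
    stream: new Proxy({} as Record<string, () => AsyncGenerator<unknown>>, {
      get: (_target, name) =>
        async function* () {
          const queue: ChannelMessage[] = []
          let wake = () => {}
          const onMessage = (msg: ChannelMessage) => {
            queue.push(msg)
            wake()
          }
          bus.on(String(name), onMessage)
          try {
            for (;;) {
              while (queue.length === 0) await new Promise<void>((r) => (wake = r))
              const msg = queue.shift()!
              if (msg.kind === 'end') return
              if (msg.kind === 'emission') yield msg.payload
            }
          } finally {
            bus.off(String(name), onMessage)
          }
        },
    }),
    // The action calls this in a `finally`, so the worker is torn down
    // whether generation succeeded or failed.
    async dispose() {
      worker.off('message', forward)
      await worker.terminate()
    },
  }
}
```

On the worker side, a matching reporter would post `{kind: 'event', name: 'schema', payload}`, `{kind: 'emission', name: 'queries', payload}` and `{kind: 'end', name: 'queries'}` via `parentPort.postMessage` — again an assumed wire format; the real channel also has to surface worker errors and nonzero exit codes, which this sketch omits.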
package/lib/_chunks-cjs/generateAction.js.map
@@ -1 +1 @@
-
{"version":3,"file":"generateAction.js","sources":["../../src/actions/typegen/generate.telemetry.ts","../../src/actions/typegen/generateAction.ts"],"sourcesContent":["import {defineTrace} from '@sanity/telemetry'\n\ninterface TypesGeneratedTraceAttrubutes {\n outputSize: number\n queriesCount: number\n schemaTypesCount: number\n queryFilesCount: number\n filesWithErrors: number\n typeNodesGenerated: number\n unknownTypeNodesGenerated: number\n unknownTypeNodesRatio: number\n emptyUnionTypeNodesGenerated: number\n configOverloadClientMethods: boolean\n}\n\nexport const TypesGeneratedTrace = defineTrace<TypesGeneratedTraceAttrubutes>({\n name: 'Types Generated',\n version: 0,\n description: 'Trace emitted when generating TypeScript types for queries',\n})\n","import {constants, mkdir, open, stat} from 'node:fs/promises'\nimport {dirname, join} from 'node:path'\nimport {Worker} from 'node:worker_threads'\n\nimport {readConfig} from '@sanity/codegen'\nimport {format as prettierFormat, resolveConfig as resolvePrettierConfig} from 'prettier'\n\nimport {type CliCommandArguments, type CliCommandContext} from '../../types'\nimport {getCliWorkerPath} from '../../util/cliWorker'\nimport {\n type TypegenGenerateTypesWorkerData,\n type TypegenGenerateTypesWorkerMessage,\n} from '../../workers/typegenGenerate'\nimport {TypesGeneratedTrace} from './generate.telemetry'\n\nexport interface TypegenGenerateTypesCommandFlags {\n 'config-path'?: string\n}\n\nconst generatedFileWarning = `/**\n * ---------------------------------------------------------------------------------\n * This file has been generated by Sanity TypeGen.\n * Command: \\`sanity typegen generate\\`\n *\n * Any modifications made directly to this file will be overwritten the next time\n * the TypeScript definitions are generated. Please make changes to the Sanity\n * schema definitions and/or GROQ queries if you need to update these types.\n *\n * For more information on how to use Sanity TypeGen, visit the official documentation:\n * https://www.sanity.io/docs/sanity-typegen\n * ---------------------------------------------------------------------------------\n */\\n\\n`\n\nexport default async function typegenGenerateAction(\n args: CliCommandArguments<TypegenGenerateTypesCommandFlags>,\n context: CliCommandContext,\n): Promise<void> {\n const flags = args.extOptions\n const {output, workDir, telemetry} = context\n\n const trace = telemetry.trace(TypesGeneratedTrace)\n trace.start()\n\n const codegenConfig = await readConfig(flags['config-path'] || 'sanity-typegen.json')\n\n try {\n const schemaStats = await stat(codegenConfig.schema)\n if (!schemaStats.isFile()) {\n throw new Error(`Schema path is not a file: ${codegenConfig.schema}`)\n }\n } catch (err) {\n if (err.code === 'ENOENT') {\n // If the user has not provided a specific schema path (eg we're using the default), give some help\n const hint =\n codegenConfig.schema === './schema.json' ? 
` - did you run \"sanity schema extract\"?` : ''\n throw new Error(`Schema file not found: ${codegenConfig.schema}${hint}`)\n }\n throw err\n }\n\n const outputPath = join(process.cwd(), codegenConfig.generates)\n const outputDir = dirname(outputPath)\n await mkdir(outputDir, {recursive: true})\n const workerPath = await getCliWorkerPath('typegenGenerate')\n\n const spinner = output.spinner({}).start('Generating types')\n\n const worker = new Worker(workerPath, {\n workerData: {\n workDir,\n schemaPath: codegenConfig.schema,\n searchPath: codegenConfig.path,\n overloadClientMethods: codegenConfig.overloadClientMethods,\n } satisfies TypegenGenerateTypesWorkerData,\n // eslint-disable-next-line no-process-env\n env: process.env,\n })\n\n const typeFile = await open(\n outputPath,\n // eslint-disable-next-line no-bitwise\n constants.O_TRUNC | constants.O_CREAT | constants.O_WRONLY,\n )\n\n typeFile.write(generatedFileWarning)\n\n const stats = {\n queryFilesCount: 0,\n errors: 0,\n queriesCount: 0,\n schemaTypesCount: 0,\n unknownTypeNodesGenerated: 0,\n typeNodesGenerated: 0,\n emptyUnionTypeNodesGenerated: 0,\n size: 0,\n }\n\n await new Promise<void>((resolve, reject) => {\n worker.addListener('message', (msg: TypegenGenerateTypesWorkerMessage) => {\n if (msg.type === 'error') {\n if (msg.fatal) {\n trace.error(msg.error)\n reject(msg.error)\n return\n }\n const errorMessage = msg.filename\n ? `${msg.error.message} in \"${msg.filename}\"`\n : msg.error.message\n spinner.fail(errorMessage)\n stats.errors++\n return\n }\n if (msg.type === 'complete') {\n resolve()\n return\n }\n\n if (msg.type === 'typemap') {\n let typeMapStr = `// Query TypeMap\\n`\n typeMapStr += msg.typeMap\n typeFile.write(typeMapStr)\n stats.size += Buffer.byteLength(typeMapStr)\n return\n }\n\n let fileTypeString = `// Source: ${msg.filename}\\n`\n\n if (msg.type === 'schema') {\n stats.schemaTypesCount += msg.length\n fileTypeString += msg.schema\n typeFile.write(fileTypeString)\n return\n }\n\n if (msg.type === 'types') {\n stats.queryFilesCount++\n for (const {\n queryName,\n query,\n type,\n typeNodesGenerated,\n unknownTypeNodesGenerated,\n emptyUnionTypeNodesGenerated,\n } of msg.types) {\n fileTypeString += `// Variable: ${queryName}\\n`\n fileTypeString += `// Query: ${query.replace(/(\\r\\n|\\n|\\r)/gm, '').trim()}\\n`\n fileTypeString += type\n stats.queriesCount++\n stats.typeNodesGenerated += typeNodesGenerated\n stats.unknownTypeNodesGenerated += unknownTypeNodesGenerated\n stats.emptyUnionTypeNodesGenerated += emptyUnionTypeNodesGenerated\n }\n typeFile.write(`${fileTypeString}\\n`)\n stats.size += Buffer.byteLength(fileTypeString)\n }\n })\n worker.addListener('error', reject)\n })\n\n await typeFile.close()\n\n const prettierConfig = codegenConfig.formatGeneratedCode\n ? 
await resolvePrettierConfig(outputPath).catch((err) => {\n output.warn(`Failed to load prettier config: ${err.message}`)\n return null\n })\n : null\n\n if (prettierConfig) {\n const formatFile = await open(outputPath, constants.O_RDWR)\n try {\n const code = await formatFile.readFile()\n const formattedCode = await prettierFormat(code.toString(), {\n ...prettierConfig,\n parser: 'typescript' as const,\n })\n await formatFile.truncate()\n await formatFile.write(formattedCode, 0)\n\n spinner.info('Formatted generated types with Prettier')\n } catch (err) {\n output.warn(`Failed to format generated types with Prettier: ${err.message}`)\n } finally {\n await formatFile.close()\n }\n }\n\n trace.log({\n outputSize: stats.size,\n queriesCount: stats.queriesCount,\n schemaTypesCount: stats.schemaTypesCount,\n queryFilesCount: stats.queryFilesCount,\n filesWithErrors: stats.errors,\n typeNodesGenerated: stats.typeNodesGenerated,\n unknownTypeNodesGenerated: stats.unknownTypeNodesGenerated,\n unknownTypeNodesRatio:\n stats.typeNodesGenerated > 0 ? stats.unknownTypeNodesGenerated / stats.typeNodesGenerated : 0,\n emptyUnionTypeNodesGenerated: stats.emptyUnionTypeNodesGenerated,\n configOverloadClientMethods: codegenConfig.overloadClientMethods,\n })\n\n trace.complete()\n if (stats.errors > 0) {\n spinner.warn(`Encountered errors in ${stats.errors} files while generating types`)\n }\n\n spinner.succeed(\n `Generated TypeScript types for ${stats.schemaTypesCount} schema types and ${stats.queriesCount} GROQ queries in ${stats.queryFilesCount} files into: ${codegenConfig.generates}`,\n )\n}\n"],"names":["defineTrace","telemetry","readConfig","stat","join","dirname","mkdir","getCliWorkerPath","Worker","open","constants","resolvePrettierConfig","prettierFormat"],"mappings":";;AAeO,MAAM,sBAAsBA,UAAAA,YAA2C;AAAA,EAC5E,MAAM;AAAA,EACN,SAAS;AAAA,EACT,aAAa;AACf,CAAC,GCAK,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAcC,eAAA,sBAC5B,MACA,SACe;AACf,QAAM,QAAQ,KAAK,YACb,EAAC,QAAQ,SAAS,WAAAC,WAAS,IAAI,SAE/B,QAAQA,WAAU,MAAM,mBAAmB;AACjD,QAAM,MAAM;AAEZ,QAAM,gBAAgB,MAAMC,QAAA,WAAW,MAAM,aAAa,KAAK,qBAAqB;AAEhF,MAAA;AAEF,QAAI,EADgB,MAAMC,GAAA,KAAK,cAAc,MAAM,GAClC,OAAO;AACtB,YAAM,IAAI,MAAM,8BAA8B,cAAc,MAAM,EAAE;AAAA,WAE/D,KAAK;AACR,QAAA,IAAI,SAAS,UAAU;AAEzB,YAAM,OACJ,cAAc,WAAW,kBAAkB,4CAA4C;AACzF,YAAM,IAAI,MAAM,0BAA0B,cAAc,MAAM,GAAG,IAAI,EAAE;AAAA,IAAA;AAEnE,UAAA;AAAA,EAAA;AAGF,QAAA,aAAaC,KAAAA,KAAK,QAAQ,IAAI,GAAG,cAAc,SAAS,GACxD,YAAYC,KAAA,QAAQ,UAAU;AACpC,QAAMC,GAAM,MAAA,WAAW,EAAC,WAAW,IAAK;AACxC,QAAM,aAAa,MAAMC,UAAA,iBAAiB,iBAAiB,GAErD,UAAU,OAAO,QAAQ,CAAA,CAAE,EAAE,MAAM,kBAAkB,GAErD,SAAS,IAAIC,2BAAO,YAAY;AAAA,IACpC,YAAY;AAAA,MACV;AAAA,MACA,YAAY,cAAc;AAAA,MAC1B,YAAY,cAAc;AAAA,MAC1B,uBAAuB,cAAc;AAAA,IACvC;AAAA;AAAA,IAEA,KAAK,QAAQ;AAAA,EAAA,CACd,GAEK,WAAW,MAAMC,GAAA;AAAA,IACrB;AAAA;AAAA,IAEAC,GAAAA,UAAU,UAAUA,aAAU,UAAUA,GAAAA,UAAU;AAAA,EACpD;AAEA,WAAS,MAAM,oBAAoB;AAEnC,QAAM,QAAQ;AAAA,IACZ,iBAAiB;AAAA,IACjB,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,kBAAkB;AAAA,IAClB,2BAA2B;AAAA,IAC3B,oBAAoB;AAAA,IACpB,8BAA8B;AAAA,IAC9B,MAAM;AAAA,EACR;AAEA,QAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AACpC,WAAA,YAAY,WAAW,CAAC,QAA2C;AACpE,UAAA,IAAI,SAAS,SAAS;AACxB,YAAI,IAAI,OAAO;AACb,gBAAM,MAAM,IAAI,KAAK,GACrB,OAAO,IAAI,KAAK;AAChB;AAAA,QAAA;AAEF,cAAM,eAAe,IAAI,WACrB,GAAG,IAAI,MAAM,OAAO,QAAQ,IAAI,QAAQ,MACxC,IAAI,MAAM;AACN,gBAAA,KAAK,YAAY,GACzB,MAAM;AACN;AAAA,MAAA;AAEE,UAAA,IAAI,SAAS,YAAY;AACnB,gBAAA;AACR;AAAA,MAAA;AAGE,UAAA,IAAI,SAAS,WAAW;AAC1B,YAAI,aAAa;AAAA;AACH,sBAAA,IAAI,SAClB,SAAS,MAAM,UAAU,GACzB,MAAM,QAAQ,OAAO,WAAW,UAAU;AAC1C;AAAA,M
AAA;AAGE,UAAA,iBAAiB,cAAc,IAAI,QAAQ;AAAA;AAE3C,UAAA,IAAI,SAAS,UAAU;AACnB,cAAA,oBAAoB,IAAI,QAC9B,kBAAkB,IAAI,QACtB,SAAS,MAAM,cAAc;AAC7B;AAAA,MAAA;AAGE,UAAA,IAAI,SAAS,SAAS;AAClB,cAAA;AACK,mBAAA;AAAA,UACT;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,aACG,IAAI;AACP,4BAAkB,gBAAgB,SAAS;AAAA,GAC3C,kBAAkB,aAAa,MAAM,QAAQ,kBAAkB,EAAE,EAAE,KAAM,CAAA;AAAA,GACzE,kBAAkB,MAClB,MAAM,gBACN,MAAM,sBAAsB,oBAC5B,MAAM,6BAA6B,2BACnC,MAAM,gCAAgC;AAE/B,iBAAA,MAAM,GAAG,cAAc;AAAA,CAAI,GACpC,MAAM,QAAQ,OAAO,WAAW,cAAc;AAAA,MAAA;AAAA,IAEjD,CAAA,GACD,OAAO,YAAY,SAAS,MAAM;AAAA,EAAA,CACnC,GAED,MAAM,SAAS,MAAM;AAErB,QAAM,iBAAiB,cAAc,sBACjC,MAAMC,SAAsB,cAAA,UAAU,EAAE,MAAM,CAAC,SAC7C,OAAO,KAAK,mCAAmC,IAAI,OAAO,EAAE,GACrD,KACR,IACD;AAEJ,MAAI,gBAAgB;AAClB,UAAM,aAAa,MAAMF,GAAAA,KAAK,YAAYC,GAAAA,UAAU,MAAM;AACtD,QAAA;AACI,YAAA,OAAO,MAAM,WAAW,SAAS,GACjC,gBAAgB,MAAME,SAAA,OAAe,KAAK,YAAY;AAAA,QAC1D,GAAG;AAAA,QACH,QAAQ;AAAA,MAAA,CACT;AACK,YAAA,WAAW,YACjB,MAAM,WAAW,MAAM,eAAe,CAAC,GAEvC,QAAQ,KAAK,yCAAyC;AAAA,aAC/C,KAAK;AACZ,aAAO,KAAK,mDAAmD,IAAI,OAAO,EAAE;AAAA,IAAA,UAC5E;AACA,YAAM,WAAW,MAAM;AAAA,IAAA;AAAA,EACzB;AAGF,QAAM,IAAI;AAAA,IACR,YAAY,MAAM;AAAA,IAClB,cAAc,MAAM;AAAA,IACpB,kBAAkB,MAAM;AAAA,IACxB,iBAAiB,MAAM;AAAA,IACvB,iBAAiB,MAAM;AAAA,IACvB,oBAAoB,MAAM;AAAA,IAC1B,2BAA2B,MAAM;AAAA,IACjC,uBACE,MAAM,qBAAqB,IAAI,MAAM,4BAA4B,MAAM,qBAAqB;AAAA,IAC9F,8BAA8B,MAAM;AAAA,IACpC,6BAA6B,cAAc;AAAA,EAAA,CAC5C,GAED,MAAM,SAAA,GACF,MAAM,SAAS,KACjB,QAAQ,KAAK,yBAAyB,MAAM,MAAM,+BAA+B,GAGnF,QAAQ;AAAA,IACN,kCAAkC,MAAM,gBAAgB,qBAAqB,MAAM,YAAY,oBAAoB,MAAM,eAAe,gBAAgB,cAAc,SAAS;AAAA,EACjL;AACF;;"}
+
{"version":3,"file":"generateAction.js","sources":["../../src/actions/typegen/generate.telemetry.ts","../../src/actions/typegen/generateAction.ts"],"sourcesContent":["import {defineTrace} from '@sanity/telemetry'\n\ninterface TypesGeneratedTraceAttributes {\n outputSize: number\n queriesCount: number\n projectionsCount: number\n schemaTypesCount: number\n queryFilesCount: number\n filesWithErrors: number\n typeNodesGenerated: number\n unknownTypeNodesGenerated: number\n unknownTypeNodesRatio: number\n emptyUnionTypeNodesGenerated: number\n configOverloadClientMethods: boolean\n configAugmentGroqModule: boolean\n}\n\nexport const TypesGeneratedTrace = defineTrace<TypesGeneratedTraceAttributes>({\n name: 'Types Generated',\n version: 1,\n description: 'Trace emitted when generating TypeScript types for queries',\n})\n","/* eslint-disable complexity */\n/* eslint-disable max-statements */\n/* eslint-disable max-depth */\nimport {constants, mkdir, open, stat} from 'node:fs/promises'\nimport {dirname, join} from 'node:path'\nimport {Worker} from 'node:worker_threads'\n\nimport {readConfig, type TypeEvaluationStats} from '@sanity/codegen'\n\nimport {type CliCommandArguments, type CliCommandContext} from '../../types'\nimport {getCliWorkerPath} from '../../util/cliWorker'\nimport {createReceiver} from '../../util/workerChannel'\nimport {\n type TypegenGenerateTypesWorkerData,\n type TypegenWorkerChannel,\n} from '../../workers/typegenGenerate'\nimport {TypesGeneratedTrace} from './generate.telemetry'\n\nexport interface TypegenGenerateTypesCommandFlags {\n 'config-path'?: string\n}\n\nconst generatedFileWarning = `/**\n * ---------------------------------------------------------------------------------\n * This file has been generated by Sanity TypeGen.\n * Command: \\`sanity typegen generate\\`\n *\n * Any modifications made directly to this file will be overwritten the next time\n * the TypeScript definitions are generated. 
Please make changes to the Sanity\n * schema definitions and/or GROQ queries if you need to update these types.\n *\n * For more information on how to use Sanity TypeGen, visit the official documentation:\n * https://www.sanity.io/docs/sanity-typegen\n * ---------------------------------------------------------------------------------\n */\\n\\n`\n\n/**\n * Helper function to format the generated file using Prettier.\n * Handles its own file operations and error reporting.\n */\nasync function formatGeneratedFile(\n outputPath: string,\n output: CliCommandContext['output'],\n spinner: ReturnType<CliCommandContext['output']['spinner']>,\n): Promise<void> {\n // this is here because this is an ESM-only import\n const {format: prettierFormat, resolveConfig: resolvePrettierConfig} = await import('prettier')\n\n let formatFile\n try {\n // Load prettier config\n const prettierConfig = await resolvePrettierConfig(outputPath).catch((err) => {\n output.warn(`Failed to load prettier config: ${err.message}`)\n return null\n })\n\n if (prettierConfig) {\n spinner.text = 'Formatting generated types with Prettier...'\n formatFile = await open(outputPath, constants.O_RDWR)\n try {\n const code = await formatFile.readFile()\n const formattedCode = await prettierFormat(code.toString(), {\n ...prettierConfig,\n parser: 'typescript' as const,\n })\n await formatFile.truncate() // Truncate before writing formatted code\n await formatFile.write(formattedCode, 0) // Write formatted code from the beginning\n spinner.info('Formatted generated types with Prettier')\n } catch (err) {\n output.warn(`Failed to format generated types with Prettier: ${err.message}`)\n } finally {\n // Ensure the formatting file handle is closed\n await formatFile?.close()\n }\n }\n } catch (err) {\n // Catch errors during the formatting setup (e.g., opening the formatFile)\n output.warn(`Error during formatting setup: ${err.message}`)\n }\n}\n\nexport default async function typegenGenerateAction(\n args: CliCommandArguments<TypegenGenerateTypesCommandFlags>,\n context: CliCommandContext,\n): Promise<void> {\n const flags = args.extOptions\n const {output, workDir, telemetry} = context\n\n const trace = telemetry.trace(TypesGeneratedTrace)\n trace.start()\n\n const typegenConfig = await readConfig(flags['config-path'] ?? 'sanity-typegen.json')\n\n const missingSchemas: string[] = []\n const invalidSchemas: string[] = []\n\n for (const schemaPath of typegenConfig.schemas.map((i) => i.schemaPath)) {\n try {\n const schemaStats = await stat(schemaPath)\n if (!schemaStats.isFile()) {\n invalidSchemas.push(schemaPath)\n }\n } catch (err) {\n if (err.code === 'ENOENT') {\n missingSchemas.push(schemaPath)\n } else {\n throw err\n }\n }\n }\n\n if (missingSchemas.length > 0 || invalidSchemas.length > 0) {\n const errors: string[] = []\n\n if (missingSchemas.length > 0) {\n // If the user has not provided a specific schema path (eg we're using the default), give some help\n const hint =\n missingSchemas.length === 1 && missingSchemas[0] === './schema.json'\n ? 
' - did you run \"sanity schema extract\"?'\n : ''\n const schemaList = missingSchemas.map((path) => ` - ${path}`).join('\\n')\n errors.push(`The following schema files were not found:\\n${schemaList}${hint}`)\n }\n\n if (invalidSchemas.length > 0) {\n const schemaList = invalidSchemas.map((path) => ` - ${path}`).join('\\n')\n errors.push(`The following schema paths are not files:\\n${schemaList}`)\n }\n\n throw new Error(errors.join('\\n\\n'))\n }\n\n const outputPath = join(process.cwd(), typegenConfig.generates)\n const outputDir = dirname(outputPath)\n await mkdir(outputDir, {recursive: true})\n const workerPath = await getCliWorkerPath('typegenGenerate')\n\n const spinner = output.spinner('Generating types')\n\n const worker = new Worker(workerPath, {\n workerData: {\n workDir,\n schemas: typegenConfig.schemas,\n searchPath: typegenConfig.path,\n overloadClientMethods: typegenConfig.overloadClientMethods,\n augmentGroqModule: typegenConfig.augmentGroqModule,\n } satisfies TypegenGenerateTypesWorkerData,\n // eslint-disable-next-line no-process-env\n env: process.env,\n })\n\n const receiver = createReceiver<TypegenWorkerChannel>(worker)\n\n let fileHandle\n const stats = {\n queryFilesCount: 0,\n queriesCount: 0,\n projectionsCount: 0,\n schemaTypesCount: 0,\n typeEvaluationStats: null as TypeEvaluationStats | null,\n outputSize: 0,\n filesWithErrors: 0,\n }\n\n try {\n try {\n spinner.start()\n\n fileHandle = await open(outputPath, 'w')\n await fileHandle.write(generatedFileWarning)\n\n spinner.text = 'Generating schema types...'\n const {code: schemaCode, schemas} = await receiver.event.schema()\n stats.schemaTypesCount = schemas.reduce((total, schema) => total + schema.typeCount, 0)\n await fileHandle.write(schemaCode)\n\n spinner.text = 'Generating query types...'\n for await (const queryResult of receiver.stream.queries()) {\n stats.queryFilesCount++\n const {error, results} = queryResult\n if (error) {\n stats.filesWithErrors++\n }\n for (const result of results) {\n await fileHandle.write(result.code)\n if (result.type === 'projection') {\n stats.projectionsCount++\n } else {\n stats.queriesCount++\n }\n }\n }\n\n spinner.text = 'Generating typemap...'\n const {code: typemapCode, stats: finalStats} = await receiver.event.typemap()\n stats.typeEvaluationStats = finalStats\n await fileHandle.write(typemapCode)\n } finally {\n // Ensure the initial file handle is closed before moving on\n await fileHandle?.close()\n }\n\n if (typegenConfig.formatGeneratedCode) {\n await formatGeneratedFile(outputPath, output, spinner)\n }\n\n // Gather final stats and report success\n const finalStat = await stat(outputPath)\n stats.outputSize = finalStat.size\n\n trace.log({\n outputSize: stats.outputSize,\n queriesCount: stats.queriesCount,\n projectionsCount: stats.projectionsCount,\n schemaTypesCount: stats.schemaTypesCount,\n queryFilesCount: stats.queryFilesCount,\n filesWithErrors: stats.filesWithErrors,\n typeNodesGenerated: stats.typeEvaluationStats?.totalTypeNodes,\n unknownTypeNodesGenerated: stats.typeEvaluationStats?.unknownTypeCount,\n unknownTypeNodesRatio:\n stats.typeEvaluationStats && stats.typeEvaluationStats.totalTypeNodes > 0\n ? 
stats.typeEvaluationStats.unknownTypeCount / stats.typeEvaluationStats.totalTypeNodes\n : 0,\n emptyUnionTypeNodesGenerated: stats.typeEvaluationStats?.emptyUnionCount,\n configOverloadClientMethods: typegenConfig.overloadClientMethods,\n configAugmentGroqModule: typegenConfig.augmentGroqModule,\n })\n\n trace.complete()\n if (stats.filesWithErrors > 0) {\n spinner.warn(`Encountered errors in ${stats.filesWithErrors} files while generating types`)\n }\n\n spinner.succeed(\n `Generated TypeScript types for ${stats.schemaTypesCount} schema types, ${stats.queriesCount} GROQ queries, ${stats.projectionsCount} GROQ projections, in ${stats.queryFilesCount} files into: ${typegenConfig.generates}`,\n )\n } catch (err) {\n spinner.fail('Type generation failed')\n trace.error(err instanceof Error ? err : new Error(String(err)))\n throw err // Re-throw the error after logging\n } finally {\n // Ensure the worker receiver is always disposed\n await receiver.dispose()\n }\n}\n"],"names":["defineTrace","open","constants","telemetry","readConfig","stat","path","join","dirname","mkdir","getCliWorkerPath","Worker","createReceiver"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAiBO,MAAM,sBAAsBA,UAAAA,YAA2C;AAAA,EAC5E,MAAM;AAAA,EACN,SAAS;AAAA,EACT,aAAa;AACf,CAAC,GCCK,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkB7B,eAAe,oBACb,YACA,QACA,SACe;AAET,QAAA,EAAC,QAAQ,gBAAgB,eAAe,sBAAyB,IAAA,MAAM,OAAO,UAAU;AAE1F,MAAA;AACA,MAAA;AAEF,UAAM,iBAAiB,MAAM,sBAAsB,UAAU,EAAE,MAAM,CAAC,SACpE,OAAO,KAAK,mCAAmC,IAAI,OAAO,EAAE,GACrD,KACR;AAED,QAAI,gBAAgB;AAClB,cAAQ,OAAO,+CACf,aAAa,MAAMC,GAAAA,KAAK,YAAYC,aAAU,MAAM;AAChD,UAAA;AACI,cAAA,OAAO,MAAM,WAAW,SAAS,GACjC,gBAAgB,MAAM,eAAe,KAAK,YAAY;AAAA,UAC1D,GAAG;AAAA,UACH,QAAQ;AAAA,QAAA,CACT;AACK,cAAA,WAAW,YACjB,MAAM,WAAW,MAAM,eAAe,CAAC,GACvC,QAAQ,KAAK,yCAAyC;AAAA,eAC/C,KAAK;AACZ,eAAO,KAAK,mDAAmD,IAAI,OAAO,EAAE;AAAA,MAAA,UAC5E;AAEA,cAAM,YAAY,MAAM;AAAA,MAAA;AAAA,IAC1B;AAAA,WAEK,KAAK;AAEZ,WAAO,KAAK,kCAAkC,IAAI,OAAO,EAAE;AAAA,EAAA;AAE/D;AAE8B,eAAA,sBAC5B,MACA,SACe;AACf,QAAM,QAAQ,KAAK,YACb,EAAC,QAAQ,SAAS,WAAAC,WAAS,IAAI,SAE/B,QAAQA,WAAU,MAAM,mBAAmB;AACjD,QAAM,MAAM;AAEZ,QAAM,gBAAgB,MAAMC,mBAAW,MAAM,aAAa,KAAK,qBAAqB,GAE9E,iBAA2B,IAC3B,iBAA2B,CAAC;AAElC,aAAW,cAAc,cAAc,QAAQ,IAAI,CAAC,MAAM,EAAE,UAAU;AAChE,QAAA;AACkB,OAAA,MAAMC,QAAK,UAAU,GACxB,YACf,eAAe,KAAK,UAAU;AAAA,aAEzB,KAAK;AACZ,UAAI,IAAI,SAAS;AACf,uBAAe,KAAK,UAAU;AAAA;AAExB,cAAA;AAAA,IAAA;AAKZ,MAAI,eAAe,SAAS,KAAK,eAAe,SAAS,GAAG;AAC1D,UAAM,SAAmB,CAAC;AAEtB,QAAA,eAAe,SAAS,GAAG;AAE7B,YAAM,OACJ,eAAe,WAAW,KAAK,eAAe,CAAC,MAAM,kBACjD,4CACA,IACA,aAAa,eAAe,IAAI,CAACC,UAAS,OAAOA,KAAI,EAAE,EAAE,KAAK;AAAA,CAAI;AACxE,aAAO,KAAK;AAAA,EAA+C,UAAU,GAAG,IAAI,EAAE;AAAA,IAAA;AAG5E,QAAA,eAAe,SAAS,GAAG;AACvB,YAAA,aAAa,eAAe,IAAI,CAACA,UAAS,OAAOA,KAAI,EAAE,EAAE,KAAK;AAAA,CAAI;AACxE,aAAO,KAAK;AAAA,EAA8C,UAAU,EAAE;AAAA,IAAA;AAGlE,UAAA,IAAI,MAAM,OAAO,KAAK;AAAA;AAAA,CAAM,CAAC;AAAA,EAAA;AAG/B,QAAA,aAAaC,KAAAA,KAAK,QAAQ,IAAI,GAAG,cAAc,SAAS,GACxD,YAAYC,KAAA,QAAQ,UAAU;AACpC,QAAMC,GAAM,MAAA,WAAW,EAAC,WAAW,IAAK;AACxC,QAAM,aAAa,MAAMC,UAAAA,iBAAiB,iBAAiB,GAErD,UAAU,OAAO,QAAQ,kBAAkB,GAE3C,SAAS,IAAIC,oBAAAA,OAAO,YAAY;AAAA,IACpC,YAAY;AAAA,MACV;AAAA,MACA,SAAS,cAAc;AAAA,MACvB,YAAY,cAAc;AAAA,MAC1B,uBAAuB,cAAc;AAAA,MACrC,mBAAmB,cAAc;AAAA,IACnC;AAAA;AAAA,IAEA,KAAK,QAAQ;AAAA,EAAA,CACd,GAEK,WAAWC,cAAA,eAAqC,MAAM;AAExD,MAAA;AACJ,QAAM,QAAQ;AAAA,IACZ,iBAAiB;AAAA,IACjB,cAAc;AAAA,IACd,kBAAkB;AAAA,IAClB,kBAAkB;AAAA,IAClB,qBAAqB;AAAA,IACrB,YAAY;AAAA,IACZ,iBAAiB;AAAA,EACnB;AAEI,MAAA;AACE,QAAA;AACF,cAAQ,MAAM,GAEd,aAAa,MAAMX,GAAAA,KAAK,YAAY,GAAG,GACvC,MAAM,WAAW,MAAM,oBAAoB,GAE3C
,QAAQ,OAAO;AACT,YAAA,EAAC,MAAM,YAAY,YAAW,MAAM,SAAS,MAAM,OAAO;AAChE,YAAM,mBAAmB,QAAQ,OAAO,CAAC,OAAO,WAAW,QAAQ,OAAO,WAAW,CAAC,GACtF,MAAM,WAAW,MAAM,UAAU,GAEjC,QAAQ,OAAO;AACf,uBAAiB,eAAe,SAAS,OAAO,QAAA,GAAW;AACnD,cAAA;AACA,cAAA,EAAC,OAAO,QAAA,IAAW;AACrB,iBACF,MAAM;AAER,mBAAW,UAAU;AACb,gBAAA,WAAW,MAAM,OAAO,IAAI,GAC9B,OAAO,SAAS,eAClB,MAAM,qBAEN,MAAM;AAAA,MAAA;AAKZ,cAAQ,OAAO;AACT,YAAA,EAAC,MAAM,aAAa,OAAO,eAAc,MAAM,SAAS,MAAM,QAAQ;AAC5E,YAAM,sBAAsB,YAC5B,MAAM,WAAW,MAAM,WAAW;AAAA,IAAA,UAClC;AAEA,YAAM,YAAY,MAAM;AAAA,IAAA;AAGtB,kBAAc,uBAChB,MAAM,oBAAoB,YAAY,QAAQ,OAAO;AAIjD,UAAA,YAAY,MAAMI,GAAA,KAAK,UAAU;AACvC,UAAM,aAAa,UAAU,MAE7B,MAAM,IAAI;AAAA,MACR,YAAY,MAAM;AAAA,MAClB,cAAc,MAAM;AAAA,MACpB,kBAAkB,MAAM;AAAA,MACxB,kBAAkB,MAAM;AAAA,MACxB,iBAAiB,MAAM;AAAA,MACvB,iBAAiB,MAAM;AAAA,MACvB,oBAAoB,MAAM,qBAAqB;AAAA,MAC/C,2BAA2B,MAAM,qBAAqB;AAAA,MACtD,uBACE,MAAM,uBAAuB,MAAM,oBAAoB,iBAAiB,IACpE,MAAM,oBAAoB,mBAAmB,MAAM,oBAAoB,iBACvE;AAAA,MACN,8BAA8B,MAAM,qBAAqB;AAAA,MACzD,6BAA6B,cAAc;AAAA,MAC3C,yBAAyB,cAAc;AAAA,IAAA,CACxC,GAED,MAAM,SAAA,GACF,MAAM,kBAAkB,KAC1B,QAAQ,KAAK,yBAAyB,MAAM,eAAe,+BAA+B,GAG5F,QAAQ;AAAA,MACN,kCAAkC,MAAM,gBAAgB,kBAAkB,MAAM,YAAY,kBAAkB,MAAM,gBAAgB,yBAAyB,MAAM,eAAe,gBAAgB,cAAc,SAAS;AAAA,IAC3N;AAAA,WACO,KAAK;AACZ,UAAA,QAAQ,KAAK,wBAAwB,GACrC,MAAM,MAAM,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC,GACzD;AAAA,EAAA,UACN;AAEA,UAAM,SAAS,QAAQ;AAAA,EAAA;AAE3B;;"}
package/lib/_chunks-cjs/journeyConfig.js
@@ -1,5 +1,24 @@
 "use strict";
-var
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf, __hasOwnProp = Object.prototype.hasOwnProperty;
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from == "object" || typeof from == "function")
+    for (let key of __getOwnPropNames(from))
+      !__hasOwnProp.call(to, key) && key !== except && __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: !0 }) : target,
+  mod
+));
+var fs = require("node:fs/promises"), path = require("node:path"), node_worker_threads = require("node:worker_threads"), cliWorker = require("./cliWorker.js");
 function _interopDefaultCompat(e) {
   return e && typeof e == "object" && "default" in e ? e : { default: e };
 }
@@ -74,7 +93,7 @@ async function fetchJourneySchema(schemaUrl) {
   }
 }
 async function assembleJourneySchemaTypeFileContent(schemaType) {
-  const serialised = wrapSchemaTypeInHelpers(schemaType), imports = getImports(serialised), prettifiedSchemaType = await
+  const serialised = wrapSchemaTypeInHelpers(schemaType), imports = getImports(serialised), { format } = await import("prettier"), prettifiedSchemaType = await format(serialised, {
     parser: "typescript",
     printWidth: 40
   });
@@ -83,12 +102,12 @@ async function assembleJourneySchemaTypeFileContent(schemaType) {
 export const ${schemaType.name} = ${prettifiedSchemaType}
 `;
 }
-function assembleJourneyIndexContent(schemas) {
+async function assembleJourneyIndexContent(schemas) {
   const sortedSchema = schemas.slice().sort((a, b) => a.name > b.name ? 1 : -1), imports = sortedSchema.map((schema) => `import { ${schema.name} } from './${schema.name}'`), exports2 = sortedSchema.map((schema) => schema.name).join(","), fileContents = `${imports.join(`
 `)}
 
-export const schemaTypes = [${exports2}]
-  return
+export const schemaTypes = [${exports2}]`, { format } = await import("prettier");
+  return format(fileContents, { parser: "typescript" });
 }
 function getImports(schemaType) {
   const defaultImports = ["defineType", "defineField"];
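`journeyConfig.js` gets the same prettier treatment as `generateAction.js`: `format` is now obtained via `await import("prettier")` at each call site, which is why `assembleJourneyIndexContent` becomes `async`. Its declared return type was already `Promise<string>` (prettier v3's `format` resolves asynchronously), so the existing `await assembleJourneyIndexContent(documentTypes)` call site is unaffected. A hedged sketch of the resulting shape, with simplified names (`assembleIndex` is not the package's identifier):

```ts
// Simplified stand-in for assembleJourneyIndexContent: the prettier import
// moves inside the function; callers keep awaiting the Promise<string>.
async function assembleIndex(names: string[]): Promise<string> {
  const {format} = await import('prettier')
  const imports = names.map((n) => `import { ${n} } from './${n}'`).join('\n')
  return format(`${imports}\n\nexport const schemaTypes = [${names.join(',')}]`, {
    parser: 'typescript',
  })
}
```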
package/lib/_chunks-cjs/journeyConfig.js.map
@@ -1 +1 @@
-
{"version":3,"file":"journeyConfig.js","sources":["../../src/util/journeyConfig.ts"],"sourcesContent":["import fs from 'node:fs/promises'\nimport path from 'node:path'\nimport {Worker} from 'node:worker_threads'\n\nimport {\n type BaseSchemaDefinition,\n type DocumentDefinition,\n type ObjectDefinition,\n} from '@sanity/types'\nimport {format} from 'prettier'\n\nimport {type CliApiClient} from '../types'\nimport {getCliWorkerPath} from './cliWorker'\n\n/**\n * A Journey schema is a server schema that is saved in the Journey API\n */\n\ninterface JourneySchemaWorkerData {\n schemasPath: string\n useTypeScript: boolean\n schemaUrl: string\n}\n\ntype JourneySchemaWorkerResult = {type: 'success'} | {type: 'error'; error: Error}\n\ninterface JourneyConfigResponse {\n projectId: string\n datasetName: string\n displayName: string\n schemaUrl: string\n isFirstProject: boolean // Always true for now, making it compatible with the existing getOrCreateProject\n}\n\ntype DocumentOrObject = DocumentDefinition | ObjectDefinition\ntype SchemaObject = BaseSchemaDefinition & {\n type: string\n fields?: SchemaObject[]\n of?: SchemaObject[]\n preview?: object\n}\n\n/**\n * Fetch a Journey schema from the Sanity schema club API and write it to disk\n */\nexport async function getAndWriteJourneySchema(data: JourneySchemaWorkerData): Promise<void> {\n const {schemasPath, useTypeScript, schemaUrl} = data\n try {\n const documentTypes = await fetchJourneySchema(schemaUrl)\n const fileExtension = useTypeScript ? 'ts' : 'js'\n\n // Write a file for each schema\n for (const documentType of documentTypes) {\n const filePath = path.join(schemasPath, `${documentType.name}.${fileExtension}`)\n await fs.writeFile(filePath, await assembleJourneySchemaTypeFileContent(documentType))\n }\n // Write an index file that exports all the schemas\n const indexContent = await assembleJourneyIndexContent(documentTypes)\n await fs.writeFile(path.join(schemasPath, `index.${fileExtension}`), indexContent)\n } catch (error) {\n throw new Error(`Failed to fetch remote schema: ${error.message}`)\n }\n}\n\n/**\n * Executes the `getAndWriteJourneySchema` operation within a worker thread.\n *\n * This method is designed to safely import network resources by leveraging the `--experimental-network-imports` flag.\n * Due to the experimental nature of this flag, its use is not recommended in the main process. Consequently,\n * the task is delegated to a worker thread to ensure both safety and compliance with best practices.\n *\n * The core functionality involves fetching schema definitions from our own trusted API and writing them to disk.\n * This includes handling both predefined and custom schemas. 
For custom schemas, a process ensures\n * that they undergo JSON parsing to remove any JavaScript code and are validated before being saved.\n *\n * Depending on the configuration, the schemas are saved as either TypeScript or JavaScript files, dictated by the `useTypeScript` flag within the `workerData`.\n *\n * @param workerData - An object containing the necessary data and flags for the worker thread, including the path to save schemas, flags indicating whether to use TypeScript, and any other relevant configuration details.\n * @returns A promise that resolves upon successful execution of the schema fetching and writing process or rejects if an error occurs during the operation.\n */\nexport async function getAndWriteJourneySchemaWorker(\n workerData: JourneySchemaWorkerData,\n): Promise<void> {\n const workerPath = await getCliWorkerPath('getAndWriteJourneySchema')\n return new Promise((resolve, reject) => {\n const worker = new Worker(workerPath, {\n workerData,\n env: {\n // eslint-disable-next-line no-process-env\n ...process.env,\n // Dynamic HTTPS imports are currently behind a Node flag\n NODE_OPTIONS: '--experimental-network-imports',\n NODE_NO_WARNINGS: '1',\n },\n })\n worker.on('message', (message: JourneySchemaWorkerResult) => {\n if (message.type === 'success') {\n resolve()\n } else {\n message.error.message = `Import schema worker failed: ${message.error.message}`\n reject(message.error)\n }\n })\n worker.on('error', (error) => {\n error.message = `Import schema worker failed: ${error.message}`\n reject(error)\n })\n worker.on('exit', (code) => {\n if (code !== 0) {\n reject(new Error(`Worker stopped with exit code ${code}`))\n }\n })\n })\n}\n\n/**\n * Fetch a Journey config from the Sanity schema club API\n *\n * @param projectId - The slug of the Journey schema to fetch\n * @returns The Journey schema as an array of Sanity document or object definitions\n */\nexport async function fetchJourneyConfig(\n apiClient: CliApiClient,\n projectId: string,\n): Promise<JourneyConfigResponse> {\n if (!projectId) {\n throw new Error('ProjectId is required')\n }\n if (!/^[a-zA-Z0-9-]+$/.test(projectId)) {\n throw new Error('Invalid projectId')\n }\n try {\n const response: {\n projectId: string\n dataset: string\n displayName?: string\n schemaUrl: string\n } = await apiClient({\n requireUser: true,\n requireProject: true,\n api: {projectId},\n })\n .config({apiVersion: 'v2024-02-23'})\n .request({\n method: 'GET',\n uri: `/journey/projects/${projectId}`,\n })\n\n return {\n projectId: response.projectId,\n datasetName: response.dataset,\n displayName: response.displayName || 'Sanity Project',\n // The endpoint returns a signed URL that can be used to fetch the schema as ESM\n schemaUrl: response.schemaUrl,\n isFirstProject: true,\n }\n } catch (err) {\n throw new Error(`Failed to fetch remote schema config: ${projectId}`)\n }\n}\n\n/**\n * Fetch a Journey schema from the Sanity schema club API\n *\n * @param projectId - The slug of the Journey schema to fetch\n * @returns The Journey schema as an array of Sanity document or object definitions\n */\nasync function fetchJourneySchema(schemaUrl: string): Promise<DocumentOrObject[]> {\n try {\n const response = await import(schemaUrl)\n return response.default\n } catch (err) {\n throw new Error(`Failed to fetch remote schema: ${schemaUrl}`)\n }\n}\n\n/**\n * Assemble a Journey schema type into a module export\n * Include the necessary imports and export the schema type as a named export\n *\n * @param schema - The Journey schema to 
export\n * @returns The Journey schema as a module export\n */\nasync function assembleJourneySchemaTypeFileContent(schemaType: DocumentOrObject): Promise<string> {\n const serialised = wrapSchemaTypeInHelpers(schemaType)\n const imports = getImports(serialised)\n const prettifiedSchemaType = await format(serialised, {\n parser: 'typescript',\n printWidth: 40,\n })\n // Start file with import, then export the schema type as a named export\n return `${imports}\\n\\nexport const ${schemaType.name} = ${prettifiedSchemaType}\\n`\n}\n\n/**\n * Assemble a list of Journey schema module exports into a single index file\n *\n * @param schemas - The Journey schemas to assemble into an index file\n * @returns The index file as a string\n */\nfunction assembleJourneyIndexContent(schemas: DocumentOrObject[]): Promise<string> {\n const sortedSchema = schemas.slice().sort((a, b) => (a.name > b.name ? 1 : -1))\n const imports = sortedSchema.map((schema) => `import { ${schema.name} } from './${schema.name}'`)\n const exports = sortedSchema.map((schema) => schema.name).join(',')\n const fileContents = `${imports.join('\\n')}\\n\\nexport const schemaTypes = [${exports}]`\n return format(fileContents, {parser: 'typescript'})\n}\n\n/**\n * Get the import statements for a schema type\n *\n * @param schemaType - The schema type to get the imports for\n * @returns The import statements for the schema type\n */\nfunction getImports(schemaType: string): string {\n const defaultImports = ['defineType', 'defineField']\n if (schemaType.includes('defineArrayMember')) {\n defaultImports.push('defineArrayMember')\n }\n return `import { ${defaultImports.join(', ')} } from 'sanity'`\n}\n\n/**\n * Serialize a singleSanity schema type (signular) into a string.\n * Wraps the schema object in the appropriate helper function.\n *\n * @param schemaType - The schema type to serialize\n * @returns The schema type as a string\n */\n/**\n * Serializes a single Sanity schema type into a string.\n * Wraps the schema object in the appropriate helper function.\n *\n * @param schemaType - The schema type to serialize\n * @param root - Whether the schemaType is the root object\n * @returns The serialized schema type as a string\n */\nexport function wrapSchemaTypeInHelpers(schemaType: SchemaObject, root: boolean = true): string {\n if (root) {\n return generateSchemaDefinition(schemaType, 'defineType')\n } else if (schemaType.type === 'array') {\n return `${generateSchemaDefinition(schemaType, 'defineField')},`\n }\n return `${generateSchemaDefinition(schemaType, 'defineField')},`\n\n function generateSchemaDefinition(\n object: SchemaObject,\n definitionType: 'defineType' | 'defineField',\n ): string {\n const {fields, preview, of, ...otherProperties} = object\n\n const serializedProps = serialize(otherProperties)\n const fieldsDef =\n fields && `fields: [${fields.map((f) => wrapSchemaTypeInHelpers(f, false)).join('')}]`\n const ofDef = of && `of: [${of.map((f) => `defineArrayMember({${serialize(f)}})`).join(',')}]`\n const previewDef = preview && `preview: {${serialize(preview)}}`\n\n const combinedDefinitions = [serializedProps, fieldsDef, ofDef, previewDef]\n .filter(Boolean)\n .join(',')\n return `${definitionType}({ ${combinedDefinitions} })`\n }\n\n function serialize(obj: object) {\n return Object.entries(obj)\n .map(([key, value]) => {\n if (key === 'prepare') {\n return `${value.toString()}`\n }\n if (typeof value === 'string') {\n return `${key}: \"${value}\"`\n }\n if (typeof value === 'object') {\n return `${key}: 
${JSON.stringify(value)}`\n }\n return `${key}: ${value}`\n })\n .join(',')\n }\n}\n"],"names":["path","fs","getCliWorkerPath","Worker","format","exports"],"mappings":";;;;;;AA6CA,eAAsB,yBAAyB,MAA8C;AAC3F,QAAM,EAAC,aAAa,eAAe,UAAa,IAAA;AAC5C,MAAA;AACF,UAAM,gBAAgB,MAAM,mBAAmB,SAAS,GAClD,gBAAgB,gBAAgB,OAAO;AAG7C,eAAW,gBAAgB,eAAe;AAClC,YAAA,WAAWA,cAAAA,QAAK,KAAK,aAAa,GAAG,aAAa,IAAI,IAAI,aAAa,EAAE;AAC/E,YAAMC,YAAAA,QAAG,UAAU,UAAU,MAAM,qCAAqC,YAAY,CAAC;AAAA,IAAA;AAGjF,UAAA,eAAe,MAAM,4BAA4B,aAAa;AAC9D,UAAAA,oBAAG,UAAUD,cAAK,QAAA,KAAK,aAAa,SAAS,aAAa,EAAE,GAAG,YAAY;AAAA,WAC1E,OAAO;AACd,UAAM,IAAI,MAAM,kCAAkC,MAAM,OAAO,EAAE;AAAA,EAAA;AAErE;AAkBA,eAAsB,+BACpB,YACe;AACT,QAAA,aAAa,MAAME,UAAA,iBAAiB,0BAA0B;AACpE,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAChC,UAAA,SAAS,IAAIC,oBAAA,OAAO,YAAY;AAAA,MACpC;AAAA,MACA,KAAK;AAAA;AAAA,QAEH,GAAG,QAAQ;AAAA;AAAA,QAEX,cAAc;AAAA,QACd,kBAAkB;AAAA,MAAA;AAAA,IACpB,CACD;AACM,WAAA,GAAG,WAAW,CAAC,YAAuC;AACvD,cAAQ,SAAS,YACnB,aAEA,QAAQ,MAAM,UAAU,gCAAgC,QAAQ,MAAM,OAAO,IAC7E,OAAO,QAAQ,KAAK;AAAA,IAEvB,CAAA,GACD,OAAO,GAAG,SAAS,CAAC,UAAU;AAC5B,YAAM,UAAU,gCAAgC,MAAM,OAAO,IAC7D,OAAO,KAAK;AAAA,IACb,CAAA,GACD,OAAO,GAAG,QAAQ,CAAC,SAAS;AACtB,eAAS,KACX,OAAO,IAAI,MAAM,iCAAiC,IAAI,EAAE,CAAC;AAAA,IAAA,CAE5D;AAAA,EAAA,CACF;AACH;AAQsB,eAAA,mBACpB,WACA,WACgC;AAChC,MAAI,CAAC;AACG,UAAA,IAAI,MAAM,uBAAuB;AAErC,MAAA,CAAC,kBAAkB,KAAK,SAAS;AAC7B,UAAA,IAAI,MAAM,mBAAmB;AAEjC,MAAA;AACI,UAAA,WAKF,MAAM,UAAU;AAAA,MAClB,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,KAAK,EAAC,UAAS;AAAA,IAAA,CAChB,EACE,OAAO,EAAC,YAAY,cAAc,CAAA,EAClC,QAAQ;AAAA,MACP,QAAQ;AAAA,MACR,KAAK,qBAAqB,SAAS;AAAA,IAAA,CACpC;AAEI,WAAA;AAAA,MACL,WAAW,SAAS;AAAA,MACpB,aAAa,SAAS;AAAA,MACtB,aAAa,SAAS,eAAe;AAAA;AAAA,MAErC,WAAW,SAAS;AAAA,MACpB,gBAAgB;AAAA,IAClB;AAAA,EAAA,QACY;AACZ,UAAM,IAAI,MAAM,yCAAyC,SAAS,EAAE;AAAA,EAAA;AAExE;AAQA,eAAe,mBAAmB,WAAgD;AAC5E,MAAA;AACe,YAAA,MAAM,OAAO,YACd;AAAA,EAAA,QACJ;AACZ,UAAM,IAAI,MAAM,kCAAkC,SAAS,EAAE;AAAA,EAAA;AAEjE;AASA,eAAe,qCAAqC,YAA+C;AAC3F,QAAA,aAAa,wBAAwB,UAAU,GAC/C,UAAU,WAAW,UAAU,GAC/B,uBAAuB,MAAMC,SAAAA,OAAO,YAAY;AAAA,IACpD,QAAQ;AAAA,IACR,YAAY;AAAA,EAAA,CACb;AAED,SAAO,GAAG,OAAO;AAAA;AAAA,eAAoB,WAAW,IAAI,MAAM,oBAAoB;AAAA;AAChF;AAQA,SAAS,4BAA4B,SAA8C;AACjF,QAAM,eAAe,QAAQ,QAAQ,KAAK,CAAC,GAAG,MAAO,EAAE,OAAO,EAAE,OAAO,IAAI,EAAG,GACxE,UAAU,aAAa,IAAI,CAAC,WAAW,YAAY,OAAO,IAAI,cAAc,OAAO,IAAI,GAAG,GAC1FC,WAAU,aAAa,IAAI,CAAC,WAAW,OAAO,IAAI,EAAE,KAAK,GAAG,GAC5D,eAAe,GAAG,QAAQ,KAAK;AAAA,CAAI,CAAC;AAAA;AAAA,8BAAmCA,QAAO;AACpF,SAAOD,SAAO,OAAA,cAAc,EAAC,QAAQ,cAAa;AACpD;AAQA,SAAS,WAAW,YAA4B;AACxC,QAAA,iBAAiB,CAAC,cAAc,aAAa;AACnD,SAAI,WAAW,SAAS,mBAAmB,KACzC,eAAe,KAAK,mBAAmB,GAElC,YAAY,eAAe,KAAK,IAAI,CAAC;AAC9C;AAiBgB,SAAA,wBAAwB,YAA0B,OAAgB,IAAc;AAC1F,MAAA;AACK,WAAA,yBAAyB,YAAY,YAAY;AACnD,MAAI,WAAW,SAAS;AAC7B,WAAO,GAAG,yBAAyB,YAAY,aAAa,CAAC;AAE/D,SAAO,GAAG,yBAAyB,YAAY,aAAa,CAAC;AAEpD,WAAA,yBACP,QACA,gBACQ;AACR,UAAM,EAAC,QAAQ,SAAS,IAAI,GAAG,gBAAA,IAAmB,QAE5C,kBAAkB,UAAU,eAAe,GAC3C,YACJ,UAAU,YAAY,OAAO,IAAI,CAAC,MAAM,wBAAwB,GAAG,EAAK,CAAC,EAAE,KAAK,EAAE,CAAC,KAC/E,QAAQ,MAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,sBAAsB,UAAU,CAAC,CAAC,IAAI,EAAE,KAAK,GAAG,CAAC,KACrF,aAAa,WAAW,aAAa,UAAU,OAAO,CAAC,KAEvD,sBAAsB,CAAC,iBAAiB,WAAW,OAAO,UAAU,EACvE,OAAO,OAAO,EACd,KAAK,GAAG;AACJ,WAAA,GAAG,cAAc,MAAM,mBAAmB;AAAA,EAAA;AAGnD,WAAS,UAAU,KAAa;AAC9B,WAAO,OAAO,QAAQ,GAAG,EACtB,IAAI,CAAC,CAAC,KAAK,KAAK,MACX,QAAQ,YACH,GAAG,MAAM,UAAU,KAExB,OAAO,SAAU,WACZ,GAAG,GAAG,MAAM,KAAK,MAEtB,OAAO,SAAU,WACZ,GAAG,GAAG,KAAK,KAAK,UAAU,KAAK,CAAC,KAElC,GAAG,GAAG,KAAK,KAAK,EACxB,EACA,KAAK,GAAG;AAAA,EAAA;AAEf;;;;"}
+
{"version":3,"file":"journeyConfig.js","sources":["../../src/util/journeyConfig.ts"],"sourcesContent":["import fs from 'node:fs/promises'\nimport path from 'node:path'\nimport {Worker} from 'node:worker_threads'\n\nimport {\n type BaseSchemaDefinition,\n type DocumentDefinition,\n type ObjectDefinition,\n} from '@sanity/types'\n\nimport {type CliApiClient} from '../types'\nimport {getCliWorkerPath} from './cliWorker'\n\n/**\n * A Journey schema is a server schema that is saved in the Journey API\n */\n\ninterface JourneySchemaWorkerData {\n schemasPath: string\n useTypeScript: boolean\n schemaUrl: string\n}\n\ntype JourneySchemaWorkerResult = {type: 'success'} | {type: 'error'; error: Error}\n\ninterface JourneyConfigResponse {\n projectId: string\n datasetName: string\n displayName: string\n schemaUrl: string\n isFirstProject: boolean // Always true for now, making it compatible with the existing getOrCreateProject\n}\n\ntype DocumentOrObject = DocumentDefinition | ObjectDefinition\ntype SchemaObject = BaseSchemaDefinition & {\n type: string\n fields?: SchemaObject[]\n of?: SchemaObject[]\n preview?: object\n}\n\n/**\n * Fetch a Journey schema from the Sanity schema club API and write it to disk\n */\nexport async function getAndWriteJourneySchema(data: JourneySchemaWorkerData): Promise<void> {\n const {schemasPath, useTypeScript, schemaUrl} = data\n try {\n const documentTypes = await fetchJourneySchema(schemaUrl)\n const fileExtension = useTypeScript ? 'ts' : 'js'\n\n // Write a file for each schema\n for (const documentType of documentTypes) {\n const filePath = path.join(schemasPath, `${documentType.name}.${fileExtension}`)\n await fs.writeFile(filePath, await assembleJourneySchemaTypeFileContent(documentType))\n }\n // Write an index file that exports all the schemas\n const indexContent = await assembleJourneyIndexContent(documentTypes)\n await fs.writeFile(path.join(schemasPath, `index.${fileExtension}`), indexContent)\n } catch (error) {\n throw new Error(`Failed to fetch remote schema: ${error.message}`)\n }\n}\n\n/**\n * Executes the `getAndWriteJourneySchema` operation within a worker thread.\n *\n * This method is designed to safely import network resources by leveraging the `--experimental-network-imports` flag.\n * Due to the experimental nature of this flag, its use is not recommended in the main process. Consequently,\n * the task is delegated to a worker thread to ensure both safety and compliance with best practices.\n *\n * The core functionality involves fetching schema definitions from our own trusted API and writing them to disk.\n * This includes handling both predefined and custom schemas. 
For custom schemas, a process ensures\n * that they undergo JSON parsing to remove any JavaScript code and are validated before being saved.\n *\n * Depending on the configuration, the schemas are saved as either TypeScript or JavaScript files, dictated by the `useTypeScript` flag within the `workerData`.\n *\n * @param workerData - An object containing the necessary data and flags for the worker thread, including the path to save schemas, flags indicating whether to use TypeScript, and any other relevant configuration details.\n * @returns A promise that resolves upon successful execution of the schema fetching and writing process or rejects if an error occurs during the operation.\n */\nexport async function getAndWriteJourneySchemaWorker(\n workerData: JourneySchemaWorkerData,\n): Promise<void> {\n const workerPath = await getCliWorkerPath('getAndWriteJourneySchema')\n return new Promise((resolve, reject) => {\n const worker = new Worker(workerPath, {\n workerData,\n env: {\n // eslint-disable-next-line no-process-env\n ...process.env,\n // Dynamic HTTPS imports are currently behind a Node flag\n NODE_OPTIONS: '--experimental-network-imports',\n NODE_NO_WARNINGS: '1',\n },\n })\n worker.on('message', (message: JourneySchemaWorkerResult) => {\n if (message.type === 'success') {\n resolve()\n } else {\n message.error.message = `Import schema worker failed: ${message.error.message}`\n reject(message.error)\n }\n })\n worker.on('error', (error) => {\n error.message = `Import schema worker failed: ${error.message}`\n reject(error)\n })\n worker.on('exit', (code) => {\n if (code !== 0) {\n reject(new Error(`Worker stopped with exit code ${code}`))\n }\n })\n })\n}\n\n/**\n * Fetch a Journey config from the Sanity schema club API\n *\n * @param projectId - The slug of the Journey schema to fetch\n * @returns The Journey schema as an array of Sanity document or object definitions\n */\nexport async function fetchJourneyConfig(\n apiClient: CliApiClient,\n projectId: string,\n): Promise<JourneyConfigResponse> {\n if (!projectId) {\n throw new Error('ProjectId is required')\n }\n if (!/^[a-zA-Z0-9-]+$/.test(projectId)) {\n throw new Error('Invalid projectId')\n }\n try {\n const response: {\n projectId: string\n dataset: string\n displayName?: string\n schemaUrl: string\n } = await apiClient({\n requireUser: true,\n requireProject: true,\n api: {projectId},\n })\n .config({apiVersion: 'v2024-02-23'})\n .request({\n method: 'GET',\n uri: `/journey/projects/${projectId}`,\n })\n\n return {\n projectId: response.projectId,\n datasetName: response.dataset,\n displayName: response.displayName || 'Sanity Project',\n // The endpoint returns a signed URL that can be used to fetch the schema as ESM\n schemaUrl: response.schemaUrl,\n isFirstProject: true,\n }\n } catch (err) {\n throw new Error(`Failed to fetch remote schema config: ${projectId}`)\n }\n}\n\n/**\n * Fetch a Journey schema from the Sanity schema club API\n *\n * @param projectId - The slug of the Journey schema to fetch\n * @returns The Journey schema as an array of Sanity document or object definitions\n */\nasync function fetchJourneySchema(schemaUrl: string): Promise<DocumentOrObject[]> {\n try {\n const response = await import(schemaUrl)\n return response.default\n } catch (err) {\n throw new Error(`Failed to fetch remote schema: ${schemaUrl}`)\n }\n}\n\n/**\n * Assemble a Journey schema type into a module export\n * Include the necessary imports and export the schema type as a named export\n *\n * @param schema - The Journey schema to 
export\n * @returns The Journey schema as a module export\n */\nasync function assembleJourneySchemaTypeFileContent(schemaType: DocumentOrObject): Promise<string> {\n const serialised = wrapSchemaTypeInHelpers(schemaType)\n const imports = getImports(serialised)\n const {format} = await import('prettier')\n const prettifiedSchemaType = await format(serialised, {\n parser: 'typescript',\n printWidth: 40,\n })\n // Start file with import, then export the schema type as a named export\n return `${imports}\\n\\nexport const ${schemaType.name} = ${prettifiedSchemaType}\\n`\n}\n\n/**\n * Assemble a list of Journey schema module exports into a single index file\n *\n * @param schemas - The Journey schemas to assemble into an index file\n * @returns The index file as a string\n */\nasync function assembleJourneyIndexContent(schemas: DocumentOrObject[]): Promise<string> {\n const sortedSchema = schemas.slice().sort((a, b) => (a.name > b.name ? 1 : -1))\n const imports = sortedSchema.map((schema) => `import { ${schema.name} } from './${schema.name}'`)\n const exports = sortedSchema.map((schema) => schema.name).join(',')\n const fileContents = `${imports.join('\\n')}\\n\\nexport const schemaTypes = [${exports}]`\n const {format} = await import('prettier')\n return format(fileContents, {parser: 'typescript'})\n}\n\n/**\n * Get the import statements for a schema type\n *\n * @param schemaType - The schema type to get the imports for\n * @returns The import statements for the schema type\n */\nfunction getImports(schemaType: string): string {\n const defaultImports = ['defineType', 'defineField']\n if (schemaType.includes('defineArrayMember')) {\n defaultImports.push('defineArrayMember')\n }\n return `import { ${defaultImports.join(', ')} } from 'sanity'`\n}\n\n/**\n * Serialize a singleSanity schema type (signular) into a string.\n * Wraps the schema object in the appropriate helper function.\n *\n * @param schemaType - The schema type to serialize\n * @returns The schema type as a string\n */\n/**\n * Serializes a single Sanity schema type into a string.\n * Wraps the schema object in the appropriate helper function.\n *\n * @param schemaType - The schema type to serialize\n * @param root - Whether the schemaType is the root object\n * @returns The serialized schema type as a string\n */\nexport function wrapSchemaTypeInHelpers(schemaType: SchemaObject, root: boolean = true): string {\n if (root) {\n return generateSchemaDefinition(schemaType, 'defineType')\n } else if (schemaType.type === 'array') {\n return `${generateSchemaDefinition(schemaType, 'defineField')},`\n }\n return `${generateSchemaDefinition(schemaType, 'defineField')},`\n\n function generateSchemaDefinition(\n object: SchemaObject,\n definitionType: 'defineType' | 'defineField',\n ): string {\n const {fields, preview, of, ...otherProperties} = object\n\n const serializedProps = serialize(otherProperties)\n const fieldsDef =\n fields && `fields: [${fields.map((f) => wrapSchemaTypeInHelpers(f, false)).join('')}]`\n const ofDef = of && `of: [${of.map((f) => `defineArrayMember({${serialize(f)}})`).join(',')}]`\n const previewDef = preview && `preview: {${serialize(preview)}}`\n\n const combinedDefinitions = [serializedProps, fieldsDef, ofDef, previewDef]\n .filter(Boolean)\n .join(',')\n return `${definitionType}({ ${combinedDefinitions} })`\n }\n\n function serialize(obj: object) {\n return Object.entries(obj)\n .map(([key, value]) => {\n if (key === 'prepare') {\n return `${value.toString()}`\n }\n if (typeof value === 'string') {\n 
return `${key}: \"${value}\"`\n }\n if (typeof value === 'object') {\n return `${key}: ${JSON.stringify(value)}`\n }\n return `${key}: ${value}`\n })\n .join(',')\n }\n}\n"],"names":["path","fs","getCliWorkerPath","Worker","exports"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AA4CA,eAAsB,yBAAyB,MAA8C;AAC3F,QAAM,EAAC,aAAa,eAAe,UAAa,IAAA;AAC5C,MAAA;AACF,UAAM,gBAAgB,MAAM,mBAAmB,SAAS,GAClD,gBAAgB,gBAAgB,OAAO;AAG7C,eAAW,gBAAgB,eAAe;AAClC,YAAA,WAAWA,cAAAA,QAAK,KAAK,aAAa,GAAG,aAAa,IAAI,IAAI,aAAa,EAAE;AAC/E,YAAMC,YAAAA,QAAG,UAAU,UAAU,MAAM,qCAAqC,YAAY,CAAC;AAAA,IAAA;AAGjF,UAAA,eAAe,MAAM,4BAA4B,aAAa;AAC9D,UAAAA,oBAAG,UAAUD,cAAK,QAAA,KAAK,aAAa,SAAS,aAAa,EAAE,GAAG,YAAY;AAAA,WAC1E,OAAO;AACd,UAAM,IAAI,MAAM,kCAAkC,MAAM,OAAO,EAAE;AAAA,EAAA;AAErE;AAkBA,eAAsB,+BACpB,YACe;AACT,QAAA,aAAa,MAAME,UAAA,iBAAiB,0BAA0B;AACpE,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AAChC,UAAA,SAAS,IAAIC,oBAAA,OAAO,YAAY;AAAA,MACpC;AAAA,MACA,KAAK;AAAA;AAAA,QAEH,GAAG,QAAQ;AAAA;AAAA,QAEX,cAAc;AAAA,QACd,kBAAkB;AAAA,MAAA;AAAA,IACpB,CACD;AACM,WAAA,GAAG,WAAW,CAAC,YAAuC;AACvD,cAAQ,SAAS,YACnB,aAEA,QAAQ,MAAM,UAAU,gCAAgC,QAAQ,MAAM,OAAO,IAC7E,OAAO,QAAQ,KAAK;AAAA,IAEvB,CAAA,GACD,OAAO,GAAG,SAAS,CAAC,UAAU;AAC5B,YAAM,UAAU,gCAAgC,MAAM,OAAO,IAC7D,OAAO,KAAK;AAAA,IACb,CAAA,GACD,OAAO,GAAG,QAAQ,CAAC,SAAS;AACtB,eAAS,KACX,OAAO,IAAI,MAAM,iCAAiC,IAAI,EAAE,CAAC;AAAA,IAAA,CAE5D;AAAA,EAAA,CACF;AACH;AAQsB,eAAA,mBACpB,WACA,WACgC;AAChC,MAAI,CAAC;AACG,UAAA,IAAI,MAAM,uBAAuB;AAErC,MAAA,CAAC,kBAAkB,KAAK,SAAS;AAC7B,UAAA,IAAI,MAAM,mBAAmB;AAEjC,MAAA;AACI,UAAA,WAKF,MAAM,UAAU;AAAA,MAClB,aAAa;AAAA,MACb,gBAAgB;AAAA,MAChB,KAAK,EAAC,UAAS;AAAA,IAAA,CAChB,EACE,OAAO,EAAC,YAAY,cAAc,CAAA,EAClC,QAAQ;AAAA,MACP,QAAQ;AAAA,MACR,KAAK,qBAAqB,SAAS;AAAA,IAAA,CACpC;AAEI,WAAA;AAAA,MACL,WAAW,SAAS;AAAA,MACpB,aAAa,SAAS;AAAA,MACtB,aAAa,SAAS,eAAe;AAAA;AAAA,MAErC,WAAW,SAAS;AAAA,MACpB,gBAAgB;AAAA,IAClB;AAAA,EAAA,QACY;AACZ,UAAM,IAAI,MAAM,yCAAyC,SAAS,EAAE;AAAA,EAAA;AAExE;AAQA,eAAe,mBAAmB,WAAgD;AAC5E,MAAA;AACe,YAAA,MAAM,OAAO,YACd;AAAA,EAAA,QACJ;AACZ,UAAM,IAAI,MAAM,kCAAkC,SAAS,EAAE;AAAA,EAAA;AAEjE;AASA,eAAe,qCAAqC,YAA+C;AACjG,QAAM,aAAa,wBAAwB,UAAU,GAC/C,UAAU,WAAW,UAAU,GAC/B,EAAC,OAAM,IAAI,MAAM,OAAO,UAAU,GAClC,uBAAuB,MAAM,OAAO,YAAY;AAAA,IACpD,QAAQ;AAAA,IACR,YAAY;AAAA,EAAA,CACb;AAED,SAAO,GAAG,OAAO;AAAA;AAAA,eAAoB,WAAW,IAAI,MAAM,oBAAoB;AAAA;AAChF;AAQA,eAAe,4BAA4B,SAA8C;AACvF,QAAM,eAAe,QAAQ,QAAQ,KAAK,CAAC,GAAG,MAAO,EAAE,OAAO,EAAE,OAAO,IAAI,EAAG,GACxE,UAAU,aAAa,IAAI,CAAC,WAAW,YAAY,OAAO,IAAI,cAAc,OAAO,IAAI,GAAG,GAC1FC,WAAU,aAAa,IAAI,CAAC,WAAW,OAAO,IAAI,EAAE,KAAK,GAAG,GAC5D,eAAe,GAAG,QAAQ,KAAK;AAAA,CAAI,CAAC;AAAA;AAAA,8BAAmCA,QAAO,KAC9E,EAAC,OAAU,IAAA,MAAM,OAAO,UAAU;AACxC,SAAO,OAAO,cAAc,EAAC,QAAQ,cAAa;AACpD;AAQA,SAAS,WAAW,YAA4B;AACxC,QAAA,iBAAiB,CAAC,cAAc,aAAa;AACnD,SAAI,WAAW,SAAS,mBAAmB,KACzC,eAAe,KAAK,mBAAmB,GAElC,YAAY,eAAe,KAAK,IAAI,CAAC;AAC9C;AAiBgB,SAAA,wBAAwB,YAA0B,OAAgB,IAAc;AAC1F,MAAA;AACK,WAAA,yBAAyB,YAAY,YAAY;AACnD,MAAI,WAAW,SAAS;AAC7B,WAAO,GAAG,yBAAyB,YAAY,aAAa,CAAC;AAE/D,SAAO,GAAG,yBAAyB,YAAY,aAAa,CAAC;AAEpD,WAAA,yBACP,QACA,gBACQ;AACR,UAAM,EAAC,QAAQ,SAAS,IAAI,GAAG,gBAAA,IAAmB,QAE5C,kBAAkB,UAAU,eAAe,GAC3C,YACJ,UAAU,YAAY,OAAO,IAAI,CAAC,MAAM,wBAAwB,GAAG,EAAK,CAAC,EAAE,KAAK,EAAE,CAAC,KAC/E,QAAQ,MAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,sBAAsB,UAAU,CAAC,CAAC,IAAI,EAAE,KAAK,GAAG,CAAC,KACrF,aAAa,WAAW,aAAa,UAAU,OAAO,CAAC,KAEvD,sBAAsB,CAAC,iBAAiB,WAAW,OAAO,UAAU,EACvE,OAAO,OAAO,EACd,KAAK,GAAG;AACJ,WAAA,GAAG,cAAc,MAAM,mBAAmB;AAAA,EAAA;AAGnD,WAAS,UAAU,KAAa;AAC9B,WAAO,OAAO,QAAQ,GAAG,EACtB,IAAI,CAAC,CAAC,KAAK,KAAK,MACX,QAAQ,YACH,GAAG,MAAM,UAAU,KAExB,OAAO,SAAU,WACZ,GAAG,GAAG,MAAM,KAAK,MAEtB,OAAO,SAAU,WACZ,GAAG,GAAG,KAAK,KAAK,UAAU,KAAK,CAAC,KAE
lC,GAAG,GAAG,KAAK,KAAK,EACxB,EACA,KAAK,GAAG;AAAA,EAAA;AAEf;;;;"}
@@ -1,10 +1,10 @@
 "use strict";
-var fs = require("node:fs"), path = require("node:path"),
+var fs = require("node:fs"), path = require("node:path"), debugIt = require("debug"), require$$0$1 = require("fs"), require$$0 = require("path"), require$$0$2 = require("os"), require$$3 = require("crypto");
 function _interopDefaultCompat(e) {
   return e && typeof e == "object" && "default" in e ? e : { default: e };
 }
-var fs__default = /* @__PURE__ */ _interopDefaultCompat(fs), path__default = /* @__PURE__ */ _interopDefaultCompat(path),
-const debug =
+var fs__default = /* @__PURE__ */ _interopDefaultCompat(fs), path__default = /* @__PURE__ */ _interopDefaultCompat(path), debugIt__default = /* @__PURE__ */ _interopDefaultCompat(debugIt), require$$0__default$1 = /* @__PURE__ */ _interopDefaultCompat(require$$0$1), require$$0__default = /* @__PURE__ */ _interopDefaultCompat(require$$0), require$$0__default$2 = /* @__PURE__ */ _interopDefaultCompat(require$$0$2), require$$3__default = /* @__PURE__ */ _interopDefaultCompat(require$$3);
+const debug = debugIt__default.default("sanity:cli");
 function resolveRootDir(cwd) {
   try {
     return resolveProjectRoot(cwd) || cwd;
@@ -1 +1 @@
-
{"version":3,"file":"loadEnv.js","sources":["../../src/debug.ts","../../src/util/resolveRootDir.ts","../../../../../node_modules/.pnpm/dotenv@16.4.7/node_modules/dotenv/lib/main.js","../../../../../node_modules/.pnpm/dotenv-expand@9.0.0/node_modules/dotenv-expand/lib/main.js","../../src/util/loadEnv.ts"],"sourcesContent":["import debugIt from 'debug'\n\nexport const debug = debugIt('sanity:cli')\n","/* eslint-disable no-sync */\nimport fs from 'node:fs'\nimport path from 'node:path'\n\nimport {debug} from '../debug'\n\n/**\n * Resolve project root directory, falling back to cwd if it cannot be found\n */\nexport function resolveRootDir(cwd: string): string {\n try {\n return resolveProjectRoot(cwd) || cwd\n } catch (err) {\n throw new Error(`Error occurred trying to resolve project root:\\n${err.message}`)\n }\n}\n\nfunction hasSanityConfig(basePath: string, configName: string): boolean {\n const buildConfigs = [\n fileExists(path.join(basePath, `${configName}.js`)),\n fileExists(path.join(basePath, `${configName}.ts`)),\n isSanityV2StudioRoot(basePath),\n ]\n\n return buildConfigs.some(Boolean)\n}\n\nfunction resolveProjectRoot(basePath: string, iterations = 0): string | false {\n const configName = 'sanity.config'\n if (hasSanityConfig(basePath, configName)) {\n return basePath\n }\n\n const parentDir = path.resolve(basePath, '..')\n if (parentDir === basePath || iterations > 30) {\n // Reached root (or max depth), give up\n return false\n }\n\n return resolveProjectRoot(parentDir, iterations + 1)\n}\n\nfunction isSanityV2StudioRoot(basePath: string): boolean {\n try {\n const content = fs.readFileSync(path.join(basePath, 'sanity.json'), 'utf8')\n const sanityJson = JSON.parse(content)\n const isRoot = Boolean(sanityJson?.root)\n if (isRoot) {\n debug('Found Sanity v2 studio root at %s', basePath)\n }\n return isRoot\n } catch (err) {\n return false\n }\n}\n\nfunction fileExists(filePath: string): boolean {\n return fs.existsSync(filePath)\n}\n","const fs = require('fs')\nconst path = require('path')\nconst os = require('os')\nconst crypto = require('crypto')\nconst packageJson = require('../package.json')\n\nconst version = packageJson.version\n\nconst LINE = /(?:^|^)\\s*(?:export\\s+)?([\\w.-]+)(?:\\s*=\\s*?|:\\s+?)(\\s*'(?:\\\\'|[^'])*'|\\s*\"(?:\\\\\"|[^\"])*\"|\\s*`(?:\\\\`|[^`])*`|[^#\\r\\n]+)?\\s*(?:#.*)?(?:$|$)/mg\n\n// Parse src into an Object\nfunction parse (src) {\n const obj = {}\n\n // Convert buffer to string\n let lines = src.toString()\n\n // Convert line breaks to same format\n lines = lines.replace(/\\r\\n?/mg, '\\n')\n\n let match\n while ((match = LINE.exec(lines)) != null) {\n const key = match[1]\n\n // Default undefined or null to empty string\n let value = (match[2] || '')\n\n // Remove whitespace\n value = value.trim()\n\n // Check if double quoted\n const maybeQuote = value[0]\n\n // Remove surrounding quotes\n value = value.replace(/^(['\"`])([\\s\\S]*)\\1$/mg, '$2')\n\n // Expand newlines if double quoted\n if (maybeQuote === '\"') {\n value = value.replace(/\\\\n/g, '\\n')\n value = value.replace(/\\\\r/g, '\\r')\n }\n\n // Add to object\n obj[key] = value\n }\n\n return obj\n}\n\nfunction _parseVault (options) {\n const vaultPath = _vaultPath(options)\n\n // Parse .env.vault\n const result = DotenvModule.configDotenv({ path: vaultPath })\n if (!result.parsed) {\n const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`)\n err.code = 'MISSING_DATA'\n throw err\n }\n\n // handle scenario for comma separated keys - for use with key 
rotation\n // example: DOTENV_KEY=\"dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=prod,dotenv://:key_7890@dotenvx.com/vault/.env.vault?environment=prod\"\n const keys = _dotenvKey(options).split(',')\n const length = keys.length\n\n let decrypted\n for (let i = 0; i < length; i++) {\n try {\n // Get full key\n const key = keys[i].trim()\n\n // Get instructions for decrypt\n const attrs = _instructions(result, key)\n\n // Decrypt\n decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key)\n\n break\n } catch (error) {\n // last key\n if (i + 1 >= length) {\n throw error\n }\n // try next key\n }\n }\n\n // Parse decrypted .env string\n return DotenvModule.parse(decrypted)\n}\n\nfunction _log (message) {\n console.log(`[dotenv@${version}][INFO] ${message}`)\n}\n\nfunction _warn (message) {\n console.log(`[dotenv@${version}][WARN] ${message}`)\n}\n\nfunction _debug (message) {\n console.log(`[dotenv@${version}][DEBUG] ${message}`)\n}\n\nfunction _dotenvKey (options) {\n // prioritize developer directly setting options.DOTENV_KEY\n if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {\n return options.DOTENV_KEY\n }\n\n // secondary infra already contains a DOTENV_KEY environment variable\n if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {\n return process.env.DOTENV_KEY\n }\n\n // fallback to empty string\n return ''\n}\n\nfunction _instructions (result, dotenvKey) {\n // Parse DOTENV_KEY. Format is a URI\n let uri\n try {\n uri = new URL(dotenvKey)\n } catch (error) {\n if (error.code === 'ERR_INVALID_URL') {\n const err = new Error('INVALID_DOTENV_KEY: Wrong format. Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n throw error\n }\n\n // Get decrypt key\n const key = uri.password\n if (!key) {\n const err = new Error('INVALID_DOTENV_KEY: Missing key part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get environment\n const environment = uri.searchParams.get('environment')\n if (!environment) {\n const err = new Error('INVALID_DOTENV_KEY: Missing environment part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get ciphertext payload\n const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`\n const ciphertext = result.parsed[environmentKey] // DOTENV_VAULT_PRODUCTION\n if (!ciphertext) {\n const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`)\n err.code = 'NOT_FOUND_DOTENV_ENVIRONMENT'\n throw err\n }\n\n return { ciphertext, key }\n}\n\nfunction _vaultPath (options) {\n let possibleVaultPath = null\n\n if (options && options.path && options.path.length > 0) {\n if (Array.isArray(options.path)) {\n for (const filepath of options.path) {\n if (fs.existsSync(filepath)) {\n possibleVaultPath = filepath.endsWith('.vault') ? filepath : `${filepath}.vault`\n }\n }\n } else {\n possibleVaultPath = options.path.endsWith('.vault') ? options.path : `${options.path}.vault`\n }\n } else {\n possibleVaultPath = path.resolve(process.cwd(), '.env.vault')\n }\n\n if (fs.existsSync(possibleVaultPath)) {\n return possibleVaultPath\n }\n\n return null\n}\n\nfunction _resolveHome (envPath) {\n return envPath[0] === '~' ? 
path.join(os.homedir(), envPath.slice(1)) : envPath\n}\n\nfunction _configVault (options) {\n _log('Loading env from encrypted .env.vault')\n\n const parsed = DotenvModule._parseVault(options)\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsed, options)\n\n return { parsed }\n}\n\nfunction configDotenv (options) {\n const dotenvPath = path.resolve(process.cwd(), '.env')\n let encoding = 'utf8'\n const debug = Boolean(options && options.debug)\n\n if (options && options.encoding) {\n encoding = options.encoding\n } else {\n if (debug) {\n _debug('No encoding is specified. UTF-8 is used by default')\n }\n }\n\n let optionPaths = [dotenvPath] // default, look for .env\n if (options && options.path) {\n if (!Array.isArray(options.path)) {\n optionPaths = [_resolveHome(options.path)]\n } else {\n optionPaths = [] // reset default\n for (const filepath of options.path) {\n optionPaths.push(_resolveHome(filepath))\n }\n }\n }\n\n // Build the parsed data in a temporary object (because we need to return it). Once we have the final\n // parsed data, we will combine it with process.env (or options.processEnv if provided).\n let lastError\n const parsedAll = {}\n for (const path of optionPaths) {\n try {\n // Specifying an encoding returns a string instead of a buffer\n const parsed = DotenvModule.parse(fs.readFileSync(path, { encoding }))\n\n DotenvModule.populate(parsedAll, parsed, options)\n } catch (e) {\n if (debug) {\n _debug(`Failed to load ${path} ${e.message}`)\n }\n lastError = e\n }\n }\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsedAll, options)\n\n if (lastError) {\n return { parsed: parsedAll, error: lastError }\n } else {\n return { parsed: parsedAll }\n }\n}\n\n// Populates process.env from .env file\nfunction config (options) {\n // fallback to original dotenv if DOTENV_KEY is not set\n if (_dotenvKey(options).length === 0) {\n return DotenvModule.configDotenv(options)\n }\n\n const vaultPath = _vaultPath(options)\n\n // dotenvKey exists but .env.vault file does not exist\n if (!vaultPath) {\n _warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. 
Did you forget to build it?`)\n\n return DotenvModule.configDotenv(options)\n }\n\n return DotenvModule._configVault(options)\n}\n\nfunction decrypt (encrypted, keyStr) {\n const key = Buffer.from(keyStr.slice(-64), 'hex')\n let ciphertext = Buffer.from(encrypted, 'base64')\n\n const nonce = ciphertext.subarray(0, 12)\n const authTag = ciphertext.subarray(-16)\n ciphertext = ciphertext.subarray(12, -16)\n\n try {\n const aesgcm = crypto.createDecipheriv('aes-256-gcm', key, nonce)\n aesgcm.setAuthTag(authTag)\n return `${aesgcm.update(ciphertext)}${aesgcm.final()}`\n } catch (error) {\n const isRange = error instanceof RangeError\n const invalidKeyLength = error.message === 'Invalid key length'\n const decryptionFailed = error.message === 'Unsupported state or unable to authenticate data'\n\n if (isRange || invalidKeyLength) {\n const err = new Error('INVALID_DOTENV_KEY: It must be 64 characters long (or more)')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n } else if (decryptionFailed) {\n const err = new Error('DECRYPTION_FAILED: Please check your DOTENV_KEY')\n err.code = 'DECRYPTION_FAILED'\n throw err\n } else {\n throw error\n }\n }\n}\n\n// Populate process.env with parsed values\nfunction populate (processEnv, parsed, options = {}) {\n const debug = Boolean(options && options.debug)\n const override = Boolean(options && options.override)\n\n if (typeof parsed !== 'object') {\n const err = new Error('OBJECT_REQUIRED: Please check the processEnv argument being passed to populate')\n err.code = 'OBJECT_REQUIRED'\n throw err\n }\n\n // Set process.env\n for (const key of Object.keys(parsed)) {\n if (Object.prototype.hasOwnProperty.call(processEnv, key)) {\n if (override === true) {\n processEnv[key] = parsed[key]\n }\n\n if (debug) {\n if (override === true) {\n _debug(`\"${key}\" is already defined and WAS overwritten`)\n } else {\n _debug(`\"${key}\" is already defined and was NOT overwritten`)\n }\n }\n } else {\n processEnv[key] = parsed[key]\n }\n }\n}\n\nconst DotenvModule = {\n configDotenv,\n _configVault,\n _parseVault,\n config,\n decrypt,\n parse,\n populate\n}\n\nmodule.exports.configDotenv = DotenvModule.configDotenv\nmodule.exports._configVault = DotenvModule._configVault\nmodule.exports._parseVault = DotenvModule._parseVault\nmodule.exports.config = DotenvModule.config\nmodule.exports.decrypt = DotenvModule.decrypt\nmodule.exports.parse = DotenvModule.parse\nmodule.exports.populate = DotenvModule.populate\n\nmodule.exports = DotenvModule\n","'use strict'\n\nfunction _interpolate (envValue, environment, config) {\n const matches = envValue.match(/(.?\\${*[\\w]*(?::-[\\w/]*)?}*)/g) || []\n\n return matches.reduce(function (newEnv, match, index) {\n const parts = /(.?)\\${*([\\w]*(?::-[\\w/]*)?)?}*/g.exec(match)\n if (!parts || parts.length === 0) {\n return newEnv\n }\n\n const prefix = parts[1]\n\n let value, replacePart\n\n if (prefix === '\\\\') {\n replacePart = parts[0]\n value = replacePart.replace('\\\\$', '$')\n } else {\n const keyParts = parts[2].split(':-')\n const key = keyParts[0]\n replacePart = parts[0].substring(prefix.length)\n // process.env value 'wins' over .env file's value\n value = Object.prototype.hasOwnProperty.call(environment, key)\n ? 
environment[key]\n : (config.parsed[key] || keyParts[1] || '')\n\n // If the value is found, remove nested expansions.\n if (keyParts.length > 1 && value) {\n const replaceNested = matches[index + 1]\n matches[index + 1] = ''\n\n newEnv = newEnv.replace(replaceNested, '')\n }\n // Resolve recursive interpolations\n value = _interpolate(value, environment, config)\n }\n\n return newEnv.replace(replacePart, value)\n }, envValue)\n}\n\nfunction expand (config) {\n // if ignoring process.env, use a blank object\n const environment = config.ignoreProcessEnv ? {} : process.env\n\n for (const configKey in config.parsed) {\n const value = Object.prototype.hasOwnProperty.call(environment, configKey) ? environment[configKey] : config.parsed[configKey]\n\n config.parsed[configKey] = _interpolate(value, environment, config)\n }\n\n for (const processKey in config.parsed) {\n environment[processKey] = config.parsed[processKey]\n }\n\n return config\n}\n\nmodule.exports.expand = expand\n","/**\n * This is an \"inlined\" version of Vite's `loadEnv` function,\n * simplified somewhat to only support our use case.\n *\n * Ideally we'd just use `loadEnv` from Vite, but importing it\n * causes bundling issues due to node APIs and downstream dependencies.\n *\n * Vite is MIT licensed, copyright (c) Yuxi (Evan) You and Vite contributors.\n */\n\n/* eslint-disable no-process-env */\nimport fs from 'node:fs'\nimport path from 'node:path'\n\nimport {parse} from 'dotenv'\nimport {expand} from 'dotenv-expand'\n\nexport function loadEnv(\n mode: string,\n envDir: string,\n prefixes: string[] = ['VITE_'],\n): Record<string, string> {\n if (mode === 'local') {\n throw new Error(\n `\"local\" cannot be used as a mode name because it conflicts with ` +\n `the .local postfix for .env files.`,\n )\n }\n\n const env: Record<string, string> = {}\n const envFiles = [\n /** default file */ `.env`,\n /** local file */ `.env.local`,\n /** mode file */ `.env.${mode}`,\n /** mode local file */ `.env.${mode}.local`,\n ]\n\n const parsed = Object.fromEntries(\n envFiles.flatMap((file) => {\n const envPath = lookupFile(envDir, [file], {\n rootDir: envDir,\n })\n if (!envPath) return []\n return Object.entries(parse(fs.readFileSync(envPath)))\n }),\n )\n\n // test NODE_ENV override before expand as otherwise process.env.NODE_ENV would override this\n if (parsed.NODE_ENV && process.env.VITE_USER_NODE_ENV === undefined) {\n process.env.VITE_USER_NODE_ENV = parsed.NODE_ENV\n }\n // support BROWSER and BROWSER_ARGS env variables\n if (parsed.BROWSER && process.env.BROWSER === undefined) {\n process.env.BROWSER = parsed.BROWSER\n }\n if (parsed.BROWSER_ARGS && process.env.BROWSER_ARGS === undefined) {\n process.env.BROWSER_ARGS = parsed.BROWSER_ARGS\n }\n\n try {\n // let environment variables use each other\n expand({parsed})\n } catch (e) {\n // custom error handling until https://github.com/motdotla/dotenv-expand/issues/65 is fixed upstream\n // check for message \"TypeError: Cannot read properties of undefined (reading 'split')\"\n if (e.message.includes('split')) {\n throw new Error('dotenv-expand failed to expand env vars. 
Maybe you need to escape `$`?')\n }\n throw e\n }\n\n // only keys that start with prefix are exposed to client\n for (const [key, value] of Object.entries(parsed)) {\n if (prefixes.some((prefix) => key.startsWith(prefix))) {\n env[key] = value\n }\n }\n\n // check if there are actual env variables starting with VITE_*\n // these are typically provided inline and should be prioritized\n for (const key in process.env) {\n if (prefixes.some((prefix) => key.startsWith(prefix))) {\n env[key] = process.env[key] as string\n }\n }\n\n return env\n}\n\nfunction lookupFile(\n dir: string,\n formats: string[],\n options?: {\n rootDir?: string\n },\n): string | undefined {\n for (const format of formats) {\n const fullPath = path.join(dir, format)\n // eslint-disable-next-line no-sync\n if (fs.existsSync(fullPath) && fs.statSync(fullPath).isFile()) {\n return fullPath\n }\n }\n const parentDir = path.dirname(dir)\n if (parentDir !== dir && (!options?.rootDir || parentDir.startsWith(options?.rootDir))) {\n return lookupFile(parentDir, formats, options)\n }\n\n return undefined\n}\n"],"names":["debugIt","path","fs","require$$0","require$$1","require$$2","require$$3","version","debug","mainModule","parse","expand"],"mappings":";;;;;;AAEa,MAAA,QAAQA,6BAAQ,YAAY;ACOlC,SAAS,eAAe,KAAqB;AAC9C,MAAA;AACK,WAAA,mBAAmB,GAAG,KAAK;AAAA,WAC3B,KAAK;AACZ,UAAM,IAAI,MAAM;AAAA,EAAmD,IAAI,OAAO,EAAE;AAAA,EAAA;AAEpF;AAEA,SAAS,gBAAgB,UAAkB,YAA6B;AACjD,SAAA;AAAA,IACnB,WAAWC,cAAK,QAAA,KAAK,UAAU,GAAG,UAAU,KAAK,CAAC;AAAA,IAClD,WAAWA,cAAK,QAAA,KAAK,UAAU,GAAG,UAAU,KAAK,CAAC;AAAA,IAClD,qBAAqB,QAAQ;AAAA,EAAA,EAGX,KAAK,OAAO;AAClC;AAEA,SAAS,mBAAmB,UAAkB,aAAa,GAAmB;AAExE,MAAA,gBAAgB,UADD,eACqB;AAC/B,WAAA;AAGT,QAAM,YAAYA,cAAA,QAAK,QAAQ,UAAU,IAAI;AACzC,SAAA,cAAc,YAAY,aAAa,KAElC,KAGF,mBAAmB,WAAW,aAAa,CAAC;AACrD;AAEA,SAAS,qBAAqB,UAA2B;AACnD,MAAA;AACF,UAAM,UAAUC,YAAAA,QAAG,aAAaD,cAAAA,QAAK,KAAK,UAAU,aAAa,GAAG,MAAM,GAEpE,SAAS,CAAA,CADI,KAAK,MAAM,OAAO,GACF;AACnC,WAAI,UACF,MAAM,qCAAqC,QAAQ,GAE9C;AAAA,EAAA,QACK;AACL,WAAA;AAAA,EAAA;AAEX;AAEA,SAAS,WAAW,UAA2B;AACtC,SAAAC,YAAA,QAAG,WAAW,QAAQ;AAC/B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC1DA,QAAMA,MAAKC,sBAAAA,SACLF,QAAOG,oBAAAA,SACP,KAAKC,sBAAAA,SACL,SAASC,oBAAAA,SAGTC,WAFc,WAEQ,SAEtB,OAAO;AAGb,WAAS,MAAO,KAAK;AACnB,UAAM,MAAM,CAAA;AAGZ,QAAI,QAAQ,IAAI,SAAQ;AAGxB,YAAQ,MAAM,QAAQ,WAAW;AAAA,CAAI;AAErC,QAAI;AACJ,YAAQ,QAAQ,KAAK,KAAK,KAAK,MAAM,QAAM;AACzC,YAAM,MAAM,MAAM,CAAC;AAGnB,UAAI,QAAS,MAAM,CAAC,KAAK;AAGzB,cAAQ,MAAM,KAAI;AAGlB,YAAM,aAAa,MAAM,CAAC;AAG1B,cAAQ,MAAM,QAAQ,0BAA0B,IAAI,GAGhD,eAAe,QACjB,QAAQ,MAAM,QAAQ,QAAQ;AAAA,CAAI,GAClC,QAAQ,MAAM,QAAQ,QAAQ,IAAI,IAIpC,IAAI,GAAG,IAAI;AAAA,IACf;AAEE,WAAO;AAAA,EACT;AAEA,WAAS,YAAa,SAAS;AAC7B,UAAM,YAAY,WAAW,OAAO,GAG9B,SAAS,aAAa,aAAa,EAAE,MAAM,UAAW,CAAA;AAC5D,QAAI,CAAC,OAAO,QAAQ;AAClB,YAAM,MAAM,IAAI,MAAM,8BAA8B,SAAS,wBAAwB;AACrF,gBAAI,OAAO,gBACL;AAAA,IACV;AAIE,UAAM,OAAO,WAAW,OAAO,EAAE,MAAM,GAAG,GACpC,SAAS,KAAK;AAEpB,QAAI;AACJ,aAAS,IAAI,GAAG,IAAI,QAAQ;AAC1B,UAAI;AAEF,cAAM,MAAM,KAAK,CAAC,EAAE,KAAI,GAGlB,QAAQ,cAAc,QAAQ,GAAG;AAGvC,oBAAY,aAAa,QAAQ,MAAM,YAAY,MAAM,GAAG;AAE5D;AAAA,MACD,SAAQ,OAAO;AAEd,YAAI,IAAI,KAAK;AACX,gBAAM;AAAA,MAGd;AAIE,WAAO,aAAa,MAAM,SAAS;AAAA,EACrC;AAEA,WAAS,KAAM,SAAS;AACtB,YAAQ,IAAI,WAAWA,QAAO,WAAW,OAAO,EAAE;AAAA,EACpD;AAEA,WAAS,MAAO,SAAS;AACvB,YAAQ,IAAI,WAAWA,QAAO,WAAW,OAAO,EAAE;AAAA,EACpD;AAEA,WAAS,OAAQ,SAAS;AACxB,YAAQ,IAAI,WAAWA,QAAO,YAAY,OAAO,EAAE;AAAA,EACrD;AAEA,WAAS,WAAY,SAAS;AAE5B,WAAI,WAAW,QAAQ,cAAc,QAAQ,WAAW,SAAS,IACxD,QAAQ,aAIb,QAAQ,IAAI,cAAc,QAAQ,IAAI,WAAW,SAAS,IACrD,QAAQ,IAAI,aAId;AAAA,EACT;AAEA,WAAS,cAAe,QAAQ,WAAW;AAEzC,QAAI;AACJ,QAAI;AACF,YAAM,IAA
I,IAAI,SAAS;AAAA,IACxB,SAAQ,OAAO;AACd,UAAI,MAAM,SAAS,mBAAmB;AACpC,cAAM,MAAM,IAAI,MAAM,4IAA4I;AAClK,kBAAI,OAAO,sBACL;AAAA,MACZ;AAEI,YAAM;AAAA,IACV;AAGE,UAAM,MAAM,IAAI;AAChB,QAAI,CAAC,KAAK;AACR,YAAM,MAAM,IAAI,MAAM,sCAAsC;AAC5D,gBAAI,OAAO,sBACL;AAAA,IACV;AAGE,UAAM,cAAc,IAAI,aAAa,IAAI,aAAa;AACtD,QAAI,CAAC,aAAa;AAChB,YAAM,MAAM,IAAI,MAAM,8CAA8C;AACpE,gBAAI,OAAO,sBACL;AAAA,IACV;AAGE,UAAM,iBAAiB,gBAAgB,YAAY,YAAW,CAAE,IAC1D,aAAa,OAAO,OAAO,cAAc;AAC/C,QAAI,CAAC,YAAY;AACf,YAAM,MAAM,IAAI,MAAM,2DAA2D,cAAc,2BAA2B;AAC1H,gBAAI,OAAO,gCACL;AAAA,IACV;AAEE,WAAO,EAAE,YAAY,IAAG;AAAA,EAC1B;AAEA,WAAS,WAAY,SAAS;AAC5B,QAAI,oBAAoB;AAExB,QAAI,WAAW,QAAQ,QAAQ,QAAQ,KAAK,SAAS;AACnD,UAAI,MAAM,QAAQ,QAAQ,IAAI;AAC5B,mBAAW,YAAY,QAAQ;AAC7B,UAAIL,IAAG,WAAW,QAAQ,MACxB,oBAAoB,SAAS,SAAS,QAAQ,IAAI,WAAW,GAAG,QAAQ;AAAA;AAI5E,4BAAoB,QAAQ,KAAK,SAAS,QAAQ,IAAI,QAAQ,OAAO,GAAG,QAAQ,IAAI;AAAA;AAGtF,0BAAoBD,MAAK,QAAQ,QAAQ,IAAK,GAAE,YAAY;AAG9D,WAAIC,IAAG,WAAW,iBAAiB,IAC1B,oBAGF;AAAA,EACT;AAEA,WAAS,aAAc,SAAS;AAC9B,WAAO,QAAQ,CAAC,MAAM,MAAMD,MAAK,KAAK,GAAG,QAAS,GAAE,QAAQ,MAAM,CAAC,CAAC,IAAI;AAAA,EAC1E;AAEA,WAAS,aAAc,SAAS;AAC9B,SAAK,uCAAuC;AAE5C,UAAM,SAAS,aAAa,YAAY,OAAO;AAE/C,QAAI,aAAa,QAAQ;AACzB,WAAI,WAAW,QAAQ,cAAc,SACnC,aAAa,QAAQ,aAGvB,aAAa,SAAS,YAAY,QAAQ,OAAO,GAE1C,EAAE,OAAM;AAAA,EACjB;AAEA,WAAS,aAAc,SAAS;AAC9B,UAAM,aAAaA,MAAK,QAAQ,QAAQ,IAAK,GAAE,MAAM;AACrD,QAAI,WAAW;AACf,UAAMO,SAAQ,GAAQ,WAAW,QAAQ;AAEzC,IAAI,WAAW,QAAQ,WACrB,WAAW,QAAQ,WAEfA,UACF,OAAO,oDAAoD;AAI/D,QAAI,cAAc,CAAC,UAAU;AAC7B,QAAI,WAAW,QAAQ;AACrB,UAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI;AAC7B,sBAAc,CAAC,aAAa,QAAQ,IAAI,CAAC;AAAA,WACpC;AACL,sBAAc,CAAE;AAChB,mBAAW,YAAY,QAAQ;AAC7B,sBAAY,KAAK,aAAa,QAAQ,CAAC;AAAA,MAE/C;AAKE,QAAI;AACJ,UAAM,YAAY,CAAA;AAClB,eAAWP,SAAQ;AACjB,UAAI;AAEF,cAAM,SAAS,aAAa,MAAMC,IAAG,aAAaD,OAAM,EAAE,UAAU,CAAC;AAErE,qBAAa,SAAS,WAAW,QAAQ,OAAO;AAAA,MACjD,SAAQ,GAAG;AACV,QAAIO,UACF,OAAO,kBAAkBP,KAAI,IAAI,EAAE,OAAO,EAAE,GAE9C,YAAY;AAAA,MAClB;AAGE,QAAI,aAAa,QAAQ;AAOzB,WANI,WAAW,QAAQ,cAAc,SACnC,aAAa,QAAQ,aAGvB,aAAa,SAAS,YAAY,WAAW,OAAO,GAEhD,YACK,EAAE,QAAQ,WAAW,OAAO,UAAS,IAErC,EAAE,QAAQ,UAAS;AAAA,EAE9B;AAGA,WAAS,OAAQ,SAAS;AAExB,QAAI,WAAW,OAAO,EAAE,WAAW;AACjC,aAAO,aAAa,aAAa,OAAO;AAG1C,UAAM,YAAY,WAAW,OAAO;AAGpC,WAAK,YAME,aAAa,aAAa,OAAO,KALtC,MAAM,+DAA+D,SAAS,+BAA+B,GAEtG,aAAa,aAAa,OAAO;AAAA,EAI5C;AAEA,WAAS,QAAS,WAAW,QAAQ;AACnC,UAAM,MAAM,OAAO,KAAK,OAAO,MAAM,GAAG,GAAG,KAAK;AAChD,QAAI,aAAa,OAAO,KAAK,WAAW,QAAQ;AAEhD,UAAM,QAAQ,WAAW,SAAS,GAAG,EAAE,GACjC,UAAU,WAAW,SAAS,GAAG;AACvC,iBAAa,WAAW,SAAS,IAAI,GAAG;AAExC,QAAI;AACF,YAAM,SAAS,OAAO,iBAAiB,eAAe,KAAK,KAAK;AAChE,oBAAO,WAAW,OAAO,GAClB,GAAG,OAAO,OAAO,UAAU,CAAC,GAAG,OAAO,OAAO;AAAA,IACrD,SAAQ,OAAO;AACd,YAAM,UAAU,iBAAiB,YAC3B,mBAAmB,MAAM,YAAY,sBACrC,mBAAmB,MAAM,YAAY;AAE3C,UAAI,WAAW,kBAAkB;AAC/B,cAAM,MAAM,IAAI,MAAM,6DAA6D;AACnF,kBAAI,OAAO,sBACL;AAAA,MACP,WAAU,kBAAkB;AAC3B,cAAM,MAAM,IAAI,MAAM,iDAAiD;AACvE,kBAAI,OAAO,qBACL;AAAA,MACZ;AACM,cAAM;AAAA,IAEZ;AAAA,EACA;AAGA,WAAS,SAAU,YAAY,QAAQ,UAAU,CAAA,GAAI;AACnD,UAAMO,SAAQ,GAAQ,WAAW,QAAQ,QACnC,WAAW,GAAQ,WAAW,QAAQ;AAE5C,QAAI,OAAO,UAAW,UAAU;AAC9B,YAAM,MAAM,IAAI,MAAM,gFAAgF;AACtG,gBAAI,OAAO,mBACL;AAAA,IACV;AAGE,eAAW,OAAO,OAAO,KAAK,MAAM;AAClC,MAAI,OAAO,UAAU,eAAe,KAAK,YAAY,GAAG,KAClD,aAAa,OACf,WAAW,GAAG,IAAI,OAAO,GAAG,IAG1BA,UAEA,OADE,aAAa,KACR,IAAI,GAAG,6CAEP,IAAI,GAAG,8CAF0C,KAM5D,WAAW,GAAG,IAAI,OAAO,GAAG;AAAA,EAGlC;AAEA,QAAM,eAAe;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAE2BC,gBAAA,QAAA,eAAG,aAAa,cAChBA,OAAA,QAAA,eAAG,aAAa,cACjBA,OAAA,QAAA,cAAG,aAAa,aACrBA,OAAA,QAAA,SAAG,aAAa,QACfA,OAAA,QAAA,UAAG,aAAa,SAClBA,OAAA,QAAA,QAAG,aAAa,OACbA,OAAA,QAAA,WAAG,aAAa,UAEvCA,OAAA,UAA
iB;;;;;;ACtWjB,WAAS,aAAc,UAAU,aAAa,QAAQ;AACpD,UAAM,UAAU,SAAS,MAAM,+BAA+B,KAAK,CAAA;AAEnE,WAAO,QAAQ,OAAO,SAAU,QAAQ,OAAO,OAAO;AACpD,YAAM,QAAQ,mCAAmC,KAAK,KAAK;AAC3D,UAAI,CAAC,SAAS,MAAM,WAAW;AAC7B,eAAO;AAGT,YAAM,SAAS,MAAM,CAAC;AAEtB,UAAI,OAAO;AAEX,UAAI,WAAW;AACb,sBAAc,MAAM,CAAC,GACrB,QAAQ,YAAY,QAAQ,OAAO,GAAG;AAAA,WACjC;AACL,cAAM,WAAW,MAAM,CAAC,EAAE,MAAM,IAAI,GAC9B,MAAM,SAAS,CAAC;AAQtB,YAPA,cAAc,MAAM,CAAC,EAAE,UAAU,OAAO,MAAM,GAE9C,QAAQ,OAAO,UAAU,eAAe,KAAK,aAAa,GAAG,IACzD,YAAY,GAAG,IACd,OAAO,OAAO,GAAG,KAAK,SAAS,CAAC,KAAK,IAGtC,SAAS,SAAS,KAAK,OAAO;AAChC,gBAAM,gBAAgB,QAAQ,QAAQ,CAAC;AACvC,kBAAQ,QAAQ,CAAC,IAAI,IAErB,SAAS,OAAO,QAAQ,eAAe,EAAE;AAAA,QACjD;AAEM,gBAAQ,aAAa,OAAO,aAAa,MAAM;AAAA,MACrD;AAEI,aAAO,OAAO,QAAQ,aAAa,KAAK;AAAA,IAC5C,GAAK,QAAQ;AAAA,EACb;AAEA,WAAS,OAAQ,QAAQ;AAEvB,UAAM,cAAc,OAAO,mBAAmB,CAAA,IAAK,QAAQ;AAE3D,eAAW,aAAa,OAAO,QAAQ;AACrC,YAAM,QAAQ,OAAO,UAAU,eAAe,KAAK,aAAa,SAAS,IAAI,YAAY,SAAS,IAAI,OAAO,OAAO,SAAS;AAE7H,aAAO,OAAO,SAAS,IAAI,aAAa,OAAO,aAAa,MAAM;AAAA,IACtE;AAEE,eAAW,cAAc,OAAO;AAC9B,kBAAY,UAAU,IAAI,OAAO,OAAO,UAAU;AAGpD,WAAO;AAAA,EACT;AAEA,cAAA,SAAwB;;;AC1CjB,SAAS,QACd,MACA,QACA,WAAqB,CAAC,OAAO,GACL;AACxB,MAAI,SAAS;AACX,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAGI,QAAA,MAA8B,CAAC,GAC/B,WAAW;AAAA;AAAA,IACK;AAAA;AAAA,IACF;AAAA;AAAA,IACD,QAAQ,IAAI;AAAA;AAAA,IACN,QAAQ,IAAI;AAAA,EAAA,GAG/B,SAAS,OAAO;AAAA,IACpB,SAAS,QAAQ,CAAC,SAAS;AACzB,YAAM,UAAU,WAAW,QAAQ,CAAC,IAAI,GAAG;AAAA,QACzC,SAAS;AAAA,MAAA,CACV;AACI,aAAA,UACE,OAAO,QAAQC,cAAM,MAAAR,YAAA,QAAG,aAAa,OAAO,CAAC,CAAC,IADhC,CAAC;AAAA,IAEvB,CAAA;AAAA,EACH;AAGI,SAAO,YAAY,QAAQ,IAAI,uBAAuB,WACxD,QAAQ,IAAI,qBAAqB,OAAO,WAGtC,OAAO,WAAW,QAAQ,IAAI,YAAY,WAC5C,QAAQ,IAAI,UAAU,OAAO,UAE3B,OAAO,gBAAgB,QAAQ,IAAI,iBAAiB,WACtD,QAAQ,IAAI,eAAe,OAAO;AAGhC,MAAA;AAEKS,gBAAA,OAAA,EAAC,QAAO;AAAA,WACR,GAAG;AAGN,UAAA,EAAE,QAAQ,SAAS,OAAO,IACtB,IAAI,MAAM,wEAAwE,IAEpF;AAAA,EAAA;AAIR,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM;AAC1C,aAAS,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,MAClD,IAAI,GAAG,IAAI;AAMf,aAAW,OAAO,QAAQ;AACpB,aAAS,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,MAClD,IAAI,GAAG,IAAI,QAAQ,IAAI,GAAG;AAIvB,SAAA;AACT;AAEA,SAAS,WACP,KACA,SACA,SAGoB;AACpB,aAAW,UAAU,SAAS;AAC5B,UAAM,WAAWV,cAAA,QAAK,KAAK,KAAK,MAAM;AAElC,QAAAC,YAAA,QAAG,WAAW,QAAQ,KAAKA,oBAAG,SAAS,QAAQ,EAAE,OAAO;AACnD,aAAA;AAAA,EAAA;AAGL,QAAA,YAAYD,cAAAA,QAAK,QAAQ,GAAG;AAC9B,MAAA,cAAc,QAAQ,CAAC,SAAS,WAAW,UAAU,WAAW,SAAS,OAAO;AAC3E,WAAA,WAAW,WAAW,SAAS,OAAO;AAIjD;;;;;;;;","x_google_ignoreList":[2,3]}
+
{"version":3,"file":"loadEnv.js","sources":["../../src/debug.ts","../../src/util/resolveRootDir.ts","../../../../../node_modules/.pnpm/dotenv@16.4.7/node_modules/dotenv/lib/main.js","../../../../../node_modules/.pnpm/dotenv-expand@9.0.0/node_modules/dotenv-expand/lib/main.js","../../src/util/loadEnv.ts"],"sourcesContent":["import debugIt from 'debug'\n\nexport const debug = debugIt('sanity:cli')\n","/* eslint-disable no-sync */\nimport fs from 'node:fs'\nimport path from 'node:path'\n\nimport {debug} from '../debug'\n\n/**\n * Resolve project root directory, falling back to cwd if it cannot be found\n */\nexport function resolveRootDir(cwd: string): string {\n try {\n return resolveProjectRoot(cwd) || cwd\n } catch (err) {\n throw new Error(`Error occurred trying to resolve project root:\\n${err.message}`)\n }\n}\n\nfunction hasSanityConfig(basePath: string, configName: string): boolean {\n const buildConfigs = [\n fileExists(path.join(basePath, `${configName}.js`)),\n fileExists(path.join(basePath, `${configName}.ts`)),\n isSanityV2StudioRoot(basePath),\n ]\n\n return buildConfigs.some(Boolean)\n}\n\nfunction resolveProjectRoot(basePath: string, iterations = 0): string | false {\n const configName = 'sanity.config'\n if (hasSanityConfig(basePath, configName)) {\n return basePath\n }\n\n const parentDir = path.resolve(basePath, '..')\n if (parentDir === basePath || iterations > 30) {\n // Reached root (or max depth), give up\n return false\n }\n\n return resolveProjectRoot(parentDir, iterations + 1)\n}\n\nfunction isSanityV2StudioRoot(basePath: string): boolean {\n try {\n const content = fs.readFileSync(path.join(basePath, 'sanity.json'), 'utf8')\n const sanityJson = JSON.parse(content)\n const isRoot = Boolean(sanityJson?.root)\n if (isRoot) {\n debug('Found Sanity v2 studio root at %s', basePath)\n }\n return isRoot\n } catch (err) {\n return false\n }\n}\n\nfunction fileExists(filePath: string): boolean {\n return fs.existsSync(filePath)\n}\n","const fs = require('fs')\nconst path = require('path')\nconst os = require('os')\nconst crypto = require('crypto')\nconst packageJson = require('../package.json')\n\nconst version = packageJson.version\n\nconst LINE = /(?:^|^)\\s*(?:export\\s+)?([\\w.-]+)(?:\\s*=\\s*?|:\\s+?)(\\s*'(?:\\\\'|[^'])*'|\\s*\"(?:\\\\\"|[^\"])*\"|\\s*`(?:\\\\`|[^`])*`|[^#\\r\\n]+)?\\s*(?:#.*)?(?:$|$)/mg\n\n// Parse src into an Object\nfunction parse (src) {\n const obj = {}\n\n // Convert buffer to string\n let lines = src.toString()\n\n // Convert line breaks to same format\n lines = lines.replace(/\\r\\n?/mg, '\\n')\n\n let match\n while ((match = LINE.exec(lines)) != null) {\n const key = match[1]\n\n // Default undefined or null to empty string\n let value = (match[2] || '')\n\n // Remove whitespace\n value = value.trim()\n\n // Check if double quoted\n const maybeQuote = value[0]\n\n // Remove surrounding quotes\n value = value.replace(/^(['\"`])([\\s\\S]*)\\1$/mg, '$2')\n\n // Expand newlines if double quoted\n if (maybeQuote === '\"') {\n value = value.replace(/\\\\n/g, '\\n')\n value = value.replace(/\\\\r/g, '\\r')\n }\n\n // Add to object\n obj[key] = value\n }\n\n return obj\n}\n\nfunction _parseVault (options) {\n const vaultPath = _vaultPath(options)\n\n // Parse .env.vault\n const result = DotenvModule.configDotenv({ path: vaultPath })\n if (!result.parsed) {\n const err = new Error(`MISSING_DATA: Cannot parse ${vaultPath} for an unknown reason`)\n err.code = 'MISSING_DATA'\n throw err\n }\n\n // handle scenario for comma separated keys - for use with key 
rotation\n // example: DOTENV_KEY=\"dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=prod,dotenv://:key_7890@dotenvx.com/vault/.env.vault?environment=prod\"\n const keys = _dotenvKey(options).split(',')\n const length = keys.length\n\n let decrypted\n for (let i = 0; i < length; i++) {\n try {\n // Get full key\n const key = keys[i].trim()\n\n // Get instructions for decrypt\n const attrs = _instructions(result, key)\n\n // Decrypt\n decrypted = DotenvModule.decrypt(attrs.ciphertext, attrs.key)\n\n break\n } catch (error) {\n // last key\n if (i + 1 >= length) {\n throw error\n }\n // try next key\n }\n }\n\n // Parse decrypted .env string\n return DotenvModule.parse(decrypted)\n}\n\nfunction _log (message) {\n console.log(`[dotenv@${version}][INFO] ${message}`)\n}\n\nfunction _warn (message) {\n console.log(`[dotenv@${version}][WARN] ${message}`)\n}\n\nfunction _debug (message) {\n console.log(`[dotenv@${version}][DEBUG] ${message}`)\n}\n\nfunction _dotenvKey (options) {\n // prioritize developer directly setting options.DOTENV_KEY\n if (options && options.DOTENV_KEY && options.DOTENV_KEY.length > 0) {\n return options.DOTENV_KEY\n }\n\n // secondary infra already contains a DOTENV_KEY environment variable\n if (process.env.DOTENV_KEY && process.env.DOTENV_KEY.length > 0) {\n return process.env.DOTENV_KEY\n }\n\n // fallback to empty string\n return ''\n}\n\nfunction _instructions (result, dotenvKey) {\n // Parse DOTENV_KEY. Format is a URI\n let uri\n try {\n uri = new URL(dotenvKey)\n } catch (error) {\n if (error.code === 'ERR_INVALID_URL') {\n const err = new Error('INVALID_DOTENV_KEY: Wrong format. Must be in valid uri format like dotenv://:key_1234@dotenvx.com/vault/.env.vault?environment=development')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n throw error\n }\n\n // Get decrypt key\n const key = uri.password\n if (!key) {\n const err = new Error('INVALID_DOTENV_KEY: Missing key part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get environment\n const environment = uri.searchParams.get('environment')\n if (!environment) {\n const err = new Error('INVALID_DOTENV_KEY: Missing environment part')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n }\n\n // Get ciphertext payload\n const environmentKey = `DOTENV_VAULT_${environment.toUpperCase()}`\n const ciphertext = result.parsed[environmentKey] // DOTENV_VAULT_PRODUCTION\n if (!ciphertext) {\n const err = new Error(`NOT_FOUND_DOTENV_ENVIRONMENT: Cannot locate environment ${environmentKey} in your .env.vault file.`)\n err.code = 'NOT_FOUND_DOTENV_ENVIRONMENT'\n throw err\n }\n\n return { ciphertext, key }\n}\n\nfunction _vaultPath (options) {\n let possibleVaultPath = null\n\n if (options && options.path && options.path.length > 0) {\n if (Array.isArray(options.path)) {\n for (const filepath of options.path) {\n if (fs.existsSync(filepath)) {\n possibleVaultPath = filepath.endsWith('.vault') ? filepath : `${filepath}.vault`\n }\n }\n } else {\n possibleVaultPath = options.path.endsWith('.vault') ? options.path : `${options.path}.vault`\n }\n } else {\n possibleVaultPath = path.resolve(process.cwd(), '.env.vault')\n }\n\n if (fs.existsSync(possibleVaultPath)) {\n return possibleVaultPath\n }\n\n return null\n}\n\nfunction _resolveHome (envPath) {\n return envPath[0] === '~' ? 
path.join(os.homedir(), envPath.slice(1)) : envPath\n}\n\nfunction _configVault (options) {\n _log('Loading env from encrypted .env.vault')\n\n const parsed = DotenvModule._parseVault(options)\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsed, options)\n\n return { parsed }\n}\n\nfunction configDotenv (options) {\n const dotenvPath = path.resolve(process.cwd(), '.env')\n let encoding = 'utf8'\n const debug = Boolean(options && options.debug)\n\n if (options && options.encoding) {\n encoding = options.encoding\n } else {\n if (debug) {\n _debug('No encoding is specified. UTF-8 is used by default')\n }\n }\n\n let optionPaths = [dotenvPath] // default, look for .env\n if (options && options.path) {\n if (!Array.isArray(options.path)) {\n optionPaths = [_resolveHome(options.path)]\n } else {\n optionPaths = [] // reset default\n for (const filepath of options.path) {\n optionPaths.push(_resolveHome(filepath))\n }\n }\n }\n\n // Build the parsed data in a temporary object (because we need to return it). Once we have the final\n // parsed data, we will combine it with process.env (or options.processEnv if provided).\n let lastError\n const parsedAll = {}\n for (const path of optionPaths) {\n try {\n // Specifying an encoding returns a string instead of a buffer\n const parsed = DotenvModule.parse(fs.readFileSync(path, { encoding }))\n\n DotenvModule.populate(parsedAll, parsed, options)\n } catch (e) {\n if (debug) {\n _debug(`Failed to load ${path} ${e.message}`)\n }\n lastError = e\n }\n }\n\n let processEnv = process.env\n if (options && options.processEnv != null) {\n processEnv = options.processEnv\n }\n\n DotenvModule.populate(processEnv, parsedAll, options)\n\n if (lastError) {\n return { parsed: parsedAll, error: lastError }\n } else {\n return { parsed: parsedAll }\n }\n}\n\n// Populates process.env from .env file\nfunction config (options) {\n // fallback to original dotenv if DOTENV_KEY is not set\n if (_dotenvKey(options).length === 0) {\n return DotenvModule.configDotenv(options)\n }\n\n const vaultPath = _vaultPath(options)\n\n // dotenvKey exists but .env.vault file does not exist\n if (!vaultPath) {\n _warn(`You set DOTENV_KEY but you are missing a .env.vault file at ${vaultPath}. 
Did you forget to build it?`)\n\n return DotenvModule.configDotenv(options)\n }\n\n return DotenvModule._configVault(options)\n}\n\nfunction decrypt (encrypted, keyStr) {\n const key = Buffer.from(keyStr.slice(-64), 'hex')\n let ciphertext = Buffer.from(encrypted, 'base64')\n\n const nonce = ciphertext.subarray(0, 12)\n const authTag = ciphertext.subarray(-16)\n ciphertext = ciphertext.subarray(12, -16)\n\n try {\n const aesgcm = crypto.createDecipheriv('aes-256-gcm', key, nonce)\n aesgcm.setAuthTag(authTag)\n return `${aesgcm.update(ciphertext)}${aesgcm.final()}`\n } catch (error) {\n const isRange = error instanceof RangeError\n const invalidKeyLength = error.message === 'Invalid key length'\n const decryptionFailed = error.message === 'Unsupported state or unable to authenticate data'\n\n if (isRange || invalidKeyLength) {\n const err = new Error('INVALID_DOTENV_KEY: It must be 64 characters long (or more)')\n err.code = 'INVALID_DOTENV_KEY'\n throw err\n } else if (decryptionFailed) {\n const err = new Error('DECRYPTION_FAILED: Please check your DOTENV_KEY')\n err.code = 'DECRYPTION_FAILED'\n throw err\n } else {\n throw error\n }\n }\n}\n\n// Populate process.env with parsed values\nfunction populate (processEnv, parsed, options = {}) {\n const debug = Boolean(options && options.debug)\n const override = Boolean(options && options.override)\n\n if (typeof parsed !== 'object') {\n const err = new Error('OBJECT_REQUIRED: Please check the processEnv argument being passed to populate')\n err.code = 'OBJECT_REQUIRED'\n throw err\n }\n\n // Set process.env\n for (const key of Object.keys(parsed)) {\n if (Object.prototype.hasOwnProperty.call(processEnv, key)) {\n if (override === true) {\n processEnv[key] = parsed[key]\n }\n\n if (debug) {\n if (override === true) {\n _debug(`\"${key}\" is already defined and WAS overwritten`)\n } else {\n _debug(`\"${key}\" is already defined and was NOT overwritten`)\n }\n }\n } else {\n processEnv[key] = parsed[key]\n }\n }\n}\n\nconst DotenvModule = {\n configDotenv,\n _configVault,\n _parseVault,\n config,\n decrypt,\n parse,\n populate\n}\n\nmodule.exports.configDotenv = DotenvModule.configDotenv\nmodule.exports._configVault = DotenvModule._configVault\nmodule.exports._parseVault = DotenvModule._parseVault\nmodule.exports.config = DotenvModule.config\nmodule.exports.decrypt = DotenvModule.decrypt\nmodule.exports.parse = DotenvModule.parse\nmodule.exports.populate = DotenvModule.populate\n\nmodule.exports = DotenvModule\n","'use strict'\n\nfunction _interpolate (envValue, environment, config) {\n const matches = envValue.match(/(.?\\${*[\\w]*(?::-[\\w/]*)?}*)/g) || []\n\n return matches.reduce(function (newEnv, match, index) {\n const parts = /(.?)\\${*([\\w]*(?::-[\\w/]*)?)?}*/g.exec(match)\n if (!parts || parts.length === 0) {\n return newEnv\n }\n\n const prefix = parts[1]\n\n let value, replacePart\n\n if (prefix === '\\\\') {\n replacePart = parts[0]\n value = replacePart.replace('\\\\$', '$')\n } else {\n const keyParts = parts[2].split(':-')\n const key = keyParts[0]\n replacePart = parts[0].substring(prefix.length)\n // process.env value 'wins' over .env file's value\n value = Object.prototype.hasOwnProperty.call(environment, key)\n ? 
environment[key]\n : (config.parsed[key] || keyParts[1] || '')\n\n // If the value is found, remove nested expansions.\n if (keyParts.length > 1 && value) {\n const replaceNested = matches[index + 1]\n matches[index + 1] = ''\n\n newEnv = newEnv.replace(replaceNested, '')\n }\n // Resolve recursive interpolations\n value = _interpolate(value, environment, config)\n }\n\n return newEnv.replace(replacePart, value)\n }, envValue)\n}\n\nfunction expand (config) {\n // if ignoring process.env, use a blank object\n const environment = config.ignoreProcessEnv ? {} : process.env\n\n for (const configKey in config.parsed) {\n const value = Object.prototype.hasOwnProperty.call(environment, configKey) ? environment[configKey] : config.parsed[configKey]\n\n config.parsed[configKey] = _interpolate(value, environment, config)\n }\n\n for (const processKey in config.parsed) {\n environment[processKey] = config.parsed[processKey]\n }\n\n return config\n}\n\nmodule.exports.expand = expand\n","/**\n * This is an \"inlined\" version of Vite's `loadEnv` function,\n * simplified somewhat to only support our use case.\n *\n * Ideally we'd just use `loadEnv` from Vite, but importing it\n * causes bundling issues due to node APIs and downstream dependencies.\n *\n * Vite is MIT licensed, copyright (c) Yuxi (Evan) You and Vite contributors.\n */\n\n/* eslint-disable no-process-env */\nimport fs from 'node:fs'\nimport path from 'node:path'\n\nimport {parse} from 'dotenv'\nimport {expand} from 'dotenv-expand'\n\nexport function loadEnv(\n mode: string,\n envDir: string,\n prefixes: string[] = ['VITE_'],\n): Record<string, string> {\n if (mode === 'local') {\n throw new Error(\n `\"local\" cannot be used as a mode name because it conflicts with ` +\n `the .local postfix for .env files.`,\n )\n }\n\n const env: Record<string, string> = {}\n const envFiles = [\n /** default file */ `.env`,\n /** local file */ `.env.local`,\n /** mode file */ `.env.${mode}`,\n /** mode local file */ `.env.${mode}.local`,\n ]\n\n const parsed = Object.fromEntries(\n envFiles.flatMap((file) => {\n const envPath = lookupFile(envDir, [file], {\n rootDir: envDir,\n })\n if (!envPath) return []\n return Object.entries(parse(fs.readFileSync(envPath)))\n }),\n )\n\n // test NODE_ENV override before expand as otherwise process.env.NODE_ENV would override this\n if (parsed.NODE_ENV && process.env.VITE_USER_NODE_ENV === undefined) {\n process.env.VITE_USER_NODE_ENV = parsed.NODE_ENV\n }\n // support BROWSER and BROWSER_ARGS env variables\n if (parsed.BROWSER && process.env.BROWSER === undefined) {\n process.env.BROWSER = parsed.BROWSER\n }\n if (parsed.BROWSER_ARGS && process.env.BROWSER_ARGS === undefined) {\n process.env.BROWSER_ARGS = parsed.BROWSER_ARGS\n }\n\n try {\n // let environment variables use each other\n expand({parsed})\n } catch (e) {\n // custom error handling until https://github.com/motdotla/dotenv-expand/issues/65 is fixed upstream\n // check for message \"TypeError: Cannot read properties of undefined (reading 'split')\"\n if (e.message.includes('split')) {\n throw new Error('dotenv-expand failed to expand env vars. 
Maybe you need to escape `$`?')\n }\n throw e\n }\n\n // only keys that start with prefix are exposed to client\n for (const [key, value] of Object.entries(parsed)) {\n if (prefixes.some((prefix) => key.startsWith(prefix))) {\n env[key] = value\n }\n }\n\n // check if there are actual env variables starting with VITE_*\n // these are typically provided inline and should be prioritized\n for (const key in process.env) {\n if (prefixes.some((prefix) => key.startsWith(prefix))) {\n env[key] = process.env[key] as string\n }\n }\n\n return env\n}\n\nfunction lookupFile(\n dir: string,\n formats: string[],\n options?: {\n rootDir?: string\n },\n): string | undefined {\n for (const format of formats) {\n const fullPath = path.join(dir, format)\n // eslint-disable-next-line no-sync\n if (fs.existsSync(fullPath) && fs.statSync(fullPath).isFile()) {\n return fullPath\n }\n }\n const parentDir = path.dirname(dir)\n if (parentDir !== dir && (!options?.rootDir || parentDir.startsWith(options?.rootDir))) {\n return lookupFile(parentDir, formats, options)\n }\n\n return undefined\n}\n"],"names":["debugIt","path","fs","require$$0","require$$1","require$$2","require$$3","version","debug","mainModule","parse","expand"],"mappings":";;;;;;AAEa,MAAA,QAAQA,yBAAQ,YAAY;ACOlC,SAAS,eAAe,KAAqB;AAC9C,MAAA;AACK,WAAA,mBAAmB,GAAG,KAAK;AAAA,WAC3B,KAAK;AACZ,UAAM,IAAI,MAAM;AAAA,EAAmD,IAAI,OAAO,EAAE;AAAA,EAAA;AAEpF;AAEA,SAAS,gBAAgB,UAAkB,YAA6B;AACjD,SAAA;AAAA,IACnB,WAAWC,cAAK,QAAA,KAAK,UAAU,GAAG,UAAU,KAAK,CAAC;AAAA,IAClD,WAAWA,cAAK,QAAA,KAAK,UAAU,GAAG,UAAU,KAAK,CAAC;AAAA,IAClD,qBAAqB,QAAQ;AAAA,EAAA,EAGX,KAAK,OAAO;AAClC;AAEA,SAAS,mBAAmB,UAAkB,aAAa,GAAmB;AAExE,MAAA,gBAAgB,UADD,eACqB;AAC/B,WAAA;AAGT,QAAM,YAAYA,cAAA,QAAK,QAAQ,UAAU,IAAI;AACzC,SAAA,cAAc,YAAY,aAAa,KAElC,KAGF,mBAAmB,WAAW,aAAa,CAAC;AACrD;AAEA,SAAS,qBAAqB,UAA2B;AACnD,MAAA;AACF,UAAM,UAAUC,YAAAA,QAAG,aAAaD,cAAAA,QAAK,KAAK,UAAU,aAAa,GAAG,MAAM,GAEpE,SAAS,CAAA,CADI,KAAK,MAAM,OAAO,GACF;AACnC,WAAI,UACF,MAAM,qCAAqC,QAAQ,GAE9C;AAAA,EAAA,QACK;AACL,WAAA;AAAA,EAAA;AAEX;AAEA,SAAS,WAAW,UAA2B;AACtC,SAAAC,YAAA,QAAG,WAAW,QAAQ;AAC/B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC1DA,QAAMA,MAAKC,sBAAAA,SACLF,QAAOG,oBAAAA,SACP,KAAKC,sBAAAA,SACL,SAASC,oBAAAA,SAGTC,WAFc,WAEQ,SAEtB,OAAO;AAGb,WAAS,MAAO,KAAK;AACnB,UAAM,MAAM,CAAA;AAGZ,QAAI,QAAQ,IAAI,SAAQ;AAGxB,YAAQ,MAAM,QAAQ,WAAW;AAAA,CAAI;AAErC,QAAI;AACJ,YAAQ,QAAQ,KAAK,KAAK,KAAK,MAAM,QAAM;AACzC,YAAM,MAAM,MAAM,CAAC;AAGnB,UAAI,QAAS,MAAM,CAAC,KAAK;AAGzB,cAAQ,MAAM,KAAI;AAGlB,YAAM,aAAa,MAAM,CAAC;AAG1B,cAAQ,MAAM,QAAQ,0BAA0B,IAAI,GAGhD,eAAe,QACjB,QAAQ,MAAM,QAAQ,QAAQ;AAAA,CAAI,GAClC,QAAQ,MAAM,QAAQ,QAAQ,IAAI,IAIpC,IAAI,GAAG,IAAI;AAAA,IACf;AAEE,WAAO;AAAA,EACT;AAEA,WAAS,YAAa,SAAS;AAC7B,UAAM,YAAY,WAAW,OAAO,GAG9B,SAAS,aAAa,aAAa,EAAE,MAAM,UAAW,CAAA;AAC5D,QAAI,CAAC,OAAO,QAAQ;AAClB,YAAM,MAAM,IAAI,MAAM,8BAA8B,SAAS,wBAAwB;AACrF,gBAAI,OAAO,gBACL;AAAA,IACV;AAIE,UAAM,OAAO,WAAW,OAAO,EAAE,MAAM,GAAG,GACpC,SAAS,KAAK;AAEpB,QAAI;AACJ,aAAS,IAAI,GAAG,IAAI,QAAQ;AAC1B,UAAI;AAEF,cAAM,MAAM,KAAK,CAAC,EAAE,KAAI,GAGlB,QAAQ,cAAc,QAAQ,GAAG;AAGvC,oBAAY,aAAa,QAAQ,MAAM,YAAY,MAAM,GAAG;AAE5D;AAAA,MACD,SAAQ,OAAO;AAEd,YAAI,IAAI,KAAK;AACX,gBAAM;AAAA,MAGd;AAIE,WAAO,aAAa,MAAM,SAAS;AAAA,EACrC;AAEA,WAAS,KAAM,SAAS;AACtB,YAAQ,IAAI,WAAWA,QAAO,WAAW,OAAO,EAAE;AAAA,EACpD;AAEA,WAAS,MAAO,SAAS;AACvB,YAAQ,IAAI,WAAWA,QAAO,WAAW,OAAO,EAAE;AAAA,EACpD;AAEA,WAAS,OAAQ,SAAS;AACxB,YAAQ,IAAI,WAAWA,QAAO,YAAY,OAAO,EAAE;AAAA,EACrD;AAEA,WAAS,WAAY,SAAS;AAE5B,WAAI,WAAW,QAAQ,cAAc,QAAQ,WAAW,SAAS,IACxD,QAAQ,aAIb,QAAQ,IAAI,cAAc,QAAQ,IAAI,WAAW,SAAS,IACrD,QAAQ,IAAI,aAId;AAAA,EACT;AAEA,WAAS,cAAe,QAAQ,WAAW;AAEzC,QAAI;AACJ,QAAI;AACF,YAAM,IAA
I,IAAI,SAAS;AAAA,IACxB,SAAQ,OAAO;AACd,UAAI,MAAM,SAAS,mBAAmB;AACpC,cAAM,MAAM,IAAI,MAAM,4IAA4I;AAClK,kBAAI,OAAO,sBACL;AAAA,MACZ;AAEI,YAAM;AAAA,IACV;AAGE,UAAM,MAAM,IAAI;AAChB,QAAI,CAAC,KAAK;AACR,YAAM,MAAM,IAAI,MAAM,sCAAsC;AAC5D,gBAAI,OAAO,sBACL;AAAA,IACV;AAGE,UAAM,cAAc,IAAI,aAAa,IAAI,aAAa;AACtD,QAAI,CAAC,aAAa;AAChB,YAAM,MAAM,IAAI,MAAM,8CAA8C;AACpE,gBAAI,OAAO,sBACL;AAAA,IACV;AAGE,UAAM,iBAAiB,gBAAgB,YAAY,YAAW,CAAE,IAC1D,aAAa,OAAO,OAAO,cAAc;AAC/C,QAAI,CAAC,YAAY;AACf,YAAM,MAAM,IAAI,MAAM,2DAA2D,cAAc,2BAA2B;AAC1H,gBAAI,OAAO,gCACL;AAAA,IACV;AAEE,WAAO,EAAE,YAAY,IAAG;AAAA,EAC1B;AAEA,WAAS,WAAY,SAAS;AAC5B,QAAI,oBAAoB;AAExB,QAAI,WAAW,QAAQ,QAAQ,QAAQ,KAAK,SAAS;AACnD,UAAI,MAAM,QAAQ,QAAQ,IAAI;AAC5B,mBAAW,YAAY,QAAQ;AAC7B,UAAIL,IAAG,WAAW,QAAQ,MACxB,oBAAoB,SAAS,SAAS,QAAQ,IAAI,WAAW,GAAG,QAAQ;AAAA;AAI5E,4BAAoB,QAAQ,KAAK,SAAS,QAAQ,IAAI,QAAQ,OAAO,GAAG,QAAQ,IAAI;AAAA;AAGtF,0BAAoBD,MAAK,QAAQ,QAAQ,IAAK,GAAE,YAAY;AAG9D,WAAIC,IAAG,WAAW,iBAAiB,IAC1B,oBAGF;AAAA,EACT;AAEA,WAAS,aAAc,SAAS;AAC9B,WAAO,QAAQ,CAAC,MAAM,MAAMD,MAAK,KAAK,GAAG,QAAS,GAAE,QAAQ,MAAM,CAAC,CAAC,IAAI;AAAA,EAC1E;AAEA,WAAS,aAAc,SAAS;AAC9B,SAAK,uCAAuC;AAE5C,UAAM,SAAS,aAAa,YAAY,OAAO;AAE/C,QAAI,aAAa,QAAQ;AACzB,WAAI,WAAW,QAAQ,cAAc,SACnC,aAAa,QAAQ,aAGvB,aAAa,SAAS,YAAY,QAAQ,OAAO,GAE1C,EAAE,OAAM;AAAA,EACjB;AAEA,WAAS,aAAc,SAAS;AAC9B,UAAM,aAAaA,MAAK,QAAQ,QAAQ,IAAK,GAAE,MAAM;AACrD,QAAI,WAAW;AACf,UAAMO,SAAQ,GAAQ,WAAW,QAAQ;AAEzC,IAAI,WAAW,QAAQ,WACrB,WAAW,QAAQ,WAEfA,UACF,OAAO,oDAAoD;AAI/D,QAAI,cAAc,CAAC,UAAU;AAC7B,QAAI,WAAW,QAAQ;AACrB,UAAI,CAAC,MAAM,QAAQ,QAAQ,IAAI;AAC7B,sBAAc,CAAC,aAAa,QAAQ,IAAI,CAAC;AAAA,WACpC;AACL,sBAAc,CAAE;AAChB,mBAAW,YAAY,QAAQ;AAC7B,sBAAY,KAAK,aAAa,QAAQ,CAAC;AAAA,MAE/C;AAKE,QAAI;AACJ,UAAM,YAAY,CAAA;AAClB,eAAWP,SAAQ;AACjB,UAAI;AAEF,cAAM,SAAS,aAAa,MAAMC,IAAG,aAAaD,OAAM,EAAE,UAAU,CAAC;AAErE,qBAAa,SAAS,WAAW,QAAQ,OAAO;AAAA,MACjD,SAAQ,GAAG;AACV,QAAIO,UACF,OAAO,kBAAkBP,KAAI,IAAI,EAAE,OAAO,EAAE,GAE9C,YAAY;AAAA,MAClB;AAGE,QAAI,aAAa,QAAQ;AAOzB,WANI,WAAW,QAAQ,cAAc,SACnC,aAAa,QAAQ,aAGvB,aAAa,SAAS,YAAY,WAAW,OAAO,GAEhD,YACK,EAAE,QAAQ,WAAW,OAAO,UAAS,IAErC,EAAE,QAAQ,UAAS;AAAA,EAE9B;AAGA,WAAS,OAAQ,SAAS;AAExB,QAAI,WAAW,OAAO,EAAE,WAAW;AACjC,aAAO,aAAa,aAAa,OAAO;AAG1C,UAAM,YAAY,WAAW,OAAO;AAGpC,WAAK,YAME,aAAa,aAAa,OAAO,KALtC,MAAM,+DAA+D,SAAS,+BAA+B,GAEtG,aAAa,aAAa,OAAO;AAAA,EAI5C;AAEA,WAAS,QAAS,WAAW,QAAQ;AACnC,UAAM,MAAM,OAAO,KAAK,OAAO,MAAM,GAAG,GAAG,KAAK;AAChD,QAAI,aAAa,OAAO,KAAK,WAAW,QAAQ;AAEhD,UAAM,QAAQ,WAAW,SAAS,GAAG,EAAE,GACjC,UAAU,WAAW,SAAS,GAAG;AACvC,iBAAa,WAAW,SAAS,IAAI,GAAG;AAExC,QAAI;AACF,YAAM,SAAS,OAAO,iBAAiB,eAAe,KAAK,KAAK;AAChE,oBAAO,WAAW,OAAO,GAClB,GAAG,OAAO,OAAO,UAAU,CAAC,GAAG,OAAO,OAAO;AAAA,IACrD,SAAQ,OAAO;AACd,YAAM,UAAU,iBAAiB,YAC3B,mBAAmB,MAAM,YAAY,sBACrC,mBAAmB,MAAM,YAAY;AAE3C,UAAI,WAAW,kBAAkB;AAC/B,cAAM,MAAM,IAAI,MAAM,6DAA6D;AACnF,kBAAI,OAAO,sBACL;AAAA,MACP,WAAU,kBAAkB;AAC3B,cAAM,MAAM,IAAI,MAAM,iDAAiD;AACvE,kBAAI,OAAO,qBACL;AAAA,MACZ;AACM,cAAM;AAAA,IAEZ;AAAA,EACA;AAGA,WAAS,SAAU,YAAY,QAAQ,UAAU,CAAA,GAAI;AACnD,UAAMO,SAAQ,GAAQ,WAAW,QAAQ,QACnC,WAAW,GAAQ,WAAW,QAAQ;AAE5C,QAAI,OAAO,UAAW,UAAU;AAC9B,YAAM,MAAM,IAAI,MAAM,gFAAgF;AACtG,gBAAI,OAAO,mBACL;AAAA,IACV;AAGE,eAAW,OAAO,OAAO,KAAK,MAAM;AAClC,MAAI,OAAO,UAAU,eAAe,KAAK,YAAY,GAAG,KAClD,aAAa,OACf,WAAW,GAAG,IAAI,OAAO,GAAG,IAG1BA,UAEA,OADE,aAAa,KACR,IAAI,GAAG,6CAEP,IAAI,GAAG,8CAF0C,KAM5D,WAAW,GAAG,IAAI,OAAO,GAAG;AAAA,EAGlC;AAEA,QAAM,eAAe;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAE2BC,gBAAA,QAAA,eAAG,aAAa,cAChBA,OAAA,QAAA,eAAG,aAAa,cACjBA,OAAA,QAAA,cAAG,aAAa,aACrBA,OAAA,QAAA,SAAG,aAAa,QACfA,OAAA,QAAA,UAAG,aAAa,SAClBA,OAAA,QAAA,QAAG,aAAa,OACbA,OAAA,QAAA,WAAG,aAAa,UAEvCA,OAAA,UAA
iB;;;;;;ACtWjB,WAAS,aAAc,UAAU,aAAa,QAAQ;AACpD,UAAM,UAAU,SAAS,MAAM,+BAA+B,KAAK,CAAA;AAEnE,WAAO,QAAQ,OAAO,SAAU,QAAQ,OAAO,OAAO;AACpD,YAAM,QAAQ,mCAAmC,KAAK,KAAK;AAC3D,UAAI,CAAC,SAAS,MAAM,WAAW;AAC7B,eAAO;AAGT,YAAM,SAAS,MAAM,CAAC;AAEtB,UAAI,OAAO;AAEX,UAAI,WAAW;AACb,sBAAc,MAAM,CAAC,GACrB,QAAQ,YAAY,QAAQ,OAAO,GAAG;AAAA,WACjC;AACL,cAAM,WAAW,MAAM,CAAC,EAAE,MAAM,IAAI,GAC9B,MAAM,SAAS,CAAC;AAQtB,YAPA,cAAc,MAAM,CAAC,EAAE,UAAU,OAAO,MAAM,GAE9C,QAAQ,OAAO,UAAU,eAAe,KAAK,aAAa,GAAG,IACzD,YAAY,GAAG,IACd,OAAO,OAAO,GAAG,KAAK,SAAS,CAAC,KAAK,IAGtC,SAAS,SAAS,KAAK,OAAO;AAChC,gBAAM,gBAAgB,QAAQ,QAAQ,CAAC;AACvC,kBAAQ,QAAQ,CAAC,IAAI,IAErB,SAAS,OAAO,QAAQ,eAAe,EAAE;AAAA,QACjD;AAEM,gBAAQ,aAAa,OAAO,aAAa,MAAM;AAAA,MACrD;AAEI,aAAO,OAAO,QAAQ,aAAa,KAAK;AAAA,IAC5C,GAAK,QAAQ;AAAA,EACb;AAEA,WAAS,OAAQ,QAAQ;AAEvB,UAAM,cAAc,OAAO,mBAAmB,CAAA,IAAK,QAAQ;AAE3D,eAAW,aAAa,OAAO,QAAQ;AACrC,YAAM,QAAQ,OAAO,UAAU,eAAe,KAAK,aAAa,SAAS,IAAI,YAAY,SAAS,IAAI,OAAO,OAAO,SAAS;AAE7H,aAAO,OAAO,SAAS,IAAI,aAAa,OAAO,aAAa,MAAM;AAAA,IACtE;AAEE,eAAW,cAAc,OAAO;AAC9B,kBAAY,UAAU,IAAI,OAAO,OAAO,UAAU;AAGpD,WAAO;AAAA,EACT;AAEA,cAAA,SAAwB;;;AC1CjB,SAAS,QACd,MACA,QACA,WAAqB,CAAC,OAAO,GACL;AACxB,MAAI,SAAS;AACX,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAGI,QAAA,MAA8B,CAAC,GAC/B,WAAW;AAAA;AAAA,IACK;AAAA;AAAA,IACF;AAAA;AAAA,IACD,QAAQ,IAAI;AAAA;AAAA,IACN,QAAQ,IAAI;AAAA,EAAA,GAG/B,SAAS,OAAO;AAAA,IACpB,SAAS,QAAQ,CAAC,SAAS;AACzB,YAAM,UAAU,WAAW,QAAQ,CAAC,IAAI,GAAG;AAAA,QACzC,SAAS;AAAA,MAAA,CACV;AACI,aAAA,UACE,OAAO,QAAQC,cAAM,MAAAR,YAAA,QAAG,aAAa,OAAO,CAAC,CAAC,IADhC,CAAC;AAAA,IAEvB,CAAA;AAAA,EACH;AAGI,SAAO,YAAY,QAAQ,IAAI,uBAAuB,WACxD,QAAQ,IAAI,qBAAqB,OAAO,WAGtC,OAAO,WAAW,QAAQ,IAAI,YAAY,WAC5C,QAAQ,IAAI,UAAU,OAAO,UAE3B,OAAO,gBAAgB,QAAQ,IAAI,iBAAiB,WACtD,QAAQ,IAAI,eAAe,OAAO;AAGhC,MAAA;AAEKS,gBAAA,OAAA,EAAC,QAAO;AAAA,WACR,GAAG;AAGN,UAAA,EAAE,QAAQ,SAAS,OAAO,IACtB,IAAI,MAAM,wEAAwE,IAEpF;AAAA,EAAA;AAIR,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM;AAC1C,aAAS,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,MAClD,IAAI,GAAG,IAAI;AAMf,aAAW,OAAO,QAAQ;AACpB,aAAS,KAAK,CAAC,WAAW,IAAI,WAAW,MAAM,CAAC,MAClD,IAAI,GAAG,IAAI,QAAQ,IAAI,GAAG;AAIvB,SAAA;AACT;AAEA,SAAS,WACP,KACA,SACA,SAGoB;AACpB,aAAW,UAAU,SAAS;AAC5B,UAAM,WAAWV,cAAA,QAAK,KAAK,KAAK,MAAM;AAElC,QAAAC,YAAA,QAAG,WAAW,QAAQ,KAAKA,oBAAG,SAAS,QAAQ,EAAE,OAAO;AACnD,aAAA;AAAA,EAAA;AAGL,QAAA,YAAYD,cAAAA,QAAK,QAAQ,GAAG;AAC9B,MAAA,cAAc,QAAQ,CAAC,SAAS,WAAW,UAAU,WAAW,SAAS,OAAO;AAC3E,WAAA,WAAW,WAAW,SAAS,OAAO;AAIjD;;;;;;;;","x_google_ignoreList":[2,3]}