@sanity/cli 6.0.0-alpha.13 → 6.0.0-alpha.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +113 -102
- package/dist/actions/build/buildApp.js +4 -3
- package/dist/actions/build/buildApp.js.map +1 -1
- package/dist/actions/build/buildStaticFiles.js +3 -2
- package/dist/actions/build/buildStaticFiles.js.map +1 -1
- package/dist/actions/build/buildStudio.js +1 -0
- package/dist/actions/build/buildStudio.js.map +1 -1
- package/dist/actions/build/buildVendorDependencies.js +15 -36
- package/dist/actions/build/buildVendorDependencies.js.map +1 -1
- package/dist/actions/build/checkRequiredDependencies.js +7 -5
- package/dist/actions/build/checkRequiredDependencies.js.map +1 -1
- package/dist/actions/build/checkStudioDependencyVersions.js +6 -6
- package/dist/actions/build/checkStudioDependencyVersions.js.map +1 -1
- package/dist/actions/deploy/deployApp.js +2 -2
- package/dist/actions/deploy/deployApp.js.map +1 -1
- package/dist/actions/deploy/deployStudio.js +2 -2
- package/dist/actions/deploy/deployStudio.js.map +1 -1
- package/dist/actions/dev/startStudioDevServer.js +2 -2
- package/dist/actions/dev/startStudioDevServer.js.map +1 -1
- package/dist/actions/manifest/extractManifest.js +2 -2
- package/dist/actions/manifest/extractManifest.js.map +1 -1
- package/dist/actions/mcp/detectAvailableEditors.js +16 -4
- package/dist/actions/mcp/detectAvailableEditors.js.map +1 -1
- package/dist/actions/mcp/editorConfigs.js +64 -6
- package/dist/actions/mcp/editorConfigs.js.map +1 -1
- package/dist/actions/mcp/writeMCPConfig.js +27 -15
- package/dist/actions/mcp/writeMCPConfig.js.map +1 -1
- package/dist/actions/schema/extractSchema.js +7 -32
- package/dist/actions/schema/extractSchema.js.map +1 -1
- package/dist/actions/schema/extractSchemaWatcher.js +126 -0
- package/dist/actions/schema/extractSchemaWatcher.js.map +1 -0
- package/dist/actions/schema/formatSchemaValidation.js +5 -1
- package/dist/actions/schema/formatSchemaValidation.js.map +1 -1
- package/dist/actions/schema/getExtractOptions.js +16 -0
- package/dist/actions/schema/getExtractOptions.js.map +1 -0
- package/dist/actions/schema/runSchemaExtraction.js +39 -0
- package/dist/actions/schema/runSchemaExtraction.js.map +1 -0
- package/dist/actions/schema/watchExtractSchema.js +71 -0
- package/dist/actions/schema/watchExtractSchema.js.map +1 -0
- package/dist/actions/versions/tryFindLatestVersion.js +1 -1
- package/dist/actions/versions/tryFindLatestVersion.js.map +1 -1
- package/dist/commands/mcp/configure.js +1 -1
- package/dist/commands/mcp/configure.js.map +1 -1
- package/dist/commands/schema/extract.js +32 -4
- package/dist/commands/schema/extract.js.map +1 -1
- package/dist/server/previewServer.js +2 -2
- package/dist/server/previewServer.js.map +1 -1
- package/dist/telemetry/extractSchema.telemetry.js +5 -0
- package/dist/telemetry/extractSchema.telemetry.js.map +1 -1
- package/dist/util/compareDependencyVersions.js +3 -5
- package/dist/util/compareDependencyVersions.js.map +1 -1
- package/dist/util/getLocalPackageVersion.js +30 -22
- package/dist/util/getLocalPackageVersion.js.map +1 -1
- package/dist/util/getWorkspace.js +1 -1
- package/dist/util/getWorkspace.js.map +1 -1
- package/dist/util/resolveLatestVersions.js +2 -2
- package/dist/util/resolveLatestVersions.js.map +1 -1
- package/dist/util/update/fetchLatestVersion.js +1 -1
- package/dist/util/update/fetchLatestVersion.js.map +1 -1
- package/oclif.manifest.json +168 -146
- package/package.json +5 -5
- package/dist/util/readModuleVersion.js +0 -21
- package/dist/util/readModuleVersion.js.map +0 -1
|
@@ -1,52 +1,27 @@
|
|
|
1
|
-
import { mkdir, writeFile } from 'node:fs/promises';
|
|
2
|
-
import { join, resolve } from 'node:path';
|
|
3
1
|
import { exit } from '@oclif/core/errors';
|
|
4
|
-
import { getCliTelemetry
|
|
2
|
+
import { getCliTelemetry } from '@sanity/cli-core';
|
|
5
3
|
import { spinner } from '@sanity/cli-core/ux';
|
|
6
4
|
import { SchemaExtractedTrace } from '../../telemetry/extractSchema.telemetry.js';
|
|
7
5
|
import { formatSchemaValidation } from './formatSchemaValidation.js';
|
|
6
|
+
import { runSchemaExtraction } from './runSchemaExtraction.js';
|
|
8
7
|
import { schemasExtractDebug } from './utils/debug.js';
|
|
9
8
|
import { SchemaExtractionError } from './utils/SchemaExtractionError.js';
|
|
10
|
-
const FILENAME = 'schema.json';
|
|
11
9
|
export async function extractSchema(options) {
|
|
12
|
-
const {
|
|
13
|
-
const {
|
|
10
|
+
const { extractOptions, output } = options;
|
|
11
|
+
const { enforceRequiredFields, format, outputPath } = extractOptions;
|
|
14
12
|
const spin = spinner(enforceRequiredFields ? 'Extracting schema with enforced required fields' : 'Extracting schema').start();
|
|
15
|
-
const workDir = projectRoot.directory;
|
|
16
13
|
const trace = getCliTelemetry().trace(SchemaExtractedTrace);
|
|
17
14
|
trace.start();
|
|
18
15
|
try {
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
}
|
|
22
|
-
const result = await studioWorkerTask(new URL('extractSanitySchema.worker.js', import.meta.url), {
|
|
23
|
-
name: 'extractSanitySchema',
|
|
24
|
-
studioRootPath: workDir,
|
|
25
|
-
workerData: {
|
|
26
|
-
configPath: projectRoot.path,
|
|
27
|
-
enforceRequiredFields,
|
|
28
|
-
workDir,
|
|
29
|
-
workspaceName
|
|
30
|
-
}
|
|
31
|
-
});
|
|
32
|
-
if (result.type === 'error') {
|
|
33
|
-
throw new SchemaExtractionError(result.error, result.validation);
|
|
34
|
-
}
|
|
35
|
-
const schema = result.schema;
|
|
16
|
+
spin.text = `Writing schema to ${outputPath}`;
|
|
17
|
+
const schema = await runSchemaExtraction(extractOptions);
|
|
36
18
|
trace.log({
|
|
37
19
|
enforceRequiredFields,
|
|
38
20
|
schemaAllTypesCount: schema.length,
|
|
39
21
|
schemaDocumentTypesCount: schema.filter((type)=>type.type === 'document').length,
|
|
40
|
-
schemaFormat:
|
|
22
|
+
schemaFormat: format,
|
|
41
23
|
schemaTypesCount: schema.filter((type)=>type.type === 'type').length
|
|
42
24
|
});
|
|
43
|
-
const outputDir = path ? resolve(join(workDir, path)) : workDir;
|
|
44
|
-
const outputPath = join(outputDir, FILENAME);
|
|
45
|
-
await mkdir(outputDir, {
|
|
46
|
-
recursive: true
|
|
47
|
-
});
|
|
48
|
-
spin.text = `Writing schema to ${outputPath}`;
|
|
49
|
-
await writeFile(outputPath, `${JSON.stringify(schema, null, 2)}\n`);
|
|
50
25
|
spin.succeed(enforceRequiredFields ? `Extracted schema to ${outputPath} with enforced required fields` : `Extracted schema to ${outputPath}`);
|
|
51
26
|
trace.complete();
|
|
52
27
|
} catch (err) {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/actions/schema/extractSchema.ts"],"sourcesContent":["import {
|
|
1
|
+
{"version":3,"sources":["../../../src/actions/schema/extractSchema.ts"],"sourcesContent":["import {exit} from '@oclif/core/errors'\nimport {getCliTelemetry, type Output} from '@sanity/cli-core'\nimport {spinner} from '@sanity/cli-core/ux'\n\nimport {SchemaExtractedTrace} from '../../telemetry/extractSchema.telemetry.js'\nimport {formatSchemaValidation} from './formatSchemaValidation.js'\nimport {type ExtractOptions} from './getExtractOptions.js'\nimport {runSchemaExtraction} from './runSchemaExtraction.js'\nimport {schemasExtractDebug} from './utils/debug.js'\nimport {SchemaExtractionError} from './utils/SchemaExtractionError.js'\n\ninterface ExtractSchemaActionOptions {\n extractOptions: ExtractOptions\n output: Output\n}\n\nexport async function extractSchema(options: ExtractSchemaActionOptions): Promise<void> {\n const {extractOptions, output} = options\n const {enforceRequiredFields, format, outputPath} = extractOptions\n\n const spin = spinner(\n enforceRequiredFields ? 'Extracting schema with enforced required fields' : 'Extracting schema',\n ).start()\n\n const trace = getCliTelemetry().trace(SchemaExtractedTrace)\n trace.start()\n\n try {\n spin.text = `Writing schema to ${outputPath}`\n\n const schema = await runSchemaExtraction(extractOptions)\n\n trace.log({\n enforceRequiredFields,\n schemaAllTypesCount: schema.length,\n schemaDocumentTypesCount: schema.filter((type) => type.type === 'document').length,\n schemaFormat: format,\n schemaTypesCount: schema.filter((type) => type.type === 'type').length,\n })\n\n spin.succeed(\n enforceRequiredFields\n ? `Extracted schema to ${outputPath} with enforced required fields`\n : `Extracted schema to ${outputPath}`,\n )\n\n trace.complete()\n } catch (err) {\n trace.error(err)\n schemasExtractDebug('Failed to extract schema', err)\n spin.fail(\n enforceRequiredFields\n ? 
'Failed to extract schema with enforced required fields'\n : 'Failed to extract schema',\n )\n\n // Display validation errors if available\n if (err instanceof SchemaExtractionError && err.validation && err.validation.length > 0) {\n output.log('')\n output.log(formatSchemaValidation(err.validation))\n }\n\n if (err instanceof Error) {\n output.error(err.message, {exit: 1})\n }\n\n exit(1)\n }\n}\n"],"names":["exit","getCliTelemetry","spinner","SchemaExtractedTrace","formatSchemaValidation","runSchemaExtraction","schemasExtractDebug","SchemaExtractionError","extractSchema","options","extractOptions","output","enforceRequiredFields","format","outputPath","spin","start","trace","text","schema","log","schemaAllTypesCount","length","schemaDocumentTypesCount","filter","type","schemaFormat","schemaTypesCount","succeed","complete","err","error","fail","validation","Error","message"],"mappings":"AAAA,SAAQA,IAAI,QAAO,qBAAoB;AACvC,SAAQC,eAAe,QAAoB,mBAAkB;AAC7D,SAAQC,OAAO,QAAO,sBAAqB;AAE3C,SAAQC,oBAAoB,QAAO,6CAA4C;AAC/E,SAAQC,sBAAsB,QAAO,8BAA6B;AAElE,SAAQC,mBAAmB,QAAO,2BAA0B;AAC5D,SAAQC,mBAAmB,QAAO,mBAAkB;AACpD,SAAQC,qBAAqB,QAAO,mCAAkC;AAOtE,OAAO,eAAeC,cAAcC,OAAmC;IACrE,MAAM,EAACC,cAAc,EAAEC,MAAM,EAAC,GAAGF;IACjC,MAAM,EAACG,qBAAqB,EAAEC,MAAM,EAAEC,UAAU,EAAC,GAAGJ;IAEpD,MAAMK,OAAOb,QACXU,wBAAwB,oDAAoD,qBAC5EI,KAAK;IAEP,MAAMC,QAAQhB,kBAAkBgB,KAAK,CAACd;IACtCc,MAAMD,KAAK;IAEX,IAAI;QACFD,KAAKG,IAAI,GAAG,CAAC,kBAAkB,EAAEJ,YAAY;QAE7C,MAAMK,SAAS,MAAMd,oBAAoBK;QAEzCO,MAAMG,GAAG,CAAC;YACRR;YACAS,qBAAqBF,OAAOG,MAAM;YAClCC,0BAA0BJ,OAAOK,MAAM,CAAC,CAACC,OAASA,KAAKA,IAAI,KAAK,YAAYH,MAAM;YAClFI,cAAcb;YACdc,kBAAkBR,OAAOK,MAAM,CAAC,CAACC,OAASA,KAAKA,IAAI,KAAK,QAAQH,MAAM;QACxE;QAEAP,KAAKa,OAAO,CACVhB,wBACI,CAAC,oBAAoB,EAAEE,WAAW,8BAA8B,CAAC,GACjE,CAAC,oBAAoB,EAAEA,YAAY;QAGzCG,MAAMY,QAAQ;IAChB,EAAE,OAAOC,KAAK;QACZb,MAAMc,KAAK,CAACD;QACZxB,oBAAoB,4BAA4BwB;QAChDf,KAAKiB,IAAI,CACPpB,wBACI,2DACA;QAGN,yCAAyC;QACzC,IAAIkB,eAAevB,yBAAyBuB,IAAIG,UAAU,IAAIH,IAAIG,UAAU,CAACX,MAAM,GAAG,GAAG;YACvFX,OAAOS,G
AAG,CAAC;YACXT,OAAOS,GAAG,CAAChB,uBAAuB0B,IAAIG,UAAU;QAClD;QAEA,IAAIH,eAAeI,OAAO;YACxBvB,OAAOoB,KAAK,CAACD,IAAIK,OAAO,EAAE;gBAACnC,MAAM;YAAC;QACpC;QAEAA,KAAK;IACP;AACF"}
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
import { dirname, isAbsolute, relative } from 'node:path';
|
|
2
|
+
import { spinner } from '@sanity/cli-core/ux';
|
|
3
|
+
import { watch as chokidarWatch } from 'chokidar';
|
|
4
|
+
import { debounce } from 'lodash-es';
|
|
5
|
+
import { glob } from 'tinyglobby';
|
|
6
|
+
import { formatSchemaValidation } from './formatSchemaValidation.js';
|
|
7
|
+
import { runSchemaExtraction } from './runSchemaExtraction.js';
|
|
8
|
+
import { schemasExtractDebug } from './utils/debug.js';
|
|
9
|
+
import { SchemaExtractionError } from './utils/SchemaExtractionError.js';
|
|
10
|
+
/** Default glob patterns to watch for schema changes */ export const DEFAULT_WATCH_PATTERNS = [
|
|
11
|
+
'sanity.config.{js,jsx,ts,tsx,mjs}',
|
|
12
|
+
'schema*/**/*.{js,jsx,ts,tsx,mjs}'
|
|
13
|
+
];
|
|
14
|
+
/** Default patterns to ignore when watching */ const IGNORED_PATTERNS = [
|
|
15
|
+
'**/node_modules/**',
|
|
16
|
+
'**/.git/**',
|
|
17
|
+
'**/dist/**',
|
|
18
|
+
'**/lib/**',
|
|
19
|
+
'**/.sanity/**'
|
|
20
|
+
];
|
|
21
|
+
/**
|
|
22
|
+
* Creates an extraction runner with concurrency control.
|
|
23
|
+
* If extraction is already running, queues one more extraction to run after completion.
|
|
24
|
+
* Multiple queued requests are coalesced into a single pending extraction.
|
|
25
|
+
*/ function createExtractionRunner(onExtract) {
|
|
26
|
+
const state = {
|
|
27
|
+
isExtracting: false,
|
|
28
|
+
pendingExtraction: false
|
|
29
|
+
};
|
|
30
|
+
async function runExtraction() {
|
|
31
|
+
if (state.isExtracting) {
|
|
32
|
+
state.pendingExtraction = true;
|
|
33
|
+
return;
|
|
34
|
+
}
|
|
35
|
+
state.isExtracting = true;
|
|
36
|
+
state.pendingExtraction = false;
|
|
37
|
+
try {
|
|
38
|
+
await onExtract();
|
|
39
|
+
} finally{
|
|
40
|
+
state.isExtracting = false;
|
|
41
|
+
// If a change came in during extraction, run again
|
|
42
|
+
if (state.pendingExtraction) {
|
|
43
|
+
state.pendingExtraction = false;
|
|
44
|
+
await runExtraction();
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
}
|
|
48
|
+
return {
|
|
49
|
+
runExtraction,
|
|
50
|
+
state
|
|
51
|
+
};
|
|
52
|
+
}
|
|
53
|
+
/**
|
|
54
|
+
* Starts a schema watcher that extracts schema on file changes.
|
|
55
|
+
* Returns a watcher instance and a stop function.
|
|
56
|
+
*/ export async function startExtractSchemaWatcher(options) {
|
|
57
|
+
const { extractOptions, onExtraction, output, watchPatterns } = options;
|
|
58
|
+
const { configPath, enforceRequiredFields, outputPath } = extractOptions;
|
|
59
|
+
const workDir = dirname(configPath);
|
|
60
|
+
// Helper function to run extraction with spinner and error handling
|
|
61
|
+
const runExtraction = async ()=>{
|
|
62
|
+
const spin = spinner(enforceRequiredFields ? 'Extracting schema with enforced required fields' : 'Extracting schema...').start();
|
|
63
|
+
const extractionStartTime = Date.now();
|
|
64
|
+
try {
|
|
65
|
+
await runSchemaExtraction(extractOptions);
|
|
66
|
+
spin.succeed(enforceRequiredFields ? `Extracted schema to ${outputPath} with enforced required fields` : `Extracted schema to ${outputPath}`);
|
|
67
|
+
const duration = Date.now() - extractionStartTime;
|
|
68
|
+
onExtraction?.({
|
|
69
|
+
duration,
|
|
70
|
+
success: true
|
|
71
|
+
});
|
|
72
|
+
return true;
|
|
73
|
+
} catch (err) {
|
|
74
|
+
const duration = Date.now() - extractionStartTime;
|
|
75
|
+
onExtraction?.({
|
|
76
|
+
duration,
|
|
77
|
+
success: false
|
|
78
|
+
});
|
|
79
|
+
schemasExtractDebug('Failed to extract schema', err);
|
|
80
|
+
spin.fail('Extraction failed');
|
|
81
|
+
// Display validation errors if available
|
|
82
|
+
if (err instanceof SchemaExtractionError && err.validation && err.validation.length > 0) {
|
|
83
|
+
output.log('');
|
|
84
|
+
output.log(formatSchemaValidation(err.validation));
|
|
85
|
+
} else if (err instanceof Error) {
|
|
86
|
+
output.error(err.message, {
|
|
87
|
+
exit: 1
|
|
88
|
+
});
|
|
89
|
+
}
|
|
90
|
+
return false;
|
|
91
|
+
}
|
|
92
|
+
};
|
|
93
|
+
// Run initial extraction
|
|
94
|
+
await runExtraction();
|
|
95
|
+
const absoluteWatchPatterns = await glob(watchPatterns, {
|
|
96
|
+
absolute: true,
|
|
97
|
+
ignore: IGNORED_PATTERNS
|
|
98
|
+
});
|
|
99
|
+
// Create extraction runner with concurrency control
|
|
100
|
+
const { runExtraction: runConcurrentExtraction } = createExtractionRunner(async ()=>{
|
|
101
|
+
await runExtraction();
|
|
102
|
+
});
|
|
103
|
+
// Debounced extraction trigger (1 second delay)
|
|
104
|
+
const debouncedExtract = debounce(()=>{
|
|
105
|
+
void runConcurrentExtraction();
|
|
106
|
+
}, 1000);
|
|
107
|
+
const watcher = chokidarWatch(absoluteWatchPatterns, {
|
|
108
|
+
cwd: workDir,
|
|
109
|
+
ignoreInitial: true
|
|
110
|
+
});
|
|
111
|
+
watcher.on('all', (event, filePath)=>{
|
|
112
|
+
const timestamp = new Date().toLocaleTimeString();
|
|
113
|
+
const relativePath = isAbsolute(filePath) ? relative(workDir, filePath) : filePath;
|
|
114
|
+
output.log(`[${timestamp}] ${event}: ${relativePath}`);
|
|
115
|
+
debouncedExtract();
|
|
116
|
+
});
|
|
117
|
+
watcher.on('error', (err)=>{
|
|
118
|
+
output.error(`Watcher error: ${err instanceof Error ? err.message : String(err)}`);
|
|
119
|
+
});
|
|
120
|
+
return {
|
|
121
|
+
close: ()=>watcher.close(),
|
|
122
|
+
watcher
|
|
123
|
+
};
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
//# sourceMappingURL=extractSchemaWatcher.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../src/actions/schema/extractSchemaWatcher.ts"],"sourcesContent":["import {dirname, isAbsolute, relative} from 'node:path'\n\nimport {type Output} from '@sanity/cli-core'\nimport {spinner} from '@sanity/cli-core/ux'\nimport {watch as chokidarWatch, type FSWatcher} from 'chokidar'\nimport {debounce} from 'lodash-es'\nimport {glob} from 'tinyglobby'\n\nimport {formatSchemaValidation} from './formatSchemaValidation.js'\nimport {type ExtractOptions} from './getExtractOptions.js'\nimport {runSchemaExtraction} from './runSchemaExtraction.js'\nimport {schemasExtractDebug} from './utils/debug.js'\nimport {SchemaExtractionError} from './utils/SchemaExtractionError.js'\n\n/** Default glob patterns to watch for schema changes */\nexport const DEFAULT_WATCH_PATTERNS = [\n 'sanity.config.{js,jsx,ts,tsx,mjs}',\n 'schema*/**/*.{js,jsx,ts,tsx,mjs}',\n]\n\n/** Default patterns to ignore when watching */\nconst IGNORED_PATTERNS = [\n '**/node_modules/**',\n '**/.git/**',\n '**/dist/**',\n '**/lib/**',\n '**/.sanity/**',\n]\n\ninterface ExtractSchemaWatcherOptions {\n extractOptions: ExtractOptions\n output: Output\n watchPatterns: string[]\n\n onExtraction?: (result: {duration: number; success: boolean}) => void\n}\n\ninterface ExtractSchemaWatcher {\n close: () => Promise<void>\n watcher: FSWatcher\n}\n\n/** State for tracking extraction status */\ninterface WatchState {\n isExtracting: boolean\n pendingExtraction: boolean\n}\n\n/** Return type for createExtractionRunner */\ninterface ExtractionRunner {\n runExtraction: () => Promise<void>\n state: WatchState\n}\n\n/**\n * Creates an extraction runner with concurrency control.\n * If extraction is already running, queues one more extraction to run after completion.\n * Multiple queued requests are coalesced into a single pending extraction.\n */\nfunction createExtractionRunner(onExtract: () => Promise<void>): ExtractionRunner {\n const state: WatchState = {\n isExtracting: false,\n pendingExtraction: 
false,\n }\n\n async function runExtraction(): Promise<void> {\n if (state.isExtracting) {\n state.pendingExtraction = true\n return\n }\n\n state.isExtracting = true\n state.pendingExtraction = false\n\n try {\n await onExtract()\n } finally {\n state.isExtracting = false\n\n // If a change came in during extraction, run again\n if (state.pendingExtraction) {\n state.pendingExtraction = false\n await runExtraction()\n }\n }\n }\n\n return {runExtraction, state}\n}\n\n/**\n * Starts a schema watcher that extracts schema on file changes.\n * Returns a watcher instance and a stop function.\n */\nexport async function startExtractSchemaWatcher(\n options: ExtractSchemaWatcherOptions,\n): Promise<ExtractSchemaWatcher> {\n const {extractOptions, onExtraction, output, watchPatterns} = options\n\n const {configPath, enforceRequiredFields, outputPath} = extractOptions\n const workDir = dirname(configPath)\n\n // Helper function to run extraction with spinner and error handling\n const runExtraction = async (): Promise<boolean> => {\n const spin = spinner(\n enforceRequiredFields\n ? 'Extracting schema with enforced required fields'\n : 'Extracting schema...',\n ).start()\n const extractionStartTime = Date.now()\n\n try {\n await runSchemaExtraction(extractOptions)\n\n spin.succeed(\n enforceRequiredFields\n ? 
`Extracted schema to ${outputPath} with enforced required fields`\n : `Extracted schema to ${outputPath}`,\n )\n\n const duration = Date.now() - extractionStartTime\n onExtraction?.({duration, success: true})\n\n return true\n } catch (err) {\n const duration = Date.now() - extractionStartTime\n onExtraction?.({duration, success: false})\n\n schemasExtractDebug('Failed to extract schema', err)\n spin.fail('Extraction failed')\n\n // Display validation errors if available\n if (err instanceof SchemaExtractionError && err.validation && err.validation.length > 0) {\n output.log('')\n output.log(formatSchemaValidation(err.validation))\n } else if (err instanceof Error) {\n output.error(err.message, {exit: 1})\n }\n\n return false\n }\n }\n\n // Run initial extraction\n await runExtraction()\n\n const absoluteWatchPatterns = await glob(watchPatterns, {\n absolute: true,\n ignore: IGNORED_PATTERNS,\n })\n\n // Create extraction runner with concurrency control\n const {runExtraction: runConcurrentExtraction} = createExtractionRunner(async () => {\n await runExtraction()\n })\n\n // Debounced extraction trigger (1 second delay)\n const debouncedExtract = debounce(() => {\n void runConcurrentExtraction()\n }, 1000)\n\n const watcher: FSWatcher = chokidarWatch(absoluteWatchPatterns, {\n cwd: workDir,\n ignoreInitial: true,\n })\n\n watcher.on('all', (event, filePath) => {\n const timestamp = new Date().toLocaleTimeString()\n const relativePath = isAbsolute(filePath) ? relative(workDir, filePath) : filePath\n output.log(`[${timestamp}] ${event}: ${relativePath}`)\n debouncedExtract()\n })\n\n watcher.on('error', (err) => {\n output.error(`Watcher error: ${err instanceof Error ? 
err.message : String(err)}`)\n })\n\n return {\n close: () => watcher.close(),\n watcher,\n }\n}\n"],"names":["dirname","isAbsolute","relative","spinner","watch","chokidarWatch","debounce","glob","formatSchemaValidation","runSchemaExtraction","schemasExtractDebug","SchemaExtractionError","DEFAULT_WATCH_PATTERNS","IGNORED_PATTERNS","createExtractionRunner","onExtract","state","isExtracting","pendingExtraction","runExtraction","startExtractSchemaWatcher","options","extractOptions","onExtraction","output","watchPatterns","configPath","enforceRequiredFields","outputPath","workDir","spin","start","extractionStartTime","Date","now","succeed","duration","success","err","fail","validation","length","log","Error","error","message","exit","absoluteWatchPatterns","absolute","ignore","runConcurrentExtraction","debouncedExtract","watcher","cwd","ignoreInitial","on","event","filePath","timestamp","toLocaleTimeString","relativePath","String","close"],"mappings":"AAAA,SAAQA,OAAO,EAAEC,UAAU,EAAEC,QAAQ,QAAO,YAAW;AAGvD,SAAQC,OAAO,QAAO,sBAAqB;AAC3C,SAAQC,SAASC,aAAa,QAAuB,WAAU;AAC/D,SAAQC,QAAQ,QAAO,YAAW;AAClC,SAAQC,IAAI,QAAO,aAAY;AAE/B,SAAQC,sBAAsB,QAAO,8BAA6B;AAElE,SAAQC,mBAAmB,QAAO,2BAA0B;AAC5D,SAAQC,mBAAmB,QAAO,mBAAkB;AACpD,SAAQC,qBAAqB,QAAO,mCAAkC;AAEtE,sDAAsD,GACtD,OAAO,MAAMC,yBAAyB;IACpC;IACA;CACD,CAAA;AAED,6CAA6C,GAC7C,MAAMC,mBAAmB;IACvB;IACA;IACA;IACA;IACA;CACD;AA2BD;;;;CAIC,GACD,SAASC,uBAAuBC,SAA8B;IAC5D,MAAMC,QAAoB;QACxBC,cAAc;QACdC,mBAAmB;IACrB;IAEA,eAAeC;QACb,IAAIH,MAAMC,YAAY,EAAE;YACtBD,MAAME,iBAAiB,GAAG;YAC1B;QACF;QAEAF,MAAMC,YAAY,GAAG;QACrBD,MAAME,iBAAiB,GAAG;QAE1B,IAAI;YACF,MAAMH;QACR,SAAU;YACRC,MAAMC,YAAY,GAAG;YAErB,mDAAmD;YACnD,IAAID,MAAME,iBAAiB,EAAE;gBAC3BF,MAAME,iBAAiB,GAAG;gBAC1B,MAAMC;YACR;QACF;IACF;IAEA,OAAO;QAACA;QAAeH;IAAK;AAC9B;AAEA;;;CAGC,GACD,OAAO,eAAeI,0BACpBC,OAAoC;IAEpC,MAAM,EAACC,cAAc,EAAEC,YAAY,EAAEC,MAAM,EAAEC,aAAa,EAAC,GAAGJ;IAE9D,MAAM,EAACK,UAAU,EAAEC,qBAAqB,EAAEC,UAAU,EAAC,GAAGN;IACxD,MAAMO,UAAU7B,QAAQ0B;IAExB,oEAAoE;IACpE,MAAMP,gBAAgB;QACpB,MAAMW,O
AAO3B,QACXwB,wBACI,oDACA,wBACJI,KAAK;QACP,MAAMC,sBAAsBC,KAAKC,GAAG;QAEpC,IAAI;YACF,MAAMzB,oBAAoBa;YAE1BQ,KAAKK,OAAO,CACVR,wBACI,CAAC,oBAAoB,EAAEC,WAAW,8BAA8B,CAAC,GACjE,CAAC,oBAAoB,EAAEA,YAAY;YAGzC,MAAMQ,WAAWH,KAAKC,GAAG,KAAKF;YAC9BT,eAAe;gBAACa;gBAAUC,SAAS;YAAI;YAEvC,OAAO;QACT,EAAE,OAAOC,KAAK;YACZ,MAAMF,WAAWH,KAAKC,GAAG,KAAKF;YAC9BT,eAAe;gBAACa;gBAAUC,SAAS;YAAK;YAExC3B,oBAAoB,4BAA4B4B;YAChDR,KAAKS,IAAI,CAAC;YAEV,yCAAyC;YACzC,IAAID,eAAe3B,yBAAyB2B,IAAIE,UAAU,IAAIF,IAAIE,UAAU,CAACC,MAAM,GAAG,GAAG;gBACvFjB,OAAOkB,GAAG,CAAC;gBACXlB,OAAOkB,GAAG,CAAClC,uBAAuB8B,IAAIE,UAAU;YAClD,OAAO,IAAIF,eAAeK,OAAO;gBAC/BnB,OAAOoB,KAAK,CAACN,IAAIO,OAAO,EAAE;oBAACC,MAAM;gBAAC;YACpC;YAEA,OAAO;QACT;IACF;IAEA,yBAAyB;IACzB,MAAM3B;IAEN,MAAM4B,wBAAwB,MAAMxC,KAAKkB,eAAe;QACtDuB,UAAU;QACVC,QAAQpC;IACV;IAEA,oDAAoD;IACpD,MAAM,EAACM,eAAe+B,uBAAuB,EAAC,GAAGpC,uBAAuB;QACtE,MAAMK;IACR;IAEA,gDAAgD;IAChD,MAAMgC,mBAAmB7C,SAAS;QAChC,KAAK4C;IACP,GAAG;IAEH,MAAME,UAAqB/C,cAAc0C,uBAAuB;QAC9DM,KAAKxB;QACLyB,eAAe;IACjB;IAEAF,QAAQG,EAAE,CAAC,OAAO,CAACC,OAAOC;QACxB,MAAMC,YAAY,IAAIzB,OAAO0B,kBAAkB;QAC/C,MAAMC,eAAe3D,WAAWwD,YAAYvD,SAAS2B,SAAS4B,YAAYA;QAC1EjC,OAAOkB,GAAG,CAAC,CAAC,CAAC,EAAEgB,UAAU,EAAE,EAAEF,MAAM,EAAE,EAAEI,cAAc;QACrDT;IACF;IAEAC,QAAQG,EAAE,CAAC,SAAS,CAACjB;QACnBd,OAAOoB,KAAK,CAAC,CAAC,eAAe,EAAEN,eAAeK,QAAQL,IAAIO,OAAO,GAAGgB,OAAOvB,MAAM;IACnF;IAEA,OAAO;QACLwB,OAAO,IAAMV,QAAQU,KAAK;QAC1BV;IACF;AACF"}
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { isatty } from 'node:tty';
|
|
2
2
|
import { styleText } from 'node:util';
|
|
3
3
|
import { logSymbols } from '@sanity/cli-core/ux';
|
|
4
|
+
import { generateHelpUrl } from '@sanity/generate-help-url';
|
|
4
5
|
const isTty = isatty(1);
|
|
5
6
|
const headers = {
|
|
6
7
|
error: isTty ? styleText([
|
|
@@ -63,7 +64,10 @@ export function formatSchemaValidation(validation) {
|
|
|
63
64
|
const header = `${headers[getAggregatedSeverity(groups)]} ${formattedTopLevelType}`;
|
|
64
65
|
const body = groups.toSorted((a, b)=>severityValues[getAggregatedSeverity(a)] - severityValues[getAggregatedSeverity(b)]).map((group)=>{
|
|
65
66
|
const formattedPath = ` ${styleText('bold', formatPath(group.path) || '(root)')}`;
|
|
66
|
-
const formattedMessages = group.problems.toSorted((a, b)=>severityValues[a.severity] - severityValues[b.severity]).map(({ message, severity })
|
|
67
|
+
const formattedMessages = group.problems.toSorted((a, b)=>severityValues[a.severity] - severityValues[b.severity]).map(({ helpId, message, severity })=>{
|
|
68
|
+
const help = helpId ? `\n See ${generateHelpUrl(helpId)}` : '';
|
|
69
|
+
return ` ${logSymbols[severity]} ${message}${help}`;
|
|
70
|
+
}).join('\n');
|
|
67
71
|
return `${formattedPath}\n${formattedMessages}`;
|
|
68
72
|
}).join('\n');
|
|
69
73
|
return `${header}\n${body}`;
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/actions/schema/formatSchemaValidation.ts"],"sourcesContent":["import {isatty} from 'node:tty'\nimport {styleText} from 'node:util'\n\nimport {logSymbols} from '@sanity/cli-core/ux'\nimport {type SchemaValidationProblemGroup, type SchemaValidationProblemPath} from '@sanity/types'\n\nconst isTty = isatty(1)\n\nconst headers = {\n error: isTty ? styleText(['bold', 'bgRed', 'black'], ' ERROR ') : styleText('red', '[ERROR]'),\n warning: isTty\n ? styleText(['bold', 'bgYellow', 'black'], ' WARN ')\n : styleText('yellow', '[WARN]'),\n}\n\nconst severityValues = {error: 0, warning: 1}\n\nfunction formatPath(pathSegments: SchemaValidationProblemPath) {\n const format = (\n [curr, ...next]: SchemaValidationProblemPath,\n mode: 'array' | 'object' = 'object',\n ): string => {\n if (!curr) return ''\n if (curr.kind === 'property') return format(next, curr.name === 'of' ? 'array' : 'object')\n\n const name = curr.name || `<anonymous_${curr.type}>`\n return `${mode === 'array' ? `[${name}]` : `.${name}`}${format(next)}`\n }\n\n return format(pathSegments.slice(1)).slice(1) // removes the top-level type and leading `.`\n}\n\nexport function getAggregatedSeverity(\n groupOrGroups: SchemaValidationProblemGroup | SchemaValidationProblemGroup[],\n): 'error' | 'warning' {\n const groups = Array.isArray(groupOrGroups) ? groupOrGroups : [groupOrGroups]\n return groups\n .flatMap((group) => group.problems.map((problem) => problem.severity))\n .includes('error')\n ? 
'error'\n : 'warning'\n}\n\nexport function formatSchemaValidation(validation: SchemaValidationProblemGroup[]): string {\n let unnamedTopLevelTypeCount = 0\n const validationByTypeMap: Record<string, SchemaValidationProblemGroup[]> = {}\n\n for (const group of validation) {\n const [firstSegment] = group.path\n if (!firstSegment) continue\n if (firstSegment.kind !== 'type') continue\n\n const topLevelType =\n firstSegment.name || `<unnamed_${firstSegment.type}_type_${unnamedTopLevelTypeCount++}>`\n\n if (!validationByTypeMap[topLevelType]) {\n validationByTypeMap[topLevelType] = []\n }\n\n validationByTypeMap[topLevelType].push(group)\n }\n\n const validationByType = Object.entries(validationByTypeMap)\n\n const formatted = validationByType\n .toSorted((a, b) => {\n const [aType, aGroups] = a\n const [bType, bGroups] = b\n const aValue = severityValues[getAggregatedSeverity(aGroups)]\n const bValue = severityValues[getAggregatedSeverity(bGroups)]\n if (aValue === bValue) return aType.localeCompare(bType, 'en-US')\n return aValue - bValue\n })\n .map(([topLevelType, groups]) => {\n const formattedTopLevelType = isTty\n ? styleText(['bgWhite', 'black'], ` ${topLevelType} `)\n : `[${topLevelType}]`\n\n const header = `${headers[getAggregatedSeverity(groups)]} ${formattedTopLevelType}`\n const body = groups\n .toSorted(\n (a, b) =>\n severityValues[getAggregatedSeverity(a)] - severityValues[getAggregatedSeverity(b)],\n )\n .map((group) => {\n const formattedPath = ` ${styleText('bold', formatPath(group.path) || '(root)')}`\n const formattedMessages = group.problems\n .toSorted((a, b) => severityValues[a.severity] - severityValues[b.severity])\n .map(({message, severity}) => ` ${logSymbols[severity]} ${message}
|
|
1
|
+
{"version":3,"sources":["../../../src/actions/schema/formatSchemaValidation.ts"],"sourcesContent":["import {isatty} from 'node:tty'\nimport {styleText} from 'node:util'\n\nimport {logSymbols} from '@sanity/cli-core/ux'\nimport {generateHelpUrl} from '@sanity/generate-help-url'\nimport {type SchemaValidationProblemGroup, type SchemaValidationProblemPath} from '@sanity/types'\n\nconst isTty = isatty(1)\n\nconst headers = {\n error: isTty ? styleText(['bold', 'bgRed', 'black'], ' ERROR ') : styleText('red', '[ERROR]'),\n warning: isTty\n ? styleText(['bold', 'bgYellow', 'black'], ' WARN ')\n : styleText('yellow', '[WARN]'),\n}\n\nconst severityValues = {error: 0, warning: 1}\n\nfunction formatPath(pathSegments: SchemaValidationProblemPath) {\n const format = (\n [curr, ...next]: SchemaValidationProblemPath,\n mode: 'array' | 'object' = 'object',\n ): string => {\n if (!curr) return ''\n if (curr.kind === 'property') return format(next, curr.name === 'of' ? 'array' : 'object')\n\n const name = curr.name || `<anonymous_${curr.type}>`\n return `${mode === 'array' ? `[${name}]` : `.${name}`}${format(next)}`\n }\n\n return format(pathSegments.slice(1)).slice(1) // removes the top-level type and leading `.`\n}\n\nexport function getAggregatedSeverity(\n groupOrGroups: SchemaValidationProblemGroup | SchemaValidationProblemGroup[],\n): 'error' | 'warning' {\n const groups = Array.isArray(groupOrGroups) ? groupOrGroups : [groupOrGroups]\n return groups\n .flatMap((group) => group.problems.map((problem) => problem.severity))\n .includes('error')\n ? 
'error'\n : 'warning'\n}\n\nexport function formatSchemaValidation(validation: SchemaValidationProblemGroup[]): string {\n let unnamedTopLevelTypeCount = 0\n const validationByTypeMap: Record<string, SchemaValidationProblemGroup[]> = {}\n\n for (const group of validation) {\n const [firstSegment] = group.path\n if (!firstSegment) continue\n if (firstSegment.kind !== 'type') continue\n\n const topLevelType =\n firstSegment.name || `<unnamed_${firstSegment.type}_type_${unnamedTopLevelTypeCount++}>`\n\n if (!validationByTypeMap[topLevelType]) {\n validationByTypeMap[topLevelType] = []\n }\n\n validationByTypeMap[topLevelType].push(group)\n }\n\n const validationByType = Object.entries(validationByTypeMap)\n\n const formatted = validationByType\n .toSorted((a, b) => {\n const [aType, aGroups] = a\n const [bType, bGroups] = b\n const aValue = severityValues[getAggregatedSeverity(aGroups)]\n const bValue = severityValues[getAggregatedSeverity(bGroups)]\n if (aValue === bValue) return aType.localeCompare(bType, 'en-US')\n return aValue - bValue\n })\n .map(([topLevelType, groups]) => {\n const formattedTopLevelType = isTty\n ? styleText(['bgWhite', 'black'], ` ${topLevelType} `)\n : `[${topLevelType}]`\n\n const header = `${headers[getAggregatedSeverity(groups)]} ${formattedTopLevelType}`\n const body = groups\n .toSorted(\n (a, b) =>\n severityValues[getAggregatedSeverity(a)] - severityValues[getAggregatedSeverity(b)],\n )\n .map((group) => {\n const formattedPath = ` ${styleText('bold', formatPath(group.path) || '(root)')}`\n const formattedMessages = group.problems\n .toSorted((a, b) => severityValues[a.severity] - severityValues[b.severity])\n .map(({helpId, message, severity}) => {\n const help = helpId ? 
`\\n See ${generateHelpUrl(helpId)}` : ''\n return ` ${logSymbols[severity]} ${message}${help}`\n })\n .join('\\n')\n\n return `${formattedPath}\\n${formattedMessages}`\n })\n .join('\\n')\n\n return `${header}\\n${body}`\n })\n .join('\\n\\n')\n\n return formatted\n}\n"],"names":["isatty","styleText","logSymbols","generateHelpUrl","isTty","headers","error","warning","severityValues","formatPath","pathSegments","format","curr","next","mode","kind","name","type","slice","getAggregatedSeverity","groupOrGroups","groups","Array","isArray","flatMap","group","problems","map","problem","severity","includes","formatSchemaValidation","validation","unnamedTopLevelTypeCount","validationByTypeMap","firstSegment","path","topLevelType","push","validationByType","Object","entries","formatted","toSorted","a","b","aType","aGroups","bType","bGroups","aValue","bValue","localeCompare","formattedTopLevelType","header","body","formattedPath","formattedMessages","helpId","message","help","join"],"mappings":"AAAA,SAAQA,MAAM,QAAO,WAAU;AAC/B,SAAQC,SAAS,QAAO,YAAW;AAEnC,SAAQC,UAAU,QAAO,sBAAqB;AAC9C,SAAQC,eAAe,QAAO,4BAA2B;AAGzD,MAAMC,QAAQJ,OAAO;AAErB,MAAMK,UAAU;IACdC,OAAOF,QAAQH,UAAU;QAAC;QAAQ;QAAS;KAAQ,EAAE,aAAaA,UAAU,OAAO;IACnFM,SAASH,QACLH,UAAU;QAAC;QAAQ;QAAY;KAAQ,EAAE,YACzCA,UAAU,UAAU;AAC1B;AAEA,MAAMO,iBAAiB;IAACF,OAAO;IAAGC,SAAS;AAAC;AAE5C,SAASE,WAAWC,YAAyC;IAC3D,MAAMC,SAAS,CACb,CAACC,MAAM,GAAGC,KAAkC,EAC5CC,OAA2B,QAAQ;QAEnC,IAAI,CAACF,MAAM,OAAO;QAClB,IAAIA,KAAKG,IAAI,KAAK,YAAY,OAAOJ,OAAOE,MAAMD,KAAKI,IAAI,KAAK,OAAO,UAAU;QAEjF,MAAMA,OAAOJ,KAAKI,IAAI,IAAI,CAAC,WAAW,EAAEJ,KAAKK,IAAI,CAAC,CAAC,CAAC;QACpD,OAAO,GAAGH,SAAS,UAAU,CAAC,CAAC,EAAEE,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,EAAEA,MAAM,GAAGL,OAAOE,OAAO;IACxE;IAEA,OAAOF,OAAOD,aAAaQ,KAAK,CAAC,IAAIA,KAAK,CAAC,GAAG,6CAA6C;;AAC7F;AAEA,OAAO,SAASC,sBACdC,aAA4E;IAE5E,MAAMC,SAASC,MAAMC,OAAO,CAACH,iBAAiBA,gBAAgB;QAACA;KAAc;IAC7E,OAAOC,OACJG,OAAO,CAAC,CAACC,QAAUA,MAAMC,QAAQ,CAACC,GAAG,CAAC,CAACC,UAAYA,QAAQC,QAAQ,GACnEC,QAAQ,CAAC,WACR,UACA;AACN;AAEA,OAAO,SAAS
C,uBAAuBC,UAA0C;IAC/E,IAAIC,2BAA2B;IAC/B,MAAMC,sBAAsE,CAAC;IAE7E,KAAK,MAAMT,SAASO,WAAY;QAC9B,MAAM,CAACG,aAAa,GAAGV,MAAMW,IAAI;QACjC,IAAI,CAACD,cAAc;QACnB,IAAIA,aAAapB,IAAI,KAAK,QAAQ;QAElC,MAAMsB,eACJF,aAAanB,IAAI,IAAI,CAAC,SAAS,EAAEmB,aAAalB,IAAI,CAAC,MAAM,EAAEgB,2BAA2B,CAAC,CAAC;QAE1F,IAAI,CAACC,mBAAmB,CAACG,aAAa,EAAE;YACtCH,mBAAmB,CAACG,aAAa,GAAG,EAAE;QACxC;QAEAH,mBAAmB,CAACG,aAAa,CAACC,IAAI,CAACb;IACzC;IAEA,MAAMc,mBAAmBC,OAAOC,OAAO,CAACP;IAExC,MAAMQ,YAAYH,iBACfI,QAAQ,CAAC,CAACC,GAAGC;QACZ,MAAM,CAACC,OAAOC,QAAQ,GAAGH;QACzB,MAAM,CAACI,OAAOC,QAAQ,GAAGJ;QACzB,MAAMK,SAAS1C,cAAc,CAACW,sBAAsB4B,SAAS;QAC7D,MAAMI,SAAS3C,cAAc,CAACW,sBAAsB8B,SAAS;QAC7D,IAAIC,WAAWC,QAAQ,OAAOL,MAAMM,aAAa,CAACJ,OAAO;QACzD,OAAOE,SAASC;IAClB,GACCxB,GAAG,CAAC,CAAC,CAACU,cAAchB,OAAO;QAC1B,MAAMgC,wBAAwBjD,QAC1BH,UAAU;YAAC;YAAW;SAAQ,EAAE,CAAC,CAAC,EAAEoC,aAAa,CAAC,CAAC,IACnD,CAAC,CAAC,EAAEA,aAAa,CAAC,CAAC;QAEvB,MAAMiB,SAAS,GAAGjD,OAAO,CAACc,sBAAsBE,QAAQ,CAAC,CAAC,EAAEgC,uBAAuB;QACnF,MAAME,OAAOlC,OACVsB,QAAQ,CACP,CAACC,GAAGC,IACFrC,cAAc,CAACW,sBAAsByB,GAAG,GAAGpC,cAAc,CAACW,sBAAsB0B,GAAG,EAEtFlB,GAAG,CAAC,CAACF;YACJ,MAAM+B,gBAAgB,CAAC,EAAE,EAAEvD,UAAU,QAAQQ,WAAWgB,MAAMW,IAAI,KAAK,WAAW;YAClF,MAAMqB,oBAAoBhC,MAAMC,QAAQ,CACrCiB,QAAQ,CAAC,CAACC,GAAGC,IAAMrC,cAAc,CAACoC,EAAEf,QAAQ,CAAC,GAAGrB,cAAc,CAACqC,EAAEhB,QAAQ,CAAC,EAC1EF,GAAG,CAAC,CAAC,EAAC+B,MAAM,EAAEC,OAAO,EAAE9B,QAAQ,EAAC;gBAC/B,MAAM+B,OAAOF,SAAS,CAAC,YAAY,EAAEvD,gBAAgBuD,SAAS,GAAG;gBACjE,OAAO,CAAC,IAAI,EAAExD,UAAU,CAAC2B,SAAS,CAAC,CAAC,EAAE8B,UAAUC,MAAM;YACxD,GACCC,IAAI,CAAC;YAER,OAAO,GAAGL,cAAc,EAAE,EAAEC,mBAAmB;QACjD,GACCI,IAAI,CAAC;QAER,OAAO,GAAGP,OAAO,EAAE,EAAEC,MAAM;IAC7B,GACCM,IAAI,CAAC;IAER,OAAOnB;AACT"}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { join, resolve } from 'node:path';
|
|
2
|
+
/**
 * Resolve the effective schema-extraction options by layering CLI flags
 * over the optional `schemaExtraction` section of the CLI config, then
 * falling back to built-in defaults.
 *
 * @param {object} params
 * @param {object} params.flags - Parsed CLI flags for `schema extract`.
 * @param {object} params.projectRoot - Project root lookup result (`directory` plus config file `path`).
 * @param {object} [params.schemaExtraction] - `schemaExtraction` block of the CLI config, if present.
 * @returns {object} Normalized options consumed by the extraction actions.
 */
export function getExtractOptions({ flags, projectRoot, schemaExtraction }) {
  // CLI flag wins over config; both may be absent.
  const destination = flags.path ?? schemaExtraction?.path;

  // Without an explicit destination the schema lands in the project root.
  let outputDir = projectRoot.directory;
  if (destination) {
    outputDir = resolve(join(projectRoot.directory, destination));
  }

  const enforceRequiredFields =
    flags['enforce-required-fields'] ?? schemaExtraction?.enforceRequiredFields ?? false;
  const watchPatterns = flags['watch-patterns'] ?? schemaExtraction?.watchPatterns ?? [];

  return {
    configPath: projectRoot.path,
    enforceRequiredFields,
    format: flags.format ?? 'groq-type-nodes',
    outputPath: join(outputDir, 'schema.json'),
    watchPatterns,
    workspace: flags.workspace ?? schemaExtraction?.workspace
  };
}
|
|
15
|
+
|
|
16
|
+
//# sourceMappingURL=getExtractOptions.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../src/actions/schema/getExtractOptions.ts"],"sourcesContent":["import {join, resolve} from 'node:path'\n\nimport {type CliConfig, ProjectRootResult} from '@sanity/cli-core'\n\nimport {type ExtractSchemaCommand} from '../../commands/schema/extract.js'\n\nexport interface ExtractOptions {\n configPath: string\n enforceRequiredFields: boolean\n format: string\n outputPath: string\n watchPatterns: string[]\n workspace: string | undefined\n}\n\ninterface GetExtractionOptions {\n flags: ExtractSchemaCommand['flags']\n projectRoot: ProjectRootResult\n schemaExtraction: CliConfig['schemaExtraction']\n}\n\nexport function getExtractOptions({\n flags,\n projectRoot,\n schemaExtraction,\n}: GetExtractionOptions): ExtractOptions {\n const pathFlag = flags.path ?? schemaExtraction?.path\n const outputDir = pathFlag\n ? resolve(join(projectRoot.directory, pathFlag))\n : projectRoot.directory\n const outputPath = join(outputDir, 'schema.json')\n\n return {\n configPath: projectRoot.path,\n enforceRequiredFields:\n flags['enforce-required-fields'] ?? schemaExtraction?.enforceRequiredFields ?? false,\n format: flags.format ?? 'groq-type-nodes',\n outputPath,\n watchPatterns: flags['watch-patterns'] ?? schemaExtraction?.watchPatterns ?? [],\n workspace: flags.workspace ?? 
schemaExtraction?.workspace,\n }\n}\n"],"names":["join","resolve","getExtractOptions","flags","projectRoot","schemaExtraction","pathFlag","path","outputDir","directory","outputPath","configPath","enforceRequiredFields","format","watchPatterns","workspace"],"mappings":"AAAA,SAAQA,IAAI,EAAEC,OAAO,QAAO,YAAW;AAqBvC,OAAO,SAASC,kBAAkB,EAChCC,KAAK,EACLC,WAAW,EACXC,gBAAgB,EACK;IACrB,MAAMC,WAAWH,MAAMI,IAAI,IAAIF,kBAAkBE;IACjD,MAAMC,YAAYF,WACdL,QAAQD,KAAKI,YAAYK,SAAS,EAAEH,aACpCF,YAAYK,SAAS;IACzB,MAAMC,aAAaV,KAAKQ,WAAW;IAEnC,OAAO;QACLG,YAAYP,YAAYG,IAAI;QAC5BK,uBACET,KAAK,CAAC,0BAA0B,IAAIE,kBAAkBO,yBAAyB;QACjFC,QAAQV,MAAMU,MAAM,IAAI;QACxBH;QACAI,eAAeX,KAAK,CAAC,iBAAiB,IAAIE,kBAAkBS,iBAAiB,EAAE;QAC/EC,WAAWZ,MAAMY,SAAS,IAAIV,kBAAkBU;IAClD;AACF"}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { mkdir, writeFile } from 'node:fs/promises';
|
|
2
|
+
import { dirname } from 'node:path';
|
|
3
|
+
import { studioWorkerTask } from '@sanity/cli-core';
|
|
4
|
+
import { SchemaExtractionError } from './utils/SchemaExtractionError.js';
|
|
5
|
+
/**
 * Core schema extraction logic.
 * Runs the extraction in a worker thread and writes the resulting schema
 * to `outputPath` as pretty-printed JSON.
 *
 * @param {object} extractOptions - Normalized options (config path, format, output path, …).
 * @returns {Promise<object>} The extracted schema.
 * @throws {Error} When an unsupported `format` is requested.
 * @throws {SchemaExtractionError} When the worker reports a failure.
 */
export async function runSchemaExtraction(extractOptions) {
  const { configPath, enforceRequiredFields, format, outputPath, workspace } = extractOptions;

  // 'groq-type-nodes' is the only format the worker understands today.
  if (format !== 'groq-type-nodes') {
    throw new Error(`Unsupported format: "${format}"`);
  }

  const workDir = dirname(configPath);
  const workerUrl = new URL('extractSanitySchema.worker.js', import.meta.url);

  const result = await studioWorkerTask(workerUrl, {
    name: 'extractSanitySchema',
    studioRootPath: workDir,
    workerData: {
      configPath,
      enforceRequiredFields,
      workDir,
      workspaceName: workspace
    }
  });

  if (result.type === 'error') {
    throw new SchemaExtractionError(result.error, result.validation);
  }

  const { schema } = result;

  // Make sure the destination directory exists before writing.
  await mkdir(dirname(outputPath), { recursive: true });

  // Trailing newline keeps the file friendly to POSIX tooling and diffs.
  await writeFile(outputPath, `${JSON.stringify(schema, null, 2)}\n`);

  return schema;
}
|
|
38
|
+
|
|
39
|
+
//# sourceMappingURL=runSchemaExtraction.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../src/actions/schema/runSchemaExtraction.ts"],"sourcesContent":["import {mkdir, writeFile} from 'node:fs/promises'\nimport {dirname} from 'node:path'\n\nimport {studioWorkerTask} from '@sanity/cli-core'\nimport {type extractSchema as extractSchemaInternal} from '@sanity/schema/_internal'\n\nimport {type ExtractOptions} from './getExtractOptions.js'\nimport {type ExtractSchemaWorkerData, type ExtractSchemaWorkerError} from './types.js'\nimport {SchemaExtractionError} from './utils/SchemaExtractionError.js'\n\ninterface ExtractSchemaWorkerResult {\n schema: ReturnType<typeof extractSchemaInternal>\n type: 'success'\n}\n\ntype ExtractSchemaWorkerMessage = ExtractSchemaWorkerError | ExtractSchemaWorkerResult\n\n/**\n * Core schema extraction logic.\n * Performs the extraction via worker and writes to file.\n * Throws SchemaExtractionError on failure.\n */\nexport async function runSchemaExtraction(\n extractOptions: ExtractOptions,\n): Promise<ReturnType<typeof extractSchemaInternal>> {\n const {configPath, enforceRequiredFields, format, outputPath, workspace} = extractOptions\n\n if (format !== 'groq-type-nodes') {\n throw new Error(`Unsupported format: \"${format}\"`)\n }\n\n const workDir = dirname(configPath)\n const outputDir = dirname(outputPath)\n\n const result = await studioWorkerTask<ExtractSchemaWorkerMessage>(\n new URL('extractSanitySchema.worker.js', import.meta.url),\n {\n name: 'extractSanitySchema',\n studioRootPath: workDir,\n workerData: {\n configPath,\n enforceRequiredFields,\n workDir,\n workspaceName: workspace,\n } satisfies ExtractSchemaWorkerData,\n },\n )\n\n if (result.type === 'error') {\n throw new SchemaExtractionError(result.error, result.validation)\n }\n\n const schema = result.schema\n\n // Ensure output directory exists\n await mkdir(outputDir, {recursive: true})\n\n // Write schema to file\n await writeFile(outputPath, `${JSON.stringify(schema, null, 2)}\\n`)\n\n return 
schema\n}\n"],"names":["mkdir","writeFile","dirname","studioWorkerTask","SchemaExtractionError","runSchemaExtraction","extractOptions","configPath","enforceRequiredFields","format","outputPath","workspace","Error","workDir","outputDir","result","URL","url","name","studioRootPath","workerData","workspaceName","type","error","validation","schema","recursive","JSON","stringify"],"mappings":"AAAA,SAAQA,KAAK,EAAEC,SAAS,QAAO,mBAAkB;AACjD,SAAQC,OAAO,QAAO,YAAW;AAEjC,SAAQC,gBAAgB,QAAO,mBAAkB;AAKjD,SAAQC,qBAAqB,QAAO,mCAAkC;AAStE;;;;CAIC,GACD,OAAO,eAAeC,oBACpBC,cAA8B;IAE9B,MAAM,EAACC,UAAU,EAAEC,qBAAqB,EAAEC,MAAM,EAAEC,UAAU,EAAEC,SAAS,EAAC,GAAGL;IAE3E,IAAIG,WAAW,mBAAmB;QAChC,MAAM,IAAIG,MAAM,CAAC,qBAAqB,EAAEH,OAAO,CAAC,CAAC;IACnD;IAEA,MAAMI,UAAUX,QAAQK;IACxB,MAAMO,YAAYZ,QAAQQ;IAE1B,MAAMK,SAAS,MAAMZ,iBACnB,IAAIa,IAAI,iCAAiC,YAAYC,GAAG,GACxD;QACEC,MAAM;QACNC,gBAAgBN;QAChBO,YAAY;YACVb;YACAC;YACAK;YACAQ,eAAeV;QACjB;IACF;IAGF,IAAII,OAAOO,IAAI,KAAK,SAAS;QAC3B,MAAM,IAAIlB,sBAAsBW,OAAOQ,KAAK,EAAER,OAAOS,UAAU;IACjE;IAEA,MAAMC,SAASV,OAAOU,MAAM;IAE5B,iCAAiC;IACjC,MAAMzB,MAAMc,WAAW;QAACY,WAAW;IAAI;IAEvC,uBAAuB;IACvB,MAAMzB,UAAUS,YAAY,GAAGiB,KAAKC,SAAS,CAACH,QAAQ,MAAM,GAAG,EAAE,CAAC;IAElE,OAAOA;AACT"}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import { getCliTelemetry } from '@sanity/cli-core';
|
|
2
|
+
import { mean, once } from 'lodash-es';
|
|
3
|
+
import { SchemaExtractionWatchModeTrace } from '../../telemetry/extractSchema.telemetry.js';
|
|
4
|
+
import { DEFAULT_WATCH_PATTERNS, startExtractSchemaWatcher } from './extractSchemaWatcher.js';
|
|
5
|
+
/**
 * Run `schema extract` in watch mode: print a banner, start the file
 * watcher (which performs an initial extraction), and record session
 * telemetry when the returned `close` handle is invoked.
 *
 * @param {object} options
 * @param {object} options.extractOptions - Normalized extraction options.
 * @param {object} options.output - CLI output sink used for logging.
 * @returns {Promise<{close: () => Promise<void>}>} Handle that stops the
 *   watcher; safe to call more than once.
 */
export async function watchExtractSchema(options) {
  const { extractOptions, output } = options;

  // Session start time plus simple success/failure counters for telemetry.
  const startedAt = Date.now();
  const runStats = {
    failedCount: 0,
    successfulDurations: []
  };

  const watchPatterns = [...DEFAULT_WATCH_PATTERNS, ...extractOptions.watchPatterns];

  const trace = getCliTelemetry().trace(SchemaExtractionWatchModeTrace);
  trace.start();

  // Banner: announce watch mode and the effective glob patterns up front.
  output.log('Schema extraction watch mode');
  output.log('');
  output.log('Watching for changes in:');
  watchPatterns.forEach((pattern) => output.log(` - ${pattern}`));
  output.log('');
  output.log('Running initial extraction...');

  // Start the watcher; it performs the initial extraction as part of startup.
  const { close } = await startExtractSchemaWatcher({
    extractOptions,
    onExtraction: ({ duration, success }) => {
      if (success) {
        runStats.successfulDurations.push(duration);
      } else {
        runStats.failedCount += 1;
      }
    },
    output,
    watchPatterns
  });

  trace.log({
    enforceRequiredFields: extractOptions.enforceRequiredFields,
    schemaFormat: extractOptions.format,
    step: 'started'
  });

  output.log('');
  output.log('Watching for changes... (Ctrl+C to stop)');

  /**
   * Tear-down: record session telemetry, then stop the watcher.
   * Wrapped in `once()` so repeated calls are harmless.
   */
  const cleanup = once(async () => {
    trace.log({
      averageExtractionDuration: mean(runStats.successfulDurations) || 0,
      extractionFailedCount: runStats.failedCount,
      extractionSuccessfulCount: runStats.successfulDurations.length,
      step: 'stopped',
      watcherDuration: Date.now() - startedAt
    });
    trace.complete();

    output.log('');
    output.log('Stopping watch mode...');
    await close();
  });

  // Expose the guarded cleanup for programmatic use and testing;
  // the CLI framework itself handles SIGINT/SIGTERM.
  return { close: cleanup };
}
|
|
70
|
+
|
|
71
|
+
//# sourceMappingURL=watchExtractSchema.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../src/actions/schema/watchExtractSchema.ts"],"sourcesContent":["import {getCliTelemetry, type Output} from '@sanity/cli-core'\nimport {mean, once} from 'lodash-es'\n\nimport {SchemaExtractionWatchModeTrace} from '../../telemetry/extractSchema.telemetry.js'\nimport {DEFAULT_WATCH_PATTERNS, startExtractSchemaWatcher} from './extractSchemaWatcher.js'\nimport {type ExtractOptions} from './getExtractOptions.js'\n\ninterface WatchExtractSchemaOptions {\n extractOptions: ExtractOptions\n output: Output\n}\n\nexport async function watchExtractSchema(\n options: WatchExtractSchemaOptions,\n): Promise<{close: () => Promise<void>}> {\n const {extractOptions, output} = options\n\n // Keep the start time + some simple stats for extractions as they happen\n const startTime = Date.now()\n const stats: {failedCount: number; successfulDurations: number[]} = {\n failedCount: 0,\n successfulDurations: [],\n }\n\n const watchPatterns = [...DEFAULT_WATCH_PATTERNS, ...extractOptions.watchPatterns]\n\n const trace = getCliTelemetry().trace(SchemaExtractionWatchModeTrace)\n trace.start()\n\n // Print watch mode header and patterns at the very beginning\n output.log('Schema extraction watch mode')\n output.log('')\n output.log('Watching for changes in:')\n for (const pattern of watchPatterns) {\n output.log(` - ${pattern}`)\n }\n output.log('')\n\n output.log('Running initial extraction...')\n\n // Start the watcher (includes initial extraction)\n const {close} = await startExtractSchemaWatcher({\n extractOptions,\n onExtraction: ({duration, success}) => {\n if (success) {\n stats.successfulDurations.push(duration)\n } else {\n stats.failedCount++\n }\n },\n output,\n watchPatterns,\n })\n\n trace.log({\n enforceRequiredFields: extractOptions.enforceRequiredFields,\n schemaFormat: extractOptions.format,\n step: 'started',\n })\n\n output.log('')\n output.log('Watching for changes... 
(Ctrl+C to stop)')\n\n /**\n * Cleanup function that logs telemetry and stops the watcher.\n * Wrapped in once() to prevent multiple calls.\n */\n const cleanup = once(async () => {\n trace.log({\n averageExtractionDuration: mean(stats.successfulDurations) || 0,\n extractionFailedCount: stats.failedCount,\n extractionSuccessfulCount: stats.successfulDurations.length,\n step: 'stopped',\n watcherDuration: Date.now() - startTime,\n })\n trace.complete()\n\n output.log('')\n output.log('Stopping watch mode...')\n await close()\n })\n\n // Return cleanup function for programmatic usage and testing\n // The CLI framework will handle SIGINT/SIGTERM\n return {close: cleanup}\n}\n"],"names":["getCliTelemetry","mean","once","SchemaExtractionWatchModeTrace","DEFAULT_WATCH_PATTERNS","startExtractSchemaWatcher","watchExtractSchema","options","extractOptions","output","startTime","Date","now","stats","failedCount","successfulDurations","watchPatterns","trace","start","log","pattern","close","onExtraction","duration","success","push","enforceRequiredFields","schemaFormat","format","step","cleanup","averageExtractionDuration","extractionFailedCount","extractionSuccessfulCount","length","watcherDuration","complete"],"mappings":"AAAA,SAAQA,eAAe,QAAoB,mBAAkB;AAC7D,SAAQC,IAAI,EAAEC,IAAI,QAAO,YAAW;AAEpC,SAAQC,8BAA8B,QAAO,6CAA4C;AACzF,SAAQC,sBAAsB,EAAEC,yBAAyB,QAAO,4BAA2B;AAQ3F,OAAO,eAAeC,mBACpBC,OAAkC;IAElC,MAAM,EAACC,cAAc,EAAEC,MAAM,EAAC,GAAGF;IAEjC,yEAAyE;IACzE,MAAMG,YAAYC,KAAKC,GAAG;IAC1B,MAAMC,QAA8D;QAClEC,aAAa;QACbC,qBAAqB,EAAE;IACzB;IAEA,MAAMC,gBAAgB;WAAIZ;WAA2BI,eAAeQ,aAAa;KAAC;IAElF,MAAMC,QAAQjB,kBAAkBiB,KAAK,CAACd;IACtCc,MAAMC,KAAK;IAEX,6DAA6D;IAC7DT,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IACX,KAAK,MAAMC,WAAWJ,cAAe;QACnCP,OAAOU,GAAG,CAAC,CAAC,IAAI,EAAEC,SAAS;IAC7B;IACAX,OAAOU,GAAG,CAAC;IAEXV,OAAOU,GAAG,CAAC;IAEX,kDAAkD;IAClD,MAAM,EAACE,KAAK,EAAC,GAAG,MAAMhB,0BAA0B;QAC9CG;QACAc,cAAc,CAAC,EAACC,QAAQ,EAAEC,OAAO,EAAC;YAChC,IAAIA,SAAS;gBACXX,MAAME,mBAAmB,CAACU,
IAAI,CAACF;YACjC,OAAO;gBACLV,MAAMC,WAAW;YACnB;QACF;QACAL;QACAO;IACF;IAEAC,MAAME,GAAG,CAAC;QACRO,uBAAuBlB,eAAekB,qBAAqB;QAC3DC,cAAcnB,eAAeoB,MAAM;QACnCC,MAAM;IACR;IAEApB,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IAEX;;;GAGC,GACD,MAAMW,UAAU5B,KAAK;QACnBe,MAAME,GAAG,CAAC;YACRY,2BAA2B9B,KAAKY,MAAME,mBAAmB,KAAK;YAC9DiB,uBAAuBnB,MAAMC,WAAW;YACxCmB,2BAA2BpB,MAAME,mBAAmB,CAACmB,MAAM;YAC3DL,MAAM;YACNM,iBAAiBxB,KAAKC,GAAG,KAAKF;QAChC;QACAO,MAAMmB,QAAQ;QAEd3B,OAAOU,GAAG,CAAC;QACXV,OAAOU,GAAG,CAAC;QACX,MAAME;IACR;IAEA,6DAA6D;IAC7D,+CAA+C;IAC/C,OAAO;QAACA,OAAOS;IAAO;AACxB"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/actions/versions/tryFindLatestVersion.ts"],"sourcesContent":["import getLatestVersion from 'get-latest-version'\n\nimport {versionsDebug} from './versionsDebug.js'\n\n/**\n * Try to find the latest version of a package.\n *\n * @param pkgName - The name of the package to find the latest version of.\n * @returns The latest version of the package.\n * @internal\n */\nexport async function tryFindLatestVersion(pkgName: string): Promise<string | undefined> {\n try {\n const latest = await getLatestVersion(pkgName)\n return latest\n } catch (err) {\n versionsDebug(`Cannot find version for ${pkgName}`, err)\n throw new Error(`Cannot find version for ${pkgName}`, {cause: err})\n }\n}\n"],"names":["getLatestVersion","versionsDebug","tryFindLatestVersion","pkgName","latest","err","Error","cause"],"mappings":"AAAA,
|
|
1
|
+
{"version":3,"sources":["../../../src/actions/versions/tryFindLatestVersion.ts"],"sourcesContent":["import {getLatestVersion} from 'get-latest-version'\n\nimport {versionsDebug} from './versionsDebug.js'\n\n/**\n * Try to find the latest version of a package.\n *\n * @param pkgName - The name of the package to find the latest version of.\n * @returns The latest version of the package.\n * @internal\n */\nexport async function tryFindLatestVersion(pkgName: string): Promise<string | undefined> {\n try {\n const latest = await getLatestVersion(pkgName)\n return latest\n } catch (err) {\n versionsDebug(`Cannot find version for ${pkgName}`, err)\n throw new Error(`Cannot find version for ${pkgName}`, {cause: err})\n }\n}\n"],"names":["getLatestVersion","versionsDebug","tryFindLatestVersion","pkgName","latest","err","Error","cause"],"mappings":"AAAA,SAAQA,gBAAgB,QAAO,qBAAoB;AAEnD,SAAQC,aAAa,QAAO,qBAAoB;AAEhD;;;;;;CAMC,GACD,OAAO,eAAeC,qBAAqBC,OAAe;IACxD,IAAI;QACF,MAAMC,SAAS,MAAMJ,iBAAiBG;QACtC,OAAOC;IACT,EAAE,OAAOC,KAAK;QACZJ,cAAc,CAAC,wBAAwB,EAAEE,SAAS,EAAEE;QACpD,MAAM,IAAIC,MAAM,CAAC,wBAAwB,EAAEH,SAAS,EAAE;YAACI,OAAOF;QAAG;IACnE;AACF"}
|
|
@@ -2,7 +2,7 @@ import { SanityCommand } from '@sanity/cli-core';
|
|
|
2
2
|
import { setupMCP } from '../../actions/mcp/setupMCP.js';
|
|
3
3
|
import { MCPConfigureTrace } from '../../telemetry/mcp.telemetry.js';
|
|
4
4
|
export class ConfigureMcpCommand extends SanityCommand {
|
|
5
|
-
static description = 'Configure Sanity MCP server for AI editors (Cursor,
|
|
5
|
+
static description = 'Configure Sanity MCP server for AI editors (Claude Code, Codex CLI, Cursor, Gemini CLI, GitHub Copilot CLI, VS Code)';
|
|
6
6
|
static examples = [
|
|
7
7
|
{
|
|
8
8
|
command: '<%= config.bin %> <%= command.id %>',
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/commands/mcp/configure.ts"],"sourcesContent":["import {SanityCommand} from '@sanity/cli-core'\n\nimport {setupMCP} from '../../actions/mcp/setupMCP.js'\nimport {MCPConfigureTrace} from '../../telemetry/mcp.telemetry.js'\n\nexport class ConfigureMcpCommand extends SanityCommand<typeof ConfigureMcpCommand> {\n static override description =\n 'Configure Sanity MCP server for AI editors (Cursor,
|
|
1
|
+
{"version":3,"sources":["../../../src/commands/mcp/configure.ts"],"sourcesContent":["import {SanityCommand} from '@sanity/cli-core'\n\nimport {setupMCP} from '../../actions/mcp/setupMCP.js'\nimport {MCPConfigureTrace} from '../../telemetry/mcp.telemetry.js'\n\nexport class ConfigureMcpCommand extends SanityCommand<typeof ConfigureMcpCommand> {\n static override description =\n 'Configure Sanity MCP server for AI editors (Claude Code, Codex CLI, Cursor, Gemini CLI, GitHub Copilot CLI, VS Code)'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Configure Sanity MCP server for detected AI editors',\n },\n ]\n\n public async run(): Promise<void> {\n const trace = this.telemetry.trace(MCPConfigureTrace)\n trace.start()\n const mcpResult = await setupMCP(true)\n\n trace.log({\n configuredEditors: mcpResult.configuredEditors,\n detectedEditors: mcpResult.detectedEditors,\n })\n\n if (mcpResult.error) {\n trace.error(mcpResult.error)\n } else {\n trace.complete()\n }\n }\n}\n"],"names":["SanityCommand","setupMCP","MCPConfigureTrace","ConfigureMcpCommand","description","examples","command","run","trace","telemetry","start","mcpResult","log","configuredEditors","detectedEditors","error","complete"],"mappings":"AAAA,SAAQA,aAAa,QAAO,mBAAkB;AAE9C,SAAQC,QAAQ,QAAO,gCAA+B;AACtD,SAAQC,iBAAiB,QAAO,mCAAkC;AAElE,OAAO,MAAMC,4BAA4BH;IACvC,OAAgBI,cACd,uHAAsH;IAExH,OAAgBC,WAAW;QACzB;YACEC,SAAS;YACTF,aAAa;QACf;KACD,CAAA;IAED,MAAaG,MAAqB;QAChC,MAAMC,QAAQ,IAAI,CAACC,SAAS,CAACD,KAAK,CAACN;QACnCM,MAAME,KAAK;QACX,MAAMC,YAAY,MAAMV,SAAS;QAEjCO,MAAMI,GAAG,CAAC;YACRC,mBAAmBF,UAAUE,iBAAiB;YAC9CC,iBAAiBH,UAAUG,eAAe;QAC5C;QAEA,IAAIH,UAAUI,KAAK,EAAE;YACnBP,MAAMO,KAAK,CAACJ,UAAUI,KAAK;QAC7B,OAAO;YACLP,MAAMQ,QAAQ;QAChB;IACF;AACF"}
|
|
@@ -1,6 +1,8 @@
|
|
|
1
1
|
import { Flags } from '@oclif/core';
|
|
2
2
|
import { SanityCommand } from '@sanity/cli-core';
|
|
3
3
|
import { extractSchema } from '../../actions/schema/extractSchema.js';
|
|
4
|
+
import { getExtractOptions } from '../../actions/schema/getExtractOptions.js';
|
|
5
|
+
import { watchExtractSchema } from '../../actions/schema/watchExtractSchema.js';
|
|
4
6
|
const description = `
|
|
5
7
|
Extracts a JSON representation of a Sanity schema within a Studio context.
|
|
6
8
|
|
|
@@ -12,11 +14,18 @@ export class ExtractSchemaCommand extends SanityCommand {
|
|
|
12
14
|
{
|
|
13
15
|
command: '<%= config.bin %> <%= command.id %> --workspace default',
|
|
14
16
|
description: 'Extracts schema types in a Sanity project with more than one workspace'
|
|
17
|
+
},
|
|
18
|
+
{
|
|
19
|
+
command: '<%= config.bin %> <%= command.id %> --watch',
|
|
20
|
+
description: 'Watch mode - re-extract on changes'
|
|
21
|
+
},
|
|
22
|
+
{
|
|
23
|
+
command: '<%= config.bin %> <%= command.id %> --watch --watch-patterns "lib/**/*.ts"',
|
|
24
|
+
description: 'Watch with custom glob patterns'
|
|
15
25
|
}
|
|
16
26
|
];
|
|
17
27
|
static flags = {
|
|
18
28
|
'enforce-required-fields': Flags.boolean({
|
|
19
|
-
default: false,
|
|
20
29
|
description: 'Makes the schema generated treat fields marked as required as non-optional'
|
|
21
30
|
}),
|
|
22
31
|
format: Flags.string({
|
|
@@ -27,6 +36,14 @@ export class ExtractSchemaCommand extends SanityCommand {
|
|
|
27
36
|
path: Flags.string({
|
|
28
37
|
description: 'Optional path to specify destination of the schema file'
|
|
29
38
|
}),
|
|
39
|
+
watch: Flags.boolean({
|
|
40
|
+
description: 'Enable watch mode to re-extract schema on file changes'
|
|
41
|
+
}),
|
|
42
|
+
'watch-patterns': Flags.string({
|
|
43
|
+
description: 'Additional glob pattern(s) to watch (can be specified multiple times)',
|
|
44
|
+
helpValue: '<glob>',
|
|
45
|
+
multiple: true
|
|
46
|
+
}),
|
|
30
47
|
workspace: Flags.string({
|
|
31
48
|
description: 'The name of the workspace to generate a schema for',
|
|
32
49
|
helpValue: '<name>'
|
|
@@ -35,10 +52,21 @@ export class ExtractSchemaCommand extends SanityCommand {
|
|
|
35
52
|
async run() {
|
|
36
53
|
const { flags } = await this.parse(ExtractSchemaCommand);
|
|
37
54
|
const projectRoot = await this.getProjectRoot();
|
|
38
|
-
await
|
|
55
|
+
const { schemaExtraction } = await this.getCliConfig();
|
|
56
|
+
const extractOptions = getExtractOptions({
|
|
39
57
|
flags,
|
|
40
|
-
|
|
41
|
-
|
|
58
|
+
projectRoot,
|
|
59
|
+
schemaExtraction
|
|
60
|
+
});
|
|
61
|
+
if (flags.watch) {
|
|
62
|
+
return watchExtractSchema({
|
|
63
|
+
extractOptions,
|
|
64
|
+
output: this.output
|
|
65
|
+
});
|
|
66
|
+
}
|
|
67
|
+
return extractSchema({
|
|
68
|
+
extractOptions,
|
|
69
|
+
output: this.output
|
|
42
70
|
});
|
|
43
71
|
}
|
|
44
72
|
}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/commands/schema/extract.ts"],"sourcesContent":["import {Flags} from '@oclif/core'\nimport {SanityCommand} from '@sanity/cli-core'\n\nimport {extractSchema} from '../../actions/schema/extractSchema.js'\n\nconst description = `\nExtracts a JSON representation of a Sanity schema within a Studio context.\n\n**Note**: This command is experimental and subject to change.\n`.trim()\n\nexport class ExtractSchemaCommand extends SanityCommand<typeof ExtractSchemaCommand> {\n static override description = description\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %> --workspace default',\n description: 'Extracts schema types in a Sanity project with more than one workspace',\n },\n ]\n\n static override flags = {\n 'enforce-required-fields': Flags.boolean({\n
|
|
1
|
+
{"version":3,"sources":["../../../src/commands/schema/extract.ts"],"sourcesContent":["import {Flags} from '@oclif/core'\nimport {SanityCommand} from '@sanity/cli-core'\n\nimport {extractSchema} from '../../actions/schema/extractSchema.js'\nimport {getExtractOptions} from '../../actions/schema/getExtractOptions.js'\nimport {watchExtractSchema} from '../../actions/schema/watchExtractSchema.js'\n\nconst description = `\nExtracts a JSON representation of a Sanity schema within a Studio context.\n\n**Note**: This command is experimental and subject to change.\n`.trim()\n\nexport class ExtractSchemaCommand extends SanityCommand<typeof ExtractSchemaCommand> {\n static override description = description\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %> --workspace default',\n description: 'Extracts schema types in a Sanity project with more than one workspace',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --watch',\n description: 'Watch mode - re-extract on changes',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --watch --watch-patterns \"lib/**/*.ts\"',\n description: 'Watch with custom glob patterns',\n },\n ]\n\n static override flags = {\n 'enforce-required-fields': Flags.boolean({\n description: 'Makes the schema generated treat fields marked as required as non-optional',\n }),\n format: Flags.string({\n default: 'groq-type-nodes',\n description: 'Format the schema as GROQ type nodes. 
Only available format at the moment.',\n helpValue: '<format>',\n }),\n path: Flags.string({\n description: 'Optional path to specify destination of the schema file',\n }),\n watch: Flags.boolean({\n description: 'Enable watch mode to re-extract schema on file changes',\n }),\n 'watch-patterns': Flags.string({\n description: 'Additional glob pattern(s) to watch (can be specified multiple times)',\n helpValue: '<glob>',\n multiple: true,\n }),\n workspace: Flags.string({\n description: 'The name of the workspace to generate a schema for',\n helpValue: '<name>',\n }),\n }\n\n public async run(): Promise<{close?: () => Promise<void>} | void> {\n const {flags} = await this.parse(ExtractSchemaCommand)\n const projectRoot = await this.getProjectRoot()\n\n const {schemaExtraction} = await this.getCliConfig()\n const extractOptions = getExtractOptions({\n flags,\n projectRoot,\n schemaExtraction,\n })\n\n if (flags.watch) {\n return watchExtractSchema({\n extractOptions,\n output: this.output,\n })\n }\n\n return extractSchema({\n extractOptions,\n output: this.output,\n })\n 
}\n}\n"],"names":["Flags","SanityCommand","extractSchema","getExtractOptions","watchExtractSchema","description","trim","ExtractSchemaCommand","examples","command","flags","boolean","format","string","default","helpValue","path","watch","multiple","workspace","run","parse","projectRoot","getProjectRoot","schemaExtraction","getCliConfig","extractOptions","output"],"mappings":"AAAA,SAAQA,KAAK,QAAO,cAAa;AACjC,SAAQC,aAAa,QAAO,mBAAkB;AAE9C,SAAQC,aAAa,QAAO,wCAAuC;AACnE,SAAQC,iBAAiB,QAAO,4CAA2C;AAC3E,SAAQC,kBAAkB,QAAO,6CAA4C;AAE7E,MAAMC,cAAc,CAAC;;;;AAIrB,CAAC,CAACC,IAAI;AAEN,OAAO,MAAMC,6BAA6BN;IACxC,OAAgBI,cAAcA,YAAW;IAEzC,OAAgBG,WAAW;QACzB;YACEC,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;KACD,CAAA;IAED,OAAgBK,QAAQ;QACtB,2BAA2BV,MAAMW,OAAO,CAAC;YACvCN,aAAa;QACf;QACAO,QAAQZ,MAAMa,MAAM,CAAC;YACnBC,SAAS;YACTT,aAAa;YACbU,WAAW;QACb;QACAC,MAAMhB,MAAMa,MAAM,CAAC;YACjBR,aAAa;QACf;QACAY,OAAOjB,MAAMW,OAAO,CAAC;YACnBN,aAAa;QACf;QACA,kBAAkBL,MAAMa,MAAM,CAAC;YAC7BR,aAAa;YACbU,WAAW;YACXG,UAAU;QACZ;QACAC,WAAWnB,MAAMa,MAAM,CAAC;YACtBR,aAAa;YACbU,WAAW;QACb;IACF,EAAC;IAED,MAAaK,MAAqD;QAChE,MAAM,EAACV,KAAK,EAAC,GAAG,MAAM,IAAI,CAACW,KAAK,CAACd;QACjC,MAAMe,cAAc,MAAM,IAAI,CAACC,cAAc;QAE7C,MAAM,EAACC,gBAAgB,EAAC,GAAG,MAAM,IAAI,CAACC,YAAY;QAClD,MAAMC,iBAAiBvB,kBAAkB;YACvCO;YACAY;YACAE;QACF;QAEA,IAAId,MAAMO,KAAK,EAAE;YACf,OAAOb,mBAAmB;gBACxBsB;gBACAC,QAAQ,IAAI,CAACA,MAAM;YACrB;QACF;QAEA,OAAOzB,cAAc;YACnBwB;YACAC,QAAQ,IAAI,CAACA,MAAM;QACrB;IACF;AACF"}
|
|
@@ -3,7 +3,7 @@ import path from 'node:path';
|
|
|
3
3
|
import { styleText } from 'node:util';
|
|
4
4
|
import { preview } from 'vite';
|
|
5
5
|
import { extendViteConfigWithUserConfig } from '../actions/build/getViteConfig.js';
|
|
6
|
-
import {
|
|
6
|
+
import { getLocalPackageVersion } from '../util/getLocalPackageVersion.js';
|
|
7
7
|
import { serverDebug } from './serverDebug.js';
|
|
8
8
|
import { sanityBasePathRedirectPlugin } from './vite/plugin-sanity-basepath-redirect.js';
|
|
9
9
|
const debug = serverDebug.extend('preview');
|
|
@@ -66,7 +66,7 @@ const debug = serverDebug.extend('preview');
|
|
|
66
66
|
info(`Using resolved base path from static build: ${styleText('cyan', basePath)}`);
|
|
67
67
|
}
|
|
68
68
|
const startupDuration = Date.now() - startTime;
|
|
69
|
-
const viteVersion = await
|
|
69
|
+
const viteVersion = await getLocalPackageVersion('vite', import.meta.url);
|
|
70
70
|
info(`Sanity ${isApp ? 'application' : 'Studio'} ` + `using ${styleText('cyan', `vite@${viteVersion}`)} ` + `ready in ${styleText('cyan', `${Math.ceil(startupDuration)}ms`)} ` + `and running at ${styleText('cyan', url)} (production preview mode)`);
|
|
71
71
|
return {
|
|
72
72
|
close: ()=>new Promise((resolve, reject)=>server.httpServer.close((err)=>err ? reject(err) : resolve())),
|