sanity 3.77.3-server-side-schemas.15 → 3.77.3-server-side-schemas.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/_chunks-cjs/_internal.js +420 -142
- package/lib/_chunks-cjs/_internal.js.map +1 -1
- package/lib/_chunks-cjs/buildAction.js +2 -2
- package/lib/_chunks-cjs/buildAction.js.map +1 -1
- package/lib/_chunks-cjs/deleteSchemaAction.js +6 -4
- package/lib/_chunks-cjs/deleteSchemaAction.js.map +1 -1
- package/lib/_chunks-cjs/deployAction.js +3 -3
- package/lib/_chunks-cjs/deployAction.js.map +1 -1
- package/lib/_chunks-cjs/devAction.js +2 -2
- package/lib/_chunks-cjs/devAction.js.map +1 -1
- package/lib/_chunks-cjs/version.js +1 -1
- package/lib/_chunks-es/version.mjs +1 -1
- package/lib/_legacy/version.esm.js +1 -1
- package/lib/index.d.mts +3 -3
- package/lib/index.d.ts +3 -3
- package/lib/structure.d.mts +1 -1
- package/lib/structure.d.ts +1 -1
- package/package.json +10 -10
- package/src/_internal/cli/actions/manifest/extractManifestAction.ts +3 -3
- package/src/_internal/cli/actions/schema/deleteSchemaAction.ts +16 -7
- package/src/_internal/cli/actions/schema/schemaListAction.ts +23 -9
- package/src/_internal/cli/actions/schema/storeSchemasAction.ts +60 -13
- package/src/_internal/cli/commands/app/deployCommand.ts +2 -0
- package/src/_internal/cli/commands/index.ts +12 -4
- package/src/_internal/cli/commands/schema/deleteSchemaCommand.ts +1 -1
- package/src/_internal/cli/commands/schema/schemaListCommand.ts +1 -1
- package/src/_internal/cli/commands/schema/storeSchemaCommand.ts +6 -5
- package/lib/_chunks-cjs/extractManifestAction.js +0 -99
- package/lib/_chunks-cjs/extractManifestAction.js.map +0 -1
- package/lib/_chunks-cjs/storeSchemasAction.js +0 -147
- package/lib/_chunks-cjs/storeSchemasAction.js.map +0 -1
- package/lib/_chunks-cjs/timing.js +0 -22
- package/lib/_chunks-cjs/timing.js.map +0 -1
@@ -18,11 +18,284 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
   isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: !0 }) : target,
   mod
 ));
-var …
+var fs$1 = require("node:fs"), path = require("node:path"), chalk = require("chalk"), node_crypto = require("node:crypto"), fs = require("node:fs/promises"), node_worker_threads = require("node:worker_threads"), dateFns = require("date-fns"), readPkgUp = require("read-pkg-up"), node_perf_hooks = require("node:perf_hooks"), size = require("lodash/size.js"), sortBy = require("lodash/sortBy.js"), uniqBy = require("lodash/uniqBy.js"), debug$3 = require("debug"), os = require("node:os"), promises = require("node:stream/promises"), fs$2 = require("@sanity/util/fs"), asyncMutex = require("async-mutex"), isString = require("lodash/isString.js"), prettyMs = require("pretty-ms"), helpers = require("yargs/helpers"), yargs = require("yargs/yargs"), zlib = require("node:zlib"), rimraf = require("rimraf"), getIt = require("get-it"), middleware = require("get-it/middleware"), node_stream = require("node:stream"), consoleTablePrinter = require("console-table-printer"), url = require("node:url"), logSymbols = require("log-symbols"), oneline = require("oneline"), EventSource = require("@sanity/eventsource"), rxjs = require("rxjs"), exportDataset = require("@sanity/export"), sanityImport = require("@sanity/import"), padStart = require("lodash/padStart.js"), uuid = require("@sanity/uuid"), chokidar = require("chokidar"), execa = require("execa"), json5 = require("json5"), isEqual = require("lodash/isEqual.js"), isPlainObject = require("lodash/isPlainObject.js"), noop$1 = require("lodash/noop.js"), pluralize = require("pluralize-esm"), tokenize = require("json-lexer"), open = require("open"), node_util = require("node:util"), groupBy = require("lodash/groupBy.js"), deburr = require("lodash/deburr.js"), node = require("esbuild-register/dist/node"), migrate = require("@sanity/migrate"), node_tty = require("node:tty"), types = require("@sanity/types");
 function _interopDefaultCompat(e) {
   return e && typeof e == "object" && "default" in e ? e : { default: e };
 }
-var …
+var fs__default = /* @__PURE__ */ _interopDefaultCompat(fs$1), path__default = /* @__PURE__ */ _interopDefaultCompat(path), chalk__default = /* @__PURE__ */ _interopDefaultCompat(chalk), fs__default$1 = /* @__PURE__ */ _interopDefaultCompat(fs), readPkgUp__default = /* @__PURE__ */ _interopDefaultCompat(readPkgUp), size__default = /* @__PURE__ */ _interopDefaultCompat(size), sortBy__default = /* @__PURE__ */ _interopDefaultCompat(sortBy), uniqBy__default = /* @__PURE__ */ _interopDefaultCompat(uniqBy), debug__default = /* @__PURE__ */ _interopDefaultCompat(debug$3), os__default = /* @__PURE__ */ _interopDefaultCompat(os), isString__default = /* @__PURE__ */ _interopDefaultCompat(isString), prettyMs__default = /* @__PURE__ */ _interopDefaultCompat(prettyMs), yargs__default = /* @__PURE__ */ _interopDefaultCompat(yargs), zlib__default = /* @__PURE__ */ _interopDefaultCompat(zlib), url__default = /* @__PURE__ */ _interopDefaultCompat(url), logSymbols__default = /* @__PURE__ */ _interopDefaultCompat(logSymbols), oneline__default = /* @__PURE__ */ _interopDefaultCompat(oneline), EventSource__default = /* @__PURE__ */ _interopDefaultCompat(EventSource), exportDataset__default = /* @__PURE__ */ _interopDefaultCompat(exportDataset), sanityImport__default = /* @__PURE__ */ _interopDefaultCompat(sanityImport), padStart__default = /* @__PURE__ */ _interopDefaultCompat(padStart), chokidar__default = /* @__PURE__ */ _interopDefaultCompat(chokidar), execa__default = /* @__PURE__ */ _interopDefaultCompat(execa), json5__default = /* @__PURE__ */ _interopDefaultCompat(json5), isEqual__default = /* @__PURE__ */ _interopDefaultCompat(isEqual), isPlainObject__default = /* @__PURE__ */ _interopDefaultCompat(isPlainObject), noop__default = /* @__PURE__ */ _interopDefaultCompat(noop$1), pluralize__default = /* @__PURE__ */ _interopDefaultCompat(pluralize), tokenize__default = /* @__PURE__ */ _interopDefaultCompat(tokenize), open__default = /* @__PURE__ */ _interopDefaultCompat(open), groupBy__default = /* @__PURE__ */ _interopDefaultCompat(groupBy), deburr__default = /* @__PURE__ */ _interopDefaultCompat(deburr);
+function getTimer() {
+  const timings = {}, startTimes = {};
+  function start(name) {
+    if (typeof startTimes[name] < "u")
+      throw new Error(`Timer "${name}" already started, cannot overwrite`);
+    startTimes[name] = node_perf_hooks.performance.now();
+  }
+  function end(name) {
+    if (typeof startTimes[name] > "u")
+      throw new Error(`Timer "${name}" never started, cannot end`);
+    return timings[name] = node_perf_hooks.performance.now() - startTimes[name], timings[name];
+  }
+  return {
+    start,
+    end,
+    getTimings: () => timings
+  };
+}
+const MANIFEST_FILENAME = "create-manifest.json", SCHEMA_FILENAME_SUFFIX = ".create-schema.json", TOOLS_FILENAME_SUFFIX = ".create-tools.json", FEATURE_ENABLED_ENV_NAME$1 = "SANITY_CLI_EXTRACT_MANIFEST_ENABLED", EXTRACT_MANIFEST_ENABLED = process.env[FEATURE_ENABLED_ENV_NAME$1] !== "false", EXTRACT_MANIFEST_LOG_ERRORS = process.env.SANITY_CLI_EXTRACT_MANIFEST_LOG_ERRORS === "true", CREATE_TIMER = "create-manifest", EXTRACT_TASK_TIMEOUT_MS = dateFns.minutesToMilliseconds(2), EXTRACT_FAILURE_MESSAGE = `\u21B3 Couldn't extract manifest file. Sanity Create will not be available for the studio.
+Disable this message with ${FEATURE_ENABLED_ENV_NAME$1}=false`;
+async function extractManifestSafe(args, context) {
+  if (EXTRACT_MANIFEST_ENABLED)
+    try {
+      await extractManifest(args, context);
+      return;
+    } catch (err) {
+      return EXTRACT_MANIFEST_LOG_ERRORS && context.output.error(err), err;
+    }
+}
+async function extractManifest(args, context) {
+  const {
+    output,
+    workDir
+  } = context, flags = args.extOptions, defaultOutputDir = path.resolve(path.join(workDir, "dist")), outputDir = path.resolve(defaultOutputDir), defaultStaticPath = path.join(outputDir, "static"), staticPath = flags.path ?? defaultStaticPath, path$1 = path.join(staticPath, MANIFEST_FILENAME), rootPkgPath = readPkgUp__default.default.sync({
+    cwd: __dirname
+  })?.path;
+  if (!rootPkgPath)
+    throw new Error("Could not find root directory for `sanity` package");
+  const timer = getTimer();
+  timer.start(CREATE_TIMER);
+  const spinner = output.spinner({}).start("Extracting manifest");
+  try {
+    const workspaceManifests = await getWorkspaceManifests({
+      rootPkgPath,
+      workDir
+    });
+    await fs.mkdir(staticPath, {
+      recursive: !0
+    });
+    const workspaceFiles = await writeWorkspaceFiles(workspaceManifests, staticPath), manifest = {
+      /**
+       * Version history:
+       * 1: Initial release.
+       * 2: Added tools file.
+       */
+      version: 2,
+      createdAt: (/* @__PURE__ */ new Date()).toISOString(),
+      workspaces: workspaceFiles
+    };
+    await fs.writeFile(path$1, JSON.stringify(manifest, null, 2));
+    const manifestDuration = timer.end(CREATE_TIMER);
+    spinner.succeed(`Extracted manifest (${manifestDuration.toFixed()}ms)`);
+  } catch (err) {
+    throw spinner.fail(err.message), output.print(chalk__default.default.gray(EXTRACT_FAILURE_MESSAGE)), err;
+  }
+}
+async function getWorkspaceManifests({
+  rootPkgPath,
+  workDir
+}) {
+  const workerPath = path.join(path.dirname(rootPkgPath), "lib", "_internal", "cli", "threads", "extractManifest.js"), worker = new node_worker_threads.Worker(workerPath, {
+    workerData: {
+      workDir
+    },
+    // eslint-disable-next-line no-process-env
+    env: process.env
+  });
+  let timeout = !1;
+  const timeoutId = setTimeout(() => {
+    timeout = !0, worker.terminate();
+  }, EXTRACT_TASK_TIMEOUT_MS);
+  try {
+    return await new Promise((resolveWorkspaces, reject) => {
+      const buffer = [];
+      worker.addListener("message", (message) => buffer.push(message)), worker.addListener("exit", (exitCode) => {
+        exitCode === 0 ? resolveWorkspaces(buffer) : timeout && reject(new Error(`Extract manifest was aborted after ${EXTRACT_TASK_TIMEOUT_MS}ms`));
+      }), worker.addListener("error", reject);
+    });
+  } finally {
+    clearTimeout(timeoutId);
+  }
+}
+function writeWorkspaceFiles(manifestWorkspaces, staticPath) {
+  const output = manifestWorkspaces.reduce((workspaces, workspace) => [...workspaces, writeWorkspaceFile(workspace, staticPath)], []);
+  return Promise.all(output);
+}
+async function writeWorkspaceFile(workspace, staticPath) {
+  const [schemaFilename, toolsFilename] = await Promise.all([createFile(staticPath, workspace.schema, SCHEMA_FILENAME_SUFFIX), createFile(staticPath, workspace.tools, TOOLS_FILENAME_SUFFIX)]);
+  return {
+    ...workspace,
+    schema: schemaFilename,
+    tools: toolsFilename
+  };
+}
+const createFile = async (path$1, content, filenameSuffix) => {
+  const stringifiedContent = JSON.stringify(content, null, 2), filename = `${node_crypto.createHash("sha1").update(stringifiedContent).digest("hex").slice(0, 8)}${filenameSuffix}`;
+  return await fs.writeFile(path.join(path$1, filename), stringifiedContent), filename;
+};
+var extractManifestAction = /* @__PURE__ */ Object.freeze({
+  __proto__: null,
+  MANIFEST_FILENAME,
+  extractManifestSafe
+});
+const SANITY_WORKSPACE_SCHEMA_TYPE = "sanity.workspace.schema", printSchemaList = ({
+  schemas,
+  output
+}) => {
+  const ordered = sortBy__default.default(schemas.map(({
+    _createdAt: createdAt,
+    _id: id,
+    workspace
+  }) => [id, workspace.name, workspace.dataset, workspace.projectId, createdAt].map(String)), ["createdAt"]), headings = ["Id", "Workspace", "Dataset", "ProjectId", "CreatedAt"], rows = ordered.reverse(), maxWidths = rows.reduce((max, row) => row.map((current, index) => Math.max(size__default.default(current), max[index])), headings.map((str) => size__default.default(str))), printRow = (row) => row.map((col, i) => `${col}`.padEnd(maxWidths[i])).join(" ");
+  output.print(chalk__default.default.cyan(printRow(headings))), rows.forEach((row) => output.print(printRow(row)));
+};
+async function schemaListAction(args, context) {
+  if (!SCHEMA_STORE_ENABLED)
+    return;
+  const flags = args.extOptions;
+  if (typeof flags.id == "boolean") throw new Error("Schema ID is empty");
+  if (typeof flags["manifest-dir"] == "boolean") throw new Error("Manifest directory is empty");
+  const {
+    apiClient,
+    output
+  } = context, client = apiClient({
+    requireUser: !0,
+    requireProject: !0
+  }).withConfig({
+    apiVersion: "v2024-08-01"
+  }), projectId = client.config().projectId, dataset = client.config().dataset;
+  if (!projectId || !dataset) {
+    output.error("Project ID and dataset must be defined.");
+    return;
+  }
+  const manifestDir = flags["manifest-dir"], manifestPath = getManifestPath(context, manifestDir), manifest = await readManifest(manifestPath, context), schemas = (await Promise.allSettled(uniqBy__default.default(manifest.workspaces, "dataset").map(async (workspace) => (throwIfProjectIdMismatch(workspace, projectId), flags.id ? await client.withConfig({
+    dataset: workspace.dataset,
+    projectId: workspace.projectId
+  }).getDocument(flags.id) : await client.withConfig({
+    dataset: workspace.dataset,
+    projectId: workspace.projectId,
+    useCdn: !1
+  }).fetch("*[_type == $type]", {
+    type: SANITY_WORKSPACE_SCHEMA_TYPE
+  }))))).map((result, index) => {
+    if (result.status === "rejected") {
+      const workspace = manifest.workspaces[index];
+      return output.error(chalk__default.default.red(`Failed to fetch schemas for workspace '${workspace.name}': ${result.reason.message}`)), [];
+    }
+    return result.value;
+  }).flat();
+  if (schemas.length === 0) {
+    output.error("No schemas found");
+    return;
+  }
+  flags.json ? output.print(`${JSON.stringify(flags.id ? schemas[0] : schemas, null, 2)}`) : printSchemaList({
+    schemas,
+    output
+  });
+}
+var schemaListAction$1 = /* @__PURE__ */ Object.freeze({
+  __proto__: null,
+  SANITY_WORKSPACE_SCHEMA_TYPE,
+  default: schemaListAction
+});
+const FEATURE_ENABLED_ENV_NAME = "SANITY_CLI_SCHEMA_STORE_ENABLED", SCHEMA_STORE_ENABLED = process.env[FEATURE_ENABLED_ENV_NAME] === "true", getManifestPath = (context, customPath) => {
+  const defaultOutputDir = path.resolve(path.join(context.workDir, "dist")), outputDir = path.resolve(defaultOutputDir), defaultStaticPath = path.join(outputDir, "static"), staticPath = customPath ?? defaultStaticPath;
+  return path__default.default.resolve(process.cwd(), staticPath);
+}, readAndParseManifest = (manifestPath, context) => {
+  const content = fs$1.readFileSync(manifestPath, "utf-8"), lastModified = fs$1.statSync(manifestPath).mtime.toISOString();
+  return context.output.print(chalk__default.default.gray(`
+\u21B3 Read manifest from ${manifestPath} (last modified: ${lastModified})`)), JSON.parse(content);
+}, readManifest = async (readPath, context, spinner) => {
+  const manifestPath = `${readPath}/${MANIFEST_FILENAME}`;
+  try {
+    return readAndParseManifest(manifestPath, context);
+  } catch {
+    spinner.text = "Manifest not found, attempting to extract it...", await extractManifestSafe({
+      extOptions: {
+        path: readPath
+      },
+      groupOrCommand: "extract",
+      argv: [],
+      argsWithoutOptions: [],
+      extraArguments: []
+    }, context);
+    try {
+      return readAndParseManifest(manifestPath, context);
+    } catch (retryError) {
+      const errorMessage = `Failed to read manifest at ${manifestPath}`;
+      throw spinner?.fail(errorMessage), context.output.error(errorMessage), retryError;
+    }
+  }
+}, throwIfProjectIdMismatch = (workspace, projectId) => {
+  if (workspace.projectId !== projectId)
+    throw new Error(`\u21B3 No permissions to store schema for workspace ${workspace.name} with projectId: ${workspace.projectId}`);
+};
+async function storeSchemasAction(args, context) {
+  if (!SCHEMA_STORE_ENABLED)
+    return;
+  const flags = args.extOptions, schemaRequired = flags["schema-required"], workspaceName = flags.workspace, idPrefix = flags["id-prefix"], verbose = flags.verbose, manifestDir = flags["manifest-dir"];
+  if (typeof manifestDir == "boolean") throw new Error("Manifest directory is empty");
+  if (typeof idPrefix == "boolean") throw new Error("Id prefix is empty");
+  if (typeof workspaceName == "boolean") throw new Error("Workspace is empty");
+  const {
+    output,
+    apiClient
+  } = context, spinner = output.spinner({}).start("Storing schemas"), manifestPath = getManifestPath(context, manifestDir);
+  try {
+    const client = apiClient({
+      requireUser: !0,
+      requireProject: !0
+    }).withConfig({
+      apiVersion: "v2024-08-01"
+    }), projectId = client.config().projectId;
+    if (!projectId) throw new Error("Project ID is not defined");
+    const manifest = await readManifest(manifestPath, context, spinner);
+    let storedCount = 0, error;
+    const saveSchema = async (workspace) => {
+      const id = `${idPrefix ? `${idPrefix}.` : ""}${SANITY_WORKSPACE_SCHEMA_TYPE}.${workspace.name}`;
+      try {
+        throwIfProjectIdMismatch(workspace, projectId);
+        const schema = JSON.parse(fs$1.readFileSync(`${manifestPath}/${workspace.schema}`, "utf-8"));
+        await client.withConfig({
+          dataset: workspace.dataset,
+          projectId: workspace.projectId
+        }).transaction().createOrReplace({
+          _type: SANITY_WORKSPACE_SCHEMA_TYPE,
+          _id: id,
+          workspace,
+          schema
+        }).commit(), storedCount++, spinner.text = `Stored ${storedCount} schemas so far...`, verbose && spinner.succeed(`Schema stored for workspace '${workspace.name}'`);
+      } catch (err) {
+        if (error = err, spinner.fail(`Error storing schema for workspace '${workspace.name}':
+${chalk__default.default.red(`${err.message}`)}`), schemaRequired) throw err;
+      } finally {
+        verbose && output.print(chalk__default.default.gray(`\u21B3 schemaId: ${id}, projectId: ${projectId}, dataset: ${workspace.dataset}`));
+      }
+    };
+    if (workspaceName) {
+      const workspaceToSave = manifest.workspaces.find((workspace) => workspace.name === workspaceName);
+      if (!workspaceToSave)
+        throw spinner.fail(`Workspace ${workspaceName} not found in manifest`), new Error(`Workspace ${workspaceName} not found in manifest: projectID: ${projectId}`);
+      await saveSchema(workspaceToSave), spinner.succeed("Stored 1 schemas");
+    } else
+      await Promise.all(manifest.workspaces.map(async (workspace) => {
+        await saveSchema(workspace);
+      })), spinner.succeed(`Stored ${storedCount}/${manifest.workspaces.length} schemas`);
+    if (error) throw error;
+    return;
+  } catch (err) {
+    if (schemaRequired) throw err;
+    return err;
+  } finally {
+    output.print(`${chalk__default.default.gray("\u21B3 List stored schemas with:")} ${chalk__default.default.cyan("sanity schema list")}`);
+  }
+}
+var storeSchemasAction$1 = /* @__PURE__ */ Object.freeze({
+  __proto__: null,
+  SCHEMA_STORE_ENABLED,
+  default: storeSchemasAction,
+  getManifestPath,
+  readManifest,
+  throwIfProjectIdMismatch
+});
 const appGroup = {
   name: "app",
   signature: "[COMMAND]",
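Most of this first hunk is code moved rather than written: the former `timing.js`, `extractManifestAction.js`, and `storeSchemasAction.js` chunks (deleted at the bottom of the file list) are inlined into `_internal.js`. As a minimal sketch of how the inlined timer is used by `extractManifest` above (names come from the hunk; nothing here is public API):

```js
// Sketch only: mirrors the getTimer usage shown in the hunk above.
const timer = getTimer();
timer.start("create-manifest"); // throws if the same timer name is started twice
// ... extract and write the manifest ...
const elapsedMs = timer.end("create-manifest"); // returns elapsed milliseconds
console.log(`Extracted manifest (${elapsedMs.toFixed()}ms)`);
```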
@@ -57,6 +330,7 @@ Options
   --source-maps Enable source maps for built bundles (increases size of bundle)
   --no-minify Skip minifying built JavaScript (speeds up build, increases size of bundle)
   --no-build Don't build the application prior to deploy, instead deploying the version currently in \`dist/\`
+  ${SCHEMA_STORE_ENABLED ? "--verbose Enable verbose logging for the schema store" : ""}
   -y, --yes Unattended mode, answers "yes" to any "yes/no" prompt and otherwise uses defaults

 Examples
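The new `--verbose` line in the deploy help text only renders when the schema store is enabled. Both feature gates defined in the first hunk follow the same environment-variable pattern, one opt-out and one opt-in:

```js
// Gates exactly as defined in the first hunk; both are read once at module load.
const EXTRACT_MANIFEST_ENABLED = process.env.SANITY_CLI_EXTRACT_MANIFEST_ENABLED !== "false"; // on unless explicitly disabled
const SCHEMA_STORE_ENABLED = process.env.SANITY_CLI_SCHEMA_STORE_ENABLED === "true";          // off unless explicitly enabled
```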
@@ -138,9 +412,9 @@ Examples
   action: async (args, context) => {
     const {
       output,
-      chalk,
+      chalk: chalk2,
       prompt
-    } = context, previewAction = await getPreviewAction$2(), error = (msg) => output.warn(chalk.red.bgBlack(msg));
+    } = context, previewAction = await getPreviewAction$2(), error = (msg) => output.warn(chalk2.red.bgBlack(msg));
     try {
       await previewAction(args, context);
     } catch (err) {
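From here on, most hunks are a single mechanical rename: the chunk now requires `chalk` at module scope (see the first hunk's require list), so the `chalk` destructured from the CLI context is deconflicted to `chalk2` by the bundler. A sketch of the collision being avoided (illustration only, not code from the package):

```js
var chalk = require("chalk"); // new module-scope binding in _internal.js

function action(args, context) {
  // The context's chalk is renamed so it no longer shadows the require above:
  const { output, chalk: chalk2 } = context;
  const error = (msg) => output.warn(chalk2.red.bgBlack(msg)); // behavior unchanged
  return error;
}
```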
@@ -248,7 +522,7 @@ Examples
   action: async (args, context) => {
     const {
       output,
-      chalk
+      chalk: chalk2
     } = context, [dataset] = args.argsWithoutOptions, {
       projectId,
       datasetName,
@@ -265,13 +539,13 @@ Examples
       body: {
         enabled: !1
       }
-    }), output.print(`${chalk.green(`Disabled daily backups for dataset ${datasetName}
+    }), output.print(`${chalk2.green(`Disabled daily backups for dataset ${datasetName}
 `)}`);
     } catch (error) {
       const {
         message
       } = parseApiErr(error);
-      output.print(`${chalk.red(`Disabling dataset backup failed: ${message}`)}
+      output.print(`${chalk2.red(`Disabling dataset backup failed: ${message}`)}
 `);
     }
   }
@@ -280,7 +554,7 @@ var debug$1 = require("debug")("sanity:backup");
 const archiver = require("archiver");
 function archiveDir(tmpOutDir, outFilePath, progressCb) {
   return new Promise((resolve, reject) => {
-    const archiveDestination = fs.createWriteStream(outFilePath);
+    const archiveDestination = fs$1.createWriteStream(outFilePath);
     archiveDestination.on("error", (err) => {
       reject(err);
     }), archiveDestination.on("close", () => {
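The `fs` churn follows from the first hunk's require list as well: `fs` is now bound to `node:fs/promises` and `fs$1` to `node:fs`, so stream and sync helpers move to `fs$1` while promise-based calls stay on `fs`. A sketch under those bindings:

```js
// Bindings as in the first hunk of this diff.
var fs$1 = require("node:fs"), fs = require("node:fs/promises");

async function sketch(outFilePath, manifestPath, manifest) {
  const stream = fs$1.createWriteStream(outFilePath);                  // callback/stream API -> node:fs
  await fs.writeFile(manifestPath, JSON.stringify(manifest, null, 2)); // promise API -> node:fs/promises
  return stream;
}
```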
@@ -369,7 +643,7 @@ async function downloadAsset(url2, fileName, fileType, outDir) {
       },
       stream: !0
     });
-    debug$1("Received asset %s with status code %d", normalizedFileName, response?.statusCode), await promises.pipeline(response.body, fs.createWriteStream(assetFilePath));
+    debug$1("Received asset %s with status code %d", normalizedFileName, response?.statusCode), await promises.pipeline(response.body, fs$1.createWriteStream(assetFilePath));
   });
 }
 function getAssetFilePath(fileName, fileType, outDir) {
@@ -488,7 +762,7 @@ const downloadBackupCommand = {
   action: async (args, context) => {
     const {
       output,
-      chalk
+      chalk: chalk2
     } = context, [client, opts] = await prepareBackupOptions(context, args), {
       projectId,
       datasetName,
@@ -501,14 +775,14 @@ const downloadBackupCommand = {
       return;
     }
     const outFilePath = path__default.default.join(outDir, outFileName);
-    output.print("\u256D\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256E"), output.print("\u2502                                                           \u2502"), output.print("\u2502 Downloading backup for:                                   \u2502"), output.print(`\u2502 ${chalk.bold("projectId")}: ${chalk.cyan(projectId).padEnd(56)} \u2502`), output.print(`\u2502 ${chalk.bold("dataset")}: ${chalk.cyan(datasetName).padEnd(58)} \u2502`), output.print(`\u2502 ${chalk.bold("backupId")}: ${chalk.cyan(backupId).padEnd(56)} \u2502`), output.print("\u2502                                                           \u2502"), output.print("\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256F"), output.print(""), output.print(`Downloading backup to "${chalk.cyan(outFilePath)}"`);
-    const start = Date.now(), progressSpinner = newProgress(output, "Setting up backup environment..."), tmpOutDir = await fs…
+    output.print("\u256D\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256E"), output.print("\u2502                                                           \u2502"), output.print("\u2502 Downloading backup for:                                   \u2502"), output.print(`\u2502 ${chalk2.bold("projectId")}: ${chalk2.cyan(projectId).padEnd(56)} \u2502`), output.print(`\u2502 ${chalk2.bold("dataset")}: ${chalk2.cyan(datasetName).padEnd(58)} \u2502`), output.print(`\u2502 ${chalk2.bold("backupId")}: ${chalk2.cyan(backupId).padEnd(56)} \u2502`), output.print("\u2502                                                           \u2502"), output.print("\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256F"), output.print(""), output.print(`Downloading backup to "${chalk2.cyan(outFilePath)}"`);
+    const start = Date.now(), progressSpinner = newProgress(output, "Setting up backup environment..."), tmpOutDir = await fs.mkdtemp(path__default.default.join(os.tmpdir(), "sanity-backup-"));
     for (const dir of [outDir, path__default.default.join(tmpOutDir, "images"), path__default.default.join(tmpOutDir, "files")])
-      fs.mkdirSync(dir, {
+      fs$1.mkdirSync(dir, {
         recursive: !0
       });
     debug("Writing to temporary directory %s", tmpOutDir);
-    const tmpOutDocumentsFile = path__default.default.join(tmpOutDir, "data.ndjson"), docOutStream = fs.createWriteStream(tmpOutDocumentsFile), docWriteMutex = new asyncMutex.Mutex();
+    const tmpOutDocumentsFile = path__default.default.join(tmpOutDir, "data.ndjson"), docOutStream = fs$1.createWriteStream(tmpOutDocumentsFile), docWriteMutex = new asyncMutex.Mutex();
     try {
       const backupFileStream = new PaginatedGetBackupStream(client, opts.projectId, opts.datasetName, opts.backupId, opts.token), files = [];
       let i = 0;
@@ -563,7 +837,7 @@ const downloadBackupCommand = {
       throw progressSpinner.fail(), new Error(`Archiving backup failed: ${err.message}`);
     }
     progressSpinner.set({
-      step: `Cleaning up temporary files at ${chalk.cyan(`${tmpOutDir}`)}`
+      step: `Cleaning up temporary files at ${chalk2.cyan(`${tmpOutDir}`)}`
     }), await cleanupTmpDir(tmpOutDir), progressSpinner.set({
       step: `Backup download complete [${prettyMs__default.default(Date.now() - start)}]`
     }), progressSpinner.succeed();
@@ -596,7 +870,7 @@ async function prepareBackupOptions(context, args) {
     default: path__default.default.join(workDir, defaultOutFileName),
     filter: fs$2.absolutify
   }))();
-  return isPathDirName(out) && (out = path__default.default.join(out, defaultOutFileName)), !flags.overwrite && fs.existsSync(out) && (await prompt.single({
+  return isPathDirName(out) && (out = path__default.default.join(out, defaultOutFileName)), !flags.overwrite && fs$1.existsSync(out) && (await prompt.single({
     type: "confirm",
     message: `File "${out}" already exists, would you like to overwrite it?`,
     default: !1
@@ -623,7 +897,7 @@ Examples
   action: async (args, context) => {
     const {
       output,
-      chalk
+      chalk: chalk2
     } = context, [dataset] = args.argsWithoutOptions, {
       projectId,
       datasetName,
@@ -640,15 +914,15 @@ Examples
       body: {
         enabled: !0
       }
-    }), output.print(`${chalk.green(`Enabled backups for dataset ${datasetName}.
+    }), output.print(`${chalk2.green(`Enabled backups for dataset ${datasetName}.
 Please note that it may take up to 24 hours before the first backup is created.
-`)}`), output.print(`${chalk.bold(`Retention policies may apply depending on your plan and agreement.
+`)}`), output.print(`${chalk2.bold(`Retention policies may apply depending on your plan and agreement.
 `)}`);
     } catch (error) {
       const {
         message
       } = parseApiErr(error);
-      output.print(`${chalk.red(`Enabling dataset backup failed: ${message}`)}
+      output.print(`${chalk2.red(`Enabling dataset backup failed: ${message}`)}
 `);
     }
   }
@@ -684,7 +958,7 @@ const listDatasetBackupCommand = {
   action: async (args, context) => {
     const {
       output,
-      chalk
+      chalk: chalk2
     } = context, flags = await parseCliFlags$6(args), [dataset] = args.argsWithoutOptions, {
       projectId,
       datasetName,
@@ -722,7 +996,7 @@ const listDatasetBackupCommand = {
       const {
         message
       } = parseApiErr(error);
-      output.error(`${chalk.red(`List dataset backup failed: ${message}`)}
+      output.error(`${chalk2.red(`List dataset backup failed: ${message}`)}
 `);
     }
     if (response && response.backups) {
@@ -818,19 +1092,19 @@ function promptForCredentials(hasWildcard, context) {
   const {
     prompt,
     output,
-    chalk
+    chalk: chalk2
   } = context;
   return output.print(""), hasWildcard ? output.print(oneline__default.default`
-${chalk.yellow(`${logSymbols__default.default.warning} Warning:`)}
-We ${chalk.red(chalk.underline("HIGHLY"))} recommend NOT allowing credentials
+${chalk2.yellow(`${logSymbols__default.default.warning} Warning:`)}
+We ${chalk2.red(chalk2.underline("HIGHLY"))} recommend NOT allowing credentials
 on origins containing wildcards. If you are logged in to a studio, people will
-be able to send requests ${chalk.underline("on your behalf")} to read and modify
+be able to send requests ${chalk2.underline("on your behalf")} to read and modify
 data, from any matching origin. Please tread carefully!
 `) : output.print(oneline__default.default`
-${chalk.yellow(`${logSymbols__default.default.warning} Warning:`)}
+${chalk2.yellow(`${logSymbols__default.default.warning} Warning:`)}
 Should this origin be allowed to send requests using authentication tokens or
 session cookies? Be aware that any script on this origin will be able to send
-requests ${chalk.underline("on your behalf")} to read and modify data if you
+requests ${chalk2.underline("on your behalf")} to read and modify data if you
 are logged in to a Sanity studio. If this origin hosts a studio, you will need
 this, otherwise you should probably answer "No" (n).
 `), output.print(""), prompt.single({
@@ -845,13 +1119,13 @@ function promptForWildcardConfirmation(origin, context) {
   const {
     prompt,
    output,
-    chalk
+    chalk: chalk2
   } = context;
-  return output.print(""), output.print(chalk.yellow(`${logSymbols__default.default.warning} Warning: Examples of allowed origins:`)), origin === "*" ? (output.print("- http://www.some-malicious.site"), output.print("- https://not.what-you-were-expecting.com"), output.print("- https://high-traffic-site.com"), output.print("- http://192.168.1.1:8080")) : (output.print(`- ${origin.replace(/:\*/, ":1234").replace(/\*/g, "foo")}`), output.print(`- ${origin.replace(/:\*/, ":3030").replace(/\*/g, "foo.bar")}`)), output.print(""), prompt.single({
+  return output.print(""), output.print(chalk2.yellow(`${logSymbols__default.default.warning} Warning: Examples of allowed origins:`)), origin === "*" ? (output.print("- http://www.some-malicious.site"), output.print("- https://not.what-you-were-expecting.com"), output.print("- https://high-traffic-site.com"), output.print("- http://192.168.1.1:8080")) : (output.print(`- ${origin.replace(/:\*/, ":1234").replace(/\*/g, "foo")}`), output.print(`- ${origin.replace(/:\*/, ":3030").replace(/\*/g, "foo.bar")}`)), output.print(""), prompt.single({
     type: "confirm",
     message: oneline__default.default`
-Using wildcards can be ${chalk.red("risky")}.
-Are you ${chalk.underline("absolutely sure")} you want to allow this origin?`,
+Using wildcards can be ${chalk2.red("risky")}.
+Are you ${chalk2.underline("absolutely sure")} you want to allow this origin?`,
     default: !1
   });
 }
@@ -1244,7 +1518,7 @@ async function listDatasetCopyJobs(flags, context) {
   const {
     apiClient,
     output,
-    chalk
+    chalk: chalk2
   } = context, client = apiClient(), projectId = client.config().projectId, query = {};
   let response;
   flags.offset && flags.offset >= 0 && (query.offset = `${flags.offset}`), flags.limit && flags.limit > 0 && (query.limit = `${flags.limit}`);
@@ -1255,9 +1529,9 @@ async function listDatasetCopyJobs(flags, context) {
       query
     });
   } catch (error) {
-    error.statusCode ? output.error(`${chalk.red(`Dataset copy list failed:
+    error.statusCode ? output.error(`${chalk2.red(`Dataset copy list failed:
 ${error.response.body.message}`)}
-`) : output.error(`${chalk.red(`Dataset copy list failed:
+`) : output.error(`${chalk2.red(`Dataset copy list failed:
 ${error.message}`)}
 `);
   }
@@ -1420,7 +1694,7 @@ const progress = (url2) => new rxjs.Observable((observer) => {
     apiClient,
     output,
     prompt,
-    chalk
+    chalk: chalk2
   } = context, flags = await parseCliFlags$3(args), client = apiClient();
   if (flags.list) {
     await listDatasetCopyJobs(flags, context);
@@ -1458,13 +1732,13 @@ const progress = (url2) => new rxjs.Observable((observer) => {
         skipHistory: shouldSkipHistory
       }
     });
-    if (output.print(`Copying dataset ${chalk.green(sourceDatasetName)} to ${chalk.green(targetDatasetName)}...`), shouldSkipHistory || output.print("Note: You can run this command with flag '--skip-history'. The flag will reduce copy time in larger datasets."), output.print(`Job ${chalk.green(response.jobId)} started`), flags.detach)
+    if (output.print(`Copying dataset ${chalk2.green(sourceDatasetName)} to ${chalk2.green(targetDatasetName)}...`), shouldSkipHistory || output.print("Note: You can run this command with flag '--skip-history'. The flag will reduce copy time in larger datasets."), output.print(`Job ${chalk2.green(response.jobId)} started`), flags.detach)
       return;
-    await followProgress(response.jobId, client, output), output.print(`Job ${chalk.green(response.jobId)} completed`);
+    await followProgress(response.jobId, client, output), output.print(`Job ${chalk2.green(response.jobId)} completed`);
   } catch (error) {
-    error.statusCode ? output.print(`${chalk.red(`Dataset copying failed:
+    error.statusCode ? output.print(`${chalk2.red(`Dataset copying failed:
 ${error.response.body.message}`)}
-`) : output.print(`${chalk.red(`Dataset copying failed:
+`) : output.print(`${chalk2.red(`Dataset copying failed:
 ${error.message}`)}
 `);
   }
@@ -1646,7 +1920,7 @@ const exportDatasetCommand = {
     const {
       apiClient,
      output,
-      chalk,
+      chalk: chalk2,
      workDir,
      prompt
     } = context, client = apiClient(), [targetDataset, targetDestination] = args.argsWithoutOptions, flags = parseFlags$1(args.extOptions);
@@ -1662,7 +1936,7 @@ const exportDatasetCommand = {
     const {
       projectId
     } = client.config();
-    output.print("\u256D\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256E"), output.print("\u2502                                               \u2502"), output.print("\u2502 Exporting from:                               \u2502"), output.print(`\u2502 ${chalk.bold("projectId")}: ${chalk.cyan(projectId).padEnd(44)} \u2502`), output.print(`\u2502 ${chalk.bold("dataset")}: ${chalk.cyan(dataset).padEnd(46)} \u2502`), output.print("\u2502                                               \u2502"), output.print("\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256F"), output.print("");
+    output.print("\u256D\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256E"), output.print("\u2502                                               \u2502"), output.print("\u2502 Exporting from:                               \u2502"), output.print(`\u2502 ${chalk2.bold("projectId")}: ${chalk2.cyan(projectId).padEnd(44)} \u2502`), output.print(`\u2502 ${chalk2.bold("dataset")}: ${chalk2.cyan(dataset).padEnd(46)} \u2502`), output.print("\u2502                                               \u2502"), output.print("\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256F"), output.print("");
     let destinationPath = targetDestination;
     destinationPath || (destinationPath = await prompt.single({
       type: "input",
@@ -1675,7 +1949,7 @@ const exportDatasetCommand = {
       output.print("Cancelled");
       return;
     }
-    outputPath !== "-" && output.print(`Exporting dataset "${chalk.cyan(dataset)}" to "${chalk.cyan(outputPath)}"`);
+    outputPath !== "-" && output.print(`Exporting dataset "${chalk2.cyan(dataset)}" to "${chalk2.cyan(outputPath)}"`);
     let currentStep = "Exporting documents...", spinner = output.spinner(currentStep).start();
     const onProgress = (progress2) => {
       progress2.step !== currentStep ? (spinner.succeed(), spinner = output.spinner(progress2.step).start()) : progress2.step === currentStep && progress2.update && (spinner.text = `${progress2.step} (${progress2.current}/${progress2.total})`), currentStep = progress2.step;
@@ -1768,7 +2042,7 @@ const importDatasetCommand = {
     const {
       apiClient,
       output,
-      chalk,
+      chalk: chalk2,
       fromInitCommand
     } = context, flags = parseFlags(args.extOptions), {
       allowAssetsInDifferentDataset,
@@ -1779,7 +2053,7 @@ const importDatasetCommand = {
       replaceAssets
     } = flags, operation = getMutationOperation(args.extOptions), client = apiClient(), [file, target] = args.argsWithoutOptions;
     if (!file)
-      throw new Error(`Source file name and target dataset must be specified ("sanity dataset import ${chalk.bold("[file]")} [dataset]")`);
+      throw new Error(`Source file name and target dataset must be specified ("sanity dataset import ${chalk2.bold("[file]")} [dataset]")`);
     const targetDataset = await determineTargetDataset(target, context);
     debug$2(`Target dataset has been set to "${targetDataset}"`);
     const isUrl = /^https?:\/\//i.test(file);
@@ -1790,7 +2064,7 @@ const importDatasetCommand = {
       const sourceFile = path__default.default.resolve(process.cwd(), file), fileStats = await fs__default$1.default.stat(sourceFile).catch(() => null);
       if (!fileStats)
         throw new Error(`${sourceFile} does not exist or is not readable`);
-      sourceIsFolder = fileStats.isDirectory(), sourceIsFolder ? inputStream = sourceFile : (assetsBase = path__default.default.dirname(sourceFile), inputStream = await fs.createReadStream(sourceFile));
+      sourceIsFolder = fileStats.isDirectory(), sourceIsFolder ? inputStream = sourceFile : (assetsBase = path__default.default.dirname(sourceFile), inputStream = await fs$1.createReadStream(sourceFile));
     }
     const importClient = client.clone().config({
       dataset: targetDataset
@@ -1798,7 +2072,7 @@ const importDatasetCommand = {
       projectId,
       dataset
     } = importClient.config();
-    output.print("\u256D\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256E"), output.print("\u2502                                               \u2502"), output.print("\u2502 Importing to:                                 \u2502"), output.print(`\u2502 ${chalk.bold("projectId")}: ${chalk.cyan(projectId).padEnd(44)} \u2502`), output.print(`\u2502 ${chalk.bold("dataset")}: ${chalk.cyan(dataset).padEnd(46)} \u2502`), output.print("\u2502                                               \u2502"), output.print("\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256F"), output.print("");
+    output.print("\u256D\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256E"), output.print("\u2502                                               \u2502"), output.print("\u2502 Importing to:                                 \u2502"), output.print(`\u2502 ${chalk2.bold("projectId")}: ${chalk2.cyan(projectId).padEnd(44)} \u2502`), output.print(`\u2502 ${chalk2.bold("dataset")}: ${chalk2.cyan(dataset).padEnd(46)} \u2502`), output.print("\u2502                                               \u2502"), output.print("\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256F"), output.print("");
     let currentStep, currentProgress, stepStart, spinInterval = null, percent;
     function onProgress(opts) {
       const lengthComputable = opts.total, sameStep = opts.step == currentStep;
@@ -2165,7 +2439,7 @@ Example
     const {
       apiClient,
       output,
-      chalk
+      chalk: chalk2
     } = context, {
       dataset
     } = args.extOptions, ids = args.argsWithoutOptions.map((str) => `${str}`);
@@ -2178,7 +2452,7 @@ Example
       const {
         results
       } = await transaction.commit(), deleted = results.filter((res) => res.operation === "delete").map((res) => res.id), notFound = ids.filter((id) => !deleted.includes(id));
-      deleted.length > 0 && output.print(`Deleted ${deleted.length} ${pluralize__default.default("document", deleted.length)}`), notFound.length > 0 && output.error(chalk.red(`${pluralize__default.default("Document", notFound.length)} not found: ${notFound.join(", ")}`));
+      deleted.length > 0 && output.print(`Deleted ${deleted.length} ${pluralize__default.default("document", deleted.length)}`), notFound.length > 0 && output.error(chalk2.red(`${pluralize__default.default("Document", notFound.length)} not found: ${notFound.join(", ")}`));
     } catch (err) {
       throw new Error(`Failed to delete ${pluralize__default.default("document", ids.length)}:
 ${err.message}`);
@@ -2190,13 +2464,13 @@ ${err.message}`);
   isGroupRoot: !0,
   description: "Manages documents in your Sanity Content Lake datasets"
 }, identity = (inp) => inp;
-function colorizeJson(input, chalk) {
+function colorizeJson(input, chalk2) {
   const formatters = {
-    punctuator: chalk.white,
-    key: chalk.white,
-    string: chalk.green,
-    number: chalk.yellow,
-    literal: chalk.bold,
+    punctuator: chalk2.white,
+    key: chalk2.white,
+    string: chalk2.green,
+    number: chalk2.yellow,
+    literal: chalk2.bold,
     whitespace: identity
   }, json = JSON.stringify(input, null, 2);
   return tokenize__default.default(json).map((token, i, arr) => {
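`colorizeJson` keeps its json-lexer tokenizing approach and only swaps the injected chalk instance. A hypothetical call, with the document and the required chalk instance made up for illustration:

```js
// Hypothetical usage of the colorizeJson shown above.
const chalk = require("chalk");
const colored = colorizeJson({ _id: "movie_1", title: "Alien", year: 1979, draft: false }, chalk);
console.log(colored); // strings green, numbers yellow, true/false/null bold, keys/punctuation white
```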
@@ -2230,7 +2504,7 @@ Examples
     const {
       apiClient,
       output,
-      chalk
+      chalk: chalk2
     } = context, {
       pretty,
       dataset
@@ -2244,7 +2518,7 @@ Examples
       const doc = await client.getDocument(docId);
       if (!doc)
         throw new Error(`Document ${docId} not found`);
-      output.print(pretty ? colorizeJson(doc, chalk) : JSON.stringify(doc, null, 2));
+      output.print(pretty ? colorizeJson(doc, chalk2) : JSON.stringify(doc, null, 2));
     } catch (err) {
       throw new Error(`Failed to fetch document:
 ${err.message}`);
@@ -2290,12 +2564,12 @@ var queryDocumentsCommand = {
     } = await parseCliFlags$1(args), {
       apiClient,
       output,
-      chalk,
+      chalk: chalk2,
       cliConfig
     } = context, [query] = args.argsWithoutOptions;
     if (!query)
       throw new Error("Query must be specified");
-    apiVersion || output.warn(chalk.yellow(`--api-version not specified, using \`${defaultApiVersion}\``));
+    apiVersion || output.warn(chalk2.yellow(`--api-version not specified, using \`${defaultApiVersion}\``));
     const requireDataset = !dataset, requireProject = !project, requireUser = !anonymous;
     if (requireProject && !cliConfig?.api?.projectId)
       throw new Error("No project configured in CLI config - either configure one, or use `--project` flag");
@@ -2316,7 +2590,7 @@ var queryDocumentsCommand = {
       const docs = await client.fetch(query);
       if (!docs)
         throw new Error("Query returned no results");
-      output.print(pretty ? colorizeJson(docs, chalk) : JSON.stringify(docs, null, 2));
+      output.print(pretty ? colorizeJson(docs, chalk2) : JSON.stringify(docs, null, 2));
     } catch (err) {
       throw new Error(`Failed to run query:
 ${err.message}`);
@@ -2680,19 +2954,19 @@ function printMessage(message, context, options) {
     detailed
   } = options, {
     output,
-    chalk
+    chalk: chalk2
   } = context;
   output.print(`Date: ${message.createdAt}`), output.print(`Status: ${message.status}`), output.print(`Result code: ${message.resultCode}`), message.failureCount > 0 && output.print(`Failures: ${message.failureCount}`), detailed && (output.print("Payload:"), output.print(node_util.inspect(JSON.parse(message.payload), {
     colors: !0
   }))), detailed && message.attempts && (output.print("Attempts:"), message.attempts.forEach((attempt) => {
     const prefix = ` [${attempt.createdAt.replace(/\.\d+Z$/, "Z")}]`;
     if (attempt.inProgress)
-      output.print(`${prefix} ${chalk.yellow("Pending")}`);
+      output.print(`${prefix} ${chalk2.yellow("Pending")}`);
     else if (attempt.isFailure) {
       const failure = formatFailure(attempt, {
         includeHelp: !0
       });
-      output.print(`${prefix} ${chalk.yellow(`Failure: ${failure}`)}`);
+      output.print(`${prefix} ${chalk2.yellow(`Failure: ${failure}`)}`);
     } else
       output.print(`${prefix} Success: HTTP ${attempt.resultCode} (${attempt.duration}ms)`);
   })), output.print("");
@@ -2743,10 +3017,10 @@ Examples
   helpText: helpText$c,
   action: async (args, context) => {
     const {
-      extractManifestSafe
+      extractManifestSafe: extractManifestSafe2
     } = await Promise.resolve().then(function() {
-      return …
-    }), extractError = await …
+      return extractManifestAction;
+    }), extractError = await extractManifestSafe2(args, context);
     if (extractError)
       throw extractError;
     return extractError;
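This hunk rewires the `schema extract` command's lazy load: with `extractManifestAction.js` no longer shipped as a separate chunk (see the deleted files in the list above), the thenable now resolves the frozen namespace object defined earlier in `_internal.js`. The shape of the pattern, as it appears in .16:

```js
// Inside the command's async action, as in the hunk above:
const { extractManifestSafe: extractManifestSafe2 } = await Promise.resolve().then(function () {
  return extractManifestAction; // frozen namespace inlined earlier in this chunk
});
const extractError = await extractManifestSafe2(args, context);
if (extractError) throw extractError; // extractManifestSafe returns (rather than throws) its error
```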
@@ -2949,7 +3223,7 @@ Examples:
|
|
2949
3223
|
output,
|
2950
3224
|
prompt,
|
2951
3225
|
workDir,
|
2952
|
-
chalk
|
3226
|
+
chalk: chalk2
|
2953
3227
|
} = context;
|
2954
3228
|
let [title] = args.argsWithoutOptions;
|
2955
3229
|
for (; !title?.trim(); )
|
@@ -2957,7 +3231,7 @@ Examples:
|
|
2957
3231
|
type: "input",
|
2958
3232
|
suffix: ' (e.g. "Rename field from location to address")',
|
2959
3233
|
message: "Title of migration"
|
2960
|
-
}), title.trim() || output.error(
|
3234
|
+
}), title.trim() || output.error(chalk2.red("Name cannot be empty"));
|
2961
3235
|
const types2 = await prompt.single({
|
2962
3236
|
type: "input",
|
2963
3237
|
suffix: " (optional)",
|
@@ -2970,22 +3244,22 @@ Examples:
|
|
2970
3244
|
value: definedTemplate.name
|
2971
3245
|
}))
|
2972
3246
|
}), sluggedName = deburr__default.default(title.toLowerCase()).replace(/\s+/g, "-").replace(/[^a-z0-9-]/g, ""), destDir = path__default.default.join(workDir, MIGRATIONS_DIRECTORY, sluggedName);
|
2973
|
-
if (fs.existsSync(destDir) && !await prompt.single({
|
3247
|
+
if (fs$1.existsSync(destDir) && !await prompt.single({
|
2974
3248
|
type: "confirm",
|
2975
|
-
message: `Migration directory ${
|
3249
|
+
message: `Migration directory ${chalk2.cyan(destDir)} already exists. Overwrite?`,
|
2976
3250
|
default: !1
|
2977
3251
|
}))
|
2978
3252
|
return;
|
2979
|
-
fs.mkdirSync(destDir, {
|
3253
|
+
fs$1.mkdirSync(destDir, {
|
2980
3254
|
recursive: !0
|
2981
3255
|
});
|
2982
3256
|
const renderedTemplate = (templatesByName[template].template || minimalSimple)({
|
2983
3257
|
migrationName: title,
|
2984
3258
|
documentTypes: types2.split(",").map((t) => t.trim()).filter(Boolean)
|
2985
3259
|
}), definitionFile = path__default.default.join(destDir, "index.ts");
|
2986
|
-
await fs
|
2987
|
-
\`${
|
2988
|
-
\`${
|
3260
|
+
await fs.writeFile(definitionFile, renderedTemplate), output.print(), output.print(`${chalk2.green("\u2713")} Migration created!`), output.print(), output.print("Next steps:"), output.print(`Open ${chalk2.bold(definitionFile)} in your code editor and write the code for your migration.`), output.print(`Dry run the migration with:
|
3261
|
+
\`${chalk2.bold(`sanity migration run ${sluggedName} --project=<projectId> --dataset <dataset> `)}\``), output.print(`Run the migration against a dataset with:
|
3262
|
+
\`${chalk2.bold(`sanity migration run ${sluggedName} --project=<projectId> --dataset <dataset> --no-dry-run`)}\``), output.print(), output.print(`\u{1F449} Learn more about schema and content migrations at ${chalk2.bold("https://www.sanity.io/docs/schema-and-content-migrations")}`);
|
2989
3263
|
}
|
2990
3264
|
};
|
2991
3265
|
function resolveMigrationScript(workDir, migrationName) {
|
@@ -3021,13 +3295,13 @@ const helpText$a = "", listMigrationCommand = {
|
|
3021
3295
|
const {
|
3022
3296
|
workDir,
|
3023
3297
|
output,
|
3024
|
-
chalk
|
3298
|
+
chalk: chalk2
|
3025
3299
|
} = context;
|
3026
3300
|
try {
|
3027
3301
|
const migrations = await resolveMigrations(workDir);
|
3028
3302
|
if (migrations.length === 0) {
|
3029
3303
|
output.print("No migrations found in migrations folder of the project"), output.print(`
|
3030
|
-
Run ${
|
3304
|
+
Run ${chalk2.green("`sanity migration create <NAME>`")} to create a new migration`);
|
3031
3305
|
return;
|
3032
3306
|
}
|
3033
3307
|
const table = new consoleTablePrinter.Table({
|
@@ -3051,7 +3325,7 @@ Run ${chalk.green("`sanity migration create <NAME>`")} to create a new migration
|
|
3051
3325
|
} catch (error) {
|
3052
3326
|
if (error.code === "ENOENT") {
|
3053
3327
|
output.print("No migrations folder found in the project"), output.print(`
|
3054
|
-
Run ${
|
3328
|
+
Run ${chalk2.green("`sanity migration create <NAME>`")} to create a new migration`);
|
3055
3329
|
return;
|
3056
3330
|
}
|
3057
3331
|
throw new Error(`An error occurred while listing migrations: ${error.message}`);
|
@@ -3066,7 +3340,7 @@ async function resolveMigrations(workDir) {
|
|
3066
3340
|
"dynamic-import": !0
|
3067
3341
|
}
|
3068
3342
|
}).unregister;
|
3069
|
-
const migrationsDir = path__default.default.join(workDir, MIGRATIONS_DIRECTORY), migrationEntries = await fs
|
3343
|
+
const migrationsDir = path__default.default.join(workDir, MIGRATIONS_DIRECTORY), migrationEntries = await fs.readdir(migrationsDir, {
|
3070
3344
|
withFileTypes: !0
|
3071
3345
|
}), migrations = [];
|
3072
3346
|
for (const entry of migrationEntries) {
|
@@ -3152,20 +3426,20 @@ function convertToTree(nodes) {
|
|
3152
3426
|
}
|
3153
3427
|
const isTty = node_tty.isatty(1);
|
3154
3428
|
function prettyFormat({
|
3155
|
-
chalk,
|
3429
|
+
chalk: chalk2,
|
3156
3430
|
subject,
|
3157
3431
|
migration,
|
3158
3432
|
indentSize = 0
|
3159
3433
|
}) {
|
3160
|
-
return (Array.isArray(subject) ? subject : [subject]).map((subjectEntry) => subjectEntry.type === "transaction" ? [[badge("transaction", "info",
|
3161
|
-
chalk,
|
3434
|
+
return (Array.isArray(subject) ? subject : [subject]).map((subjectEntry) => subjectEntry.type === "transaction" ? [[badge("transaction", "info", chalk2), typeof subjectEntry.id > "u" ? null : chalk2.underline(subjectEntry.id)].filter(Boolean).join(" "), indent(prettyFormat({
|
3435
|
+
chalk: chalk2,
|
3162
3436
|
subject: subjectEntry.mutations,
|
3163
3437
|
migration,
|
3164
3438
|
indentSize
|
3165
3439
|
}))].join(`
|
3166
3440
|
|
3167
3441
|
`) : prettyFormatMutation({
|
3168
|
-
chalk,
|
3442
|
+
chalk: chalk2,
|
3169
3443
|
subject: subjectEntry,
|
3170
3444
|
migration,
|
3171
3445
|
indentSize
|
@@ -3176,16 +3450,16 @@ function prettyFormat({
|
|
3176
3450
|
function encodeItemRef(ref) {
|
3177
3451
|
return typeof ref == "number" ? ref : ref._key;
|
3178
3452
|
}
|
3179
|
-
function badgeStyle(
|
3453
|
+
function badgeStyle(chalk2, variant) {
|
3180
3454
|
return {
|
3181
|
-
info:
|
3182
|
-
incremental:
|
3183
|
-
maybeDestructive:
|
3184
|
-
destructive:
|
3455
|
+
info: chalk2.bgWhite.black,
|
3456
|
+
incremental: chalk2.bgGreen.black.bold,
|
3457
|
+
maybeDestructive: chalk2.bgYellow.black.bold,
|
3458
|
+
destructive: chalk2.bgRed.black.bold
|
3185
3459
|
}[variant];
|
3186
3460
|
}
|
3187
|
-
function badge(label, variant,
|
3188
|
-
return isTty ? badgeStyle(
|
3461
|
+
function badge(label, variant, chalk2) {
|
3462
|
+
return isTty ? badgeStyle(chalk2, variant)(` ${label} `) : `[${label}]`;
|
3189
3463
|
}
|
3190
3464
|
const mutationImpact = {
|
3191
3465
|
create: "incremental",
|
@@ -3203,17 +3477,17 @@ function documentId(mutation) {
|
|
3203
3477
|
const listFormatter = new Intl.ListFormat("en-US", {
|
3204
3478
|
type: "disjunction"
|
3205
3479
|
});
|
3206
|
-
function mutationHeader(
|
3207
|
-
const mutationType = badge(mutation.type, mutationImpact[mutation.type],
|
3208
|
-
return [mutationType, documentType,
|
3480
|
+
function mutationHeader(chalk2, mutation, migration) {
|
3481
|
+
const mutationType = badge(mutation.type, mutationImpact[mutation.type], chalk2), documentType = "document" in mutation || migration.documentTypes ? badge("document" in mutation ? mutation.document._type : listFormatter.format(migration.documentTypes ?? []), "info", chalk2) : null;
|
3482
|
+
return [mutationType, documentType, chalk2.underline(documentId(mutation))].filter(Boolean).join(" ");
|
3209
3483
|
}
|
3210
3484
|
function prettyFormatMutation({
|
3211
|
-
chalk,
|
3485
|
+
chalk: chalk2,
|
3212
3486
|
subject,
|
3213
3487
|
migration,
|
3214
3488
|
indentSize = 0
|
3215
3489
|
}) {
|
3216
|
-
const lock = "options" in subject ?
|
3490
|
+
const lock = "options" in subject ? chalk2.cyan(`(if revision==${subject.options?.ifRevision})`) : "", header = [mutationHeader(chalk2, subject, migration), lock].join(" "), padding = " ".repeat(indentSize);
|
3217
3491
|
if (subject.type === "create" || subject.type === "createIfNotExists" || subject.type === "createOrReplace")
|
3218
3492
|
return [header, `
|
3219
3493
|
`, indent(JSON.stringify(subject.document, null, 2), indentSize)].join("");
|
@@ -3224,31 +3498,31 @@ function prettyFormatMutation({
     node: tree.children,
     paddingLength,
     indent: padding,
-    getMessage: (patch) => formatPatchMutation(chalk, patch)
+    getMessage: (patch) => formatPatchMutation(chalk2, patch)
   })].join("");
   }
   return header;
 }
-function formatPatchMutation(chalk, patch) {
+function formatPatchMutation(chalk2, patch) {
   const {
     op
-  } = patch, formattedType = chalk.bold(op.type);
+  } = patch, formattedType = chalk2.bold(op.type);
   if (op.type === "unset")
-    return `${chalk.red(formattedType)}()`;
+    return `${chalk2.red(formattedType)}()`;
   if (op.type === "diffMatchPatch")
-    return `${chalk.yellow(formattedType)}(${op.value})`;
+    return `${chalk2.yellow(formattedType)}(${op.value})`;
   if (op.type === "inc" || op.type === "dec")
-    return `${chalk.yellow(formattedType)}(${op.amount})`;
+    return `${chalk2.yellow(formattedType)}(${op.amount})`;
   if (op.type === "set")
-    return `${chalk.yellow(formattedType)}(${JSON.stringify(op.value)})`;
+    return `${chalk2.yellow(formattedType)}(${JSON.stringify(op.value)})`;
   if (op.type === "setIfMissing")
-    return `${chalk.green(formattedType)}(${JSON.stringify(op.value)})`;
+    return `${chalk2.green(formattedType)}(${JSON.stringify(op.value)})`;
   if (op.type === "insert")
-    return `${chalk.green(formattedType)}(${op.position}, ${encodeItemRef(op.referenceItem)}, ${JSON.stringify(op.items)})`;
+    return `${chalk2.green(formattedType)}(${op.position}, ${encodeItemRef(op.referenceItem)}, ${JSON.stringify(op.items)})`;
   if (op.type === "replace")
-    return `${chalk.yellow(formattedType)}(${encodeItemRef(op.referenceItem)}, ${JSON.stringify(op.items)})`;
+    return `${chalk2.yellow(formattedType)}(${encodeItemRef(op.referenceItem)}, ${JSON.stringify(op.items)})`;
   if (op.type === "truncate")
-    return `${chalk.red(formattedType)}(${op.startIndex}, ${op.endIndex})`;
+    return `${chalk2.red(formattedType)}(${op.startIndex}, ${op.endIndex})`;
   throw new Error(`Invalid operation type: ${op.type}`);
 }
 function indent(subject, size2 = 2) {
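
Note: formatPatchMutation is a flat dispatch on op.type — destructive ops (unset, truncate) render red, value mutations yellow, additive ops (setIfMissing, insert) green. A trimmed, runnable sketch of the same dispatch covering two op types, with the chalk instance passed in as in the hunk above:

  const chalk = require("chalk");

  function formatPatch(c, patch) {
    const {op} = patch, formattedType = c.bold(op.type);
    if (op.type === "unset") return `${c.red(formattedType)}()`;
    if (op.type === "set") return `${c.yellow(formattedType)}(${JSON.stringify(op.value)})`;
    throw new Error(`Invalid operation type: ${op.type}`);
  }

  console.log(formatPatch(chalk, {op: {type: "set", value: {title: "Hello"}}}));
  // → set({"title":"Hello"}), with "set" in bold yellow on a TTY
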
@@ -3311,13 +3585,13 @@ const runMigrationCommand = {
       apiClient,
       output,
       prompt,
-      chalk,
+      chalk: chalk2,
       workDir
     } = context, [id] = args.argsWithoutOptions, migrationsDirectoryPath = path__default.default.join(workDir, MIGRATIONS_DIRECTORY), flags = await parseCliFlags(args), fromExport = flags.fromExport, dry = flags.dryRun, dataset = flags.dataset, project = flags.project;
     if (dataset && !project || project && !dataset)
       throw new Error("If either --dataset or --project is provided, both must be provided");
     if (!id) {
-      output.error(chalk.red("Error: Migration ID must be provided"));
+      output.error(chalk2.red("Error: Migration ID must be provided"));
       const migrations = await resolveMigrations(workDir), table = new consoleTablePrinter.Table({
         title: "Migrations found in project",
         columns: [{
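
Note: the guard in this hunk enforces that --dataset and --project are passed together. The same both-or-neither check as a standalone sketch (assertPairedFlags is a hypothetical name):

  function assertPairedFlags(dataset, project) {
    // Exactly one of the two flags set is the invalid state.
    if ((dataset && !project) || (project && !dataset)) {
      throw new Error("If either --dataset or --project is provided, both must be provided");
    }
  }
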
@@ -3346,12 +3620,12 @@ const runMigrationCommand = {
     });
     const candidates = resolveMigrationScript(workDir, id), resolvedScripts = candidates.filter(isLoadableMigrationScript);
     if (resolvedScripts.length > 1)
-      throw new Error(`Found multiple migrations for "${id}" in ${chalk.cyan(migrationsDirectoryPath)}:
+      throw new Error(`Found multiple migrations for "${id}" in ${chalk2.cyan(migrationsDirectoryPath)}:
 - ${candidates.map((candidate) => path__default.default.relative(migrationsDirectoryPath, candidate.absolutePath)).join(`
 - `)}`);
     const script = resolvedScripts[0];
     if (!script)
-      throw new Error(`No migration found for "${id}" in ${chalk.cyan(chalk.cyan(migrationsDirectoryPath))}. Make sure that the migration file exists and exports a valid migration as its default export.
+      throw new Error(`No migration found for "${id}" in ${chalk2.cyan(chalk2.cyan(migrationsDirectoryPath))}. Make sure that the migration file exists and exports a valid migration as its default export.
 
 Tried the following files:
 - ${candidates.map((candidate) => path__default.default.relative(migrationsDirectoryPath, candidate.absolutePath)).join(`
@@ -3387,9 +3661,9 @@ const runMigrationCommand = {
       return;
     }
     if (output.print(`
-${chalk.yellow(chalk.bold("Note: During migrations, your webhooks stay active."))}`), output.print(`To adjust them, launch the management interface with ${chalk.cyan("sanity manage")}, navigate to the API settings, and toggle the webhooks before and after the migration as needed.
+${chalk2.yellow(chalk2.bold("Note: During migrations, your webhooks stay active."))}`), output.print(`To adjust them, launch the management interface with ${chalk2.cyan("sanity manage")}, navigate to the API settings, and toggle the webhooks before and after the migration as needed.
 `), flags.confirm && !await prompt.single({
-      message: `This migration will run on the ${chalk.yellow(chalk.bold(apiConfig.dataset))} dataset in ${chalk.yellow(chalk.bold(apiConfig.projectId))} project. Are you sure?`,
+      message: `This migration will run on the ${chalk2.yellow(chalk2.bold(apiConfig.dataset))} dataset in ${chalk2.yellow(chalk2.bold(apiConfig.projectId))} project. Are you sure?`,
       type: "confirm"
     })) {
       debug$2("User aborted migration");
@@ -3410,30 +3684,30 @@ ${chalk.yellow(chalk.bold("Note: During migrations, your webhooks stay active.")
       if (progress2.done) {
         progressSpinner.text = `Migration "${id}" completed.
 
-Project id: ${chalk.bold(apiConfig.projectId)}
-Dataset: ${chalk.bold(apiConfig.dataset)}
+Project id: ${chalk2.bold(apiConfig.projectId)}
+Dataset: ${chalk2.bold(apiConfig.dataset)}
 
 ${progress2.documents} documents processed.
 ${progress2.mutations} mutations generated.
-${chalk.green(progress2.completedTransactions.length)} transactions committed.`, progressSpinner.stopAndPersist({
-          symbol: chalk.green("\u2714")
+${chalk2.green(progress2.completedTransactions.length)} transactions committed.`, progressSpinner.stopAndPersist({
+          symbol: chalk2.green("\u2714")
         });
         return;
       }
       [null, ...progress2.currentTransactions].forEach((transaction) => {
         progressSpinner.text = `Running migration "${id}" ${dry ? "in dry mode..." : "..."}
 
-Project id: ${chalk.bold(apiConfig.projectId)}
-Dataset: ${chalk.bold(apiConfig.dataset)}
-Document type: ${chalk.bold(migration.documentTypes?.join(","))}
+Project id: ${chalk2.bold(apiConfig.projectId)}
+Dataset: ${chalk2.bold(apiConfig.dataset)}
+Document type: ${chalk2.bold(migration.documentTypes?.join(","))}
 
 ${progress2.documents} documents processed\u2026
 ${progress2.mutations} mutations generated\u2026
-${chalk.blue(progress2.pending)} requests pending\u2026
-${chalk.green(progress2.completedTransactions.length)} transactions committed.
+${chalk2.blue(progress2.pending)} requests pending\u2026
+${chalk2.green(progress2.completedTransactions.length)} transactions committed.
 
 ${transaction && !progress2.done ? `\xBB ${prettyFormat({
-          chalk,
+          chalk: chalk2,
           subject: transaction,
           migration,
           indentSize: 2
@@ -3442,13 +3716,13 @@ ${chalk.yellow(chalk.bold("Note: During migrations, your webhooks stay active.")
       };
     }
     async function dryRunHandler() {
-      output.print(`Running migration "${id}" in dry mode`), fromExport && output.print(`Using export ${chalk.cyan(fromExport)}`), output.print(), output.print(`Project id: ${chalk.bold(apiConfig.projectId)}`), output.print(`Dataset: ${chalk.bold(apiConfig.dataset)}`);
+      output.print(`Running migration "${id}" in dry mode`), fromExport && output.print(`Using export ${chalk2.cyan(fromExport)}`), output.print(), output.print(`Project id: ${chalk2.bold(apiConfig.projectId)}`), output.print(`Dataset: ${chalk2.bold(apiConfig.dataset)}`);
       for await (const mutation of migrate.dryRun({
         api: apiConfig,
         exportPath: fromExport
       }, migration))
         mutation && (output.print(), output.print(prettyFormat({
-          chalk,
+          chalk: chalk2,
           subject: mutation,
           migration
         })));
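
Note: dryRunHandler (unchanged here apart from the chalk2 rename) consumes migrate.dryRun as an async iterable of mutations and prints each one without committing anything. A sketch of that consumption pattern; printDryRun is a hypothetical name, and apiConfig/migration/output stand in for the values in scope above:

  const migrate = require("@sanity/migrate");

  async function printDryRun(apiConfig, migration, output) {
    // exportPath is the optional pre-downloaded export (fromExport above); undefined when not given.
    for await (const mutation of migrate.dryRun({api: apiConfig, exportPath: undefined}, migration)) {
      if (mutation) output.print(JSON.stringify(mutation, null, 2));
    }
  }
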
@@ -3484,7 +3758,7 @@ const description$4 = "Delete schemas by their IDs.", helpText$7 = `
 Options
   --ids <schema_id_1,schema_id_2,...> comma-separated list of schema IDs to delete
   --dataset <dataset_name> delete schemas from a specific dataset
-  --
+  --manifest-dir <directory> directory containing your manifest file if it's not in the default location
 
 Examples
   # Delete single schema
@@ -3535,7 +3809,7 @@ const description$2 = "Lists all schemas in the current dataset.", helpText$5 =
 Options
   --json get schemas as json
   --id <schema_id> fetch a specific schema by its ID
-  --
+  --manifest-dir <directory> directory containing your manifest file if it's not in the default location
 
 Examples
   # Get full json schemas
@@ -3550,18 +3824,17 @@ Examples
   description: description$2,
   helpText: helpText$5,
   action: async (args, context) => (await Promise.resolve().then(function() {
-    return
-  }).then(function(n) {
-    return n.schemaListAction;
+    return schemaListAction$1;
   })).default(args, context)
-}, description$1 = "Store schemas into
+}, description$1 = "Store schemas into workspace datasets.", helpText$4 = `
 **Note**: This command is experimental and subject to change.
 
 Options:
-  --workspace
-  --
-  --id-prefix
-  --
+  --workspace <workspace_name> store schema for a specific workspace
+  --manifest-dir <directory> directory containing your manifest file if it's not in the default location
+  --id-prefix <prefix> add a prefix to the schema ID
+  --schema-required fail if schema file is not found
+  --verbose print detailed information during store
 
 Examples
   # if no options are provided all workspace schemas will be stored
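
Note: the rewritten helpText$4 above now documents all five flags. A representative invocation, assuming the command is exposed as `sanity schema store` (the exact subcommand spelling is not visible in this diff) and with illustrative values:

  npx sanity schema store --workspace default --manifest-dir ./dist/static --schema-required --verbose
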
@@ -3576,9 +3849,7 @@ Examples
   helpText: helpText$4,
   action: async (args, context) => {
     const mod = await Promise.resolve().then(function() {
-      return
-    }).then(function(n) {
-      return n.storeSchemasAction$1;
+      return storeSchemasAction$1;
     }), extendedArgs = {
       ...args,
       extOptions: {
@@ -3631,9 +3902,9 @@ Examples
   action: async (args, context) => {
     const {
       output,
-      chalk,
+      chalk: chalk2,
       prompt
-    } = context, previewAction = await getPreviewAction(), warn = (msg) => output.warn(chalk.yellow.bgBlack(msg)), error = (msg) => output.warn(chalk.red.bgBlack(msg));
+    } = context, previewAction = await getPreviewAction(), warn = (msg) => output.warn(chalk2.yellow.bgBlack(msg)), error = (msg) => output.warn(chalk2.red.bgBlack(msg));
     warn("\u256D\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256E"), warn("\u2502 \u2502"), warn("\u2502 You're running Sanity Studio v3. In this version the \u2502"), warn("\u2502 [start] command is used to preview static builds. |"), warn("\u2502 \u2502"), warn("\u2502 To run a development server, use the [npm run dev] or |"), warn("\u2502 [npx sanity dev] command instead. For more information, \u2502"), warn("\u2502 see https://www.sanity.io/help/studio-v2-vs-v3 \u2502"), warn("\u2502 \u2502"), warn("\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256F"), warn("");
     try {
       await previewAction(args, context);
@@ -3751,7 +4022,7 @@ Examples
     const {
       apiClient,
       output,
-      chalk
+      chalk: chalk2
     } = context, {
       sort,
       order,
@@ -3791,9 +4062,9 @@ Examples
       date
     }) => [id, name, role, date]), [sortFields.indexOf(sort)]), rows = order === "asc" ? ordered : ordered.reverse(), maxWidths = rows.reduce((max, row) => row.map((current, index) => Math.max(size__default.default(current), max[index])), sortFields.map((str) => size__default.default(str))), printRow = (row) => {
       const isInvite = row[0] === "<pending>", textRow = row.map((col, i) => `${col}`.padEnd(maxWidths[i])).join("   ");
-      return isInvite ? chalk.dim(textRow) : textRow;
+      return isInvite ? chalk2.dim(textRow) : textRow;
     };
-    output.print(chalk.cyan(printRow(sortFields))), rows.forEach((row) => output.print(printRow(row)));
+    output.print(chalk2.cyan(printRow(sortFields))), rows.forEach((row) => output.print(printRow(row)));
   }
 };
 function getUserProps(user) {
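
Note: the listing code above computes, per column, the widest cell across the header row and all data rows, then pads each cell with padEnd. The same computation as a standalone sketch (lodash's size is what the bundle imports; the sample rows are illustrative):

  const size = require("lodash/size.js");

  const sortFields = ["id", "name", "role", "date"];
  const rows = [["abc123", "Ada Lovelace", "administrator", "2024-01-01"]];

  // Seed with header widths, then widen each column to fit every row's cell.
  const maxWidths = rows.reduce(
    (max, row) => row.map((current, index) => Math.max(size(`${current}`), max[index])),
    sortFields.map((str) => size(str))
  );

  const printRow = (row) => row.map((col, i) => `${col}`.padEnd(maxWidths[i])).join("   ");
  console.log(printRow(sortFields));
  rows.forEach((row) => console.log(printRow(row)));
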
@@ -3819,14 +4090,21 @@ const usersGroup = {
   signature: "[COMMAND]",
   isGroupRoot: !0,
   description: "Manages users of your Sanity project"
-},
+}, baseCommands = [appGroup, appDeployCommand, appDevCommand, appBuildCommand, appStartCommand, buildCommand, datasetGroup, deployCommand, undeployCommand, listDatasetsCommand, createDatasetCommand, datasetVisibilityCommand, exportDatasetCommand, importDatasetCommand, deleteDatasetCommand, copyDatasetCommand, aliasCommand, datasetBackupGroup, listDatasetBackupCommand, downloadBackupCommand, disableDatasetBackupCommand, enableDatasetBackupCommand, corsGroup, listCorsOriginsCommand, addCorsOriginCommand, deleteCorsOriginCommand, usersGroup, inviteUserCommand, listUsersCommand, hookGroup, listHooksCommand, createHookCommand, migrationGroup, createMigrationCommand, runMigrationCommand, listMigrationCommand, deleteHookCommand, listHookLogsCommand, printHookAttemptCommand, documentsGroup, getDocumentsCommand, queryDocumentsCommand, deleteDocumentsCommand, createDocumentsCommand, validateDocumentsCommand$1, graphqlGroup, listGraphQLAPIsCommand, deployGraphQLAPICommand, deleteGraphQLAPICommand, devCommand, startCommand, schemaGroup, validateDocumentsCommand, extractSchemaCommand, previewCommand, execCommand, manifestGroup, extractManifestCommand], internalSchemaCommands = [fetchSchemaCommand, storeSchemaCommand, deleteSchemaCommand], commands = [...baseCommands, ...SCHEMA_STORE_ENABLED ? internalSchemaCommands : []], cliProjectCommands = {
   requiredCliVersionRange: "^3.0.0",
   commands
 };
+exports.SCHEMA_STORE_ENABLED = SCHEMA_STORE_ENABLED;
 exports.cliProjectCommands = cliProjectCommands;
 exports.convertToTree = convertToTree;
 exports.debug = debug$2;
+exports.extractManifestSafe = extractManifestSafe;
 exports.formatTree = formatTree;
 exports.getClientUrl = getClientUrl;
+exports.getManifestPath = getManifestPath;
+exports.getTimer = getTimer;
 exports.maxKeyLength = maxKeyLength;
+exports.readManifest = readManifest;
+exports.storeSchemasAction = storeSchemasAction;
+exports.throwIfProjectIdMismatch = throwIfProjectIdMismatch;
 //# sourceMappingURL=_internal.js.map
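
Note: the final hunk gates the experimental schema-store commands behind SCHEMA_STORE_ENABLED and adds the new manifest/schema helpers to the public exports. The spread-gating pattern used there, as a standalone sketch with hypothetical command lists:

  const SCHEMA_STORE_ENABLED = false; // build-time flag, exported by the bundle
  const baseCommands = ["dev", "build", "deploy"]; // stand-ins for the real command objects
  const internalSchemaCommands = ["fetch-schema", "store-schema", "delete-schema"];

  // Spreading an empty array when the flag is off keeps `commands` a flat list either way.
  const commands = [...baseCommands, ...(SCHEMA_STORE_ENABLED ? internalSchemaCommands : [])];
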