@sanity/cli 6.0.0-alpha.17 → 6.0.0-alpha.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +833 -912
- package/dist/SanityHelp.js +74 -21
- package/dist/SanityHelp.js.map +1 -1
- package/dist/actions/build/buildApp.js +42 -15
- package/dist/actions/build/buildApp.js.map +1 -1
- package/dist/actions/build/buildStudio.js +21 -9
- package/dist/actions/build/buildStudio.js.map +1 -1
- package/dist/actions/build/buildVendorDependencies.js +3 -16
- package/dist/actions/build/buildVendorDependencies.js.map +1 -1
- package/dist/actions/build/checkStudioDependencyVersions.js +7 -7
- package/dist/actions/build/checkStudioDependencyVersions.js.map +1 -1
- package/dist/actions/build/createExternalFromImportMap.js +1 -1
- package/dist/actions/build/createExternalFromImportMap.js.map +1 -1
- package/dist/actions/build/determineBasePath.js +5 -2
- package/dist/actions/build/determineBasePath.js.map +1 -1
- package/dist/actions/build/handlePrereleaseVersions.js +44 -0
- package/dist/actions/build/handlePrereleaseVersions.js.map +1 -0
- package/dist/actions/build/types.js.map +1 -1
- package/dist/actions/dataset/create.js +7 -1
- package/dist/actions/dataset/create.js.map +1 -1
- package/dist/actions/dataset/resolveDataset.js +26 -0
- package/dist/actions/dataset/resolveDataset.js.map +1 -0
- package/dist/actions/deploy/deployApp.js +1 -8
- package/dist/actions/deploy/deployApp.js.map +1 -1
- package/dist/actions/deploy/deployStudio.js +1 -0
- package/dist/actions/deploy/deployStudio.js.map +1 -1
- package/dist/actions/dev/getDevServerConfig.js +5 -2
- package/dist/actions/dev/getDevServerConfig.js.map +1 -1
- package/dist/actions/dev/startStudioDevServer.js +8 -3
- package/dist/actions/dev/startStudioDevServer.js.map +1 -1
- package/dist/actions/documents/types.js.map +1 -1
- package/dist/actions/documents/validate.js +11 -2
- package/dist/actions/documents/validate.js.map +1 -1
- package/dist/actions/documents/validateDocuments.worker.js +2 -2
- package/dist/actions/documents/validateDocuments.worker.js.map +1 -1
- package/dist/actions/documents/validation/reporters/jsonReporter.js +1 -1
- package/dist/actions/documents/validation/reporters/jsonReporter.js.map +1 -1
- package/dist/actions/documents/validation/reporters/ndjsonReporter.js +1 -1
- package/dist/actions/documents/validation/reporters/ndjsonReporter.js.map +1 -1
- package/dist/actions/graphql/SchemaError.js +1 -1
- package/dist/actions/graphql/SchemaError.js.map +1 -1
- package/dist/actions/graphql/__tests__/fixtures/many-self-refs.js +540 -0
- package/dist/actions/graphql/__tests__/fixtures/many-self-refs.js.map +1 -0
- package/dist/actions/graphql/__tests__/fixtures/test-studio.js +1143 -0
- package/dist/actions/graphql/__tests__/fixtures/test-studio.js.map +1 -0
- package/dist/actions/graphql/__tests__/fixtures/union-refs.js +591 -0
- package/dist/actions/graphql/__tests__/fixtures/union-refs.js.map +1 -0
- package/dist/actions/graphql/__tests__/helpers.js +23 -0
- package/dist/actions/graphql/__tests__/helpers.js.map +1 -0
- package/dist/actions/graphql/extractFromSanitySchema.js +2 -1
- package/dist/actions/graphql/extractFromSanitySchema.js.map +1 -1
- package/dist/actions/graphql/gen1/generateTypeFilters.js +1 -1
- package/dist/actions/graphql/gen1/generateTypeFilters.js.map +1 -1
- package/dist/actions/graphql/gen1/generateTypeQueries.js +2 -1
- package/dist/actions/graphql/gen1/generateTypeQueries.js.map +1 -1
- package/dist/actions/graphql/gen2/generateTypeQueries.js +1 -1
- package/dist/actions/graphql/gen2/generateTypeQueries.js.map +1 -1
- package/dist/actions/graphql/gen3/generateTypeQueries.js +1 -1
- package/dist/actions/graphql/gen3/generateTypeQueries.js.map +1 -1
- package/dist/actions/graphql/getGraphQLAPIs.js +2 -10
- package/dist/actions/graphql/getGraphQLAPIs.js.map +1 -1
- package/dist/actions/graphql/getGraphQLAPIs.worker.js +1 -1
- package/dist/actions/graphql/getGraphQLAPIs.worker.js.map +1 -1
- package/dist/actions/graphql/types.js.map +1 -1
- package/dist/actions/init/bootstrapLocalTemplate.js +1 -1
- package/dist/actions/init/bootstrapLocalTemplate.js.map +1 -1
- package/dist/actions/manifest/extractAppManifest.js.map +1 -1
- package/dist/actions/manifest/extractManifest.js +1 -22
- package/dist/actions/manifest/extractManifest.js.map +1 -1
- package/dist/actions/manifest/types.js.map +1 -1
- package/dist/actions/schema/deploySchemas.js +57 -80
- package/dist/actions/schema/deploySchemas.js.map +1 -1
- package/dist/actions/schema/extractSanityWorkspace.worker.js +24 -0
- package/dist/actions/schema/extractSanityWorkspace.worker.js.map +1 -0
- package/dist/actions/schema/extractSchemaWatcher.js +1 -1
- package/dist/actions/schema/extractSchemaWatcher.js.map +1 -1
- package/dist/actions/schema/types.js +4 -0
- package/dist/actions/schema/types.js.map +1 -1
- package/dist/actions/schema/utils/schemaStoreValidation.js +1 -7
- package/dist/actions/schema/utils/schemaStoreValidation.js.map +1 -1
- package/dist/actions/schema/utils/uniqByProjectIdDataset.js +1 -1
- package/dist/actions/schema/utils/uniqByProjectIdDataset.js.map +1 -1
- package/dist/actions/schema/watchExtractSchema.js +2 -1
- package/dist/actions/schema/watchExtractSchema.js.map +1 -1
- package/dist/actions/versions/getFormatters.js +1 -1
- package/dist/actions/versions/getFormatters.js.map +1 -1
- package/dist/commands/backup/list.js +4 -1
- package/dist/commands/backup/list.js.map +1 -1
- package/dist/commands/dataset/copy.js +3 -1
- package/dist/commands/dataset/copy.js.map +1 -1
- package/dist/commands/dataset/create.js +12 -0
- package/dist/commands/dataset/create.js.map +1 -1
- package/dist/commands/dataset/embeddings/disable.js +62 -0
- package/dist/commands/dataset/embeddings/disable.js.map +1 -0
- package/dist/commands/dataset/embeddings/enable.js +128 -0
- package/dist/commands/dataset/embeddings/enable.js.map +1 -0
- package/dist/commands/dataset/embeddings/status.js +61 -0
- package/dist/commands/dataset/embeddings/status.js.map +1 -0
- package/dist/commands/debug.js +2 -1
- package/dist/commands/debug.js.map +1 -1
- package/dist/commands/documents/create.js +2 -1
- package/dist/commands/documents/create.js.map +1 -1
- package/dist/commands/graphql/deploy.js +1 -1
- package/dist/commands/graphql/deploy.js.map +1 -1
- package/dist/commands/hook/logs.js +1 -1
- package/dist/commands/hook/logs.js.map +1 -1
- package/dist/commands/init.js +13 -7
- package/dist/commands/init.js.map +1 -1
- package/dist/commands/manage.js +0 -1
- package/dist/commands/manage.js.map +1 -1
- package/dist/commands/media/create-aspect.js +1 -1
- package/dist/commands/media/create-aspect.js.map +1 -1
- package/dist/commands/projects/list.js +2 -1
- package/dist/commands/projects/list.js.map +1 -1
- package/dist/commands/schema/deploy.js +11 -27
- package/dist/commands/schema/deploy.js.map +1 -1
- package/dist/commands/users/list.js +1 -1
- package/dist/commands/users/list.js.map +1 -1
- package/dist/commands/versions.js +1 -1
- package/dist/commands/versions.js.map +1 -1
- package/dist/exports/index.d.ts +62 -2
- package/dist/exports/index.js.map +1 -1
- package/dist/prompts/selectMediaLibrary.js +1 -1
- package/dist/prompts/selectMediaLibrary.js.map +1 -1
- package/dist/services/datasets.js +7 -5
- package/dist/services/datasets.js.map +1 -1
- package/dist/services/embeddings.js +25 -0
- package/dist/services/embeddings.js.map +1 -0
- package/dist/services/graphql.js.map +1 -1
- package/dist/services/schemas.js +1 -1
- package/dist/services/schemas.js.map +1 -1
- package/dist/types.js.map +1 -1
- package/dist/util/compareDependencyVersions.js +28 -7
- package/dist/util/compareDependencyVersions.js.map +1 -1
- package/dist/util/errorMessages.js +0 -1
- package/dist/util/errorMessages.js.map +1 -1
- package/dist/util/packageManager/getPeerDependencies.js +44 -0
- package/dist/util/packageManager/getPeerDependencies.js.map +1 -0
- package/oclif.manifest.json +283 -149
- package/package.json +12 -13
- package/dist/actions/schema/schemaStoreTypes.js +0 -19
- package/dist/actions/schema/schemaStoreTypes.js.map +0 -1
- package/dist/actions/schema/utils/manifestExtractor.js +0 -29
- package/dist/actions/schema/utils/manifestExtractor.js.map +0 -1
- package/dist/actions/schema/utils/manifestReader.js +0 -71
- package/dist/actions/schema/utils/manifestReader.js.map +0 -1
- package/dist/util/workerChannels.js +0 -172
- package/dist/util/workerChannels.js.map +0 -1
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/actions/schema/types.ts"],"sourcesContent":["import {type SchemaValidationProblemGroup} from '@sanity/types'\nimport {z} from 'zod'\n\nexport const extractSchemaWorkerData = z.object({\n configPath: z.string(),\n enforceRequiredFields: z.boolean(),\n workDir: z.string(),\n workspaceName: z.string().optional(),\n})\n\nexport type ExtractSchemaWorkerData = z.infer<typeof extractSchemaWorkerData>\n\n/**\n * Contains debug information about the serialized schema.\n *\n * @internal\n **/\nexport type SerializedSchemaDebug = {\n hoisted: Record<string, SerializedTypeDebug>\n parent?: SerializedSchemaDebug\n size: number\n types: Record<string, SerializedTypeDebug>\n}\n\n/**\n * Contains debug information about a serialized type.\n *\n * @internal\n **/\nexport type SerializedTypeDebug = {\n extends: string\n fields?: Record<string, SerializedTypeDebug>\n of?: Record<string, SerializedTypeDebug>\n size: number\n}\n\n/** @internal */\nexport interface ExtractSchemaWorkerError {\n error: string\n type: 'error'\n\n validation?: SchemaValidationProblemGroup[]\n}\n\nexport const uniqWorkspaceWorkerDataSchema = z.object({\n configPath: z.string(),\n dataset: z.string().optional(),\n})\n\nexport type UniqWorkspaceWorkerData = z.infer<typeof uniqWorkspaceWorkerDataSchema>\n"],"names":["z","extractSchemaWorkerData","object","configPath","string","enforceRequiredFields","boolean","workDir","workspaceName","optional","uniqWorkspaceWorkerDataSchema","dataset"],"mappings":"AACA,SAAQA,CAAC,QAAO,MAAK;AAErB,OAAO,MAAMC,0BAA0BD,EAAEE,MAAM,CAAC;IAC9CC,YAAYH,EAAEI,MAAM;IACpBC,uBAAuBL,EAAEM,OAAO;IAChCC,SAASP,EAAEI,MAAM;IACjBI,eAAeR,EAAEI,MAAM,GAAGK,QAAQ;AACpC,GAAE;AAoCF,OAAO,MAAMC,gCAAgCV,EAAEE,MAAM,CAAC;IACpDC,YAAYH,EAAEI,MAAM;IACpBO,SAASX,EAAEI,MAAM,GAAGK,QAAQ;AAC9B,GAAE"}
|
|
1
|
+
{"version":3,"sources":["../../../src/actions/schema/types.ts"],"sourcesContent":["import {type SchemaValidationProblemGroup} from '@sanity/types'\nimport {z} from 'zod'\n\nexport const extractSchemaWorkerData = z.object({\n configPath: z.string(),\n enforceRequiredFields: z.boolean(),\n workDir: z.string(),\n workspaceName: z.string().optional(),\n})\n\nexport type ExtractSchemaWorkerData = z.infer<typeof extractSchemaWorkerData>\n\n/**\n * Contains debug information about the serialized schema.\n *\n * @internal\n **/\nexport type SerializedSchemaDebug = {\n hoisted: Record<string, SerializedTypeDebug>\n parent?: SerializedSchemaDebug\n size: number\n types: Record<string, SerializedTypeDebug>\n}\n\n/**\n * Contains debug information about a serialized type.\n *\n * @internal\n **/\nexport type SerializedTypeDebug = {\n extends: string\n fields?: Record<string, SerializedTypeDebug>\n of?: Record<string, SerializedTypeDebug>\n size: number\n}\n\n/** @internal */\nexport interface ExtractSchemaWorkerError {\n error: string\n type: 'error'\n\n validation?: SchemaValidationProblemGroup[]\n}\n\nexport const uniqWorkspaceWorkerDataSchema = z.object({\n configPath: z.string(),\n dataset: z.string().optional(),\n})\n\nexport type UniqWorkspaceWorkerData = z.infer<typeof uniqWorkspaceWorkerDataSchema>\n\nexport const extractWorkspaceWorkerData = z.object({\n configPath: z.string(),\n workDir: z.string(),\n})\n\nexport type ExtractWorkspaceWorkerData = z.infer<typeof 
extractWorkspaceWorkerData>\n"],"names":["z","extractSchemaWorkerData","object","configPath","string","enforceRequiredFields","boolean","workDir","workspaceName","optional","uniqWorkspaceWorkerDataSchema","dataset","extractWorkspaceWorkerData"],"mappings":"AACA,SAAQA,CAAC,QAAO,MAAK;AAErB,OAAO,MAAMC,0BAA0BD,EAAEE,MAAM,CAAC;IAC9CC,YAAYH,EAAEI,MAAM;IACpBC,uBAAuBL,EAAEM,OAAO;IAChCC,SAASP,EAAEI,MAAM;IACjBI,eAAeR,EAAEI,MAAM,GAAGK,QAAQ;AACpC,GAAE;AAoCF,OAAO,MAAMC,gCAAgCV,EAAEE,MAAM,CAAC;IACpDC,YAAYH,EAAEI,MAAM;IACpBO,SAASX,EAAEI,MAAM,GAAGK,QAAQ;AAC9B,GAAE;AAIF,OAAO,MAAMG,6BAA6BZ,EAAEE,MAAM,CAAC;IACjDC,YAAYH,EAAEI,MAAM;IACpBG,SAASP,EAAEI,MAAM;AACnB,GAAE"}
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { CLIError } from '@oclif/core/errors';
|
|
2
|
-
import
|
|
2
|
+
import uniqBy from 'lodash-es/uniqBy.js';
|
|
3
3
|
import { isDefined } from '../../manifest/schemaTypeHelpers.js';
|
|
4
4
|
import { SANITY_WORKSPACE_SCHEMA_ID_PREFIX } from '../../manifest/types.js';
|
|
5
5
|
const validForIdChars = 'a-zA-Z0-9._-';
|
|
@@ -11,12 +11,6 @@ const requiredInId = SANITY_WORKSPACE_SCHEMA_ID_PREFIX.replaceAll(/[.]/g, String
|
|
|
11
11
|
const idIdPatternString = String.raw`^${requiredInId}\.([${validForNamesChars}]+)`;
|
|
12
12
|
const baseIdPattern = new RegExp(`${idIdPatternString}$`);
|
|
13
13
|
const taggedIdIdPattern = new RegExp(String.raw`${idIdPatternString}\.tag\.([${validForNamesChars}]+)$`);
|
|
14
|
-
export class FlagValidationError extends Error {
|
|
15
|
-
constructor(message){
|
|
16
|
-
super(message);
|
|
17
|
-
this.name = 'FlagValidationError';
|
|
18
|
-
}
|
|
19
|
-
}
|
|
20
14
|
export function parseIds(ids) {
|
|
21
15
|
if (!ids) {
|
|
22
16
|
throw new CLIError('ids argument is empty');
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../../src/actions/schema/utils/schemaStoreValidation.ts"],"sourcesContent":["import {CLIError} from '@oclif/core/errors'\nimport
|
|
1
|
+
{"version":3,"sources":["../../../../src/actions/schema/utils/schemaStoreValidation.ts"],"sourcesContent":["import {CLIError} from '@oclif/core/errors'\nimport uniqBy from 'lodash-es/uniqBy.js'\n\nimport {isDefined} from '../../manifest/schemaTypeHelpers.js'\nimport {SANITY_WORKSPACE_SCHEMA_ID_PREFIX} from '../../manifest/types.js'\n\nconst validForIdChars = 'a-zA-Z0-9._-'\nconst validForIdPattern = new RegExp(`^[${validForIdChars}]+$`)\n\n//no periods allowed in workspaceName or tag in ids\nexport const validForNamesChars = 'a-zA-Z0-9_-'\nexport const validForNamesPattern = new RegExp(`^[${validForNamesChars}]+$`)\n\nconst requiredInId = SANITY_WORKSPACE_SCHEMA_ID_PREFIX.replaceAll(/[.]/g, String.raw`\\.`)\n\nconst idIdPatternString = String.raw`^${requiredInId}\\.([${validForNamesChars}]+)`\nconst baseIdPattern = new RegExp(`${idIdPatternString}$`)\nconst taggedIdIdPattern = new RegExp(\n String.raw`${idIdPatternString}\\.tag\\.([${validForNamesChars}]+)$`,\n)\n\nexport interface WorkspaceSchemaId {\n schemaId: string\n workspace: string\n}\n\nexport function parseIds(ids?: string): WorkspaceSchemaId[] {\n if (!ids) {\n throw new CLIError('ids argument is empty')\n }\n\n const errors: string[] = []\n\n const parsedIds = ids\n .split(',')\n .map((id) => id.trim())\n .filter((id) => !!id)\n .map((id) => parseWorkspaceSchemaId(errors, id))\n .filter((item) => isDefined(item))\n\n if (errors.length > 0) {\n throw new CLIError(`Invalid arguments:\\n${errors.map((error) => ` - ${error}`).join('\\n')}`)\n }\n\n if (parsedIds.length === 0) {\n throw new CLIError(`ids contains no valid id strings`)\n }\n\n const uniqueIds = uniqBy(parsedIds, 'schemaId' satisfies keyof (typeof parsedIds)[number])\n if (uniqueIds.length < parsedIds.length) {\n throw new CLIError(`ids contains duplicates`)\n }\n\n return uniqueIds\n}\n\nexport function parseWorkspaceSchemaId(errors: string[], id?: string) {\n if (id === undefined) {\n return\n }\n\n if (!id) {\n errors.push('id argument is 
empty')\n return\n }\n\n const trimmedId = id.trim()\n\n if (!validForIdPattern.test(trimmedId)) {\n errors.push(`id can only contain characters in [${validForIdChars}] but found: \"${trimmedId}\"`)\n return\n }\n\n if (trimmedId.startsWith('-')) {\n errors.push(`id cannot start with - (dash) but found: \"${trimmedId}\"`)\n return\n }\n\n if (/\\.\\./g.test(trimmedId)) {\n errors.push(`id cannot have consecutive . (period) characters, but found: \"${trimmedId}\"`)\n return\n }\n\n const [, workspace] = trimmedId.match(taggedIdIdPattern) ?? trimmedId.match(baseIdPattern) ?? []\n if (!workspace) {\n errors.push(\n [\n `id must either match ${SANITY_WORKSPACE_SCHEMA_ID_PREFIX}.<workspaceName> `,\n `or ${SANITY_WORKSPACE_SCHEMA_ID_PREFIX}.<workspaceName>.tag.<tag> but found: \"${trimmedId}\". `,\n `Note that workspace name characters not in [${validForNamesChars}] has to be replaced with _ for schema id.`,\n ].join(''),\n )\n return\n }\n return {\n schemaId: trimmedId,\n workspace,\n }\n}\n\n/**\n *\n * @param tag - The tag to parse\n * Throws an error if the tag is empty\n * Throws an error if the tag contains a period\n * Throws an error if the tag starts with a dash\n * Returns the parsed tag\n */\nexport async function parseTag(tag?: string) {\n if (tag === undefined) {\n return tag\n }\n\n if (!tag) {\n throw new CLIError('tag argument is empty')\n }\n\n if (tag.includes('.')) {\n throw new CLIError(`tag cannot contain . 
(period), but was: \"${tag}\"`)\n }\n\n if (!validForNamesPattern.test(tag)) {\n throw new CLIError(\n `tag can only contain characters in [${validForNamesChars}], but was: \"${tag}\"`,\n )\n }\n\n if (tag.startsWith('-')) {\n throw new CLIError(`tag cannot start with - (dash) but was: \"${tag}\"`)\n }\n\n return tag\n}\n\nexport const SCHEMA_PERMISSION_HELP_TEXT =\n 'For multi-project workspaces, set SANITY_AUTH_TOKEN environment variable to a token with access to the workspace projects.'\n"],"names":["CLIError","uniqBy","isDefined","SANITY_WORKSPACE_SCHEMA_ID_PREFIX","validForIdChars","validForIdPattern","RegExp","validForNamesChars","validForNamesPattern","requiredInId","replaceAll","String","raw","idIdPatternString","baseIdPattern","taggedIdIdPattern","parseIds","ids","errors","parsedIds","split","map","id","trim","filter","parseWorkspaceSchemaId","item","length","error","join","uniqueIds","undefined","push","trimmedId","test","startsWith","workspace","match","schemaId","parseTag","tag","includes","SCHEMA_PERMISSION_HELP_TEXT"],"mappings":"AAAA,SAAQA,QAAQ,QAAO,qBAAoB;AAC3C,OAAOC,YAAY,sBAAqB;AAExC,SAAQC,SAAS,QAAO,sCAAqC;AAC7D,SAAQC,iCAAiC,QAAO,0BAAyB;AAEzE,MAAMC,kBAAkB;AACxB,MAAMC,oBAAoB,IAAIC,OAAO,CAAC,EAAE,EAAEF,gBAAgB,GAAG,CAAC;AAE9D,mDAAmD;AACnD,OAAO,MAAMG,qBAAqB,cAAa;AAC/C,OAAO,MAAMC,uBAAuB,IAAIF,OAAO,CAAC,EAAE,EAAEC,mBAAmB,GAAG,CAAC,EAAC;AAE5E,MAAME,eAAeN,kCAAkCO,UAAU,CAAC,QAAQC,OAAOC,GAAG,CAAC,EAAE,CAAC;AAExF,MAAMC,oBAAoBF,OAAOC,GAAG,CAAC,CAAC,EAAEH,aAAa,IAAI,EAAEF,mBAAmB,GAAG,CAAC;AAClF,MAAMO,gBAAgB,IAAIR,OAAO,GAAGO,kBAAkB,CAAC,CAAC;AACxD,MAAME,oBAAoB,IAAIT,OAC5BK,OAAOC,GAAG,CAAC,EAAEC,kBAAkB,SAAS,EAAEN,mBAAmB,IAAI,CAAC;AAQpE,OAAO,SAASS,SAASC,GAAY;IACnC,IAAI,CAACA,KAAK;QACR,MAAM,IAAIjB,SAAS;IACrB;IAEA,MAAMkB,SAAmB,EAAE;IAE3B,MAAMC,YAAYF,IACfG,KAAK,CAAC,KACNC,GAAG,CAAC,CAACC,KAAOA,GAAGC,IAAI,IACnBC,MAAM,CAAC,CAACF,KAAO,CAAC,CAACA,IACjBD,GAAG,CAAC,CAACC,KAAOG,uBAAuBP,QAAQI,KAC3CE,MAAM,CAAC,CAACE,OAASxB,UAAUwB;IAE9B,IAAIR,OAAOS,MAAM,GAAG,GAAG;QACrB,MAAM,IAAI
3B,SAAS,CAAC,oBAAoB,EAAEkB,OAAOG,GAAG,CAAC,CAACO,QAAU,CAAC,IAAI,EAAEA,OAAO,EAAEC,IAAI,CAAC,OAAO;IAC9F;IAEA,IAAIV,UAAUQ,MAAM,KAAK,GAAG;QAC1B,MAAM,IAAI3B,SAAS,CAAC,gCAAgC,CAAC;IACvD;IAEA,MAAM8B,YAAY7B,OAAOkB,WAAW;IACpC,IAAIW,UAAUH,MAAM,GAAGR,UAAUQ,MAAM,EAAE;QACvC,MAAM,IAAI3B,SAAS,CAAC,uBAAuB,CAAC;IAC9C;IAEA,OAAO8B;AACT;AAEA,OAAO,SAASL,uBAAuBP,MAAgB,EAAEI,EAAW;IAClE,IAAIA,OAAOS,WAAW;QACpB;IACF;IAEA,IAAI,CAACT,IAAI;QACPJ,OAAOc,IAAI,CAAC;QACZ;IACF;IAEA,MAAMC,YAAYX,GAAGC,IAAI;IAEzB,IAAI,CAAClB,kBAAkB6B,IAAI,CAACD,YAAY;QACtCf,OAAOc,IAAI,CAAC,CAAC,mCAAmC,EAAE5B,gBAAgB,cAAc,EAAE6B,UAAU,CAAC,CAAC;QAC9F;IACF;IAEA,IAAIA,UAAUE,UAAU,CAAC,MAAM;QAC7BjB,OAAOc,IAAI,CAAC,CAAC,0CAA0C,EAAEC,UAAU,CAAC,CAAC;QACrE;IACF;IAEA,IAAI,QAAQC,IAAI,CAACD,YAAY;QAC3Bf,OAAOc,IAAI,CAAC,CAAC,8DAA8D,EAAEC,UAAU,CAAC,CAAC;QACzF;IACF;IAEA,MAAM,GAAGG,UAAU,GAAGH,UAAUI,KAAK,CAACtB,sBAAsBkB,UAAUI,KAAK,CAACvB,kBAAkB,EAAE;IAChG,IAAI,CAACsB,WAAW;QACdlB,OAAOc,IAAI,CACT;YACE,CAAC,qBAAqB,EAAE7B,kCAAkC,iBAAiB,CAAC;YAC5E,CAAC,GAAG,EAAEA,kCAAkC,uCAAuC,EAAE8B,UAAU,GAAG,CAAC;YAC/F,CAAC,4CAA4C,EAAE1B,mBAAmB,0CAA0C,CAAC;SAC9G,CAACsB,IAAI,CAAC;QAET;IACF;IACA,OAAO;QACLS,UAAUL;QACVG;IACF;AACF;AAEA;;;;;;;CAOC,GACD,OAAO,eAAeG,SAASC,GAAY;IACzC,IAAIA,QAAQT,WAAW;QACrB,OAAOS;IACT;IAEA,IAAI,CAACA,KAAK;QACR,MAAM,IAAIxC,SAAS;IACrB;IAEA,IAAIwC,IAAIC,QAAQ,CAAC,MAAM;QACrB,MAAM,IAAIzC,SAAS,CAAC,yCAAyC,EAAEwC,IAAI,CAAC,CAAC;IACvE;IAEA,IAAI,CAAChC,qBAAqB0B,IAAI,CAACM,MAAM;QACnC,MAAM,IAAIxC,SACR,CAAC,oCAAoC,EAAEO,mBAAmB,aAAa,EAAEiC,IAAI,CAAC,CAAC;IAEnF;IAEA,IAAIA,IAAIL,UAAU,CAAC,MAAM;QACvB,MAAM,IAAInC,SAAS,CAAC,yCAAyC,EAAEwC,IAAI,CAAC,CAAC;IACvE;IAEA,OAAOA;AACT;AAEA,OAAO,MAAME,8BACX,6HAA4H"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../../src/actions/schema/utils/uniqByProjectIdDataset.ts"],"sourcesContent":["import
|
|
1
|
+
{"version":3,"sources":["../../../../src/actions/schema/utils/uniqByProjectIdDataset.ts"],"sourcesContent":["import uniqBy from 'lodash-es/uniqBy.js'\nimport {type Workspace} from 'sanity'\n\nexport function uniqByProjectIdDataset(workspaces: Workspace[]) {\n return uniqBy<Workspace & {key: string}>(\n workspaces.map((w) => ({\n ...w,\n key: `${w.projectId}-${w.dataset}`,\n })),\n 'key',\n )\n}\n"],"names":["uniqBy","uniqByProjectIdDataset","workspaces","map","w","key","projectId","dataset"],"mappings":"AAAA,OAAOA,YAAY,sBAAqB;AAGxC,OAAO,SAASC,uBAAuBC,UAAuB;IAC5D,OAAOF,OACLE,WAAWC,GAAG,CAAC,CAACC,IAAO,CAAA;YACrB,GAAGA,CAAC;YACJC,KAAK,GAAGD,EAAEE,SAAS,CAAC,CAAC,EAAEF,EAAEG,OAAO,EAAE;QACpC,CAAA,IACA;AAEJ"}
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { getCliTelemetry } from '@sanity/cli-core';
|
|
2
|
-
import
|
|
2
|
+
import mean from 'lodash-es/mean.js';
|
|
3
|
+
import once from 'lodash-es/once.js';
|
|
3
4
|
import { SchemaExtractionWatchModeTrace } from '../../telemetry/extractSchema.telemetry.js';
|
|
4
5
|
import { DEFAULT_WATCH_PATTERNS, startExtractSchemaWatcher } from './extractSchemaWatcher.js';
|
|
5
6
|
export async function watchExtractSchema(options) {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/actions/schema/watchExtractSchema.ts"],"sourcesContent":["import {getCliTelemetry, type Output} from '@sanity/cli-core'\nimport
|
|
1
|
+
{"version":3,"sources":["../../../src/actions/schema/watchExtractSchema.ts"],"sourcesContent":["import {getCliTelemetry, type Output} from '@sanity/cli-core'\nimport mean from 'lodash-es/mean.js'\nimport once from 'lodash-es/once.js'\n\nimport {SchemaExtractionWatchModeTrace} from '../../telemetry/extractSchema.telemetry.js'\nimport {DEFAULT_WATCH_PATTERNS, startExtractSchemaWatcher} from './extractSchemaWatcher.js'\nimport {type ExtractOptions} from './getExtractOptions.js'\n\ninterface WatchExtractSchemaOptions {\n extractOptions: ExtractOptions\n output: Output\n}\n\nexport async function watchExtractSchema(\n options: WatchExtractSchemaOptions,\n): Promise<{close: () => Promise<void>}> {\n const {extractOptions, output} = options\n\n // Keep the start time + some simple stats for extractions as they happen\n const startTime = Date.now()\n const stats: {failedCount: number; successfulDurations: number[]} = {\n failedCount: 0,\n successfulDurations: [],\n }\n\n const watchPatterns = [...DEFAULT_WATCH_PATTERNS, ...extractOptions.watchPatterns]\n\n const trace = getCliTelemetry().trace(SchemaExtractionWatchModeTrace)\n trace.start()\n\n // Print watch mode header and patterns at the very beginning\n output.log('Schema extraction watch mode')\n output.log('')\n output.log('Watching for changes in:')\n for (const pattern of watchPatterns) {\n output.log(` - ${pattern}`)\n }\n output.log('')\n\n output.log('Running initial extraction...')\n\n // Start the watcher (includes initial extraction)\n const {close} = await startExtractSchemaWatcher({\n extractOptions,\n onExtraction: ({duration, success}) => {\n if (success) {\n stats.successfulDurations.push(duration)\n } else {\n stats.failedCount++\n }\n },\n output,\n watchPatterns,\n })\n\n trace.log({\n enforceRequiredFields: extractOptions.enforceRequiredFields,\n schemaFormat: extractOptions.format,\n step: 'started',\n })\n\n output.log('')\n output.log('Watching for changes... 
(Ctrl+C to stop)')\n\n /**\n * Cleanup function that logs telemetry and stops the watcher.\n * Wrapped in once() to prevent multiple calls.\n */\n const cleanup = once(async () => {\n trace.log({\n averageExtractionDuration: mean(stats.successfulDurations) || 0,\n extractionFailedCount: stats.failedCount,\n extractionSuccessfulCount: stats.successfulDurations.length,\n step: 'stopped',\n watcherDuration: Date.now() - startTime,\n })\n trace.complete()\n\n output.log('')\n output.log('Stopping watch mode...')\n await close()\n })\n\n // Return cleanup function for programmatic usage and testing\n // The CLI framework will handle SIGINT/SIGTERM\n return {close: cleanup}\n}\n"],"names":["getCliTelemetry","mean","once","SchemaExtractionWatchModeTrace","DEFAULT_WATCH_PATTERNS","startExtractSchemaWatcher","watchExtractSchema","options","extractOptions","output","startTime","Date","now","stats","failedCount","successfulDurations","watchPatterns","trace","start","log","pattern","close","onExtraction","duration","success","push","enforceRequiredFields","schemaFormat","format","step","cleanup","averageExtractionDuration","extractionFailedCount","extractionSuccessfulCount","length","watcherDuration","complete"],"mappings":"AAAA,SAAQA,eAAe,QAAoB,mBAAkB;AAC7D,OAAOC,UAAU,oBAAmB;AACpC,OAAOC,UAAU,oBAAmB;AAEpC,SAAQC,8BAA8B,QAAO,6CAA4C;AACzF,SAAQC,sBAAsB,EAAEC,yBAAyB,QAAO,4BAA2B;AAQ3F,OAAO,eAAeC,mBACpBC,OAAkC;IAElC,MAAM,EAACC,cAAc,EAAEC,MAAM,EAAC,GAAGF;IAEjC,yEAAyE;IACzE,MAAMG,YAAYC,KAAKC,GAAG;IAC1B,MAAMC,QAA8D;QAClEC,aAAa;QACbC,qBAAqB,EAAE;IACzB;IAEA,MAAMC,gBAAgB;WAAIZ;WAA2BI,eAAeQ,aAAa;KAAC;IAElF,MAAMC,QAAQjB,kBAAkBiB,KAAK,CAACd;IACtCc,MAAMC,KAAK;IAEX,6DAA6D;IAC7DT,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IACX,KAAK,MAAMC,WAAWJ,cAAe;QACnCP,OAAOU,GAAG,CAAC,CAAC,IAAI,EAAEC,SAAS;IAC7B;IACAX,OAAOU,GAAG,CAAC;IAEXV,OAAOU,GAAG,CAAC;IAEX,kDAAkD;IAClD,MAAM,EAACE,KAAK,EAAC,GAAG,MAAMhB,0BAA0B;QAC9CG;QACAc,cAAc,CAAC,EAACC,QAAQ,EAAEC,OAAO,EAAC;YAChC,IAAIA,SAAS;gBACXX,MAAME,mBA
AmB,CAACU,IAAI,CAACF;YACjC,OAAO;gBACLV,MAAMC,WAAW;YACnB;QACF;QACAL;QACAO;IACF;IAEAC,MAAME,GAAG,CAAC;QACRO,uBAAuBlB,eAAekB,qBAAqB;QAC3DC,cAAcnB,eAAeoB,MAAM;QACnCC,MAAM;IACR;IAEApB,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IAEX;;;GAGC,GACD,MAAMW,UAAU5B,KAAK;QACnBe,MAAME,GAAG,CAAC;YACRY,2BAA2B9B,KAAKY,MAAME,mBAAmB,KAAK;YAC9DiB,uBAAuBnB,MAAMC,WAAW;YACxCmB,2BAA2BpB,MAAME,mBAAmB,CAACmB,MAAM;YAC3DL,MAAM;YACNM,iBAAiBxB,KAAKC,GAAG,KAAKF;QAChC;QACAO,MAAMmB,QAAQ;QAEd3B,OAAOU,GAAG,CAAC;QACXV,OAAOU,GAAG,CAAC;QACX,MAAME;IACR;IAEA,6DAA6D;IAC7D,+CAA+C;IAC/C,OAAO;QAACA,OAAOS;IAAO;AACxB"}
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/actions/versions/getFormatters.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport
|
|
1
|
+
{"version":3,"sources":["../../../src/actions/versions/getFormatters.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport padEnd from 'lodash-es/padEnd.js'\n\nimport {type ModuleVersionResult} from './types.js'\n\n/**\n * Get the display name for a module.\n *\n * @internal\n */\nexport function getDisplayName(mod: ModuleVersionResult): string {\n return mod.isGlobal ? `${mod.name} (global)` : mod.name\n}\n\n/**\n * Get formatters for the package versions.\n *\n * @internal\n */\nexport function getFormatters(versions: ModuleVersionResult[]): {\n formatName: (name: string) => string\n nameLength: number\n versionLength: number\n} {\n let nameLength = 0\n let versionLength = 0\n\n for (const mod of versions) {\n const displayName = getDisplayName(mod)\n nameLength = Math.max(nameLength, displayName.length)\n versionLength = Math.max(versionLength, (mod.installed || '<missing>').length)\n }\n\n const formatName = (name: string): string =>\n padEnd(name, nameLength + 1)\n .replace(\n /^@sanity\\/(.*?)(\\s|$)/,\n `${styleText('yellow', '@sanity/')}${styleText('cyan', '$1')}$2`,\n )\n .replace(/^sanity(\\s|$)/, `${styleText('yellow', 'sanity')}$1`)\n\n return {formatName, nameLength, 
versionLength}\n}\n"],"names":["styleText","padEnd","getDisplayName","mod","isGlobal","name","getFormatters","versions","nameLength","versionLength","displayName","Math","max","length","installed","formatName","replace"],"mappings":"AAAA,SAAQA,SAAS,QAAO,YAAW;AAEnC,OAAOC,YAAY,sBAAqB;AAIxC;;;;CAIC,GACD,OAAO,SAASC,eAAeC,GAAwB;IACrD,OAAOA,IAAIC,QAAQ,GAAG,GAAGD,IAAIE,IAAI,CAAC,SAAS,CAAC,GAAGF,IAAIE,IAAI;AACzD;AAEA;;;;CAIC,GACD,OAAO,SAASC,cAAcC,QAA+B;IAK3D,IAAIC,aAAa;IACjB,IAAIC,gBAAgB;IAEpB,KAAK,MAAMN,OAAOI,SAAU;QAC1B,MAAMG,cAAcR,eAAeC;QACnCK,aAAaG,KAAKC,GAAG,CAACJ,YAAYE,YAAYG,MAAM;QACpDJ,gBAAgBE,KAAKC,GAAG,CAACH,eAAe,AAACN,CAAAA,IAAIW,SAAS,IAAI,WAAU,EAAGD,MAAM;IAC/E;IAEA,MAAME,aAAa,CAACV,OAClBJ,OAAOI,MAAMG,aAAa,GACvBQ,OAAO,CACN,yBACA,GAAGhB,UAAU,UAAU,cAAcA,UAAU,QAAQ,MAAM,EAAE,CAAC,EAEjEgB,OAAO,CAAC,iBAAiB,GAAGhB,UAAU,UAAU,UAAU,EAAE,CAAC;IAElE,OAAO;QAACe;QAAYP;QAAYC;IAAa;AAC/C"}
|
|
@@ -2,7 +2,10 @@ import { Args, Flags } from '@oclif/core';
|
|
|
2
2
|
import { SanityCommand, subdebug } from '@sanity/cli-core';
|
|
3
3
|
import { select } from '@sanity/cli-core/ux';
|
|
4
4
|
import { Table } from 'console-table-printer';
|
|
5
|
-
import { isAfter
|
|
5
|
+
import { isAfter } from 'date-fns/isAfter';
|
|
6
|
+
import { isValid } from 'date-fns/isValid';
|
|
7
|
+
import { lightFormat } from 'date-fns/lightFormat';
|
|
8
|
+
import { parse } from 'date-fns/parse';
|
|
6
9
|
import { assertDatasetExists } from '../../actions/backup/assertDatasetExist.js';
|
|
7
10
|
import { listBackups } from '../../services/backup.js';
|
|
8
11
|
import { listDatasets } from '../../services/datasets.js';
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/commands/backup/list.ts"],"sourcesContent":["import {Args, Flags} from '@oclif/core'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\nimport {select} from '@sanity/cli-core/ux'\nimport {type DatasetsResponse} from '@sanity/client'\nimport {Table} from 'console-table-printer'\nimport {isAfter, isValid, lightFormat, parse} from 'date-fns'\n\nimport {assertDatasetExists} from '../../actions/backup/assertDatasetExist.js'\nimport {listBackups} from '../../services/backup.js'\nimport {listDatasets} from '../../services/datasets.js'\nimport {NO_PROJECT_ID} from '../../util/errorMessages.js'\n\nconst listBackupDebug = subdebug('backup:list')\n\nconst DEFAULT_LIST_BACKUP_LIMIT = 30\n\ntype ListBackupRequestQueryParams = {\n after?: string\n before?: string\n limit: string\n}\n\nexport class ListBackupCommand extends SanityCommand<typeof ListBackupCommand> {\n static override args = {\n dataset: Args.string({\n description: 'Dataset name to list backups for',\n required: false,\n }),\n }\n\n static override description = 'List available backups for a dataset.'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'List backups for a dataset interactively',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production',\n description: 'List backups for the production dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production --limit 50',\n description: 'List up to 50 backups for the production dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production --after 2024-01-31 --limit 10',\n description: 'List up to 10 backups created after 2024-01-31',\n },\n ]\n\n static override flags = {\n after: Flags.string({\n description: 'Only return backups after this date (inclusive, YYYY-MM-DD format)',\n }),\n before: Flags.string({\n description: 'Only return backups before this date (exclusive, YYYY-MM-DD format)',\n }),\n limit: Flags.integer({\n 
char: 'l',\n default: DEFAULT_LIST_BACKUP_LIMIT,\n description: 'Maximum number of backups returned',\n }),\n }\n\n public async run(): Promise<void> {\n const {args, flags} = await this.parse(ListBackupCommand)\n let {dataset} = args\n\n const projectId = await this.getProjectId()\n if (!projectId) {\n this.error(NO_PROJECT_ID, {exit: 1})\n }\n\n let datasets: DatasetsResponse\n\n try {\n datasets = await listDatasets(projectId)\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n listBackupDebug(`Failed to list datasets: ${message}`, error)\n this.error(`Failed to list datasets: ${message}`, {exit: 1})\n }\n\n if (datasets.length === 0) {\n this.error('No datasets found in this project.', {exit: 1})\n }\n\n if (dataset) {\n assertDatasetExists(datasets, dataset)\n } else {\n dataset = await this.promptForDataset(datasets)\n }\n\n // Validate date flags\n if (flags.before || flags.after) {\n try {\n const parsedBefore = this.processDateFlag(flags.before, 'before')\n const parsedAfter = this.processDateFlag(flags.after, 'after')\n\n if (parsedAfter && parsedBefore && isAfter(parsedAfter, parsedBefore)) {\n this.error('--after date must be before --before', {exit: 1})\n }\n } catch (err) {\n this.error(`Parsing date flags: ${err instanceof Error ? 
err.message : err}`, {exit: 1})\n }\n }\n\n // Validate limit flag\n if (flags.limit < 1 || flags.limit > Number.MAX_SAFE_INTEGER) {\n this.error(`Parsing --limit: must be an integer between 1 and ${Number.MAX_SAFE_INTEGER}`, {\n exit: 1,\n })\n }\n\n const query: ListBackupRequestQueryParams = {\n limit: flags.limit.toString(),\n }\n\n if (flags.after) {\n query.after = flags.after\n }\n\n if (flags.before) {\n query.before = flags.before\n }\n\n try {\n const response = await listBackups({\n after: flags.after,\n before: flags.before,\n datasetName: dataset,\n limit: flags.limit,\n projectId,\n })\n\n if (response.backups.length === 0) {\n this.log('No backups found.')\n return\n }\n\n const table = new Table({\n columns: [\n {alignment: 'left', name: 'resource', title: 'RESOURCE'},\n {alignment: 'left', name: 'createdAt', title: 'CREATED AT'},\n {alignment: 'left', name: 'backupId', title: 'BACKUP ID'},\n ],\n })\n\n for (const backup of response.backups) {\n const {createdAt, id} = backup\n table.addRow({\n backupId: id,\n createdAt: lightFormat(Date.parse(createdAt), 'yyyy-MM-dd HH:mm:ss'),\n resource: 'Dataset',\n })\n }\n\n table.printTable()\n\n listBackupDebug(\n `Successfully listed ${response.backups.length} backups for dataset ${dataset}`,\n )\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n listBackupDebug(`Failed to list backups for dataset ${dataset}:`, error)\n this.error(`List dataset backup failed: ${message}`, {exit: 1})\n }\n }\n\n private processDateFlag(date: string | undefined, flagName: string): Date | undefined {\n if (!date) return undefined\n const parsedDate = parse(date, 'yyyy-MM-dd', new Date())\n if (isValid(parsedDate)) {\n return parsedDate\n }\n\n throw new Error(`Invalid date format for '--${flagName}' flag. 
Use YYYY-MM-DD`)\n }\n\n private async promptForDataset(datasets: DatasetsResponse): Promise<string> {\n try {\n const choices = datasets.map((dataset) => ({\n name: dataset.name,\n value: dataset.name,\n }))\n\n return select({\n choices,\n message: 'Select the dataset name:',\n })\n } catch (error) {\n listBackupDebug(`Error selecting dataset`, error)\n this.error(`Failed to select dataset:\\n${error instanceof Error ? error.message : error}`, {\n exit: 1,\n })\n }\n }\n}\n"],"names":["Args","Flags","SanityCommand","subdebug","select","Table","isAfter","isValid","lightFormat","parse","assertDatasetExists","listBackups","listDatasets","NO_PROJECT_ID","listBackupDebug","DEFAULT_LIST_BACKUP_LIMIT","ListBackupCommand","args","dataset","string","description","required","examples","command","flags","after","before","limit","integer","char","default","run","projectId","getProjectId","error","exit","datasets","message","Error","String","length","promptForDataset","parsedBefore","processDateFlag","parsedAfter","err","Number","MAX_SAFE_INTEGER","query","toString","response","datasetName","backups","log","table","columns","alignment","name","title","backup","createdAt","id","addRow","backupId","Date","resource","printTable","date","flagName","undefined","parsedDate","choices","map","value"],"mappings":"AAAA,SAAQA,IAAI,EAAEC,KAAK,QAAO,cAAa;AACvC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AACxD,SAAQC,MAAM,QAAO,sBAAqB;AAE1C,SAAQC,KAAK,QAAO,wBAAuB;AAC3C,SAAQC,OAAO,EAAEC,OAAO,EAAEC,WAAW,EAAEC,KAAK,QAAO,WAAU;AAE7D,SAAQC,mBAAmB,QAAO,6CAA4C;AAC9E,SAAQC,WAAW,QAAO,2BAA0B;AACpD,SAAQC,YAAY,QAAO,6BAA4B;AACvD,SAAQC,aAAa,QAAO,8BAA6B;AAEzD,MAAMC,kBAAkBX,SAAS;AAEjC,MAAMY,4BAA4B;AAQlC,OAAO,MAAMC,0BAA0Bd;IACrC,OAAgBe,OAAO;QACrBC,SAASlB,KAAKmB,MAAM,CAAC;YACnBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,wCAAuC;IAErE,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;KACD,CAAA;IAED,OAAgBI,QAAQ;QACtBC,OAAOxB,MAAM
kB,MAAM,CAAC;YAClBC,aAAa;QACf;QACAM,QAAQzB,MAAMkB,MAAM,CAAC;YACnBC,aAAa;QACf;QACAO,OAAO1B,MAAM2B,OAAO,CAAC;YACnBC,MAAM;YACNC,SAASf;YACTK,aAAa;QACf;IACF,EAAC;IAED,MAAaW,MAAqB;QAChC,MAAM,EAACd,IAAI,EAAEO,KAAK,EAAC,GAAG,MAAM,IAAI,CAACf,KAAK,CAACO;QACvC,IAAI,EAACE,OAAO,EAAC,GAAGD;QAEhB,MAAMe,YAAY,MAAM,IAAI,CAACC,YAAY;QACzC,IAAI,CAACD,WAAW;YACd,IAAI,CAACE,KAAK,CAACrB,eAAe;gBAACsB,MAAM;YAAC;QACpC;QAEA,IAAIC;QAEJ,IAAI;YACFA,WAAW,MAAMxB,aAAaoB;QAChC,EAAE,OAAOE,OAAO;YACd,MAAMG,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;YAChEpB,gBAAgB,CAAC,yBAAyB,EAAEuB,SAAS,EAAEH;YACvD,IAAI,CAACA,KAAK,CAAC,CAAC,yBAAyB,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QAC5D;QAEA,IAAIC,SAASI,MAAM,KAAK,GAAG;YACzB,IAAI,CAACN,KAAK,CAAC,sCAAsC;gBAACC,MAAM;YAAC;QAC3D;QAEA,IAAIjB,SAAS;YACXR,oBAAoB0B,UAAUlB;QAChC,OAAO;YACLA,UAAU,MAAM,IAAI,CAACuB,gBAAgB,CAACL;QACxC;QAEA,sBAAsB;QACtB,IAAIZ,MAAME,MAAM,IAAIF,MAAMC,KAAK,EAAE;YAC/B,IAAI;gBACF,MAAMiB,eAAe,IAAI,CAACC,eAAe,CAACnB,MAAME,MAAM,EAAE;gBACxD,MAAMkB,cAAc,IAAI,CAACD,eAAe,CAACnB,MAAMC,KAAK,EAAE;gBAEtD,IAAImB,eAAeF,gBAAgBpC,QAAQsC,aAAaF,eAAe;oBACrE,IAAI,CAACR,KAAK,CAAC,wCAAwC;wBAACC,MAAM;oBAAC;gBAC7D;YACF,EAAE,OAAOU,KAAK;gBACZ,IAAI,CAACX,KAAK,CAAC,CAAC,oBAAoB,EAAEW,eAAeP,QAAQO,IAAIR,OAAO,GAAGQ,KAAK,EAAE;oBAACV,MAAM;gBAAC;YACxF;QACF;QAEA,sBAAsB;QACtB,IAAIX,MAAMG,KAAK,GAAG,KAAKH,MAAMG,KAAK,GAAGmB,OAAOC,gBAAgB,EAAE;YAC5D,IAAI,CAACb,KAAK,CAAC,CAAC,kDAAkD,EAAEY,OAAOC,gBAAgB,EAAE,EAAE;gBACzFZ,MAAM;YACR;QACF;QAEA,MAAMa,QAAsC;YAC1CrB,OAAOH,MAAMG,KAAK,CAACsB,QAAQ;QAC7B;QAEA,IAAIzB,MAAMC,KAAK,EAAE;YACfuB,MAAMvB,KAAK,GAAGD,MAAMC,KAAK;QAC3B;QAEA,IAAID,MAAME,MAAM,EAAE;YAChBsB,MAAMtB,MAAM,GAAGF,MAAME,MAAM;QAC7B;QAEA,IAAI;YACF,MAAMwB,WAAW,MAAMvC,YAAY;gBACjCc,OAAOD,MAAMC,KAAK;gBAClBC,QAAQF,MAAME,MAAM;gBACpByB,aAAajC;gBACbS,OAAOH,MAAMG,KAAK;gBAClBK;YACF;YAEA,IAAIkB,SAASE,OAAO,CAACZ,MAAM,KAAK,GAAG;gBACjC,IAAI,CAACa,GAAG,CAAC;gBACT;YACF;YAEA,MAAMC,QAAQ,IAAIjD,MAAM;gBACtBkD,SAAS;oBACP;wBAACC,WAAW;wBAAQC,MAAM;wBAAYC,OAAO;oBAAU;oBACvD;wBAACF,WAAW;wBAAQC,MAAM;wBAAaC,OAAO;oBAAY;oBAC1D;wBAACF,
WAAW;wBAAQC,MAAM;wBAAYC,OAAO;oBAAW;iBACzD;YACH;YAEA,KAAK,MAAMC,UAAUT,SAASE,OAAO,CAAE;gBACrC,MAAM,EAACQ,SAAS,EAAEC,EAAE,EAAC,GAAGF;gBACxBL,MAAMQ,MAAM,CAAC;oBACXC,UAAUF;oBACVD,WAAWpD,YAAYwD,KAAKvD,KAAK,CAACmD,YAAY;oBAC9CK,UAAU;gBACZ;YACF;YAEAX,MAAMY,UAAU;YAEhBpD,gBACE,CAAC,oBAAoB,EAAEoC,SAASE,OAAO,CAACZ,MAAM,CAAC,qBAAqB,EAAEtB,SAAS;QAEnF,EAAE,OAAOgB,OAAO;YACd,MAAMG,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;YAChEpB,gBAAgB,CAAC,mCAAmC,EAAEI,QAAQ,CAAC,CAAC,EAAEgB;YAClE,IAAI,CAACA,KAAK,CAAC,CAAC,4BAA4B,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QAC/D;IACF;IAEQQ,gBAAgBwB,IAAwB,EAAEC,QAAgB,EAAoB;QACpF,IAAI,CAACD,MAAM,OAAOE;QAClB,MAAMC,aAAa7D,MAAM0D,MAAM,cAAc,IAAIH;QACjD,IAAIzD,QAAQ+D,aAAa;YACvB,OAAOA;QACT;QAEA,MAAM,IAAIhC,MAAM,CAAC,2BAA2B,EAAE8B,SAAS,sBAAsB,CAAC;IAChF;IAEA,MAAc3B,iBAAiBL,QAA0B,EAAmB;QAC1E,IAAI;YACF,MAAMmC,UAAUnC,SAASoC,GAAG,CAAC,CAACtD,UAAa,CAAA;oBACzCuC,MAAMvC,QAAQuC,IAAI;oBAClBgB,OAAOvD,QAAQuC,IAAI;gBACrB,CAAA;YAEA,OAAOrD,OAAO;gBACZmE;gBACAlC,SAAS;YACX;QACF,EAAE,OAAOH,OAAO;YACdpB,gBAAgB,CAAC,uBAAuB,CAAC,EAAEoB;YAC3C,IAAI,CAACA,KAAK,CAAC,CAAC,2BAA2B,EAAEA,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGH,OAAO,EAAE;gBACzFC,MAAM;YACR;QACF;IACF;AACF"}
|
|
1
|
+
{"version":3,"sources":["../../../src/commands/backup/list.ts"],"sourcesContent":["import {Args, Flags} from '@oclif/core'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\nimport {select} from '@sanity/cli-core/ux'\nimport {type DatasetsResponse} from '@sanity/client'\nimport {Table} from 'console-table-printer'\nimport {isAfter} from 'date-fns/isAfter'\nimport {isValid} from 'date-fns/isValid'\nimport {lightFormat} from 'date-fns/lightFormat'\nimport {parse} from 'date-fns/parse'\n\nimport {assertDatasetExists} from '../../actions/backup/assertDatasetExist.js'\nimport {listBackups} from '../../services/backup.js'\nimport {listDatasets} from '../../services/datasets.js'\nimport {NO_PROJECT_ID} from '../../util/errorMessages.js'\n\nconst listBackupDebug = subdebug('backup:list')\n\nconst DEFAULT_LIST_BACKUP_LIMIT = 30\n\ntype ListBackupRequestQueryParams = {\n after?: string\n before?: string\n limit: string\n}\n\nexport class ListBackupCommand extends SanityCommand<typeof ListBackupCommand> {\n static override args = {\n dataset: Args.string({\n description: 'Dataset name to list backups for',\n required: false,\n }),\n }\n\n static override description = 'List available backups for a dataset.'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'List backups for a dataset interactively',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production',\n description: 'List backups for the production dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production --limit 50',\n description: 'List up to 50 backups for the production dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production --after 2024-01-31 --limit 10',\n description: 'List up to 10 backups created after 2024-01-31',\n },\n ]\n\n static override flags = {\n after: Flags.string({\n description: 'Only return backups after this date (inclusive, YYYY-MM-DD format)',\n }),\n before: Flags.string({\n 
description: 'Only return backups before this date (exclusive, YYYY-MM-DD format)',\n }),\n limit: Flags.integer({\n char: 'l',\n default: DEFAULT_LIST_BACKUP_LIMIT,\n description: 'Maximum number of backups returned',\n }),\n }\n\n public async run(): Promise<void> {\n const {args, flags} = await this.parse(ListBackupCommand)\n let {dataset} = args\n\n const projectId = await this.getProjectId()\n if (!projectId) {\n this.error(NO_PROJECT_ID, {exit: 1})\n }\n\n let datasets: DatasetsResponse\n\n try {\n datasets = await listDatasets(projectId)\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n listBackupDebug(`Failed to list datasets: ${message}`, error)\n this.error(`Failed to list datasets: ${message}`, {exit: 1})\n }\n\n if (datasets.length === 0) {\n this.error('No datasets found in this project.', {exit: 1})\n }\n\n if (dataset) {\n assertDatasetExists(datasets, dataset)\n } else {\n dataset = await this.promptForDataset(datasets)\n }\n\n // Validate date flags\n if (flags.before || flags.after) {\n try {\n const parsedBefore = this.processDateFlag(flags.before, 'before')\n const parsedAfter = this.processDateFlag(flags.after, 'after')\n\n if (parsedAfter && parsedBefore && isAfter(parsedAfter, parsedBefore)) {\n this.error('--after date must be before --before', {exit: 1})\n }\n } catch (err) {\n this.error(`Parsing date flags: ${err instanceof Error ? 
err.message : err}`, {exit: 1})\n }\n }\n\n // Validate limit flag\n if (flags.limit < 1 || flags.limit > Number.MAX_SAFE_INTEGER) {\n this.error(`Parsing --limit: must be an integer between 1 and ${Number.MAX_SAFE_INTEGER}`, {\n exit: 1,\n })\n }\n\n const query: ListBackupRequestQueryParams = {\n limit: flags.limit.toString(),\n }\n\n if (flags.after) {\n query.after = flags.after\n }\n\n if (flags.before) {\n query.before = flags.before\n }\n\n try {\n const response = await listBackups({\n after: flags.after,\n before: flags.before,\n datasetName: dataset,\n limit: flags.limit,\n projectId,\n })\n\n if (response.backups.length === 0) {\n this.log('No backups found.')\n return\n }\n\n const table = new Table({\n columns: [\n {alignment: 'left', name: 'resource', title: 'RESOURCE'},\n {alignment: 'left', name: 'createdAt', title: 'CREATED AT'},\n {alignment: 'left', name: 'backupId', title: 'BACKUP ID'},\n ],\n })\n\n for (const backup of response.backups) {\n const {createdAt, id} = backup\n table.addRow({\n backupId: id,\n createdAt: lightFormat(Date.parse(createdAt), 'yyyy-MM-dd HH:mm:ss'),\n resource: 'Dataset',\n })\n }\n\n table.printTable()\n\n listBackupDebug(\n `Successfully listed ${response.backups.length} backups for dataset ${dataset}`,\n )\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n listBackupDebug(`Failed to list backups for dataset ${dataset}:`, error)\n this.error(`List dataset backup failed: ${message}`, {exit: 1})\n }\n }\n\n private processDateFlag(date: string | undefined, flagName: string): Date | undefined {\n if (!date) return undefined\n const parsedDate = parse(date, 'yyyy-MM-dd', new Date())\n if (isValid(parsedDate)) {\n return parsedDate\n }\n\n throw new Error(`Invalid date format for '--${flagName}' flag. 
Use YYYY-MM-DD`)\n }\n\n private async promptForDataset(datasets: DatasetsResponse): Promise<string> {\n try {\n const choices = datasets.map((dataset) => ({\n name: dataset.name,\n value: dataset.name,\n }))\n\n return select({\n choices,\n message: 'Select the dataset name:',\n })\n } catch (error) {\n listBackupDebug(`Error selecting dataset`, error)\n this.error(`Failed to select dataset:\\n${error instanceof Error ? error.message : error}`, {\n exit: 1,\n })\n }\n }\n}\n"],"names":["Args","Flags","SanityCommand","subdebug","select","Table","isAfter","isValid","lightFormat","parse","assertDatasetExists","listBackups","listDatasets","NO_PROJECT_ID","listBackupDebug","DEFAULT_LIST_BACKUP_LIMIT","ListBackupCommand","args","dataset","string","description","required","examples","command","flags","after","before","limit","integer","char","default","run","projectId","getProjectId","error","exit","datasets","message","Error","String","length","promptForDataset","parsedBefore","processDateFlag","parsedAfter","err","Number","MAX_SAFE_INTEGER","query","toString","response","datasetName","backups","log","table","columns","alignment","name","title","backup","createdAt","id","addRow","backupId","Date","resource","printTable","date","flagName","undefined","parsedDate","choices","map","value"],"mappings":"AAAA,SAAQA,IAAI,EAAEC,KAAK,QAAO,cAAa;AACvC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AACxD,SAAQC,MAAM,QAAO,sBAAqB;AAE1C,SAAQC,KAAK,QAAO,wBAAuB;AAC3C,SAAQC,OAAO,QAAO,mBAAkB;AACxC,SAAQC,OAAO,QAAO,mBAAkB;AACxC,SAAQC,WAAW,QAAO,uBAAsB;AAChD,SAAQC,KAAK,QAAO,iBAAgB;AAEpC,SAAQC,mBAAmB,QAAO,6CAA4C;AAC9E,SAAQC,WAAW,QAAO,2BAA0B;AACpD,SAAQC,YAAY,QAAO,6BAA4B;AACvD,SAAQC,aAAa,QAAO,8BAA6B;AAEzD,MAAMC,kBAAkBX,SAAS;AAEjC,MAAMY,4BAA4B;AAQlC,OAAO,MAAMC,0BAA0Bd;IACrC,OAAgBe,OAAO;QACrBC,SAASlB,KAAKmB,MAAM,CAAC;YACnBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,wCAAuC;IAErE,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH
,aAAa;QACf;KACD,CAAA;IAED,OAAgBI,QAAQ;QACtBC,OAAOxB,MAAMkB,MAAM,CAAC;YAClBC,aAAa;QACf;QACAM,QAAQzB,MAAMkB,MAAM,CAAC;YACnBC,aAAa;QACf;QACAO,OAAO1B,MAAM2B,OAAO,CAAC;YACnBC,MAAM;YACNC,SAASf;YACTK,aAAa;QACf;IACF,EAAC;IAED,MAAaW,MAAqB;QAChC,MAAM,EAACd,IAAI,EAAEO,KAAK,EAAC,GAAG,MAAM,IAAI,CAACf,KAAK,CAACO;QACvC,IAAI,EAACE,OAAO,EAAC,GAAGD;QAEhB,MAAMe,YAAY,MAAM,IAAI,CAACC,YAAY;QACzC,IAAI,CAACD,WAAW;YACd,IAAI,CAACE,KAAK,CAACrB,eAAe;gBAACsB,MAAM;YAAC;QACpC;QAEA,IAAIC;QAEJ,IAAI;YACFA,WAAW,MAAMxB,aAAaoB;QAChC,EAAE,OAAOE,OAAO;YACd,MAAMG,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;YAChEpB,gBAAgB,CAAC,yBAAyB,EAAEuB,SAAS,EAAEH;YACvD,IAAI,CAACA,KAAK,CAAC,CAAC,yBAAyB,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QAC5D;QAEA,IAAIC,SAASI,MAAM,KAAK,GAAG;YACzB,IAAI,CAACN,KAAK,CAAC,sCAAsC;gBAACC,MAAM;YAAC;QAC3D;QAEA,IAAIjB,SAAS;YACXR,oBAAoB0B,UAAUlB;QAChC,OAAO;YACLA,UAAU,MAAM,IAAI,CAACuB,gBAAgB,CAACL;QACxC;QAEA,sBAAsB;QACtB,IAAIZ,MAAME,MAAM,IAAIF,MAAMC,KAAK,EAAE;YAC/B,IAAI;gBACF,MAAMiB,eAAe,IAAI,CAACC,eAAe,CAACnB,MAAME,MAAM,EAAE;gBACxD,MAAMkB,cAAc,IAAI,CAACD,eAAe,CAACnB,MAAMC,KAAK,EAAE;gBAEtD,IAAImB,eAAeF,gBAAgBpC,QAAQsC,aAAaF,eAAe;oBACrE,IAAI,CAACR,KAAK,CAAC,wCAAwC;wBAACC,MAAM;oBAAC;gBAC7D;YACF,EAAE,OAAOU,KAAK;gBACZ,IAAI,CAACX,KAAK,CAAC,CAAC,oBAAoB,EAAEW,eAAeP,QAAQO,IAAIR,OAAO,GAAGQ,KAAK,EAAE;oBAACV,MAAM;gBAAC;YACxF;QACF;QAEA,sBAAsB;QACtB,IAAIX,MAAMG,KAAK,GAAG,KAAKH,MAAMG,KAAK,GAAGmB,OAAOC,gBAAgB,EAAE;YAC5D,IAAI,CAACb,KAAK,CAAC,CAAC,kDAAkD,EAAEY,OAAOC,gBAAgB,EAAE,EAAE;gBACzFZ,MAAM;YACR;QACF;QAEA,MAAMa,QAAsC;YAC1CrB,OAAOH,MAAMG,KAAK,CAACsB,QAAQ;QAC7B;QAEA,IAAIzB,MAAMC,KAAK,EAAE;YACfuB,MAAMvB,KAAK,GAAGD,MAAMC,KAAK;QAC3B;QAEA,IAAID,MAAME,MAAM,EAAE;YAChBsB,MAAMtB,MAAM,GAAGF,MAAME,MAAM;QAC7B;QAEA,IAAI;YACF,MAAMwB,WAAW,MAAMvC,YAAY;gBACjCc,OAAOD,MAAMC,KAAK;gBAClBC,QAAQF,MAAME,MAAM;gBACpByB,aAAajC;gBACbS,OAAOH,MAAMG,KAAK;gBAClBK;YACF;YAEA,IAAIkB,SAASE,OAAO,CAACZ,MAAM,KAAK,GAAG;gBACjC,IAAI,CAACa,GAAG,CAAC;gBACT;YACF;YAEA,MAAMC,QAAQ,IAAIjD,MAAM;gBACtBkD,SAAS;oBACP;wBAACC,WAAW;wBAAQC,MAAM;wBAAYC,OAAO;oBAAU;oBACvD;
wBAACF,WAAW;wBAAQC,MAAM;wBAAaC,OAAO;oBAAY;oBAC1D;wBAACF,WAAW;wBAAQC,MAAM;wBAAYC,OAAO;oBAAW;iBACzD;YACH;YAEA,KAAK,MAAMC,UAAUT,SAASE,OAAO,CAAE;gBACrC,MAAM,EAACQ,SAAS,EAAEC,EAAE,EAAC,GAAGF;gBACxBL,MAAMQ,MAAM,CAAC;oBACXC,UAAUF;oBACVD,WAAWpD,YAAYwD,KAAKvD,KAAK,CAACmD,YAAY;oBAC9CK,UAAU;gBACZ;YACF;YAEAX,MAAMY,UAAU;YAEhBpD,gBACE,CAAC,oBAAoB,EAAEoC,SAASE,OAAO,CAACZ,MAAM,CAAC,qBAAqB,EAAEtB,SAAS;QAEnF,EAAE,OAAOgB,OAAO;YACd,MAAMG,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;YAChEpB,gBAAgB,CAAC,mCAAmC,EAAEI,QAAQ,CAAC,CAAC,EAAEgB;YAClE,IAAI,CAACA,KAAK,CAAC,CAAC,4BAA4B,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QAC/D;IACF;IAEQQ,gBAAgBwB,IAAwB,EAAEC,QAAgB,EAAoB;QACpF,IAAI,CAACD,MAAM,OAAOE;QAClB,MAAMC,aAAa7D,MAAM0D,MAAM,cAAc,IAAIH;QACjD,IAAIzD,QAAQ+D,aAAa;YACvB,OAAOA;QACT;QAEA,MAAM,IAAIhC,MAAM,CAAC,2BAA2B,EAAE8B,SAAS,sBAAsB,CAAC;IAChF;IAEA,MAAc3B,iBAAiBL,QAA0B,EAAmB;QAC1E,IAAI;YACF,MAAMmC,UAAUnC,SAASoC,GAAG,CAAC,CAACtD,UAAa,CAAA;oBACzCuC,MAAMvC,QAAQuC,IAAI;oBAClBgB,OAAOvD,QAAQuC,IAAI;gBACrB,CAAA;YAEA,OAAOrD,OAAO;gBACZmE;gBACAlC,SAAS;YACX;QACF,EAAE,OAAOH,OAAO;YACdpB,gBAAgB,CAAC,uBAAuB,CAAC,EAAEoB;YAC3C,IAAI,CAACA,KAAK,CAAC,CAAC,2BAA2B,EAAEA,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGH,OAAO,EAAE;gBACzFC,MAAM;YACR;QACF;IACF;AACF"}
|
|
@@ -4,7 +4,9 @@ import { exit } from '@oclif/core/errors';
|
|
|
4
4
|
import { SanityCommand, subdebug } from '@sanity/cli-core';
|
|
5
5
|
import { spinner } from '@sanity/cli-core/ux';
|
|
6
6
|
import { Table } from 'console-table-printer';
|
|
7
|
-
import { formatDistance
|
|
7
|
+
import { formatDistance } from 'date-fns/formatDistance';
|
|
8
|
+
import { formatDistanceToNow } from 'date-fns/formatDistanceToNow';
|
|
9
|
+
import { parseISO } from 'date-fns/parseISO';
|
|
8
10
|
import { validateDatasetName } from '../../actions/dataset/validateDatasetName.js';
|
|
9
11
|
import { promptForDataset } from '../../prompts/promptForDataset.js';
|
|
10
12
|
import { promptForDatasetName } from '../../prompts/promptForDatasetName.js';
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/commands/dataset/copy.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport {Args, Flags} from '@oclif/core'\nimport {exit} from '@oclif/core/errors'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\nimport {spinner} from '@sanity/cli-core/ux'\nimport {Table} from 'console-table-printer'\nimport {formatDistance, formatDistanceToNow, parseISO} from 'date-fns'\n\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName.js'\nimport {promptForDataset} from '../../prompts/promptForDataset.js'\nimport {promptForDatasetName} from '../../prompts/promptForDatasetName.js'\nimport {\n copyDataset,\n type CopyJobProgressEvent,\n type DatasetCopyJob,\n followCopyJobProgress,\n listDatasetCopyJobs,\n listDatasets,\n} from '../../services/datasets.js'\nimport {NO_PROJECT_ID} from '../../util/errorMessages.js'\n\nconst copyDatasetDebug = subdebug('dataset:copy')\n\nexport class CopyDatasetCommand extends SanityCommand<typeof CopyDatasetCommand> {\n static override args = {\n source: Args.string({\n description: 'Name of the dataset to copy from',\n required: false,\n }),\n target: Args.string({\n description: 'Name of the dataset to copy to',\n required: false,\n }),\n }\n\n static override description =\n 'Manages dataset copying, including starting a new copy job, listing copy jobs and following the progress of a running copy job'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Interactively copy a dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> source-dataset',\n description: 'Copy from source-dataset (prompts for target)',\n },\n {\n command: '<%= config.bin %> <%= command.id %> source-dataset target-dataset',\n description: 'Copy from source-dataset to target-dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --skip-history source target',\n description: 'Copy without preserving document history (faster 
for large datasets)',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --detach source target',\n description: 'Start copy job without waiting for completion',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --attach <job-id>',\n description: 'Attach to a running copy job to follow progress',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --list',\n description: 'List all dataset copy jobs',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --list --offset 2 --limit 10',\n description: 'List copy jobs with pagination',\n },\n ]\n\n static override flags = {\n attach: Flags.string({\n description: 'Attach to the running copy process to show progress',\n exclusive: ['list', 'detach', 'skip-history'],\n required: false,\n }),\n detach: Flags.boolean({\n description: 'Start the copy without waiting for it to finish',\n exclusive: ['list', 'attach'],\n required: false,\n }),\n limit: Flags.integer({\n dependsOn: ['list'],\n description: 'Maximum number of jobs returned (default 10, max 1000)',\n max: 1000,\n required: false,\n }),\n list: Flags.boolean({\n description: 'Lists all dataset copy jobs',\n exclusive: ['attach', 'detach', 'skip-history'],\n required: false,\n }),\n offset: Flags.integer({\n dependsOn: ['list'],\n description: 'Start position in the list of jobs (default 0)',\n required: false,\n }),\n 'skip-history': Flags.boolean({\n description: \"Don't preserve document history on copy\",\n exclusive: ['list', 'attach'],\n required: false,\n }),\n }\n\n private projectId!: string\n\n public async run(): Promise<void> {\n const {args, flags} = await this.parse(CopyDatasetCommand)\n\n const projectId = await this.getProjectId()\n if (!projectId) {\n this.error(NO_PROJECT_ID, {exit: 1})\n }\n\n this.projectId = projectId\n\n // Route to appropriate mode\n if (flags.list) {\n return this.handleListMode(flags)\n }\n\n if (flags.attach) {\n return this.handleAttachMode(flags.attach)\n }\n\n return this.handleCopyMode(args, 
flags)\n }\n\n private displayCopyJobsTable(jobs: DatasetCopyJob[]): void {\n const table = new Table({\n columns: [\n {alignment: 'left', name: 'id', title: 'Job ID'},\n {alignment: 'left', name: 'sourceDataset', title: 'Source Dataset'},\n {alignment: 'left', name: 'targetDataset', title: 'Target Dataset'},\n {alignment: 'left', name: 'state', title: 'State'},\n {alignment: 'left', name: 'withHistory', title: 'With history'},\n {alignment: 'left', name: 'timeStarted', title: 'Time started'},\n {alignment: 'left', name: 'timeTaken', title: 'Time taken'},\n ],\n title: 'Dataset copy jobs for this project in descending order',\n })\n\n for (const job of jobs) {\n const {createdAt, id, sourceDataset, state, targetDataset, updatedAt, withHistory} = job\n\n let timeStarted = ''\n if (createdAt !== '') {\n timeStarted = formatDistanceToNow(parseISO(createdAt))\n }\n\n let timeTaken = ''\n if (updatedAt !== '') {\n timeTaken = formatDistance(parseISO(updatedAt), parseISO(createdAt))\n }\n\n let color: '' | 'green' | 'red' | 'yellow' = ''\n switch (state) {\n case 'completed': {\n color = 'green'\n break\n }\n case 'failed': {\n color = 'red'\n break\n }\n case 'pending': {\n color = 'yellow'\n break\n }\n default: {\n color = ''\n }\n }\n\n table.addRow(\n {\n id,\n sourceDataset,\n state,\n targetDataset,\n timeStarted: `${timeStarted} ago`,\n timeTaken,\n withHistory,\n },\n {color},\n )\n }\n\n table.printTable()\n }\n\n private async handleAttachMode(jobId: string): Promise<void> {\n copyDatasetDebug('Attaching to copy job %s', jobId)\n\n if (!jobId || typeof jobId !== 'string' || jobId.trim() === '') {\n this.error('Please supply a valid jobId', {exit: 1})\n }\n\n try {\n await this.subscribeToProgress(jobId)\n this.log(`Job ${styleText('green', jobId)} completed`)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n copyDatasetDebug('Failed to attach to copy job: %s', message, error)\n this.error(`Failed to attach to copy job: ${message}`, {exit: 1})\n }\n }\n\n private async handleCopyMode(\n args: {source?: string; target?: string},\n flags: {detach?: boolean; 'skip-history'?: boolean},\n ): Promise<void> {\n copyDatasetDebug('Starting copy mode')\n\n const skipHistory = Boolean(flags['skip-history'])\n\n // Get and validate source dataset\n let sourceDataset = args.source\n if (sourceDataset) {\n const nameError = validateDatasetName(sourceDataset)\n if (nameError) {\n this.error(nameError, {exit: 1})\n }\n }\n\n let datasetsResponse\n try {\n datasetsResponse = await listDatasets(this.projectId)\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n copyDatasetDebug('Failed to fetch datasets: %s', message, error)\n this.error(`Failed to fetch datasets: ${message}`, {exit: 1})\n }\n\n const datasetNames = new Set(datasetsResponse.map((ds) => ds.name))\n\n // Prompt for source if not provided\n if (!sourceDataset) {\n sourceDataset = await promptForDataset({\n datasets: datasetsResponse,\n })\n }\n\n if (!datasetNames.has(sourceDataset)) {\n this.error(`Source dataset \"${sourceDataset}\" doesn't exist`, {exit: 1})\n }\n\n // Get and validate target dataset\n let targetDataset = args.target\n if (targetDataset) {\n const nameError = validateDatasetName(targetDataset)\n if (nameError) {\n this.error(nameError, {exit: 1})\n }\n } else {\n targetDataset = await promptForDatasetName({\n message: 'Target dataset name:',\n })\n }\n\n if (datasetNames.has(targetDataset)) {\n this.error(`Target dataset \"${targetDataset}\" already exists`, {exit: 1})\n }\n\n // Start the copy job\n try {\n this.log(\n `Copying dataset ${styleText('green', sourceDataset)} to ${styleText('green', targetDataset)}...`,\n )\n\n if (!skipHistory) {\n this.log(\n `Note: You can run this command with flag '--skip-history'. 
The flag will reduce copy time in larger datasets.`,\n )\n }\n\n const response = await copyDataset({\n projectId: this.projectId,\n skipHistory,\n sourceDataset,\n targetDataset,\n })\n\n this.log(`Job ${styleText('green', response.jobId)} started`)\n\n if (flags.detach) {\n return\n }\n\n await this.subscribeToProgress(response.jobId)\n this.log(`Job ${styleText('green', response.jobId)} completed`)\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n copyDatasetDebug('Dataset copying failed: %s', message, error)\n this.error(`Dataset copying failed: ${message}`, {exit: 1})\n }\n }\n\n private async handleListMode(flags: {limit?: number; offset?: number}): Promise<void> {\n copyDatasetDebug('Listing dataset copy jobs')\n\n try {\n const jobs = await listDatasetCopyJobs({\n limit: flags.limit,\n offset: flags.offset,\n projectId: this.projectId,\n })\n\n if (jobs.length === 0) {\n this.log(\"This project doesn't have any dataset copy jobs\")\n return\n }\n\n this.displayCopyJobsTable(jobs)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n copyDatasetDebug('Failed to list dataset copy jobs: %s', message, error)\n this.error(`Failed to list dataset copy jobs: ${message}`, {exit: 1})\n }\n }\n\n private async subscribeToProgress(jobId: string): Promise<void> {\n let currentProgress = 0\n const spin = spinner('').start()\n\n return new Promise<void>((resolve, reject) => {\n const subscription = followCopyJobProgress({jobId, projectId: this.projectId}).subscribe({\n complete: () => {\n spin.succeed('Copy finished.')\n resolve()\n },\n error: (err) => {\n spin.fail('Copy failed.')\n reject(err)\n },\n next: (event: CopyJobProgressEvent) => {\n if (typeof event.progress === 'number') {\n currentProgress = event.progress\n }\n spin.text = `Copy in progress: ${currentProgress}%`\n },\n })\n\n // Cleanup on process termination - use 'once' to prevent memory leaks\n process.once('SIGINT', () => {\n subscription.unsubscribe()\n spin.fail('Copy interrupted.')\n exit(130)\n })\n })\n }\n}\n"],"names":["styleText","Args","Flags","exit","SanityCommand","subdebug","spinner","Table","formatDistance","formatDistanceToNow","parseISO","validateDatasetName","promptForDataset","promptForDatasetName","copyDataset","followCopyJobProgress","listDatasetCopyJobs","listDatasets","NO_PROJECT_ID","copyDatasetDebug","CopyDatasetCommand","args","source","string","description","required","target","examples","command","flags","attach","exclusive","detach","boolean","limit","integer","dependsOn","max","list","offset","projectId","run","parse","getProjectId","error","handleListMode","handleAttachMode","handleCopyMode","displayCopyJobsTable","jobs","table","columns","alignment","name","title","job","createdAt","id","sourceDataset","state","targetDataset","updatedAt","withHistory","timeStarted","timeTaken","color","addRow","printTable","jobId","trim","subscribeToProgress","log","message","Error","String","skipHistory","Boolean","nameError","datasetsResponse","datasetNames","Set","map","ds","datasets","has",
"response","length","currentProgress","spin","start","Promise","resolve","reject","subscription","subscribe","complete","succeed","err","fail","next","event","progress","text","process","once","unsubscribe"],"mappings":"AAAA,SAAQA,SAAS,QAAO,YAAW;AAEnC,SAAQC,IAAI,EAAEC,KAAK,QAAO,cAAa;AACvC,SAAQC,IAAI,QAAO,qBAAoB;AACvC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AACxD,SAAQC,OAAO,QAAO,sBAAqB;AAC3C,SAAQC,KAAK,QAAO,wBAAuB;AAC3C,SAAQC,cAAc,EAAEC,mBAAmB,EAAEC,QAAQ,QAAO,WAAU;AAEtE,SAAQC,mBAAmB,QAAO,+CAA8C;AAChF,SAAQC,gBAAgB,QAAO,oCAAmC;AAClE,SAAQC,oBAAoB,QAAO,wCAAuC;AAC1E,SACEC,WAAW,EAGXC,qBAAqB,EACrBC,mBAAmB,EACnBC,YAAY,QACP,6BAA4B;AACnC,SAAQC,aAAa,QAAO,8BAA6B;AAEzD,MAAMC,mBAAmBd,SAAS;AAElC,OAAO,MAAMe,2BAA2BhB;IACtC,OAAgBiB,OAAO;QACrBC,QAAQrB,KAAKsB,MAAM,CAAC;YAClBC,aAAa;YACbC,UAAU;QACZ;QACAC,QAAQzB,KAAKsB,MAAM,CAAC;YAClBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cACd,iIAAgI;IAElI,OAAgBG,WAAW;QACzB;YACEC,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;KACD,CAAA;IAED,OAAgBK,QAAQ;QACtBC,QAAQ5B,MAAMqB,MAAM,CAAC;YACnBC,aAAa;YACbO,WAAW;gBAAC;gBAAQ;gBAAU;aAAe;YAC7CN,UAAU;QACZ;QACAO,QAAQ9B,MAAM+B,OAAO,CAAC;YACpBT,aAAa;YACbO,WAAW;gBAAC;gBAAQ;aAAS;YAC7BN,UAAU;QACZ;QACAS,OAAOhC,MAAMiC,OAAO,CAAC;YACnBC,WAAW;gBAAC;aAAO;YACnBZ,aAAa;YACba,KAAK;YACLZ,UAAU;QACZ;QACAa,MAAMpC,MAAM+B,OAAO,CAAC;YAClBT,aAAa;YACbO,WAAW;gBAAC;gBAAU;gBAAU;aAAe;YAC/CN,UAAU;QACZ;QACAc,QAAQrC,MAAMiC,OAAO,CAAC;YACpBC,WAAW;gBAAC;aAAO;YACnBZ,aAAa;YACbC,UAAU;QACZ;QACA,gBAAgBvB,MAAM+B,OAAO,CAAC;YAC5BT,aAAa;YACbO,WAAW;gBAAC;gBAAQ;aAAS;YAC7BN,UAAU;QACZ;IACF,EAAC;IAEOe,UAAkB;IAE1B,MAAaC,MAAqB;QAChC,MAAM,EAACpB,IAAI,EAAEQ,KAAK,EAAC,GAAG,MAAM,IAAI,CAACa,KAAK,CAACtB;QAEvC,MAAMoB,YAAY,MAAM,IAAI,CAACG,YAAY;QACzC,IAAI,CAACH,WAAW;YACd,IAAI,CAACI,KAAK,CAAC1B,eAAe;gBAACf,MAAM;YAAC;QACpC;QAEA,IAAI,CAACqC,SAAS,GAAGA;QAEjB,4BAA4B;QAC5B,IAAIX,MAAMS,IA
AI,EAAE;YACd,OAAO,IAAI,CAACO,cAAc,CAAChB;QAC7B;QAEA,IAAIA,MAAMC,MAAM,EAAE;YAChB,OAAO,IAAI,CAACgB,gBAAgB,CAACjB,MAAMC,MAAM;QAC3C;QAEA,OAAO,IAAI,CAACiB,cAAc,CAAC1B,MAAMQ;IACnC;IAEQmB,qBAAqBC,IAAsB,EAAQ;QACzD,MAAMC,QAAQ,IAAI3C,MAAM;YACtB4C,SAAS;gBACP;oBAACC,WAAW;oBAAQC,MAAM;oBAAMC,OAAO;gBAAQ;gBAC/C;oBAACF,WAAW;oBAAQC,MAAM;oBAAiBC,OAAO;gBAAgB;gBAClE;oBAACF,WAAW;oBAAQC,MAAM;oBAAiBC,OAAO;gBAAgB;gBAClE;oBAACF,WAAW;oBAAQC,MAAM;oBAASC,OAAO;gBAAO;gBACjD;oBAACF,WAAW;oBAAQC,MAAM;oBAAeC,OAAO;gBAAc;gBAC9D;oBAACF,WAAW;oBAAQC,MAAM;oBAAeC,OAAO;gBAAc;gBAC9D;oBAACF,WAAW;oBAAQC,MAAM;oBAAaC,OAAO;gBAAY;aAC3D;YACDA,OAAO;QACT;QAEA,KAAK,MAAMC,OAAON,KAAM;YACtB,MAAM,EAACO,SAAS,EAAEC,EAAE,EAAEC,aAAa,EAAEC,KAAK,EAAEC,aAAa,EAAEC,SAAS,EAAEC,WAAW,EAAC,GAAGP;YAErF,IAAIQ,cAAc;YAClB,IAAIP,cAAc,IAAI;gBACpBO,cAActD,oBAAoBC,SAAS8C;YAC7C;YAEA,IAAIQ,YAAY;YAChB,IAAIH,cAAc,IAAI;gBACpBG,YAAYxD,eAAeE,SAASmD,YAAYnD,SAAS8C;YAC3D;YAEA,IAAIS,QAAyC;YAC7C,OAAQN;gBACN,KAAK;oBAAa;wBAChBM,QAAQ;wBACR;oBACF;gBACA,KAAK;oBAAU;wBACbA,QAAQ;wBACR;oBACF;gBACA,KAAK;oBAAW;wBACdA,QAAQ;wBACR;oBACF;gBACA;oBAAS;wBACPA,QAAQ;oBACV;YACF;YAEAf,MAAMgB,MAAM,CACV;gBACET;gBACAC;gBACAC;gBACAC;gBACAG,aAAa,GAAGA,YAAY,IAAI,CAAC;gBACjCC;gBACAF;YACF,GACA;gBAACG;YAAK;QAEV;QAEAf,MAAMiB,UAAU;IAClB;IAEA,MAAcrB,iBAAiBsB,KAAa,EAAiB;QAC3DjD,iBAAiB,4BAA4BiD;QAE7C,IAAI,CAACA,SAAS,OAAOA,UAAU,YAAYA,MAAMC,IAAI,OAAO,IAAI;YAC9D,IAAI,CAACzB,KAAK,CAAC,+BAA+B;gBAACzC,MAAM;YAAC;QACpD;QAEA,IAAI;YACF,MAAM,IAAI,CAACmE,mBAAmB,CAACF;YAC/B,IAAI,CAACG,GAAG,CAAC,CAAC,IAAI,EAAEvE,UAAU,SAASoE,OAAO,UAAU,CAAC;QACvD,EAAE,OAAOxB,OAAO;YACd,MAAM4B,UAAU5B,iBAAiB6B,QAAQ7B,MAAM4B,OAAO,GAAGE,OAAO9B;YAChEzB,iBAAiB,oCAAoCqD,SAAS5B;YAC9D,IAAI,CAACA,KAAK,CAAC,CAAC,8BAA8B,EAAE4B,SAAS,EAAE;gBAACrE,MAAM;YAAC;QACjE;IACF;IAEA,MAAc4C,eACZ1B,IAAwC,EACxCQ,KAAmD,EACpC;QACfV,iBAAiB;QAEjB,MAAMwD,cAAcC,QAAQ/C,KAAK,CAAC,eAAe;QAEjD,kCAAkC;QAClC,IAAI6B,gBAAgBrC,KAAKC,MAAM;QAC/B,IAAIoC,eAAe;YACjB,MAAMmB,YAAYlE,oBAAoB+C;YACtC,IAAImB,WAAW;gBACb,IAAI,CAACjC,KAAK,CAACiC,WAAW;oBAAC1E,MAAM;gBAAC;YAChC;QACF;Q
AEA,IAAI2E;QACJ,IAAI;YACFA,mBAAmB,MAAM7D,aAAa,IAAI,CAACuB,SAAS;QACtD,EAAE,OAAOI,OAAO;YACd,MAAM4B,UAAU5B,iBAAiB6B,QAAQ7B,MAAM4B,OAAO,GAAGE,OAAO9B;YAChEzB,iBAAiB,gCAAgCqD,SAAS5B;YAC1D,IAAI,CAACA,KAAK,CAAC,CAAC,0BAA0B,EAAE4B,SAAS,EAAE;gBAACrE,MAAM;YAAC;QAC7D;QAEA,MAAM4E,eAAe,IAAIC,IAAIF,iBAAiBG,GAAG,CAAC,CAACC,KAAOA,GAAG7B,IAAI;QAEjE,oCAAoC;QACpC,IAAI,CAACK,eAAe;YAClBA,gBAAgB,MAAM9C,iBAAiB;gBACrCuE,UAAUL;YACZ;QACF;QAEA,IAAI,CAACC,aAAaK,GAAG,CAAC1B,gBAAgB;YACpC,IAAI,CAACd,KAAK,CAAC,CAAC,gBAAgB,EAAEc,cAAc,eAAe,CAAC,EAAE;gBAACvD,MAAM;YAAC;QACxE;QAEA,kCAAkC;QAClC,IAAIyD,gBAAgBvC,KAAKK,MAAM;QAC/B,IAAIkC,eAAe;YACjB,MAAMiB,YAAYlE,oBAAoBiD;YACtC,IAAIiB,WAAW;gBACb,IAAI,CAACjC,KAAK,CAACiC,WAAW;oBAAC1E,MAAM;gBAAC;YAChC;QACF,OAAO;YACLyD,gBAAgB,MAAM/C,qBAAqB;gBACzC2D,SAAS;YACX;QACF;QAEA,IAAIO,aAAaK,GAAG,CAACxB,gBAAgB;YACnC,IAAI,CAAChB,KAAK,CAAC,CAAC,gBAAgB,EAAEgB,cAAc,gBAAgB,CAAC,EAAE;gBAACzD,MAAM;YAAC;QACzE;QAEA,qBAAqB;QACrB,IAAI;YACF,IAAI,CAACoE,GAAG,CACN,CAAC,gBAAgB,EAAEvE,UAAU,SAAS0D,eAAe,IAAI,EAAE1D,UAAU,SAAS4D,eAAe,GAAG,CAAC;YAGnG,IAAI,CAACe,aAAa;gBAChB,IAAI,CAACJ,GAAG,CACN,CAAC,6GAA6G,CAAC;YAEnH;YAEA,MAAMc,WAAW,MAAMvE,YAAY;gBACjC0B,WAAW,IAAI,CAACA,SAAS;gBACzBmC;gBACAjB;gBACAE;YACF;YAEA,IAAI,CAACW,GAAG,CAAC,CAAC,IAAI,EAAEvE,UAAU,SAASqF,SAASjB,KAAK,EAAE,QAAQ,CAAC;YAE5D,IAAIvC,MAAMG,MAAM,EAAE;gBAChB;YACF;YAEA,MAAM,IAAI,CAACsC,mBAAmB,CAACe,SAASjB,KAAK;YAC7C,IAAI,CAACG,GAAG,CAAC,CAAC,IAAI,EAAEvE,UAAU,SAASqF,SAASjB,KAAK,EAAE,UAAU,CAAC;QAChE,EAAE,OAAOxB,OAAO;YACd,MAAM4B,UAAU5B,iBAAiB6B,QAAQ7B,MAAM4B,OAAO,GAAGE,OAAO9B;YAChEzB,iBAAiB,8BAA8BqD,SAAS5B;YACxD,IAAI,CAACA,KAAK,CAAC,CAAC,wBAAwB,EAAE4B,SAAS,EAAE;gBAACrE,MAAM;YAAC;QAC3D;IACF;IAEA,MAAc0C,eAAehB,KAAwC,EAAiB;QACpFV,iBAAiB;QAEjB,IAAI;YACF,MAAM8B,OAAO,MAAMjC,oBAAoB;gBACrCkB,OAAOL,MAAMK,KAAK;gBAClBK,QAAQV,MAAMU,MAAM;gBACpBC,WAAW,IAAI,CAACA,SAAS;YAC3B;YAEA,IAAIS,KAAKqC,MAAM,KAAK,GAAG;gBACrB,IAAI,CAACf,GAAG,CAAC;gBACT;YACF;YAEA,IAAI,CAACvB,oBAAoB,CAACC;QAC5B,EAAE,OAAOL,OAAO;YACd,MAAM4B,UAAU5B,iBAAiB6B,QAAQ7B,MAAM4B,OAAO,GAAGE,OAAO9B;
YAChEzB,iBAAiB,wCAAwCqD,SAAS5B;YAClE,IAAI,CAACA,KAAK,CAAC,CAAC,kCAAkC,EAAE4B,SAAS,EAAE;gBAACrE,MAAM;YAAC;QACrE;IACF;IAEA,MAAcmE,oBAAoBF,KAAa,EAAiB;QAC9D,IAAImB,kBAAkB;QACtB,MAAMC,OAAOlF,QAAQ,IAAImF,KAAK;QAE9B,OAAO,IAAIC,QAAc,CAACC,SAASC;YACjC,MAAMC,eAAe9E,sBAAsB;gBAACqD;gBAAO5B,WAAW,IAAI,CAACA,SAAS;YAAA,GAAGsD,SAAS,CAAC;gBACvFC,UAAU;oBACRP,KAAKQ,OAAO,CAAC;oBACbL;gBACF;gBACA/C,OAAO,CAACqD;oBACNT,KAAKU,IAAI,CAAC;oBACVN,OAAOK;gBACT;gBACAE,MAAM,CAACC;oBACL,IAAI,OAAOA,MAAMC,QAAQ,KAAK,UAAU;wBACtCd,kBAAkBa,MAAMC,QAAQ;oBAClC;oBACAb,KAAKc,IAAI,GAAG,CAAC,kBAAkB,EAAEf,gBAAgB,CAAC,CAAC;gBACrD;YACF;YAEA,sEAAsE;YACtEgB,QAAQC,IAAI,CAAC,UAAU;gBACrBX,aAAaY,WAAW;gBACxBjB,KAAKU,IAAI,CAAC;gBACV/F,KAAK;YACP;QACF;IACF;AACF"}
|
|
1
|
+
{"version":3,"sources":["../../../src/commands/dataset/copy.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport {Args, Flags} from '@oclif/core'\nimport {exit} from '@oclif/core/errors'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\nimport {spinner} from '@sanity/cli-core/ux'\nimport {Table} from 'console-table-printer'\nimport {formatDistance} from 'date-fns/formatDistance'\nimport {formatDistanceToNow} from 'date-fns/formatDistanceToNow'\nimport {parseISO} from 'date-fns/parseISO'\n\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName.js'\nimport {promptForDataset} from '../../prompts/promptForDataset.js'\nimport {promptForDatasetName} from '../../prompts/promptForDatasetName.js'\nimport {\n copyDataset,\n type CopyJobProgressEvent,\n type DatasetCopyJob,\n followCopyJobProgress,\n listDatasetCopyJobs,\n listDatasets,\n} from '../../services/datasets.js'\nimport {NO_PROJECT_ID} from '../../util/errorMessages.js'\n\nconst copyDatasetDebug = subdebug('dataset:copy')\n\nexport class CopyDatasetCommand extends SanityCommand<typeof CopyDatasetCommand> {\n static override args = {\n source: Args.string({\n description: 'Name of the dataset to copy from',\n required: false,\n }),\n target: Args.string({\n description: 'Name of the dataset to copy to',\n required: false,\n }),\n }\n\n static override description =\n 'Manages dataset copying, including starting a new copy job, listing copy jobs and following the progress of a running copy job'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Interactively copy a dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> source-dataset',\n description: 'Copy from source-dataset (prompts for target)',\n },\n {\n command: '<%= config.bin %> <%= command.id %> source-dataset target-dataset',\n description: 'Copy from source-dataset to target-dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> 
--skip-history source target',\n description: 'Copy without preserving document history (faster for large datasets)',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --detach source target',\n description: 'Start copy job without waiting for completion',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --attach <job-id>',\n description: 'Attach to a running copy job to follow progress',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --list',\n description: 'List all dataset copy jobs',\n },\n {\n command: '<%= config.bin %> <%= command.id %> --list --offset 2 --limit 10',\n description: 'List copy jobs with pagination',\n },\n ]\n\n static override flags = {\n attach: Flags.string({\n description: 'Attach to the running copy process to show progress',\n exclusive: ['list', 'detach', 'skip-history'],\n required: false,\n }),\n detach: Flags.boolean({\n description: 'Start the copy without waiting for it to finish',\n exclusive: ['list', 'attach'],\n required: false,\n }),\n limit: Flags.integer({\n dependsOn: ['list'],\n description: 'Maximum number of jobs returned (default 10, max 1000)',\n max: 1000,\n required: false,\n }),\n list: Flags.boolean({\n description: 'Lists all dataset copy jobs',\n exclusive: ['attach', 'detach', 'skip-history'],\n required: false,\n }),\n offset: Flags.integer({\n dependsOn: ['list'],\n description: 'Start position in the list of jobs (default 0)',\n required: false,\n }),\n 'skip-history': Flags.boolean({\n description: \"Don't preserve document history on copy\",\n exclusive: ['list', 'attach'],\n required: false,\n }),\n }\n\n private projectId!: string\n\n public async run(): Promise<void> {\n const {args, flags} = await this.parse(CopyDatasetCommand)\n\n const projectId = await this.getProjectId()\n if (!projectId) {\n this.error(NO_PROJECT_ID, {exit: 1})\n }\n\n this.projectId = projectId\n\n // Route to appropriate mode\n if (flags.list) {\n return this.handleListMode(flags)\n }\n\n if 
(flags.attach) {\n return this.handleAttachMode(flags.attach)\n }\n\n return this.handleCopyMode(args, flags)\n }\n\n private displayCopyJobsTable(jobs: DatasetCopyJob[]): void {\n const table = new Table({\n columns: [\n {alignment: 'left', name: 'id', title: 'Job ID'},\n {alignment: 'left', name: 'sourceDataset', title: 'Source Dataset'},\n {alignment: 'left', name: 'targetDataset', title: 'Target Dataset'},\n {alignment: 'left', name: 'state', title: 'State'},\n {alignment: 'left', name: 'withHistory', title: 'With history'},\n {alignment: 'left', name: 'timeStarted', title: 'Time started'},\n {alignment: 'left', name: 'timeTaken', title: 'Time taken'},\n ],\n title: 'Dataset copy jobs for this project in descending order',\n })\n\n for (const job of jobs) {\n const {createdAt, id, sourceDataset, state, targetDataset, updatedAt, withHistory} = job\n\n let timeStarted = ''\n if (createdAt !== '') {\n timeStarted = formatDistanceToNow(parseISO(createdAt))\n }\n\n let timeTaken = ''\n if (updatedAt !== '') {\n timeTaken = formatDistance(parseISO(updatedAt), parseISO(createdAt))\n }\n\n let color: '' | 'green' | 'red' | 'yellow' = ''\n switch (state) {\n case 'completed': {\n color = 'green'\n break\n }\n case 'failed': {\n color = 'red'\n break\n }\n case 'pending': {\n color = 'yellow'\n break\n }\n default: {\n color = ''\n }\n }\n\n table.addRow(\n {\n id,\n sourceDataset,\n state,\n targetDataset,\n timeStarted: `${timeStarted} ago`,\n timeTaken,\n withHistory,\n },\n {color},\n )\n }\n\n table.printTable()\n }\n\n private async handleAttachMode(jobId: string): Promise<void> {\n copyDatasetDebug('Attaching to copy job %s', jobId)\n\n if (!jobId || typeof jobId !== 'string' || jobId.trim() === '') {\n this.error('Please supply a valid jobId', {exit: 1})\n }\n\n try {\n await this.subscribeToProgress(jobId)\n this.log(`Job ${styleText('green', jobId)} completed`)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n copyDatasetDebug('Failed to attach to copy job: %s', message, error)\n this.error(`Failed to attach to copy job: ${message}`, {exit: 1})\n }\n }\n\n private async handleCopyMode(\n args: {source?: string; target?: string},\n flags: {detach?: boolean; 'skip-history'?: boolean},\n ): Promise<void> {\n copyDatasetDebug('Starting copy mode')\n\n const skipHistory = Boolean(flags['skip-history'])\n\n // Get and validate source dataset\n let sourceDataset = args.source\n if (sourceDataset) {\n const nameError = validateDatasetName(sourceDataset)\n if (nameError) {\n this.error(nameError, {exit: 1})\n }\n }\n\n let datasetsResponse\n try {\n datasetsResponse = await listDatasets(this.projectId)\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n copyDatasetDebug('Failed to fetch datasets: %s', message, error)\n this.error(`Failed to fetch datasets: ${message}`, {exit: 1})\n }\n\n const datasetNames = new Set(datasetsResponse.map((ds) => ds.name))\n\n // Prompt for source if not provided\n if (!sourceDataset) {\n sourceDataset = await promptForDataset({\n datasets: datasetsResponse,\n })\n }\n\n if (!datasetNames.has(sourceDataset)) {\n this.error(`Source dataset \"${sourceDataset}\" doesn't exist`, {exit: 1})\n }\n\n // Get and validate target dataset\n let targetDataset = args.target\n if (targetDataset) {\n const nameError = validateDatasetName(targetDataset)\n if (nameError) {\n this.error(nameError, {exit: 1})\n }\n } else {\n targetDataset = await promptForDatasetName({\n message: 'Target dataset name:',\n })\n }\n\n if (datasetNames.has(targetDataset)) {\n this.error(`Target dataset \"${targetDataset}\" already exists`, {exit: 1})\n }\n\n // Start the copy job\n try {\n this.log(\n `Copying dataset ${styleText('green', sourceDataset)} to ${styleText('green', targetDataset)}...`,\n )\n\n if (!skipHistory) {\n this.log(\n `Note: You can run this command with flag '--skip-history'. 
The flag will reduce copy time in larger datasets.`,\n )\n }\n\n const response = await copyDataset({\n projectId: this.projectId,\n skipHistory,\n sourceDataset,\n targetDataset,\n })\n\n this.log(`Job ${styleText('green', response.jobId)} started`)\n\n if (flags.detach) {\n return\n }\n\n await this.subscribeToProgress(response.jobId)\n this.log(`Job ${styleText('green', response.jobId)} completed`)\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n copyDatasetDebug('Dataset copying failed: %s', message, error)\n this.error(`Dataset copying failed: ${message}`, {exit: 1})\n }\n }\n\n private async handleListMode(flags: {limit?: number; offset?: number}): Promise<void> {\n copyDatasetDebug('Listing dataset copy jobs')\n\n try {\n const jobs = await listDatasetCopyJobs({\n limit: flags.limit,\n offset: flags.offset,\n projectId: this.projectId,\n })\n\n if (jobs.length === 0) {\n this.log(\"This project doesn't have any dataset copy jobs\")\n return\n }\n\n this.displayCopyJobsTable(jobs)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n copyDatasetDebug('Failed to list dataset copy jobs: %s', message, error)\n this.error(`Failed to list dataset copy jobs: ${message}`, {exit: 1})\n }\n }\n\n private async subscribeToProgress(jobId: string): Promise<void> {\n let currentProgress = 0\n const spin = spinner('').start()\n\n return new Promise<void>((resolve, reject) => {\n const subscription = followCopyJobProgress({jobId, projectId: this.projectId}).subscribe({\n complete: () => {\n spin.succeed('Copy finished.')\n resolve()\n },\n error: (err) => {\n spin.fail('Copy failed.')\n reject(err)\n },\n next: (event: CopyJobProgressEvent) => {\n if (typeof event.progress === 'number') {\n currentProgress = event.progress\n }\n spin.text = `Copy in progress: ${currentProgress}%`\n },\n })\n\n // Cleanup on process termination - use 'once' to prevent memory leaks\n process.once('SIGINT', () => {\n subscription.unsubscribe()\n spin.fail('Copy interrupted.')\n exit(130)\n })\n })\n }\n}\n"],"names":["styleText","Args","Flags","exit","SanityCommand","subdebug","spinner","Table","formatDistance","formatDistanceToNow","parseISO","validateDatasetName","promptForDataset","promptForDatasetName","copyDataset","followCopyJobProgress","listDatasetCopyJobs","listDatasets","NO_PROJECT_ID","copyDatasetDebug","CopyDatasetCommand","args","source","string","description","required","target","examples","command","flags","attach","exclusive","detach","boolean","limit","integer","dependsOn","max","list","offset","projectId","run","parse","getProjectId","error","handleListMode","handleAttachMode","handleCopyMode","displayCopyJobsTable","jobs","table","columns","alignment","name","title","job","createdAt","id","sourceDataset","state","targetDataset","updatedAt","withHistory","timeStarted","timeTaken","color","addRow","printTable","jobId","trim","subscribeToProgress","log","message","Error","String","skipHistory","Boolean","nameError","datasetsResponse","datasetNames","Set","map","ds","datasets","has",
"response","length","currentProgress","spin","start","Promise","resolve","reject","subscription","subscribe","complete","succeed","err","fail","next","event","progress","text","process","once","unsubscribe"],"mappings":"AAAA,SAAQA,SAAS,QAAO,YAAW;AAEnC,SAAQC,IAAI,EAAEC,KAAK,QAAO,cAAa;AACvC,SAAQC,IAAI,QAAO,qBAAoB;AACvC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AACxD,SAAQC,OAAO,QAAO,sBAAqB;AAC3C,SAAQC,KAAK,QAAO,wBAAuB;AAC3C,SAAQC,cAAc,QAAO,0BAAyB;AACtD,SAAQC,mBAAmB,QAAO,+BAA8B;AAChE,SAAQC,QAAQ,QAAO,oBAAmB;AAE1C,SAAQC,mBAAmB,QAAO,+CAA8C;AAChF,SAAQC,gBAAgB,QAAO,oCAAmC;AAClE,SAAQC,oBAAoB,QAAO,wCAAuC;AAC1E,SACEC,WAAW,EAGXC,qBAAqB,EACrBC,mBAAmB,EACnBC,YAAY,QACP,6BAA4B;AACnC,SAAQC,aAAa,QAAO,8BAA6B;AAEzD,MAAMC,mBAAmBd,SAAS;AAElC,OAAO,MAAMe,2BAA2BhB;IACtC,OAAgBiB,OAAO;QACrBC,QAAQrB,KAAKsB,MAAM,CAAC;YAClBC,aAAa;YACbC,UAAU;QACZ;QACAC,QAAQzB,KAAKsB,MAAM,CAAC;YAClBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cACd,iIAAgI;IAElI,OAAgBG,WAAW;QACzB;YACEC,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;QACA;YACEI,SAAS;YACTJ,aAAa;QACf;KACD,CAAA;IAED,OAAgBK,QAAQ;QACtBC,QAAQ5B,MAAMqB,MAAM,CAAC;YACnBC,aAAa;YACbO,WAAW;gBAAC;gBAAQ;gBAAU;aAAe;YAC7CN,UAAU;QACZ;QACAO,QAAQ9B,MAAM+B,OAAO,CAAC;YACpBT,aAAa;YACbO,WAAW;gBAAC;gBAAQ;aAAS;YAC7BN,UAAU;QACZ;QACAS,OAAOhC,MAAMiC,OAAO,CAAC;YACnBC,WAAW;gBAAC;aAAO;YACnBZ,aAAa;YACba,KAAK;YACLZ,UAAU;QACZ;QACAa,MAAMpC,MAAM+B,OAAO,CAAC;YAClBT,aAAa;YACbO,WAAW;gBAAC;gBAAU;gBAAU;aAAe;YAC/CN,UAAU;QACZ;QACAc,QAAQrC,MAAMiC,OAAO,CAAC;YACpBC,WAAW;gBAAC;aAAO;YACnBZ,aAAa;YACbC,UAAU;QACZ;QACA,gBAAgBvB,MAAM+B,OAAO,CAAC;YAC5BT,aAAa;YACbO,WAAW;gBAAC;gBAAQ;aAAS;YAC7BN,UAAU;QACZ;IACF,EAAC;IAEOe,UAAkB;IAE1B,MAAaC,MAAqB;QAChC,MAAM,EAACpB,IAAI,EAAEQ,KAAK,EAAC,GAAG,MAAM,IAAI,CAACa,KAAK,CAACtB;QAEvC,MAAMoB,YAAY,MAAM,IAAI,CAACG,YAAY;QACzC,IAAI,CAACH,WAAW;YACd,IAAI,CAACI,KAAK,CAAC1B,eAAe;gBAACf,MAAM;YAAC;QACpC;QAEA,IAAI,CAACqC,SAAS,G
AAGA;QAEjB,4BAA4B;QAC5B,IAAIX,MAAMS,IAAI,EAAE;YACd,OAAO,IAAI,CAACO,cAAc,CAAChB;QAC7B;QAEA,IAAIA,MAAMC,MAAM,EAAE;YAChB,OAAO,IAAI,CAACgB,gBAAgB,CAACjB,MAAMC,MAAM;QAC3C;QAEA,OAAO,IAAI,CAACiB,cAAc,CAAC1B,MAAMQ;IACnC;IAEQmB,qBAAqBC,IAAsB,EAAQ;QACzD,MAAMC,QAAQ,IAAI3C,MAAM;YACtB4C,SAAS;gBACP;oBAACC,WAAW;oBAAQC,MAAM;oBAAMC,OAAO;gBAAQ;gBAC/C;oBAACF,WAAW;oBAAQC,MAAM;oBAAiBC,OAAO;gBAAgB;gBAClE;oBAACF,WAAW;oBAAQC,MAAM;oBAAiBC,OAAO;gBAAgB;gBAClE;oBAACF,WAAW;oBAAQC,MAAM;oBAASC,OAAO;gBAAO;gBACjD;oBAACF,WAAW;oBAAQC,MAAM;oBAAeC,OAAO;gBAAc;gBAC9D;oBAACF,WAAW;oBAAQC,MAAM;oBAAeC,OAAO;gBAAc;gBAC9D;oBAACF,WAAW;oBAAQC,MAAM;oBAAaC,OAAO;gBAAY;aAC3D;YACDA,OAAO;QACT;QAEA,KAAK,MAAMC,OAAON,KAAM;YACtB,MAAM,EAACO,SAAS,EAAEC,EAAE,EAAEC,aAAa,EAAEC,KAAK,EAAEC,aAAa,EAAEC,SAAS,EAAEC,WAAW,EAAC,GAAGP;YAErF,IAAIQ,cAAc;YAClB,IAAIP,cAAc,IAAI;gBACpBO,cAActD,oBAAoBC,SAAS8C;YAC7C;YAEA,IAAIQ,YAAY;YAChB,IAAIH,cAAc,IAAI;gBACpBG,YAAYxD,eAAeE,SAASmD,YAAYnD,SAAS8C;YAC3D;YAEA,IAAIS,QAAyC;YAC7C,OAAQN;gBACN,KAAK;oBAAa;wBAChBM,QAAQ;wBACR;oBACF;gBACA,KAAK;oBAAU;wBACbA,QAAQ;wBACR;oBACF;gBACA,KAAK;oBAAW;wBACdA,QAAQ;wBACR;oBACF;gBACA;oBAAS;wBACPA,QAAQ;oBACV;YACF;YAEAf,MAAMgB,MAAM,CACV;gBACET;gBACAC;gBACAC;gBACAC;gBACAG,aAAa,GAAGA,YAAY,IAAI,CAAC;gBACjCC;gBACAF;YACF,GACA;gBAACG;YAAK;QAEV;QAEAf,MAAMiB,UAAU;IAClB;IAEA,MAAcrB,iBAAiBsB,KAAa,EAAiB;QAC3DjD,iBAAiB,4BAA4BiD;QAE7C,IAAI,CAACA,SAAS,OAAOA,UAAU,YAAYA,MAAMC,IAAI,OAAO,IAAI;YAC9D,IAAI,CAACzB,KAAK,CAAC,+BAA+B;gBAACzC,MAAM;YAAC;QACpD;QAEA,IAAI;YACF,MAAM,IAAI,CAACmE,mBAAmB,CAACF;YAC/B,IAAI,CAACG,GAAG,CAAC,CAAC,IAAI,EAAEvE,UAAU,SAASoE,OAAO,UAAU,CAAC;QACvD,EAAE,OAAOxB,OAAO;YACd,MAAM4B,UAAU5B,iBAAiB6B,QAAQ7B,MAAM4B,OAAO,GAAGE,OAAO9B;YAChEzB,iBAAiB,oCAAoCqD,SAAS5B;YAC9D,IAAI,CAACA,KAAK,CAAC,CAAC,8BAA8B,EAAE4B,SAAS,EAAE;gBAACrE,MAAM;YAAC;QACjE;IACF;IAEA,MAAc4C,eACZ1B,IAAwC,EACxCQ,KAAmD,EACpC;QACfV,iBAAiB;QAEjB,MAAMwD,cAAcC,QAAQ/C,KAAK,CAAC,eAAe;QAEjD,kCAAkC;QAClC,IAAI6B,gBAAgBrC,KAAKC,MAAM;QAC/B,IAAIoC,eAAe;YACjB,MAAMmB,YAAYlE,oBAAoB+C;YACtC,IAAImB,WAAW;gBACb,IAAI,CAACjC,KAAK,CAACi
C,WAAW;oBAAC1E,MAAM;gBAAC;YAChC;QACF;QAEA,IAAI2E;QACJ,IAAI;YACFA,mBAAmB,MAAM7D,aAAa,IAAI,CAACuB,SAAS;QACtD,EAAE,OAAOI,OAAO;YACd,MAAM4B,UAAU5B,iBAAiB6B,QAAQ7B,MAAM4B,OAAO,GAAGE,OAAO9B;YAChEzB,iBAAiB,gCAAgCqD,SAAS5B;YAC1D,IAAI,CAACA,KAAK,CAAC,CAAC,0BAA0B,EAAE4B,SAAS,EAAE;gBAACrE,MAAM;YAAC;QAC7D;QAEA,MAAM4E,eAAe,IAAIC,IAAIF,iBAAiBG,GAAG,CAAC,CAACC,KAAOA,GAAG7B,IAAI;QAEjE,oCAAoC;QACpC,IAAI,CAACK,eAAe;YAClBA,gBAAgB,MAAM9C,iBAAiB;gBACrCuE,UAAUL;YACZ;QACF;QAEA,IAAI,CAACC,aAAaK,GAAG,CAAC1B,gBAAgB;YACpC,IAAI,CAACd,KAAK,CAAC,CAAC,gBAAgB,EAAEc,cAAc,eAAe,CAAC,EAAE;gBAACvD,MAAM;YAAC;QACxE;QAEA,kCAAkC;QAClC,IAAIyD,gBAAgBvC,KAAKK,MAAM;QAC/B,IAAIkC,eAAe;YACjB,MAAMiB,YAAYlE,oBAAoBiD;YACtC,IAAIiB,WAAW;gBACb,IAAI,CAACjC,KAAK,CAACiC,WAAW;oBAAC1E,MAAM;gBAAC;YAChC;QACF,OAAO;YACLyD,gBAAgB,MAAM/C,qBAAqB;gBACzC2D,SAAS;YACX;QACF;QAEA,IAAIO,aAAaK,GAAG,CAACxB,gBAAgB;YACnC,IAAI,CAAChB,KAAK,CAAC,CAAC,gBAAgB,EAAEgB,cAAc,gBAAgB,CAAC,EAAE;gBAACzD,MAAM;YAAC;QACzE;QAEA,qBAAqB;QACrB,IAAI;YACF,IAAI,CAACoE,GAAG,CACN,CAAC,gBAAgB,EAAEvE,UAAU,SAAS0D,eAAe,IAAI,EAAE1D,UAAU,SAAS4D,eAAe,GAAG,CAAC;YAGnG,IAAI,CAACe,aAAa;gBAChB,IAAI,CAACJ,GAAG,CACN,CAAC,6GAA6G,CAAC;YAEnH;YAEA,MAAMc,WAAW,MAAMvE,YAAY;gBACjC0B,WAAW,IAAI,CAACA,SAAS;gBACzBmC;gBACAjB;gBACAE;YACF;YAEA,IAAI,CAACW,GAAG,CAAC,CAAC,IAAI,EAAEvE,UAAU,SAASqF,SAASjB,KAAK,EAAE,QAAQ,CAAC;YAE5D,IAAIvC,MAAMG,MAAM,EAAE;gBAChB;YACF;YAEA,MAAM,IAAI,CAACsC,mBAAmB,CAACe,SAASjB,KAAK;YAC7C,IAAI,CAACG,GAAG,CAAC,CAAC,IAAI,EAAEvE,UAAU,SAASqF,SAASjB,KAAK,EAAE,UAAU,CAAC;QAChE,EAAE,OAAOxB,OAAO;YACd,MAAM4B,UAAU5B,iBAAiB6B,QAAQ7B,MAAM4B,OAAO,GAAGE,OAAO9B;YAChEzB,iBAAiB,8BAA8BqD,SAAS5B;YACxD,IAAI,CAACA,KAAK,CAAC,CAAC,wBAAwB,EAAE4B,SAAS,EAAE;gBAACrE,MAAM;YAAC;QAC3D;IACF;IAEA,MAAc0C,eAAehB,KAAwC,EAAiB;QACpFV,iBAAiB;QAEjB,IAAI;YACF,MAAM8B,OAAO,MAAMjC,oBAAoB;gBACrCkB,OAAOL,MAAMK,KAAK;gBAClBK,QAAQV,MAAMU,MAAM;gBACpBC,WAAW,IAAI,CAACA,SAAS;YAC3B;YAEA,IAAIS,KAAKqC,MAAM,KAAK,GAAG;gBACrB,IAAI,CAACf,GAAG,CAAC;gBACT;YACF;YAEA,IAAI,CAACvB,oBAAoB,CAACC;QAC5B,EAAE,OAAOL,OAAO;YACd,MAAM4B,UAAU5B,iBA
AiB6B,QAAQ7B,MAAM4B,OAAO,GAAGE,OAAO9B;YAChEzB,iBAAiB,wCAAwCqD,SAAS5B;YAClE,IAAI,CAACA,KAAK,CAAC,CAAC,kCAAkC,EAAE4B,SAAS,EAAE;gBAACrE,MAAM;YAAC;QACrE;IACF;IAEA,MAAcmE,oBAAoBF,KAAa,EAAiB;QAC9D,IAAImB,kBAAkB;QACtB,MAAMC,OAAOlF,QAAQ,IAAImF,KAAK;QAE9B,OAAO,IAAIC,QAAc,CAACC,SAASC;YACjC,MAAMC,eAAe9E,sBAAsB;gBAACqD;gBAAO5B,WAAW,IAAI,CAACA,SAAS;YAAA,GAAGsD,SAAS,CAAC;gBACvFC,UAAU;oBACRP,KAAKQ,OAAO,CAAC;oBACbL;gBACF;gBACA/C,OAAO,CAACqD;oBACNT,KAAKU,IAAI,CAAC;oBACVN,OAAOK;gBACT;gBACAE,MAAM,CAACC;oBACL,IAAI,OAAOA,MAAMC,QAAQ,KAAK,UAAU;wBACtCd,kBAAkBa,MAAMC,QAAQ;oBAClC;oBACAb,KAAKc,IAAI,GAAG,CAAC,kBAAkB,EAAEf,gBAAgB,CAAC,CAAC;gBACrD;YACF;YAEA,sEAAsE;YACtEgB,QAAQC,IAAI,CAAC,UAAU;gBACrBX,aAAaY,WAAW;gBACxBjB,KAAKU,IAAI,CAAC;gBACV/F,KAAK;YACP;QACF;IACF;AACF"}
|
|
@@ -35,6 +35,16 @@ export class CreateDatasetCommand extends SanityCommand {
|
|
|
35
35
|
}
|
|
36
36
|
];
|
|
37
37
|
static flags = {
|
|
38
|
+
embeddings: Flags.boolean({
|
|
39
|
+
default: false,
|
|
40
|
+
description: 'Enable embeddings for this dataset'
|
|
41
|
+
}),
|
|
42
|
+
'embeddings-projection': Flags.string({
|
|
43
|
+
dependsOn: [
|
|
44
|
+
'embeddings'
|
|
45
|
+
],
|
|
46
|
+
description: 'GROQ projection for embeddings indexing (e.g. "{ title, body }")'
|
|
47
|
+
}),
|
|
38
48
|
visibility: Flags.string({
|
|
39
49
|
description: 'Set visibility for this dataset (custom/private/public)',
|
|
40
50
|
options: ALLOWED_ACL_MODES,
|
|
@@ -89,6 +99,8 @@ export class CreateDatasetCommand extends SanityCommand {
|
|
|
89
99
|
try {
|
|
90
100
|
await createDataset({
|
|
91
101
|
datasetName,
|
|
102
|
+
embeddings: flags.embeddings,
|
|
103
|
+
embeddingsProjection: flags['embeddings-projection'],
|
|
92
104
|
output: this.output,
|
|
93
105
|
projectFeatures,
|
|
94
106
|
projectId,
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../../../src/commands/dataset/create.ts"],"sourcesContent":["import {Args, Flags} from '@oclif/core'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\n\nimport {createDataset} from '../../actions/dataset/create.js'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName.js'\nimport {promptForDatasetName} from '../../prompts/promptForDatasetName.js'\nimport {listDatasets} from '../../services/datasets.js'\nimport {getProjectFeatures} from '../../services/getProjectFeatures.js'\nimport {NO_PROJECT_ID} from '../../util/errorMessages.js'\n\nconst createDatasetDebug = subdebug('dataset:create')\n\nconst ALLOWED_ACL_MODES = ['custom', 'private', 'public']\n\nexport class CreateDatasetCommand extends SanityCommand<typeof CreateDatasetCommand> {\n static override args = {\n name: Args.string({\n description: 'Name of the dataset to create',\n required: false,\n }),\n }\n\n static override description = 'Create a new dataset within your project'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Interactively create a dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> my-dataset',\n description: 'Create a dataset named \"my-dataset\"',\n },\n {\n command: '<%= config.bin %> <%= command.id %> my-dataset --visibility private',\n description: 'Create a private dataset named \"my-dataset\"',\n },\n ]\n\n static override flags = {\n visibility: Flags.string({\n description: 'Set visibility for this dataset (custom/private/public)',\n options: ALLOWED_ACL_MODES,\n required: false,\n }),\n }\n\n public async run(): Promise<void> {\n const {args, flags} = await this.parse(CreateDatasetCommand)\n const {visibility} = flags\n\n // Ensure we have project context\n const projectId = await this.getProjectId()\n if (!projectId) {\n this.error(NO_PROJECT_ID, {exit: 1})\n }\n\n // Get dataset name from args or prompt\n let {name: datasetName} = args\n if (datasetName) 
{\n const nameError = validateDatasetName(datasetName)\n if (nameError) {\n this.error(nameError, {exit: 1})\n }\n } else {\n datasetName = await promptForDatasetName()\n }\n\n let datasets: string[]\n let projectFeatures: string[]\n\n try {\n const [datasetsResponse, featuresResponse] = await Promise.all([\n listDatasets(projectId),\n getProjectFeatures(projectId),\n ])\n datasets = datasetsResponse.map((ds) => ds.name)\n projectFeatures = featuresResponse\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n createDatasetDebug(`Failed to fetch project data: ${message}`, error)\n this.error(`Failed to fetch project data: ${message}`, {exit: 1})\n }\n\n if (datasets.includes(datasetName)) {\n this.error(`Dataset \"${datasetName}\" already exists`, {exit: 1})\n }\n\n const canCreatePrivate = projectFeatures.includes('privateDataset')\n createDatasetDebug('%s create private datasets', canCreatePrivate ? 'Can' : 'Cannot')\n\n try {\n await createDataset({\n datasetName,\n output: this.output,\n projectFeatures,\n projectId,\n visibility,\n })\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n this.error(`Failed to create dataset: ${message}`, {exit: 1})\n }\n }\n}\n"],"names":["Args","Flags","SanityCommand","subdebug","createDataset","validateDatasetName","promptForDatasetName","listDatasets","getProjectFeatures","NO_PROJECT_ID","createDatasetDebug","ALLOWED_ACL_MODES","CreateDatasetCommand","args","name","string","description","required","examples","command","flags","visibility","options","run","parse","projectId","getProjectId","error","exit","datasetName","nameError","datasets","projectFeatures","datasetsResponse","featuresResponse","Promise","all","map","ds","message","Error","String","includes","canCreatePrivate","output"],"mappings":"AAAA,SAAQA,IAAI,EAAEC,KAAK,QAAO,cAAa;AACvC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AAExD,SAAQC,aAAa,QAAO,kCAAiC;AAC7D,SAAQC,mBAAmB,QAAO,+CAA8C;AAChF,SAAQC,oBAAoB,QAAO,wCAAuC;AAC1E,SAAQC,YAAY,QAAO,6BAA4B;AACvD,SAAQC,kBAAkB,QAAO,uCAAsC;AACvE,SAAQC,aAAa,QAAO,8BAA6B;AAEzD,MAAMC,qBAAqBP,SAAS;AAEpC,MAAMQ,oBAAoB;IAAC;IAAU;IAAW;CAAS;AAEzD,OAAO,MAAMC,6BAA6BV;IACxC,OAAgBW,OAAO;QACrBC,MAAMd,KAAKe,MAAM,CAAC;YAChBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,2CAA0C;IAExE,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;KACD,CAAA;IAED,OAAgBI,QAAQ;QACtBC,YAAYpB,MAAMc,MAAM,CAAC;YACvBC,aAAa;
|
|
1
|
+
{"version":3,"sources":["../../../src/commands/dataset/create.ts"],"sourcesContent":["import {Args, Flags} from '@oclif/core'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\n\nimport {createDataset} from '../../actions/dataset/create.js'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName.js'\nimport {promptForDatasetName} from '../../prompts/promptForDatasetName.js'\nimport {listDatasets} from '../../services/datasets.js'\nimport {getProjectFeatures} from '../../services/getProjectFeatures.js'\nimport {NO_PROJECT_ID} from '../../util/errorMessages.js'\n\nconst createDatasetDebug = subdebug('dataset:create')\n\nconst ALLOWED_ACL_MODES = ['custom', 'private', 'public']\n\nexport class CreateDatasetCommand extends SanityCommand<typeof CreateDatasetCommand> {\n static override args = {\n name: Args.string({\n description: 'Name of the dataset to create',\n required: false,\n }),\n }\n\n static override description = 'Create a new dataset within your project'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Interactively create a dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> my-dataset',\n description: 'Create a dataset named \"my-dataset\"',\n },\n {\n command: '<%= config.bin %> <%= command.id %> my-dataset --visibility private',\n description: 'Create a private dataset named \"my-dataset\"',\n },\n ]\n\n static override flags = {\n embeddings: Flags.boolean({\n default: false,\n description: 'Enable embeddings for this dataset',\n }),\n 'embeddings-projection': Flags.string({\n dependsOn: ['embeddings'],\n description: 'GROQ projection for embeddings indexing (e.g. 
\"{ title, body }\")',\n }),\n visibility: Flags.string({\n description: 'Set visibility for this dataset (custom/private/public)',\n options: ALLOWED_ACL_MODES,\n required: false,\n }),\n }\n\n public async run(): Promise<void> {\n const {args, flags} = await this.parse(CreateDatasetCommand)\n const {visibility} = flags\n\n // Ensure we have project context\n const projectId = await this.getProjectId()\n if (!projectId) {\n this.error(NO_PROJECT_ID, {exit: 1})\n }\n\n // Get dataset name from args or prompt\n let {name: datasetName} = args\n if (datasetName) {\n const nameError = validateDatasetName(datasetName)\n if (nameError) {\n this.error(nameError, {exit: 1})\n }\n } else {\n datasetName = await promptForDatasetName()\n }\n\n let datasets: string[]\n let projectFeatures: string[]\n\n try {\n const [datasetsResponse, featuresResponse] = await Promise.all([\n listDatasets(projectId),\n getProjectFeatures(projectId),\n ])\n datasets = datasetsResponse.map((ds) => ds.name)\n projectFeatures = featuresResponse\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n createDatasetDebug(`Failed to fetch project data: ${message}`, error)\n this.error(`Failed to fetch project data: ${message}`, {exit: 1})\n }\n\n if (datasets.includes(datasetName)) {\n this.error(`Dataset \"${datasetName}\" already exists`, {exit: 1})\n }\n\n const canCreatePrivate = projectFeatures.includes('privateDataset')\n createDatasetDebug('%s create private datasets', canCreatePrivate ? 'Can' : 'Cannot')\n\n try {\n await createDataset({\n datasetName,\n embeddings: flags.embeddings,\n embeddingsProjection: flags['embeddings-projection'],\n output: this.output,\n projectFeatures,\n projectId,\n visibility,\n })\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n this.error(`Failed to create dataset: ${message}`, {exit: 1})\n }\n }\n}\n"],"names":["Args","Flags","SanityCommand","subdebug","createDataset","validateDatasetName","promptForDatasetName","listDatasets","getProjectFeatures","NO_PROJECT_ID","createDatasetDebug","ALLOWED_ACL_MODES","CreateDatasetCommand","args","name","string","description","required","examples","command","flags","embeddings","boolean","default","dependsOn","visibility","options","run","parse","projectId","getProjectId","error","exit","datasetName","nameError","datasets","projectFeatures","datasetsResponse","featuresResponse","Promise","all","map","ds","message","Error","String","includes","canCreatePrivate","embeddingsProjection","output"],"mappings":"AAAA,SAAQA,IAAI,EAAEC,KAAK,QAAO,cAAa;AACvC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AAExD,SAAQC,aAAa,QAAO,kCAAiC;AAC7D,SAAQC,mBAAmB,QAAO,+CAA8C;AAChF,SAAQC,oBAAoB,QAAO,wCAAuC;AAC1E,SAAQC,YAAY,QAAO,6BAA4B;AACvD,SAAQC,kBAAkB,QAAO,uCAAsC;AACvE,SAAQC,aAAa,QAAO,8BAA6B;AAEzD,MAAMC,qBAAqBP,SAAS;AAEpC,MAAMQ,oBAAoB;IAAC;IAAU;IAAW;CAAS;AAEzD,OAAO,MAAMC,6BAA6BV;IACxC,OAAgBW,OAAO;QACrBC,MAAMd,KAAKe,MAAM,CAAC;YAChBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,2CAA0C;IAExE,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;KACD,CAAA;IAED,OAAgBI,QAAQ;QACtBC,YAAYpB,MAAMqB,OAAO,CAAC;YACxBC,SAAS;YACTP,aAAa;QACf;QACA,yBAAyBf,MAAMc,MAAM,CAAC;YACpCS,WAAW;gBAAC;aAAa;YACzBR,aAAa;QACf;QACAS,YAAYxB,MAAMc,MAAM,CAAC;YACvBC,aAAa;YACbU,SAASf;YACTM,UAAU;QACZ;IACF,EAAC;IAED,MAAaU,MAAqB;QAChC,MAAM,EAACd,IAAI,EAAEO,KAAK,EAAC,GAAG,MAAM,IAAI,CAACQ,KAAK,CAAChB;QACvC,MAAM,EAACa,UAAU,EAAC,GAAGL;QAErB,iCAAiC;QACjC,MAAMS,YAAY,MAAM,IAAI,CAACC,YAAY;QACzC,IAAI,CAACD,WAAW;YACd,IAAI,CAACE,KAAK,CAACtB,eAAe;gBAACuB,MAAM;YAAC;QACpC;QAEA,uCAAuC;QACvC,IAAI,EAAClB,MAAMmB,WAAW,EAAC,GAAGpB;QAC1B,IAAIoB,aAAa;YACf,MAAMC,YAAY7B,oBAAoB4B;YACtC,IAAIC,WAAW;gBACb,IAAI,CAACH,KAAK,CAACG,WAAW;oBAACF,MAAM;gBAAC;YAChC;QACF,OAAO;YA
CLC,cAAc,MAAM3B;QACtB;QAEA,IAAI6B;QACJ,IAAIC;QAEJ,IAAI;YACF,MAAM,CAACC,kBAAkBC,iBAAiB,GAAG,MAAMC,QAAQC,GAAG,CAAC;gBAC7DjC,aAAasB;gBACbrB,mBAAmBqB;aACpB;YACDM,WAAWE,iBAAiBI,GAAG,CAAC,CAACC,KAAOA,GAAG5B,IAAI;YAC/CsB,kBAAkBE;QACpB,EAAE,OAAOP,OAAO;YACd,MAAMY,UAAUZ,iBAAiBa,QAAQb,MAAMY,OAAO,GAAGE,OAAOd;YAChErB,mBAAmB,CAAC,8BAA8B,EAAEiC,SAAS,EAAEZ;YAC/D,IAAI,CAACA,KAAK,CAAC,CAAC,8BAA8B,EAAEY,SAAS,EAAE;gBAACX,MAAM;YAAC;QACjE;QAEA,IAAIG,SAASW,QAAQ,CAACb,cAAc;YAClC,IAAI,CAACF,KAAK,CAAC,CAAC,SAAS,EAAEE,YAAY,gBAAgB,CAAC,EAAE;gBAACD,MAAM;YAAC;QAChE;QAEA,MAAMe,mBAAmBX,gBAAgBU,QAAQ,CAAC;QAClDpC,mBAAmB,8BAA8BqC,mBAAmB,QAAQ;QAE5E,IAAI;YACF,MAAM3C,cAAc;gBAClB6B;gBACAZ,YAAYD,MAAMC,UAAU;gBAC5B2B,sBAAsB5B,KAAK,CAAC,wBAAwB;gBACpD6B,QAAQ,IAAI,CAACA,MAAM;gBACnBb;gBACAP;gBACAJ;YACF;QACF,EAAE,OAAOM,OAAO;YACd,MAAMY,UAAUZ,iBAAiBa,QAAQb,MAAMY,OAAO,GAAGE,OAAOd;YAChE,IAAI,CAACA,KAAK,CAAC,CAAC,0BAA0B,EAAEY,SAAS,EAAE;gBAACX,MAAM;YAAC;QAC7D;IACF;AACF"}
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import { styleText } from 'node:util';
|
|
2
|
+
import { Args } from '@oclif/core';
|
|
3
|
+
import { SanityCommand, subdebug } from '@sanity/cli-core';
|
|
4
|
+
import { resolveDataset } from '../../../actions/dataset/resolveDataset.js';
|
|
5
|
+
import { setEmbeddingsSettings } from '../../../services/embeddings.js';
|
|
6
|
+
import { NO_PROJECT_ID } from '../../../util/errorMessages.js';
|
|
7
|
+
const debug = subdebug('dataset:embeddings:disable');
|
|
8
|
+
/**
 * `sanity dataset embeddings disable` — turns embeddings off for a dataset.
 *
 * Resolves the target dataset (from the positional arg or interactively via
 * `resolveDataset`), then calls the embeddings settings service with
 * `enabled: false` and reports the outcome.
 */
export class DatasetEmbeddingsDisableCommand extends SanityCommand {
  static args = {
    dataset: Args.string({
      description: 'Dataset name to disable embeddings for',
      required: false,
    }),
  };

  static description = 'Disable embeddings for a dataset';

  static examples = [
    {
      command: '<%= config.bin %> <%= command.id %> production',
      description: 'Disable embeddings for the production dataset',
    },
  ];

  async run() {
    const { args } = await this.parse(DatasetEmbeddingsDisableCommand);
    let { dataset } = args;

    // A project context is mandatory; bail out with the shared error message.
    const projectId = await this.getProjectId();
    if (!projectId) {
      this.error(NO_PROJECT_ID, { exit: 1 });
    }

    // Resolve the dataset name (prompting/validating happens inside the helper).
    try {
      const resolved = await resolveDataset({ dataset, projectId });
      dataset = resolved.dataset;
    } catch (err) {
      const reason = err instanceof Error ? err.message : String(err);
      debug(`Failed to resolve dataset: ${reason}`, err);
      this.error(reason, { exit: 1 });
    }

    // Flip the embeddings setting off for the resolved dataset.
    try {
      await setEmbeddingsSettings({ dataset, enabled: false, projectId });
    } catch (err) {
      const reason = err instanceof Error ? err.message : String(err);
      debug(`Failed to disable embeddings: ${reason}`, err);
      this.error(`Failed to disable embeddings: ${reason}`, { exit: 1 });
    }

    this.log(styleText('green', `Disabled embeddings for dataset ${dataset}.`));
    this.log(styleText('yellow', 'Note: Existing embedding data will be removed.'));
  }
}
|
|
61
|
+
|
|
62
|
+
//# sourceMappingURL=disable.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/commands/dataset/embeddings/disable.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport {Args} from '@oclif/core'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\n\nimport {resolveDataset} from '../../../actions/dataset/resolveDataset.js'\nimport {setEmbeddingsSettings} from '../../../services/embeddings.js'\nimport {NO_PROJECT_ID} from '../../../util/errorMessages.js'\n\nconst debug = subdebug('dataset:embeddings:disable')\n\nexport class DatasetEmbeddingsDisableCommand extends SanityCommand<\n typeof DatasetEmbeddingsDisableCommand\n> {\n static override args = {\n dataset: Args.string({\n description: 'Dataset name to disable embeddings for',\n required: false,\n }),\n }\n\n static override description = 'Disable embeddings for a dataset'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %> production',\n description: 'Disable embeddings for the production dataset',\n },\n ]\n\n public async run(): Promise<void> {\n const {args} = await this.parse(DatasetEmbeddingsDisableCommand)\n let {dataset} = args\n\n const projectId = await this.getProjectId()\n if (!projectId) {\n this.error(NO_PROJECT_ID, {exit: 1})\n }\n\n try {\n ;({dataset} = await resolveDataset({dataset, projectId}))\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n debug(`Failed to resolve dataset: ${message}`, error)\n this.error(message, {exit: 1})\n }\n\n try {\n await setEmbeddingsSettings({dataset, enabled: false, projectId})\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n debug(`Failed to disable embeddings: ${message}`, error)\n this.error(`Failed to disable embeddings: ${message}`, {exit: 1})\n }\n\n this.log(styleText('green', `Disabled embeddings for dataset ${dataset}.`))\n this.log(styleText('yellow', 'Note: Existing embedding data will be removed.'))\n }\n}\n"],"names":["styleText","Args","SanityCommand","subdebug","resolveDataset","setEmbeddingsSettings","NO_PROJECT_ID","debug","DatasetEmbeddingsDisableCommand","args","dataset","string","description","required","examples","command","run","parse","projectId","getProjectId","error","exit","message","Error","String","enabled","log"],"mappings":"AAAA,SAAQA,SAAS,QAAO,YAAW;AAEnC,SAAQC,IAAI,QAAO,cAAa;AAChC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AAExD,SAAQC,cAAc,QAAO,6CAA4C;AACzE,SAAQC,qBAAqB,QAAO,kCAAiC;AACrE,SAAQC,aAAa,QAAO,iCAAgC;AAE5D,MAAMC,QAAQJ,SAAS;AAEvB,OAAO,MAAMK,wCAAwCN;IAGnD,OAAgBO,OAAO;QACrBC,SAAST,KAAKU,MAAM,CAAC;YACnBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,mCAAkC;IAEhE,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;KACD,CAAA;IAED,MAAaI,MAAqB;QAChC,MAAM,EAACP,IAAI,EAAC,GAAG,MAAM,IAAI,CAACQ,KAAK,CAACT;QAChC,IAAI,EAACE,OAAO,EAAC,GAAGD;QAEhB,MAAMS,YAAY,MAAM,IAAI,CAACC,YAAY;QACzC,IAAI,CAACD,WAAW;YACd,IAAI,CAACE,KAAK,CAACd,eAAe;gBAACe,MAAM;YAAC;QACpC;QAEA,IAAI;;YACA,CAAA,EAACX,OAAO,EAAC,GAAG,MAAMN,eAAe;gBAACM;gBAASQ;YAAS,EAAC;QACzD,EAAE,OAAOE,OAAO;YACd,MAAME,UAAUF,iBAAiBG,QAAQH,MAAME,OAAO,GAAGE,OAAOJ;YAChEb,MAAM,CAAC,2BAA2B,EAAEe,SAAS,EAAEF;YAC/C,IAAI,CAACA,KAAK,CAACE,SAAS;gBAACD,MAAM;YAAC;QAC9B;QAEA,IAAI;YACF,MAAMhB,sBAAsB;gBAACK;gBAASe,SAAS;gBAAOP;YAAS;QACjE,EAAE,OAAOE,OAAO;YACd,MAAME,UAAUF,iBAAiBG,QAAQH,MAAME,OAAO,GAAGE,OAAOJ;YAChEb,MAAM,CAAC,8BAA8B,EAAEe,SAAS,EAAEF;YAClD,IAAI,CAACA,KAAK,CAAC,CAAC,8BAA8B,EAAEE,SAAS,EAAE;gBAACD,MAAM;YAAC;QACjE;QAEA,IAAI,CAACK,GAAG,CAAC1B,UAAU,SAAS,CAAC,gCAAgC,EAAEU,QAAQ,CAAC,CAAC;QACzE,IAAI,CAACgB,GAAG,CAAC1B,UAAU,UAAU;IAC/B;AACF"}
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
import { styleText } from 'node:util';
|
|
2
|
+
import { Args, Flags } from '@oclif/core';
|
|
3
|
+
import { SanityCommand, subdebug, waitForAsync } from '@sanity/cli-core';
|
|
4
|
+
import { spinner } from '@sanity/cli-core/ux';
|
|
5
|
+
import { resolveDataset } from '../../../actions/dataset/resolveDataset.js';
|
|
6
|
+
import { getEmbeddingsSettings, setEmbeddingsSettings } from '../../../services/embeddings.js';
|
|
7
|
+
import { NO_PROJECT_ID } from '../../../util/errorMessages.js';
|
|
8
|
+
const debug = subdebug('dataset:embeddings:enable');
|
|
9
|
+
// Poll cadence for `--wait`: start polling every 10 seconds, back off by
// 1.5x per attempt (see waitForReady) up to a 10-minute ceiling, and give
// up entirely after 24 hours.
const INITIAL_POLL_INTERVAL_MS = 10_000;
const MAX_POLL_INTERVAL_MS = 10 * 60 * 1000;
const POLL_TIMEOUT_MS = 24 * 60 * 60 * 1000;
|
|
12
|
+
/**
 * `sanity dataset embeddings enable` — turns embeddings on for a dataset.
 *
 * Resolves the target dataset, enables embeddings (optionally with a GROQ
 * projection), and — when `--wait` is given — polls the settings endpoint
 * with exponential backoff until the index reports 'ready'.
 */
export class DatasetEmbeddingsEnableCommand extends SanityCommand {
  static args = {
    dataset: Args.string({
      description: 'Dataset name to enable embeddings for',
      required: false,
    }),
  };

  static description = 'Enable embeddings for a dataset';

  static examples = [
    {
      command: '<%= config.bin %> <%= command.id %> production',
      description: 'Enable embeddings for the production dataset',
    },
    {
      command: '<%= config.bin %> <%= command.id %> production --projection "{ title, body }"',
      description: 'Enable embeddings with a specific projection',
    },
    {
      command: '<%= config.bin %> <%= command.id %> production --wait',
      description: 'Enable embeddings and wait for processing to complete',
    },
  ];

  static flags = {
    projection: Flags.string({
      description: 'GROQ projection defining which fields to embed (e.g. "{ title, body }")',
      required: false,
    }),
    wait: Flags.boolean({
      default: false,
      description: 'Wait for embeddings processing to complete before returning',
    }),
  };

  async run() {
    const { args, flags } = await this.parse(DatasetEmbeddingsEnableCommand);
    let { dataset } = args;
    const { projection, wait } = flags;

    // A project context is mandatory; bail out with the shared error message.
    const projectId = await this.getProjectId();
    if (!projectId) {
      this.error(NO_PROJECT_ID, { exit: 1 });
    }

    // Resolve the dataset name (prompting/validating happens inside the helper).
    try {
      const resolved = await resolveDataset({ dataset, projectId });
      dataset = resolved.dataset;
    } catch (err) {
      const reason = err instanceof Error ? err.message : String(err);
      debug(`Failed to resolve dataset: ${reason}`, err);
      this.error(reason, { exit: 1 });
    }

    // Enable embeddings, passing the projection through when provided.
    try {
      await setEmbeddingsSettings({ dataset, enabled: true, projectId, projection });
    } catch (err) {
      const reason = err instanceof Error ? err.message : String(err);
      debug(`Failed to enable embeddings: ${reason}`, err);
      this.error(`Failed to enable embeddings: ${reason}`, { exit: 1 });
    }

    this.log(styleText('green', `Embeddings enabled for dataset ${dataset}.`));
    if (projection) {
      this.log(`Projection: ${projection}`);
    }

    if (wait) {
      await this.waitForReady(projectId, dataset);
    } else {
      this.log('Processing documents in the background. Use --wait to wait for completion.');
    }
  }

  /**
   * Poll embeddings settings until the status becomes 'ready'.
   *
   * Waits `INITIAL_POLL_INTERVAL_MS` before the first check, growing the
   * delay by 1.5x per attempt (capped at `MAX_POLL_INTERVAL_MS`), and
   * exits with an error on unexpected status, poll failure, or after
   * `POLL_TIMEOUT_MS` without reaching 'ready'.
   */
  async waitForReady(projectId, dataset) {
    const progress = spinner('Waiting for embeddings to be ready...').start();
    const giveUpAt = Date.now() + POLL_TIMEOUT_MS;
    let delayMs = INITIAL_POLL_INTERVAL_MS;

    for (;;) {
      if (Date.now() >= giveUpAt) {
        break;
      }

      // Sleep first, then grow the backoff for the *next* round.
      await waitForAsync(delayMs);
      delayMs = Math.min(delayMs * 1.5, MAX_POLL_INTERVAL_MS);

      let settings;
      try {
        settings = await getEmbeddingsSettings(projectId, dataset);
      } catch (err) {
        const reason = err instanceof Error ? err.message : String(err);
        progress.fail('Failed while waiting for embeddings.');
        this.error(`Failed while waiting for embeddings: ${reason}`, { exit: 1 });
      }

      debug(`Poll status: ${settings.status}, next interval: ${Math.round(delayMs)}ms`);

      if (settings.status === 'ready') {
        progress.succeed('Embeddings ready.');
        return;
      }
      // Anything other than 'updating' at this point is a terminal surprise.
      if (settings.status !== 'updating') {
        progress.fail(`Unexpected status: ${settings.status}`);
        this.error(`Embeddings entered unexpected status: ${settings.status}`, { exit: 1 });
      }

      progress.text = 'Still processing...';
    }

    progress.fail('Timed out waiting for embeddings.');
    this.error('Timed out. Check status with: sanity dataset embeddings status', { exit: 1 });
  }
}
|
|
127
|
+
|
|
128
|
+
//# sourceMappingURL=enable.js.map
|