@sanity/cli 6.0.0-alpha.17 → 6.0.0-alpha.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (249)
  1. package/README.md +833 -912
  2. package/dist/SanityHelp.js +74 -21
  3. package/dist/SanityHelp.js.map +1 -1
  4. package/dist/actions/build/buildApp.js +42 -15
  5. package/dist/actions/build/buildApp.js.map +1 -1
  6. package/dist/actions/build/buildStudio.js +21 -9
  7. package/dist/actions/build/buildStudio.js.map +1 -1
  8. package/dist/actions/build/buildVendorDependencies.js +3 -16
  9. package/dist/actions/build/buildVendorDependencies.js.map +1 -1
  10. package/dist/actions/build/checkStudioDependencyVersions.js +7 -7
  11. package/dist/actions/build/checkStudioDependencyVersions.js.map +1 -1
  12. package/dist/actions/build/createExternalFromImportMap.js +1 -1
  13. package/dist/actions/build/createExternalFromImportMap.js.map +1 -1
  14. package/dist/actions/build/determineBasePath.js +5 -2
  15. package/dist/actions/build/determineBasePath.js.map +1 -1
  16. package/dist/actions/build/getViteConfig.js +47 -4
  17. package/dist/actions/build/getViteConfig.js.map +1 -1
  18. package/dist/actions/build/handlePrereleaseVersions.js +44 -0
  19. package/dist/actions/build/handlePrereleaseVersions.js.map +1 -0
  20. package/dist/actions/build/renderDocumentWorker/components/GlobalErrorHandler.js +1 -0
  21. package/dist/actions/build/renderDocumentWorker/components/GlobalErrorHandler.js.map +1 -1
  22. package/dist/actions/build/types.js.map +1 -1
  23. package/dist/actions/build/writeSanityRuntime.js +4 -3
  24. package/dist/actions/build/writeSanityRuntime.js.map +1 -1
  25. package/dist/actions/dataset/create.js +7 -1
  26. package/dist/actions/dataset/create.js.map +1 -1
  27. package/dist/actions/dataset/resolveDataset.js +26 -0
  28. package/dist/actions/dataset/resolveDataset.js.map +1 -0
  29. package/dist/actions/deploy/deployApp.js +1 -8
  30. package/dist/actions/deploy/deployApp.js.map +1 -1
  31. package/dist/actions/deploy/deployStudio.js +1 -0
  32. package/dist/actions/deploy/deployStudio.js.map +1 -1
  33. package/dist/actions/dev/getDashboardAppUrl.js +48 -0
  34. package/dist/actions/dev/getDashboardAppUrl.js.map +1 -0
  35. package/dist/actions/dev/getDevServerConfig.js +7 -3
  36. package/dist/actions/dev/getDevServerConfig.js.map +1 -1
  37. package/dist/actions/dev/startAppDevServer.js +3 -3
  38. package/dist/actions/dev/startAppDevServer.js.map +1 -1
  39. package/dist/actions/dev/startStudioDevServer.js +12 -12
  40. package/dist/actions/dev/startStudioDevServer.js.map +1 -1
  41. package/dist/actions/documents/types.js.map +1 -1
  42. package/dist/actions/documents/validate.js +11 -2
  43. package/dist/actions/documents/validate.js.map +1 -1
  44. package/dist/actions/documents/validateDocuments.worker.js +2 -2
  45. package/dist/actions/documents/validateDocuments.worker.js.map +1 -1
  46. package/dist/actions/documents/validation/reporters/jsonReporter.js +1 -1
  47. package/dist/actions/documents/validation/reporters/jsonReporter.js.map +1 -1
  48. package/dist/actions/documents/validation/reporters/ndjsonReporter.js +1 -1
  49. package/dist/actions/documents/validation/reporters/ndjsonReporter.js.map +1 -1
  50. package/dist/actions/graphql/SchemaError.js +1 -1
  51. package/dist/actions/graphql/SchemaError.js.map +1 -1
  52. package/dist/actions/graphql/__tests__/fixtures/many-self-refs.js +540 -0
  53. package/dist/actions/graphql/__tests__/fixtures/many-self-refs.js.map +1 -0
  54. package/dist/actions/graphql/__tests__/fixtures/test-studio.js +1143 -0
  55. package/dist/actions/graphql/__tests__/fixtures/test-studio.js.map +1 -0
  56. package/dist/actions/graphql/__tests__/fixtures/union-refs.js +591 -0
  57. package/dist/actions/graphql/__tests__/fixtures/union-refs.js.map +1 -0
  58. package/dist/actions/graphql/__tests__/helpers.js +23 -0
  59. package/dist/actions/graphql/__tests__/helpers.js.map +1 -0
  60. package/dist/actions/graphql/extractFromSanitySchema.js +2 -1
  61. package/dist/actions/graphql/extractFromSanitySchema.js.map +1 -1
  62. package/dist/actions/graphql/gen1/generateTypeFilters.js +1 -1
  63. package/dist/actions/graphql/gen1/generateTypeFilters.js.map +1 -1
  64. package/dist/actions/graphql/gen1/generateTypeQueries.js +2 -1
  65. package/dist/actions/graphql/gen1/generateTypeQueries.js.map +1 -1
  66. package/dist/actions/graphql/gen2/generateTypeQueries.js +1 -1
  67. package/dist/actions/graphql/gen2/generateTypeQueries.js.map +1 -1
  68. package/dist/actions/graphql/gen3/generateTypeQueries.js +1 -1
  69. package/dist/actions/graphql/gen3/generateTypeQueries.js.map +1 -1
  70. package/dist/actions/graphql/getGraphQLAPIs.js +2 -10
  71. package/dist/actions/graphql/getGraphQLAPIs.js.map +1 -1
  72. package/dist/actions/graphql/getGraphQLAPIs.worker.js +1 -1
  73. package/dist/actions/graphql/getGraphQLAPIs.worker.js.map +1 -1
  74. package/dist/actions/graphql/types.js.map +1 -1
  75. package/dist/actions/init/bootstrapLocalTemplate.js +1 -1
  76. package/dist/actions/init/bootstrapLocalTemplate.js.map +1 -1
  77. package/dist/actions/manifest/extractAppManifest.js.map +1 -1
  78. package/dist/actions/manifest/extractManifest.js +1 -22
  79. package/dist/actions/manifest/extractManifest.js.map +1 -1
  80. package/dist/actions/manifest/types.js.map +1 -1
  81. package/dist/actions/schema/deploySchemas.js +57 -80
  82. package/dist/actions/schema/deploySchemas.js.map +1 -1
  83. package/dist/actions/schema/extractSanityWorkspace.worker.js +24 -0
  84. package/dist/actions/schema/extractSanityWorkspace.worker.js.map +1 -0
  85. package/dist/actions/schema/extractSchemaWatcher.js +9 -7
  86. package/dist/actions/schema/extractSchemaWatcher.js.map +1 -1
  87. package/dist/actions/schema/matchSchemaPattern.js +22 -0
  88. package/dist/actions/schema/matchSchemaPattern.js.map +1 -0
  89. package/dist/actions/schema/runSchemaExtraction.js.map +1 -1
  90. package/dist/actions/schema/types.js +4 -0
  91. package/dist/actions/schema/types.js.map +1 -1
  92. package/dist/actions/schema/utils/schemaStoreValidation.js +1 -7
  93. package/dist/actions/schema/utils/schemaStoreValidation.js.map +1 -1
  94. package/dist/actions/schema/utils/uniqByProjectIdDataset.js +1 -1
  95. package/dist/actions/schema/utils/uniqByProjectIdDataset.js.map +1 -1
  96. package/dist/actions/schema/watchExtractSchema.js +2 -1
  97. package/dist/actions/schema/watchExtractSchema.js.map +1 -1
  98. package/dist/actions/versions/getFormatters.js +1 -1
  99. package/dist/actions/versions/getFormatters.js.map +1 -1
  100. package/dist/commands/backup/disable.js +0 -6
  101. package/dist/commands/backup/disable.js.map +1 -1
  102. package/dist/commands/backup/download.js +0 -6
  103. package/dist/commands/backup/download.js.map +1 -1
  104. package/dist/commands/backup/enable.js +0 -6
  105. package/dist/commands/backup/enable.js.map +1 -1
  106. package/dist/commands/backup/list.js +4 -7
  107. package/dist/commands/backup/list.js.map +1 -1
  108. package/dist/commands/cors/add.js +0 -6
  109. package/dist/commands/cors/add.js.map +1 -1
  110. package/dist/commands/cors/delete.js +0 -6
  111. package/dist/commands/cors/delete.js.map +1 -1
  112. package/dist/commands/cors/list.js +0 -6
  113. package/dist/commands/cors/list.js.map +1 -1
  114. package/dist/commands/dataset/alias/create.js +23 -7
  115. package/dist/commands/dataset/alias/create.js.map +1 -1
  116. package/dist/commands/dataset/alias/delete.js +17 -7
  117. package/dist/commands/dataset/alias/delete.js.map +1 -1
  118. package/dist/commands/dataset/alias/link.js +17 -7
  119. package/dist/commands/dataset/alias/link.js.map +1 -1
  120. package/dist/commands/dataset/alias/unlink.js +17 -7
  121. package/dist/commands/dataset/alias/unlink.js.map +1 -1
  122. package/dist/commands/dataset/copy.js +42 -30
  123. package/dist/commands/dataset/copy.js.map +1 -1
  124. package/dist/commands/dataset/create.js +29 -7
  125. package/dist/commands/dataset/create.js.map +1 -1
  126. package/dist/commands/dataset/delete.js +13 -7
  127. package/dist/commands/dataset/delete.js.map +1 -1
  128. package/dist/commands/dataset/embeddings/disable.js +74 -0
  129. package/dist/commands/dataset/embeddings/disable.js.map +1 -0
  130. package/dist/commands/dataset/embeddings/enable.js +138 -0
  131. package/dist/commands/dataset/embeddings/enable.js.map +1 -0
  132. package/dist/commands/dataset/embeddings/status.js +69 -0
  133. package/dist/commands/dataset/embeddings/status.js.map +1 -0
  134. package/dist/commands/dataset/export.js +30 -18
  135. package/dist/commands/dataset/export.js.map +1 -1
  136. package/dist/commands/dataset/list.js +19 -7
  137. package/dist/commands/dataset/list.js.map +1 -1
  138. package/dist/commands/dataset/visibility/get.js +15 -7
  139. package/dist/commands/dataset/visibility/get.js.map +1 -1
  140. package/dist/commands/dataset/visibility/set.js +19 -7
  141. package/dist/commands/dataset/visibility/set.js.map +1 -1
  142. package/dist/commands/debug.js +2 -1
  143. package/dist/commands/debug.js.map +1 -1
  144. package/dist/commands/documents/create.js +2 -7
  145. package/dist/commands/documents/create.js.map +1 -1
  146. package/dist/commands/documents/delete.js +0 -6
  147. package/dist/commands/documents/delete.js.map +1 -1
  148. package/dist/commands/documents/get.js +0 -6
  149. package/dist/commands/documents/get.js.map +1 -1
  150. package/dist/commands/documents/query.js +0 -6
  151. package/dist/commands/documents/query.js.map +1 -1
  152. package/dist/commands/graphql/deploy.js +1 -1
  153. package/dist/commands/graphql/deploy.js.map +1 -1
  154. package/dist/commands/graphql/list.js +0 -6
  155. package/dist/commands/graphql/list.js.map +1 -1
  156. package/dist/commands/graphql/undeploy.js +0 -6
  157. package/dist/commands/graphql/undeploy.js.map +1 -1
  158. package/dist/commands/hook/attempt.js +0 -6
  159. package/dist/commands/hook/attempt.js.map +1 -1
  160. package/dist/commands/hook/create.js +0 -6
  161. package/dist/commands/hook/create.js.map +1 -1
  162. package/dist/commands/hook/delete.js +0 -6
  163. package/dist/commands/hook/delete.js.map +1 -1
  164. package/dist/commands/hook/list.js +0 -6
  165. package/dist/commands/hook/list.js.map +1 -1
  166. package/dist/commands/hook/logs.js +1 -7
  167. package/dist/commands/hook/logs.js.map +1 -1
  168. package/dist/commands/init.js +13 -7
  169. package/dist/commands/init.js.map +1 -1
  170. package/dist/commands/manage.js +0 -1
  171. package/dist/commands/manage.js.map +1 -1
  172. package/dist/commands/media/create-aspect.js +1 -1
  173. package/dist/commands/media/create-aspect.js.map +1 -1
  174. package/dist/commands/media/delete-aspect.js +0 -6
  175. package/dist/commands/media/delete-aspect.js.map +1 -1
  176. package/dist/commands/media/deploy-aspect.js +1 -6
  177. package/dist/commands/media/deploy-aspect.js.map +1 -1
  178. package/dist/commands/media/export.js +0 -6
  179. package/dist/commands/media/export.js.map +1 -1
  180. package/dist/commands/media/import.js +0 -6
  181. package/dist/commands/media/import.js.map +1 -1
  182. package/dist/commands/projects/list.js +2 -1
  183. package/dist/commands/projects/list.js.map +1 -1
  184. package/dist/commands/schema/delete.js +0 -6
  185. package/dist/commands/schema/delete.js.map +1 -1
  186. package/dist/commands/schema/deploy.js +11 -27
  187. package/dist/commands/schema/deploy.js.map +1 -1
  188. package/dist/commands/tokens/add.js +0 -6
  189. package/dist/commands/tokens/add.js.map +1 -1
  190. package/dist/commands/tokens/delete.js +0 -6
  191. package/dist/commands/tokens/delete.js.map +1 -1
  192. package/dist/commands/tokens/list.js +0 -6
  193. package/dist/commands/tokens/list.js.map +1 -1
  194. package/dist/commands/users/invite.js +0 -6
  195. package/dist/commands/users/invite.js.map +1 -1
  196. package/dist/commands/users/list.js +1 -7
  197. package/dist/commands/users/list.js.map +1 -1
  198. package/dist/commands/versions.js +1 -1
  199. package/dist/commands/versions.js.map +1 -1
  200. package/dist/exports/index.d.ts +62 -2
  201. package/dist/exports/index.js.map +1 -1
  202. package/dist/prompts/promptForProject.js +64 -0
  203. package/dist/prompts/promptForProject.js.map +1 -0
  204. package/dist/prompts/selectMediaLibrary.js +1 -1
  205. package/dist/prompts/selectMediaLibrary.js.map +1 -1
  206. package/dist/server/devServer.js +4 -2
  207. package/dist/server/devServer.js.map +1 -1
  208. package/dist/server/vite/plugin-schema-extraction.js +201 -0
  209. package/dist/server/vite/plugin-schema-extraction.js.map +1 -0
  210. package/dist/server/vite/plugin-typegen.js +217 -0
  211. package/dist/server/vite/plugin-typegen.js.map +1 -0
  212. package/dist/services/datasets.js +7 -5
  213. package/dist/services/datasets.js.map +1 -1
  214. package/dist/services/embeddings.js +25 -0
  215. package/dist/services/embeddings.js.map +1 -0
  216. package/dist/services/grants.js +13 -0
  217. package/dist/services/grants.js.map +1 -0
  218. package/dist/services/graphql.js.map +1 -1
  219. package/dist/services/schemas.js +1 -1
  220. package/dist/services/schemas.js.map +1 -1
  221. package/dist/types/grants.js +3 -0
  222. package/dist/types/grants.js.map +1 -0
  223. package/dist/types.js.map +1 -1
  224. package/dist/util/checkProjectPermissions.js +21 -0
  225. package/dist/util/checkProjectPermissions.js.map +1 -0
  226. package/dist/util/compareDependencyVersions.js +28 -7
  227. package/dist/util/compareDependencyVersions.js.map +1 -1
  228. package/dist/util/errorMessages.js +0 -1
  229. package/dist/util/errorMessages.js.map +1 -1
  230. package/dist/util/getSharedServerConfig.js +1 -0
  231. package/dist/util/getSharedServerConfig.js.map +1 -1
  232. package/dist/util/packageManager/getPeerDependencies.js +44 -0
  233. package/dist/util/packageManager/getPeerDependencies.js.map +1 -0
  234. package/dist/util/sharedFlags.js +19 -0
  235. package/dist/util/sharedFlags.js.map +1 -0
  236. package/dist/util/toForwardSlashes.js +8 -0
  237. package/dist/util/toForwardSlashes.js.map +1 -0
  238. package/oclif.manifest.json +639 -379
  239. package/package.json +23 -22
  240. package/dist/actions/dev/getCoreAppUrl.js +0 -10
  241. package/dist/actions/dev/getCoreAppUrl.js.map +0 -1
  242. package/dist/actions/schema/schemaStoreTypes.js +0 -19
  243. package/dist/actions/schema/schemaStoreTypes.js.map +0 -1
  244. package/dist/actions/schema/utils/manifestExtractor.js +0 -29
  245. package/dist/actions/schema/utils/manifestExtractor.js.map +0 -1
  246. package/dist/actions/schema/utils/manifestReader.js +0 -71
  247. package/dist/actions/schema/utils/manifestReader.js.map +0 -1
  248. package/dist/util/workerChannels.js +0 -172
  249. package/dist/util/workerChannels.js.map +0 -1
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/actions/schema/extractSchemaWatcher.ts"],"sourcesContent":["import {dirname, isAbsolute, relative} from 'node:path'\n\nimport {type Output} from '@sanity/cli-core'\nimport {spinner} from '@sanity/cli-core/ux'\nimport {watch as chokidarWatch, type FSWatcher} from 'chokidar'\nimport {debounce} from 'lodash-es'\nimport {glob} from 'tinyglobby'\n\nimport {formatSchemaValidation} from './formatSchemaValidation.js'\nimport {type ExtractOptions} from './getExtractOptions.js'\nimport {runSchemaExtraction} from './runSchemaExtraction.js'\nimport {schemasExtractDebug} from './utils/debug.js'\nimport {SchemaExtractionError} from './utils/SchemaExtractionError.js'\n\n/** Default glob patterns to watch for schema changes */\nexport const DEFAULT_WATCH_PATTERNS = [\n 'sanity.config.{js,jsx,ts,tsx,mjs}',\n 'schema*/**/*.{js,jsx,ts,tsx,mjs}',\n]\n\n/** Default patterns to ignore when watching */\nconst IGNORED_PATTERNS = [\n '**/node_modules/**',\n '**/.git/**',\n '**/dist/**',\n '**/lib/**',\n '**/.sanity/**',\n]\n\ninterface ExtractSchemaWatcherOptions {\n extractOptions: ExtractOptions\n output: Output\n watchPatterns: string[]\n\n onExtraction?: (result: {duration: number; success: boolean}) => void\n}\n\ninterface ExtractSchemaWatcher {\n close: () => Promise<void>\n watcher: FSWatcher\n}\n\n/** State for tracking extraction status */\ninterface WatchState {\n isExtracting: boolean\n pendingExtraction: boolean\n}\n\n/** Return type for createExtractionRunner */\ninterface ExtractionRunner {\n runExtraction: () => Promise<void>\n state: WatchState\n}\n\n/**\n * Creates an extraction runner with concurrency control.\n * If extraction is already running, queues one more extraction to run after completion.\n * Multiple queued requests are coalesced into a single pending extraction.\n */\nfunction createExtractionRunner(onExtract: () => Promise<void>): ExtractionRunner {\n const state: WatchState = {\n isExtracting: false,\n 
pendingExtraction: false,\n }\n\n async function runExtraction(): Promise<void> {\n if (state.isExtracting) {\n state.pendingExtraction = true\n return\n }\n\n state.isExtracting = true\n state.pendingExtraction = false\n\n try {\n await onExtract()\n } finally {\n state.isExtracting = false\n\n // If a change came in during extraction, run again\n if (state.pendingExtraction) {\n state.pendingExtraction = false\n await runExtraction()\n }\n }\n }\n\n return {runExtraction, state}\n}\n\n/**\n * Starts a schema watcher that extracts schema on file changes.\n * Returns a watcher instance and a stop function.\n */\nexport async function startExtractSchemaWatcher(\n options: ExtractSchemaWatcherOptions,\n): Promise<ExtractSchemaWatcher> {\n const {extractOptions, onExtraction, output, watchPatterns} = options\n\n const {configPath, enforceRequiredFields, outputPath} = extractOptions\n const workDir = dirname(configPath)\n\n // Helper function to run extraction with spinner and error handling\n const runExtraction = async (): Promise<boolean> => {\n const spin = spinner(\n enforceRequiredFields\n ? 'Extracting schema with enforced required fields'\n : 'Extracting schema...',\n ).start()\n const extractionStartTime = Date.now()\n\n try {\n await runSchemaExtraction(extractOptions)\n\n spin.succeed(\n enforceRequiredFields\n ? 
`Extracted schema to ${outputPath} with enforced required fields`\n : `Extracted schema to ${outputPath}`,\n )\n\n const duration = Date.now() - extractionStartTime\n onExtraction?.({duration, success: true})\n\n return true\n } catch (err) {\n const duration = Date.now() - extractionStartTime\n onExtraction?.({duration, success: false})\n\n schemasExtractDebug('Failed to extract schema', err)\n spin.fail('Extraction failed')\n\n // Display validation errors if available\n if (err instanceof SchemaExtractionError && err.validation && err.validation.length > 0) {\n output.log('')\n output.log(formatSchemaValidation(err.validation))\n } else if (err instanceof Error) {\n output.error(err.message, {exit: 1})\n }\n\n return false\n }\n }\n\n // Run initial extraction\n await runExtraction()\n\n const absoluteWatchPatterns = await glob(watchPatterns, {\n absolute: true,\n ignore: IGNORED_PATTERNS,\n })\n\n // Create extraction runner with concurrency control\n const {runExtraction: runConcurrentExtraction} = createExtractionRunner(async () => {\n await runExtraction()\n })\n\n // Debounced extraction trigger (1 second delay)\n const debouncedExtract = debounce(() => {\n void runConcurrentExtraction()\n }, 1000)\n\n const watcher: FSWatcher = chokidarWatch(absoluteWatchPatterns, {\n cwd: workDir,\n ignoreInitial: true,\n })\n\n watcher.on('all', (event, filePath) => {\n const timestamp = new Date().toLocaleTimeString()\n const relativePath = isAbsolute(filePath) ? relative(workDir, filePath) : filePath\n output.log(`[${timestamp}] ${event}: ${relativePath}`)\n debouncedExtract()\n })\n\n watcher.on('error', (err) => {\n output.error(`Watcher error: ${err instanceof Error ? 
err.message : String(err)}`)\n })\n\n return {\n close: () => watcher.close(),\n watcher,\n }\n}\n"],"names":["dirname","isAbsolute","relative","spinner","watch","chokidarWatch","debounce","glob","formatSchemaValidation","runSchemaExtraction","schemasExtractDebug","SchemaExtractionError","DEFAULT_WATCH_PATTERNS","IGNORED_PATTERNS","createExtractionRunner","onExtract","state","isExtracting","pendingExtraction","runExtraction","startExtractSchemaWatcher","options","extractOptions","onExtraction","output","watchPatterns","configPath","enforceRequiredFields","outputPath","workDir","spin","start","extractionStartTime","Date","now","succeed","duration","success","err","fail","validation","length","log","Error","error","message","exit","absoluteWatchPatterns","absolute","ignore","runConcurrentExtraction","debouncedExtract","watcher","cwd","ignoreInitial","on","event","filePath","timestamp","toLocaleTimeString","relativePath","String","close"],"mappings":"AAAA,SAAQA,OAAO,EAAEC,UAAU,EAAEC,QAAQ,QAAO,YAAW;AAGvD,SAAQC,OAAO,QAAO,sBAAqB;AAC3C,SAAQC,SAASC,aAAa,QAAuB,WAAU;AAC/D,SAAQC,QAAQ,QAAO,YAAW;AAClC,SAAQC,IAAI,QAAO,aAAY;AAE/B,SAAQC,sBAAsB,QAAO,8BAA6B;AAElE,SAAQC,mBAAmB,QAAO,2BAA0B;AAC5D,SAAQC,mBAAmB,QAAO,mBAAkB;AACpD,SAAQC,qBAAqB,QAAO,mCAAkC;AAEtE,sDAAsD,GACtD,OAAO,MAAMC,yBAAyB;IACpC;IACA;CACD,CAAA;AAED,6CAA6C,GAC7C,MAAMC,mBAAmB;IACvB;IACA;IACA;IACA;IACA;CACD;AA2BD;;;;CAIC,GACD,SAASC,uBAAuBC,SAA8B;IAC5D,MAAMC,QAAoB;QACxBC,cAAc;QACdC,mBAAmB;IACrB;IAEA,eAAeC;QACb,IAAIH,MAAMC,YAAY,EAAE;YACtBD,MAAME,iBAAiB,GAAG;YAC1B;QACF;QAEAF,MAAMC,YAAY,GAAG;QACrBD,MAAME,iBAAiB,GAAG;QAE1B,IAAI;YACF,MAAMH;QACR,SAAU;YACRC,MAAMC,YAAY,GAAG;YAErB,mDAAmD;YACnD,IAAID,MAAME,iBAAiB,EAAE;gBAC3BF,MAAME,iBAAiB,GAAG;gBAC1B,MAAMC;YACR;QACF;IACF;IAEA,OAAO;QAACA;QAAeH;IAAK;AAC9B;AAEA;;;CAGC,GACD,OAAO,eAAeI,0BACpBC,OAAoC;IAEpC,MAAM,EAACC,cAAc,EAAEC,YAAY,EAAEC,MAAM,EAAEC,aAAa,EAAC,GAAGJ;IAE9D,MAAM,EAACK,UAAU,EAAEC,qBAAqB,EAAEC,UAAU,EAAC,GAAGN;IACxD,MAAMO,UAAU7B,QAAQ0B;IAExB,oEAAoE;IACpE,MAAMP,gBAAgB;QACpB,MAAMW,O
AAO3B,QACXwB,wBACI,oDACA,wBACJI,KAAK;QACP,MAAMC,sBAAsBC,KAAKC,GAAG;QAEpC,IAAI;YACF,MAAMzB,oBAAoBa;YAE1BQ,KAAKK,OAAO,CACVR,wBACI,CAAC,oBAAoB,EAAEC,WAAW,8BAA8B,CAAC,GACjE,CAAC,oBAAoB,EAAEA,YAAY;YAGzC,MAAMQ,WAAWH,KAAKC,GAAG,KAAKF;YAC9BT,eAAe;gBAACa;gBAAUC,SAAS;YAAI;YAEvC,OAAO;QACT,EAAE,OAAOC,KAAK;YACZ,MAAMF,WAAWH,KAAKC,GAAG,KAAKF;YAC9BT,eAAe;gBAACa;gBAAUC,SAAS;YAAK;YAExC3B,oBAAoB,4BAA4B4B;YAChDR,KAAKS,IAAI,CAAC;YAEV,yCAAyC;YACzC,IAAID,eAAe3B,yBAAyB2B,IAAIE,UAAU,IAAIF,IAAIE,UAAU,CAACC,MAAM,GAAG,GAAG;gBACvFjB,OAAOkB,GAAG,CAAC;gBACXlB,OAAOkB,GAAG,CAAClC,uBAAuB8B,IAAIE,UAAU;YAClD,OAAO,IAAIF,eAAeK,OAAO;gBAC/BnB,OAAOoB,KAAK,CAACN,IAAIO,OAAO,EAAE;oBAACC,MAAM;gBAAC;YACpC;YAEA,OAAO;QACT;IACF;IAEA,yBAAyB;IACzB,MAAM3B;IAEN,MAAM4B,wBAAwB,MAAMxC,KAAKkB,eAAe;QACtDuB,UAAU;QACVC,QAAQpC;IACV;IAEA,oDAAoD;IACpD,MAAM,EAACM,eAAe+B,uBAAuB,EAAC,GAAGpC,uBAAuB;QACtE,MAAMK;IACR;IAEA,gDAAgD;IAChD,MAAMgC,mBAAmB7C,SAAS;QAChC,KAAK4C;IACP,GAAG;IAEH,MAAME,UAAqB/C,cAAc0C,uBAAuB;QAC9DM,KAAKxB;QACLyB,eAAe;IACjB;IAEAF,QAAQG,EAAE,CAAC,OAAO,CAACC,OAAOC;QACxB,MAAMC,YAAY,IAAIzB,OAAO0B,kBAAkB;QAC/C,MAAMC,eAAe3D,WAAWwD,YAAYvD,SAAS2B,SAAS4B,YAAYA;QAC1EjC,OAAOkB,GAAG,CAAC,CAAC,CAAC,EAAEgB,UAAU,EAAE,EAAEF,MAAM,EAAE,EAAEI,cAAc;QACrDT;IACF;IAEAC,QAAQG,EAAE,CAAC,SAAS,CAACjB;QACnBd,OAAOoB,KAAK,CAAC,CAAC,eAAe,EAAEN,eAAeK,QAAQL,IAAIO,OAAO,GAAGgB,OAAOvB,MAAM;IACnF;IAEA,OAAO;QACLwB,OAAO,IAAMV,QAAQU,KAAK;QAC1BV;IACF;AACF"}
1
+ {"version":3,"sources":["../../../src/actions/schema/extractSchemaWatcher.ts"],"sourcesContent":["import {dirname, isAbsolute, join, relative} from 'node:path'\n\nimport {type Output} from '@sanity/cli-core'\nimport {spinner} from '@sanity/cli-core/ux'\nimport {watch as chokidarWatch, type FSWatcher} from 'chokidar'\nimport debounce from 'lodash-es/debounce.js'\n\nimport {formatSchemaValidation} from './formatSchemaValidation.js'\nimport {type ExtractOptions} from './getExtractOptions.js'\nimport {createSchemaPatternMatcher} from './matchSchemaPattern.js'\nimport {runSchemaExtraction} from './runSchemaExtraction.js'\nimport {schemasExtractDebug} from './utils/debug.js'\nimport {SchemaExtractionError} from './utils/SchemaExtractionError.js'\n\n/** Default glob patterns to watch for schema changes */\nexport const DEFAULT_WATCH_PATTERNS = [\n 'sanity.config.{js,jsx,ts,tsx,mjs}',\n 'schema*/**/*.{js,jsx,ts,tsx,mjs}',\n]\n\n/** Default patterns to ignore when watching */\nconst IGNORED_PATTERNS = [\n '**/node_modules/**',\n '**/.git/**',\n '**/dist/**',\n '**/lib/**',\n '**/.sanity/**',\n]\n\ninterface ExtractSchemaWatcherOptions {\n extractOptions: ExtractOptions\n output: Output\n watchPatterns: string[]\n\n onExtraction?: (result: {duration: number; success: boolean}) => void\n}\n\ninterface ExtractSchemaWatcher {\n close: () => Promise<void>\n watcher: FSWatcher\n}\n\n/** State for tracking extraction status */\ninterface WatchState {\n isExtracting: boolean\n pendingExtraction: boolean\n}\n\n/** Return type for createExtractionRunner */\ninterface ExtractionRunner {\n runExtraction: () => Promise<void>\n state: WatchState\n}\n\n/**\n * Creates an extraction runner with concurrency control.\n * If extraction is already running, queues one more extraction to run after completion.\n * Multiple queued requests are coalesced into a single pending extraction.\n */\nfunction createExtractionRunner(onExtract: () => Promise<void>): ExtractionRunner {\n const state: 
WatchState = {\n isExtracting: false,\n pendingExtraction: false,\n }\n\n async function runExtraction(): Promise<void> {\n if (state.isExtracting) {\n state.pendingExtraction = true\n return\n }\n\n state.isExtracting = true\n state.pendingExtraction = false\n\n try {\n await onExtract()\n } finally {\n state.isExtracting = false\n\n // If a change came in during extraction, run again\n if (state.pendingExtraction) {\n state.pendingExtraction = false\n await runExtraction()\n }\n }\n }\n\n return {runExtraction, state}\n}\n\n/**\n * Starts a schema watcher that extracts schema on file changes.\n * Returns a watcher instance and a stop function.\n */\nexport async function startExtractSchemaWatcher(\n options: ExtractSchemaWatcherOptions,\n): Promise<ExtractSchemaWatcher> {\n const {extractOptions, onExtraction, output, watchPatterns} = options\n\n const {configPath, enforceRequiredFields, outputPath} = extractOptions\n const workDir = dirname(configPath)\n\n // Helper function to run extraction with spinner and error handling\n const runExtraction = async (): Promise<boolean> => {\n const spin = spinner(\n enforceRequiredFields\n ? 'Extracting schema with enforced required fields'\n : 'Extracting schema...',\n ).start()\n const extractionStartTime = Date.now()\n\n try {\n await runSchemaExtraction(extractOptions)\n\n spin.succeed(\n enforceRequiredFields\n ? 
`Extracted schema to ${outputPath} with enforced required fields`\n : `Extracted schema to ${outputPath}`,\n )\n\n const duration = Date.now() - extractionStartTime\n onExtraction?.({duration, success: true})\n\n return true\n } catch (err) {\n const duration = Date.now() - extractionStartTime\n onExtraction?.({duration, success: false})\n\n schemasExtractDebug('Failed to extract schema', err)\n spin.fail('Extraction failed')\n\n // Display validation errors if available\n if (err instanceof SchemaExtractionError && err.validation && err.validation.length > 0) {\n output.log('')\n output.log(formatSchemaValidation(err.validation))\n } else if (err instanceof Error) {\n output.error(err.message, {exit: 1})\n }\n\n return false\n }\n }\n\n // Run initial extraction\n await runExtraction()\n\n const absoluteWatchPatterns = watchPatterns.map((pattern) => join(workDir, pattern))\n\n // Create extraction runner with concurrency control\n const {runExtraction: runConcurrentExtraction} = createExtractionRunner(async () => {\n await runExtraction()\n })\n\n // Debounced extraction trigger (1 second delay)\n const debouncedExtract = debounce(() => {\n void runConcurrentExtraction()\n }, 1000)\n\n const {isMatch} = createSchemaPatternMatcher(watchPatterns)\n\n const watcher: FSWatcher = chokidarWatch(absoluteWatchPatterns, {\n cwd: workDir,\n ignored: IGNORED_PATTERNS,\n ignoreInitial: true,\n })\n\n watcher.on('all', (event, filePath) => {\n if (!isMatch(filePath, workDir)) {\n return\n }\n\n const timestamp = new Date().toLocaleTimeString()\n const relativePath = isAbsolute(filePath) ? relative(workDir, filePath) : filePath\n output.log(`[${timestamp}] ${event}: ${relativePath}`)\n debouncedExtract()\n })\n\n watcher.on('error', (err) => {\n output.error(`Watcher error: ${err instanceof Error ? 
err.message : String(err)}`)\n })\n\n return {\n close: () => watcher.close(),\n watcher,\n }\n}\n"],"names":["dirname","isAbsolute","join","relative","spinner","watch","chokidarWatch","debounce","formatSchemaValidation","createSchemaPatternMatcher","runSchemaExtraction","schemasExtractDebug","SchemaExtractionError","DEFAULT_WATCH_PATTERNS","IGNORED_PATTERNS","createExtractionRunner","onExtract","state","isExtracting","pendingExtraction","runExtraction","startExtractSchemaWatcher","options","extractOptions","onExtraction","output","watchPatterns","configPath","enforceRequiredFields","outputPath","workDir","spin","start","extractionStartTime","Date","now","succeed","duration","success","err","fail","validation","length","log","Error","error","message","exit","absoluteWatchPatterns","map","pattern","runConcurrentExtraction","debouncedExtract","isMatch","watcher","cwd","ignored","ignoreInitial","on","event","filePath","timestamp","toLocaleTimeString","relativePath","String","close"],"mappings":"AAAA,SAAQA,OAAO,EAAEC,UAAU,EAAEC,IAAI,EAAEC,QAAQ,QAAO,YAAW;AAG7D,SAAQC,OAAO,QAAO,sBAAqB;AAC3C,SAAQC,SAASC,aAAa,QAAuB,WAAU;AAC/D,OAAOC,cAAc,wBAAuB;AAE5C,SAAQC,sBAAsB,QAAO,8BAA6B;AAElE,SAAQC,0BAA0B,QAAO,0BAAyB;AAClE,SAAQC,mBAAmB,QAAO,2BAA0B;AAC5D,SAAQC,mBAAmB,QAAO,mBAAkB;AACpD,SAAQC,qBAAqB,QAAO,mCAAkC;AAEtE,sDAAsD,GACtD,OAAO,MAAMC,yBAAyB;IACpC;IACA;CACD,CAAA;AAED,6CAA6C,GAC7C,MAAMC,mBAAmB;IACvB;IACA;IACA;IACA;IACA;CACD;AA2BD;;;;CAIC,GACD,SAASC,uBAAuBC,SAA8B;IAC5D,MAAMC,QAAoB;QACxBC,cAAc;QACdC,mBAAmB;IACrB;IAEA,eAAeC;QACb,IAAIH,MAAMC,YAAY,EAAE;YACtBD,MAAME,iBAAiB,GAAG;YAC1B;QACF;QAEAF,MAAMC,YAAY,GAAG;QACrBD,MAAME,iBAAiB,GAAG;QAE1B,IAAI;YACF,MAAMH;QACR,SAAU;YACRC,MAAMC,YAAY,GAAG;YAErB,mDAAmD;YACnD,IAAID,MAAME,iBAAiB,EAAE;gBAC3BF,MAAME,iBAAiB,GAAG;gBAC1B,MAAMC;YACR;QACF;IACF;IAEA,OAAO;QAACA;QAAeH;IAAK;AAC9B;AAEA;;;CAGC,GACD,OAAO,eAAeI,0BACpBC,OAAoC;IAEpC,MAAM,EAACC,cAAc,EAAEC,YAAY,EAAEC,MAAM,EAAEC,aAAa,EAAC,GAAGJ;IAE9D,MAAM,EAACK,UAAU,EAAEC,qBAAqB,EAAEC,UAAU,EAAC,GAAGN;IACxD,MAAMO,UA
AU9B,QAAQ2B;IAExB,oEAAoE;IACpE,MAAMP,gBAAgB;QACpB,MAAMW,OAAO3B,QACXwB,wBACI,oDACA,wBACJI,KAAK;QACP,MAAMC,sBAAsBC,KAAKC,GAAG;QAEpC,IAAI;YACF,MAAMzB,oBAAoBa;YAE1BQ,KAAKK,OAAO,CACVR,wBACI,CAAC,oBAAoB,EAAEC,WAAW,8BAA8B,CAAC,GACjE,CAAC,oBAAoB,EAAEA,YAAY;YAGzC,MAAMQ,WAAWH,KAAKC,GAAG,KAAKF;YAC9BT,eAAe;gBAACa;gBAAUC,SAAS;YAAI;YAEvC,OAAO;QACT,EAAE,OAAOC,KAAK;YACZ,MAAMF,WAAWH,KAAKC,GAAG,KAAKF;YAC9BT,eAAe;gBAACa;gBAAUC,SAAS;YAAK;YAExC3B,oBAAoB,4BAA4B4B;YAChDR,KAAKS,IAAI,CAAC;YAEV,yCAAyC;YACzC,IAAID,eAAe3B,yBAAyB2B,IAAIE,UAAU,IAAIF,IAAIE,UAAU,CAACC,MAAM,GAAG,GAAG;gBACvFjB,OAAOkB,GAAG,CAAC;gBACXlB,OAAOkB,GAAG,CAACnC,uBAAuB+B,IAAIE,UAAU;YAClD,OAAO,IAAIF,eAAeK,OAAO;gBAC/BnB,OAAOoB,KAAK,CAACN,IAAIO,OAAO,EAAE;oBAACC,MAAM;gBAAC;YACpC;YAEA,OAAO;QACT;IACF;IAEA,yBAAyB;IACzB,MAAM3B;IAEN,MAAM4B,wBAAwBtB,cAAcuB,GAAG,CAAC,CAACC,UAAYhD,KAAK4B,SAASoB;IAE3E,oDAAoD;IACpD,MAAM,EAAC9B,eAAe+B,uBAAuB,EAAC,GAAGpC,uBAAuB;QACtE,MAAMK;IACR;IAEA,gDAAgD;IAChD,MAAMgC,mBAAmB7C,SAAS;QAChC,KAAK4C;IACP,GAAG;IAEH,MAAM,EAACE,OAAO,EAAC,GAAG5C,2BAA2BiB;IAE7C,MAAM4B,UAAqBhD,cAAc0C,uBAAuB;QAC9DO,KAAKzB;QACL0B,SAAS1C;QACT2C,eAAe;IACjB;IAEAH,QAAQI,EAAE,CAAC,OAAO,CAACC,OAAOC;QACxB,IAAI,CAACP,QAAQO,UAAU9B,UAAU;YAC/B;QACF;QAEA,MAAM+B,YAAY,IAAI3B,OAAO4B,kBAAkB;QAC/C,MAAMC,eAAe9D,WAAW2D,YAAYzD,SAAS2B,SAAS8B,YAAYA;QAC1EnC,OAAOkB,GAAG,CAAC,CAAC,CAAC,EAAEkB,UAAU,EAAE,EAAEF,MAAM,EAAE,EAAEI,cAAc;QACrDX;IACF;IAEAE,QAAQI,EAAE,CAAC,SAAS,CAACnB;QACnBd,OAAOoB,KAAK,CAAC,CAAC,eAAe,EAAEN,eAAeK,QAAQL,IAAIO,OAAO,GAAGkB,OAAOzB,MAAM;IACnF;IAEA,OAAO;QACL0B,OAAO,IAAMX,QAAQW,KAAK;QAC1BX;IACF;AACF"}
@@ -0,0 +1,22 @@
1
+ import { isAbsolute, relative } from 'node:path';
2
+ import picomatch from 'picomatch';
3
+ import { toForwardSlashes } from '../../util/toForwardSlashes.js';
4
+ /**
5
+ * Creates a pattern matcher function for schema watch patterns.
6
+ * Normalizes file paths to forward slashes and makes them relative before matching.
7
+ *
8
+ * @param patterns - Array of glob patterns to match against
9
+ * @returns Function that takes a file path and workDir, returns true if file matches any pattern
10
+ * @internal
11
+ */ export function createSchemaPatternMatcher(patterns) {
12
+ const matcher = picomatch(patterns);
13
+ return {
14
+ isMatch: (filePath, workDir)=>{
15
+ const relativePath = isAbsolute(filePath) ? relative(workDir, filePath) : filePath;
16
+ const normalizedPath = toForwardSlashes(relativePath);
17
+ return matcher(normalizedPath);
18
+ }
19
+ };
20
+ }
21
+
22
+ //# sourceMappingURL=matchSchemaPattern.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../../../src/actions/schema/matchSchemaPattern.ts"],"sourcesContent":["import {isAbsolute, relative} from 'node:path'\n\nimport picomatch from 'picomatch'\n\nimport {toForwardSlashes} from '../../util/toForwardSlashes.js'\n\n/**\n * Creates a pattern matcher function for schema watch patterns.\n * Normalizes file paths to forward slashes and makes them relative before matching.\n *\n * @param patterns - Array of glob patterns to match against\n * @returns Function that takes a file path and workDir, returns true if file matches any pattern\n * @internal\n */\nexport function createSchemaPatternMatcher(patterns: string[]): {\n isMatch: (filePath: string, workDir: string) => boolean\n} {\n const matcher = picomatch(patterns)\n\n return {\n isMatch: (filePath: string, workDir: string): boolean => {\n const relativePath = isAbsolute(filePath) ? relative(workDir, filePath) : filePath\n const normalizedPath = toForwardSlashes(relativePath)\n return matcher(normalizedPath)\n },\n }\n}\n"],"names":["isAbsolute","relative","picomatch","toForwardSlashes","createSchemaPatternMatcher","patterns","matcher","isMatch","filePath","workDir","relativePath","normalizedPath"],"mappings":"AAAA,SAAQA,UAAU,EAAEC,QAAQ,QAAO,YAAW;AAE9C,OAAOC,eAAe,YAAW;AAEjC,SAAQC,gBAAgB,QAAO,iCAAgC;AAE/D;;;;;;;CAOC,GACD,OAAO,SAASC,2BAA2BC,QAAkB;IAG3D,MAAMC,UAAUJ,UAAUG;IAE1B,OAAO;QACLE,SAAS,CAACC,UAAkBC;YAC1B,MAAMC,eAAeV,WAAWQ,YAAYP,SAASQ,SAASD,YAAYA;YAC1E,MAAMG,iBAAiBR,iBAAiBO;YACxC,OAAOJ,QAAQK;QACjB;IACF;AACF"}
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/actions/schema/runSchemaExtraction.ts"],"sourcesContent":["import {mkdir, writeFile} from 'node:fs/promises'\nimport {dirname} from 'node:path'\n\nimport {studioWorkerTask} from '@sanity/cli-core'\nimport {type extractSchema as extractSchemaInternal} from '@sanity/schema/_internal'\n\nimport {type ExtractOptions} from './getExtractOptions.js'\nimport {type ExtractSchemaWorkerData, type ExtractSchemaWorkerError} from './types.js'\nimport {SchemaExtractionError} from './utils/SchemaExtractionError.js'\n\ninterface ExtractSchemaWorkerResult {\n schema: ReturnType<typeof extractSchemaInternal>\n type: 'success'\n}\n\ntype ExtractSchemaWorkerMessage = ExtractSchemaWorkerError | ExtractSchemaWorkerResult\n\n/**\n * Core schema extraction logic.\n * Performs the extraction via worker and writes to file.\n * Throws SchemaExtractionError on failure.\n */\nexport async function runSchemaExtraction(\n extractOptions: ExtractOptions,\n): Promise<ReturnType<typeof extractSchemaInternal>> {\n const {configPath, enforceRequiredFields, format, outputPath, workspace} = extractOptions\n\n if (format !== 'groq-type-nodes') {\n throw new Error(`Unsupported format: \"${format}\"`)\n }\n\n const workDir = dirname(configPath)\n const outputDir = dirname(outputPath)\n\n const result = await studioWorkerTask<ExtractSchemaWorkerMessage>(\n new URL('extractSanitySchema.worker.js', import.meta.url),\n {\n name: 'extractSanitySchema',\n studioRootPath: workDir,\n workerData: {\n configPath,\n enforceRequiredFields,\n workDir,\n workspaceName: workspace,\n } satisfies ExtractSchemaWorkerData,\n },\n )\n\n if (result.type === 'error') {\n throw new SchemaExtractionError(result.error, result.validation)\n }\n\n const schema = result.schema\n\n // Ensure output directory exists\n await mkdir(outputDir, {recursive: true})\n\n // Write schema to file\n await writeFile(outputPath, `${JSON.stringify(schema, null, 2)}\\n`)\n\n return 
schema\n}\n"],"names":["mkdir","writeFile","dirname","studioWorkerTask","SchemaExtractionError","runSchemaExtraction","extractOptions","configPath","enforceRequiredFields","format","outputPath","workspace","Error","workDir","outputDir","result","URL","url","name","studioRootPath","workerData","workspaceName","type","error","validation","schema","recursive","JSON","stringify"],"mappings":"AAAA,SAAQA,KAAK,EAAEC,SAAS,QAAO,mBAAkB;AACjD,SAAQC,OAAO,QAAO,YAAW;AAEjC,SAAQC,gBAAgB,QAAO,mBAAkB;AAKjD,SAAQC,qBAAqB,QAAO,mCAAkC;AAStE;;;;CAIC,GACD,OAAO,eAAeC,oBACpBC,cAA8B;IAE9B,MAAM,EAACC,UAAU,EAAEC,qBAAqB,EAAEC,MAAM,EAAEC,UAAU,EAAEC,SAAS,EAAC,GAAGL;IAE3E,IAAIG,WAAW,mBAAmB;QAChC,MAAM,IAAIG,MAAM,CAAC,qBAAqB,EAAEH,OAAO,CAAC,CAAC;IACnD;IAEA,MAAMI,UAAUX,QAAQK;IACxB,MAAMO,YAAYZ,QAAQQ;IAE1B,MAAMK,SAAS,MAAMZ,iBACnB,IAAIa,IAAI,iCAAiC,YAAYC,GAAG,GACxD;QACEC,MAAM;QACNC,gBAAgBN;QAChBO,YAAY;YACVb;YACAC;YACAK;YACAQ,eAAeV;QACjB;IACF;IAGF,IAAII,OAAOO,IAAI,KAAK,SAAS;QAC3B,MAAM,IAAIlB,sBAAsBW,OAAOQ,KAAK,EAAER,OAAOS,UAAU;IACjE;IAEA,MAAMC,SAASV,OAAOU,MAAM;IAE5B,iCAAiC;IACjC,MAAMzB,MAAMc,WAAW;QAACY,WAAW;IAAI;IAEvC,uBAAuB;IACvB,MAAMzB,UAAUS,YAAY,GAAGiB,KAAKC,SAAS,CAACH,QAAQ,MAAM,GAAG,EAAE,CAAC;IAElE,OAAOA;AACT"}
1
+ {"version":3,"sources":["../../../src/actions/schema/runSchemaExtraction.ts"],"sourcesContent":["import {mkdir, writeFile} from 'node:fs/promises'\nimport {dirname} from 'node:path'\n\nimport {studioWorkerTask} from '@sanity/cli-core'\nimport {type extractSchema as extractSchemaInternal} from '@sanity/schema/_internal'\n\nimport {type ExtractOptions} from './getExtractOptions.js'\nimport {type ExtractSchemaWorkerData, type ExtractSchemaWorkerError} from './types.js'\nimport {SchemaExtractionError} from './utils/SchemaExtractionError.js'\n\ninterface ExtractSchemaWorkerResult {\n schema: ReturnType<typeof extractSchemaInternal>\n type: 'success'\n}\n\ntype ExtractSchemaWorkerMessage = ExtractSchemaWorkerError | ExtractSchemaWorkerResult\n\n/**\n * Core schema extraction logic.\n * Performs the extraction via worker and writes to file.\n * Throws SchemaExtractionError on failure.\n */\nexport async function runSchemaExtraction(\n extractOptions: Omit<ExtractOptions, 'watchPatterns'>,\n): Promise<ReturnType<typeof extractSchemaInternal>> {\n const {configPath, enforceRequiredFields, format, outputPath, workspace} = extractOptions\n\n if (format !== 'groq-type-nodes') {\n throw new Error(`Unsupported format: \"${format}\"`)\n }\n\n const workDir = dirname(configPath)\n const outputDir = dirname(outputPath)\n\n const result = await studioWorkerTask<ExtractSchemaWorkerMessage>(\n new URL('extractSanitySchema.worker.js', import.meta.url),\n {\n name: 'extractSanitySchema',\n studioRootPath: workDir,\n workerData: {\n configPath,\n enforceRequiredFields,\n workDir,\n workspaceName: workspace,\n } satisfies ExtractSchemaWorkerData,\n },\n )\n\n if (result.type === 'error') {\n throw new SchemaExtractionError(result.error, result.validation)\n }\n\n const schema = result.schema\n\n // Ensure output directory exists\n await mkdir(outputDir, {recursive: true})\n\n // Write schema to file\n await writeFile(outputPath, `${JSON.stringify(schema, null, 2)}\\n`)\n\n return 
schema\n}\n"],"names":["mkdir","writeFile","dirname","studioWorkerTask","SchemaExtractionError","runSchemaExtraction","extractOptions","configPath","enforceRequiredFields","format","outputPath","workspace","Error","workDir","outputDir","result","URL","url","name","studioRootPath","workerData","workspaceName","type","error","validation","schema","recursive","JSON","stringify"],"mappings":"AAAA,SAAQA,KAAK,EAAEC,SAAS,QAAO,mBAAkB;AACjD,SAAQC,OAAO,QAAO,YAAW;AAEjC,SAAQC,gBAAgB,QAAO,mBAAkB;AAKjD,SAAQC,qBAAqB,QAAO,mCAAkC;AAStE;;;;CAIC,GACD,OAAO,eAAeC,oBACpBC,cAAqD;IAErD,MAAM,EAACC,UAAU,EAAEC,qBAAqB,EAAEC,MAAM,EAAEC,UAAU,EAAEC,SAAS,EAAC,GAAGL;IAE3E,IAAIG,WAAW,mBAAmB;QAChC,MAAM,IAAIG,MAAM,CAAC,qBAAqB,EAAEH,OAAO,CAAC,CAAC;IACnD;IAEA,MAAMI,UAAUX,QAAQK;IACxB,MAAMO,YAAYZ,QAAQQ;IAE1B,MAAMK,SAAS,MAAMZ,iBACnB,IAAIa,IAAI,iCAAiC,YAAYC,GAAG,GACxD;QACEC,MAAM;QACNC,gBAAgBN;QAChBO,YAAY;YACVb;YACAC;YACAK;YACAQ,eAAeV;QACjB;IACF;IAGF,IAAII,OAAOO,IAAI,KAAK,SAAS;QAC3B,MAAM,IAAIlB,sBAAsBW,OAAOQ,KAAK,EAAER,OAAOS,UAAU;IACjE;IAEA,MAAMC,SAASV,OAAOU,MAAM;IAE5B,iCAAiC;IACjC,MAAMzB,MAAMc,WAAW;QAACY,WAAW;IAAI;IAEvC,uBAAuB;IACvB,MAAMzB,UAAUS,YAAY,GAAGiB,KAAKC,SAAS,CAACH,QAAQ,MAAM,GAAG,EAAE,CAAC;IAElE,OAAOA;AACT"}
@@ -9,5 +9,9 @@ export const uniqWorkspaceWorkerDataSchema = z.object({
9
9
  configPath: z.string(),
10
10
  dataset: z.string().optional()
11
11
  });
12
+ export const extractWorkspaceWorkerData = z.object({
13
+ configPath: z.string(),
14
+ workDir: z.string()
15
+ });
12
16
 
13
17
  //# sourceMappingURL=types.js.map
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/actions/schema/types.ts"],"sourcesContent":["import {type SchemaValidationProblemGroup} from '@sanity/types'\nimport {z} from 'zod'\n\nexport const extractSchemaWorkerData = z.object({\n configPath: z.string(),\n enforceRequiredFields: z.boolean(),\n workDir: z.string(),\n workspaceName: z.string().optional(),\n})\n\nexport type ExtractSchemaWorkerData = z.infer<typeof extractSchemaWorkerData>\n\n/**\n * Contains debug information about the serialized schema.\n *\n * @internal\n **/\nexport type SerializedSchemaDebug = {\n hoisted: Record<string, SerializedTypeDebug>\n parent?: SerializedSchemaDebug\n size: number\n types: Record<string, SerializedTypeDebug>\n}\n\n/**\n * Contains debug information about a serialized type.\n *\n * @internal\n **/\nexport type SerializedTypeDebug = {\n extends: string\n fields?: Record<string, SerializedTypeDebug>\n of?: Record<string, SerializedTypeDebug>\n size: number\n}\n\n/** @internal */\nexport interface ExtractSchemaWorkerError {\n error: string\n type: 'error'\n\n validation?: SchemaValidationProblemGroup[]\n}\n\nexport const uniqWorkspaceWorkerDataSchema = z.object({\n configPath: z.string(),\n dataset: z.string().optional(),\n})\n\nexport type UniqWorkspaceWorkerData = z.infer<typeof uniqWorkspaceWorkerDataSchema>\n"],"names":["z","extractSchemaWorkerData","object","configPath","string","enforceRequiredFields","boolean","workDir","workspaceName","optional","uniqWorkspaceWorkerDataSchema","dataset"],"mappings":"AACA,SAAQA,CAAC,QAAO,MAAK;AAErB,OAAO,MAAMC,0BAA0BD,EAAEE,MAAM,CAAC;IAC9CC,YAAYH,EAAEI,MAAM;IACpBC,uBAAuBL,EAAEM,OAAO;IAChCC,SAASP,EAAEI,MAAM;IACjBI,eAAeR,EAAEI,MAAM,GAAGK,QAAQ;AACpC,GAAE;AAoCF,OAAO,MAAMC,gCAAgCV,EAAEE,MAAM,CAAC;IACpDC,YAAYH,EAAEI,MAAM;IACpBO,SAASX,EAAEI,MAAM,GAAGK,QAAQ;AAC9B,GAAE"}
1
+ {"version":3,"sources":["../../../src/actions/schema/types.ts"],"sourcesContent":["import {type SchemaValidationProblemGroup} from '@sanity/types'\nimport {z} from 'zod'\n\nexport const extractSchemaWorkerData = z.object({\n configPath: z.string(),\n enforceRequiredFields: z.boolean(),\n workDir: z.string(),\n workspaceName: z.string().optional(),\n})\n\nexport type ExtractSchemaWorkerData = z.infer<typeof extractSchemaWorkerData>\n\n/**\n * Contains debug information about the serialized schema.\n *\n * @internal\n **/\nexport type SerializedSchemaDebug = {\n hoisted: Record<string, SerializedTypeDebug>\n parent?: SerializedSchemaDebug\n size: number\n types: Record<string, SerializedTypeDebug>\n}\n\n/**\n * Contains debug information about a serialized type.\n *\n * @internal\n **/\nexport type SerializedTypeDebug = {\n extends: string\n fields?: Record<string, SerializedTypeDebug>\n of?: Record<string, SerializedTypeDebug>\n size: number\n}\n\n/** @internal */\nexport interface ExtractSchemaWorkerError {\n error: string\n type: 'error'\n\n validation?: SchemaValidationProblemGroup[]\n}\n\nexport const uniqWorkspaceWorkerDataSchema = z.object({\n configPath: z.string(),\n dataset: z.string().optional(),\n})\n\nexport type UniqWorkspaceWorkerData = z.infer<typeof uniqWorkspaceWorkerDataSchema>\n\nexport const extractWorkspaceWorkerData = z.object({\n configPath: z.string(),\n workDir: z.string(),\n})\n\nexport type ExtractWorkspaceWorkerData = z.infer<typeof 
extractWorkspaceWorkerData>\n"],"names":["z","extractSchemaWorkerData","object","configPath","string","enforceRequiredFields","boolean","workDir","workspaceName","optional","uniqWorkspaceWorkerDataSchema","dataset","extractWorkspaceWorkerData"],"mappings":"AACA,SAAQA,CAAC,QAAO,MAAK;AAErB,OAAO,MAAMC,0BAA0BD,EAAEE,MAAM,CAAC;IAC9CC,YAAYH,EAAEI,MAAM;IACpBC,uBAAuBL,EAAEM,OAAO;IAChCC,SAASP,EAAEI,MAAM;IACjBI,eAAeR,EAAEI,MAAM,GAAGK,QAAQ;AACpC,GAAE;AAoCF,OAAO,MAAMC,gCAAgCV,EAAEE,MAAM,CAAC;IACpDC,YAAYH,EAAEI,MAAM;IACpBO,SAASX,EAAEI,MAAM,GAAGK,QAAQ;AAC9B,GAAE;AAIF,OAAO,MAAMG,6BAA6BZ,EAAEE,MAAM,CAAC;IACjDC,YAAYH,EAAEI,MAAM;IACpBG,SAASP,EAAEI,MAAM;AACnB,GAAE"}
@@ -1,5 +1,5 @@
1
1
  import { CLIError } from '@oclif/core/errors';
2
- import { uniqBy } from 'lodash-es';
2
+ import uniqBy from 'lodash-es/uniqBy.js';
3
3
  import { isDefined } from '../../manifest/schemaTypeHelpers.js';
4
4
  import { SANITY_WORKSPACE_SCHEMA_ID_PREFIX } from '../../manifest/types.js';
5
5
  const validForIdChars = 'a-zA-Z0-9._-';
@@ -11,12 +11,6 @@ const requiredInId = SANITY_WORKSPACE_SCHEMA_ID_PREFIX.replaceAll(/[.]/g, String
11
11
  const idIdPatternString = String.raw`^${requiredInId}\.([${validForNamesChars}]+)`;
12
12
  const baseIdPattern = new RegExp(`${idIdPatternString}$`);
13
13
  const taggedIdIdPattern = new RegExp(String.raw`${idIdPatternString}\.tag\.([${validForNamesChars}]+)$`);
14
- export class FlagValidationError extends Error {
15
- constructor(message){
16
- super(message);
17
- this.name = 'FlagValidationError';
18
- }
19
- }
20
14
  export function parseIds(ids) {
21
15
  if (!ids) {
22
16
  throw new CLIError('ids argument is empty');
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/actions/schema/utils/schemaStoreValidation.ts"],"sourcesContent":["import {CLIError} from '@oclif/core/errors'\nimport {uniqBy} from 'lodash-es'\n\nimport {isDefined} from '../../manifest/schemaTypeHelpers.js'\nimport {SANITY_WORKSPACE_SCHEMA_ID_PREFIX} from '../../manifest/types.js'\n\nconst validForIdChars = 'a-zA-Z0-9._-'\nconst validForIdPattern = new RegExp(`^[${validForIdChars}]+$`)\n\n//no periods allowed in workspaceName or tag in ids\nexport const validForNamesChars = 'a-zA-Z0-9_-'\nexport const validForNamesPattern = new RegExp(`^[${validForNamesChars}]+$`)\n\nconst requiredInId = SANITY_WORKSPACE_SCHEMA_ID_PREFIX.replaceAll(/[.]/g, String.raw`\\.`)\n\nconst idIdPatternString = String.raw`^${requiredInId}\\.([${validForNamesChars}]+)`\nconst baseIdPattern = new RegExp(`${idIdPatternString}$`)\nconst taggedIdIdPattern = new RegExp(\n String.raw`${idIdPatternString}\\.tag\\.([${validForNamesChars}]+)$`,\n)\n\nexport class FlagValidationError extends Error {\n constructor(message: string) {\n super(message)\n this.name = 'FlagValidationError'\n }\n}\n\nexport interface WorkspaceSchemaId {\n schemaId: string\n workspace: string\n}\n\nexport function parseIds(ids?: string): WorkspaceSchemaId[] {\n if (!ids) {\n throw new CLIError('ids argument is empty')\n }\n\n const errors: string[] = []\n\n const parsedIds = ids\n .split(',')\n .map((id) => id.trim())\n .filter((id) => !!id)\n .map((id) => parseWorkspaceSchemaId(errors, id))\n .filter((item) => isDefined(item))\n\n if (errors.length > 0) {\n throw new CLIError(`Invalid arguments:\\n${errors.map((error) => ` - ${error}`).join('\\n')}`)\n }\n\n if (parsedIds.length === 0) {\n throw new CLIError(`ids contains no valid id strings`)\n }\n\n const uniqueIds = uniqBy(parsedIds, 'schemaId' satisfies keyof (typeof parsedIds)[number])\n if (uniqueIds.length < parsedIds.length) {\n throw new CLIError(`ids contains duplicates`)\n }\n\n return uniqueIds\n}\n\nexport function 
parseWorkspaceSchemaId(errors: string[], id?: string) {\n if (id === undefined) {\n return\n }\n\n if (!id) {\n errors.push('id argument is empty')\n return\n }\n\n const trimmedId = id.trim()\n\n if (!validForIdPattern.test(trimmedId)) {\n errors.push(`id can only contain characters in [${validForIdChars}] but found: \"${trimmedId}\"`)\n return\n }\n\n if (trimmedId.startsWith('-')) {\n errors.push(`id cannot start with - (dash) but found: \"${trimmedId}\"`)\n return\n }\n\n if (/\\.\\./g.test(trimmedId)) {\n errors.push(`id cannot have consecutive . (period) characters, but found: \"${trimmedId}\"`)\n return\n }\n\n const [, workspace] = trimmedId.match(taggedIdIdPattern) ?? trimmedId.match(baseIdPattern) ?? []\n if (!workspace) {\n errors.push(\n [\n `id must either match ${SANITY_WORKSPACE_SCHEMA_ID_PREFIX}.<workspaceName> `,\n `or ${SANITY_WORKSPACE_SCHEMA_ID_PREFIX}.<workspaceName>.tag.<tag> but found: \"${trimmedId}\". `,\n `Note that workspace name characters not in [${validForNamesChars}] has to be replaced with _ for schema id.`,\n ].join(''),\n )\n return\n }\n return {\n schemaId: trimmedId,\n workspace,\n }\n}\n\n/**\n *\n * @param tag - The tag to parse\n * Throws an error if the tag is empty\n * Throws an error if the tag contains a period\n * Throws an error if the tag starts with a dash\n * Returns the parsed tag\n */\nexport async function parseTag(tag?: string) {\n if (tag === undefined) {\n return tag\n }\n\n if (!tag) {\n throw new CLIError('tag argument is empty')\n }\n\n if (tag.includes('.')) {\n throw new CLIError(`tag cannot contain . 
(period), but was: \"${tag}\"`)\n }\n\n if (!validForNamesPattern.test(tag)) {\n throw new CLIError(\n `tag can only contain characters in [${validForNamesChars}], but was: \"${tag}\"`,\n )\n }\n\n if (tag.startsWith('-')) {\n throw new CLIError(`tag cannot start with - (dash) but was: \"${tag}\"`)\n }\n\n return tag\n}\n\nexport const SCHEMA_PERMISSION_HELP_TEXT =\n 'For multi-project workspaces, set SANITY_AUTH_TOKEN environment variable to a token with access to the workspace projects.'\n"],"names":["CLIError","uniqBy","isDefined","SANITY_WORKSPACE_SCHEMA_ID_PREFIX","validForIdChars","validForIdPattern","RegExp","validForNamesChars","validForNamesPattern","requiredInId","replaceAll","String","raw","idIdPatternString","baseIdPattern","taggedIdIdPattern","FlagValidationError","Error","message","name","parseIds","ids","errors","parsedIds","split","map","id","trim","filter","parseWorkspaceSchemaId","item","length","error","join","uniqueIds","undefined","push","trimmedId","test","startsWith","workspace","match","schemaId","parseTag","tag","includes","SCHEMA_PERMISSION_HELP_TEXT"],"mappings":"AAAA,SAAQA,QAAQ,QAAO,qBAAoB;AAC3C,SAAQC,MAAM,QAAO,YAAW;AAEhC,SAAQC,SAAS,QAAO,sCAAqC;AAC7D,SAAQC,iCAAiC,QAAO,0BAAyB;AAEzE,MAAMC,kBAAkB;AACxB,MAAMC,oBAAoB,IAAIC,OAAO,CAAC,EAAE,EAAEF,gBAAgB,GAAG,CAAC;AAE9D,mDAAmD;AACnD,OAAO,MAAMG,qBAAqB,cAAa;AAC/C,OAAO,MAAMC,uBAAuB,IAAIF,OAAO,CAAC,EAAE,EAAEC,mBAAmB,GAAG,CAAC,EAAC;AAE5E,MAAME,eAAeN,kCAAkCO,UAAU,CAAC,QAAQC,OAAOC,GAAG,CAAC,EAAE,CAAC;AAExF,MAAMC,oBAAoBF,OAAOC,GAAG,CAAC,CAAC,EAAEH,aAAa,IAAI,EAAEF,mBAAmB,GAAG,CAAC;AAClF,MAAMO,gBAAgB,IAAIR,OAAO,GAAGO,kBAAkB,CAAC,CAAC;AACxD,MAAME,oBAAoB,IAAIT,OAC5BK,OAAOC,GAAG,CAAC,EAAEC,kBAAkB,SAAS,EAAEN,mBAAmB,IAAI,CAAC;AAGpE,OAAO,MAAMS,4BAA4BC;IACvC,YAAYC,OAAe,CAAE;QAC3B,KAAK,CAACA;QACN,IAAI,CAACC,IAAI,GAAG;IACd;AACF;AAOA,OAAO,SAASC,SAASC,GAAY;IACnC,IAAI,CAACA,KAAK;QACR,MAAM,IAAIrB,SAAS;IACrB;IAEA,MAAMsB,SAAmB,EAAE;IAE3B,MAAMC,YAAYF,IACfG,KAAK,CAAC,KACNC,GAAG,CAAC,CAACC,KAAOA,GAAGC,IAAI,IACnBC,MAAM,CAAC,C
AACF,KAAO,CAAC,CAACA,IACjBD,GAAG,CAAC,CAACC,KAAOG,uBAAuBP,QAAQI,KAC3CE,MAAM,CAAC,CAACE,OAAS5B,UAAU4B;IAE9B,IAAIR,OAAOS,MAAM,GAAG,GAAG;QACrB,MAAM,IAAI/B,SAAS,CAAC,oBAAoB,EAAEsB,OAAOG,GAAG,CAAC,CAACO,QAAU,CAAC,IAAI,EAAEA,OAAO,EAAEC,IAAI,CAAC,OAAO;IAC9F;IAEA,IAAIV,UAAUQ,MAAM,KAAK,GAAG;QAC1B,MAAM,IAAI/B,SAAS,CAAC,gCAAgC,CAAC;IACvD;IAEA,MAAMkC,YAAYjC,OAAOsB,WAAW;IACpC,IAAIW,UAAUH,MAAM,GAAGR,UAAUQ,MAAM,EAAE;QACvC,MAAM,IAAI/B,SAAS,CAAC,uBAAuB,CAAC;IAC9C;IAEA,OAAOkC;AACT;AAEA,OAAO,SAASL,uBAAuBP,MAAgB,EAAEI,EAAW;IAClE,IAAIA,OAAOS,WAAW;QACpB;IACF;IAEA,IAAI,CAACT,IAAI;QACPJ,OAAOc,IAAI,CAAC;QACZ;IACF;IAEA,MAAMC,YAAYX,GAAGC,IAAI;IAEzB,IAAI,CAACtB,kBAAkBiC,IAAI,CAACD,YAAY;QACtCf,OAAOc,IAAI,CAAC,CAAC,mCAAmC,EAAEhC,gBAAgB,cAAc,EAAEiC,UAAU,CAAC,CAAC;QAC9F;IACF;IAEA,IAAIA,UAAUE,UAAU,CAAC,MAAM;QAC7BjB,OAAOc,IAAI,CAAC,CAAC,0CAA0C,EAAEC,UAAU,CAAC,CAAC;QACrE;IACF;IAEA,IAAI,QAAQC,IAAI,CAACD,YAAY;QAC3Bf,OAAOc,IAAI,CAAC,CAAC,8DAA8D,EAAEC,UAAU,CAAC,CAAC;QACzF;IACF;IAEA,MAAM,GAAGG,UAAU,GAAGH,UAAUI,KAAK,CAAC1B,sBAAsBsB,UAAUI,KAAK,CAAC3B,kBAAkB,EAAE;IAChG,IAAI,CAAC0B,WAAW;QACdlB,OAAOc,IAAI,CACT;YACE,CAAC,qBAAqB,EAAEjC,kCAAkC,iBAAiB,CAAC;YAC5E,CAAC,GAAG,EAAEA,kCAAkC,uCAAuC,EAAEkC,UAAU,GAAG,CAAC;YAC/F,CAAC,4CAA4C,EAAE9B,mBAAmB,0CAA0C,CAAC;SAC9G,CAAC0B,IAAI,CAAC;QAET;IACF;IACA,OAAO;QACLS,UAAUL;QACVG;IACF;AACF;AAEA;;;;;;;CAOC,GACD,OAAO,eAAeG,SAASC,GAAY;IACzC,IAAIA,QAAQT,WAAW;QACrB,OAAOS;IACT;IAEA,IAAI,CAACA,KAAK;QACR,MAAM,IAAI5C,SAAS;IACrB;IAEA,IAAI4C,IAAIC,QAAQ,CAAC,MAAM;QACrB,MAAM,IAAI7C,SAAS,CAAC,yCAAyC,EAAE4C,IAAI,CAAC,CAAC;IACvE;IAEA,IAAI,CAACpC,qBAAqB8B,IAAI,CAACM,MAAM;QACnC,MAAM,IAAI5C,SACR,CAAC,oCAAoC,EAAEO,mBAAmB,aAAa,EAAEqC,IAAI,CAAC,CAAC;IAEnF;IAEA,IAAIA,IAAIL,UAAU,CAAC,MAAM;QACvB,MAAM,IAAIvC,SAAS,CAAC,yCAAyC,EAAE4C,IAAI,CAAC,CAAC;IACvE;IAEA,OAAOA;AACT;AAEA,OAAO,MAAME,8BACX,6HAA4H"}
1
+ {"version":3,"sources":["../../../../src/actions/schema/utils/schemaStoreValidation.ts"],"sourcesContent":["import {CLIError} from '@oclif/core/errors'\nimport uniqBy from 'lodash-es/uniqBy.js'\n\nimport {isDefined} from '../../manifest/schemaTypeHelpers.js'\nimport {SANITY_WORKSPACE_SCHEMA_ID_PREFIX} from '../../manifest/types.js'\n\nconst validForIdChars = 'a-zA-Z0-9._-'\nconst validForIdPattern = new RegExp(`^[${validForIdChars}]+$`)\n\n//no periods allowed in workspaceName or tag in ids\nexport const validForNamesChars = 'a-zA-Z0-9_-'\nexport const validForNamesPattern = new RegExp(`^[${validForNamesChars}]+$`)\n\nconst requiredInId = SANITY_WORKSPACE_SCHEMA_ID_PREFIX.replaceAll(/[.]/g, String.raw`\\.`)\n\nconst idIdPatternString = String.raw`^${requiredInId}\\.([${validForNamesChars}]+)`\nconst baseIdPattern = new RegExp(`${idIdPatternString}$`)\nconst taggedIdIdPattern = new RegExp(\n String.raw`${idIdPatternString}\\.tag\\.([${validForNamesChars}]+)$`,\n)\n\nexport interface WorkspaceSchemaId {\n schemaId: string\n workspace: string\n}\n\nexport function parseIds(ids?: string): WorkspaceSchemaId[] {\n if (!ids) {\n throw new CLIError('ids argument is empty')\n }\n\n const errors: string[] = []\n\n const parsedIds = ids\n .split(',')\n .map((id) => id.trim())\n .filter((id) => !!id)\n .map((id) => parseWorkspaceSchemaId(errors, id))\n .filter((item) => isDefined(item))\n\n if (errors.length > 0) {\n throw new CLIError(`Invalid arguments:\\n${errors.map((error) => ` - ${error}`).join('\\n')}`)\n }\n\n if (parsedIds.length === 0) {\n throw new CLIError(`ids contains no valid id strings`)\n }\n\n const uniqueIds = uniqBy(parsedIds, 'schemaId' satisfies keyof (typeof parsedIds)[number])\n if (uniqueIds.length < parsedIds.length) {\n throw new CLIError(`ids contains duplicates`)\n }\n\n return uniqueIds\n}\n\nexport function parseWorkspaceSchemaId(errors: string[], id?: string) {\n if (id === undefined) {\n return\n }\n\n if (!id) {\n errors.push('id argument is 
empty')\n return\n }\n\n const trimmedId = id.trim()\n\n if (!validForIdPattern.test(trimmedId)) {\n errors.push(`id can only contain characters in [${validForIdChars}] but found: \"${trimmedId}\"`)\n return\n }\n\n if (trimmedId.startsWith('-')) {\n errors.push(`id cannot start with - (dash) but found: \"${trimmedId}\"`)\n return\n }\n\n if (/\\.\\./g.test(trimmedId)) {\n errors.push(`id cannot have consecutive . (period) characters, but found: \"${trimmedId}\"`)\n return\n }\n\n const [, workspace] = trimmedId.match(taggedIdIdPattern) ?? trimmedId.match(baseIdPattern) ?? []\n if (!workspace) {\n errors.push(\n [\n `id must either match ${SANITY_WORKSPACE_SCHEMA_ID_PREFIX}.<workspaceName> `,\n `or ${SANITY_WORKSPACE_SCHEMA_ID_PREFIX}.<workspaceName>.tag.<tag> but found: \"${trimmedId}\". `,\n `Note that workspace name characters not in [${validForNamesChars}] has to be replaced with _ for schema id.`,\n ].join(''),\n )\n return\n }\n return {\n schemaId: trimmedId,\n workspace,\n }\n}\n\n/**\n *\n * @param tag - The tag to parse\n * Throws an error if the tag is empty\n * Throws an error if the tag contains a period\n * Throws an error if the tag starts with a dash\n * Returns the parsed tag\n */\nexport async function parseTag(tag?: string) {\n if (tag === undefined) {\n return tag\n }\n\n if (!tag) {\n throw new CLIError('tag argument is empty')\n }\n\n if (tag.includes('.')) {\n throw new CLIError(`tag cannot contain . 
(period), but was: \"${tag}\"`)\n }\n\n if (!validForNamesPattern.test(tag)) {\n throw new CLIError(\n `tag can only contain characters in [${validForNamesChars}], but was: \"${tag}\"`,\n )\n }\n\n if (tag.startsWith('-')) {\n throw new CLIError(`tag cannot start with - (dash) but was: \"${tag}\"`)\n }\n\n return tag\n}\n\nexport const SCHEMA_PERMISSION_HELP_TEXT =\n 'For multi-project workspaces, set SANITY_AUTH_TOKEN environment variable to a token with access to the workspace projects.'\n"],"names":["CLIError","uniqBy","isDefined","SANITY_WORKSPACE_SCHEMA_ID_PREFIX","validForIdChars","validForIdPattern","RegExp","validForNamesChars","validForNamesPattern","requiredInId","replaceAll","String","raw","idIdPatternString","baseIdPattern","taggedIdIdPattern","parseIds","ids","errors","parsedIds","split","map","id","trim","filter","parseWorkspaceSchemaId","item","length","error","join","uniqueIds","undefined","push","trimmedId","test","startsWith","workspace","match","schemaId","parseTag","tag","includes","SCHEMA_PERMISSION_HELP_TEXT"],"mappings":"AAAA,SAAQA,QAAQ,QAAO,qBAAoB;AAC3C,OAAOC,YAAY,sBAAqB;AAExC,SAAQC,SAAS,QAAO,sCAAqC;AAC7D,SAAQC,iCAAiC,QAAO,0BAAyB;AAEzE,MAAMC,kBAAkB;AACxB,MAAMC,oBAAoB,IAAIC,OAAO,CAAC,EAAE,EAAEF,gBAAgB,GAAG,CAAC;AAE9D,mDAAmD;AACnD,OAAO,MAAMG,qBAAqB,cAAa;AAC/C,OAAO,MAAMC,uBAAuB,IAAIF,OAAO,CAAC,EAAE,EAAEC,mBAAmB,GAAG,CAAC,EAAC;AAE5E,MAAME,eAAeN,kCAAkCO,UAAU,CAAC,QAAQC,OAAOC,GAAG,CAAC,EAAE,CAAC;AAExF,MAAMC,oBAAoBF,OAAOC,GAAG,CAAC,CAAC,EAAEH,aAAa,IAAI,EAAEF,mBAAmB,GAAG,CAAC;AAClF,MAAMO,gBAAgB,IAAIR,OAAO,GAAGO,kBAAkB,CAAC,CAAC;AACxD,MAAME,oBAAoB,IAAIT,OAC5BK,OAAOC,GAAG,CAAC,EAAEC,kBAAkB,SAAS,EAAEN,mBAAmB,IAAI,CAAC;AAQpE,OAAO,SAASS,SAASC,GAAY;IACnC,IAAI,CAACA,KAAK;QACR,MAAM,IAAIjB,SAAS;IACrB;IAEA,MAAMkB,SAAmB,EAAE;IAE3B,MAAMC,YAAYF,IACfG,KAAK,CAAC,KACNC,GAAG,CAAC,CAACC,KAAOA,GAAGC,IAAI,IACnBC,MAAM,CAAC,CAACF,KAAO,CAAC,CAACA,IACjBD,GAAG,CAAC,CAACC,KAAOG,uBAAuBP,QAAQI,KAC3CE,MAAM,CAAC,CAACE,OAASxB,UAAUwB;IAE9B,IAAIR,OAAOS,MAAM,GAAG,GAAG;QACrB,MAAM,IAAI
3B,SAAS,CAAC,oBAAoB,EAAEkB,OAAOG,GAAG,CAAC,CAACO,QAAU,CAAC,IAAI,EAAEA,OAAO,EAAEC,IAAI,CAAC,OAAO;IAC9F;IAEA,IAAIV,UAAUQ,MAAM,KAAK,GAAG;QAC1B,MAAM,IAAI3B,SAAS,CAAC,gCAAgC,CAAC;IACvD;IAEA,MAAM8B,YAAY7B,OAAOkB,WAAW;IACpC,IAAIW,UAAUH,MAAM,GAAGR,UAAUQ,MAAM,EAAE;QACvC,MAAM,IAAI3B,SAAS,CAAC,uBAAuB,CAAC;IAC9C;IAEA,OAAO8B;AACT;AAEA,OAAO,SAASL,uBAAuBP,MAAgB,EAAEI,EAAW;IAClE,IAAIA,OAAOS,WAAW;QACpB;IACF;IAEA,IAAI,CAACT,IAAI;QACPJ,OAAOc,IAAI,CAAC;QACZ;IACF;IAEA,MAAMC,YAAYX,GAAGC,IAAI;IAEzB,IAAI,CAAClB,kBAAkB6B,IAAI,CAACD,YAAY;QACtCf,OAAOc,IAAI,CAAC,CAAC,mCAAmC,EAAE5B,gBAAgB,cAAc,EAAE6B,UAAU,CAAC,CAAC;QAC9F;IACF;IAEA,IAAIA,UAAUE,UAAU,CAAC,MAAM;QAC7BjB,OAAOc,IAAI,CAAC,CAAC,0CAA0C,EAAEC,UAAU,CAAC,CAAC;QACrE;IACF;IAEA,IAAI,QAAQC,IAAI,CAACD,YAAY;QAC3Bf,OAAOc,IAAI,CAAC,CAAC,8DAA8D,EAAEC,UAAU,CAAC,CAAC;QACzF;IACF;IAEA,MAAM,GAAGG,UAAU,GAAGH,UAAUI,KAAK,CAACtB,sBAAsBkB,UAAUI,KAAK,CAACvB,kBAAkB,EAAE;IAChG,IAAI,CAACsB,WAAW;QACdlB,OAAOc,IAAI,CACT;YACE,CAAC,qBAAqB,EAAE7B,kCAAkC,iBAAiB,CAAC;YAC5E,CAAC,GAAG,EAAEA,kCAAkC,uCAAuC,EAAE8B,UAAU,GAAG,CAAC;YAC/F,CAAC,4CAA4C,EAAE1B,mBAAmB,0CAA0C,CAAC;SAC9G,CAACsB,IAAI,CAAC;QAET;IACF;IACA,OAAO;QACLS,UAAUL;QACVG;IACF;AACF;AAEA;;;;;;;CAOC,GACD,OAAO,eAAeG,SAASC,GAAY;IACzC,IAAIA,QAAQT,WAAW;QACrB,OAAOS;IACT;IAEA,IAAI,CAACA,KAAK;QACR,MAAM,IAAIxC,SAAS;IACrB;IAEA,IAAIwC,IAAIC,QAAQ,CAAC,MAAM;QACrB,MAAM,IAAIzC,SAAS,CAAC,yCAAyC,EAAEwC,IAAI,CAAC,CAAC;IACvE;IAEA,IAAI,CAAChC,qBAAqB0B,IAAI,CAACM,MAAM;QACnC,MAAM,IAAIxC,SACR,CAAC,oCAAoC,EAAEO,mBAAmB,aAAa,EAAEiC,IAAI,CAAC,CAAC;IAEnF;IAEA,IAAIA,IAAIL,UAAU,CAAC,MAAM;QACvB,MAAM,IAAInC,SAAS,CAAC,yCAAyC,EAAEwC,IAAI,CAAC,CAAC;IACvE;IAEA,OAAOA;AACT;AAEA,OAAO,MAAME,8BACX,6HAA4H"}
@@ -1,4 +1,4 @@
1
- import { uniqBy } from 'lodash-es';
1
+ import uniqBy from 'lodash-es/uniqBy.js';
2
2
  export function uniqByProjectIdDataset(workspaces) {
3
3
  return uniqBy(workspaces.map((w)=>({
4
4
  ...w,
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../../src/actions/schema/utils/uniqByProjectIdDataset.ts"],"sourcesContent":["import {uniqBy} from 'lodash-es'\nimport {type Workspace} from 'sanity'\n\nexport function uniqByProjectIdDataset(workspaces: Workspace[]) {\n return uniqBy<Workspace & {key: string}>(\n workspaces.map((w) => ({\n ...w,\n key: `${w.projectId}-${w.dataset}`,\n })),\n 'key',\n )\n}\n"],"names":["uniqBy","uniqByProjectIdDataset","workspaces","map","w","key","projectId","dataset"],"mappings":"AAAA,SAAQA,MAAM,QAAO,YAAW;AAGhC,OAAO,SAASC,uBAAuBC,UAAuB;IAC5D,OAAOF,OACLE,WAAWC,GAAG,CAAC,CAACC,IAAO,CAAA;YACrB,GAAGA,CAAC;YACJC,KAAK,GAAGD,EAAEE,SAAS,CAAC,CAAC,EAAEF,EAAEG,OAAO,EAAE;QACpC,CAAA,IACA;AAEJ"}
1
+ {"version":3,"sources":["../../../../src/actions/schema/utils/uniqByProjectIdDataset.ts"],"sourcesContent":["import uniqBy from 'lodash-es/uniqBy.js'\nimport {type Workspace} from 'sanity'\n\nexport function uniqByProjectIdDataset(workspaces: Workspace[]) {\n return uniqBy<Workspace & {key: string}>(\n workspaces.map((w) => ({\n ...w,\n key: `${w.projectId}-${w.dataset}`,\n })),\n 'key',\n )\n}\n"],"names":["uniqBy","uniqByProjectIdDataset","workspaces","map","w","key","projectId","dataset"],"mappings":"AAAA,OAAOA,YAAY,sBAAqB;AAGxC,OAAO,SAASC,uBAAuBC,UAAuB;IAC5D,OAAOF,OACLE,WAAWC,GAAG,CAAC,CAACC,IAAO,CAAA;YACrB,GAAGA,CAAC;YACJC,KAAK,GAAGD,EAAEE,SAAS,CAAC,CAAC,EAAEF,EAAEG,OAAO,EAAE;QACpC,CAAA,IACA;AAEJ"}
@@ -1,5 +1,6 @@
1
1
  import { getCliTelemetry } from '@sanity/cli-core';
2
- import { mean, once } from 'lodash-es';
2
+ import mean from 'lodash-es/mean.js';
3
+ import once from 'lodash-es/once.js';
3
4
  import { SchemaExtractionWatchModeTrace } from '../../telemetry/extractSchema.telemetry.js';
4
5
  import { DEFAULT_WATCH_PATTERNS, startExtractSchemaWatcher } from './extractSchemaWatcher.js';
5
6
  export async function watchExtractSchema(options) {
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/actions/schema/watchExtractSchema.ts"],"sourcesContent":["import {getCliTelemetry, type Output} from '@sanity/cli-core'\nimport {mean, once} from 'lodash-es'\n\nimport {SchemaExtractionWatchModeTrace} from '../../telemetry/extractSchema.telemetry.js'\nimport {DEFAULT_WATCH_PATTERNS, startExtractSchemaWatcher} from './extractSchemaWatcher.js'\nimport {type ExtractOptions} from './getExtractOptions.js'\n\ninterface WatchExtractSchemaOptions {\n extractOptions: ExtractOptions\n output: Output\n}\n\nexport async function watchExtractSchema(\n options: WatchExtractSchemaOptions,\n): Promise<{close: () => Promise<void>}> {\n const {extractOptions, output} = options\n\n // Keep the start time + some simple stats for extractions as they happen\n const startTime = Date.now()\n const stats: {failedCount: number; successfulDurations: number[]} = {\n failedCount: 0,\n successfulDurations: [],\n }\n\n const watchPatterns = [...DEFAULT_WATCH_PATTERNS, ...extractOptions.watchPatterns]\n\n const trace = getCliTelemetry().trace(SchemaExtractionWatchModeTrace)\n trace.start()\n\n // Print watch mode header and patterns at the very beginning\n output.log('Schema extraction watch mode')\n output.log('')\n output.log('Watching for changes in:')\n for (const pattern of watchPatterns) {\n output.log(` - ${pattern}`)\n }\n output.log('')\n\n output.log('Running initial extraction...')\n\n // Start the watcher (includes initial extraction)\n const {close} = await startExtractSchemaWatcher({\n extractOptions,\n onExtraction: ({duration, success}) => {\n if (success) {\n stats.successfulDurations.push(duration)\n } else {\n stats.failedCount++\n }\n },\n output,\n watchPatterns,\n })\n\n trace.log({\n enforceRequiredFields: extractOptions.enforceRequiredFields,\n schemaFormat: extractOptions.format,\n step: 'started',\n })\n\n output.log('')\n output.log('Watching for changes... 
(Ctrl+C to stop)')\n\n /**\n * Cleanup function that logs telemetry and stops the watcher.\n * Wrapped in once() to prevent multiple calls.\n */\n const cleanup = once(async () => {\n trace.log({\n averageExtractionDuration: mean(stats.successfulDurations) || 0,\n extractionFailedCount: stats.failedCount,\n extractionSuccessfulCount: stats.successfulDurations.length,\n step: 'stopped',\n watcherDuration: Date.now() - startTime,\n })\n trace.complete()\n\n output.log('')\n output.log('Stopping watch mode...')\n await close()\n })\n\n // Return cleanup function for programmatic usage and testing\n // The CLI framework will handle SIGINT/SIGTERM\n return {close: cleanup}\n}\n"],"names":["getCliTelemetry","mean","once","SchemaExtractionWatchModeTrace","DEFAULT_WATCH_PATTERNS","startExtractSchemaWatcher","watchExtractSchema","options","extractOptions","output","startTime","Date","now","stats","failedCount","successfulDurations","watchPatterns","trace","start","log","pattern","close","onExtraction","duration","success","push","enforceRequiredFields","schemaFormat","format","step","cleanup","averageExtractionDuration","extractionFailedCount","extractionSuccessfulCount","length","watcherDuration","complete"],"mappings":"AAAA,SAAQA,eAAe,QAAoB,mBAAkB;AAC7D,SAAQC,IAAI,EAAEC,IAAI,QAAO,YAAW;AAEpC,SAAQC,8BAA8B,QAAO,6CAA4C;AACzF,SAAQC,sBAAsB,EAAEC,yBAAyB,QAAO,4BAA2B;AAQ3F,OAAO,eAAeC,mBACpBC,OAAkC;IAElC,MAAM,EAACC,cAAc,EAAEC,MAAM,EAAC,GAAGF;IAEjC,yEAAyE;IACzE,MAAMG,YAAYC,KAAKC,GAAG;IAC1B,MAAMC,QAA8D;QAClEC,aAAa;QACbC,qBAAqB,EAAE;IACzB;IAEA,MAAMC,gBAAgB;WAAIZ;WAA2BI,eAAeQ,aAAa;KAAC;IAElF,MAAMC,QAAQjB,kBAAkBiB,KAAK,CAACd;IACtCc,MAAMC,KAAK;IAEX,6DAA6D;IAC7DT,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IACX,KAAK,MAAMC,WAAWJ,cAAe;QACnCP,OAAOU,GAAG,CAAC,CAAC,IAAI,EAAEC,SAAS;IAC7B;IACAX,OAAOU,GAAG,CAAC;IAEXV,OAAOU,GAAG,CAAC;IAEX,kDAAkD;IAClD,MAAM,EAACE,KAAK,EAAC,GAAG,MAAMhB,0BAA0B;QAC9CG;QACAc,cAAc,CAAC,EAACC,QAAQ,EAAEC,OAAO,EAAC;YAChC,IAAIA,SAAS;gBACXX,MAAME,mBAAmB,CAACU,
IAAI,CAACF;YACjC,OAAO;gBACLV,MAAMC,WAAW;YACnB;QACF;QACAL;QACAO;IACF;IAEAC,MAAME,GAAG,CAAC;QACRO,uBAAuBlB,eAAekB,qBAAqB;QAC3DC,cAAcnB,eAAeoB,MAAM;QACnCC,MAAM;IACR;IAEApB,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IAEX;;;GAGC,GACD,MAAMW,UAAU5B,KAAK;QACnBe,MAAME,GAAG,CAAC;YACRY,2BAA2B9B,KAAKY,MAAME,mBAAmB,KAAK;YAC9DiB,uBAAuBnB,MAAMC,WAAW;YACxCmB,2BAA2BpB,MAAME,mBAAmB,CAACmB,MAAM;YAC3DL,MAAM;YACNM,iBAAiBxB,KAAKC,GAAG,KAAKF;QAChC;QACAO,MAAMmB,QAAQ;QAEd3B,OAAOU,GAAG,CAAC;QACXV,OAAOU,GAAG,CAAC;QACX,MAAME;IACR;IAEA,6DAA6D;IAC7D,+CAA+C;IAC/C,OAAO;QAACA,OAAOS;IAAO;AACxB"}
1
+ {"version":3,"sources":["../../../src/actions/schema/watchExtractSchema.ts"],"sourcesContent":["import {getCliTelemetry, type Output} from '@sanity/cli-core'\nimport mean from 'lodash-es/mean.js'\nimport once from 'lodash-es/once.js'\n\nimport {SchemaExtractionWatchModeTrace} from '../../telemetry/extractSchema.telemetry.js'\nimport {DEFAULT_WATCH_PATTERNS, startExtractSchemaWatcher} from './extractSchemaWatcher.js'\nimport {type ExtractOptions} from './getExtractOptions.js'\n\ninterface WatchExtractSchemaOptions {\n extractOptions: ExtractOptions\n output: Output\n}\n\nexport async function watchExtractSchema(\n options: WatchExtractSchemaOptions,\n): Promise<{close: () => Promise<void>}> {\n const {extractOptions, output} = options\n\n // Keep the start time + some simple stats for extractions as they happen\n const startTime = Date.now()\n const stats: {failedCount: number; successfulDurations: number[]} = {\n failedCount: 0,\n successfulDurations: [],\n }\n\n const watchPatterns = [...DEFAULT_WATCH_PATTERNS, ...extractOptions.watchPatterns]\n\n const trace = getCliTelemetry().trace(SchemaExtractionWatchModeTrace)\n trace.start()\n\n // Print watch mode header and patterns at the very beginning\n output.log('Schema extraction watch mode')\n output.log('')\n output.log('Watching for changes in:')\n for (const pattern of watchPatterns) {\n output.log(` - ${pattern}`)\n }\n output.log('')\n\n output.log('Running initial extraction...')\n\n // Start the watcher (includes initial extraction)\n const {close} = await startExtractSchemaWatcher({\n extractOptions,\n onExtraction: ({duration, success}) => {\n if (success) {\n stats.successfulDurations.push(duration)\n } else {\n stats.failedCount++\n }\n },\n output,\n watchPatterns,\n })\n\n trace.log({\n enforceRequiredFields: extractOptions.enforceRequiredFields,\n schemaFormat: extractOptions.format,\n step: 'started',\n })\n\n output.log('')\n output.log('Watching for changes... 
(Ctrl+C to stop)')\n\n /**\n * Cleanup function that logs telemetry and stops the watcher.\n * Wrapped in once() to prevent multiple calls.\n */\n const cleanup = once(async () => {\n trace.log({\n averageExtractionDuration: mean(stats.successfulDurations) || 0,\n extractionFailedCount: stats.failedCount,\n extractionSuccessfulCount: stats.successfulDurations.length,\n step: 'stopped',\n watcherDuration: Date.now() - startTime,\n })\n trace.complete()\n\n output.log('')\n output.log('Stopping watch mode...')\n await close()\n })\n\n // Return cleanup function for programmatic usage and testing\n // The CLI framework will handle SIGINT/SIGTERM\n return {close: cleanup}\n}\n"],"names":["getCliTelemetry","mean","once","SchemaExtractionWatchModeTrace","DEFAULT_WATCH_PATTERNS","startExtractSchemaWatcher","watchExtractSchema","options","extractOptions","output","startTime","Date","now","stats","failedCount","successfulDurations","watchPatterns","trace","start","log","pattern","close","onExtraction","duration","success","push","enforceRequiredFields","schemaFormat","format","step","cleanup","averageExtractionDuration","extractionFailedCount","extractionSuccessfulCount","length","watcherDuration","complete"],"mappings":"AAAA,SAAQA,eAAe,QAAoB,mBAAkB;AAC7D,OAAOC,UAAU,oBAAmB;AACpC,OAAOC,UAAU,oBAAmB;AAEpC,SAAQC,8BAA8B,QAAO,6CAA4C;AACzF,SAAQC,sBAAsB,EAAEC,yBAAyB,QAAO,4BAA2B;AAQ3F,OAAO,eAAeC,mBACpBC,OAAkC;IAElC,MAAM,EAACC,cAAc,EAAEC,MAAM,EAAC,GAAGF;IAEjC,yEAAyE;IACzE,MAAMG,YAAYC,KAAKC,GAAG;IAC1B,MAAMC,QAA8D;QAClEC,aAAa;QACbC,qBAAqB,EAAE;IACzB;IAEA,MAAMC,gBAAgB;WAAIZ;WAA2BI,eAAeQ,aAAa;KAAC;IAElF,MAAMC,QAAQjB,kBAAkBiB,KAAK,CAACd;IACtCc,MAAMC,KAAK;IAEX,6DAA6D;IAC7DT,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IACX,KAAK,MAAMC,WAAWJ,cAAe;QACnCP,OAAOU,GAAG,CAAC,CAAC,IAAI,EAAEC,SAAS;IAC7B;IACAX,OAAOU,GAAG,CAAC;IAEXV,OAAOU,GAAG,CAAC;IAEX,kDAAkD;IAClD,MAAM,EAACE,KAAK,EAAC,GAAG,MAAMhB,0BAA0B;QAC9CG;QACAc,cAAc,CAAC,EAACC,QAAQ,EAAEC,OAAO,EAAC;YAChC,IAAIA,SAAS;gBACXX,MAAME,mBA
AmB,CAACU,IAAI,CAACF;YACjC,OAAO;gBACLV,MAAMC,WAAW;YACnB;QACF;QACAL;QACAO;IACF;IAEAC,MAAME,GAAG,CAAC;QACRO,uBAAuBlB,eAAekB,qBAAqB;QAC3DC,cAAcnB,eAAeoB,MAAM;QACnCC,MAAM;IACR;IAEApB,OAAOU,GAAG,CAAC;IACXV,OAAOU,GAAG,CAAC;IAEX;;;GAGC,GACD,MAAMW,UAAU5B,KAAK;QACnBe,MAAME,GAAG,CAAC;YACRY,2BAA2B9B,KAAKY,MAAME,mBAAmB,KAAK;YAC9DiB,uBAAuBnB,MAAMC,WAAW;YACxCmB,2BAA2BpB,MAAME,mBAAmB,CAACmB,MAAM;YAC3DL,MAAM;YACNM,iBAAiBxB,KAAKC,GAAG,KAAKF;QAChC;QACAO,MAAMmB,QAAQ;QAEd3B,OAAOU,GAAG,CAAC;QACXV,OAAOU,GAAG,CAAC;QACX,MAAME;IACR;IAEA,6DAA6D;IAC7D,+CAA+C;IAC/C,OAAO;QAACA,OAAOS;IAAO;AACxB"}
@@ -1,5 +1,5 @@
1
1
  import { styleText } from 'node:util';
2
- import { padEnd } from 'lodash-es';
2
+ import padEnd from 'lodash-es/padEnd.js';
3
3
  /**
4
4
  * Get the display name for a module.
5
5
  *
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/actions/versions/getFormatters.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport {padEnd} from 'lodash-es'\n\nimport {type ModuleVersionResult} from './types.js'\n\n/**\n * Get the display name for a module.\n *\n * @internal\n */\nexport function getDisplayName(mod: ModuleVersionResult): string {\n return mod.isGlobal ? `${mod.name} (global)` : mod.name\n}\n\n/**\n * Get formatters for the package versions.\n *\n * @internal\n */\nexport function getFormatters(versions: ModuleVersionResult[]): {\n formatName: (name: string) => string\n nameLength: number\n versionLength: number\n} {\n let nameLength = 0\n let versionLength = 0\n\n for (const mod of versions) {\n const displayName = getDisplayName(mod)\n nameLength = Math.max(nameLength, displayName.length)\n versionLength = Math.max(versionLength, (mod.installed || '<missing>').length)\n }\n\n const formatName = (name: string): string =>\n padEnd(name, nameLength + 1)\n .replace(\n /^@sanity\\/(.*?)(\\s|$)/,\n `${styleText('yellow', '@sanity/')}${styleText('cyan', '$1')}$2`,\n )\n .replace(/^sanity(\\s|$)/, `${styleText('yellow', 'sanity')}$1`)\n\n return {formatName, nameLength, 
versionLength}\n}\n"],"names":["styleText","padEnd","getDisplayName","mod","isGlobal","name","getFormatters","versions","nameLength","versionLength","displayName","Math","max","length","installed","formatName","replace"],"mappings":"AAAA,SAAQA,SAAS,QAAO,YAAW;AAEnC,SAAQC,MAAM,QAAO,YAAW;AAIhC;;;;CAIC,GACD,OAAO,SAASC,eAAeC,GAAwB;IACrD,OAAOA,IAAIC,QAAQ,GAAG,GAAGD,IAAIE,IAAI,CAAC,SAAS,CAAC,GAAGF,IAAIE,IAAI;AACzD;AAEA;;;;CAIC,GACD,OAAO,SAASC,cAAcC,QAA+B;IAK3D,IAAIC,aAAa;IACjB,IAAIC,gBAAgB;IAEpB,KAAK,MAAMN,OAAOI,SAAU;QAC1B,MAAMG,cAAcR,eAAeC;QACnCK,aAAaG,KAAKC,GAAG,CAACJ,YAAYE,YAAYG,MAAM;QACpDJ,gBAAgBE,KAAKC,GAAG,CAACH,eAAe,AAACN,CAAAA,IAAIW,SAAS,IAAI,WAAU,EAAGD,MAAM;IAC/E;IAEA,MAAME,aAAa,CAACV,OAClBJ,OAAOI,MAAMG,aAAa,GACvBQ,OAAO,CACN,yBACA,GAAGhB,UAAU,UAAU,cAAcA,UAAU,QAAQ,MAAM,EAAE,CAAC,EAEjEgB,OAAO,CAAC,iBAAiB,GAAGhB,UAAU,UAAU,UAAU,EAAE,CAAC;IAElE,OAAO;QAACe;QAAYP;QAAYC;IAAa;AAC/C"}
1
+ {"version":3,"sources":["../../../src/actions/versions/getFormatters.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport padEnd from 'lodash-es/padEnd.js'\n\nimport {type ModuleVersionResult} from './types.js'\n\n/**\n * Get the display name for a module.\n *\n * @internal\n */\nexport function getDisplayName(mod: ModuleVersionResult): string {\n return mod.isGlobal ? `${mod.name} (global)` : mod.name\n}\n\n/**\n * Get formatters for the package versions.\n *\n * @internal\n */\nexport function getFormatters(versions: ModuleVersionResult[]): {\n formatName: (name: string) => string\n nameLength: number\n versionLength: number\n} {\n let nameLength = 0\n let versionLength = 0\n\n for (const mod of versions) {\n const displayName = getDisplayName(mod)\n nameLength = Math.max(nameLength, displayName.length)\n versionLength = Math.max(versionLength, (mod.installed || '<missing>').length)\n }\n\n const formatName = (name: string): string =>\n padEnd(name, nameLength + 1)\n .replace(\n /^@sanity\\/(.*?)(\\s|$)/,\n `${styleText('yellow', '@sanity/')}${styleText('cyan', '$1')}$2`,\n )\n .replace(/^sanity(\\s|$)/, `${styleText('yellow', 'sanity')}$1`)\n\n return {formatName, nameLength, 
versionLength}\n}\n"],"names":["styleText","padEnd","getDisplayName","mod","isGlobal","name","getFormatters","versions","nameLength","versionLength","displayName","Math","max","length","installed","formatName","replace"],"mappings":"AAAA,SAAQA,SAAS,QAAO,YAAW;AAEnC,OAAOC,YAAY,sBAAqB;AAIxC;;;;CAIC,GACD,OAAO,SAASC,eAAeC,GAAwB;IACrD,OAAOA,IAAIC,QAAQ,GAAG,GAAGD,IAAIE,IAAI,CAAC,SAAS,CAAC,GAAGF,IAAIE,IAAI;AACzD;AAEA;;;;CAIC,GACD,OAAO,SAASC,cAAcC,QAA+B;IAK3D,IAAIC,aAAa;IACjB,IAAIC,gBAAgB;IAEpB,KAAK,MAAMN,OAAOI,SAAU;QAC1B,MAAMG,cAAcR,eAAeC;QACnCK,aAAaG,KAAKC,GAAG,CAACJ,YAAYE,YAAYG,MAAM;QACpDJ,gBAAgBE,KAAKC,GAAG,CAACH,eAAe,AAACN,CAAAA,IAAIW,SAAS,IAAI,WAAU,EAAGD,MAAM;IAC/E;IAEA,MAAME,aAAa,CAACV,OAClBJ,OAAOI,MAAMG,aAAa,GACvBQ,OAAO,CACN,yBACA,GAAGhB,UAAU,UAAU,cAAcA,UAAU,QAAQ,MAAM,EAAE,CAAC,EAEjEgB,OAAO,CAAC,iBAAiB,GAAGhB,UAAU,UAAU,UAAU,EAAE,CAAC;IAElE,OAAO;QAACe;QAAYP;QAAYC;IAAa;AAC/C"}
@@ -5,7 +5,6 @@ import { select } from '@sanity/cli-core/ux';
5
5
  import { assertDatasetExists } from '../../actions/backup/assertDatasetExist.js';
6
6
  import { setBackup } from '../../services/backup.js';
7
7
  import { listDatasets } from '../../services/datasets.js';
8
- import { NO_PROJECT_ID } from '../../util/errorMessages.js';
9
8
  const disableBackupDebug = subdebug('backup:disable');
10
9
  export class DisableBackupCommand extends SanityCommand {
11
10
  static args = {
@@ -29,11 +28,6 @@ export class DisableBackupCommand extends SanityCommand {
29
28
  const { args } = await this.parse(DisableBackupCommand);
30
29
  let { dataset } = args;
31
30
  const projectId = await this.getProjectId();
32
- if (!projectId) {
33
- this.error(NO_PROJECT_ID, {
34
- exit: 1
35
- });
36
- }
37
31
  let datasets;
38
32
  try {
39
33
  datasets = await listDatasets(projectId);
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/commands/backup/disable.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport {Args} from '@oclif/core'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\nimport {select} from '@sanity/cli-core/ux'\nimport {type DatasetsResponse} from '@sanity/client'\n\nimport {assertDatasetExists} from '../../actions/backup/assertDatasetExist.js'\nimport {setBackup} from '../../services/backup.js'\nimport {listDatasets} from '../../services/datasets.js'\nimport {NO_PROJECT_ID} from '../../util/errorMessages.js'\n\nconst disableBackupDebug = subdebug('backup:disable')\n\nexport class DisableBackupCommand extends SanityCommand<typeof DisableBackupCommand> {\n static override args = {\n dataset: Args.string({\n description: 'Dataset name to disable backup for',\n required: false,\n }),\n }\n\n static override description = 'Disable backup for a dataset.'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Interactively disable backup for a dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production',\n description: 'Disable backup for the production dataset',\n },\n ]\n\n public async run(): Promise<void> {\n const {args} = await this.parse(DisableBackupCommand)\n let {dataset} = args\n\n const projectId = await this.getProjectId()\n if (!projectId) {\n this.error(NO_PROJECT_ID, {exit: 1})\n }\n\n let datasets: DatasetsResponse\n\n try {\n datasets = await listDatasets(projectId)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n disableBackupDebug(`Failed to list datasets: ${message}`, error)\n this.error(`Failed to list datasets: ${message}`, {exit: 1})\n }\n\n if (datasets.length === 0) {\n this.error('No datasets found in this project.', {exit: 1})\n }\n\n if (dataset) {\n assertDatasetExists(datasets, dataset)\n } else {\n dataset = await this.promptForDataset(datasets)\n }\n\n try {\n await setBackup({dataset, projectId, status: false})\n\n this.log(`${styleText('green', `Disabled daily backups for dataset ${dataset}.\\n`)}`)\n this.log(\n `${styleText('yellow', 'Note: Existing backups will be retained according to your retention policy.\\n')}`,\n )\n\n disableBackupDebug(`Successfully disabled backup for dataset ${dataset}`)\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n disableBackupDebug(`Failed to disable backup for dataset`, error)\n this.error(`Disabling dataset backup failed: ${message}`, {exit: 1})\n }\n }\n\n private async promptForDataset(datasets: DatasetsResponse): Promise<string> {\n try {\n const choices = datasets.map((dataset) => ({\n name: dataset.name,\n value: dataset.name,\n }))\n\n return select({\n choices,\n message: 'Select the dataset name:',\n })\n } catch (error) {\n const err = error as Error\n disableBackupDebug(`Error fetching datasets`, err)\n this.error(`Failed to fetch datasets:\\n${err.message}`, {exit: 1})\n }\n 
}\n}\n"],"names":["styleText","Args","SanityCommand","subdebug","select","assertDatasetExists","setBackup","listDatasets","NO_PROJECT_ID","disableBackupDebug","DisableBackupCommand","args","dataset","string","description","required","examples","command","run","parse","projectId","getProjectId","error","exit","datasets","message","Error","String","length","promptForDataset","status","log","choices","map","name","value","err"],"mappings":"AAAA,SAAQA,SAAS,QAAO,YAAW;AAEnC,SAAQC,IAAI,QAAO,cAAa;AAChC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AACxD,SAAQC,MAAM,QAAO,sBAAqB;AAG1C,SAAQC,mBAAmB,QAAO,6CAA4C;AAC9E,SAAQC,SAAS,QAAO,2BAA0B;AAClD,SAAQC,YAAY,QAAO,6BAA4B;AACvD,SAAQC,aAAa,QAAO,8BAA6B;AAEzD,MAAMC,qBAAqBN,SAAS;AAEpC,OAAO,MAAMO,6BAA6BR;IACxC,OAAgBS,OAAO;QACrBC,SAASX,KAAKY,MAAM,CAAC;YACnBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,gCAA+B;IAE7D,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;KACD,CAAA;IAED,MAAaI,MAAqB;QAChC,MAAM,EAACP,IAAI,EAAC,GAAG,MAAM,IAAI,CAACQ,KAAK,CAACT;QAChC,IAAI,EAACE,OAAO,EAAC,GAAGD;QAEhB,MAAMS,YAAY,MAAM,IAAI,CAACC,YAAY;QACzC,IAAI,CAACD,WAAW;YACd,IAAI,CAACE,KAAK,CAACd,eAAe;gBAACe,MAAM;YAAC;QACpC;QAEA,IAAIC;QAEJ,IAAI;YACFA,WAAW,MAAMjB,aAAaa;QAChC,EAAE,OAAOE,OAAO;YACd,MAAMG,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;YAChEb,mBAAmB,CAAC,yBAAyB,EAAEgB,SAAS,EAAEH;YAC1D,IAAI,CAACA,KAAK,CAAC,CAAC,yBAAyB,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QAC5D;QAEA,IAAIC,SAASI,MAAM,KAAK,GAAG;YACzB,IAAI,CAACN,KAAK,CAAC,sCAAsC;gBAACC,MAAM;YAAC;QAC3D;QAEA,IAAIX,SAAS;YACXP,oBAAoBmB,UAAUZ;QAChC,OAAO;YACLA,UAAU,MAAM,IAAI,CAACiB,gBAAgB,CAACL;QACxC;QAEA,IAAI;YACF,MAAMlB,UAAU;gBAACM;gBAASQ;gBAAWU,QAAQ;YAAK;YAElD,IAAI,CAACC,GAAG,CAAC,GAAG/B,UAAU,SAAS,CAAC,mCAAmC,EAAEY,QAAQ,GAAG,CAAC,GAAG;YACpF,IAAI,CAACmB,GAAG,CACN,GAAG/B,UAAU,UAAU,kFAAkF;YAG3GS,mBAAmB,CAAC,yCAAyC,EAAEG,SAAS;QAC1E,EAAE,OAAOU,OAAO;YACd,MAAMG,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;YAChEb,mBAAmB,CAAC,oCAAoC,CAAC,EAAEa;YAC3D,IAAI,CAACA,KAAK,CAAC,CAAC,iCAAiC,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QA
CpE;IACF;IAEA,MAAcM,iBAAiBL,QAA0B,EAAmB;QAC1E,IAAI;YACF,MAAMQ,UAAUR,SAASS,GAAG,CAAC,CAACrB,UAAa,CAAA;oBACzCsB,MAAMtB,QAAQsB,IAAI;oBAClBC,OAAOvB,QAAQsB,IAAI;gBACrB,CAAA;YAEA,OAAO9B,OAAO;gBACZ4B;gBACAP,SAAS;YACX;QACF,EAAE,OAAOH,OAAO;YACd,MAAMc,MAAMd;YACZb,mBAAmB,CAAC,uBAAuB,CAAC,EAAE2B;YAC9C,IAAI,CAACd,KAAK,CAAC,CAAC,2BAA2B,EAAEc,IAAIX,OAAO,EAAE,EAAE;gBAACF,MAAM;YAAC;QAClE;IACF;AACF"}
1
+ {"version":3,"sources":["../../../src/commands/backup/disable.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport {Args} from '@oclif/core'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\nimport {select} from '@sanity/cli-core/ux'\nimport {type DatasetsResponse} from '@sanity/client'\n\nimport {assertDatasetExists} from '../../actions/backup/assertDatasetExist.js'\nimport {setBackup} from '../../services/backup.js'\nimport {listDatasets} from '../../services/datasets.js'\n\nconst disableBackupDebug = subdebug('backup:disable')\n\nexport class DisableBackupCommand extends SanityCommand<typeof DisableBackupCommand> {\n static override args = {\n dataset: Args.string({\n description: 'Dataset name to disable backup for',\n required: false,\n }),\n }\n\n static override description = 'Disable backup for a dataset.'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Interactively disable backup for a dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production',\n description: 'Disable backup for the production dataset',\n },\n ]\n\n public async run(): Promise<void> {\n const {args} = await this.parse(DisableBackupCommand)\n let {dataset} = args\n\n const projectId = await this.getProjectId()\n\n let datasets: DatasetsResponse\n\n try {\n datasets = await listDatasets(projectId)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n disableBackupDebug(`Failed to list datasets: ${message}`, error)\n this.error(`Failed to list datasets: ${message}`, {exit: 1})\n }\n\n if (datasets.length === 0) {\n this.error('No datasets found in this project.', {exit: 1})\n }\n\n if (dataset) {\n assertDatasetExists(datasets, dataset)\n } else {\n dataset = await this.promptForDataset(datasets)\n }\n\n try {\n await setBackup({dataset, projectId, status: false})\n\n this.log(`${styleText('green', `Disabled daily backups for dataset ${dataset}.\\n`)}`)\n this.log(\n `${styleText('yellow', 'Note: Existing backups will be retained according to your retention policy.\\n')}`,\n )\n\n disableBackupDebug(`Successfully disabled backup for dataset ${dataset}`)\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n disableBackupDebug(`Failed to disable backup for dataset`, error)\n this.error(`Disabling dataset backup failed: ${message}`, {exit: 1})\n }\n }\n\n private async promptForDataset(datasets: DatasetsResponse): Promise<string> {\n try {\n const choices = datasets.map((dataset) => ({\n name: dataset.name,\n value: dataset.name,\n }))\n\n return select({\n choices,\n message: 'Select the dataset name:',\n })\n } catch (error) {\n const err = error as Error\n disableBackupDebug(`Error fetching datasets`, err)\n this.error(`Failed to fetch datasets:\\n${err.message}`, {exit: 1})\n }\n 
}\n}\n"],"names":["styleText","Args","SanityCommand","subdebug","select","assertDatasetExists","setBackup","listDatasets","disableBackupDebug","DisableBackupCommand","args","dataset","string","description","required","examples","command","run","parse","projectId","getProjectId","datasets","error","message","Error","String","exit","length","promptForDataset","status","log","choices","map","name","value","err"],"mappings":"AAAA,SAAQA,SAAS,QAAO,YAAW;AAEnC,SAAQC,IAAI,QAAO,cAAa;AAChC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AACxD,SAAQC,MAAM,QAAO,sBAAqB;AAG1C,SAAQC,mBAAmB,QAAO,6CAA4C;AAC9E,SAAQC,SAAS,QAAO,2BAA0B;AAClD,SAAQC,YAAY,QAAO,6BAA4B;AAEvD,MAAMC,qBAAqBL,SAAS;AAEpC,OAAO,MAAMM,6BAA6BP;IACxC,OAAgBQ,OAAO;QACrBC,SAASV,KAAKW,MAAM,CAAC;YACnBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,gCAA+B;IAE7D,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;KACD,CAAA;IAED,MAAaI,MAAqB;QAChC,MAAM,EAACP,IAAI,EAAC,GAAG,MAAM,IAAI,CAACQ,KAAK,CAACT;QAChC,IAAI,EAACE,OAAO,EAAC,GAAGD;QAEhB,MAAMS,YAAY,MAAM,IAAI,CAACC,YAAY;QAEzC,IAAIC;QAEJ,IAAI;YACFA,WAAW,MAAMd,aAAaY;QAChC,EAAE,OAAOG,OAAO;YACd,MAAMC,UAAUD,iBAAiBE,QAAQF,MAAMC,OAAO,GAAGE,OAAOH;YAChEd,mBAAmB,CAAC,yBAAyB,EAAEe,SAAS,EAAED;YAC1D,IAAI,CAACA,KAAK,CAAC,CAAC,yBAAyB,EAAEC,SAAS,EAAE;gBAACG,MAAM;YAAC;QAC5D;QAEA,IAAIL,SAASM,MAAM,KAAK,GAAG;YACzB,IAAI,CAACL,KAAK,CAAC,sCAAsC;gBAACI,MAAM;YAAC;QAC3D;QAEA,IAAIf,SAAS;YACXN,oBAAoBgB,UAAUV;QAChC,OAAO;YACLA,UAAU,MAAM,IAAI,CAACiB,gBAAgB,CAACP;QACxC;QAEA,IAAI;YACF,MAAMf,UAAU;gBAACK;gBAASQ;gBAAWU,QAAQ;YAAK;YAElD,IAAI,CAACC,GAAG,CAAC,GAAG9B,UAAU,SAAS,CAAC,mCAAmC,EAAEW,QAAQ,GAAG,CAAC,GAAG;YACpF,IAAI,CAACmB,GAAG,CACN,GAAG9B,UAAU,UAAU,kFAAkF;YAG3GQ,mBAAmB,CAAC,yCAAyC,EAAEG,SAAS;QAC1E,EAAE,OAAOW,OAAO;YACd,MAAMC,UAAUD,iBAAiBE,QAAQF,MAAMC,OAAO,GAAGE,OAAOH;YAChEd,mBAAmB,CAAC,oCAAoC,CAAC,EAAEc;YAC3D,IAAI,CAACA,KAAK,CAAC,CAAC,iCAAiC,EAAEC,SAAS,EAAE;gBAACG,MAAM;YAAC;QACpE;IACF;IAEA,MAAcE,iBAAiBP,QAA0B,EAAmB;QAC1E,IAAI;YACF,MAAMU,UAAUV,SAASW,GAAG,CAAC,CAACrB,UAAa,CAAA;oBACzCsB,MAAMtB,QAAQsB,
IAAI;oBAClBC,OAAOvB,QAAQsB,IAAI;gBACrB,CAAA;YAEA,OAAO7B,OAAO;gBACZ2B;gBACAR,SAAS;YACX;QACF,EAAE,OAAOD,OAAO;YACd,MAAMa,MAAMb;YACZd,mBAAmB,CAAC,uBAAuB,CAAC,EAAE2B;YAC9C,IAAI,CAACb,KAAK,CAAC,CAAC,2BAA2B,EAAEa,IAAIZ,OAAO,EAAE,EAAE;gBAACG,MAAM;YAAC;QAClE;IACF;AACF"}
@@ -21,7 +21,6 @@ import { validateDatasetName } from '../../actions/dataset/validateDatasetName.j
21
21
  import { promptForDataset } from '../../prompts/promptForDataset.js';
22
22
  import { listBackups } from '../../services/backup.js';
23
23
  import { listDatasets } from '../../services/datasets.js';
24
- import { NO_PROJECT_ID } from '../../util/errorMessages.js';
25
24
  import { humanFileSize } from '../../util/humanFileSize.js';
26
25
  import { isPathDirName } from '../../util/isPathDirName.js';
27
26
  const DEFAULT_DOWNLOAD_CONCURRENCY = 10;
@@ -72,11 +71,6 @@ export class DownloadBackupCommand extends SanityCommand {
72
71
  const { args } = await this.parse(DownloadBackupCommand);
73
72
  let { dataset } = args;
74
73
  const projectId = await this.getProjectId();
75
- if (!projectId) {
76
- this.error(NO_PROJECT_ID, {
77
- exit: 1
78
- });
79
- }
80
74
  let datasets;
81
75
  try {
82
76
  datasets = await listDatasets(projectId);
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/commands/backup/download.ts"],"sourcesContent":["import {createWriteStream} from 'node:fs'\nimport {mkdir, mkdtemp} from 'node:fs/promises'\nimport {tmpdir} from 'node:os'\nimport path from 'node:path'\nimport {finished} from 'node:stream/promises'\nimport {styleText} from 'node:util'\n\nimport {Args, Flags} from '@oclif/core'\nimport {fileExists, SanityCommand} from '@sanity/cli-core'\nimport {boxen, confirm, input, select} from '@sanity/cli-core/ux'\nimport {type DatasetsResponse} from '@sanity/client'\nimport pMap from 'p-map'\nimport prettyMs from 'pretty-ms'\n\nimport {archiveDir} from '../../actions/backup/archiveDir.js'\nimport {assertDatasetExists} from '../../actions/backup/assertDatasetExist.js'\nimport {backupDownloadDebug} from '../../actions/backup/backupDownloadDebug.js'\nimport {cleanupTmpDir} from '../../actions/backup/cleanupTmpDir.js'\nimport {downloadAsset} from '../../actions/backup/downloadAsset.js'\nimport {downloadDocument} from '../../actions/backup/downloadDocument.js'\nimport {type File, PaginatedGetBackupStream} from '../../actions/backup/fetchNextBackupPage.js'\nimport {newProgress} from '../../actions/backup/progressSpinner.js'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName.js'\nimport {promptForDataset} from '../../prompts/promptForDataset.js'\nimport {type BackupItem, listBackups} from '../../services/backup.js'\nimport {listDatasets} from '../../services/datasets.js'\nimport {NO_PROJECT_ID} from '../../util/errorMessages.js'\nimport {humanFileSize} from '../../util/humanFileSize.js'\nimport {isPathDirName} from '../../util/isPathDirName.js'\n\nconst DEFAULT_DOWNLOAD_CONCURRENCY = 10\nconst MAX_DOWNLOAD_CONCURRENCY = 24\n\ninterface DownloadBackupOptions {\n backupId: string\n concurrency: number\n datasetName: string\n outDir: string\n outFileName: string\n overwrite: boolean\n projectId: string\n}\n\nexport class DownloadBackupCommand extends SanityCommand<typeof 
DownloadBackupCommand> {\n static override args = {\n dataset: Args.string({\n description: 'Dataset name to download backup from',\n required: false,\n }),\n }\n\n static override description = 'Download a dataset backup to a local file.'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Interactively download a backup',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production --backup-id 2024-01-01-backup-1',\n description: 'Download a specific backup for the production dataset',\n },\n {\n command:\n '<%= config.bin %> <%= command.id %> production --backup-id 2024-01-01-backup-2 --out /path/to/file',\n description: 'Download backup to a specific file',\n },\n {\n command:\n '<%= config.bin %> <%= command.id %> production --backup-id 2024-01-01-backup-3 --out /path/to/file --overwrite',\n description: 'Download backup and overwrite existing file',\n },\n ]\n\n static override flags = {\n 'backup-id': Flags.string({\n description: 'The backup ID to download',\n }),\n concurrency: Flags.integer({\n default: DEFAULT_DOWNLOAD_CONCURRENCY,\n description: `Concurrent number of backup item downloads (max: ${MAX_DOWNLOAD_CONCURRENCY})`,\n }),\n out: Flags.string({\n description: 'The file or directory path the backup should download to',\n }),\n overwrite: Flags.boolean({\n default: false,\n description: 'Allows overwriting of existing backup file',\n }),\n }\n\n public async run(): Promise<void> {\n const {args} = await this.parse(DownloadBackupCommand)\n let {dataset} = args\n\n const projectId = await this.getProjectId()\n if (!projectId) {\n this.error(NO_PROJECT_ID, {exit: 1})\n }\n\n let datasets: DatasetsResponse\n\n try {\n datasets = await listDatasets(projectId)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n backupDownloadDebug(`Failed to list datasets: ${message}`, error)\n this.error(`Failed to list datasets: ${message}`, {exit: 1})\n }\n\n if (datasets.length === 0) {\n this.error('No datasets found in this project.', {exit: 1})\n }\n\n if (dataset) {\n assertDatasetExists(datasets, dataset)\n } else {\n dataset = await promptForDataset({allowCreation: false, datasets})\n }\n\n const opts = await this.prepareBackupOptions(projectId, dataset)\n const outFilePath = path.join(opts.outDir, opts.outFileName)\n\n this.log(\n boxen(\n `Downloading backup for:\n\n${styleText('bold', 'projectId')}: ${styleText('cyan', opts.projectId)}\n${styleText('bold', 'dataset')}: ${styleText('cyan', opts.datasetName)}\n${styleText('bold', 'backupId')}: ${styleText('cyan', opts.backupId)}`,\n {\n borderColor: 'cyan',\n borderStyle: 'round',\n padding: 1,\n },\n ),\n )\n this.log('')\n this.log(`Downloading backup to \"${styleText('cyan', outFilePath)}\"`)\n\n const start = Date.now()\n const progressSpinner = newProgress('Setting up backup environment...')\n\n // Create a unique temporary directory to store files before bundling them into the archive at outputPath.\n // Temporary directories are normally deleted at the end of backup process, any unexpected exit may leave them\n // behind, hence it is important to create a unique directory for each attempt.\n const tmpOutDir = await mkdtemp(path.join(tmpdir(), `sanity-backup-`))\n\n // Create required directories if they don't exist.\n for (const dir of [\n opts.outDir,\n path.join(tmpOutDir, 'images'),\n path.join(tmpOutDir, 'files'),\n ]) {\n await mkdir(dir, {recursive: true})\n }\n\n backupDownloadDebug('Writing to temporary directory %s', tmpOutDir)\n const tmpOutDocumentsFile = path.join(tmpOutDir, 'data.ndjson')\n\n const docOutStream = createWriteStream(tmpOutDocumentsFile)\n\n try {\n const backupFileStream = new PaginatedGetBackupStream(\n opts.projectId,\n opts.datasetName,\n opts.backupId,\n 
)\n\n const files: File[] = []\n let i = 0\n for await (const file of backupFileStream) {\n files.push(file)\n i++\n progressSpinner.set({\n current: i,\n step: `Reading backup files...`,\n total: backupFileStream.totalFiles,\n update: true,\n })\n }\n\n let totalItemsDownloaded = 0\n await pMap(\n files,\n async (file: File) => {\n if (file.type === 'file' || file.type === 'image') {\n await downloadAsset(file.url, file.name, file.type, tmpOutDir)\n } else {\n const doc = await downloadDocument(file.url)\n docOutStream.write(`${doc}\\n`)\n }\n\n totalItemsDownloaded += 1\n progressSpinner.set({\n current: totalItemsDownloaded,\n step: `Downloading documents and assets...`,\n total: backupFileStream.totalFiles,\n update: true,\n })\n },\n {concurrency: opts.concurrency},\n )\n } catch (error) {\n progressSpinner.fail()\n const message = error instanceof Error ? error.message : String(error)\n backupDownloadDebug(`Downloading dataset backup failed: ${message}`, error)\n this.error(`Downloading dataset backup failed: ${message}`, {exit: 1})\n }\n\n docOutStream.end()\n await finished(docOutStream)\n\n progressSpinner.set({step: `Archiving files into a tarball...`, update: true})\n try {\n await archiveDir(tmpOutDir, outFilePath, (processedBytes: number) => {\n progressSpinner.update({\n step: `Archiving files into a tarball, ${humanFileSize(processedBytes)} bytes written...`,\n })\n })\n } catch (err) {\n progressSpinner.fail()\n const message = err instanceof Error ? 
err.message : String(err)\n backupDownloadDebug(`Archiving backup failed: ${message}`, err)\n this.error(`Archiving backup failed: ${message}`, {exit: 1})\n }\n\n progressSpinner.set({\n step: `Cleaning up temporary files at ${styleText('cyan', `${tmpOutDir}`)}`,\n })\n await cleanupTmpDir(tmpOutDir)\n\n progressSpinner.set({\n step: `Backup download complete [${prettyMs(Date.now() - start)}]`,\n })\n progressSpinner.succeed()\n }\n\n private async getOutputPath(defaultOutFileName: string): Promise<string> {\n if (this.flags.out !== undefined) {\n // Rewrite the output path to an absolute path, if it is not already.\n return path.resolve(this.flags.out)\n }\n\n const workDir = process.cwd()\n const inputResult = await input({\n default: path.join(workDir, defaultOutFileName),\n message: 'Output path:',\n })\n return path.resolve(inputResult)\n }\n\n private async prepareBackupOptions(\n projectId: string,\n datasetName: string,\n ): Promise<DownloadBackupOptions> {\n const err = validateDatasetName(datasetName)\n if (err) {\n this.error(err, {exit: 1})\n }\n\n const backupId = String(\n this.flags['backup-id'] || (await this.promptForBackupId(projectId, datasetName)),\n )\n\n if (\n 'concurrency' in this.flags &&\n (this.flags.concurrency < 1 || this.flags.concurrency > MAX_DOWNLOAD_CONCURRENCY)\n ) {\n this.error(`concurrency should be in 1 to ${MAX_DOWNLOAD_CONCURRENCY} range`, {exit: 1})\n }\n\n const defaultOutFileName = `${datasetName}-backup-${backupId}.tar.gz`\n let out = await this.getOutputPath(defaultOutFileName)\n\n // If path is a directory name, then add a default file name to the path.\n if (isPathDirName(out)) {\n out = path.join(out, defaultOutFileName)\n }\n\n const exists = await fileExists(out)\n // If the file already exists, ask for confirmation if it should be overwritten.\n if (!this.flags.overwrite && exists) {\n const shouldOverwrite = await confirm({\n default: false,\n message: `File \"${out}\" already exists, would you like to overwrite 
it?`,\n })\n\n // If the user does not want to overwrite the file, cancel the operation.\n if (!shouldOverwrite) {\n this.error('Operation cancelled.', {exit: 1})\n }\n }\n\n return {\n backupId,\n concurrency: this.flags.concurrency || DEFAULT_DOWNLOAD_CONCURRENCY,\n datasetName,\n outDir: path.dirname(out),\n outFileName: path.basename(out),\n overwrite: this.flags.overwrite,\n projectId,\n }\n }\n\n private async promptForBackupId(projectId: string, datasetName: string): Promise<string> {\n const maxBackupIdsShown = 100\n\n try {\n const response = await listBackups({\n datasetName,\n limit: maxBackupIdsShown,\n projectId,\n })\n\n if (!response?.backups?.length) {\n this.error('No backups found', {exit: 1})\n }\n\n const backupIdChoices = response.backups.map((backup: BackupItem) => ({\n name: backup.id,\n value: backup.id,\n }))\n\n const hint =\n backupIdChoices.length === maxBackupIdsShown\n ? ` (only last ${maxBackupIdsShown} shown)`\n : ''\n\n return select({\n choices: backupIdChoices,\n message: `Select backup ID to use${hint}`,\n })\n } catch (err) {\n const message = err instanceof Error ? 
err.message : String(err)\n backupDownloadDebug(`Failed to fetch backups for dataset ${datasetName}: ${message}`, err)\n this.error(`Failed to fetch backups for dataset ${datasetName}: ${message}`, {exit: 1})\n }\n }\n}\n"],"names":["createWriteStream","mkdir","mkdtemp","tmpdir","path","finished","styleText","Args","Flags","fileExists","SanityCommand","boxen","confirm","input","select","pMap","prettyMs","archiveDir","assertDatasetExists","backupDownloadDebug","cleanupTmpDir","downloadAsset","downloadDocument","PaginatedGetBackupStream","newProgress","validateDatasetName","promptForDataset","listBackups","listDatasets","NO_PROJECT_ID","humanFileSize","isPathDirName","DEFAULT_DOWNLOAD_CONCURRENCY","MAX_DOWNLOAD_CONCURRENCY","DownloadBackupCommand","args","dataset","string","description","required","examples","command","flags","concurrency","integer","default","out","overwrite","boolean","run","parse","projectId","getProjectId","error","exit","datasets","message","Error","String","length","allowCreation","opts","prepareBackupOptions","outFilePath","join","outDir","outFileName","log","datasetName","backupId","borderColor","borderStyle","padding","start","Date","now","progressSpinner","tmpOutDir","dir","recursive","tmpOutDocumentsFile","docOutStream","backupFileStream","files","i","file","push","set","current","step","total","totalFiles","update","totalItemsDownloaded","type","url","name","doc","write","fail","end","processedBytes","err","succeed","getOutputPath","defaultOutFileName","undefined","resolve","workDir","process","cwd","inputResult","promptForBackupId","exists","shouldOverwrite","dirname","basename","maxBackupIdsShown","response","limit","backups","backupIdChoices","map","backup","id","value","hint","choices"],"mappings":"AAAA,SAAQA,iBAAiB,QAAO,UAAS;AACzC,SAAQC,KAAK,EAAEC,OAAO,QAAO,mBAAkB;AAC/C,SAAQC,MAAM,QAAO,UAAS;AAC9B,OAAOC,UAAU,YAAW;AAC5B,SAAQC,QAAQ,QAAO,uBAAsB;AAC7C,SAAQC,SAAS,QAAO,YAAW;AAEnC,SAAQC,IAAI,EAAEC,KAAK,QAAO,cAAa;AACvC,SAAQC,UAAU,EAAEC,aAAa,QA
AO,mBAAkB;AAC1D,SAAQC,KAAK,EAAEC,OAAO,EAAEC,KAAK,EAAEC,MAAM,QAAO,sBAAqB;AAEjE,OAAOC,UAAU,QAAO;AACxB,OAAOC,cAAc,YAAW;AAEhC,SAAQC,UAAU,QAAO,qCAAoC;AAC7D,SAAQC,mBAAmB,QAAO,6CAA4C;AAC9E,SAAQC,mBAAmB,QAAO,8CAA6C;AAC/E,SAAQC,aAAa,QAAO,wCAAuC;AACnE,SAAQC,aAAa,QAAO,wCAAuC;AACnE,SAAQC,gBAAgB,QAAO,2CAA0C;AACzE,SAAmBC,wBAAwB,QAAO,8CAA6C;AAC/F,SAAQC,WAAW,QAAO,0CAAyC;AACnE,SAAQC,mBAAmB,QAAO,+CAA8C;AAChF,SAAQC,gBAAgB,QAAO,oCAAmC;AAClE,SAAyBC,WAAW,QAAO,2BAA0B;AACrE,SAAQC,YAAY,QAAO,6BAA4B;AACvD,SAAQC,aAAa,QAAO,8BAA6B;AACzD,SAAQC,aAAa,QAAO,8BAA6B;AACzD,SAAQC,aAAa,QAAO,8BAA6B;AAEzD,MAAMC,+BAA+B;AACrC,MAAMC,2BAA2B;AAYjC,OAAO,MAAMC,8BAA8BxB;IACzC,OAAgByB,OAAO;QACrBC,SAAS7B,KAAK8B,MAAM,CAAC;YACnBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,6CAA4C;IAE1E,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SACE;YACFH,aAAa;QACf;QACA;YACEG,SACE;YACFH,aAAa;QACf;KACD,CAAA;IAED,OAAgBI,QAAQ;QACtB,aAAalC,MAAM6B,MAAM,CAAC;YACxBC,aAAa;QACf;QACAK,aAAanC,MAAMoC,OAAO,CAAC;YACzBC,SAASb;YACTM,aAAa,CAAC,iDAAiD,EAAEL,yBAAyB,CAAC,CAAC;QAC9F;QACAa,KAAKtC,MAAM6B,MAAM,CAAC;YAChBC,aAAa;QACf;QACAS,WAAWvC,MAAMwC,OAAO,CAAC;YACvBH,SAAS;YACTP,aAAa;QACf;IACF,EAAC;IAED,MAAaW,MAAqB;QAChC,MAAM,EAACd,IAAI,EAAC,GAAG,MAAM,IAAI,CAACe,KAAK,CAAChB;QAChC,IAAI,EAACE,OAAO,EAAC,GAAGD;QAEhB,MAAMgB,YAAY,MAAM,IAAI,CAACC,YAAY;QACzC,IAAI,CAACD,WAAW;YACd,IAAI,CAACE,KAAK,CAACxB,eAAe;gBAACyB,MAAM;YAAC;QACpC;QAEA,IAAIC;QAEJ,IAAI;YACFA,WAAW,MAAM3B,aAAauB;QAChC,EAAE,OAAOE,OAAO;YACd,MAAMG,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;YAChElC,oBAAoB,CAAC,yBAAyB,EAAEqC,SAAS,EAAEH;YAC3D,IAAI,CAACA,KAAK,CAAC,CAAC,yBAAyB,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QAC5D;QAEA,IAAIC,SAASI,MAAM,KAAK,GAAG;YACzB,IAAI,CAACN,KAAK,CAAC,sCAAsC;gBAACC,MAAM;YAAC;QAC3D;QAEA,IAAIlB,SAAS;YACXlB,oBAAoBqC,UAAUnB;QAChC,OAAO;YACLA,UAAU,MAAMV,iBAAiB;gBAACkC,eAAe;gBAAOL;YAAQ;QAClE;QAEA,MAAMM,OAAO,MAAM,IAAI,CAACC,oBAAoB,CAACX,WAAWf;QACxD,MAAM2B,cAAc3D,KAAK4D,IAAI,CAACH,KAAKI,MAAM,EAAEJ,KAAKK,WAAW;QAE3D,IAAI,CAACC,GAAG,CACNxD,MACE,CAAC;;AAE
T,EAAEL,UAAU,QAAQ,aAAa,EAAE,EAAEA,UAAU,QAAQuD,KAAKV,SAAS,EAAE;AACvE,EAAE7C,UAAU,QAAQ,WAAW,EAAE,EAAEA,UAAU,QAAQuD,KAAKO,WAAW,EAAE;AACvE,EAAE9D,UAAU,QAAQ,YAAY,EAAE,EAAEA,UAAU,QAAQuD,KAAKQ,QAAQ,GAAG,EAC9D;YACEC,aAAa;YACbC,aAAa;YACbC,SAAS;QACX;QAGJ,IAAI,CAACL,GAAG,CAAC;QACT,IAAI,CAACA,GAAG,CAAC,CAAC,uBAAuB,EAAE7D,UAAU,QAAQyD,aAAa,CAAC,CAAC;QAEpE,MAAMU,QAAQC,KAAKC,GAAG;QACtB,MAAMC,kBAAkBpD,YAAY;QAEpC,0GAA0G;QAC1G,8GAA8G;QAC9G,+EAA+E;QAC/E,MAAMqD,YAAY,MAAM3E,QAAQE,KAAK4D,IAAI,CAAC7D,UAAU,CAAC,cAAc,CAAC;QAEpE,mDAAmD;QACnD,KAAK,MAAM2E,OAAO;YAChBjB,KAAKI,MAAM;YACX7D,KAAK4D,IAAI,CAACa,WAAW;YACrBzE,KAAK4D,IAAI,CAACa,WAAW;SACtB,CAAE;YACD,MAAM5E,MAAM6E,KAAK;gBAACC,WAAW;YAAI;QACnC;QAEA5D,oBAAoB,qCAAqC0D;QACzD,MAAMG,sBAAsB5E,KAAK4D,IAAI,CAACa,WAAW;QAEjD,MAAMI,eAAejF,kBAAkBgF;QAEvC,IAAI;YACF,MAAME,mBAAmB,IAAI3D,yBAC3BsC,KAAKV,SAAS,EACdU,KAAKO,WAAW,EAChBP,KAAKQ,QAAQ;YAGf,MAAMc,QAAgB,EAAE;YACxB,IAAIC,IAAI;YACR,WAAW,MAAMC,QAAQH,iBAAkB;gBACzCC,MAAMG,IAAI,CAACD;gBACXD;gBACAR,gBAAgBW,GAAG,CAAC;oBAClBC,SAASJ;oBACTK,MAAM,CAAC,uBAAuB,CAAC;oBAC/BC,OAAOR,iBAAiBS,UAAU;oBAClCC,QAAQ;gBACV;YACF;YAEA,IAAIC,uBAAuB;YAC3B,MAAM9E,KACJoE,OACA,OAAOE;gBACL,IAAIA,KAAKS,IAAI,KAAK,UAAUT,KAAKS,IAAI,KAAK,SAAS;oBACjD,MAAMzE,cAAcgE,KAAKU,GAAG,EAAEV,KAAKW,IAAI,EAAEX,KAAKS,IAAI,EAAEjB;gBACtD,OAAO;oBACL,MAAMoB,MAAM,MAAM3E,iBAAiB+D,KAAKU,GAAG;oBAC3Cd,aAAaiB,KAAK,CAAC,GAAGD,IAAI,EAAE,CAAC;gBAC/B;gBAEAJ,wBAAwB;gBACxBjB,gBAAgBW,GAAG,CAAC;oBAClBC,SAASK;oBACTJ,MAAM,CAAC,mCAAmC,CAAC;oBAC3CC,OAAOR,iBAAiBS,UAAU;oBAClCC,QAAQ;gBACV;YACF,GACA;gBAACjD,aAAakB,KAAKlB,WAAW;YAAA;QAElC,EAAE,OAAOU,OAAO;YACduB,gBAAgBuB,IAAI;YACpB,MAAM3C,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;YAChElC,oBAAoB,CAAC,mCAAmC,EAAEqC,SAAS,EAAEH;YACrE,IAAI,CAACA,KAAK,CAAC,CAAC,mCAAmC,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QACtE;QAEA2B,aAAamB,GAAG;QAChB,MAAM/F,SAAS4E;QAEfL,gBAAgBW,GAAG,CAAC;YAACE,MAAM,CAAC,iCAAiC,CAAC;YAAEG,QAAQ;QAAI;QAC5E,IAAI;YACF,MAAM3E,WAAW4D,WAAWd,aAAa,CAACsC;gBACxCzB,gBAAgBgB,MAAM,CAAC;oBACrBH,MAAM,CAAC,gCAAgC,EAAE3D,cAAcuE,gBAAgB,iBAAiB,CA
AC;gBAC3F;YACF;QACF,EAAE,OAAOC,KAAK;YACZ1B,gBAAgBuB,IAAI;YACpB,MAAM3C,UAAU8C,eAAe7C,QAAQ6C,IAAI9C,OAAO,GAAGE,OAAO4C;YAC5DnF,oBAAoB,CAAC,yBAAyB,EAAEqC,SAAS,EAAE8C;YAC3D,IAAI,CAACjD,KAAK,CAAC,CAAC,yBAAyB,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QAC5D;QAEAsB,gBAAgBW,GAAG,CAAC;YAClBE,MAAM,CAAC,+BAA+B,EAAEnF,UAAU,QAAQ,GAAGuE,WAAW,GAAG;QAC7E;QACA,MAAMzD,cAAcyD;QAEpBD,gBAAgBW,GAAG,CAAC;YAClBE,MAAM,CAAC,0BAA0B,EAAEzE,SAAS0D,KAAKC,GAAG,KAAKF,OAAO,CAAC,CAAC;QACpE;QACAG,gBAAgB2B,OAAO;IACzB;IAEA,MAAcC,cAAcC,kBAA0B,EAAmB;QACvE,IAAI,IAAI,CAAC/D,KAAK,CAACI,GAAG,KAAK4D,WAAW;YAChC,qEAAqE;YACrE,OAAOtG,KAAKuG,OAAO,CAAC,IAAI,CAACjE,KAAK,CAACI,GAAG;QACpC;QAEA,MAAM8D,UAAUC,QAAQC,GAAG;QAC3B,MAAMC,cAAc,MAAMlG,MAAM;YAC9BgC,SAASzC,KAAK4D,IAAI,CAAC4C,SAASH;YAC5BjD,SAAS;QACX;QACA,OAAOpD,KAAKuG,OAAO,CAACI;IACtB;IAEA,MAAcjD,qBACZX,SAAiB,EACjBiB,WAAmB,EACa;QAChC,MAAMkC,MAAM7E,oBAAoB2C;QAChC,IAAIkC,KAAK;YACP,IAAI,CAACjD,KAAK,CAACiD,KAAK;gBAAChD,MAAM;YAAC;QAC1B;QAEA,MAAMe,WAAWX,OACf,IAAI,CAAChB,KAAK,CAAC,YAAY,IAAK,MAAM,IAAI,CAACsE,iBAAiB,CAAC7D,WAAWiB;QAGtE,IACE,iBAAiB,IAAI,CAAC1B,KAAK,IAC1B,CAAA,IAAI,CAACA,KAAK,CAACC,WAAW,GAAG,KAAK,IAAI,CAACD,KAAK,CAACC,WAAW,GAAGV,wBAAuB,GAC/E;YACA,IAAI,CAACoB,KAAK,CAAC,CAAC,8BAA8B,EAAEpB,yBAAyB,MAAM,CAAC,EAAE;gBAACqB,MAAM;YAAC;QACxF;QAEA,MAAMmD,qBAAqB,GAAGrC,YAAY,QAAQ,EAAEC,SAAS,OAAO,CAAC;QACrE,IAAIvB,MAAM,MAAM,IAAI,CAAC0D,aAAa,CAACC;QAEnC,yEAAyE;QACzE,IAAI1E,cAAce,MAAM;YACtBA,MAAM1C,KAAK4D,IAAI,CAAClB,KAAK2D;QACvB;QAEA,MAAMQ,SAAS,MAAMxG,WAAWqC;QAChC,gFAAgF;QAChF,IAAI,CAAC,IAAI,CAACJ,KAAK,CAACK,SAAS,IAAIkE,QAAQ;YACnC,MAAMC,kBAAkB,MAAMtG,QAAQ;gBACpCiC,SAAS;gBACTW,SAAS,CAAC,MAAM,EAAEV,IAAI,iDAAiD,CAAC;YAC1E;YAEA,yEAAyE;YACzE,IAAI,CAACoE,iBAAiB;gBACpB,IAAI,CAAC7D,KAAK,CAAC,wBAAwB;oBAACC,MAAM;gBAAC;YAC7C;QACF;QAEA,OAAO;YACLe;YACA1B,aAAa,IAAI,CAACD,KAAK,CAACC,WAAW,IAAIX;YACvCoC;YACAH,QAAQ7D,KAAK+G,OAAO,CAACrE;YACrBoB,aAAa9D,KAAKgH,QAAQ,CAACtE;YAC3BC,WAAW,IAAI,CAACL,KAAK,CAACK,SAAS;YAC/BI;QACF;IACF;IAEA,MAAc6D,kBAAkB7D,SAAiB,EAAEiB,WAAmB,EAAmB;QACvF,MAAMiD,oBAAoB;QAE1B,IAAI;YA
CF,MAAMC,WAAW,MAAM3F,YAAY;gBACjCyC;gBACAmD,OAAOF;gBACPlE;YACF;YAEA,IAAI,CAACmE,UAAUE,SAAS7D,QAAQ;gBAC9B,IAAI,CAACN,KAAK,CAAC,oBAAoB;oBAACC,MAAM;gBAAC;YACzC;YAEA,MAAMmE,kBAAkBH,SAASE,OAAO,CAACE,GAAG,CAAC,CAACC,SAAwB,CAAA;oBACpE3B,MAAM2B,OAAOC,EAAE;oBACfC,OAAOF,OAAOC,EAAE;gBAClB,CAAA;YAEA,MAAME,OACJL,gBAAgB9D,MAAM,KAAK0D,oBACvB,CAAC,YAAY,EAAEA,kBAAkB,OAAO,CAAC,GACzC;YAEN,OAAOvG,OAAO;gBACZiH,SAASN;gBACTjE,SAAS,CAAC,uBAAuB,EAAEsE,MAAM;YAC3C;QACF,EAAE,OAAOxB,KAAK;YACZ,MAAM9C,UAAU8C,eAAe7C,QAAQ6C,IAAI9C,OAAO,GAAGE,OAAO4C;YAC5DnF,oBAAoB,CAAC,oCAAoC,EAAEiD,YAAY,EAAE,EAAEZ,SAAS,EAAE8C;YACtF,IAAI,CAACjD,KAAK,CAAC,CAAC,oCAAoC,EAAEe,YAAY,EAAE,EAAEZ,SAAS,EAAE;gBAACF,MAAM;YAAC;QACvF;IACF;AACF"}
1
+ {"version":3,"sources":["../../../src/commands/backup/download.ts"],"sourcesContent":["import {createWriteStream} from 'node:fs'\nimport {mkdir, mkdtemp} from 'node:fs/promises'\nimport {tmpdir} from 'node:os'\nimport path from 'node:path'\nimport {finished} from 'node:stream/promises'\nimport {styleText} from 'node:util'\n\nimport {Args, Flags} from '@oclif/core'\nimport {fileExists, SanityCommand} from '@sanity/cli-core'\nimport {boxen, confirm, input, select} from '@sanity/cli-core/ux'\nimport {type DatasetsResponse} from '@sanity/client'\nimport pMap from 'p-map'\nimport prettyMs from 'pretty-ms'\n\nimport {archiveDir} from '../../actions/backup/archiveDir.js'\nimport {assertDatasetExists} from '../../actions/backup/assertDatasetExist.js'\nimport {backupDownloadDebug} from '../../actions/backup/backupDownloadDebug.js'\nimport {cleanupTmpDir} from '../../actions/backup/cleanupTmpDir.js'\nimport {downloadAsset} from '../../actions/backup/downloadAsset.js'\nimport {downloadDocument} from '../../actions/backup/downloadDocument.js'\nimport {type File, PaginatedGetBackupStream} from '../../actions/backup/fetchNextBackupPage.js'\nimport {newProgress} from '../../actions/backup/progressSpinner.js'\nimport {validateDatasetName} from '../../actions/dataset/validateDatasetName.js'\nimport {promptForDataset} from '../../prompts/promptForDataset.js'\nimport {type BackupItem, listBackups} from '../../services/backup.js'\nimport {listDatasets} from '../../services/datasets.js'\nimport {humanFileSize} from '../../util/humanFileSize.js'\nimport {isPathDirName} from '../../util/isPathDirName.js'\n\nconst DEFAULT_DOWNLOAD_CONCURRENCY = 10\nconst MAX_DOWNLOAD_CONCURRENCY = 24\n\ninterface DownloadBackupOptions {\n backupId: string\n concurrency: number\n datasetName: string\n outDir: string\n outFileName: string\n overwrite: boolean\n projectId: string\n}\n\nexport class DownloadBackupCommand extends SanityCommand<typeof DownloadBackupCommand> {\n static override args = {\n 
dataset: Args.string({\n description: 'Dataset name to download backup from',\n required: false,\n }),\n }\n\n static override description = 'Download a dataset backup to a local file.'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Interactively download a backup',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production --backup-id 2024-01-01-backup-1',\n description: 'Download a specific backup for the production dataset',\n },\n {\n command:\n '<%= config.bin %> <%= command.id %> production --backup-id 2024-01-01-backup-2 --out /path/to/file',\n description: 'Download backup to a specific file',\n },\n {\n command:\n '<%= config.bin %> <%= command.id %> production --backup-id 2024-01-01-backup-3 --out /path/to/file --overwrite',\n description: 'Download backup and overwrite existing file',\n },\n ]\n\n static override flags = {\n 'backup-id': Flags.string({\n description: 'The backup ID to download',\n }),\n concurrency: Flags.integer({\n default: DEFAULT_DOWNLOAD_CONCURRENCY,\n description: `Concurrent number of backup item downloads (max: ${MAX_DOWNLOAD_CONCURRENCY})`,\n }),\n out: Flags.string({\n description: 'The file or directory path the backup should download to',\n }),\n overwrite: Flags.boolean({\n default: false,\n description: 'Allows overwriting of existing backup file',\n }),\n }\n\n public async run(): Promise<void> {\n const {args} = await this.parse(DownloadBackupCommand)\n let {dataset} = args\n\n const projectId = await this.getProjectId()\n\n let datasets: DatasetsResponse\n\n try {\n datasets = await listDatasets(projectId)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n backupDownloadDebug(`Failed to list datasets: ${message}`, error)\n this.error(`Failed to list datasets: ${message}`, {exit: 1})\n }\n\n if (datasets.length === 0) {\n this.error('No datasets found in this project.', {exit: 1})\n }\n\n if (dataset) {\n assertDatasetExists(datasets, dataset)\n } else {\n dataset = await promptForDataset({allowCreation: false, datasets})\n }\n\n const opts = await this.prepareBackupOptions(projectId, dataset)\n const outFilePath = path.join(opts.outDir, opts.outFileName)\n\n this.log(\n boxen(\n `Downloading backup for:\n\n${styleText('bold', 'projectId')}: ${styleText('cyan', opts.projectId)}\n${styleText('bold', 'dataset')}: ${styleText('cyan', opts.datasetName)}\n${styleText('bold', 'backupId')}: ${styleText('cyan', opts.backupId)}`,\n {\n borderColor: 'cyan',\n borderStyle: 'round',\n padding: 1,\n },\n ),\n )\n this.log('')\n this.log(`Downloading backup to \"${styleText('cyan', outFilePath)}\"`)\n\n const start = Date.now()\n const progressSpinner = newProgress('Setting up backup environment...')\n\n // Create a unique temporary directory to store files before bundling them into the archive at outputPath.\n // Temporary directories are normally deleted at the end of backup process, any unexpected exit may leave them\n // behind, hence it is important to create a unique directory for each attempt.\n const tmpOutDir = await mkdtemp(path.join(tmpdir(), `sanity-backup-`))\n\n // Create required directories if they don't exist.\n for (const dir of [\n opts.outDir,\n path.join(tmpOutDir, 'images'),\n path.join(tmpOutDir, 'files'),\n ]) {\n await mkdir(dir, {recursive: true})\n }\n\n backupDownloadDebug('Writing to temporary directory %s', tmpOutDir)\n const tmpOutDocumentsFile = path.join(tmpOutDir, 'data.ndjson')\n\n const docOutStream = createWriteStream(tmpOutDocumentsFile)\n\n try {\n const backupFileStream = new PaginatedGetBackupStream(\n opts.projectId,\n opts.datasetName,\n opts.backupId,\n 
)\n\n const files: File[] = []\n let i = 0\n for await (const file of backupFileStream) {\n files.push(file)\n i++\n progressSpinner.set({\n current: i,\n step: `Reading backup files...`,\n total: backupFileStream.totalFiles,\n update: true,\n })\n }\n\n let totalItemsDownloaded = 0\n await pMap(\n files,\n async (file: File) => {\n if (file.type === 'file' || file.type === 'image') {\n await downloadAsset(file.url, file.name, file.type, tmpOutDir)\n } else {\n const doc = await downloadDocument(file.url)\n docOutStream.write(`${doc}\\n`)\n }\n\n totalItemsDownloaded += 1\n progressSpinner.set({\n current: totalItemsDownloaded,\n step: `Downloading documents and assets...`,\n total: backupFileStream.totalFiles,\n update: true,\n })\n },\n {concurrency: opts.concurrency},\n )\n } catch (error) {\n progressSpinner.fail()\n const message = error instanceof Error ? error.message : String(error)\n backupDownloadDebug(`Downloading dataset backup failed: ${message}`, error)\n this.error(`Downloading dataset backup failed: ${message}`, {exit: 1})\n }\n\n docOutStream.end()\n await finished(docOutStream)\n\n progressSpinner.set({step: `Archiving files into a tarball...`, update: true})\n try {\n await archiveDir(tmpOutDir, outFilePath, (processedBytes: number) => {\n progressSpinner.update({\n step: `Archiving files into a tarball, ${humanFileSize(processedBytes)} bytes written...`,\n })\n })\n } catch (err) {\n progressSpinner.fail()\n const message = err instanceof Error ? 
err.message : String(err)\n backupDownloadDebug(`Archiving backup failed: ${message}`, err)\n this.error(`Archiving backup failed: ${message}`, {exit: 1})\n }\n\n progressSpinner.set({\n step: `Cleaning up temporary files at ${styleText('cyan', `${tmpOutDir}`)}`,\n })\n await cleanupTmpDir(tmpOutDir)\n\n progressSpinner.set({\n step: `Backup download complete [${prettyMs(Date.now() - start)}]`,\n })\n progressSpinner.succeed()\n }\n\n private async getOutputPath(defaultOutFileName: string): Promise<string> {\n if (this.flags.out !== undefined) {\n // Rewrite the output path to an absolute path, if it is not already.\n return path.resolve(this.flags.out)\n }\n\n const workDir = process.cwd()\n const inputResult = await input({\n default: path.join(workDir, defaultOutFileName),\n message: 'Output path:',\n })\n return path.resolve(inputResult)\n }\n\n private async prepareBackupOptions(\n projectId: string,\n datasetName: string,\n ): Promise<DownloadBackupOptions> {\n const err = validateDatasetName(datasetName)\n if (err) {\n this.error(err, {exit: 1})\n }\n\n const backupId = String(\n this.flags['backup-id'] || (await this.promptForBackupId(projectId, datasetName)),\n )\n\n if (\n 'concurrency' in this.flags &&\n (this.flags.concurrency < 1 || this.flags.concurrency > MAX_DOWNLOAD_CONCURRENCY)\n ) {\n this.error(`concurrency should be in 1 to ${MAX_DOWNLOAD_CONCURRENCY} range`, {exit: 1})\n }\n\n const defaultOutFileName = `${datasetName}-backup-${backupId}.tar.gz`\n let out = await this.getOutputPath(defaultOutFileName)\n\n // If path is a directory name, then add a default file name to the path.\n if (isPathDirName(out)) {\n out = path.join(out, defaultOutFileName)\n }\n\n const exists = await fileExists(out)\n // If the file already exists, ask for confirmation if it should be overwritten.\n if (!this.flags.overwrite && exists) {\n const shouldOverwrite = await confirm({\n default: false,\n message: `File \"${out}\" already exists, would you like to overwrite 
it?`,\n })\n\n // If the user does not want to overwrite the file, cancel the operation.\n if (!shouldOverwrite) {\n this.error('Operation cancelled.', {exit: 1})\n }\n }\n\n return {\n backupId,\n concurrency: this.flags.concurrency || DEFAULT_DOWNLOAD_CONCURRENCY,\n datasetName,\n outDir: path.dirname(out),\n outFileName: path.basename(out),\n overwrite: this.flags.overwrite,\n projectId,\n }\n }\n\n private async promptForBackupId(projectId: string, datasetName: string): Promise<string> {\n const maxBackupIdsShown = 100\n\n try {\n const response = await listBackups({\n datasetName,\n limit: maxBackupIdsShown,\n projectId,\n })\n\n if (!response?.backups?.length) {\n this.error('No backups found', {exit: 1})\n }\n\n const backupIdChoices = response.backups.map((backup: BackupItem) => ({\n name: backup.id,\n value: backup.id,\n }))\n\n const hint =\n backupIdChoices.length === maxBackupIdsShown\n ? ` (only last ${maxBackupIdsShown} shown)`\n : ''\n\n return select({\n choices: backupIdChoices,\n message: `Select backup ID to use${hint}`,\n })\n } catch (err) {\n const message = err instanceof Error ? 
err.message : String(err)\n backupDownloadDebug(`Failed to fetch backups for dataset ${datasetName}: ${message}`, err)\n this.error(`Failed to fetch backups for dataset ${datasetName}: ${message}`, {exit: 1})\n }\n }\n}\n"],"names":["createWriteStream","mkdir","mkdtemp","tmpdir","path","finished","styleText","Args","Flags","fileExists","SanityCommand","boxen","confirm","input","select","pMap","prettyMs","archiveDir","assertDatasetExists","backupDownloadDebug","cleanupTmpDir","downloadAsset","downloadDocument","PaginatedGetBackupStream","newProgress","validateDatasetName","promptForDataset","listBackups","listDatasets","humanFileSize","isPathDirName","DEFAULT_DOWNLOAD_CONCURRENCY","MAX_DOWNLOAD_CONCURRENCY","DownloadBackupCommand","args","dataset","string","description","required","examples","command","flags","concurrency","integer","default","out","overwrite","boolean","run","parse","projectId","getProjectId","datasets","error","message","Error","String","exit","length","allowCreation","opts","prepareBackupOptions","outFilePath","join","outDir","outFileName","log","datasetName","backupId","borderColor","borderStyle","padding","start","Date","now","progressSpinner","tmpOutDir","dir","recursive","tmpOutDocumentsFile","docOutStream","backupFileStream","files","i","file","push","set","current","step","total","totalFiles","update","totalItemsDownloaded","type","url","name","doc","write","fail","end","processedBytes","err","succeed","getOutputPath","defaultOutFileName","undefined","resolve","workDir","process","cwd","inputResult","promptForBackupId","exists","shouldOverwrite","dirname","basename","maxBackupIdsShown","response","limit","backups","backupIdChoices","map","backup","id","value","hint","choices"],"mappings":"AAAA,SAAQA,iBAAiB,QAAO,UAAS;AACzC,SAAQC,KAAK,EAAEC,OAAO,QAAO,mBAAkB;AAC/C,SAAQC,MAAM,QAAO,UAAS;AAC9B,OAAOC,UAAU,YAAW;AAC5B,SAAQC,QAAQ,QAAO,uBAAsB;AAC7C,SAAQC,SAAS,QAAO,YAAW;AAEnC,SAAQC,IAAI,EAAEC,KAAK,QAAO,cAAa;AACvC,SAAQC,UAAU,EAAEC,aAAa,QAAO,mBAAkB;AAC1D,
SAAQC,KAAK,EAAEC,OAAO,EAAEC,KAAK,EAAEC,MAAM,QAAO,sBAAqB;AAEjE,OAAOC,UAAU,QAAO;AACxB,OAAOC,cAAc,YAAW;AAEhC,SAAQC,UAAU,QAAO,qCAAoC;AAC7D,SAAQC,mBAAmB,QAAO,6CAA4C;AAC9E,SAAQC,mBAAmB,QAAO,8CAA6C;AAC/E,SAAQC,aAAa,QAAO,wCAAuC;AACnE,SAAQC,aAAa,QAAO,wCAAuC;AACnE,SAAQC,gBAAgB,QAAO,2CAA0C;AACzE,SAAmBC,wBAAwB,QAAO,8CAA6C;AAC/F,SAAQC,WAAW,QAAO,0CAAyC;AACnE,SAAQC,mBAAmB,QAAO,+CAA8C;AAChF,SAAQC,gBAAgB,QAAO,oCAAmC;AAClE,SAAyBC,WAAW,QAAO,2BAA0B;AACrE,SAAQC,YAAY,QAAO,6BAA4B;AACvD,SAAQC,aAAa,QAAO,8BAA6B;AACzD,SAAQC,aAAa,QAAO,8BAA6B;AAEzD,MAAMC,+BAA+B;AACrC,MAAMC,2BAA2B;AAYjC,OAAO,MAAMC,8BAA8BvB;IACzC,OAAgBwB,OAAO;QACrBC,SAAS5B,KAAK6B,MAAM,CAAC;YACnBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,6CAA4C;IAE1E,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SACE;YACFH,aAAa;QACf;QACA;YACEG,SACE;YACFH,aAAa;QACf;KACD,CAAA;IAED,OAAgBI,QAAQ;QACtB,aAAajC,MAAM4B,MAAM,CAAC;YACxBC,aAAa;QACf;QACAK,aAAalC,MAAMmC,OAAO,CAAC;YACzBC,SAASb;YACTM,aAAa,CAAC,iDAAiD,EAAEL,yBAAyB,CAAC,CAAC;QAC9F;QACAa,KAAKrC,MAAM4B,MAAM,CAAC;YAChBC,aAAa;QACf;QACAS,WAAWtC,MAAMuC,OAAO,CAAC;YACvBH,SAAS;YACTP,aAAa;QACf;IACF,EAAC;IAED,MAAaW,MAAqB;QAChC,MAAM,EAACd,IAAI,EAAC,GAAG,MAAM,IAAI,CAACe,KAAK,CAAChB;QAChC,IAAI,EAACE,OAAO,EAAC,GAAGD;QAEhB,MAAMgB,YAAY,MAAM,IAAI,CAACC,YAAY;QAEzC,IAAIC;QAEJ,IAAI;YACFA,WAAW,MAAMxB,aAAasB;QAChC,EAAE,OAAOG,OAAO;YACd,MAAMC,UAAUD,iBAAiBE,QAAQF,MAAMC,OAAO,GAAGE,OAAOH;YAChElC,oBAAoB,CAAC,yBAAyB,EAAEmC,SAAS,EAAED;YAC3D,IAAI,CAACA,KAAK,CAAC,CAAC,yBAAyB,EAAEC,SAAS,EAAE;gBAACG,MAAM;YAAC;QAC5D;QAEA,IAAIL,SAASM,MAAM,KAAK,GAAG;YACzB,IAAI,CAACL,KAAK,CAAC,sCAAsC;gBAACI,MAAM;YAAC;QAC3D;QAEA,IAAItB,SAAS;YACXjB,oBAAoBkC,UAAUjB;QAChC,OAAO;YACLA,UAAU,MAAMT,iBAAiB;gBAACiC,eAAe;gBAAOP;YAAQ;QAClE;QAEA,MAAMQ,OAAO,MAAM,IAAI,CAACC,oBAAoB,CAACX,WAAWf;QACxD,MAAM2B,cAAc1D,KAAK2D,IAAI,CAACH,KAAKI,MAAM,EAAEJ,KAAKK,WAAW;QAE3D,IAAI,CAACC,GAAG,CACNvD,MACE,CAAC;;AAET,EAAEL,UAAU,QAAQ,aAAa,EAAE,EAAEA,UAAU,QAAQsD,KAAKV,SAAS,EAAE;AACvE,EAAE5C,UAAU,QAAQ,WAAW,EAAE,EAAEA,UAAU,QAAQsD,KAAKO,WAAW
,EAAE;AACvE,EAAE7D,UAAU,QAAQ,YAAY,EAAE,EAAEA,UAAU,QAAQsD,KAAKQ,QAAQ,GAAG,EAC9D;YACEC,aAAa;YACbC,aAAa;YACbC,SAAS;QACX;QAGJ,IAAI,CAACL,GAAG,CAAC;QACT,IAAI,CAACA,GAAG,CAAC,CAAC,uBAAuB,EAAE5D,UAAU,QAAQwD,aAAa,CAAC,CAAC;QAEpE,MAAMU,QAAQC,KAAKC,GAAG;QACtB,MAAMC,kBAAkBnD,YAAY;QAEpC,0GAA0G;QAC1G,8GAA8G;QAC9G,+EAA+E;QAC/E,MAAMoD,YAAY,MAAM1E,QAAQE,KAAK2D,IAAI,CAAC5D,UAAU,CAAC,cAAc,CAAC;QAEpE,mDAAmD;QACnD,KAAK,MAAM0E,OAAO;YAChBjB,KAAKI,MAAM;YACX5D,KAAK2D,IAAI,CAACa,WAAW;YACrBxE,KAAK2D,IAAI,CAACa,WAAW;SACtB,CAAE;YACD,MAAM3E,MAAM4E,KAAK;gBAACC,WAAW;YAAI;QACnC;QAEA3D,oBAAoB,qCAAqCyD;QACzD,MAAMG,sBAAsB3E,KAAK2D,IAAI,CAACa,WAAW;QAEjD,MAAMI,eAAehF,kBAAkB+E;QAEvC,IAAI;YACF,MAAME,mBAAmB,IAAI1D,yBAC3BqC,KAAKV,SAAS,EACdU,KAAKO,WAAW,EAChBP,KAAKQ,QAAQ;YAGf,MAAMc,QAAgB,EAAE;YACxB,IAAIC,IAAI;YACR,WAAW,MAAMC,QAAQH,iBAAkB;gBACzCC,MAAMG,IAAI,CAACD;gBACXD;gBACAR,gBAAgBW,GAAG,CAAC;oBAClBC,SAASJ;oBACTK,MAAM,CAAC,uBAAuB,CAAC;oBAC/BC,OAAOR,iBAAiBS,UAAU;oBAClCC,QAAQ;gBACV;YACF;YAEA,IAAIC,uBAAuB;YAC3B,MAAM7E,KACJmE,OACA,OAAOE;gBACL,IAAIA,KAAKS,IAAI,KAAK,UAAUT,KAAKS,IAAI,KAAK,SAAS;oBACjD,MAAMxE,cAAc+D,KAAKU,GAAG,EAAEV,KAAKW,IAAI,EAAEX,KAAKS,IAAI,EAAEjB;gBACtD,OAAO;oBACL,MAAMoB,MAAM,MAAM1E,iBAAiB8D,KAAKU,GAAG;oBAC3Cd,aAAaiB,KAAK,CAAC,GAAGD,IAAI,EAAE,CAAC;gBAC/B;gBAEAJ,wBAAwB;gBACxBjB,gBAAgBW,GAAG,CAAC;oBAClBC,SAASK;oBACTJ,MAAM,CAAC,mCAAmC,CAAC;oBAC3CC,OAAOR,iBAAiBS,UAAU;oBAClCC,QAAQ;gBACV;YACF,GACA;gBAACjD,aAAakB,KAAKlB,WAAW;YAAA;QAElC,EAAE,OAAOW,OAAO;YACdsB,gBAAgBuB,IAAI;YACpB,MAAM5C,UAAUD,iBAAiBE,QAAQF,MAAMC,OAAO,GAAGE,OAAOH;YAChElC,oBAAoB,CAAC,mCAAmC,EAAEmC,SAAS,EAAED;YACrE,IAAI,CAACA,KAAK,CAAC,CAAC,mCAAmC,EAAEC,SAAS,EAAE;gBAACG,MAAM;YAAC;QACtE;QAEAuB,aAAamB,GAAG;QAChB,MAAM9F,SAAS2E;QAEfL,gBAAgBW,GAAG,CAAC;YAACE,MAAM,CAAC,iCAAiC,CAAC;YAAEG,QAAQ;QAAI;QAC5E,IAAI;YACF,MAAM1E,WAAW2D,WAAWd,aAAa,CAACsC;gBACxCzB,gBAAgBgB,MAAM,CAAC;oBACrBH,MAAM,CAAC,gCAAgC,EAAE3D,cAAcuE,gBAAgB,iBAAiB,CAAC;gBAC3F;YACF;QACF,EAAE,OAAOC,KAAK;YACZ1B,gBAAgBuB,IAAI;YACpB,MAAM5C,UAAU+C,eAAe9C,QAAQ8C,IAAI/C,OAAO,GAAGE,OAAO6C;YAC5DlF
,oBAAoB,CAAC,yBAAyB,EAAEmC,SAAS,EAAE+C;YAC3D,IAAI,CAAChD,KAAK,CAAC,CAAC,yBAAyB,EAAEC,SAAS,EAAE;gBAACG,MAAM;YAAC;QAC5D;QAEAkB,gBAAgBW,GAAG,CAAC;YAClBE,MAAM,CAAC,+BAA+B,EAAElF,UAAU,QAAQ,GAAGsE,WAAW,GAAG;QAC7E;QACA,MAAMxD,cAAcwD;QAEpBD,gBAAgBW,GAAG,CAAC;YAClBE,MAAM,CAAC,0BAA0B,EAAExE,SAASyD,KAAKC,GAAG,KAAKF,OAAO,CAAC,CAAC;QACpE;QACAG,gBAAgB2B,OAAO;IACzB;IAEA,MAAcC,cAAcC,kBAA0B,EAAmB;QACvE,IAAI,IAAI,CAAC/D,KAAK,CAACI,GAAG,KAAK4D,WAAW;YAChC,qEAAqE;YACrE,OAAOrG,KAAKsG,OAAO,CAAC,IAAI,CAACjE,KAAK,CAACI,GAAG;QACpC;QAEA,MAAM8D,UAAUC,QAAQC,GAAG;QAC3B,MAAMC,cAAc,MAAMjG,MAAM;YAC9B+B,SAASxC,KAAK2D,IAAI,CAAC4C,SAASH;YAC5BlD,SAAS;QACX;QACA,OAAOlD,KAAKsG,OAAO,CAACI;IACtB;IAEA,MAAcjD,qBACZX,SAAiB,EACjBiB,WAAmB,EACa;QAChC,MAAMkC,MAAM5E,oBAAoB0C;QAChC,IAAIkC,KAAK;YACP,IAAI,CAAChD,KAAK,CAACgD,KAAK;gBAAC5C,MAAM;YAAC;QAC1B;QAEA,MAAMW,WAAWZ,OACf,IAAI,CAACf,KAAK,CAAC,YAAY,IAAK,MAAM,IAAI,CAACsE,iBAAiB,CAAC7D,WAAWiB;QAGtE,IACE,iBAAiB,IAAI,CAAC1B,KAAK,IAC1B,CAAA,IAAI,CAACA,KAAK,CAACC,WAAW,GAAG,KAAK,IAAI,CAACD,KAAK,CAACC,WAAW,GAAGV,wBAAuB,GAC/E;YACA,IAAI,CAACqB,KAAK,CAAC,CAAC,8BAA8B,EAAErB,yBAAyB,MAAM,CAAC,EAAE;gBAACyB,MAAM;YAAC;QACxF;QAEA,MAAM+C,qBAAqB,GAAGrC,YAAY,QAAQ,EAAEC,SAAS,OAAO,CAAC;QACrE,IAAIvB,MAAM,MAAM,IAAI,CAAC0D,aAAa,CAACC;QAEnC,yEAAyE;QACzE,IAAI1E,cAAce,MAAM;YACtBA,MAAMzC,KAAK2D,IAAI,CAAClB,KAAK2D;QACvB;QAEA,MAAMQ,SAAS,MAAMvG,WAAWoC;QAChC,gFAAgF;QAChF,IAAI,CAAC,IAAI,CAACJ,KAAK,CAACK,SAAS,IAAIkE,QAAQ;YACnC,MAAMC,kBAAkB,MAAMrG,QAAQ;gBACpCgC,SAAS;gBACTU,SAAS,CAAC,MAAM,EAAET,IAAI,iDAAiD,CAAC;YAC1E;YAEA,yEAAyE;YACzE,IAAI,CAACoE,iBAAiB;gBACpB,IAAI,CAAC5D,KAAK,CAAC,wBAAwB;oBAACI,MAAM;gBAAC;YAC7C;QACF;QAEA,OAAO;YACLW;YACA1B,aAAa,IAAI,CAACD,KAAK,CAACC,WAAW,IAAIX;YACvCoC;YACAH,QAAQ5D,KAAK8G,OAAO,CAACrE;YACrBoB,aAAa7D,KAAK+G,QAAQ,CAACtE;YAC3BC,WAAW,IAAI,CAACL,KAAK,CAACK,SAAS;YAC/BI;QACF;IACF;IAEA,MAAc6D,kBAAkB7D,SAAiB,EAAEiB,WAAmB,EAAmB;QACvF,MAAMiD,oBAAoB;QAE1B,IAAI;YACF,MAAMC,WAAW,MAAM1F,YAAY;gBACjCwC;gBACAmD,OAAOF;gBACPlE;YACF;YAEA,IAAI,CAACmE,UAAUE,SAAS7D,QAAQ;gBAC9B,IAAI,CAACL,KAAK,CAAC
,oBAAoB;oBAACI,MAAM;gBAAC;YACzC;YAEA,MAAM+D,kBAAkBH,SAASE,OAAO,CAACE,GAAG,CAAC,CAACC,SAAwB,CAAA;oBACpE3B,MAAM2B,OAAOC,EAAE;oBACfC,OAAOF,OAAOC,EAAE;gBAClB,CAAA;YAEA,MAAME,OACJL,gBAAgB9D,MAAM,KAAK0D,oBACvB,CAAC,YAAY,EAAEA,kBAAkB,OAAO,CAAC,GACzC;YAEN,OAAOtG,OAAO;gBACZgH,SAASN;gBACTlE,SAAS,CAAC,uBAAuB,EAAEuE,MAAM;YAC3C;QACF,EAAE,OAAOxB,KAAK;YACZ,MAAM/C,UAAU+C,eAAe9C,QAAQ8C,IAAI/C,OAAO,GAAGE,OAAO6C;YAC5DlF,oBAAoB,CAAC,oCAAoC,EAAEgD,YAAY,EAAE,EAAEb,SAAS,EAAE+C;YACtF,IAAI,CAAChD,KAAK,CAAC,CAAC,oCAAoC,EAAEc,YAAY,EAAE,EAAEb,SAAS,EAAE;gBAACG,MAAM;YAAC;QACvF;IACF;AACF"}
@@ -6,7 +6,6 @@ import { NEW_DATASET_VALUE, promptForDataset } from '../../prompts/promptForData
6
6
  import { promptForDatasetName } from '../../prompts/promptForDatasetName.js';
7
7
  import { setBackup } from '../../services/backup.js';
8
8
  import { createDataset, listDatasets } from '../../services/datasets.js';
9
- import { NO_PROJECT_ID } from '../../util/errorMessages.js';
10
9
  const enableBackupDebug = subdebug('backup:enable');
11
10
  export class EnableBackupCommand extends SanityCommand {
12
11
  static args = {
@@ -30,11 +29,6 @@ export class EnableBackupCommand extends SanityCommand {
30
29
  const { args } = await this.parse(EnableBackupCommand);
31
30
  let { dataset } = args;
32
31
  const projectId = await this.getProjectId();
33
- if (!projectId) {
34
- this.error(NO_PROJECT_ID, {
35
- exit: 1
36
- });
37
- }
38
32
  let datasets;
39
33
  try {
40
34
  datasets = await listDatasets(projectId);
@@ -1 +1 @@
1
- {"version":3,"sources":["../../../src/commands/backup/enable.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport {Args} from '@oclif/core'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\nimport {type DatasetsResponse} from '@sanity/client'\n\nimport {assertDatasetExists} from '../../actions/backup/assertDatasetExist.js'\nimport {NEW_DATASET_VALUE, promptForDataset} from '../../prompts/promptForDataset.js'\nimport {promptForDatasetName} from '../../prompts/promptForDatasetName.js'\nimport {setBackup} from '../../services/backup.js'\nimport {createDataset, listDatasets} from '../../services/datasets.js'\nimport {NO_PROJECT_ID} from '../../util/errorMessages.js'\n\nconst enableBackupDebug = subdebug('backup:enable')\n\nexport class EnableBackupCommand extends SanityCommand<typeof EnableBackupCommand> {\n static override args = {\n dataset: Args.string({\n description: 'Dataset name to enable backup for',\n required: false,\n }),\n }\n\n static override description = 'Enable backup for a dataset.'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Interactively enable backup for a dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production',\n description: 'Enable backup for the production dataset',\n },\n ]\n\n public async run(): Promise<void> {\n const {args} = await this.parse(EnableBackupCommand)\n let {dataset} = args\n\n const projectId = await this.getProjectId()\n if (!projectId) {\n this.error(NO_PROJECT_ID, {exit: 1})\n }\n\n let datasets: DatasetsResponse\n\n try {\n datasets = await listDatasets(projectId)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n enableBackupDebug(`Failed to list datasets: ${message}`, error)\n this.error(`Failed to list datasets: ${message}`, {exit: 1})\n }\n\n const hasProduction = datasets.some((dataset) => dataset.name === 'production')\n\n if (datasets.length === 0) {\n this.error('No datasets found in this project.', {exit: 1})\n }\n\n if (dataset) {\n assertDatasetExists(datasets, dataset)\n } else {\n dataset = await promptForDataset({allowCreation: true, datasets})\n\n if (dataset === NEW_DATASET_VALUE) {\n const newDatasetName = await promptForDatasetName({\n default: hasProduction ? undefined : 'production',\n })\n\n try {\n await createDataset({\n datasetName: newDatasetName,\n projectId,\n })\n dataset = newDatasetName\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n enableBackupDebug(`Failed to create dataset ${newDatasetName}: ${message}`, error)\n this.error(`Failed to create dataset ${newDatasetName}: ${message}`, {exit: 1})\n }\n }\n }\n\n try {\n await setBackup({dataset, projectId, status: true})\n\n this.log(\n `${styleText(\n 'green',\n `Enabled backups for dataset ${dataset}.\\nPlease note that it may take up to 24 hours before the first backup is created.\\n`,\n )}`,\n )\n\n this.log(\n `${styleText('bold', `Retention policies may apply depending on your plan and agreement.\\n`)}`,\n )\n\n enableBackupDebug(`Successfully enabled backup for dataset ${dataset}`)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n enableBackupDebug(`Failed to enable backup for dataset`, error)\n this.error(`Enabling dataset backup failed: ${message}`, {exit: 1})\n }\n }\n}\n"],"names":["styleText","Args","SanityCommand","subdebug","assertDatasetExists","NEW_DATASET_VALUE","promptForDataset","promptForDatasetName","setBackup","createDataset","listDatasets","NO_PROJECT_ID","enableBackupDebug","EnableBackupCommand","args","dataset","string","description","required","examples","command","run","parse","projectId","getProjectId","error","exit","datasets","message","Error","String","hasProduction","some","name","length","allowCreation","newDatasetName","default","undefined","datasetName","status","log"],"mappings":"AAAA,SAAQA,SAAS,QAAO,YAAW;AAEnC,SAAQC,IAAI,QAAO,cAAa;AAChC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AAGxD,SAAQC,mBAAmB,QAAO,6CAA4C;AAC9E,SAAQC,iBAAiB,EAAEC,gBAAgB,QAAO,oCAAmC;AACrF,SAAQC,oBAAoB,QAAO,wCAAuC;AAC1E,SAAQC,SAAS,QAAO,2BAA0B;AAClD,SAAQC,aAAa,EAAEC,YAAY,QAAO,6BAA4B;AACtE,SAAQC,aAAa,QAAO,8BAA6B;AAEzD,MAAMC,oBAAoBT,SAAS;AAEnC,OAAO,MAAMU,4BAA4BX;IACvC,OAAgBY,OAAO;QACrBC,SAASd,KAAKe,MAAM,CAAC;YACnBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,+BAA8B;IAE5D,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;KACD,CAAA;IAED,MAAaI,MAAqB;QAChC,MAAM,EAACP,IAAI,EAAC,GAAG,MAAM,IAAI,CAACQ,KAAK,CAACT;QAChC,IAAI,EAACE,OAAO,EAAC,GAAGD;QAEhB,MAAMS,YAAY,MAAM,IAAI,CAACC,YAAY;QACzC,IAAI,CAACD,WAAW;YACd,IAAI,CAACE,KAAK,CAACd,eAAe;gBAACe,MAAM;YAAC;QACpC;QAEA,IAAIC;QAEJ,IAAI;YACFA,WAAW,MAAMjB,aAAaa;QAChC,EAAE,OAAOE,OAAO;YACd,MAAMG,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;YAChEb,kBAAkB,CAAC,yBAAyB,EAAEgB,SAAS,EAAEH;YACzD,IAAI,CAACA,KAAK,CAAC,CAAC,yBAAyB,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QAC5D;QAEA,MAAMK,gBAAgBJ,SAASK,IAAI,CAAC,CAACjB,UAAYA,QAAQkB,IAAI,KAAK;QAElE,IAAIN,SAASO,MAAM,KAAK,GAAG;YACzB,IAAI,CAACT,KAAK,CAAC,sCAAsC;gBAACC,MAAM;YAAC;QAC3D;QAEA,IAAIX,SAAS;YACXX,oBAAoBuB,UAAUZ;QAChC,OAAO;YACLA,UAAU,MAAMT,iBAAiB;gBAAC6B,eAAe;gBAAMR;YAAQ;YAE/D,IAA
IZ,YAAYV,mBAAmB;gBACjC,MAAM+B,iBAAiB,MAAM7B,qBAAqB;oBAChD8B,SAASN,gBAAgBO,YAAY;gBACvC;gBAEA,IAAI;oBACF,MAAM7B,cAAc;wBAClB8B,aAAaH;wBACbb;oBACF;oBACAR,UAAUqB;gBACZ,EAAE,OAAOX,OAAO;oBACd,MAAMG,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;oBAChEb,kBAAkB,CAAC,yBAAyB,EAAEwB,eAAe,EAAE,EAAER,SAAS,EAAEH;oBAC5E,IAAI,CAACA,KAAK,CAAC,CAAC,yBAAyB,EAAEW,eAAe,EAAE,EAAER,SAAS,EAAE;wBAACF,MAAM;oBAAC;gBAC/E;YACF;QACF;QAEA,IAAI;YACF,MAAMlB,UAAU;gBAACO;gBAASQ;gBAAWiB,QAAQ;YAAI;YAEjD,IAAI,CAACC,GAAG,CACN,GAAGzC,UACD,SACA,CAAC,4BAA4B,EAAEe,QAAQ,oFAAoF,CAAC,GAC3H;YAGL,IAAI,CAAC0B,GAAG,CACN,GAAGzC,UAAU,QAAQ,CAAC,oEAAoE,CAAC,GAAG;YAGhGY,kBAAkB,CAAC,wCAAwC,EAAEG,SAAS;QACxE,EAAE,OAAOU,OAAO;YACd,MAAMG,UAAUH,iBAAiBI,QAAQJ,MAAMG,OAAO,GAAGE,OAAOL;YAChEb,kBAAkB,CAAC,mCAAmC,CAAC,EAAEa;YACzD,IAAI,CAACA,KAAK,CAAC,CAAC,gCAAgC,EAAEG,SAAS,EAAE;gBAACF,MAAM;YAAC;QACnE;IACF;AACF"}
1
+ {"version":3,"sources":["../../../src/commands/backup/enable.ts"],"sourcesContent":["import {styleText} from 'node:util'\n\nimport {Args} from '@oclif/core'\nimport {SanityCommand, subdebug} from '@sanity/cli-core'\nimport {type DatasetsResponse} from '@sanity/client'\n\nimport {assertDatasetExists} from '../../actions/backup/assertDatasetExist.js'\nimport {NEW_DATASET_VALUE, promptForDataset} from '../../prompts/promptForDataset.js'\nimport {promptForDatasetName} from '../../prompts/promptForDatasetName.js'\nimport {setBackup} from '../../services/backup.js'\nimport {createDataset, listDatasets} from '../../services/datasets.js'\n\nconst enableBackupDebug = subdebug('backup:enable')\n\nexport class EnableBackupCommand extends SanityCommand<typeof EnableBackupCommand> {\n static override args = {\n dataset: Args.string({\n description: 'Dataset name to enable backup for',\n required: false,\n }),\n }\n\n static override description = 'Enable backup for a dataset.'\n\n static override examples = [\n {\n command: '<%= config.bin %> <%= command.id %>',\n description: 'Interactively enable backup for a dataset',\n },\n {\n command: '<%= config.bin %> <%= command.id %> production',\n description: 'Enable backup for the production dataset',\n },\n ]\n\n public async run(): Promise<void> {\n const {args} = await this.parse(EnableBackupCommand)\n let {dataset} = args\n\n const projectId = await this.getProjectId()\n\n let datasets: DatasetsResponse\n\n try {\n datasets = await listDatasets(projectId)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n enableBackupDebug(`Failed to list datasets: ${message}`, error)\n this.error(`Failed to list datasets: ${message}`, {exit: 1})\n }\n\n const hasProduction = datasets.some((dataset) => dataset.name === 'production')\n\n if (datasets.length === 0) {\n this.error('No datasets found in this project.', {exit: 1})\n }\n\n if (dataset) {\n assertDatasetExists(datasets, dataset)\n } else {\n dataset = await promptForDataset({allowCreation: true, datasets})\n\n if (dataset === NEW_DATASET_VALUE) {\n const newDatasetName = await promptForDatasetName({\n default: hasProduction ? undefined : 'production',\n })\n\n try {\n await createDataset({\n datasetName: newDatasetName,\n projectId,\n })\n dataset = newDatasetName\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error)\n enableBackupDebug(`Failed to create dataset ${newDatasetName}: ${message}`, error)\n this.error(`Failed to create dataset ${newDatasetName}: ${message}`, {exit: 1})\n }\n }\n }\n\n try {\n await setBackup({dataset, projectId, status: true})\n\n this.log(\n `${styleText(\n 'green',\n `Enabled backups for dataset ${dataset}.\\nPlease note that it may take up to 24 hours before the first backup is created.\\n`,\n )}`,\n )\n\n this.log(\n `${styleText('bold', `Retention policies may apply depending on your plan and agreement.\\n`)}`,\n )\n\n enableBackupDebug(`Successfully enabled backup for dataset ${dataset}`)\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error)\n enableBackupDebug(`Failed to enable backup for dataset`, error)\n this.error(`Enabling dataset backup failed: ${message}`, {exit: 1})\n }\n }\n}\n"],"names":["styleText","Args","SanityCommand","subdebug","assertDatasetExists","NEW_DATASET_VALUE","promptForDataset","promptForDatasetName","setBackup","createDataset","listDatasets","enableBackupDebug","EnableBackupCommand","args","dataset","string","description","required","examples","command","run","parse","projectId","getProjectId","datasets","error","message","Error","String","exit","hasProduction","some","name","length","allowCreation","newDatasetName","default","undefined","datasetName","status","log"],"mappings":"AAAA,SAAQA,SAAS,QAAO,YAAW;AAEnC,SAAQC,IAAI,QAAO,cAAa;AAChC,SAAQC,aAAa,EAAEC,QAAQ,QAAO,mBAAkB;AAGxD,SAAQC,mBAAmB,QAAO,6CAA4C;AAC9E,SAAQC,iBAAiB,EAAEC,gBAAgB,QAAO,oCAAmC;AACrF,SAAQC,oBAAoB,QAAO,wCAAuC;AAC1E,SAAQC,SAAS,QAAO,2BAA0B;AAClD,SAAQC,aAAa,EAAEC,YAAY,QAAO,6BAA4B;AAEtE,MAAMC,oBAAoBR,SAAS;AAEnC,OAAO,MAAMS,4BAA4BV;IACvC,OAAgBW,OAAO;QACrBC,SAASb,KAAKc,MAAM,CAAC;YACnBC,aAAa;YACbC,UAAU;QACZ;IACF,EAAC;IAED,OAAgBD,cAAc,+BAA8B;IAE5D,OAAgBE,WAAW;QACzB;YACEC,SAAS;YACTH,aAAa;QACf;QACA;YACEG,SAAS;YACTH,aAAa;QACf;KACD,CAAA;IAED,MAAaI,MAAqB;QAChC,MAAM,EAACP,IAAI,EAAC,GAAG,MAAM,IAAI,CAACQ,KAAK,CAACT;QAChC,IAAI,EAACE,OAAO,EAAC,GAAGD;QAEhB,MAAMS,YAAY,MAAM,IAAI,CAACC,YAAY;QAEzC,IAAIC;QAEJ,IAAI;YACFA,WAAW,MAAMd,aAAaY;QAChC,EAAE,OAAOG,OAAO;YACd,MAAMC,UAAUD,iBAAiBE,QAAQF,MAAMC,OAAO,GAAGE,OAAOH;YAChEd,kBAAkB,CAAC,yBAAyB,EAAEe,SAAS,EAAED;YACzD,IAAI,CAACA,KAAK,CAAC,CAAC,yBAAyB,EAAEC,SAAS,EAAE;gBAACG,MAAM;YAAC;QAC5D;QAEA,MAAMC,gBAAgBN,SAASO,IAAI,CAAC,CAACjB,UAAYA,QAAQkB,IAAI,KAAK;QAElE,IAAIR,SAASS,MAAM,KAAK,GAAG;YACzB,IAAI,CAACR,KAAK,CAAC,sCAAsC;gBAACI,MAAM;YAAC;QAC3D;QAEA,IAAIf,SAAS;YACXV,oBAAoBoB,UAAUV;QAChC,OAAO;YACLA,UAAU,MAAMR,iBAAiB;gBAAC4B,eAAe;gBAAMV;YAAQ;YAE/D,IAAIV,YAAYT,mBAAmB;gBACjC,MAAM8B,iBAAiB,MAAM5B,qBAAqB;oBAChD6B,SAASN,gBAAgBO,YAAY;gBACvC;gBAEA,IAAI;oBACF,MAAM5B,cAAc;wBAClB6B
,aAAaH;wBACbb;oBACF;oBACAR,UAAUqB;gBACZ,EAAE,OAAOV,OAAO;oBACd,MAAMC,UAAUD,iBAAiBE,QAAQF,MAAMC,OAAO,GAAGE,OAAOH;oBAChEd,kBAAkB,CAAC,yBAAyB,EAAEwB,eAAe,EAAE,EAAET,SAAS,EAAED;oBAC5E,IAAI,CAACA,KAAK,CAAC,CAAC,yBAAyB,EAAEU,eAAe,EAAE,EAAET,SAAS,EAAE;wBAACG,MAAM;oBAAC;gBAC/E;YACF;QACF;QAEA,IAAI;YACF,MAAMrB,UAAU;gBAACM;gBAASQ;gBAAWiB,QAAQ;YAAI;YAEjD,IAAI,CAACC,GAAG,CACN,GAAGxC,UACD,SACA,CAAC,4BAA4B,EAAEc,QAAQ,oFAAoF,CAAC,GAC3H;YAGL,IAAI,CAAC0B,GAAG,CACN,GAAGxC,UAAU,QAAQ,CAAC,oEAAoE,CAAC,GAAG;YAGhGW,kBAAkB,CAAC,wCAAwC,EAAEG,SAAS;QACxE,EAAE,OAAOW,OAAO;YACd,MAAMC,UAAUD,iBAAiBE,QAAQF,MAAMC,OAAO,GAAGE,OAAOH;YAChEd,kBAAkB,CAAC,mCAAmC,CAAC,EAAEc;YACzD,IAAI,CAACA,KAAK,CAAC,CAAC,gCAAgC,EAAEC,SAAS,EAAE;gBAACG,MAAM;YAAC;QACnE;IACF;AACF"}
@@ -2,11 +2,13 @@ import { Args, Flags } from '@oclif/core';
2
2
  import { SanityCommand, subdebug } from '@sanity/cli-core';
3
3
  import { select } from '@sanity/cli-core/ux';
4
4
  import { Table } from 'console-table-printer';
5
- import { isAfter, isValid, lightFormat, parse } from 'date-fns';
5
+ import { isAfter } from 'date-fns/isAfter';
6
+ import { isValid } from 'date-fns/isValid';
7
+ import { lightFormat } from 'date-fns/lightFormat';
8
+ import { parse } from 'date-fns/parse';
6
9
  import { assertDatasetExists } from '../../actions/backup/assertDatasetExist.js';
7
10
  import { listBackups } from '../../services/backup.js';
8
11
  import { listDatasets } from '../../services/datasets.js';
9
- import { NO_PROJECT_ID } from '../../util/errorMessages.js';
10
12
  const listBackupDebug = subdebug('backup:list');
11
13
  const DEFAULT_LIST_BACKUP_LIMIT = 30;
12
14
  export class ListBackupCommand extends SanityCommand {
@@ -52,11 +54,6 @@ export class ListBackupCommand extends SanityCommand {
52
54
  const { args, flags } = await this.parse(ListBackupCommand);
53
55
  let { dataset } = args;
54
56
  const projectId = await this.getProjectId();
55
- if (!projectId) {
56
- this.error(NO_PROJECT_ID, {
57
- exit: 1
58
- });
59
- }
60
57
  let datasets;
61
58
  try {
62
59
  datasets = await listDatasets(projectId);