@databricks/appkit 0.23.0 → 0.25.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. package/CLAUDE.md +9 -1
  2. package/dist/appkit/package.js +1 -1
  3. package/dist/cache/index.js.map +1 -1
  4. package/dist/cli/commands/docs.js +7 -1
  5. package/dist/cli/commands/docs.js.map +1 -1
  6. package/dist/cli/commands/generate-types.js +20 -10
  7. package/dist/cli/commands/generate-types.js.map +1 -1
  8. package/dist/cli/commands/lint.js +3 -1
  9. package/dist/cli/commands/lint.js.map +1 -1
  10. package/dist/cli/commands/plugin/add-resource/add-resource.js +73 -8
  11. package/dist/cli/commands/plugin/add-resource/add-resource.js.map +1 -1
  12. package/dist/cli/commands/plugin/create/create.js +164 -20
  13. package/dist/cli/commands/plugin/create/create.js.map +1 -1
  14. package/dist/cli/commands/plugin/create/resource-defaults.js +5 -1
  15. package/dist/cli/commands/plugin/create/resource-defaults.js.map +1 -1
  16. package/dist/cli/commands/plugin/index.js +7 -1
  17. package/dist/cli/commands/plugin/index.js.map +1 -1
  18. package/dist/cli/commands/plugin/list/list.js +7 -1
  19. package/dist/cli/commands/plugin/list/list.js.map +1 -1
  20. package/dist/cli/commands/plugin/sync/sync.js +27 -14
  21. package/dist/cli/commands/plugin/sync/sync.js.map +1 -1
  22. package/dist/cli/commands/plugin/validate/validate.js +39 -9
  23. package/dist/cli/commands/plugin/validate/validate.js.map +1 -1
  24. package/dist/cli/commands/setup.js +6 -5
  25. package/dist/cli/commands/setup.js.map +1 -1
  26. package/dist/connectors/index.js +1 -0
  27. package/dist/connectors/lakebase/index.js.map +1 -1
  28. package/dist/connectors/lakebase-v1/client.js.map +1 -1
  29. package/dist/connectors/vector-search/client.js +9 -0
  30. package/dist/connectors/vector-search/client.js.map +1 -0
  31. package/dist/connectors/vector-search/index.js +3 -0
  32. package/dist/context/execution-context.js +1 -7
  33. package/dist/context/execution-context.js.map +1 -1
  34. package/dist/context/index.js +1 -1
  35. package/dist/context/index.js.map +1 -1
  36. package/dist/index.d.ts +2 -1
  37. package/dist/index.js +2 -1
  38. package/dist/index.js.map +1 -1
  39. package/dist/plugin/dev-reader.js.map +1 -1
  40. package/dist/plugins/files/plugin.d.ts +46 -15
  41. package/dist/plugins/files/plugin.d.ts.map +1 -1
  42. package/dist/plugins/files/plugin.js +182 -103
  43. package/dist/plugins/files/plugin.js.map +1 -1
  44. package/dist/plugins/files/policy.d.ts +45 -0
  45. package/dist/plugins/files/policy.d.ts.map +1 -0
  46. package/dist/plugins/files/policy.js +63 -0
  47. package/dist/plugins/files/policy.js.map +1 -0
  48. package/dist/plugins/files/types.d.ts +16 -8
  49. package/dist/plugins/files/types.d.ts.map +1 -1
  50. package/dist/plugins/server/vite-dev-server.js.map +1 -1
  51. package/dist/plugins/serving/serving.d.ts.map +1 -1
  52. package/dist/plugins/serving/serving.js +22 -8
  53. package/dist/plugins/serving/serving.js.map +1 -1
  54. package/dist/plugins/serving/types.d.ts +11 -10
  55. package/dist/plugins/serving/types.d.ts.map +1 -1
  56. package/dist/type-generator/index.js +13 -1
  57. package/dist/type-generator/index.js.map +1 -1
  58. package/dist/type-generator/migration.js +155 -0
  59. package/dist/type-generator/migration.js.map +1 -0
  60. package/dist/type-generator/serving/generator.js +22 -1
  61. package/dist/type-generator/serving/generator.js.map +1 -1
  62. package/dist/type-generator/serving/vite-plugin.d.ts +1 -1
  63. package/dist/type-generator/serving/vite-plugin.js +2 -2
  64. package/dist/type-generator/serving/vite-plugin.js.map +1 -1
  65. package/dist/type-generator/vite-plugin.d.ts.map +1 -1
  66. package/dist/type-generator/vite-plugin.js +3 -4
  67. package/dist/type-generator/vite-plugin.js.map +1 -1
  68. package/docs/api/appkit/Class.PolicyDeniedError.md +52 -0
  69. package/docs/api/appkit/Interface.FilePolicyUser.md +23 -0
  70. package/docs/api/appkit/Interface.FileResource.md +36 -0
  71. package/docs/api/appkit/TypeAlias.FileAction.md +18 -0
  72. package/docs/api/appkit/TypeAlias.FilePolicy.md +20 -0
  73. package/docs/api/appkit/TypeAlias.ServingFactory.md +9 -5
  74. package/docs/api/appkit/Variable.READ_ACTIONS.md +8 -0
  75. package/docs/api/appkit/Variable.WRITE_ACTIONS.md +8 -0
  76. package/docs/api/appkit.md +19 -12
  77. package/docs/development/type-generation.md +6 -5
  78. package/docs/faq.md +8 -8
  79. package/docs/plugins/analytics.md +1 -1
  80. package/docs/plugins/custom-plugins.md +4 -0
  81. package/docs/plugins/execution-context.md +0 -1
  82. package/docs/plugins/files.md +150 -2
  83. package/docs/plugins/{serving.md → model-serving.md} +1 -1
  84. package/docs/plugins/plugin-management.md +22 -6
  85. package/docs/plugins/vector-search.md +247 -0
  86. package/llms.txt +9 -1
  87. package/package.json +1 -1
  88. package/sbom.cdx.json +1 -1
@@ -1 +1 @@
1
- {"version":3,"file":"sync.js","names":[],"sources":["../../../../../src/cli/commands/plugin/sync/sync.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { Lang, parse, type SgNode } from \"@ast-grep/napi\";\nimport { Command } from \"commander\";\nimport {\n loadManifestFromFile,\n type ResolvedManifest,\n resolveManifestInDir,\n} from \"../manifest-resolve\";\nimport type {\n PluginManifest,\n TemplatePlugin,\n TemplatePluginsManifest,\n} from \"../manifest-types\";\nimport { shouldAllowJsManifestForPackage } from \"../trusted-js-manifest\";\nimport {\n formatValidationErrors,\n validateManifest,\n} from \"../validate/validate-manifest\";\n\n/**\n * Checks whether a resolved file path is within a given directory boundary.\n * Uses path.resolve + startsWith to prevent directory traversal.\n *\n * @param filePath - The path to check (will be resolved to absolute)\n * @param boundary - The directory that must contain filePath\n * @returns true if filePath is inside boundary (or equal to it)\n */\nfunction isWithinDirectory(filePath: string, boundary: string): boolean {\n const resolvedPath = path.resolve(filePath);\n const resolvedBoundary = path.resolve(boundary);\n // Append separator to avoid prefix false-positives (e.g. 
/foo-bar matching /foo)\n return (\n resolvedPath === resolvedBoundary ||\n resolvedPath.startsWith(`${resolvedBoundary}${path.sep}`)\n );\n}\n\n/**\n * Validates a parsed JSON object against the plugin-manifest JSON schema.\n * Returns the manifest if valid, or null and logs schema errors.\n */\nfunction validateManifestWithSchema(\n obj: unknown,\n sourcePath: string,\n): PluginManifest | null {\n const result = validateManifest(obj);\n if (result.valid && result.manifest) return result.manifest;\n if (result.errors?.length) {\n console.warn(\n `Warning: Manifest at ${sourcePath} failed schema validation:\\n${formatValidationErrors(result.errors, obj)}`,\n );\n }\n return null;\n}\n\n/** Safety limit for recursive directory scanning to prevent runaway traversal. */\nconst MAX_SCAN_DEPTH = 5;\n\n/**\n * Load and validate a resolved manifest, returning a TemplatePlugin entry or null.\n * Centralises the resolve → load → validate → build-entry pipeline used by\n * multiple discovery functions.\n */\nasync function loadPluginEntry(\n resolved: ResolvedManifest,\n pkg: string,\n allowJsManifest: boolean,\n): Promise<[string, TemplatePlugin] | null> {\n const parsed = await loadManifestFromFile(resolved.path, resolved.type, {\n allowJsManifest,\n });\n const manifest = validateManifestWithSchema(parsed, resolved.path);\n if (!manifest || manifest.hidden) return null;\n\n return [\n manifest.name,\n {\n name: manifest.name,\n displayName: manifest.displayName,\n description: manifest.description,\n package: pkg,\n resources: manifest.resources,\n ...(manifest.onSetupMessage && {\n onSetupMessage: manifest.onSetupMessage,\n }),\n },\n ];\n}\n\n/**\n * Known packages that may contain AppKit plugins.\n * Always scanned for manifests, even if not imported in the server file.\n */\nconst KNOWN_PLUGIN_PACKAGES = [\"@databricks/appkit\"];\n\n/**\n * Candidate paths for the server entry file, relative to cwd.\n * Checked in order; the first that exists is used.\n */\nconst 
SERVER_FILE_CANDIDATES = [\"server/server.ts\", \"server/index.ts\"];\n\n/**\n * Conventional directories to scan for local plugin manifests when\n * --local-plugins-dir is not set. Checked in order; each that exists is scanned.\n * Plugins found here are added to the manifest even if not imported in the server.\n */\nconst CONVENTIONAL_LOCAL_PLUGIN_DIRS = [\"plugins\", \"server\"];\n\n/**\n * Find the server entry file by checking candidate paths in order.\n *\n * @param cwd - Current working directory\n * @returns Absolute path to the server file, or null if none found\n */\nfunction findServerFile(cwd: string): string | null {\n for (const candidate of SERVER_FILE_CANDIDATES) {\n const fullPath = path.join(cwd, candidate);\n if (fs.existsSync(fullPath)) {\n return fullPath;\n }\n }\n return null;\n}\n\n/**\n * Represents a single named import extracted from the server file.\n */\ninterface ParsedImport {\n /** The imported name (or local alias if renamed) */\n name: string;\n /** The original exported name (differs from name when using `import { foo as bar }`) */\n originalName: string;\n /** The module specifier (package name or relative path) */\n source: string;\n}\n\n/**\n * Extract all named imports from the AST root using structural node traversal.\n * Handles single/double quotes, multiline imports, and aliased imports.\n *\n * @param root - AST root node\n * @returns Array of parsed imports with name, original name, and source\n */\nfunction parseImports(root: SgNode): ParsedImport[] {\n const imports: ParsedImport[] = [];\n\n // Find all import_statement nodes in the AST\n const importStatements = root.findAll({\n rule: { kind: \"import_statement\" },\n });\n\n for (const stmt of importStatements) {\n // Extract the module specifier (the string node, e.g. 
'@databricks/appkit')\n const sourceNode = stmt.find({ rule: { kind: \"string\" } });\n if (!sourceNode) continue;\n\n // Strip surrounding quotes from the string node text\n const source = sourceNode.text().replace(/^['\"]|['\"]$/g, \"\");\n\n // Find named_imports block: { createApp, analytics, server }\n const namedImports = stmt.find({ rule: { kind: \"named_imports\" } });\n if (!namedImports) continue;\n\n // Extract each import_specifier\n const specifiers = namedImports.findAll({\n rule: { kind: \"import_specifier\" },\n });\n\n for (const specifier of specifiers) {\n const children = specifier.children();\n if (children.length >= 3) {\n // Aliased import: `foo as bar` — children are [name, \"as\", alias]\n const originalName = children[0].text();\n const localName = children[children.length - 1].text();\n imports.push({ name: localName, originalName, source });\n } else {\n // Simple import: `foo`\n const name = specifier.text();\n imports.push({ name, originalName: name, source });\n }\n }\n }\n\n return imports;\n}\n\n/**\n * Extract local names of plugins actually used in the `plugins: [...]` array\n * passed to `createApp()`. 
Uses structural AST traversal to find `pair` nodes\n * with key \"plugins\" and array values containing call expressions.\n *\n * @param root - AST root node\n * @returns Set of local variable names used as plugin calls in the plugins array\n */\nfunction parsePluginUsages(root: SgNode): Set<string> {\n const usedNames = new Set<string>();\n\n // Find all property pairs in the AST\n const pairs = root.findAll({ rule: { kind: \"pair\" } });\n\n for (const pair of pairs) {\n // Check if the property key is \"plugins\"\n const key = pair.find({ rule: { kind: \"property_identifier\" } });\n if (!key || key.text() !== \"plugins\") continue;\n\n // Find the array value\n const arrayNode = pair.find({ rule: { kind: \"array\" } });\n if (!arrayNode) continue;\n\n // Iterate direct children of the array to find call expressions\n for (const child of arrayNode.children()) {\n if (child.kind() === \"call_expression\") {\n // The callee is the first child (the identifier being called)\n const callee = child.children()[0];\n if (callee?.kind() === \"identifier\") {\n usedNames.add(callee.text());\n }\n }\n }\n }\n\n return usedNames;\n}\n\n/**\n * File extensions to try when resolving a relative import to a file path.\n */\nconst RESOLVE_EXTENSIONS = [\".ts\", \".tsx\", \".js\", \".jsx\"];\n\n/**\n * Resolve a relative import source to the plugin directory containing a manifest\n * (manifest.json or manifest.js). Follows the convention that plugins live in\n * their own directory with a manifest file.\n *\n * Resolution strategy:\n * 1. If the import path is a directory, look for manifest.json/js in it\n * 2. If the import path + extension is a file, look for manifest in its parent directory\n * 3. If the import path is a directory with an index file, look for manifest in that directory\n *\n * @param importSource - The relative import specifier (e.g. 
\"./plugins/my-plugin\")\n * @param serverFileDir - Absolute path to the directory containing the server file\n * @returns Resolved manifest file path and type, or null if not found\n */\nfunction resolveLocalManifest(\n importSource: string,\n serverFileDir: string,\n allowJsManifest: boolean,\n projectRoot?: string,\n): ResolvedManifest | null {\n const resolved = path.resolve(serverFileDir, importSource);\n\n // Security: Reject paths that escape the project root\n const boundary = projectRoot || serverFileDir;\n if (!isWithinDirectory(resolved, boundary)) {\n console.warn(\n `Warning: Skipping import \"${importSource}\" — resolves outside the project directory`,\n );\n return null;\n }\n\n // Case 1: Import path is a directory\n if (fs.existsSync(resolved) && fs.statSync(resolved).isDirectory()) {\n return resolveManifestInDir(resolved, { allowJsManifest });\n }\n\n // Case 2: Import path + extension resolves to a file — manifest in parent dir\n for (const ext of RESOLVE_EXTENSIONS) {\n const filePath = `${resolved}${ext}`;\n if (fs.existsSync(filePath) && fs.statSync(filePath).isFile()) {\n const dir = path.dirname(filePath);\n if (!isWithinDirectory(dir, boundary)) return null;\n return resolveManifestInDir(dir, { allowJsManifest });\n }\n }\n\n // Case 3: Import path is a directory with an index file\n for (const ext of RESOLVE_EXTENSIONS) {\n const indexPath = path.join(resolved, `index${ext}`);\n if (fs.existsSync(indexPath)) {\n return resolveManifestInDir(resolved, { allowJsManifest });\n }\n }\n\n return null;\n}\n\n/**\n * Discover plugin manifests from local (relative) imports in the server file.\n * Resolves each relative import to a directory and loads manifest.json or manifest.js.\n *\n * @param relativeImports - Parsed imports with relative sources (starting with . 
or /)\n * @param serverFileDir - Absolute path to the directory containing the server file\n * @param cwd - Current working directory (for computing relative paths in output)\n * @returns Map of plugin name to template plugin entry for local plugins\n */\nasync function discoverLocalPlugins(\n relativeImports: ParsedImport[],\n serverFileDir: string,\n cwd: string,\n allowJsManifest: boolean,\n): Promise<TemplatePluginsManifest[\"plugins\"]> {\n const plugins: TemplatePluginsManifest[\"plugins\"] = {};\n\n for (const imp of relativeImports) {\n const resolved = resolveLocalManifest(\n imp.source,\n serverFileDir,\n allowJsManifest,\n cwd,\n );\n if (!resolved) continue;\n\n try {\n const relativePath = path.relative(cwd, path.dirname(resolved.path));\n const entry = await loadPluginEntry(\n resolved,\n `./${relativePath}`,\n allowJsManifest,\n );\n if (entry) plugins[entry[0]] = entry[1];\n } catch (error) {\n console.warn(\n `Warning: Failed to load manifest at ${resolved.path}:`,\n error instanceof Error ? 
error.message : error,\n );\n }\n }\n\n return plugins;\n}\n\n/**\n * Discover plugin manifests from a package's dist folder.\n * Looks for manifest.json or manifest.js in dist/plugins/{plugin-name}/ directories.\n *\n * @param packagePath - Path to the package in node_modules\n * @returns Array of plugin manifests found in the package\n */\nasync function discoverPluginManifests(\n packagePath: string,\n allowJsManifest: boolean,\n): Promise<PluginManifest[]> {\n const pluginsDir = path.join(packagePath, \"dist\", \"plugins\");\n const manifests: PluginManifest[] = [];\n\n if (!fs.existsSync(pluginsDir)) {\n return manifests;\n }\n\n const entries = fs.readdirSync(pluginsDir, { withFileTypes: true });\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n const resolved = resolveManifestInDir(path.join(pluginsDir, entry.name), {\n allowJsManifest,\n });\n if (!resolved) continue;\n\n try {\n const parsed = await loadManifestFromFile(resolved.path, resolved.type, {\n allowJsManifest,\n });\n const manifest = validateManifestWithSchema(parsed, resolved.path);\n if (manifest) {\n manifests.push(manifest);\n }\n } catch (error) {\n console.warn(\n `Warning: Failed to load manifest at ${resolved.path}:`,\n error instanceof Error ? 
error.message : error,\n );\n }\n }\n\n return manifests;\n}\n\n/**\n * Scan node_modules for packages with plugin manifests.\n *\n * @param cwd - Current working directory to search from\n * @param packages - Set of npm package names to scan for plugin manifests\n * @returns Map of plugin name to template plugin entry\n */\nasync function scanForPlugins(\n cwd: string,\n packages: Iterable<string>,\n allowJsManifest: boolean,\n): Promise<TemplatePluginsManifest[\"plugins\"]> {\n const plugins: TemplatePluginsManifest[\"plugins\"] = {};\n\n for (const packageName of packages) {\n const packagePath = path.join(cwd, \"node_modules\", packageName);\n if (!fs.existsSync(packagePath)) {\n continue;\n }\n\n const allowJsForPackage =\n allowJsManifest || shouldAllowJsManifestForPackage(packageName);\n\n const manifests = await discoverPluginManifests(\n packagePath,\n allowJsForPackage,\n );\n for (const manifest of manifests) {\n if (manifest.hidden) continue;\n plugins[manifest.name] = {\n name: manifest.name,\n displayName: manifest.displayName,\n description: manifest.description,\n package: packageName,\n resources: manifest.resources,\n ...(manifest.onSetupMessage && {\n onSetupMessage: manifest.onSetupMessage,\n }),\n } satisfies TemplatePlugin;\n }\n }\n\n return plugins;\n}\n\n/**\n * Recursively scan a directory for plugin manifests. Any directory that\n * contains manifest.json or manifest.js is treated as a plugin root; we do\n * not descend into that directory's children. 
Used for local plugins discovery\n * so nested paths like server/plugins/category/my-plugin are found.\n */\nasync function scanPluginsDirRecursive(\n dir: string,\n cwd: string,\n allowJsManifest: boolean,\n depth = 0,\n): Promise<TemplatePluginsManifest[\"plugins\"]> {\n const plugins: TemplatePluginsManifest[\"plugins\"] = {};\n if (!fs.existsSync(dir) || depth >= MAX_SCAN_DEPTH) return plugins;\n\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n\n const pluginDir = path.join(dir, entry.name);\n const resolved = resolveManifestInDir(pluginDir, { allowJsManifest });\n\n if (resolved) {\n const pkg = `./${path.relative(cwd, pluginDir)}`;\n try {\n const pluginEntry = await loadPluginEntry(\n resolved,\n pkg,\n allowJsManifest,\n );\n if (pluginEntry) plugins[pluginEntry[0]] = pluginEntry[1];\n } catch (error) {\n console.warn(\n `Warning: Failed to load manifest at ${resolved.path}:`,\n error instanceof Error ? error.message : error,\n );\n }\n continue;\n }\n\n Object.assign(\n plugins,\n await scanPluginsDirRecursive(pluginDir, cwd, allowJsManifest, depth + 1),\n );\n }\n\n return plugins;\n}\n\n/**\n * Scan a directory for plugin manifests in direct subdirectories only.\n * Each subdirectory may contain manifest.json or manifest.js.\n * Used with --plugins-dir to discover plugins from source instead of node_modules.\n *\n * @param dir - Absolute path to the directory containing plugin subdirectories\n * @param packageName - Package name to assign to discovered plugins (used when cwd is not set)\n * @param cwd - When set, each plugin's package is set to ./<path from cwd to plugin subdir>, e.g. 
./server/my-plugin\n * @returns Map of plugin name to template plugin entry\n */\nasync function scanPluginsDir(\n dir: string,\n packageName: string,\n allowJsManifest: boolean,\n cwd?: string,\n): Promise<TemplatePluginsManifest[\"plugins\"]> {\n const plugins: TemplatePluginsManifest[\"plugins\"] = {};\n\n if (!fs.existsSync(dir)) return plugins;\n\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n\n const pluginDir = path.join(dir, entry.name);\n const resolved = resolveManifestInDir(pluginDir, { allowJsManifest });\n if (!resolved) continue;\n\n const pkg =\n cwd !== undefined ? `./${path.relative(cwd, pluginDir)}` : packageName;\n\n try {\n const pluginEntry = await loadPluginEntry(resolved, pkg, allowJsManifest);\n if (pluginEntry) plugins[pluginEntry[0]] = pluginEntry[1];\n } catch (error) {\n console.warn(\n `Warning: Failed to load manifest at ${resolved.path}:`,\n error instanceof Error ? error.message : error,\n );\n }\n }\n\n return plugins;\n}\n\n/**\n * Write (or preview) the template plugins manifest to disk.\n */\nfunction writeManifest(\n outputPath: string,\n { plugins }: { plugins: TemplatePluginsManifest[\"plugins\"] },\n options: { write?: boolean; silent?: boolean },\n) {\n const templateManifest: TemplatePluginsManifest = {\n $schema:\n \"https://databricks.github.io/appkit/schemas/template-plugins.schema.json\",\n version: \"1.0\",\n plugins,\n };\n\n if (options.write) {\n fs.writeFileSync(\n outputPath,\n `${JSON.stringify(templateManifest, null, 2)}\\n`,\n );\n if (!options.silent) {\n console.log(`\\n✓ Wrote ${outputPath}`);\n }\n } else if (!options.silent) {\n console.log(\"\\nTo write the manifest, run:\");\n console.log(\" npx appkit plugin sync --write\\n\");\n console.log(\"Preview:\");\n console.log(\"─\".repeat(60));\n console.log(JSON.stringify(templateManifest, null, 2));\n console.log(\"─\".repeat(60));\n }\n}\n\n/**\n * Run the plugin sync 
command.\n * Parses the server entry file to discover which packages to scan for plugin\n * manifests, then marks plugins that are actually used in the `plugins: [...]`\n * array as requiredByTemplate.\n */\nasync function runPluginsSync(options: {\n write?: boolean;\n output?: string;\n silent?: boolean;\n requirePlugins?: string;\n pluginsDir?: string;\n packageName?: string;\n localPluginsDir?: string;\n allowJsManifest?: boolean;\n}): Promise<void> {\n const cwd = process.cwd();\n const allowJsManifest = Boolean(options.allowJsManifest);\n const outputPath = path.resolve(cwd, options.output || \"appkit.plugins.json\");\n\n // Security: Reject output paths that escape the project root\n if (!isWithinDirectory(outputPath, cwd)) {\n console.error(\n `Error: Output path \"${options.output}\" resolves outside the project directory.`,\n );\n process.exit(1);\n }\n\n if (!options.silent) {\n console.log(\"Scanning for AppKit plugins...\\n\");\n if (allowJsManifest) {\n console.warn(\n \"Warning: --allow-js-manifest executes manifest.js/manifest.cjs files. Only use with trusted code.\",\n );\n }\n }\n\n // Step 1: Parse server file to discover imports and plugin usages\n const serverFile = findServerFile(cwd);\n let serverImports: ParsedImport[] = [];\n let pluginUsages = new Set<string>();\n\n if (serverFile) {\n if (!options.silent) {\n const relativePath = path.relative(cwd, serverFile);\n console.log(`Server entry file: ${relativePath}`);\n }\n\n const content = fs.readFileSync(serverFile, \"utf-8\");\n const lang = serverFile.endsWith(\".tsx\") ? Lang.Tsx : Lang.TypeScript;\n const ast = parse(lang, content);\n const root = ast.root();\n\n serverImports = parseImports(root);\n pluginUsages = parsePluginUsages(root);\n } else if (!options.silent) {\n console.log(\n \"No server entry file found. 
Checked:\",\n SERVER_FILE_CANDIDATES.join(\", \"),\n );\n }\n\n // Step 2: Split imports into npm packages and local (relative) imports\n const npmImports = serverImports.filter(\n (i) => !i.source.startsWith(\".\") && !i.source.startsWith(\"/\"),\n );\n const localImports = serverImports.filter(\n (i) => i.source.startsWith(\".\") || i.source.startsWith(\"/\"),\n );\n\n // Step 3: Scan for plugin manifests (--plugins-dir or node_modules)\n const plugins: TemplatePluginsManifest[\"plugins\"] = {};\n\n if (options.pluginsDir) {\n const resolvedDir = path.resolve(cwd, options.pluginsDir);\n const pkgName = options.packageName ?? \"@databricks/appkit\";\n if (!options.silent) {\n console.log(`Scanning plugins directory: ${options.pluginsDir}`);\n }\n Object.assign(\n plugins,\n await scanPluginsDir(resolvedDir, pkgName, allowJsManifest),\n );\n } else {\n const npmPackages = new Set([\n ...KNOWN_PLUGIN_PACKAGES,\n ...npmImports.map((i) => i.source),\n ]);\n Object.assign(\n plugins,\n await scanForPlugins(cwd, npmPackages, allowJsManifest),\n );\n }\n\n // Step 4: Discover local plugin manifests from relative imports\n if (serverFile && localImports.length > 0) {\n const serverFileDir = path.dirname(serverFile);\n const localPlugins = await discoverLocalPlugins(\n localImports,\n serverFileDir,\n cwd,\n allowJsManifest,\n );\n Object.assign(plugins, localPlugins);\n }\n\n // Step 4b: Discover local plugins from conventional directory (or --local-plugins-dir).\n // These are included even when not imported in the server.\n const localDirsToScan: string[] = options.localPluginsDir\n ? 
[options.localPluginsDir]\n : CONVENTIONAL_LOCAL_PLUGIN_DIRS.filter((d) =>\n fs.existsSync(path.join(cwd, d)),\n );\n for (const dir of localDirsToScan) {\n const resolvedDir = path.resolve(cwd, dir);\n if (!fs.existsSync(resolvedDir)) continue;\n if (!options.silent) {\n console.log(`Scanning local plugins directory: ${dir}`);\n }\n const discovered = await scanPluginsDirRecursive(\n resolvedDir,\n cwd,\n allowJsManifest,\n );\n for (const [name, entry] of Object.entries(discovered)) {\n if (!plugins[name]) plugins[name] = entry;\n }\n }\n\n const pluginCount = Object.keys(plugins).length;\n\n if (pluginCount === 0) {\n if (options.silent) {\n writeManifest(outputPath, { plugins: {} }, options);\n return;\n }\n console.log(\"No plugins found.\");\n if (options.pluginsDir) {\n console.log(\n `\\nNo manifest (${allowJsManifest ? \"manifest.json or manifest.js\" : \"manifest.json\"}) found in: ${options.pluginsDir}`,\n );\n } else {\n console.log(\"\\nMake sure you have plugin packages installed.\");\n }\n process.exit(1);\n }\n\n // Step 5: Mark plugins that are imported AND used in the plugins array as mandatory.\n // For npm imports, match by package name + plugin name.\n // For local imports, resolve both paths to absolute and compare.\n const serverFileDir = serverFile ? 
path.dirname(serverFile) : cwd;\n\n for (const imp of serverImports) {\n if (!pluginUsages.has(imp.name)) continue;\n\n const isLocal = imp.source.startsWith(\".\") || imp.source.startsWith(\"/\");\n let plugin: TemplatePlugin | undefined;\n\n if (isLocal) {\n // Resolve the import source to an absolute path from the server file directory\n const resolvedImportDir = path.resolve(serverFileDir, imp.source);\n plugin = Object.values(plugins).find((p) => {\n if (!p.package.startsWith(\".\")) return false;\n const resolvedPluginDir = path.resolve(cwd, p.package);\n return (\n resolvedPluginDir === resolvedImportDir && p.name === imp.originalName\n );\n });\n } else {\n // npm import: direct string comparison\n plugin = Object.values(plugins).find(\n (p) => p.package === imp.source && p.name === imp.originalName,\n );\n }\n\n if (plugin) {\n plugin.requiredByTemplate = true;\n }\n }\n\n // Step 6: Apply explicit --require-plugins overrides\n if (options.requirePlugins) {\n const explicitNames = options.requirePlugins\n .split(\",\")\n .map((s) => s.trim())\n .filter(Boolean);\n for (const name of explicitNames) {\n if (plugins[name]) {\n plugins[name].requiredByTemplate = true;\n } else if (!options.silent) {\n console.warn(\n `Warning: --require-plugins referenced \"${name}\" but no such plugin was discovered`,\n );\n }\n }\n }\n\n if (!options.silent) {\n console.log(`\\nFound ${pluginCount} plugin(s):`);\n for (const [name, manifest] of Object.entries(plugins)) {\n const resourceCount =\n manifest.resources.required.length + manifest.resources.optional.length;\n const resourceInfo =\n resourceCount > 0 ? ` [${resourceCount} resource(s)]` : \"\";\n const mandatoryTag = manifest.requiredByTemplate ? \" (mandatory)\" : \"\";\n console.log(\n ` ${manifest.requiredByTemplate ? 
\"●\" : \"○\"} ${manifest.displayName} (${name}) from ${manifest.package}${resourceInfo}${mandatoryTag}`,\n );\n }\n }\n\n writeManifest(outputPath, { plugins }, options);\n}\n\n/** Exported for testing: path boundary check, AST parsing, trust checks. */\nexport {\n isWithinDirectory,\n parseImports,\n parsePluginUsages,\n shouldAllowJsManifestForPackage,\n};\n\nexport const pluginsSyncCommand = new Command(\"sync\")\n .description(\n \"Sync plugin manifests from installed packages into appkit.plugins.json\",\n )\n .option(\"-w, --write\", \"Write the manifest file\")\n .option(\n \"-o, --output <path>\",\n \"Output file path (default: ./appkit.plugins.json)\",\n )\n .option(\n \"-s, --silent\",\n \"Suppress output and never exit with error (for use in predev/prebuild hooks)\",\n )\n .option(\n \"--require-plugins <names>\",\n \"Comma-separated plugin names to mark as requiredByTemplate (e.g. server,analytics)\",\n )\n .option(\n \"--plugins-dir <path>\",\n \"Scan this directory for plugin subdirectories with manifest.json (instead of node_modules)\",\n )\n .option(\n \"--package-name <name>\",\n \"Package name to assign to plugins found via --plugins-dir (default: @databricks/appkit)\",\n )\n .option(\n \"--local-plugins-dir <path>\",\n \"Also scan this directory for local plugin manifests (default: plugins, server)\",\n )\n .option(\n \"--allow-js-manifest\",\n \"Allow reading manifest.js/manifest.cjs (executes code; use only with trusted plugins)\",\n )\n .action((opts) =>\n runPluginsSync(opts).catch((err) => {\n console.error(err);\n process.exit(1);\n }),\n 
);\n"],"mappings":";;;;;;;;;;;;;;;;;AA4BA,SAAS,kBAAkB,UAAkB,UAA2B;CACtE,MAAM,eAAe,KAAK,QAAQ,SAAS;CAC3C,MAAM,mBAAmB,KAAK,QAAQ,SAAS;AAE/C,QACE,iBAAiB,oBACjB,aAAa,WAAW,GAAG,mBAAmB,KAAK,MAAM;;;;;;AAQ7D,SAAS,2BACP,KACA,YACuB;CACvB,MAAM,SAAS,iBAAiB,IAAI;AACpC,KAAI,OAAO,SAAS,OAAO,SAAU,QAAO,OAAO;AACnD,KAAI,OAAO,QAAQ,OACjB,SAAQ,KACN,wBAAwB,WAAW,8BAA8B,uBAAuB,OAAO,QAAQ,IAAI,GAC5G;AAEH,QAAO;;;AAIT,MAAM,iBAAiB;;;;;;AAOvB,eAAe,gBACb,UACA,KACA,iBAC0C;CAI1C,MAAM,WAAW,2BAHF,MAAM,qBAAqB,SAAS,MAAM,SAAS,MAAM,EACtE,iBACD,CAAC,EACkD,SAAS,KAAK;AAClE,KAAI,CAAC,YAAY,SAAS,OAAQ,QAAO;AAEzC,QAAO,CACL,SAAS,MACT;EACE,MAAM,SAAS;EACf,aAAa,SAAS;EACtB,aAAa,SAAS;EACtB,SAAS;EACT,WAAW,SAAS;EACpB,GAAI,SAAS,kBAAkB,EAC7B,gBAAgB,SAAS,gBAC1B;EACF,CACF;;;;;;AAOH,MAAM,wBAAwB,CAAC,qBAAqB;;;;;AAMpD,MAAM,yBAAyB,CAAC,oBAAoB,kBAAkB;;;;;;AAOtE,MAAM,iCAAiC,CAAC,WAAW,SAAS;;;;;;;AAQ5D,SAAS,eAAe,KAA4B;AAClD,MAAK,MAAM,aAAa,wBAAwB;EAC9C,MAAM,WAAW,KAAK,KAAK,KAAK,UAAU;AAC1C,MAAI,GAAG,WAAW,SAAS,CACzB,QAAO;;AAGX,QAAO;;;;;;;;;AAsBT,SAAS,aAAa,MAA8B;CAClD,MAAM,UAA0B,EAAE;CAGlC,MAAM,mBAAmB,KAAK,QAAQ,EACpC,MAAM,EAAE,MAAM,oBAAoB,EACnC,CAAC;AAEF,MAAK,MAAM,QAAQ,kBAAkB;EAEnC,MAAM,aAAa,KAAK,KAAK,EAAE,MAAM,EAAE,MAAM,UAAU,EAAE,CAAC;AAC1D,MAAI,CAAC,WAAY;EAGjB,MAAM,SAAS,WAAW,MAAM,CAAC,QAAQ,gBAAgB,GAAG;EAG5D,MAAM,eAAe,KAAK,KAAK,EAAE,MAAM,EAAE,MAAM,iBAAiB,EAAE,CAAC;AACnE,MAAI,CAAC,aAAc;EAGnB,MAAM,aAAa,aAAa,QAAQ,EACtC,MAAM,EAAE,MAAM,oBAAoB,EACnC,CAAC;AAEF,OAAK,MAAM,aAAa,YAAY;GAClC,MAAM,WAAW,UAAU,UAAU;AACrC,OAAI,SAAS,UAAU,GAAG;IAExB,MAAM,eAAe,SAAS,GAAG,MAAM;IACvC,MAAM,YAAY,SAAS,SAAS,SAAS,GAAG,MAAM;AACtD,YAAQ,KAAK;KAAE,MAAM;KAAW;KAAc;KAAQ,CAAC;UAClD;IAEL,MAAM,OAAO,UAAU,MAAM;AAC7B,YAAQ,KAAK;KAAE;KAAM,cAAc;KAAM;KAAQ,CAAC;;;;AAKxD,QAAO;;;;;;;;;;AAWT,SAAS,kBAAkB,MAA2B;CACpD,MAAM,4BAAY,IAAI,KAAa;CAGnC,MAAM,QAAQ,KAAK,QAAQ,EAAE,MAAM,EAAE,MAAM,QAAQ,EAAE,CAAC;AAEtD,MAAK,MAAM,QAAQ,OAAO;EAExB,MAAM,MAAM,KAAK,KAAK,EAAE,MAAM,EAAE,MAAM,uBAAuB,EAAE,CAAC;AAChE,MAAI,CAAC,OAAO,IAAI,MAAM,KAAK,UAAW;EAGtC,MAAM,YAAY,KAAK,KAAK,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,CAAC;AACxD
,MAAI,CAAC,UAAW;AAGhB,OAAK,MAAM,SAAS,UAAU,UAAU,CACtC,KAAI,MAAM,MAAM,KAAK,mBAAmB;GAEtC,MAAM,SAAS,MAAM,UAAU,CAAC;AAChC,OAAI,QAAQ,MAAM,KAAK,aACrB,WAAU,IAAI,OAAO,MAAM,CAAC;;;AAMpC,QAAO;;;;;AAMT,MAAM,qBAAqB;CAAC;CAAO;CAAQ;CAAO;CAAO;;;;;;;;;;;;;;;AAgBzD,SAAS,qBACP,cACA,eACA,iBACA,aACyB;CACzB,MAAM,WAAW,KAAK,QAAQ,eAAe,aAAa;CAG1D,MAAM,WAAW,eAAe;AAChC,KAAI,CAAC,kBAAkB,UAAU,SAAS,EAAE;AAC1C,UAAQ,KACN,6BAA6B,aAAa,4CAC3C;AACD,SAAO;;AAIT,KAAI,GAAG,WAAW,SAAS,IAAI,GAAG,SAAS,SAAS,CAAC,aAAa,CAChE,QAAO,qBAAqB,UAAU,EAAE,iBAAiB,CAAC;AAI5D,MAAK,MAAM,OAAO,oBAAoB;EACpC,MAAM,WAAW,GAAG,WAAW;AAC/B,MAAI,GAAG,WAAW,SAAS,IAAI,GAAG,SAAS,SAAS,CAAC,QAAQ,EAAE;GAC7D,MAAM,MAAM,KAAK,QAAQ,SAAS;AAClC,OAAI,CAAC,kBAAkB,KAAK,SAAS,CAAE,QAAO;AAC9C,UAAO,qBAAqB,KAAK,EAAE,iBAAiB,CAAC;;;AAKzD,MAAK,MAAM,OAAO,oBAAoB;EACpC,MAAM,YAAY,KAAK,KAAK,UAAU,QAAQ,MAAM;AACpD,MAAI,GAAG,WAAW,UAAU,CAC1B,QAAO,qBAAqB,UAAU,EAAE,iBAAiB,CAAC;;AAI9D,QAAO;;;;;;;;;;;AAYT,eAAe,qBACb,iBACA,eACA,KACA,iBAC6C;CAC7C,MAAM,UAA8C,EAAE;AAEtD,MAAK,MAAM,OAAO,iBAAiB;EACjC,MAAM,WAAW,qBACf,IAAI,QACJ,eACA,iBACA,IACD;AACD,MAAI,CAAC,SAAU;AAEf,MAAI;GAEF,MAAM,QAAQ,MAAM,gBAClB,UACA,KAHmB,KAAK,SAAS,KAAK,KAAK,QAAQ,SAAS,KAAK,CAAC,IAIlE,gBACD;AACD,OAAI,MAAO,SAAQ,MAAM,MAAM,MAAM;WAC9B,OAAO;AACd,WAAQ,KACN,uCAAuC,SAAS,KAAK,IACrD,iBAAiB,QAAQ,MAAM,UAAU,MAC1C;;;AAIL,QAAO;;;;;;;;;AAUT,eAAe,wBACb,aACA,iBAC2B;CAC3B,MAAM,aAAa,KAAK,KAAK,aAAa,QAAQ,UAAU;CAC5D,MAAM,YAA8B,EAAE;AAEtC,KAAI,CAAC,GAAG,WAAW,WAAW,CAC5B,QAAO;CAGT,MAAM,UAAU,GAAG,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC;AACnE,MAAK,MAAM,SAAS,SAAS;AAC3B,MAAI,CAAC,MAAM,aAAa,CAAE;EAC1B,MAAM,WAAW,qBAAqB,KAAK,KAAK,YAAY,MAAM,KAAK,EAAE,EACvE,iBACD,CAAC;AACF,MAAI,CAAC,SAAU;AAEf,MAAI;GAIF,MAAM,WAAW,2BAHF,MAAM,qBAAqB,SAAS,MAAM,SAAS,MAAM,EACtE,iBACD,CAAC,EACkD,SAAS,KAAK;AAClE,OAAI,SACF,WAAU,KAAK,SAAS;WAEnB,OAAO;AACd,WAAQ,KACN,uCAAuC,SAAS,KAAK,IACrD,iBAAiB,QAAQ,MAAM,UAAU,MAC1C;;;AAIL,QAAO;;;;;;;;;AAUT,eAAe,eACb,KACA,UACA,iBAC6C;CAC7C,MAAM,UAA8C,EAAE;AAEtD,MAAK,MAAM,eAAe,UAAU;EAClC,MAAM,cAAc,KAAK,KAAK,KAAK,gBAAgB,YAAY;AAC/D,MAAI,CAAC,GAAG,WAAW,YAA
Y,CAC7B;EAMF,MAAM,YAAY,MAAM,wBACtB,aAHA,mBAAmB,gCAAgC,YAAY,CAKhE;AACD,OAAK,MAAM,YAAY,WAAW;AAChC,OAAI,SAAS,OAAQ;AACrB,WAAQ,SAAS,QAAQ;IACvB,MAAM,SAAS;IACf,aAAa,SAAS;IACtB,aAAa,SAAS;IACtB,SAAS;IACT,WAAW,SAAS;IACpB,GAAI,SAAS,kBAAkB,EAC7B,gBAAgB,SAAS,gBAC1B;IACF;;;AAIL,QAAO;;;;;;;;AAST,eAAe,wBACb,KACA,KACA,iBACA,QAAQ,GACqC;CAC7C,MAAM,UAA8C,EAAE;AACtD,KAAI,CAAC,GAAG,WAAW,IAAI,IAAI,SAAS,eAAgB,QAAO;CAE3D,MAAM,UAAU,GAAG,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC;AAC5D,MAAK,MAAM,SAAS,SAAS;AAC3B,MAAI,CAAC,MAAM,aAAa,CAAE;EAE1B,MAAM,YAAY,KAAK,KAAK,KAAK,MAAM,KAAK;EAC5C,MAAM,WAAW,qBAAqB,WAAW,EAAE,iBAAiB,CAAC;AAErE,MAAI,UAAU;GACZ,MAAM,MAAM,KAAK,KAAK,SAAS,KAAK,UAAU;AAC9C,OAAI;IACF,MAAM,cAAc,MAAM,gBACxB,UACA,KACA,gBACD;AACD,QAAI,YAAa,SAAQ,YAAY,MAAM,YAAY;YAChD,OAAO;AACd,YAAQ,KACN,uCAAuC,SAAS,KAAK,IACrD,iBAAiB,QAAQ,MAAM,UAAU,MAC1C;;AAEH;;AAGF,SAAO,OACL,SACA,MAAM,wBAAwB,WAAW,KAAK,iBAAiB,QAAQ,EAAE,CAC1E;;AAGH,QAAO;;;;;;;;;;;;AAaT,eAAe,eACb,KACA,aACA,iBACA,KAC6C;CAC7C,MAAM,UAA8C,EAAE;AAEtD,KAAI,CAAC,GAAG,WAAW,IAAI,CAAE,QAAO;CAEhC,MAAM,UAAU,GAAG,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC;AAC5D,MAAK,MAAM,SAAS,SAAS;AAC3B,MAAI,CAAC,MAAM,aAAa,CAAE;EAE1B,MAAM,YAAY,KAAK,KAAK,KAAK,MAAM,KAAK;EAC5C,MAAM,WAAW,qBAAqB,WAAW,EAAE,iBAAiB,CAAC;AACrE,MAAI,CAAC,SAAU;EAEf,MAAM,MACJ,QAAQ,SAAY,KAAK,KAAK,SAAS,KAAK,UAAU,KAAK;AAE7D,MAAI;GACF,MAAM,cAAc,MAAM,gBAAgB,UAAU,KAAK,gBAAgB;AACzE,OAAI,YAAa,SAAQ,YAAY,MAAM,YAAY;WAChD,OAAO;AACd,WAAQ,KACN,uCAAuC,SAAS,KAAK,IACrD,iBAAiB,QAAQ,MAAM,UAAU,MAC1C;;;AAIL,QAAO;;;;;AAMT,SAAS,cACP,YACA,EAAE,WACF,SACA;CACA,MAAM,mBAA4C;EAChD,SACE;EACF,SAAS;EACT;EACD;AAED,KAAI,QAAQ,OAAO;AACjB,KAAG,cACD,YACA,GAAG,KAAK,UAAU,kBAAkB,MAAM,EAAE,CAAC,IAC9C;AACD,MAAI,CAAC,QAAQ,OACX,SAAQ,IAAI,aAAa,aAAa;YAE/B,CAAC,QAAQ,QAAQ;AAC1B,UAAQ,IAAI,gCAAgC;AAC5C,UAAQ,IAAI,qCAAqC;AACjD,UAAQ,IAAI,WAAW;AACvB,UAAQ,IAAI,IAAI,OAAO,GAAG,CAAC;AAC3B,UAAQ,IAAI,KAAK,UAAU,kBAAkB,MAAM,EAAE,CAAC;AACtD,UAAQ,IAAI,IAAI,OAAO,GAAG,CAAC;;;;;;;;;AAU/B,eAAe,eAAe,SASZ;CAChB,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,kBAAkB,QAAQ,QAAQ,gBAAgB;CACxD,MAAM,aAAa,KAAK,QAAQ,KAAK,Q
AAQ,UAAU,sBAAsB;AAG7E,KAAI,CAAC,kBAAkB,YAAY,IAAI,EAAE;AACvC,UAAQ,MACN,uBAAuB,QAAQ,OAAO,2CACvC;AACD,UAAQ,KAAK,EAAE;;AAGjB,KAAI,CAAC,QAAQ,QAAQ;AACnB,UAAQ,IAAI,mCAAmC;AAC/C,MAAI,gBACF,SAAQ,KACN,oGACD;;CAKL,MAAM,aAAa,eAAe,IAAI;CACtC,IAAI,gBAAgC,EAAE;CACtC,IAAI,+BAAe,IAAI,KAAa;AAEpC,KAAI,YAAY;AACd,MAAI,CAAC,QAAQ,QAAQ;GACnB,MAAM,eAAe,KAAK,SAAS,KAAK,WAAW;AACnD,WAAQ,IAAI,sBAAsB,eAAe;;EAGnD,MAAM,UAAU,GAAG,aAAa,YAAY,QAAQ;EAGpD,MAAM,OADM,MADC,WAAW,SAAS,OAAO,GAAG,KAAK,MAAM,KAAK,YACnC,QAAQ,CACf,MAAM;AAEvB,kBAAgB,aAAa,KAAK;AAClC,iBAAe,kBAAkB,KAAK;YAC7B,CAAC,QAAQ,OAClB,SAAQ,IACN,wCACA,uBAAuB,KAAK,KAAK,CAClC;CAIH,MAAM,aAAa,cAAc,QAC9B,MAAM,CAAC,EAAE,OAAO,WAAW,IAAI,IAAI,CAAC,EAAE,OAAO,WAAW,IAAI,CAC9D;CACD,MAAM,eAAe,cAAc,QAChC,MAAM,EAAE,OAAO,WAAW,IAAI,IAAI,EAAE,OAAO,WAAW,IAAI,CAC5D;CAGD,MAAM,UAA8C,EAAE;AAEtD,KAAI,QAAQ,YAAY;EACtB,MAAM,cAAc,KAAK,QAAQ,KAAK,QAAQ,WAAW;EACzD,MAAM,UAAU,QAAQ,eAAe;AACvC,MAAI,CAAC,QAAQ,OACX,SAAQ,IAAI,+BAA+B,QAAQ,aAAa;AAElE,SAAO,OACL,SACA,MAAM,eAAe,aAAa,SAAS,gBAAgB,CAC5D;QACI;EACL,MAAM,cAAc,IAAI,IAAI,CAC1B,GAAG,uBACH,GAAG,WAAW,KAAK,MAAM,EAAE,OAAO,CACnC,CAAC;AACF,SAAO,OACL,SACA,MAAM,eAAe,KAAK,aAAa,gBAAgB,CACxD;;AAIH,KAAI,cAAc,aAAa,SAAS,GAAG;EAEzC,MAAM,eAAe,MAAM,qBACzB,cAFoB,KAAK,QAAQ,WAAW,EAI5C,KACA,gBACD;AACD,SAAO,OAAO,SAAS,aAAa;;CAKtC,MAAM,kBAA4B,QAAQ,kBACtC,CAAC,QAAQ,gBAAgB,GACzB,+BAA+B,QAAQ,MACrC,GAAG,WAAW,KAAK,KAAK,KAAK,EAAE,CAAC,CACjC;AACL,MAAK,MAAM,OAAO,iBAAiB;EACjC,MAAM,cAAc,KAAK,QAAQ,KAAK,IAAI;AAC1C,MAAI,CAAC,GAAG,WAAW,YAAY,CAAE;AACjC,MAAI,CAAC,QAAQ,OACX,SAAQ,IAAI,qCAAqC,MAAM;EAEzD,MAAM,aAAa,MAAM,wBACvB,aACA,KACA,gBACD;AACD,OAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,WAAW,CACpD,KAAI,CAAC,QAAQ,MAAO,SAAQ,QAAQ;;CAIxC,MAAM,cAAc,OAAO,KAAK,QAAQ,CAAC;AAEzC,KAAI,gBAAgB,GAAG;AACrB,MAAI,QAAQ,QAAQ;AAClB,iBAAc,YAAY,EAAE,SAAS,EAAE,EAAE,EAAE,QAAQ;AACnD;;AAEF,UAAQ,IAAI,oBAAoB;AAChC,MAAI,QAAQ,WACV,SAAQ,IACN,kBAAkB,kBAAkB,iCAAiC,gBAAgB,cAAc,QAAQ,aAC5G;MAED,SAAQ,IAAI,kDAAkD;AAEhE,UAAQ,KAAK,EAAE;;CAMjB,MAAM,gBAAgB,aAAa,KAAK,QAAQ,WAAW,GAAG;AAE9D,MAAK,MAAM,OAAO,eAAe;AAC/B,MAAI,CAAC,a
AAa,IAAI,IAAI,KAAK,CAAE;EAEjC,MAAM,UAAU,IAAI,OAAO,WAAW,IAAI,IAAI,IAAI,OAAO,WAAW,IAAI;EACxE,IAAI;AAEJ,MAAI,SAAS;GAEX,MAAM,oBAAoB,KAAK,QAAQ,eAAe,IAAI,OAAO;AACjE,YAAS,OAAO,OAAO,QAAQ,CAAC,MAAM,MAAM;AAC1C,QAAI,CAAC,EAAE,QAAQ,WAAW,IAAI,CAAE,QAAO;AAEvC,WAD0B,KAAK,QAAQ,KAAK,EAAE,QAAQ,KAE9B,qBAAqB,EAAE,SAAS,IAAI;KAE5D;QAGF,UAAS,OAAO,OAAO,QAAQ,CAAC,MAC7B,MAAM,EAAE,YAAY,IAAI,UAAU,EAAE,SAAS,IAAI,aACnD;AAGH,MAAI,OACF,QAAO,qBAAqB;;AAKhC,KAAI,QAAQ,gBAAgB;EAC1B,MAAM,gBAAgB,QAAQ,eAC3B,MAAM,IAAI,CACV,KAAK,MAAM,EAAE,MAAM,CAAC,CACpB,OAAO,QAAQ;AAClB,OAAK,MAAM,QAAQ,cACjB,KAAI,QAAQ,MACV,SAAQ,MAAM,qBAAqB;WAC1B,CAAC,QAAQ,OAClB,SAAQ,KACN,0CAA0C,KAAK,qCAChD;;AAKP,KAAI,CAAC,QAAQ,QAAQ;AACnB,UAAQ,IAAI,WAAW,YAAY,aAAa;AAChD,OAAK,MAAM,CAAC,MAAM,aAAa,OAAO,QAAQ,QAAQ,EAAE;GACtD,MAAM,gBACJ,SAAS,UAAU,SAAS,SAAS,SAAS,UAAU,SAAS;GACnE,MAAM,eACJ,gBAAgB,IAAI,KAAK,cAAc,iBAAiB;GAC1D,MAAM,eAAe,SAAS,qBAAqB,iBAAiB;AACpE,WAAQ,IACN,KAAK,SAAS,qBAAqB,MAAM,IAAI,GAAG,SAAS,YAAY,IAAI,KAAK,SAAS,SAAS,UAAU,eAAe,eAC1H;;;AAIL,eAAc,YAAY,EAAE,SAAS,EAAE,QAAQ;;AAWjD,MAAa,qBAAqB,IAAI,QAAQ,OAAO,CAClD,YACC,yEACD,CACA,OAAO,eAAe,0BAA0B,CAChD,OACC,uBACA,oDACD,CACA,OACC,gBACA,+EACD,CACA,OACC,6BACA,qFACD,CACA,OACC,wBACA,6FACD,CACA,OACC,yBACA,0FACD,CACA,OACC,8BACA,iFACD,CACA,OACC,uBACA,wFACD,CACA,QAAQ,SACP,eAAe,KAAK,CAAC,OAAO,QAAQ;AAClC,SAAQ,MAAM,IAAI;AAClB,SAAQ,KAAK,EAAE;EACf,CACH"}
1
+ {"version":3,"file":"sync.js","names":[],"sources":["../../../../../src/cli/commands/plugin/sync/sync.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { Lang, parse, type SgNode } from \"@ast-grep/napi\";\nimport { Command } from \"commander\";\nimport {\n loadManifestFromFile,\n type ResolvedManifest,\n resolveManifestInDir,\n} from \"../manifest-resolve\";\nimport type {\n PluginManifest,\n TemplatePlugin,\n TemplatePluginsManifest,\n} from \"../manifest-types\";\nimport { shouldAllowJsManifestForPackage } from \"../trusted-js-manifest\";\nimport {\n formatValidationErrors,\n validateManifest,\n} from \"../validate/validate-manifest\";\n\n/**\n * Checks whether a resolved file path is within a given directory boundary.\n * Uses path.resolve + startsWith to prevent directory traversal.\n *\n * @param filePath - The path to check (will be resolved to absolute)\n * @param boundary - The directory that must contain filePath\n * @returns true if filePath is inside boundary (or equal to it)\n */\nfunction isWithinDirectory(filePath: string, boundary: string): boolean {\n const resolvedPath = path.resolve(filePath);\n const resolvedBoundary = path.resolve(boundary);\n // Append separator to avoid prefix false-positives (e.g. 
/foo-bar matching /foo)\n return (\n resolvedPath === resolvedBoundary ||\n resolvedPath.startsWith(`${resolvedBoundary}${path.sep}`)\n );\n}\n\n/**\n * Validates a parsed JSON object against the plugin-manifest JSON schema.\n * Returns the manifest if valid, or null and logs schema errors.\n */\nfunction validateManifestWithSchema(\n obj: unknown,\n sourcePath: string,\n): PluginManifest | null {\n const result = validateManifest(obj);\n if (result.valid && result.manifest) return result.manifest;\n if (result.errors?.length) {\n console.warn(\n `Warning: Manifest at ${sourcePath} failed schema validation:\\n${formatValidationErrors(result.errors, obj)}`,\n );\n }\n return null;\n}\n\n/** Safety limit for recursive directory scanning to prevent runaway traversal. */\nconst MAX_SCAN_DEPTH = 5;\n\n/**\n * Load and validate a resolved manifest, returning a TemplatePlugin entry or null.\n * Centralises the resolve → load → validate → build-entry pipeline used by\n * multiple discovery functions.\n */\nasync function loadPluginEntry(\n resolved: ResolvedManifest,\n pkg: string,\n allowJsManifest: boolean,\n): Promise<[string, TemplatePlugin] | null> {\n const parsed = await loadManifestFromFile(resolved.path, resolved.type, {\n allowJsManifest,\n });\n const manifest = validateManifestWithSchema(parsed, resolved.path);\n if (!manifest || manifest.hidden) return null;\n\n return [\n manifest.name,\n {\n name: manifest.name,\n displayName: manifest.displayName,\n description: manifest.description,\n package: pkg,\n resources: manifest.resources,\n ...(manifest.onSetupMessage && {\n onSetupMessage: manifest.onSetupMessage,\n }),\n },\n ];\n}\n\n/**\n * Known packages that may contain AppKit plugins.\n * Always scanned for manifests, even if not imported in the server file.\n */\nconst KNOWN_PLUGIN_PACKAGES = [\"@databricks/appkit\"];\n\n/**\n * Candidate paths for the server entry file, relative to cwd.\n * Checked in order; the first that exists is used.\n */\nconst 
SERVER_FILE_CANDIDATES = [\"server/server.ts\", \"server/index.ts\"];\n\n/**\n * Conventional directories to scan for local plugin manifests when\n * --local-plugins-dir is not set. Checked in order; each that exists is scanned.\n * Plugins found here are added to the manifest even if not imported in the server.\n */\nconst CONVENTIONAL_LOCAL_PLUGIN_DIRS = [\"plugins\", \"server\"];\n\n/**\n * Find the server entry file by checking candidate paths in order.\n *\n * @param cwd - Current working directory\n * @returns Absolute path to the server file, or null if none found\n */\nfunction findServerFile(cwd: string): string | null {\n for (const candidate of SERVER_FILE_CANDIDATES) {\n const fullPath = path.join(cwd, candidate);\n if (fs.existsSync(fullPath)) {\n return fullPath;\n }\n }\n return null;\n}\n\n/**\n * Represents a single named import extracted from the server file.\n */\ninterface ParsedImport {\n /** The imported name (or local alias if renamed) */\n name: string;\n /** The original exported name (differs from name when using `import { foo as bar }`) */\n originalName: string;\n /** The module specifier (package name or relative path) */\n source: string;\n}\n\n/**\n * Extract all named imports from the AST root using structural node traversal.\n * Handles single/double quotes, multiline imports, and aliased imports.\n *\n * @param root - AST root node\n * @returns Array of parsed imports with name, original name, and source\n */\nfunction parseImports(root: SgNode): ParsedImport[] {\n const imports: ParsedImport[] = [];\n\n // Find all import_statement nodes in the AST\n const importStatements = root.findAll({\n rule: { kind: \"import_statement\" },\n });\n\n for (const stmt of importStatements) {\n // Extract the module specifier (the string node, e.g. 
'@databricks/appkit')\n const sourceNode = stmt.find({ rule: { kind: \"string\" } });\n if (!sourceNode) continue;\n\n // Strip surrounding quotes from the string node text\n const source = sourceNode.text().replace(/^['\"]|['\"]$/g, \"\");\n\n // Find named_imports block: { createApp, analytics, server }\n const namedImports = stmt.find({ rule: { kind: \"named_imports\" } });\n if (!namedImports) continue;\n\n // Extract each import_specifier\n const specifiers = namedImports.findAll({\n rule: { kind: \"import_specifier\" },\n });\n\n for (const specifier of specifiers) {\n const children = specifier.children();\n if (children.length >= 3) {\n // Aliased import: `foo as bar` — children are [name, \"as\", alias]\n const originalName = children[0].text();\n const localName = children[children.length - 1].text();\n imports.push({ name: localName, originalName, source });\n } else {\n // Simple import: `foo`\n const name = specifier.text();\n imports.push({ name, originalName: name, source });\n }\n }\n }\n\n return imports;\n}\n\n/**\n * Extract local names of plugins actually used in the `plugins: [...]` array\n * passed to `createApp()`. 
Uses structural AST traversal to find `pair` nodes\n * with key \"plugins\" and array values containing call expressions.\n *\n * @param root - AST root node\n * @returns Set of local variable names used as plugin calls in the plugins array\n */\nfunction parsePluginUsages(root: SgNode): Set<string> {\n const usedNames = new Set<string>();\n\n // Find all property pairs in the AST\n const pairs = root.findAll({ rule: { kind: \"pair\" } });\n\n for (const pair of pairs) {\n // Check if the property key is \"plugins\"\n const key = pair.find({ rule: { kind: \"property_identifier\" } });\n if (!key || key.text() !== \"plugins\") continue;\n\n // Find the array value\n const arrayNode = pair.find({ rule: { kind: \"array\" } });\n if (!arrayNode) continue;\n\n // Iterate direct children of the array to find call expressions\n for (const child of arrayNode.children()) {\n if (child.kind() === \"call_expression\") {\n // The callee is the first child (the identifier being called)\n const callee = child.children()[0];\n if (callee?.kind() === \"identifier\") {\n usedNames.add(callee.text());\n }\n }\n }\n }\n\n return usedNames;\n}\n\n/**\n * File extensions to try when resolving a relative import to a file path.\n */\nconst RESOLVE_EXTENSIONS = [\".ts\", \".tsx\", \".js\", \".jsx\"];\n\n/**\n * Resolve a relative import source to the plugin directory containing a manifest\n * (manifest.json or manifest.js). Follows the convention that plugins live in\n * their own directory with a manifest file.\n *\n * Resolution strategy:\n * 1. If the import path is a directory, look for manifest.json/js in it\n * 2. If the import path + extension is a file, look for manifest in its parent directory\n * 3. If the import path is a directory with an index file, look for manifest in that directory\n *\n * @param importSource - The relative import specifier (e.g. 
\"./plugins/my-plugin\")\n * @param serverFileDir - Absolute path to the directory containing the server file\n * @returns Resolved manifest file path and type, or null if not found\n */\nfunction resolveLocalManifest(\n importSource: string,\n serverFileDir: string,\n allowJsManifest: boolean,\n projectRoot?: string,\n): ResolvedManifest | null {\n const resolved = path.resolve(serverFileDir, importSource);\n\n // Security: Reject paths that escape the project root\n const boundary = projectRoot || serverFileDir;\n if (!isWithinDirectory(resolved, boundary)) {\n console.warn(\n `Warning: Skipping import \"${importSource}\" — resolves outside the project directory`,\n );\n return null;\n }\n\n // Case 1: Import path is a directory\n if (fs.existsSync(resolved) && fs.statSync(resolved).isDirectory()) {\n return resolveManifestInDir(resolved, { allowJsManifest });\n }\n\n // Case 2: Import path + extension resolves to a file — manifest in parent dir\n for (const ext of RESOLVE_EXTENSIONS) {\n const filePath = `${resolved}${ext}`;\n if (fs.existsSync(filePath) && fs.statSync(filePath).isFile()) {\n const dir = path.dirname(filePath);\n if (!isWithinDirectory(dir, boundary)) return null;\n return resolveManifestInDir(dir, { allowJsManifest });\n }\n }\n\n // Case 3: Import path is a directory with an index file\n for (const ext of RESOLVE_EXTENSIONS) {\n const indexPath = path.join(resolved, `index${ext}`);\n if (fs.existsSync(indexPath)) {\n return resolveManifestInDir(resolved, { allowJsManifest });\n }\n }\n\n return null;\n}\n\n/**\n * Discover plugin manifests from local (relative) imports in the server file.\n * Resolves each relative import to a directory and loads manifest.json or manifest.js.\n *\n * @param relativeImports - Parsed imports with relative sources (starting with . 
or /)\n * @param serverFileDir - Absolute path to the directory containing the server file\n * @param cwd - Current working directory (for computing relative paths in output)\n * @returns Map of plugin name to template plugin entry for local plugins\n */\nasync function discoverLocalPlugins(\n relativeImports: ParsedImport[],\n serverFileDir: string,\n cwd: string,\n allowJsManifest: boolean,\n): Promise<TemplatePluginsManifest[\"plugins\"]> {\n const plugins: TemplatePluginsManifest[\"plugins\"] = {};\n\n for (const imp of relativeImports) {\n const resolved = resolveLocalManifest(\n imp.source,\n serverFileDir,\n allowJsManifest,\n cwd,\n );\n if (!resolved) continue;\n\n try {\n const relativePath = path.relative(cwd, path.dirname(resolved.path));\n const entry = await loadPluginEntry(\n resolved,\n `./${relativePath}`,\n allowJsManifest,\n );\n if (entry) plugins[entry[0]] = entry[1];\n } catch (error) {\n console.warn(\n `Warning: Failed to load manifest at ${resolved.path}:`,\n error instanceof Error ? 
error.message : error,\n );\n }\n }\n\n return plugins;\n}\n\n/**\n * Discover plugin manifests from a package's dist folder.\n * Looks for manifest.json or manifest.js in dist/plugins/{plugin-name}/ directories.\n *\n * @param packagePath - Path to the package in node_modules\n * @returns Array of plugin manifests found in the package\n */\nasync function discoverPluginManifests(\n packagePath: string,\n allowJsManifest: boolean,\n): Promise<PluginManifest[]> {\n const pluginsDir = path.join(packagePath, \"dist\", \"plugins\");\n const manifests: PluginManifest[] = [];\n\n if (!fs.existsSync(pluginsDir)) {\n return manifests;\n }\n\n const entries = fs.readdirSync(pluginsDir, { withFileTypes: true });\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n const resolved = resolveManifestInDir(path.join(pluginsDir, entry.name), {\n allowJsManifest,\n });\n if (!resolved) continue;\n\n try {\n const parsed = await loadManifestFromFile(resolved.path, resolved.type, {\n allowJsManifest,\n });\n const manifest = validateManifestWithSchema(parsed, resolved.path);\n if (manifest) {\n manifests.push(manifest);\n }\n } catch (error) {\n console.warn(\n `Warning: Failed to load manifest at ${resolved.path}:`,\n error instanceof Error ? 
error.message : error,\n );\n }\n }\n\n return manifests;\n}\n\n/**\n * Scan node_modules for packages with plugin manifests.\n *\n * @param cwd - Current working directory to search from\n * @param packages - Set of npm package names to scan for plugin manifests\n * @returns Map of plugin name to template plugin entry\n */\nasync function scanForPlugins(\n cwd: string,\n packages: Iterable<string>,\n allowJsManifest: boolean,\n): Promise<TemplatePluginsManifest[\"plugins\"]> {\n const plugins: TemplatePluginsManifest[\"plugins\"] = {};\n\n for (const packageName of packages) {\n const packagePath = path.join(cwd, \"node_modules\", packageName);\n if (!fs.existsSync(packagePath)) {\n continue;\n }\n\n const allowJsForPackage =\n allowJsManifest || shouldAllowJsManifestForPackage(packageName);\n\n const manifests = await discoverPluginManifests(\n packagePath,\n allowJsForPackage,\n );\n for (const manifest of manifests) {\n if (manifest.hidden) continue;\n plugins[manifest.name] = {\n name: manifest.name,\n displayName: manifest.displayName,\n description: manifest.description,\n package: packageName,\n resources: manifest.resources,\n ...(manifest.onSetupMessage && {\n onSetupMessage: manifest.onSetupMessage,\n }),\n } satisfies TemplatePlugin;\n }\n }\n\n return plugins;\n}\n\n/**\n * Recursively scan a directory for plugin manifests. Any directory that\n * contains manifest.json or manifest.js is treated as a plugin root; we do\n * not descend into that directory's children. 
Used for local plugins discovery\n * so nested paths like server/plugins/category/my-plugin are found.\n */\nasync function scanPluginsDirRecursive(\n dir: string,\n cwd: string,\n allowJsManifest: boolean,\n depth = 0,\n): Promise<TemplatePluginsManifest[\"plugins\"]> {\n const plugins: TemplatePluginsManifest[\"plugins\"] = {};\n if (!fs.existsSync(dir) || depth >= MAX_SCAN_DEPTH) return plugins;\n\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n\n const pluginDir = path.join(dir, entry.name);\n const resolved = resolveManifestInDir(pluginDir, { allowJsManifest });\n\n if (resolved) {\n const pkg = `./${path.relative(cwd, pluginDir)}`;\n try {\n const pluginEntry = await loadPluginEntry(\n resolved,\n pkg,\n allowJsManifest,\n );\n if (pluginEntry) plugins[pluginEntry[0]] = pluginEntry[1];\n } catch (error) {\n console.warn(\n `Warning: Failed to load manifest at ${resolved.path}:`,\n error instanceof Error ? error.message : error,\n );\n }\n continue;\n }\n\n Object.assign(\n plugins,\n await scanPluginsDirRecursive(pluginDir, cwd, allowJsManifest, depth + 1),\n );\n }\n\n return plugins;\n}\n\n/**\n * Scan a directory for plugin manifests in direct subdirectories only.\n * Each subdirectory may contain manifest.json or manifest.js.\n * Used with --plugins-dir to discover plugins from source instead of node_modules.\n *\n * @param dir - Absolute path to the directory containing plugin subdirectories\n * @param packageName - Package name to assign to discovered plugins (used when cwd is not set)\n * @param cwd - When set, each plugin's package is set to ./<path from cwd to plugin subdir>, e.g. 
./server/my-plugin\n * @returns Map of plugin name to template plugin entry\n */\nasync function scanPluginsDir(\n dir: string,\n packageName: string,\n allowJsManifest: boolean,\n cwd?: string,\n): Promise<TemplatePluginsManifest[\"plugins\"]> {\n const plugins: TemplatePluginsManifest[\"plugins\"] = {};\n\n if (!fs.existsSync(dir)) return plugins;\n\n const entries = fs.readdirSync(dir, { withFileTypes: true });\n for (const entry of entries) {\n if (!entry.isDirectory()) continue;\n\n const pluginDir = path.join(dir, entry.name);\n const resolved = resolveManifestInDir(pluginDir, { allowJsManifest });\n if (!resolved) continue;\n\n const pkg =\n cwd !== undefined ? `./${path.relative(cwd, pluginDir)}` : packageName;\n\n try {\n const pluginEntry = await loadPluginEntry(resolved, pkg, allowJsManifest);\n if (pluginEntry) plugins[pluginEntry[0]] = pluginEntry[1];\n } catch (error) {\n console.warn(\n `Warning: Failed to load manifest at ${resolved.path}:`,\n error instanceof Error ? error.message : error,\n );\n }\n }\n\n return plugins;\n}\n\n/**\n * Write (or preview) the template plugins manifest to disk.\n */\nfunction writeManifest(\n outputPath: string,\n { plugins }: { plugins: TemplatePluginsManifest[\"plugins\"] },\n options: { write?: boolean; silent?: boolean; json?: boolean },\n) {\n const templateManifest: TemplatePluginsManifest = {\n $schema:\n \"https://databricks.github.io/appkit/schemas/template-plugins.schema.json\",\n version: \"1.0\",\n plugins,\n };\n\n const serialized = JSON.stringify(templateManifest, null, 2);\n\n if (options.json) {\n console.log(serialized);\n }\n\n if (options.write) {\n fs.writeFileSync(outputPath, `${serialized}\\n`);\n if (!options.silent && !options.json) {\n console.log(`\\n✓ Wrote ${outputPath}`);\n }\n } else if (!options.silent && !options.json) {\n console.log(\"\\nTo write the manifest, run:\");\n console.log(\" npx appkit plugin sync --write\\n\");\n console.log(\"Preview:\");\n 
console.log(\"─\".repeat(60));\n console.log(serialized);\n console.log(\"─\".repeat(60));\n }\n}\n\n/**\n * Run the plugin sync command.\n * Parses the server entry file to discover which packages to scan for plugin\n * manifests, then marks plugins that are actually used in the `plugins: [...]`\n * array as requiredByTemplate.\n */\nasync function runPluginsSync(options: {\n write?: boolean;\n output?: string;\n silent?: boolean;\n json?: boolean;\n requirePlugins?: string;\n pluginsDir?: string;\n packageName?: string;\n localPluginsDir?: string;\n allowJsManifest?: boolean;\n}): Promise<void> {\n const cwd = process.cwd();\n const allowJsManifest = Boolean(options.allowJsManifest);\n const outputPath = path.resolve(cwd, options.output || \"appkit.plugins.json\");\n\n // Security: Reject output paths that escape the project root\n if (!isWithinDirectory(outputPath, cwd)) {\n console.error(\n `Error: Output path \"${options.output}\" resolves outside the project directory.`,\n );\n process.exit(1);\n }\n\n if (!options.silent && !options.json) {\n console.log(\"Scanning for AppKit plugins...\\n\");\n if (allowJsManifest) {\n console.warn(\n \"Warning: --allow-js-manifest executes manifest.js/manifest.cjs files. Only use with trusted code.\",\n );\n }\n }\n\n // Step 1: Parse server file to discover imports and plugin usages\n const serverFile = findServerFile(cwd);\n let serverImports: ParsedImport[] = [];\n let pluginUsages = new Set<string>();\n\n if (serverFile) {\n if (!options.silent && !options.json) {\n const relativePath = path.relative(cwd, serverFile);\n console.log(`Server entry file: ${relativePath}`);\n }\n\n const content = fs.readFileSync(serverFile, \"utf-8\");\n const lang = serverFile.endsWith(\".tsx\") ? 
Lang.Tsx : Lang.TypeScript;\n const ast = parse(lang, content);\n const root = ast.root();\n\n serverImports = parseImports(root);\n pluginUsages = parsePluginUsages(root);\n } else if (!options.silent && !options.json) {\n console.log(\n \"No server entry file found. Checked:\",\n SERVER_FILE_CANDIDATES.join(\", \"),\n );\n }\n\n // Step 2: Split imports into npm packages and local (relative) imports\n const npmImports = serverImports.filter(\n (i) => !i.source.startsWith(\".\") && !i.source.startsWith(\"/\"),\n );\n const localImports = serverImports.filter(\n (i) => i.source.startsWith(\".\") || i.source.startsWith(\"/\"),\n );\n\n // Step 3: Scan for plugin manifests (--plugins-dir or node_modules)\n const plugins: TemplatePluginsManifest[\"plugins\"] = {};\n\n if (options.pluginsDir) {\n const resolvedDir = path.resolve(cwd, options.pluginsDir);\n const pkgName = options.packageName ?? \"@databricks/appkit\";\n if (!options.silent && !options.json) {\n console.log(`Scanning plugins directory: ${options.pluginsDir}`);\n }\n Object.assign(\n plugins,\n await scanPluginsDir(resolvedDir, pkgName, allowJsManifest),\n );\n } else {\n const npmPackages = new Set([\n ...KNOWN_PLUGIN_PACKAGES,\n ...npmImports.map((i) => i.source),\n ]);\n Object.assign(\n plugins,\n await scanForPlugins(cwd, npmPackages, allowJsManifest),\n );\n }\n\n // Step 4: Discover local plugin manifests from relative imports\n if (serverFile && localImports.length > 0) {\n const serverFileDir = path.dirname(serverFile);\n const localPlugins = await discoverLocalPlugins(\n localImports,\n serverFileDir,\n cwd,\n allowJsManifest,\n );\n Object.assign(plugins, localPlugins);\n }\n\n // Step 4b: Discover local plugins from conventional directory (or --local-plugins-dir).\n // These are included even when not imported in the server.\n const localDirsToScan: string[] = options.localPluginsDir\n ? 
[options.localPluginsDir]\n : CONVENTIONAL_LOCAL_PLUGIN_DIRS.filter((d) =>\n fs.existsSync(path.join(cwd, d)),\n );\n for (const dir of localDirsToScan) {\n const resolvedDir = path.resolve(cwd, dir);\n if (!fs.existsSync(resolvedDir)) continue;\n if (!options.silent && !options.json) {\n console.log(`Scanning local plugins directory: ${dir}`);\n }\n const discovered = await scanPluginsDirRecursive(\n resolvedDir,\n cwd,\n allowJsManifest,\n );\n for (const [name, entry] of Object.entries(discovered)) {\n if (!plugins[name]) plugins[name] = entry;\n }\n }\n\n const pluginCount = Object.keys(plugins).length;\n\n if (pluginCount === 0) {\n if (options.silent || options.json) {\n writeManifest(outputPath, { plugins: {} }, options);\n if (options.silent) return;\n process.exit(1);\n }\n console.log(\"No plugins found.\");\n if (options.pluginsDir) {\n console.log(\n `\\nNo manifest (${allowJsManifest ? \"manifest.json or manifest.js\" : \"manifest.json\"}) found in: ${options.pluginsDir}`,\n );\n } else {\n console.log(\n \"\\nMake sure you have plugin packages installed, or specify a directory:\",\n );\n console.log(\" appkit plugin sync --plugins-dir <path>\");\n }\n process.exit(1);\n }\n\n // Step 5: Mark plugins that are imported AND used in the plugins array as mandatory.\n // For npm imports, match by package name + plugin name.\n // For local imports, resolve both paths to absolute and compare.\n const serverFileDir = serverFile ? 
path.dirname(serverFile) : cwd;\n\n for (const imp of serverImports) {\n if (!pluginUsages.has(imp.name)) continue;\n\n const isLocal = imp.source.startsWith(\".\") || imp.source.startsWith(\"/\");\n let plugin: TemplatePlugin | undefined;\n\n if (isLocal) {\n // Resolve the import source to an absolute path from the server file directory\n const resolvedImportDir = path.resolve(serverFileDir, imp.source);\n plugin = Object.values(plugins).find((p) => {\n if (!p.package.startsWith(\".\")) return false;\n const resolvedPluginDir = path.resolve(cwd, p.package);\n return (\n resolvedPluginDir === resolvedImportDir && p.name === imp.originalName\n );\n });\n } else {\n // npm import: direct string comparison\n plugin = Object.values(plugins).find(\n (p) => p.package === imp.source && p.name === imp.originalName,\n );\n }\n\n if (plugin) {\n plugin.requiredByTemplate = true;\n }\n }\n\n // Step 6: Apply explicit --require-plugins overrides\n if (options.requirePlugins) {\n const explicitNames = options.requirePlugins\n .split(\",\")\n .map((s) => s.trim())\n .filter(Boolean);\n for (const name of explicitNames) {\n if (plugins[name]) {\n plugins[name].requiredByTemplate = true;\n } else if (!options.silent) {\n console.warn(\n `Warning: --require-plugins referenced \"${name}\" but no such plugin was discovered`,\n );\n }\n }\n }\n\n if (!options.silent && !options.json) {\n console.log(`\\nFound ${pluginCount} plugin(s):`);\n for (const [name, manifest] of Object.entries(plugins)) {\n const resourceCount =\n manifest.resources.required.length + manifest.resources.optional.length;\n const resourceInfo =\n resourceCount > 0 ? ` [${resourceCount} resource(s)]` : \"\";\n const mandatoryTag = manifest.requiredByTemplate ? \" (mandatory)\" : \"\";\n console.log(\n ` ${manifest.requiredByTemplate ? 
\"●\" : \"○\"} ${manifest.displayName} (${name}) from ${manifest.package}${resourceInfo}${mandatoryTag}`,\n );\n }\n }\n\n writeManifest(outputPath, { plugins }, options);\n}\n\n/** Exported for testing: path boundary check, AST parsing, trust checks. */\nexport {\n isWithinDirectory,\n parseImports,\n parsePluginUsages,\n shouldAllowJsManifestForPackage,\n};\n\nexport const pluginsSyncCommand = new Command(\"sync\")\n .description(\n \"Sync plugin manifests from installed packages into appkit.plugins.json\",\n )\n .option(\"-w, --write\", \"Write the manifest file\")\n .option(\n \"-o, --output <path>\",\n \"Output file path (default: ./appkit.plugins.json)\",\n )\n .option(\n \"-s, --silent\",\n \"Suppress output and never exit with error (for use in predev/prebuild hooks)\",\n )\n .option(\n \"--require-plugins <names>\",\n \"Comma-separated plugin names to mark as requiredByTemplate (e.g. server,analytics)\",\n )\n .option(\n \"--plugins-dir <path>\",\n \"Scan this directory for plugin subdirectories with manifest.json (instead of node_modules)\",\n )\n .option(\n \"--package-name <name>\",\n \"Package name to assign to plugins found via --plugins-dir (default: @databricks/appkit)\",\n )\n .option(\n \"--local-plugins-dir <path>\",\n \"Also scan this directory for local plugin manifests (default: plugins, server)\",\n )\n .option(\n \"--allow-js-manifest\",\n \"Allow reading manifest.js/manifest.cjs (executes code; use only with trusted plugins)\",\n )\n .option(\"--json\", \"Output manifest as JSON to stdout\")\n .addHelpText(\n \"after\",\n `\nExamples:\n $ appkit plugin sync\n $ appkit plugin sync --write\n $ appkit plugin sync --write --require-plugins server,analytics\n $ appkit plugin sync --write --plugins-dir src/plugins --package-name @my/pkg\n $ appkit plugin sync --json\n $ appkit plugin sync --silent`,\n )\n .action((opts) =>\n runPluginsSync(opts).catch((err) => {\n console.error(err);\n process.exit(1);\n }),\n 
);\n"],"mappings":";;;;;;;;;;;;;;;;;AA4BA,SAAS,kBAAkB,UAAkB,UAA2B;CACtE,MAAM,eAAe,KAAK,QAAQ,SAAS;CAC3C,MAAM,mBAAmB,KAAK,QAAQ,SAAS;AAE/C,QACE,iBAAiB,oBACjB,aAAa,WAAW,GAAG,mBAAmB,KAAK,MAAM;;;;;;AAQ7D,SAAS,2BACP,KACA,YACuB;CACvB,MAAM,SAAS,iBAAiB,IAAI;AACpC,KAAI,OAAO,SAAS,OAAO,SAAU,QAAO,OAAO;AACnD,KAAI,OAAO,QAAQ,OACjB,SAAQ,KACN,wBAAwB,WAAW,8BAA8B,uBAAuB,OAAO,QAAQ,IAAI,GAC5G;AAEH,QAAO;;;AAIT,MAAM,iBAAiB;;;;;;AAOvB,eAAe,gBACb,UACA,KACA,iBAC0C;CAI1C,MAAM,WAAW,2BAHF,MAAM,qBAAqB,SAAS,MAAM,SAAS,MAAM,EACtE,iBACD,CAAC,EACkD,SAAS,KAAK;AAClE,KAAI,CAAC,YAAY,SAAS,OAAQ,QAAO;AAEzC,QAAO,CACL,SAAS,MACT;EACE,MAAM,SAAS;EACf,aAAa,SAAS;EACtB,aAAa,SAAS;EACtB,SAAS;EACT,WAAW,SAAS;EACpB,GAAI,SAAS,kBAAkB,EAC7B,gBAAgB,SAAS,gBAC1B;EACF,CACF;;;;;;AAOH,MAAM,wBAAwB,CAAC,qBAAqB;;;;;AAMpD,MAAM,yBAAyB,CAAC,oBAAoB,kBAAkB;;;;;;AAOtE,MAAM,iCAAiC,CAAC,WAAW,SAAS;;;;;;;AAQ5D,SAAS,eAAe,KAA4B;AAClD,MAAK,MAAM,aAAa,wBAAwB;EAC9C,MAAM,WAAW,KAAK,KAAK,KAAK,UAAU;AAC1C,MAAI,GAAG,WAAW,SAAS,CACzB,QAAO;;AAGX,QAAO;;;;;;;;;AAsBT,SAAS,aAAa,MAA8B;CAClD,MAAM,UAA0B,EAAE;CAGlC,MAAM,mBAAmB,KAAK,QAAQ,EACpC,MAAM,EAAE,MAAM,oBAAoB,EACnC,CAAC;AAEF,MAAK,MAAM,QAAQ,kBAAkB;EAEnC,MAAM,aAAa,KAAK,KAAK,EAAE,MAAM,EAAE,MAAM,UAAU,EAAE,CAAC;AAC1D,MAAI,CAAC,WAAY;EAGjB,MAAM,SAAS,WAAW,MAAM,CAAC,QAAQ,gBAAgB,GAAG;EAG5D,MAAM,eAAe,KAAK,KAAK,EAAE,MAAM,EAAE,MAAM,iBAAiB,EAAE,CAAC;AACnE,MAAI,CAAC,aAAc;EAGnB,MAAM,aAAa,aAAa,QAAQ,EACtC,MAAM,EAAE,MAAM,oBAAoB,EACnC,CAAC;AAEF,OAAK,MAAM,aAAa,YAAY;GAClC,MAAM,WAAW,UAAU,UAAU;AACrC,OAAI,SAAS,UAAU,GAAG;IAExB,MAAM,eAAe,SAAS,GAAG,MAAM;IACvC,MAAM,YAAY,SAAS,SAAS,SAAS,GAAG,MAAM;AACtD,YAAQ,KAAK;KAAE,MAAM;KAAW;KAAc;KAAQ,CAAC;UAClD;IAEL,MAAM,OAAO,UAAU,MAAM;AAC7B,YAAQ,KAAK;KAAE;KAAM,cAAc;KAAM;KAAQ,CAAC;;;;AAKxD,QAAO;;;;;;;;;;AAWT,SAAS,kBAAkB,MAA2B;CACpD,MAAM,4BAAY,IAAI,KAAa;CAGnC,MAAM,QAAQ,KAAK,QAAQ,EAAE,MAAM,EAAE,MAAM,QAAQ,EAAE,CAAC;AAEtD,MAAK,MAAM,QAAQ,OAAO;EAExB,MAAM,MAAM,KAAK,KAAK,EAAE,MAAM,EAAE,MAAM,uBAAuB,EAAE,CAAC;AAChE,MAAI,CAAC,OAAO,IAAI,MAAM,KAAK,UAAW;EAGtC,MAAM,YAAY,KAAK,KAAK,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,CAAC;AACxD
,MAAI,CAAC,UAAW;AAGhB,OAAK,MAAM,SAAS,UAAU,UAAU,CACtC,KAAI,MAAM,MAAM,KAAK,mBAAmB;GAEtC,MAAM,SAAS,MAAM,UAAU,CAAC;AAChC,OAAI,QAAQ,MAAM,KAAK,aACrB,WAAU,IAAI,OAAO,MAAM,CAAC;;;AAMpC,QAAO;;;;;AAMT,MAAM,qBAAqB;CAAC;CAAO;CAAQ;CAAO;CAAO;;;;;;;;;;;;;;;AAgBzD,SAAS,qBACP,cACA,eACA,iBACA,aACyB;CACzB,MAAM,WAAW,KAAK,QAAQ,eAAe,aAAa;CAG1D,MAAM,WAAW,eAAe;AAChC,KAAI,CAAC,kBAAkB,UAAU,SAAS,EAAE;AAC1C,UAAQ,KACN,6BAA6B,aAAa,4CAC3C;AACD,SAAO;;AAIT,KAAI,GAAG,WAAW,SAAS,IAAI,GAAG,SAAS,SAAS,CAAC,aAAa,CAChE,QAAO,qBAAqB,UAAU,EAAE,iBAAiB,CAAC;AAI5D,MAAK,MAAM,OAAO,oBAAoB;EACpC,MAAM,WAAW,GAAG,WAAW;AAC/B,MAAI,GAAG,WAAW,SAAS,IAAI,GAAG,SAAS,SAAS,CAAC,QAAQ,EAAE;GAC7D,MAAM,MAAM,KAAK,QAAQ,SAAS;AAClC,OAAI,CAAC,kBAAkB,KAAK,SAAS,CAAE,QAAO;AAC9C,UAAO,qBAAqB,KAAK,EAAE,iBAAiB,CAAC;;;AAKzD,MAAK,MAAM,OAAO,oBAAoB;EACpC,MAAM,YAAY,KAAK,KAAK,UAAU,QAAQ,MAAM;AACpD,MAAI,GAAG,WAAW,UAAU,CAC1B,QAAO,qBAAqB,UAAU,EAAE,iBAAiB,CAAC;;AAI9D,QAAO;;;;;;;;;;;AAYT,eAAe,qBACb,iBACA,eACA,KACA,iBAC6C;CAC7C,MAAM,UAA8C,EAAE;AAEtD,MAAK,MAAM,OAAO,iBAAiB;EACjC,MAAM,WAAW,qBACf,IAAI,QACJ,eACA,iBACA,IACD;AACD,MAAI,CAAC,SAAU;AAEf,MAAI;GAEF,MAAM,QAAQ,MAAM,gBAClB,UACA,KAHmB,KAAK,SAAS,KAAK,KAAK,QAAQ,SAAS,KAAK,CAAC,IAIlE,gBACD;AACD,OAAI,MAAO,SAAQ,MAAM,MAAM,MAAM;WAC9B,OAAO;AACd,WAAQ,KACN,uCAAuC,SAAS,KAAK,IACrD,iBAAiB,QAAQ,MAAM,UAAU,MAC1C;;;AAIL,QAAO;;;;;;;;;AAUT,eAAe,wBACb,aACA,iBAC2B;CAC3B,MAAM,aAAa,KAAK,KAAK,aAAa,QAAQ,UAAU;CAC5D,MAAM,YAA8B,EAAE;AAEtC,KAAI,CAAC,GAAG,WAAW,WAAW,CAC5B,QAAO;CAGT,MAAM,UAAU,GAAG,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC;AACnE,MAAK,MAAM,SAAS,SAAS;AAC3B,MAAI,CAAC,MAAM,aAAa,CAAE;EAC1B,MAAM,WAAW,qBAAqB,KAAK,KAAK,YAAY,MAAM,KAAK,EAAE,EACvE,iBACD,CAAC;AACF,MAAI,CAAC,SAAU;AAEf,MAAI;GAIF,MAAM,WAAW,2BAHF,MAAM,qBAAqB,SAAS,MAAM,SAAS,MAAM,EACtE,iBACD,CAAC,EACkD,SAAS,KAAK;AAClE,OAAI,SACF,WAAU,KAAK,SAAS;WAEnB,OAAO;AACd,WAAQ,KACN,uCAAuC,SAAS,KAAK,IACrD,iBAAiB,QAAQ,MAAM,UAAU,MAC1C;;;AAIL,QAAO;;;;;;;;;AAUT,eAAe,eACb,KACA,UACA,iBAC6C;CAC7C,MAAM,UAA8C,EAAE;AAEtD,MAAK,MAAM,eAAe,UAAU;EAClC,MAAM,cAAc,KAAK,KAAK,KAAK,gBAAgB,YAAY;AAC/D,MAAI,CAAC,GAAG,WAAW,YAA
Y,CAC7B;EAMF,MAAM,YAAY,MAAM,wBACtB,aAHA,mBAAmB,gCAAgC,YAAY,CAKhE;AACD,OAAK,MAAM,YAAY,WAAW;AAChC,OAAI,SAAS,OAAQ;AACrB,WAAQ,SAAS,QAAQ;IACvB,MAAM,SAAS;IACf,aAAa,SAAS;IACtB,aAAa,SAAS;IACtB,SAAS;IACT,WAAW,SAAS;IACpB,GAAI,SAAS,kBAAkB,EAC7B,gBAAgB,SAAS,gBAC1B;IACF;;;AAIL,QAAO;;;;;;;;AAST,eAAe,wBACb,KACA,KACA,iBACA,QAAQ,GACqC;CAC7C,MAAM,UAA8C,EAAE;AACtD,KAAI,CAAC,GAAG,WAAW,IAAI,IAAI,SAAS,eAAgB,QAAO;CAE3D,MAAM,UAAU,GAAG,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC;AAC5D,MAAK,MAAM,SAAS,SAAS;AAC3B,MAAI,CAAC,MAAM,aAAa,CAAE;EAE1B,MAAM,YAAY,KAAK,KAAK,KAAK,MAAM,KAAK;EAC5C,MAAM,WAAW,qBAAqB,WAAW,EAAE,iBAAiB,CAAC;AAErE,MAAI,UAAU;GACZ,MAAM,MAAM,KAAK,KAAK,SAAS,KAAK,UAAU;AAC9C,OAAI;IACF,MAAM,cAAc,MAAM,gBACxB,UACA,KACA,gBACD;AACD,QAAI,YAAa,SAAQ,YAAY,MAAM,YAAY;YAChD,OAAO;AACd,YAAQ,KACN,uCAAuC,SAAS,KAAK,IACrD,iBAAiB,QAAQ,MAAM,UAAU,MAC1C;;AAEH;;AAGF,SAAO,OACL,SACA,MAAM,wBAAwB,WAAW,KAAK,iBAAiB,QAAQ,EAAE,CAC1E;;AAGH,QAAO;;;;;;;;;;;;AAaT,eAAe,eACb,KACA,aACA,iBACA,KAC6C;CAC7C,MAAM,UAA8C,EAAE;AAEtD,KAAI,CAAC,GAAG,WAAW,IAAI,CAAE,QAAO;CAEhC,MAAM,UAAU,GAAG,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC;AAC5D,MAAK,MAAM,SAAS,SAAS;AAC3B,MAAI,CAAC,MAAM,aAAa,CAAE;EAE1B,MAAM,YAAY,KAAK,KAAK,KAAK,MAAM,KAAK;EAC5C,MAAM,WAAW,qBAAqB,WAAW,EAAE,iBAAiB,CAAC;AACrE,MAAI,CAAC,SAAU;EAEf,MAAM,MACJ,QAAQ,SAAY,KAAK,KAAK,SAAS,KAAK,UAAU,KAAK;AAE7D,MAAI;GACF,MAAM,cAAc,MAAM,gBAAgB,UAAU,KAAK,gBAAgB;AACzE,OAAI,YAAa,SAAQ,YAAY,MAAM,YAAY;WAChD,OAAO;AACd,WAAQ,KACN,uCAAuC,SAAS,KAAK,IACrD,iBAAiB,QAAQ,MAAM,UAAU,MAC1C;;;AAIL,QAAO;;;;;AAMT,SAAS,cACP,YACA,EAAE,WACF,SACA;CACA,MAAM,mBAA4C;EAChD,SACE;EACF,SAAS;EACT;EACD;CAED,MAAM,aAAa,KAAK,UAAU,kBAAkB,MAAM,EAAE;AAE5D,KAAI,QAAQ,KACV,SAAQ,IAAI,WAAW;AAGzB,KAAI,QAAQ,OAAO;AACjB,KAAG,cAAc,YAAY,GAAG,WAAW,IAAI;AAC/C,MAAI,CAAC,QAAQ,UAAU,CAAC,QAAQ,KAC9B,SAAQ,IAAI,aAAa,aAAa;YAE/B,CAAC,QAAQ,UAAU,CAAC,QAAQ,MAAM;AAC3C,UAAQ,IAAI,gCAAgC;AAC5C,UAAQ,IAAI,qCAAqC;AACjD,UAAQ,IAAI,WAAW;AACvB,UAAQ,IAAI,IAAI,OAAO,GAAG,CAAC;AAC3B,UAAQ,IAAI,WAAW;AACvB,UAAQ,IAAI,IAAI,OAAO,GAAG,CAAC;;;;;;;;;AAU/B,eAAe,eAAe,SAUZ;CAChB,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,
kBAAkB,QAAQ,QAAQ,gBAAgB;CACxD,MAAM,aAAa,KAAK,QAAQ,KAAK,QAAQ,UAAU,sBAAsB;AAG7E,KAAI,CAAC,kBAAkB,YAAY,IAAI,EAAE;AACvC,UAAQ,MACN,uBAAuB,QAAQ,OAAO,2CACvC;AACD,UAAQ,KAAK,EAAE;;AAGjB,KAAI,CAAC,QAAQ,UAAU,CAAC,QAAQ,MAAM;AACpC,UAAQ,IAAI,mCAAmC;AAC/C,MAAI,gBACF,SAAQ,KACN,oGACD;;CAKL,MAAM,aAAa,eAAe,IAAI;CACtC,IAAI,gBAAgC,EAAE;CACtC,IAAI,+BAAe,IAAI,KAAa;AAEpC,KAAI,YAAY;AACd,MAAI,CAAC,QAAQ,UAAU,CAAC,QAAQ,MAAM;GACpC,MAAM,eAAe,KAAK,SAAS,KAAK,WAAW;AACnD,WAAQ,IAAI,sBAAsB,eAAe;;EAGnD,MAAM,UAAU,GAAG,aAAa,YAAY,QAAQ;EAGpD,MAAM,OADM,MADC,WAAW,SAAS,OAAO,GAAG,KAAK,MAAM,KAAK,YACnC,QAAQ,CACf,MAAM;AAEvB,kBAAgB,aAAa,KAAK;AAClC,iBAAe,kBAAkB,KAAK;YAC7B,CAAC,QAAQ,UAAU,CAAC,QAAQ,KACrC,SAAQ,IACN,wCACA,uBAAuB,KAAK,KAAK,CAClC;CAIH,MAAM,aAAa,cAAc,QAC9B,MAAM,CAAC,EAAE,OAAO,WAAW,IAAI,IAAI,CAAC,EAAE,OAAO,WAAW,IAAI,CAC9D;CACD,MAAM,eAAe,cAAc,QAChC,MAAM,EAAE,OAAO,WAAW,IAAI,IAAI,EAAE,OAAO,WAAW,IAAI,CAC5D;CAGD,MAAM,UAA8C,EAAE;AAEtD,KAAI,QAAQ,YAAY;EACtB,MAAM,cAAc,KAAK,QAAQ,KAAK,QAAQ,WAAW;EACzD,MAAM,UAAU,QAAQ,eAAe;AACvC,MAAI,CAAC,QAAQ,UAAU,CAAC,QAAQ,KAC9B,SAAQ,IAAI,+BAA+B,QAAQ,aAAa;AAElE,SAAO,OACL,SACA,MAAM,eAAe,aAAa,SAAS,gBAAgB,CAC5D;QACI;EACL,MAAM,cAAc,IAAI,IAAI,CAC1B,GAAG,uBACH,GAAG,WAAW,KAAK,MAAM,EAAE,OAAO,CACnC,CAAC;AACF,SAAO,OACL,SACA,MAAM,eAAe,KAAK,aAAa,gBAAgB,CACxD;;AAIH,KAAI,cAAc,aAAa,SAAS,GAAG;EAEzC,MAAM,eAAe,MAAM,qBACzB,cAFoB,KAAK,QAAQ,WAAW,EAI5C,KACA,gBACD;AACD,SAAO,OAAO,SAAS,aAAa;;CAKtC,MAAM,kBAA4B,QAAQ,kBACtC,CAAC,QAAQ,gBAAgB,GACzB,+BAA+B,QAAQ,MACrC,GAAG,WAAW,KAAK,KAAK,KAAK,EAAE,CAAC,CACjC;AACL,MAAK,MAAM,OAAO,iBAAiB;EACjC,MAAM,cAAc,KAAK,QAAQ,KAAK,IAAI;AAC1C,MAAI,CAAC,GAAG,WAAW,YAAY,CAAE;AACjC,MAAI,CAAC,QAAQ,UAAU,CAAC,QAAQ,KAC9B,SAAQ,IAAI,qCAAqC,MAAM;EAEzD,MAAM,aAAa,MAAM,wBACvB,aACA,KACA,gBACD;AACD,OAAK,MAAM,CAAC,MAAM,UAAU,OAAO,QAAQ,WAAW,CACpD,KAAI,CAAC,QAAQ,MAAO,SAAQ,QAAQ;;CAIxC,MAAM,cAAc,OAAO,KAAK,QAAQ,CAAC;AAEzC,KAAI,gBAAgB,GAAG;AACrB,MAAI,QAAQ,UAAU,QAAQ,MAAM;AAClC,iBAAc,YAAY,EAAE,SAAS,EAAE,EAAE,EAAE,QAAQ;AACnD,OAAI,QAAQ,OAAQ;AACpB,WAAQ,KAAK,EAAE;;AAEjB,UAAQ,IAAI,oBAAoB;AAChC,MAAI,QAAQ,WACV,SAAQ
,IACN,kBAAkB,kBAAkB,iCAAiC,gBAAgB,cAAc,QAAQ,aAC5G;OACI;AACL,WAAQ,IACN,0EACD;AACD,WAAQ,IAAI,4CAA4C;;AAE1D,UAAQ,KAAK,EAAE;;CAMjB,MAAM,gBAAgB,aAAa,KAAK,QAAQ,WAAW,GAAG;AAE9D,MAAK,MAAM,OAAO,eAAe;AAC/B,MAAI,CAAC,aAAa,IAAI,IAAI,KAAK,CAAE;EAEjC,MAAM,UAAU,IAAI,OAAO,WAAW,IAAI,IAAI,IAAI,OAAO,WAAW,IAAI;EACxE,IAAI;AAEJ,MAAI,SAAS;GAEX,MAAM,oBAAoB,KAAK,QAAQ,eAAe,IAAI,OAAO;AACjE,YAAS,OAAO,OAAO,QAAQ,CAAC,MAAM,MAAM;AAC1C,QAAI,CAAC,EAAE,QAAQ,WAAW,IAAI,CAAE,QAAO;AAEvC,WAD0B,KAAK,QAAQ,KAAK,EAAE,QAAQ,KAE9B,qBAAqB,EAAE,SAAS,IAAI;KAE5D;QAGF,UAAS,OAAO,OAAO,QAAQ,CAAC,MAC7B,MAAM,EAAE,YAAY,IAAI,UAAU,EAAE,SAAS,IAAI,aACnD;AAGH,MAAI,OACF,QAAO,qBAAqB;;AAKhC,KAAI,QAAQ,gBAAgB;EAC1B,MAAM,gBAAgB,QAAQ,eAC3B,MAAM,IAAI,CACV,KAAK,MAAM,EAAE,MAAM,CAAC,CACpB,OAAO,QAAQ;AAClB,OAAK,MAAM,QAAQ,cACjB,KAAI,QAAQ,MACV,SAAQ,MAAM,qBAAqB;WAC1B,CAAC,QAAQ,OAClB,SAAQ,KACN,0CAA0C,KAAK,qCAChD;;AAKP,KAAI,CAAC,QAAQ,UAAU,CAAC,QAAQ,MAAM;AACpC,UAAQ,IAAI,WAAW,YAAY,aAAa;AAChD,OAAK,MAAM,CAAC,MAAM,aAAa,OAAO,QAAQ,QAAQ,EAAE;GACtD,MAAM,gBACJ,SAAS,UAAU,SAAS,SAAS,SAAS,UAAU,SAAS;GACnE,MAAM,eACJ,gBAAgB,IAAI,KAAK,cAAc,iBAAiB;GAC1D,MAAM,eAAe,SAAS,qBAAqB,iBAAiB;AACpE,WAAQ,IACN,KAAK,SAAS,qBAAqB,MAAM,IAAI,GAAG,SAAS,YAAY,IAAI,KAAK,SAAS,SAAS,UAAU,eAAe,eAC1H;;;AAIL,eAAc,YAAY,EAAE,SAAS,EAAE,QAAQ;;AAWjD,MAAa,qBAAqB,IAAI,QAAQ,OAAO,CAClD,YACC,yEACD,CACA,OAAO,eAAe,0BAA0B,CAChD,OACC,uBACA,oDACD,CACA,OACC,gBACA,+EACD,CACA,OACC,6BACA,qFACD,CACA,OACC,wBACA,6FACD,CACA,OACC,yBACA,0FACD,CACA,OACC,8BACA,iFACD,CACA,OACC,uBACA,wFACD,CACA,OAAO,UAAU,oCAAoC,CACrD,YACC,SACA;;;;;;;iCAQD,CACA,QAAQ,SACP,eAAe,KAAK,CAAC,OAAO,QAAQ;AAClC,SAAQ,MAAM,IAAI;AAClB,SAAQ,KAAK,EAAE;EACf,CACH"}
@@ -47,35 +47,65 @@ function resolveManifestPaths(paths, cwd, allowJsManifest) {
47
47
  async function runPluginValidate(paths, options) {
48
48
  const cwd = process.cwd();
49
49
  const allowJsManifest = Boolean(options.allowJsManifest);
50
- if (allowJsManifest) console.warn("Warning: --allow-js-manifest executes manifest.js/manifest.cjs files. Only use with trusted code.");
50
+ if (allowJsManifest && !options.json) console.warn("Warning: --allow-js-manifest executes manifest.js/manifest.cjs files. Only use with trusted code.");
51
51
  const manifestPaths = resolveManifestPaths(paths.length > 0 ? paths : ["."], cwd, allowJsManifest);
52
52
  if (manifestPaths.length === 0) {
53
- console.error("No manifest files to validate.");
53
+ if (options.json) console.log("[]");
54
+ else console.error("No manifest files to validate.");
54
55
  process.exit(1);
55
56
  }
56
57
  let hasFailure = false;
58
+ const jsonResults = [];
57
59
  for (const { path: manifestPath, type } of manifestPaths) {
60
+ const relativePath = path.relative(cwd, manifestPath);
58
61
  let obj;
59
62
  try {
60
63
  obj = await loadManifestFromFile(manifestPath, type, { allowJsManifest });
61
64
  } catch (err) {
62
- console.error(`✗ ${manifestPath}`);
63
- console.error(` ${err instanceof Error ? err.message : String(err)}`);
65
+ const errMsg = err instanceof Error ? err.message : String(err);
66
+ if (options.json) jsonResults.push({
67
+ path: relativePath,
68
+ valid: false,
69
+ errors: [errMsg]
70
+ });
71
+ else {
72
+ console.error(`✗ ${manifestPath}`);
73
+ console.error(` ${errMsg}`);
74
+ }
64
75
  hasFailure = true;
65
76
  continue;
66
77
  }
67
78
  const result = detectSchemaType(obj) === "template-plugins" ? validateTemplateManifest(obj) : validateManifest(obj);
68
- const relativePath = path.relative(cwd, manifestPath);
69
- if (result.valid) console.log(`✓ ${relativePath}`);
79
+ if (result.valid) if (options.json) jsonResults.push({
80
+ path: relativePath,
81
+ valid: true
82
+ });
83
+ else console.log(`✓ ${relativePath}`);
70
84
  else {
71
- console.error(`✗ ${relativePath}`);
72
- if (result.errors?.length) console.error(formatValidationErrors(result.errors, obj));
85
+ if (options.json) {
86
+ const errors = result.errors?.length ? formatValidationErrors(result.errors, obj).split("\n").filter(Boolean) : [];
87
+ jsonResults.push({
88
+ path: relativePath,
89
+ valid: false,
90
+ ...errors.length > 0 && { errors }
91
+ });
92
+ } else {
93
+ console.error(`✗ ${relativePath}`);
94
+ if (result.errors?.length) console.error(formatValidationErrors(result.errors, obj));
95
+ }
73
96
  hasFailure = true;
74
97
  }
75
98
  }
99
+ if (options.json) console.log(JSON.stringify(jsonResults, null, 2));
76
100
  process.exit(hasFailure ? 1 : 0);
77
101
  }
78
- const pluginValidateCommand = new Command("validate").description("Validate plugin manifest(s) or template manifests against their JSON schema").argument("[paths...]", "Paths to manifest.json or appkit.plugins.json (or plugin directories); use --allow-js-manifest to include manifest.js").option("--allow-js-manifest", "Allow reading manifest.js/manifest.cjs (executes code; use only with trusted plugins)").action((paths, opts) => runPluginValidate(paths, opts).catch((err) => {
102
+ const pluginValidateCommand = new Command("validate").description("Validate plugin manifest(s) or template manifests against their JSON schema").argument("[paths...]", "Paths to manifest.json or appkit.plugins.json (or plugin directories); use --allow-js-manifest to include manifest.js").option("--allow-js-manifest", "Allow reading manifest.js/manifest.cjs (executes code; use only with trusted plugins)").option("--json", "Output validation results as JSON").addHelpText("after", `
103
+ Examples:
104
+ $ appkit plugin validate
105
+ $ appkit plugin validate plugins/my-plugin
106
+ $ appkit plugin validate plugins/my-plugin plugins/other
107
+ $ appkit plugin validate appkit.plugins.json
108
+ $ appkit plugin validate --json`).action((paths, opts) => runPluginValidate(paths, opts).catch((err) => {
79
109
  console.error(err);
80
110
  process.exit(1);
81
111
  }));
@@ -1 +1 @@
1
- {"version":3,"file":"validate.js","names":[],"sources":["../../../../../src/cli/commands/plugin/validate/validate.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport process from \"node:process\";\nimport { Command } from \"commander\";\nimport {\n loadManifestFromFile,\n type ResolvedManifest,\n resolveManifestInDir,\n} from \"../manifest-resolve\";\nimport {\n detectSchemaType,\n formatValidationErrors,\n validateManifest,\n validateTemplateManifest,\n} from \"./validate-manifest\";\n\nfunction resolveManifestPaths(\n paths: string[],\n cwd: string,\n allowJsManifest: boolean,\n): ResolvedManifest[] {\n const out: ResolvedManifest[] = [];\n for (const p of paths) {\n const resolved = path.resolve(cwd, p);\n if (!fs.existsSync(resolved)) {\n console.error(`Path not found: ${p}`);\n continue;\n }\n const stat = fs.statSync(resolved);\n if (stat.isDirectory()) {\n let found = false;\n const pluginResolved = resolveManifestInDir(resolved, {\n allowJsManifest,\n });\n if (pluginResolved) {\n out.push(pluginResolved);\n found = true;\n }\n const templateManifest = path.join(resolved, \"appkit.plugins.json\");\n if (fs.existsSync(templateManifest)) {\n out.push({ path: templateManifest, type: \"json\" });\n found = true;\n }\n if (!found) {\n console.error(\n `No ${allowJsManifest ? \"manifest.json, manifest.js, or\" : \"manifest.json or\"} appkit.plugins.json in directory: ${p}`,\n );\n }\n } else {\n const ext = path.extname(resolved).toLowerCase();\n if (!allowJsManifest && (ext === \".js\" || ext === \".cjs\")) {\n console.error(\n `JS manifest provided but disabled by default: ${p}. Re-run with --allow-js-manifest to opt in.`,\n );\n continue;\n }\n out.push({\n path: resolved,\n type: ext === \".js\" || ext === \".cjs\" ? 
\"js\" : \"json\",\n });\n }\n }\n return out;\n}\n\nasync function runPluginValidate(\n paths: string[],\n options: { allowJsManifest?: boolean },\n): Promise<void> {\n const cwd = process.cwd();\n const allowJsManifest = Boolean(options.allowJsManifest);\n if (allowJsManifest) {\n console.warn(\n \"Warning: --allow-js-manifest executes manifest.js/manifest.cjs files. Only use with trusted code.\",\n );\n }\n const toValidate = paths.length > 0 ? paths : [\".\"];\n const manifestPaths = resolveManifestPaths(toValidate, cwd, allowJsManifest);\n\n if (manifestPaths.length === 0) {\n console.error(\"No manifest files to validate.\");\n process.exit(1);\n }\n\n let hasFailure = false;\n for (const { path: manifestPath, type } of manifestPaths) {\n let obj: unknown;\n try {\n obj = await loadManifestFromFile(manifestPath, type, { allowJsManifest });\n } catch (err) {\n console.error(`✗ ${manifestPath}`);\n console.error(` ${err instanceof Error ? err.message : String(err)}`);\n hasFailure = true;\n continue;\n }\n\n const schemaType = detectSchemaType(obj);\n const result =\n schemaType === \"template-plugins\"\n ? validateTemplateManifest(obj)\n : validateManifest(obj);\n\n const relativePath = path.relative(cwd, manifestPath);\n if (result.valid) {\n console.log(`✓ ${relativePath}`);\n } else {\n console.error(`✗ ${relativePath}`);\n if (result.errors?.length) {\n console.error(formatValidationErrors(result.errors, obj));\n }\n hasFailure = true;\n }\n }\n\n process.exit(hasFailure ? 
1 : 0);\n}\n\nexport const pluginValidateCommand = new Command(\"validate\")\n .description(\n \"Validate plugin manifest(s) or template manifests against their JSON schema\",\n )\n .argument(\n \"[paths...]\",\n \"Paths to manifest.json or appkit.plugins.json (or plugin directories); use --allow-js-manifest to include manifest.js\",\n )\n .option(\n \"--allow-js-manifest\",\n \"Allow reading manifest.js/manifest.cjs (executes code; use only with trusted plugins)\",\n )\n .action((paths: string[], opts: { allowJsManifest?: boolean }) =>\n runPluginValidate(paths, opts).catch((err) => {\n console.error(err);\n process.exit(1);\n }),\n );\n"],"mappings":";;;;;;;;AAgBA,SAAS,qBACP,OACA,KACA,iBACoB;CACpB,MAAM,MAA0B,EAAE;AAClC,MAAK,MAAM,KAAK,OAAO;EACrB,MAAM,WAAW,KAAK,QAAQ,KAAK,EAAE;AACrC,MAAI,CAAC,GAAG,WAAW,SAAS,EAAE;AAC5B,WAAQ,MAAM,mBAAmB,IAAI;AACrC;;AAGF,MADa,GAAG,SAAS,SAAS,CACzB,aAAa,EAAE;GACtB,IAAI,QAAQ;GACZ,MAAM,iBAAiB,qBAAqB,UAAU,EACpD,iBACD,CAAC;AACF,OAAI,gBAAgB;AAClB,QAAI,KAAK,eAAe;AACxB,YAAQ;;GAEV,MAAM,mBAAmB,KAAK,KAAK,UAAU,sBAAsB;AACnE,OAAI,GAAG,WAAW,iBAAiB,EAAE;AACnC,QAAI,KAAK;KAAE,MAAM;KAAkB,MAAM;KAAQ,CAAC;AAClD,YAAQ;;AAEV,OAAI,CAAC,MACH,SAAQ,MACN,MAAM,kBAAkB,mCAAmC,mBAAmB,qCAAqC,IACpH;SAEE;GACL,MAAM,MAAM,KAAK,QAAQ,SAAS,CAAC,aAAa;AAChD,OAAI,CAAC,oBAAoB,QAAQ,SAAS,QAAQ,SAAS;AACzD,YAAQ,MACN,iDAAiD,EAAE,8CACpD;AACD;;AAEF,OAAI,KAAK;IACP,MAAM;IACN,MAAM,QAAQ,SAAS,QAAQ,SAAS,OAAO;IAChD,CAAC;;;AAGN,QAAO;;AAGT,eAAe,kBACb,OACA,SACe;CACf,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,kBAAkB,QAAQ,QAAQ,gBAAgB;AACxD,KAAI,gBACF,SAAQ,KACN,oGACD;CAGH,MAAM,gBAAgB,qBADH,MAAM,SAAS,IAAI,QAAQ,CAAC,IAAI,EACI,KAAK,gBAAgB;AAE5E,KAAI,cAAc,WAAW,GAAG;AAC9B,UAAQ,MAAM,iCAAiC;AAC/C,UAAQ,KAAK,EAAE;;CAGjB,IAAI,aAAa;AACjB,MAAK,MAAM,EAAE,MAAM,cAAc,UAAU,eAAe;EACxD,IAAI;AACJ,MAAI;AACF,SAAM,MAAM,qBAAqB,cAAc,MAAM,EAAE,iBAAiB,CAAC;WAClE,KAAK;AACZ,WAAQ,MAAM,KAAK,eAAe;AAClC,WAAQ,MAAM,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI,GAAG;AACtE,gBAAa;AACb;;EAIF,MAAM,SADa,iBAAiB,IAAI,KAEvB,qBACX,yBAAyB,IAAI,GAC7B,iBAAiB,IAAI;EAE3B,MAAM
,eAAe,KAAK,SAAS,KAAK,aAAa;AACrD,MAAI,OAAO,MACT,SAAQ,IAAI,KAAK,eAAe;OAC3B;AACL,WAAQ,MAAM,KAAK,eAAe;AAClC,OAAI,OAAO,QAAQ,OACjB,SAAQ,MAAM,uBAAuB,OAAO,QAAQ,IAAI,CAAC;AAE3D,gBAAa;;;AAIjB,SAAQ,KAAK,aAAa,IAAI,EAAE;;AAGlC,MAAa,wBAAwB,IAAI,QAAQ,WAAW,CACzD,YACC,8EACD,CACA,SACC,cACA,wHACD,CACA,OACC,uBACA,wFACD,CACA,QAAQ,OAAiB,SACxB,kBAAkB,OAAO,KAAK,CAAC,OAAO,QAAQ;AAC5C,SAAQ,MAAM,IAAI;AAClB,SAAQ,KAAK,EAAE;EACf,CACH"}
1
+ {"version":3,"file":"validate.js","names":[],"sources":["../../../../../src/cli/commands/plugin/validate/validate.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport process from \"node:process\";\nimport { Command } from \"commander\";\nimport {\n loadManifestFromFile,\n type ResolvedManifest,\n resolveManifestInDir,\n} from \"../manifest-resolve\";\nimport {\n detectSchemaType,\n formatValidationErrors,\n validateManifest,\n validateTemplateManifest,\n} from \"./validate-manifest\";\n\nfunction resolveManifestPaths(\n paths: string[],\n cwd: string,\n allowJsManifest: boolean,\n): ResolvedManifest[] {\n const out: ResolvedManifest[] = [];\n for (const p of paths) {\n const resolved = path.resolve(cwd, p);\n if (!fs.existsSync(resolved)) {\n console.error(`Path not found: ${p}`);\n continue;\n }\n const stat = fs.statSync(resolved);\n if (stat.isDirectory()) {\n let found = false;\n const pluginResolved = resolveManifestInDir(resolved, {\n allowJsManifest,\n });\n if (pluginResolved) {\n out.push(pluginResolved);\n found = true;\n }\n const templateManifest = path.join(resolved, \"appkit.plugins.json\");\n if (fs.existsSync(templateManifest)) {\n out.push({ path: templateManifest, type: \"json\" });\n found = true;\n }\n if (!found) {\n console.error(\n `No ${allowJsManifest ? \"manifest.json, manifest.js, or\" : \"manifest.json or\"} appkit.plugins.json in directory: ${p}`,\n );\n }\n } else {\n const ext = path.extname(resolved).toLowerCase();\n if (!allowJsManifest && (ext === \".js\" || ext === \".cjs\")) {\n console.error(\n `JS manifest provided but disabled by default: ${p}. Re-run with --allow-js-manifest to opt in.`,\n );\n continue;\n }\n out.push({\n path: resolved,\n type: ext === \".js\" || ext === \".cjs\" ? 
\"js\" : \"json\",\n });\n }\n }\n return out;\n}\n\ninterface ValidateOptions {\n allowJsManifest?: boolean;\n json?: boolean;\n}\n\nasync function runPluginValidate(\n paths: string[],\n options: ValidateOptions,\n): Promise<void> {\n const cwd = process.cwd();\n const allowJsManifest = Boolean(options.allowJsManifest);\n if (allowJsManifest && !options.json) {\n console.warn(\n \"Warning: --allow-js-manifest executes manifest.js/manifest.cjs files. Only use with trusted code.\",\n );\n }\n const toValidate = paths.length > 0 ? paths : [\".\"];\n const manifestPaths = resolveManifestPaths(toValidate, cwd, allowJsManifest);\n\n if (manifestPaths.length === 0) {\n if (options.json) {\n console.log(\"[]\");\n } else {\n console.error(\"No manifest files to validate.\");\n }\n process.exit(1);\n }\n\n let hasFailure = false;\n const jsonResults: { path: string; valid: boolean; errors?: string[] }[] = [];\n\n for (const { path: manifestPath, type } of manifestPaths) {\n const relativePath = path.relative(cwd, manifestPath);\n let obj: unknown;\n try {\n obj = await loadManifestFromFile(manifestPath, type, { allowJsManifest });\n } catch (err) {\n const errMsg = err instanceof Error ? err.message : String(err);\n if (options.json) {\n jsonResults.push({\n path: relativePath,\n valid: false,\n errors: [errMsg],\n });\n } else {\n console.error(`✗ ${manifestPath}`);\n console.error(` ${errMsg}`);\n }\n hasFailure = true;\n continue;\n }\n\n const schemaType = detectSchemaType(obj);\n const result =\n schemaType === \"template-plugins\"\n ? validateTemplateManifest(obj)\n : validateManifest(obj);\n\n if (result.valid) {\n if (options.json) {\n jsonResults.push({ path: relativePath, valid: true });\n } else {\n console.log(`✓ ${relativePath}`);\n }\n } else {\n if (options.json) {\n const errors = result.errors?.length\n ? 
formatValidationErrors(result.errors, obj)\n .split(\"\\n\")\n .filter(Boolean)\n : [];\n jsonResults.push({\n path: relativePath,\n valid: false,\n ...(errors.length > 0 && { errors }),\n });\n } else {\n console.error(`✗ ${relativePath}`);\n if (result.errors?.length) {\n console.error(formatValidationErrors(result.errors, obj));\n }\n }\n hasFailure = true;\n }\n }\n\n if (options.json) {\n console.log(JSON.stringify(jsonResults, null, 2));\n }\n\n process.exit(hasFailure ? 1 : 0);\n}\n\nexport const pluginValidateCommand = new Command(\"validate\")\n .description(\n \"Validate plugin manifest(s) or template manifests against their JSON schema\",\n )\n .argument(\n \"[paths...]\",\n \"Paths to manifest.json or appkit.plugins.json (or plugin directories); use --allow-js-manifest to include manifest.js\",\n )\n .option(\n \"--allow-js-manifest\",\n \"Allow reading manifest.js/manifest.cjs (executes code; use only with trusted plugins)\",\n )\n .option(\"--json\", \"Output validation results as JSON\")\n .addHelpText(\n \"after\",\n `\nExamples:\n $ appkit plugin validate\n $ appkit plugin validate plugins/my-plugin\n $ appkit plugin validate plugins/my-plugin plugins/other\n $ appkit plugin validate appkit.plugins.json\n $ appkit plugin validate --json`,\n )\n .action((paths: string[], opts: ValidateOptions) =>\n runPluginValidate(paths, opts).catch((err) => {\n console.error(err);\n process.exit(1);\n }),\n 
);\n"],"mappings":";;;;;;;;AAgBA,SAAS,qBACP,OACA,KACA,iBACoB;CACpB,MAAM,MAA0B,EAAE;AAClC,MAAK,MAAM,KAAK,OAAO;EACrB,MAAM,WAAW,KAAK,QAAQ,KAAK,EAAE;AACrC,MAAI,CAAC,GAAG,WAAW,SAAS,EAAE;AAC5B,WAAQ,MAAM,mBAAmB,IAAI;AACrC;;AAGF,MADa,GAAG,SAAS,SAAS,CACzB,aAAa,EAAE;GACtB,IAAI,QAAQ;GACZ,MAAM,iBAAiB,qBAAqB,UAAU,EACpD,iBACD,CAAC;AACF,OAAI,gBAAgB;AAClB,QAAI,KAAK,eAAe;AACxB,YAAQ;;GAEV,MAAM,mBAAmB,KAAK,KAAK,UAAU,sBAAsB;AACnE,OAAI,GAAG,WAAW,iBAAiB,EAAE;AACnC,QAAI,KAAK;KAAE,MAAM;KAAkB,MAAM;KAAQ,CAAC;AAClD,YAAQ;;AAEV,OAAI,CAAC,MACH,SAAQ,MACN,MAAM,kBAAkB,mCAAmC,mBAAmB,qCAAqC,IACpH;SAEE;GACL,MAAM,MAAM,KAAK,QAAQ,SAAS,CAAC,aAAa;AAChD,OAAI,CAAC,oBAAoB,QAAQ,SAAS,QAAQ,SAAS;AACzD,YAAQ,MACN,iDAAiD,EAAE,8CACpD;AACD;;AAEF,OAAI,KAAK;IACP,MAAM;IACN,MAAM,QAAQ,SAAS,QAAQ,SAAS,OAAO;IAChD,CAAC;;;AAGN,QAAO;;AAQT,eAAe,kBACb,OACA,SACe;CACf,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,kBAAkB,QAAQ,QAAQ,gBAAgB;AACxD,KAAI,mBAAmB,CAAC,QAAQ,KAC9B,SAAQ,KACN,oGACD;CAGH,MAAM,gBAAgB,qBADH,MAAM,SAAS,IAAI,QAAQ,CAAC,IAAI,EACI,KAAK,gBAAgB;AAE5E,KAAI,cAAc,WAAW,GAAG;AAC9B,MAAI,QAAQ,KACV,SAAQ,IAAI,KAAK;MAEjB,SAAQ,MAAM,iCAAiC;AAEjD,UAAQ,KAAK,EAAE;;CAGjB,IAAI,aAAa;CACjB,MAAM,cAAqE,EAAE;AAE7E,MAAK,MAAM,EAAE,MAAM,cAAc,UAAU,eAAe;EACxD,MAAM,eAAe,KAAK,SAAS,KAAK,aAAa;EACrD,IAAI;AACJ,MAAI;AACF,SAAM,MAAM,qBAAqB,cAAc,MAAM,EAAE,iBAAiB,CAAC;WAClE,KAAK;GACZ,MAAM,SAAS,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAC/D,OAAI,QAAQ,KACV,aAAY,KAAK;IACf,MAAM;IACN,OAAO;IACP,QAAQ,CAAC,OAAO;IACjB,CAAC;QACG;AACL,YAAQ,MAAM,KAAK,eAAe;AAClC,YAAQ,MAAM,KAAK,SAAS;;AAE9B,gBAAa;AACb;;EAIF,MAAM,SADa,iBAAiB,IAAI,KAEvB,qBACX,yBAAyB,IAAI,GAC7B,iBAAiB,IAAI;AAE3B,MAAI,OAAO,MACT,KAAI,QAAQ,KACV,aAAY,KAAK;GAAE,MAAM;GAAc,OAAO;GAAM,CAAC;MAErD,SAAQ,IAAI,KAAK,eAAe;OAE7B;AACL,OAAI,QAAQ,MAAM;IAChB,MAAM,SAAS,OAAO,QAAQ,SAC1B,uBAAuB,OAAO,QAAQ,IAAI,CACvC,MAAM,KAAK,CACX,OAAO,QAAQ,GAClB,EAAE;AACN,gBAAY,KAAK;KACf,MAAM;KACN,OAAO;KACP,GAAI,OAAO,SAAS,KAAK,EAAE,QAAQ;KACpC,CAAC;UACG;AACL,YAAQ,MAAM,KAAK,eAAe;AAClC,QAAI,OAAO,QAAQ,OACjB,SAAQ,MAAM,uBAAuB,OAAO,QAAQ,IAAI,CAAC;;AAG7D,gBAAa;;;AAIjB,KAAI,QAAQ,KACV,SA
AQ,IAAI,KAAK,UAAU,aAAa,MAAM,EAAE,CAAC;AAGnD,SAAQ,KAAK,aAAa,IAAI,EAAE;;AAGlC,MAAa,wBAAwB,IAAI,QAAQ,WAAW,CACzD,YACC,8EACD,CACA,SACC,cACA,wHACD,CACA,OACC,uBACA,wFACD,CACA,OAAO,UAAU,oCAAoC,CACrD,YACC,SACA;;;;;;mCAOD,CACA,QAAQ,OAAiB,SACxB,kBAAkB,OAAO,KAAK,CAAC,OAAO,QAAQ;AAC5C,SAAQ,MAAM,IAAI;AAClB,SAAQ,KAAK,EAAE;EACf,CACH"}
@@ -95,10 +95,8 @@ function runSetup(options) {
95
95
  const installed = findInstalledPackages();
96
96
  if (installed.length === 0) {
97
97
  console.log("No @databricks/appkit packages found in node_modules.");
98
- console.log("\nMake sure you've installed at least one of:");
99
- PACKAGES.forEach((pkg) => {
100
- console.log(` - ${pkg.name}`);
101
- });
98
+ console.log("\nInstall at least one of:");
99
+ for (const pkg of PACKAGES) console.log(` npm install ${pkg.name}`);
102
100
  process.exit(1);
103
101
  }
104
102
  console.log("Detected packages:");
@@ -130,7 +128,10 @@ function runSetup(options) {
130
128
  console.log("─".repeat(50));
131
129
  }
132
130
  }
133
- const setupCommand = new Command("setup").description("Setup CLAUDE.md with AppKit package references").option("-w, --write", "Create or update CLAUDE.md file in current directory").action(runSetup);
131
+ const setupCommand = new Command("setup").description("Setup CLAUDE.md with AppKit package references").option("-w, --write", "Create or update CLAUDE.md file in current directory").addHelpText("after", `
132
+ Examples:
133
+ $ appkit setup
134
+ $ appkit setup --write`).action(runSetup);
134
135
 
135
136
  //#endregion
136
137
  export { setupCommand };
@@ -1 +1 @@
1
- {"version":3,"file":"setup.js","names":[],"sources":["../../../src/cli/commands/setup.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { Command } from \"commander\";\n\nconst PACKAGES = [\n { name: \"@databricks/appkit\", description: \"Backend SDK\" },\n {\n name: \"@databricks/appkit-ui\",\n description: \"UI Integration, Charts, Tables, SSE, and more.\",\n },\n];\n\nconst SECTION_START = \"<!-- appkit-instructions-start -->\";\nconst SECTION_END = \"<!-- appkit-instructions-end -->\";\n\n/**\n * Find which AppKit packages are installed by checking for package.json\n */\nfunction findInstalledPackages() {\n const cwd = process.cwd();\n const installed = [];\n\n for (const pkg of PACKAGES) {\n const packagePath = path.join(\n cwd,\n \"node_modules\",\n pkg.name,\n \"package.json\",\n );\n if (fs.existsSync(packagePath)) {\n installed.push(pkg);\n }\n }\n\n return installed;\n}\n\n/**\n * Generate the AppKit section content\n */\nfunction generateSection(packages: typeof PACKAGES) {\n const links = packages\n .map((pkg) => {\n const docPath = `./node_modules/${pkg.name}/CLAUDE.md`;\n return `- **${pkg.name}** (${pkg.description}): [${docPath}](${docPath})`;\n })\n .join(\"\\n\");\n\n return `${SECTION_START}\n## Databricks AppKit\n\nThis project uses Databricks AppKit packages. 
For AI assistant guidance on using these packages, refer to:\n\n${links}\n\n### Databricks Skills\n\nFor enhanced AI assistance with Databricks CLI operations, authentication, data exploration, and app development, install the Databricks skills:\n\n\\`\\`\\`bash\ndatabricks experimental aitools install\n\\`\\`\\`\n${SECTION_END}`;\n}\n\n/**\n * Generate standalone CLAUDE.md content (when no existing file)\n */\nfunction generateStandalone(packages: typeof PACKAGES) {\n const links = packages\n .map((pkg) => {\n const docPath = `./node_modules/${pkg.name}/CLAUDE.md`;\n return `- **${pkg.name}** (${pkg.description}): [${docPath}](${docPath})`;\n })\n .join(\"\\n\");\n\n return `# AI Assistant Instructions\n\n${SECTION_START}\n## Databricks AppKit\n\nThis project uses Databricks AppKit packages. For AI assistant guidance on using these packages, refer to:\n\n${links}\n\n### Databricks Skills\n\nFor enhanced AI assistance with Databricks CLI operations, authentication, data exploration, and app development, install the Databricks skills:\n\n\\`\\`\\`bash\ndatabricks experimental aitools install\n\\`\\`\\`\n${SECTION_END}\n`;\n}\n\n/**\n * Update existing content with AppKit section\n */\nfunction updateContent(existingContent: string, packages: typeof PACKAGES) {\n const newSection = generateSection(packages);\n\n // Check if AppKit section already exists\n const startIndex = existingContent.indexOf(SECTION_START);\n const endIndex = existingContent.indexOf(SECTION_END);\n\n if (startIndex !== -1 && endIndex !== -1) {\n // Replace existing section\n const before = existingContent.substring(0, startIndex);\n const after = existingContent.substring(endIndex + SECTION_END.length);\n return before + newSection + after;\n }\n\n // Append section to end\n return `${existingContent.trimEnd()}\\n\\n${newSection}\\n`;\n}\n\n/**\n * Setup command implementation\n */\nfunction runSetup(options: { write?: boolean }) {\n const shouldWrite = options.write;\n\n // Find installed 
packages\n const installed = findInstalledPackages();\n\n if (installed.length === 0) {\n console.log(\"No @databricks/appkit packages found in node_modules.\");\n console.log(\"\\nMake sure you've installed at least one of:\");\n PACKAGES.forEach((pkg) => {\n console.log(` - ${pkg.name}`);\n });\n process.exit(1);\n }\n\n console.log(\"Detected packages:\");\n installed.forEach((pkg) => {\n console.log(` ✓ ${pkg.name}`);\n });\n\n const claudePath = path.join(process.cwd(), \"CLAUDE.md\");\n const existingContent = fs.existsSync(claudePath)\n ? fs.readFileSync(claudePath, \"utf-8\")\n : null;\n\n let finalContent: string;\n let action: string;\n\n if (existingContent) {\n finalContent = updateContent(existingContent, installed);\n action = existingContent.includes(SECTION_START) ? \"Updated\" : \"Added to\";\n } else {\n finalContent = generateStandalone(installed);\n action = \"Created\";\n }\n\n if (shouldWrite) {\n fs.writeFileSync(claudePath, finalContent);\n console.log(`\\n✓ ${action} CLAUDE.md`);\n console.log(` Path: ${claudePath}`);\n } else {\n console.log(\"\\nTo create/update CLAUDE.md, run:\");\n console.log(\" npx appkit setup --write\\n\");\n\n if (existingContent) {\n console.log(\n `This will ${\n existingContent.includes(SECTION_START)\n ? 
\"update the existing\"\n : \"add a new\"\n } AppKit section.\\n`,\n );\n }\n\n console.log(\"Preview of AppKit section:\\n\");\n console.log(\"─\".repeat(50));\n console.log(generateSection(installed));\n console.log(\"─\".repeat(50));\n }\n}\n\nexport const setupCommand = new Command(\"setup\")\n .description(\"Setup CLAUDE.md with AppKit package references\")\n .option(\"-w, --write\", \"Create or update CLAUDE.md file in current directory\")\n .action(runSetup);\n"],"mappings":";;;;;AAIA,MAAM,WAAW,CACf;CAAE,MAAM;CAAsB,aAAa;CAAe,EAC1D;CACE,MAAM;CACN,aAAa;CACd,CACF;AAED,MAAM,gBAAgB;AACtB,MAAM,cAAc;;;;AAKpB,SAAS,wBAAwB;CAC/B,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,YAAY,EAAE;AAEpB,MAAK,MAAM,OAAO,UAAU;EAC1B,MAAM,cAAc,KAAK,KACvB,KACA,gBACA,IAAI,MACJ,eACD;AACD,MAAI,GAAG,WAAW,YAAY,CAC5B,WAAU,KAAK,IAAI;;AAIvB,QAAO;;;;;AAMT,SAAS,gBAAgB,UAA2B;AAQlD,QAAO,GAAG,cAAc;;;;;EAPV,SACX,KAAK,QAAQ;EACZ,MAAM,UAAU,kBAAkB,IAAI,KAAK;AAC3C,SAAO,OAAO,IAAI,KAAK,MAAM,IAAI,YAAY,MAAM,QAAQ,IAAI,QAAQ;GACvE,CACD,KAAK,KAAK,CAOP;;;;;;;;;EASN;;;;;AAMF,SAAS,mBAAmB,UAA2B;AAQrD,QAAO;;EAEP,cAAc;;;;;EATA,SACX,KAAK,QAAQ;EACZ,MAAM,UAAU,kBAAkB,IAAI,KAAK;AAC3C,SAAO,OAAO,IAAI,KAAK,MAAM,IAAI,YAAY,MAAM,QAAQ,IAAI,QAAQ;GACvE,CACD,KAAK,KAAK,CASP;;;;;;;;;EASN,YAAY;;;;;;AAOd,SAAS,cAAc,iBAAyB,UAA2B;CACzE,MAAM,aAAa,gBAAgB,SAAS;CAG5C,MAAM,aAAa,gBAAgB,QAAQ,cAAc;CACzD,MAAM,WAAW,gBAAgB,QAAQ,YAAY;AAErD,KAAI,eAAe,MAAM,aAAa,IAAI;EAExC,MAAM,SAAS,gBAAgB,UAAU,GAAG,WAAW;EACvD,MAAM,QAAQ,gBAAgB,UAAU,WAAW,GAAmB;AACtE,SAAO,SAAS,aAAa;;AAI/B,QAAO,GAAG,gBAAgB,SAAS,CAAC,MAAM,WAAW;;;;;AAMvD,SAAS,SAAS,SAA8B;CAC9C,MAAM,cAAc,QAAQ;CAG5B,MAAM,YAAY,uBAAuB;AAEzC,KAAI,UAAU,WAAW,GAAG;AAC1B,UAAQ,IAAI,wDAAwD;AACpE,UAAQ,IAAI,gDAAgD;AAC5D,WAAS,SAAS,QAAQ;AACxB,WAAQ,IAAI,OAAO,IAAI,OAAO;IAC9B;AACF,UAAQ,KAAK,EAAE;;AAGjB,SAAQ,IAAI,qBAAqB;AACjC,WAAU,SAAS,QAAQ;AACzB,UAAQ,IAAI,OAAO,IAAI,OAAO;GAC9B;CAEF,MAAM,aAAa,KAAK,KAAK,QAAQ,KAAK,EAAE,YAAY;CACxD,MAAM,kBAAkB,GAAG,WAAW,WAAW,GAC7C,GAAG,aAAa,YAAY,QAAQ,GACpC;CAEJ,IAAI;CACJ,IAAI;AAEJ,KAAI,iBAAiB;AACnB,iBAAe,cAAc,iBAAiB,U
AAU;AACxD,WAAS,gBAAgB,SAAS,cAAc,GAAG,YAAY;QAC1D;AACL,iBAAe,mBAAmB,UAAU;AAC5C,WAAS;;AAGX,KAAI,aAAa;AACf,KAAG,cAAc,YAAY,aAAa;AAC1C,UAAQ,IAAI,OAAO,OAAO,YAAY;AACtC,UAAQ,IAAI,WAAW,aAAa;QAC/B;AACL,UAAQ,IAAI,qCAAqC;AACjD,UAAQ,IAAI,+BAA+B;AAE3C,MAAI,gBACF,SAAQ,IACN,aACE,gBAAgB,SAAS,cAAc,GACnC,wBACA,YACL,oBACF;AAGH,UAAQ,IAAI,+BAA+B;AAC3C,UAAQ,IAAI,IAAI,OAAO,GAAG,CAAC;AAC3B,UAAQ,IAAI,gBAAgB,UAAU,CAAC;AACvC,UAAQ,IAAI,IAAI,OAAO,GAAG,CAAC;;;AAI/B,MAAa,eAAe,IAAI,QAAQ,QAAQ,CAC7C,YAAY,iDAAiD,CAC7D,OAAO,eAAe,uDAAuD,CAC7E,OAAO,SAAS"}
1
+ {"version":3,"file":"setup.js","names":[],"sources":["../../../src/cli/commands/setup.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport path from \"node:path\";\nimport { Command } from \"commander\";\n\nconst PACKAGES = [\n { name: \"@databricks/appkit\", description: \"Backend SDK\" },\n {\n name: \"@databricks/appkit-ui\",\n description: \"UI Integration, Charts, Tables, SSE, and more.\",\n },\n];\n\nconst SECTION_START = \"<!-- appkit-instructions-start -->\";\nconst SECTION_END = \"<!-- appkit-instructions-end -->\";\n\n/**\n * Find which AppKit packages are installed by checking for package.json\n */\nfunction findInstalledPackages() {\n const cwd = process.cwd();\n const installed = [];\n\n for (const pkg of PACKAGES) {\n const packagePath = path.join(\n cwd,\n \"node_modules\",\n pkg.name,\n \"package.json\",\n );\n if (fs.existsSync(packagePath)) {\n installed.push(pkg);\n }\n }\n\n return installed;\n}\n\n/**\n * Generate the AppKit section content\n */\nfunction generateSection(packages: typeof PACKAGES) {\n const links = packages\n .map((pkg) => {\n const docPath = `./node_modules/${pkg.name}/CLAUDE.md`;\n return `- **${pkg.name}** (${pkg.description}): [${docPath}](${docPath})`;\n })\n .join(\"\\n\");\n\n return `${SECTION_START}\n## Databricks AppKit\n\nThis project uses Databricks AppKit packages. 
For AI assistant guidance on using these packages, refer to:\n\n${links}\n\n### Databricks Skills\n\nFor enhanced AI assistance with Databricks CLI operations, authentication, data exploration, and app development, install the Databricks skills:\n\n\\`\\`\\`bash\ndatabricks experimental aitools install\n\\`\\`\\`\n${SECTION_END}`;\n}\n\n/**\n * Generate standalone CLAUDE.md content (when no existing file)\n */\nfunction generateStandalone(packages: typeof PACKAGES) {\n const links = packages\n .map((pkg) => {\n const docPath = `./node_modules/${pkg.name}/CLAUDE.md`;\n return `- **${pkg.name}** (${pkg.description}): [${docPath}](${docPath})`;\n })\n .join(\"\\n\");\n\n return `# AI Assistant Instructions\n\n${SECTION_START}\n## Databricks AppKit\n\nThis project uses Databricks AppKit packages. For AI assistant guidance on using these packages, refer to:\n\n${links}\n\n### Databricks Skills\n\nFor enhanced AI assistance with Databricks CLI operations, authentication, data exploration, and app development, install the Databricks skills:\n\n\\`\\`\\`bash\ndatabricks experimental aitools install\n\\`\\`\\`\n${SECTION_END}\n`;\n}\n\n/**\n * Update existing content with AppKit section\n */\nfunction updateContent(existingContent: string, packages: typeof PACKAGES) {\n const newSection = generateSection(packages);\n\n // Check if AppKit section already exists\n const startIndex = existingContent.indexOf(SECTION_START);\n const endIndex = existingContent.indexOf(SECTION_END);\n\n if (startIndex !== -1 && endIndex !== -1) {\n // Replace existing section\n const before = existingContent.substring(0, startIndex);\n const after = existingContent.substring(endIndex + SECTION_END.length);\n return before + newSection + after;\n }\n\n // Append section to end\n return `${existingContent.trimEnd()}\\n\\n${newSection}\\n`;\n}\n\n/**\n * Setup command implementation\n */\nfunction runSetup(options: { write?: boolean }) {\n const shouldWrite = options.write;\n\n // Find installed 
packages\n const installed = findInstalledPackages();\n\n if (installed.length === 0) {\n console.log(\"No @databricks/appkit packages found in node_modules.\");\n console.log(\"\\nInstall at least one of:\");\n for (const pkg of PACKAGES) {\n console.log(` npm install ${pkg.name}`);\n }\n process.exit(1);\n }\n\n console.log(\"Detected packages:\");\n installed.forEach((pkg) => {\n console.log(` ✓ ${pkg.name}`);\n });\n\n const claudePath = path.join(process.cwd(), \"CLAUDE.md\");\n const existingContent = fs.existsSync(claudePath)\n ? fs.readFileSync(claudePath, \"utf-8\")\n : null;\n\n let finalContent: string;\n let action: string;\n\n if (existingContent) {\n finalContent = updateContent(existingContent, installed);\n action = existingContent.includes(SECTION_START) ? \"Updated\" : \"Added to\";\n } else {\n finalContent = generateStandalone(installed);\n action = \"Created\";\n }\n\n if (shouldWrite) {\n fs.writeFileSync(claudePath, finalContent);\n console.log(`\\n✓ ${action} CLAUDE.md`);\n console.log(` Path: ${claudePath}`);\n } else {\n console.log(\"\\nTo create/update CLAUDE.md, run:\");\n console.log(\" npx appkit setup --write\\n\");\n\n if (existingContent) {\n console.log(\n `This will ${\n existingContent.includes(SECTION_START)\n ? 
\"update the existing\"\n : \"add a new\"\n } AppKit section.\\n`,\n );\n }\n\n console.log(\"Preview of AppKit section:\\n\");\n console.log(\"─\".repeat(50));\n console.log(generateSection(installed));\n console.log(\"─\".repeat(50));\n }\n}\n\nexport const setupCommand = new Command(\"setup\")\n .description(\"Setup CLAUDE.md with AppKit package references\")\n .option(\"-w, --write\", \"Create or update CLAUDE.md file in current directory\")\n .addHelpText(\n \"after\",\n `\nExamples:\n $ appkit setup\n $ appkit setup --write`,\n )\n .action(runSetup);\n"],"mappings":";;;;;AAIA,MAAM,WAAW,CACf;CAAE,MAAM;CAAsB,aAAa;CAAe,EAC1D;CACE,MAAM;CACN,aAAa;CACd,CACF;AAED,MAAM,gBAAgB;AACtB,MAAM,cAAc;;;;AAKpB,SAAS,wBAAwB;CAC/B,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,YAAY,EAAE;AAEpB,MAAK,MAAM,OAAO,UAAU;EAC1B,MAAM,cAAc,KAAK,KACvB,KACA,gBACA,IAAI,MACJ,eACD;AACD,MAAI,GAAG,WAAW,YAAY,CAC5B,WAAU,KAAK,IAAI;;AAIvB,QAAO;;;;;AAMT,SAAS,gBAAgB,UAA2B;AAQlD,QAAO,GAAG,cAAc;;;;;EAPV,SACX,KAAK,QAAQ;EACZ,MAAM,UAAU,kBAAkB,IAAI,KAAK;AAC3C,SAAO,OAAO,IAAI,KAAK,MAAM,IAAI,YAAY,MAAM,QAAQ,IAAI,QAAQ;GACvE,CACD,KAAK,KAAK,CAOP;;;;;;;;;EASN;;;;;AAMF,SAAS,mBAAmB,UAA2B;AAQrD,QAAO;;EAEP,cAAc;;;;;EATA,SACX,KAAK,QAAQ;EACZ,MAAM,UAAU,kBAAkB,IAAI,KAAK;AAC3C,SAAO,OAAO,IAAI,KAAK,MAAM,IAAI,YAAY,MAAM,QAAQ,IAAI,QAAQ;GACvE,CACD,KAAK,KAAK,CASP;;;;;;;;;EASN,YAAY;;;;;;AAOd,SAAS,cAAc,iBAAyB,UAA2B;CACzE,MAAM,aAAa,gBAAgB,SAAS;CAG5C,MAAM,aAAa,gBAAgB,QAAQ,cAAc;CACzD,MAAM,WAAW,gBAAgB,QAAQ,YAAY;AAErD,KAAI,eAAe,MAAM,aAAa,IAAI;EAExC,MAAM,SAAS,gBAAgB,UAAU,GAAG,WAAW;EACvD,MAAM,QAAQ,gBAAgB,UAAU,WAAW,GAAmB;AACtE,SAAO,SAAS,aAAa;;AAI/B,QAAO,GAAG,gBAAgB,SAAS,CAAC,MAAM,WAAW;;;;;AAMvD,SAAS,SAAS,SAA8B;CAC9C,MAAM,cAAc,QAAQ;CAG5B,MAAM,YAAY,uBAAuB;AAEzC,KAAI,UAAU,WAAW,GAAG;AAC1B,UAAQ,IAAI,wDAAwD;AACpE,UAAQ,IAAI,6BAA6B;AACzC,OAAK,MAAM,OAAO,SAChB,SAAQ,IAAI,iBAAiB,IAAI,OAAO;AAE1C,UAAQ,KAAK,EAAE;;AAGjB,SAAQ,IAAI,qBAAqB;AACjC,WAAU,SAAS,QAAQ;AACzB,UAAQ,IAAI,OAAO,IAAI,OAAO;GAC9B;CAEF,MAAM,aAAa,KAAK,KAAK,QAAQ,KAAK,EAAE,YAAY;CACxD,MAAM,kBAAkB,GAAG,WAAW,WAAW,GAC7C
,GAAG,aAAa,YAAY,QAAQ,GACpC;CAEJ,IAAI;CACJ,IAAI;AAEJ,KAAI,iBAAiB;AACnB,iBAAe,cAAc,iBAAiB,UAAU;AACxD,WAAS,gBAAgB,SAAS,cAAc,GAAG,YAAY;QAC1D;AACL,iBAAe,mBAAmB,UAAU;AAC5C,WAAS;;AAGX,KAAI,aAAa;AACf,KAAG,cAAc,YAAY,aAAa;AAC1C,UAAQ,IAAI,OAAO,OAAO,YAAY;AACtC,UAAQ,IAAI,WAAW,aAAa;QAC/B;AACL,UAAQ,IAAI,qCAAqC;AACjD,UAAQ,IAAI,+BAA+B;AAE3C,MAAI,gBACF,SAAQ,IACN,aACE,gBAAgB,SAAS,cAAc,GACnC,wBACA,YACL,oBACF;AAGH,UAAQ,IAAI,+BAA+B;AAC3C,UAAQ,IAAI,IAAI,OAAO,GAAG,CAAC;AAC3B,UAAQ,IAAI,gBAAgB,UAAU,CAAC;AACvC,UAAQ,IAAI,IAAI,OAAO,GAAG,CAAC;;;AAI/B,MAAa,eAAe,IAAI,QAAQ,QAAQ,CAC7C,YAAY,iDAAiD,CAC7D,OAAO,eAAe,uDAAuD,CAC7E,YACC,SACA;;;0BAID,CACA,OAAO,SAAS"}
@@ -7,5 +7,6 @@ import "./genie/index.js";
7
7
  import "./lakebase-v1/index.js";
8
8
  import { SQLWarehouseConnector } from "./sql-warehouse/client.js";
9
9
  import "./sql-warehouse/index.js";
10
+ import "./vector-search/index.js";
10
11
 
11
12
  export { };
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","names":["createLakebasePool","createLakebasePoolBase"],"sources":["../../../src/connectors/lakebase/index.ts"],"sourcesContent":["import {\n createLakebasePool as createLakebasePoolBase,\n type LakebasePoolConfig,\n} from \"@databricks/lakebase\";\nimport type { Pool } from \"pg\";\nimport { createLogger } from \"@/logging/logger\";\n\n/**\n * Create a Lakebase pool with appkit's logger integration.\n * Telemetry automatically uses appkit's OpenTelemetry configuration via global registry.\n *\n * @param config - Lakebase pool configuration\n * @returns PostgreSQL pool with appkit integration\n */\nexport function createLakebasePool(config?: Partial<LakebasePoolConfig>): Pool {\n const logger = createLogger(\"connectors:lakebase\");\n\n return createLakebasePoolBase({\n ...config,\n logger,\n });\n}\n\n// Re-export everything else from lakebase\nexport {\n type DatabaseCredential,\n type GenerateDatabaseCredentialRequest,\n generateDatabaseCredential,\n getLakebaseOrmConfig,\n getLakebasePgConfig,\n getUsernameWithApiLookup,\n getWorkspaceClient,\n type LakebasePoolConfig,\n type RequestedClaims,\n RequestedClaimsPermissionSet,\n type RequestedResource,\n} from \"@databricks/lakebase\";\n"],"mappings":";;;;;;;;;;;AAcA,SAAgBA,qBAAmB,QAA4C;CAC7E,MAAM,SAAS,aAAa,sBAAsB;AAElD,QAAOC,mBAAuB;EAC5B,GAAG;EACH;EACD,CAAC"}
1
+ {"version":3,"file":"index.js","names":["createLakebasePool","createLakebasePoolBase"],"sources":["../../../src/connectors/lakebase/index.ts"],"sourcesContent":["import {\n createLakebasePool as createLakebasePoolBase,\n type LakebasePoolConfig,\n} from \"@databricks/lakebase\";\nimport type { Pool } from \"pg\";\nimport { createLogger } from \"../../logging/logger\";\n\n/**\n * Create a Lakebase pool with appkit's logger integration.\n * Telemetry automatically uses appkit's OpenTelemetry configuration via global registry.\n *\n * @param config - Lakebase pool configuration\n * @returns PostgreSQL pool with appkit integration\n */\nexport function createLakebasePool(config?: Partial<LakebasePoolConfig>): Pool {\n const logger = createLogger(\"connectors:lakebase\");\n\n return createLakebasePoolBase({\n ...config,\n logger,\n });\n}\n\n// Re-export everything else from lakebase\nexport {\n type DatabaseCredential,\n type GenerateDatabaseCredentialRequest,\n generateDatabaseCredential,\n getLakebaseOrmConfig,\n getLakebasePgConfig,\n getUsernameWithApiLookup,\n getWorkspaceClient,\n type LakebasePoolConfig,\n type RequestedClaims,\n RequestedClaimsPermissionSet,\n type RequestedResource,\n} from \"@databricks/lakebase\";\n"],"mappings":";;;;;;;;;;;AAcA,SAAgBA,qBAAmB,QAA4C;CAC7E,MAAM,SAAS,aAAa,sBAAsB;AAElD,QAAOC,mBAAuB;EAC5B,GAAG;EACH;EACD,CAAC"}
@@ -1 +1 @@
1
- {"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/lakebase-v1/client.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { ApiClient, Config } from \"@databricks/sdk-experimental\";\nimport pg from \"pg\";\nimport {\n type Counter,\n type Histogram,\n SpanStatusCode,\n TelemetryManager,\n type TelemetryProvider,\n} from \"@/telemetry\";\nimport {\n AppKitError,\n AuthenticationError,\n ConfigurationError,\n ConnectionError,\n ValidationError,\n} from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport { deepMerge } from \"../../utils\";\nimport { lakebaseV1Defaults } from \"./defaults\";\nimport type {\n LakebaseV1Config,\n LakebaseV1ConnectionConfig,\n LakebaseV1Credentials,\n} from \"./types\";\n\nconst logger = createLogger(\"connectors:lakebase-v1\");\n\n/**\n * Enterprise-grade connector for Databricks Lakebase Provisioned\n *\n * @deprecated This connector is for Lakebase Provisioned only.\n * For new projects, use Lakebase Autoscaling instead: https://docs.databricks.com/aws/en/oltp/projects/\n *\n * This connector is compatible with Lakebase Provisioned: https://docs.databricks.com/aws/en/oltp/instances/\n *\n * Lakebase Autoscaling offers:\n * - Automatic compute scaling\n * - Scale-to-zero for cost optimization\n * - Database branching for development\n * - Instant restore capabilities\n *\n * Use the new LakebaseConnector (coming in a future release) for Lakebase Autoscaling support.\n *\n * @example Simplest - everything from env/context\n * ```typescript\n * const connector = new LakebaseV1Connector();\n * await connector.query('SELECT * FROM users');\n * ```\n *\n * @example With explicit connection string\n * ```typescript\n * const connector = new LakebaseV1Connector({\n * connectionString: 'postgresql://...'\n * });\n * ```\n */\nexport class LakebaseV1Connector {\n private readonly name: 
string = \"lakebase-v1\";\n private readonly CACHE_BUFFER_MS = 2 * 60 * 1000;\n private readonly config: LakebaseV1Config;\n private readonly connectionConfig: LakebaseV1ConnectionConfig;\n private pool: pg.Pool | null = null;\n private credentials: LakebaseV1Credentials | null = null;\n\n // telemetry\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n queryCount: Counter;\n queryDuration: Histogram;\n };\n\n constructor(userConfig?: Partial<LakebaseV1Config>) {\n this.config = deepMerge(lakebaseV1Defaults, userConfig);\n this.connectionConfig = this.parseConnectionConfig();\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n queryCount: this.telemetry\n .getMeter()\n .createCounter(\"lakebase.v1.query.count\", {\n description: \"Total number of queries executed\",\n unit: \"1\",\n }),\n queryDuration: this.telemetry\n .getMeter()\n .createHistogram(\"lakebase.v1.query.duration\", {\n description: \"Duration of queries executed\",\n unit: \"ms\",\n }),\n };\n\n // validate configuration\n if (this.config.maxPoolSize < 1) {\n throw ValidationError.invalidValue(\n \"maxPoolSize\",\n this.config.maxPoolSize,\n \"at least 1\",\n );\n }\n }\n\n /**\n * Execute a SQL query\n *\n * @example\n * ```typescript\n * const users = await connector.query('SELECT * FROM users');\n * const user = await connector.query('SELECT * FROM users WHERE id = $1', [123]);\n * ```\n */\n async query<T extends pg.QueryResultRow>(\n sql: string,\n params?: any[],\n retryCount: number = 0,\n ): Promise<pg.QueryResult<T>> {\n const startTime = Date.now();\n\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.query\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.statement\": sql.substring(0, 500),\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n try {\n const pool = await this.getPool();\n const result = await pool.query<T>(sql, params);\n 
span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const result = await newPool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transient_error_retry\");\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.query<T>(sql, params, retryCount + 1);\n }\n\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.queryFailed(error as Error);\n } finally {\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /**\n * Execute a transaction\n *\n * COMMIT and ROLLBACK are automatically managed by the transaction function.\n *\n * @param callback - Callback function to execute within the transaction context\n * @example\n * ```typescript\n * await connector.transaction(async (client) => {\n * await client.query('INSERT INTO accounts (name) VALUES ($1)', ['Alice']);\n * await client.query('INSERT INTO logs (action) VALUES ($1)', ['Created Alice']);\n * });\n * ```\n */\n async transaction<T>(\n callback: (client: pg.PoolClient) => Promise<T>,\n retryCount: number = 0,\n ): Promise<T> {\n const startTime = Date.now();\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.transaction\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n const pool = 
await this.getPool();\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(client);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n try {\n await client.query(\"ROLLBACK\");\n } catch {}\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n client.release();\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const retryClient = await newPool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(retryClient);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (retryError) {\n try {\n await retryClient.query(\"ROLLBACK\");\n } catch {}\n throw retryError;\n } finally {\n retryClient.release();\n }\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transaction_error_retry\");\n client.release();\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.transaction<T>(callback, retryCount + 1);\n }\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.transactionFailed(error as Error);\n } finally {\n client.release();\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /** Check if database connection is healthy */\n async healthCheck(): Promise<boolean> {\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.healthCheck\",\n {},\n async (span) => {\n try {\n const result = await this.query<{ result: number }>(\n \"SELECT 1 as result\",\n );\n const healthy = result.rows[0]?.result === 1;\n span.setAttribute(\"db.healthy\", healthy);\n 
span.setStatus({ code: SpanStatusCode.OK });\n return healthy;\n } catch {\n span.setAttribute(\"db.healthy\", false);\n span.setStatus({ code: SpanStatusCode.ERROR });\n return false;\n } finally {\n span.end();\n }\n },\n );\n }\n\n /** Close connection pool (call on shutdown) */\n async close(): Promise<void> {\n if (this.pool) {\n await this.pool.end().catch((error: unknown) => {\n logger.error(\"Error closing connection pool: %O\", error);\n });\n this.pool = null;\n }\n this.credentials = null;\n }\n\n /** Setup graceful shutdown to close connection pools */\n shutdown(): void {\n process.on(\"SIGTERM\", () => this.close());\n process.on(\"SIGINT\", () => this.close());\n this.close();\n }\n\n /** Get Databricks workspace client - from config or execution context */\n private getWorkspaceClient(): WorkspaceClient {\n if (this.config.workspaceClient) {\n return this.config.workspaceClient;\n }\n\n try {\n const { getWorkspaceClient: getClient } = require(\"../../context\");\n const client = getClient();\n\n // cache it for subsequent calls\n this.config.workspaceClient = client;\n return client;\n } catch (_error) {\n throw ConnectionError.clientUnavailable(\n \"Databricks workspace client\",\n \"Either pass it in config or ensure ServiceContext is initialized\",\n );\n }\n }\n\n /** Get or create connection pool */\n private async getPool(): Promise<pg.Pool> {\n if (!this.connectionConfig) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config\",\n );\n }\n\n if (!this.pool) {\n const creds = await this.getCredentials();\n this.pool = this.createPool(creds);\n }\n return this.pool;\n }\n\n /** Create PostgreSQL pool */\n private createPool(credentials: {\n username: string;\n password: string;\n }): pg.Pool {\n const { host, database, port, sslMode } = this.connectionConfig;\n\n const pool = new pg.Pool({\n host,\n port,\n database,\n user: 
credentials.username,\n password: credentials.password,\n max: this.config.maxPoolSize,\n idleTimeoutMillis: this.config.idleTimeoutMs,\n connectionTimeoutMillis: this.config.connectionTimeoutMs,\n ssl: sslMode === \"require\" ? { rejectUnauthorized: true } : false,\n });\n\n pool.on(\"error\", (error: Error & { code?: string }) => {\n logger.error(\n \"Connection pool error: %s (code: %s)\",\n error.message,\n error.code,\n );\n });\n\n return pool;\n }\n\n /** Get or fetch credentials with caching */\n private async getCredentials(): Promise<{\n username: string;\n password: string;\n }> {\n const now = Date.now();\n\n // return cached if still valid\n if (\n this.credentials &&\n now < this.credentials.expiresAt - this.CACHE_BUFFER_MS\n ) {\n return this.credentials;\n }\n\n // fetch new credentials\n const username = await this.fetchUsername();\n const { token, expiresAt } = await this.fetchPassword();\n\n this.credentials = {\n username,\n password: token,\n expiresAt,\n };\n\n return { username, password: token };\n }\n\n /** Rotate credentials and recreate pool */\n private async rotateCredentials(): Promise<void> {\n // clear cached credentials\n this.credentials = null;\n\n if (this.pool) {\n const oldPool = this.pool;\n this.pool = null;\n oldPool.end().catch((error: unknown) => {\n logger.error(\n \"Error closing old connection pool during rotation: %O\",\n error,\n );\n });\n }\n }\n\n /** Fetch username from Databricks */\n private async fetchUsername(): Promise<string> {\n const workspaceClient = this.getWorkspaceClient();\n const user = await workspaceClient.currentUser.me();\n if (!user.userName) {\n throw AuthenticationError.userLookupFailed();\n }\n return user.userName;\n }\n\n /** Fetch password (OAuth token) from Databricks */\n private async fetchPassword(): Promise<{ token: string; expiresAt: number }> {\n const workspaceClient = this.getWorkspaceClient();\n const config = new Config({ host: workspaceClient.config.host });\n const apiClient = 
new ApiClient(config);\n\n if (!this.connectionConfig.appName) {\n throw ConfigurationError.resourceNotFound(\"Database app name\");\n }\n\n const credentials = await apiClient.request({\n path: `/api/2.0/database/credentials`,\n method: \"POST\",\n headers: new Headers(),\n raw: false,\n payload: {\n instance_names: [this.connectionConfig.appName],\n request_id: randomUUID(),\n },\n });\n\n if (!this.validateCredentials(credentials)) {\n throw AuthenticationError.credentialsFailed(\n this.connectionConfig.appName,\n );\n }\n\n const expiresAt = new Date(credentials.expiration_time).getTime();\n\n return { token: credentials.token, expiresAt };\n }\n\n /** Check if error is auth failure */\n private isAuthError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n (error as any).code === \"28P01\"\n );\n }\n\n /** Check if error is transient */\n private isTransientError(error: unknown): boolean {\n if (typeof error !== \"object\" || error === null || !(\"code\" in error)) {\n return false;\n }\n\n const code = (error as any).code;\n return (\n code === \"ECONNRESET\" ||\n code === \"ECONNREFUSED\" ||\n code === \"ETIMEDOUT\" ||\n code === \"57P01\" || // admin_shutdown\n code === \"57P03\" || // cannot_connect_now\n code === \"08006\" || // connection_failure\n code === \"08003\" || // connection_does_not_exist\n code === \"08000\" // connection_exception\n );\n }\n\n /** Type guard for credentials */\n private validateCredentials(\n value: unknown,\n ): value is { token: string; expiration_time: string } {\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n const credentials = value as { token: string; expiration_time: string };\n return (\n \"token\" in credentials &&\n typeof credentials.token === \"string\" &&\n \"expiration_time\" in credentials &&\n typeof credentials.expiration_time === \"string\" &&\n new Date(credentials.expiration_time).getTime() > Date.now()\n );\n 
}\n\n /** Parse connection configuration from config or environment */\n private parseConnectionConfig(): LakebaseV1ConnectionConfig {\n if (this.config.connectionString) {\n return this.parseConnectionString(this.config.connectionString);\n }\n\n // get connection from config\n if (this.config.host && this.config.database && this.config.appName) {\n return {\n host: this.config.host,\n database: this.config.database,\n port: this.config.port ?? 5432,\n sslMode: this.config.sslMode ?? \"require\",\n appName: this.config.appName,\n };\n }\n\n // get connection from environment variables\n const pgHost = process.env.PGHOST;\n const pgDatabase = process.env.PGDATABASE;\n const pgAppName = process.env.PGAPPNAME;\n if (!pgHost || !pgDatabase || !pgAppName) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Required env vars: PGHOST, PGDATABASE, PGAPPNAME. Optional: PGPORT (default: 5432), PGSSLMODE (default: require)\",\n );\n }\n const pgPort = process.env.PGPORT;\n const port = pgPort ? parseInt(pgPort, 10) : 5432;\n\n if (Number.isNaN(port)) {\n throw ValidationError.invalidValue(\"port\", pgPort, \"a number\");\n }\n\n const pgSSLMode = process.env.PGSSLMODE;\n const sslMode =\n (pgSSLMode as \"require\" | \"disable\" | \"prefer\") || \"require\";\n\n return {\n host: pgHost,\n database: pgDatabase,\n port,\n sslMode,\n appName: pgAppName,\n };\n }\n\n private parseConnectionString(\n connectionString: string,\n ): LakebaseV1ConnectionConfig {\n const url = new URL(connectionString);\n const appName = url.searchParams.get(\"appName\");\n if (!appName) {\n throw ConfigurationError.missingConnectionParam(\"appName\");\n }\n\n return {\n host: url.hostname,\n database: url.pathname.slice(1), // remove leading slash\n port: url.port ? 
parseInt(url.port, 10) : 5432,\n sslMode:\n (url.searchParams.get(\"sslmode\") as \"require\" | \"disable\" | \"prefer\") ??\n \"require\",\n appName: appName,\n };\n }\n}\n"],"mappings":";;;;;;;;AA2BA,MAAM,SAAS,aAAa,yBAAyB"}
1
+ {"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/lakebase-v1/client.ts"],"sourcesContent":["import { randomUUID } from \"node:crypto\";\nimport type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { ApiClient, Config } from \"@databricks/sdk-experimental\";\nimport pg from \"pg\";\nimport {\n AppKitError,\n AuthenticationError,\n ConfigurationError,\n ConnectionError,\n ValidationError,\n} from \"../../errors\";\nimport { createLogger } from \"../../logging/logger\";\nimport {\n type Counter,\n type Histogram,\n SpanStatusCode,\n TelemetryManager,\n type TelemetryProvider,\n} from \"../../telemetry\";\nimport { deepMerge } from \"../../utils\";\nimport { lakebaseV1Defaults } from \"./defaults\";\nimport type {\n LakebaseV1Config,\n LakebaseV1ConnectionConfig,\n LakebaseV1Credentials,\n} from \"./types\";\n\nconst logger = createLogger(\"connectors:lakebase-v1\");\n\n/**\n * Enterprise-grade connector for Databricks Lakebase Provisioned\n *\n * @deprecated This connector is for Lakebase Provisioned only.\n * For new projects, use Lakebase Autoscaling instead: https://docs.databricks.com/aws/en/oltp/projects/\n *\n * This connector is compatible with Lakebase Provisioned: https://docs.databricks.com/aws/en/oltp/instances/\n *\n * Lakebase Autoscaling offers:\n * - Automatic compute scaling\n * - Scale-to-zero for cost optimization\n * - Database branching for development\n * - Instant restore capabilities\n *\n * Use the new LakebaseConnector (coming in a future release) for Lakebase Autoscaling support.\n *\n * @example Simplest - everything from env/context\n * ```typescript\n * const connector = new LakebaseV1Connector();\n * await connector.query('SELECT * FROM users');\n * ```\n *\n * @example With explicit connection string\n * ```typescript\n * const connector = new LakebaseV1Connector({\n * connectionString: 'postgresql://...'\n * });\n * ```\n */\nexport class LakebaseV1Connector {\n private readonly name: 
string = \"lakebase-v1\";\n private readonly CACHE_BUFFER_MS = 2 * 60 * 1000;\n private readonly config: LakebaseV1Config;\n private readonly connectionConfig: LakebaseV1ConnectionConfig;\n private pool: pg.Pool | null = null;\n private credentials: LakebaseV1Credentials | null = null;\n\n // telemetry\n private readonly telemetry: TelemetryProvider;\n private readonly telemetryMetrics: {\n queryCount: Counter;\n queryDuration: Histogram;\n };\n\n constructor(userConfig?: Partial<LakebaseV1Config>) {\n this.config = deepMerge(lakebaseV1Defaults, userConfig);\n this.connectionConfig = this.parseConnectionConfig();\n\n this.telemetry = TelemetryManager.getProvider(\n this.name,\n this.config.telemetry,\n );\n this.telemetryMetrics = {\n queryCount: this.telemetry\n .getMeter()\n .createCounter(\"lakebase.v1.query.count\", {\n description: \"Total number of queries executed\",\n unit: \"1\",\n }),\n queryDuration: this.telemetry\n .getMeter()\n .createHistogram(\"lakebase.v1.query.duration\", {\n description: \"Duration of queries executed\",\n unit: \"ms\",\n }),\n };\n\n // validate configuration\n if (this.config.maxPoolSize < 1) {\n throw ValidationError.invalidValue(\n \"maxPoolSize\",\n this.config.maxPoolSize,\n \"at least 1\",\n );\n }\n }\n\n /**\n * Execute a SQL query\n *\n * @example\n * ```typescript\n * const users = await connector.query('SELECT * FROM users');\n * const user = await connector.query('SELECT * FROM users WHERE id = $1', [123]);\n * ```\n */\n async query<T extends pg.QueryResultRow>(\n sql: string,\n params?: any[],\n retryCount: number = 0,\n ): Promise<pg.QueryResult<T>> {\n const startTime = Date.now();\n\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.query\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.statement\": sql.substring(0, 500),\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n try {\n const pool = await this.getPool();\n const result = await pool.query<T>(sql, params);\n 
span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const result = await newPool.query<T>(sql, params);\n span.setAttribute(\"db.rows_affected\", result.rowCount ?? 0);\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transient_error_retry\");\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.query<T>(sql, params, retryCount + 1);\n }\n\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.queryFailed(error as Error);\n } finally {\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /**\n * Execute a transaction\n *\n * COMMIT and ROLLBACK are automatically managed by the transaction function.\n *\n * @param callback - Callback function to execute within the transaction context\n * @example\n * ```typescript\n * await connector.transaction(async (client) => {\n * await client.query('INSERT INTO accounts (name) VALUES ($1)', ['Alice']);\n * await client.query('INSERT INTO logs (action) VALUES ($1)', ['Created Alice']);\n * });\n * ```\n */\n async transaction<T>(\n callback: (client: pg.PoolClient) => Promise<T>,\n retryCount: number = 0,\n ): Promise<T> {\n const startTime = Date.now();\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.transaction\",\n {\n attributes: {\n \"db.system\": \"lakebase-v1\",\n \"db.retry_count\": retryCount,\n },\n },\n async (span) => {\n const pool = 
await this.getPool();\n const client = await pool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(client);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (error) {\n try {\n await client.query(\"ROLLBACK\");\n } catch {}\n // retry on auth failure\n if (this.isAuthError(error)) {\n span.addEvent(\"auth_error_retry\");\n client.release();\n await this.rotateCredentials();\n const newPool = await this.getPool();\n const retryClient = await newPool.connect();\n try {\n await client.query(\"BEGIN\");\n const result = await callback(retryClient);\n await client.query(\"COMMIT\");\n span.setStatus({ code: SpanStatusCode.OK });\n return result;\n } catch (retryError) {\n try {\n await retryClient.query(\"ROLLBACK\");\n } catch {}\n throw retryError;\n } finally {\n retryClient.release();\n }\n }\n\n // retry on transient errors, but only once\n if (this.isTransientError(error) && retryCount < 1) {\n span.addEvent(\"transaction_error_retry\");\n client.release();\n await new Promise((resolve) => setTimeout(resolve, 100));\n return await this.transaction<T>(callback, retryCount + 1);\n }\n span.recordException(error as Error);\n span.setStatus({ code: SpanStatusCode.ERROR });\n\n if (error instanceof AppKitError) {\n throw error;\n }\n throw ConnectionError.transactionFailed(error as Error);\n } finally {\n client.release();\n const duration = Date.now() - startTime;\n this.telemetryMetrics.queryCount.add(1);\n this.telemetryMetrics.queryDuration.record(duration);\n span.end();\n }\n },\n );\n }\n\n /** Check if database connection is healthy */\n async healthCheck(): Promise<boolean> {\n return this.telemetry.startActiveSpan(\n \"lakebase.v1.healthCheck\",\n {},\n async (span) => {\n try {\n const result = await this.query<{ result: number }>(\n \"SELECT 1 as result\",\n );\n const healthy = result.rows[0]?.result === 1;\n span.setAttribute(\"db.healthy\", healthy);\n 
span.setStatus({ code: SpanStatusCode.OK });\n return healthy;\n } catch {\n span.setAttribute(\"db.healthy\", false);\n span.setStatus({ code: SpanStatusCode.ERROR });\n return false;\n } finally {\n span.end();\n }\n },\n );\n }\n\n /** Close connection pool (call on shutdown) */\n async close(): Promise<void> {\n if (this.pool) {\n await this.pool.end().catch((error: unknown) => {\n logger.error(\"Error closing connection pool: %O\", error);\n });\n this.pool = null;\n }\n this.credentials = null;\n }\n\n /** Setup graceful shutdown to close connection pools */\n shutdown(): void {\n process.on(\"SIGTERM\", () => this.close());\n process.on(\"SIGINT\", () => this.close());\n this.close();\n }\n\n /** Get Databricks workspace client - from config or execution context */\n private getWorkspaceClient(): WorkspaceClient {\n if (this.config.workspaceClient) {\n return this.config.workspaceClient;\n }\n\n try {\n const { getWorkspaceClient: getClient } = require(\"../../context\");\n const client = getClient();\n\n // cache it for subsequent calls\n this.config.workspaceClient = client;\n return client;\n } catch (_error) {\n throw ConnectionError.clientUnavailable(\n \"Databricks workspace client\",\n \"Either pass it in config or ensure ServiceContext is initialized\",\n );\n }\n }\n\n /** Get or create connection pool */\n private async getPool(): Promise<pg.Pool> {\n if (!this.connectionConfig) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Set PGHOST, PGDATABASE, PGAPPNAME env vars, provide a connectionString, or pass explicit config\",\n );\n }\n\n if (!this.pool) {\n const creds = await this.getCredentials();\n this.pool = this.createPool(creds);\n }\n return this.pool;\n }\n\n /** Create PostgreSQL pool */\n private createPool(credentials: {\n username: string;\n password: string;\n }): pg.Pool {\n const { host, database, port, sslMode } = this.connectionConfig;\n\n const pool = new pg.Pool({\n host,\n port,\n database,\n user: 
credentials.username,\n password: credentials.password,\n max: this.config.maxPoolSize,\n idleTimeoutMillis: this.config.idleTimeoutMs,\n connectionTimeoutMillis: this.config.connectionTimeoutMs,\n ssl: sslMode === \"require\" ? { rejectUnauthorized: true } : false,\n });\n\n pool.on(\"error\", (error: Error & { code?: string }) => {\n logger.error(\n \"Connection pool error: %s (code: %s)\",\n error.message,\n error.code,\n );\n });\n\n return pool;\n }\n\n /** Get or fetch credentials with caching */\n private async getCredentials(): Promise<{\n username: string;\n password: string;\n }> {\n const now = Date.now();\n\n // return cached if still valid\n if (\n this.credentials &&\n now < this.credentials.expiresAt - this.CACHE_BUFFER_MS\n ) {\n return this.credentials;\n }\n\n // fetch new credentials\n const username = await this.fetchUsername();\n const { token, expiresAt } = await this.fetchPassword();\n\n this.credentials = {\n username,\n password: token,\n expiresAt,\n };\n\n return { username, password: token };\n }\n\n /** Rotate credentials and recreate pool */\n private async rotateCredentials(): Promise<void> {\n // clear cached credentials\n this.credentials = null;\n\n if (this.pool) {\n const oldPool = this.pool;\n this.pool = null;\n oldPool.end().catch((error: unknown) => {\n logger.error(\n \"Error closing old connection pool during rotation: %O\",\n error,\n );\n });\n }\n }\n\n /** Fetch username from Databricks */\n private async fetchUsername(): Promise<string> {\n const workspaceClient = this.getWorkspaceClient();\n const user = await workspaceClient.currentUser.me();\n if (!user.userName) {\n throw AuthenticationError.userLookupFailed();\n }\n return user.userName;\n }\n\n /** Fetch password (OAuth token) from Databricks */\n private async fetchPassword(): Promise<{ token: string; expiresAt: number }> {\n const workspaceClient = this.getWorkspaceClient();\n const config = new Config({ host: workspaceClient.config.host });\n const apiClient = 
new ApiClient(config);\n\n if (!this.connectionConfig.appName) {\n throw ConfigurationError.resourceNotFound(\"Database app name\");\n }\n\n const credentials = await apiClient.request({\n path: `/api/2.0/database/credentials`,\n method: \"POST\",\n headers: new Headers(),\n raw: false,\n payload: {\n instance_names: [this.connectionConfig.appName],\n request_id: randomUUID(),\n },\n });\n\n if (!this.validateCredentials(credentials)) {\n throw AuthenticationError.credentialsFailed(\n this.connectionConfig.appName,\n );\n }\n\n const expiresAt = new Date(credentials.expiration_time).getTime();\n\n return { token: credentials.token, expiresAt };\n }\n\n /** Check if error is auth failure */\n private isAuthError(error: unknown): boolean {\n return (\n typeof error === \"object\" &&\n error !== null &&\n \"code\" in error &&\n (error as any).code === \"28P01\"\n );\n }\n\n /** Check if error is transient */\n private isTransientError(error: unknown): boolean {\n if (typeof error !== \"object\" || error === null || !(\"code\" in error)) {\n return false;\n }\n\n const code = (error as any).code;\n return (\n code === \"ECONNRESET\" ||\n code === \"ECONNREFUSED\" ||\n code === \"ETIMEDOUT\" ||\n code === \"57P01\" || // admin_shutdown\n code === \"57P03\" || // cannot_connect_now\n code === \"08006\" || // connection_failure\n code === \"08003\" || // connection_does_not_exist\n code === \"08000\" // connection_exception\n );\n }\n\n /** Type guard for credentials */\n private validateCredentials(\n value: unknown,\n ): value is { token: string; expiration_time: string } {\n if (typeof value !== \"object\" || value === null) {\n return false;\n }\n\n const credentials = value as { token: string; expiration_time: string };\n return (\n \"token\" in credentials &&\n typeof credentials.token === \"string\" &&\n \"expiration_time\" in credentials &&\n typeof credentials.expiration_time === \"string\" &&\n new Date(credentials.expiration_time).getTime() > Date.now()\n );\n 
}\n\n /** Parse connection configuration from config or environment */\n private parseConnectionConfig(): LakebaseV1ConnectionConfig {\n if (this.config.connectionString) {\n return this.parseConnectionString(this.config.connectionString);\n }\n\n // get connection from config\n if (this.config.host && this.config.database && this.config.appName) {\n return {\n host: this.config.host,\n database: this.config.database,\n port: this.config.port ?? 5432,\n sslMode: this.config.sslMode ?? \"require\",\n appName: this.config.appName,\n };\n }\n\n // get connection from environment variables\n const pgHost = process.env.PGHOST;\n const pgDatabase = process.env.PGDATABASE;\n const pgAppName = process.env.PGAPPNAME;\n if (!pgHost || !pgDatabase || !pgAppName) {\n throw ConfigurationError.invalidConnection(\n \"Lakebase\",\n \"Required env vars: PGHOST, PGDATABASE, PGAPPNAME. Optional: PGPORT (default: 5432), PGSSLMODE (default: require)\",\n );\n }\n const pgPort = process.env.PGPORT;\n const port = pgPort ? parseInt(pgPort, 10) : 5432;\n\n if (Number.isNaN(port)) {\n throw ValidationError.invalidValue(\"port\", pgPort, \"a number\");\n }\n\n const pgSSLMode = process.env.PGSSLMODE;\n const sslMode =\n (pgSSLMode as \"require\" | \"disable\" | \"prefer\") || \"require\";\n\n return {\n host: pgHost,\n database: pgDatabase,\n port,\n sslMode,\n appName: pgAppName,\n };\n }\n\n private parseConnectionString(\n connectionString: string,\n ): LakebaseV1ConnectionConfig {\n const url = new URL(connectionString);\n const appName = url.searchParams.get(\"appName\");\n if (!appName) {\n throw ConfigurationError.missingConnectionParam(\"appName\");\n }\n\n return {\n host: url.hostname,\n database: url.pathname.slice(1), // remove leading slash\n port: url.port ? 
parseInt(url.port, 10) : 5432,\n sslMode:\n (url.searchParams.get(\"sslmode\") as \"require\" | \"disable\" | \"prefer\") ??\n \"require\",\n appName: appName,\n };\n }\n}\n"],"mappings":";;;;;;;;AA2BA,MAAM,SAAS,aAAa,yBAAyB"}
@@ -0,0 +1,9 @@
1
+ import { createLogger } from "../../logging/logger.js";
2
+ import "../../telemetry/index.js";
3
+
4
+ //#region src/connectors/vector-search/client.ts
5
+ const logger = createLogger("connectors:vector-search");
6
+
7
+ //#endregion
8
+ export { };
9
+ //# sourceMappingURL=client.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"client.js","names":[],"sources":["../../../src/connectors/vector-search/client.ts"],"sourcesContent":["import type { WorkspaceClient } from \"@databricks/sdk-experimental\";\nimport { createLogger } from \"../../logging/logger\";\nimport type { TelemetryProvider } from \"../../telemetry\";\nimport {\n type Span,\n SpanKind,\n SpanStatusCode,\n TelemetryManager,\n} from \"../../telemetry\";\nimport type {\n VectorSearchConnectorConfig,\n VsNextPageParams,\n VsQueryParams,\n VsRawResponse,\n} from \"./types\";\n\nconst logger = createLogger(\"connectors:vector-search\");\n\nexport class VectorSearchConnector {\n private readonly telemetry: TelemetryProvider;\n\n constructor(config: VectorSearchConnectorConfig = {}) {\n this.telemetry = TelemetryManager.getProvider(\n \"vector-search\",\n config.telemetry,\n );\n }\n\n async query(\n workspaceClient: WorkspaceClient,\n params: VsQueryParams,\n signal?: AbortSignal,\n ): Promise<VsRawResponse> {\n if (signal?.aborted) {\n throw new Error(\"Query cancelled before execution\");\n }\n\n const body: Record<string, unknown> = {\n columns: params.columns,\n num_results: params.numResults,\n query_type: params.queryType.toUpperCase(),\n debug_level: 1,\n };\n\n if (params.queryText) body.query_text = params.queryText;\n if (params.queryVector) body.query_vector = params.queryVector;\n if (params.filters && Object.keys(params.filters).length > 0) {\n body.filters = params.filters;\n }\n if (params.reranker) {\n body.reranker = {\n model: \"databricks_reranker\",\n parameters: { columns_to_rerank: params.reranker.columnsToRerank },\n };\n }\n\n logger.debug(\n \"Querying VS index %s (type=%s, num_results=%d)\",\n params.indexName,\n params.queryType,\n params.numResults,\n );\n\n return this.telemetry.startActiveSpan(\n \"vector-search.query\",\n {\n kind: SpanKind.CLIENT,\n attributes: {\n \"db.system\": \"databricks\",\n \"vs.index_name\": params.indexName,\n \"vs.query_type\": params.queryType,\n 
\"vs.num_results\": params.numResults,\n \"vs.has_filters\": !!(\n params.filters && Object.keys(params.filters).length > 0\n ),\n \"vs.has_reranker\": !!params.reranker,\n },\n },\n async (span: Span) => {\n const startTime = Date.now();\n try {\n const response = (await workspaceClient.apiClient.request({\n method: \"POST\",\n path: `/api/2.0/vector-search/indexes/${params.indexName}/query`,\n payload: body,\n headers: new Headers({ \"Content-Type\": \"application/json\" }),\n raw: false,\n query: {},\n })) as VsRawResponse;\n\n const duration = Date.now() - startTime;\n span.setAttribute(\"vs.result_count\", response.result.row_count);\n span.setAttribute(\n \"vs.query_time_ms\",\n response.debug_info?.response_time ?? 0,\n );\n span.setAttribute(\"vs.duration_ms\", duration);\n span.setStatus({ code: SpanStatusCode.OK });\n\n logger.event()?.setContext(\"vector-search\", {\n index_name: params.indexName,\n query_type: params.queryType,\n result_count: response.result.row_count,\n query_time_ms: response.debug_info?.response_time ?? 0,\n duration_ms: duration,\n });\n\n return response;\n } catch (error) {\n span.recordException(error as Error);\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: error instanceof Error ? 
error.message : String(error),\n });\n throw error;\n }\n },\n { name: \"vector-search\", includePrefix: true },\n );\n }\n\n async queryNextPage(\n workspaceClient: WorkspaceClient,\n params: VsNextPageParams,\n signal?: AbortSignal,\n ): Promise<VsRawResponse> {\n if (signal?.aborted) {\n throw new Error(\"Query cancelled before execution\");\n }\n\n logger.debug(\n \"Fetching next page for index %s (endpoint=%s)\",\n params.indexName,\n params.endpointName,\n );\n\n return this.telemetry.startActiveSpan(\n \"vector-search.queryNextPage\",\n {\n kind: SpanKind.CLIENT,\n attributes: {\n \"db.system\": \"databricks\",\n \"vs.index_name\": params.indexName,\n \"vs.endpoint_name\": params.endpointName,\n },\n },\n async (span: Span) => {\n try {\n const response = (await workspaceClient.apiClient.request({\n method: \"POST\",\n path: `/api/2.0/vector-search/indexes/${params.indexName}/query-next-page`,\n payload: {\n endpoint_name: params.endpointName,\n page_token: params.pageToken,\n },\n headers: new Headers({ \"Content-Type\": \"application/json\" }),\n raw: false,\n query: {},\n })) as VsRawResponse;\n\n span.setAttribute(\"vs.result_count\", response.result.row_count);\n span.setStatus({ code: SpanStatusCode.OK });\n return response;\n } catch (error) {\n span.recordException(error as Error);\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: error instanceof Error ? error.message : String(error),\n });\n throw error;\n }\n },\n { name: \"vector-search\", includePrefix: true },\n );\n }\n}\n"],"mappings":";;;;AAgBA,MAAM,SAAS,aAAa,2BAA2B"}
@@ -0,0 +1,3 @@
1
+ import "./client.js";
2
+
3
+ export { };
@@ -60,12 +60,6 @@ function getWarehouseId() {
60
60
  function getWorkspaceId() {
61
61
  return getExecutionContext().workspaceId;
62
62
  }
63
- /**
64
- * Check if currently running in a user context.
65
- */
66
- function isInUserContext() {
67
- return executionContextStorage.getStore() !== void 0;
68
- }
69
63
  var executionContextStorage;
70
64
  var init_execution_context = __esmMin((() => {
71
65
  init_errors();
@@ -76,5 +70,5 @@ var init_execution_context = __esmMin((() => {
76
70
 
77
71
  //#endregion
78
72
  init_execution_context();
79
- export { getCurrentUserId, getExecutionContext, getWarehouseId, getWorkspaceClient, getWorkspaceId, init_execution_context, isInUserContext, runInUserContext };
73
+ export { getCurrentUserId, getExecutionContext, getWarehouseId, getWorkspaceClient, getWorkspaceId, init_execution_context, runInUserContext };
80
74
  //# sourceMappingURL=execution-context.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"execution-context.js","names":[],"sources":["../../src/context/execution-context.ts"],"sourcesContent":["import { AsyncLocalStorage } from \"node:async_hooks\";\nimport { ConfigurationError } from \"../errors\";\nimport { ServiceContext } from \"./service-context\";\nimport {\n type ExecutionContext,\n isUserContext,\n type UserContext,\n} from \"./user-context\";\n\n/**\n * AsyncLocalStorage for execution context.\n * Used to pass user context through the call stack without explicit parameters.\n */\nconst executionContextStorage = new AsyncLocalStorage<UserContext>();\n\n/**\n * Run a function in the context of a user.\n * All calls within the function will have access to the user context.\n *\n * @param userContext - The user context to use\n * @param fn - The function to run\n * @returns The result of the function\n */\nexport function runInUserContext<T>(userContext: UserContext, fn: () => T): T {\n return executionContextStorage.run(userContext, fn);\n}\n\n/**\n * Get the current execution context.\n *\n * - If running inside a user context (via asUser), returns the user context\n * - Otherwise, returns the service context\n *\n * @throws Error if ServiceContext is not initialized\n */\nexport function getExecutionContext(): ExecutionContext {\n const userContext = executionContextStorage.getStore();\n if (userContext) {\n return userContext;\n }\n return ServiceContext.get();\n}\n\n/**\n * Get the current user ID for cache keying and telemetry.\n *\n * Returns the user ID if in user context, otherwise the service user ID.\n */\nexport function getCurrentUserId(): string {\n const ctx = getExecutionContext();\n if (isUserContext(ctx)) {\n return ctx.userId;\n }\n return ctx.serviceUserId;\n}\n\n/**\n * Get the WorkspaceClient for the current execution context.\n */\nexport function getWorkspaceClient() {\n return getExecutionContext().client;\n}\n\n/**\n * Get the warehouse ID promise.\n */\nexport function getWarehouseId(): 
Promise<string> {\n const ctx = getExecutionContext();\n if (!ctx.warehouseId) {\n throw ConfigurationError.resourceNotFound(\n \"Warehouse ID\",\n \"No plugin requires a SQL Warehouse. Add a sql_warehouse resource to your plugin manifest, or set DATABRICKS_WAREHOUSE_ID\",\n );\n }\n return ctx.warehouseId;\n}\n\n/**\n * Get the workspace ID promise.\n */\nexport function getWorkspaceId(): Promise<string> {\n return getExecutionContext().workspaceId;\n}\n\n/**\n * Check if currently running in a user context.\n */\nexport function isInUserContext(): boolean {\n const ctx = executionContextStorage.getStore();\n return ctx !== undefined;\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAuBA,SAAgB,iBAAoB,aAA0B,IAAgB;AAC5E,QAAO,wBAAwB,IAAI,aAAa,GAAG;;;;;;;;;;AAWrD,SAAgB,sBAAwC;CACtD,MAAM,cAAc,wBAAwB,UAAU;AACtD,KAAI,YACF,QAAO;AAET,QAAO,eAAe,KAAK;;;;;;;AAQ7B,SAAgB,mBAA2B;CACzC,MAAM,MAAM,qBAAqB;AACjC,KAAI,cAAc,IAAI,CACpB,QAAO,IAAI;AAEb,QAAO,IAAI;;;;;AAMb,SAAgB,qBAAqB;AACnC,QAAO,qBAAqB,CAAC;;;;;AAM/B,SAAgB,iBAAkC;CAChD,MAAM,MAAM,qBAAqB;AACjC,KAAI,CAAC,IAAI,YACP,OAAM,mBAAmB,iBACvB,gBACA,2HACD;AAEH,QAAO,IAAI;;;;;AAMb,SAAgB,iBAAkC;AAChD,QAAO,qBAAqB,CAAC;;;;;AAM/B,SAAgB,kBAA2B;AAEzC,QADY,wBAAwB,UAAU,KAC/B;;;;cAxF8B;uBACI;oBAK3B;CAMlB,0BAA0B,IAAI,mBAAgC"}
1
+ {"version":3,"file":"execution-context.js","names":[],"sources":["../../src/context/execution-context.ts"],"sourcesContent":["import { AsyncLocalStorage } from \"node:async_hooks\";\nimport { ConfigurationError } from \"../errors\";\nimport { ServiceContext } from \"./service-context\";\nimport {\n type ExecutionContext,\n isUserContext,\n type UserContext,\n} from \"./user-context\";\n\n/**\n * AsyncLocalStorage for execution context.\n * Used to pass user context through the call stack without explicit parameters.\n */\nconst executionContextStorage = new AsyncLocalStorage<UserContext>();\n\n/**\n * Run a function in the context of a user.\n * All calls within the function will have access to the user context.\n *\n * @param userContext - The user context to use\n * @param fn - The function to run\n * @returns The result of the function\n */\nexport function runInUserContext<T>(userContext: UserContext, fn: () => T): T {\n return executionContextStorage.run(userContext, fn);\n}\n\n/**\n * Get the current execution context.\n *\n * - If running inside a user context (via asUser), returns the user context\n * - Otherwise, returns the service context\n *\n * @throws Error if ServiceContext is not initialized\n */\nexport function getExecutionContext(): ExecutionContext {\n const userContext = executionContextStorage.getStore();\n if (userContext) {\n return userContext;\n }\n return ServiceContext.get();\n}\n\n/**\n * Get the current user ID for cache keying and telemetry.\n *\n * Returns the user ID if in user context, otherwise the service user ID.\n */\nexport function getCurrentUserId(): string {\n const ctx = getExecutionContext();\n if (isUserContext(ctx)) {\n return ctx.userId;\n }\n return ctx.serviceUserId;\n}\n\n/**\n * Get the WorkspaceClient for the current execution context.\n */\nexport function getWorkspaceClient() {\n return getExecutionContext().client;\n}\n\n/**\n * Get the warehouse ID promise.\n */\nexport function getWarehouseId(): 
Promise<string> {\n const ctx = getExecutionContext();\n if (!ctx.warehouseId) {\n throw ConfigurationError.resourceNotFound(\n \"Warehouse ID\",\n \"No plugin requires a SQL Warehouse. Add a sql_warehouse resource to your plugin manifest, or set DATABRICKS_WAREHOUSE_ID\",\n );\n }\n return ctx.warehouseId;\n}\n\n/**\n * Get the workspace ID promise.\n */\nexport function getWorkspaceId(): Promise<string> {\n return getExecutionContext().workspaceId;\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAuBA,SAAgB,iBAAoB,aAA0B,IAAgB;AAC5E,QAAO,wBAAwB,IAAI,aAAa,GAAG;;;;;;;;;;AAWrD,SAAgB,sBAAwC;CACtD,MAAM,cAAc,wBAAwB,UAAU;AACtD,KAAI,YACF,QAAO;AAET,QAAO,eAAe,KAAK;;;;;;;AAQ7B,SAAgB,mBAA2B;CACzC,MAAM,MAAM,qBAAqB;AACjC,KAAI,cAAc,IAAI,CACpB,QAAO,IAAI;AAEb,QAAO,IAAI;;;;;AAMb,SAAgB,qBAAqB;AACnC,QAAO,qBAAqB,CAAC;;;;;AAM/B,SAAgB,iBAAkC;CAChD,MAAM,MAAM,qBAAqB;AACjC,KAAI,CAAC,IAAI,YACP,OAAM,mBAAmB,iBACvB,gBACA,2HACD;AAEH,QAAO,IAAI;;;;;AAMb,SAAgB,iBAAkC;AAChD,QAAO,qBAAqB,CAAC;;;;cAhFgB;uBACI;oBAK3B;CAMlB,0BAA0B,IAAI,mBAAgC"}
@@ -1,6 +1,6 @@
1
1
  import { __esmMin } from "../_virtual/_rolldown/runtime.js";
2
2
  import { ServiceContext, init_service_context } from "./service-context.js";
3
- import { getCurrentUserId, getExecutionContext, getWarehouseId, getWorkspaceClient, getWorkspaceId, init_execution_context, isInUserContext, runInUserContext } from "./execution-context.js";
3
+ import { getCurrentUserId, getExecutionContext, getWarehouseId, getWorkspaceClient, getWorkspaceId, init_execution_context, runInUserContext } from "./execution-context.js";
4
4
 
5
5
  //#region src/context/index.ts
6
6
  var init_context = __esmMin((() => {
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","names":[],"sources":["../../src/context/index.ts"],"sourcesContent":["export {\n getCurrentUserId,\n getExecutionContext,\n getWarehouseId,\n getWorkspaceClient,\n getWorkspaceId,\n isInUserContext,\n runInUserContext,\n} from \"./execution-context\";\nexport { ServiceContext } from \"./service-context\";\nexport type { UserContext } from \"./user-context\";\n"],"mappings":";;;;;;yBAQ6B;uBACsB"}
1
+ {"version":3,"file":"index.js","names":[],"sources":["../../src/context/index.ts"],"sourcesContent":["export {\n getCurrentUserId,\n getExecutionContext,\n getWarehouseId,\n getWorkspaceClient,\n getWorkspaceId,\n runInUserContext,\n} from \"./execution-context\";\nexport { ServiceContext } from \"./service-context\";\nexport type { UserContext } from \"./user-context\";\n"],"mappings":";;;;;;yBAO6B;uBACsB"}
package/dist/index.d.ts CHANGED
@@ -31,6 +31,7 @@ import { getPluginManifest, getResourceRequirements } from "./registry/manifest-
31
31
  import { ResourceRegistry } from "./registry/resource-registry.js";
32
32
  import "./registry/index.js";
33
33
  import { analytics } from "./plugins/analytics/analytics.js";
34
+ import { FileAction, FilePolicy, FilePolicyUser, FileResource, PolicyDeniedError, READ_ACTIONS, WRITE_ACTIONS } from "./plugins/files/policy.js";
34
35
  import { files } from "./plugins/files/plugin.js";
35
36
  import { genie } from "./plugins/genie/genie.js";
36
37
  import { lakebase } from "./plugins/lakebase/lakebase.js";
@@ -41,4 +42,4 @@ import "./plugins/index.js";
41
42
  import { extractServingEndpoints, findServerFile } from "./type-generator/serving/server-file-extractor.js";
42
43
  import { appKitServingTypesPlugin } from "./type-generator/serving/vite-plugin.js";
43
44
  import { appKitTypesPlugin } from "./type-generator/vite-plugin.js";
44
- export { AppKitError, AuthenticationError, type BasePluginConfig, type CacheConfig, CacheManager, type ConfigSchema, ConfigurationError, ConnectionError, type Counter, type DatabaseCredential, type EndpointConfig, ExecutionError, type ExecutionResult, type GenerateDatabaseCredentialRequest, type Histogram, type IAppRouter, type ITelemetry, InitializationError, type LakebasePoolConfig, Plugin, type PluginData, type PluginManifest, type RequestedClaims, RequestedClaimsPermissionSet, type RequestedResource, type ResourceEntry, type ResourceFieldEntry, type ResourcePermission, ResourceRegistry, type ResourceRequirement, ResourceType, ServerError, type ServingEndpointEntry, type ServingEndpointRegistry, type ServingFactory, SeverityNumber, type Span, SpanStatusCode, type StreamExecutionSettings, type TelemetryConfig, type ToPlugin, TunnelError, ValidationError, type ValidationResult, analytics, appKitServingTypesPlugin, appKitTypesPlugin, createApp, createLakebasePool, extractServingEndpoints, files, findServerFile, generateDatabaseCredential, genie, getExecutionContext, getLakebaseOrmConfig, getLakebasePgConfig, getPluginManifest, getResourceRequirements, getUsernameWithApiLookup, getWorkspaceClient, isSQLTypeMarker, lakebase, server, serving, sql, toPlugin };
45
+ export { AppKitError, AuthenticationError, type BasePluginConfig, type CacheConfig, CacheManager, type ConfigSchema, ConfigurationError, ConnectionError, type Counter, type DatabaseCredential, type EndpointConfig, ExecutionError, type ExecutionResult, type FileAction, type FilePolicy, type FilePolicyUser, type FileResource, type GenerateDatabaseCredentialRequest, type Histogram, type IAppRouter, type ITelemetry, InitializationError, type LakebasePoolConfig, Plugin, type PluginData, type PluginManifest, PolicyDeniedError, READ_ACTIONS, type RequestedClaims, RequestedClaimsPermissionSet, type RequestedResource, type ResourceEntry, type ResourceFieldEntry, type ResourcePermission, ResourceRegistry, type ResourceRequirement, ResourceType, ServerError, type ServingEndpointEntry, type ServingEndpointRegistry, type ServingFactory, SeverityNumber, type Span, SpanStatusCode, type StreamExecutionSettings, type TelemetryConfig, type ToPlugin, TunnelError, ValidationError, type ValidationResult, WRITE_ACTIONS, analytics, appKitServingTypesPlugin, appKitTypesPlugin, createApp, createLakebasePool, extractServingEndpoints, files, findServerFile, generateDatabaseCredential, genie, getExecutionContext, getLakebaseOrmConfig, getLakebasePgConfig, getPluginManifest, getResourceRequirements, getUsernameWithApiLookup, getWorkspaceClient, isSQLTypeMarker, lakebase, server, serving, sql, toPlugin };