@supabase/mcp-server-supabase 0.5.0-alpha.1 → 0.5.0-dev.1

This diff compares publicly available package versions as published to their respective public registries. It is provided for informational purposes only and reflects the package contents as they appear in those registries.
Files changed (40)
  1. package/dist/chunk-2E4PBGE7.cjs +311 -0
  2. package/dist/chunk-2E4PBGE7.cjs.map +1 -0
  3. package/dist/chunk-7VYUDCV6.js +311 -0
  4. package/dist/chunk-7VYUDCV6.js.map +1 -0
  5. package/dist/chunk-LO7S72TA.js +2 -0
  6. package/dist/{chunk-W2V6ITSZ.js.map → chunk-LO7S72TA.js.map} +1 -1
  7. package/dist/chunk-WMARAA3I.cjs +2 -0
  8. package/dist/chunk-WMARAA3I.cjs.map +1 -0
  9. package/dist/index.cjs +1 -1
  10. package/dist/index.cjs.map +1 -1
  11. package/dist/index.d.cts +27 -1
  12. package/dist/index.d.ts +27 -1
  13. package/dist/index.js +1 -1
  14. package/dist/platform/index.cjs +1 -1
  15. package/dist/platform/index.cjs.map +1 -1
  16. package/dist/platform/index.d.cts +33 -33
  17. package/dist/platform/index.d.ts +33 -33
  18. package/dist/platform/index.js +1 -1
  19. package/dist/transports/stdio.cjs +1 -1
  20. package/dist/transports/stdio.cjs.map +1 -1
  21. package/dist/transports/stdio.js +1 -1
  22. package/dist/transports/stdio.js.map +1 -1
  23. package/package.json +3 -8
  24. package/dist/chunk-3ZRWMUMF.js +0 -311
  25. package/dist/chunk-3ZRWMUMF.js.map +0 -1
  26. package/dist/chunk-4ET572GE.cjs +0 -311
  27. package/dist/chunk-4ET572GE.cjs.map +0 -1
  28. package/dist/chunk-LL3OYJR7.cjs +0 -2
  29. package/dist/chunk-LL3OYJR7.cjs.map +0 -1
  30. package/dist/chunk-W2V6ITSZ.js +0 -2
  31. package/dist/chunk-X63UTBYC.cjs +0 -2
  32. package/dist/chunk-X63UTBYC.cjs.map +0 -1
  33. package/dist/chunk-YFZN5EJN.js +0 -2
  34. package/dist/chunk-YFZN5EJN.js.map +0 -1
  35. package/dist/platform/api-platform.cjs +0 -2
  36. package/dist/platform/api-platform.cjs.map +0 -1
  37. package/dist/platform/api-platform.d.cts +0 -20
  38. package/dist/platform/api-platform.d.ts +0 -20
  39. package/dist/platform/api-platform.js +0 -2
  40. package/dist/platform/api-platform.js.map +0 -1
package/dist/chunk-WMARAA3I.cjs.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["/Users/grichardson/Documents/dev/supabase/mcp-server-supabase/packages/mcp-server-supabase/dist/chunk-WMARAA3I.cjs","../src/platform/api-platform.ts","../src/eszip.ts","../src/management-api/index.ts"],"names":["parser","Parser","sourceMapSchema","z","extractFiles","eszip","pathPrefix","specifiers","reader","fileSpecifiers","specifier","source","sourceMapString","filePath","relative","fileURLToPath","file","sourceMap","typeScriptSource","createManagementApiClient","baseUrl","accessToken","headers","createClient"],"mappings":"AAAA,87BAA4H,0BCC9F,oCCDA,mCACC,0BAEb,IAEZA,CAAAA,CAAS,MAAMC,aAAAA,CAAO,cAAA,CAAe,CAAA,CACrCC,CAAAA,CAAkBC,MAAAA,CAAE,MAAA,CAAO,CAC/B,OAAA,CAASA,MAAAA,CAAE,MAAA,CAAO,CAAA,CAClB,OAAA,CAASA,MAAAA,CAAE,KAAA,CAAMA,MAAAA,CAAE,MAAA,CAAO,CAAC,CAAA,CAC3B,cAAA,CAAgBA,MAAAA,CAAE,KAAA,CAAMA,MAAAA,CAAE,MAAA,CAAO,CAAC,CAAA,CAAE,QAAA,CAAS,CAAA,CAC7C,KAAA,CAAOA,MAAAA,CAAE,KAAA,CAAMA,MAAAA,CAAE,MAAA,CAAO,CAAC,CAAA,CACzB,QAAA,CAAUA,MAAAA,CAAE,MAAA,CAAO,CACrB,CAAC,CAAA,CAUD,MAAA,SAAsBC,CAAAA,CACpBC,CAAAA,CACAC,CAAAA,CAAqB,GAAA,CACrB,CACA,IAAIC,CAAAA,CAAuB,CAAC,CAAA,CAE5B,EAAA,CAAIF,EAAAA,WAAiB,cAAA,CAAgB,CACnC,IAAMG,CAAAA,CAASH,CAAAA,CAAM,SAAA,CAAU,CAAE,IAAA,CAAM,MAAO,CAAC,CAAA,CAC/CE,CAAAA,CAAa,MAAMP,CAAAA,CAAO,KAAA,CAAMQ,CAAM,CACxC,CAAA,KACED,CAAAA,CAAa,MAAMP,CAAAA,CAAO,UAAA,CAAWK,CAAK,CAAA,CAG5C,MAAML,CAAAA,CAAO,IAAA,CAAK,CAAA,CAElB,IAAMS,CAAAA,CAAiBF,CAAAA,CAAW,MAAA,CAAQG,CAAAA,EACxCA,CAAAA,CAAU,UAAA,CAAW,SAAS,CAChC,CAAA,CAqCA,OAnCc,MAAM,OAAA,CAAQ,GAAA,CAC1BD,CAAAA,CAAe,GAAA,CAAI,MAAOC,CAAAA,EAAc,CACtC,IAAMC,CAAAA,CAAiB,MAAMX,CAAAA,CAAO,eAAA,CAAgBU,CAAS,CAAA,CACvDE,CAAAA,CACJ,MAAMZ,CAAAA,CAAO,kBAAA,CAAmBU,CAAS,CAAA,CAErCG,CAAAA,CAAWC,6BAAAA,CACfR,CACAS,gCAAAA,CAAcL,CAAW,CAAE,OAAA,CAAS,CAAA,CAAM,CAAC,CAC7C,CAAA,CAEMM,CAAAA,CAAO,IAAI,IAAA,CAAK,CAACL,CAAM,CAAA,CAAGE,CAAAA,CAAU,CACxC,IAAA,CAAM,YACR,CAAC,CAAA,CAED,EAAA,CAAI,CAACD,CAAAA,CACH,OAAOI,CAAAA,CAGT,IAAMC,CAAAA,CAAYf,CAAAA,CAAgB,KAAA,CAAM,IAAA,CAAK,KAAA,CAAMU,CAAe,CAAC,CAAA,CAE7D,CAACM,CAAgB,CAAA,kBAAID,CAAAA,CAAU,cAAA,SAAkB,CAAC,GAAA,CAExD,OAAKC,CAAAA,CAIc,IAAI,IAAA,CAAK,CAACA,CAAgB,CAAA,CAAGL,CAAAA,CAAU,CACxD,IAAA,CAAM,wBACR,CAAC,CAAA,CALQG,CAQX,CAAC,CACH,CAGF,CC7EA,yGAIO,SASSG,CAAAA,CACdC,CAAAA,CACAC,CAAAA,CACAC,CAAAA,CAAkC,CAAC,CAAA,CACnC,CACA,OAAOC,oCAAAA,CACL,OAAA,CAAAH,CAAAA,CACA,OAAA,CAAS,CACP,aAAA,CAAe,CAAA,OAAA,EAAUC,CAAW,CAAA,CAAA","file":"/Users/grichardson/Documents/dev/supabase/mcp-server-supabase/packages/mcp-server-supabase/dist/chunk-WMARAA3I.cjs","sourcesContent":[null,"import type { InitData } from '@supabase/mcp-utils';\nimport { fileURLToPath } from 'node:url';\nimport packageJson from '../../package.json' with { type: 'json' };\nimport { getDeploymentId, getPathPrefix } from '../edge-function.js';\nimport { extractFiles } from '../eszip.js';\nimport {\n assertSuccess,\n createManagementApiClient,\n} from '../management-api/index.js';\nimport { generatePassword } from '../password.js';\nimport {\n getClosestAwsRegion,\n getCountryCode,\n getCountryCoordinates,\n} from '../regions.js';\nimport {\n applyMigrationOptionsSchema,\n createBranchOptionsSchema,\n createProjectOptionsSchema,\n deployEdgeFunctionOptionsSchema,\n executeSqlOptionsSchema,\n getLogsOptionsSchema,\n resetBranchOptionsSchema,\n type AccountOperations,\n type ApplyMigrationOptions,\n type BranchingOperations,\n type CreateBranchOptions,\n type CreateProjectOptions,\n type DatabaseOperations,\n type DebuggingOperations,\n type DeployEdgeFunctionOptions,\n type DevelopmentOperations,\n type EdgeFunction,\n type 
EdgeFunctionsOperations,\n type ExecuteSqlOptions,\n type GetLogsOptions,\n type ResetBranchOptions,\n type StorageConfig,\n type StorageOperations,\n type SupabasePlatform,\n} from './index.js';\n\nconst { version } = packageJson;\n\nexport type SupabaseApiPlatformOptions = {\n /**\n * The access token for the Supabase Management API.\n */\n accessToken: string;\n\n /**\n * The API URL for the Supabase Management API.\n */\n apiUrl?: string;\n};\n\n/**\n * Creates a Supabase platform implementation using the Supabase Management API.\n */\nexport function createSupabaseApiPlatform(\n options: SupabaseApiPlatformOptions\n): SupabasePlatform {\n const { accessToken, apiUrl } = options;\n\n const managementApiUrl = apiUrl ?? 'https://api.supabase.com';\n\n let managementApiClient = createManagementApiClient(\n managementApiUrl,\n accessToken\n );\n\n const account: AccountOperations = {\n async listOrganizations() {\n const response = await managementApiClient.GET('/v1/organizations');\n\n assertSuccess(response, 'Failed to fetch organizations');\n\n return response.data;\n },\n async getOrganization(organizationId: string) {\n const response = await managementApiClient.GET(\n '/v1/organizations/{slug}',\n {\n params: {\n path: {\n slug: organizationId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch organization');\n\n return response.data;\n },\n async listProjects() {\n const response = await managementApiClient.GET('/v1/projects');\n\n assertSuccess(response, 'Failed to fetch projects');\n\n return response.data;\n },\n async getProject(projectId: string) {\n const response = await managementApiClient.GET('/v1/projects/{ref}', {\n params: {\n path: {\n ref: projectId,\n },\n },\n });\n assertSuccess(response, 'Failed to fetch project');\n return response.data;\n },\n async createProject(options: CreateProjectOptions) {\n const { name, organization_id, region, db_pass } =\n createProjectOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST('/v1/projects', {\n body: {\n name,\n region: region ?? 
(await getClosestRegion()),\n organization_id,\n db_pass:\n db_pass ??\n generatePassword({\n length: 16,\n numbers: true,\n uppercase: true,\n lowercase: true,\n }),\n },\n });\n\n assertSuccess(response, 'Failed to create project');\n\n return response.data;\n },\n async pauseProject(projectId: string) {\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/pause',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to pause project');\n },\n async restoreProject(projectId: string) {\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/restore',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to restore project');\n },\n };\n\n const database: DatabaseOperations = {\n async executeSql<T>(projectId: string, options: ExecuteSqlOptions) {\n const { query, read_only } = executeSqlOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/database/query',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n query,\n read_only,\n },\n }\n );\n\n assertSuccess(response, 'Failed to execute SQL query');\n\n return response.data as unknown as T[];\n },\n async listMigrations(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/database/migrations',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch migrations');\n\n return response.data;\n },\n async applyMigration(projectId: string, options: ApplyMigrationOptions) {\n const { name, query } = applyMigrationOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/database/migrations',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n name,\n query,\n },\n }\n );\n\n assertSuccess(response, 'Failed to apply migration');\n\n // Intentionally don't return the result of the migration\n // to avoid prompt injection attacks. 
If the migration failed,\n // it will throw an error.\n },\n };\n\n const debugging: DebuggingOperations = {\n async getLogs(projectId: string, options: GetLogsOptions) {\n const { sql, iso_timestamp_start, iso_timestamp_end } =\n getLogsOptionsSchema.parse(options);\n\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/analytics/endpoints/logs.all',\n {\n params: {\n path: {\n ref: projectId,\n },\n query: {\n sql,\n iso_timestamp_start,\n iso_timestamp_end,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch logs');\n\n return response.data;\n },\n async getSecurityAdvisors(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/advisors/security',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch security advisors');\n\n return response.data;\n },\n async getPerformanceAdvisors(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/advisors/performance',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch performance advisors');\n\n return response.data;\n },\n };\n\n const development: DevelopmentOperations = {\n async getProjectUrl(projectId: string): Promise<string> {\n const apiUrl = new URL(managementApiUrl);\n return `https://${projectId}.${getProjectDomain(apiUrl.hostname)}`;\n },\n async getAnonKey(projectId: string): Promise<string> {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/api-keys',\n {\n params: {\n path: {\n ref: projectId,\n },\n query: {\n reveal: false,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch API keys');\n\n const anonKey = response.data?.find((key) => key.name === 'anon');\n\n if (!anonKey) {\n throw new Error('Anonymous key not found');\n }\n\n return anonKey.api_key;\n },\n async generateTypescriptTypes(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/types/typescript',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch TypeScript types');\n\n return response.data;\n },\n };\n\n const functions: EdgeFunctionsOperations = {\n async listEdgeFunctions(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/functions',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch Edge Functions');\n\n // Fetch files for each Edge Function\n return await Promise.all(\n response.data.map(async (listedFunction) => {\n return await functions.getEdgeFunction(\n projectId,\n listedFunction.slug\n );\n })\n );\n },\n async getEdgeFunction(projectId: string, functionSlug: string) {\n const functionResponse = await managementApiClient.GET(\n '/v1/projects/{ref}/functions/{function_slug}',\n {\n params: {\n path: {\n ref: projectId,\n function_slug: functionSlug,\n },\n },\n }\n );\n\n if (functionResponse.error) {\n throw functionResponse.error;\n }\n\n assertSuccess(functionResponse, 'Failed to fetch Edge Function');\n\n const edgeFunction = functionResponse.data;\n\n const deploymentId = getDeploymentId(\n projectId,\n edgeFunction.id,\n edgeFunction.version\n );\n\n const pathPrefix = getPathPrefix(deploymentId);\n\n const entrypoint_path = edgeFunction.entrypoint_path\n ? 
fileURLToPath(edgeFunction.entrypoint_path, {\n windows: false,\n }).replace(pathPrefix, '')\n : undefined;\n\n const import_map_path = edgeFunction.import_map_path\n ? fileURLToPath(edgeFunction.import_map_path, {\n windows: false,\n }).replace(pathPrefix, '')\n : undefined;\n\n const eszipResponse = await managementApiClient.GET(\n '/v1/projects/{ref}/functions/{function_slug}/body',\n {\n params: {\n path: {\n ref: projectId,\n function_slug: functionSlug,\n },\n },\n parseAs: 'arrayBuffer',\n }\n );\n\n assertSuccess(\n eszipResponse,\n 'Failed to fetch Edge Function eszip bundle'\n );\n\n const extractedFiles = await extractFiles(\n new Uint8Array(eszipResponse.data),\n pathPrefix\n );\n\n const files = await Promise.all(\n extractedFiles.map(async (file) => ({\n name: file.name,\n content: await file.text(),\n }))\n );\n\n return {\n ...edgeFunction,\n entrypoint_path,\n import_map_path,\n files,\n };\n },\n async deployEdgeFunction(\n projectId: string,\n options: DeployEdgeFunctionOptions\n ) {\n let {\n name,\n entrypoint_path,\n import_map_path,\n files: inputFiles,\n } = deployEdgeFunctionOptionsSchema.parse(options);\n\n let existingEdgeFunction: EdgeFunction | undefined;\n try {\n existingEdgeFunction = await functions.getEdgeFunction(projectId, name);\n } catch (error) {}\n\n const import_map_file = inputFiles.find((file) =>\n ['deno.json', 'import_map.json'].includes(file.name)\n );\n\n // Use existing import map path or file name heuristic if not provided\n import_map_path ??=\n existingEdgeFunction?.import_map_path ?? import_map_file?.name;\n\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/functions/deploy',\n {\n params: {\n path: {\n ref: projectId,\n },\n query: { slug: name },\n },\n body: {\n metadata: {\n name,\n entrypoint_path,\n import_map_path,\n },\n file: inputFiles as any, // We need to pass file name and content to our serializer\n },\n bodySerializer(body) {\n const formData = new FormData();\n\n const blob = new Blob([JSON.stringify(body.metadata)], {\n type: 'application/json',\n });\n formData.append('metadata', blob);\n\n body.file?.forEach((f: any) => {\n const file: { name: string; content: string } = f;\n const blob = new Blob([file.content], {\n type: 'application/typescript',\n });\n formData.append('file', blob, file.name);\n });\n\n return formData;\n },\n }\n );\n\n assertSuccess(response, 'Failed to deploy Edge Function');\n\n return response.data;\n },\n };\n\n const branching: BranchingOperations = {\n async listBranches(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/branches',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n // There are no branches if branching is disabled\n if (response.response.status === 422) return [];\n assertSuccess(response, 'Failed to list branches');\n\n return response.data;\n },\n async createBranch(projectId: string, options: CreateBranchOptions) {\n const { name } = createBranchOptionsSchema.parse(options);\n\n const createBranchResponse = await managementApiClient.POST(\n '/v1/projects/{ref}/branches',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n branch_name: name,\n },\n }\n );\n\n assertSuccess(createBranchResponse, 'Failed to create branch');\n\n return createBranchResponse.data;\n },\n async deleteBranch(branchId: string) {\n const response = await managementApiClient.DELETE(\n '/v1/branches/{branch_id}',\n {\n params: {\n path: {\n branch_id: branchId,\n },\n },\n }\n );\n\n assertSuccess(response, 
'Failed to delete branch');\n },\n async mergeBranch(branchId: string) {\n const response = await managementApiClient.POST(\n '/v1/branches/{branch_id}/merge',\n {\n params: {\n path: {\n branch_id: branchId,\n },\n },\n body: {},\n }\n );\n\n assertSuccess(response, 'Failed to merge branch');\n },\n async resetBranch(branchId: string, options: ResetBranchOptions) {\n const { migration_version } = resetBranchOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST(\n '/v1/branches/{branch_id}/reset',\n {\n params: {\n path: {\n branch_id: branchId,\n },\n },\n body: {\n migration_version,\n },\n }\n );\n\n assertSuccess(response, 'Failed to reset branch');\n },\n async rebaseBranch(branchId: string) {\n const response = await managementApiClient.POST(\n '/v1/branches/{branch_id}/push',\n {\n params: {\n path: {\n branch_id: branchId,\n },\n },\n body: {},\n }\n );\n\n assertSuccess(response, 'Failed to rebase branch');\n },\n };\n\n const storage: StorageOperations = {\n // Storage methods\n async listAllBuckets(project_id: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/storage/buckets',\n {\n params: {\n path: {\n ref: project_id,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to list storage buckets');\n\n return response.data;\n },\n\n async getStorageConfig(project_id: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/config/storage',\n {\n params: {\n path: {\n ref: project_id,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to get storage config');\n\n return response.data;\n },\n\n async updateStorageConfig(projectId: string, config: StorageConfig) {\n const response = await managementApiClient.PATCH(\n '/v1/projects/{ref}/config/storage',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n fileSizeLimit: config.fileSizeLimit,\n features: {\n imageTransformation: {\n enabled: config.features.imageTransformation.enabled,\n },\n s3Protocol: {\n enabled: config.features.s3Protocol.enabled,\n },\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to update storage config');\n\n return response.data;\n },\n };\n\n const platform: SupabasePlatform = {\n async init(info: InitData) {\n const { clientInfo } = info;\n if (!clientInfo) {\n throw new Error('Client info is required');\n }\n\n // Re-initialize the management API client with the user agent\n managementApiClient = createManagementApiClient(\n managementApiUrl,\n accessToken,\n {\n 'User-Agent': `supabase-mcp/${version} (${clientInfo.name}/${clientInfo.version})`,\n }\n );\n },\n account,\n database,\n debugging,\n development,\n functions,\n branching,\n storage,\n };\n\n return platform;\n}\n\nfunction getProjectDomain(apiHostname: string) {\n switch (apiHostname) {\n case 'api.supabase.com':\n return 'supabase.co';\n case 'api.supabase.green':\n return 'supabase.green';\n default:\n return 'supabase.red';\n }\n}\n\nasync function getClosestRegion() {\n return getClosestAwsRegion(getCountryCoordinates(await getCountryCode()))\n .code;\n}\n","import { build, Parser } from '@deno/eszip';\nimport { join, relative } from 'node:path/posix';\nimport { fileURLToPath } from 'node:url';\nimport { z } from 'zod';\n\nconst parser = await Parser.createInstance();\nconst sourceMapSchema = z.object({\n version: z.number(),\n sources: z.array(z.string()),\n sourcesContent: z.array(z.string()).optional(),\n names: z.array(z.string()),\n mappings: z.string(),\n});\n\n/**\n * Extracts source files from an eszip archive.\n *\n 
* Optionally removes the given path prefix from file names.\n *\n * If a file contains a source map, it will return the\n * original TypeScript source instead of the transpiled file.\n */\nexport async function extractFiles(\n eszip: Uint8Array,\n pathPrefix: string = '/'\n) {\n let specifiers: string[] = [];\n\n if (eszip instanceof ReadableStream) {\n const reader = eszip.getReader({ mode: 'byob' });\n specifiers = await parser.parse(reader);\n } else {\n specifiers = await parser.parseBytes(eszip);\n }\n\n await parser.load();\n\n const fileSpecifiers = specifiers.filter((specifier) =>\n specifier.startsWith('file://')\n );\n\n const files = await Promise.all(\n fileSpecifiers.map(async (specifier) => {\n const source: string = await parser.getModuleSource(specifier);\n const sourceMapString: string =\n await parser.getModuleSourceMap(specifier);\n\n const filePath = relative(\n pathPrefix,\n fileURLToPath(specifier, { windows: false })\n );\n\n const file = new File([source], filePath, {\n type: 'text/plain',\n });\n\n if (!sourceMapString) {\n return file;\n }\n\n const sourceMap = sourceMapSchema.parse(JSON.parse(sourceMapString));\n\n const [typeScriptSource] = sourceMap.sourcesContent ?? [];\n\n if (!typeScriptSource) {\n return file;\n }\n\n const sourceFile = new File([typeScriptSource], filePath, {\n type: 'application/typescript',\n });\n\n return sourceFile;\n })\n );\n\n return files;\n}\n\n/**\n * Bundles files into an eszip archive.\n *\n * Optionally prefixes the file names with a given path.\n */\nexport async function bundleFiles(files: File[], pathPrefix: string = '/') {\n const specifiers = files.map(\n (file) => `file://${join(pathPrefix, file.name)}`\n );\n const eszip = await build(specifiers, async (specifier: string) => {\n const url = new URL(specifier);\n const scheme = url.protocol;\n\n switch (scheme) {\n case 'file:': {\n const file = files.find(\n (file) => `file://${join(pathPrefix, file.name)}` === specifier\n );\n\n if (!file) {\n throw new Error(`File not found: ${specifier}`);\n }\n\n const headers = {\n 'content-type': file.type,\n };\n\n const content = await file.text();\n\n return {\n kind: 'module',\n specifier,\n headers,\n content,\n };\n }\n case 'http:':\n case 'https:': {\n const response = await fetch(specifier);\n if (!response.ok) {\n throw new Error(`Failed to fetch ${specifier}: ${response.status}`);\n }\n\n // Header keys must be lower case\n const headers = Object.fromEntries(\n Array.from(response.headers.entries()).map(([key, value]) => [\n key.toLowerCase(),\n value,\n ])\n );\n\n const content = await response.text();\n\n return {\n kind: 'module',\n specifier,\n headers,\n content,\n };\n }\n default: {\n throw new Error(`Unsupported scheme: ${scheme}`);\n }\n }\n });\n\n return eszip;\n}\n","import createClient, {\n type Client,\n type FetchResponse,\n type ParseAsResponse,\n} from 'openapi-fetch';\nimport type {\n MediaType,\n ResponseObjectMap,\n SuccessResponse,\n} from 'openapi-typescript-helpers';\nimport { z } from 'zod';\nimport type { paths } from './types.js';\n\nexport function createManagementApiClient(\n baseUrl: string,\n accessToken: string,\n headers: Record<string, string> = {}\n) {\n return createClient<paths>({\n baseUrl,\n headers: {\n Authorization: `Bearer ${accessToken}`,\n ...headers,\n },\n });\n}\n\nexport type ManagementApiClient = Client<paths>;\n\nexport type SuccessResponseType<\n T extends Record<string | number, any>,\n Options,\n Media extends MediaType,\n> = {\n data: 
ParseAsResponse<SuccessResponse<ResponseObjectMap<T>, Media>, Options>;\n error?: never;\n response: Response;\n};\n\nconst errorSchema = z.object({\n message: z.string(),\n});\n\nexport function assertSuccess<\n T extends Record<string | number, any>,\n Options,\n Media extends MediaType,\n>(\n response: FetchResponse<T, Options, Media>,\n fallbackMessage: string\n): asserts response is SuccessResponseType<T, Options, Media> {\n if ('error' in response) {\n if (response.response.status === 401) {\n throw new Error(\n 'Unauthorized. Please provide a valid access token to the MCP server via the --access-token flag or SUPABASE_ACCESS_TOKEN.'\n );\n }\n\n const { data: errorContent } = errorSchema.safeParse(response.error);\n\n if (errorContent) {\n throw new Error(errorContent.message);\n }\n\n throw new Error(fallbackMessage);\n }\n}\n"]}
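
The sourcesContent embedded in the new chunk-WMARAA3I.cjs.map above includes the original TypeScript for the Management API helpers, createManagementApiClient and assertSuccess. Below is a minimal sketch of how the two compose, assuming an ESM context; the import path is illustrative (these helpers live in the package's internal src/management-api module and are not part of the public export surface), and the access token and project ref are placeholders.

// Sketch based on the sources embedded in the source map above.
// createManagementApiClient(baseUrl, accessToken) wraps openapi-fetch's
// createClient<paths>() with a Bearer Authorization header; assertSuccess()
// narrows the response to its success variant or throws a readable error.
import {
  assertSuccess,
  createManagementApiClient,
} from './management-api/index.js'; // illustrative path (internal module)

const client = createManagementApiClient(
  'https://api.supabase.com',
  process.env.SUPABASE_ACCESS_TOKEN! // personal access token (placeholder)
);

const response = await client.GET('/v1/projects/{ref}', {
  params: { path: { ref: 'abcdefghijklmnopqrst' } }, // placeholder project ref
});

// Throws a specific message on 401, the API's own error message when it can
// be parsed, and the fallback message otherwise; afterwards response.data is
// typed as the success payload.
assertSuccess(response, 'Failed to fetch project');
console.log(response.data);
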
package/dist/index.cjs CHANGED
@@ -1,2 +1,2 @@
1
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunk4ET572GEcjs = require('./chunk-4ET572GE.cjs');exports.createSupabaseMcpServer = _chunk4ET572GEcjs.i; exports.featureGroupSchema = _chunk4ET572GEcjs.b;
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkWMARAA3Icjs = require('./chunk-WMARAA3I.cjs');var _chunk2E4PBGE7cjs = require('./chunk-2E4PBGE7.cjs');exports.PLATFORM_INDEPENDENT_FEATURES = _chunk2E4PBGE7cjs.d; exports.createSupabaseApiPlatform = _chunkWMARAA3Icjs.a; exports.createSupabaseMcpServer = _chunk2E4PBGE7cjs.e;
2
2
  //# sourceMappingURL=index.cjs.map
package/dist/index.cjs.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["/Users/grichardson/Documents/dev/supabase/mcp-server-supabase/packages/mcp-server-supabase/dist/index.cjs"],"names":[],"mappings":"AAAA,iIAA+C,wGAA6D","file":"/Users/grichardson/Documents/dev/supabase/mcp-server-supabase/packages/mcp-server-supabase/dist/index.cjs"}
1
+ {"version":3,"sources":["/Users/grichardson/Documents/dev/supabase/mcp-server-supabase/packages/mcp-server-supabase/dist/index.cjs"],"names":[],"mappings":"AAAA,iIAAwC,wDAAgD,4KAAuG","file":"/Users/grichardson/Documents/dev/supabase/mcp-server-supabase/packages/mcp-server-supabase/dist/index.cjs"}
package/dist/index.d.cts CHANGED
@@ -3,9 +3,34 @@ import * as _modelcontextprotocol_sdk_server_index_js from '@modelcontextprotoco
3
3
  import { z } from 'zod';
4
4
  import '@supabase/mcp-utils';
5
5
 
6
+ type SupabaseApiPlatformOptions = {
7
+ /**
8
+ * The access token for the Supabase Management API.
9
+ */
10
+ accessToken: string;
11
+ /**
12
+ * The API URL for the Supabase Management API.
13
+ */
14
+ apiUrl?: string;
15
+ };
16
+ /**
17
+ * Creates a Supabase platform implementation using the Supabase Management API.
18
+ */
19
+ declare function createSupabaseApiPlatform(options: SupabaseApiPlatformOptions): SupabasePlatform;
20
+
6
21
  declare const featureGroupSchema: z.ZodEnum<["docs", "account", "database", "debugging", "development", "functions", "branching", "storage"]>;
7
22
  type FeatureGroup = z.infer<typeof featureGroupSchema>;
8
23
 
24
+ type SupabasePlatformOptions = {
25
+ /**
26
+ * The access token for the Supabase Management API.
27
+ */
28
+ accessToken: string;
29
+ /**
30
+ * The API URL for the Supabase Management API.
31
+ */
32
+ apiUrl?: string;
33
+ };
9
34
  type SupabaseMcpServerOptions = {
10
35
  /**
11
36
  * Platform implementation for Supabase.
@@ -32,6 +57,7 @@ type SupabaseMcpServerOptions = {
32
57
  */
33
58
  features?: string[];
34
59
  };
60
+ declare const PLATFORM_INDEPENDENT_FEATURES: FeatureGroup[];
35
61
  /**
36
62
  * Creates an MCP server for interacting with Supabase.
37
63
  */
@@ -59,4 +85,4 @@ declare function createSupabaseMcpServer(options: SupabaseMcpServerOptions): _mo
59
85
  } | undefined;
60
86
  }>;
61
87
 
62
- export { type FeatureGroup, type SupabaseMcpServerOptions, SupabasePlatform, createSupabaseMcpServer, featureGroupSchema };
88
+ export { PLATFORM_INDEPENDENT_FEATURES, type SupabaseApiPlatformOptions, type SupabaseMcpServerOptions, SupabasePlatform, type SupabasePlatformOptions, createSupabaseApiPlatform, createSupabaseMcpServer };
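
The new public surface above adds SupabaseApiPlatformOptions, createSupabaseApiPlatform, and PLATFORM_INDEPENDENT_FEATURES, and routes server creation through a SupabasePlatform value. A hedged sketch of how a consumer might wire the two new exports together follows; the option values are placeholders (the feature names come from featureGroupSchema's enum, and the access token is assumed to be in SUPABASE_ACCESS_TOKEN).

// Based on the declarations in index.d.cts above; values are placeholders.
import {
  createSupabaseApiPlatform,
  createSupabaseMcpServer,
} from '@supabase/mcp-server-supabase';

// SupabaseApiPlatformOptions: accessToken is required, apiUrl is optional.
const platform = createSupabaseApiPlatform({
  accessToken: process.env.SUPABASE_ACCESS_TOKEN!,
  // apiUrl: 'https://api.supabase.com', // optional Management API override
});

// SupabaseMcpServerOptions takes the platform implementation plus an
// optional features?: string[] filter (see featureGroupSchema above).
const server = createSupabaseMcpServer({
  platform,
  features: ['docs', 'database', 'debugging'],
});

The stdio transport diff further down passes projectId and readOnly alongside these options, so the same wiring also covers a project-scoped, read-only server.
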
package/dist/index.d.ts CHANGED
@@ -3,9 +3,34 @@ import * as _modelcontextprotocol_sdk_server_index_js from '@modelcontextprotoco
3
3
  import { z } from 'zod';
4
4
  import '@supabase/mcp-utils';
5
5
 
6
+ type SupabaseApiPlatformOptions = {
7
+ /**
8
+ * The access token for the Supabase Management API.
9
+ */
10
+ accessToken: string;
11
+ /**
12
+ * The API URL for the Supabase Management API.
13
+ */
14
+ apiUrl?: string;
15
+ };
16
+ /**
17
+ * Creates a Supabase platform implementation using the Supabase Management API.
18
+ */
19
+ declare function createSupabaseApiPlatform(options: SupabaseApiPlatformOptions): SupabasePlatform;
20
+
6
21
  declare const featureGroupSchema: z.ZodEnum<["docs", "account", "database", "debugging", "development", "functions", "branching", "storage"]>;
7
22
  type FeatureGroup = z.infer<typeof featureGroupSchema>;
8
23
 
24
+ type SupabasePlatformOptions = {
25
+ /**
26
+ * The access token for the Supabase Management API.
27
+ */
28
+ accessToken: string;
29
+ /**
30
+ * The API URL for the Supabase Management API.
31
+ */
32
+ apiUrl?: string;
33
+ };
9
34
  type SupabaseMcpServerOptions = {
10
35
  /**
11
36
  * Platform implementation for Supabase.
@@ -32,6 +57,7 @@ type SupabaseMcpServerOptions = {
32
57
  */
33
58
  features?: string[];
34
59
  };
60
+ declare const PLATFORM_INDEPENDENT_FEATURES: FeatureGroup[];
35
61
  /**
36
62
  * Creates an MCP server for interacting with Supabase.
37
63
  */
@@ -59,4 +85,4 @@ declare function createSupabaseMcpServer(options: SupabaseMcpServerOptions): _mo
59
85
  } | undefined;
60
86
  }>;
61
87
 
62
- export { type FeatureGroup, type SupabaseMcpServerOptions, SupabasePlatform, createSupabaseMcpServer, featureGroupSchema };
88
+ export { PLATFORM_INDEPENDENT_FEATURES, type SupabaseApiPlatformOptions, type SupabaseMcpServerOptions, SupabasePlatform, type SupabasePlatformOptions, createSupabaseApiPlatform, createSupabaseMcpServer };
package/dist/index.js CHANGED
@@ -1,2 +1,2 @@
1
- import{b as e,i as r}from"./chunk-3ZRWMUMF.js";export{r as createSupabaseMcpServer,e as featureGroupSchema};
1
+ import{a as e}from"./chunk-LO7S72TA.js";import{d as o,e as r}from"./chunk-7VYUDCV6.js";export{o as PLATFORM_INDEPENDENT_FEATURES,e as createSupabaseApiPlatform,r as createSupabaseMcpServer};
2
2
  //# sourceMappingURL=index.js.map
package/dist/platform/index.cjs CHANGED
@@ -1,2 +1,2 @@
1
- "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunkLL3OYJR7cjs = require('../chunk-LL3OYJR7.cjs');require('../chunk-4ET572GE.cjs');exports.applyMigrationOptionsSchema = _chunkLL3OYJR7cjs.l; exports.branchSchema = _chunkLL3OYJR7cjs.e; exports.createBranchOptionsSchema = _chunkLL3OYJR7cjs.h; exports.createProjectOptionsSchema = _chunkLL3OYJR7cjs.g; exports.deployEdgeFunctionOptionsSchema = _chunkLL3OYJR7cjs.j; exports.edgeFunctionSchema = _chunkLL3OYJR7cjs.f; exports.executeSqlOptionsSchema = _chunkLL3OYJR7cjs.k; exports.generateTypescriptTypesResultSchema = _chunkLL3OYJR7cjs.o; exports.getLogsOptionsSchema = _chunkLL3OYJR7cjs.n; exports.migrationSchema = _chunkLL3OYJR7cjs.m; exports.organizationSchema = _chunkLL3OYJR7cjs.c; exports.projectSchema = _chunkLL3OYJR7cjs.d; exports.resetBranchOptionsSchema = _chunkLL3OYJR7cjs.i; exports.storageBucketSchema = _chunkLL3OYJR7cjs.a; exports.storageConfigSchema = _chunkLL3OYJR7cjs.b;
1
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true});var _chunk2E4PBGE7cjs = require('../chunk-2E4PBGE7.cjs');exports.applyMigrationOptionsSchema = _chunk2E4PBGE7cjs.t; exports.branchSchema = _chunk2E4PBGE7cjs.m; exports.createBranchOptionsSchema = _chunk2E4PBGE7cjs.p; exports.createProjectOptionsSchema = _chunk2E4PBGE7cjs.o; exports.deployEdgeFunctionOptionsSchema = _chunk2E4PBGE7cjs.r; exports.edgeFunctionSchema = _chunk2E4PBGE7cjs.n; exports.executeSqlOptionsSchema = _chunk2E4PBGE7cjs.s; exports.generateTypescriptTypesResultSchema = _chunk2E4PBGE7cjs.w; exports.getLogsOptionsSchema = _chunk2E4PBGE7cjs.v; exports.migrationSchema = _chunk2E4PBGE7cjs.u; exports.organizationSchema = _chunk2E4PBGE7cjs.k; exports.projectSchema = _chunk2E4PBGE7cjs.l; exports.resetBranchOptionsSchema = _chunk2E4PBGE7cjs.q; exports.storageBucketSchema = _chunk2E4PBGE7cjs.i; exports.storageConfigSchema = _chunk2E4PBGE7cjs.j;
2
2
  //# sourceMappingURL=index.cjs.map
package/dist/platform/index.cjs.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["/Users/grichardson/Documents/dev/supabase/mcp-server-supabase/packages/mcp-server-supabase/dist/platform/index.cjs"],"names":[],"mappings":"AAAA,kIAAgE,iCAA8B,oyBAAua","file":"/Users/grichardson/Documents/dev/supabase/mcp-server-supabase/packages/mcp-server-supabase/dist/platform/index.cjs"}
1
+ {"version":3,"sources":["/Users/grichardson/Documents/dev/supabase/mcp-server-supabase/packages/mcp-server-supabase/dist/platform/index.cjs"],"names":[],"mappings":"AAAA,kIAA2I,oyBAAua","file":"/Users/grichardson/Documents/dev/supabase/mcp-server-supabase/packages/mcp-server-supabase/dist/platform/index.cjs"}
package/dist/platform/index.d.cts CHANGED
@@ -9,19 +9,19 @@ declare const storageBucketSchema: z.ZodObject<{
9
9
  updated_at: z.ZodString;
10
10
  public: z.ZodBoolean;
11
11
  }, "strip", z.ZodTypeAny, {
12
- id: string;
12
+ public: boolean;
13
13
  name: string;
14
+ id: string;
15
+ owner: string;
14
16
  created_at: string;
15
17
  updated_at: string;
16
- public: boolean;
17
- owner: string;
18
18
  }, {
19
- id: string;
19
+ public: boolean;
20
20
  name: string;
21
+ id: string;
22
+ owner: string;
21
23
  created_at: string;
22
24
  updated_at: string;
23
- public: boolean;
24
- owner: string;
25
25
  }>;
26
26
  declare const storageConfigSchema: z.ZodObject<{
27
27
  fileSizeLimit: z.ZodNumber;
@@ -83,14 +83,14 @@ declare const organizationSchema: z.ZodObject<{
83
83
  allowed_release_channels: z.ZodArray<z.ZodString, "many">;
84
84
  opt_in_tags: z.ZodArray<z.ZodString, "many">;
85
85
  }, "strip", z.ZodTypeAny, {
86
- id: string;
87
86
  name: string;
87
+ id: string;
88
88
  allowed_release_channels: string[];
89
89
  opt_in_tags: string[];
90
90
  plan?: string | undefined;
91
91
  }, {
92
- id: string;
93
92
  name: string;
93
+ id: string;
94
94
  allowed_release_channels: string[];
95
95
  opt_in_tags: string[];
96
96
  plan?: string | undefined;
@@ -104,17 +104,17 @@ declare const projectSchema: z.ZodObject<{
104
104
  region: z.ZodString;
105
105
  }, "strip", z.ZodTypeAny, {
106
106
  status: string;
107
- id: string;
108
107
  name: string;
109
- organization_id: string;
108
+ id: string;
110
109
  created_at: string;
110
+ organization_id: string;
111
111
  region: string;
112
112
  }, {
113
113
  status: string;
114
- id: string;
115
114
  name: string;
116
- organization_id: string;
115
+ id: string;
117
116
  created_at: string;
117
+ organization_id: string;
118
118
  region: string;
119
119
  }>;
120
120
  declare const branchSchema: z.ZodObject<{
@@ -132,27 +132,27 @@ declare const branchSchema: z.ZodObject<{
132
132
  updated_at: z.ZodString;
133
133
  }, "strip", z.ZodTypeAny, {
134
134
  status: "CREATING_PROJECT" | "RUNNING_MIGRATIONS" | "MIGRATIONS_PASSED" | "MIGRATIONS_FAILED" | "FUNCTIONS_DEPLOYED" | "FUNCTIONS_FAILED";
135
- id: string;
136
135
  name: string;
136
+ id: string;
137
137
  created_at: string;
138
+ updated_at: string;
138
139
  project_ref: string;
139
140
  parent_project_ref: string;
140
141
  is_default: boolean;
141
142
  persistent: boolean;
142
- updated_at: string;
143
143
  git_branch?: string | undefined;
144
144
  pr_number?: number | undefined;
145
145
  latest_check_run_id?: number | undefined;
146
146
  }, {
147
147
  status: "CREATING_PROJECT" | "RUNNING_MIGRATIONS" | "MIGRATIONS_PASSED" | "MIGRATIONS_FAILED" | "FUNCTIONS_DEPLOYED" | "FUNCTIONS_FAILED";
148
- id: string;
149
148
  name: string;
149
+ id: string;
150
150
  created_at: string;
151
+ updated_at: string;
151
152
  project_ref: string;
152
153
  parent_project_ref: string;
153
154
  is_default: boolean;
154
155
  persistent: boolean;
155
- updated_at: string;
156
156
  git_branch?: string | undefined;
157
157
  pr_number?: number | undefined;
158
158
  latest_check_run_id?: number | undefined;
@@ -173,21 +173,21 @@ declare const edgeFunctionSchema: z.ZodObject<{
173
173
  name: z.ZodString;
174
174
  content: z.ZodString;
175
175
  }, "strip", z.ZodTypeAny, {
176
- name: string;
177
176
  content: string;
178
- }, {
179
177
  name: string;
178
+ }, {
180
179
  content: string;
180
+ name: string;
181
181
  }>, "many">;
182
182
  }, "strip", z.ZodTypeAny, {
183
+ version: number;
183
184
  status: string;
184
- id: string;
185
185
  name: string;
186
- version: number;
186
+ id: string;
187
187
  slug: string;
188
188
  files: {
189
- name: string;
190
189
  content: string;
190
+ name: string;
191
191
  }[];
192
192
  created_at?: number | undefined;
193
193
  updated_at?: number | undefined;
@@ -196,14 +196,14 @@ declare const edgeFunctionSchema: z.ZodObject<{
196
196
  import_map_path?: string | undefined;
197
197
  entrypoint_path?: string | undefined;
198
198
  }, {
199
+ version: number;
199
200
  status: string;
200
- id: string;
201
201
  name: string;
202
- version: number;
202
+ id: string;
203
203
  slug: string;
204
204
  files: {
205
- name: string;
206
205
  content: string;
206
+ name: string;
207
207
  }[];
208
208
  created_at?: number | undefined;
209
209
  updated_at?: number | undefined;
@@ -215,17 +215,17 @@ declare const edgeFunctionSchema: z.ZodObject<{
215
215
  declare const createProjectOptionsSchema: z.ZodObject<{
216
216
  name: z.ZodString;
217
217
  organization_id: z.ZodString;
218
- region: z.ZodOptional<z.ZodEnum<["sa-east-1", "ap-southeast-2", "ap-northeast-2", "ap-northeast-1", "ap-southeast-1", "ap-south-1", "eu-north-1", "eu-central-2", "eu-central-1", "eu-west-3", "eu-west-2", "eu-west-1", "ca-central-1", "us-east-2", "us-east-1", "us-west-1"]>>;
218
+ region: z.ZodOptional<z.ZodEnum<["sa-east-1", "ap-south-1", "ca-central-1", "eu-central-2", "eu-central-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", "ap-southeast-2", "ap-northeast-2", "ap-northeast-1", "ap-southeast-1", "us-west-1", "us-east-2", "us-east-1"]>>;
219
219
  db_pass: z.ZodOptional<z.ZodString>;
220
220
  }, "strip", z.ZodTypeAny, {
221
221
  name: string;
222
222
  organization_id: string;
223
- region?: "us-west-1" | "us-east-1" | "us-east-2" | "ca-central-1" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "eu-central-1" | "eu-central-2" | "eu-north-1" | "ap-south-1" | "ap-southeast-1" | "ap-northeast-1" | "ap-northeast-2" | "ap-southeast-2" | "sa-east-1" | undefined;
223
+ region?: "us-east-1" | "us-east-2" | "us-west-1" | "ap-southeast-1" | "ap-northeast-1" | "ap-northeast-2" | "ap-southeast-2" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "eu-north-1" | "eu-central-1" | "eu-central-2" | "ca-central-1" | "ap-south-1" | "sa-east-1" | undefined;
224
224
  db_pass?: string | undefined;
225
225
  }, {
226
226
  name: string;
227
227
  organization_id: string;
228
- region?: "us-west-1" | "us-east-1" | "us-east-2" | "ca-central-1" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "eu-central-1" | "eu-central-2" | "eu-north-1" | "ap-south-1" | "ap-southeast-1" | "ap-northeast-1" | "ap-northeast-2" | "ap-southeast-2" | "sa-east-1" | undefined;
228
+ region?: "us-east-1" | "us-east-2" | "us-west-1" | "ap-southeast-1" | "ap-northeast-1" | "ap-northeast-2" | "ap-southeast-2" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "eu-north-1" | "eu-central-1" | "eu-central-2" | "ca-central-1" | "ap-south-1" | "sa-east-1" | undefined;
229
229
  db_pass?: string | undefined;
230
230
  }>;
231
231
  declare const createBranchOptionsSchema: z.ZodObject<{
@@ -250,26 +250,26 @@ declare const deployEdgeFunctionOptionsSchema: z.ZodObject<{
250
250
  name: z.ZodString;
251
251
  content: z.ZodString;
252
252
  }, "strip", z.ZodTypeAny, {
253
- name: string;
254
253
  content: string;
255
- }, {
256
254
  name: string;
255
+ }, {
257
256
  content: string;
257
+ name: string;
258
258
  }>, "many">;
259
259
  }, "strip", z.ZodTypeAny, {
260
260
  name: string;
261
261
  entrypoint_path: string;
262
262
  files: {
263
- name: string;
264
263
  content: string;
264
+ name: string;
265
265
  }[];
266
266
  import_map_path?: string | undefined;
267
267
  }, {
268
268
  name: string;
269
269
  entrypoint_path: string;
270
270
  files: {
271
- name: string;
272
271
  content: string;
272
+ name: string;
273
273
  }[];
274
274
  import_map_path?: string | undefined;
275
275
  }>;
@@ -287,11 +287,11 @@ declare const applyMigrationOptionsSchema: z.ZodObject<{
287
287
  name: z.ZodString;
288
288
  query: z.ZodString;
289
289
  }, "strip", z.ZodTypeAny, {
290
- query: string;
291
290
  name: string;
292
- }, {
293
291
  query: string;
292
+ }, {
294
293
  name: string;
294
+ query: string;
295
295
  }>;
296
296
  declare const migrationSchema: z.ZodObject<{
297
297
  version: z.ZodString;
package/dist/platform/index.d.ts CHANGED
@@ -9,19 +9,19 @@ declare const storageBucketSchema: z.ZodObject<{
9
9
  updated_at: z.ZodString;
10
10
  public: z.ZodBoolean;
11
11
  }, "strip", z.ZodTypeAny, {
12
- id: string;
12
+ public: boolean;
13
13
  name: string;
14
+ id: string;
15
+ owner: string;
14
16
  created_at: string;
15
17
  updated_at: string;
16
- public: boolean;
17
- owner: string;
18
18
  }, {
19
- id: string;
19
+ public: boolean;
20
20
  name: string;
21
+ id: string;
22
+ owner: string;
21
23
  created_at: string;
22
24
  updated_at: string;
23
- public: boolean;
24
- owner: string;
25
25
  }>;
26
26
  declare const storageConfigSchema: z.ZodObject<{
27
27
  fileSizeLimit: z.ZodNumber;
@@ -83,14 +83,14 @@ declare const organizationSchema: z.ZodObject<{
83
83
  allowed_release_channels: z.ZodArray<z.ZodString, "many">;
84
84
  opt_in_tags: z.ZodArray<z.ZodString, "many">;
85
85
  }, "strip", z.ZodTypeAny, {
86
- id: string;
87
86
  name: string;
87
+ id: string;
88
88
  allowed_release_channels: string[];
89
89
  opt_in_tags: string[];
90
90
  plan?: string | undefined;
91
91
  }, {
92
- id: string;
93
92
  name: string;
93
+ id: string;
94
94
  allowed_release_channels: string[];
95
95
  opt_in_tags: string[];
96
96
  plan?: string | undefined;
@@ -104,17 +104,17 @@ declare const projectSchema: z.ZodObject<{
104
104
  region: z.ZodString;
105
105
  }, "strip", z.ZodTypeAny, {
106
106
  status: string;
107
- id: string;
108
107
  name: string;
109
- organization_id: string;
108
+ id: string;
110
109
  created_at: string;
110
+ organization_id: string;
111
111
  region: string;
112
112
  }, {
113
113
  status: string;
114
- id: string;
115
114
  name: string;
116
- organization_id: string;
115
+ id: string;
117
116
  created_at: string;
117
+ organization_id: string;
118
118
  region: string;
119
119
  }>;
120
120
  declare const branchSchema: z.ZodObject<{
@@ -132,27 +132,27 @@ declare const branchSchema: z.ZodObject<{
132
132
  updated_at: z.ZodString;
133
133
  }, "strip", z.ZodTypeAny, {
134
134
  status: "CREATING_PROJECT" | "RUNNING_MIGRATIONS" | "MIGRATIONS_PASSED" | "MIGRATIONS_FAILED" | "FUNCTIONS_DEPLOYED" | "FUNCTIONS_FAILED";
135
- id: string;
136
135
  name: string;
136
+ id: string;
137
137
  created_at: string;
138
+ updated_at: string;
138
139
  project_ref: string;
139
140
  parent_project_ref: string;
140
141
  is_default: boolean;
141
142
  persistent: boolean;
142
- updated_at: string;
143
143
  git_branch?: string | undefined;
144
144
  pr_number?: number | undefined;
145
145
  latest_check_run_id?: number | undefined;
146
146
  }, {
147
147
  status: "CREATING_PROJECT" | "RUNNING_MIGRATIONS" | "MIGRATIONS_PASSED" | "MIGRATIONS_FAILED" | "FUNCTIONS_DEPLOYED" | "FUNCTIONS_FAILED";
148
- id: string;
149
148
  name: string;
149
+ id: string;
150
150
  created_at: string;
151
+ updated_at: string;
151
152
  project_ref: string;
152
153
  parent_project_ref: string;
153
154
  is_default: boolean;
154
155
  persistent: boolean;
155
- updated_at: string;
156
156
  git_branch?: string | undefined;
157
157
  pr_number?: number | undefined;
158
158
  latest_check_run_id?: number | undefined;
@@ -173,21 +173,21 @@ declare const edgeFunctionSchema: z.ZodObject<{
173
173
  name: z.ZodString;
174
174
  content: z.ZodString;
175
175
  }, "strip", z.ZodTypeAny, {
176
- name: string;
177
176
  content: string;
178
- }, {
179
177
  name: string;
178
+ }, {
180
179
  content: string;
180
+ name: string;
181
181
  }>, "many">;
182
182
  }, "strip", z.ZodTypeAny, {
183
+ version: number;
183
184
  status: string;
184
- id: string;
185
185
  name: string;
186
- version: number;
186
+ id: string;
187
187
  slug: string;
188
188
  files: {
189
- name: string;
190
189
  content: string;
190
+ name: string;
191
191
  }[];
192
192
  created_at?: number | undefined;
193
193
  updated_at?: number | undefined;
@@ -196,14 +196,14 @@ declare const edgeFunctionSchema: z.ZodObject<{
196
196
  import_map_path?: string | undefined;
197
197
  entrypoint_path?: string | undefined;
198
198
  }, {
199
+ version: number;
199
200
  status: string;
200
- id: string;
201
201
  name: string;
202
- version: number;
202
+ id: string;
203
203
  slug: string;
204
204
  files: {
205
- name: string;
206
205
  content: string;
206
+ name: string;
207
207
  }[];
208
208
  created_at?: number | undefined;
209
209
  updated_at?: number | undefined;
@@ -215,17 +215,17 @@ declare const edgeFunctionSchema: z.ZodObject<{
215
215
  declare const createProjectOptionsSchema: z.ZodObject<{
216
216
  name: z.ZodString;
217
217
  organization_id: z.ZodString;
218
- region: z.ZodOptional<z.ZodEnum<["sa-east-1", "ap-southeast-2", "ap-northeast-2", "ap-northeast-1", "ap-southeast-1", "ap-south-1", "eu-north-1", "eu-central-2", "eu-central-1", "eu-west-3", "eu-west-2", "eu-west-1", "ca-central-1", "us-east-2", "us-east-1", "us-west-1"]>>;
218
+ region: z.ZodOptional<z.ZodEnum<["sa-east-1", "ap-south-1", "ca-central-1", "eu-central-2", "eu-central-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", "ap-southeast-2", "ap-northeast-2", "ap-northeast-1", "ap-southeast-1", "us-west-1", "us-east-2", "us-east-1"]>>;
219
219
  db_pass: z.ZodOptional<z.ZodString>;
220
220
  }, "strip", z.ZodTypeAny, {
221
221
  name: string;
222
222
  organization_id: string;
223
- region?: "us-west-1" | "us-east-1" | "us-east-2" | "ca-central-1" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "eu-central-1" | "eu-central-2" | "eu-north-1" | "ap-south-1" | "ap-southeast-1" | "ap-northeast-1" | "ap-northeast-2" | "ap-southeast-2" | "sa-east-1" | undefined;
223
+ region?: "us-east-1" | "us-east-2" | "us-west-1" | "ap-southeast-1" | "ap-northeast-1" | "ap-northeast-2" | "ap-southeast-2" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "eu-north-1" | "eu-central-1" | "eu-central-2" | "ca-central-1" | "ap-south-1" | "sa-east-1" | undefined;
224
224
  db_pass?: string | undefined;
225
225
  }, {
226
226
  name: string;
227
227
  organization_id: string;
228
- region?: "us-west-1" | "us-east-1" | "us-east-2" | "ca-central-1" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "eu-central-1" | "eu-central-2" | "eu-north-1" | "ap-south-1" | "ap-southeast-1" | "ap-northeast-1" | "ap-northeast-2" | "ap-southeast-2" | "sa-east-1" | undefined;
228
+ region?: "us-east-1" | "us-east-2" | "us-west-1" | "ap-southeast-1" | "ap-northeast-1" | "ap-northeast-2" | "ap-southeast-2" | "eu-west-1" | "eu-west-2" | "eu-west-3" | "eu-north-1" | "eu-central-1" | "eu-central-2" | "ca-central-1" | "ap-south-1" | "sa-east-1" | undefined;
229
229
  db_pass?: string | undefined;
230
230
  }>;
231
231
  declare const createBranchOptionsSchema: z.ZodObject<{
@@ -250,26 +250,26 @@ declare const deployEdgeFunctionOptionsSchema: z.ZodObject<{
250
250
  name: z.ZodString;
251
251
  content: z.ZodString;
252
252
  }, "strip", z.ZodTypeAny, {
253
- name: string;
254
253
  content: string;
255
- }, {
256
254
  name: string;
255
+ }, {
257
256
  content: string;
257
+ name: string;
258
258
  }>, "many">;
259
259
  }, "strip", z.ZodTypeAny, {
260
260
  name: string;
261
261
  entrypoint_path: string;
262
262
  files: {
263
- name: string;
264
263
  content: string;
264
+ name: string;
265
265
  }[];
266
266
  import_map_path?: string | undefined;
267
267
  }, {
268
268
  name: string;
269
269
  entrypoint_path: string;
270
270
  files: {
271
- name: string;
272
271
  content: string;
272
+ name: string;
273
273
  }[];
274
274
  import_map_path?: string | undefined;
275
275
  }>;
@@ -287,11 +287,11 @@ declare const applyMigrationOptionsSchema: z.ZodObject<{
287
287
  name: z.ZodString;
288
288
  query: z.ZodString;
289
289
  }, "strip", z.ZodTypeAny, {
290
- query: string;
291
290
  name: string;
292
- }, {
293
291
  query: string;
292
+ }, {
294
293
  name: string;
294
+ query: string;
295
295
  }>;
296
296
  declare const migrationSchema: z.ZodObject<{
297
297
  version: z.ZodString;
package/dist/platform/index.js CHANGED
@@ -1,2 +1,2 @@
1
- import{a,b,c,d,e,f,g,h,i,j,k,l,m,n,o}from"../chunk-YFZN5EJN.js";import"../chunk-3ZRWMUMF.js";export{l as applyMigrationOptionsSchema,e as branchSchema,h as createBranchOptionsSchema,g as createProjectOptionsSchema,j as deployEdgeFunctionOptionsSchema,f as edgeFunctionSchema,k as executeSqlOptionsSchema,o as generateTypescriptTypesResultSchema,n as getLogsOptionsSchema,m as migrationSchema,c as organizationSchema,d as projectSchema,i as resetBranchOptionsSchema,a as storageBucketSchema,b as storageConfigSchema};
1
+ import{i as a,j as b,k as c,l as d,m as e,n as f,o as g,p as h,q as i,r as j,s as k,t as l,u as m,v as n,w as o}from"../chunk-7VYUDCV6.js";export{l as applyMigrationOptionsSchema,e as branchSchema,h as createBranchOptionsSchema,g as createProjectOptionsSchema,j as deployEdgeFunctionOptionsSchema,f as edgeFunctionSchema,k as executeSqlOptionsSchema,o as generateTypescriptTypesResultSchema,n as getLogsOptionsSchema,m as migrationSchema,c as organizationSchema,d as projectSchema,i as resetBranchOptionsSchema,a as storageBucketSchema,b as storageConfigSchema};
2
2
  //# sourceMappingURL=index.js.map
package/dist/transports/stdio.cjs CHANGED
@@ -1,3 +1,3 @@
1
1
  #!/usr/bin/env node
2
- "use strict"; function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }var _chunkX63UTBYCcjs = require('../chunk-X63UTBYC.cjs');require('../chunk-LL3OYJR7.cjs');var _chunk4ET572GEcjs = require('../chunk-4ET572GE.cjs');var _stdiojs = require('@modelcontextprotocol/sdk/server/stdio.js');var _util = require('util');function p(r,o=","){return r.split(o).map(e=>e.trim()).filter(e=>e!=="")}var{version:d}=_chunk4ET572GEcjs.a;async function g(){let{values:{["access-token"]:r,["project-ref"]:o,["read-only"]:t,["api-url"]:e,["version"]:l,["features"]:s}}=_util.parseArgs.call(void 0, {options:{"access-token":{type:"string"},"project-ref":{type:"string"},"read-only":{type:"boolean",default:!1},"api-url":{type:"string"},version:{type:"boolean"},features:{type:"string"}}});l&&(console.log(d),process.exit(0));let n=_nullishCoalesce(r, () => (process.env.SUPABASE_ACCESS_TOKEN));n||(console.error("Please provide a personal access token (PAT) with the --access-token flag or set the SUPABASE_ACCESS_TOKEN environment variable"),process.exit(1));let f=s?p(s):void 0,m=_chunkX63UTBYCcjs.a.call(void 0, {accessToken:n,apiUrl:e}),u=_chunk4ET572GEcjs.i.call(void 0, {platform:m,projectId:o,readOnly:t,features:f}),S=new _stdiojs.StdioServerTransport;await u.connect(S)}g().catch(console.error);
2
+ "use strict"; function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }var _chunkWMARAA3Icjs = require('../chunk-WMARAA3I.cjs');var _chunk2E4PBGE7cjs = require('../chunk-2E4PBGE7.cjs');var _stdiojs = require('@modelcontextprotocol/sdk/server/stdio.js');var _util = require('util');function p(r,o=","){return r.split(o).map(e=>e.trim()).filter(e=>e!=="")}var{version:d}=_chunk2E4PBGE7cjs.a;async function g(){let{values:{["access-token"]:r,["project-ref"]:o,["read-only"]:t,["api-url"]:e,["version"]:l,["features"]:s}}=_util.parseArgs.call(void 0, {options:{"access-token":{type:"string"},"project-ref":{type:"string"},"read-only":{type:"boolean",default:!1},"api-url":{type:"string"},version:{type:"boolean"},features:{type:"string"}}});l&&(console.log(d),process.exit(0));let n=_nullishCoalesce(r, () => (process.env.SUPABASE_ACCESS_TOKEN));n||(console.error("Please provide a personal access token (PAT) with the --access-token flag or set the SUPABASE_ACCESS_TOKEN environment variable"),process.exit(1));let f=s?p(s):void 0,m=_chunkWMARAA3Icjs.a.call(void 0, {accessToken:n,apiUrl:e}),u=_chunk2E4PBGE7cjs.e.call(void 0, {platform:m,projectId:o,readOnly:t,features:f}),S=new _stdiojs.StdioServerTransport;await u.connect(S)}g().catch(console.error);
3
3
  //# sourceMappingURL=stdio.cjs.map
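
De-minified for readability, the new stdio entry point above is roughly equivalent to the sketch below. The flag set and the error message are taken from the minified line; the local names (main, features, etc.) are illustrative, and the version string printed for --version is a placeholder for the value the real code reads from package.json.

// Readable sketch of the minified transports/stdio.cjs entry point above.
import { parseArgs } from 'node:util';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import {
  createSupabaseApiPlatform,
  createSupabaseMcpServer,
} from '@supabase/mcp-server-supabase';

async function main() {
  const { values } = parseArgs({
    options: {
      'access-token': { type: 'string' },
      'project-ref': { type: 'string' },
      'read-only': { type: 'boolean', default: false },
      'api-url': { type: 'string' },
      version: { type: 'boolean' },
      features: { type: 'string' }, // comma-separated feature groups
    },
  });

  if (values.version) {
    console.log('0.5.0-dev.1'); // placeholder; the real code prints the package.json version
    process.exit(0);
  }

  const accessToken = values['access-token'] ?? process.env.SUPABASE_ACCESS_TOKEN;
  if (!accessToken) {
    console.error(
      'Please provide a personal access token (PAT) with the --access-token flag or set the SUPABASE_ACCESS_TOKEN environment variable'
    );
    process.exit(1);
  }

  // Split the --features flag into a trimmed, non-empty list.
  const features = values.features
    ?.split(',')
    .map((f) => f.trim())
    .filter((f) => f !== '');

  const platform = createSupabaseApiPlatform({
    accessToken,
    apiUrl: values['api-url'],
  });

  const server = createSupabaseMcpServer({
    platform,
    projectId: values['project-ref'],
    readOnly: values['read-only'],
    features,
  });

  await server.connect(new StdioServerTransport());
}

main().catch(console.error);
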