@supabase/mcp-server-supabase 0.6.1 → 0.6.2

This diff shows the contents of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
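Only build-artifact hashes change in the hunks shown here: the bundled chunk formerly published as chunk-QKSW7LRE.cjs is republished as chunk-JZW4V3WJ.cjs, its internal require of ./chunk-VMJ45UPI.cjs becomes ./chunk-S7IXC5T5.cjs, and the accompanying source map is updated to match. The minified code and embedded sources are otherwise identical, so for this chunk the release appears to be a rebuild with no behavioral change (file names are inferred from the sourceMappingURL comments and the source map's "file" and "sources" fields). With npm 7 or later, a comparable diff can be reproduced locally:

    npm diff --diff=@supabase/mcp-server-supabase@0.6.1 --diff=@supabase/mcp-server-supabase@0.6.2

chunk-QKSW7LRE.cjs → chunk-JZW4V3WJ.cjs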
@@ -1,4 +1,4 @@
- "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkVMJ45UPIcjs = require('./chunk-VMJ45UPI.cjs');var _multipartparser = require('@mjackson/multipart-parser');var _url = require('url');var _commontags = require('common-tags');function D(p,i=100){switch(p){case"api":return _commontags.stripIndent`
+ "use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }var _chunkS7IXC5T5cjs = require('./chunk-S7IXC5T5.cjs');var _multipartparser = require('@mjackson/multipart-parser');var _url = require('url');var _commontags = require('common-tags');function D(p,i=100){switch(p){case"api":return _commontags.stripIndent`
  select id, identifier, timestamp, event_message, request.method, request.path, response.status_code
  from edge_logs
  cross join unnest(metadata) as m
@@ -36,5 +36,5 @@
  select id, realtime_logs.timestamp, event_message from realtime_logs
  order by timestamp desc
  limit ${i}
- `;default:throw new Error(`unsupported log service type: ${p}`)}}var _openapifetch = require('openapi-fetch'); var _openapifetch2 = _interopRequireDefault(_openapifetch);var _v4 = require('zod/v4');function F(p,i,d={}){return _openapifetch2.default.call(void 0, {baseUrl:p,headers:{Authorization:`Bearer ${i}`,...d}})}var N=_v4.z.object({message:_v4.z.string()});function n(p,i){if("error"in p){if(p.response.status===401)throw new Error("Unauthorized. Please provide a valid access token to the MCP server via the --access-token flag or SUPABASE_ACCESS_TOKEN.");let{data:d}=N.safeParse(p.error);throw d?new Error(d.message):new Error(i)}}var H="ABCDEFGHIJKLMNOPQRSTUVWXYZ",I="abcdefghijklmnopqrstuvwxyz",W="0123456789",Q="!@#$%^&*()_+~`|}{[]:;?><,./-=",$=({length:p=10,numbers:i=!1,symbols:d=!1,uppercase:_=!0,lowercase:a=!0}={})=>{let m="";if(_&&(m+=H),a&&(m+=I),i&&(m+=W),d&&(m+=Q),m.length===0)throw new Error("at least one character set must be selected");let v=new Uint32Array(p);crypto.getRandomValues(v);let O="";for(let w=0;w<p;w++){let j=v[w]%m.length;O+=m.charAt(j)}return O};var{version:Y}=_chunkVMJ45UPIcjs.a;function ye(p){let{accessToken:i,apiUrl:d}=p,_=_nullishCoalesce(d, () => ("https://api.supabase.com")),a=F(_,i),m={async listOrganizations(){let t=await a.GET("/v1/organizations");return n(t,"Failed to fetch organizations"),t.data},async getOrganization(t){let e=await a.GET("/v1/organizations/{slug}",{params:{path:{slug:t}}});return n(e,"Failed to fetch organization"),e.data},async listProjects(){let t=await a.GET("/v1/projects");return n(t,"Failed to fetch projects"),t.data},async getProject(t){let e=await a.GET("/v1/projects/{ref}",{params:{path:{ref:t}}});return n(e,"Failed to fetch project"),e.data},async createProject(t){let{name:e,organization_id:s,region:o,db_pass:r}=_chunkVMJ45UPIcjs.j.parse(t),c=await a.POST("/v1/projects",{body:{name:e,region:o,organization_id:s,db_pass:_nullishCoalesce(r, () => ($({length:16,numbers:!0,uppercase:!0,lowercase:!0})))}});return n(c,"Failed to create project"),c.data},async pauseProject(t){let e=await a.POST("/v1/projects/{ref}/pause",{params:{path:{ref:t}}});n(e,"Failed to pause project")},async restoreProject(t){let e=await a.POST("/v1/projects/{ref}/restore",{params:{path:{ref:t}}});n(e,"Failed to restore project")}},v={async executeSql(t,e){let{query:s,parameters:o,read_only:r}=_chunkVMJ45UPIcjs.n.parse(e),c=await a.POST("/v1/projects/{ref}/database/query",{params:{path:{ref:t}},body:{query:s,parameters:o,read_only:r}});return n(c,"Failed to execute SQL query"),c.data},async listMigrations(t){let e=await a.GET("/v1/projects/{ref}/database/migrations",{params:{path:{ref:t}}});return n(e,"Failed to fetch migrations"),e.data},async applyMigration(t,e){let{name:s,query:o}=_chunkVMJ45UPIcjs.o.parse(e),r=await a.POST("/v1/projects/{ref}/database/migrations",{params:{path:{ref:t}},body:{name:s,query:o}});n(r,"Failed to apply migration")}},O={async getLogs(t,e){let{service:s,iso_timestamp_start:o,iso_timestamp_end:r}=_chunkVMJ45UPIcjs.r.parse(e),c=D(s),u=await a.GET("/v1/projects/{ref}/analytics/endpoints/logs.all",{params:{path:{ref:t},query:{sql:c,iso_timestamp_start:o,iso_timestamp_end:r}}});return n(u,"Failed to fetch logs"),u.data},async getSecurityAdvisors(t){let e=await a.GET("/v1/projects/{ref}/advisors/security",{params:{path:{ref:t}}});return n(e,"Failed to fetch security advisors"),e.data},async getPerformanceAdvisors(t){let e=await a.GET("/v1/projects/{ref}/advisors/performance",{params:{path:{ref:t}}});return n(e,"Failed to fetch 
performance advisors"),e.data}},w={async getProjectUrl(t){let e=new URL(_);return`https://${t}.${X(e.hostname)}`},async getPublishableKeys(t){let e=await a.GET("/v1/projects/{ref}/api-keys",{params:{path:{ref:t},query:{reveal:!1}}});n(e,"Failed to fetch API keys");let s;try{let r=await a.GET("/v1/projects/{ref}/api-keys/legacy",{params:{path:{ref:t}}});r.response.ok&&(s=_nullishCoalesce(_optionalChain([r, 'access', _2 => _2.data, 'optionalAccess', _3 => _3.enabled]), () => (!0)))}catch (e2){s=void 0}let o=_nullishCoalesce(_optionalChain([e, 'access', _4 => _4.data, 'optionalAccess', _5 => _5.filter, 'call', _6 => _6(r=>r.name==="anon"||r.type==="publishable")]), () => ([]));if(o.length===0)throw new Error("No client-safe API keys (anon or publishable) found. Please create a publishable key in your project settings.");return o.map(r=>({api_key:r.api_key,name:r.name,type:r.type==="publishable"?"publishable":"legacy",...s!==void 0&&{disabled:r.type==="legacy"&&!s},description:_nullishCoalesce(r.description, () => (void 0)),id:_nullishCoalesce(r.id, () => (void 0))}))},async generateTypescriptTypes(t){let e=await a.GET("/v1/projects/{ref}/types/typescript",{params:{path:{ref:t}}});return n(e,"Failed to fetch TypeScript types"),e.data}},j={async listEdgeFunctions(t){let e=await a.GET("/v1/projects/{ref}/functions",{params:{path:{ref:t}}});return n(e,"Failed to fetch Edge Functions"),e.data.map(s=>{let o=_chunkVMJ45UPIcjs.u.call(void 0, t,s.id,s.version),r=s.entrypoint_path?_chunkVMJ45UPIcjs.v.call(void 0, {deploymentId:o,filename:_url.fileURLToPath.call(void 0, s.entrypoint_path,{windows:!1})}):void 0,c=s.import_map_path?_chunkVMJ45UPIcjs.v.call(void 0, {deploymentId:o,filename:_url.fileURLToPath.call(void 0, s.import_map_path,{windows:!1})}):void 0;return{...s,entrypoint_path:r,import_map_path:c}})},async getEdgeFunction(t,e){let s=await a.GET("/v1/projects/{ref}/functions/{function_slug}",{params:{path:{ref:t,function_slug:e}}});if(s.error)throw s.error;n(s,"Failed to fetch Edge Function");let o=s.data,r=_chunkVMJ45UPIcjs.u.call(void 0, t,o.id,o.version),c=o.entrypoint_path?_chunkVMJ45UPIcjs.v.call(void 0, {deploymentId:r,filename:_url.fileURLToPath.call(void 0, o.entrypoint_path,{windows:!1})}):void 0,u=o.import_map_path?_chunkVMJ45UPIcjs.v.call(void 0, {deploymentId:r,filename:_url.fileURLToPath.call(void 0, o.import_map_path,{windows:!1})}):void 0,g=await a.GET("/v1/projects/{ref}/functions/{function_slug}/body",{params:{path:{ref:t,function_slug:e}},headers:{Accept:"multipart/form-data"},parseAs:"stream"});n(g,"Failed to fetch Edge Function files");let y=g.response.headers.get("content-type");if(!y||!y.startsWith("multipart/form-data"))throw new Error(`Unexpected content type: ${y}. 
Expected multipart/form-data.`);let S=_multipartparser.getMultipartBoundary.call(void 0, y);if(!S)throw new Error("No multipart boundary found in response headers");if(!g.data)throw new Error("No data received from Edge Function body");let l=[],E=_multipartparser.parseMultipartStream.call(void 0, g.data,{boundary:S});for await(let h of E)h.isFile&&h.filename&&l.push({name:_chunkVMJ45UPIcjs.v.call(void 0, {deploymentId:r,filename:h.filename}),content:h.text});return{...o,entrypoint_path:c,import_map_path:u,files:l}},async deployEdgeFunction(t,e){let{name:s,entrypoint_path:o,import_map_path:r,verify_jwt:c,files:u}=_chunkVMJ45UPIcjs.m.parse(e),g;try{g=await j.getEdgeFunction(t,s)}catch (e3){}let y=u.find(l=>["deno.json","import_map.json"].includes(l.name));r??=_nullishCoalesce(_optionalChain([g, 'optionalAccess', _7 => _7.import_map_path]), () => (_optionalChain([y, 'optionalAccess', _8 => _8.name])));let S=await a.POST("/v1/projects/{ref}/functions/deploy",{params:{path:{ref:t},query:{slug:s}},body:{metadata:{name:s,entrypoint_path:o,import_map_path:r,verify_jwt:c},file:u},bodySerializer(l){let E=new FormData,h=new Blob([JSON.stringify(l.metadata)],{type:"application/json"});return E.append("metadata",h),_optionalChain([l, 'access', _9 => _9.file, 'optionalAccess', _10 => _10.forEach, 'call', _11 => _11(k=>{let A=k,z=new Blob([A.content],{type:"application/typescript"});E.append("file",z,A.name)})]),E}});return n(S,"Failed to deploy Edge Function"),S.data}};return{async init(t){let{clientInfo:e}=t;if(!e)throw new Error("Client info is required");a=F(_,i,{"User-Agent":`supabase-mcp/${Y} (${e.name}/${e.version})`})},account:m,database:v,debugging:O,development:w,functions:j,branching:{async listBranches(t){let e=await a.GET("/v1/projects/{ref}/branches",{params:{path:{ref:t}}});return e.response.status===422?[]:(n(e,"Failed to list branches"),e.data)},async createBranch(t,e){let{name:s}=_chunkVMJ45UPIcjs.k.parse(e),o=await a.POST("/v1/projects/{ref}/branches",{params:{path:{ref:t}},body:{branch_name:s}});return n(o,"Failed to create branch"),o.data},async deleteBranch(t){let e=await a.DELETE("/v1/branches/{branch_id}",{params:{path:{branch_id:t}}});n(e,"Failed to delete branch")},async mergeBranch(t){let e=await a.POST("/v1/branches/{branch_id}/merge",{params:{path:{branch_id:t}},body:{}});n(e,"Failed to merge branch")},async resetBranch(t,e){let{migration_version:s}=_chunkVMJ45UPIcjs.l.parse(e),o=await a.POST("/v1/branches/{branch_id}/reset",{params:{path:{branch_id:t}},body:{migration_version:s}});n(o,"Failed to reset branch")},async rebaseBranch(t){let e=await a.POST("/v1/branches/{branch_id}/push",{params:{path:{branch_id:t}},body:{}});n(e,"Failed to rebase branch")}},storage:{async listAllBuckets(t){let e=await a.GET("/v1/projects/{ref}/storage/buckets",{params:{path:{ref:t}}});return n(e,"Failed to list storage buckets"),e.data},async getStorageConfig(t){let e=await a.GET("/v1/projects/{ref}/config/storage",{params:{path:{ref:t}}});return n(e,"Failed to get storage config"),e.data},async updateStorageConfig(t,e){let s=await a.PATCH("/v1/projects/{ref}/config/storage",{params:{path:{ref:t}},body:{fileSizeLimit:e.fileSizeLimit,features:{imageTransformation:{enabled:e.features.imageTransformation.enabled},s3Protocol:{enabled:e.features.s3Protocol.enabled}}}});n(s,"Failed to update storage config")}}}}function X(p){switch(p){case"api.supabase.com":return"supabase.co";case"api.supabase.green":return"supabase.green";default:return"supabase.red"}}exports.a = ye;
- //# sourceMappingURL=chunk-QKSW7LRE.cjs.map
+ `;default:throw new Error(`unsupported log service type: ${p}`)}}var _openapifetch = require('openapi-fetch'); var _openapifetch2 = _interopRequireDefault(_openapifetch);var _v4 = require('zod/v4');function F(p,i,d={}){return _openapifetch2.default.call(void 0, {baseUrl:p,headers:{Authorization:`Bearer ${i}`,...d}})}var N=_v4.z.object({message:_v4.z.string()});function n(p,i){if("error"in p){if(p.response.status===401)throw new Error("Unauthorized. Please provide a valid access token to the MCP server via the --access-token flag or SUPABASE_ACCESS_TOKEN.");let{data:d}=N.safeParse(p.error);throw d?new Error(d.message):new Error(i)}}var H="ABCDEFGHIJKLMNOPQRSTUVWXYZ",I="abcdefghijklmnopqrstuvwxyz",W="0123456789",Q="!@#$%^&*()_+~`|}{[]:;?><,./-=",$=({length:p=10,numbers:i=!1,symbols:d=!1,uppercase:_=!0,lowercase:a=!0}={})=>{let m="";if(_&&(m+=H),a&&(m+=I),i&&(m+=W),d&&(m+=Q),m.length===0)throw new Error("at least one character set must be selected");let v=new Uint32Array(p);crypto.getRandomValues(v);let O="";for(let w=0;w<p;w++){let j=v[w]%m.length;O+=m.charAt(j)}return O};var{version:Y}=_chunkS7IXC5T5cjs.a;function ye(p){let{accessToken:i,apiUrl:d}=p,_=_nullishCoalesce(d, () => ("https://api.supabase.com")),a=F(_,i),m={async listOrganizations(){let t=await a.GET("/v1/organizations");return n(t,"Failed to fetch organizations"),t.data},async getOrganization(t){let e=await a.GET("/v1/organizations/{slug}",{params:{path:{slug:t}}});return n(e,"Failed to fetch organization"),e.data},async listProjects(){let t=await a.GET("/v1/projects");return n(t,"Failed to fetch projects"),t.data},async getProject(t){let e=await a.GET("/v1/projects/{ref}",{params:{path:{ref:t}}});return n(e,"Failed to fetch project"),e.data},async createProject(t){let{name:e,organization_id:s,region:o,db_pass:r}=_chunkS7IXC5T5cjs.j.parse(t),c=await a.POST("/v1/projects",{body:{name:e,region:o,organization_id:s,db_pass:_nullishCoalesce(r, () => ($({length:16,numbers:!0,uppercase:!0,lowercase:!0})))}});return n(c,"Failed to create project"),c.data},async pauseProject(t){let e=await a.POST("/v1/projects/{ref}/pause",{params:{path:{ref:t}}});n(e,"Failed to pause project")},async restoreProject(t){let e=await a.POST("/v1/projects/{ref}/restore",{params:{path:{ref:t}}});n(e,"Failed to restore project")}},v={async executeSql(t,e){let{query:s,parameters:o,read_only:r}=_chunkS7IXC5T5cjs.n.parse(e),c=await a.POST("/v1/projects/{ref}/database/query",{params:{path:{ref:t}},body:{query:s,parameters:o,read_only:r}});return n(c,"Failed to execute SQL query"),c.data},async listMigrations(t){let e=await a.GET("/v1/projects/{ref}/database/migrations",{params:{path:{ref:t}}});return n(e,"Failed to fetch migrations"),e.data},async applyMigration(t,e){let{name:s,query:o}=_chunkS7IXC5T5cjs.o.parse(e),r=await a.POST("/v1/projects/{ref}/database/migrations",{params:{path:{ref:t}},body:{name:s,query:o}});n(r,"Failed to apply migration")}},O={async getLogs(t,e){let{service:s,iso_timestamp_start:o,iso_timestamp_end:r}=_chunkS7IXC5T5cjs.r.parse(e),c=D(s),u=await a.GET("/v1/projects/{ref}/analytics/endpoints/logs.all",{params:{path:{ref:t},query:{sql:c,iso_timestamp_start:o,iso_timestamp_end:r}}});return n(u,"Failed to fetch logs"),u.data},async getSecurityAdvisors(t){let e=await a.GET("/v1/projects/{ref}/advisors/security",{params:{path:{ref:t}}});return n(e,"Failed to fetch security advisors"),e.data},async getPerformanceAdvisors(t){let e=await a.GET("/v1/projects/{ref}/advisors/performance",{params:{path:{ref:t}}});return n(e,"Failed to fetch 
performance advisors"),e.data}},w={async getProjectUrl(t){let e=new URL(_);return`https://${t}.${X(e.hostname)}`},async getPublishableKeys(t){let e=await a.GET("/v1/projects/{ref}/api-keys",{params:{path:{ref:t},query:{reveal:!1}}});n(e,"Failed to fetch API keys");let s;try{let r=await a.GET("/v1/projects/{ref}/api-keys/legacy",{params:{path:{ref:t}}});r.response.ok&&(s=_nullishCoalesce(_optionalChain([r, 'access', _2 => _2.data, 'optionalAccess', _3 => _3.enabled]), () => (!0)))}catch (e2){s=void 0}let o=_nullishCoalesce(_optionalChain([e, 'access', _4 => _4.data, 'optionalAccess', _5 => _5.filter, 'call', _6 => _6(r=>r.name==="anon"||r.type==="publishable")]), () => ([]));if(o.length===0)throw new Error("No client-safe API keys (anon or publishable) found. Please create a publishable key in your project settings.");return o.map(r=>({api_key:r.api_key,name:r.name,type:r.type==="publishable"?"publishable":"legacy",...s!==void 0&&{disabled:r.type==="legacy"&&!s},description:_nullishCoalesce(r.description, () => (void 0)),id:_nullishCoalesce(r.id, () => (void 0))}))},async generateTypescriptTypes(t){let e=await a.GET("/v1/projects/{ref}/types/typescript",{params:{path:{ref:t}}});return n(e,"Failed to fetch TypeScript types"),e.data}},j={async listEdgeFunctions(t){let e=await a.GET("/v1/projects/{ref}/functions",{params:{path:{ref:t}}});return n(e,"Failed to fetch Edge Functions"),e.data.map(s=>{let o=_chunkS7IXC5T5cjs.u.call(void 0, t,s.id,s.version),r=s.entrypoint_path?_chunkS7IXC5T5cjs.v.call(void 0, {deploymentId:o,filename:_url.fileURLToPath.call(void 0, s.entrypoint_path,{windows:!1})}):void 0,c=s.import_map_path?_chunkS7IXC5T5cjs.v.call(void 0, {deploymentId:o,filename:_url.fileURLToPath.call(void 0, s.import_map_path,{windows:!1})}):void 0;return{...s,entrypoint_path:r,import_map_path:c}})},async getEdgeFunction(t,e){let s=await a.GET("/v1/projects/{ref}/functions/{function_slug}",{params:{path:{ref:t,function_slug:e}}});if(s.error)throw s.error;n(s,"Failed to fetch Edge Function");let o=s.data,r=_chunkS7IXC5T5cjs.u.call(void 0, t,o.id,o.version),c=o.entrypoint_path?_chunkS7IXC5T5cjs.v.call(void 0, {deploymentId:r,filename:_url.fileURLToPath.call(void 0, o.entrypoint_path,{windows:!1})}):void 0,u=o.import_map_path?_chunkS7IXC5T5cjs.v.call(void 0, {deploymentId:r,filename:_url.fileURLToPath.call(void 0, o.import_map_path,{windows:!1})}):void 0,g=await a.GET("/v1/projects/{ref}/functions/{function_slug}/body",{params:{path:{ref:t,function_slug:e}},headers:{Accept:"multipart/form-data"},parseAs:"stream"});n(g,"Failed to fetch Edge Function files");let y=g.response.headers.get("content-type");if(!y||!y.startsWith("multipart/form-data"))throw new Error(`Unexpected content type: ${y}. 
Expected multipart/form-data.`);let S=_multipartparser.getMultipartBoundary.call(void 0, y);if(!S)throw new Error("No multipart boundary found in response headers");if(!g.data)throw new Error("No data received from Edge Function body");let l=[],E=_multipartparser.parseMultipartStream.call(void 0, g.data,{boundary:S});for await(let h of E)h.isFile&&h.filename&&l.push({name:_chunkS7IXC5T5cjs.v.call(void 0, {deploymentId:r,filename:h.filename}),content:h.text});return{...o,entrypoint_path:c,import_map_path:u,files:l}},async deployEdgeFunction(t,e){let{name:s,entrypoint_path:o,import_map_path:r,verify_jwt:c,files:u}=_chunkS7IXC5T5cjs.m.parse(e),g;try{g=await j.getEdgeFunction(t,s)}catch (e3){}let y=u.find(l=>["deno.json","import_map.json"].includes(l.name));r??=_nullishCoalesce(_optionalChain([g, 'optionalAccess', _7 => _7.import_map_path]), () => (_optionalChain([y, 'optionalAccess', _8 => _8.name])));let S=await a.POST("/v1/projects/{ref}/functions/deploy",{params:{path:{ref:t},query:{slug:s}},body:{metadata:{name:s,entrypoint_path:o,import_map_path:r,verify_jwt:c},file:u},bodySerializer(l){let E=new FormData,h=new Blob([JSON.stringify(l.metadata)],{type:"application/json"});return E.append("metadata",h),_optionalChain([l, 'access', _9 => _9.file, 'optionalAccess', _10 => _10.forEach, 'call', _11 => _11(k=>{let A=k,z=new Blob([A.content],{type:"application/typescript"});E.append("file",z,A.name)})]),E}});return n(S,"Failed to deploy Edge Function"),S.data}};return{async init(t){let{clientInfo:e}=t;if(!e)throw new Error("Client info is required");a=F(_,i,{"User-Agent":`supabase-mcp/${Y} (${e.name}/${e.version})`})},account:m,database:v,debugging:O,development:w,functions:j,branching:{async listBranches(t){let e=await a.GET("/v1/projects/{ref}/branches",{params:{path:{ref:t}}});return e.response.status===422?[]:(n(e,"Failed to list branches"),e.data)},async createBranch(t,e){let{name:s}=_chunkS7IXC5T5cjs.k.parse(e),o=await a.POST("/v1/projects/{ref}/branches",{params:{path:{ref:t}},body:{branch_name:s}});return n(o,"Failed to create branch"),o.data},async deleteBranch(t){let e=await a.DELETE("/v1/branches/{branch_id}",{params:{path:{branch_id:t}}});n(e,"Failed to delete branch")},async mergeBranch(t){let e=await a.POST("/v1/branches/{branch_id}/merge",{params:{path:{branch_id:t}},body:{}});n(e,"Failed to merge branch")},async resetBranch(t,e){let{migration_version:s}=_chunkS7IXC5T5cjs.l.parse(e),o=await a.POST("/v1/branches/{branch_id}/reset",{params:{path:{branch_id:t}},body:{migration_version:s}});n(o,"Failed to reset branch")},async rebaseBranch(t){let e=await a.POST("/v1/branches/{branch_id}/push",{params:{path:{branch_id:t}},body:{}});n(e,"Failed to rebase branch")}},storage:{async listAllBuckets(t){let e=await a.GET("/v1/projects/{ref}/storage/buckets",{params:{path:{ref:t}}});return n(e,"Failed to list storage buckets"),e.data},async getStorageConfig(t){let e=await a.GET("/v1/projects/{ref}/config/storage",{params:{path:{ref:t}}});return n(e,"Failed to get storage config"),e.data},async updateStorageConfig(t,e){let s=await a.PATCH("/v1/projects/{ref}/config/storage",{params:{path:{ref:t}},body:{fileSizeLimit:e.fileSizeLimit,features:{imageTransformation:{enabled:e.features.imageTransformation.enabled},s3Protocol:{enabled:e.features.s3Protocol.enabled}}}});n(s,"Failed to update storage config")}}}}function X(p){switch(p){case"api.supabase.com":return"supabase.co";case"api.supabase.green":return"supabase.green";default:return"supabase.red"}}exports.a = ye;
+ //# sourceMappingURL=chunk-JZW4V3WJ.cjs.map
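
chunk-QKSW7LRE.cjs.map → chunk-JZW4V3WJ.cjs.map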
@@ -1 +1 @@
- {"version":3,"sources":["/Users/matt/Developer/supabase-org/supabase-mcp/packages/mcp-server-supabase/dist/chunk-QKSW7LRE.cjs","../src/platform/api-platform.ts","../src/logs.ts"],"names":["getLogQuery","service","limit","stripIndent"],"mappings":"AAAA,y0BAAuG,6DCGhG,0BAEuB,yCCLF,SAGZA,CAAAA,CAAYC,CAAAA,CAAsBC,CAAAA,CAAgB,GAAA,CAAK,CACrE,MAAA,CAAQD,CAAAA,CAAS,CACf,IAAK,KAAA,CACH,OAAOE,uBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAAA,EAOGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,eAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA,cAAA,EAGGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,UAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAAA,EAKGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,eAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAAA,EAMGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,MAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA,cAAA,EAIGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,SAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA,cAAA,EAGGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,UAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA,cAAA,EAGGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,OAAA,CACE,MAAM,IAAI,KAAA,CAAM,CAAA,8BAAA,EAAiCD,CAAO,CAAA,CAAA","file":"/Users/matt/Developer/supabase-org/supabase-mcp/packages/mcp-server-supabase/dist/chunk-QKSW7LRE.cjs","sourcesContent":[null,"import {\n getMultipartBoundary,\n parseMultipartStream,\n} from '@mjackson/multipart-parser';\nimport type { InitData } from '@supabase/mcp-utils';\nimport { fileURLToPath } from 'node:url';\nimport packageJson from '../../package.json' with { type: 'json' };\nimport { getDeploymentId, normalizeFilename } from '../edge-function.js';\nimport { getLogQuery } from '../logs.js';\nimport {\n assertSuccess,\n createManagementApiClient,\n} from '../management-api/index.js';\nimport { generatePassword } from '../password.js';\nimport {\n applyMigrationOptionsSchema,\n createBranchOptionsSchema,\n createProjectOptionsSchema,\n deployEdgeFunctionOptionsSchema,\n executeSqlOptionsSchema,\n getLogsOptionsSchema,\n resetBranchOptionsSchema,\n type AccountOperations,\n type ApiKey,\n type ApiKeyType,\n type ApplyMigrationOptions,\n type BranchingOperations,\n type CreateBranchOptions,\n type CreateProjectOptions,\n type DatabaseOperations,\n type DebuggingOperations,\n type DeployEdgeFunctionOptions,\n type DevelopmentOperations,\n type SuccessResponse,\n type EdgeFunction,\n type EdgeFunctionsOperations,\n type EdgeFunctionWithBody,\n type ExecuteSqlOptions,\n type GetLogsOptions,\n type ResetBranchOptions,\n type StorageConfig,\n type StorageOperations,\n type SupabasePlatform,\n} from './index.js';\n\nconst { version } = packageJson;\n\nconst SUCCESS_RESPONSE: SuccessResponse = { success: true };\n\nexport type SupabaseApiPlatformOptions = {\n /**\n * The access token for the Supabase Management API.\n */\n accessToken: string;\n\n /**\n * The API URL for the Supabase Management API.\n */\n apiUrl?: string;\n};\n\n/**\n * Creates a Supabase platform implementation using the Supabase Management API.\n */\nexport function createSupabaseApiPlatform(\n options: SupabaseApiPlatformOptions\n): SupabasePlatform {\n const { accessToken, apiUrl } = options;\n\n const managementApiUrl = apiUrl ?? 
'https://api.supabase.com';\n\n let managementApiClient = createManagementApiClient(\n managementApiUrl,\n accessToken\n );\n\n const account: AccountOperations = {\n async listOrganizations() {\n const response = await managementApiClient.GET('/v1/organizations');\n\n assertSuccess(response, 'Failed to fetch organizations');\n\n return response.data;\n },\n async getOrganization(organizationId: string) {\n const response = await managementApiClient.GET(\n '/v1/organizations/{slug}',\n {\n params: {\n path: {\n slug: organizationId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch organization');\n\n return response.data;\n },\n async listProjects() {\n const response = await managementApiClient.GET('/v1/projects');\n\n assertSuccess(response, 'Failed to fetch projects');\n\n return response.data;\n },\n async getProject(projectId: string) {\n const response = await managementApiClient.GET('/v1/projects/{ref}', {\n params: {\n path: {\n ref: projectId,\n },\n },\n });\n assertSuccess(response, 'Failed to fetch project');\n return response.data;\n },\n async createProject(options: CreateProjectOptions) {\n const { name, organization_id, region, db_pass } =\n createProjectOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST('/v1/projects', {\n body: {\n name,\n region,\n organization_id,\n db_pass:\n db_pass ??\n generatePassword({\n length: 16,\n numbers: true,\n uppercase: true,\n lowercase: true,\n }),\n },\n });\n\n assertSuccess(response, 'Failed to create project');\n\n return response.data;\n },\n async pauseProject(projectId: string) {\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/pause',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to pause project');\n },\n async restoreProject(projectId: string) {\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/restore',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to restore project');\n },\n };\n\n const database: DatabaseOperations = {\n async executeSql<T>(projectId: string, options: ExecuteSqlOptions) {\n const { query, parameters, read_only } =\n executeSqlOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/database/query',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n query,\n parameters,\n read_only,\n },\n }\n );\n\n assertSuccess(response, 'Failed to execute SQL query');\n\n return response.data as unknown as T[];\n },\n async listMigrations(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/database/migrations',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch migrations');\n\n return response.data;\n },\n async applyMigration(projectId: string, options: ApplyMigrationOptions) {\n const { name, query } = applyMigrationOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/database/migrations',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n name,\n query,\n },\n }\n );\n\n assertSuccess(response, 'Failed to apply migration');\n\n // Intentionally don't return the result of the migration\n // to avoid prompt injection attacks. 
If the migration failed,\n // it will throw an error.\n },\n };\n\n const debugging: DebuggingOperations = {\n async getLogs(projectId: string, options: GetLogsOptions) {\n const { service, iso_timestamp_start, iso_timestamp_end } =\n getLogsOptionsSchema.parse(options);\n\n const sql = getLogQuery(service);\n\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/analytics/endpoints/logs.all',\n {\n params: {\n path: {\n ref: projectId,\n },\n query: {\n sql,\n iso_timestamp_start,\n iso_timestamp_end,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch logs');\n\n return response.data;\n },\n async getSecurityAdvisors(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/advisors/security',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch security advisors');\n\n return response.data;\n },\n async getPerformanceAdvisors(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/advisors/performance',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch performance advisors');\n\n return response.data;\n },\n };\n\n const development: DevelopmentOperations = {\n async getProjectUrl(projectId: string): Promise<string> {\n const apiUrl = new URL(managementApiUrl);\n return `https://${projectId}.${getProjectDomain(apiUrl.hostname)}`;\n },\n async getPublishableKeys(projectId: string): Promise<ApiKey[]> {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/api-keys',\n {\n params: {\n path: {\n ref: projectId,\n },\n query: {\n reveal: false,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch API keys');\n\n // Try to check if legacy JWT-based keys are enabled\n // If this fails, we'll continue without the disabled field\n let legacyKeysEnabled: boolean | undefined = undefined;\n try {\n const legacyKeysResponse = await managementApiClient.GET(\n '/v1/projects/{ref}/api-keys/legacy',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n if (legacyKeysResponse.response.ok) {\n legacyKeysEnabled = legacyKeysResponse.data?.enabled ?? true;\n }\n } catch (error) {\n // If we can't fetch legacy key status, continue without it\n legacyKeysEnabled = undefined;\n }\n\n // Filter for client-safe keys: legacy 'anon' or publishable type\n const clientKeys =\n response.data?.filter(\n (key) => key.name === 'anon' || key.type === 'publishable'\n ) ?? [];\n\n if (clientKeys.length === 0) {\n throw new Error(\n 'No client-safe API keys (anon or publishable) found. Please create a publishable key in your project settings.'\n );\n }\n\n return clientKeys.map((key) => ({\n api_key: key.api_key!,\n name: key.name,\n type: (key.type === 'publishable'\n ? 'publishable'\n : 'legacy') satisfies ApiKeyType,\n // Only include disabled field if we successfully fetched legacy key status\n ...(legacyKeysEnabled !== undefined && {\n disabled: key.type === 'legacy' && !legacyKeysEnabled,\n }),\n description: key.description ?? undefined,\n id: key.id ?? 
undefined,\n }));\n },\n async generateTypescriptTypes(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/types/typescript',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch TypeScript types');\n\n return response.data;\n },\n };\n\n const functions: EdgeFunctionsOperations = {\n async listEdgeFunctions(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/functions',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch Edge Functions');\n\n return response.data.map((edgeFunction) => {\n const deploymentId = getDeploymentId(\n projectId,\n edgeFunction.id,\n edgeFunction.version\n );\n\n const entrypoint_path = edgeFunction.entrypoint_path\n ? normalizeFilename({\n deploymentId,\n filename: fileURLToPath(edgeFunction.entrypoint_path, {\n windows: false,\n }),\n })\n : undefined;\n\n const import_map_path = edgeFunction.import_map_path\n ? normalizeFilename({\n deploymentId,\n filename: fileURLToPath(edgeFunction.import_map_path, {\n windows: false,\n }),\n })\n : undefined;\n\n return {\n ...edgeFunction,\n entrypoint_path,\n import_map_path,\n };\n });\n },\n async getEdgeFunction(projectId: string, functionSlug: string) {\n const functionResponse = await managementApiClient.GET(\n '/v1/projects/{ref}/functions/{function_slug}',\n {\n params: {\n path: {\n ref: projectId,\n function_slug: functionSlug,\n },\n },\n }\n );\n\n if (functionResponse.error) {\n throw functionResponse.error;\n }\n\n assertSuccess(functionResponse, 'Failed to fetch Edge Function');\n\n const edgeFunction = functionResponse.data;\n\n const deploymentId = getDeploymentId(\n projectId,\n edgeFunction.id,\n edgeFunction.version\n );\n\n const entrypoint_path = edgeFunction.entrypoint_path\n ? normalizeFilename({\n deploymentId,\n filename: fileURLToPath(edgeFunction.entrypoint_path, {\n windows: false,\n }),\n })\n : undefined;\n\n const import_map_path = edgeFunction.import_map_path\n ? normalizeFilename({\n deploymentId,\n filename: fileURLToPath(edgeFunction.import_map_path, {\n windows: false,\n }),\n })\n : undefined;\n\n const bodyResponse = await managementApiClient.GET(\n '/v1/projects/{ref}/functions/{function_slug}/body',\n {\n params: {\n path: {\n ref: projectId,\n function_slug: functionSlug,\n },\n },\n headers: {\n Accept: 'multipart/form-data',\n },\n parseAs: 'stream',\n }\n );\n\n assertSuccess(bodyResponse, 'Failed to fetch Edge Function files');\n\n const contentType = bodyResponse.response.headers.get('content-type');\n\n if (!contentType || !contentType.startsWith('multipart/form-data')) {\n throw new Error(\n `Unexpected content type: ${contentType}. 
Expected multipart/form-data.`\n );\n }\n\n const boundary = getMultipartBoundary(contentType);\n\n if (!boundary) {\n throw new Error('No multipart boundary found in response headers');\n }\n\n if (!bodyResponse.data) {\n throw new Error('No data received from Edge Function body');\n }\n\n const files: EdgeFunctionWithBody['files'] = [];\n const parts = parseMultipartStream(bodyResponse.data, { boundary });\n\n for await (const part of parts) {\n if (part.isFile && part.filename) {\n files.push({\n name: normalizeFilename({\n deploymentId,\n filename: part.filename,\n }),\n content: part.text,\n });\n }\n }\n\n return {\n ...edgeFunction,\n entrypoint_path,\n import_map_path,\n files,\n };\n },\n async deployEdgeFunction(\n projectId: string,\n options: DeployEdgeFunctionOptions\n ) {\n let {\n name,\n entrypoint_path,\n import_map_path,\n verify_jwt,\n files: inputFiles,\n } = deployEdgeFunctionOptionsSchema.parse(options);\n\n let existingEdgeFunction: EdgeFunction | undefined;\n try {\n existingEdgeFunction = await functions.getEdgeFunction(projectId, name);\n } catch (error) {}\n\n const import_map_file = inputFiles.find((file) =>\n ['deno.json', 'import_map.json'].includes(file.name)\n );\n\n // Use existing import map path or file name heuristic if not provided\n import_map_path ??=\n existingEdgeFunction?.import_map_path ?? import_map_file?.name;\n\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/functions/deploy',\n {\n params: {\n path: {\n ref: projectId,\n },\n query: { slug: name },\n },\n body: {\n metadata: {\n name,\n entrypoint_path,\n import_map_path,\n verify_jwt,\n },\n file: inputFiles as any, // We need to pass file name and content to our serializer\n },\n bodySerializer(body) {\n const formData = new FormData();\n\n const blob = new Blob([JSON.stringify(body.metadata)], {\n type: 'application/json',\n });\n formData.append('metadata', blob);\n\n body.file?.forEach((f: any) => {\n const file: { name: string; content: string } = f;\n const blob = new Blob([file.content], {\n type: 'application/typescript',\n });\n formData.append('file', blob, file.name);\n });\n\n return formData;\n },\n }\n );\n\n assertSuccess(response, 'Failed to deploy Edge Function');\n\n return response.data;\n },\n };\n\n const branching: BranchingOperations = {\n async listBranches(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/branches',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n // There are no branches if branching is disabled\n if (response.response.status === 422) return [];\n assertSuccess(response, 'Failed to list branches');\n\n return response.data;\n },\n async createBranch(projectId: string, options: CreateBranchOptions) {\n const { name } = createBranchOptionsSchema.parse(options);\n\n const createBranchResponse = await managementApiClient.POST(\n '/v1/projects/{ref}/branches',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n branch_name: name,\n },\n }\n );\n\n assertSuccess(createBranchResponse, 'Failed to create branch');\n\n return createBranchResponse.data;\n },\n async deleteBranch(branchId: string) {\n const response = await managementApiClient.DELETE(\n '/v1/branches/{branch_id}',\n {\n params: {\n path: {\n branch_id: branchId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to delete branch');\n },\n async mergeBranch(branchId: string) {\n const response = await managementApiClient.POST(\n '/v1/branches/{branch_id}/merge',\n {\n params: {\n path: {\n 
branch_id: branchId,\n },\n },\n body: {},\n }\n );\n\n assertSuccess(response, 'Failed to merge branch');\n },\n async resetBranch(branchId: string, options: ResetBranchOptions) {\n const { migration_version } = resetBranchOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST(\n '/v1/branches/{branch_id}/reset',\n {\n params: {\n path: {\n branch_id: branchId,\n },\n },\n body: {\n migration_version,\n },\n }\n );\n\n assertSuccess(response, 'Failed to reset branch');\n },\n async rebaseBranch(branchId: string) {\n const response = await managementApiClient.POST(\n '/v1/branches/{branch_id}/push',\n {\n params: {\n path: {\n branch_id: branchId,\n },\n },\n body: {},\n }\n );\n\n assertSuccess(response, 'Failed to rebase branch');\n },\n };\n\n const storage: StorageOperations = {\n // Storage methods\n async listAllBuckets(project_id: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/storage/buckets',\n {\n params: {\n path: {\n ref: project_id,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to list storage buckets');\n\n return response.data;\n },\n\n async getStorageConfig(project_id: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/config/storage',\n {\n params: {\n path: {\n ref: project_id,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to get storage config');\n\n return response.data;\n },\n\n async updateStorageConfig(projectId: string, config: StorageConfig) {\n const response = await managementApiClient.PATCH(\n '/v1/projects/{ref}/config/storage',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n fileSizeLimit: config.fileSizeLimit,\n features: {\n imageTransformation: {\n enabled: config.features.imageTransformation.enabled,\n },\n s3Protocol: {\n enabled: config.features.s3Protocol.enabled,\n },\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to update storage config');\n },\n };\n\n const platform: SupabasePlatform = {\n async init(info: InitData) {\n const { clientInfo } = info;\n if (!clientInfo) {\n throw new Error('Client info is required');\n }\n\n // Re-initialize the management API client with the user agent\n managementApiClient = createManagementApiClient(\n managementApiUrl,\n accessToken,\n {\n 'User-Agent': `supabase-mcp/${version} (${clientInfo.name}/${clientInfo.version})`,\n }\n );\n },\n account,\n database,\n debugging,\n development,\n functions,\n branching,\n storage,\n };\n\n return platform;\n}\n\nfunction getProjectDomain(apiHostname: string) {\n switch (apiHostname) {\n case 'api.supabase.com':\n return 'supabase.co';\n case 'api.supabase.green':\n return 'supabase.green';\n default:\n return 'supabase.red';\n }\n}\n","import { stripIndent } from 'common-tags';\nimport type { LogsService } from './platform/types.js';\n\nexport function getLogQuery(service: LogsService, limit: number = 100) {\n switch (service) {\n case 'api':\n return stripIndent`\n select id, identifier, timestamp, event_message, request.method, request.path, response.status_code\n from edge_logs\n cross join unnest(metadata) as m\n cross join unnest(m.request) as request\n cross join unnest(m.response) as response\n order by timestamp desc\n limit ${limit}\n `;\n case 'branch-action':\n return stripIndent`\n select workflow_run, workflow_run_logs.timestamp, id, event_message from workflow_run_logs\n order by timestamp desc\n limit ${limit}\n `;\n case 'postgres':\n return stripIndent`\n select identifier, postgres_logs.timestamp, id, event_message, 
parsed.error_severity from postgres_logs\n cross join unnest(metadata) as m\n cross join unnest(m.parsed) as parsed\n order by timestamp desc\n limit ${limit}\n `;\n case 'edge-function':\n return stripIndent`\n select id, function_edge_logs.timestamp, event_message, response.status_code, request.method, m.function_id, m.execution_time_ms, m.deployment_id, m.version from function_edge_logs\n cross join unnest(metadata) as m\n cross join unnest(m.response) as response\n cross join unnest(m.request) as request\n order by timestamp desc\n limit ${limit}\n `;\n case 'auth':\n return stripIndent`\n select id, auth_logs.timestamp, event_message, metadata.level, metadata.status, metadata.path, metadata.msg as msg, metadata.error from auth_logs\n cross join unnest(metadata) as metadata\n order by timestamp desc\n limit ${limit}\n `;\n case 'storage':\n return stripIndent`\n select id, storage_logs.timestamp, event_message from storage_logs\n order by timestamp desc\n limit ${limit}\n `;\n case 'realtime':\n return stripIndent`\n select id, realtime_logs.timestamp, event_message from realtime_logs\n order by timestamp desc\n limit ${limit}\n `;\n default:\n throw new Error(`unsupported log service type: ${service}`);\n }\n}\n"]}
+ {"version":3,"sources":["/Users/matt/Developer/supabase-org/supabase-mcp/packages/mcp-server-supabase/dist/chunk-JZW4V3WJ.cjs","../src/platform/api-platform.ts","../src/logs.ts"],"names":["getLogQuery","service","limit","stripIndent"],"mappings":"AAAA,y0BAAuG,6DCGhG,0BAEuB,yCCLF,SAGZA,CAAAA,CAAYC,CAAAA,CAAsBC,CAAAA,CAAgB,GAAA,CAAK,CACrE,MAAA,CAAQD,CAAAA,CAAS,CACf,IAAK,KAAA,CACH,OAAOE,uBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAAA,EAOGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,eAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA,cAAA,EAGGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,UAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAAA,EAKGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,eAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAAA,EAMGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,MAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA;AAAA,cAAA,EAIGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,SAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA,cAAA,EAGGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,IAAK,UAAA,CACH,OAAOC,uBAAAA,CAAAA;AAAA;AAAA;AAAA,cAAA,EAGGD,CAAK,CAAA;AAAA,MAAA,CAAA,CAEjB,OAAA,CACE,MAAM,IAAI,KAAA,CAAM,CAAA,8BAAA,EAAiCD,CAAO,CAAA,CAAA","file":"/Users/matt/Developer/supabase-org/supabase-mcp/packages/mcp-server-supabase/dist/chunk-JZW4V3WJ.cjs","sourcesContent":[null,"import {\n getMultipartBoundary,\n parseMultipartStream,\n} from '@mjackson/multipart-parser';\nimport type { InitData } from '@supabase/mcp-utils';\nimport { fileURLToPath } from 'node:url';\nimport packageJson from '../../package.json' with { type: 'json' };\nimport { getDeploymentId, normalizeFilename } from '../edge-function.js';\nimport { getLogQuery } from '../logs.js';\nimport {\n assertSuccess,\n createManagementApiClient,\n} from '../management-api/index.js';\nimport { generatePassword } from '../password.js';\nimport {\n applyMigrationOptionsSchema,\n createBranchOptionsSchema,\n createProjectOptionsSchema,\n deployEdgeFunctionOptionsSchema,\n executeSqlOptionsSchema,\n getLogsOptionsSchema,\n resetBranchOptionsSchema,\n type AccountOperations,\n type ApiKey,\n type ApiKeyType,\n type ApplyMigrationOptions,\n type BranchingOperations,\n type CreateBranchOptions,\n type CreateProjectOptions,\n type DatabaseOperations,\n type DebuggingOperations,\n type DeployEdgeFunctionOptions,\n type DevelopmentOperations,\n type SuccessResponse,\n type EdgeFunction,\n type EdgeFunctionsOperations,\n type EdgeFunctionWithBody,\n type ExecuteSqlOptions,\n type GetLogsOptions,\n type ResetBranchOptions,\n type StorageConfig,\n type StorageOperations,\n type SupabasePlatform,\n} from './index.js';\n\nconst { version } = packageJson;\n\nconst SUCCESS_RESPONSE: SuccessResponse = { success: true };\n\nexport type SupabaseApiPlatformOptions = {\n /**\n * The access token for the Supabase Management API.\n */\n accessToken: string;\n\n /**\n * The API URL for the Supabase Management API.\n */\n apiUrl?: string;\n};\n\n/**\n * Creates a Supabase platform implementation using the Supabase Management API.\n */\nexport function createSupabaseApiPlatform(\n options: SupabaseApiPlatformOptions\n): SupabasePlatform {\n const { accessToken, apiUrl } = options;\n\n const managementApiUrl = apiUrl ?? 
'https://api.supabase.com';\n\n let managementApiClient = createManagementApiClient(\n managementApiUrl,\n accessToken\n );\n\n const account: AccountOperations = {\n async listOrganizations() {\n const response = await managementApiClient.GET('/v1/organizations');\n\n assertSuccess(response, 'Failed to fetch organizations');\n\n return response.data;\n },\n async getOrganization(organizationId: string) {\n const response = await managementApiClient.GET(\n '/v1/organizations/{slug}',\n {\n params: {\n path: {\n slug: organizationId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch organization');\n\n return response.data;\n },\n async listProjects() {\n const response = await managementApiClient.GET('/v1/projects');\n\n assertSuccess(response, 'Failed to fetch projects');\n\n return response.data;\n },\n async getProject(projectId: string) {\n const response = await managementApiClient.GET('/v1/projects/{ref}', {\n params: {\n path: {\n ref: projectId,\n },\n },\n });\n assertSuccess(response, 'Failed to fetch project');\n return response.data;\n },\n async createProject(options: CreateProjectOptions) {\n const { name, organization_id, region, db_pass } =\n createProjectOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST('/v1/projects', {\n body: {\n name,\n region,\n organization_id,\n db_pass:\n db_pass ??\n generatePassword({\n length: 16,\n numbers: true,\n uppercase: true,\n lowercase: true,\n }),\n },\n });\n\n assertSuccess(response, 'Failed to create project');\n\n return response.data;\n },\n async pauseProject(projectId: string) {\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/pause',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to pause project');\n },\n async restoreProject(projectId: string) {\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/restore',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to restore project');\n },\n };\n\n const database: DatabaseOperations = {\n async executeSql<T>(projectId: string, options: ExecuteSqlOptions) {\n const { query, parameters, read_only } =\n executeSqlOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/database/query',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n query,\n parameters,\n read_only,\n },\n }\n );\n\n assertSuccess(response, 'Failed to execute SQL query');\n\n return response.data as unknown as T[];\n },\n async listMigrations(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/database/migrations',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch migrations');\n\n return response.data;\n },\n async applyMigration(projectId: string, options: ApplyMigrationOptions) {\n const { name, query } = applyMigrationOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/database/migrations',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n name,\n query,\n },\n }\n );\n\n assertSuccess(response, 'Failed to apply migration');\n\n // Intentionally don't return the result of the migration\n // to avoid prompt injection attacks. 
If the migration failed,\n // it will throw an error.\n },\n };\n\n const debugging: DebuggingOperations = {\n async getLogs(projectId: string, options: GetLogsOptions) {\n const { service, iso_timestamp_start, iso_timestamp_end } =\n getLogsOptionsSchema.parse(options);\n\n const sql = getLogQuery(service);\n\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/analytics/endpoints/logs.all',\n {\n params: {\n path: {\n ref: projectId,\n },\n query: {\n sql,\n iso_timestamp_start,\n iso_timestamp_end,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch logs');\n\n return response.data;\n },\n async getSecurityAdvisors(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/advisors/security',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch security advisors');\n\n return response.data;\n },\n async getPerformanceAdvisors(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/advisors/performance',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch performance advisors');\n\n return response.data;\n },\n };\n\n const development: DevelopmentOperations = {\n async getProjectUrl(projectId: string): Promise<string> {\n const apiUrl = new URL(managementApiUrl);\n return `https://${projectId}.${getProjectDomain(apiUrl.hostname)}`;\n },\n async getPublishableKeys(projectId: string): Promise<ApiKey[]> {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/api-keys',\n {\n params: {\n path: {\n ref: projectId,\n },\n query: {\n reveal: false,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch API keys');\n\n // Try to check if legacy JWT-based keys are enabled\n // If this fails, we'll continue without the disabled field\n let legacyKeysEnabled: boolean | undefined = undefined;\n try {\n const legacyKeysResponse = await managementApiClient.GET(\n '/v1/projects/{ref}/api-keys/legacy',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n if (legacyKeysResponse.response.ok) {\n legacyKeysEnabled = legacyKeysResponse.data?.enabled ?? true;\n }\n } catch (error) {\n // If we can't fetch legacy key status, continue without it\n legacyKeysEnabled = undefined;\n }\n\n // Filter for client-safe keys: legacy 'anon' or publishable type\n const clientKeys =\n response.data?.filter(\n (key) => key.name === 'anon' || key.type === 'publishable'\n ) ?? [];\n\n if (clientKeys.length === 0) {\n throw new Error(\n 'No client-safe API keys (anon or publishable) found. Please create a publishable key in your project settings.'\n );\n }\n\n return clientKeys.map((key) => ({\n api_key: key.api_key!,\n name: key.name,\n type: (key.type === 'publishable'\n ? 'publishable'\n : 'legacy') satisfies ApiKeyType,\n // Only include disabled field if we successfully fetched legacy key status\n ...(legacyKeysEnabled !== undefined && {\n disabled: key.type === 'legacy' && !legacyKeysEnabled,\n }),\n description: key.description ?? undefined,\n id: key.id ?? 
undefined,\n }));\n },\n async generateTypescriptTypes(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/types/typescript',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch TypeScript types');\n\n return response.data;\n },\n };\n\n const functions: EdgeFunctionsOperations = {\n async listEdgeFunctions(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/functions',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to fetch Edge Functions');\n\n return response.data.map((edgeFunction) => {\n const deploymentId = getDeploymentId(\n projectId,\n edgeFunction.id,\n edgeFunction.version\n );\n\n const entrypoint_path = edgeFunction.entrypoint_path\n ? normalizeFilename({\n deploymentId,\n filename: fileURLToPath(edgeFunction.entrypoint_path, {\n windows: false,\n }),\n })\n : undefined;\n\n const import_map_path = edgeFunction.import_map_path\n ? normalizeFilename({\n deploymentId,\n filename: fileURLToPath(edgeFunction.import_map_path, {\n windows: false,\n }),\n })\n : undefined;\n\n return {\n ...edgeFunction,\n entrypoint_path,\n import_map_path,\n };\n });\n },\n async getEdgeFunction(projectId: string, functionSlug: string) {\n const functionResponse = await managementApiClient.GET(\n '/v1/projects/{ref}/functions/{function_slug}',\n {\n params: {\n path: {\n ref: projectId,\n function_slug: functionSlug,\n },\n },\n }\n );\n\n if (functionResponse.error) {\n throw functionResponse.error;\n }\n\n assertSuccess(functionResponse, 'Failed to fetch Edge Function');\n\n const edgeFunction = functionResponse.data;\n\n const deploymentId = getDeploymentId(\n projectId,\n edgeFunction.id,\n edgeFunction.version\n );\n\n const entrypoint_path = edgeFunction.entrypoint_path\n ? normalizeFilename({\n deploymentId,\n filename: fileURLToPath(edgeFunction.entrypoint_path, {\n windows: false,\n }),\n })\n : undefined;\n\n const import_map_path = edgeFunction.import_map_path\n ? normalizeFilename({\n deploymentId,\n filename: fileURLToPath(edgeFunction.import_map_path, {\n windows: false,\n }),\n })\n : undefined;\n\n const bodyResponse = await managementApiClient.GET(\n '/v1/projects/{ref}/functions/{function_slug}/body',\n {\n params: {\n path: {\n ref: projectId,\n function_slug: functionSlug,\n },\n },\n headers: {\n Accept: 'multipart/form-data',\n },\n parseAs: 'stream',\n }\n );\n\n assertSuccess(bodyResponse, 'Failed to fetch Edge Function files');\n\n const contentType = bodyResponse.response.headers.get('content-type');\n\n if (!contentType || !contentType.startsWith('multipart/form-data')) {\n throw new Error(\n `Unexpected content type: ${contentType}. 
Expected multipart/form-data.`\n );\n }\n\n const boundary = getMultipartBoundary(contentType);\n\n if (!boundary) {\n throw new Error('No multipart boundary found in response headers');\n }\n\n if (!bodyResponse.data) {\n throw new Error('No data received from Edge Function body');\n }\n\n const files: EdgeFunctionWithBody['files'] = [];\n const parts = parseMultipartStream(bodyResponse.data, { boundary });\n\n for await (const part of parts) {\n if (part.isFile && part.filename) {\n files.push({\n name: normalizeFilename({\n deploymentId,\n filename: part.filename,\n }),\n content: part.text,\n });\n }\n }\n\n return {\n ...edgeFunction,\n entrypoint_path,\n import_map_path,\n files,\n };\n },\n async deployEdgeFunction(\n projectId: string,\n options: DeployEdgeFunctionOptions\n ) {\n let {\n name,\n entrypoint_path,\n import_map_path,\n verify_jwt,\n files: inputFiles,\n } = deployEdgeFunctionOptionsSchema.parse(options);\n\n let existingEdgeFunction: EdgeFunction | undefined;\n try {\n existingEdgeFunction = await functions.getEdgeFunction(projectId, name);\n } catch (error) {}\n\n const import_map_file = inputFiles.find((file) =>\n ['deno.json', 'import_map.json'].includes(file.name)\n );\n\n // Use existing import map path or file name heuristic if not provided\n import_map_path ??=\n existingEdgeFunction?.import_map_path ?? import_map_file?.name;\n\n const response = await managementApiClient.POST(\n '/v1/projects/{ref}/functions/deploy',\n {\n params: {\n path: {\n ref: projectId,\n },\n query: { slug: name },\n },\n body: {\n metadata: {\n name,\n entrypoint_path,\n import_map_path,\n verify_jwt,\n },\n file: inputFiles as any, // We need to pass file name and content to our serializer\n },\n bodySerializer(body) {\n const formData = new FormData();\n\n const blob = new Blob([JSON.stringify(body.metadata)], {\n type: 'application/json',\n });\n formData.append('metadata', blob);\n\n body.file?.forEach((f: any) => {\n const file: { name: string; content: string } = f;\n const blob = new Blob([file.content], {\n type: 'application/typescript',\n });\n formData.append('file', blob, file.name);\n });\n\n return formData;\n },\n }\n );\n\n assertSuccess(response, 'Failed to deploy Edge Function');\n\n return response.data;\n },\n };\n\n const branching: BranchingOperations = {\n async listBranches(projectId: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/branches',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n }\n );\n\n // There are no branches if branching is disabled\n if (response.response.status === 422) return [];\n assertSuccess(response, 'Failed to list branches');\n\n return response.data;\n },\n async createBranch(projectId: string, options: CreateBranchOptions) {\n const { name } = createBranchOptionsSchema.parse(options);\n\n const createBranchResponse = await managementApiClient.POST(\n '/v1/projects/{ref}/branches',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n branch_name: name,\n },\n }\n );\n\n assertSuccess(createBranchResponse, 'Failed to create branch');\n\n return createBranchResponse.data;\n },\n async deleteBranch(branchId: string) {\n const response = await managementApiClient.DELETE(\n '/v1/branches/{branch_id}',\n {\n params: {\n path: {\n branch_id: branchId,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to delete branch');\n },\n async mergeBranch(branchId: string) {\n const response = await managementApiClient.POST(\n '/v1/branches/{branch_id}/merge',\n {\n params: {\n path: {\n 
branch_id: branchId,\n },\n },\n body: {},\n }\n );\n\n assertSuccess(response, 'Failed to merge branch');\n },\n async resetBranch(branchId: string, options: ResetBranchOptions) {\n const { migration_version } = resetBranchOptionsSchema.parse(options);\n\n const response = await managementApiClient.POST(\n '/v1/branches/{branch_id}/reset',\n {\n params: {\n path: {\n branch_id: branchId,\n },\n },\n body: {\n migration_version,\n },\n }\n );\n\n assertSuccess(response, 'Failed to reset branch');\n },\n async rebaseBranch(branchId: string) {\n const response = await managementApiClient.POST(\n '/v1/branches/{branch_id}/push',\n {\n params: {\n path: {\n branch_id: branchId,\n },\n },\n body: {},\n }\n );\n\n assertSuccess(response, 'Failed to rebase branch');\n },\n };\n\n const storage: StorageOperations = {\n // Storage methods\n async listAllBuckets(project_id: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/storage/buckets',\n {\n params: {\n path: {\n ref: project_id,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to list storage buckets');\n\n return response.data;\n },\n\n async getStorageConfig(project_id: string) {\n const response = await managementApiClient.GET(\n '/v1/projects/{ref}/config/storage',\n {\n params: {\n path: {\n ref: project_id,\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to get storage config');\n\n return response.data;\n },\n\n async updateStorageConfig(projectId: string, config: StorageConfig) {\n const response = await managementApiClient.PATCH(\n '/v1/projects/{ref}/config/storage',\n {\n params: {\n path: {\n ref: projectId,\n },\n },\n body: {\n fileSizeLimit: config.fileSizeLimit,\n features: {\n imageTransformation: {\n enabled: config.features.imageTransformation.enabled,\n },\n s3Protocol: {\n enabled: config.features.s3Protocol.enabled,\n },\n },\n },\n }\n );\n\n assertSuccess(response, 'Failed to update storage config');\n },\n };\n\n const platform: SupabasePlatform = {\n async init(info: InitData) {\n const { clientInfo } = info;\n if (!clientInfo) {\n throw new Error('Client info is required');\n }\n\n // Re-initialize the management API client with the user agent\n managementApiClient = createManagementApiClient(\n managementApiUrl,\n accessToken,\n {\n 'User-Agent': `supabase-mcp/${version} (${clientInfo.name}/${clientInfo.version})`,\n }\n );\n },\n account,\n database,\n debugging,\n development,\n functions,\n branching,\n storage,\n };\n\n return platform;\n}\n\nfunction getProjectDomain(apiHostname: string) {\n switch (apiHostname) {\n case 'api.supabase.com':\n return 'supabase.co';\n case 'api.supabase.green':\n return 'supabase.green';\n default:\n return 'supabase.red';\n }\n}\n","import { stripIndent } from 'common-tags';\nimport type { LogsService } from './platform/types.js';\n\nexport function getLogQuery(service: LogsService, limit: number = 100) {\n switch (service) {\n case 'api':\n return stripIndent`\n select id, identifier, timestamp, event_message, request.method, request.path, response.status_code\n from edge_logs\n cross join unnest(metadata) as m\n cross join unnest(m.request) as request\n cross join unnest(m.response) as response\n order by timestamp desc\n limit ${limit}\n `;\n case 'branch-action':\n return stripIndent`\n select workflow_run, workflow_run_logs.timestamp, id, event_message from workflow_run_logs\n order by timestamp desc\n limit ${limit}\n `;\n case 'postgres':\n return stripIndent`\n select identifier, postgres_logs.timestamp, id, event_message, 
parsed.error_severity from postgres_logs\n cross join unnest(metadata) as m\n cross join unnest(m.parsed) as parsed\n order by timestamp desc\n limit ${limit}\n `;\n case 'edge-function':\n return stripIndent`\n select id, function_edge_logs.timestamp, event_message, response.status_code, request.method, m.function_id, m.execution_time_ms, m.deployment_id, m.version from function_edge_logs\n cross join unnest(metadata) as m\n cross join unnest(m.response) as response\n cross join unnest(m.request) as request\n order by timestamp desc\n limit ${limit}\n `;\n case 'auth':\n return stripIndent`\n select id, auth_logs.timestamp, event_message, metadata.level, metadata.status, metadata.path, metadata.msg as msg, metadata.error from auth_logs\n cross join unnest(metadata) as metadata\n order by timestamp desc\n limit ${limit}\n `;\n case 'storage':\n return stripIndent`\n select id, storage_logs.timestamp, event_message from storage_logs\n order by timestamp desc\n limit ${limit}\n `;\n case 'realtime':\n return stripIndent`\n select id, realtime_logs.timestamp, event_message from realtime_logs\n order by timestamp desc\n limit ${limit}\n `;\n default:\n throw new Error(`unsupported log service type: ${service}`);\n }\n}\n"]}
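
The TypeScript source embedded in the bundle's source map above shows how getEdgeFunction downloads an Edge Function body as multipart/form-data and walks it with getMultipartBoundary and parseMultipartStream from @mjackson/multipart-parser. Below is a minimal sketch of that pattern, assuming a standard fetch Response; the helper name collectMultipartFiles is illustrative and not part of the package.

  import {
    getMultipartBoundary,
    parseMultipartStream,
  } from '@mjackson/multipart-parser';

  // Collect all file parts of a multipart/form-data response as
  // { name, content } pairs, mirroring the getEdgeFunction flow above.
  async function collectMultipartFiles(
    response: Response
  ): Promise<{ name: string; content: string }[]> {
    const contentType = response.headers.get('content-type');
    if (!contentType?.startsWith('multipart/form-data')) {
      throw new Error(`Unexpected content type: ${contentType}`);
    }

    const boundary = getMultipartBoundary(contentType);
    if (!boundary) {
      throw new Error('No multipart boundary found in response headers');
    }

    if (!response.body) {
      throw new Error('No data received');
    }

    const files: { name: string; content: string }[] = [];
    for await (const part of parseMultipartStream(response.body, { boundary })) {
      // Only file parts carry a filename; plain form fields are skipped.
      if (part.isFile && part.filename) {
        files.push({ name: part.filename, content: part.text });
      }
    }
    return files;
  }

Requesting the body with an Accept: multipart/form-data header and parsing it as a stream lets the server hand parts over one at a time instead of buffering the whole response.
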
@@ -1,4 +1,4 @@
- import{a as R,j as C,k as B,l as x,m as M,n as q,o as G,r as L,u as P,v as b}from"./chunk-52DDTMJB.js";import{getMultipartBoundary as J,parseMultipartStream as V}from"@mjackson/multipart-parser";import{fileURLToPath as T}from"url";import{stripIndent as f}from"common-tags";function D(p,i=100){switch(p){case"api":return f`
+ import{a as R,j as C,k as B,l as x,m as M,n as q,o as G,r as L,u as P,v as b}from"./chunk-RYIG4VFH.js";import{getMultipartBoundary as J,parseMultipartStream as V}from"@mjackson/multipart-parser";import{fileURLToPath as T}from"url";import{stripIndent as f}from"common-tags";function D(p,i=100){switch(p){case"api":return f`
  select id, identifier, timestamp, event_message, request.method, request.path, response.status_code
  from edge_logs
  cross join unnest(metadata) as m
@@ -37,4 +37,4 @@ import{a as R,j as C,k as B,l as x,m as M,n as q,o as G,r as L,u as P,v as b}fro
  order by timestamp desc
  limit ${i}
  `;default:throw new Error(`unsupported log service type: ${p}`)}}import K from"openapi-fetch";import{z as U}from"zod/v4";function F(p,i,d={}){return K({baseUrl:p,headers:{Authorization:`Bearer ${i}`,...d}})}var N=U.object({message:U.string()});function n(p,i){if("error"in p){if(p.response.status===401)throw new Error("Unauthorized. Please provide a valid access token to the MCP server via the --access-token flag or SUPABASE_ACCESS_TOKEN.");let{data:d}=N.safeParse(p.error);throw d?new Error(d.message):new Error(i)}}var H="ABCDEFGHIJKLMNOPQRSTUVWXYZ",I="abcdefghijklmnopqrstuvwxyz",W="0123456789",Q="!@#$%^&*()_+~`|}{[]:;?><,./-=",$=({length:p=10,numbers:i=!1,symbols:d=!1,uppercase:_=!0,lowercase:a=!0}={})=>{let m="";if(_&&(m+=H),a&&(m+=I),i&&(m+=W),d&&(m+=Q),m.length===0)throw new Error("at least one character set must be selected");let v=new Uint32Array(p);crypto.getRandomValues(v);let O="";for(let w=0;w<p;w++){let j=v[w]%m.length;O+=m.charAt(j)}return O};var{version:Y}=R;function ye(p){let{accessToken:i,apiUrl:d}=p,_=d??"https://api.supabase.com",a=F(_,i),m={async listOrganizations(){let t=await a.GET("/v1/organizations");return n(t,"Failed to fetch organizations"),t.data},async getOrganization(t){let e=await a.GET("/v1/organizations/{slug}",{params:{path:{slug:t}}});return n(e,"Failed to fetch organization"),e.data},async listProjects(){let t=await a.GET("/v1/projects");return n(t,"Failed to fetch projects"),t.data},async getProject(t){let e=await a.GET("/v1/projects/{ref}",{params:{path:{ref:t}}});return n(e,"Failed to fetch project"),e.data},async createProject(t){let{name:e,organization_id:s,region:o,db_pass:r}=C.parse(t),c=await a.POST("/v1/projects",{body:{name:e,region:o,organization_id:s,db_pass:r??$({length:16,numbers:!0,uppercase:!0,lowercase:!0})}});return n(c,"Failed to create project"),c.data},async pauseProject(t){let e=await a.POST("/v1/projects/{ref}/pause",{params:{path:{ref:t}}});n(e,"Failed to pause project")},async restoreProject(t){let e=await a.POST("/v1/projects/{ref}/restore",{params:{path:{ref:t}}});n(e,"Failed to restore project")}},v={async executeSql(t,e){let{query:s,parameters:o,read_only:r}=q.parse(e),c=await a.POST("/v1/projects/{ref}/database/query",{params:{path:{ref:t}},body:{query:s,parameters:o,read_only:r}});return n(c,"Failed to execute SQL query"),c.data},async listMigrations(t){let e=await a.GET("/v1/projects/{ref}/database/migrations",{params:{path:{ref:t}}});return n(e,"Failed to fetch migrations"),e.data},async applyMigration(t,e){let{name:s,query:o}=G.parse(e),r=await a.POST("/v1/projects/{ref}/database/migrations",{params:{path:{ref:t}},body:{name:s,query:o}});n(r,"Failed to apply migration")}},O={async getLogs(t,e){let{service:s,iso_timestamp_start:o,iso_timestamp_end:r}=L.parse(e),c=D(s),u=await a.GET("/v1/projects/{ref}/analytics/endpoints/logs.all",{params:{path:{ref:t},query:{sql:c,iso_timestamp_start:o,iso_timestamp_end:r}}});return n(u,"Failed to fetch logs"),u.data},async getSecurityAdvisors(t){let e=await a.GET("/v1/projects/{ref}/advisors/security",{params:{path:{ref:t}}});return n(e,"Failed to fetch security advisors"),e.data},async getPerformanceAdvisors(t){let e=await a.GET("/v1/projects/{ref}/advisors/performance",{params:{path:{ref:t}}});return n(e,"Failed to fetch performance advisors"),e.data}},w={async getProjectUrl(t){let e=new URL(_);return`https://${t}.${X(e.hostname)}`},async getPublishableKeys(t){let e=await a.GET("/v1/projects/{ref}/api-keys",{params:{path:{ref:t},query:{reveal:!1}}});n(e,"Failed to fetch API keys");let 
s;try{let r=await a.GET("/v1/projects/{ref}/api-keys/legacy",{params:{path:{ref:t}}});r.response.ok&&(s=r.data?.enabled??!0)}catch{s=void 0}let o=e.data?.filter(r=>r.name==="anon"||r.type==="publishable")??[];if(o.length===0)throw new Error("No client-safe API keys (anon or publishable) found. Please create a publishable key in your project settings.");return o.map(r=>({api_key:r.api_key,name:r.name,type:r.type==="publishable"?"publishable":"legacy",...s!==void 0&&{disabled:r.type==="legacy"&&!s},description:r.description??void 0,id:r.id??void 0}))},async generateTypescriptTypes(t){let e=await a.GET("/v1/projects/{ref}/types/typescript",{params:{path:{ref:t}}});return n(e,"Failed to fetch TypeScript types"),e.data}},j={async listEdgeFunctions(t){let e=await a.GET("/v1/projects/{ref}/functions",{params:{path:{ref:t}}});return n(e,"Failed to fetch Edge Functions"),e.data.map(s=>{let o=P(t,s.id,s.version),r=s.entrypoint_path?b({deploymentId:o,filename:T(s.entrypoint_path,{windows:!1})}):void 0,c=s.import_map_path?b({deploymentId:o,filename:T(s.import_map_path,{windows:!1})}):void 0;return{...s,entrypoint_path:r,import_map_path:c}})},async getEdgeFunction(t,e){let s=await a.GET("/v1/projects/{ref}/functions/{function_slug}",{params:{path:{ref:t,function_slug:e}}});if(s.error)throw s.error;n(s,"Failed to fetch Edge Function");let o=s.data,r=P(t,o.id,o.version),c=o.entrypoint_path?b({deploymentId:r,filename:T(o.entrypoint_path,{windows:!1})}):void 0,u=o.import_map_path?b({deploymentId:r,filename:T(o.import_map_path,{windows:!1})}):void 0,g=await a.GET("/v1/projects/{ref}/functions/{function_slug}/body",{params:{path:{ref:t,function_slug:e}},headers:{Accept:"multipart/form-data"},parseAs:"stream"});n(g,"Failed to fetch Edge Function files");let y=g.response.headers.get("content-type");if(!y||!y.startsWith("multipart/form-data"))throw new Error(`Unexpected content type: ${y}. 
Expected multipart/form-data.`);let S=J(y);if(!S)throw new Error("No multipart boundary found in response headers");if(!g.data)throw new Error("No data received from Edge Function body");let l=[],E=V(g.data,{boundary:S});for await(let h of E)h.isFile&&h.filename&&l.push({name:b({deploymentId:r,filename:h.filename}),content:h.text});return{...o,entrypoint_path:c,import_map_path:u,files:l}},async deployEdgeFunction(t,e){let{name:s,entrypoint_path:o,import_map_path:r,verify_jwt:c,files:u}=M.parse(e),g;try{g=await j.getEdgeFunction(t,s)}catch{}let y=u.find(l=>["deno.json","import_map.json"].includes(l.name));r??=g?.import_map_path??y?.name;let S=await a.POST("/v1/projects/{ref}/functions/deploy",{params:{path:{ref:t},query:{slug:s}},body:{metadata:{name:s,entrypoint_path:o,import_map_path:r,verify_jwt:c},file:u},bodySerializer(l){let E=new FormData,h=new Blob([JSON.stringify(l.metadata)],{type:"application/json"});return E.append("metadata",h),l.file?.forEach(k=>{let A=k,z=new Blob([A.content],{type:"application/typescript"});E.append("file",z,A.name)}),E}});return n(S,"Failed to deploy Edge Function"),S.data}};return{async init(t){let{clientInfo:e}=t;if(!e)throw new Error("Client info is required");a=F(_,i,{"User-Agent":`supabase-mcp/${Y} (${e.name}/${e.version})`})},account:m,database:v,debugging:O,development:w,functions:j,branching:{async listBranches(t){let e=await a.GET("/v1/projects/{ref}/branches",{params:{path:{ref:t}}});return e.response.status===422?[]:(n(e,"Failed to list branches"),e.data)},async createBranch(t,e){let{name:s}=B.parse(e),o=await a.POST("/v1/projects/{ref}/branches",{params:{path:{ref:t}},body:{branch_name:s}});return n(o,"Failed to create branch"),o.data},async deleteBranch(t){let e=await a.DELETE("/v1/branches/{branch_id}",{params:{path:{branch_id:t}}});n(e,"Failed to delete branch")},async mergeBranch(t){let e=await a.POST("/v1/branches/{branch_id}/merge",{params:{path:{branch_id:t}},body:{}});n(e,"Failed to merge branch")},async resetBranch(t,e){let{migration_version:s}=x.parse(e),o=await a.POST("/v1/branches/{branch_id}/reset",{params:{path:{branch_id:t}},body:{migration_version:s}});n(o,"Failed to reset branch")},async rebaseBranch(t){let e=await a.POST("/v1/branches/{branch_id}/push",{params:{path:{branch_id:t}},body:{}});n(e,"Failed to rebase branch")}},storage:{async listAllBuckets(t){let e=await a.GET("/v1/projects/{ref}/storage/buckets",{params:{path:{ref:t}}});return n(e,"Failed to list storage buckets"),e.data},async getStorageConfig(t){let e=await a.GET("/v1/projects/{ref}/config/storage",{params:{path:{ref:t}}});return n(e,"Failed to get storage config"),e.data},async updateStorageConfig(t,e){let s=await a.PATCH("/v1/projects/{ref}/config/storage",{params:{path:{ref:t}},body:{fileSizeLimit:e.fileSizeLimit,features:{imageTransformation:{enabled:e.features.imageTransformation.enabled},s3Protocol:{enabled:e.features.s3Protocol.enabled}}}});n(s,"Failed to update storage config")}}}}function X(p){switch(p){case"api.supabase.com":return"supabase.co";case"api.supabase.green":return"supabase.green";default:return"supabase.red"}}export{ye as a};
- //# sourceMappingURL=chunk-H7YDYMTU.js.map
+ //# sourceMappingURL=chunk-MI5YH4SU.js.map
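
This hunk changes nothing but build artifacts: the content-hashed chunk import moves from chunk-52DDTMJB.js to chunk-RYIG4VFH.js and the source map reference from chunk-H7YDYMTU.js.map to chunk-MI5YH4SU.js.map, while the minified code shown between them is unchanged context. That context includes deployEdgeFunction's custom bodySerializer, which packs metadata and source files into one multipart request. A readable sketch of that serializer follows; the DeployBody type is named here only for illustration.

  // Metadata travels as a JSON Blob; each source file becomes a named
  // Blob part, matching the serializer in the minified chunk above.
  type DeployBody = {
    metadata: {
      name: string;
      entrypoint_path: string;
      import_map_path?: string;
      verify_jwt?: boolean;
    };
    file?: { name: string; content: string }[];
  };

  function serializeDeployBody(body: DeployBody): FormData {
    const formData = new FormData();
    formData.append(
      'metadata',
      new Blob([JSON.stringify(body.metadata)], { type: 'application/json' })
    );
    for (const file of body.file ?? []) {
      formData.append(
        'file',
        new Blob([file.content], { type: 'application/typescript' }),
        file.name
      );
    }
    return formData;
  }
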
@@ -1,4 +1,4 @@
- var re={name:"@supabase/mcp-server-supabase",mcpName:"com.supabase/mcp",version:"0.6.1",description:"MCP server for interacting with Supabase",license:"Apache-2.0",repository:{type:"git",url:"https://github.com/supabase-community/supabase-mcp.git"},type:"module",main:"dist/index.cjs",types:"dist/index.d.ts",sideEffects:!1,scripts:{build:"tsup --clean",dev:"tsup --watch",typecheck:"tsc --noEmit",prebuild:"pnpm typecheck",prepublishOnly:"pnpm build","registry:update":"tsx scripts/registry/update-version.ts && biome format --write server.json","registry:login":"scripts/registry/login.sh","registry:publish":"mcp-publisher publish",test:"vitest","test:unit":"vitest --project unit","test:e2e":"vitest --project e2e","test:integration":"vitest --project integration","test:coverage":"vitest --coverage","generate:management-api-types":"openapi-typescript https://api.supabase.com/api/v1-json -o ./src/management-api/types.ts"},files:["dist/**/*"],bin:{"mcp-server-supabase":"./dist/transports/stdio.js"},exports:{".":{types:"./dist/index.d.ts",import:"./dist/index.js",default:"./dist/index.cjs"},"./platform":{types:"./dist/platform/index.d.ts",import:"./dist/platform/index.js",default:"./dist/platform/index.cjs"},"./platform/api":{types:"./dist/platform/api-platform.d.ts",import:"./dist/platform/api-platform.js",default:"./dist/platform/api-platform.cjs"}},dependencies:{"@mjackson/multipart-parser":"^0.10.1","@modelcontextprotocol/sdk":"catalog:","@supabase/mcp-utils":"workspace:^","common-tags":"^1.8.2",gqlmin:"^0.3.1",graphql:"^16.11.0","openapi-fetch":"^0.13.5"},peerDependencies:{zod:"catalog:"},devDependencies:{"@ai-sdk/anthropic":"catalog:","@ai-sdk/mcp":"catalog:","@electric-sql/pglite":"^0.2.17","@total-typescript/tsconfig":"^1.0.4","@types/common-tags":"^1.8.4","@types/node":"^22.8.6","@vitest/coverage-v8":"^2.1.9",ai:"catalog:","date-fns":"^4.1.0",dotenv:"^16.5.0",msw:"^2.7.3",nanoid:"^5.1.5","openapi-typescript":"^7.5.0","openapi-typescript-helpers":"^0.0.15",prettier:"^3.3.3",tsup:"^8.3.5",tsx:"^4.19.2",typescript:"^5.6.3",vite:"^5.4.19",vitest:"^2.1.9",zod:"catalog:"}};import{z as B}from"zod/v4";var ze=["docs","account","database","debugging","development","functions","branching","storage"],qe=B.enum(["debug"]),$=B.enum(ze),ae=B.union([qe,$]).transform(t=>{switch(t){case"debug":return"debugging";default:return t}});import{z as e}from"zod/v4";import{z as K}from"zod/v4";import{createMcpServer as mt}from"@supabase/mcp-utils";import Ke from"gqlmin";import{z as ie}from"zod/v4";import{buildSchema as We,GraphQLError as Ge,parse as Be,validate as $e}from"graphql";import{z as m}from"zod/v4";var Ot=m.object({query:m.string(),variables:m.record(m.string(),m.unknown()).optional()}),Me=m.object({data:m.record(m.string(),m.unknown()),errors:m.undefined()}),Qe=m.object({message:m.string(),locations:m.array(m.object({line:m.number(),column:m.number()}))}),Je=m.object({data:m.undefined(),errors:m.array(Qe)}),Ye=m.union([Me,Je]),R=class{#t;#e;schemaLoaded;constructor(i){this.#t=i.url,this.#e=i.headers??{},this.schemaLoaded=i.loadSchema?.({query:this.#n.bind(this)}).then(n=>({source:n,schema:We(n)}))??Promise.reject(new Error("No schema loader provided")),this.schemaLoaded.catch(()=>{})}async query(i,n={validateSchema:!1}){try{let r=Be(i.query);if(n.validateSchema){let{schema:a}=await this.schemaLoaded,s=$e(a,r);if(s.length>0)throw new Error(`Invalid GraphQL query: ${s.map(c=>c.message).join(", ")}`)}return this.#n(i)}catch(r){throw r instanceof Ge?new Error(`Invalid GraphQL query: 
${r.message}`):r}}setUserAgent(i){this.#e["User-Agent"]=i}async#n(i){let{query:n,variables:r}=i,a=new URL(this.#t);a.searchParams.set("query",n),r!==void 0&&a.searchParams.set("variables",JSON.stringify(r));let s=await fetch(a,{method:"GET",headers:{...this.#e,Accept:"application/json"}});if(!s.ok)throw new Error(`Failed to fetch Supabase Content API GraphQL schema: HTTP status ${s.status}`);let c=await s.json(),{data:d,error:p}=Ye.safeParse(c);if(p)throw new Error(`Failed to parse Supabase Content API response: ${p.message}`);if(d.errors)throw new Error(`Supabase Content API GraphQL error: ${d.errors.map(g=>`${g.message} (line ${g.locations[0]?.line??"unknown"}, column ${g.locations[0]?.column??"unknown"})`).join(", ")}`);return d.data}};var Ve=ie.object({schema:ie.string()});async function se(t,i){let n=new R({url:t,headers:i});return{loadSchema:async()=>{let r=await n.query({query:"{ schema }"}),{schema:a}=Ve.parse(r);return Ke(a)},async query(r){return n.query(r)},setUserAgent(r){n.setUserAgent(r)}}}import{tool as j}from"@supabase/mcp-utils";import{z as u}from"zod/v4";async function M(t,i){let n=await t.getOrganization(i),a=(await t.listProjects()).filter(c=>c.organization_id===i&&!["INACTIVE","GOING_DOWN","REMOVED"].includes(c.status)),s=0;return n.plan!=="free"&&a.length>0&&(s=10),{type:"project",recurrence:"monthly",amount:s}}function L(){return{type:"branch",recurrence:"hourly",amount:.01344}}var ce={success:!0};function pe({account:t,readOnly:i}){return{list_organizations:j({description:"Lists all organizations that the user is a member of.",annotations:{title:"List organizations",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:u.object({}),execute:async()=>await t.listOrganizations()}),get_organization:j({description:"Gets details for an organization. Includes subscription plan.",annotations:{title:"Get organization details",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:u.object({id:u.string().describe("The organization ID")}),execute:async({id:n})=>await t.getOrganization(n)}),list_projects:j({description:"Lists all Supabase projects for the user. Use this to help discover the project ID of the project that the user is working on.",annotations:{title:"List projects",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:u.object({}),execute:async()=>await t.listProjects()}),get_project:j({description:"Gets details for a Supabase project.",annotations:{title:"Get project details",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:u.object({id:u.string().describe("The project ID")}),execute:async({id:n})=>await t.getProject(n)}),get_cost:j({description:"Gets the cost of creating a new project or branch. Never assume organization as costs can be different for each.",annotations:{title:"Get cost of new resources",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:u.object({type:u.enum(["project","branch"]),organization_id:u.string().describe("The organization ID. Always ask the user.")}),execute:async({type:n,organization_id:r})=>{function a(s){return`The new ${n} will cost $${s.amount} ${s.recurrence}. 
You must repeat this to the user and confirm their understanding.`}switch(n){case"project":{let s=await M(t,r);return a(s)}case"branch":{let s=L();return a(s)}default:throw new Error(`Unknown cost type: ${n}`)}}}),confirm_cost:j({description:"Ask the user to confirm their understanding of the cost of creating a new project or branch. Call `get_cost` first. Returns a unique ID for this confirmation which should be passed to `create_project` or `create_branch`.",annotations:{title:"Confirm cost understanding",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:u.object({type:u.enum(["project","branch"]),recurrence:u.enum(["hourly","monthly"]),amount:u.number()}),execute:async n=>await A(n)}),create_project:j({description:"Creates a new Supabase project. Always ask the user which organization to create the project in. The project can take a few minutes to initialize - use `get_project` to check the status.",annotations:{title:"Create project",readOnlyHint:!1,destructiveHint:!1,idempotentHint:!1,openWorldHint:!1},parameters:u.object({name:u.string().describe("The name of the project"),region:u.enum(P).describe("The region to create the project in."),organization_id:u.string(),confirm_cost_id:u.string({error:n=>n.input===void 0?"User must confirm understanding of costs before creating a project.":void 0}).describe("The cost confirmation ID. Call `confirm_cost` first.")}),execute:async({name:n,region:r,organization_id:a,confirm_cost_id:s})=>{if(i)throw new Error("Cannot create a project in read-only mode.");let c=await M(t,a);if(await A(c)!==s)throw new Error("Cost confirmation ID does not match the expected cost of creating a project.");return await t.createProject({name:n,region:r,organization_id:a})}}),pause_project:j({description:"Pauses a Supabase project.",annotations:{title:"Pause project",readOnlyHint:!1,destructiveHint:!1,idempotentHint:!1,openWorldHint:!1},parameters:u.object({project_id:u.string()}),execute:async({project_id:n})=>{if(i)throw new Error("Cannot pause a project in read-only mode.");return await t.pauseProject(n),ce}}),restore_project:j({description:"Restores a Supabase project.",annotations:{title:"Restore project",readOnlyHint:!1,destructiveHint:!1,idempotentHint:!1,openWorldHint:!1},parameters:u.object({project_id:u.string()}),execute:async({project_id:n})=>{if(i)throw new Error("Cannot restore a project in read-only mode.");return await t.restoreProject(n),ce}})}}import{tool as D}from"@supabase/mcp-utils";import{z as f}from"zod/v4";import{tool as le}from"@supabase/mcp-utils";import"zod/v4";function l({description:t,annotations:i,parameters:n,inject:r,execute:a}){if(!r||Object.values(r).every(p=>p===void 0))return le({description:t,annotations:i,parameters:n,execute:a});let s=Object.fromEntries(Object.keys(r).filter(p=>r[p]!==void 0).map(p=>[p,!0])),c=n.omit(s);return le({description:t,annotations:i,parameters:c,execute:async p=>a({...p,...r})})}var k={success:!0};function de({branching:t,projectId:i,readOnly:n}){let r=i;return{create_branch:l({description:"Creates a development branch on a Supabase project. This will apply all migrations from the main project to a fresh branch database. Note that production data will not carry over. The branch will get its own project_id via the resulting project_ref. 
Use this ID to execute queries and migrations on the branch.",annotations:{title:"Create branch",readOnlyHint:!1,destructiveHint:!1,idempotentHint:!1,openWorldHint:!1},parameters:f.object({project_id:f.string(),name:f.string().default("develop").describe("Name of the branch to create"),confirm_cost_id:f.string({error:a=>a.input===void 0?"User must confirm understanding of costs before creating a branch.":void 0}).describe("The cost confirmation ID. Call `confirm_cost` first.")}),inject:{project_id:r},execute:async({project_id:a,name:s,confirm_cost_id:c})=>{if(n)throw new Error("Cannot create a branch in read-only mode.");let d=L();if(await A(d)!==c)throw new Error("Cost confirmation ID does not match the expected cost of creating a branch.");return await t.createBranch(a,{name:s})}}),list_branches:l({description:"Lists all development branches of a Supabase project. This will return branch details including status which you can use to check when operations like merge/rebase/reset complete.",annotations:{title:"List branches",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:f.object({project_id:f.string()}),inject:{project_id:r},execute:async({project_id:a})=>await t.listBranches(a)}),delete_branch:D({description:"Deletes a development branch.",annotations:{title:"Delete branch",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:f.object({branch_id:f.string()}),execute:async({branch_id:a})=>{if(n)throw new Error("Cannot delete a branch in read-only mode.");return await t.deleteBranch(a),k}}),merge_branch:D({description:"Merges migrations and edge functions from a development branch to production.",annotations:{title:"Merge branch",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:f.object({branch_id:f.string()}),execute:async({branch_id:a})=>{if(n)throw new Error("Cannot merge a branch in read-only mode.");return await t.mergeBranch(a),k}}),reset_branch:D({description:"Resets migrations of a development branch. Any untracked data or schema changes will be lost.",annotations:{title:"Reset branch",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:f.object({branch_id:f.string(),migration_version:f.string().optional().describe("Reset your development branch to a specific migration version.")}),execute:async({branch_id:a,migration_version:s})=>{if(n)throw new Error("Cannot reset a branch in read-only mode.");return await t.resetBranch(a,{migration_version:s}),k}}),rebase_branch:D({description:"Rebases a development branch on production. This will effectively run any newer migrations from production onto this branch to help handle migration drift.",annotations:{title:"Rebase branch",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:f.object({branch_id:f.string()}),execute:async({branch_id:a})=>{if(n)throw new Error("Cannot rebase a branch in read-only mode.");return await t.rebaseBranch(a),k}})}}import{source as at}from"common-tags";import{z as h}from"zod/v4";import{stripIndent as he}from"common-tags";var ue=`-- Adapted from information_schema.columns
+ var re={name:"@supabase/mcp-server-supabase",mcpName:"com.supabase/mcp",version:"0.6.2",description:"MCP server for interacting with Supabase",license:"Apache-2.0",repository:{type:"git",url:"https://github.com/supabase-community/supabase-mcp.git"},type:"module",main:"dist/index.cjs",types:"dist/index.d.ts",sideEffects:!1,scripts:{build:"tsup --clean",dev:"tsup --watch",typecheck:"tsc --noEmit",prebuild:"pnpm typecheck",prepublishOnly:"pnpm build","registry:update":"tsx scripts/registry/update-version.ts && biome format --write server.json","registry:login":"scripts/registry/login.sh","registry:publish":"mcp-publisher publish",test:"vitest","test:unit":"vitest --project unit","test:e2e":"vitest --project e2e","test:integration":"vitest --project integration","test:coverage":"vitest --coverage","generate:management-api-types":"openapi-typescript https://api.supabase.com/api/v1-json -o ./src/management-api/types.ts"},files:["dist/**/*"],bin:{"mcp-server-supabase":"./dist/transports/stdio.js"},exports:{".":{types:"./dist/index.d.ts",import:"./dist/index.js",default:"./dist/index.cjs"},"./platform":{types:"./dist/platform/index.d.ts",import:"./dist/platform/index.js",default:"./dist/platform/index.cjs"},"./platform/api":{types:"./dist/platform/api-platform.d.ts",import:"./dist/platform/api-platform.js",default:"./dist/platform/api-platform.cjs"}},dependencies:{"@mjackson/multipart-parser":"^0.10.1","@supabase/mcp-utils":"workspace:^","common-tags":"^1.8.2",gqlmin:"^0.3.1",graphql:"^16.11.0","openapi-fetch":"^0.13.5"},peerDependencies:{"@modelcontextprotocol/sdk":"catalog:",zod:"catalog:"},devDependencies:{"@ai-sdk/anthropic":"catalog:","@ai-sdk/mcp":"catalog:","@electric-sql/pglite":"^0.2.17","@modelcontextprotocol/sdk":"catalog:","@total-typescript/tsconfig":"^1.0.4","@types/common-tags":"^1.8.4","@types/node":"^22.8.6","@vitest/coverage-v8":"^2.1.9",ai:"catalog:","date-fns":"^4.1.0",dotenv:"^16.5.0",msw:"^2.7.3",nanoid:"^5.1.5","openapi-typescript":"^7.5.0","openapi-typescript-helpers":"^0.0.15",prettier:"^3.3.3",tsup:"^8.3.5",tsx:"^4.19.2",typescript:"^5.6.3",vite:"^5.4.19",vitest:"^2.1.9",zod:"catalog:"}};import{z as B}from"zod/v4";var ze=["docs","account","database","debugging","development","functions","branching","storage"],qe=B.enum(["debug"]),$=B.enum(ze),ae=B.union([qe,$]).transform(t=>{switch(t){case"debug":return"debugging";default:return t}});import{z as e}from"zod/v4";import{z as K}from"zod/v4";import{createMcpServer as ut}from"@supabase/mcp-utils";import Ke from"gqlmin";import{z as ie}from"zod/v4";import{buildSchema as We,GraphQLError as Ge,parse as Be,validate as $e}from"graphql";import{z as u}from"zod/v4";var Ot=u.object({query:u.string(),variables:u.record(u.string(),u.unknown()).optional()}),Me=u.object({data:u.record(u.string(),u.unknown()),errors:u.undefined()}),Qe=u.object({message:u.string(),locations:u.array(u.object({line:u.number(),column:u.number()}))}),Je=u.object({data:u.undefined(),errors:u.array(Qe)}),Ye=u.union([Me,Je]),R=class{#t;#e;schemaLoaded;constructor(i){this.#t=i.url,this.#e=i.headers??{},this.schemaLoaded=i.loadSchema?.({query:this.#n.bind(this)}).then(n=>({source:n,schema:We(n)}))??Promise.reject(new Error("No schema loader provided")),this.schemaLoaded.catch(()=>{})}async query(i,n={validateSchema:!1}){try{let r=Be(i.query);if(n.validateSchema){let{schema:a}=await this.schemaLoaded,s=$e(a,r);if(s.length>0)throw new Error(`Invalid GraphQL query: ${s.map(c=>c.message).join(", ")}`)}return this.#n(i)}catch(r){throw r instanceof Ge?new Error(`Invalid 
GraphQL query: ${r.message}`):r}}setUserAgent(i){this.#e["User-Agent"]=i}async#n(i){let{query:n,variables:r}=i,a=new URL(this.#t);a.searchParams.set("query",n),r!==void 0&&a.searchParams.set("variables",JSON.stringify(r));let s=await fetch(a,{method:"GET",headers:{...this.#e,Accept:"application/json"}});if(!s.ok)throw new Error(`Failed to fetch Supabase Content API GraphQL schema: HTTP status ${s.status}`);let c=await s.json(),{data:d,error:p}=Ye.safeParse(c);if(p)throw new Error(`Failed to parse Supabase Content API response: ${p.message}`);if(d.errors)throw new Error(`Supabase Content API GraphQL error: ${d.errors.map(g=>`${g.message} (line ${g.locations[0]?.line??"unknown"}, column ${g.locations[0]?.column??"unknown"})`).join(", ")}`);return d.data}};var Ve=ie.object({schema:ie.string()});async function se(t,i){let n=new R({url:t,headers:i});return{loadSchema:async()=>{let r=await n.query({query:"{ schema }"}),{schema:a}=Ve.parse(r);return Ke(a)},async query(r){return n.query(r)},setUserAgent(r){n.setUserAgent(r)}}}import{tool as j}from"@supabase/mcp-utils";import{z as m}from"zod/v4";async function M(t,i){let n=await t.getOrganization(i),a=(await t.listProjects()).filter(c=>c.organization_id===i&&!["INACTIVE","GOING_DOWN","REMOVED"].includes(c.status)),s=0;return n.plan!=="free"&&a.length>0&&(s=10),{type:"project",recurrence:"monthly",amount:s}}function L(){return{type:"branch",recurrence:"hourly",amount:.01344}}var ce={success:!0};function pe({account:t,readOnly:i}){return{list_organizations:j({description:"Lists all organizations that the user is a member of.",annotations:{title:"List organizations",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:m.object({}),execute:async()=>await t.listOrganizations()}),get_organization:j({description:"Gets details for an organization. Includes subscription plan.",annotations:{title:"Get organization details",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:m.object({id:m.string().describe("The organization ID")}),execute:async({id:n})=>await t.getOrganization(n)}),list_projects:j({description:"Lists all Supabase projects for the user. Use this to help discover the project ID of the project that the user is working on.",annotations:{title:"List projects",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:m.object({}),execute:async()=>await t.listProjects()}),get_project:j({description:"Gets details for a Supabase project.",annotations:{title:"Get project details",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:m.object({id:m.string().describe("The project ID")}),execute:async({id:n})=>await t.getProject(n)}),get_cost:j({description:"Gets the cost of creating a new project or branch. Never assume organization as costs can be different for each.",annotations:{title:"Get cost of new resources",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:m.object({type:m.enum(["project","branch"]),organization_id:m.string().describe("The organization ID. Always ask the user.")}),execute:async({type:n,organization_id:r})=>{function a(s){return`The new ${n} will cost $${s.amount} ${s.recurrence}. 
You must repeat this to the user and confirm their understanding.`}switch(n){case"project":{let s=await M(t,r);return a(s)}case"branch":{let s=L();return a(s)}default:throw new Error(`Unknown cost type: ${n}`)}}}),confirm_cost:j({description:"Ask the user to confirm their understanding of the cost of creating a new project or branch. Call `get_cost` first. Returns a unique ID for this confirmation which should be passed to `create_project` or `create_branch`.",annotations:{title:"Confirm cost understanding",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:m.object({type:m.enum(["project","branch"]),recurrence:m.enum(["hourly","monthly"]),amount:m.number()}),execute:async n=>await A(n)}),create_project:j({description:"Creates a new Supabase project. Always ask the user which organization to create the project in. The project can take a few minutes to initialize - use `get_project` to check the status.",annotations:{title:"Create project",readOnlyHint:!1,destructiveHint:!1,idempotentHint:!1,openWorldHint:!1},parameters:m.object({name:m.string().describe("The name of the project"),region:m.enum(P).describe("The region to create the project in."),organization_id:m.string(),confirm_cost_id:m.string({error:n=>n.input===void 0?"User must confirm understanding of costs before creating a project.":void 0}).describe("The cost confirmation ID. Call `confirm_cost` first.")}),execute:async({name:n,region:r,organization_id:a,confirm_cost_id:s})=>{if(i)throw new Error("Cannot create a project in read-only mode.");let c=await M(t,a);if(await A(c)!==s)throw new Error("Cost confirmation ID does not match the expected cost of creating a project.");return await t.createProject({name:n,region:r,organization_id:a})}}),pause_project:j({description:"Pauses a Supabase project.",annotations:{title:"Pause project",readOnlyHint:!1,destructiveHint:!1,idempotentHint:!1,openWorldHint:!1},parameters:m.object({project_id:m.string()}),execute:async({project_id:n})=>{if(i)throw new Error("Cannot pause a project in read-only mode.");return await t.pauseProject(n),ce}}),restore_project:j({description:"Restores a Supabase project.",annotations:{title:"Restore project",readOnlyHint:!1,destructiveHint:!1,idempotentHint:!1,openWorldHint:!1},parameters:m.object({project_id:m.string()}),execute:async({project_id:n})=>{if(i)throw new Error("Cannot restore a project in read-only mode.");return await t.restoreProject(n),ce}})}}import{tool as D}from"@supabase/mcp-utils";import{z as f}from"zod/v4";import{tool as le}from"@supabase/mcp-utils";import"zod/v4";function l({description:t,annotations:i,parameters:n,inject:r,execute:a}){if(!r||Object.values(r).every(p=>p===void 0))return le({description:t,annotations:i,parameters:n,execute:a});let s=Object.fromEntries(Object.keys(r).filter(p=>r[p]!==void 0).map(p=>[p,!0])),c=n.omit(s);return le({description:t,annotations:i,parameters:c,execute:async p=>a({...p,...r})})}var k={success:!0};function de({branching:t,projectId:i,readOnly:n}){let r=i;return{create_branch:l({description:"Creates a development branch on a Supabase project. This will apply all migrations from the main project to a fresh branch database. Note that production data will not carry over. The branch will get its own project_id via the resulting project_ref. 
Use this ID to execute queries and migrations on the branch.",annotations:{title:"Create branch",readOnlyHint:!1,destructiveHint:!1,idempotentHint:!1,openWorldHint:!1},parameters:f.object({project_id:f.string(),name:f.string().default("develop").describe("Name of the branch to create"),confirm_cost_id:f.string({error:a=>a.input===void 0?"User must confirm understanding of costs before creating a branch.":void 0}).describe("The cost confirmation ID. Call `confirm_cost` first.")}),inject:{project_id:r},execute:async({project_id:a,name:s,confirm_cost_id:c})=>{if(n)throw new Error("Cannot create a branch in read-only mode.");let d=L();if(await A(d)!==c)throw new Error("Cost confirmation ID does not match the expected cost of creating a branch.");return await t.createBranch(a,{name:s})}}),list_branches:l({description:"Lists all development branches of a Supabase project. This will return branch details including status which you can use to check when operations like merge/rebase/reset complete.",annotations:{title:"List branches",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:f.object({project_id:f.string()}),inject:{project_id:r},execute:async({project_id:a})=>await t.listBranches(a)}),delete_branch:D({description:"Deletes a development branch.",annotations:{title:"Delete branch",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:f.object({branch_id:f.string()}),execute:async({branch_id:a})=>{if(n)throw new Error("Cannot delete a branch in read-only mode.");return await t.deleteBranch(a),k}}),merge_branch:D({description:"Merges migrations and edge functions from a development branch to production.",annotations:{title:"Merge branch",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:f.object({branch_id:f.string()}),execute:async({branch_id:a})=>{if(n)throw new Error("Cannot merge a branch in read-only mode.");return await t.mergeBranch(a),k}}),reset_branch:D({description:"Resets migrations of a development branch. Any untracked data or schema changes will be lost.",annotations:{title:"Reset branch",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:f.object({branch_id:f.string(),migration_version:f.string().optional().describe("Reset your development branch to a specific migration version.")}),execute:async({branch_id:a,migration_version:s})=>{if(n)throw new Error("Cannot reset a branch in read-only mode.");return await t.resetBranch(a,{migration_version:s}),k}}),rebase_branch:D({description:"Rebases a development branch on production. This will effectively run any newer migrations from production onto this branch to help handle migration drift.",annotations:{title:"Rebase branch",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:f.object({branch_id:f.string()}),execute:async({branch_id:a})=>{if(n)throw new Error("Cannot rebase a branch in read-only mode.");return await t.rebaseBranch(a),k}})}}import{source as at}from"common-tags";import{z as h}from"zod/v4";import{stripIndent as he}from"common-tags";var me=`-- Adapted from information_schema.columns
 
  SELECT
  c.oid :: int8 AS table_id,
@@ -109,7 +109,7 @@ WHERE
  'SELECT, INSERT, UPDATE, REFERENCES'
  )
  )
- `;var me=`SELECT
+ `;var ue=`SELECT
  e.name,
  n.nspname AS schema,
  e.default_version,
@@ -220,13 +220,13 @@ group by
  `;var fe=["information_schema","pg_catalog","pg_toast","_timescaledb_internal"];function ye(t=[]){let i=he`
  with
  tables as (${ge}),
- columns as (${ue})
+ columns as (${me})
  select
  *,
  ${tt("columns","columns.table_id = tables.id")}
  from tables
  `;i+=`
- `;let n=[];if(t.length>0){let r=t.map((a,s)=>`$${s+1}`).join(", ");i+=`where schema in (${r})`,n=t}else{let r=fe.map((a,s)=>`$${s+1}`).join(", ");i+=`where schema not in (${r})`,n=fe}return{query:i,parameters:n}}function be(){return me}var tt=(t,i)=>he`
+ `;let n=[];if(t.length>0){let r=t.map((a,s)=>`$${s+1}`).join(", ");i+=`where schema in (${r})`,n=t}else{let r=fe.map((a,s)=>`$${s+1}`).join(", ");i+=`where schema not in (${r})`,n=fe}return{query:i,parameters:n}}function be(){return ue}var tt=(t,i)=>he`
  COALESCE(
  (
  SELECT
@@ -268,5 +268,5 @@ group by
  });
  `;function Ne({functions:t,projectId:i,readOnly:n}){let r=i;return{list_edge_functions:l({description:"Lists all Edge Functions in a Supabase project.",annotations:{title:"List Edge Functions",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:y.object({project_id:y.string()}),inject:{project_id:r},execute:async({project_id:a})=>await t.listEdgeFunctions(a)}),get_edge_function:l({description:"Retrieves file contents for an Edge Function in a Supabase project.",annotations:{title:"Get Edge Function",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:y.object({project_id:y.string(),function_slug:y.string()}),inject:{project_id:r},execute:async({project_id:a,function_slug:s})=>await t.getEdgeFunction(a,s)}),deploy_edge_function:l({description:`Deploys an Edge Function to a Supabase project. If the function already exists, this will create a new version. Example:
 
- ${we}`,annotations:{title:"Deploy Edge Function",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:y.object({project_id:y.string(),name:y.string().describe("The name of the function"),entrypoint_path:y.string().default("index.ts").describe("The entrypoint of the function"),import_map_path:y.string().describe("The import map for the function.").optional(),verify_jwt:y.boolean().default(!0).describe("Whether to require a valid JWT in the Authorization header. You SHOULD ALWAYS enable this to ensure authorized access. ONLY disable if the function previously had it disabled OR you've confirmed the function body implements custom authentication (e.g., API keys, webhooks) OR the user explicitly requested it be disabled."),files:y.array(y.object({name:y.string(),content:y.string()})).describe("The files to upload. This should include the entrypoint, deno.json, and any relative dependencies. Include the deno.json and deno.jsonc files to configure the Deno runtime (e.g., compiler options, imports) if they exist.")}),inject:{project_id:r},execute:async({project_id:a,name:s,entrypoint_path:c,import_map_path:d,verify_jwt:p,files:g})=>{if(n)throw new Error("Cannot deploy an edge function in read-only mode.");return await t.deployEdgeFunction(a,{name:s,entrypoint_path:c,import_map_path:d,verify_jwt:p,files:g})}})}}import{z as _}from"zod/v4";var ut={success:!0};function He({storage:t,projectId:i,readOnly:n}){let r=i;return{list_storage_buckets:l({description:"Lists all storage buckets in a Supabase project.",annotations:{title:"List storage buckets",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:_.object({project_id:_.string()}),inject:{project_id:r},execute:async({project_id:a})=>await t.listAllBuckets(a)}),get_storage_config:l({description:"Get the storage config for a Supabase project.",annotations:{title:"Get storage config",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:_.object({project_id:_.string()}),inject:{project_id:r},execute:async({project_id:a})=>await t.getStorageConfig(a)}),update_storage_config:l({description:"Update the storage config for a Supabase project.",annotations:{title:"Update storage config",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:_.object({project_id:_.string(),config:_.object({fileSizeLimit:_.number(),features:_.object({imageTransformation:_.object({enabled:_.boolean()}),s3Protocol:_.object({enabled:_.boolean()})})})}),inject:{project_id:r},execute:async({project_id:a,config:s})=>{if(n)throw new Error("Cannot update storage config in read-only mode.");return await t.updateStorageConfig(a,s),ut}})}}var{version:J}=re,gt=["docs","account","database","debugging","development","functions","branching"],Y=["docs"];function Bn(t){let{platform:i,projectId:n,readOnly:r,features:a,contentApiUrl:s="https://supabase.com/docs/api/graphql",onToolCall:c}=t,d=se(s,{"User-Agent":`supabase-mcp/${J}`}),p=gt.filter(S=>Y.includes(S)||Object.keys(i).includes(S)),g=ve(i,a??p);return mt({name:"supabase",title:"Supabase",version:J,async onInitialize(S){let{clientInfo:b}=S,x=`supabase-mcp/${J} (${b.name}/${b.version})`;await Promise.all([i.init?.(S),d.then(T=>T.setUserAgent(x))])},onToolCall:c,tools:async()=>{let S=await d,b={},{account:x,database:T,functions:N,debugging:U,development:F,storage:H,branching:v}=i;return 
g.has("docs")&&Object.assign(b,Te({contentApiClient:S})),!n&&x&&g.has("account")&&Object.assign(b,pe({account:x,readOnly:r})),T&&g.has("database")&&Object.assign(b,je({database:T,projectId:n,readOnly:r})),U&&g.has("debugging")&&Object.assign(b,Ee({debugging:U,projectId:n})),F&&g.has("development")&&Object.assign(b,Oe({development:F,projectId:n})),N&&g.has("functions")&&Object.assign(b,Ne({functions:N,projectId:n,readOnly:r})),v&&g.has("branching")&&Object.assign(b,de({branching:v,projectId:n,readOnly:r})),H&&g.has("storage")&&Object.assign(b,He({storage:H,projectId:n,readOnly:r})),b}})}async function A(t,i){let n=JSON.stringify(t,(s,c)=>c&&typeof c=="object"&&!Array.isArray(c)?Object.keys(c).sort().reduce((d,p)=>(d[p]=c[p],d),{}):c),r=await crypto.subtle.digest("SHA-256",new TextEncoder().encode(n));return btoa(String.fromCharCode(...new Uint8Array(r))).slice(0,i)}function ve(t,i){let n=K.set(ae).parse(new Set(i)),r=[...Y,...$.options.filter(s=>Object.keys(t).includes(s))],a=K.enum(r,{error:s=>{if(s.code==="invalid_value")return`This platform does not support the '${s.input}' feature group. Supported groups are: ${r.join(", ")}`}}).describe("Available features based on platform implementation");return K.set(a).parse(n)}var ft={WEST_US:{code:"us-west-1",displayName:"West US (North California)",location:{lat:37.774929,lng:-122.419418}},EAST_US:{code:"us-east-1",displayName:"East US (North Virginia)",location:{lat:37.926868,lng:-78.024902}},EAST_US_2:{code:"us-east-2",displayName:"East US (Ohio)",location:{lat:39.9612,lng:-82.9988}},CENTRAL_CANADA:{code:"ca-central-1",displayName:"Canada (Central)",location:{lat:56.130367,lng:-106.346771}},WEST_EU:{code:"eu-west-1",displayName:"West EU (Ireland)",location:{lat:53.3498,lng:-6.2603}},WEST_EU_2:{code:"eu-west-2",displayName:"West Europe (London)",location:{lat:51.507351,lng:-.127758}},WEST_EU_3:{code:"eu-west-3",displayName:"West EU (Paris)",location:{lat:2.352222,lng:48.856613}},CENTRAL_EU:{code:"eu-central-1",displayName:"Central EU (Frankfurt)",location:{lat:50.110924,lng:8.682127}},CENTRAL_EU_2:{code:"eu-central-2",displayName:"Central Europe (Zurich)",location:{lat:47.3744489,lng:8.5410422}},NORTH_EU:{code:"eu-north-1",displayName:"North EU (Stockholm)",location:{lat:59.3251172,lng:18.0710935}},SOUTH_ASIA:{code:"ap-south-1",displayName:"South Asia (Mumbai)",location:{lat:18.9733536,lng:72.8281049}},SOUTHEAST_ASIA:{code:"ap-southeast-1",displayName:"Southeast Asia (Singapore)",location:{lat:1.357107,lng:103.8194992}},NORTHEAST_ASIA:{code:"ap-northeast-1",displayName:"Northeast Asia (Tokyo)",location:{lat:35.6895,lng:139.6917}},NORTHEAST_ASIA_2:{code:"ap-northeast-2",displayName:"Northeast Asia (Seoul)",location:{lat:37.5665,lng:126.978}},OCEANIA:{code:"ap-southeast-2",displayName:"Oceania (Sydney)",location:{lat:-33.8688,lng:151.2093}},SOUTH_AMERICA:{code:"sa-east-1",displayName:"South America (S\xE3o Paulo)",location:{lat:-1.2043218,lng:-47.1583944}}},P=Object.values(ft).map(t=>t.code);var 
Xn=e.object({id:e.string(),name:e.string(),owner:e.string(),created_at:e.string(),updated_at:e.string(),public:e.boolean()}),eo=e.object({fileSizeLimit:e.number(),features:e.object({imageTransformation:e.object({enabled:e.boolean()}),s3Protocol:e.object({enabled:e.boolean()})})}),to=e.object({id:e.string(),name:e.string(),plan:e.string().optional(),allowed_release_channels:e.array(e.string()),opt_in_tags:e.array(e.string())}),no=e.object({id:e.string(),organization_id:e.string(),name:e.string(),status:e.string(),created_at:e.string(),region:e.string()}),oo=e.object({id:e.string(),name:e.string(),project_ref:e.string(),parent_project_ref:e.string(),is_default:e.boolean(),git_branch:e.string().optional(),pr_number:e.number().optional(),latest_check_run_id:e.number().optional(),persistent:e.boolean(),status:e.enum(["CREATING_PROJECT","RUNNING_MIGRATIONS","MIGRATIONS_PASSED","MIGRATIONS_FAILED","FUNCTIONS_DEPLOYED","FUNCTIONS_FAILED"]),created_at:e.string(),updated_at:e.string()}),ht=e.object({id:e.string(),slug:e.string(),name:e.string(),status:e.string(),version:e.number(),created_at:e.number().optional(),updated_at:e.number().optional(),verify_jwt:e.boolean().optional(),import_map:e.boolean().optional(),import_map_path:e.string().optional(),entrypoint_path:e.string().optional()}),ro=ht.extend({files:e.array(e.object({name:e.string(),content:e.string()}))}),ao=e.object({name:e.string(),organization_id:e.string(),region:e.enum(P),db_pass:e.string().optional()}),io=e.object({name:e.string()}),so=e.object({migration_version:e.string().optional()}),co=e.object({name:e.string(),entrypoint_path:e.string(),import_map_path:e.string().optional(),verify_jwt:e.boolean().optional(),files:e.array(e.object({name:e.string(),content:e.string()}))}),po=e.object({query:e.string(),parameters:e.array(e.unknown()).optional(),read_only:e.boolean().optional()}),lo=e.object({name:e.string(),query:e.string()}),uo=e.object({version:e.string(),name:e.string().optional()}),Q=e.enum(["api","branch-action","postgres","edge-function","auth","storage","realtime"]),mo=e.object({service:Q,iso_timestamp_start:e.string().optional(),iso_timestamp_end:e.string().optional()}),go=e.object({types:e.string()}),fo=e.enum(["legacy","publishable"]);export{re as a,ze as b,Xn as c,eo as d,to as e,no as f,oo as g,ht as h,ro as i,ao as j,io as k,so as l,co as m,po as n,lo as o,uo as p,Q as q,mo as r,go as s,fo as t,En as u,On as v,Bn as w};
- //# sourceMappingURL=chunk-52DDTMJB.js.map
+ ${we}`,annotations:{title:"Deploy Edge Function",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:y.object({project_id:y.string(),name:y.string().describe("The name of the function"),entrypoint_path:y.string().default("index.ts").describe("The entrypoint of the function"),import_map_path:y.string().describe("The import map for the function.").optional(),verify_jwt:y.boolean().default(!0).describe("Whether to require a valid JWT in the Authorization header. You SHOULD ALWAYS enable this to ensure authorized access. ONLY disable if the function previously had it disabled OR you've confirmed the function body implements custom authentication (e.g., API keys, webhooks) OR the user explicitly requested it be disabled."),files:y.array(y.object({name:y.string(),content:y.string()})).describe("The files to upload. This should include the entrypoint, deno.json, and any relative dependencies. Include the deno.json and deno.jsonc files to configure the Deno runtime (e.g., compiler options, imports) if they exist.")}),inject:{project_id:r},execute:async({project_id:a,name:s,entrypoint_path:c,import_map_path:d,verify_jwt:p,files:g})=>{if(n)throw new Error("Cannot deploy an edge function in read-only mode.");return await t.deployEdgeFunction(a,{name:s,entrypoint_path:c,import_map_path:d,verify_jwt:p,files:g})}})}}import{z as _}from"zod/v4";var mt={success:!0};function He({storage:t,projectId:i,readOnly:n}){let r=i;return{list_storage_buckets:l({description:"Lists all storage buckets in a Supabase project.",annotations:{title:"List storage buckets",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:_.object({project_id:_.string()}),inject:{project_id:r},execute:async({project_id:a})=>await t.listAllBuckets(a)}),get_storage_config:l({description:"Get the storage config for a Supabase project.",annotations:{title:"Get storage config",readOnlyHint:!0,destructiveHint:!1,idempotentHint:!0,openWorldHint:!1},parameters:_.object({project_id:_.string()}),inject:{project_id:r},execute:async({project_id:a})=>await t.getStorageConfig(a)}),update_storage_config:l({description:"Update the storage config for a Supabase project.",annotations:{title:"Update storage config",readOnlyHint:!1,destructiveHint:!0,idempotentHint:!1,openWorldHint:!1},parameters:_.object({project_id:_.string(),config:_.object({fileSizeLimit:_.number(),features:_.object({imageTransformation:_.object({enabled:_.boolean()}),s3Protocol:_.object({enabled:_.boolean()})})})}),inject:{project_id:r},execute:async({project_id:a,config:s})=>{if(n)throw new Error("Cannot update storage config in read-only mode.");return await t.updateStorageConfig(a,s),mt}})}}var{version:J}=re,gt=["docs","account","database","debugging","development","functions","branching"],Y=["docs"];function Bn(t){let{platform:i,projectId:n,readOnly:r,features:a,contentApiUrl:s="https://supabase.com/docs/api/graphql",onToolCall:c}=t,d=se(s,{"User-Agent":`supabase-mcp/${J}`}),p=gt.filter(S=>Y.includes(S)||Object.keys(i).includes(S)),g=ve(i,a??p);return ut({name:"supabase",title:"Supabase",version:J,async onInitialize(S){let{clientInfo:b}=S,x=`supabase-mcp/${J} (${b.name}/${b.version})`;await Promise.all([i.init?.(S),d.then(T=>T.setUserAgent(x))])},onToolCall:c,tools:async()=>{let S=await d,b={},{account:x,database:T,functions:N,debugging:U,development:F,storage:H,branching:v}=i;return 
g.has("docs")&&Object.assign(b,Te({contentApiClient:S})),!n&&x&&g.has("account")&&Object.assign(b,pe({account:x,readOnly:r})),T&&g.has("database")&&Object.assign(b,je({database:T,projectId:n,readOnly:r})),U&&g.has("debugging")&&Object.assign(b,Ee({debugging:U,projectId:n})),F&&g.has("development")&&Object.assign(b,Oe({development:F,projectId:n})),N&&g.has("functions")&&Object.assign(b,Ne({functions:N,projectId:n,readOnly:r})),v&&g.has("branching")&&Object.assign(b,de({branching:v,projectId:n,readOnly:r})),H&&g.has("storage")&&Object.assign(b,He({storage:H,projectId:n,readOnly:r})),b}})}async function A(t,i){let n=JSON.stringify(t,(s,c)=>c&&typeof c=="object"&&!Array.isArray(c)?Object.keys(c).sort().reduce((d,p)=>(d[p]=c[p],d),{}):c),r=await crypto.subtle.digest("SHA-256",new TextEncoder().encode(n));return btoa(String.fromCharCode(...new Uint8Array(r))).slice(0,i)}function ve(t,i){let n=K.set(ae).parse(new Set(i)),r=[...Y,...$.options.filter(s=>Object.keys(t).includes(s))],a=K.enum(r,{error:s=>{if(s.code==="invalid_value")return`This platform does not support the '${s.input}' feature group. Supported groups are: ${r.join(", ")}`}}).describe("Available features based on platform implementation");return K.set(a).parse(n)}var ft={WEST_US:{code:"us-west-1",displayName:"West US (North California)",location:{lat:37.774929,lng:-122.419418}},EAST_US:{code:"us-east-1",displayName:"East US (North Virginia)",location:{lat:37.926868,lng:-78.024902}},EAST_US_2:{code:"us-east-2",displayName:"East US (Ohio)",location:{lat:39.9612,lng:-82.9988}},CENTRAL_CANADA:{code:"ca-central-1",displayName:"Canada (Central)",location:{lat:56.130367,lng:-106.346771}},WEST_EU:{code:"eu-west-1",displayName:"West EU (Ireland)",location:{lat:53.3498,lng:-6.2603}},WEST_EU_2:{code:"eu-west-2",displayName:"West Europe (London)",location:{lat:51.507351,lng:-.127758}},WEST_EU_3:{code:"eu-west-3",displayName:"West EU (Paris)",location:{lat:2.352222,lng:48.856613}},CENTRAL_EU:{code:"eu-central-1",displayName:"Central EU (Frankfurt)",location:{lat:50.110924,lng:8.682127}},CENTRAL_EU_2:{code:"eu-central-2",displayName:"Central Europe (Zurich)",location:{lat:47.3744489,lng:8.5410422}},NORTH_EU:{code:"eu-north-1",displayName:"North EU (Stockholm)",location:{lat:59.3251172,lng:18.0710935}},SOUTH_ASIA:{code:"ap-south-1",displayName:"South Asia (Mumbai)",location:{lat:18.9733536,lng:72.8281049}},SOUTHEAST_ASIA:{code:"ap-southeast-1",displayName:"Southeast Asia (Singapore)",location:{lat:1.357107,lng:103.8194992}},NORTHEAST_ASIA:{code:"ap-northeast-1",displayName:"Northeast Asia (Tokyo)",location:{lat:35.6895,lng:139.6917}},NORTHEAST_ASIA_2:{code:"ap-northeast-2",displayName:"Northeast Asia (Seoul)",location:{lat:37.5665,lng:126.978}},OCEANIA:{code:"ap-southeast-2",displayName:"Oceania (Sydney)",location:{lat:-33.8688,lng:151.2093}},SOUTH_AMERICA:{code:"sa-east-1",displayName:"South America (S\xE3o Paulo)",location:{lat:-1.2043218,lng:-47.1583944}}},P=Object.values(ft).map(t=>t.code);var 
Xn=e.object({id:e.string(),name:e.string(),owner:e.string(),created_at:e.string(),updated_at:e.string(),public:e.boolean()}),eo=e.object({fileSizeLimit:e.number(),features:e.object({imageTransformation:e.object({enabled:e.boolean()}),s3Protocol:e.object({enabled:e.boolean()})})}),to=e.object({id:e.string(),name:e.string(),plan:e.string().optional(),allowed_release_channels:e.array(e.string()),opt_in_tags:e.array(e.string())}),no=e.object({id:e.string(),organization_id:e.string(),name:e.string(),status:e.string(),created_at:e.string(),region:e.string()}),oo=e.object({id:e.string(),name:e.string(),project_ref:e.string(),parent_project_ref:e.string(),is_default:e.boolean(),git_branch:e.string().optional(),pr_number:e.number().optional(),latest_check_run_id:e.number().optional(),persistent:e.boolean(),status:e.enum(["CREATING_PROJECT","RUNNING_MIGRATIONS","MIGRATIONS_PASSED","MIGRATIONS_FAILED","FUNCTIONS_DEPLOYED","FUNCTIONS_FAILED"]),created_at:e.string(),updated_at:e.string()}),ht=e.object({id:e.string(),slug:e.string(),name:e.string(),status:e.string(),version:e.number(),created_at:e.number().optional(),updated_at:e.number().optional(),verify_jwt:e.boolean().optional(),import_map:e.boolean().optional(),import_map_path:e.string().optional(),entrypoint_path:e.string().optional()}),ro=ht.extend({files:e.array(e.object({name:e.string(),content:e.string()}))}),ao=e.object({name:e.string(),organization_id:e.string(),region:e.enum(P),db_pass:e.string().optional()}),io=e.object({name:e.string()}),so=e.object({migration_version:e.string().optional()}),co=e.object({name:e.string(),entrypoint_path:e.string(),import_map_path:e.string().optional(),verify_jwt:e.boolean().optional(),files:e.array(e.object({name:e.string(),content:e.string()}))}),po=e.object({query:e.string(),parameters:e.array(e.unknown()).optional(),read_only:e.boolean().optional()}),lo=e.object({name:e.string(),query:e.string()}),mo=e.object({version:e.string(),name:e.string().optional()}),Q=e.enum(["api","branch-action","postgres","edge-function","auth","storage","realtime"]),uo=e.object({service:Q,iso_timestamp_start:e.string().optional(),iso_timestamp_end:e.string().optional()}),go=e.object({types:e.string()}),fo=e.enum(["legacy","publishable"]);export{re as a,ze as b,Xn as c,eo as d,to as e,no as f,oo as g,ht as h,ro as i,ao as j,io as k,so as l,co as m,po as n,lo as o,mo as p,Q as q,uo as r,go as s,fo as t,En as u,On as v,Bn as w};
+ //# sourceMappingURL=chunk-RYIG4VFH.js.map
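
The substantive change in this release is confined to the package manifest: the version moves from 0.6.1 to 0.6.2, and @modelcontextprotocol/sdk moves out of dependencies into peerDependencies, with a matching devDependencies entry. The remaining +/- churn in this chunk is minifier alias reshuffling (m and u, ue and me, ut and mt, uo and mo trade places) with no behavioral difference. One helper visible in the minified body is the cost-confirmation hash (function A), which canonicalizes an object before digesting it. A readable reconstruction follows; the name hashObject is illustrative.

  // Stable-stringify (object keys sorted at every level), SHA-256,
  // then base64-encode and truncate, as in the minified function A above.
  async function hashObject(value: unknown, length?: number): Promise<string> {
    const json = JSON.stringify(value, (_key, v) =>
      v && typeof v === 'object' && !Array.isArray(v)
        ? Object.keys(v as Record<string, unknown>)
            .sort()
            .reduce<Record<string, unknown>>((acc, k) => {
              acc[k] = (v as Record<string, unknown>)[k];
              return acc;
            }, {})
        : v
    );
    const digest = await crypto.subtle.digest(
      'SHA-256',
      new TextEncoder().encode(json)
    );
    // slice(0, undefined) returns the full string, matching the source.
    return btoa(String.fromCharCode(...new Uint8Array(digest))).slice(0, length);
  }

Sorting keys before hashing makes the confirmation ID deterministic regardless of property order, so the confirm_cost and create_project tools compute the same ID for the same cost object.
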