supascan 0.1.4 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.bun-version +1 -0
- package/.github/workflows/release-github.yml +70 -0
- package/.github/workflows/release-npm.yml +45 -0
- package/.github/workflows/tests.yml +36 -0
- package/apps/cli/build.ts +37 -0
- package/apps/cli/package.json +28 -0
- package/apps/cli/src/commands/analyze.ts +213 -0
- package/apps/cli/src/commands/dump.ts +68 -0
- package/apps/cli/src/commands/rpc.ts +67 -0
- package/apps/cli/src/context.ts +96 -0
- package/apps/cli/src/embedded-report.ts +1 -0
- package/apps/cli/src/formatters/console.ts +39 -0
- package/apps/cli/src/formatters/events.ts +95 -0
- package/apps/cli/src/index.ts +105 -0
- package/apps/cli/src/types.ts +9 -0
- package/apps/cli/src/utils/args.ts +46 -0
- package/apps/cli/src/utils/browser.ts +29 -0
- package/apps/cli/src/utils/files.ts +12 -0
- package/apps/cli/src/version.ts +3 -0
- package/apps/web/build.ts +68 -0
- package/apps/web/dev.ts +5 -0
- package/apps/web/index.html +75 -0
- package/apps/web/package.json +22 -0
- package/apps/web/src/App.tsx +129 -0
- package/apps/web/src/components/QueryBuilder.tsx +174 -0
- package/apps/web/src/components/QueryWindow.tsx +133 -0
- package/apps/web/src/components/RPCExecutor.tsx +176 -0
- package/apps/web/src/components/SchemaBrowser.tsx +269 -0
- package/apps/web/src/components/SmartTable.tsx +129 -0
- package/apps/web/src/components/TargetConfig.tsx +130 -0
- package/apps/web/src/components/TargetSummary.tsx +105 -0
- package/apps/web/src/hooks/useAnalysis.ts +54 -0
- package/apps/web/src/hooks/useNotification.ts +28 -0
- package/apps/web/src/hooks/useRPC.ts +53 -0
- package/apps/web/src/hooks/useSupabase.ts +46 -0
- package/apps/web/src/hooks/useTableQuery.ts +148 -0
- package/apps/web/src/index.tsx +18 -0
- package/apps/web/src/types.ts +16 -0
- package/apps/web/src/utils/hash.ts +27 -0
- package/context.test.ts +93 -0
- package/package.json +29 -44
- package/packages/core/package.json +21 -0
- package/packages/core/src/analyzer.ts +212 -0
- package/packages/core/src/extractor.ts +233 -0
- package/packages/core/src/index.ts +9 -0
- package/packages/core/src/supabase.ts +316 -0
- package/packages/core/src/types/analyzer.types.ts +72 -0
- package/packages/core/src/types/event.types.ts +4 -0
- package/packages/core/src/types/events.types.ts +5 -0
- package/packages/core/src/types/extractor.types.ts +54 -0
- package/packages/core/src/types/result.types.ts +17 -0
- package/packages/core/src/types/supabase.types.ts +98 -0
- package/tsconfig.json +23 -0
- package/turbo.json +19 -0
- package/utils.test.ts +68 -0
- package/version.ts +3 -0
- package/dist/supascan.js +0 -206
package/.bun-version
ADDED
@@ -0,0 +1 @@
+1.3.2
package/.github/workflows/release-github.yml
ADDED
@@ -0,0 +1,70 @@
+name: Create GitHub Release
+on:
+  push:
+    branches: [master]
+  workflow_dispatch:
+
+jobs:
+  tests:
+    uses: ./.github/workflows/tests.yml
+
+  release:
+    needs: tests
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+      id-token: write
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - uses: oven-sh/setup-bun@v2
+        with:
+          bun-version-file: ".bun-version"
+
+      - run: bun install --frozen-lockfile
+      - run: bun run build:binary
+
+      - name: Get version from package.json
+        id: pkg
+        run: echo "version=$(bun -e "console.log(require('./package.json').version)")" >> $GITHUB_OUTPUT
+
+      - name: Check if tag exists
+        id: tag-check
+        run: |
+          if git rev-parse "v${{ steps.pkg.outputs.version }}" >/dev/null 2>&1; then
+            echo "exists=true" >> $GITHUB_OUTPUT
+          else
+            echo "exists=false" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Create and push tag
+        if: steps.tag-check.outputs.exists == 'false'
+        run: |
+          git config user.name "github-actions"
+          git config user.email "github-actions@github.com"
+          git tag v${{ steps.pkg.outputs.version }}
+          git push origin v${{ steps.pkg.outputs.version }}
+
+      - name: Check if release exists
+        id: release-check
+        run: |
+          if gh release view "v${{ steps.pkg.outputs.version }}" >/dev/null 2>&1; then
+            echo "exists=true" >> $GITHUB_OUTPUT
+          else
+            echo "exists=false" >> $GITHUB_OUTPUT
+          fi
+        env:
+          GH_TOKEN: ${{ github.token }}
+
+      - name: Create GitHub Release
+        if: steps.release-check.outputs.exists == 'false'
+        uses: softprops/action-gh-release@v1
+        with:
+          tag_name: v${{ steps.pkg.outputs.version }}
+          name: Release v${{ steps.pkg.outputs.version }}
+          generate_release_notes: true
+          files: |
+            dist/supascan
package/.github/workflows/release-npm.yml
ADDED
@@ -0,0 +1,45 @@
+name: Publish to npm
+on:
+  push:
+    branches: [master]
+  workflow_dispatch:
+
+jobs:
+  tests:
+    uses: ./.github/workflows/tests.yml
+
+  publish:
+    needs: tests
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: oven-sh/setup-bun@v2
+        with:
+          bun-version-file: ".bun-version"
+
+      - run: bun install --frozen-lockfile
+      - run: bun run build:bundle
+
+      - name: Get version from package.json
+        id: pkg
+        run: echo "version=$(bun -e "console.log(require('./package.json').version)")" >> $GITHUB_OUTPUT
+
+      - name: Check if version exists on npm
+        id: npm-check
+        run: |
+          if npm view supascan@${{ steps.pkg.outputs.version }} version 2>/dev/null; then
+            echo "exists=true" >> $GITHUB_OUTPUT
+          else
+            echo "exists=false" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Publish to npm
+        if: steps.npm-check.outputs.exists == 'false'
+        run: bun publish --access public
+        env:
+          NPM_CONFIG_TOKEN: ${{ secrets.NPM_TOKEN }}
package/.github/workflows/tests.yml
ADDED
@@ -0,0 +1,36 @@
+name: Tests
+
+on:
+  push:
+    branches: [master]
+  pull_request:
+    branches: [master]
+  workflow_call:
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: oven-sh/setup-bun@v2
+        with:
+          bun-version-file: ".bun-version"
+
+      - name: Install dependencies
+        run: bun install --frozen-lockfile
+
+      - name: Lint
+        run: bun run lint
+
+      - name: Run tests
+        run: bun run test
+
+      - name: Build bundle
+        run: bun run build:bundle
+
+      - name: Build binary
+        run: bun run build:binary
package/apps/cli/build.ts
ADDED
@@ -0,0 +1,37 @@
+import { $ } from "bun";
+import { mkdir, readFile } from "fs/promises";
+
+console.log("Building CLI...");
+
+await $`tsc --noEmit`;
+
+console.log("Building web app for embedding...");
+await $`cd ../web && bun run build`;
+
+const webHtml = await readFile("../web/dist/index.html", "utf-8");
+
+await Bun.write(
+  "./src/embedded-report.ts",
+  `export const reportTemplate = ${JSON.stringify(webHtml)};`,
+);
+
+await mkdir("./dist", { recursive: true });
+
+const nodeBuild = await Bun.build({
+  entrypoints: ["./src/index.ts"],
+  outdir: "./dist",
+  minify: true,
+  target: "node",
+  banner: "#!/usr/bin/env node",
+  naming: {
+    entry: "supascan.js",
+  },
+});
+
+if (!nodeBuild.success) {
+  console.error("Failed to bundle CLI");
+  for (const log of nodeBuild.logs) console.error(log);
+  process.exit(1);
+}
+
+console.log("✓ CLI built: dist/supascan.js");
package/apps/cli/package.json
ADDED
@@ -0,0 +1,28 @@
+{
+  "name": "supascan",
+  "version": "0.2.0",
+  "type": "module",
+  "main": "./dist/supascan.js",
+  "bin": {
+    "supascan": "./dist/supascan.js"
+  },
+  "files": [
+    "dist/**"
+  ],
+  "scripts": {
+    "build": "bun run build.ts",
+    "dev": "bun run src/index.ts",
+    "lint": "tsc --noEmit"
+  },
+  "dependencies": {
+    "@commander-js/extra-typings": "^14.0.0",
+    "@supabase/supabase-js": "^2.75.0",
+    "@supascan/core": "workspace:*",
+    "commander": "^14.0.1",
+    "picocolors": "^1.1.1"
+  },
+  "devDependencies": {
+    "@types/bun": "latest",
+    "typescript": "^5.9.3"
+  }
+}
package/apps/cli/src/commands/analyze.ts
ADDED
@@ -0,0 +1,213 @@
+import {
+  analyze,
+  type AnalysisResult,
+  type RPCParameter,
+} from "@supascan/core";
+import pc from "picocolors";
+import type { CLIContext } from "../context";
+import { log } from "../formatters/console";
+import { handleEvent } from "../formatters/events";
+
+export async function executeAnalyzeCommand(
+  ctx: CLIContext,
+  options: { schema?: string },
+): Promise<void> {
+  if (ctx.html) {
+    const { reportTemplate } = await import("../embedded-report");
+    const { generateTempFilePath, writeHtmlFile } = await import(
+      "../utils/files"
+    );
+    const { openInBrowser } = await import("../utils/browser");
+
+    const config = {
+      url: ctx.url,
+      key: ctx.key,
+      headers: ctx.headers,
+      autorun: true,
+    };
+    const encoded = Buffer.from(JSON.stringify(config)).toString("base64");
+
+    const filePath = generateTempFilePath();
+    const htmlContent = reportTemplate.replace(
+      "</head>",
+      `<script>window.__SUPASCAN_CONFIG__ = "${encoded}";</script></head>`,
+    );
+    writeHtmlFile(filePath, htmlContent);
+
+    openInBrowser(filePath);
+    log.success(`HTML report generated: ${filePath}`);
+    return;
+  }
+
+  const analysisGen = analyze(ctx.client, ctx.url, ctx.key, options);
+  let analysisResult;
+
+  while (true) {
+    const next = await analysisGen.next();
+    if (next.done) {
+      analysisResult = next.value;
+      break;
+    }
+    handleEvent(ctx, next.value);
+  }
+
+  if (!analysisResult || !analysisResult.success) {
+    log.error(
+      "Analysis failed",
+      analysisResult?.error.message ?? "Unknown error",
+    );
+    process.exit(1);
+  }
+
+  if (ctx.json) {
+    console.log(JSON.stringify(analysisResult.value, null, 2));
+  } else {
+    displayAnalysisResult(analysisResult.value);
+  }
+}
+
+function displayAnalysisResult(result: AnalysisResult): void {
+  console.log();
+  console.log(pc.bold(pc.cyan("=".repeat(60))));
+  console.log(pc.bold(pc.cyan(" SUPABASE DATABASE ANALYSIS")));
+  console.log(pc.bold(pc.cyan("=".repeat(60))));
+  console.log();
+
+  console.log(pc.bold(pc.yellow("TARGET SUMMARY")));
+  console.log(pc.dim("-".repeat(20)));
+  console.log(pc.bold("Domain:"), pc.white(result.summary.domain));
+
+  if (result.summary.metadata?.service) {
+    console.log(pc.bold("Service:"), pc.white(result.summary.metadata.service));
+  }
+
+  if (result.summary.metadata?.region) {
+    console.log(
+      pc.bold("Project ID:"),
+      pc.white(result.summary.metadata.region),
+    );
+  }
+
+  if (result.summary.metadata?.title) {
+    console.log(pc.bold("Title:"), pc.white(result.summary.metadata.title));
+  }
+
+  if (result.summary.metadata?.version) {
+    console.log(pc.bold("Version:"), pc.white(result.summary.metadata.version));
+  }
+
+  if (result.summary.jwtInfo) {
+    console.log();
+    console.log(pc.bold(pc.yellow("JWT TOKEN INFO")));
+    console.log(pc.dim("-".repeat(20)));
+
+    if (result.summary.jwtInfo.iss) {
+      console.log(pc.bold("Issuer:"), pc.white(result.summary.jwtInfo.iss));
+    }
+    if (result.summary.jwtInfo.aud) {
+      console.log(pc.bold("Audience:"), pc.white(result.summary.jwtInfo.aud));
+    }
+    if (result.summary.jwtInfo.role) {
+      console.log(pc.bold("Role:"), pc.white(result.summary.jwtInfo.role));
+    }
+    if (result.summary.jwtInfo.exp) {
+      const expDate = new Date(result.summary.jwtInfo.exp * 1000);
+      console.log(pc.bold("Expires:"), pc.white(expDate.toISOString()));
+    }
+    if (result.summary.jwtInfo.iat) {
+      const iatDate = new Date(result.summary.jwtInfo.iat * 1000);
+      console.log(pc.bold("Issued:"), pc.white(iatDate.toISOString()));
+    }
+  }
+
+  console.log();
+  console.log(pc.bold(pc.cyan("DATABASE ANALYSIS")));
+  console.log(pc.dim("-".repeat(20)));
+  console.log(
+    pc.bold("Schemas discovered:"),
+    pc.green(result.schemas.length.toString()),
+  );
+  console.log();
+
+  Object.entries(result.schemaDetails).forEach(([schema, analysis]) => {
+    console.log(pc.bold(pc.cyan(`Schema: ${schema}`)));
+    console.log();
+
+    const exposedCount = Object.values(analysis.tableAccess).filter(
+      (a) => a.status === "readable",
+    ).length;
+    const deniedCount = Object.values(analysis.tableAccess).filter(
+      (a) => a.status === "denied",
+    ).length;
+    const emptyCount = Object.values(analysis.tableAccess).filter(
+      (a) => a.status === "empty",
+    ).length;
+
+    console.log(
+      pc.bold("Tables:"),
+      pc.green(analysis.tables.length.toString()),
+    );
+    console.log(
+      pc.dim(
+        ` ${exposedCount} exposed | ${emptyCount} empty/protected | ${deniedCount} denied`,
+      ),
+    );
+    console.log();
+
+    if (analysis.tables.length > 0) {
+      analysis.tables.forEach((table) => {
+        const access = analysis.tableAccess[table];
+        let indicator = "";
+        let description = "";
+
+        switch (access?.status) {
+          case "readable":
+            indicator = pc.green("[+]");
+            description = pc.dim(`(~${access.rowCount ?? "?"} rows exposed)`);
+            break;
+          case "empty":
+            indicator = pc.yellow("[-]");
+            description = pc.dim("(0 rows - empty or RLS)");
+            break;
+          case "denied":
+            indicator = pc.red("[X]");
+            description = pc.dim("(access denied)");
+            break;
+        }
+
+        console.log(` ${indicator} ${pc.white(table)} ${description}`);
+      });
+    } else {
+      console.log(pc.dim(" No tables found"));
+    }
+    console.log();
+
+    console.log(pc.bold("RPCs:"), pc.green(analysis.rpcs.length.toString()));
+    if (analysis.rpcFunctions.length > 0) {
+      analysis.rpcFunctions.forEach((rpc) => {
+        console.log(` * ${pc.white(rpc.name)}`);
+        if (rpc.parameters.length > 0) {
+          rpc.parameters.forEach((param: RPCParameter) => {
+            const required = param.required
+              ? pc.red("(required)")
+              : pc.dim("(optional)");
+            const type = param.format
+              ? `${param.type} (${param.format})`
+              : param.type;
+            console.log(
+              ` - ${pc.cyan(param.name)}: ${pc.yellow(type)} ${required}`,
+            );
+          });
+        } else {
+          console.log(pc.dim(" No parameters"));
+        }
+      });
+    } else {
+      console.log(pc.dim(" No RPCs found"));
+    }
+    console.log();
+  });
+
+  console.log(pc.bold(pc.cyan("=".repeat(60))));
+  console.log();
+}
package/apps/cli/src/commands/dump.ts
ADDED
@@ -0,0 +1,68 @@
+import { dumpTable } from "@supascan/core";
+import type { CLIContext } from "../context";
+import { log } from "../formatters/console";
+
+export async function executeDumpCommand(
+  ctx: CLIContext,
+  options: { dump: string; limit: string },
+): Promise<void> {
+  const parts = options.dump.split(".");
+
+  if (parts.length === 1) {
+    const schema = parts[0];
+    if (!schema) {
+      log.error("Invalid dump format. Use schema.table or schema");
+      process.exit(1);
+    }
+
+    log.info(`Dumping swagger for schema: ${schema}`);
+
+    const { data, error } = await ctx.client.schema(schema).from("").select();
+
+    if (error) {
+      log.error("Failed to fetch swagger", error.message);
+      process.exit(1);
+    }
+
+    console.log(JSON.stringify(data, null, 2));
+    return;
+  }
+
+  if (parts.length === 2) {
+    const schema = parts[0];
+    const table = parts[1];
+
+    if (!schema || !table) {
+      log.error("Invalid dump format. Use schema.table");
+      process.exit(1);
+    }
+
+    const limit = parseInt(options.limit);
+    if (isNaN(limit) || limit <= 0) {
+      log.error("Invalid limit value");
+      process.exit(1);
+    }
+
+    log.info(`Dumping table: ${schema}.${table} (limit: ${limit})`);
+
+    const result = await dumpTable(ctx.client, schema, table, limit);
+
+    if (!result.success) {
+      log.error("Failed to dump table", result.error.message);
+      process.exit(1);
+    }
+
+    if (ctx.json) {
+      console.log(JSON.stringify(result.value, null, 2));
+    } else {
+      console.log(
+        `\nTable: ${schema}.${table} (${result.value.count} total rows, showing ${result.value.rows.length})\n`,
+      );
+      console.table(result.value.rows);
+    }
+    return;
+  }
+
+  log.error("Invalid dump format. Use schema.table or schema");
+  process.exit(1);
+}
package/apps/cli/src/commands/rpc.ts
ADDED
@@ -0,0 +1,67 @@
+import { callRPC } from "@supascan/core";
+import type { CLIContext } from "../context";
+import { log } from "../formatters/console";
+import { parseRPCArgs } from "../utils/args";
+
+export async function executeRPCCommand(
+  ctx: CLIContext,
+  options: {
+    rpc: string;
+    args?: string;
+    limit: string;
+    explain?: boolean;
+  },
+): Promise<void> {
+  const parts = options.rpc.split(".");
+
+  if (parts.length !== 2) {
+    log.error("Invalid RPC format. Use schema.rpc_name");
+    process.exit(1);
+  }
+
+  const schema = parts[0];
+  const rpcName = parts[1];
+
+  if (!schema || !rpcName) {
+    log.error("Invalid RPC format. Use schema.rpc_name");
+    process.exit(1);
+  }
+
+  const parsedArgs = options.args ? parseRPCArgs(options.args) : {};
+  const limit = parseInt(options.limit);
+
+  if (isNaN(limit) || limit <= 0) {
+    log.error("Invalid limit value");
+    process.exit(1);
+  }
+
+  log.info(`Calling RPC: ${schema}.${rpcName}`);
+
+  if (Object.keys(parsedArgs).length > 0) {
+    log.info("Arguments:", JSON.stringify(parsedArgs));
+  }
+
+  const result = await callRPC(ctx.client, schema, rpcName, parsedArgs, {
+    limit,
+    explain: options.explain,
+  });
+
+  if (!result.success) {
+    log.error("RPC call failed", result.error.message);
+    process.exit(1);
+  }
+
+  if (ctx.json) {
+    console.log(JSON.stringify(result.value, null, 2));
+  } else if (options.explain) {
+    console.log("\nQuery Execution Plan:\n");
+    console.log(result.value);
+  } else {
+    console.log("\nRPC Result:\n");
+    if (Array.isArray(result.value)) {
+      console.table(result.value);
+    } else {
+      console.log(JSON.stringify(result.value, null, 2));
+    }
+  }
+}
package/apps/cli/src/context.ts
ADDED
@@ -0,0 +1,96 @@
+import { createClient, type SupabaseClient } from "@supabase/supabase-js";
+import { extractFromUrl } from "@supascan/core";
+import { log } from "./formatters/console";
+import { parseHeaders } from "./utils/args";
+
+export type CLIContext = {
+  debug: boolean;
+  json: boolean;
+  html: boolean;
+  suppressExperimentalWarnings: boolean;
+  url: string;
+  key: string;
+  headers?: Record<string, string>;
+  client: SupabaseClient;
+};
+
+export async function createCLIContext(options: {
+  url?: string;
+  key?: string;
+  extract?: string;
+  debug?: boolean;
+  json?: boolean;
+  html?: boolean;
+  suppressExperimentalWarnings?: boolean;
+  header?: string[];
+}): Promise<CLIContext> {
+  let url = options.url;
+  let key = options.key;
+
+  if (options.extract) {
+    const extractGen = extractFromUrl(options.extract);
+    let extractResult;
+
+    while (true) {
+      const next = await extractGen.next();
+      if (next.done) {
+        extractResult = next.value;
+        break;
+      }
+
+      if (options.debug) {
+        const event = next.value;
+        if (event.type === "content_fetched") {
+          console.error(
+            `[DEBUG] Fetched ${event.data.size} bytes (${event.data.contentType})`,
+          );
+        } else if (event.type === "script_checking") {
+          console.error(`[DEBUG] Checking script: ${event.data.scriptUrl}`);
+        }
+      }
+    }
+
+    if (!extractResult || !extractResult.success) {
+      throw new Error(
+        `Failed to extract credentials: ${extractResult?.error.message ?? "Unknown error"}`,
+      );
+    }
+
+    url = extractResult.value.url;
+    key = extractResult.value.key;
+
+    if (extractResult.value.source) {
+      log.success(`Extracted credentials from: ${extractResult.value.source}`);
+    } else {
+      log.success("Extracted credentials from target");
+    }
+
+    if (options.debug) {
+      console.error(`[DEBUG] URL: ${url}`);
+      console.error(`[DEBUG] Key: ${key?.substring(0, 20)}...`);
+    }
+  }
+
+  if (!url || !key) {
+    throw new Error("Either provide --url and --key, or use --extract <url>");
+  }
+
+  const headers =
+    options.header && options.header.length > 0
+      ? parseHeaders(options.header)
+      : undefined;
+
+  const clientOptions = headers ? { global: { headers } } : undefined;
+  const client = createClient(url, key, clientOptions);
+
+  return {
+    debug: options.debug || false,
+    json: options.json || false,
+    html: options.html || false,
+    suppressExperimentalWarnings: options.suppressExperimentalWarnings || false,
+    url,
+    key,
+    headers,
+    client,
+  };
+}