@prajwolkc/stk 0.6.1 → 0.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,411 @@
1
+ import { z } from "zod";
2
+ import { loadConfig, enabledServices } from "../../lib/config.js";
3
+ import { brainDiagnose } from "../../services/brain.js";
4
+ import { execSync } from "child_process";
5
/**
 * Registers the ops tool set (stk_logs, stk_deploy, stk_rollback,
 * stk_env_sync, stk_autofix) on the given MCP server.
 */
export function registerOpsTools(server) {
    // ──────────────────────────────────────────
    // Tool: stk_logs
    // ──────────────────────────────────────────
    server.tool("stk_logs", "Fetch recent production logs from Railway, Vercel, or other deploy providers. Useful for diagnosing errors and understanding runtime behavior.", {
        provider: z.enum(["railway", "vercel"]).optional().describe("Which provider to fetch logs from (auto-detects if omitted)"),
        lines: z.number().optional().default(30).describe("Number of log lines to fetch"),
    }, async ({ provider, lines }) => {
        // Railway logs — taken when explicitly requested, or auto-detected via
        // the presence of RAILWAY_API_TOKEN when no provider was given.
        if ((provider === "railway" || !provider) && process.env.RAILWAY_API_TOKEN) {
            const token = process.env.RAILWAY_API_TOKEN;
            const projectId = process.env.RAILWAY_PROJECT_ID;
            const serviceId = process.env.RAILWAY_SERVICE_ID;
            if (!projectId) {
                return { content: [{ type: "text", text: JSON.stringify({ error: "RAILWAY_PROJECT_ID not set" }) }] };
            }
            // Get latest deployment. serviceId is optional — without it the query
            // returns the newest deployment across the whole project.
            // NOTE(review): ids are interpolated straight into the GraphQL string;
            // acceptable for env-sourced ids, but GraphQL variables would be safer.
            // NOTE(review): endpoint host is backboard.railway.com here but
            // backboard.railway.app in stk_autofix below — confirm the canonical host.
            const serviceFilter = serviceId ? `serviceId: "${serviceId}",` : "";
            const depRes = await fetch("https://backboard.railway.com/graphql/v2", {
                method: "POST",
                headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
                body: JSON.stringify({
                    query: `{ deployments(first: 1, input: { projectId: "${projectId}", ${serviceFilter} }) { edges { node { id } } } }`,
                }),
            });
            const depData = (await depRes.json());
            const deploymentId = depData.data?.deployments?.edges?.[0]?.node?.id;
            if (!deploymentId) {
                return { content: [{ type: "text", text: JSON.stringify({ error: "No deployments found" }) }] };
            }
            // Fetch up to `lines` log records for that deployment.
            const logRes = await fetch("https://backboard.railway.com/graphql/v2", {
                method: "POST",
                headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
                body: JSON.stringify({
                    query: `{ deploymentLogs(deploymentId: "${deploymentId}", limit: ${lines}) { timestamp message severity } }`,
                }),
            });
            const logData = (await logRes.json());
            const logs = logData.data?.deploymentLogs ?? [];
            return {
                content: [{ type: "text", text: JSON.stringify({ provider: "railway", deploymentId, logs }, null, 2) }],
            };
        }
        // Vercel logs — same auto-detect pattern, keyed on VERCEL_TOKEN.
        if ((provider === "vercel" || !provider) && process.env.VERCEL_TOKEN) {
            const token = process.env.VERCEL_TOKEN;
            // Latest deployment only (limit=1).
            const depRes = await fetch("https://api.vercel.com/v6/deployments?limit=1", {
                headers: { Authorization: `Bearer ${token}` },
            });
            const depData = (await depRes.json());
            const dep = depData.deployments?.[0];
            if (!dep) {
                return { content: [{ type: "text", text: JSON.stringify({ error: "No deployments found" }) }] };
            }
            const logRes = await fetch(`https://api.vercel.com/v2/deployments/${dep.uid}/events`, {
                headers: { Authorization: `Bearer ${token}` },
            });
            const events = (await logRes.json());
            // Keep only process-output events, take the newest `lines`, and
            // normalize to the same {timestamp, message, severity} shape that the
            // Railway branch returns.
            const logs = Array.isArray(events)
                ? events
                    .filter((e) => e.type === "stdout" || e.type === "stderr")
                    .slice(-lines)
                    .map((e) => ({
                        timestamp: new Date(e.created).toISOString(),
                        message: e.payload?.text ?? e.text ?? "",
                        severity: e.type === "stderr" ? "ERROR" : "INFO",
                    }))
                : [];
            return {
                content: [{ type: "text", text: JSON.stringify({ provider: "vercel", deploymentUrl: dep.url, logs }, null, 2) }],
            };
        }
        // Neither provider is configured.
        return {
            content: [{ type: "text", text: JSON.stringify({ error: "No log provider available. Set RAILWAY_API_TOKEN or VERCEL_TOKEN." }) }],
        };
    });
81
+ // ──────────────────────────────────────────
82
+ // Tool: stk_deploy
83
+ // ──────────────────────────────────────────
84
+ server.tool("stk_deploy", "Push current branch to remote and trigger deploys. Use with caution — this pushes code to production.", {
85
+ skipPush: z.boolean().optional().describe("Skip git push, just report current deploy status"),
86
+ }, async ({ skipPush }) => {
87
+ const config = loadConfig();
88
+ const branch = config.deploy?.branch ?? "main";
89
+ if (!skipPush) {
90
+ try {
91
+ const currentBranch = execSync("git rev-parse --abbrev-ref HEAD", { encoding: "utf-8", stdio: ["pipe", "pipe", "pipe"] }).trim();
92
+ if (currentBranch !== branch) {
93
+ return {
94
+ content: [{ type: "text", text: JSON.stringify({ error: `On branch "${currentBranch}", not "${branch}". Switch branches first.` }) }],
95
+ };
96
+ }
97
+ execSync(`git push origin ${branch}`, { encoding: "utf-8", stdio: "pipe" });
98
+ }
99
+ catch (err) {
100
+ return {
101
+ content: [{ type: "text", text: JSON.stringify({ error: `Git push failed: ${err instanceof Error ? err.message : String(err)}` }) }],
102
+ };
103
+ }
104
+ }
105
+ return {
106
+ content: [{
107
+ type: "text",
108
+ text: JSON.stringify({
109
+ pushed: !skipPush,
110
+ branch,
111
+ providers: config.deploy?.providers ?? [],
112
+ note: "Deploy triggered. Use stk_health to verify after a few minutes.",
113
+ }, null, 2),
114
+ }],
115
+ };
116
+ });
117
    // ──────────────────────────────────────────
    // Tool: stk_rollback
    // ──────────────────────────────────────────
    server.tool("stk_rollback", "Rollback to a previous Vercel deployment. Lists recent deploys and can promote an older one to production.", {
        deployId: z.string().optional().describe("Deployment ID to rollback to. If omitted, lists recent deployments to choose from."),
        confirm: z.boolean().optional().default(false).describe("Must be true to actually execute the rollback"),
    }, async ({ deployId, confirm }) => {
        const token = process.env.VERCEL_TOKEN;
        if (!token) {
            return { content: [{ type: "text", text: JSON.stringify({ error: "VERCEL_TOKEN not set" }) }] };
        }
        // List recent deployments — used both for the listing step and to echo
        // the chosen target back during the confirmation step.
        const depRes = await fetch("https://api.vercel.com/v6/deployments?limit=10", {
            headers: { Authorization: `Bearer ${token}` },
        });
        const depData = await depRes.json();
        const deployments = (depData.deployments ?? []).map((d) => ({
            id: d.uid,
            url: d.url,
            state: d.readyState ?? d.state,
            created: new Date(d.created).toISOString(),
            target: d.target ?? "preview",
        }));
        // Step 1: no deployId supplied — just list the candidates.
        if (!deployId) {
            return {
                content: [{
                        type: "text",
                        text: JSON.stringify({
                            message: "Recent deployments — provide a deployId to rollback",
                            deployments,
                        }, null, 2),
                    }],
            };
        }
        // Step 2: deployId supplied but not confirmed — dry-run echo of the target.
        if (!confirm) {
            const target = deployments.find((d) => d.id === deployId);
            return {
                content: [{
                        type: "text",
                        text: JSON.stringify({
                            message: "Rollback requires confirmation. Call again with confirm: true",
                            target: target ?? deployId,
                        }, null, 2),
                    }],
            };
        }
        // Step 3: confirmed — execute rollback by promoting the old deployment.
        // NOTE(review): this issues a create-deployment request referencing the old
        // deploymentId (a redeploy) rather than an instant promote; confirm this is
        // the intended rollback mechanism against the Vercel API docs.
        try {
            // Get the deployment's project (needed to build the redeploy request).
            const detailRes = await fetch(`https://api.vercel.com/v13/deployments/${deployId}`, {
                headers: { Authorization: `Bearer ${token}` },
            });
            const detail = await detailRes.json();
            const projectId = detail.projectId;
            if (!projectId) {
                return { content: [{ type: "text", text: JSON.stringify({ error: "Could not determine project from deployment" }) }] };
            }
            // Create a new deployment based on the old one (redeploy) targeting production.
            const rollbackRes = await fetch(`https://api.vercel.com/v13/deployments`, {
                method: "POST",
                headers: {
                    Authorization: `Bearer ${token}`,
                    "Content-Type": "application/json",
                },
                body: JSON.stringify({
                    name: detail.name,
                    deploymentId: deployId,
                    target: "production",
                }),
            });
            if (!rollbackRes.ok) {
                const errData = await rollbackRes.json();
                return { content: [{ type: "text", text: JSON.stringify({ error: errData.error?.message ?? `HTTP ${rollbackRes.status}` }) }] };
            }
            const rollbackData = await rollbackRes.json();
            return {
                content: [{
                        type: "text",
                        text: JSON.stringify({
                            rolledBack: true,
                            newDeploymentId: rollbackData.id,
                            url: rollbackData.url,
                            note: "Rollback triggered. Use stk_health to verify.",
                        }, null, 2),
                    }],
            };
        }
        catch (err) {
            return { content: [{ type: "text", text: JSON.stringify({ error: err instanceof Error ? err.message : String(err) }) }] };
        }
    });
208
+ // ──────────────────────────────────────────
209
+ // Tool: stk_env_sync
210
+ // ──────────────────────────────────────────
211
+ server.tool("stk_env_sync", "Compare and sync environment variables between local .env and Vercel. Shows which vars are missing, extra, or mismatched.", {
212
+ action: z.enum(["diff", "pull", "push"]).optional().default("diff").describe("diff: compare local vs remote. pull: download remote to .env.pulled. push: upload local to Vercel."),
213
+ confirm: z.boolean().optional().default(false).describe("Required for push action"),
214
+ }, async ({ action, confirm }) => {
215
+ const token = process.env.VERCEL_TOKEN;
216
+ const projectId = process.env.VERCEL_PROJECT_ID;
217
+ // Read local .env
218
+ let localVars = {};
219
+ try {
220
+ const { readFileSync } = await import("fs");
221
+ const envContent = readFileSync(".env", "utf-8");
222
+ for (const line of envContent.split("\n")) {
223
+ const trimmed = line.trim();
224
+ if (!trimmed || trimmed.startsWith("#"))
225
+ continue;
226
+ const eqIdx = trimmed.indexOf("=");
227
+ if (eqIdx > 0) {
228
+ localVars[trimmed.slice(0, eqIdx)] = trimmed.slice(eqIdx + 1);
229
+ }
230
+ }
231
+ }
232
+ catch {
233
+ localVars = {};
234
+ }
235
+ if (!token) {
236
+ return {
237
+ content: [{
238
+ type: "text",
239
+ text: JSON.stringify({
240
+ localVars: Object.keys(localVars),
241
+ remote: "VERCEL_TOKEN not set — cannot fetch remote env vars",
242
+ }, null, 2),
243
+ }],
244
+ };
245
+ }
246
+ // Fetch Vercel env vars
247
+ let remoteVars = {};
248
+ const envUrl = projectId
249
+ ? `https://api.vercel.com/v9/projects/${projectId}/env`
250
+ : null;
251
+ if (envUrl) {
252
+ try {
253
+ const res = await fetch(envUrl, {
254
+ headers: { Authorization: `Bearer ${token}` },
255
+ });
256
+ const data = await res.json();
257
+ for (const env of data.envs ?? []) {
258
+ remoteVars[env.key] = env.value ?? "(encrypted)";
259
+ }
260
+ }
261
+ catch { /* skip */ }
262
+ }
263
+ const localKeys = new Set(Object.keys(localVars));
264
+ const remoteKeys = new Set(Object.keys(remoteVars));
265
+ const onlyLocal = [...localKeys].filter((k) => !remoteKeys.has(k));
266
+ const onlyRemote = [...remoteKeys].filter((k) => !localKeys.has(k));
267
+ const shared = [...localKeys].filter((k) => remoteKeys.has(k));
268
+ if (action === "diff" || !action) {
269
+ return {
270
+ content: [{
271
+ type: "text",
272
+ text: JSON.stringify({
273
+ localCount: localKeys.size,
274
+ remoteCount: remoteKeys.size,
275
+ onlyInLocal: onlyLocal,
276
+ onlyInRemote: onlyRemote,
277
+ inBoth: shared.length,
278
+ note: projectId ? undefined : "Set VERCEL_PROJECT_ID for remote env comparison",
279
+ }, null, 2),
280
+ }],
281
+ };
282
+ }
283
+ if (action === "pull") {
284
+ const { writeFileSync } = await import("fs");
285
+ const lines = Object.entries(remoteVars).map(([k, v]) => `${k}=${v}`);
286
+ writeFileSync(".env.pulled", lines.join("\n") + "\n");
287
+ return {
288
+ content: [{ type: "text", text: JSON.stringify({ pulled: true, file: ".env.pulled", count: lines.length }) }],
289
+ };
290
+ }
291
+ if (action === "push") {
292
+ if (!confirm) {
293
+ return {
294
+ content: [{ type: "text", text: JSON.stringify({ message: "Push requires confirm: true. This will overwrite remote env vars.", varsToUpload: onlyLocal.length + shared.length }) }],
295
+ };
296
+ }
297
+ if (!projectId || !envUrl) {
298
+ return { content: [{ type: "text", text: JSON.stringify({ error: "VERCEL_PROJECT_ID required for push" }) }] };
299
+ }
300
+ let uploaded = 0;
301
+ for (const [key, value] of Object.entries(localVars)) {
302
+ await fetch(envUrl, {
303
+ method: "POST",
304
+ headers: {
305
+ Authorization: `Bearer ${token}`,
306
+ "Content-Type": "application/json",
307
+ },
308
+ body: JSON.stringify({
309
+ key,
310
+ value,
311
+ type: "encrypted",
312
+ target: ["production", "preview", "development"],
313
+ }),
314
+ });
315
+ uploaded++;
316
+ }
317
+ return {
318
+ content: [{ type: "text", text: JSON.stringify({ pushed: true, uploaded }) }],
319
+ };
320
+ }
321
+ return { content: [{ type: "text", text: JSON.stringify({ error: "Unknown action" }) }] };
322
+ });
323
    // ──────────────────────────────────────────
    // Tool: stk_autofix
    // ──────────────────────────────────────────
    server.tool("stk_autofix", "Auto-diagnose errors from production logs. Fetches recent logs, extracts errors, and searches the brain for matching solutions. Returns errors with matched fix patterns and confidence scores.", {
        provider: z.enum(["railway", "vercel"]).optional(),
        lines: z.number().optional().default(50),
    }, async ({ provider, lines }) => {
        // Determine provider: explicit argument wins; otherwise prefer railway
        // when it is among the enabled services, falling back to vercel.
        const config = loadConfig();
        const detected = provider ?? (enabledServices(config).includes("railway") ? "railway" : "vercel");
        // Fetch logs using existing stk_logs logic (inline)
        // NOTE(review): only the railway path is implemented here — when
        // `detected` is "vercel", `logs` stays empty and no errors are reported.
        // NOTE(review): this uses backboard.railway.app while stk_logs above uses
        // backboard.railway.com — confirm which host is canonical.
        let logs = [];
        try {
            if (detected === "railway") {
                const token = process.env.RAILWAY_API_TOKEN;
                const projectId = process.env.RAILWAY_PROJECT_ID;
                const serviceId = process.env.RAILWAY_SERVICE_ID;
                // Unlike stk_logs, serviceId is required here (no project-wide fallback).
                if (token && projectId && serviceId) {
                    const envId = process.env.RAILWAY_ENVIRONMENT_ID;
                    const gql = `query { deployments(first:1, input:{projectId:"${projectId}",serviceId:"${serviceId}"${envId ? `,environmentId:"${envId}"` : ""}}) { edges { node { id } } } }`;
                    const depRes = await fetch("https://backboard.railway.app/graphql/v2", {
                        method: "POST",
                        headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
                        body: JSON.stringify({ query: gql }),
                    });
                    const depData = await depRes.json();
                    const depId = (depData?.data?.deployments?.edges?.[0]?.node?.id) ?? null;
                    if (depId) {
                        const logGql = `query { deploymentLogs(deploymentId:"${depId}",limit:${lines}) { timestamp message severity } }`;
                        const logRes = await fetch("https://backboard.railway.app/graphql/v2", {
                            method: "POST",
                            headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" },
                            body: JSON.stringify({ query: logGql }),
                        });
                        const logData = await logRes.json();
                        logs = (logData?.data?.deploymentLogs ?? []);
                    }
                }
            }
        }
        catch { /* log fetch failed */ }
        // Filter for errors: either the provider flagged the line, or the message
        // matches common error keywords.
        // NOTE(review): the severity check is lowercase "error" while stk_logs
        // normalizes to uppercase "ERROR"; the message regex usually catches these
        // lines anyway — confirm Railway's actual severity casing.
        const errorPattern = /error|exception|fail|crash|ECONNREFUSED|timeout|TypeError|ReferenceError|rejected|FATAL/i;
        const errorLogs = logs.filter(l => l.severity === "error" || (l.message && errorPattern.test(l.message)));
        // Deduplicate by first 100 chars — repeated errors collapse into one entry
        // with a count and first/last-seen timestamps.
        const grouped = new Map();
        for (const log of errorLogs) {
            const key = log.message.slice(0, 100);
            const existing = grouped.get(key);
            if (existing) {
                existing.count++;
                existing.lastSeen = log.timestamp;
            }
            else {
                grouped.set(key, { message: log.message, count: 1, firstSeen: log.timestamp, lastSeen: log.timestamp });
            }
        }
        // Diagnose each unique error against the brain; keep the top 3 matches
        // per error, truncating stored content to 400 chars for the response.
        const errors = [];
        let matchedErrors = 0;
        for (const error of grouped.values()) {
            const matches = brainDiagnose(error.message);
            const solutions = matches.slice(0, 3).map(m => ({
                title: m.entry.title,
                content: m.entry.content.slice(0, 400),
                // Score is squashed into [0, 1]; scores ≥ 10 saturate at 1.
                confidence: Math.min(1, m.score / 10),
                source: m.entry.source,
            }));
            if (solutions.length > 0)
                matchedErrors++;
            errors.push({ ...error, solutions });
        }
        return {
            content: [{
                    type: "text",
                    text: JSON.stringify({
                        provider: detected,
                        errors,
                        summary: {
                            totalErrors: errorLogs.length,
                            uniqueErrors: grouped.size,
                            matchedErrors,
                            unmatchedErrors: grouped.size - matchedErrors,
                        },
                    }, null, 2),
                }],
        };
    });
} // end registerOpsTools
@@ -0,0 +1,2 @@
1
import type { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
/** Registers the security tool set (stk_secure) on the given MCP server. */
export declare function registerSecurityTools(server: McpServer): void;
@@ -0,0 +1,25 @@
1
+ import { z } from "zod";
2
+ import { runAllChecks } from "../../services/security.js";
3
/**
 * Registers the security tool set (stk_secure) on the given MCP server.
 */
export function registerSecurityTools(server) {
    // ──────────────────────────────────────────
    // Tool: stk_secure
    // ──────────────────────────────────────────
    server.tool("stk_secure", "Security scan — check for exposed secrets, vulnerable dependencies, missing rate limiting, open CORS, and unprotected routes. Returns findings with severity levels and fix suggestions.", {
        checks: z.array(z.enum(["secrets", "deps", "rate_limit", "cors", "auth"])).optional().describe("Specific checks to run (default: all)"),
    }, async ({ checks }) => {
        const findings = runAllChecks(checks);
        // Tally findings per severity level for the summary.
        const countByLevel = (level) => findings.filter((f) => f.level === level).length;
        const critical = countByLevel("critical");
        const warning = countByLevel("warning");
        const info = countByLevel("info");
        // Overall status reflects the worst severity present.
        const status = critical > 0 ? "CRITICAL" : warning > 0 ? "WARNING" : "CLEAN";
        return {
            content: [{
                    type: "text",
                    text: JSON.stringify({
                        findings,
                        summary: { critical, warning, info, total: findings.length },
                        status,
                    }, null, 2),
                }],
        };
    });
} // end registerSecurityTools
@@ -0,0 +1,2 @@
1
import type { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
/** Common signature for tool-registration entry points: given the MCP server, register tools as a side effect. */
export type ToolRegistrar = (server: McpServer) => void;
@@ -0,0 +1 @@
1
// Empty export marks this file as an ES module with no runtime exports.
export {};
@@ -0,0 +1,13 @@
1
import type { KnowledgeEntry } from "./brain-store.js";
/** Insert one entry into the cloud store; resolves false when cloud sync is not configured or the request fails. */
export declare function cloudInsert(entry: KnowledgeEntry): Promise<boolean>;
/** Counts and errors reported by a sync operation. */
export interface SyncResult {
    pushed: number; // entries uploaded to the cloud
    pulled: number; // entries downloaded into the local store
    errors: string[]; // human-readable failure descriptions (empty on success)
}
/** Push all local entries to cloud */
export declare function pushToCloud(): Promise<SyncResult>;
/** Pull cloud entries to local */
export declare function pullFromCloud(): Promise<SyncResult>;
/** Full sync: push local → cloud, then pull cloud → local */
export declare function syncBrain(): Promise<SyncResult>;
@@ -0,0 +1,131 @@
1
+ import { loadBrainStore, getAllEntries, saveBrainStore } from "./brain-store.js";
2
// ──────────────────────────────────────────
// Cloud sync (Supabase-backed)
// ──────────────────────────────────────────
/**
 * Reads Supabase connection settings from the environment.
 * Returns { url, key } when both vars are set, otherwise null
 * (cloud sync is treated as disabled).
 */
function getCloudConfig() {
    const { SUPABASE_URL: url, SUPABASE_SERVICE_KEY: key } = process.env;
    return url && key ? { url, key } : null;
}
12
/**
 * Inserts a single knowledge entry into the cloud `knowledge` table.
 * Duplicate ids are ignored server-side (Prefer: resolution=ignore-duplicates).
 * Returns false without any network call when cloud sync is not configured;
 * otherwise resolves to whether the request succeeded.
 */
export async function cloudInsert(entry) {
    const cloud = getCloudConfig();
    if (!cloud)
        return false;
    const { id, title, content, category, source, tags, created_at } = entry;
    const res = await fetch(`${cloud.url}/rest/v1/knowledge`, {
        method: "POST",
        headers: {
            apikey: cloud.key,
            Authorization: `Bearer ${cloud.key}`,
            "Content-Type": "application/json",
            Prefer: "resolution=ignore-duplicates",
        },
        // Abort the request rather than hang on an unresponsive endpoint.
        signal: AbortSignal.timeout(15000),
        body: JSON.stringify({ id, title, content, category, source, tags, created_at }),
    });
    return res.ok;
}
37
/**
 * Push all local entries to cloud.
 * Uploads only entries whose ids are not already present remotely, in batches
 * of 50. Returns a SyncResult with `pushed` = entries sent in successful
 * batches and any batch-level errors.
 */
export async function pushToCloud() {
    const cloud = getCloudConfig();
    if (!cloud)
        return { pushed: 0, pulled: 0, errors: ["SUPABASE_URL or SUPABASE_SERVICE_KEY not set"] };
    const store = loadBrainStore();
    const allLocal = getAllEntries(store);
    let pushed = 0;
    const errors = [];
    // Fetch ids already in the cloud so only the delta is inserted.
    // NOTE(review): capped at 10000 rows; past that, duplicates are still
    // suppressed server-side via Prefer: resolution=ignore-duplicates below.
    const existingRes = await fetch(`${cloud.url}/rest/v1/knowledge?select=id&limit=10000`, {
        headers: { apikey: cloud.key, Authorization: `Bearer ${cloud.key}` },
        signal: AbortSignal.timeout(15000),
    });
    // A failed id fetch degrades to "assume nothing exists" rather than aborting.
    const existingData = existingRes.ok ? await existingRes.json() : [];
    const existingIds = new Set(existingData.map((r) => r.id));
    const toInsert = allLocal.filter(e => !existingIds.has(e.id));
    // Insert in fixed-size batches to keep request bodies bounded.
    for (let i = 0; i < toInsert.length; i += 50) {
        const batch = toInsert.slice(i, i + 50);
        const res = await fetch(`${cloud.url}/rest/v1/knowledge`, {
            method: "POST",
            headers: {
                apikey: cloud.key,
                Authorization: `Bearer ${cloud.key}`,
                "Content-Type": "application/json",
                Prefer: "resolution=ignore-duplicates",
            },
            signal: AbortSignal.timeout(15000),
            body: JSON.stringify(batch),
        });
        if (res.ok) {
            // Counts the whole batch as pushed (duplicates ignored server-side).
            pushed += batch.length;
        }
        else {
            const err = await res.text();
            errors.push(`Batch insert failed: ${err}`);
        }
    }
    return { pushed, pulled: 0, errors };
}
76
/**
 * Pull cloud entries to local.
 * Downloads up to 10000 cloud entries (newest first) and merges the ones not
 * already present locally. Entries whose `source` is "project:<name>" are
 * filed under that project's bucket (created on demand); all others go to the
 * global list. Saves the store only when something was actually pulled.
 */
export async function pullFromCloud() {
    const cloud = getCloudConfig();
    if (!cloud)
        return { pushed: 0, pulled: 0, errors: ["SUPABASE_URL or SUPABASE_SERVICE_KEY not set"] };
    const store = loadBrainStore();
    // Local ids are the dedupe key — cloud rows with a known id are skipped.
    const localIds = new Set(getAllEntries(store).map(e => e.id));
    let pulled = 0;
    const errors = [];
    const res = await fetch(`${cloud.url}/rest/v1/knowledge?select=*&limit=10000&order=created_at.desc`, {
        headers: {
            apikey: cloud.key,
            Authorization: `Bearer ${cloud.key}`,
            "Content-Type": "application/json",
        },
        signal: AbortSignal.timeout(15000),
    });
    if (!res.ok) {
        const err = await res.text();
        return { pushed: 0, pulled: 0, errors: [`Cloud fetch failed: ${err}`] };
    }
    const cloudEntries = await res.json();
    for (const entry of cloudEntries) {
        if (localIds.has(entry.id))
            continue;
        // Route by source: "project:<name>" entries go into that project's bucket.
        const projectMatch = entry.source.match(/^project:(.+)$/);
        if (projectMatch) {
            const projName = projectMatch[1];
            if (!store.projects[projName]) {
                // First entry for this project — create an empty bucket.
                // projectPath is unknown for cloud-originated projects, hence "".
                store.projects[projName] = {
                    ingestedAt: entry.created_at,
                    projectPath: "",
                    entries: [],
                };
            }
            store.projects[projName].entries.push(entry);
        }
        else {
            store.global.push(entry);
        }
        pulled++;
    }
    // Avoid rewriting the store file when nothing changed.
    if (pulled > 0)
        saveBrainStore(store);
    return { pushed: 0, pulled, errors };
}
122
/** Full sync: push local → cloud, then pull cloud → local */
export async function syncBrain() {
    // Order matters: local changes go up before remote changes come down.
    const up = await pushToCloud();
    const down = await pullFromCloud();
    return {
        pushed: up.pushed,
        pulled: down.pulled,
        errors: [...up.errors, ...down.errors],
    };
}
@@ -0,0 +1,14 @@
1
import type { KnowledgeEntry } from "./brain-store.js";
// Per-file extractors: each takes the path of one project file plus the owning
// project name and returns zero or more knowledge entries derived from it.
export declare function extractFromClaudeMd(filePath: string, projectName: string): KnowledgeEntry[];
export declare function extractFromPackageJson(filePath: string, projectName: string): KnowledgeEntry[];
export declare function extractFromPrismaSchema(filePath: string, projectName: string): KnowledgeEntry[];
export declare function extractFromDockerfile(filePath: string, projectName: string): KnowledgeEntry[];
export declare function extractFromCIConfig(filePath: string, projectName: string): KnowledgeEntry[];
// NOTE: takes a directory of route files rather than a single file path.
export declare function extractFromRoutes(routeDir: string, projectName: string): KnowledgeEntry[];
export declare function extractFromStkConfig(filePath: string, projectName: string): KnowledgeEntry[];
/** Aggregate result of ingesting one project directory. */
export interface IngestResult {
    projectName: string;
    entries: KnowledgeEntry[]; // all entries produced for the project
    filesScanned: string[]; // files that were examined during ingestion
}
/** Ingests the project at `projectPath` into knowledge entries (synchronous). */
export declare function ingestProject(projectPath: string): IngestResult;