@prajwolkc/stk 0.4.1 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2 @@
1
+ import { Command } from "commander";
2
+ export declare const brainCommand: Command;
@@ -0,0 +1,69 @@
1
+ import { Command } from "commander";
2
+ import chalk from "chalk";
3
+ import ora from "ora";
4
+ import { syncBrain, pushToCloud, pullFromCloud, loadBrainStore, getAllEntries } from "../services/brain.js";
5
+ export const brainCommand = new Command("brain")
6
+ .description("Manage the stk knowledge brain — sync, push, pull across machines")
7
+ .argument("[action]", "push | pull | sync | stats (default: sync)")
8
+ .action(async (action = "sync") => {
9
+ if (action === "stats") {
10
+ const store = loadBrainStore();
11
+ const projects = Object.entries(store.projects);
12
+ const totalEntries = getAllEntries(store).length;
13
+ console.log();
14
+ console.log(chalk.bold(" Brain Stats"));
15
+ console.log(chalk.dim(" ─────────────────────────────────────────"));
16
+ console.log(` Total entries: ${chalk.white(totalEntries)}`);
17
+ console.log(` Global entries: ${chalk.white(store.global.length)}`);
18
+ console.log(` Projects: ${chalk.white(projects.length)}`);
19
+ console.log();
20
+ for (const [name, proj] of projects) {
21
+ console.log(` ${chalk.green("●")} ${chalk.bold(name)} — ${proj.entries.length} entries (${proj.ingestedAt})`);
22
+ }
23
+ if (projects.length === 0) {
24
+ console.log(chalk.dim(" No projects ingested yet. Run: stk ingest"));
25
+ }
26
+ console.log();
27
+ return;
28
+ }
29
+ if (!["push", "pull", "sync"].includes(action)) {
30
+ console.log(chalk.red(` Unknown action: "${action}"`));
31
+ console.log(chalk.dim(" Usage: stk brain [push|pull|sync|stats]"));
32
+ return;
33
+ }
34
+ const spinner = ora(` ${action === "sync" ? "Syncing" : action === "push" ? "Pushing" : "Pulling"} brain knowledge...`).start();
35
+ try {
36
+ let result;
37
+ if (action === "push")
38
+ result = await pushToCloud();
39
+ else if (action === "pull")
40
+ result = await pullFromCloud();
41
+ else
42
+ result = await syncBrain();
43
+ if (result.errors.length > 0) {
44
+ spinner.fail(` Sync completed with errors`);
45
+ for (const err of result.errors) {
46
+ console.log(` ${chalk.red("✗")} ${err}`);
47
+ }
48
+ console.log();
49
+ return;
50
+ }
51
+ spinner.succeed(` Brain ${action} complete`);
52
+ console.log();
53
+ if (result.pushed > 0) {
54
+ console.log(` ${chalk.green("↑")} Pushed ${chalk.white(result.pushed)} entries to cloud`);
55
+ }
56
+ if (result.pulled > 0) {
57
+ console.log(` ${chalk.green("↓")} Pulled ${chalk.white(result.pulled)} entries from cloud`);
58
+ }
59
+ if (result.pushed === 0 && result.pulled === 0) {
60
+ console.log(chalk.dim(" Everything is in sync."));
61
+ }
62
+ console.log();
63
+ }
64
+ catch (err) {
65
+ spinner.fail(" Sync failed");
66
+ console.log(` ${chalk.red(err instanceof Error ? err.message : String(err))}`);
67
+ console.log();
68
+ }
69
+ });
@@ -0,0 +1,2 @@
1
+ import { Command } from "commander";
2
+ export declare const ingestCommand: Command;
@@ -0,0 +1,81 @@
1
+ import { Command } from "commander";
2
+ import chalk from "chalk";
3
+ import { ingestProject, loadBrainStore, saveBrainStore } from "../services/brain.js";
4
+ export const ingestCommand = new Command("ingest")
5
+ .description("Scan project and ingest architecture knowledge into the local brain")
6
+ .option("--force", "re-ingest even if already ingested")
7
+ .option("--stats", "show what's been ingested across all projects")
8
+ .action(async (opts) => {
9
+ if (opts.stats) {
10
+ const store = loadBrainStore();
11
+ const projects = Object.entries(store.projects);
12
+ console.log();
13
+ console.log(chalk.bold(" Brain Stats"));
14
+ console.log(chalk.dim(" ─────────────────────────────────────────"));
15
+ console.log(` Global entries: ${chalk.white(store.global.length)}`);
16
+ console.log(` Projects: ${chalk.white(projects.length)}`);
17
+ console.log();
18
+ for (const [name, proj] of projects) {
19
+ console.log(` ${chalk.green("●")} ${chalk.bold(name)}`);
20
+ console.log(` Entries: ${proj.entries.length}`);
21
+ console.log(` Ingested: ${proj.ingestedAt}`);
22
+ console.log(` Path: ${chalk.dim(proj.projectPath)}`);
23
+ const categories = {};
24
+ for (const e of proj.entries) {
25
+ categories[e.category] = (categories[e.category] || 0) + 1;
26
+ }
27
+ console.log(` Categories: ${Object.entries(categories).map(([k, v]) => `${k}(${v})`).join(", ")}`);
28
+ console.log();
29
+ }
30
+ if (projects.length === 0) {
31
+ console.log(chalk.dim(" No projects ingested yet. Run: stk ingest"));
32
+ console.log();
33
+ }
34
+ return;
35
+ }
36
+ const projectPath = process.cwd();
37
+ const store = loadBrainStore();
38
+ // Check if already ingested
39
+ const { projectName, entries, filesScanned } = ingestProject(projectPath);
40
+ if (store.projects[projectName] && !opts.force) {
41
+ const existing = store.projects[projectName];
42
+ console.log();
43
+ console.log(chalk.yellow(` "${projectName}" already ingested (${existing.entries.length} entries, ${existing.ingestedAt})`));
44
+ console.log(chalk.dim(" Use --force to re-ingest."));
45
+ console.log();
46
+ return;
47
+ }
48
+ if (entries.length === 0) {
49
+ console.log();
50
+ console.log(chalk.yellow(" No knowledge extracted. Make sure you're in a project directory."));
51
+ console.log();
52
+ return;
53
+ }
54
+ // Save
55
+ store.projects[projectName] = {
56
+ ingestedAt: new Date().toISOString(),
57
+ projectPath,
58
+ entries,
59
+ };
60
+ saveBrainStore(store);
61
+ console.log();
62
+ console.log(` ${chalk.green("✓")} Ingested ${chalk.bold(projectName)} — ${chalk.white(entries.length)} knowledge entries`);
63
+ console.log();
64
+ console.log(chalk.bold(" Files scanned:"));
65
+ for (const file of filesScanned) {
66
+ console.log(` ${chalk.green("●")} ${file}`);
67
+ }
68
+ console.log();
69
+ const categories = {};
70
+ for (const e of entries) {
71
+ categories[e.category] = (categories[e.category] || 0) + 1;
72
+ }
73
+ console.log(chalk.bold(" Knowledge by category:"));
74
+ for (const [cat, count] of Object.entries(categories).sort(([, a], [, b]) => b - a)) {
75
+ console.log(` ${chalk.dim("●")} ${cat}: ${count}`);
76
+ }
77
+ console.log();
78
+ console.log(chalk.dim(` Stored at: ~/.stk/brain.json`));
79
+ console.log(chalk.dim(` Brain tools (stk_brain_search, stk_brain_learn, etc.) now work locally.`));
80
+ console.log();
81
+ });
@@ -1,15 +1,165 @@
1
1
  import { Command } from "commander";
2
2
  import chalk from "chalk";
3
- import { writeFileSync, existsSync } from "fs";
3
+ import ora from "ora";
4
+ import { writeFileSync, readFileSync, existsSync } from "fs";
4
5
  import { basename } from "path";
5
6
  import { CONFIG_FILE, KNOWN_SERVICES } from "../lib/config.js";
6
7
  import { templates, listTemplates } from "../templates/index.js";
8
+ import { ingestProject, loadBrainStore, saveBrainStore, pullFromCloud, pushToCloud } from "../services/brain.js";
7
9
  const DEPLOY_PROVIDERS = ["railway", "vercel", "fly", "render", "aws"];
10
+ /** Scan project files to detect what stack is being used */
11
+ function detectStackFromFiles() {
12
+ const detected = {};
13
+ const stack = [];
14
+ let projectType = "unknown";
15
+ // 1. Check env vars (existing behavior)
16
+ const envChecks = {
17
+ railway: ["RAILWAY_API_TOKEN"],
18
+ vercel: ["VERCEL_TOKEN"],
19
+ fly: ["FLY_API_TOKEN"],
20
+ render: ["RENDER_API_KEY"],
21
+ aws: ["AWS_ACCESS_KEY_ID"],
22
+ database: ["DATABASE_URL"],
23
+ mongodb: ["MONGODB_URL", "MONGO_URL"],
24
+ redis: ["REDIS_URL"],
25
+ supabase: ["SUPABASE_URL"],
26
+ r2: ["CLOUDFLARE_ACCOUNT_ID"],
27
+ stripe: ["STRIPE_SECRET_KEY"],
28
+ };
29
+ for (const [service, vars] of Object.entries(envChecks)) {
30
+ if (vars.some((v) => process.env[v])) {
31
+ detected[service] = true;
32
+ }
33
+ }
34
+ // 2. Scan package.json files for dependencies
35
+ const pkgPaths = [
36
+ "package.json",
37
+ "node-backend/package.json",
38
+ "backend/package.json",
39
+ "server/package.json",
40
+ "api/package.json",
41
+ "frontend/package.json",
42
+ "web/package.json",
43
+ "client/package.json",
44
+ ];
45
+ const allDeps = {};
46
+ for (const p of pkgPaths) {
47
+ if (existsSync(p)) {
48
+ try {
49
+ const pkg = JSON.parse(readFileSync(p, "utf-8"));
50
+ Object.assign(allDeps, pkg.dependencies ?? {}, pkg.devDependencies ?? {});
51
+ }
52
+ catch { /* skip */ }
53
+ }
54
+ }
55
+ // Detect framework
56
+ if (allDeps["next"]) {
57
+ stack.push("Next.js");
58
+ projectType = "fullstack";
59
+ }
60
+ else if (allDeps["react"]) {
61
+ stack.push("React");
62
+ }
63
+ if (allDeps["vue"]) {
64
+ stack.push("Vue");
65
+ }
66
+ if (allDeps["angular"] || allDeps["@angular/core"]) {
67
+ stack.push("Angular");
68
+ }
69
+ if (allDeps["express"]) {
70
+ stack.push("Express");
71
+ projectType = projectType === "unknown" ? "api" : projectType;
72
+ }
73
+ if (allDeps["fastify"]) {
74
+ stack.push("Fastify");
75
+ projectType = projectType === "unknown" ? "api" : projectType;
76
+ }
77
+ if (allDeps["hono"]) {
78
+ stack.push("Hono");
79
+ }
80
+ // Detect ORM/DB
81
+ if (allDeps["prisma"] || allDeps["@prisma/client"]) {
82
+ stack.push("Prisma");
83
+ detected.database = true;
84
+ }
85
+ if (allDeps["mongoose"]) {
86
+ stack.push("Mongoose");
87
+ detected.mongodb = true;
88
+ }
89
+ if (allDeps["typeorm"]) {
90
+ stack.push("TypeORM");
91
+ detected.database = true;
92
+ }
93
+ if (allDeps["drizzle-orm"]) {
94
+ stack.push("Drizzle");
95
+ detected.database = true;
96
+ }
97
+ if (allDeps["@supabase/supabase-js"]) {
98
+ stack.push("Supabase SDK");
99
+ detected.supabase = true;
100
+ }
101
+ // Detect billing/payments
102
+ if (allDeps["stripe"] || allDeps["@stripe/stripe-js"]) {
103
+ stack.push("Stripe");
104
+ detected.stripe = true;
105
+ }
106
+ // Detect queue/cache
107
+ if (allDeps["bullmq"] || allDeps["bull"] || allDeps["ioredis"]) {
108
+ stack.push("Redis/BullMQ");
109
+ detected.redis = true;
110
+ }
111
+ // Detect auth
112
+ if (allDeps["jsonwebtoken"])
113
+ stack.push("JWT Auth");
114
+ if (allDeps["passport"])
115
+ stack.push("Passport.js");
116
+ if (allDeps["next-auth"] || allDeps["@auth/core"])
117
+ stack.push("Auth.js");
118
+ // 3. Scan for config files that indicate deploy providers
119
+ if (existsSync("railway.json") || existsSync("railway.toml"))
120
+ detected.railway = true;
121
+ if (existsSync("vercel.json") || existsSync(".vercel"))
122
+ detected.vercel = true;
123
+ if (existsSync("fly.toml"))
124
+ detected.fly = true;
125
+ if (existsSync("render.yaml"))
126
+ detected.render = true;
127
+ if (existsSync("Dockerfile"))
128
+ stack.push("Docker");
129
+ // 4. Scan for Prisma schema
130
+ const prismaPaths = [
131
+ "prisma/schema.prisma",
132
+ "node-backend/prisma/schema.prisma",
133
+ "backend/prisma/schema.prisma",
134
+ "src/prisma/schema.prisma",
135
+ ];
136
+ for (const p of prismaPaths) {
137
+ if (existsSync(p)) {
138
+ detected.database = true;
139
+ if (!stack.includes("Prisma"))
140
+ stack.push("Prisma");
141
+ break;
142
+ }
143
+ }
144
+ // 5. Detect monorepo vs single
145
+ const hasMultiplePkgs = pkgPaths.filter(p => existsSync(p)).length > 1;
146
+ if (hasMultiplePkgs)
147
+ projectType = "fullstack";
148
+ // 6. Detect if frontend-only (static)
149
+ if (stack.length > 0 && !allDeps["express"] && !allDeps["fastify"] && !allDeps["hono"] && !allDeps["next"]) {
150
+ if (allDeps["react"] || allDeps["vue"] || allDeps["angular"]) {
151
+ if (projectType === "unknown")
152
+ projectType = "static";
153
+ }
154
+ }
155
+ return { detected, stack, projectType };
156
+ }
8
157
  export const initCommand = new Command("init")
9
- .description("Initialize stk config for the current project")
158
+ .description("Initialize stk config auto-detects stack, ingests knowledge, syncs brain")
10
159
  .option("--force", "overwrite existing config")
11
160
  .option("-t, --template <name>", `use a starter template (${listTemplates().join(", ")})`)
12
161
  .option("--list-templates", "show available templates")
162
+ .option("--skip-brain", "skip brain ingest and sync")
13
163
  .action(async (opts) => {
14
164
  // List templates
15
165
  if (opts.listTemplates) {
@@ -52,31 +202,24 @@ export const initCommand = new Command("init")
52
202
  console.log(` ${icon} ${enabled ? chalk.white(name) : chalk.dim(name)}`);
53
203
  }
54
204
  console.log();
55
- console.log(chalk.dim(` Deploy branch: ${config.deploy?.branch ?? "main"}`));
56
- console.log(chalk.dim(` Deploy providers: ${config.deploy?.providers?.join(", ") || "none"}`));
57
- console.log();
58
- console.log(chalk.dim(` Set your env vars, then run ${chalk.white("stk health")} to verify.`));
59
- console.log();
205
+ // Run brain steps even for template init
206
+ if (!opts.skipBrain) {
207
+ await runBrainSteps(projectName);
208
+ }
60
209
  return;
61
210
  }
62
- // Auto-detect init
63
- const detected = {};
64
- const envChecks = {
65
- railway: ["RAILWAY_API_TOKEN"],
66
- vercel: ["VERCEL_TOKEN"],
67
- fly: ["FLY_API_TOKEN"],
68
- render: ["RENDER_API_KEY"],
69
- aws: ["AWS_ACCESS_KEY_ID"],
70
- database: ["DATABASE_URL"],
71
- mongodb: ["MONGODB_URL", "MONGO_URL"],
72
- redis: ["REDIS_URL"],
73
- supabase: ["SUPABASE_URL"],
74
- r2: ["CLOUDFLARE_ACCOUNT_ID"],
75
- stripe: ["STRIPE_SECRET_KEY"],
76
- };
77
- for (const [service, vars] of Object.entries(envChecks)) {
78
- if (vars.some((v) => process.env[v])) {
79
- detected[service] = true;
211
+ // ─── Smart Auto-detect Init ───
212
+ console.log();
213
+ const detectSpinner = ora(" Scanning project...").start();
214
+ const { detected, stack, projectType } = detectStackFromFiles();
215
+ detectSpinner.succeed(" Project scanned");
216
+ // Show detected stack
217
+ if (stack.length > 0) {
218
+ console.log();
219
+ console.log(chalk.bold(" Detected stack:"));
220
+ console.log(` ${chalk.cyan(stack.join(" + "))}`);
221
+ if (projectType !== "unknown") {
222
+ console.log(` ${chalk.dim(`Project type: ${projectType}`)}`);
80
223
  }
81
224
  }
82
225
  const config = {
@@ -94,18 +237,91 @@ export const initCommand = new Command("init")
94
237
  console.log();
95
238
  console.log(` ${chalk.green("✓")} Created ${chalk.bold(CONFIG_FILE)}`);
96
239
  console.log();
97
- console.log(chalk.bold(" Detected services:"));
240
+ console.log(chalk.bold(" Services:"));
98
241
  const serviceNames = Object.entries(config.services);
99
242
  for (const [name, enabled] of serviceNames) {
100
243
  const icon = enabled ? chalk.green("✓") : chalk.dim("○");
101
244
  const label = enabled
102
245
  ? chalk.white(name)
103
- : chalk.dim(`${name} (not detected — enable in config)`);
246
+ : chalk.dim(name);
104
247
  console.log(` ${icon} ${label}`);
105
248
  }
249
+ // ─── Brain Steps ───
250
+ if (!opts.skipBrain) {
251
+ await runBrainSteps(projectName);
252
+ }
106
253
  console.log();
107
- console.log(chalk.dim(` Edit ${CONFIG_FILE} to enable/disable services or add config.`));
108
- console.log(chalk.dim(` Or try: ${chalk.white("stk init --template saas")} for a pre-configured stack.`));
109
- console.log(chalk.dim(` Then run ${chalk.white("stk health")} to verify.`));
254
+ console.log(chalk.dim(` Run ${chalk.white("stk health")} to verify services.`));
110
255
  console.log();
111
256
  });
257
+ /** Ingest project, pull cloud knowledge, push new knowledge */
258
+ async function runBrainSteps(projectName) {
259
+ console.log();
260
+ console.log(chalk.bold(" Brain setup:"));
261
+ // Step 1: Ingest project
262
+ const ingestSpinner = ora(" Scanning project architecture...").start();
263
+ try {
264
+ const { entries, filesScanned } = ingestProject(process.cwd());
265
+ if (entries.length > 0) {
266
+ const store = loadBrainStore();
267
+ store.projects[projectName] = {
268
+ ingestedAt: new Date().toISOString(),
269
+ projectPath: process.cwd(),
270
+ entries,
271
+ };
272
+ saveBrainStore(store);
273
+ ingestSpinner.succeed(` Ingested ${chalk.white(entries.length)} knowledge entries from ${filesScanned.length} files`);
274
+ }
275
+ else {
276
+ ingestSpinner.warn(" No project files found to ingest");
277
+ }
278
+ }
279
+ catch {
280
+ ingestSpinner.warn(" Ingest skipped (no recognizable files)");
281
+ }
282
+ // Step 2: Pull cloud brain (learn from other projects)
283
+ const pullSpinner = ora(" Pulling knowledge from cloud brain...").start();
284
+ try {
285
+ const pullResult = await pullFromCloud();
286
+ if (pullResult.errors.length > 0) {
287
+ pullSpinner.warn(" Cloud brain not available (set SUPABASE_URL + SUPABASE_SERVICE_KEY to enable)");
288
+ }
289
+ else if (pullResult.pulled > 0) {
290
+ pullSpinner.succeed(` Pulled ${chalk.white(pullResult.pulled)} entries from cloud (learned from other projects)`);
291
+ }
292
+ else {
293
+ pullSpinner.succeed(" Cloud brain in sync");
294
+ }
295
+ }
296
+ catch {
297
+ pullSpinner.warn(" Cloud sync skipped (no connection)");
298
+ }
299
+ // Step 3: Push new knowledge to cloud
300
+ const pushSpinner = ora(" Sharing knowledge to cloud brain...").start();
301
+ try {
302
+ const pushResult = await pushToCloud();
303
+ if (pushResult.errors.length > 0) {
304
+ pushSpinner.warn(" Push skipped");
305
+ }
306
+ else if (pushResult.pushed > 0) {
307
+ pushSpinner.succeed(` Pushed ${chalk.white(pushResult.pushed)} entries to cloud`);
308
+ }
309
+ else {
310
+ pushSpinner.succeed(" All knowledge already shared");
311
+ }
312
+ }
313
+ catch {
314
+ pushSpinner.warn(" Push skipped (no connection)");
315
+ }
316
+ // Show what we know from other projects
317
+ const store = loadBrainStore();
318
+ const otherProjects = Object.keys(store.projects).filter(p => p !== projectName);
319
+ if (otherProjects.length > 0) {
320
+ console.log();
321
+ console.log(chalk.bold(" Knowledge available from other projects:"));
322
+ for (const proj of otherProjects) {
323
+ const p = store.projects[proj];
324
+ console.log(` ${chalk.green("●")} ${chalk.white(proj)} — ${p.entries.length} entries`);
325
+ }
326
+ }
327
+ }
package/dist/index.js CHANGED
@@ -8,6 +8,8 @@ import { envCommand } from "./commands/env.js";
8
8
  import { logsCommand } from "./commands/logs.js";
9
9
  import { todoCommand } from "./commands/todo.js";
10
10
  import { doctorCommand } from "./commands/doctor.js";
11
+ import { ingestCommand } from "./commands/ingest.js";
12
+ import { brainCommand } from "./commands/brain.js";
11
13
  const program = new Command();
12
14
  program
13
15
  .name("stk")
@@ -21,4 +23,6 @@ program.addCommand(envCommand);
21
23
  program.addCommand(logsCommand);
22
24
  program.addCommand(todoCommand);
23
25
  program.addCommand(doctorCommand);
26
+ program.addCommand(ingestCommand);
27
+ program.addCommand(brainCommand);
24
28
  program.parse();
@@ -11,6 +11,7 @@ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"
11
11
  import { z } from "zod";
12
12
  import { loadConfig, enabledServices } from "../lib/config.js";
13
13
  import { getChecker, allCheckerNames, loadPluginCheckers } from "../services/registry.js";
14
+ import { getLocalBrainClient, ingestProject, loadBrainStore, saveBrainStore, syncBrain, pushToCloud, pullFromCloud } from "../services/brain.js";
14
15
  import { execSync } from "child_process";
15
16
  const server = new McpServer({
16
17
  name: "stk",
@@ -1012,40 +1013,7 @@ server.tool("stk_cost", "Track costs across your stack: Stripe fees, Vercel usag
1012
1013
  // Brain: Supabase Knowledge Base Client
1013
1014
  // ──────────────────────────────────────────
1014
1015
  function getBrainClient() {
1015
- const url = process.env.SUPABASE_URL;
1016
- const key = process.env.SUPABASE_SERVICE_KEY;
1017
- if (!url || !key)
1018
- return null;
1019
- return {
1020
- async query(table, params = {}) {
1021
- const searchParams = new URLSearchParams(params);
1022
- const res = await fetch(`${url}/rest/v1/${table}?${searchParams}`, {
1023
- headers: {
1024
- apikey: key,
1025
- Authorization: `Bearer ${key}`,
1026
- "Content-Type": "application/json",
1027
- Prefer: "count=exact",
1028
- },
1029
- });
1030
- const count = res.headers.get("content-range")?.split("/")[1] ?? null;
1031
- const data = await res.json();
1032
- return { data, count, ok: res.ok };
1033
- },
1034
- async insert(table, row) {
1035
- const res = await fetch(`${url}/rest/v1/${table}`, {
1036
- method: "POST",
1037
- headers: {
1038
- apikey: key,
1039
- Authorization: `Bearer ${key}`,
1040
- "Content-Type": "application/json",
1041
- Prefer: "return=representation",
1042
- },
1043
- body: JSON.stringify(row),
1044
- });
1045
- const data = await res.json();
1046
- return { data, ok: res.ok };
1047
- },
1048
- };
1016
+ return getLocalBrainClient();
1049
1017
  }
1050
1018
  // ──────────────────────────────────────────
1051
1019
  // Tool: stk_brain_search
@@ -1055,8 +1023,6 @@ server.tool("stk_brain_search", "Search the knowledge base for SaaS patterns, be
1055
1023
  category: z.string().optional().describe("Filter: architecture, auth, payments, database, api, deployment, testing, performance, security, ml, realtime, general"),
1056
1024
  }, async ({ query, category }) => {
1057
1025
  const brain = getBrainClient();
1058
- if (!brain)
1059
- return { content: [{ type: "text", text: JSON.stringify({ error: "SUPABASE_URL or SUPABASE_SERVICE_KEY not set" }) }] };
1060
1026
  // Try ilike search on content and title
1061
1027
  const words = query.split(" ").filter(w => w.length > 2);
1062
1028
  const searchWord = words[0] ?? query;
@@ -1088,8 +1054,6 @@ server.tool("stk_brain_patterns", "Get best practice patterns for a specific fea
1088
1054
  feature: z.string().describe("The feature or pattern (e.g., 'authentication', 'webhooks', 'caching', 'model serving', 'fine-tuning')"),
1089
1055
  }, async ({ feature }) => {
1090
1056
  const brain = getBrainClient();
1091
- if (!brain)
1092
- return { content: [{ type: "text", text: JSON.stringify({ error: "SUPABASE_URL or SUPABASE_SERVICE_KEY not set" }) }] };
1093
1057
  const { data, ok } = await brain.query("knowledge", {
1094
1058
  or: `(title.ilike.%${feature}%,content.ilike.%${feature}%)`,
1095
1059
  limit: "15",
@@ -1123,8 +1087,6 @@ server.tool("stk_brain_stack", "Get recommendations specific to YOUR stack (Supa
1123
1087
  question: z.string().describe("What you want to build or solve (e.g., 'add user auth', 'implement webhooks', 'optimize queries')"),
1124
1088
  }, async ({ question }) => {
1125
1089
  const brain = getBrainClient();
1126
- if (!brain)
1127
- return { content: [{ type: "text", text: JSON.stringify({ error: "SUPABASE_URL or SUPABASE_SERVICE_KEY not set" }) }] };
1128
1090
  const words = question.split(" ").filter(w => w.length > 3);
1129
1091
  const searchWord = words[0] ?? question;
1130
1092
  const { data, ok } = await brain.query("knowledge", {
@@ -1156,8 +1118,6 @@ server.tool("stk_brain_learn", "Save new knowledge to the brain. Use this to rem
1156
1118
  tags: z.array(z.string()).optional().describe("Tags for searchability"),
1157
1119
  }, async ({ title, content, source, category, tags }) => {
1158
1120
  const brain = getBrainClient();
1159
- if (!brain)
1160
- return { content: [{ type: "text", text: JSON.stringify({ error: "SUPABASE_URL or SUPABASE_SERVICE_KEY not set" }) }] };
1161
1121
  const { data, ok } = await brain.insert("knowledge", {
1162
1122
  title,
1163
1123
  content,
@@ -1180,8 +1140,6 @@ server.tool("stk_brain_learn", "Save new knowledge to the brain. Use this to rem
1180
1140
  // ──────────────────────────────────────────
1181
1141
  server.tool("stk_brain_stats", "Check what the brain knows — total knowledge entries, categories, sources, and coverage.", {}, async () => {
1182
1142
  const brain = getBrainClient();
1183
- if (!brain)
1184
- return { content: [{ type: "text", text: JSON.stringify({ error: "SUPABASE_URL or SUPABASE_SERVICE_KEY not set" }) }] };
1185
1143
  const { data, count } = await brain.query("knowledge", { select: "category,source", limit: "1000" });
1186
1144
  const categories = {};
1187
1145
  const sources = {};
@@ -1205,6 +1163,87 @@ server.tool("stk_brain_stats", "Check what the brain knows — total knowledge e
1205
1163
  };
1206
1164
  });
1207
1165
  // ──────────────────────────────────────────
1166
+ // Tool: stk_brain_ingest
1167
+ // ──────────────────────────────────────────
1168
+ server.tool("stk_brain_ingest", "Scan the current project and ingest architecture knowledge into the local brain (~/.stk/brain.json). Automatically reads CLAUDE.md, package.json, Prisma schema, Dockerfile, CI config, and route files. Run this when setting up stk in a new project or after major changes.", {
1169
+ force: z.boolean().optional().default(false).describe("Re-ingest even if already ingested"),
1170
+ }, async ({ force }) => {
1171
+ const store = loadBrainStore();
1172
+ const { projectName, entries, filesScanned } = ingestProject(process.cwd());
1173
+ if (store.projects[projectName] && !force) {
1174
+ const existing = store.projects[projectName];
1175
+ return {
1176
+ content: [{
1177
+ type: "text",
1178
+ text: JSON.stringify({
1179
+ alreadyIngested: true,
1180
+ projectName,
1181
+ entries: existing.entries.length,
1182
+ ingestedAt: existing.ingestedAt,
1183
+ message: "Already ingested. Use force: true to re-ingest.",
1184
+ }, null, 2),
1185
+ }],
1186
+ };
1187
+ }
1188
+ if (entries.length === 0) {
1189
+ return {
1190
+ content: [{
1191
+ type: "text",
1192
+ text: JSON.stringify({ error: "No knowledge extracted. Make sure you're in a project directory with recognizable files." }),
1193
+ }],
1194
+ };
1195
+ }
1196
+ store.projects[projectName] = {
1197
+ ingestedAt: new Date().toISOString(),
1198
+ projectPath: process.cwd(),
1199
+ entries,
1200
+ };
1201
+ saveBrainStore(store);
1202
+ const categories = {};
1203
+ for (const e of entries) {
1204
+ categories[e.category] = (categories[e.category] || 0) + 1;
1205
+ }
1206
+ return {
1207
+ content: [{
1208
+ type: "text",
1209
+ text: JSON.stringify({
1210
+ ingested: true,
1211
+ projectName,
1212
+ totalEntries: entries.length,
1213
+ filesScanned,
1214
+ categories,
1215
+ storedAt: "~/.stk/brain.json",
1216
+ }, null, 2),
1217
+ }],
1218
+ };
1219
+ });
1220
+ // ──────────────────────────────────────────
1221
+ // Tool: stk_brain_sync
1222
+ // ──────────────────────────────────────────
1223
+ server.tool("stk_brain_sync", "Sync brain knowledge between local (~/.stk/brain.json) and cloud (Supabase). Push shares your knowledge with other developers. Pull downloads knowledge from the cloud. Sync does both.", {
1224
+ action: z.enum(["sync", "push", "pull"]).optional().default("sync").describe("sync: push+pull, push: local→cloud, pull: cloud→local"),
1225
+ }, async ({ action }) => {
1226
+ let result;
1227
+ if (action === "push")
1228
+ result = await pushToCloud();
1229
+ else if (action === "pull")
1230
+ result = await pullFromCloud();
1231
+ else
1232
+ result = await syncBrain();
1233
+ return {
1234
+ content: [{
1235
+ type: "text",
1236
+ text: JSON.stringify({
1237
+ action,
1238
+ pushed: result.pushed,
1239
+ pulled: result.pulled,
1240
+ errors: result.errors.length > 0 ? result.errors : undefined,
1241
+ ok: result.errors.length === 0,
1242
+ }, null, 2),
1243
+ }],
1244
+ };
1245
+ });
1246
+ // ──────────────────────────────────────────
1208
1247
  // Tool: stk_brain_claudemd
1209
1248
  // ──────────────────────────────────────────
1210
1249
  server.tool("stk_brain_claudemd", "Auto-generate a CLAUDE.md file for the current project. Analyzes the tech stack, project structure, services, and brain knowledge to create comprehensive project instructions for Claude Code.", {
@@ -0,0 +1,66 @@
1
+ export interface KnowledgeEntry {
2
+ id: string;
3
+ title: string;
4
+ content: string;
5
+ category: string;
6
+ source: string;
7
+ tags: string[];
8
+ created_at: string;
9
+ }
10
+ interface ProjectBrain {
11
+ ingestedAt: string;
12
+ projectPath: string;
13
+ entries: KnowledgeEntry[];
14
+ }
15
+ interface BrainStore {
16
+ version: 1;
17
+ projects: Record<string, ProjectBrain>;
18
+ global: KnowledgeEntry[];
19
+ }
20
+ export declare function loadBrainStore(): BrainStore;
21
+ export declare function saveBrainStore(store: BrainStore): void;
22
+ /** Get all entries — optionally scoped to a project */
23
+ export declare function getAllEntries(store: BrainStore, projectName?: string): KnowledgeEntry[];
24
+ /** Extract knowledge from CLAUDE.md sections */
25
+ export declare function extractFromClaudeMd(filePath: string, projectName: string): KnowledgeEntry[];
26
+ /** Extract knowledge from package.json */
27
+ export declare function extractFromPackageJson(filePath: string, projectName: string): KnowledgeEntry[];
28
+ /** Extract knowledge from Prisma schema */
29
+ export declare function extractFromPrismaSchema(filePath: string, projectName: string): KnowledgeEntry[];
30
+ /** Extract knowledge from Dockerfile */
31
+ export declare function extractFromDockerfile(filePath: string, projectName: string): KnowledgeEntry[];
32
+ /** Extract knowledge from CI config */
33
+ export declare function extractFromCIConfig(filePath: string, projectName: string): KnowledgeEntry[];
34
+ /** Extract knowledge from route files directory */
35
+ export declare function extractFromRoutes(routeDir: string, projectName: string): KnowledgeEntry[];
36
+ /** Extract knowledge from stk.config.json */
37
+ export declare function extractFromStkConfig(filePath: string, projectName: string): KnowledgeEntry[];
38
+ interface IngestResult {
39
+ projectName: string;
40
+ entries: KnowledgeEntry[];
41
+ filesScanned: string[];
42
+ }
43
+ export declare function ingestProject(projectPath: string): IngestResult;
44
+ export declare function getLocalBrainClient(): {
45
+ query(_table: string, params?: Record<string, string>): Promise<{
46
+ data: KnowledgeEntry[];
47
+ count: number;
48
+ ok: boolean;
49
+ }>;
50
+ insert(_table: string, row: Record<string, unknown>): Promise<{
51
+ data: KnowledgeEntry;
52
+ ok: boolean;
53
+ }>;
54
+ };
55
+ export interface SyncResult {
56
+ pushed: number;
57
+ pulled: number;
58
+ errors: string[];
59
+ }
60
+ /** Push all local entries to cloud */
61
+ export declare function pushToCloud(): Promise<SyncResult>;
62
+ /** Pull cloud entries to local */
63
+ export declare function pullFromCloud(): Promise<SyncResult>;
64
+ /** Full sync: push local → cloud, then pull cloud → local */
65
+ export declare function syncBrain(): Promise<SyncResult>;
66
+ export {};
@@ -0,0 +1,526 @@
1
+ import { readFileSync, writeFileSync, existsSync, mkdirSync, readdirSync } from "fs";
2
+ import { join, resolve, basename } from "path";
3
+ import { homedir } from "os";
4
+ import { randomUUID } from "crypto";
5
+ import { loadConfig } from "../lib/config.js";
6
+ // ──────────────────────────────────────────
7
+ // Storage
8
+ // ──────────────────────────────────────────
9
const STK_DIR = join(homedir(), ".stk");
const BRAIN_PATH = join(STK_DIR, "brain.json");
/** Create ~/.stk on first use so reads/writes of the brain file never hit a missing directory. */
function ensureStkDir() {
    if (existsSync(STK_DIR))
        return;
    mkdirSync(STK_DIR, { recursive: true });
}
15
/**
 * Load the brain store from ~/.stk/brain.json.
 * Returns a fresh empty store when the file is missing or unparseable,
 * so callers never have to handle a read failure.
 */
export function loadBrainStore() {
    ensureStkDir();
    try {
        if (existsSync(BRAIN_PATH)) {
            return JSON.parse(readFileSync(BRAIN_PATH, "utf-8"));
        }
    }
    catch {
        // Corrupt JSON falls through to the empty-store default below.
    }
    return { version: 1, projects: {}, global: [] };
}
28
/** Persist the brain store to ~/.stk/brain.json (pretty-printed, 2-space indent). */
export function saveBrainStore(store) {
    ensureStkDir();
    const serialized = JSON.stringify(store, null, 2);
    writeFileSync(BRAIN_PATH, serialized);
}
32
/**
 * Get all entries — optionally scoped to a project.
 * Global entries are always included first. When `projectName` names a known
 * project only that project's entries follow; otherwise (no name, or unknown
 * name) entries from every project are appended in insertion order.
 */
export function getAllEntries(store, projectName) {
    const scoped = projectName ? store.projects[projectName] : undefined;
    if (scoped) {
        return [...store.global, ...scoped.entries];
    }
    return [...store.global, ...Object.values(store.projects).flatMap(p => p.entries)];
}
45
+ // ──────────────────────────────────────────
46
+ // Extractors
47
+ // ──────────────────────────────────────────
48
/** Build a KnowledgeEntry with a fresh UUID and an ISO-8601 creation timestamp. */
function makeEntry(title, content, category, source, tags) {
    const created_at = new Date().toISOString();
    const id = randomUUID();
    return { id, title, content, category, source, tags, created_at };
}
51
/**
 * Extract knowledge from CLAUDE.md sections.
 * The file is split on `## ` headings; each section with a non-empty heading
 * and at least 20 characters of body becomes one entry. The category is
 * inferred from the first matching keyword in the heading (map order matters);
 * bodies over 2000 chars are truncated.
 */
export function extractFromClaudeMd(filePath, projectName) {
    const source = `project:${projectName}`;
    const categoryMap = {
        architecture: "architecture", commands: "deployment", "key paths": "architecture",
        "code rules": "architecture", "theming": "architecture", "backend patterns": "architecture",
        "auth": "auth", "permissions": "auth", "frontend patterns": "architecture",
        "testing": "testing", "environment": "deployment", "cache": "performance",
        "queue": "architecture", "database": "database", "deploy": "deployment",
        "data": "database", "api": "api", "route": "api", "security": "security",
    };
    // First matching keyword wins; fall back to "general".
    const inferCategory = (headingLower) => {
        for (const [keyword, cat] of Object.entries(categoryMap)) {
            if (headingLower.includes(keyword))
                return cat;
        }
        return "general";
    };
    const raw = readFileSync(filePath, "utf-8");
    const entries = [];
    for (const section of raw.split(/^## /m).slice(1)) {
        const [first, ...rest] = section.split("\n");
        const heading = (first ?? "").trim();
        const body = rest.join("\n").trim();
        if (!heading || body.length < 20)
            continue;
        // Cap very long sections so a single entry stays readable.
        const truncated = body.length > 2000 ? body.slice(0, 2000) + "\n..." : body;
        const tag = heading.toLowerCase().replace(/[^a-z0-9]+/g, "-");
        entries.push(makeEntry(heading, truncated, inferCategory(heading.toLowerCase()), source, [tag]));
    }
    return entries;
}
87
/**
 * Extract knowledge from package.json: the package name, its script names,
 * and dependencies bucketed into stack groups (framework/orm/auth/…).
 * Returns a single "stack" entry, or [] when the file is missing/invalid.
 */
export function extractFromPackageJson(filePath, projectName) {
    try {
        const raw = readFileSync(filePath, "utf-8");
        const pkg = JSON.parse(raw);
        const source = `project:${projectName}`;
        const deps = { ...pkg.dependencies, ...pkg.devDependencies };
        const depNames = Object.keys(deps);
        const groups = {
            framework: [], orm: [], auth: [], billing: [], testing: [], ui: [], build: [], other: [],
        };
        const classify = {
            react: "framework", next: "framework", express: "framework", fastify: "framework", "vue": "framework", angular: "framework",
            prisma: "orm", typeorm: "orm", drizzle: "orm", sequelize: "orm", mongoose: "orm",
            jsonwebtoken: "auth", passport: "auth", "next-auth": "auth", bcrypt: "auth",
            stripe: "billing", "@stripe/stripe-js": "billing",
            jest: "testing", vitest: "testing", mocha: "testing", supertest: "testing",
            tailwindcss: "ui", "@radix-ui": "ui", "framer-motion": "ui", "shadcn": "ui",
            vite: "build", webpack: "build", esbuild: "build", tsx: "build", typescript: "build",
        };
        // Match the most specific (longest) keyword first. Substring matching in
        // insertion order misfiled e.g. "next-auth" as framework because "next"
        // matched before the "next-auth" → auth rule was ever reached.
        const keywords = Object.entries(classify).sort((a, b) => b[0].length - a[0].length);
        for (const dep of depNames) {
            const hit = keywords.find(([key]) => dep.includes(key));
            groups[hit ? hit[1] : "other"].push(dep);
        }
        const parts = [];
        if (pkg.name)
            parts.push(`Package: ${pkg.name}`);
        if (pkg.scripts)
            parts.push(`Scripts: ${Object.keys(pkg.scripts).join(", ")}`);
        for (const [group, members] of Object.entries(groups)) {
            // "other" is deliberately omitted from the summary to keep it focused.
            if (members.length > 0 && group !== "other")
                parts.push(`${group}: ${members.join(", ")}`);
        }
        const label = filePath.includes("node-backend") ? "Backend" :
            filePath.includes("frontend") ? "Frontend" : "Root";
        return [makeEntry(`${label} Dependencies & Scripts`, parts.join("\n"), "stack", source, ["dependencies", label.toLowerCase(), ...depNames.slice(0, 10)])];
    }
    catch {
        // Unreadable or invalid package.json contributes nothing.
        return [];
    }
}
136
/**
 * Extract knowledge from a Prisma schema: model/enum names, relation count,
 * and heuristic flags for multi-tenancy, soft deletes and auto timestamps.
 * Returns [] when the file cannot be read.
 */
export function extractFromPrismaSchema(filePath, projectName) {
    try {
        const content = readFileSync(filePath, "utf-8");
        const source = `project:${projectName}`;
        const modelNames = (content.match(/^model \w+/gm) ?? []).map(m => m.replace("model ", ""));
        const enumNames = (content.match(/^enum \w+/gm) ?? []).map(e => e.replace("enum ", ""));
        const relationCount = content.match(/@relation/g)?.length ?? 0;
        const parts = [`${modelNames.length} models: ${modelNames.join(", ")}`];
        if (enumNames.length) {
            parts.push(`${enumNames.length} enums: ${enumNames.join(", ")}`);
        }
        // Presence of these identifiers anywhere in the schema is treated as a signal.
        if (content.includes("organizationId")) {
            parts.push("Multi-tenant: organizationId on entities");
        }
        if (content.includes("deletedAt")) {
            parts.push("Soft deletes: deletedAt field");
        }
        if (content.includes("@updatedAt")) {
            parts.push("Auto timestamps: createdAt/updatedAt");
        }
        if (relationCount) {
            parts.push(`${relationCount} relations defined`);
        }
        return [makeEntry("Database Schema Overview", parts.join("\n"), "database", source, ["prisma", "schema", "database", ...modelNames.slice(0, 15)])];
    }
    catch {
        return [];
    }
}
168
/**
 * Extract knowledge from a Dockerfile: base images, multi-stage detection,
 * healthcheck/non-root/tini hints and exposed ports.
 * Returns [] when nothing is detected or the file cannot be read.
 */
export function extractFromDockerfile(filePath, projectName) {
    try {
        const content = readFileSync(filePath, "utf-8");
        const source = `project:${projectName}`;
        const parts = [];
        // Strip with the same \s+ pattern used to match — the previous literal
        // replace("FROM ", "") left residue when tabs/multiple spaces were used.
        const baseImages = (content.match(/^FROM\s+\S+/gm) ?? []).map(b => b.replace(/^FROM\s+/, ""));
        if (baseImages.length)
            parts.push(`Base images: ${baseImages.join(" → ")}`);
        if (baseImages.length > 1)
            parts.push("Multi-stage build");
        if (content.includes("HEALTHCHECK"))
            parts.push("Has healthcheck");
        if (content.includes("USER") && !content.includes("USER root"))
            parts.push("Non-root user");
        if (content.includes("tini"))
            parts.push("Uses tini init");
        const ports = content.match(/EXPOSE\s+(\d+)/g);
        if (ports)
            parts.push(`Ports: ${ports.map(p => p.replace(/EXPOSE\s+/, "")).join(", ")}`);
        return parts.length > 0
            ? [makeEntry("Docker Configuration", parts.join("\n"), "deployment", source, ["docker", "container"])]
            : [];
    }
    catch {
        // Missing/unreadable Dockerfile contributes nothing.
        return [];
    }
}
196
/**
 * Extract knowledge from a CI config file: provider (inferred from the path),
 * job names, detected pipeline steps and push/PR triggers.
 * Returns [] when nothing is detected or the file cannot be read.
 */
export function extractFromCIConfig(filePath, projectName) {
    try {
        const content = readFileSync(filePath, "utf-8");
        const source = `project:${projectName}`;
        const parts = [];
        // Provider is inferred from where the config lives, not its contents.
        if (filePath.includes(".github")) {
            parts.push("CI: GitHub Actions");
        }
        else if (filePath.includes(".gitlab")) {
            parts.push("CI: GitLab CI");
        }
        else if (filePath.includes("circle")) {
            parts.push("CI: CircleCI");
        }
        // YAML keys at exactly two-space indent are treated as job names.
        const jobs = content.match(/^\s{2}\w[\w-]*:/gm);
        if (jobs) {
            parts.push(`Jobs: ${jobs.map(j => j.trim().replace(":", "")).join(", ")}`);
        }
        const stepHints = [
            ["tsc", "Type checking step"],
            ["test", "Test step"],
            ["docker", "Docker build step"],
            ["audit", "Security audit step"],
        ];
        for (const [needle, label] of stepHints) {
            if (content.includes(needle))
                parts.push(label);
        }
        const triggers = content.match(/on:\s*\n([\s\S]*?)(?=\n\w)/);
        if (triggers) {
            if (content.includes("push:"))
                parts.push("Triggers on push");
            if (content.includes("pull_request:"))
                parts.push("Triggers on PR");
        }
        return parts.length > 0
            ? [makeEntry("CI/CD Pipeline", parts.join("\n"), "deployment", source, ["ci", "pipeline"])]
            : [];
    }
    catch {
        return [];
    }
}
234
/**
 * Extract knowledge from a directory of route files: one "api" entry listing
 * every .ts/.js file (extension stripped). Returns [] on any read failure.
 */
export function extractFromRoutes(routeDir, projectName) {
    try {
        const ext = /\.(ts|js)$/;
        const routeNames = readdirSync(routeDir)
            .filter(f => ext.test(f))
            .map(f => f.replace(ext, ""));
        const summary = `${routeNames.length} route files: ${routeNames.join(", ")}`;
        return [makeEntry("API Routes", summary, "api", `project:${projectName}`, ["routes", "api", ...routeNames.slice(0, 15)])];
    }
    catch {
        return [];
    }
}
246
/**
 * Extract knowledge from stk.config.json: one "architecture" entry listing
 * the enabled services. A service counts as enabled when set to `true` or to
 * an object without an explicit `enabled: false`. Returns [] when there are
 * no services or the file cannot be parsed.
 */
export function extractFromStkConfig(filePath, projectName) {
    try {
        const config = JSON.parse(readFileSync(filePath, "utf-8"));
        const services = [];
        for (const [name, value] of Object.entries(config.services ?? {})) {
            if (value === true || (typeof value === "object" && value.enabled !== false)) {
                services.push(name);
            }
        }
        if (services.length === 0)
            return [];
        const content = `Configured services: ${services.join(", ")}`;
        return [makeEntry("Infrastructure Services", content, "architecture", `project:${projectName}`, ["infrastructure", ...services])];
    }
    catch {
        return [];
    }
}
263
/**
 * Scan a project directory for well-known files and build knowledge entries.
 * Candidate files are tried in priority order (primary layout first, then
 * common alternatives); duplicate resolved paths are visited once. Exactly
 * one route directory — the first that exists — is scanned.
 * Returns { projectName, entries, filesScanned }.
 */
export function ingestProject(projectPath) {
    const config = loadConfig();
    const projectName = config.name ?? basename(projectPath);
    const entries = [];
    const filesScanned = [];
    // Primary locations first, then alternative layouts — order matters.
    const candidates = [
        { path: "CLAUDE.md", extractor: extractFromClaudeMd },
        { path: "package.json", extractor: extractFromPackageJson },
        { path: "node-backend/package.json", extractor: extractFromPackageJson },
        { path: "frontend/package.json", extractor: extractFromPackageJson },
        { path: "node-backend/prisma/schema.prisma", extractor: extractFromPrismaSchema },
        { path: "Dockerfile", extractor: extractFromDockerfile },
        { path: ".github/workflows/ci.yml", extractor: extractFromCIConfig },
        { path: "stk.config.json", extractor: extractFromStkConfig },
        { path: "prisma/schema.prisma", extractor: extractFromPrismaSchema },
        { path: "src/prisma/schema.prisma", extractor: extractFromPrismaSchema },
        { path: ".github/workflows/main.yml", extractor: extractFromCIConfig },
        { path: ".github/workflows/deploy.yml", extractor: extractFromCIConfig },
        { path: ".gitlab-ci.yml", extractor: extractFromCIConfig },
        { path: "docker-compose.yml", extractor: extractFromDockerfile },
        { path: "backend/package.json", extractor: extractFromPackageJson },
        { path: "server/package.json", extractor: extractFromPackageJson },
        { path: "api/package.json", extractor: extractFromPackageJson },
    ];
    const seen = new Set();
    for (const { path, extractor } of candidates) {
        const fullPath = resolve(projectPath, path);
        if (seen.has(fullPath) || !existsSync(fullPath))
            continue;
        seen.add(fullPath);
        const extracted = extractor(fullPath, projectName);
        if (extracted.length > 0) {
            entries.push(...extracted);
            filesScanned.push(path);
        }
    }
    // Only the first existing route directory is scanned.
    const routeDirs = [
        "node-backend/src/routes",
        "src/routes",
        "backend/src/routes",
        "server/src/routes",
        "api/src/routes",
    ];
    for (const dir of routeDirs) {
        const fullDir = resolve(projectPath, dir);
        if (!existsSync(fullDir))
            continue;
        entries.push(...extractFromRoutes(fullDir, projectName));
        filesScanned.push(dir);
        break;
    }
    return { projectName, entries, filesScanned };
}
321
+ // ──────────────────────────────────────────
322
+ // Local brain client (replaces getBrainClient)
323
+ // ──────────────────────────────────────────
324
// Local, file-backed client that mimics the Supabase PostgREST query/insert
// surface so callers written against the cloud client keep working offline.
// The `_table` arguments are accepted for interface compatibility but ignored.
export function getLocalBrainClient() {
    const store = loadBrainStore();
    const config = loadConfig();
    const projectName = config.name;
    // Auto-ingest if this project hasn't been scanned yet.
    // "my-app" is excluded — presumably the scaffold default name; verify against config template.
    if (projectName && projectName !== "my-app" && !store.projects[projectName]) {
        try {
            const { entries, filesScanned } = ingestProject(process.cwd());
            if (entries.length > 0) {
                store.projects[projectName] = {
                    ingestedAt: new Date().toISOString(),
                    projectPath: process.cwd(),
                    entries,
                };
                saveBrainStore(store);
            }
        }
        catch {
            // Ingest failure shouldn't block brain tools
        }
    }
    return {
        // Query entries using a subset of PostgREST-style params:
        // `or` (ilike.%term% search over title/content/tags), `category`
        // (eq.<cat>), `order` (field name, string compare asc), `limit`.
        async query(_table, params = {}) {
            // Reload on every call so concurrent writes are visible.
            const currentStore = loadBrainStore();
            let entries = getAllEntries(currentStore);
            // Handle ilike search (Supabase PostgREST style)
            if (params.or) {
                const matches = params.or.match(/ilike\.%(.+?)%/g);
                if (matches) {
                    // Note: replace("%", "") only strips the first "%"; the lazy
                    // (.+?) match means the trailing "%" is the one left to strip.
                    const searchTerms = matches.map(m => m.replace("ilike.%", "").replace("%", "").toLowerCase());
                    // An entry matches when ANY term hits title, content or a tag.
                    entries = entries.filter(e => searchTerms.some(term => e.title.toLowerCase().includes(term) ||
                        e.content.toLowerCase().includes(term) ||
                        e.tags.some(t => t.toLowerCase().includes(term))));
                }
            }
            // Handle category filter
            if (params.category) {
                const cat = params.category.replace("eq.", "");
                entries = entries.filter(e => e.category === cat);
            }
            // Handle select (for stats — just return all fields)
            // Handle order (best-effort)
            if (params.order) {
                const field = params.order;
                // Lexicographic compare on the stringified field; missing fields sort as "".
                entries.sort((a, b) => String(a[field] ?? "").localeCompare(String(b[field] ?? "")));
            }
            // NOTE(review): a non-numeric limit makes parseInt return NaN and
            // slice(0, NaN) yields [] — callers would see zero results; confirm intended.
            const limit = parseInt(params.limit ?? "10");
            const data = entries.slice(0, limit);
            // `count` is the total AFTER filtering but BEFORE the limit is applied.
            return { data, count: entries.length, ok: true };
        },
        // Insert one entry into the global bucket, normalizing loose input
        // (missing fields get defaults; tags coerced to a string array).
        async insert(_table, row) {
            const currentStore = loadBrainStore();
            const entry = {
                id: randomUUID(),
                title: String(row.title ?? ""),
                content: String(row.content ?? ""),
                category: String(row.category ?? "general"),
                source: String(row.source ?? "manual"),
                tags: Array.isArray(row.tags) ? row.tags.map(String) : [],
                created_at: String(row.created_at ?? new Date().toISOString()),
            };
            currentStore.global.push(entry);
            saveBrainStore(currentStore);
            // Also push to cloud if configured
            try {
                await cloudInsert(entry);
            }
            catch { /* cloud sync is best-effort */ }
            return { data: entry, ok: true };
        },
    };
}
396
+ // ──────────────────────────────────────────
397
+ // Cloud sync (Supabase-backed)
398
+ // ──────────────────────────────────────────
399
/**
 * Read Supabase credentials from the environment.
 * Returns { url, key } when both SUPABASE_URL and SUPABASE_SERVICE_KEY are
 * set (non-empty), otherwise null — meaning cloud sync is not configured.
 */
function getCloudConfig() {
    const { SUPABASE_URL: url, SUPABASE_SERVICE_KEY: key } = process.env;
    return url && key ? { url, key } : null;
}
406
/**
 * Insert one entry into the cloud `knowledge` table via PostgREST.
 * Duplicate ids are silently ignored server-side (Prefer: ignore-duplicates).
 * Returns false when no cloud config is present, otherwise the HTTP ok flag.
 */
async function cloudInsert(entry) {
    const cloud = getCloudConfig();
    if (!cloud)
        return false;
    const { id, title, content, category, source, tags, created_at } = entry;
    const res = await fetch(`${cloud.url}/rest/v1/knowledge`, {
        method: "POST",
        headers: {
            apikey: cloud.key,
            Authorization: `Bearer ${cloud.key}`,
            "Content-Type": "application/json",
            Prefer: "resolution=ignore-duplicates",
        },
        body: JSON.stringify({ id, title, content, category, source, tags, created_at }),
    });
    return res.ok;
}
430
/** Push all local entries to cloud.
 * Fetches existing cloud ids first, then inserts only the missing entries in
 * batches of 50. Returns { pushed, pulled: 0, errors }; never throws on a
 * missing config (reported via `errors` instead).
 */
export async function pushToCloud() {
    const cloud = getCloudConfig();
    if (!cloud)
        return { pushed: 0, pulled: 0, errors: ["SUPABASE_URL or SUPABASE_SERVICE_KEY not set"] };
    const store = loadBrainStore();
    const allLocal = getAllEntries(store);
    let pushed = 0;
    const errors = [];
    // Get existing cloud IDs to avoid duplicates
    // NOTE(review): hard 10000-row cap — entries beyond it would be re-pushed
    // (harmless given ignore-duplicates, but confirm once the table grows).
    const existingRes = await fetch(`${cloud.url}/rest/v1/knowledge?select=id&limit=10000`, {
        headers: { apikey: cloud.key, Authorization: `Bearer ${cloud.key}` },
    });
    // A failed id-fetch degrades to "cloud is empty"; duplicates are then
    // filtered server-side by the Prefer header below.
    const existingData = existingRes.ok ? await existingRes.json() : [];
    const existingIds = new Set(existingData.map((r) => r.id));
    // Push entries that don't exist in cloud
    const toInsert = allLocal.filter(e => !existingIds.has(e.id));
    // Batch insert in chunks of 50
    for (let i = 0; i < toInsert.length; i += 50) {
        const batch = toInsert.slice(i, i + 50);
        const res = await fetch(`${cloud.url}/rest/v1/knowledge`, {
            method: "POST",
            headers: {
                apikey: cloud.key,
                Authorization: `Bearer ${cloud.key}`,
                "Content-Type": "application/json",
                Prefer: "resolution=ignore-duplicates",
            },
            body: JSON.stringify(batch),
        });
        if (res.ok) {
            // Counts the whole batch as pushed; ignored duplicates are not distinguished.
            pushed += batch.length;
        }
        else {
            const err = await res.text();
            errors.push(`Batch insert failed: ${err}`);
        }
    }
    return { pushed, pulled: 0, errors };
}
470
/** Pull cloud entries to local.
 * Downloads up to 10000 cloud entries and merges those with unseen ids into
 * the local store: entries whose `source` is "project:<name>" go into that
 * project's bucket (creating it if needed), everything else into `global`.
 * The store is saved only when something was actually pulled.
 */
export async function pullFromCloud() {
    const cloud = getCloudConfig();
    if (!cloud)
        return { pushed: 0, pulled: 0, errors: ["SUPABASE_URL or SUPABASE_SERVICE_KEY not set"] };
    const store = loadBrainStore();
    const localIds = new Set(getAllEntries(store).map(e => e.id));
    let pulled = 0;
    const errors = [];
    // Fetch all cloud entries
    // NOTE(review): hard 10000-row cap, newest first — older rows beyond the
    // cap are never pulled; confirm acceptable as the table grows.
    const res = await fetch(`${cloud.url}/rest/v1/knowledge?select=*&limit=10000&order=created_at.desc`, {
        headers: {
            apikey: cloud.key,
            Authorization: `Bearer ${cloud.key}`,
            "Content-Type": "application/json",
        },
    });
    if (!res.ok) {
        const err = await res.text();
        return { pushed: 0, pulled: 0, errors: [`Cloud fetch failed: ${err}`] };
    }
    const cloudEntries = await res.json();
    for (const entry of cloudEntries) {
        // Skip anything we already have locally (id-based dedupe).
        if (localIds.has(entry.id))
            continue;
        // Determine where to put it — if source matches a project, add to that project
        const projectMatch = entry.source.match(/^project:(.+)$/);
        if (projectMatch) {
            const projName = projectMatch[1];
            if (!store.projects[projName]) {
                // Project known only from the cloud: no local path, and its
                // ingestedAt is borrowed from the first pulled entry.
                store.projects[projName] = {
                    ingestedAt: entry.created_at,
                    projectPath: "",
                    entries: [],
                };
            }
            store.projects[projName].entries.push(entry);
        }
        else {
            store.global.push(entry);
        }
        pulled++;
    }
    // Avoid rewriting the store file when nothing changed.
    if (pulled > 0)
        saveBrainStore(store);
    return { pushed: 0, pulled, errors };
}
517
/**
 * Full sync: push local → cloud, then pull cloud → local.
 * The two phases run sequentially (push first so freshly-pushed entries are
 * not re-pulled); their error lists are concatenated in that order.
 */
export async function syncBrain() {
    const push = await pushToCloud();
    const pull = await pullFromCloud();
    return {
        pushed: push.pushed,
        pulled: pull.pulled,
        errors: [...push.errors, ...pull.errors],
    };
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@prajwolkc/stk",
3
- "version": "0.4.1",
3
+ "version": "0.5.1",
4
4
  "description": "One CLI to deploy, monitor, debug, and learn about your entire stack. Infrastructure monitoring, knowledge base brain, deploy watching, and GitHub issues — all from one command.",
5
5
  "type": "module",
6
6
  "license": "MIT",