amalfa 1.0.0 → 1.0.2

Files changed (85)
  1. package/README.md +226 -247
  2. package/amalfa.config.example.ts +8 -6
  3. package/docs/AGENT-METADATA-PATTERNS.md +1021 -0
  4. package/docs/CONFIG_E2E_VALIDATION.md +147 -0
  5. package/docs/CONFIG_UNIFICATION.md +187 -0
  6. package/docs/CONFIG_VALIDATION.md +103 -0
  7. package/docs/LEGACY_DEPRECATION.md +174 -0
  8. package/docs/MCP_SETUP.md +317 -0
  9. package/docs/QUICK_START_MCP.md +168 -0
  10. package/docs/SESSION-2026-01-06-METADATA-PATTERNS.md +346 -0
  11. package/docs/SETUP.md +464 -0
  12. package/docs/SETUP_COMPLETE.md +464 -0
  13. package/docs/VISION-AGENT-LEARNING.md +1242 -0
  14. package/docs/_current-config-status.md +93 -0
  15. package/package.json +6 -3
  16. package/polyvis.settings.json.bak +38 -0
  17. package/src/cli.ts +159 -31
  18. package/src/config/defaults.ts +73 -15
  19. package/src/core/VectorEngine.ts +18 -9
  20. package/src/daemon/index.ts +12 -8
  21. package/src/mcp/index.ts +62 -7
  22. package/src/pipeline/AmalfaIngestor.ts +22 -12
  23. package/src/pipeline/PreFlightAnalyzer.ts +434 -0
  24. package/src/resonance/DatabaseFactory.ts +3 -4
  25. package/src/resonance/db.ts +8 -6
  26. package/src/resonance/schema.ts +19 -1
  27. package/src/resonance/services/vector-daemon.ts +151 -0
  28. package/src/utils/DaemonManager.ts +147 -0
  29. package/src/utils/ZombieDefense.ts +5 -1
  30. package/:memory: +0 -0
  31. package/:memory:-shm +0 -0
  32. package/:memory:-wal +0 -0
  33. package/README.old.md +0 -112
  34. package/agents.config.json +0 -11
  35. package/drizzle/0000_minor_iron_fist.sql +0 -19
  36. package/drizzle/meta/0000_snapshot.json +0 -139
  37. package/drizzle/meta/_journal.json +0 -13
  38. package/example_usage.ts +0 -39
  39. package/experiment.sh +0 -35
  40. package/hello +0 -2
  41. package/index.html +0 -52
  42. package/knowledge/excalibur.md +0 -12
  43. package/plans/experience-graph-integration.md +0 -60
  44. package/prompts/gemini-king-mode-prompt.md +0 -46
  45. package/public/docs/MCP_TOOLS.md +0 -372
  46. package/schemas/README.md +0 -20
  47. package/schemas/cda.schema.json +0 -84
  48. package/schemas/conceptual-lexicon.schema.json +0 -75
  49. package/scratchpads/dummy-debrief-boxed.md +0 -39
  50. package/scratchpads/dummy-debrief.md +0 -27
  51. package/scratchpads/scratchpad-design.md +0 -50
  52. package/scratchpads/scratchpad-scrolling.md +0 -20
  53. package/scratchpads/scratchpad-toc-disappearance.md +0 -23
  54. package/scratchpads/scratchpad-toc.md +0 -28
  55. package/scratchpads/test_gardener.md +0 -7
  56. package/src/core/LLMClient.ts +0 -93
  57. package/src/core/TagEngine.ts +0 -56
  58. package/src/db/schema.ts +0 -46
  59. package/src/gardeners/AutoTagger.ts +0 -116
  60. package/src/pipeline/HarvesterPipeline.ts +0 -101
  61. package/src/pipeline/Ingestor.ts +0 -555
  62. package/src/resonance/cli/ingest.ts +0 -41
  63. package/src/resonance/cli/migrate.ts +0 -54
  64. package/src/resonance/config.ts +0 -40
  65. package/src/resonance/daemon.ts +0 -236
  66. package/src/resonance/pipeline/extract.ts +0 -89
  67. package/src/resonance/pipeline/transform_docs.ts +0 -60
  68. package/src/resonance/services/tokenizer.ts +0 -159
  69. package/src/resonance/transform/cda.ts +0 -393
  70. package/src/utils/EnvironmentVerifier.ts +0 -67
  71. package/substack/substack-playbook-1.md +0 -95
  72. package/substack/substack-playbook-2.md +0 -78
  73. package/tasks/ui-investigation.md +0 -26
  74. package/test-db +0 -0
  75. package/test-db-shm +0 -0
  76. package/test-db-wal +0 -0
  77. package/tests/canary/verify_pinch_check.ts +0 -44
  78. package/tests/fixtures/ingest_test.md +0 -12
  79. package/tests/fixtures/ingest_test_boxed.md +0 -13
  80. package/tests/fixtures/safety_test.md +0 -45
  81. package/tests/fixtures/safety_test_boxed.md +0 -49
  82. package/tests/fixtures/tagged_output.md +0 -49
  83. package/tests/fixtures/tagged_test.md +0 -49
  84. package/tests/mcp-server-settings.json +0 -8
  85. package/verify-embedder.ts +0 -54
package/src/resonance/services/vector-daemon.ts ADDED
@@ -0,0 +1,151 @@
+ #!/usr/bin/env bun
+ /**
+  * Vector Daemon - HTTP server for fast embedding generation
+  * Keeps FastEmbed model loaded in memory for <100ms embedding lookups
+  */
+
+ import { serve } from "bun";
+ import { EmbeddingModel, FlagEmbedding } from "fastembed";
+ import { toFafcas } from "@src/resonance/db";
+ import { getLogger } from "@src/utils/Logger";
+ import { ServiceLifecycle } from "@src/utils/ServiceLifecycle";
+
+ const log = getLogger("VectorDaemon");
+ const PORT = Number(process.env.VECTOR_PORT || 3010);
+
+ // Service lifecycle management
+ const lifecycle = new ServiceLifecycle({
+   name: "Vector-Daemon",
+   pidFile: ".vector-daemon.pid",
+   logFile: ".vector-daemon.log",
+   entryPoint: "src/resonance/services/vector-daemon.ts",
+ });
+
+ // Keep model loaded in memory
+ let embedder: FlagEmbedding | null = null;
+ const currentModel = EmbeddingModel.BGESmallENV15;
+
+ /**
+  * Initialize embedding model (called once at startup)
+  */
+ async function initEmbedder() {
+   if (!embedder) {
+     log.info({ model: currentModel }, "🔄 Initializing embedding model...");
+
+     // Ensure cache directory exists
+     const cacheDir = ".resonance/cache";
+     const { mkdir } = await import("node:fs/promises");
+     try {
+       await mkdir(cacheDir, { recursive: true });
+     } catch (e) {
+       // Directory might already exist, that's fine
+     }
+
+     embedder = await FlagEmbedding.init({
+       model: currentModel,
+       cacheDir,
+       showDownloadProgress: true,
+     });
+     log.info("✅ Embedding model loaded and ready");
+   }
+ }
+
+ /**
+  * Main server logic
+  */
+ async function runServer() {
+   // Initialize model before accepting requests
+   await initEmbedder();
+
+   // Start HTTP server
+   const server = serve({
+     port: PORT,
+     async fetch(req) {
+       const url = new URL(req.url);
+
+       // Health check endpoint
+       if (url.pathname === "/health") {
+         return new Response(
+           JSON.stringify({
+             status: "ok",
+             model: currentModel,
+             ready: embedder !== null,
+           }),
+           {
+             headers: { "Content-Type": "application/json" },
+           },
+         );
+       }
+
+       // Embed endpoint
+       if (url.pathname === "/embed" && req.method === "POST") {
+         try {
+           const body = (await req.json()) as { text?: string; model?: string };
+           const { text } = body;
+
+           if (!text) {
+             return new Response(
+               JSON.stringify({ error: "Missing text parameter" }),
+               {
+                 status: 400,
+                 headers: { "Content-Type": "application/json" },
+               },
+             );
+           }
+
+           // Generate embedding
+           if (!embedder) {
+             throw new Error("Embedder not initialized");
+           }
+
+           const gen = embedder.embed([text]);
+           const result = await gen.next();
+           const val = result.value?.[0];
+
+           if (!val || val.length === 0) {
+             throw new Error("Embedding generation returned empty result");
+           }
+
+           // Normalize using FAFCAS protocol
+           const raw = new Float32Array(val);
+           const normalized = toFafcas(raw);
+
+           // Convert to plain array for JSON serialization
+           const vector = Array.from(new Float32Array(normalized.buffer));
+
+           return new Response(
+             JSON.stringify({
+               vector,
+               dimensions: vector.length,
+             }),
+             {
+               headers: { "Content-Type": "application/json" },
+             },
+           );
+         } catch (e) {
+           log.error({ err: e }, "❌ Embedding generation failed");
+           return new Response(
+             JSON.stringify({
+               error: e instanceof Error ? e.message : String(e),
+             }),
+             {
+               status: 500,
+               headers: { "Content-Type": "application/json" },
+             },
+           );
+         }
+       }
+
+       // 404 for unknown endpoints
+       return new Response("Not Found", { status: 404 });
+     },
+   });
+
+   log.info({ port: PORT, model: currentModel }, "🚀 Vector Daemon listening");
+
+   // Keep server alive
+   await new Promise(() => {});
+ }
+
+ // Run with lifecycle management
+ await lifecycle.run(process.argv[2] || "serve", runServer);
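For reference, the new daemon speaks plain JSON over HTTP: `GET /health` reports model readiness, and `POST /embed` takes `{ "text": "..." }` and answers with `{ vector, dimensions }`. Below is a minimal client sketch; the endpoint shapes come from the diff above, while the helper itself (names, env var) is illustrative, not part of the package:

```
// Hypothetical client for the vector daemon above; endpoint shapes are
// taken from the diff, everything else here is assumed.
const VECTOR_URL = process.env.VECTOR_URL ?? "http://localhost:3010";

export async function embed(text: string): Promise<number[]> {
  const res = await fetch(`${VECTOR_URL}/embed`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ text }),
  });
  if (!res.ok) {
    // The daemon returns { error } with status 400/500 on failure
    const { error } = (await res.json()) as { error?: string };
    throw new Error(`Embed failed (${res.status}): ${error ?? "unknown"}`);
  }
  const { vector } = (await res.json()) as { vector: number[] };
  return vector; // FAFCAS-normalized; BGE-Small-EN-v1.5 yields 384 dimensions
}
```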
package/src/utils/DaemonManager.ts ADDED
@@ -0,0 +1,147 @@
+ import { existsSync } from "node:fs";
+ import { ServiceLifecycle } from "./ServiceLifecycle";
+
+ export interface DaemonStatus {
+   running: boolean;
+   pid?: number;
+   port?: number;
+ }
+
+ /**
+  * DaemonManager - Unified management for AMALFA daemons
+  * Handles vector daemon and file watcher daemon
+  */
+ export class DaemonManager {
+   private vectorLifecycle: ServiceLifecycle;
+   private watcherLifecycle: ServiceLifecycle;
+
+   constructor() {
+     this.vectorLifecycle = new ServiceLifecycle({
+       name: "Vector-Daemon",
+       pidFile: ".vector-daemon.pid",
+       logFile: ".vector-daemon.log",
+       entryPoint: "src/resonance/services/vector-daemon.ts",
+     });
+
+     this.watcherLifecycle = new ServiceLifecycle({
+       name: "File-Watcher",
+       pidFile: ".amalfa-daemon.pid",
+       logFile: ".amalfa-daemon.log",
+       entryPoint: "src/daemon/index.ts",
+     });
+   }
+
+   /**
+    * Check if a process is running
+    */
+   private async isProcessRunning(pid: number): Promise<boolean> {
+     try {
+       process.kill(pid, 0);
+       return true;
+     } catch {
+       return false;
+     }
+   }
+
+   /**
+    * Read PID from file
+    */
+   private async readPid(pidFile: string): Promise<number | null> {
+     if (!existsSync(pidFile)) {
+       return null;
+     }
+     try {
+       const content = await Bun.file(pidFile).text();
+       const pid = Number.parseInt(content.trim(), 10);
+       return Number.isNaN(pid) ? null : pid;
+     } catch {
+       return null;
+     }
+   }
+
+   /**
+    * Check if vector daemon is running
+    */
+   async checkVectorDaemon(): Promise<DaemonStatus> {
+     const pid = await this.readPid(".vector-daemon.pid");
+     if (!pid) {
+       return { running: false };
+     }
+
+     const running = await this.isProcessRunning(pid);
+     return {
+       running,
+       pid: running ? pid : undefined,
+       port: running ? 3010 : undefined,
+     };
+   }
+
+   /**
+    * Start vector daemon
+    */
+   async startVectorDaemon(): Promise<void> {
+     await this.vectorLifecycle.start();
+     // Wait a moment for daemon to initialize
+     await new Promise((resolve) => setTimeout(resolve, 1000));
+   }
+
+   /**
+    * Stop vector daemon
+    */
+   async stopVectorDaemon(): Promise<void> {
+     await this.vectorLifecycle.stop();
+   }
+
+   /**
+    * Check if file watcher daemon is running
+    */
+   async checkFileWatcher(): Promise<DaemonStatus> {
+     const pid = await this.readPid(".amalfa-daemon.pid");
+     if (!pid) {
+       return { running: false };
+     }
+
+     const running = await this.isProcessRunning(pid);
+     return {
+       running,
+       pid: running ? pid : undefined,
+     };
+   }
+
+   /**
+    * Start file watcher daemon
+    */
+   async startFileWatcher(): Promise<void> {
+     await this.watcherLifecycle.start();
+     // Wait a moment for daemon to initialize
+     await new Promise((resolve) => setTimeout(resolve, 500));
+   }
+
+   /**
+    * Stop file watcher daemon
+    */
+   async stopFileWatcher(): Promise<void> {
+     await this.watcherLifecycle.stop();
+   }
+
+   /**
+    * Check status of all daemons
+    */
+   async checkAll(): Promise<{
+     vector: DaemonStatus;
+     watcher: DaemonStatus;
+   }> {
+     const [vector, watcher] = await Promise.all([
+       this.checkVectorDaemon(),
+       this.checkFileWatcher(),
+     ]);
+     return { vector, watcher };
+   }
+
+   /**
+    * Stop all daemons
+    */
+   async stopAll(): Promise<void> {
+     await Promise.all([this.stopVectorDaemon(), this.stopFileWatcher()]);
+   }
+ }
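Together with the vector daemon above, `DaemonManager` gives the CLI a single place to poll and restart both services. A short usage sketch; the class and method names are from the diff, the surrounding script is assumed:

```
// Illustrative only: report daemon status and start the vector daemon if down.
import { DaemonManager } from "./src/utils/DaemonManager";

const manager = new DaemonManager();
const { vector, watcher } = await manager.checkAll();

console.log(vector.running ? `vector: pid ${vector.pid}, port ${vector.port}` : "vector: stopped");
console.log(watcher.running ? `watcher: pid ${watcher.pid}` : "watcher: stopped");

if (!vector.running) {
  await manager.startVectorDaemon(); // spawns the service, then waits ~1s for init
}
```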
package/src/utils/ZombieDefense.ts
@@ -12,8 +12,12 @@ export interface ZombieReport {
 
  // Services intended to run as singletons
  const WHITELIST = [
-   "src/resonance/daemon.ts",
+   // AMALFA core services
    "src/mcp/index.ts",
+   "src/resonance/services/vector-daemon.ts",
+   "src/daemon/index.ts",
+   // Legacy services
+   "src/resonance/daemon.ts",
    "scripts/cli/dev.ts",
    "src/resonance/cli/ingest.ts",
    "bun run build:data",
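The whitelist marks scripts allowed to stay resident; anything else that lingers is a candidate zombie. A hedged sketch of how such a check could consult the list (the entries are from the hunk above; the matching logic is an assumption, not ZombieDefense's actual implementation):

```
// Assumed matching logic, for illustration only.
const WHITELIST = [
  "src/mcp/index.ts",
  "src/resonance/services/vector-daemon.ts",
  "src/daemon/index.ts",
  "src/resonance/daemon.ts", // legacy
];

function isZombie(commandLine: string): boolean {
  // A process is suspect if its command line matches no whitelisted entry point
  return !WHITELIST.some((entry) => commandLine.includes(entry));
}
```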
package/:memory: DELETED
Binary file
package/:memory:-shm DELETED
Binary file
package/:memory:-wal DELETED
File without changes
package/README.old.md DELETED
@@ -1,112 +0,0 @@
1
- # Polyvis: A Neuro-Symbolic Graph Visualizer
2
-
3
- Polyvis is a lightweight, frontend-only web application for exploring and visualizing neuro-symbolic knowledge graphs. It renders conceptual relationships from a pre-built SQLite database, allowing users to navigate a "Neuro-Map" of interconnected ideas, principles, and directives.
4
-
5
- The application is built with HTML, CSS, and [Alpine.js](https://alpinejs.dev/), and uses [Bun](https://bun.sh/) as its JavaScript runtime and toolkit. The graph visualization is powered by [viz.js](https://github.com/mdaines/viz.js) and [Sigma.js](https://www.sigmajs.org/), and the in-browser database is handled by [sql.js](https://sql.js.org/).
6
-
7
- ## Features
8
-
9
- - **Interactive Graph Visualization:** Explore the knowledge graph by searching for terms.
10
- - **Data-Driven Suggestions:** The search box provides a curated list of high-value terms guaranteed to produce rich, interesting graphs.
11
- - **In-Browser Database:** The entire graph dataset is loaded into the browser via sql.js, requiring no active backend server for querying.
12
- - **Alpine.js Reactivity:** Uses [Alpine.js](https://alpinejs.dev/) for a lightweight, reactive UI without a complex build step.
13
- - **Zero-Build Frontend:** Built with vanilla web technologies and Alpine.js for maximum simplicity and performance.
14
- - **Themable UI:** All design tokens (colors, dimensions) are centralized in `src/css/layers/theme.css` ("The Control Panel") for easy customization.
15
- - **Semantic Styling:** No magic numbers. All styles use semantic variables (e.g., `--surface-panel`, `--border-base`) for consistent theming.
16
- - **Efficient Search:** Two-tier search architecture (vector embeddings + grep) - no FTS or chunking needed.
17
-
18
- ## Search Architecture
19
-
20
- Polyvis uses a **two-tier search system** that eliminates the need for full-text search (FTS) or document chunking:
21
-
22
- ### 1. Vector Search (Semantic)
23
- - **Purpose:** Semantic similarity, concept discovery
24
- - **Accuracy:** 85% average best match across diverse queries
25
- - **Speed:** <10ms per query
26
- - **Use case:** "Find documents about CSS patterns" or "Show me graph weaving logic"
27
-
28
- ### 2. Grep/Ripgrep (Literal)
29
- - **Purpose:** Exact phrase matches, symbol lookup
30
- - **Accuracy:** 100% (literal text matching)
31
- - **Speed:** <1ms
32
- - **Use case:** "Find exact phrase 'function fooBar'" or "Where is BentoBoxer imported?"
33
-
34
- ### Why No Chunking?
35
-
36
- **Document corpus characteristics:**
37
- - 80% of documents are <5KB (~1,000 words) - already "chunk-sized"
38
- - Average document: 2.7KB (~550 words)
39
- - Largest document: 47KB (~9,500 words) - still within LLM context windows
40
-
41
- **Results without chunking:**
42
- - Vector search achieves 85% accuracy on whole documents
43
- - Documents are well-structured markdown with clear headers
44
- - Natural granularity matches search needs
45
-
46
- **Future strategy:** If large documents (>20KB) become problematic, split them into multiple markdown files at natural boundaries (H1/H2 headers) and commit to version control. This keeps source files as the source of truth, maintains git-friendly diffs, and requires no runtime infrastructure.
47
-
48
- **See:** `docs/BENTO_BOXING_DEPRECATION.md` for full analysis and decision rationale.
49
-
50
- ## Design System (The Control Center)
51
- The application's visual design is strictly controlled by **`src/css/layers/theme.css`**. This file acts as a configuration panel for:
52
- - **Dimensions:** Sidebar widths, header heights.
53
- - **Colors:** Semantic mappings (e.g., `--surface-1`, `--brand`).
54
- - **Spacing:** Global padding and gaps.
55
-
56
- **Protocol:** Always check and tweak `theme.css` before modifying component styles.
57
-
58
- ## Prerequisites
59
-
60
- - [Bun.js](https://bun.sh/docs/installation) (v1.0 or later) - **MANDATORY**
61
- - A local web server for development (e.g., `bun x http-server`)
62
-
63
- ## Getting Started
64
-
65
- Follow these steps to set up and run the project locally.
66
-
67
- ### 1. Installation
68
-
69
- There are no external dependencies to install for the application itself, as it relies on vanilla JavaScript and CDN-hosted libraries.
70
-
71
- ### 2. Development Workflow
72
-
73
- For detailed instructions on CSS development, database building, and running the app, please refer to the **[Development Workflow Playbook](playbooks/development-workflow-playbook.md)**.
74
-
75
- **Quick Start:**
76
- 1. **Dev Mode:** `bun run dev` (Starts server & CSS watcher)
77
- 2. **Build DB:** `bun run scripts/build_db.ts`
78
-
79
- ## Project Structure
80
-
81
- ### 3. Detailed Documentation
82
- For a deep dive on the codebase organization, please see **[Project Structure](docs/webdocs/project-structure.md)**.
83
-
84
- ## Project Structure (High Level)
85
-
86
- ```
87
- ├── public/ # Web Root (HTML, Static Data)
88
- │ ├── explorer/ # Sigma.js Graph Explorer
89
- │ └── resonance.db # SQLite Database (generated locally)
90
-
91
- ├── src/ # Application Source Code
92
- │ ├── core/ # The Bento Box Kernel (Normalizer, Weaver)
93
- │ ├── config/ # Shared Configuration
94
- │ └── db/ # Database Schemas
95
-
96
- ├── scripts/ # Data Pipeline & Tooling
97
- │ ├── pipeline/ # ETL Scripts (Sync, Load)
98
- │ ├── cli/ # Command Line Tools (Harvest)
99
- │ └── verify/ # Integrity Checks
100
-
101
- ├── docs/ # Project Documentation
102
- ├── playbooks/ # Operational Protocols
103
- ├── polyvis.settings.json # Central Configuration
104
- └── README.md # This file
105
- ```
106
-
107
- ## Contributing
108
- ## Contribution Guidelines
109
- Please review `AGENTS.md` for our operational protocols, specifically:
110
- - **EVP (Empirical Verification Protocol):** Use the browser to verify, don't guess.
111
- - **GEP (Granular Execution Protocol):** One step at a time.
112
- Please feel free to open issues or submit pull requests.
package/agents.config.json DELETED
@@ -1,11 +0,0 @@
- {
-   "nanocoder": {
-     "providers": [
-       {
-         "name": "ollama",
-         "models": ["llama3.1:8b"],
-         "baseUrl": "http://localhost:11434/v1"
-       }
-     ]
-   }
- }
package/drizzle/0000_minor_iron_fist.sql DELETED
@@ -1,19 +0,0 @@
- CREATE TABLE `edges` (
-   `source` text NOT NULL,
-   `target` text NOT NULL,
-   `type` text NOT NULL,
-   `metadata` text
- );
- --> statement-breakpoint
- CREATE TABLE `nodes` (
-   `id` text PRIMARY KEY NOT NULL,
-   `type` text NOT NULL,
-   `title` text,
-   `content` text,
-   `domain` text DEFAULT 'knowledge',
-   `layer` text DEFAULT 'experience',
-   `order_index` integer DEFAULT 0,
-   `metadata` text,
-   `external_refs` text,
-   `embedding` blob
- );
@@ -1,139 +0,0 @@
1
- {
2
- "version": "6",
3
- "dialect": "sqlite",
4
- "id": "96582325-8a88-4ffb-872a-43699e33324c",
5
- "prevId": "00000000-0000-0000-0000-000000000000",
6
- "tables": {
7
- "edges": {
8
- "name": "edges",
9
- "columns": {
10
- "source": {
11
- "name": "source",
12
- "type": "text",
13
- "primaryKey": false,
14
- "notNull": true,
15
- "autoincrement": false
16
- },
17
- "target": {
18
- "name": "target",
19
- "type": "text",
20
- "primaryKey": false,
21
- "notNull": true,
22
- "autoincrement": false
23
- },
24
- "type": {
25
- "name": "type",
26
- "type": "text",
27
- "primaryKey": false,
28
- "notNull": true,
29
- "autoincrement": false
30
- },
31
- "metadata": {
32
- "name": "metadata",
33
- "type": "text",
34
- "primaryKey": false,
35
- "notNull": false,
36
- "autoincrement": false
37
- }
38
- },
39
- "indexes": {},
40
- "foreignKeys": {},
41
- "compositePrimaryKeys": {},
42
- "uniqueConstraints": {},
43
- "checkConstraints": {}
44
- },
45
- "nodes": {
46
- "name": "nodes",
47
- "columns": {
48
- "id": {
49
- "name": "id",
50
- "type": "text",
51
- "primaryKey": true,
52
- "notNull": true,
53
- "autoincrement": false
54
- },
55
- "type": {
56
- "name": "type",
57
- "type": "text",
58
- "primaryKey": false,
59
- "notNull": true,
60
- "autoincrement": false
61
- },
62
- "title": {
63
- "name": "title",
64
- "type": "text",
65
- "primaryKey": false,
66
- "notNull": false,
67
- "autoincrement": false
68
- },
69
- "content": {
70
- "name": "content",
71
- "type": "text",
72
- "primaryKey": false,
73
- "notNull": false,
74
- "autoincrement": false
75
- },
76
- "domain": {
77
- "name": "domain",
78
- "type": "text",
79
- "primaryKey": false,
80
- "notNull": false,
81
- "autoincrement": false,
82
- "default": "'knowledge'"
83
- },
84
- "layer": {
85
- "name": "layer",
86
- "type": "text",
87
- "primaryKey": false,
88
- "notNull": false,
89
- "autoincrement": false,
90
- "default": "'experience'"
91
- },
92
- "order_index": {
93
- "name": "order_index",
94
- "type": "integer",
95
- "primaryKey": false,
96
- "notNull": false,
97
- "autoincrement": false,
98
- "default": 0
99
- },
100
- "metadata": {
101
- "name": "metadata",
102
- "type": "text",
103
- "primaryKey": false,
104
- "notNull": false,
105
- "autoincrement": false
106
- },
107
- "external_refs": {
108
- "name": "external_refs",
109
- "type": "text",
110
- "primaryKey": false,
111
- "notNull": false,
112
- "autoincrement": false
113
- },
114
- "embedding": {
115
- "name": "embedding",
116
- "type": "blob",
117
- "primaryKey": false,
118
- "notNull": false,
119
- "autoincrement": false
120
- }
121
- },
122
- "indexes": {},
123
- "foreignKeys": {},
124
- "compositePrimaryKeys": {},
125
- "uniqueConstraints": {},
126
- "checkConstraints": {}
127
- }
128
- },
129
- "views": {},
130
- "enums": {},
131
- "_meta": {
132
- "schemas": {},
133
- "tables": {},
134
- "columns": {}
135
- },
136
- "internal": {
137
- "indexes": {}
138
- }
139
- }
package/drizzle/meta/_journal.json DELETED
@@ -1,13 +0,0 @@
- {
-   "version": "7",
-   "dialect": "sqlite",
-   "entries": [
-     {
-       "idx": 0,
-       "version": "6",
-       "when": 1765219627085,
-       "tag": "0000_minor_iron_fist",
-       "breakpoints": true
-     }
-   ]
- }
package/example_usage.ts DELETED
@@ -1,39 +0,0 @@
- // example_usage.ts
- import { EnlightenedProvider } from "./src/llm/EnlightenedProvider";
-
- async function main() {
-   // 1. Initialize the Accountant
-   const hume = new EnlightenedProvider({ port: 8083 });
-
-   // 2. Check Pulse
-   if (!(await hume.isOnline())) {
-     console.error(
-       "⚠️ The Enlightenment Server is offline. Run the Golden Command.",
-     );
-     return;
-   }
-
-   console.log("🟢 Enlightenment Engine Online.\n");
-
-   // 3. Scenario A: The Logic Check
-   console.log("--- TEST A: LOGIC ---");
-   const logicResponse = await hume.think([
-     { role: "system", content: "You are a skeptical logician." },
-     {
-       role: "user",
-       content:
-         "I walked under a ladder and then tripped. Did the ladder cause me to trip?",
-     },
-   ]);
-   console.log(`Result: ${logicResponse}\n`);
-
-   // 4. Scenario B: The "De-Fluff" Tool
-   console.log("--- TEST B: DE-FLUFFING ---");
-   const corporateJargon =
-     "We need to leverage our core competencies to shift the paradigm and boil the ocean.";
-   const cleanText = await hume.defluff(corporateJargon);
-   console.log(`Original: "${corporateJargon}"`);
-   console.log(`Cleaned: "${cleanText}"`);
- }
-
- main();
package/experiment.sh DELETED
@@ -1,35 +0,0 @@
- #!/bin/bash
-
- # 1. Create the Hazard Lab
- mkdir -p experiments/enlightenment
- cd experiments/enlightenment
-
- # 2. Update .gitignore (Crucial: Don't commit 10GB of AI)
- # We append to the root .gitignore if these rules aren't there
- if ! grep -q "experiments/enlightenment/models" ../../.gitignore; then
-   echo -e "\n# Enlightenment Experiment\nexperiments/enlightenment/models/\nexperiments/enlightenment/venv/\nexperiments/enlightenment/__pycache__/\n*.gguf" >> ../../.gitignore
-   echo "✅ Added safety rules to .gitignore"
- fi
-
- # 3. Create Python Virtual Environment
- echo "🐍 Setting up Python environment..."
- python3 -m venv venv
- source venv/bin/activate
-
- # 4. Install Dependencies
- # repeng: The control vector library
- # torch/transformers: For calculating the vector
- echo "⬇️ Installing PyTorch and Transformers (this may take a minute)..."
- pip install torch transformers repeng tqdm notebook
-
- # 5. Clone llama.cpp (for the server)
- if [ ! -d "llama.cpp" ]; then
-   echo "🏗️ Cloning and building llama.cpp..."
-   git clone https://github.com/ggerganov/llama.cpp
-   cd llama.cpp
-   make -j4 # Build the server
-   cd ..
- fi
-
- echo "✅ Environment Ready in experiments/enlightenment/"
- echo " Next Step: Download the Llama-3 Model."
package/hello DELETED
@@ -1,2 +0,0 @@
- ls -la ~/.claude-code-router* 2>/dev/null || echo "No config found"
-