ahok-skill 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.prettierrc +8 -0
- package/Dockerfile +59 -0
- package/RAW_SKILL.md +219 -0
- package/README.md +277 -0
- package/SKILL.md +58 -0
- package/bin/opm.js +268 -0
- package/data/openmemory.sqlite +0 -0
- package/data/openmemory.sqlite-shm +0 -0
- package/data/openmemory.sqlite-wal +0 -0
- package/dist/ai/graph.js +293 -0
- package/dist/ai/mcp.js +397 -0
- package/dist/cli.js +78 -0
- package/dist/core/cfg.js +87 -0
- package/dist/core/db.js +636 -0
- package/dist/core/memory.js +116 -0
- package/dist/core/migrate.js +227 -0
- package/dist/core/models.js +105 -0
- package/dist/core/telemetry.js +57 -0
- package/dist/core/types.js +2 -0
- package/dist/core/vector/postgres.js +52 -0
- package/dist/core/vector/valkey.js +246 -0
- package/dist/core/vector_store.js +2 -0
- package/dist/index.js +44 -0
- package/dist/memory/decay.js +301 -0
- package/dist/memory/embed.js +675 -0
- package/dist/memory/hsg.js +959 -0
- package/dist/memory/reflect.js +131 -0
- package/dist/memory/user_summary.js +99 -0
- package/dist/migrate.js +9 -0
- package/dist/ops/compress.js +255 -0
- package/dist/ops/dynamics.js +189 -0
- package/dist/ops/extract.js +333 -0
- package/dist/ops/ingest.js +214 -0
- package/dist/server/index.js +109 -0
- package/dist/server/middleware/auth.js +137 -0
- package/dist/server/routes/auth.js +186 -0
- package/dist/server/routes/compression.js +108 -0
- package/dist/server/routes/dashboard.js +399 -0
- package/dist/server/routes/docs.js +241 -0
- package/dist/server/routes/dynamics.js +312 -0
- package/dist/server/routes/ide.js +280 -0
- package/dist/server/routes/index.js +33 -0
- package/dist/server/routes/keys.js +132 -0
- package/dist/server/routes/langgraph.js +61 -0
- package/dist/server/routes/memory.js +213 -0
- package/dist/server/routes/sources.js +140 -0
- package/dist/server/routes/system.js +63 -0
- package/dist/server/routes/temporal.js +293 -0
- package/dist/server/routes/users.js +101 -0
- package/dist/server/routes/vercel.js +57 -0
- package/dist/server/server.js +211 -0
- package/dist/server.js +3 -0
- package/dist/sources/base.js +223 -0
- package/dist/sources/github.js +171 -0
- package/dist/sources/google_drive.js +166 -0
- package/dist/sources/google_sheets.js +112 -0
- package/dist/sources/google_slides.js +139 -0
- package/dist/sources/index.js +34 -0
- package/dist/sources/notion.js +165 -0
- package/dist/sources/onedrive.js +143 -0
- package/dist/sources/web_crawler.js +166 -0
- package/dist/temporal_graph/index.js +20 -0
- package/dist/temporal_graph/query.js +240 -0
- package/dist/temporal_graph/store.js +116 -0
- package/dist/temporal_graph/timeline.js +241 -0
- package/dist/temporal_graph/types.js +2 -0
- package/dist/utils/chunking.js +60 -0
- package/dist/utils/index.js +31 -0
- package/dist/utils/keyword.js +94 -0
- package/dist/utils/text.js +120 -0
- package/nodemon.json +7 -0
- package/package.json +50 -0
- package/references/api_reference.md +66 -0
- package/references/examples.md +45 -0
- package/src/ai/graph.ts +363 -0
- package/src/ai/mcp.ts +494 -0
- package/src/cli.ts +94 -0
- package/src/core/cfg.ts +110 -0
- package/src/core/db.ts +1052 -0
- package/src/core/memory.ts +99 -0
- package/src/core/migrate.ts +302 -0
- package/src/core/models.ts +107 -0
- package/src/core/telemetry.ts +47 -0
- package/src/core/types.ts +130 -0
- package/src/core/vector/postgres.ts +61 -0
- package/src/core/vector/valkey.ts +261 -0
- package/src/core/vector_store.ts +9 -0
- package/src/index.ts +5 -0
- package/src/memory/decay.ts +427 -0
- package/src/memory/embed.ts +707 -0
- package/src/memory/hsg.ts +1245 -0
- package/src/memory/reflect.ts +158 -0
- package/src/memory/user_summary.ts +110 -0
- package/src/migrate.ts +8 -0
- package/src/ops/compress.ts +296 -0
- package/src/ops/dynamics.ts +272 -0
- package/src/ops/extract.ts +360 -0
- package/src/ops/ingest.ts +286 -0
- package/src/server/index.ts +159 -0
- package/src/server/middleware/auth.ts +156 -0
- package/src/server/routes/auth.ts +223 -0
- package/src/server/routes/compression.ts +106 -0
- package/src/server/routes/dashboard.ts +420 -0
- package/src/server/routes/docs.ts +380 -0
- package/src/server/routes/dynamics.ts +516 -0
- package/src/server/routes/ide.ts +283 -0
- package/src/server/routes/index.ts +32 -0
- package/src/server/routes/keys.ts +131 -0
- package/src/server/routes/langgraph.ts +71 -0
- package/src/server/routes/memory.ts +440 -0
- package/src/server/routes/sources.ts +111 -0
- package/src/server/routes/system.ts +68 -0
- package/src/server/routes/temporal.ts +335 -0
- package/src/server/routes/users.ts +111 -0
- package/src/server/routes/vercel.ts +55 -0
- package/src/server/server.js +215 -0
- package/src/server.ts +1 -0
- package/src/sources/base.ts +257 -0
- package/src/sources/github.ts +156 -0
- package/src/sources/google_drive.ts +144 -0
- package/src/sources/google_sheets.ts +85 -0
- package/src/sources/google_slides.ts +115 -0
- package/src/sources/index.ts +19 -0
- package/src/sources/notion.ts +148 -0
- package/src/sources/onedrive.ts +131 -0
- package/src/sources/web_crawler.ts +161 -0
- package/src/temporal_graph/index.ts +4 -0
- package/src/temporal_graph/query.ts +299 -0
- package/src/temporal_graph/store.ts +156 -0
- package/src/temporal_graph/timeline.ts +319 -0
- package/src/temporal_graph/types.ts +41 -0
- package/src/utils/chunking.ts +66 -0
- package/src/utils/index.ts +25 -0
- package/src/utils/keyword.ts +137 -0
- package/src/utils/text.ts +115 -0
- package/tests/test_api_workspace_management.ts +413 -0
- package/tests/test_bulk_delete.ts +267 -0
- package/tests/test_omnibus.ts +166 -0
- package/tests/test_workspace_management.ts +278 -0
- package/tests/verify.ts +104 -0
- package/tsconfig.json +15 -0
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
|
|
2
|
+
import { add_hsg_memory, hsg_query } from "../memory/hsg";
|
|
3
|
+
import { q, log_maint_op } from "./db";
|
|
4
|
+
import { env } from "./cfg";
|
|
5
|
+
import { j } from "../utils";
|
|
6
|
+
|
|
7
|
+
/**
 * Options accepted by Memory.add().
 * `user_id` and `tags` are extracted; every remaining key is stored as
 * opaque metadata alongside the memory.
 */
export interface MemoryOptions {
  // Tenant/owner of the memory; falls back to the instance's default_user.
  user_id?: string;
  // Free-form tags; serialized to a JSON string before storage.
  tags?: string[];
  // Any additional keys are passed through as metadata.
  [key: string]: any;
}
|
|
12
|
+
|
|
13
|
+
export class Memory {
|
|
14
|
+
default_user: string | null;
|
|
15
|
+
|
|
16
|
+
constructor(user_id?: string) {
|
|
17
|
+
this.default_user = user_id || null;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
async add(content: string, opts?: MemoryOptions) {
|
|
21
|
+
const uid = opts?.user_id || this.default_user;
|
|
22
|
+
const tags = opts?.tags || [];
|
|
23
|
+
const meta = { ...opts };
|
|
24
|
+
delete meta.user_id;
|
|
25
|
+
delete meta.tags;
|
|
26
|
+
|
|
27
|
+
// Ensure tags is JSON string if needed by add_hsg_memory
|
|
28
|
+
// hsg.ts signature: add_hsg_memory(content, tags, meta, user_id)
|
|
29
|
+
// tags is usually stringified JSON or string?
|
|
30
|
+
// Checked hsg.ts: interface hsg_mem { tags?: string }
|
|
31
|
+
// Let's pass JSON string for tags.
|
|
32
|
+
const tags_str = JSON.stringify(tags);
|
|
33
|
+
|
|
34
|
+
// hsg.ts add_hsg_memory returns { id, ... } or similar?
|
|
35
|
+
// Let's check hsg.ts exports. It's likely async and returns object.
|
|
36
|
+
const res = await add_hsg_memory(content, tags_str, meta, uid ?? undefined);
|
|
37
|
+
return res;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
async get(id: string) {
|
|
41
|
+
return await q.get_mem.get(id);
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
async search(query: string, opts?: { user_id?: string, limit?: number, sectors?: string[] }) {
|
|
45
|
+
// hsg_query(qt, k, f)
|
|
46
|
+
const k = opts?.limit || 10;
|
|
47
|
+
const uid = opts?.user_id || this.default_user;
|
|
48
|
+
const f: any = {};
|
|
49
|
+
if (uid) f.user_id = uid;
|
|
50
|
+
if (opts?.sectors) f.sectors = opts.sectors;
|
|
51
|
+
|
|
52
|
+
return await hsg_query(query, k, f);
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
async delete_all(user_id?: string) {
|
|
56
|
+
const uid = user_id || this.default_user;
|
|
57
|
+
if (uid) {
|
|
58
|
+
// q.del_mem usually exists or we execute raw SQL
|
|
59
|
+
// But we can't easily access q here if not exported or if we want to change memory.ts minimal
|
|
60
|
+
// I'll add a wipe() method that calls q directly if q is imported
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
async wipe() {
|
|
65
|
+
console.log("[Memory] Wiping DB...");
|
|
66
|
+
// q is imported from db.ts
|
|
67
|
+
await q.clear_all.run();
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
/**
|
|
71
|
+
* get a pre-configured source connector.
|
|
72
|
+
*
|
|
73
|
+
* usage:
|
|
74
|
+
* const github = mem.source("github")
|
|
75
|
+
* await github.connect({ token: "ghp_..." })
|
|
76
|
+
* await github.ingest_all({ repo: "owner/repo" })
|
|
77
|
+
*
|
|
78
|
+
* available sources: github, notion, google_drive, google_sheets,
|
|
79
|
+
* google_slides, onedrive, web_crawler
|
|
80
|
+
*/
|
|
81
|
+
source(name: string) {
|
|
82
|
+
// dynamic import to avoid circular deps
|
|
83
|
+
const sources: Record<string, any> = {
|
|
84
|
+
github: () => import("../sources/github").then(m => new m.github_source(this.default_user ?? undefined)),
|
|
85
|
+
notion: () => import("../sources/notion").then(m => new m.notion_source(this.default_user ?? undefined)),
|
|
86
|
+
google_drive: () => import("../sources/google_drive").then(m => new m.google_drive_source(this.default_user ?? undefined)),
|
|
87
|
+
google_sheets: () => import("../sources/google_sheets").then(m => new m.google_sheets_source(this.default_user ?? undefined)),
|
|
88
|
+
google_slides: () => import("../sources/google_slides").then(m => new m.google_slides_source(this.default_user ?? undefined)),
|
|
89
|
+
onedrive: () => import("../sources/onedrive").then(m => new m.onedrive_source(this.default_user ?? undefined)),
|
|
90
|
+
web_crawler: () => import("../sources/web_crawler").then(m => new m.web_crawler_source(this.default_user ?? undefined)),
|
|
91
|
+
};
|
|
92
|
+
|
|
93
|
+
if (!(name in sources)) {
|
|
94
|
+
throw new Error(`unknown source: ${name}. available: ${Object.keys(sources).join(", ")}`);
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
return sources[name]();
|
|
98
|
+
}
|
|
99
|
+
}
|
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
import { env } from "./cfg";
|
|
2
|
+
import sqlite3 from "sqlite3";
|
|
3
|
+
import { Pool } from "pg";
|
|
4
|
+
|
|
5
|
+
// True when the metadata backend is Postgres; otherwise SQLite is assumed.
const is_pg = env.metadata_backend === "postgres";

// Uniform log prefix for all migration output.
const log = (msg: string) => console.log(`[MIGRATE] ${msg}`);
|
|
8
|
+
|
|
9
|
+
// One schema migration, with separate statement lists per backend.
interface Migration {
  version: string;    // target schema version this migration brings the DB to
  desc: string;       // human-readable summary, used in logs
  sqlite: string[];   // statements run in order against SQLite
  postgres: string[]; // statements run in order against Postgres; may contain
                      // {m}/{v}/{w}/{u} placeholders for qualified table names
}

// Ordered list of all known migrations. Applied in sequence by
// run_migrations() when the recorded schema version is older.
const migrations: Migration[] = [
  {
    version: "1.2.0",
    desc: "Multi-user tenant support",
    sqlite: [
      `ALTER TABLE memories ADD COLUMN user_id TEXT`,
      `CREATE INDEX IF NOT EXISTS idx_memories_user ON memories(user_id)`,
      `ALTER TABLE vectors ADD COLUMN user_id TEXT`,
      `CREATE INDEX IF NOT EXISTS idx_vectors_user ON vectors(user_id)`,
      // SQLite cannot alter a primary key in place, so waypoints is rebuilt:
      // create new table, copy rows (user_id NULL), drop old, rename.
      `CREATE TABLE IF NOT EXISTS waypoints_new (
      src_id TEXT, dst_id TEXT NOT NULL, user_id TEXT,
      weight REAL NOT NULL, created_at INTEGER, updated_at INTEGER,
      PRIMARY KEY(src_id, user_id)
      )`,
      `INSERT INTO waypoints_new SELECT src_id, dst_id, NULL, weight, created_at, updated_at FROM waypoints`,
      `DROP TABLE waypoints`,
      `ALTER TABLE waypoints_new RENAME TO waypoints`,
      `CREATE INDEX IF NOT EXISTS idx_waypoints_src ON waypoints(src_id)`,
      `CREATE INDEX IF NOT EXISTS idx_waypoints_dst ON waypoints(dst_id)`,
      `CREATE INDEX IF NOT EXISTS idx_waypoints_user ON waypoints(user_id)`,
      `CREATE TABLE IF NOT EXISTS users (
      user_id TEXT PRIMARY KEY, summary TEXT,
      reflection_count INTEGER DEFAULT 0,
      created_at INTEGER, updated_at INTEGER
      )`,
      `CREATE TABLE IF NOT EXISTS stats (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      type TEXT NOT NULL, count INTEGER DEFAULT 1, ts INTEGER NOT NULL
      )`,
      `CREATE INDEX IF NOT EXISTS idx_stats_ts ON stats(ts)`,
      `CREATE INDEX IF NOT EXISTS idx_stats_type ON stats(type)`,
    ],
    postgres: [
      // Placeholders are substituted by run_pg_migration:
      //   {m} memories table, {v} vectors table, {w} waypoints, {u} users.
      `ALTER TABLE {m} ADD COLUMN IF NOT EXISTS user_id TEXT`,
      `CREATE INDEX IF NOT EXISTS openmemory_memories_user_idx ON {m}(user_id)`,
      `ALTER TABLE {v} ADD COLUMN IF NOT EXISTS user_id TEXT`,
      `CREATE INDEX IF NOT EXISTS openmemory_vectors_user_idx ON {v}(user_id)`,
      `ALTER TABLE {w} ADD COLUMN IF NOT EXISTS user_id TEXT`,
      `ALTER TABLE {w} DROP CONSTRAINT IF EXISTS waypoints_pkey`,
      `ALTER TABLE {w} ADD PRIMARY KEY (src_id, user_id)`,
      `CREATE INDEX IF NOT EXISTS openmemory_waypoints_user_idx ON {w}(user_id)`,
      `CREATE TABLE IF NOT EXISTS {u} (
      user_id TEXT PRIMARY KEY, summary TEXT,
      reflection_count INTEGER DEFAULT 0,
      created_at BIGINT, updated_at BIGINT
      )`,
    ],
  },
];
|
|
65
|
+
|
|
66
|
+
async function get_db_version_sqlite(
|
|
67
|
+
db: sqlite3.Database,
|
|
68
|
+
): Promise<string | null> {
|
|
69
|
+
return new Promise((ok, no) => {
|
|
70
|
+
db.get(
|
|
71
|
+
`SELECT name FROM sqlite_master WHERE type='table' AND name='schema_version'`,
|
|
72
|
+
(err, row: any) => {
|
|
73
|
+
if (err) return no(err);
|
|
74
|
+
if (!row) return ok(null);
|
|
75
|
+
db.get(
|
|
76
|
+
`SELECT version FROM schema_version ORDER BY applied_at DESC LIMIT 1`,
|
|
77
|
+
(e, v: any) => {
|
|
78
|
+
if (e) return no(e);
|
|
79
|
+
ok(v?.version || null);
|
|
80
|
+
},
|
|
81
|
+
);
|
|
82
|
+
},
|
|
83
|
+
);
|
|
84
|
+
});
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
async function set_db_version_sqlite(
|
|
88
|
+
db: sqlite3.Database,
|
|
89
|
+
version: string,
|
|
90
|
+
): Promise<void> {
|
|
91
|
+
return new Promise((ok, no) => {
|
|
92
|
+
db.run(
|
|
93
|
+
`CREATE TABLE IF NOT EXISTS schema_version (
|
|
94
|
+
version TEXT PRIMARY KEY, applied_at INTEGER
|
|
95
|
+
)`,
|
|
96
|
+
(err) => {
|
|
97
|
+
if (err) return no(err);
|
|
98
|
+
db.run(
|
|
99
|
+
`INSERT OR REPLACE INTO schema_version VALUES (?, ?)`,
|
|
100
|
+
[version, Date.now()],
|
|
101
|
+
(e) => {
|
|
102
|
+
if (e) return no(e);
|
|
103
|
+
ok();
|
|
104
|
+
},
|
|
105
|
+
);
|
|
106
|
+
},
|
|
107
|
+
);
|
|
108
|
+
});
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
async function check_column_exists_sqlite(
|
|
112
|
+
db: sqlite3.Database,
|
|
113
|
+
table: string,
|
|
114
|
+
column: string,
|
|
115
|
+
): Promise<boolean> {
|
|
116
|
+
return new Promise((ok, no) => {
|
|
117
|
+
db.all(`PRAGMA table_info(${table})`, (err, rows: any[]) => {
|
|
118
|
+
if (err) return no(err);
|
|
119
|
+
ok(rows.some((r) => r.name === column));
|
|
120
|
+
});
|
|
121
|
+
});
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
async function run_sqlite_migration(
|
|
125
|
+
db: sqlite3.Database,
|
|
126
|
+
m: Migration,
|
|
127
|
+
): Promise<void> {
|
|
128
|
+
log(`Running migration: ${m.version} - ${m.desc}`);
|
|
129
|
+
|
|
130
|
+
const has_user_id = await check_column_exists_sqlite(
|
|
131
|
+
db,
|
|
132
|
+
"memories",
|
|
133
|
+
"user_id",
|
|
134
|
+
);
|
|
135
|
+
if (has_user_id) {
|
|
136
|
+
log(
|
|
137
|
+
`Migration ${m.version} already applied (user_id exists), skipping`,
|
|
138
|
+
);
|
|
139
|
+
await set_db_version_sqlite(db, m.version);
|
|
140
|
+
return;
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
for (const sql of m.sqlite) {
|
|
144
|
+
await new Promise<void>((ok, no) => {
|
|
145
|
+
db.run(sql, (err) => {
|
|
146
|
+
if (err && !err.message.includes("duplicate column")) {
|
|
147
|
+
log(`ERROR: ${err.message}`);
|
|
148
|
+
return no(err);
|
|
149
|
+
}
|
|
150
|
+
ok();
|
|
151
|
+
});
|
|
152
|
+
});
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
await set_db_version_sqlite(db, m.version);
|
|
156
|
+
log(`Migration ${m.version} completed successfully`);
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
async function get_db_version_pg(pool: Pool): Promise<string | null> {
|
|
160
|
+
try {
|
|
161
|
+
const sc = process.env.OM_PG_SCHEMA || "public";
|
|
162
|
+
const check = await pool.query(
|
|
163
|
+
`SELECT EXISTS (
|
|
164
|
+
SELECT FROM information_schema.tables
|
|
165
|
+
WHERE table_schema = $1 AND table_name = 'schema_version'
|
|
166
|
+
)`,
|
|
167
|
+
[sc],
|
|
168
|
+
);
|
|
169
|
+
if (!check.rows[0].exists) return null;
|
|
170
|
+
|
|
171
|
+
const ver = await pool.query(
|
|
172
|
+
`SELECT version FROM "${sc}"."schema_version" ORDER BY applied_at DESC LIMIT 1`,
|
|
173
|
+
);
|
|
174
|
+
return ver.rows[0]?.version || null;
|
|
175
|
+
} catch (e) {
|
|
176
|
+
return null;
|
|
177
|
+
}
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
async function set_db_version_pg(pool: Pool, version: string): Promise<void> {
|
|
181
|
+
const sc = process.env.OM_PG_SCHEMA || "public";
|
|
182
|
+
await pool.query(
|
|
183
|
+
`CREATE TABLE IF NOT EXISTS "${sc}"."schema_version" (
|
|
184
|
+
version TEXT PRIMARY KEY, applied_at BIGINT
|
|
185
|
+
)`,
|
|
186
|
+
);
|
|
187
|
+
await pool.query(
|
|
188
|
+
`INSERT INTO "${sc}"."schema_version" VALUES ($1, $2)
|
|
189
|
+
ON CONFLICT (version) DO UPDATE SET applied_at = EXCLUDED.applied_at`,
|
|
190
|
+
[version, Date.now()],
|
|
191
|
+
);
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
async function check_column_exists_pg(
|
|
195
|
+
pool: Pool,
|
|
196
|
+
table: string,
|
|
197
|
+
column: string,
|
|
198
|
+
): Promise<boolean> {
|
|
199
|
+
const sc = process.env.OM_PG_SCHEMA || "public";
|
|
200
|
+
const tbl = table.replace(/"/g, "").split(".").pop() || table;
|
|
201
|
+
const res = await pool.query(
|
|
202
|
+
`SELECT EXISTS (
|
|
203
|
+
SELECT FROM information_schema.columns
|
|
204
|
+
WHERE table_schema = $1 AND table_name = $2 AND column_name = $3
|
|
205
|
+
)`,
|
|
206
|
+
[sc, tbl, column],
|
|
207
|
+
);
|
|
208
|
+
return res.rows[0].exists;
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
async function run_pg_migration(pool: Pool, m: Migration): Promise<void> {
|
|
212
|
+
log(`Running migration: ${m.version} - ${m.desc}`);
|
|
213
|
+
|
|
214
|
+
const sc = process.env.OM_PG_SCHEMA || "public";
|
|
215
|
+
const mt = process.env.OM_PG_TABLE || "openmemory_memories";
|
|
216
|
+
const has_user_id = await check_column_exists_pg(pool, mt, "user_id");
|
|
217
|
+
|
|
218
|
+
if (has_user_id) {
|
|
219
|
+
log(
|
|
220
|
+
`Migration ${m.version} already applied (user_id exists), skipping`,
|
|
221
|
+
);
|
|
222
|
+
await set_db_version_pg(pool, m.version);
|
|
223
|
+
return;
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
const replacements: Record<string, string> = {
|
|
227
|
+
"{m}": `"${sc}"."${mt}"`,
|
|
228
|
+
"{v}": `"${sc}"."${process.env.OM_VECTOR_TABLE || "openmemory_vectors"}"`,
|
|
229
|
+
"{w}": `"${sc}"."openmemory_waypoints"`,
|
|
230
|
+
"{u}": `"${sc}"."openmemory_users"`,
|
|
231
|
+
};
|
|
232
|
+
|
|
233
|
+
for (let sql of m.postgres) {
|
|
234
|
+
for (const [k, v] of Object.entries(replacements)) {
|
|
235
|
+
sql = sql.replace(new RegExp(k, "g"), v);
|
|
236
|
+
}
|
|
237
|
+
|
|
238
|
+
try {
|
|
239
|
+
await pool.query(sql);
|
|
240
|
+
} catch (e: any) {
|
|
241
|
+
if (
|
|
242
|
+
!e.message.includes("already exists") &&
|
|
243
|
+
!e.message.includes("duplicate")
|
|
244
|
+
) {
|
|
245
|
+
log(`ERROR: ${e.message}`);
|
|
246
|
+
throw e;
|
|
247
|
+
}
|
|
248
|
+
}
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
await set_db_version_pg(pool, m.version);
|
|
252
|
+
log(`Migration ${m.version} completed successfully`);
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
export async function run_migrations() {
|
|
256
|
+
log("Checking for pending migrations...");
|
|
257
|
+
|
|
258
|
+
if (is_pg) {
|
|
259
|
+
const ssl =
|
|
260
|
+
process.env.OM_PG_SSL === "require"
|
|
261
|
+
? { rejectUnauthorized: false }
|
|
262
|
+
: process.env.OM_PG_SSL === "disable"
|
|
263
|
+
? false
|
|
264
|
+
: undefined;
|
|
265
|
+
|
|
266
|
+
const pool = new Pool({
|
|
267
|
+
host: process.env.OM_PG_HOST,
|
|
268
|
+
port: process.env.OM_PG_PORT ? +process.env.OM_PG_PORT : undefined,
|
|
269
|
+
database: process.env.OM_PG_DB || "openmemory",
|
|
270
|
+
user: process.env.OM_PG_USER,
|
|
271
|
+
password: process.env.OM_PG_PASSWORD,
|
|
272
|
+
ssl,
|
|
273
|
+
});
|
|
274
|
+
|
|
275
|
+
const current = await get_db_version_pg(pool);
|
|
276
|
+
log(`Current database version: ${current || "none"}`);
|
|
277
|
+
|
|
278
|
+
for (const m of migrations) {
|
|
279
|
+
if (!current || m.version > current) {
|
|
280
|
+
await run_pg_migration(pool, m);
|
|
281
|
+
}
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
await pool.end();
|
|
285
|
+
} else {
|
|
286
|
+
const db_path = process.env.OM_DB_PATH || "./data/openmemory.sqlite";
|
|
287
|
+
const db = new sqlite3.Database(db_path);
|
|
288
|
+
|
|
289
|
+
const current = await get_db_version_sqlite(db);
|
|
290
|
+
log(`Current database version: ${current || "none"}`);
|
|
291
|
+
|
|
292
|
+
for (const m of migrations) {
|
|
293
|
+
if (!current || m.version > current) {
|
|
294
|
+
await run_sqlite_migration(db, m);
|
|
295
|
+
}
|
|
296
|
+
}
|
|
297
|
+
|
|
298
|
+
await new Promise<void>((ok) => db.close(() => ok()));
|
|
299
|
+
}
|
|
300
|
+
|
|
301
|
+
log("All migrations completed");
|
|
302
|
+
}
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import { readFileSync, existsSync } from "fs";
|
|
2
|
+
import { join } from "path";
|
|
3
|
+
// Per-sector embedding model configuration: sector -> provider -> model name.
interface model_cfg {
  [sector: string]: Record<string, string>;
}
// Lazily-populated cache for load_models(); null until first load.
let cfg: model_cfg | null = null;
|
|
7
|
+
|
|
8
|
+
export const load_models = (): model_cfg => {
|
|
9
|
+
if (cfg) return cfg;
|
|
10
|
+
const p = join(__dirname, "../../../models.yml");
|
|
11
|
+
if (!existsSync(p)) {
|
|
12
|
+
console.error("[MODELS] models.yml not found, using defaults");
|
|
13
|
+
return get_defaults();
|
|
14
|
+
}
|
|
15
|
+
try {
|
|
16
|
+
const yml = readFileSync(p, "utf-8");
|
|
17
|
+
cfg = parse_yaml(yml);
|
|
18
|
+
console.error(
|
|
19
|
+
`[MODELS] Loaded models.yml (${Object.keys(cfg).length} sectors)`,
|
|
20
|
+
);
|
|
21
|
+
return cfg;
|
|
22
|
+
} catch (e) {
|
|
23
|
+
console.error("[MODELS] Failed to parse models.yml:", e);
|
|
24
|
+
return get_defaults();
|
|
25
|
+
}
|
|
26
|
+
};
|
|
27
|
+
|
|
28
|
+
const parse_yaml = (yml: string): model_cfg => {
|
|
29
|
+
const lines = yml.split("\n");
|
|
30
|
+
const obj: model_cfg = {};
|
|
31
|
+
let cur_sec: string | null = null;
|
|
32
|
+
for (const line of lines) {
|
|
33
|
+
const trim = line.trim();
|
|
34
|
+
if (!trim || trim.startsWith("#")) continue;
|
|
35
|
+
const indent = line.search(/\S/);
|
|
36
|
+
const [key, ...val_parts] = trim.split(":");
|
|
37
|
+
const val = val_parts.join(":").trim();
|
|
38
|
+
if (indent === 0 && val) {
|
|
39
|
+
continue;
|
|
40
|
+
} else if (indent === 0) {
|
|
41
|
+
cur_sec = key;
|
|
42
|
+
obj[cur_sec] = {};
|
|
43
|
+
} else if (cur_sec && val) {
|
|
44
|
+
obj[cur_sec][key] = val;
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
return obj;
|
|
48
|
+
};
|
|
49
|
+
|
|
50
|
+
const get_defaults = (): model_cfg => ({
|
|
51
|
+
episodic: {
|
|
52
|
+
ollama: "nomic-embed-text",
|
|
53
|
+
openai: "text-embedding-3-small",
|
|
54
|
+
gemini: "models/embedding-001",
|
|
55
|
+
aws: "amazon.titan-embed-text-v2:0",
|
|
56
|
+
local: "all-MiniLM-L6-v2",
|
|
57
|
+
},
|
|
58
|
+
semantic: {
|
|
59
|
+
ollama: "nomic-embed-text",
|
|
60
|
+
openai: "text-embedding-3-small",
|
|
61
|
+
gemini: "models/embedding-001",
|
|
62
|
+
aws: "amazon.titan-embed-text-v2:0",
|
|
63
|
+
local: "all-MiniLM-L6-v2",
|
|
64
|
+
},
|
|
65
|
+
procedural: {
|
|
66
|
+
ollama: "nomic-embed-text",
|
|
67
|
+
openai: "text-embedding-3-small",
|
|
68
|
+
gemini: "models/embedding-001",
|
|
69
|
+
aws: "amazon.titan-embed-text-v2:0",
|
|
70
|
+
local: "all-MiniLM-L6-v2",
|
|
71
|
+
},
|
|
72
|
+
emotional: {
|
|
73
|
+
ollama: "nomic-embed-text",
|
|
74
|
+
openai: "text-embedding-3-small",
|
|
75
|
+
gemini: "models/embedding-001",
|
|
76
|
+
aws: "amazon.titan-embed-text-v2:0",
|
|
77
|
+
local: "all-MiniLM-L6-v2",
|
|
78
|
+
},
|
|
79
|
+
reflective: {
|
|
80
|
+
ollama: "nomic-embed-text",
|
|
81
|
+
openai: "text-embedding-3-large",
|
|
82
|
+
gemini: "models/embedding-001",
|
|
83
|
+
aws: "amazon.titan-embed-text-v2:0",
|
|
84
|
+
local: "all-mpnet-base-v2",
|
|
85
|
+
},
|
|
86
|
+
});
|
|
87
|
+
|
|
88
|
+
export const get_model = (sector: string, provider: string): string => {
|
|
89
|
+
// Environment variable overrides
|
|
90
|
+
if (provider === "ollama" && process.env.OM_OLLAMA_MODEL) {
|
|
91
|
+
return process.env.OM_OLLAMA_MODEL;
|
|
92
|
+
}
|
|
93
|
+
if (provider === "openai" && process.env.OM_OPENAI_MODEL) {
|
|
94
|
+
return process.env.OM_OPENAI_MODEL;
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
const cfg = load_models();
|
|
98
|
+
return (
|
|
99
|
+
cfg[sector]?.[provider] ||
|
|
100
|
+
cfg.semantic?.[provider] ||
|
|
101
|
+
"nomic-embed-text"
|
|
102
|
+
);
|
|
103
|
+
};
|
|
104
|
+
|
|
105
|
+
// Placeholder for future provider-specific settings (endpoints, auth, …).
// Currently every provider resolves to an empty config object.
export const get_provider_config = (provider: string): any => {
  return {};
};
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import os from 'node:os'
|
|
2
|
+
import { env } from './cfg'
|
|
3
|
+
|
|
4
|
+
// Opt-out flag: telemetry is skipped when OM_TELEMETRY is "false"
// (case-insensitive). Unset or any other value leaves telemetry enabled.
const DISABLED = (process.env.OM_TELEMETRY ?? '').toLowerCase() === 'false'
|
|
5
|
+
const gatherVersion = (): string => {
|
|
6
|
+
if (process.env.npm_package_version) return process.env.npm_package_version
|
|
7
|
+
try {
|
|
8
|
+
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
|
9
|
+
const pkg = require('../../package.json')
|
|
10
|
+
if (pkg?.version) return pkg.version
|
|
11
|
+
} catch {
|
|
12
|
+
// ignore
|
|
13
|
+
}
|
|
14
|
+
return 'unknown'
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
export const sendTelemetry = async () => {
|
|
18
|
+
if (DISABLED) return
|
|
19
|
+
try {
|
|
20
|
+
const ramMb = Math.round(os.totalmem() / (1024 * 1024))
|
|
21
|
+
const storageMb = ramMb * 4
|
|
22
|
+
const payload = {
|
|
23
|
+
name: os.hostname(),
|
|
24
|
+
os: os.platform(),
|
|
25
|
+
embeddings: env.emb_kind || 'synthetic',
|
|
26
|
+
metadata: env.metadata_backend || 'sqlite',
|
|
27
|
+
version: gatherVersion(),
|
|
28
|
+
ram: ramMb,
|
|
29
|
+
storage: storageMb,
|
|
30
|
+
cpu: os.cpus()?.[0]?.model || 'unknown',
|
|
31
|
+
}
|
|
32
|
+
const res = await fetch('https://telemetry.spotit.dev', {
|
|
33
|
+
method: 'POST',
|
|
34
|
+
headers: { 'content-type': 'application/json' },
|
|
35
|
+
body: JSON.stringify(payload),
|
|
36
|
+
keepalive: true,
|
|
37
|
+
})
|
|
38
|
+
if (!res.ok) {
|
|
39
|
+
console.warn(``)
|
|
40
|
+
} else {
|
|
41
|
+
console.log(`[telemetry] sent`)
|
|
42
|
+
}
|
|
43
|
+
} catch {
|
|
44
|
+
// silently ignore telemetry errors
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
// Request body for storing a single memory.
export type add_req = {
  content: string;
  tags?: string[];
  metadata?: Record<string, unknown>;
  salience?: number;      // initial importance score
  decay_lambda?: number;  // per-memory decay rate override
  user_id?: string;       // tenant/owner of the memory
};
// Request body for a memory search query.
export type q_req = {
  query: string;
  k?: number;             // max results to return
  filters?: {
    tags?: string[];
    min_score?: number;
    sector?: string;
    user_id?: string;
    startTime?: number;   // epoch-ms window start — TODO confirm units
    endTime?: number;     // epoch-ms window end — TODO confirm units
  };
  user_id?: string;       // top-level user filter (alongside filters.user_id)
};
// The five memory sectors used throughout the system.
export type sector_type =
  | "episodic"
  | "semantic"
  | "procedural"
  | "emotional"
  | "reflective";

// A row from the memories table. tags/meta are stored as JSON strings
// (or null); timestamps are numeric (presumably epoch ms — confirm
// against the db layer).
export type mem_row = {
  id: string;
  content: string;
  primary_sector: string;
  tags: string | null;
  meta: string | null;
  user_id: string | null;
  created_at: number;
  updated_at: number;
  last_seen_at: number;
  salience: number;
  decay_lambda: number;
  version: number;
};

// JSON-RPC 2.0 error codes used here: -32600 invalid request,
// -32603 internal error.
export type rpc_err_code = -32600 | -32603;

// Request body for ingesting raw content.
export type ingest_req = {
  source: "file" | "link" | "connector";
  content_type: "pdf" | "docx" | "html" | "md" | "txt" | "audio";
  data: string;
  metadata?: Record<string, unknown>;
  // Chunking knobs: force_root, section size, large-doc threshold —
  // semantics defined by the ingest pipeline.
  config?: { force_root?: boolean; sec_sz?: number; lg_thresh?: number };
  user_id?: string;
};

// Request body for ingesting content fetched from a URL.
export type ingest_url_req = {
  url: string;
  metadata?: Record<string, unknown>;
  config?: { force_root?: boolean; sec_sz?: number; lg_thresh?: number };
  user_id?: string;
};

// LangGraph memory: store a node's output as a memory.
export type lgm_store_req = {
  node: string;
  content: string;
  tags?: string[];
  metadata?: Record<string, unknown>;
  namespace?: string;
  graph_id?: string;
  reflective?: boolean;  // store into the reflective sector
  user_id?: string;
};

// LangGraph memory: retrieve memories for a node.
export type lgm_retrieve_req = {
  node: string;
  query?: string;
  namespace?: string;
  graph_id?: string;
  limit?: number;
  include_metadata?: boolean;
};

// LangGraph memory: fetch recent context for a graph/namespace.
export type lgm_context_req = {
  graph_id?: string;
  namespace?: string;
  limit?: number;
};

// LangGraph memory: trigger or store a reflection.
export type lgm_reflection_req = {
  node?: string;
  graph_id?: string;
  namespace?: string;
  content?: string;
  context_ids?: string[];  // ids of memories the reflection is based on
};

// IDE integration: a single editor event.
export type ide_event_req = {
  event:
    | "edit"
    | "open"
    | "close"
    | "save"
    | "refactor"
    | "comment"
    | "pattern_detected"
    | "api_call"
    | "definition"
    | "reflection";
  file?: string;
  snippet?: string;
  comment?: string;
  // Required envelope; extra keys are allowed and passed through.
  metadata: {
    project?: string;
    lang?: string;
    user?: string;
    timestamp?: number;
    [key: string]: unknown;
  };
  session_id?: string;
};

// IDE integration: query for relevant context.
export type ide_context_query_req = {
  query: string;
  k?: number;
  session_id?: string;
  file_filter?: string;
  include_patterns?: boolean;
  include_knowledge?: boolean;
};

// IDE integration: open/identify a session.
export type ide_session_req = { user?: string; project?: string; ide?: string };
|