kotadb 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +79 -0
- package/package.json +75 -0
- package/src/api/auto-reindex.ts +55 -0
- package/src/api/openapi/builder.ts +209 -0
- package/src/api/openapi/paths.ts +354 -0
- package/src/api/openapi/schemas.ts +608 -0
- package/src/api/queries.ts +1168 -0
- package/src/api/routes.ts +339 -0
- package/src/auth/middleware.ts +83 -0
- package/src/cli.ts +221 -0
- package/src/config/constants.ts +96 -0
- package/src/config/environment.ts +96 -0
- package/src/config/gitignore.ts +68 -0
- package/src/config/index.ts +20 -0
- package/src/config/project-root.ts +52 -0
- package/src/db/client.ts +72 -0
- package/src/db/sqlite/index.ts +35 -0
- package/src/db/sqlite/jsonl-exporter.ts +416 -0
- package/src/db/sqlite/jsonl-importer.ts +361 -0
- package/src/db/sqlite/sqlite-client.ts +536 -0
- package/src/index.ts +66 -0
- package/src/indexer/ast-parser.ts +146 -0
- package/src/indexer/ast-types.ts +54 -0
- package/src/indexer/circular-detector.ts +262 -0
- package/src/indexer/dependency-extractor.ts +352 -0
- package/src/indexer/extractors.ts +54 -0
- package/src/indexer/import-resolver.ts +167 -0
- package/src/indexer/parsers.ts +177 -0
- package/src/indexer/reference-extractor.ts +488 -0
- package/src/indexer/repos.ts +245 -0
- package/src/indexer/storage.ts +277 -0
- package/src/indexer/symbol-extractor.ts +660 -0
- package/src/instrument.ts +88 -0
- package/src/logging/context.ts +46 -0
- package/src/logging/logger.ts +193 -0
- package/src/logging/middleware.ts +107 -0
- package/src/mcp/github-integration.ts +293 -0
- package/src/mcp/headers.ts +101 -0
- package/src/mcp/impact-analysis.ts +495 -0
- package/src/mcp/jsonrpc.ts +141 -0
- package/src/mcp/lifecycle.ts +73 -0
- package/src/mcp/server.ts +202 -0
- package/src/mcp/session.ts +44 -0
- package/src/mcp/spec-validation.ts +491 -0
- package/src/mcp/tools.ts +889 -0
- package/src/sync/deletion-manifest.ts +210 -0
- package/src/sync/index.ts +16 -0
- package/src/sync/merge-driver.ts +172 -0
- package/src/sync/watcher.ts +221 -0
- package/src/types/rate-limit.ts +88 -0
- package/src/validation/common-schemas.ts +96 -0
- package/src/validation/schemas.ts +187 -0
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Deletion manifest for tracking removed entities during sync
|
|
3
|
+
*
|
|
4
|
+
* Problem: JSONL export only captures current state. If you delete
|
|
5
|
+
* a repository locally, other machines won't know to delete it on
|
|
6
|
+
* import because it's simply absent from the export.
|
|
7
|
+
*
|
|
8
|
+
* Solution: .deletions.jsonl manifest tracks deletions explicitly:
|
|
9
|
+
* ```jsonl
|
|
10
|
+
* {"table":"repositories","id":"abc-123","deleted_at":"2025-12-15T10:30:00Z"}
|
|
11
|
+
* {"table":"indexed_files","id":"def-456","deleted_at":"2025-12-15T10:31:00Z"}
|
|
12
|
+
* ```
|
|
13
|
+
*
|
|
14
|
+
* Lifecycle:
|
|
15
|
+
* 1. Export: Record deletions in manifest
|
|
16
|
+
* 2. Git: Manifest syncs like any other file
|
|
17
|
+
* 3. Import: Apply deletions before importing new data
|
|
18
|
+
* 4. Cleanup: Clear manifest after successful import
|
|
19
|
+
*
|
|
20
|
+
* @module @sync/deletion-manifest
|
|
21
|
+
*/
|
|
22
|
+
|
|
23
|
+
import { existsSync, appendFileSync, mkdirSync } from "node:fs";
import { join } from "node:path";
import { createLogger } from "@logging/logger.js";
import type { KotaDatabase } from "@db/sqlite/sqlite-client.js";
import { getDefaultExportDir } from "@db/sqlite/jsonl-exporter.js";
|
|
28
|
+
|
|
29
|
+
const logger = createLogger({ module: "deletion-manifest" });
|
|
30
|
+
|
|
31
|
+
/**
 * A single deletion entry in the `.deletions.jsonl` manifest.
 */
export interface DeletionEntry {
  /** Name of the table the row was deleted from. */
  table: string;
  /** Primary key of the deleted row. */
  id: string;
  /** ISO-8601 timestamp of when the deletion was recorded. */
  deleted_at: string;
}
|
|
39
|
+
|
|
40
|
+
/**
|
|
41
|
+
* Record a deletion in the manifest
|
|
42
|
+
*/
|
|
43
|
+
export async function recordDeletion(
|
|
44
|
+
table: string,
|
|
45
|
+
id: string,
|
|
46
|
+
exportDir: string = getDefaultExportDir()
|
|
47
|
+
): Promise<void> {
|
|
48
|
+
const manifestPath = join(exportDir, ".deletions.jsonl");
|
|
49
|
+
|
|
50
|
+
const entry: DeletionEntry = {
|
|
51
|
+
table,
|
|
52
|
+
id,
|
|
53
|
+
deleted_at: new Date().toISOString()
|
|
54
|
+
};
|
|
55
|
+
|
|
56
|
+
const line = JSON.stringify(entry) + "\n";
|
|
57
|
+
|
|
58
|
+
// Append to manifest (create if doesn't exist)
|
|
59
|
+
appendFileSync(manifestPath, line, "utf-8");
|
|
60
|
+
|
|
61
|
+
logger.debug("Deletion recorded", { table, id });
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
/**
|
|
65
|
+
* Load all deletions from manifest
|
|
66
|
+
*/
|
|
67
|
+
export async function loadDeletionManifest(
|
|
68
|
+
manifestPath: string
|
|
69
|
+
): Promise<DeletionEntry[]> {
|
|
70
|
+
if (!existsSync(manifestPath)) {
|
|
71
|
+
return [];
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
const content = await Bun.file(manifestPath).text();
|
|
75
|
+
const lines = content.trim().split("\n").filter(Boolean);
|
|
76
|
+
const entries: DeletionEntry[] = [];
|
|
77
|
+
|
|
78
|
+
for (let idx = 0; idx < lines.length; idx++) {
|
|
79
|
+
const line = lines[idx];
|
|
80
|
+
try {
|
|
81
|
+
const entry = JSON.parse(line as string) as DeletionEntry;
|
|
82
|
+
entries.push(entry);
|
|
83
|
+
} catch (error) {
|
|
84
|
+
logger.warn("Invalid deletion entry, skipping", {
|
|
85
|
+
line_number: idx + 1,
|
|
86
|
+
error: error instanceof Error ? error.message : String(error)
|
|
87
|
+
});
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
return entries;
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
/**
|
|
95
|
+
* Apply deletions from manifest to database
|
|
96
|
+
*/
|
|
97
|
+
export async function applyDeletionManifest(
|
|
98
|
+
db: KotaDatabase,
|
|
99
|
+
manifestPath: string
|
|
100
|
+
): Promise<{ deletedCount: number; errors: string[] }> {
|
|
101
|
+
const entries = await loadDeletionManifest(manifestPath);
|
|
102
|
+
|
|
103
|
+
if (entries.length === 0) {
|
|
104
|
+
logger.debug("No deletions to apply");
|
|
105
|
+
return { deletedCount: 0, errors: [] };
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
logger.info("Applying deletion manifest", { entry_count: entries.length });
|
|
109
|
+
|
|
110
|
+
const errors: string[] = [];
|
|
111
|
+
let deletedCount = 0;
|
|
112
|
+
|
|
113
|
+
// Group by table for batch deletions
|
|
114
|
+
const byTable = new Map<string, string[]>();
|
|
115
|
+
for (const entry of entries) {
|
|
116
|
+
if (!byTable.has(entry.table)) {
|
|
117
|
+
byTable.set(entry.table, []);
|
|
118
|
+
}
|
|
119
|
+
byTable.get(entry.table)!.push(entry.id);
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
// Apply deletions in transaction
|
|
123
|
+
try {
|
|
124
|
+
db.immediateTransaction(() => {
|
|
125
|
+
for (const [table, ids] of byTable) {
|
|
126
|
+
if (!db.tableExists(table)) {
|
|
127
|
+
logger.warn("Deletion target table not found, skipping", { table });
|
|
128
|
+
errors.push(`Table not found: ${table}`);
|
|
129
|
+
continue;
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
// DELETE FROM table WHERE id IN (?, ?, ...)
|
|
133
|
+
const placeholders = ids.map(() => "?").join(", ");
|
|
134
|
+
const sql = `DELETE FROM ${table} WHERE id IN (${placeholders})`;
|
|
135
|
+
|
|
136
|
+
try {
|
|
137
|
+
db.run(sql, ids as (string | number | bigint | boolean | null | Uint8Array)[]);
|
|
138
|
+
// Note: db.run() doesn't return changes count, so we count the IDs
|
|
139
|
+
deletedCount += ids.length;
|
|
140
|
+
|
|
141
|
+
logger.debug("Deleted entries from table", {
|
|
142
|
+
table,
|
|
143
|
+
deleted_count: ids.length
|
|
144
|
+
});
|
|
145
|
+
} catch (error) {
|
|
146
|
+
const errorMsg = error instanceof Error ? error.message : String(error);
|
|
147
|
+
logger.error(`Failed to delete from ${table}`, new Error(errorMsg));
|
|
148
|
+
errors.push(`${table}: ${errorMsg}`);
|
|
149
|
+
}
|
|
150
|
+
}
|
|
151
|
+
});
|
|
152
|
+
} catch (txError) {
|
|
153
|
+
const errorMsg = txError instanceof Error ? txError.message : String(txError);
|
|
154
|
+
logger.error("Deletion transaction failed", new Error(errorMsg));
|
|
155
|
+
return { deletedCount: 0, errors: [errorMsg] };
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
logger.info("Deletion manifest applied", {
|
|
159
|
+
deleted_count: deletedCount,
|
|
160
|
+
error_count: errors.length
|
|
161
|
+
});
|
|
162
|
+
|
|
163
|
+
return { deletedCount, errors };
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
/**
|
|
167
|
+
* Clear deletion manifest after successful import
|
|
168
|
+
*/
|
|
169
|
+
export async function clearDeletionManifest(
|
|
170
|
+
exportDir: string = getDefaultExportDir()
|
|
171
|
+
): Promise<void> {
|
|
172
|
+
const manifestPath = join(exportDir, ".deletions.jsonl");
|
|
173
|
+
|
|
174
|
+
if (!existsSync(manifestPath)) {
|
|
175
|
+
return;
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
await Bun.write(manifestPath, "");
|
|
179
|
+
logger.info("Deletion manifest cleared");
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
/**
 * Hook into database operations to track deletions automatically.
 *
 * Intended usage once implemented:
 * ```typescript
 * const db = getClient();
 * trackDeletions(db);
 *
 * // Now all DELETE operations are logged to manifest
 * db.run("DELETE FROM repositories WHERE id = ?", ["abc-123"]);
 * ```
 *
 * NOTE(review): currently a stub — it only logs a warning. Wrapping
 * db.run() to intercept DELETE statements and extract table/id pairs
 * requires non-trivial SQL pattern matching (planned for Phase 3B or
 * Phase 4). For the MVP, call recordDeletion(table, id) manually
 * after each delete.
 *
 * @param db - Database handle that would be instrumented (unused for now)
 * @param exportDir - Export directory the manifest would live in
 */
export function trackDeletions(
  db: KotaDatabase,
  exportDir: string = getDefaultExportDir()
): void {
  // Implementation note: This would require wrapping db.run() to intercept
  // DELETE statements and extract table/id pairs. Complex pattern matching
  // needed. Consider for Phase 3B or Phase 4.

  logger.warn("Automatic deletion tracking not yet implemented");
  logger.info(
    "Use recordDeletion(table, id) manually after deletions",
    { export_dir: exportDir }
  );
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Sync layer module exports
|
|
3
|
+
*
|
|
4
|
+
* @module @sync
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
// Filesystem watcher that auto-imports JSONL exports after git pull.
export { SyncWatcher, createWatcher } from "./watcher.js";
// Custom git merge driver for ID-keyed .jsonl files.
export { runMergeDriver } from "./merge-driver.js";
// Deletion manifest: records and replays explicit deletions across machines.
export {
  recordDeletion,
  loadDeletionManifest,
  applyDeletionManifest,
  clearDeletionManifest,
  trackDeletions,
  type DeletionEntry
} from "./deletion-manifest.js";
|
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Custom git merge driver for JSONL files
|
|
3
|
+
*
|
|
4
|
+
* Resolves conflicts in .jsonl files using line-based reconciliation:
|
|
5
|
+
* - Lines with same ID: use THEIRS (assume remote is authoritative)
|
|
6
|
+
* - Lines unique to OURS: keep them
|
|
7
|
+
* - Lines unique to THEIRS: keep them
|
|
8
|
+
*
|
|
9
|
+
* Algorithm:
|
|
10
|
+
* 1. Parse BASE, OURS, THEIRS into ID-keyed maps
|
|
11
|
+
* 2. Collect all unique IDs across versions
|
|
12
|
+
* 3. For each ID, choose THEIRS if present, else OURS
|
|
13
|
+
* 4. Sort by ID for deterministic output
|
|
14
|
+
* 5. Write merged JSONL to OURS path
|
|
15
|
+
*
|
|
16
|
+
* Installation:
|
|
17
|
+
* ```bash
|
|
18
|
+
* # Add to .git/config or ~/.gitconfig
|
|
19
|
+
* [merge "jsonl"]
|
|
20
|
+
* name = JSONL merge driver
|
|
21
|
+
* driver = bun run src/sync/merge-driver.ts %O %A %B %L
|
|
22
|
+
* ```
|
|
23
|
+
*
|
|
24
|
+
* @module @sync/merge-driver
|
|
25
|
+
*/
|
|
26
|
+
|
|
27
|
+
import { readFileSync, writeFileSync } from "node:fs";
|
|
28
|
+
import { createLogger } from "@logging/logger.js";
|
|
29
|
+
|
|
30
|
+
const logger = createLogger({ module: "merge-driver" });
|
|
31
|
+
|
|
32
|
+
/**
 * Parsed JSONL entry keyed by its `id` field.
 */
interface JSONLEntry {
  // Value of the entry's "id" property; used as the merge key.
  id: string;
  // Original raw JSONL line, preserved verbatim for output.
  line: string;
  // Parsed JSON payload of the line.
  data: Record<string, unknown>;
}
|
|
40
|
+
|
|
41
|
+
/**
|
|
42
|
+
* Parse JSONL file into ID-keyed map
|
|
43
|
+
*/
|
|
44
|
+
function parseJSONL(filepath: string): Map<string, JSONLEntry> {
|
|
45
|
+
const content = readFileSync(filepath, "utf-8");
|
|
46
|
+
const lines = content.trim().split("\n").filter(Boolean);
|
|
47
|
+
const entries = new Map<string, JSONLEntry>();
|
|
48
|
+
|
|
49
|
+
for (const line of lines) {
|
|
50
|
+
try {
|
|
51
|
+
const data = JSON.parse(line) as Record<string, unknown>;
|
|
52
|
+
const id = data.id as string;
|
|
53
|
+
|
|
54
|
+
if (!id) {
|
|
55
|
+
logger.warn("JSONL entry missing ID, skipping", { line });
|
|
56
|
+
continue;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
entries.set(id, { id, line, data });
|
|
60
|
+
} catch (error) {
|
|
61
|
+
logger.error(
|
|
62
|
+
"Failed to parse JSONL line",
|
|
63
|
+
error instanceof Error ? error : new Error(String(error)),
|
|
64
|
+
{ line }
|
|
65
|
+
);
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
return entries;
|
|
70
|
+
}
|
|
71
|
+
|
|
72
|
+
/**
|
|
73
|
+
* Merge three JSONL versions (base, ours, theirs)
|
|
74
|
+
*
|
|
75
|
+
* Strategy: THEIRS-preferred merge
|
|
76
|
+
* - If ID in THEIRS: use THEIRS
|
|
77
|
+
* - Else if ID in OURS: use OURS
|
|
78
|
+
* - Else: skip (was deleted in both)
|
|
79
|
+
*/
|
|
80
|
+
function mergeJSONL(
|
|
81
|
+
basePath: string,
|
|
82
|
+
oursPath: string,
|
|
83
|
+
theirsPath: string
|
|
84
|
+
): string {
|
|
85
|
+
const base = parseJSONL(basePath);
|
|
86
|
+
const ours = parseJSONL(oursPath);
|
|
87
|
+
const theirs = parseJSONL(theirsPath);
|
|
88
|
+
|
|
89
|
+
// Collect all IDs
|
|
90
|
+
const allIds = new Set<string>([
|
|
91
|
+
...base.keys(),
|
|
92
|
+
...ours.keys(),
|
|
93
|
+
...theirs.keys()
|
|
94
|
+
]);
|
|
95
|
+
|
|
96
|
+
// Merge: prefer THEIRS, fallback to OURS
|
|
97
|
+
const merged: JSONLEntry[] = [];
|
|
98
|
+
for (const id of allIds) {
|
|
99
|
+
if (theirs.has(id)) {
|
|
100
|
+
merged.push(theirs.get(id)!);
|
|
101
|
+
} else if (ours.has(id)) {
|
|
102
|
+
merged.push(ours.get(id)!);
|
|
103
|
+
}
|
|
104
|
+
// If neither has it, ID was deleted in both - skip
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
// Sort by ID for deterministic output
|
|
108
|
+
merged.sort((a, b) => a.id.localeCompare(b.id));
|
|
109
|
+
|
|
110
|
+
// Format as JSONL
|
|
111
|
+
return merged.map((entry) => entry.line).join("\n") + "\n";
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
/**
|
|
115
|
+
* Main merge driver entry point
|
|
116
|
+
*
|
|
117
|
+
* Git invokes as: merge-driver %O %A %B %L
|
|
118
|
+
* - %O: base version path
|
|
119
|
+
* - %A: ours version path (current branch)
|
|
120
|
+
* - %B: theirs version path (incoming branch)
|
|
121
|
+
* - %L: conflict marker size (unused)
|
|
122
|
+
*/
|
|
123
|
+
export function runMergeDriver(
|
|
124
|
+
basePath: string,
|
|
125
|
+
oursPath: string,
|
|
126
|
+
theirsPath: string,
|
|
127
|
+
_markerSize: string
|
|
128
|
+
): number {
|
|
129
|
+
logger.info("JSONL merge driver invoked", {
|
|
130
|
+
base: basePath,
|
|
131
|
+
ours: oursPath,
|
|
132
|
+
theirs: theirsPath
|
|
133
|
+
});
|
|
134
|
+
|
|
135
|
+
try {
|
|
136
|
+
const merged = mergeJSONL(basePath, oursPath, theirsPath);
|
|
137
|
+
|
|
138
|
+
// Write merged result to OURS path
|
|
139
|
+
writeFileSync(oursPath, merged, "utf-8");
|
|
140
|
+
|
|
141
|
+
logger.info("JSONL merge completed successfully", {
|
|
142
|
+
output: oursPath
|
|
143
|
+
});
|
|
144
|
+
|
|
145
|
+
return 0; // Success
|
|
146
|
+
} catch (error) {
|
|
147
|
+
logger.error(
|
|
148
|
+
"JSONL merge failed",
|
|
149
|
+
error instanceof Error ? error : new Error(String(error)),
|
|
150
|
+
{
|
|
151
|
+
base: basePath,
|
|
152
|
+
ours: oursPath,
|
|
153
|
+
theirs: theirsPath
|
|
154
|
+
}
|
|
155
|
+
);
|
|
156
|
+
|
|
157
|
+
return 1; // Conflict (git will mark file as conflicted)
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
|
|
161
|
+
// CLI entry point (when run via `bun run merge-driver.ts`)
|
|
162
|
+
if (import.meta.main) {
|
|
163
|
+
const [basePath, oursPath, theirsPath, markerSize] = process.argv.slice(2);
|
|
164
|
+
|
|
165
|
+
if (!basePath || !oursPath || !theirsPath) {
|
|
166
|
+
process.stderr.write("Usage: merge-driver.ts <base> <ours> <theirs> <marker-size>\n");
|
|
167
|
+
process.exit(1);
|
|
168
|
+
}
|
|
169
|
+
|
|
170
|
+
const exitCode = runMergeDriver(basePath, oursPath, theirsPath, markerSize || "7");
|
|
171
|
+
process.exit(exitCode);
|
|
172
|
+
}
|
|
@@ -0,0 +1,221 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File watcher for automatic JSONL import on git pull
|
|
3
|
+
*
|
|
4
|
+
* Watches .kotadb/export/*.jsonl (project-local) for changes and triggers
|
|
5
|
+
* import when modifications are detected (e.g., after git pull).
|
|
6
|
+
*
|
|
7
|
+
* Features:
|
|
8
|
+
* - Debounced import (1-second delay to batch rapid changes)
|
|
9
|
+
* - Hash-based change detection (skip unchanged files)
|
|
10
|
+
* - Graceful error handling (log failures, don't crash)
|
|
11
|
+
*
|
|
12
|
+
* @module @sync/watcher
|
|
13
|
+
*/
|
|
14
|
+
|
|
15
|
+
import { watch, type FSWatcher } from "node:fs";
|
|
16
|
+
import { existsSync } from "node:fs";
|
|
17
|
+
import { join } from "node:path";
|
|
18
|
+
import { createLogger } from "@logging/logger.js";
|
|
19
|
+
import { getDefaultExportDir } from "@db/sqlite/jsonl-exporter.js";
|
|
20
|
+
import { importFromJSONL } from "@db/sqlite/jsonl-importer.js";
|
|
21
|
+
import type { KotaDatabase } from "@db/sqlite/sqlite-client.js";
|
|
22
|
+
import { getClient } from "@db/client.js";
|
|
23
|
+
import { applyDeletionManifest } from "@sync/deletion-manifest.js";
|
|
24
|
+
|
|
25
|
+
const logger = createLogger({ module: "sync-watcher" });
|
|
26
|
+
|
|
27
|
+
/**
 * Internal watcher state used for debouncing imports.
 */
interface WatcherState {
  // Pending debounce timer; null when no import is scheduled.
  timer: ReturnType<typeof setTimeout> | null;
  // JSONL filenames that changed since the last import ran.
  changedFiles: Set<string>;
  // ISO timestamp of the most recent successful import.
  lastImportAt: string;
}
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* SyncWatcher - Watches JSONL export directory for changes.
|
|
38
|
+
*
|
|
39
|
+
* Usage:
|
|
40
|
+
* ```typescript
|
|
41
|
+
* const watcher = new SyncWatcher();
|
|
42
|
+
* watcher.start();
|
|
43
|
+
*
|
|
44
|
+
* // Later...
|
|
45
|
+
* watcher.stop();
|
|
46
|
+
* ```
|
|
47
|
+
*/
|
|
48
|
+
export class SyncWatcher {
|
|
49
|
+
private fsWatcher: FSWatcher | null = null;
|
|
50
|
+
private state: WatcherState;
|
|
51
|
+
private readonly exportDir: string;
|
|
52
|
+
private readonly debounceMs: number;
|
|
53
|
+
|
|
54
|
+
constructor(
|
|
55
|
+
exportDir: string = getDefaultExportDir(),
|
|
56
|
+
debounceMs: number = 1000
|
|
57
|
+
) {
|
|
58
|
+
this.exportDir = exportDir;
|
|
59
|
+
this.debounceMs = debounceMs;
|
|
60
|
+
this.state = {
|
|
61
|
+
timer: null,
|
|
62
|
+
changedFiles: new Set(),
|
|
63
|
+
lastImportAt: new Date().toISOString()
|
|
64
|
+
};
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* Start watching the export directory
|
|
69
|
+
*/
|
|
70
|
+
start(): void {
|
|
71
|
+
if (this.fsWatcher) {
|
|
72
|
+
logger.warn("Watcher already started");
|
|
73
|
+
return;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
if (!existsSync(this.exportDir)) {
|
|
77
|
+
logger.error("Export directory not found", { path: this.exportDir });
|
|
78
|
+
throw new Error(`Export directory not found: ${this.exportDir}`);
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
this.fsWatcher = watch(
|
|
82
|
+
this.exportDir,
|
|
83
|
+
{ recursive: false },
|
|
84
|
+
(eventType, filename) => {
|
|
85
|
+
if (!filename || !filename.endsWith(".jsonl")) {
|
|
86
|
+
return;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
// Skip deletion manifest (handled separately)
|
|
90
|
+
if (filename === ".deletions.jsonl") {
|
|
91
|
+
return;
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
logger.debug("File change detected", {
|
|
95
|
+
event: eventType,
|
|
96
|
+
file: filename
|
|
97
|
+
});
|
|
98
|
+
|
|
99
|
+
this.state.changedFiles.add(filename);
|
|
100
|
+
this.scheduleImport();
|
|
101
|
+
}
|
|
102
|
+
);
|
|
103
|
+
|
|
104
|
+
logger.info("Sync watcher started", {
|
|
105
|
+
export_dir: this.exportDir,
|
|
106
|
+
debounce_ms: this.debounceMs
|
|
107
|
+
});
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
/**
|
|
111
|
+
* Stop watching (cleanup)
|
|
112
|
+
*/
|
|
113
|
+
stop(): void {
|
|
114
|
+
if (this.state.timer) {
|
|
115
|
+
clearTimeout(this.state.timer);
|
|
116
|
+
this.state.timer = null;
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
if (this.fsWatcher) {
|
|
120
|
+
this.fsWatcher.close();
|
|
121
|
+
this.fsWatcher = null;
|
|
122
|
+
logger.info("Sync watcher stopped");
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
/**
|
|
127
|
+
* Schedule import with debouncing
|
|
128
|
+
*/
|
|
129
|
+
private scheduleImport(): void {
|
|
130
|
+
if (this.state.timer) {
|
|
131
|
+
clearTimeout(this.state.timer);
|
|
132
|
+
}
|
|
133
|
+
|
|
134
|
+
this.state.timer = setTimeout(() => {
|
|
135
|
+
this.executeImport().catch((error) => {
|
|
136
|
+
logger.error(
|
|
137
|
+
"Scheduled import failed",
|
|
138
|
+
error instanceof Error ? error : new Error(String(error))
|
|
139
|
+
);
|
|
140
|
+
});
|
|
141
|
+
}, this.debounceMs);
|
|
142
|
+
|
|
143
|
+
logger.debug("Import scheduled (debounced)", {
|
|
144
|
+
changed_files: Array.from(this.state.changedFiles)
|
|
145
|
+
});
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
/**
|
|
149
|
+
* Execute import for all changed files
|
|
150
|
+
*/
|
|
151
|
+
private async executeImport(): Promise<void> {
|
|
152
|
+
const changedFiles = Array.from(this.state.changedFiles);
|
|
153
|
+
this.state.changedFiles.clear();
|
|
154
|
+
|
|
155
|
+
logger.info("Starting automatic import", {
|
|
156
|
+
files: changedFiles,
|
|
157
|
+
count: changedFiles.length
|
|
158
|
+
});
|
|
159
|
+
|
|
160
|
+
const startTime = Date.now();
|
|
161
|
+
|
|
162
|
+
try {
|
|
163
|
+
const db = getClient() as KotaDatabase;
|
|
164
|
+
|
|
165
|
+
// Import JSONL files
|
|
166
|
+
const result = await importFromJSONL(db, this.exportDir);
|
|
167
|
+
|
|
168
|
+
// Apply deletion manifest if present
|
|
169
|
+
const deletionManifestPath = join(this.exportDir, ".deletions.jsonl");
|
|
170
|
+
if (existsSync(deletionManifestPath)) {
|
|
171
|
+
await applyDeletionManifest(db, deletionManifestPath);
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
const duration = Date.now() - startTime;
|
|
175
|
+
|
|
176
|
+
logger.info("Automatic import completed", {
|
|
177
|
+
tables_imported: result.tablesImported,
|
|
178
|
+
rows_imported: result.totalRowsImported,
|
|
179
|
+
duration_ms: duration,
|
|
180
|
+
errors: result.errors
|
|
181
|
+
});
|
|
182
|
+
|
|
183
|
+
this.state.lastImportAt = new Date().toISOString();
|
|
184
|
+
} catch (error) {
|
|
185
|
+
logger.error(
|
|
186
|
+
"Import failed",
|
|
187
|
+
error instanceof Error ? error : new Error(String(error)),
|
|
188
|
+
{
|
|
189
|
+
changed_files: changedFiles
|
|
190
|
+
}
|
|
191
|
+
);
|
|
192
|
+
}
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
/**
|
|
196
|
+
* Get current watcher state (for debugging)
|
|
197
|
+
*/
|
|
198
|
+
getState(): {
|
|
199
|
+
isRunning: boolean;
|
|
200
|
+
lastImportAt: string;
|
|
201
|
+
pendingFiles: string[];
|
|
202
|
+
} {
|
|
203
|
+
return {
|
|
204
|
+
isRunning: this.fsWatcher !== null,
|
|
205
|
+
lastImportAt: this.state.lastImportAt,
|
|
206
|
+
pendingFiles: Array.from(this.state.changedFiles)
|
|
207
|
+
};
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
/**
|
|
212
|
+
* Factory function to create and start watcher
|
|
213
|
+
*/
|
|
214
|
+
export function createWatcher(
|
|
215
|
+
exportDir?: string,
|
|
216
|
+
debounceMs?: number
|
|
217
|
+
): SyncWatcher {
|
|
218
|
+
const watcher = new SyncWatcher(exportDir, debounceMs);
|
|
219
|
+
watcher.start();
|
|
220
|
+
return watcher;
|
|
221
|
+
}
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Rate limiting types for KotaDB API.
|
|
3
|
+
*
|
|
4
|
+
* Types for rate limit enforcement and response headers.
|
|
5
|
+
* Backend-only types (moved from shared/ to resolve Docker build context issue).
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type { Tier } from "@shared/types/auth";
|
|
9
|
+
|
|
10
|
+
/**
 * Rate limit enforcement result.
 * Contains the current status plus the metadata needed to populate
 * the response headers in {@link RateLimitHeaders}.
 */
export interface RateLimitResult {
  /** Whether the request is allowed (within limit) */
  allowed: boolean;

  /** Requests remaining in the current window */
  remaining: number;

  /** Seconds until the window resets (only set when the limit is exceeded) */
  retryAfter?: number;

  /** Unix timestamp when the window resets */
  resetAt: number;

  /** Total limit for this key's tier */
  limit: number;
}
|
|
30
|
+
|
|
31
|
+
/**
 * Rate limit response headers.
 * Standard header names carrying rate limit metadata back to clients.
 */
export interface RateLimitHeaders {
  /** Total requests allowed per hour for the tier */
  "X-RateLimit-Limit": string;

  /** Requests remaining in the current window */
  "X-RateLimit-Remaining": string;

  /** Unix timestamp when the limit resets */
  "X-RateLimit-Reset": string;

  /** Seconds until retry (only present in 429 responses) */
  "Retry-After"?: string;
}
|
|
48
|
+
|
|
49
|
+
/**
 * Rate limit configuration for a single tier.
 * Defines the request quota for one subscription level.
 */
export interface RateLimitConfig {
  /** Subscription tier this quota applies to */
  tier: Tier;

  /** Requests allowed per hour */
  requestsPerHour: number;
}
|
|
60
|
+
|
|
61
|
+
/**
|
|
62
|
+
* Import centralized rate limit configuration.
|
|
63
|
+
* Re-exported for backward compatibility.
|
|
64
|
+
*/
|
|
65
|
+
import { RATE_LIMITS as CONFIG_RATE_LIMITS } from "@config/constants";
|
|
66
|
+
|
|
67
|
+
/**
|
|
68
|
+
* Standard hourly rate limit configurations by tier.
|
|
69
|
+
* Updated in #423 to support realistic development workflows.
|
|
70
|
+
* Re-exported from @config/constants for convenience.
|
|
71
|
+
*/
|
|
72
|
+
export const RATE_LIMITS: Record<Tier, number> = {
|
|
73
|
+
free: CONFIG_RATE_LIMITS.FREE.HOURLY,
|
|
74
|
+
solo: CONFIG_RATE_LIMITS.SOLO.HOURLY,
|
|
75
|
+
team: CONFIG_RATE_LIMITS.TEAM.HOURLY,
|
|
76
|
+
};
|
|
77
|
+
|
|
78
|
+
/**
|
|
79
|
+
* Daily rate limit configurations by tier.
|
|
80
|
+
* Provides cost protection while enabling burst usage patterns.
|
|
81
|
+
* Both hourly and daily limits are enforced; whichever is reached first blocks requests.
|
|
82
|
+
* Re-exported from @config/constants for convenience.
|
|
83
|
+
*/
|
|
84
|
+
export const DAILY_RATE_LIMITS: Record<Tier, number> = {
|
|
85
|
+
free: CONFIG_RATE_LIMITS.FREE.DAILY,
|
|
86
|
+
solo: CONFIG_RATE_LIMITS.SOLO.DAILY,
|
|
87
|
+
team: CONFIG_RATE_LIMITS.TEAM.DAILY,
|
|
88
|
+
};
|