@hatk/hatk 0.0.1-alpha.6 → 0.0.1-alpha.60
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/adapter.d.ts +19 -0
- package/dist/adapter.d.ts.map +1 -0
- package/dist/adapter.js +108 -0
- package/dist/backfill.d.ts +2 -2
- package/dist/backfill.d.ts.map +1 -1
- package/dist/backfill.js +78 -31
- package/dist/car.d.ts +42 -10
- package/dist/car.d.ts.map +1 -1
- package/dist/car.js +154 -14
- package/dist/cli.js +243 -1043
- package/dist/config.d.ts +31 -1
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +40 -9
- package/dist/database/adapter-factory.d.ts +6 -0
- package/dist/database/adapter-factory.d.ts.map +1 -0
- package/dist/database/adapter-factory.js +20 -0
- package/dist/database/adapters/duckdb-search.d.ts +12 -0
- package/dist/database/adapters/duckdb-search.d.ts.map +1 -0
- package/dist/database/adapters/duckdb-search.js +27 -0
- package/dist/database/adapters/duckdb.d.ts +25 -0
- package/dist/database/adapters/duckdb.d.ts.map +1 -0
- package/dist/database/adapters/duckdb.js +161 -0
- package/dist/database/adapters/sqlite-search.d.ts +23 -0
- package/dist/database/adapters/sqlite-search.d.ts.map +1 -0
- package/dist/database/adapters/sqlite-search.js +74 -0
- package/dist/database/adapters/sqlite.d.ts +18 -0
- package/dist/database/adapters/sqlite.d.ts.map +1 -0
- package/dist/database/adapters/sqlite.js +88 -0
- package/dist/{db.d.ts → database/db.d.ts} +57 -6
- package/dist/database/db.d.ts.map +1 -0
- package/dist/{db.js → database/db.js} +730 -549
- package/dist/database/dialect.d.ts +45 -0
- package/dist/database/dialect.d.ts.map +1 -0
- package/dist/database/dialect.js +72 -0
- package/dist/{fts.d.ts → database/fts.d.ts} +7 -0
- package/dist/database/fts.d.ts.map +1 -0
- package/dist/{fts.js → database/fts.js} +116 -32
- package/dist/database/index.d.ts +7 -0
- package/dist/database/index.d.ts.map +1 -0
- package/dist/database/index.js +6 -0
- package/dist/database/ports.d.ts +50 -0
- package/dist/database/ports.d.ts.map +1 -0
- package/dist/database/ports.js +1 -0
- package/dist/{schema.d.ts → database/schema.d.ts} +14 -3
- package/dist/database/schema.d.ts.map +1 -0
- package/dist/{schema.js → database/schema.js} +81 -41
- package/dist/dev-entry.d.ts +8 -0
- package/dist/dev-entry.d.ts.map +1 -0
- package/dist/dev-entry.js +112 -0
- package/dist/feeds.d.ts +12 -8
- package/dist/feeds.d.ts.map +1 -1
- package/dist/feeds.js +51 -6
- package/dist/hooks.d.ts +85 -0
- package/dist/hooks.d.ts.map +1 -0
- package/dist/hooks.js +161 -0
- package/dist/hydrate.d.ts +7 -6
- package/dist/hydrate.d.ts.map +1 -1
- package/dist/hydrate.js +4 -16
- package/dist/indexer.d.ts +22 -0
- package/dist/indexer.d.ts.map +1 -1
- package/dist/indexer.js +123 -32
- package/dist/labels.d.ts +36 -0
- package/dist/labels.d.ts.map +1 -1
- package/dist/labels.js +71 -6
- package/dist/lexicon-resolve.d.ts.map +1 -1
- package/dist/lexicon-resolve.js +27 -112
- package/dist/lexicons/com/atproto/label/defs.json +75 -0
- package/dist/lexicons/com/atproto/moderation/defs.json +30 -0
- package/dist/lexicons/com/atproto/repo/strongRef.json +24 -0
- package/dist/lexicons/dev/hatk/applyWrites.json +87 -0
- package/dist/lexicons/dev/hatk/createRecord.json +40 -0
- package/dist/lexicons/dev/hatk/createReport.json +48 -0
- package/dist/lexicons/dev/hatk/deleteRecord.json +25 -0
- package/dist/lexicons/dev/hatk/describeCollections.json +41 -0
- package/dist/lexicons/dev/hatk/describeFeeds.json +29 -0
- package/dist/lexicons/dev/hatk/describeLabels.json +45 -0
- package/dist/lexicons/dev/hatk/getFeed.json +30 -0
- package/dist/lexicons/dev/hatk/getPreferences.json +19 -0
- package/dist/lexicons/dev/hatk/getRecord.json +26 -0
- package/dist/lexicons/dev/hatk/getRecords.json +32 -0
- package/dist/lexicons/dev/hatk/putPreference.json +28 -0
- package/dist/lexicons/dev/hatk/putRecord.json +41 -0
- package/dist/lexicons/dev/hatk/searchRecords.json +32 -0
- package/dist/lexicons/dev/hatk/uploadBlob.json +23 -0
- package/dist/logger.d.ts +29 -0
- package/dist/logger.d.ts.map +1 -1
- package/dist/logger.js +29 -0
- package/dist/main.js +137 -67
- package/dist/mst.d.ts +18 -1
- package/dist/mst.d.ts.map +1 -1
- package/dist/mst.js +19 -8
- package/dist/oauth/db.d.ts +3 -1
- package/dist/oauth/db.d.ts.map +1 -1
- package/dist/oauth/db.js +48 -19
- package/dist/oauth/server.d.ts +24 -0
- package/dist/oauth/server.d.ts.map +1 -1
- package/dist/oauth/server.js +198 -22
- package/dist/oauth/session.d.ts +11 -0
- package/dist/oauth/session.d.ts.map +1 -0
- package/dist/oauth/session.js +65 -0
- package/dist/opengraph.d.ts +10 -0
- package/dist/opengraph.d.ts.map +1 -1
- package/dist/opengraph.js +80 -40
- package/dist/pds-proxy.d.ts +60 -0
- package/dist/pds-proxy.d.ts.map +1 -0
- package/dist/pds-proxy.js +277 -0
- package/dist/push.d.ts +34 -0
- package/dist/push.d.ts.map +1 -0
- package/dist/push.js +184 -0
- package/dist/renderer.d.ts +27 -0
- package/dist/renderer.d.ts.map +1 -0
- package/dist/renderer.js +46 -0
- package/dist/resolve-hatk.d.ts +6 -0
- package/dist/resolve-hatk.d.ts.map +1 -0
- package/dist/resolve-hatk.js +20 -0
- package/dist/response.d.ts +16 -0
- package/dist/response.d.ts.map +1 -0
- package/dist/response.js +69 -0
- package/dist/scanner.d.ts +21 -0
- package/dist/scanner.d.ts.map +1 -0
- package/dist/scanner.js +88 -0
- package/dist/seed.d.ts +19 -0
- package/dist/seed.d.ts.map +1 -1
- package/dist/seed.js +43 -4
- package/dist/server-init.d.ts +8 -0
- package/dist/server-init.d.ts.map +1 -0
- package/dist/server-init.js +62 -0
- package/dist/server.d.ts +26 -3
- package/dist/server.d.ts.map +1 -1
- package/dist/server.js +629 -635
- package/dist/setup.d.ts +28 -1
- package/dist/setup.d.ts.map +1 -1
- package/dist/setup.js +50 -3
- package/dist/templates/feed.tpl +14 -0
- package/dist/templates/hook.tpl +5 -0
- package/dist/templates/label.tpl +15 -0
- package/dist/templates/og.tpl +17 -0
- package/dist/templates/seed.tpl +11 -0
- package/dist/templates/setup.tpl +5 -0
- package/dist/templates/test-feed.tpl +19 -0
- package/dist/templates/test-xrpc.tpl +19 -0
- package/dist/templates/xrpc.tpl +41 -0
- package/dist/test.d.ts +1 -1
- package/dist/test.d.ts.map +1 -1
- package/dist/test.js +39 -32
- package/dist/views.js +1 -1
- package/dist/vite-plugin.d.ts +1 -1
- package/dist/vite-plugin.d.ts.map +1 -1
- package/dist/vite-plugin.js +254 -66
- package/dist/xrpc.d.ts +75 -11
- package/dist/xrpc.d.ts.map +1 -1
- package/dist/xrpc.js +189 -39
- package/package.json +14 -7
- package/public/admin.html +133 -54
- package/dist/db.d.ts.map +0 -1
- package/dist/fts.d.ts.map +0 -1
- package/dist/oauth/hooks.d.ts +0 -10
- package/dist/oauth/hooks.d.ts.map +0 -1
- package/dist/oauth/hooks.js +0 -40
- package/dist/schema.d.ts.map +0 -1
- package/dist/test-browser.d.ts +0 -14
- package/dist/test-browser.d.ts.map +0 -1
- package/dist/test-browser.js +0 -26
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import type { Dialect } from './ports.ts';
/**
 * Engine-specific SQL generation surface. Each supported engine (DuckDB,
 * SQLite) provides one of these so schema creation, querying, and FTS code
 * can emit portable SQL without branching on the engine everywhere.
 * Obtain one via getDialect().
 */
export interface SqlDialect {
    /** Map from lexicon type key to SQL column type */
    typeMap: Record<string, string>;
    /** Timestamp type name */
    timestampType: string;
    /** JSON type name */
    jsonType: string;
    /** Parameter placeholder for index (1-based). DuckDB/Postgres: $1 SQLite: ? */
    param(index: number): string;
    /** Whether the engine supports native bulk appenders (DuckDB) vs batched INSERT */
    supportsAppender: boolean;
    /** SQL for upsert — 'INSERT OR REPLACE' (DuckDB/SQLite) vs 'ON CONFLICT DO UPDATE' */
    upsertPrefix: string;
    /** Extract a string value from a JSON column. Returns SQL expression. */
    jsonExtractString(column: string, path: string): string;
    /** Aggregate strings from a JSON array. Returns SQL expression. */
    jsonArrayStringAgg(column: string, jsonPath: string): string;
    /** Information schema query to list user tables */
    listTablesQuery: string;
    /** CHECKPOINT or equivalent (for WAL compaction). null if not needed. */
    checkpointSQL: string | null;
    /** Current timestamp expression */
    currentTimestamp: string;
    /** ILIKE or equivalent for case-insensitive matching */
    ilike: string;
    /** Cast expression for safe timestamp parsing. DuckDB: TRY_CAST(x AS TIMESTAMP), SQLite: x */
    tryCastTimestamp(expr: string): string;
    /** COUNT(*)::INTEGER or equivalent */
    countAsInteger: string;
    /** GREATEST(...) or MAX(...) for multi-arg max */
    greatest(exprs: string[]): string;
    /** jaro_winkler_similarity or null if unsupported */
    jaroWinklerSimilarity: string | null;
    /** string_agg or group_concat */
    stringAgg(column: string, separator: string): string;
    /** CREATE SEQUENCE support */
    supportsSequences: boolean;
    /** SQL to get columns for a table. Returns rows with column_name/name and data_type/type. */
    introspectColumnsQuery(tableName: string): string;
}
/** Dialect descriptor for DuckDB. */
export declare const DUCKDB_DIALECT: SqlDialect;
/** Dialect descriptor for SQLite. */
export declare const SQLITE_DIALECT: SqlDialect;
/** Resolve a Dialect identifier to its SqlDialect descriptor. */
export declare function getDialect(dialect: Dialect): SqlDialect;
//# sourceMappingURL=dialect.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dialect.d.ts","sourceRoot":"","sources":["../../src/database/dialect.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,YAAY,CAAA;AAEzC,MAAM,WAAW,UAAU;IACzB,mDAAmD;IACnD,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAE/B,0BAA0B;IAC1B,aAAa,EAAE,MAAM,CAAA;IAErB,qBAAqB;IACrB,QAAQ,EAAE,MAAM,CAAA;IAEhB,gFAAgF;IAChF,KAAK,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAAA;IAE5B,mFAAmF;IACnF,gBAAgB,EAAE,OAAO,CAAA;IAEzB,sFAAsF;IACtF,YAAY,EAAE,MAAM,CAAA;IAEpB,yEAAyE;IACzE,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,GAAG,MAAM,CAAA;IAEvD,mEAAmE;IACnE,kBAAkB,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM,CAAA;IAE5D,mDAAmD;IACnD,eAAe,EAAE,MAAM,CAAA;IAEvB,yEAAyE;IACzE,aAAa,EAAE,MAAM,GAAG,IAAI,CAAA;IAE5B,mCAAmC;IACnC,gBAAgB,EAAE,MAAM,CAAA;IAExB,wDAAwD;IACxD,KAAK,EAAE,MAAM,CAAA;IAEb,8FAA8F;IAC9F,gBAAgB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAAA;IAEtC,sCAAsC;IACtC,cAAc,EAAE,MAAM,CAAA;IAEtB,kDAAkD;IAClD,QAAQ,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,MAAM,CAAA;IAEjC,qDAAqD;IACrD,qBAAqB,EAAE,MAAM,GAAG,IAAI,CAAA;IAEpC,iCAAiC;IACjC,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,MAAM,CAAA;IAEpD,8BAA8B;IAC9B,iBAAiB,EAAE,OAAO,CAAA;IAE1B,6FAA6F;IAC7F,sBAAsB,CAAC,SAAS,EAAE,MAAM,GAAG,MAAM,CAAA;CAClD;AAED,eAAO,MAAM,cAAc,EAAE,UA8B5B,CAAA;AAED,eAAO,MAAM,cAAc,EAAE,UA+B5B,CAAA;AAED,wBAAgB,UAAU,CAAC,OAAO,EAAE,OAAO,GAAG,UAAU,CASvD"}
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
/**
 * SqlDialect descriptor for DuckDB.
 *
 * DuckDB uses Postgres-style positional placeholders ($1, $2, …), has native
 * JSON helper functions, supports bulk appenders and sequences, and needs an
 * explicit CHECKPOINT to compact its WAL.
 */
export const DUCKDB_DIALECT = {
    // Lexicon type key → DuckDB column type
    typeMap: {
        text: 'TEXT',
        integer: 'INTEGER',
        bigint: 'BIGINT',
        boolean: 'BOOLEAN',
        blob: 'BLOB',
        timestamp: 'TIMESTAMP',
        timestamptz: 'TIMESTAMPTZ',
        json: 'JSON',
    },
    timestampType: 'TIMESTAMP',
    jsonType: 'JSON',
    param: (i) => `$${i}`,
    supportsAppender: true,
    upsertPrefix: 'INSERT OR REPLACE INTO',
    // NOTE: path is interpolated into a SQL string literal; callers pass
    // internally-generated lexicon paths, not user input.
    jsonExtractString: (col, path) => `json_extract_string(${col}, '${path}')`,
    jsonArrayStringAgg: (col, path) => `list_string_agg(json_extract_string(${col}, '${path}'))`,
    // FIX: ESCAPE must be a single character. The previous template rendered
    // ESCAPE '\\' (two backslashes in the SQL text), which the engine rejects.
    // '\\' in this template literal renders as one backslash in the SQL.
    listTablesQuery: `SELECT table_name FROM information_schema.tables WHERE table_schema = 'main' AND table_name NOT LIKE '\\_%' ESCAPE '\\'`,
    checkpointSQL: 'CHECKPOINT',
    currentTimestamp: 'CURRENT_TIMESTAMP',
    ilike: 'ILIKE',
    tryCastTimestamp: (expr) => `TRY_CAST(${expr} AS TIMESTAMP)`,
    countAsInteger: 'COUNT(*)::INTEGER',
    greatest: (exprs) => `GREATEST(${exprs.join(', ')})`,
    jaroWinklerSimilarity: 'jaro_winkler_similarity',
    stringAgg: (col, sep) => `string_agg(${col}, ${sep})`,
    supportsSequences: true,
    introspectColumnsQuery: (tableName) => `SELECT column_name, data_type FROM information_schema.columns WHERE table_name = '${tableName}'`,
};
|
|
31
|
+
/**
 * SqlDialect descriptor for SQLite.
 *
 * SQLite has no real BIGINT/BOOLEAN/TIMESTAMP/JSON column types (all map to
 * TEXT/INTEGER affinities), uses '?' placeholders, has no bulk appender,
 * no ILIKE (LIKE is case-insensitive for ASCII), and no sequences.
 */
export const SQLITE_DIALECT = {
    // Lexicon type key → SQLite column type (storage-class affinity)
    typeMap: {
        text: 'TEXT',
        integer: 'INTEGER',
        bigint: 'INTEGER',
        boolean: 'INTEGER',
        blob: 'BLOB',
        timestamp: 'TEXT',
        timestamptz: 'TEXT',
        json: 'TEXT',
    },
    timestampType: 'TEXT',
    jsonType: 'TEXT',
    param: (_i) => '?',
    supportsAppender: false,
    upsertPrefix: 'INSERT OR REPLACE INTO',
    jsonExtractString: (col, path) => `json_extract(${col}, '${path}')`,
    // SQLite has no list_string_agg; expand the JSON array with json_each
    // and concatenate the values with group_concat.
    jsonArrayStringAgg: (col, path) => {
        return `(SELECT group_concat(je.value, ' ') FROM json_each(${col}, '${path}') je)`;
    },
    // FIX: ESCAPE must be a single character. The previous template rendered
    // ESCAPE '\\' (two backslashes in the SQL text), which SQLite rejects
    // ("ESCAPE expression must be a single character").
    listTablesQuery: `SELECT name AS table_name FROM sqlite_master WHERE type='table' AND name NOT LIKE '\\_%' ESCAPE '\\'`,
    checkpointSQL: null,
    currentTimestamp: 'CURRENT_TIMESTAMP',
    ilike: 'LIKE',
    // SQLite LIKE/date functions accept ISO-8601 text directly; no cast needed.
    tryCastTimestamp: (expr) => expr,
    countAsInteger: 'CAST(COUNT(*) AS INTEGER)',
    // SQLite's scalar MAX(a, b, …) is the multi-arg maximum (GREATEST equivalent).
    greatest: (exprs) => `MAX(${exprs.join(', ')})`,
    jaroWinklerSimilarity: null,
    stringAgg: (col, sep) => `group_concat(${col}, ${sep})`,
    supportsSequences: false,
    introspectColumnsQuery: (tableName) => `PRAGMA table_info("${tableName}")`,
};
|
|
63
|
+
/**
 * Resolve a Dialect identifier to its SqlDialect descriptor.
 *
 * @param {('duckdb'|'sqlite'|'postgres')} dialect - engine identifier
 * @returns {object} the matching dialect descriptor
 * @throws {Error} for 'postgres' (not yet implemented) or any unknown value
 */
export function getDialect(dialect) {
    switch (dialect) {
        case 'duckdb':
            return DUCKDB_DIALECT;
        case 'sqlite':
            return SQLITE_DIALECT;
        case 'postgres':
            throw new Error('PostgreSQL adapter not yet implemented');
        default:
            // FIX: previously fell through and returned undefined, deferring
            // the failure to the first property access. Fail fast instead.
            throw new Error(`Unknown database dialect: ${dialect}`);
    }
}
|
|
@@ -1,3 +1,7 @@
|
|
|
1
|
+
import type { SearchPort } from './ports.ts';
|
|
2
|
+
export declare function setSearchPort(port: SearchPort | null): void;
|
|
3
|
+
export declare function hasSearchPort(): boolean;
|
|
4
|
+
export declare function getSearchPort(): SearchPort | null;
|
|
1
5
|
export declare function getSearchColumns(collection: string): string[];
|
|
2
6
|
export declare function getLastRebuiltAt(collection: string): string | null;
|
|
3
7
|
/**
|
|
@@ -11,6 +15,9 @@ export declare function ftsTableName(collection: string): string;
|
|
|
11
15
|
* using Porter stemmer with English stopwords.
|
|
12
16
|
*/
|
|
13
17
|
export declare function buildFtsIndex(collection: string): Promise<void>;
|
|
18
|
+
export declare function buildFtsRow(collection: string, uri: string): Promise<Record<string, string | null> | null>;
|
|
19
|
+
export declare function updateFtsRecord(collection: string, uri: string): Promise<void>;
|
|
20
|
+
export declare function deleteFtsRecord(collection: string, uri: string): Promise<void>;
|
|
14
21
|
/**
|
|
15
22
|
* Strip English stop words from a search query, preserving non-stop-word terms.
|
|
16
23
|
* Returns the cleaned query string. If all words are stop words, returns the original query.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"fts.d.ts","sourceRoot":"","sources":["../../src/database/fts.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,YAAY,CAAA;AAuE5C,wBAAgB,aAAa,CAAC,IAAI,EAAE,UAAU,GAAG,IAAI,GAAG,IAAI,CAE3D;AAED,wBAAgB,aAAa,IAAI,OAAO,CAEvC;AAED,wBAAgB,aAAa,IAAI,UAAU,GAAG,IAAI,CAEjD;AAWD,wBAAgB,gBAAgB,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,EAAE,CAE7D;AAED,wBAAgB,gBAAgB,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAElE;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAEvD;AAgFD;;;;GAIG;AACH,wBAAsB,aAAa,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAmBrE;AAED,wBAAsB,WAAW,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,GAAG,IAAI,CAAC,CAehH;AAED,wBAAsB,eAAe,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAepF;AAED,wBAAsB,eAAe,CAAC,UAAU,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAYpF;AAokBD;;;GAGG;AACH,wBAAgB,cAAc,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAIpD;AAED,wBAAsB,iBAAiB,CAAC,WAAW,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CA4B5E"}
|
|
@@ -1,6 +1,6 @@
|
|
|
1
|
-
import { getSchema, runSQL } from "./db.js";
|
|
2
|
-
import { getLexicon } from "./schema.js";
|
|
3
|
-
import { emit, timer } from "
|
|
1
|
+
import { getSchema, runSQL, getSqlDialect, querySQL } from "./db.js";
|
|
2
|
+
import { getLexicon, q } from "./schema.js";
|
|
3
|
+
import { emit, timer } from "../logger.js";
|
|
4
4
|
/**
|
|
5
5
|
* Resolve a lexicon ref like "#artist" to its definition.
|
|
6
6
|
* Only handles local refs (same lexicon).
|
|
@@ -15,7 +15,7 @@ function resolveRef(ref, lexicon) {
|
|
|
15
15
|
* Given a JSON column and its lexicon property definition, produce
|
|
16
16
|
* search column expressions that extract searchable text.
|
|
17
17
|
*/
|
|
18
|
-
function jsonSearchColumns(colName, prop, lexicon) {
|
|
18
|
+
function jsonSearchColumns(colName, prop, lexicon, dialect) {
|
|
19
19
|
const columns = [];
|
|
20
20
|
// Strip table qualifier (e.g. "t.artists" → "artists") for use in aliases
|
|
21
21
|
const aliasBase = colName.includes('.') ? colName.split('.').pop() : colName;
|
|
@@ -26,7 +26,7 @@ function jsonSearchColumns(colName, prop, lexicon) {
|
|
|
26
26
|
if (itemDef.type === 'string') {
|
|
27
27
|
// array of strings — join into one text column
|
|
28
28
|
columns.push({
|
|
29
|
-
expr:
|
|
29
|
+
expr: dialect.jsonArrayStringAgg(colName, '$[*]'),
|
|
30
30
|
alias: `${aliasBase}_text`,
|
|
31
31
|
});
|
|
32
32
|
}
|
|
@@ -35,7 +35,7 @@ function jsonSearchColumns(colName, prop, lexicon) {
|
|
|
35
35
|
for (const [field, fieldProp] of Object.entries(itemDef.properties)) {
|
|
36
36
|
if (fieldProp.type === 'string') {
|
|
37
37
|
columns.push({
|
|
38
|
-
expr:
|
|
38
|
+
expr: dialect.jsonArrayStringAgg(colName, `$[*].${field}`),
|
|
39
39
|
alias: `${aliasBase}_${field}`,
|
|
40
40
|
});
|
|
41
41
|
}
|
|
@@ -47,7 +47,7 @@ function jsonSearchColumns(colName, prop, lexicon) {
|
|
|
47
47
|
for (const [field, fieldProp] of Object.entries(prop.properties)) {
|
|
48
48
|
if (fieldProp.type === 'string') {
|
|
49
49
|
columns.push({
|
|
50
|
-
expr:
|
|
50
|
+
expr: dialect.jsonExtractString(colName, `$.${field}`),
|
|
51
51
|
alias: `${aliasBase}_${field}`,
|
|
52
52
|
});
|
|
53
53
|
}
|
|
@@ -56,10 +56,22 @@ function jsonSearchColumns(colName, prop, lexicon) {
|
|
|
56
56
|
// blob, union, unknown — skip (no useful text to extract)
|
|
57
57
|
return columns;
|
|
58
58
|
}
|
|
59
|
+
// Module-level handle to the active SearchPort (null when the current
// database adapter provides no full-text-search capability).
let searchPort = null;

/** Install (or clear, with null) the active SearchPort implementation. */
export function setSearchPort(port) {
    searchPort = port;
}

/** True when a SearchPort is currently installed. */
export function hasSearchPort() {
    return searchPort != null;
}

/** The active SearchPort, or null when none is installed. */
export function getSearchPort() {
    return searchPort;
}
|
|
59
69
|
// Tracks when each collection's FTS index was last rebuilt
const lastRebuiltAt = new Map();
// Cache of search column metadata per collection, populated during buildFtsIndex
const searchColumnCache = new Map();
// Cache of computed FTS schemas per collection (deterministic, so compute once)
const ftsSchemaCache = new Map();
/**
 * Names of the columns indexed for full-text search on a collection.
 * Returns an empty array until buildFtsIndex has run for that collection.
 */
export function getSearchColumns(collection) {
    return searchColumnCache.get(collection) || [];
}
|
|
@@ -74,30 +86,32 @@ export function ftsTableName(collection) {
|
|
|
74
86
|
return '_fts_' + collection.replace(/\./g, '_');
|
|
75
87
|
}
|
|
76
88
|
/**
|
|
77
|
-
*
|
|
78
|
-
* Creates a shadow table copy and indexes all TEXT NOT NULL columns
|
|
79
|
-
* using Porter stemmer with English stopwords.
|
|
89
|
+
* Compute the FTS schema for a collection: search column names, source query, and safe table name.
|
|
80
90
|
*/
|
|
81
|
-
|
|
91
|
+
function computeFtsSchema(collection) {
|
|
92
|
+
const cached = ftsSchemaCache.get(collection);
|
|
93
|
+
if (cached)
|
|
94
|
+
return cached;
|
|
82
95
|
const schema = getSchema(collection);
|
|
83
96
|
if (!schema)
|
|
84
97
|
throw new Error(`Unknown collection: ${collection}`);
|
|
85
98
|
const lexicon = getLexicon(collection);
|
|
86
99
|
const record = lexicon?.defs?.main?.record;
|
|
87
100
|
// Build column list for shadow table
|
|
101
|
+
const dialect = getSqlDialect();
|
|
88
102
|
const selectExprs = ['t.uri', 't.cid', 't.did', 't.indexed_at'];
|
|
89
103
|
const searchColNames = [];
|
|
90
104
|
for (const col of schema.columns) {
|
|
91
|
-
if (col.
|
|
92
|
-
selectExprs.push(`t.${col.name}`);
|
|
105
|
+
if (col.sqlType === 'TEXT') {
|
|
106
|
+
selectExprs.push(`t.${q(col.name)}`);
|
|
93
107
|
searchColNames.push(col.name);
|
|
94
108
|
}
|
|
95
|
-
else if (col.
|
|
109
|
+
else if (col.isJson && record?.properties) {
|
|
96
110
|
const prop = record.properties[col.originalName];
|
|
97
111
|
if (prop?.type === 'blob')
|
|
98
112
|
continue; // skip blobs
|
|
99
113
|
if (prop && lexicon) {
|
|
100
|
-
const derived = jsonSearchColumns(`t.${col.name}`, prop, lexicon);
|
|
114
|
+
const derived = jsonSearchColumns(`t.${q(col.name)}`, prop, lexicon, dialect);
|
|
101
115
|
if (derived.length > 0) {
|
|
102
116
|
for (const d of derived) {
|
|
103
117
|
selectExprs.push(`${d.expr} AS ${d.alias}`);
|
|
@@ -107,16 +121,17 @@ export async function buildFtsIndex(collection) {
|
|
|
107
121
|
}
|
|
108
122
|
}
|
|
109
123
|
// Fallback: cast JSON to TEXT
|
|
110
|
-
selectExprs.push(`CAST(t.${col.name} AS TEXT) AS ${col.name}`);
|
|
124
|
+
selectExprs.push(`CAST(t.${q(col.name)} AS TEXT) AS ${q(col.name)}`);
|
|
111
125
|
searchColNames.push(col.name);
|
|
112
126
|
}
|
|
113
127
|
}
|
|
114
128
|
// Include searchable text from child tables (decomposed array fields)
|
|
115
129
|
for (const child of schema.children) {
|
|
116
130
|
for (const col of child.columns) {
|
|
117
|
-
if (col.
|
|
131
|
+
if (col.sqlType === 'TEXT') {
|
|
118
132
|
const alias = `${child.fieldName}_${col.name}`;
|
|
119
|
-
|
|
133
|
+
const agg = dialect.stringAgg(`c.${q(col.name)}`, "' '");
|
|
134
|
+
selectExprs.push(`(SELECT ${agg} FROM ${child.tableName} c WHERE c.parent_uri = t.uri) AS ${alias}`);
|
|
120
135
|
searchColNames.push(alias);
|
|
121
136
|
}
|
|
122
137
|
}
|
|
@@ -125,9 +140,10 @@ export async function buildFtsIndex(collection) {
|
|
|
125
140
|
for (const union of schema.unions) {
|
|
126
141
|
for (const branch of union.branches) {
|
|
127
142
|
for (const col of branch.columns) {
|
|
128
|
-
if (col.
|
|
143
|
+
if (col.sqlType === 'TEXT') {
|
|
129
144
|
const alias = `${union.fieldName}_${branch.branchName}_${col.name}`;
|
|
130
|
-
|
|
145
|
+
const agg = dialect.stringAgg(`c.${q(col.name)}`, "' '");
|
|
146
|
+
selectExprs.push(`(SELECT ${agg} FROM ${branch.tableName} c WHERE c.parent_uri = t.uri) AS ${alias}`);
|
|
131
147
|
searchColNames.push(alias);
|
|
132
148
|
}
|
|
133
149
|
}
|
|
@@ -136,23 +152,83 @@ export async function buildFtsIndex(collection) {
|
|
|
136
152
|
// Include handle from _repos for people search
|
|
137
153
|
selectExprs.push('r.handle');
|
|
138
154
|
searchColNames.push('handle');
|
|
139
|
-
if (searchColNames.length === 0) {
|
|
140
|
-
return;
|
|
141
|
-
}
|
|
142
155
|
const safeName = ftsTableName(collection);
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
156
|
+
const sourceQuery = `SELECT ${selectExprs.join(', ')} FROM ${schema.tableName} t LEFT JOIN _repos r ON t.did = r.did`;
|
|
157
|
+
const result = { searchColNames, sourceQuery, safeName };
|
|
158
|
+
ftsSchemaCache.set(collection, result);
|
|
159
|
+
return result;
|
|
160
|
+
}
|
|
161
|
+
/**
|
|
162
|
+
* Build FTS index for a collection.
|
|
163
|
+
* Creates a shadow table copy and indexes all TEXT NOT NULL columns
|
|
164
|
+
* using Porter stemmer with English stopwords.
|
|
165
|
+
*/
|
|
166
|
+
/**
 * Build the FTS index for a collection.
 *
 * No-op when the active adapter exposes no search port or the collection
 * yields no searchable columns. Ports implementing indexExists() skip the
 * rebuild when an index is already present; in every successful path the
 * search-column cache and rebuild timestamp are refreshed.
 */
export async function buildFtsIndex(collection) {
    const port = searchPort;
    if (!port)
        return; // adapter has no FTS support
    const fts = computeFtsSchema(collection);
    if (fts.searchColNames.length === 0)
        return;
    const markBuilt = () => {
        searchColumnCache.set(collection, fts.searchColNames);
        lastRebuiltAt.set(collection, new Date().toISOString());
    };
    // Incremental ports can report that the index is already on disk,
    // letting us skip the expensive rebuild on startup.
    if (port.indexExists && (await port.indexExists(fts.safeName))) {
        markBuilt();
        return;
    }
    await port.buildIndex(fts.safeName, fts.sourceQuery, fts.searchColNames);
    markBuilt();
}
|
|
185
|
+
/**
 * Compute the FTS row values for a single record, keyed by search column name.
 *
 * Runs the collection's FTS source query filtered to one URI and stringifies
 * each searchable value (null is preserved).
 *
 * @returns the column→value map, or null when the collection has no
 *          searchable columns or the record does not exist.
 */
export async function buildFtsRow(collection, uri) {
    const { searchColNames, sourceQuery } = computeFtsSchema(collection);
    if (searchColNames.length === 0)
        return null;
    // FIX: the placeholder was hard-coded as '$1' (DuckDB/Postgres syntax),
    // which breaks positional binding on the SQLite adapter ('?'). Use the
    // active dialect's placeholder instead.
    const sql = `${sourceQuery} WHERE t.uri = ${getSqlDialect().param(1)}`;
    const rows = await querySQL(sql, [uri]);
    if (!rows || rows.length === 0)
        return null;
    const [row] = rows;
    const result = {};
    for (const col of searchColNames) {
        result[col] = row[col] != null ? String(row[col]) : null;
    }
    return result;
}
|
|
201
|
+
/**
 * Incrementally refresh one record in a collection's FTS index.
 *
 * Best-effort: silently skips when the port lacks updateIndex support, the
 * collection was never indexed, or the record yields no row. Unexpected
 * failures are reported via emit() instead of thrown, so index maintenance
 * never breaks the record write path.
 */
export async function updateFtsRecord(collection, uri) {
    const port = searchPort;
    if (!port?.updateIndex)
        return;
    const cols = searchColumnCache.get(collection);
    if (!cols?.length)
        return;
    try {
        const rowValues = await buildFtsRow(collection, uri);
        if (rowValues) {
            await port.updateIndex(ftsTableName(collection), uri, rowValues, cols);
        }
    }
    catch (err) {
        // Deliberate swallow: log and continue rather than failing the write.
        emit('fts', 'update_error', { collection, uri, error: err.message });
    }
}
|
|
218
|
+
/**
 * Remove one record from a collection's FTS index.
 *
 * Best-effort: silently skips when the port lacks deleteFromIndex support or
 * the collection was never indexed. Unexpected failures are reported via
 * emit() instead of thrown, so index maintenance never breaks deletes.
 */
export async function deleteFtsRecord(collection, uri) {
    const port = searchPort;
    if (!port?.deleteFromIndex)
        return;
    const cols = searchColumnCache.get(collection);
    if (!cols?.length)
        return;
    try {
        await port.deleteFromIndex(ftsTableName(collection), uri, cols);
    }
    catch (err) {
        // Deliberate swallow: log and continue rather than failing the delete.
        emit('fts', 'delete_error', { collection, uri, error: err.message });
    }
}
|
|
156
232
|
/**
|
|
157
233
|
* Rebuild FTS indexes for all registered collections.
|
|
158
234
|
*/
|
|
@@ -752,6 +828,14 @@ export async function rebuildAllIndexes(collections) {
|
|
|
752
828
|
errors.push(`${collection}: ${err.message}`);
|
|
753
829
|
}
|
|
754
830
|
}
|
|
831
|
+
// Compact WAL to free memory after heavy FTS operations (DuckDB only)
|
|
832
|
+
try {
|
|
833
|
+
const { getSqlDialect } = await import("./db.js");
|
|
834
|
+
const d = getSqlDialect();
|
|
835
|
+
if (d.checkpointSQL)
|
|
836
|
+
await runSQL(d.checkpointSQL);
|
|
837
|
+
}
|
|
838
|
+
catch { }
|
|
755
839
|
emit('fts', 'rebuild', {
|
|
756
840
|
collections_total: collections.length,
|
|
757
841
|
collections_rebuilt: rebuilt,
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
// Barrel declarations for the database layer: adapter ports, SQL dialects,
// the adapter factory, core db operations, and lexicon/schema utilities.
export type { DatabasePort, BulkInserter, SearchPort, Dialect } from './ports.ts';
export type { SqlDialect } from './dialect.ts';
export { getDialect, DUCKDB_DIALECT, SQLITE_DIALECT } from './dialect.ts';
export { createAdapter } from './adapter-factory.ts';
export { initDatabase, closeDatabase, querySQL, runSQL, insertRecord, deleteRecord, queryRecords, searchRecords, getRecordByUri, getCursor, setCursor, bulkInsertRecords, packCursor, unpackCursor, } from './db.ts';
export { type TableSchema, type ColumnDef, type ChildTableSchema, loadLexicons, discoverCollections, buildSchemas, generateTableSchema, generateCreateTableSQL, toSnakeCase, getLexicon, getLexiconArray, getAllLexicons, storeLexicons, } from './schema.ts';
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/database/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,YAAY,EAAE,YAAY,EAAE,UAAU,EAAE,OAAO,EAAE,MAAM,YAAY,CAAA;AACjF,YAAY,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAC9C,OAAO,EAAE,UAAU,EAAE,cAAc,EAAE,cAAc,EAAE,MAAM,cAAc,CAAA;AACzE,OAAO,EAAE,aAAa,EAAE,MAAM,sBAAsB,CAAA;AAGpD,OAAO,EACL,YAAY,EACZ,aAAa,EACb,QAAQ,EACR,MAAM,EACN,YAAY,EACZ,YAAY,EACZ,YAAY,EACZ,aAAa,EACb,cAAc,EACd,SAAS,EACT,SAAS,EACT,iBAAiB,EACjB,UAAU,EACV,YAAY,GACb,MAAM,SAAS,CAAA;AAGhB,OAAO,EACL,KAAK,WAAW,EAChB,KAAK,SAAS,EACd,KAAK,gBAAgB,EACrB,YAAY,EACZ,mBAAmB,EACnB,YAAY,EACZ,mBAAmB,EACnB,sBAAsB,EACtB,WAAW,EACX,UAAU,EACV,eAAe,EACf,cAAc,EACd,aAAa,GACd,MAAM,aAAa,CAAA"}
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
// Barrel module for the database layer: dialects, adapter factory, core db
// operations, and lexicon/schema utilities.
export { getDialect, DUCKDB_DIALECT, SQLITE_DIALECT } from "./dialect.js";
export { createAdapter } from "./adapter-factory.js";
// Re-export commonly used functions from db.ts
export { initDatabase, closeDatabase, querySQL, runSQL, insertRecord, deleteRecord, queryRecords, searchRecords, getRecordByUri, getCursor, setCursor, bulkInsertRecords, packCursor, unpackCursor, } from "./db.js";
// Re-export schema utilities
export { loadLexicons, discoverCollections, buildSchemas, generateTableSchema, generateCreateTableSQL, toSnakeCase, getLexicon, getLexiconArray, getAllLexicons, storeLexicons, } from "./schema.js";
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
/** Supported database engine identifiers. */
export type Dialect = 'duckdb' | 'sqlite' | 'postgres';
/**
 * Minimal database abstraction implemented by each engine adapter
 * (DuckDB, SQLite; Postgres planned).
 */
export interface DatabasePort {
    /** Dialect identifier for SQL generation differences */
    dialect: Dialect;
    /** Open a database connection. path is file path or ':memory:' */
    open(path: string): Promise<void>;
    /** Close all connections and release resources */
    close(): void;
    /** Execute a read query, return rows as plain objects */
    query<T = Record<string, unknown>>(sql: string, params?: unknown[]): Promise<T[]>;
    /** Execute a write statement (INSERT, UPDATE, DELETE, DDL) */
    execute(sql: string, params?: unknown[]): Promise<void>;
    /** Execute multiple statements in sequence (for DDL batches) */
    executeMultiple(sql: string): Promise<void>;
    /** Begin a transaction */
    beginTransaction(): Promise<void>;
    /** Commit the current transaction */
    commit(): Promise<void>;
    /** Rollback the current transaction */
    rollback(): Promise<void>;
    /** Create a bulk inserter for high-throughput writes */
    createBulkInserter(table: string, columns: string[], options?: {
        onConflict?: 'ignore' | 'replace';
        batchSize?: number;
    }): Promise<BulkInserter>;
}
/** Buffered row writer returned by DatabasePort.createBulkInserter(). */
export interface BulkInserter {
    /** Append a single row of values */
    append(values: unknown[]): void;
    /** Flush buffered rows to the database */
    flush(): Promise<void>;
    /** Close the inserter and release resources */
    close(): Promise<void>;
}
/**
 * Optional full-text-search capability for an adapter. Optional methods are
 * probed at runtime by the FTS layer before use.
 */
export interface SearchPort {
    /** Build/rebuild an FTS index for a table */
    buildIndex(shadowTable: string, sourceQuery: string, searchColumns: string[]): Promise<void>;
    /** Incrementally update a single record in the FTS index */
    updateIndex?(shadowTable: string, uri: string, row: Record<string, string | null>, searchColumns: string[]): Promise<void>;
    /** Remove a single record from the FTS index */
    deleteFromIndex?(shadowTable: string, uri: string, searchColumns: string[]): Promise<void>;
    /** Check if the FTS index already exists (for skipping rebuild on startup) */
    indexExists?(shadowTable: string): Promise<boolean>;
    /** Search a table, returning URIs with scores */
    search(shadowTable: string, query: string, searchColumns: string[], limit: number, offset: number): Promise<Array<{
        uri: string;
        score: number;
    }>>;
}
//# sourceMappingURL=ports.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ports.d.ts","sourceRoot":"","sources":["../../src/database/ports.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,OAAO,GAAG,QAAQ,GAAG,QAAQ,GAAG,UAAU,CAAA;AAEtD,MAAM,WAAW,YAAY;IAC3B,wDAAwD;IACxD,OAAO,EAAE,OAAO,CAAA;IAEhB,kEAAkE;IAClE,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAEjC,kDAAkD;IAClD,KAAK,IAAI,IAAI,CAAA;IAEb,yDAAyD;IACzD,KAAK,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,CAAC,EAAE,CAAC,CAAA;IAEjF,8DAA8D;IAC9D,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAEvD,gEAAgE;IAChE,eAAe,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAE3C,0BAA0B;IAC1B,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;IAEjC,qCAAqC;IACrC,MAAM,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;IAEvB,uCAAuC;IACvC,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;IAEzB,wDAAwD;IACxD,kBAAkB,CAChB,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,MAAM,EAAE,EACjB,OAAO,CAAC,EAAE;QAAE,UAAU,CAAC,EAAE,QAAQ,GAAG,SAAS,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAClE,OAAO,CAAC,YAAY,CAAC,CAAA;CACzB;AAED,MAAM,WAAW,YAAY;IAC3B,oCAAoC;IACpC,MAAM,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,IAAI,CAAA;IAE/B,0CAA0C;IAC1C,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;IAEtB,+CAA+C;IAC/C,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,CAAA;CACvB;AAED,MAAM,WAAW,UAAU;IACzB,6CAA6C;IAC7C,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAE5F,4DAA4D;IAC5D,WAAW,CAAC,CACV,WAAW,EAAE,MAAM,EACnB,GAAG,EAAE,MAAM,EACX,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAClC,aAAa,EAAE,MAAM,EAAE,GACtB,OAAO,CAAC,IAAI,CAAC,CAAA;IAEhB,gDAAgD;IAChD,eAAe,CAAC,CAAC,WAAW,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAE1F,8EAA8E;IAC9E,WAAW,CAAC,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;IAEnD,iDAAiD;IACjD,MAAM,CACJ,WAAW,EAAE,MAAM,EACnB,KAAK,EAAE,MAAM,EACb,aAAa,EAAE,MAAM,EAAE,EACvB,KAAK,EAAE,MAAM,EACb,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,KAAK,CAAC;QAAE,GAAG,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC,CAAA;CAClD"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -1,9 +1,11 @@
|
|
|
1
|
+
import type { SqlDialect } from './dialect.ts';
|
|
1
2
|
export interface ColumnDef {
|
|
2
3
|
name: string;
|
|
3
4
|
originalName: string;
|
|
4
|
-
|
|
5
|
+
sqlType: string;
|
|
5
6
|
notNull: boolean;
|
|
6
7
|
isRef: boolean;
|
|
8
|
+
isJson: boolean;
|
|
7
9
|
}
|
|
8
10
|
export interface UnionBranchSchema {
|
|
9
11
|
type: string;
|
|
@@ -33,6 +35,7 @@ export interface ChildTableSchema {
|
|
|
33
35
|
columns: ColumnDef[];
|
|
34
36
|
}
|
|
35
37
|
export declare function toSnakeCase(str: string): string;
|
|
38
|
+
export declare function q(name: string): string;
|
|
36
39
|
export declare function loadLexicons(lexiconsDir: string): Map<string, any>;
|
|
37
40
|
/**
|
|
38
41
|
* Discover collections by scanning lexicons for record-type definitions.
|
|
@@ -46,6 +49,14 @@ export declare function getAllLexicons(): Array<{
|
|
|
46
49
|
}>;
|
|
47
50
|
/** Get all stored lexicons as a flat array (for @bigmoves/lexicon validators) */
|
|
48
51
|
export declare function getLexiconArray(): any[];
|
|
49
|
-
export declare function generateTableSchema(nsid: string, lexicon: any, lexicons?: Map<string, any
|
|
50
|
-
export declare function generateCreateTableSQL(schema: TableSchema): string;
|
|
52
|
+
export declare function generateTableSchema(nsid: string, lexicon: any, lexicons?: Map<string, any>, dialect?: SqlDialect): TableSchema;
|
|
53
|
+
export declare function generateCreateTableSQL(schema: TableSchema, dialect?: SqlDialect): string;
|
|
54
|
+
/**
|
|
55
|
+
* Build table schemas and DDL from lexicons and collections.
|
|
56
|
+
* Shared by main.ts (server boot) and cli.ts (hatk schema command).
|
|
57
|
+
*/
|
|
58
|
+
export declare function buildSchemas(lexicons: Map<string, any>, collections: string[], dialect?: SqlDialect): {
|
|
59
|
+
schemas: TableSchema[];
|
|
60
|
+
ddlStatements: string[];
|
|
61
|
+
};
|
|
51
62
|
//# sourceMappingURL=schema.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../../src/database/schema.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,cAAc,CAAA;AAG9C,MAAM,WAAW,SAAS;IACxB,IAAI,EAAE,MAAM,CAAA;IACZ,YAAY,EAAE,MAAM,CAAA;IACpB,OAAO,EAAE,MAAM,CAAA;IACf,OAAO,EAAE,OAAO,CAAA;IAChB,KAAK,EAAE,OAAO,CAAA;IACd,MAAM,EAAE,OAAO,CAAA;CAChB;AAED,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,MAAM,CAAA;IACZ,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;IACpB,OAAO,EAAE,OAAO,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,YAAY,CAAC,EAAE,MAAM,CAAA;CACtB;AAED,MAAM,WAAW,gBAAgB;IAC/B,SAAS,EAAE,MAAM,CAAA;IACjB,QAAQ,EAAE,iBAAiB,EAAE,CAAA;CAC9B;AAED,MAAM,WAAW,WAAW;IAC1B,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;IACpB,UAAU,EAAE,MAAM,EAAE,CAAA;IACpB,QAAQ,EAAE,gBAAgB,EAAE,CAAA;IAC5B,MAAM,EAAE,gBAAgB,EAAE,CAAA;CAC3B;AAED,MAAM,WAAW,gBAAgB;IAC/B,gBAAgB,EAAE,MAAM,CAAA;IACxB,SAAS,EAAE,MAAM,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,OAAO,EAAE,SAAS,EAAE,CAAA;CACrB;AAGD,wBAAgB,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAE/C;AAGD,wBAAgB,CAAC,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAEtC;AA+CD,wBAAgB,YAAY,CAAC,WAAW,EAAE,MAAM,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CASlE;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,MAAM,EAAE,CASxE;AAID,wBAAgB,aAAa,CAAC,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,GAAG,IAAI,CAI9D;AAED,wBAAgB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,GAAG,GAAG,SAAS,CAExD;AAED,wBAAgB,cAAc,IAAI,KAAK,CAAC;IAAE,IAAI,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,GAAG,CAAA;CAAE,CAAC,CAEtE;AAED,iFAAiF;AACjF,wBAAgB,eAAe,IAAI,GAAG,EAAE,CAEvC;AAyHD,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,GAAG,EACZ,QAAQ,CAAC,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,EAC3B,OAAO,GAAE,UAA2B,GACnC,WAAW,CA+Gb;AAGD,wBAAgB,sBAAsB,CAAC,MAAM,EAAE,WAAW,EAAE,OAAO,GAAE,UAA2B,GAAG,MAAM,CAoExG;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAC1B,QAAQ,EAAE,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,EAC1B,WAAW,EAAE,MAAM,EAAE,EACrB,OAAO,GAAE,UAA2B,GACnC;IAAE,OAAO,EAAE,WAAW,EAAE,CAAC;IAAC,aAAa,EAAE,MAAM,EAAE,CAAA;CAAE,CA2BrD"}
|