@hatk/hatk 0.0.1-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/backfill.d.ts +11 -0
- package/dist/backfill.d.ts.map +1 -0
- package/dist/backfill.js +328 -0
- package/dist/car.d.ts +5 -0
- package/dist/car.d.ts.map +1 -0
- package/dist/car.js +52 -0
- package/dist/cbor.d.ts +7 -0
- package/dist/cbor.d.ts.map +1 -0
- package/dist/cbor.js +89 -0
- package/dist/cid.d.ts +4 -0
- package/dist/cid.d.ts.map +1 -0
- package/dist/cid.js +39 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +1663 -0
- package/dist/config.d.ts +47 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +43 -0
- package/dist/db.d.ts +134 -0
- package/dist/db.d.ts.map +1 -0
- package/dist/db.js +1361 -0
- package/dist/feeds.d.ts +95 -0
- package/dist/feeds.d.ts.map +1 -0
- package/dist/feeds.js +144 -0
- package/dist/fts.d.ts +20 -0
- package/dist/fts.d.ts.map +1 -0
- package/dist/fts.js +762 -0
- package/dist/hydrate.d.ts +23 -0
- package/dist/hydrate.d.ts.map +1 -0
- package/dist/hydrate.js +75 -0
- package/dist/indexer.d.ts +14 -0
- package/dist/indexer.d.ts.map +1 -0
- package/dist/indexer.js +316 -0
- package/dist/labels.d.ts +29 -0
- package/dist/labels.d.ts.map +1 -0
- package/dist/labels.js +111 -0
- package/dist/lex-types.d.ts +401 -0
- package/dist/lex-types.d.ts.map +1 -0
- package/dist/lex-types.js +4 -0
- package/dist/lexicon-resolve.d.ts +14 -0
- package/dist/lexicon-resolve.d.ts.map +1 -0
- package/dist/lexicon-resolve.js +280 -0
- package/dist/logger.d.ts +4 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/logger.js +23 -0
- package/dist/main.d.ts +3 -0
- package/dist/main.d.ts.map +1 -0
- package/dist/main.js +148 -0
- package/dist/mst.d.ts +6 -0
- package/dist/mst.d.ts.map +1 -0
- package/dist/mst.js +30 -0
- package/dist/oauth/client.d.ts +16 -0
- package/dist/oauth/client.d.ts.map +1 -0
- package/dist/oauth/client.js +54 -0
- package/dist/oauth/crypto.d.ts +28 -0
- package/dist/oauth/crypto.d.ts.map +1 -0
- package/dist/oauth/crypto.js +101 -0
- package/dist/oauth/db.d.ts +47 -0
- package/dist/oauth/db.d.ts.map +1 -0
- package/dist/oauth/db.js +139 -0
- package/dist/oauth/discovery.d.ts +22 -0
- package/dist/oauth/discovery.d.ts.map +1 -0
- package/dist/oauth/discovery.js +50 -0
- package/dist/oauth/dpop.d.ts +11 -0
- package/dist/oauth/dpop.d.ts.map +1 -0
- package/dist/oauth/dpop.js +56 -0
- package/dist/oauth/hooks.d.ts +10 -0
- package/dist/oauth/hooks.d.ts.map +1 -0
- package/dist/oauth/hooks.js +40 -0
- package/dist/oauth/server.d.ts +86 -0
- package/dist/oauth/server.d.ts.map +1 -0
- package/dist/oauth/server.js +572 -0
- package/dist/opengraph.d.ts +34 -0
- package/dist/opengraph.d.ts.map +1 -0
- package/dist/opengraph.js +198 -0
- package/dist/schema.d.ts +51 -0
- package/dist/schema.d.ts.map +1 -0
- package/dist/schema.js +358 -0
- package/dist/seed.d.ts +29 -0
- package/dist/seed.d.ts.map +1 -0
- package/dist/seed.js +86 -0
- package/dist/server.d.ts +6 -0
- package/dist/server.d.ts.map +1 -0
- package/dist/server.js +1024 -0
- package/dist/setup.d.ts +8 -0
- package/dist/setup.d.ts.map +1 -0
- package/dist/setup.js +48 -0
- package/dist/test-browser.d.ts +14 -0
- package/dist/test-browser.d.ts.map +1 -0
- package/dist/test-browser.js +26 -0
- package/dist/test.d.ts +47 -0
- package/dist/test.d.ts.map +1 -0
- package/dist/test.js +256 -0
- package/dist/views.d.ts +40 -0
- package/dist/views.d.ts.map +1 -0
- package/dist/views.js +178 -0
- package/dist/vite-plugin.d.ts +5 -0
- package/dist/vite-plugin.d.ts.map +1 -0
- package/dist/vite-plugin.js +86 -0
- package/dist/xrpc-client.d.ts +18 -0
- package/dist/xrpc-client.d.ts.map +1 -0
- package/dist/xrpc-client.js +54 -0
- package/dist/xrpc.d.ts +53 -0
- package/dist/xrpc.d.ts.map +1 -0
- package/dist/xrpc.js +139 -0
- package/fonts/Inter-Regular.woff +0 -0
- package/package.json +41 -0
- package/public/admin-auth.js +320 -0
- package/public/admin.html +2166 -0
package/dist/setup.d.ts
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
export interface SetupContext {
|
|
2
|
+
db: {
|
|
3
|
+
query: (sql: string, params?: any[]) => Promise<any[]>;
|
|
4
|
+
run: (sql: string, ...params: any[]) => Promise<void>;
|
|
5
|
+
};
|
|
6
|
+
}
|
|
7
|
+
export declare function initSetup(setupDir: string): Promise<void>;
|
|
8
|
+
//# sourceMappingURL=setup.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"setup.d.ts","sourceRoot":"","sources":["../src/setup.ts"],"names":[],"mappings":"AAKA,MAAM,WAAW,YAAY;IAC3B,EAAE,EAAE;QACF,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;QACtD,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;KACtD,CAAA;CACF;AAiBD,wBAAsB,SAAS,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAqB/D"}
|
package/dist/setup.js
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
// TypeScript emit helper: rewrites a relative import specifier that ends in a
// TS extension (.ts/.mts/.cts/.tsx, and .d.* declaration forms) to the
// corresponding JS extension. Non-relative and non-string specifiers pass
// through untouched; plain ".d.ts" declaration paths are left as-is.
var __rewriteRelativeImportExtension = (this && this.__rewriteRelativeImportExtension) || function (path, preserveJsx) {
    // Only rewrite string specifiers that are explicitly relative ("./x", "../x").
    if (typeof path !== "string" || !/^\.\.?\//.test(path)) {
        return path;
    }
    return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) {
        if (tsx) {
            // ".tsx" → ".jsx" when JSX is preserved, otherwise ".js".
            return preserveJsx ? ".jsx" : ".js";
        }
        if (d && (!ext || !cm)) {
            // Declaration files (".d.ts" and simple ".d.[cm]ts") keep their extension.
            return m;
        }
        // ".ts" → ".js", ".mts" → ".mjs", ".cts" → ".cjs" (and ".d.x.[cm]ts" forms).
        return d + ext + "." + cm.toLowerCase() + "js";
    });
};
|
|
9
|
+
import { resolve, relative } from 'node:path';
|
|
10
|
+
import { readdirSync, statSync } from 'node:fs';
|
|
11
|
+
import { log } from "./logger.js";
|
|
12
|
+
import { querySQL, runSQL } from "./db.js";
|
|
13
|
+
/**
 * Recursively collect setup script paths under `dir`, returned sorted for a
 * deterministic execution order.
 *
 * Includes files ending in `.ts` or `.js`, excluding names prefixed with `_`
 * (convention for shared helpers that are not run directly).
 * A missing or unreadable directory yields an empty list.
 *
 * Fix: the original wrapped the whole loop in one try/catch, so a single
 * failing statSync (e.g. a broken symlink) silently dropped every remaining
 * entry in the directory. Errors are now handled per entry.
 */
function walkDir(dir) {
    const results = [];
    let entries;
    try {
        entries = readdirSync(dir);
    }
    catch {
        // Directory missing or unreadable: treat as "no scripts" (matches
        // the original behavior for a nonexistent setup dir).
        return results;
    }
    for (const entry of entries) {
        const full = resolve(dir, entry);
        let isDir;
        try {
            isDir = statSync(full).isDirectory();
        }
        catch {
            // Unstat-able entry (broken symlink, race with deletion): skip
            // just this entry instead of aborting the whole scan.
            continue;
        }
        if (isDir) {
            results.push(...walkDir(full));
        }
        else if ((entry.endsWith('.ts') || entry.endsWith('.js')) && !entry.startsWith('_')) {
            results.push(full);
        }
    }
    return results.sort();
}
|
|
29
|
+
/**
 * Run every setup script found under `setupDir`, in sorted path order.
 *
 * Each script is dynamically imported; its default export (or the `handler`
 * property of the default export) is invoked with a context exposing the
 * database `query`/`run` helpers. Scripts without a callable handler are
 * logged and skipped.
 */
export async function initSetup(setupDir) {
    const scripts = walkDir(setupDir);
    for (const scriptPath of scripts) {
        // Human-readable name: path relative to the setup dir, extension stripped.
        const name = relative(setupDir, scriptPath).replace(/\.(ts|js)$/, '');
        const mod = await import(__rewriteRelativeImportExtension(scriptPath));
        const exported = mod.default;
        const handler = exported?.handler || exported;
        if (typeof handler !== 'function') {
            console.warn(`[setup] ${name}: no handler function found, skipping`);
            continue;
        }
        log(`[setup] running: ${name}`);
        await handler({ db: { query: querySQL, run: runSQL } });
        log(`[setup] done: ${name}`);
    }
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { expect, type Page } from '@playwright/test';
|
|
2
|
+
import type { TestServer } from './test.ts';
|
|
3
|
+
type WorkerFixtures = {
|
|
4
|
+
server: TestServer;
|
|
5
|
+
};
|
|
6
|
+
/** Inject __TEST_AUTH__ into a page so isLoggedIn() and viewerDid() work. */
|
|
7
|
+
export declare function loginAs(page: Page, did: string): Promise<void>;
|
|
8
|
+
/**
|
|
9
|
+
* Extended Playwright test with an auto-started hatk server fixture.
|
|
10
|
+
* The server starts once per test file (worker scope) and is shared across tests.
|
|
11
|
+
*/
|
|
12
|
+
export declare const test: import("@playwright/test").TestType<import("@playwright/test").PlaywrightTestArgs & import("@playwright/test").PlaywrightTestOptions, import("@playwright/test").PlaywrightWorkerArgs & import("@playwright/test").PlaywrightWorkerOptions & WorkerFixtures>;
|
|
13
|
+
export { expect };
|
|
14
|
+
//# sourceMappingURL=test-browser.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"test-browser.d.ts","sourceRoot":"","sources":["../src/test-browser.ts"],"names":[],"mappings":"AAAA,OAAO,EAAgB,MAAM,EAAE,KAAK,IAAI,EAAE,MAAM,kBAAkB,CAAA;AAClE,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,WAAW,CAAA;AAG3C,KAAK,cAAc,GAAG;IACpB,MAAM,EAAE,UAAU,CAAA;CACnB,CAAA;AAED,6EAA6E;AAC7E,wBAAsB,OAAO,CAAC,IAAI,EAAE,IAAI,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAIpE;AAED;;;GAGG;AACH,eAAO,MAAM,IAAI,8PAWf,CAAA;AAEF,OAAO,EAAE,MAAM,EAAE,CAAA"}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { test as base, expect } from '@playwright/test';
|
|
2
|
+
import { startTestServer } from "./test.js";
|
|
3
|
+
/** Inject __TEST_AUTH__ into a page so isLoggedIn() and viewerDid() work. */
export async function loginAs(page, did) {
    // Runs in the browser context before every navigation; `did` is
    // serialized across the process boundary as the init-script argument.
    await page.addInitScript((viewerDid) => {
        window.__TEST_AUTH__ = { did: viewerDid };
    }, did);
}
|
|
10
|
+
/**
 * Worker-scoped fixture body: boot one hatk server per worker, seed its
 * fixtures, hand it to the tests, and tear it down when the worker exits.
 */
// eslint-disable-next-line no-empty-pattern -- Playwright fixture API requires the deps arg
const serverFixture = async (_deps, use) => {
    const server = await startTestServer();
    await server.loadFixtures();
    await use(server);
    await server.close();
};
/**
 * Extended Playwright test with an auto-started hatk server fixture.
 * The server starts once per test file (worker scope) and is shared across tests.
 */
export const test = base.extend({
    server: [serverFixture, { scope: 'worker' }],
});
|
|
26
|
+
export { expect };
|
package/dist/test.d.ts
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { type HatkConfig } from './config.ts';
|
|
2
|
+
import { seed as createSeedHelpers, type SeedOpts } from './seed.ts';
|
|
3
|
+
import type { FeedContext } from './feeds.ts';
|
|
4
|
+
export interface TestContext {
|
|
5
|
+
db: {
|
|
6
|
+
query: (sql: string, params?: any[]) => Promise<any[]>;
|
|
7
|
+
run: (sql: string, ...params: any[]) => Promise<void>;
|
|
8
|
+
};
|
|
9
|
+
loadFixtures: (dir?: string) => Promise<void>;
|
|
10
|
+
loadFeed: (name: string) => {
|
|
11
|
+
generate: (ctx: FeedContext) => Promise<any>;
|
|
12
|
+
};
|
|
13
|
+
loadXrpc: (name: string) => {
|
|
14
|
+
handler: (ctx: any) => Promise<any>;
|
|
15
|
+
};
|
|
16
|
+
feedContext: (opts?: {
|
|
17
|
+
limit?: number;
|
|
18
|
+
cursor?: string;
|
|
19
|
+
viewer?: {
|
|
20
|
+
did: string;
|
|
21
|
+
} | null;
|
|
22
|
+
params?: Record<string, string>;
|
|
23
|
+
}) => FeedContext;
|
|
24
|
+
close: () => Promise<void>;
|
|
25
|
+
/** @internal */ _config: HatkConfig;
|
|
26
|
+
/** @internal */ _collections: string[];
|
|
27
|
+
}
|
|
28
|
+
export interface TestServer extends TestContext {
|
|
29
|
+
url: string;
|
|
30
|
+
port: number;
|
|
31
|
+
fetch: (path: string, init?: RequestInit) => Promise<Response>;
|
|
32
|
+
fetchAs: (did: string, path: string, init?: RequestInit) => Promise<Response>;
|
|
33
|
+
seed: (opts?: SeedOpts) => ReturnType<typeof createSeedHelpers>;
|
|
34
|
+
waitForRecord: (uri: string, timeoutMs?: number) => Promise<void>;
|
|
35
|
+
}
|
|
36
|
+
/**
|
|
37
|
+
* Boot an in-memory hatk context for unit tests.
|
|
38
|
+
* Loads lexicons, creates in-memory DuckDB, discovers feeds/xrpc/labels.
|
|
39
|
+
* No HTTP server, no PDS, no indexer.
|
|
40
|
+
*
|
|
41
|
+
* Note: uses module-level singletons (DB, feeds, xrpc, labels).
|
|
42
|
+
* Each vitest worker runs in its own process so this is safe by default,
|
|
43
|
+
* but it will NOT work with --pool=threads (multiple tests sharing a process).
|
|
44
|
+
*/
|
|
45
|
+
export declare function createTestContext(): Promise<TestContext>;
|
|
46
|
+
export declare function startTestServer(): Promise<TestServer>;
|
|
47
|
+
//# sourceMappingURL=test.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"test.d.ts","sourceRoot":"","sources":["../src/test.ts"],"names":[],"mappings":"AAIA,OAAO,EAAc,KAAK,UAAU,EAAE,MAAM,aAAa,CAAA;AAiBzD,OAAO,EAAE,IAAI,IAAI,iBAAiB,EAAE,KAAK,QAAQ,EAAE,MAAM,WAAW,CAAA;AACpE,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,YAAY,CAAA;AAE7C,MAAM,WAAW,WAAW;IAC1B,EAAE,EAAE;QACF,KAAK,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,GAAG,EAAE,CAAC,CAAA;QACtD,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,MAAM,EAAE,GAAG,EAAE,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;KACtD,CAAA;IACD,YAAY,EAAE,CAAC,GAAG,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;IAC7C,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK;QAAE,QAAQ,EAAE,CAAC,GAAG,EAAE,WAAW,KAAK,OAAO,CAAC,GAAG,CAAC,CAAA;KAAE,CAAA;IAC5E,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK;QAAE,OAAO,EAAE,CAAC,GAAG,EAAE,GAAG,KAAK,OAAO,CAAC,GAAG,CAAC,CAAA;KAAE,CAAA;IACnE,WAAW,EAAE,CAAC,IAAI,CAAC,EAAE;QACnB,KAAK,CAAC,EAAE,MAAM,CAAA;QACd,MAAM,CAAC,EAAE,MAAM,CAAA;QACf,MAAM,CAAC,EAAE;YAAE,GAAG,EAAE,MAAM,CAAA;SAAE,GAAG,IAAI,CAAA;QAC/B,MAAM,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;KAChC,KAAK,WAAW,CAAA;IACjB,KAAK,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAA;IAC1B,gBAAgB,CAAC,OAAO,EAAE,UAAU,CAAA;IACpC,gBAAgB,CAAC,YAAY,EAAE,MAAM,EAAE,CAAA;CACxC;AAED,MAAM,WAAW,UAAW,SAAQ,WAAW;IAC7C,GAAG,EAAE,MAAM,CAAA;IACX,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAA;IAC9D,OAAO,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAA;IAC7E,IAAI,EAAE,CAAC,IAAI,CAAC,EAAE,QAAQ,KAAK,UAAU,CAAC,OAAO,iBAAiB,CAAC,CAAA;IAC/D,aAAa,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;CAClE;AAYD;;;;;;;;GAQG;AACH,wBAAsB,iBAAiB,IAAI,OAAO,CAAC,WAAW,CAAC,CAiL9D;AA8BD,wBAAsB,eAAe,IAAI,OAAO,CAAC,UAAU,CAAC,CAgD3D"}
|
package/dist/test.js
ADDED
|
@@ -0,0 +1,256 @@
|
|
|
1
|
+
import { resolve, dirname } from 'node:path';
|
|
2
|
+
import { readdirSync, readFileSync } from 'node:fs';
|
|
3
|
+
import YAML from 'yaml';
|
|
4
|
+
import { loadConfig } from "./config.js";
|
|
5
|
+
import { loadLexicons, storeLexicons, discoverCollections, generateTableSchema, generateCreateTableSQL, } from "./schema.js";
|
|
6
|
+
import { initDatabase, querySQL, runSQL, insertRecord, closeDatabase } from "./db.js";
|
|
7
|
+
import { initFeeds, executeFeed, listFeeds, createPaginate } from "./feeds.js";
|
|
8
|
+
import { initXrpc, executeXrpc, listXrpc, configureRelay } from "./xrpc.js";
|
|
9
|
+
import { initOpengraph } from "./opengraph.js";
|
|
10
|
+
import { initLabels } from "./labels.js";
|
|
11
|
+
import { discoverViews } from "./views.js";
|
|
12
|
+
import { loadOnLoginHook } from "./oauth/hooks.js";
|
|
13
|
+
import { validateLexicons } from '@bigmoves/lexicon';
|
|
14
|
+
import { packCursor, unpackCursor, isTakendownDid, filterTakendownDids } from "./db.js";
|
|
15
|
+
import { seed as createSeedHelpers } from "./seed.js";
|
|
16
|
+
/**
 * Resolve the path of the project's config file.
 * Honors the APPVIEW_CONFIG environment variable when set (resolved against
 * cwd); otherwise falls back to 'config.yaml' in the current working
 * directory. (Note: no upward directory walking is performed.)
 */
function findConfigPath() {
    const override = process.env.APPVIEW_CONFIG;
    return override ? resolve(override) : resolve('config.yaml');
}
|
|
26
|
+
/**
|
|
27
|
+
* Boot an in-memory hatk context for unit tests.
|
|
28
|
+
* Loads lexicons, creates in-memory DuckDB, discovers feeds/xrpc/labels.
|
|
29
|
+
* No HTTP server, no PDS, no indexer.
|
|
30
|
+
*
|
|
31
|
+
* Note: uses module-level singletons (DB, feeds, xrpc, labels).
|
|
32
|
+
* Each vitest worker runs in its own process so this is safe by default,
|
|
33
|
+
* but it will NOT work with --pool=threads (multiple tests sharing a process).
|
|
34
|
+
*/
|
|
35
|
+
export async function createTestContext() {
    // Resolve config (APPVIEW_CONFIG env var or ./config.yaml, see findConfigPath);
    // all project directories (lexicons/, feeds/, …) resolve relative to it.
    const configPath = findConfigPath();
    const config = loadConfig(configPath);
    const configDir = dirname(resolve(configPath));
    configureRelay(config.relay);
    // Load and validate lexicons
    const lexicons = loadLexicons(resolve(configDir, 'lexicons'));
    const lexiconErrors = validateLexicons([...lexicons.values()]);
    if (lexiconErrors) {
        // Flatten { nsid: [errors] } into "nsid: error" lines for the thrown message.
        const messages = Object.entries(lexiconErrors).flatMap(([nsid, errs]) => errs.map((e) => `${nsid}: ${e}`));
        throw new Error(`Invalid lexicons:\n${messages.join('\n')}`);
    }
    storeLexicons(lexicons);
    // Discover collections — an explicit config list wins over lexicon discovery.
    const collections = config.collections.length > 0 ? config.collections : discoverCollections(lexicons);
    // Generate schemas — one table schema + CREATE TABLE statement per collection.
    const schemas = [];
    const ddlStatements = [];
    for (const nsid of collections) {
        const lexicon = lexicons.get(nsid);
        if (!lexicon)
            continue;
        const schema = generateTableSchema(nsid, lexicon, lexicons);
        schemas.push(schema);
        ddlStatements.push(generateCreateTableSQL(schema));
    }
    // In-memory DuckDB
    await initDatabase(':memory:', schemas, ddlStatements);
    // Discover views + hooks
    discoverViews();
    try {
        await loadOnLoginHook(resolve(configDir, 'hooks'));
    }
    catch { }
    // Skip setup hooks in test context — they're for server boot-time
    // initialization (e.g. importing large datasets) and not appropriate for tests
    // Discover feeds, xrpc, labels
    await initFeeds(resolve(configDir, 'feeds'));
    await initXrpc(resolve(configDir, 'xrpc'));
    await initOpengraph(resolve(configDir, 'og'));
    await initLabels(resolve(configDir, 'labels'));
    return {
        db: { query: querySQL, run: runSQL },
        _config: config,
        _collections: collections,
        // Load YAML fixture files into the database. Each file name maps to a
        // table: known collections go through insertRecord; anything else is
        // auto-created as a VARCHAR-column table from the first record's keys.
        loadFixtures: async (dir) => {
            const fixturesDir = resolve(dir || 'test/fixtures');
            let files;
            try {
                files = readdirSync(fixturesDir).filter((f) => f.endsWith('.yaml') || f.endsWith('.yml'));
            }
            catch {
                throw new Error(`Fixtures directory not found: ${fixturesDir}`);
            }
            // Load _repos.yaml first if it exists, so handles are registered before records
            const reposFile = files.find((f) => f.replace(/\.ya?ml$/, '') === '_repos');
            if (reposFile) {
                const content = readFileSync(resolve(fixturesDir, reposFile), 'utf-8');
                const records = YAML.parse(content);
                if (Array.isArray(records)) {
                    for (const rec of records) {
                        // interpolateHelpers expands "$now(...)" style placeholders.
                        // NOTE(review): assumes every _repos entry has a `did` —
                        // a missing did would throw on the split() below.
                        const row = interpolateHelpers(rec);
                        await runSQL(`INSERT OR IGNORE INTO _repos (did, status, handle, backfilled_at) VALUES ($1, $2, $3, $4)`, row.did, row.status || 'active', row.handle || row.did.split(':').pop() + '.test', new Date().toISOString());
                    }
                }
            }
            const seenDids = new Set();
            for (const file of files) {
                const tableName = file.replace(/\.ya?ml$/, '');
                if (tableName === '_repos')
                    continue;
                const content = readFileSync(resolve(fixturesDir, file), 'utf-8');
                const records = YAML.parse(content);
                if (!Array.isArray(records))
                    continue;
                const isCollection = collections.includes(tableName);
                if (!isCollection) {
                    // Custom table: auto-create from first record's keys, then INSERT
                    if (records.length === 0)
                        continue;
                    const keys = Object.keys(interpolateHelpers(records[0]));
                    const colDefs = keys.map((k) => `"${k}" VARCHAR`).join(', ');
                    await runSQL(`CREATE TABLE IF NOT EXISTS "${tableName}" (${colDefs})`);
                    for (const rec of records) {
                        const row = interpolateHelpers(rec);
                        // Column order is fixed by the first record's keys; later
                        // records missing a key insert undefined for that column.
                        const vals = keys.map((k) => row[k]);
                        const placeholders = keys.map((_, i) => `$${i + 1}`).join(', ');
                        await runSQL(`INSERT INTO "${tableName}" (${keys.map((k) => `"${k}"`).join(', ')}) VALUES (${placeholders})`, ...vals);
                    }
                    continue;
                }
                for (let i = 0; i < records.length; i++) {
                    const rec = interpolateHelpers(records[i]);
                    // Synthesize identity fields when the fixture omits them.
                    const did = rec.did || 'did:plc:test';
                    const rkey = rec.rkey || rec.uri?.split('/').pop() || String(i);
                    const uri = rec.uri || `at://${did}/${tableName}/${rkey}`;
                    const cid = rec.cid || `cid${i}`;
                    // Everything except the envelope keys becomes record fields.
                    const fields = Object.fromEntries(Object.entries(rec).filter(([k]) => !['uri', 'cid', 'did', 'rkey'].includes(k)));
                    // Auto-register DID in _repos if not already present
                    if (!seenDids.has(did)) {
                        seenDids.add(did);
                        await runSQL(`INSERT OR IGNORE INTO _repos (did, status, handle, backfilled_at) VALUES ($1, $2, $3, $4)`, did, 'active', did.split(':').pop() + '.test', new Date().toISOString());
                    }
                    await insertRecord(tableName, uri, cid, did, fields);
                }
            }
        },
        // Look up a registered feed by name and return a thin generate() wrapper.
        loadFeed: (name) => {
            const feedList = listFeeds();
            if (!feedList.find((f) => f.name === name))
                throw new Error(`Feed "${name}" not found. Available: ${feedList.map((f) => f.name).join(', ')}`);
            return {
                generate: (ctx) => executeFeed(name, ctx.params || {}, ctx.cursor, ctx.limit, ctx.viewer),
            };
        },
        // Look up a registered XRPC handler by name, merging ctx.cursor/limit
        // into params (string-typed, as they would arrive over HTTP).
        loadXrpc: (name) => {
            const xrpcList = listXrpc();
            if (!xrpcList.includes(name))
                throw new Error(`XRPC handler "${name}" not found. Available: ${xrpcList.join(', ')}`);
            return {
                handler: (ctx) => {
                    const params = { ...ctx.params };
                    if (ctx.cursor != null && params.cursor == null)
                        params.cursor = ctx.cursor;
                    if (ctx.limit != null && params.limit == null)
                        params.limit = String(ctx.limit);
                    return executeXrpc(name, params, ctx.cursor, ctx.limit ?? 30, ctx.viewer);
                },
            };
        },
        // Build a FeedContext for driving feed generators directly in tests.
        feedContext: (opts) => {
            const paginateDeps = {
                db: { query: querySQL },
                cursor: opts?.cursor,
                limit: opts?.limit || 30,
                packCursor,
                unpackCursor,
            };
            return {
                db: { query: querySQL },
                params: opts?.params || {},
                cursor: opts?.cursor,
                limit: opts?.limit || 30,
                viewer: opts?.viewer ?? null,
                packCursor,
                unpackCursor,
                isTakendown: isTakendownDid,
                filterTakendownDids,
                paginate: createPaginate(paginateDeps),
            };
        },
        close: async () => {
            closeDatabase();
        },
    };
}
|
|
191
|
+
/**
|
|
192
|
+
* Boot a full hatk HTTP server on a random port for integration tests.
|
|
193
|
+
* Includes everything from createTestContext plus an HTTP server.
|
|
194
|
+
*/
|
|
195
|
+
/** Matches the "$now" fixture helper, with an optional offset like "$now(-5m)" or "$now(+2d)". */
const NOW_RE = /^\$now(?:\(([+-]?\d+)([smhd])\))?$/;
/**
 * Recursively expand fixture helper strings inside a parsed YAML value.
 * A string equal to "$now" (optionally offset by seconds/minutes/hours/days)
 * becomes an ISO-8601 timestamp; arrays and plain objects are walked
 * recursively; every other value is returned untouched.
 */
function interpolateHelpers(value) {
    if (typeof value === 'string') {
        const match = NOW_RE.exec(value);
        if (!match)
            return value;
        const amount = match[1] ? parseInt(match[1]) : 0;
        const unit = match[2] || 's';
        const unitMs = { s: 1000, m: 60_000, h: 3_600_000, d: 86_400_000 }[unit];
        return new Date(Date.now() + amount * unitMs).toISOString();
    }
    if (Array.isArray(value)) {
        return value.map(interpolateHelpers);
    }
    if (value && typeof value === 'object') {
        const result = {};
        for (const [key, inner] of Object.entries(value)) {
            result[key] = interpolateHelpers(inner);
        }
        return result;
    }
    return value;
}
|
|
218
|
+
/**
 * Boot a full hatk HTTP server on a random port for integration tests.
 * Builds on createTestContext() and adds an HTTP server plus fetch helpers.
 * The viewer identity is resolved from the `x-test-viewer` request header,
 * which is how fetchAs() impersonates a DID.
 */
export async function startTestServer() {
    const ctx = await createTestContext();
    // Import startServer — it creates the HTTP server and returns it
    // (dynamic import — presumably to avoid loading the server module for
    // pure unit tests; confirm).
    const { startServer } = await import("./server.js");
    // Start server on port 0 (random available port)
    const resolveViewer = (req) => {
        const did = req.headers['x-test-viewer'];
        return typeof did === 'string' ? { did } : null;
    };
    const httpServer = startServer(0, ctx._collections, ctx._config.publicDir, ctx._config.oauth, ctx._config.admins, resolveViewer);
    // NOTE(review): assumes the server is not yet listening when startServer
    // returns; if it could already be listening here this promise never settles.
    await new Promise((resolve) => httpServer.on('listening', resolve));
    const port = httpServer.address().port;
    const url = `http://127.0.0.1:${port}`;
    return {
        ...ctx,
        url,
        port,
        // Convenience fetch against the test server; `path` is concatenated
        // directly onto the base URL.
        fetch: (path, init) => fetch(`${url}${path}`, init),
        // Same, but with the test-viewer header set so the server resolves `did`.
        fetchAs: (did, path, init) => fetch(`${url}${path}`, {
            ...init,
            headers: { ...init?.headers, 'x-test-viewer': did },
        }),
        seed: (seedOpts) => createSeedHelpers(seedOpts),
        // Poll every 100ms until the record's row appears, or throw on timeout.
        waitForRecord: async (uri, timeoutMs = 10_000) => {
            const start = Date.now();
            while (Date.now() - start < timeoutMs) {
                // at://<did>/<collection>/<rkey> — segment 3 is the collection,
                // which doubles as the table name.
                const record = await querySQL(`SELECT uri FROM "${uri.split('/')[3]}" WHERE uri = $1`, [uri]).catch(() => []);
                if (record.length > 0)
                    return;
                await new Promise((r) => setTimeout(r, 100));
            }
            throw new Error(`Timed out waiting for record: ${uri}`);
        },
        // Stop the HTTP server, then tear down the underlying context/DB.
        close: async () => {
            httpServer.close();
            await ctx.close();
        },
    };
}
|
package/dist/views.d.ts
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
interface ViewFieldRef {
|
|
2
|
+
kind: 'ref';
|
|
3
|
+
fieldName: string;
|
|
4
|
+
collection: string;
|
|
5
|
+
joinField: string;
|
|
6
|
+
blobFields: Map<string, string>;
|
|
7
|
+
}
|
|
8
|
+
interface ViewFieldScalar {
|
|
9
|
+
kind: 'scalar';
|
|
10
|
+
fieldName: string;
|
|
11
|
+
type: string;
|
|
12
|
+
format?: string;
|
|
13
|
+
}
|
|
14
|
+
interface ViewFieldLabels {
|
|
15
|
+
kind: 'labels';
|
|
16
|
+
fieldName: string;
|
|
17
|
+
}
|
|
18
|
+
type ViewField = ViewFieldRef | ViewFieldScalar | ViewFieldLabels;
|
|
19
|
+
export interface ViewDef {
|
|
20
|
+
/** Full NSID key (e.g., "fm.teal.alpha.feed.play#playView") */
|
|
21
|
+
nsid: string;
|
|
22
|
+
/** The record collection this view hydrates */
|
|
23
|
+
collection: string;
|
|
24
|
+
/** The def name (e.g., "playView") */
|
|
25
|
+
name: string;
|
|
26
|
+
/** The record field name, or null for flattened views (bsky pattern) */
|
|
27
|
+
recordField: string | null;
|
|
28
|
+
/** Blob fields on the record itself — for flattened views */
|
|
29
|
+
blobFields: Map<string, string>;
|
|
30
|
+
/** All other fields and their hydration instructions */
|
|
31
|
+
fields: ViewField[];
|
|
32
|
+
}
|
|
33
|
+
/** Get a view def by full NSID. */
|
|
34
|
+
export declare function getViewDef(nsid: string): ViewDef | undefined;
|
|
35
|
+
/** Get the default view def for a collection (used by feed auto-hydration). */
|
|
36
|
+
export declare function getDefaultView(collection: string): ViewDef | undefined;
|
|
37
|
+
/** Discover view defs from all loaded lexicons. */
|
|
38
|
+
export declare function discoverViews(): void;
|
|
39
|
+
export {};
|
|
40
|
+
//# sourceMappingURL=views.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"views.d.ts","sourceRoot":"","sources":["../src/views.ts"],"names":[],"mappings":"AAUA,UAAU,YAAY;IACpB,IAAI,EAAE,KAAK,CAAA;IACX,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;IAClB,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;CAChC;AAED,UAAU,eAAe;IACvB,IAAI,EAAE,QAAQ,CAAA;IACd,SAAS,EAAE,MAAM,CAAA;IACjB,IAAI,EAAE,MAAM,CAAA;IACZ,MAAM,CAAC,EAAE,MAAM,CAAA;CAChB;AAED,UAAU,eAAe;IACvB,IAAI,EAAE,QAAQ,CAAA;IACd,SAAS,EAAE,MAAM,CAAA;CAClB;AAED,KAAK,SAAS,GAAG,YAAY,GAAG,eAAe,GAAG,eAAe,CAAA;AAEjE,MAAM,WAAW,OAAO;IACtB,+DAA+D;IAC/D,IAAI,EAAE,MAAM,CAAA;IACZ,+CAA+C;IAC/C,UAAU,EAAE,MAAM,CAAA;IAClB,sCAAsC;IACtC,IAAI,EAAE,MAAM,CAAA;IACZ,wEAAwE;IACxE,WAAW,EAAE,MAAM,GAAG,IAAI,CAAA;IAC1B,6DAA6D;IAC7D,UAAU,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAC/B,wDAAwD;IACxD,MAAM,EAAE,SAAS,EAAE,CAAA;CACpB;AAUD,mCAAmC;AACnC,wBAAgB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,GAAG,SAAS,CAE5D;AAED,+EAA+E;AAC/E,wBAAgB,cAAc,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,GAAG,SAAS,CAGtE;AAED,mDAAmD;AACnD,wBAAgB,aAAa,IAAI,IAAI,CA4CpC"}
|
package/dist/views.js
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
// views.ts — View registry: discovers view defs from lexicons, builds hydration pipelines.
|
|
2
|
+
// Supports two patterns:
|
|
3
|
+
// 1. Inline views: defined in the record lexicon with ref: "#main" (e.g., playView)
|
|
4
|
+
// 2. Defs views: defined in a defs lexicon, associated by naming convention (e.g., profileView)
|
|
5
|
+
import { log } from "./logger.js";
|
|
6
|
+
import { getAllLexicons, getLexicon } from "./schema.js";
|
|
7
|
+
// --- Registry ---
|
|
8
|
+
/** All views keyed by full NSID (e.g., "fm.teal.alpha.feed.play#playView") */
const views = new Map();
/** Reverse index: collection → default view NSID (the {name}View variant) */
const collectionDefaults = new Map();
/**
 * Look up a view definition by its full NSID (collection + "#" + def name).
 * Returns undefined when no such view has been discovered.
 */
export function getViewDef(nsid) {
    return views.get(nsid);
}
/**
 * Resolve the default view for a record collection (used by feed
 * auto-hydration). Returns undefined when the collection has no
 * registered default view.
 */
export function getDefaultView(collection) {
    const defaultNsid = collectionDefaults.get(collection);
    if (!defaultNsid) {
        return undefined;
    }
    return views.get(defaultNsid);
}
|
|
21
|
+
/**
 * Discover view defs from all loaded lexicons and (re)build the registry.
 * Clears any previous state, then scans every non-main object def whose name
 * contains "View"/"view" for one of two shapes:
 *   1. Inline views: a property refs "#main", so the declaring lexicon is
 *      itself the record collection.
 *   2. Defs views: flattened views in a shared defs lexicon, associated to a
 *      record collection by naming convention.
 */
export function discoverViews() {
    // Rebuild from scratch — safe to call repeatedly (e.g. after reloading lexicons).
    views.clear();
    collectionDefaults.clear();
    const lexicons = getAllLexicons();
    for (const { nsid, lexicon } of lexicons) {
        if (!lexicon.defs)
            continue;
        // Parent namespace (NSID minus its last segment) scopes the
        // naming-convention lookup for defs-pattern views.
        const namespace = nsid.split('.').slice(0, -1).join('.');
        for (const [defName, def] of Object.entries(lexicon.defs)) {
            // Only non-main object defs with properties and a View-ish name qualify.
            if (defName === 'main')
                continue;
            if (def.type !== 'object')
                continue;
            if (!def.properties)
                continue;
            if (!defName.includes('View') && !defName.includes('view'))
                continue;
            // Pattern 1: Inline view — has a property that refs #main
            const recordFieldEntry = Object.entries(def.properties).find(([_, prop]) => prop.type === 'ref' && prop.ref === '#main');
            if (recordFieldEntry) {
                // Inline view: the record lexicon IS the collection
                const viewDef = buildInlineViewDef(nsid, defName, recordFieldEntry[0], def);
                const fullNsid = `${nsid}#${defName}`;
                views.set(fullNsid, viewDef);
                registerDefault(viewDef.collection, defName, fullNsid);
                log(`[views] discovered: ${fullNsid} → ${viewDef.collection} (inline, ${viewDef.fields.length} fields)`);
                continue;
            }
            // Pattern 2: Defs view — associate by naming convention
            const recordName = extractRecordName(defName);
            if (!recordName)
                continue;
            const collection = findRecordCollection(recordName, namespace, lexicons);
            if (!collection)
                continue;
            const viewDef = buildDefsViewDef(nsid, defName, collection, def);
            const fullNsid = `${nsid}#${defName}`;
            views.set(fullNsid, viewDef);
            registerDefault(collection, defName, fullNsid);
            log(`[views] discovered: ${fullNsid} → ${collection} (defs, ${viewDef.fields.length} fields)`);
        }
    }
}
|
|
65
|
+
// --- View Builders ---
|
|
66
|
+
/**
 * Build a ViewDef for an inline view (has ref: "#main").
 *
 * Inline views live inside the record's own lexicon, so the record
 * lexicon IS the collection and the "#main"-ref property is the record
 * carrier, not a hydrated field.
 */
function buildInlineViewDef(nsid, defName, recordFieldName, def) {
    const collected = [];
    for (const [name, prop] of Object.entries(def.properties)) {
        // The record-carrying property itself is supplied directly, never hydrated.
        if (name === recordFieldName) {
            continue;
        }
        if (prop.type === 'ref') {
            const targetCollection = resolveRefCollection(prop.ref, nsid);
            if (!targetCollection) {
                continue;
            }
            const targetMain = getLexicon(targetCollection)?.defs?.main;
            // literal:self records (one per repo) join on did; others join on uri.
            const join = targetMain?.key === 'literal:self' ? 'did' : 'uri';
            collected.push({
                kind: 'ref',
                fieldName: name,
                collection: targetCollection,
                joinField: join,
                blobFields: discoverBlobFields(targetMain),
            });
        }
        else if (prop.type === 'array' && prop.items?.type === 'ref' && prop.items.ref?.includes('label')) {
            collected.push({ kind: 'labels', fieldName: name });
        }
        else {
            collected.push({ kind: 'scalar', fieldName: name, type: prop.type, format: prop.format });
        }
    }
    return {
        nsid: `${nsid}#${defName}`,
        collection: nsid,
        name: defName,
        recordField: recordFieldName,
        blobFields: new Map(),
        fields: collected,
    };
}
|
|
98
|
+
/**
 * Build a ViewDef for a defs-pattern view (flattened, no ref: "#main").
 *
 * The view was associated with `collection` by naming convention; fields
 * that come from the stored row (envelope) or from flattening the record
 * itself are skipped, leaving only hydrated fields.
 */
function buildDefsViewDef(defsNsid, defName, collection, def) {
    // The record's own schema tells us which of its fields are blobs.
    const mainDef = getLexicon(collection)?.defs?.main;
    const blobFields = discoverBlobFields(mainDef);
    // Envelope fields come from the row, not hydration.
    const envelope = new Set(['did', 'handle', 'indexedAt']);
    const fields = [];
    for (const [fieldName, prop] of Object.entries(def.properties)) {
        if (envelope.has(fieldName)) {
            continue;
        }
        // Record properties are produced by flattening the record itself.
        if (mainDef?.record?.properties?.[fieldName]) {
            continue;
        }
        const isLabelArray = prop.type === 'array'
            && prop.items?.type === 'ref'
            && prop.items.ref?.includes('label');
        if (isLabelArray) {
            fields.push({ kind: 'labels', fieldName });
        }
        else if (prop.type === 'ref') {
            // Could be viewer state or other refs — treated as scalar for now;
            // viewer hooks handle viewer state enrichment.
            fields.push({ kind: 'scalar', fieldName, type: 'ref' });
        }
        else {
            fields.push({ kind: 'scalar', fieldName, type: prop.type, format: prop.format });
        }
    }
    return {
        nsid: `${defsNsid}#${defName}`,
        collection,
        name: defName,
        recordField: null,
        blobFields,
        fields,
    };
}
|
|
133
|
+
// --- Helpers ---
|
|
134
|
+
/**
 * Register a view as the default for its collection if it's the base
 * {name}View variant. First registration wins; later base variants for
 * the same collection are ignored.
 */
function registerDefault(collection, defName, fullNsid) {
    // Only the bare "{name}View" qualifies: "{name}ViewBasic" and
    // "{name}ViewDetailed" don't end with "View", so they never win.
    const isBaseVariant = defName.endsWith('View');
    if (isBaseVariant && !collectionDefaults.has(collection)) {
        collectionDefaults.set(collection, fullNsid);
    }
}
|
|
141
|
+
/**
 * Extract the record name from a view def name.
 * profileView → profile, playViewDetailed → play; returns null when the
 * name doesn't follow the {name}View(Basic|Detailed)? convention.
 */
function extractRecordName(defName) {
    const m = /^(.+?)View(Basic|Detailed)?$/.exec(defName);
    return m === null ? null : m[1];
}
|
|
146
|
+
/**
 * Find a record-type lexicon matching a name in the given namespace.
 * Returns the candidate NSID only when a lexicon with that exact NSID
 * exists and its main def is of type "record"; otherwise null.
 */
function findRecordCollection(recordName, namespace, lexicons) {
    const candidate = `${namespace}.${recordName}`;
    const entry = lexicons.find((l) => l.nsid === candidate);
    const isRecord = entry?.lexicon?.defs?.main?.type === 'record';
    return isRecord ? candidate : null;
}
|
|
154
|
+
/**
 * Resolve a ref string to a collection NSID.
 * "#def" (local ref) resolves to the current lexicon's NSID;
 * "nsid#def" drops the fragment; a bare NSID passes through unchanged.
 */
function resolveRefCollection(ref, currentNsid) {
    const hashIdx = ref.indexOf('#');
    if (hashIdx === 0) {
        // Local ref like "#main" points at the lexicon we're already in.
        return currentNsid;
    }
    if (hashIdx > 0) {
        // "nsid#def" — keep only the NSID part.
        return ref.slice(0, hashIdx);
    }
    return ref;
}
|
|
162
|
+
/**
 * Find blob-typed fields in a record def and assign CDN presets by field name.
 * Returns a Map of fieldName → preset; empty when the def has no
 * record properties (or is undefined).
 */
function discoverBlobFields(mainDef) {
    const found = new Map();
    const properties = mainDef?.record?.properties;
    if (!properties) {
        return found;
    }
    // Well-known field names get dedicated CDN presets; anything else
    // falls back to the full-size feed preset.
    const presetByName = {
        avatar: 'avatar',
        banner: 'banner',
        thumbnail: 'feed_thumbnail',
    };
    for (const [name, prop] of Object.entries(properties)) {
        if (prop.type !== 'blob') {
            continue;
        }
        found.set(name, presetByName[name] || 'feed_fullsize');
    }
    return found;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"vite-plugin.d.ts","sourceRoot":"","sources":["../src/vite-plugin.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,MAAM,CAAA;AAKlC,wBAAgB,IAAI,CAAC,IAAI,CAAC,EAAE;IAAE,IAAI,CAAC,EAAE,MAAM,CAAA;CAAE,GAAG,MAAM,CAsFrD"}
|