@hatk/hatk 0.0.1-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/backfill.d.ts +11 -0
- package/dist/backfill.d.ts.map +1 -0
- package/dist/backfill.js +328 -0
- package/dist/car.d.ts +5 -0
- package/dist/car.d.ts.map +1 -0
- package/dist/car.js +52 -0
- package/dist/cbor.d.ts +7 -0
- package/dist/cbor.d.ts.map +1 -0
- package/dist/cbor.js +89 -0
- package/dist/cid.d.ts +4 -0
- package/dist/cid.d.ts.map +1 -0
- package/dist/cid.js +39 -0
- package/dist/cli.d.ts +3 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +1663 -0
- package/dist/config.d.ts +47 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +43 -0
- package/dist/db.d.ts +134 -0
- package/dist/db.d.ts.map +1 -0
- package/dist/db.js +1361 -0
- package/dist/feeds.d.ts +95 -0
- package/dist/feeds.d.ts.map +1 -0
- package/dist/feeds.js +144 -0
- package/dist/fts.d.ts +20 -0
- package/dist/fts.d.ts.map +1 -0
- package/dist/fts.js +762 -0
- package/dist/hydrate.d.ts +23 -0
- package/dist/hydrate.d.ts.map +1 -0
- package/dist/hydrate.js +75 -0
- package/dist/indexer.d.ts +14 -0
- package/dist/indexer.d.ts.map +1 -0
- package/dist/indexer.js +316 -0
- package/dist/labels.d.ts +29 -0
- package/dist/labels.d.ts.map +1 -0
- package/dist/labels.js +111 -0
- package/dist/lex-types.d.ts +401 -0
- package/dist/lex-types.d.ts.map +1 -0
- package/dist/lex-types.js +4 -0
- package/dist/lexicon-resolve.d.ts +14 -0
- package/dist/lexicon-resolve.d.ts.map +1 -0
- package/dist/lexicon-resolve.js +280 -0
- package/dist/logger.d.ts +4 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/logger.js +23 -0
- package/dist/main.d.ts +3 -0
- package/dist/main.d.ts.map +1 -0
- package/dist/main.js +148 -0
- package/dist/mst.d.ts +6 -0
- package/dist/mst.d.ts.map +1 -0
- package/dist/mst.js +30 -0
- package/dist/oauth/client.d.ts +16 -0
- package/dist/oauth/client.d.ts.map +1 -0
- package/dist/oauth/client.js +54 -0
- package/dist/oauth/crypto.d.ts +28 -0
- package/dist/oauth/crypto.d.ts.map +1 -0
- package/dist/oauth/crypto.js +101 -0
- package/dist/oauth/db.d.ts +47 -0
- package/dist/oauth/db.d.ts.map +1 -0
- package/dist/oauth/db.js +139 -0
- package/dist/oauth/discovery.d.ts +22 -0
- package/dist/oauth/discovery.d.ts.map +1 -0
- package/dist/oauth/discovery.js +50 -0
- package/dist/oauth/dpop.d.ts +11 -0
- package/dist/oauth/dpop.d.ts.map +1 -0
- package/dist/oauth/dpop.js +56 -0
- package/dist/oauth/hooks.d.ts +10 -0
- package/dist/oauth/hooks.d.ts.map +1 -0
- package/dist/oauth/hooks.js +40 -0
- package/dist/oauth/server.d.ts +86 -0
- package/dist/oauth/server.d.ts.map +1 -0
- package/dist/oauth/server.js +572 -0
- package/dist/opengraph.d.ts +34 -0
- package/dist/opengraph.d.ts.map +1 -0
- package/dist/opengraph.js +198 -0
- package/dist/schema.d.ts +51 -0
- package/dist/schema.d.ts.map +1 -0
- package/dist/schema.js +358 -0
- package/dist/seed.d.ts +29 -0
- package/dist/seed.d.ts.map +1 -0
- package/dist/seed.js +86 -0
- package/dist/server.d.ts +6 -0
- package/dist/server.d.ts.map +1 -0
- package/dist/server.js +1024 -0
- package/dist/setup.d.ts +8 -0
- package/dist/setup.d.ts.map +1 -0
- package/dist/setup.js +48 -0
- package/dist/test-browser.d.ts +14 -0
- package/dist/test-browser.d.ts.map +1 -0
- package/dist/test-browser.js +26 -0
- package/dist/test.d.ts +47 -0
- package/dist/test.d.ts.map +1 -0
- package/dist/test.js +256 -0
- package/dist/views.d.ts +40 -0
- package/dist/views.d.ts.map +1 -0
- package/dist/views.js +178 -0
- package/dist/vite-plugin.d.ts +5 -0
- package/dist/vite-plugin.d.ts.map +1 -0
- package/dist/vite-plugin.js +86 -0
- package/dist/xrpc-client.d.ts +18 -0
- package/dist/xrpc-client.d.ts.map +1 -0
- package/dist/xrpc-client.js +54 -0
- package/dist/xrpc.d.ts +53 -0
- package/dist/xrpc.d.ts.map +1 -0
- package/dist/xrpc.js +139 -0
- package/fonts/Inter-Regular.woff +0 -0
- package/package.json +41 -0
- package/public/admin-auth.js +320 -0
- package/public/admin.html +2166 -0
package/dist/cli.js
ADDED
|
@@ -0,0 +1,1663 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { mkdirSync, writeFileSync, existsSync, unlinkSync, readdirSync, readFileSync } from 'node:fs';
|
|
3
|
+
import { resolve, join } from 'node:path';
|
|
4
|
+
import { execSync } from 'node:child_process';
|
|
5
|
+
import { loadLexicons } from "./schema.js";
|
|
6
|
+
import { loadConfig } from "./config.js";
|
|
7
|
+
// CLI invocation: everything after `node cli.js`; the first token is the subcommand.
const args = process.argv.slice(2);
const command = args[0];
|
|
9
|
+
/**
 * Ensure the local PDS (declared in ./docker-compose.yml) is up and healthy.
 *
 * No-op when the project has no docker-compose.yml. If the PDS health
 * endpoint is not already responding, starts the compose stack and polls
 * the endpoint for up to ~30 seconds (1s interval). Exits the process with
 * code 1 when the PDS never becomes healthy.
 */
async function ensurePds() {
    if (!existsSync(resolve('docker-compose.yml')))
        return;
    // Single health probe, shared by the pre-check and the startup poll.
    // A short per-request timeout keeps a hung socket from stalling `hatk dev`.
    const pdsHealthy = async () => {
        try {
            const res = await fetch('http://localhost:2583/xrpc/_health', {
                signal: AbortSignal.timeout(2000),
            });
            return res.ok;
        }
        catch {
            // Connection refused / timed out: treat as "not healthy yet".
            return false;
        }
    };
    // Already running? Nothing to do.
    if (await pdsHealthy())
        return;
    // Start it
    console.log('[dev] starting PDS...');
    execSync('docker compose up -d', { stdio: 'inherit', cwd: process.cwd() });
    // Wait for health (up to ~30s while the containers come up).
    for (let i = 0; i < 30; i++) {
        if (await pdsHealthy()) {
            console.log('[dev] PDS ready');
            return;
        }
        await new Promise((r) => setTimeout(r, 1000));
    }
    console.error('[dev] PDS failed to start');
    process.exit(1);
}
|
|
37
|
+
/**
 * Run the project's seed script (seeds/seed.ts) via `npx tsx`.
 * No-op when the project has no seed file.
 */
function runSeed() {
    const seedFile = resolve('seeds/seed.ts');
    if (!existsSync(seedFile))
        return;
    // Quote the path: execSync runs through a shell, and the resolved cwd
    // may contain spaces or other shell metacharacters.
    execSync(`npx tsx "${seedFile}"`, { stdio: 'inherit', cwd: process.cwd() });
}
|
|
43
|
+
// Print the CLI help text, then exit. Exits with status 1 because usage()
// is only reached on a missing or unrecognized invocation.
function usage() {
    console.log(`
Usage: hatk <command> [options]

Getting Started
  new <name> [--svelte] [--template <t>]   Create a new hatk project

Running
  start                                    Start the hatk server
  dev                                      Start PDS, seed, and run hatk
  seed                                     Seed local PDS with fixture data
  reset                                    Reset database and PDS for a clean slate
  schema                                   Show database schema from lexicons

Code Quality
  check                                    Type-check and lint the project
  format                                   Format code with oxfmt
  test [--unit|--integration|--browser]    Run tests

Build
  build                                    Build the frontend for production

Generators
  generate record <nsid>                   Generate a record lexicon
  generate query <nsid>                    Generate a query lexicon
  generate procedure <nsid>                Generate a procedure lexicon
  generate feed <name>                     Generate a feed generator
  generate xrpc <nsid>                     Generate an XRPC handler
  generate label <name>                    Generate a label definition
  generate og <name>                       Generate an OpenGraph route
  generate job <name>                      Generate a periodic job
  generate types                           Regenerate TypeScript types from lexicons
  destroy <type> <name>                    Remove a generated file

Registry
  resolve <nsid>                           Fetch a lexicon and its refs from the network
`);
    process.exit(1);
}
|
|
82
|
+
// No subcommand given: show help and exit non-zero.
if (!command)
    usage();
|
|
84
|
+
// --- Templates ---
|
|
85
|
+
// Source-file scaffolds emitted by `hatk generate <type> <name>`.
// Each value is a (name) => string factory producing the full contents of the
// generated TypeScript module. NOTE(review): internal indentation of the
// generated text reconstructed from a whitespace-stripped view — confirm
// against the published artifact.
const templates = {
    // feeds/<name>.ts — a defineFeed() module with a paginated SQL skeleton.
    feed: (name) => `import { defineFeed } from '../hatk.generated.ts'

export default defineFeed({
  collection: 'your.collection.here',
  label: '${name.charAt(0).toUpperCase() + name.slice(1)}',

  async generate(ctx) {
    const { rows, cursor } = await ctx.paginate<{ uri: string }>(
      \`SELECT uri, cid, indexed_at FROM "your.collection.here"\`,
    )

    return ctx.ok({ uris: rows.map((r) => r.uri), cursor })
  },
})
`,
    // xrpc/<ns>/.../<method>.ts — a defineQuery() handler with keyset
    // (indexed_at, cid) cursor pagination; import path depth depends on the
    // NSID, hence xrpcImportPath(name).
    xrpc: (name) => `import { defineQuery } from '${xrpcImportPath(name)}'

export default defineQuery('${name}', async (ctx) => {
  const { ok, db, params, packCursor, unpackCursor } = ctx
  const limit = params.limit ?? 30
  const cursor = params.cursor

  const conditions: string[] = []
  const sqlParams: (string | number)[] = []
  let paramIdx = 1

  if (cursor) {
    const parsed = unpackCursor(cursor)
    if (parsed) {
      conditions.push(\`(s.indexed_at < $\${paramIdx} OR (s.indexed_at = $\${paramIdx + 1} AND s.cid < $\${paramIdx + 2}))\`)
      sqlParams.push(parsed.primary, parsed.primary, parsed.cid)
      paramIdx += 3
    }
  }

  const where = conditions.length ? 'WHERE ' + conditions.join(' AND ') : ''

  const rows = await db.query(
    \`SELECT s.* FROM "your.collection.here" s \${where} ORDER BY s.indexed_at DESC, s.cid DESC LIMIT $\${paramIdx}\`,
    sqlParams.concat([limit + 1]),
  )

  const hasMore = rows.length > limit
  if (hasMore) rows.pop()
  const lastRow = rows[rows.length - 1]

  return ok({
    items: rows,
    cursor: hasMore && lastRow ? packCursor(lastRow.indexed_at, lastRow.cid) : undefined,
  })
})
`,
    // labels/<name>.ts — a label definition plus an evaluate() rule stub.
    label: (name) => `import type { LabelRuleContext } from 'hatk/labels'

export default {
  definition: {
    identifier: '${name}',
    severity: 'inform',
    blurs: 'none',
    defaultSetting: 'warn',
    locales: [{ lang: 'en', name: '${name.charAt(0).toUpperCase() + name.slice(1)}', description: 'Description here' }],
  },
  async evaluate(ctx: LabelRuleContext) {
    // Return array of label identifiers to apply, or empty array
    return []
  },
}
`,
    // og/<name>.ts — an OpenGraph image route rendering a placeholder element.
    og: (name) => `import type { OpengraphContext, OpengraphResult } from 'hatk/opengraph'

export default {
  path: '/og/${name}/:id',
  async generate(ctx: OpengraphContext): Promise<OpengraphResult> {
    const { db, params } = ctx
    return {
      element: {
        type: 'div',
        props: {
          style: { display: 'flex', width: '100%', height: '100%', background: '#080b12', color: 'white', alignItems: 'center', justifyContent: 'center' },
          children: params.id,
        },
      },
    }
  },
}
`,
    // jobs/<name>.ts — a periodic job stub (interval in seconds).
    job: (_name) => `export default {
  interval: 300, // seconds
  async run(_ctx: any) {
    // Periodic task logic here
  },
}
`,
};
|
|
180
|
+
// Relative import path from a generated xrpc handler back to hatk.generated.ts.
// The handler for NSID a.b.c lives at xrpc/a/b/c.ts, so it sits one directory
// per NSID segment below the project root (the method segment is offset by the
// xrpc/ directory itself): a.b.c → '../../../hatk.generated.ts'.
function xrpcImportPath(nsid) {
    const depth = nsid.split('.').length;
    const ups = Array.from({ length: depth }, () => '../').join('');
    return `${ups}hatk.generated.ts`;
}
|
|
188
|
+
// Vitest scaffolds emitted alongside generated feeds/handlers
// (test/feeds/<name>.test.ts, test/xrpc/<name>.test.ts). Each value is a
// (name) => string factory. NOTE(review): internal indentation of the
// generated text reconstructed from a whitespace-stripped view — confirm
// against the published artifact.
const testTemplates = {
    // Smoke test for a generated feed: load fixtures, run generate(), expect output.
    feed: (name) => `import { describe, test, expect, beforeAll, afterAll } from 'vitest'
import { createTestContext } from 'hatk/test'

let ctx: Awaited<ReturnType<typeof createTestContext>>

beforeAll(async () => {
  ctx = await createTestContext()
  await ctx.loadFixtures()
})

afterAll(async () => ctx?.close())

describe('${name} feed', () => {
  test('returns results', async () => {
    const feed = ctx.loadFeed('${name}')
    const result = await feed.generate(ctx.feedContext({ limit: 10 }))
    expect(result).toBeDefined()
  })
})
`,
    // Smoke test for a generated XRPC handler: call it with empty params.
    xrpc: (name) => `import { describe, test, expect, beforeAll, afterAll } from 'vitest'
import { createTestContext } from 'hatk/test'

let ctx: Awaited<ReturnType<typeof createTestContext>>

beforeAll(async () => {
  ctx = await createTestContext()
  await ctx.loadFixtures()
})

afterAll(async () => ctx?.close())

describe('${name}', () => {
  test('returns response', async () => {
    const handler = ctx.loadXrpc('${name}')
    const result = await handler.handler({ params: {} })
    expect(result).toBeDefined()
  })
})
`,
};
|
|
230
|
+
// Minimal lexicon documents emitted by `hatk generate record|query|procedure`.
// Each factory takes the target NSID and returns a plain lexicon object
// (serialized to JSON by the caller).
const lexiconTemplates = {
    // Record lexicon: tid-keyed record with a single required createdAt field.
    record: (nsid) => {
        const tail = nsid.split('.').pop();
        return {
            lexicon: 1,
            id: nsid,
            defs: {
                main: {
                    type: 'record',
                    key: 'tid',
                    description: `A ${tail} record.`,
                    record: {
                        type: 'object',
                        required: ['createdAt'],
                        properties: {
                            createdAt: { type: 'string', format: 'datetime' },
                        },
                    },
                },
            },
        };
    },
    // Query lexicon: empty params, empty JSON-object output schema.
    query: (nsid) => {
        const tail = nsid.split('.').pop();
        return {
            lexicon: 1,
            id: nsid,
            defs: {
                main: {
                    type: 'query',
                    description: `${tail} query.`,
                    parameters: {
                        type: 'params',
                        properties: {},
                    },
                    output: {
                        encoding: 'application/json',
                        schema: { type: 'object', properties: {} },
                    },
                },
            },
        };
    },
    // Procedure lexicon: empty JSON-object input and output schemas.
    procedure: (nsid) => {
        const tail = nsid.split('.').pop();
        const emptyJsonBody = () => ({
            encoding: 'application/json',
            schema: { type: 'object', properties: {} },
        });
        return {
            lexicon: 1,
            id: nsid,
            defs: {
                main: {
                    type: 'procedure',
                    description: `${tail} procedure.`,
                    input: emptyJsonBody(),
                    output: emptyJsonBody(),
                },
            },
        };
    },
};
|
|
295
|
+
// Maps generator type → target directory for `hatk generate <type> <name>`
// (and for `hatk destroy`).
const dirs = {
    feed: 'feeds',
    xrpc: 'xrpc',
    label: 'labels',
    og: 'og',
    job: 'jobs',
};
|
|
302
|
+
// --- Commands ---
|
|
303
|
+
if (command === 'new') {
|
|
304
|
+
const name = args[1];
|
|
305
|
+
if (!name) {
|
|
306
|
+
console.error('Usage: hatk new <name> [--svelte] [--template <template-name>]');
|
|
307
|
+
process.exit(1);
|
|
308
|
+
}
|
|
309
|
+
const templateIdx = args.indexOf('--template');
|
|
310
|
+
const templateName = templateIdx !== -1 ? args[templateIdx + 1] : null;
|
|
311
|
+
if (templateIdx !== -1 && !templateName) {
|
|
312
|
+
console.error('Usage: hatk new <name> --template <template-name>');
|
|
313
|
+
process.exit(1);
|
|
314
|
+
}
|
|
315
|
+
const dir = resolve(name);
|
|
316
|
+
if (existsSync(dir)) {
|
|
317
|
+
console.error(`Directory ${name} already exists`);
|
|
318
|
+
process.exit(1);
|
|
319
|
+
}
|
|
320
|
+
if (templateName) {
|
|
321
|
+
const repo = `https://github.com/hatk-dev/hatk-template-${templateName}.git`;
|
|
322
|
+
console.log(`Cloning template ${templateName}...`);
|
|
323
|
+
try {
|
|
324
|
+
execSync(`git clone --depth 1 ${repo} ${dir}`, { stdio: 'inherit' });
|
|
325
|
+
}
|
|
326
|
+
catch {
|
|
327
|
+
console.error(`Failed to clone template: ${repo}`);
|
|
328
|
+
process.exit(1);
|
|
329
|
+
}
|
|
330
|
+
execSync(`rm -rf ${join(dir, '.git')}`);
|
|
331
|
+
const pkgPath = join(dir, 'package.json');
|
|
332
|
+
if (existsSync(pkgPath)) {
|
|
333
|
+
const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'));
|
|
334
|
+
pkg.name = name;
|
|
335
|
+
writeFileSync(pkgPath, JSON.stringify(pkg, null, 2) + '\n');
|
|
336
|
+
}
|
|
337
|
+
console.log(`\nCreated ${name}/ from template ${templateName}`);
|
|
338
|
+
console.log(`\n cd ${name}`);
|
|
339
|
+
console.log(` npm install`);
|
|
340
|
+
console.log(` hatk dev`);
|
|
341
|
+
process.exit(0);
|
|
342
|
+
}
|
|
343
|
+
const withSvelte = args.includes('--svelte');
|
|
344
|
+
mkdirSync(dir);
|
|
345
|
+
const subs = [
|
|
346
|
+
'lexicons',
|
|
347
|
+
'feeds',
|
|
348
|
+
'xrpc',
|
|
349
|
+
'og',
|
|
350
|
+
'labels',
|
|
351
|
+
'jobs',
|
|
352
|
+
'seeds',
|
|
353
|
+
'setup',
|
|
354
|
+
'public',
|
|
355
|
+
'test',
|
|
356
|
+
'test/feeds',
|
|
357
|
+
'test/xrpc',
|
|
358
|
+
'test/integration',
|
|
359
|
+
'test/browser',
|
|
360
|
+
'test/fixtures',
|
|
361
|
+
];
|
|
362
|
+
if (withSvelte)
|
|
363
|
+
subs.push('src', 'src/routes', 'src/lib');
|
|
364
|
+
for (const sub of subs) {
|
|
365
|
+
mkdirSync(join(dir, sub));
|
|
366
|
+
}
|
|
367
|
+
writeFileSync(join(dir, 'config.yaml'), `relay: ws://localhost:2583
|
|
368
|
+
plc: http://localhost:2582
|
|
369
|
+
port: 3000
|
|
370
|
+
database: data/hatk.db
|
|
371
|
+
admins: []
|
|
372
|
+
|
|
373
|
+
backfill:
|
|
374
|
+
parallelism: 10
|
|
375
|
+
`);
|
|
376
|
+
writeFileSync(join(dir, 'public', 'index.html'), `<!DOCTYPE html>
|
|
377
|
+
<html><head><title>${name}</title></head>
|
|
378
|
+
<body><h1>${name}</h1></body></html>
|
|
379
|
+
`);
|
|
380
|
+
// Scaffold core framework lexicons under dev.hatk namespace
|
|
381
|
+
const coreLexDir = join(dir, 'lexicons', 'dev', 'hatk');
|
|
382
|
+
mkdirSync(coreLexDir, { recursive: true });
|
|
383
|
+
writeFileSync(join(coreLexDir, 'describeCollections.json'), JSON.stringify({
|
|
384
|
+
lexicon: 1,
|
|
385
|
+
id: 'dev.hatk.describeCollections',
|
|
386
|
+
defs: {
|
|
387
|
+
main: {
|
|
388
|
+
type: 'query',
|
|
389
|
+
description: 'List indexed collections and their schemas.',
|
|
390
|
+
output: {
|
|
391
|
+
encoding: 'application/json',
|
|
392
|
+
schema: {
|
|
393
|
+
type: 'object',
|
|
394
|
+
properties: {
|
|
395
|
+
collections: {
|
|
396
|
+
type: 'array',
|
|
397
|
+
items: {
|
|
398
|
+
type: 'object',
|
|
399
|
+
required: ['collection'],
|
|
400
|
+
properties: {
|
|
401
|
+
collection: { type: 'string' },
|
|
402
|
+
columns: {
|
|
403
|
+
type: 'array',
|
|
404
|
+
items: {
|
|
405
|
+
type: 'object',
|
|
406
|
+
required: ['name', 'originalName', 'type', 'required'],
|
|
407
|
+
properties: {
|
|
408
|
+
name: { type: 'string' },
|
|
409
|
+
originalName: { type: 'string' },
|
|
410
|
+
type: { type: 'string' },
|
|
411
|
+
required: { type: 'boolean' },
|
|
412
|
+
},
|
|
413
|
+
},
|
|
414
|
+
},
|
|
415
|
+
},
|
|
416
|
+
},
|
|
417
|
+
},
|
|
418
|
+
},
|
|
419
|
+
},
|
|
420
|
+
},
|
|
421
|
+
},
|
|
422
|
+
},
|
|
423
|
+
}, null, 2) + '\n');
|
|
424
|
+
writeFileSync(join(coreLexDir, 'describeFeeds.json'), JSON.stringify({
|
|
425
|
+
lexicon: 1,
|
|
426
|
+
id: 'dev.hatk.describeFeeds',
|
|
427
|
+
defs: {
|
|
428
|
+
main: {
|
|
429
|
+
type: 'query',
|
|
430
|
+
description: 'List available feeds.',
|
|
431
|
+
output: {
|
|
432
|
+
encoding: 'application/json',
|
|
433
|
+
schema: {
|
|
434
|
+
type: 'object',
|
|
435
|
+
properties: {
|
|
436
|
+
feeds: {
|
|
437
|
+
type: 'array',
|
|
438
|
+
items: {
|
|
439
|
+
type: 'object',
|
|
440
|
+
required: ['name', 'label'],
|
|
441
|
+
properties: {
|
|
442
|
+
name: { type: 'string' },
|
|
443
|
+
label: { type: 'string' },
|
|
444
|
+
},
|
|
445
|
+
},
|
|
446
|
+
},
|
|
447
|
+
},
|
|
448
|
+
},
|
|
449
|
+
},
|
|
450
|
+
},
|
|
451
|
+
},
|
|
452
|
+
}, null, 2) + '\n');
|
|
453
|
+
writeFileSync(join(coreLexDir, 'describeLabels.json'), JSON.stringify({
|
|
454
|
+
lexicon: 1,
|
|
455
|
+
id: 'dev.hatk.describeLabels',
|
|
456
|
+
defs: {
|
|
457
|
+
main: {
|
|
458
|
+
type: 'query',
|
|
459
|
+
description: 'List available label definitions.',
|
|
460
|
+
output: {
|
|
461
|
+
encoding: 'application/json',
|
|
462
|
+
schema: {
|
|
463
|
+
type: 'object',
|
|
464
|
+
properties: {
|
|
465
|
+
definitions: {
|
|
466
|
+
type: 'array',
|
|
467
|
+
items: {
|
|
468
|
+
type: 'object',
|
|
469
|
+
required: ['identifier', 'severity', 'blurs', 'defaultSetting'],
|
|
470
|
+
properties: {
|
|
471
|
+
identifier: { type: 'string' },
|
|
472
|
+
severity: { type: 'string' },
|
|
473
|
+
blurs: { type: 'string' },
|
|
474
|
+
defaultSetting: { type: 'string' },
|
|
475
|
+
},
|
|
476
|
+
},
|
|
477
|
+
},
|
|
478
|
+
},
|
|
479
|
+
},
|
|
480
|
+
},
|
|
481
|
+
},
|
|
482
|
+
},
|
|
483
|
+
}, null, 2) + '\n');
|
|
484
|
+
writeFileSync(join(coreLexDir, 'createRecord.json'), JSON.stringify({
|
|
485
|
+
lexicon: 1,
|
|
486
|
+
id: 'dev.hatk.createRecord',
|
|
487
|
+
defs: {
|
|
488
|
+
main: {
|
|
489
|
+
type: 'procedure',
|
|
490
|
+
description: "Create a record via the user's PDS.",
|
|
491
|
+
input: {
|
|
492
|
+
encoding: 'application/json',
|
|
493
|
+
schema: {
|
|
494
|
+
type: 'object',
|
|
495
|
+
required: ['collection', 'repo', 'record'],
|
|
496
|
+
properties: {
|
|
497
|
+
collection: { type: 'string' },
|
|
498
|
+
repo: { type: 'string', format: 'did' },
|
|
499
|
+
record: { type: 'unknown' },
|
|
500
|
+
},
|
|
501
|
+
},
|
|
502
|
+
},
|
|
503
|
+
output: {
|
|
504
|
+
encoding: 'application/json',
|
|
505
|
+
schema: {
|
|
506
|
+
type: 'object',
|
|
507
|
+
properties: {
|
|
508
|
+
uri: { type: 'string', format: 'at-uri' },
|
|
509
|
+
cid: { type: 'string', format: 'cid' },
|
|
510
|
+
},
|
|
511
|
+
},
|
|
512
|
+
},
|
|
513
|
+
},
|
|
514
|
+
},
|
|
515
|
+
}, null, 2) + '\n');
|
|
516
|
+
writeFileSync(join(coreLexDir, 'deleteRecord.json'), JSON.stringify({
|
|
517
|
+
lexicon: 1,
|
|
518
|
+
id: 'dev.hatk.deleteRecord',
|
|
519
|
+
defs: {
|
|
520
|
+
main: {
|
|
521
|
+
type: 'procedure',
|
|
522
|
+
description: "Delete a record via the user's PDS.",
|
|
523
|
+
input: {
|
|
524
|
+
encoding: 'application/json',
|
|
525
|
+
schema: {
|
|
526
|
+
type: 'object',
|
|
527
|
+
required: ['collection', 'rkey'],
|
|
528
|
+
properties: {
|
|
529
|
+
collection: { type: 'string' },
|
|
530
|
+
rkey: { type: 'string' },
|
|
531
|
+
},
|
|
532
|
+
},
|
|
533
|
+
},
|
|
534
|
+
output: { encoding: 'application/json', schema: { type: 'object', properties: {} } },
|
|
535
|
+
},
|
|
536
|
+
},
|
|
537
|
+
}, null, 2) + '\n');
|
|
538
|
+
writeFileSync(join(coreLexDir, 'putRecord.json'), JSON.stringify({
|
|
539
|
+
lexicon: 1,
|
|
540
|
+
id: 'dev.hatk.putRecord',
|
|
541
|
+
defs: {
|
|
542
|
+
main: {
|
|
543
|
+
type: 'procedure',
|
|
544
|
+
description: "Create or update a record via the user's PDS.",
|
|
545
|
+
input: {
|
|
546
|
+
encoding: 'application/json',
|
|
547
|
+
schema: {
|
|
548
|
+
type: 'object',
|
|
549
|
+
required: ['collection', 'rkey', 'record'],
|
|
550
|
+
properties: {
|
|
551
|
+
collection: { type: 'string' },
|
|
552
|
+
rkey: { type: 'string' },
|
|
553
|
+
record: { type: 'unknown' },
|
|
554
|
+
repo: { type: 'string', format: 'did' },
|
|
555
|
+
},
|
|
556
|
+
},
|
|
557
|
+
},
|
|
558
|
+
output: {
|
|
559
|
+
encoding: 'application/json',
|
|
560
|
+
schema: {
|
|
561
|
+
type: 'object',
|
|
562
|
+
properties: {
|
|
563
|
+
uri: { type: 'string', format: 'at-uri' },
|
|
564
|
+
cid: { type: 'string', format: 'cid' },
|
|
565
|
+
},
|
|
566
|
+
},
|
|
567
|
+
},
|
|
568
|
+
},
|
|
569
|
+
},
|
|
570
|
+
}, null, 2) + '\n');
|
|
571
|
+
writeFileSync(join(coreLexDir, 'uploadBlob.json'), JSON.stringify({
|
|
572
|
+
lexicon: 1,
|
|
573
|
+
id: 'dev.hatk.uploadBlob',
|
|
574
|
+
defs: {
|
|
575
|
+
main: {
|
|
576
|
+
type: 'procedure',
|
|
577
|
+
description: "Upload a blob via the user's PDS.",
|
|
578
|
+
input: {
|
|
579
|
+
encoding: '*/*',
|
|
580
|
+
},
|
|
581
|
+
output: {
|
|
582
|
+
encoding: 'application/json',
|
|
583
|
+
schema: {
|
|
584
|
+
type: 'object',
|
|
585
|
+
required: ['blob'],
|
|
586
|
+
properties: {
|
|
587
|
+
blob: { type: 'blob' },
|
|
588
|
+
},
|
|
589
|
+
},
|
|
590
|
+
},
|
|
591
|
+
},
|
|
592
|
+
},
|
|
593
|
+
}, null, 2) + '\n');
|
|
594
|
+
writeFileSync(join(coreLexDir, 'getFeed.json'), JSON.stringify({
|
|
595
|
+
lexicon: 1,
|
|
596
|
+
id: 'dev.hatk.getFeed',
|
|
597
|
+
defs: {
|
|
598
|
+
main: {
|
|
599
|
+
type: 'query',
|
|
600
|
+
description: 'Retrieve a named feed of items.',
|
|
601
|
+
parameters: {
|
|
602
|
+
type: 'params',
|
|
603
|
+
required: ['feed'],
|
|
604
|
+
properties: {
|
|
605
|
+
feed: { type: 'string', description: 'Feed name' },
|
|
606
|
+
limit: { type: 'integer', minimum: 1, maximum: 100, default: 30 },
|
|
607
|
+
cursor: { type: 'string' },
|
|
608
|
+
},
|
|
609
|
+
},
|
|
610
|
+
output: {
|
|
611
|
+
encoding: 'application/json',
|
|
612
|
+
schema: {
|
|
613
|
+
type: 'object',
|
|
614
|
+
properties: {
|
|
615
|
+
items: { type: 'array', items: { type: 'unknown' } },
|
|
616
|
+
cursor: { type: 'string' },
|
|
617
|
+
},
|
|
618
|
+
},
|
|
619
|
+
},
|
|
620
|
+
},
|
|
621
|
+
},
|
|
622
|
+
}, null, 2) + '\n');
|
|
623
|
+
writeFileSync(join(coreLexDir, 'getRecord.json'), JSON.stringify({
|
|
624
|
+
lexicon: 1,
|
|
625
|
+
id: 'dev.hatk.getRecord',
|
|
626
|
+
defs: {
|
|
627
|
+
main: {
|
|
628
|
+
type: 'query',
|
|
629
|
+
description: 'Fetch a single record by AT URI.',
|
|
630
|
+
parameters: {
|
|
631
|
+
type: 'params',
|
|
632
|
+
required: ['uri'],
|
|
633
|
+
properties: {
|
|
634
|
+
uri: { type: 'string', format: 'at-uri' },
|
|
635
|
+
},
|
|
636
|
+
},
|
|
637
|
+
output: {
|
|
638
|
+
encoding: 'application/json',
|
|
639
|
+
schema: {
|
|
640
|
+
type: 'object',
|
|
641
|
+
properties: {
|
|
642
|
+
record: { type: 'unknown' },
|
|
643
|
+
},
|
|
644
|
+
},
|
|
645
|
+
},
|
|
646
|
+
},
|
|
647
|
+
},
|
|
648
|
+
}, null, 2) + '\n');
|
|
649
|
+
writeFileSync(join(coreLexDir, 'getRecords.json'), JSON.stringify({
|
|
650
|
+
lexicon: 1,
|
|
651
|
+
id: 'dev.hatk.getRecords',
|
|
652
|
+
defs: {
|
|
653
|
+
main: {
|
|
654
|
+
type: 'query',
|
|
655
|
+
description: 'List records from a collection with optional filters.',
|
|
656
|
+
parameters: {
|
|
657
|
+
type: 'params',
|
|
658
|
+
required: ['collection'],
|
|
659
|
+
properties: {
|
|
660
|
+
collection: { type: 'string' },
|
|
661
|
+
limit: { type: 'integer', minimum: 1, maximum: 100, default: 20 },
|
|
662
|
+
cursor: { type: 'string' },
|
|
663
|
+
sort: { type: 'string' },
|
|
664
|
+
order: { type: 'string' },
|
|
665
|
+
},
|
|
666
|
+
},
|
|
667
|
+
output: {
|
|
668
|
+
encoding: 'application/json',
|
|
669
|
+
schema: {
|
|
670
|
+
type: 'object',
|
|
671
|
+
properties: {
|
|
672
|
+
items: { type: 'array', items: { type: 'unknown' } },
|
|
673
|
+
cursor: { type: 'string' },
|
|
674
|
+
},
|
|
675
|
+
},
|
|
676
|
+
},
|
|
677
|
+
},
|
|
678
|
+
},
|
|
679
|
+
}, null, 2) + '\n');
|
|
680
|
+
writeFileSync(join(coreLexDir, 'searchRecords.json'), JSON.stringify({
|
|
681
|
+
lexicon: 1,
|
|
682
|
+
id: 'dev.hatk.searchRecords',
|
|
683
|
+
defs: {
|
|
684
|
+
main: {
|
|
685
|
+
type: 'query',
|
|
686
|
+
description: 'Full-text search across a collection.',
|
|
687
|
+
parameters: {
|
|
688
|
+
type: 'params',
|
|
689
|
+
required: ['collection', 'q'],
|
|
690
|
+
properties: {
|
|
691
|
+
collection: { type: 'string' },
|
|
692
|
+
q: { type: 'string', description: 'Search query' },
|
|
693
|
+
limit: { type: 'integer', minimum: 1, maximum: 100, default: 20 },
|
|
694
|
+
cursor: { type: 'string' },
|
|
695
|
+
fuzzy: { type: 'boolean', default: true },
|
|
696
|
+
},
|
|
697
|
+
},
|
|
698
|
+
output: {
|
|
699
|
+
encoding: 'application/json',
|
|
700
|
+
schema: {
|
|
701
|
+
type: 'object',
|
|
702
|
+
properties: {
|
|
703
|
+
items: { type: 'array', items: { type: 'unknown' } },
|
|
704
|
+
cursor: { type: 'string' },
|
|
705
|
+
},
|
|
706
|
+
},
|
|
707
|
+
},
|
|
708
|
+
},
|
|
709
|
+
},
|
|
710
|
+
}, null, 2) + '\n');
|
|
711
|
+
writeFileSync(join(dir, 'seeds', 'seed.ts'), `import { seed } from '../hatk.generated.ts'
|
|
712
|
+
|
|
713
|
+
const { createAccount, createRecord } = seed()
|
|
714
|
+
|
|
715
|
+
const alice = await createAccount('alice.test')
|
|
716
|
+
|
|
717
|
+
// await createRecord(alice, 'your.collection.here', {
|
|
718
|
+
// field: 'value',
|
|
719
|
+
// }, { rkey: 'my-record' })
|
|
720
|
+
|
|
721
|
+
console.log('\\n[seed] Done!')
|
|
722
|
+
`);
|
|
723
|
+
writeFileSync(join(dir, 'docker-compose.yml'), `services:
|
|
724
|
+
plc:
|
|
725
|
+
build:
|
|
726
|
+
context: https://github.com/did-method-plc/did-method-plc.git
|
|
727
|
+
dockerfile: packages/server/Dockerfile
|
|
728
|
+
ports:
|
|
729
|
+
- '2582:2582'
|
|
730
|
+
environment:
|
|
731
|
+
- DATABASE_URL=postgres://plc:plc@postgres:5432/plc
|
|
732
|
+
- PORT=2582
|
|
733
|
+
command: ['dumb-init', 'node', '--enable-source-maps', '../dist/bin.js']
|
|
734
|
+
depends_on:
|
|
735
|
+
postgres:
|
|
736
|
+
condition: service_healthy
|
|
737
|
+
healthcheck:
|
|
738
|
+
test: ['CMD-SHELL', 'wget -q --spider http://localhost:2582/_health || exit 1']
|
|
739
|
+
interval: 2s
|
|
740
|
+
timeout: 5s
|
|
741
|
+
retries: 15
|
|
742
|
+
|
|
743
|
+
pds:
|
|
744
|
+
image: ghcr.io/bluesky-social/pds:latest
|
|
745
|
+
ports:
|
|
746
|
+
- '2583:2583'
|
|
747
|
+
environment:
|
|
748
|
+
- PDS_HOSTNAME=localhost
|
|
749
|
+
- PDS_PORT=2583
|
|
750
|
+
- PDS_DID_PLC_URL=http://plc:2582
|
|
751
|
+
- PDS_DATA_DIRECTORY=/pds
|
|
752
|
+
- PDS_BLOBSTORE_DISK_LOCATION=/pds/blobs
|
|
753
|
+
- PDS_JWT_SECRET=dev-jwt-secret
|
|
754
|
+
- PDS_ADMIN_PASSWORD=dev-admin
|
|
755
|
+
- PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX=0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef
|
|
756
|
+
- PDS_INVITE_REQUIRED=false
|
|
757
|
+
- PDS_DEV_MODE=true
|
|
758
|
+
- LOG_ENABLED=true
|
|
759
|
+
volumes:
|
|
760
|
+
- pds_data:/pds
|
|
761
|
+
depends_on:
|
|
762
|
+
plc:
|
|
763
|
+
condition: service_healthy
|
|
764
|
+
healthcheck:
|
|
765
|
+
test: ['CMD-SHELL', 'wget -q --spider http://localhost:2583/xrpc/_health || exit 1']
|
|
766
|
+
interval: 2s
|
|
767
|
+
timeout: 5s
|
|
768
|
+
retries: 15
|
|
769
|
+
|
|
770
|
+
postgres:
|
|
771
|
+
image: postgres:16-alpine
|
|
772
|
+
environment:
|
|
773
|
+
- POSTGRES_USER=plc
|
|
774
|
+
- POSTGRES_PASSWORD=plc
|
|
775
|
+
- POSTGRES_DB=plc
|
|
776
|
+
volumes:
|
|
777
|
+
- plc_data:/var/lib/postgresql/data
|
|
778
|
+
healthcheck:
|
|
779
|
+
test: ['CMD-SHELL', 'pg_isready -U plc']
|
|
780
|
+
interval: 2s
|
|
781
|
+
timeout: 5s
|
|
782
|
+
retries: 10
|
|
783
|
+
|
|
784
|
+
volumes:
|
|
785
|
+
pds_data:
|
|
786
|
+
plc_data:
|
|
787
|
+
`);
|
|
788
|
+
writeFileSync(join(dir, '.dockerignore'), `node_modules
|
|
789
|
+
data
|
|
790
|
+
.svelte-kit
|
|
791
|
+
public
|
|
792
|
+
`);
|
|
793
|
+
writeFileSync(join(dir, 'Dockerfile'), `FROM node:25-slim
|
|
794
|
+
WORKDIR /app
|
|
795
|
+
COPY package.json package-lock.json ./
|
|
796
|
+
RUN npm ci --omit=dev
|
|
797
|
+
COPY . .
|
|
798
|
+
RUN node_modules/.bin/hatk build
|
|
799
|
+
EXPOSE 3000
|
|
800
|
+
CMD ["node", "--experimental-strip-types", "--no-warnings", "node_modules/hatk/src/main.ts", "config.yaml"]
|
|
801
|
+
`);
|
|
802
|
+
const pkgDeps = { '@hatk/oauth-client': '*', hatk: '*' };
|
|
803
|
+
const pkgDevDeps = {
|
|
804
|
+
'@playwright/test': '^1',
|
|
805
|
+
oxfmt: '^0.35.0',
|
|
806
|
+
oxlint: '^1',
|
|
807
|
+
typescript: '^5',
|
|
808
|
+
vite: '^6',
|
|
809
|
+
vitest: '^4',
|
|
810
|
+
};
|
|
811
|
+
if (withSvelte) {
|
|
812
|
+
pkgDevDeps['@sveltejs/adapter-static'] = '^3';
|
|
813
|
+
pkgDevDeps['@sveltejs/kit'] = '^2';
|
|
814
|
+
pkgDevDeps['@sveltejs/vite-plugin-svelte'] = '^5';
|
|
815
|
+
pkgDevDeps['svelte'] = '^5';
|
|
816
|
+
pkgDevDeps['svelte-check'] = '^4';
|
|
817
|
+
}
|
|
818
|
+
writeFileSync(join(dir, 'package.json'), JSON.stringify({
|
|
819
|
+
name,
|
|
820
|
+
private: true,
|
|
821
|
+
type: 'module',
|
|
822
|
+
scripts: {
|
|
823
|
+
start: 'hatk start',
|
|
824
|
+
dev: 'hatk dev',
|
|
825
|
+
build: 'hatk build',
|
|
826
|
+
check: 'hatk check',
|
|
827
|
+
format: 'hatk format',
|
|
828
|
+
},
|
|
829
|
+
dependencies: pkgDeps,
|
|
830
|
+
devDependencies: pkgDevDeps,
|
|
831
|
+
}, null, 2) + '\n');
|
|
832
|
+
writeFileSync(join(dir, 'tsconfig.server.json'), JSON.stringify({
|
|
833
|
+
compilerOptions: {
|
|
834
|
+
target: 'ES2022',
|
|
835
|
+
module: 'Node16',
|
|
836
|
+
moduleResolution: 'Node16',
|
|
837
|
+
strict: true,
|
|
838
|
+
esModuleInterop: true,
|
|
839
|
+
skipLibCheck: true,
|
|
840
|
+
noEmit: true,
|
|
841
|
+
allowImportingTsExtensions: true,
|
|
842
|
+
resolveJsonModule: true,
|
|
843
|
+
},
|
|
844
|
+
include: ['feeds', 'xrpc', 'og', 'seeds', 'labels', 'jobs', 'setup', 'hatk.generated.ts'],
|
|
845
|
+
}, null, 2) + '\n');
|
|
846
|
+
writeFileSync(join(dir, 'playwright.config.ts'), `import { defineConfig } from '@playwright/test'
|
|
847
|
+
|
|
848
|
+
export default defineConfig({
|
|
849
|
+
testDir: 'test/browser',
|
|
850
|
+
use: { baseURL: 'http://127.0.0.1:3000' },
|
|
851
|
+
globalSetup: './test/browser/global-setup.ts',
|
|
852
|
+
})
|
|
853
|
+
`);
|
|
854
|
+
writeFileSync(join(dir, 'test/browser/global-setup.ts'), `import { execSync } from 'node:child_process'
|
|
855
|
+
import { existsSync } from 'node:fs'
|
|
856
|
+
|
|
857
|
+
export default function globalSetup() {
|
|
858
|
+
if (existsSync('src/app.html')) {
|
|
859
|
+
execSync('npx vite build', { stdio: 'inherit' })
|
|
860
|
+
}
|
|
861
|
+
}
|
|
862
|
+
`);
|
|
863
|
+
writeFileSync(join(dir, '.gitignore'), `node_modules/
|
|
864
|
+
*.db
|
|
865
|
+
data/
|
|
866
|
+
test-results/
|
|
867
|
+
.svelte-kit/
|
|
868
|
+
.DS_Store
|
|
869
|
+
public/
|
|
870
|
+
`);
|
|
871
|
+
writeFileSync(join(dir, '.oxlintrc.json'), `{
|
|
872
|
+
"ignorePatterns": ["public", "data", ".svelte-kit", "hatk.generated.ts"]
|
|
873
|
+
}
|
|
874
|
+
`);
|
|
875
|
+
writeFileSync(join(dir, '.oxfmtrc.json'), `{
|
|
876
|
+
"semi": false,
|
|
877
|
+
"singleQuote": true,
|
|
878
|
+
"trailingComma": "all",
|
|
879
|
+
"printWidth": 120,
|
|
880
|
+
"tabWidth": 2,
|
|
881
|
+
"ignorePatterns": ["public", "data", ".svelte-kit", "hatk.generated.ts"]
|
|
882
|
+
}
|
|
883
|
+
`);
|
|
884
|
+
if (withSvelte) {
|
|
885
|
+
writeFileSync(join(dir, 'svelte.config.js'), `import adapter from '@sveltejs/adapter-static'
|
|
886
|
+
|
|
887
|
+
export default {
|
|
888
|
+
kit: {
|
|
889
|
+
adapter: adapter({
|
|
890
|
+
pages: 'public',
|
|
891
|
+
assets: 'public',
|
|
892
|
+
fallback: 'index.html',
|
|
893
|
+
}),
|
|
894
|
+
paths: { base: '' },
|
|
895
|
+
alias: {
|
|
896
|
+
$hatk: './hatk.generated.ts',
|
|
897
|
+
},
|
|
898
|
+
},
|
|
899
|
+
}
|
|
900
|
+
`);
|
|
901
|
+
writeFileSync(join(dir, 'vite.config.ts'), `import { sveltekit } from '@sveltejs/kit/vite'
|
|
902
|
+
import { hatk } from 'hatk/vite-plugin'
|
|
903
|
+
import { defineConfig } from 'vite'
|
|
904
|
+
|
|
905
|
+
export default defineConfig({
|
|
906
|
+
plugins: [sveltekit(), hatk()],
|
|
907
|
+
})
|
|
908
|
+
`);
|
|
909
|
+
writeFileSync(join(dir, 'tsconfig.json'), JSON.stringify({
|
|
910
|
+
extends: './.svelte-kit/tsconfig.json',
|
|
911
|
+
compilerOptions: {
|
|
912
|
+
allowJs: true,
|
|
913
|
+
checkJs: false,
|
|
914
|
+
esModuleInterop: true,
|
|
915
|
+
forceConsistentCasingInFileNames: true,
|
|
916
|
+
resolveJsonModule: true,
|
|
917
|
+
skipLibCheck: true,
|
|
918
|
+
sourceMap: true,
|
|
919
|
+
strict: true,
|
|
920
|
+
moduleResolution: 'bundler',
|
|
921
|
+
allowImportingTsExtensions: true,
|
|
922
|
+
},
|
|
923
|
+
}, null, 2) + '\n');
|
|
924
|
+
writeFileSync(join(dir, 'src/app.html'), `<!doctype html>
|
|
925
|
+
<html lang="en">
|
|
926
|
+
<head>
|
|
927
|
+
<meta charset="utf-8" />
|
|
928
|
+
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
929
|
+
<title>${name}</title>
|
|
930
|
+
%sveltekit.head%
|
|
931
|
+
</head>
|
|
932
|
+
<body data-sveltekit-preload-data="hover">
|
|
933
|
+
<div style="display: contents">%sveltekit.body%</div>
|
|
934
|
+
</body>
|
|
935
|
+
</html>
|
|
936
|
+
`);
|
|
937
|
+
writeFileSync(join(dir, 'src/app.css'), `*,
|
|
938
|
+
*::before,
|
|
939
|
+
*::after {
|
|
940
|
+
box-sizing: border-box;
|
|
941
|
+
margin: 0;
|
|
942
|
+
padding: 0;
|
|
943
|
+
}
|
|
944
|
+
|
|
945
|
+
:root {
|
|
946
|
+
--bg-root: #080b12;
|
|
947
|
+
--bg-surface: #0f1419;
|
|
948
|
+
--bg-elevated: #161d27;
|
|
949
|
+
--bg-hover: #1c2633;
|
|
950
|
+
--border: #1e293b;
|
|
951
|
+
--teal: #14b8a6;
|
|
952
|
+
--text-primary: #e2e8f0;
|
|
953
|
+
--text-secondary: #94a3b8;
|
|
954
|
+
--text-muted: #64748b;
|
|
955
|
+
}
|
|
956
|
+
|
|
957
|
+
html {
|
|
958
|
+
background: var(--bg-root);
|
|
959
|
+
color: var(--text-primary);
|
|
960
|
+
}
|
|
961
|
+
|
|
962
|
+
body {
|
|
963
|
+
font-family: -apple-system, system-ui, sans-serif;
|
|
964
|
+
font-size: 15px;
|
|
965
|
+
line-height: 1.5;
|
|
966
|
+
min-height: 100vh;
|
|
967
|
+
}
|
|
968
|
+
|
|
969
|
+
a {
|
|
970
|
+
color: inherit;
|
|
971
|
+
text-decoration: none;
|
|
972
|
+
}
|
|
973
|
+
`);
|
|
974
|
+
writeFileSync(join(dir, 'src/routes/+layout.svelte'), `<script lang="ts">
|
|
975
|
+
import type { Snippet } from 'svelte'
|
|
976
|
+
import '../app.css'
|
|
977
|
+
|
|
978
|
+
let { children }: { children: Snippet } = $props()
|
|
979
|
+
</script>
|
|
980
|
+
|
|
981
|
+
{@render children()}
|
|
982
|
+
`);
|
|
983
|
+
writeFileSync(join(dir, 'src/routes/+page.svelte'), `<h1>${name}</h1>
|
|
984
|
+
<p>Your hatk server is running.</p>
|
|
985
|
+
`);
|
|
986
|
+
writeFileSync(join(dir, 'src/error.html'), `<!doctype html>
|
|
987
|
+
<html lang="en">
|
|
988
|
+
<head>
|
|
989
|
+
<meta charset="utf-8" />
|
|
990
|
+
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
991
|
+
<title>%sveltekit.error.message% — ${name}</title>
|
|
992
|
+
<style>
|
|
993
|
+
* { box-sizing: border-box; margin: 0; padding: 0; }
|
|
994
|
+
body {
|
|
995
|
+
font-family: -apple-system, system-ui, sans-serif;
|
|
996
|
+
background: #080b12; color: #e2e8f0;
|
|
997
|
+
min-height: 100vh; display: flex; align-items: center; justify-content: center;
|
|
998
|
+
}
|
|
999
|
+
.error-page { display: flex; flex-direction: column; align-items: center; text-align: center; gap: 8px; padding: 24px; }
|
|
1000
|
+
.error-code { font-size: 72px; font-weight: 800; color: #14b8a6; line-height: 1; }
|
|
1001
|
+
.error-title { font-size: 24px; font-weight: 800; }
|
|
1002
|
+
.error-link {
|
|
1003
|
+
margin-top: 16px; padding: 10px 24px; background: #14b8a6; color: #000;
|
|
1004
|
+
border-radius: 20px; font-weight: 600; font-size: 14px; text-decoration: none;
|
|
1005
|
+
}
|
|
1006
|
+
</style>
|
|
1007
|
+
</head>
|
|
1008
|
+
<body>
|
|
1009
|
+
<div class="error-page">
|
|
1010
|
+
<span class="error-code">%sveltekit.status%</span>
|
|
1011
|
+
<h1 class="error-title">%sveltekit.error.message%</h1>
|
|
1012
|
+
<a href="/" class="error-link">Back to home</a>
|
|
1013
|
+
</div>
|
|
1014
|
+
</body>
|
|
1015
|
+
</html>
|
|
1016
|
+
`);
|
|
1017
|
+
writeFileSync(join(dir, 'src/routes/+error.svelte'), `<script lang="ts">
|
|
1018
|
+
import { page } from '$app/state'
|
|
1019
|
+
</script>
|
|
1020
|
+
|
|
1021
|
+
<div style="display: flex; flex-direction: column; align-items: center; justify-content: center; min-height: 80vh; gap: 8px;">
|
|
1022
|
+
<span style="font-size: 72px; font-weight: 800; color: var(--teal);">{page.status}</span>
|
|
1023
|
+
<h1 style="font-size: 24px; font-weight: 800;">{page.error?.message}</h1>
|
|
1024
|
+
<a href="/" style="margin-top: 16px; padding: 10px 24px; background: var(--teal); color: #000; border-radius: 20px; font-weight: 600; font-size: 14px;">Back to home</a>
|
|
1025
|
+
</div>
|
|
1026
|
+
`);
|
|
1027
|
+
}
|
|
1028
|
+
console.log(`Created ${name}/`);
|
|
1029
|
+
console.log(` config.yaml`);
|
|
1030
|
+
console.log(` lexicons/ — lexicon JSON files (core + your own)`);
|
|
1031
|
+
console.log(` feeds/ — feed generators`);
|
|
1032
|
+
console.log(` xrpc/ — XRPC method handlers`);
|
|
1033
|
+
console.log(` og/ — OpenGraph image routes`);
|
|
1034
|
+
console.log(` labels/ — label definitions + rules`);
|
|
1035
|
+
console.log(` jobs/ — periodic tasks`);
|
|
1036
|
+
console.log(` seeds/ — seed fixture data (hatk seed)`);
|
|
1037
|
+
console.log(` setup/ — boot-time setup scripts (run before server starts)`);
|
|
1038
|
+
console.log(` test/ — test files (hatk test)`);
|
|
1039
|
+
console.log(` public/ — static files`);
|
|
1040
|
+
console.log(` docker-compose.yml — local PDS for development`);
|
|
1041
|
+
console.log(` Dockerfile — production container`);
|
|
1042
|
+
if (withSvelte) {
|
|
1043
|
+
console.log(` src/ — SvelteKit frontend`);
|
|
1044
|
+
console.log(` svelte.config.js`);
|
|
1045
|
+
console.log(` vite.config.ts`);
|
|
1046
|
+
}
|
|
1047
|
+
// Generate types so the project is ready to go
|
|
1048
|
+
execSync('npx hatk generate types', { stdio: 'inherit', cwd: dir });
|
|
1049
|
+
if (withSvelte) {
|
|
1050
|
+
execSync('npx svelte-kit sync', { stdio: 'inherit', cwd: dir });
|
|
1051
|
+
}
|
|
1052
|
+
}
|
|
1053
|
+
else if (command === 'generate') {
    // `hatk generate <what>` — three modes, dispatched on args[1]:
    //   types            → compile ./lexicons/*.json into ./hatk.generated.ts
    //   <lexiconTemplate>→ scaffold a new lexicon JSON file, then regenerate types
    //   <codeTemplate>   → scaffold a handler/feed/etc. source file (+ test stub)
    const type = args[1];
    if (type === 'types') {
        const lexiconsDir = './lexicons';
        const outPath = './hatk.generated.ts';
        if (!existsSync(lexiconsDir)) {
            console.error(`Lexicons directory not found: ${lexiconsDir}`);
            process.exit(1);
        }
        const lexicons = loadLexicons(resolve(lexiconsDir));
        // Classify all lexicons — include defs-only lexicons for registry
        const entries = [];
        for (const [nsid, lex] of lexicons) {
            const defType = lex.defs?.main?.type;
            if (defType === 'record' || defType === 'query' || defType === 'procedure') {
                entries.push({ nsid, defType });
            }
            else if (lex.defs && Object.keys(lex.defs).length > 0) {
                // Defs-only lexicon (shared types, no main record/query/procedure)
                entries.push({ nsid, defType: null });
            }
        }
        // Sort for deterministic output — the generated file is committed, so
        // ordering must not depend on filesystem iteration order.
        entries.sort((a, b) => a.nsid.localeCompare(b.nsid));
        if (entries.length === 0) {
            console.error('No lexicons found');
            process.exit(1);
        }
        // Build unique variable names from NSIDs
        // First pass: detect which leaf names collide
        const leafCount = new Map();
        for (const { nsid } of entries) {
            const leaf = nsid.split('.').pop();
            leafCount.set(leaf, (leafCount.get(leaf) || 0) + 1);
        }
        const varNames = new Map();
        const usedNames = new Set();
        for (const { nsid } of entries) {
            const parts = nsid.split('.');
            const leaf = parts[parts.length - 1];
            let name;
            if (leafCount.get(leaf) > 1) {
                // Collision: use authority + path segments (skip TLD)
                // e.g. app.bsky.actor.profile → bskyActorProfile
                name = parts
                    .slice(1)
                    .join('.')
                    .split('.')
                    .map((s, i) => (i === 0 ? s : s.charAt(0).toUpperCase() + s.slice(1)))
                    .join('');
            }
            else {
                name = leaf;
            }
            // Final dedup fallback
            if (usedNames.has(name))
                name = name + '2';
            usedNames.add(name);
            varNames.set(nsid, name);
        }
        const capitalize = (s) => s.charAt(0).toUpperCase() + s.slice(1);
        // Maps a lexicon main-def type to the generic wrapper type emitted for it.
        const wrapperMap = {
            record: 'LexRecord',
            query: 'LexQuery',
            procedure: 'LexProcedure',
        };
        // Collect which wrappers are used (only from entries with a main type)
        const usedWrappers = new Set(entries.filter((e) => e.defType).map((e) => wrapperMap[e.defType]));
        let out = '// Auto-generated from lexicons. Do not edit.\n';
        // NOTE: this import line is rewritten later by the hasLexDef patch below —
        // keep its shape in sync with that regex replacement.
        out += `import type { ${[...usedWrappers].sort().join(', ')}, LexServerParams, Checked, Prettify, StrictArg } from 'hatk/lex-types'\n`;
        out += `import type { XrpcContext } from 'hatk/xrpc'\n`;
        out += `import { defineFeed as _defineFeed, type FeedResult, type FeedContext, type HydrateContext } from 'hatk/feeds'\n`;
        out += `import { seed as _seed, type SeedOpts } from 'hatk/seed'\n`;
        // Emit ALL lexicons as `const ... = {...} as const` (including defs-only)
        out += `\n// ─── Lexicon Definitions ────────────────────────────────────────────\n\n`;
        for (const { nsid } of entries) {
            const varName = varNames.get(nsid);
            const content = lexicons.get(nsid);
            out += `const ${varName}Lex = ${JSON.stringify(content)} as const\n`;
        }
        // Registry includes ALL lexicons so cross-lexicon refs resolve
        out += `\n// ─── Type Registry ──────────────────────────────────────────────────\n\n`;
        out += `type Registry = {\n`;
        for (const { nsid } of entries) {
            const varName = varNames.get(nsid);
            out += ` '${nsid}': typeof ${varName}Lex\n`;
        }
        out += `}\n\n`;
        // Emit main type only for lexicons with a typed main def
        out += `// ─── Record & Method Types ──────────────────────────────────────────\n\n`;
        for (const { nsid, defType } of entries) {
            if (!defType)
                continue;
            // createRecord/deleteRecord/putRecord get typed overrides after RecordRegistry
            if (nsid === 'dev.hatk.createRecord' || nsid === 'dev.hatk.deleteRecord' || nsid === 'dev.hatk.putRecord')
                continue;
            const varName = varNames.get(nsid);
            const typeName = capitalize(varName);
            const wrapper = wrapperMap[defType];
            out += `export type ${typeName} = Prettify<${wrapper}<typeof ${varName}Lex, Registry>>\n`;
        }
        // Emit RecordRegistry for typesafe search/resolve
        const recordEntries = entries.filter((e) => e.defType === 'record');
        if (recordEntries.length > 0) {
            out += `\nexport type RecordRegistry = {\n`;
            for (const { nsid } of recordEntries) {
                const varName = varNames.get(nsid);
                out += ` '${nsid}': ${capitalize(varName)}\n`;
            }
            out += `}\n\n`;
            // Emit typed CreateRecord/DeleteRecord using RecordRegistry
            out += `export type CreateRecord = {\n`;
            out += ` params: {}\n`;
            out += ` input: { [K in keyof RecordRegistry]: { collection: K; record: RecordRegistry[K]; repo?: string } }[keyof RecordRegistry]\n`;
            out += ` output: { uri?: string; cid?: string }\n`;
            out += `}\n\n`;
            out += `export type DeleteRecord = {\n`;
            out += ` params: {}\n`;
            out += ` input: { [K in keyof RecordRegistry]: { collection: K; rkey: string } }[keyof RecordRegistry]\n`;
            out += ` output: {}\n`;
            out += `}\n\n`;
            out += `export type PutRecord = {\n`;
            out += ` params: {}\n`;
            out += ` input: { [K in keyof RecordRegistry]: { collection: K; rkey: string; record: RecordRegistry[K]; repo?: string } }[keyof RecordRegistry]\n`;
            out += ` output: { uri?: string; cid?: string }\n`;
            out += `}\n\n`;
        }
        else {
            // No record lexicons — emit empty registries and basic CRUD types
            out += `\nexport type RecordRegistry = {}\n\n`;
            out += `export type CreateRecord = LexProcedure<typeof createRecordLex, Registry>\n`;
            out += `export type DeleteRecord = LexProcedure<typeof deleteRecordLex, Registry>\n`;
            out += `export type PutRecord = LexProcedure<typeof putRecordLex, Registry>\n\n`;
        }
        // Emit named defs (non-main, non-record types like views, objects)
        // Use def name as-is; only prefix with lexicon name on collision
        out += `// ─── Named Defs (Views, Objects) ────────────────────────────────────\n\n`;
        let hasLexDef = false;
        // First pass: collect all def names to detect collisions
        const defOccurrences = new Map();
        for (const { nsid } of entries) {
            const lex = lexicons.get(nsid);
            for (const defName of Object.keys(lex.defs || {})) {
                if (defName === 'main')
                    continue;
                const def = lex.defs[defName];
                if (def.type === 'object' && def.properties) {
                    const name = capitalize(defName);
                    defOccurrences.set(name, (defOccurrences.get(name) || 0) + 1);
                }
            }
        }
        // Second pass: emit, prefixing only when name collides
        // Seed with main type names to avoid collision with those
        const emittedDefNames = new Set(entries.filter((e) => e.defType).map((e) => capitalize(varNames.get(e.nsid))));
        // Track view defs for views identity helpers
        const viewEntries = [];
        for (const { nsid } of entries) {
            const varName = varNames.get(nsid);
            const typeName = capitalize(varName);
            const lex = lexicons.get(nsid);
            for (const defName of Object.keys(lex.defs || {})) {
                if (defName === 'main')
                    continue;
                const def = lex.defs[defName];
                if (def.type === 'object' && def.properties) {
                    if (!hasLexDef)
                        hasLexDef = true;
                    let name = capitalize(defName);
                    // Also check collision with main type names
                    const needsPrefix = (defOccurrences.get(name) || 0) > 1 || emittedDefNames.has(name);
                    if (needsPrefix)
                        name = typeName + name;
                    // Final dedup fallback
                    if (emittedDefNames.has(name))
                        name = name + '2';
                    emittedDefNames.add(name);
                    out += `export type ${name} = Prettify<LexDef<typeof ${varName}Lex, '${defName}', Registry>>\n`;
                    // Detect view defs for views identity helpers
                    if (/View(Basic|Detailed)?$/.test(defName)) {
                        const fullNsid = `${nsid}#${defName}`;
                        // Pattern 1: inline view — has ref: "#main", collection is this lexicon
                        const hasMainRef = Object.values(def.properties).some((p) => p.type === 'ref' && p.ref === '#main');
                        if (hasMainRef) {
                            viewEntries.push({ fullNsid, typeName: name, collection: nsid });
                        }
                        else {
                            // Pattern 2: defs view — derive collection from naming convention
                            const recordName = defName.match(/^(.+?)View(Basic|Detailed)?$/)?.[1];
                            let found = false;
                            if (recordName) {
                                const namespace = nsid.split('.').slice(0, -1).join('.');
                                const collectionNsid = `${namespace}.${recordName}`;
                                const collectionLex = lexicons.get(collectionNsid);
                                if (collectionLex?.defs?.main?.type === 'record') {
                                    viewEntries.push({ fullNsid, typeName: name, collection: collectionNsid });
                                    found = true;
                                }
                            }
                            // Pattern 3: cross-namespace view — has explicit ref to a record-type lexicon
                            if (!found) {
                                const recordRef = Object.values(def.properties).find((p) => p.type === 'ref' && !p.ref.startsWith('#') && lexicons.get(p.ref)?.defs?.main?.type === 'record');
                                if (recordRef) {
                                    viewEntries.push({ fullNsid, typeName: name, collection: recordRef.ref });
                                    found = true;
                                }
                            }
                            // Pattern 4: standalone view — not tied to a record, but still a reusable view type
                            if (!found) {
                                viewEntries.push({ fullNsid, typeName: name, collection: null });
                            }
                        }
                    }
                }
            }
        }
        // Emit XrpcSchema for typed XRPC clients — keyed by full NSID
        const methods = entries.filter((e) => e.defType === 'query' || e.defType === 'procedure');
        out += `\n// ─── XRPC Schema ────────────────────────────────────────────────────\n\n`;
        out += `export type XrpcSchema = {\n`;
        for (const { nsid } of methods) {
            const varName = varNames.get(nsid);
            out += ` '${nsid}': ${capitalize(varName)}\n`;
        }
        out += `}\n`;
        // Emit Ctx helper for typesafe XRPC handler contexts
        out += `\n// ─── XRPC Helpers ───────────────────────────────────────────────────\n\n`;
        out += `export type { HydrateContext } from 'hatk/feeds'\n`;
        out += `export { InvalidRequestError, NotFoundError } from 'hatk/xrpc'\n`;
        out += `export type Ctx<K extends keyof XrpcSchema & keyof Registry> = XrpcContext<\n`;
        out += ` LexServerParams<Registry[K], Registry>,\n`;
        out += ` RecordRegistry,\n`;
        out += ` K extends keyof XrpcSchema ? InputOf<K> : unknown\n`;
        out += `>\n`;
        // Emit typed handler helpers with ctx.ok() for strict return type enforcement
        out += `\ntype OutputOf<K extends keyof XrpcSchema> = XrpcSchema[K]['output']\n`;
        out += `type InputOf<K extends keyof XrpcSchema> = XrpcSchema[K] extends { input: infer I } ? I : unknown\n\n`;
        out += `export function defineQuery<K extends keyof XrpcSchema & string>(\n`;
        out += ` nsid: K,\n`;
        out += ` handler: (ctx: Ctx<K> & { ok: <T extends OutputOf<K>>(value: StrictArg<T, OutputOf<K>>) => Checked<OutputOf<K>> }) => Promise<Checked<OutputOf<K>>>,\n`;
        out += `) {\n`;
        out += ` return { handler: (ctx: any) => handler({ ...ctx, ok: (v: any) => v }) }\n`;
        out += `}\n\n`;
        out += `export function defineProcedure<K extends keyof XrpcSchema & string>(\n`;
        out += ` nsid: K,\n`;
        out += ` handler: (ctx: Ctx<K> & { ok: <T extends OutputOf<K>>(value: StrictArg<T, OutputOf<K>>) => Checked<OutputOf<K>> }) => Promise<Checked<OutputOf<K>>>,\n`;
        out += `) {\n`;
        out += ` return { handler: (ctx: any) => handler({ ...ctx, ok: (v: any) => v }) }\n`;
        out += `}\n\n`;
        out += `// ─── Feed & Seed Helpers ────────────────────────────────────────────\n\n`;
        out += `type FeedGenerate = (ctx: FeedContext & { ok: (value: FeedResult) => Checked<FeedResult> }) => Promise<Checked<FeedResult>>\n`;
        out += `export function defineFeed<K extends keyof RecordRegistry>(\n`;
        out += ` opts: { collection: K; view?: string; label: string; generate: FeedGenerate; hydrate?: (ctx: HydrateContext<RecordRegistry[K]>) => Promise<unknown[]> }\n`;
        out += `): ReturnType<typeof _defineFeed>\n`;
        out += `export function defineFeed(\n`;
        out += ` opts: { collection?: never; view?: never; label: string; generate: FeedGenerate; hydrate: (ctx: HydrateContext<any>) => Promise<unknown[]> }\n`;
        out += `): ReturnType<typeof _defineFeed>\n`;
        out += `export function defineFeed(opts: any) { return _defineFeed(opts) }\n`;
        out += `export function seed(opts?: SeedOpts) { return _seed<RecordRegistry>(opts) }\n`;
        // Emit view identity helpers for strict excess property checking on nested objects
        if (viewEntries.length > 0) {
            out += `\n// View identity helpers — wrap object literals to enable excess property checking.\n`;
            out += `// Usage: rows.map(r => views.statusView({ ...fields })) catches extra properties.\n`;
            out += `export const views = {\n`;
            for (const { typeName } of viewEntries) {
                // Use the deduped type name (lowercased) as key to avoid collisions
                // e.g., PlayView -> playView, BskyFeedDefsPlayView -> bskyFeedDefsPlayView
                const key = typeName[0].toLowerCase() + typeName.slice(1);
                out += ` ${key}: (v: ${typeName}): ${typeName} => v,\n`;
            }
            out += `} as const\n`;
        }
        // Patch imports to include LexDef if needed
        if (hasLexDef) {
            usedWrappers.add('LexDef');
            out = out.replace(/import type \{ ([^}]+) \} from 'hatk\/lex-types'/, `import type { ${[...usedWrappers].sort().join(', ')}, LexServerParams, Checked, Prettify, StrictArg } from 'hatk/lex-types'`);
        }
        writeFileSync(outPath, out);
        console.log(`Generated ${outPath} with ${entries.length} types: ${entries.map((e) => capitalize(varNames.get(e.nsid))).join(', ')}`);
    }
    else if (lexiconTemplates[type]) {
        // Scaffold a lexicon JSON file from a built-in template, keyed by NSID.
        const nsid = args[2];
        if (!nsid || !nsid.includes('.')) {
            console.error(`Usage: hatk generate ${type} <nsid> (e.g. com.example.myRecord)`);
            process.exit(1);
        }
        // NSID segments become nested directories; the leaf is the filename.
        const parts = nsid.split('.');
        const lexDir = join('lexicons', ...parts.slice(0, -1));
        mkdirSync(lexDir, { recursive: true });
        const filePath = join(lexDir, `${parts[parts.length - 1]}.json`);
        if (existsSync(filePath)) {
            console.error(`${filePath} already exists`);
            process.exit(1);
        }
        writeFileSync(filePath, JSON.stringify(lexiconTemplates[type](nsid), null, 2) + '\n');
        console.log(`Created ${filePath}`);
        // Auto-regenerate types
        execSync('npx hatk generate types', { stdio: 'inherit', cwd: process.cwd() });
    }
    else {
        // Scaffold a source file (feed/xrpc/og/label/job/...) from a template.
        const name = args[2];
        if (!type || !name || !templates[type]) {
            console.error(`Usage: hatk generate <${[...Object.keys(templates), ...Object.keys(lexiconTemplates)].join('|')}|types> <name>`);
            process.exit(1);
        }
        const baseDir = dirs[type];
        let filePath;
        if (type === 'xrpc') {
            // NSID → folder path: fm.teal.getStats → xrpc/fm/teal/getStats.ts
            const parts = name.split('.');
            const subDir = join(baseDir, ...parts.slice(0, -1));
            mkdirSync(subDir, { recursive: true });
            filePath = join(subDir, `${parts[parts.length - 1]}.ts`);
        }
        else {
            mkdirSync(baseDir, { recursive: true });
            filePath = join(baseDir, `${name}.ts`);
        }
        if (existsSync(filePath)) {
            console.error(`${filePath} already exists`);
            process.exit(1);
        }
        writeFileSync(filePath, templates[type](name));
        console.log(`Created ${filePath}`);
        // Scaffold test file if template exists
        const testTemplate = testTemplates[type];
        if (testTemplate) {
            const testDir = type === 'xrpc' ? 'test/xrpc' : `test/${baseDir}`;
            mkdirSync(testDir, { recursive: true });
            const testName = type === 'xrpc' ? name.split('.').pop() : name;
            const testPath = join(testDir, `${testName}.test.ts`);
            // Never overwrite an existing test — it may contain real assertions.
            if (!existsSync(testPath)) {
                writeFileSync(testPath, testTemplate(name));
                console.log(`Created ${testPath}`);
            }
        }
    }
}
|
|
1390
|
+
else if (command === 'destroy') {
|
|
1391
|
+
const type = args[1];
|
|
1392
|
+
const name = args[2];
|
|
1393
|
+
if (!type || !name || !dirs[type]) {
|
|
1394
|
+
console.error(`Usage: hatk destroy <${Object.keys(dirs).join('|')}> <name>`);
|
|
1395
|
+
process.exit(1);
|
|
1396
|
+
}
|
|
1397
|
+
const baseDir = dirs[type];
|
|
1398
|
+
let tsPath, jsPath;
|
|
1399
|
+
if (type === 'xrpc') {
|
|
1400
|
+
const parts = name.split('.');
|
|
1401
|
+
const leaf = parts[parts.length - 1];
|
|
1402
|
+
const subDir = join(baseDir, ...parts.slice(0, -1));
|
|
1403
|
+
tsPath = join(subDir, `${leaf}.ts`);
|
|
1404
|
+
jsPath = join(subDir, `${leaf}.js`);
|
|
1405
|
+
}
|
|
1406
|
+
else {
|
|
1407
|
+
tsPath = join(baseDir, `${name}.ts`);
|
|
1408
|
+
jsPath = join(baseDir, `${name}.js`);
|
|
1409
|
+
}
|
|
1410
|
+
const filePath = existsSync(tsPath) ? tsPath : existsSync(jsPath) ? jsPath : null;
|
|
1411
|
+
if (!filePath) {
|
|
1412
|
+
console.error(`No file found for ${type} "${name}"`);
|
|
1413
|
+
process.exit(1);
|
|
1414
|
+
}
|
|
1415
|
+
unlinkSync(filePath);
|
|
1416
|
+
console.log(`Removed ${filePath}`);
|
|
1417
|
+
// Clean up test file
|
|
1418
|
+
const testDir = type === 'xrpc' ? 'test/xrpc' : `test/${baseDir}`;
|
|
1419
|
+
const testName = type === 'xrpc' ? name.split('.').pop() : name;
|
|
1420
|
+
const testFile = join(testDir, `${testName}.test.ts`);
|
|
1421
|
+
if (existsSync(testFile)) {
|
|
1422
|
+
unlinkSync(testFile);
|
|
1423
|
+
console.log(`Removed ${testFile}`);
|
|
1424
|
+
}
|
|
1425
|
+
if (type === 'label') {
|
|
1426
|
+
console.log(`Note: existing applied labels for "${name}" remain in the database.`);
|
|
1427
|
+
}
|
|
1428
|
+
}
|
|
1429
|
+
else if (command === 'dev') {
    // Dev loop: bring up the local PDS first (see ensurePds), run seed
    // fixtures, then hand the terminal to the long-running dev process.
    await ensurePds();
    runSeed();
    try {
        if (existsSync(resolve('svelte.config.js')) && existsSync(resolve('src/app.html'))) {
            // SvelteKit project — vite dev starts the hatk server via the plugin
            execSync('npx vite dev', { stdio: 'inherit', cwd: process.cwd() });
        }
        else {
            // No frontend — just run the hatk server directly
            const mainPath = resolve(import.meta.dirname, 'main.ts');
            execSync(`npx tsx ${mainPath} config.yaml`, { stdio: 'inherit', cwd: process.cwd() });
        }
    }
    catch (e) {
        // execSync throws when the child is signal-terminated; Ctrl-C on the
        // dev server is a normal shutdown, so exit cleanly instead of dumping
        // a stack trace. Anything else is a real failure — rethrow.
        if (e.signal === 'SIGINT' || e.signal === 'SIGTERM')
            process.exit(0);
        throw e;
    }
}
|
|
1449
|
+
else if (command === 'format' || command === 'fmt') {
    // Run the oxfmt formatter over the project root.
    try {
        execSync('npx oxfmt .', { stdio: 'inherit', cwd: process.cwd() });
    }
    catch {
        // NOTE(review): this catch fires on ANY non-zero exit from the child —
        // including oxfmt being installed but failing on a file — yet always
        // prints the "not found" hint, which can be misleading. Confirm whether
        // the exit code should be inspected before printing this message.
        console.log('[format] oxfmt not found — install it with: npm install -D oxfmt');
    }
}
|
|
1457
|
+
else if (command === 'build') {
|
|
1458
|
+
if (existsSync(resolve('svelte.config.js')) && existsSync(resolve('src/app.html'))) {
|
|
1459
|
+
execSync('npx vite build', { stdio: 'inherit', cwd: process.cwd() });
|
|
1460
|
+
}
|
|
1461
|
+
else {
|
|
1462
|
+
console.log('[build] No frontend to build (API-only hatk)');
|
|
1463
|
+
}
|
|
1464
|
+
}
|
|
1465
|
+
else if (command === 'reset') {
|
|
1466
|
+
const config = loadConfig(resolve('config.yaml'));
|
|
1467
|
+
if (config.database !== ':memory:') {
|
|
1468
|
+
for (const suffix of ['', '.wal']) {
|
|
1469
|
+
const file = config.database + suffix;
|
|
1470
|
+
if (existsSync(file)) {
|
|
1471
|
+
unlinkSync(file);
|
|
1472
|
+
console.log(`[reset] deleted ${file}`);
|
|
1473
|
+
}
|
|
1474
|
+
}
|
|
1475
|
+
}
|
|
1476
|
+
if (existsSync(resolve('docker-compose.yml'))) {
|
|
1477
|
+
console.log('[reset] resetting PDS...');
|
|
1478
|
+
execSync('docker compose down -v', { stdio: 'inherit', cwd: process.cwd() });
|
|
1479
|
+
}
|
|
1480
|
+
console.log('[reset] done');
|
|
1481
|
+
}
|
|
1482
|
+
else if (command === 'check') {
|
|
1483
|
+
let failed = false;
|
|
1484
|
+
// Lexicon schema validation
|
|
1485
|
+
if (existsSync(resolve('lexicons'))) {
|
|
1486
|
+
console.log('[check] lexicons...');
|
|
1487
|
+
const { validateLexicons } = await import('@bigmoves/lexicon');
|
|
1488
|
+
const lexicons = loadLexicons(resolve('lexicons'));
|
|
1489
|
+
const errors = validateLexicons([...lexicons.values()]);
|
|
1490
|
+
if (errors) {
|
|
1491
|
+
for (const [nsid, errs] of Object.entries(errors)) {
|
|
1492
|
+
for (const err of errs) {
|
|
1493
|
+
console.error(` ${nsid}: ${err}`);
|
|
1494
|
+
}
|
|
1495
|
+
}
|
|
1496
|
+
failed = true;
|
|
1497
|
+
}
|
|
1498
|
+
}
|
|
1499
|
+
// Server code type checking (if tsconfig.server.json exists)
|
|
1500
|
+
if (existsSync(resolve('tsconfig.server.json'))) {
|
|
1501
|
+
console.log('[check] tsc (server)...');
|
|
1502
|
+
try {
|
|
1503
|
+
execSync('npx tsc --noEmit -p tsconfig.server.json', { stdio: 'inherit', cwd: process.cwd() });
|
|
1504
|
+
}
|
|
1505
|
+
catch {
|
|
1506
|
+
failed = true;
|
|
1507
|
+
}
|
|
1508
|
+
}
|
|
1509
|
+
// Svelte type checking (if SvelteKit project)
|
|
1510
|
+
if (existsSync(resolve('svelte.config.js')) && existsSync(resolve('src/app.html'))) {
|
|
1511
|
+
console.log('[check] svelte-check...');
|
|
1512
|
+
try {
|
|
1513
|
+
execSync('npx svelte-kit sync && npx svelte-check --tsconfig ./tsconfig.json', {
|
|
1514
|
+
stdio: 'inherit',
|
|
1515
|
+
cwd: process.cwd(),
|
|
1516
|
+
});
|
|
1517
|
+
}
|
|
1518
|
+
catch {
|
|
1519
|
+
failed = true;
|
|
1520
|
+
}
|
|
1521
|
+
}
|
|
1522
|
+
// Lint
|
|
1523
|
+
console.log('[check] oxlint...');
|
|
1524
|
+
try {
|
|
1525
|
+
execSync('npx oxlint .', { stdio: 'inherit', cwd: process.cwd() });
|
|
1526
|
+
}
|
|
1527
|
+
catch {
|
|
1528
|
+
failed = true;
|
|
1529
|
+
}
|
|
1530
|
+
if (failed)
|
|
1531
|
+
process.exit(1);
|
|
1532
|
+
}
|
|
1533
|
+
else if (command === 'test') {
|
|
1534
|
+
const knownFlags = new Set(['--unit', '--integration', '--browser', '--verbose']);
|
|
1535
|
+
const parsedFlags = args.slice(1).filter((a) => knownFlags.has(a));
|
|
1536
|
+
const extraArgs = args
|
|
1537
|
+
.slice(1)
|
|
1538
|
+
.filter((a) => !knownFlags.has(a))
|
|
1539
|
+
.join(' ');
|
|
1540
|
+
const flag = parsedFlags.find((f) => f !== '--verbose') || null;
|
|
1541
|
+
const verbose = parsedFlags.includes('--verbose');
|
|
1542
|
+
if (!verbose && !process.env.DEBUG)
|
|
1543
|
+
process.env.DEBUG = '0';
|
|
1544
|
+
const runUnit = !flag || flag === '--unit';
|
|
1545
|
+
const runIntegration = !flag || flag === '--integration';
|
|
1546
|
+
const runBrowser = !flag || flag === '--browser';
|
|
1547
|
+
// Integration and browser tests need PDS
|
|
1548
|
+
if (runIntegration || runBrowser) {
|
|
1549
|
+
await ensurePds();
|
|
1550
|
+
}
|
|
1551
|
+
if (!existsSync(resolve(process.cwd(), 'vite.config.ts'))) {
|
|
1552
|
+
console.error('No vite.config.ts found. Add one with the hatk() plugin to configure tests.');
|
|
1553
|
+
process.exit(1);
|
|
1554
|
+
}
|
|
1555
|
+
if (runUnit) {
|
|
1556
|
+
console.log('[test] running unit tests...');
|
|
1557
|
+
try {
|
|
1558
|
+
execSync(`npx vitest run --project unit ${extraArgs}`, { stdio: 'inherit', cwd: process.cwd() });
|
|
1559
|
+
}
|
|
1560
|
+
catch (e) {
|
|
1561
|
+
if (e.status === 130)
|
|
1562
|
+
process.exit(0);
|
|
1563
|
+
process.exit(e.status || 1);
|
|
1564
|
+
}
|
|
1565
|
+
}
|
|
1566
|
+
if (runIntegration) {
|
|
1567
|
+
const intDir = resolve(process.cwd(), 'test/integration');
|
|
1568
|
+
const hasIntegrationTests = existsSync(intDir) && readdirSync(intDir).some((f) => f.endsWith('.test.ts'));
|
|
1569
|
+
if (hasIntegrationTests) {
|
|
1570
|
+
console.log('[test] running integration tests...');
|
|
1571
|
+
try {
|
|
1572
|
+
execSync(`npx vitest run --project integration ${extraArgs}`, { stdio: 'inherit', cwd: process.cwd() });
|
|
1573
|
+
}
|
|
1574
|
+
catch (e) {
|
|
1575
|
+
if (e.status === 130)
|
|
1576
|
+
process.exit(0);
|
|
1577
|
+
process.exit(e.status || 1);
|
|
1578
|
+
}
|
|
1579
|
+
}
|
|
1580
|
+
}
|
|
1581
|
+
if (runBrowser) {
|
|
1582
|
+
const browserDir = resolve(process.cwd(), 'test/browser');
|
|
1583
|
+
const hasBrowserTests = existsSync(browserDir) && readdirSync(browserDir).some((f) => f.endsWith('.test.ts') || f.endsWith('.spec.ts'));
|
|
1584
|
+
if (hasBrowserTests) {
|
|
1585
|
+
console.log('[test] running browser tests...');
|
|
1586
|
+
try {
|
|
1587
|
+
execSync(`npx playwright test ${extraArgs}`, { stdio: 'inherit', cwd: process.cwd() });
|
|
1588
|
+
}
|
|
1589
|
+
catch (e) {
|
|
1590
|
+
if (e.status === 130)
|
|
1591
|
+
process.exit(0);
|
|
1592
|
+
process.exit(e.status || 1);
|
|
1593
|
+
}
|
|
1594
|
+
}
|
|
1595
|
+
}
|
|
1596
|
+
}
|
|
1597
|
+
// `hatk seed`: make sure the local PDS is running, then apply seed data.
else if (command === 'seed') {
await ensurePds();
runSeed();
}
|
|
1601
|
+
else if (command === 'resolve') {
|
|
1602
|
+
const nsid = args[1];
|
|
1603
|
+
if (!nsid) {
|
|
1604
|
+
console.error('Usage: hatk resolve <nsid>');
|
|
1605
|
+
process.exit(1);
|
|
1606
|
+
}
|
|
1607
|
+
const { resolveLexicon } = await import("./lexicon-resolve.js");
|
|
1608
|
+
console.log(`Resolving ${nsid} from registry...`);
|
|
1609
|
+
const resolved = await resolveLexicon(nsid);
|
|
1610
|
+
if (resolved.size === 0) {
|
|
1611
|
+
console.error(`Could not resolve ${nsid}`);
|
|
1612
|
+
process.exit(1);
|
|
1613
|
+
}
|
|
1614
|
+
for (const [id, lexicon] of resolved) {
|
|
1615
|
+
const parts = id.split('.');
|
|
1616
|
+
const lexDir = join('lexicons', ...parts.slice(0, -1));
|
|
1617
|
+
const filePath = join(lexDir, `${parts[parts.length - 1]}.json`);
|
|
1618
|
+
mkdirSync(lexDir, { recursive: true });
|
|
1619
|
+
writeFileSync(filePath, JSON.stringify(lexicon, null, 2) + '\n');
|
|
1620
|
+
console.log(` wrote ${filePath}`);
|
|
1621
|
+
}
|
|
1622
|
+
console.log(`\nResolved ${resolved.size} lexicon(s). Regenerating types...`);
|
|
1623
|
+
execSync('npx hatk generate types', { stdio: 'inherit', cwd: process.cwd() });
|
|
1624
|
+
}
|
|
1625
|
+
else if (command === 'schema') {
|
|
1626
|
+
const config = loadConfig(resolve('config.yaml'));
|
|
1627
|
+
if (config.database === ':memory:') {
|
|
1628
|
+
console.error('No database file configured (database is :memory:)');
|
|
1629
|
+
process.exit(1);
|
|
1630
|
+
}
|
|
1631
|
+
if (!existsSync(config.database)) {
|
|
1632
|
+
console.error(`Database not found: ${config.database}`);
|
|
1633
|
+
console.error('Run "hatk dev" first to create it.');
|
|
1634
|
+
process.exit(1);
|
|
1635
|
+
}
|
|
1636
|
+
const { DuckDBInstance } = await import('@duckdb/node-api');
|
|
1637
|
+
const instance = await DuckDBInstance.create(config.database);
|
|
1638
|
+
const con = await instance.connect();
|
|
1639
|
+
const tables = (await (await con.runAndReadAll(`SELECT table_name FROM information_schema.tables WHERE table_schema = 'main' ORDER BY table_name`)).getRowObjects());
|
|
1640
|
+
for (const { table_name } of tables) {
|
|
1641
|
+
console.log(`"${table_name}"`);
|
|
1642
|
+
const cols = (await (await con.runAndReadAll(`SELECT column_name, data_type, is_nullable FROM information_schema.columns WHERE table_name = '${table_name}' ORDER BY ordinal_position`)).getRowObjects());
|
|
1643
|
+
for (const col of cols) {
|
|
1644
|
+
const nullable = col.is_nullable === 'YES' ? '' : ' NOT NULL';
|
|
1645
|
+
console.log(` ${col.column_name.padEnd(20)} ${col.data_type}${nullable}`);
|
|
1646
|
+
}
|
|
1647
|
+
console.log();
|
|
1648
|
+
}
|
|
1649
|
+
}
|
|
1650
|
+
else if (command === 'start') {
|
|
1651
|
+
try {
|
|
1652
|
+
const mainPath = resolve(import.meta.dirname, 'main.ts');
|
|
1653
|
+
execSync(`npx tsx ${mainPath} config.yaml`, { stdio: 'inherit', cwd: process.cwd() });
|
|
1654
|
+
}
|
|
1655
|
+
catch (e) {
|
|
1656
|
+
if (e.signal === 'SIGINT' || e.signal === 'SIGTERM')
|
|
1657
|
+
process.exit(0);
|
|
1658
|
+
throw e;
|
|
1659
|
+
}
|
|
1660
|
+
}
|
|
1661
|
+
// Unknown or missing command — print the CLI help text.
else {
usage();
}
|