postgresdk 0.1.1-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +5833 -0
- package/dist/cli.js.map +1 -0
- package/dist/emit-client.d.ts +3 -0
- package/dist/emit-client.d.ts.map +1 -0
- package/dist/emit-client.js +114 -0
- package/dist/emit-client.js.map +1 -0
- package/dist/emit-include-builder.d.ts +2 -0
- package/dist/emit-include-builder.d.ts.map +1 -0
- package/dist/emit-include-builder.js +30 -0
- package/dist/emit-include-builder.js.map +1 -0
- package/dist/emit-include-loader.d.ts +9 -0
- package/dist/emit-include-loader.d.ts.map +1 -0
- package/dist/emit-include-loader.js +299 -0
- package/dist/emit-include-loader.js.map +1 -0
- package/dist/emit-include-spec.d.ts +2 -0
- package/dist/emit-include-spec.d.ts.map +1 -0
- package/dist/emit-include-spec.js +26 -0
- package/dist/emit-include-spec.js.map +1 -0
- package/dist/emit-logger.d.ts +1 -0
- package/dist/emit-logger.d.ts.map +1 -0
- package/dist/emit-logger.js +35 -0
- package/dist/emit-logger.js.map +1 -0
- package/dist/emit-routes.d.ts +20 -0
- package/dist/emit-routes.d.ts.map +1 -0
- package/dist/emit-routes.js +208 -0
- package/dist/emit-routes.js.map +1 -0
- package/dist/emit-types.d.ts +5 -0
- package/dist/emit-types.d.ts.map +1 -0
- package/dist/emit-types.js +51 -0
- package/dist/emit-types.js.map +1 -0
- package/dist/emit-zod.d.ts +5 -0
- package/dist/emit-zod.d.ts.map +1 -0
- package/dist/emit-zod.js +43 -0
- package/dist/emit-zod.js.map +1 -0
- package/dist/gen.config.d.ts +10 -0
- package/dist/gen.config.js +10 -0
- package/dist/gen.config.js.map +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5793 -0
- package/dist/index.js.map +1 -0
- package/dist/introspect.d.ts +26 -0
- package/dist/introspect.d.ts.map +1 -0
- package/dist/introspect.js +132 -0
- package/dist/introspect.js.map +1 -0
- package/dist/rel-classify.d.ts +10 -0
- package/dist/rel-classify.d.ts.map +1 -0
- package/dist/rel-classify.js +52 -0
- package/dist/rel-classify.js.map +1 -0
- package/dist/src/cli.d.ts +2 -0
- package/dist/src/cli.js +39 -0
- package/dist/src/cli.js.map +1 -0
- package/dist/src/emit-client.d.ts +3 -0
- package/dist/src/emit-client.js +114 -0
- package/dist/src/emit-client.js.map +1 -0
- package/dist/src/emit-include-builder.d.ts +2 -0
- package/dist/src/emit-include-builder.js +30 -0
- package/dist/src/emit-include-builder.js.map +1 -0
- package/dist/src/emit-include-loader.d.ts +9 -0
- package/dist/src/emit-include-loader.js +299 -0
- package/dist/src/emit-include-loader.js.map +1 -0
- package/dist/src/emit-include-spec.d.ts +2 -0
- package/dist/src/emit-include-spec.js +26 -0
- package/dist/src/emit-include-spec.js.map +1 -0
- package/dist/src/emit-logger.d.ts +1 -0
- package/dist/src/emit-logger.js +35 -0
- package/dist/src/emit-logger.js.map +1 -0
- package/dist/src/emit-routes.d.ts +20 -0
- package/dist/src/emit-routes.js +208 -0
- package/dist/src/emit-routes.js.map +1 -0
- package/dist/src/emit-types.d.ts +5 -0
- package/dist/src/emit-types.js +51 -0
- package/dist/src/emit-types.js.map +1 -0
- package/dist/src/emit-zod.d.ts +5 -0
- package/dist/src/emit-zod.js +43 -0
- package/dist/src/emit-zod.js.map +1 -0
- package/dist/src/index.d.ts +1 -0
- package/dist/src/index.js +83 -0
- package/dist/src/index.js.map +1 -0
- package/dist/src/introspect.d.ts +26 -0
- package/dist/src/introspect.js +132 -0
- package/dist/src/introspect.js.map +1 -0
- package/dist/src/rel-classify.d.ts +10 -0
- package/dist/src/rel-classify.js +52 -0
- package/dist/src/rel-classify.js.map +1 -0
- package/dist/src/utils.d.ts +6 -0
- package/dist/src/utils.js +17 -0
- package/dist/src/utils.js.map +1 -0
- package/dist/utils.d.ts +6 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +17 -0
- package/dist/utils.js.map +1 -0
- package/package.json +49 -0
@@ -0,0 +1,299 @@
|
|
1
|
+
/**
 * Emit the TypeScript source of a generic include loader module.
 *
 * The generated module:
 * - Walks the include spec
 * - Loads children in batches per edge kind (belongs-to, has-one, 1:N, M:N via junction)
 * - Stitches onto parent rows (mutates copies)
 *
 * @param graph    relation graph (not referenced by this emitter; the generated
 *                 file imports RELATION_GRAPH from "./include-builder" itself)
 * @param model    introspected DB model; each table in model.tables must expose
 *                 { name, pk, fks } — used to inline FK_INDEX and PKS constants
 * @param maxDepth default recursion depth baked into the generated loadIncludes()
 * @returns generated TypeScript source as a string
 */
export function emitIncludeLoader(graph, model, maxDepth) {
    // Precompute helpful maps for FK discovery
    // fkIndex[tableName] = outgoing FKs as { from: cols, toTable, to: cols }
    const fkIndex = {};
    for (const t of Object.values(model.tables)) {
        fkIndex[t.name] = t.fks.map((f) => ({ from: f.from, toTable: f.toTable, to: f.to }));
    }
    // NOTE: everything below is the *generated* file's source. Escaped \`
    // and \${...} survive into the generated code; bare ${...} is expanded
    // now, at generation time (FK_INDEX, PKS, maxDepth).
    return `/* Generated. Do not edit. */
import { RELATION_GRAPH } from "./include-builder";

// Minimal types to keep the file self-contained
type Graph = typeof RELATION_GRAPH;
type TableName = keyof Graph;
type IncludeSpec = any;

// Debug helpers (enabled with SDK_DEBUG=1)
const DEBUG = process.env.SDK_DEBUG === "1" || process.env.SDK_DEBUG === "true";
const log = {
  debug: (...args: any[]) => { if (DEBUG) console.debug("[sdk:include]", ...args); },
  warn: (...args: any[]) => console.warn("[sdk:include]", ...args),
  error: (...args: any[]) => console.error("[sdk:include]", ...args),
};

// Helpers for PK/FK discovery from model (inlined)
const FK_INDEX = ${JSON.stringify(fkIndex, null, 2)} as const;
const PKS = ${JSON.stringify(Object.fromEntries(Object.values(model.tables).map((t) => [t.name, t.pk])), null, 2)} as const;

// Build WHERE predicate for OR-of-AND on composite values
function buildOrAndPredicate(cols: string[], count: number, startIndex: number) {
  // Generates: (c1=$i AND c2=$i+1) OR (c1=$j AND c2=$j+1) ...
  const groups: string[] = [];
  let idx = startIndex;
  for (let k = 0; k < count; k++) {
    const parts = cols.map((c, j) => \`"\${c}" = $\${idx + j}\`);
    groups.push('(' + parts.join(' AND ') + ')');
    idx += cols.length;
  }
  return groups.join(' OR ');
}

// Extract distinct tuples from rows
function distinctTuples(rows: any[], cols: string[]): any[] {
  const s = new Set<string>();
  const res: any[] = [];
  for (const r of rows) {
    const tup = cols.map(c => r[c]);
    const key = JSON.stringify(tup);
    if (!s.has(key)) {
      s.add(key);
      res.push(tup);
    }
  }
  return res;
}

// Index rows by tuple key
function indexByTuple(rows: any[], cols: string[]) {
  const map = new Map<string, any>();
  for (const r of rows) {
    const key = JSON.stringify(cols.map(c => r[c]));
    map.set(key, r);
  }
  return map;
}

// Group rows by tuple key (1:N)
function groupByTuple(rows: any[], cols: string[]) {
  const map = new Map<string, any[]>();
  for (const r of rows) {
    const key = JSON.stringify(cols.map(c => r[c]));
    const arr = map.get(key) ?? [];
    arr.push(r);
    map.set(key, arr);
  }
  return map;
}

// Public entry
export async function loadIncludes(
  root: TableName,
  parents: any[],
  spec: IncludeSpec | undefined,
  pg: { query: (text: string, params?: any[]) => Promise<{ rows: any[] }> },
  maxDepth: number = ${maxDepth}
) {
  try {
    if (!spec || !parents.length) return parents;
    log.debug("loadIncludes root/spec/rows", root, Object.keys(spec ?? {}).length, parents.length);

    // Deep clone parents to avoid mutating caller refs
    const cloned = parents.map(p => ({ ...p }));
    await walk(root, cloned, spec, 0);
    return cloned;
  } catch (e: any) {
    log.error("loadIncludes error:", e?.message ?? e, e?.stack);
    // Never throw to the route; return base rows
    return parents;
  }

  async function walk(table: TableName, rows: any[], s: any, depth: number): Promise<void> {
    if (!s || depth >= maxDepth || rows.length === 0) return;
    const rels: any = (RELATION_GRAPH as any)[table] || {};
    log.debug("walk", { table, depth, keys: Object.keys(s) });

    // Process each requested relation at this level
    for (const key of Object.keys(s)) {
      const rel = rels[key];
      if (!rel) {
        log.warn(\`Unknown include key '\${key}' on '\${table}' — skipping\`);
        continue;
      }
      const target = rel.target as TableName;

      // Safely run each loader; never let one bad edge 500 the route
      if (rel.via) {
        // M:N via junction
        try {
          await loadManyToMany(table, target, rel.via as string, rows, key);
        } catch (e: any) {
          log.error("loadManyToMany failed", { table, key, via: rel.via, target }, e?.message ?? e);
          for (const r of rows) r[key] = [];
        }
        // Recurse if nested include specified
        const childSpec = s[key] && typeof s[key] === "object" ? (s[key] as any).include : undefined;
        if (childSpec) {
          const children = rows.flatMap(r => (r[key] ?? []));
          try {
            await walk(target, children, childSpec, depth + 1);
          } catch (e: any) {
            log.error("walk nested (via) failed", { table: String(target), key }, e?.message ?? e);
          }
        }
        continue;
      }

      if (rel.kind === "many") {
        // 1:N target has FK to current
        try {
          await loadOneToMany(table, target, rows, key);
        } catch (e: any) {
          log.error("loadOneToMany failed", { table, key, target }, e?.message ?? e);
          for (const r of rows) r[key] = [];
        }
        const childSpec = s[key] && typeof s[key] === "object" ? (s[key] as any).include : undefined;
        if (childSpec) {
          const children = rows.flatMap(r => (r[key] ?? []));
          try {
            await walk(target, children, childSpec, depth + 1);
          } catch (e: any) {
            log.error("walk nested (many) failed", { table: String(target), key }, e?.message ?? e);
          }
        }
      } else {
        // kind === "one"
        // Could be belongs-to (current has FK to target) OR has-one (target unique-FK to current)
        const currFks = (FK_INDEX as any)[table] as Array<{from:string[];toTable:string;to:string[]}>;
        const toTarget = currFks.find(f => f.toTable === target);
        if (toTarget) {
          try {
            await loadBelongsTo(table, target, rows, key);
          } catch (e: any) {
            log.error("loadBelongsTo failed", { table, key, target }, e?.message ?? e);
            for (const r of rows) r[key] = null;
          }
        } else {
          try {
            await loadHasOne(table, target, rows, key);
          } catch (e: any) {
            log.error("loadHasOne failed", { table, key, target }, e?.message ?? e);
            for (const r of rows) r[key] = null;
          }
        }
        const childSpec = s[key] && typeof s[key] === "object" ? (s[key] as any).include : undefined;
        if (childSpec) {
          const children = rows.map(r => r[key]).filter(Boolean);
          try {
            await walk(target, children, childSpec, depth + 1);
          } catch (e: any) {
            log.error("walk nested (one) failed", { table: String(target), key }, e?.message ?? e);
          }
        }
      }
    }
  }

  async function loadBelongsTo(curr: TableName, target: TableName, rows: any[], key: string) {
    // current has FK cols referencing target PK
    const fk = (FK_INDEX as any)[curr].find((f: any) => f.toTable === target);
    if (!fk) { for (const r of rows) r[key] = null; return; }
    const tuples = distinctTuples(rows, fk.from).filter(t => t.every((v: any) => v != null));
    if (!tuples.length) { for (const r of rows) r[key] = null; return; }

    // Query target WHERE target.pk IN tuples
    const pkCols = (PKS as any)[target] as string[];
    const where = buildOrAndPredicate(pkCols, tuples.length, 1);
    const params = tuples.flat();
    const sql = \`SELECT * FROM "\${target}" WHERE \${where}\`;
    log.debug("belongsTo SQL", { curr, target, key, sql, paramsCount: params.length });
    const { rows: targets } = await pg.query(sql, params);

    const idx = indexByTuple(targets, pkCols);
    for (const r of rows) {
      const tup = fk.from.map((c: string) => r[c]);
      const keyStr = JSON.stringify(tup);
      r[key] = idx.get(keyStr) ?? null;
    }
  }

  async function loadHasOne(curr: TableName, target: TableName, rows: any[], key: string) {
    // target has FK cols referencing current PK (unique)
    const fk = (FK_INDEX as any)[target].find((f: any) => f.toTable === curr);
    if (!fk) { for (const r of rows) r[key] = null; return; }

    const pkCols = (PKS as any)[curr] as string[];
    const tuples = distinctTuples(rows, pkCols).filter(t => t.every((v: any) => v != null));
    if (!tuples.length) { for (const r of rows) r[key] = null; return; }

    // SELECT target WHERE fk IN tuples
    const where = buildOrAndPredicate(fk.from, tuples.length, 1);
    const params = tuples.flat();
    const sql = \`SELECT * FROM "\${target}" WHERE \${where}\`;
    log.debug("hasOne SQL", { curr, target, key, sql, paramsCount: params.length });
    const { rows: targets } = await pg.query(sql, params);

    const idx = indexByTuple(targets, fk.from);
    for (const r of rows) {
      const keyStr = JSON.stringify(pkCols.map((c: string) => r[c]));
      r[key] = idx.get(keyStr) ?? null;
    }
  }

  async function loadOneToMany(curr: TableName, target: TableName, rows: any[], key: string) {
    // target has FK cols referencing current PK
    const fk = (FK_INDEX as any)[target].find((f: any) => f.toTable === curr);
    if (!fk) { for (const r of rows) r[key] = []; return; }

    const pkCols = (PKS as any)[curr] as string[];
    const tuples = distinctTuples(rows, pkCols).filter(t => t.every((v: any) => v != null));
    if (!tuples.length) { for (const r of rows) r[key] = []; return; }

    const where = buildOrAndPredicate(fk.from, tuples.length, 1);
    const params = tuples.flat();
    const sql = \`SELECT * FROM "\${target}" WHERE \${where}\`;
    log.debug("oneToMany SQL", { curr, target, key, sql, paramsCount: params.length });
    const { rows: children } = await pg.query(sql, params);

    const groups = groupByTuple(children, fk.from);
    for (const r of rows) {
      const keyStr = JSON.stringify(pkCols.map((c: string) => r[c]));
      r[key] = groups.get(keyStr) ?? [];
    }
  }

  async function loadManyToMany(curr: TableName, target: TableName, via: string, rows: any[], key: string) {
    // via has two FKs: one to curr, one to target
    const toCurr = (FK_INDEX as any)[via].find((f: any) => f.toTable === curr);
    const toTarget = (FK_INDEX as any)[via].find((f: any) => f.toTable === target);
    if (!toCurr || !toTarget) { for (const r of rows) r[key] = []; return; }

    const pkCols = (PKS as any)[curr] as string[];
    const tuples = distinctTuples(rows, pkCols).filter(t => t.every((v: any) => v != null));
    if (!tuples.length) { for (const r of rows) r[key] = []; return; }

    // 1) Load junction rows for current parents
    const whereVia = buildOrAndPredicate(toCurr.from, tuples.length, 1);
    const sqlVia = \`SELECT * FROM "\${via}" WHERE \${whereVia}\`;
    const paramsVia = tuples.flat();
    log.debug("manyToMany junction SQL", { curr, target, via, key, sql: sqlVia, paramsCount: paramsVia.length });
    const { rows: jrows } = await pg.query(sqlVia, paramsVia);

    if (!jrows.length) { for (const r of rows) r[key] = []; return; }

    // 2) Load targets by distinct target fk tuples in junction
    const tTuples = distinctTuples(jrows, toTarget.from);
    const whereT = buildOrAndPredicate((PKS as any)[target], tTuples.length, 1);
    const sqlT = \`SELECT * FROM "\${target}" WHERE \${whereT}\`;
    const paramsT = tTuples.flat();
    log.debug("manyToMany target SQL", { curr, target, via, key, sql: sqlT, paramsCount: paramsT.length });
    const { rows: targets } = await pg.query(sqlT, paramsT);

    const tIdx = indexByTuple(targets, (PKS as any)[target]);

    // 3) Group junction rows by current pk tuple, map to target rows
    const byCurr = groupByTuple(jrows, toCurr.from);
    for (const r of rows) {
      const currKey = JSON.stringify(pkCols.map((c: string) => r[c]));
      const j = byCurr.get(currKey) ?? [];
      r[key] = j.map(jr => tIdx.get(JSON.stringify(toTarget.from.map((c: string) => jr[c])))).filter(Boolean);
    }
  }
}
`;
}
//# sourceMappingURL=emit-include-loader.js.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"emit-include-loader.js","sourceRoot":"","sources":["../../src/emit-include-loader.ts"],"names":[],"mappings":"AAGA;;;;;GAKG;AACH,MAAM,UAAU,iBAAiB,CAAC,KAAY,EAAE,KAAY,EAAE,QAAgB;IAC5E,2CAA2C;IAC3C,MAAM,OAAO,GAQT,EAAE,CAAC;IACP,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC;QAC5C,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC,OAAO,EAAE,EAAE,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IACvF,CAAC;IAED,OAAO;;;;;;;;;;;;;;;;;mBAiBU,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC;cACrC,IAAI,CAAC,SAAS,CACxB,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAC1E,IAAI,EACJ,CAAC,CACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;uBA0DoB,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgN9B,CAAC;AACF,CAAC"}
|
@@ -0,0 +1,26 @@
|
|
1
|
+
/**
 * Emit the `<Table>IncludeSpec` TypeScript type aliases for every table in the
 * relation graph: one exported type per table, one optional key per relation edge.
 *
 * @param graph relation graph shaped { [table]: { [relKey]: { kind, target, ... } } }
 * @returns generated TypeScript source as a string
 */
export function emitIncludeSpec(graph) {
    let out = `/* Generated. Do not edit. */\n`;
    const tables = Object.keys(graph);
    for (const table of tables) {
        const rels = graph[table] ?? {};
        const entries = Object.entries(rels);
        out += `export type ${toPascal(table)}IncludeSpec = {\n`;
        for (const [relKey, edge] of entries) {
            if (edge.kind === "many") {
                out += `  ${relKey}?: boolean | { include?: ${toPascal(edge.target)}IncludeSpec; limit?: number; offset?: number; };\n`;
            }
            else {
                // FIX: the generated include loader reads nested includes from
                // spec[key].include for "one" edges too (see walk() in the emitted
                // include-loader), so a bare nested TargetIncludeSpec was silently
                // ignored at runtime. Advertise the `{ include?: ... }` wrapper that
                // the loader actually honors; keep the bare form in the union for
                // backward compatibility with existing call sites.
                out += `  ${relKey}?: boolean | ${toPascal(edge.target)}IncludeSpec | { include?: ${toPascal(edge.target)}IncludeSpec };\n`;
            }
        }
        out += `};\n\n`;
    }
    return out;
}
/** Convert snake/kebab/space-separated names to PascalCase ("user_post" -> "UserPost"). */
function toPascal(s) {
    return s
        .split(/[_\s-]+/)
        .map((w) => (w?.[0] ? w[0].toUpperCase() + w.slice(1) : ""))
        .join("");
}
//# sourceMappingURL=emit-include-spec.js.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"emit-include-spec.js","sourceRoot":"","sources":["../../src/emit-include-spec.ts"],"names":[],"mappings":"AAGA,MAAM,UAAU,eAAe,CAAC,KAAY;IAC1C,IAAI,GAAG,GAAG,iCAAiC,CAAC;IAE5C,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IAClC,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;QAC3B,MAAM,IAAI,GAAG,KAAK,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;QAChC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAErC,GAAG,IAAI,eAAe,QAAQ,CAAC,KAAK,CAAC,mBAAmB,CAAC;QACzD,KAAK,MAAM,CAAC,MAAM,EAAE,IAAI,CAAC,IAAI,OAAO,EAAE,CAAC;YACrC,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;gBACzB,GAAG,IAAI,KAAK,MAAM,4BAA4B,QAAQ,CACpD,IAAI,CAAC,MAAM,CACZ,oDAAoD,CAAC;YACxD,CAAC;iBAAM,CAAC;gBACN,GAAG,IAAI,KAAK,MAAM,gBAAgB,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,gBAAgB,CAAC;YAC1E,CAAC;QACH,CAAC;QACD,GAAG,IAAI,QAAQ,CAAC;IAClB,CAAC;IAED,OAAO,GAAG,CAAC;AACb,CAAC;AAED,SAAS,QAAQ,CAAC,CAAS;IACzB,OAAO,CAAC;SACL,KAAK,CAAC,SAAS,CAAC;SAChB,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,GAAG,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC;SAC3D,IAAI,CAAC,EAAE,CAAC,CAAC;AACd,CAAC"}
|
@@ -0,0 +1 @@
|
|
1
|
+
/** Returns the generated logger module's source (a `logger` object plus a `safe()` handler wrapper). */
export declare function emitLogger(): string;
|
@@ -0,0 +1,35 @@
|
|
1
|
+
/* Emits a tiny logger used by generated server routes. */
/**
 * Returns the source of the generated logger module.
 * - `logger.debug`/`logger.info` only print when SDK_DEBUG is "1" or "true";
 *   `logger.warn`/`logger.error` always print.
 * - `safe(handler)` wraps a route handler: logs 5xx response bodies (debug),
 *   and converts thrown errors into a JSON 500 response (stack included only
 *   when SDK_DEBUG is set).
 */
export function emitLogger() {
    return `/* Generated. Do not edit. */
const DEBUG = process.env.SDK_DEBUG === "1" || process.env.SDK_DEBUG === "true";

export const logger = {
  debug: (...args: any[]) => { if (DEBUG) console.debug("[sdk:debug]", ...args); },
  info: (...args: any[]) => { if (DEBUG) console.info ("[sdk:info ]", ...args); },
  warn: (...args: any[]) => { console.warn ("[sdk:warn ]", ...args); },
  error: (...args: any[]) => { console.error("[sdk:error]", ...args); },
};

export function safe<T extends (c: any) => any>(handler: T) {
  return async (c: any) => {
    try {
      const res = await handler(c);
      // If a handler returns a Response with 5xx, log the body in debug
      if (typeof res?.status === "number" && res.status >= 500) {
        try {
          const clone = res.clone?.();
          const text = clone ? await clone.text() : "";
          logger.error(\`5xx response: \${c.req.method} \${c.req.path}\`, text);
        } catch {}
      }
      return res;
    } catch (e: any) {
      logger.error(\`Unhandled error in \${c.req.method} \${c.req.path}\`, e?.stack ?? e);
      const body = { error: e?.message ?? "Internal error", ...(process.env.SDK_DEBUG ? { stack: e?.stack } : {}) };
      return c.json(body, 500);
    }
  };
}
`;
}
//# sourceMappingURL=emit-logger.js.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"emit-logger.js","sourceRoot":"","sources":["../../src/emit-logger.ts"],"names":[],"mappings":"AAAA,0DAA0D;AAC1D,MAAM,UAAU,UAAU;IACxB,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8BR,CAAC;AACF,CAAC"}
|
@@ -0,0 +1,20 @@
|
|
1
|
+
import type { Table } from "./introspect";
import type { Graph } from "./rel-classify";
/**
 * Emits a Hono router for one table, using generated Zod schemas.
 *
 * Expects:
 *  - Generated file at ../zod/<table>.ts exporting Insert<Type>Schema & Update<Type>Schema
 *  - deps: { pg: Pool | Client } with .query(text, params)
 *
 * Endpoints:
 *  POST   /v1/<table>          create (Insert<Type>Schema)
 *  GET    /v1/<table>/:...pk   get by pk
 *  POST   /v1/<table>/list     list (limit/offset; includes)
 *  PATCH  /v1/<table>/:...pk   update (Update<Type>Schema)
 *  DELETE /v1/<table>/:...pk   delete (or soft-delete)
 */
export declare function emitRoutes(table: Table, _graph: Graph, opts: {
    /**
     * Column used for soft deletes: when set AND the table has the column,
     * DELETE becomes `UPDATE ... SET "<col>" = NOW()` and LIST filters
     * `WHERE "<col>" IS NULL`; null means hard DELETE.
     */
    softDeleteColumn: string | null;
    /** Max relation nesting depth passed to the generated include loader. */
    includeDepthLimit: number;
}): string;
|
@@ -0,0 +1,208 @@
|
|
1
|
+
import { pascal } from "./utils";
|
2
|
+
/**
|
3
|
+
* Emits a Hono router for one table, using generated Zod schemas.
|
4
|
+
*
|
5
|
+
* Expects:
|
6
|
+
* - Generated file at ../zod/<table>.ts exporting Insert<Type>Schema & Update<Type>Schema
|
7
|
+
* - deps: { pg: Pool | Client } with .query(text, params)
|
8
|
+
*
|
9
|
+
* Endpoints:
|
10
|
+
* POST /v1/<table> create (Insert<Type>Schema)
|
11
|
+
* GET /v1/<table>/:...pk get by pk
|
12
|
+
* POST /v1/<table>/list list (limit/offset; includes)
|
13
|
+
* PATCH /v1/<table>/:...pk update (Update<Type>Schema)
|
14
|
+
* DELETE /v1/<table>/:...pk delete (or soft-delete)
|
15
|
+
*/
|
16
|
+
export function emitRoutes(table, _graph, opts) {
|
17
|
+
const fileTableName = table.name; // SQL table name for file/route
|
18
|
+
const Type = pascal(table.name); // PascalCase for type/schemas
|
19
|
+
// Normalize pk to an array and fallback to ["id"] if empty
|
20
|
+
const rawPk = table.pk;
|
21
|
+
const pkCols = Array.isArray(rawPk) ? rawPk : rawPk ? [rawPk] : [];
|
22
|
+
const safePkCols = pkCols.length ? pkCols : ["id"];
|
23
|
+
const hasCompositePk = safePkCols.length > 1;
|
24
|
+
const pkPath = hasCompositePk ? safePkCols.map((c) => `:${c}`).join("/") : `:${safePkCols[0]}`;
|
25
|
+
const softDel = opts.softDeleteColumn && table.columns.some((c) => c.name === opts.softDeleteColumn) ? opts.softDeleteColumn : null;
|
26
|
+
// IMPORTANT: interpolate column names at generator time (no escaping/backslashes here)
|
27
|
+
const wherePkSql = hasCompositePk
|
28
|
+
? safePkCols.map((c, i) => `"${c}" = $${i + 1}`).join(" AND ")
|
29
|
+
: `"${safePkCols[0]}" = $1`;
|
30
|
+
const getPkParams = hasCompositePk
|
31
|
+
? `const pkValues = [${safePkCols.map((c) => `c.req.param("${c}")`).join(", ")}];`
|
32
|
+
: `const pkValues = [c.req.param("${safePkCols[0]}")];`;
|
33
|
+
// Build SET clause indices for UPDATE (PK params first, then update values)
|
34
|
+
// These strings are emitted into generated code, so we DO escape ${...} intentionally here.
|
35
|
+
const updateSetSql = hasCompositePk
|
36
|
+
? `Object.keys(updateData).map((k, i) => \`"\${k}" = $\${i + ${safePkCols.length} + 1}\`).join(", ")`
|
37
|
+
: `Object.keys(updateData).map((k, i) => \`"\${k}" = $\${i + 2}\`).join(", ")`;
|
38
|
+
// Prevent updating PK columns
|
39
|
+
const pkFilter = safePkCols.length
|
40
|
+
? `const updateData = Object.fromEntries(Object.entries(parsed.data).filter(([k]) => !new Set(${JSON.stringify(safePkCols)}).has(k)));`
|
41
|
+
: `const updateData = parsed.data;`;
|
42
|
+
return `/* Generated. Do not edit. */
|
43
|
+
import { Hono } from "hono";
|
44
|
+
import { z } from "zod";
|
45
|
+
import { Insert${Type}Schema, Update${Type}Schema } from "../zod/${fileTableName}";
|
46
|
+
import { loadIncludes } from "../include-loader";
|
47
|
+
|
48
|
+
const DEBUG = process.env.SDK_DEBUG === "1" || process.env.SDK_DEBUG === "true";
|
49
|
+
const log = {
|
50
|
+
debug: (...args: any[]) => { if (DEBUG) console.debug("[sdk]", ...args); },
|
51
|
+
error: (...args: any[]) => console.error("[sdk]", ...args),
|
52
|
+
};
|
53
|
+
|
54
|
+
const listSchema = z.object({
|
55
|
+
include: z.any().optional(), // TODO: typed include spec in later pass
|
56
|
+
limit: z.number().int().positive().max(100).optional(),
|
57
|
+
offset: z.number().int().min(0).optional(),
|
58
|
+
orderBy: z.any().optional() // TODO: typed orderBy in a later pass
|
59
|
+
});
|
60
|
+
|
61
|
+
export function register${Type}Routes(app: Hono, deps: { pg: { query: (text: string, params?: any[]) => Promise<{ rows: any[] }> } }) {
|
62
|
+
const base = "/v1/${fileTableName}";
|
63
|
+
|
64
|
+
// CREATE
|
65
|
+
app.post(base, async (c) => {
|
66
|
+
try {
|
67
|
+
const body = await c.req.json().catch(() => ({}));
|
68
|
+
log.debug("POST ${fileTableName} body:", body);
|
69
|
+
const parsed = Insert${Type}Schema.safeParse(body);
|
70
|
+
if (!parsed.success) {
|
71
|
+
const issues = parsed.error.flatten();
|
72
|
+
log.debug("POST ${fileTableName} invalid:", issues);
|
73
|
+
return c.json({ error: "Invalid body", issues }, 400);
|
74
|
+
}
|
75
|
+
|
76
|
+
const data = parsed.data;
|
77
|
+
const cols = Object.keys(data);
|
78
|
+
const vals = Object.values(data);
|
79
|
+
if (!cols.length) return c.json({ error: "No fields provided" }, 400);
|
80
|
+
|
81
|
+
const placeholders = cols.map((_, i) => '$' + (i + 1)).join(", ");
|
82
|
+
const text = \`INSERT INTO "${fileTableName}" (\${cols.map(c => '"' + c + '"').join(", ")})
|
83
|
+
VALUES (\${placeholders})
|
84
|
+
RETURNING *\`;
|
85
|
+
log.debug("SQL:", text, "vals:", vals);
|
86
|
+
const { rows } = await deps.pg.query(text, vals);
|
87
|
+
return c.json(rows[0] ?? null, rows[0] ? 201 : 500);
|
88
|
+
} catch (e: any) {
|
89
|
+
log.error("POST ${fileTableName} error:", e?.stack ?? e);
|
90
|
+
return c.json({ error: e?.message ?? "Internal error", ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
|
91
|
+
}
|
92
|
+
});
|
93
|
+
|
94
|
+
// GET BY PK
|
95
|
+
app.get(\`\${base}/${pkPath}\`, async (c) => {
|
96
|
+
try {
|
97
|
+
${getPkParams}
|
98
|
+
const text = \`SELECT * FROM "${fileTableName}" WHERE ${wherePkSql} LIMIT 1\`;
|
99
|
+
log.debug("GET ${fileTableName} by PK:", pkValues, "SQL:", text);
|
100
|
+
const { rows } = await deps.pg.query(text, pkValues);
|
101
|
+
if (!rows[0]) return c.json(null, 404);
|
102
|
+
return c.json(rows[0]);
|
103
|
+
} catch (e: any) {
|
104
|
+
log.error("GET ${fileTableName} error:", e?.stack ?? e);
|
105
|
+
return c.json({ error: e?.message ?? "Internal error", ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
|
106
|
+
}
|
107
|
+
});
|
108
|
+
|
109
|
+
// LIST
|
110
|
+
app.post(\`\${base}/list\`, async (c) => {
|
111
|
+
try {
|
112
|
+
const body = listSchema.safeParse(await c.req.json().catch(() => ({})));
|
113
|
+
if (!body.success) {
|
114
|
+
const issues = body.error.flatten();
|
115
|
+
log.debug("LIST ${fileTableName} invalid:", issues);
|
116
|
+
return c.json({ error: "Invalid body", issues }, 400);
|
117
|
+
}
|
118
|
+
const { include, limit = 50, offset = 0 } = body.data;
|
119
|
+
|
120
|
+
const where = ${softDel ? `\`WHERE "${softDel}" IS NULL\`` : `""`};
|
121
|
+
const text = \`SELECT * FROM "${fileTableName}" \${where} LIMIT $1 OFFSET $2\`;
|
122
|
+
log.debug("LIST ${fileTableName} SQL:", text, "params:", [limit, offset]);
|
123
|
+
const { rows } = await deps.pg.query(text, [limit, offset]);
|
124
|
+
|
125
|
+
if (!include) {
|
126
|
+
log.debug("LIST ${fileTableName} rows:", rows.length);
|
127
|
+
return c.json(rows);
|
128
|
+
}
|
129
|
+
|
130
|
+
// Attempt include stitching with explicit error handling
|
131
|
+
log.debug("LIST ${fileTableName} include spec:", include);
|
132
|
+
try {
|
133
|
+
const stitched = await loadIncludes("${fileTableName}", rows, include, deps.pg, ${opts.includeDepthLimit});
|
134
|
+
log.debug("LIST ${fileTableName} stitched count:", Array.isArray(stitched) ? stitched.length : "n/a");
|
135
|
+
return c.json(stitched);
|
136
|
+
} catch (e: any) {
|
137
|
+
const strict = process.env.SDK_STRICT_INCLUDE === "1" || process.env.SDK_STRICT_INCLUDE === "true";
|
138
|
+
const msg = e?.message ?? String(e);
|
139
|
+
const stack = e?.stack;
|
140
|
+
log.error("LIST ${fileTableName} include stitch FAILED:", msg, stack);
|
141
|
+
|
142
|
+
if (strict) {
|
143
|
+
return c.json({ error: "include-stitch-failed", message: msg, ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
|
144
|
+
}
|
145
|
+
// Non-strict fallback: return base rows plus error metadata
|
146
|
+
return c.json({ data: rows, includeError: { message: msg, ...(DEBUG ? { stack: e?.stack } : {}) } }, 200);
|
147
|
+
}
|
148
|
+
} catch (e: any) {
|
149
|
+
log.error("LIST ${fileTableName} error:", e?.stack ?? e);
|
150
|
+
return c.json({ error: e?.message ?? "Internal error", ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
|
151
|
+
}
|
152
|
+
});
|
153
|
+
|
154
|
+
// UPDATE
|
155
|
+
app.patch(\`\${base}/${pkPath}\`, async (c) => {
|
156
|
+
try {
|
157
|
+
${getPkParams}
|
158
|
+
const body = await c.req.json().catch(() => ({}));
|
159
|
+
log.debug("PATCH ${fileTableName} pk:", pkValues, "patch:", body);
|
160
|
+
const parsed = Update${Type}Schema.safeParse(body);
|
161
|
+
if (!parsed.success) {
|
162
|
+
const issues = parsed.error.flatten();
|
163
|
+
log.debug("PATCH ${fileTableName} invalid:", issues);
|
164
|
+
return c.json({ error: "Invalid body", issues: issues }, 400);
|
165
|
+
}
|
166
|
+
|
167
|
+
${pkFilter}
|
168
|
+
if (!Object.keys(updateData).length) return c.json({ error: "No updatable fields provided" }, 400);
|
169
|
+
|
170
|
+
const setSql = ${updateSetSql};
|
171
|
+
const text = \`UPDATE "${fileTableName}" SET \${setSql} WHERE ${wherePkSql} RETURNING *\`;
|
172
|
+
const params = ${hasCompositePk ? `[...pkValues, ...Object.values(updateData)]` : `[pkValues[0], ...Object.values(updateData)]`};
|
173
|
+
log.debug("PATCH ${fileTableName} SQL:", text, "params:", params);
|
174
|
+
const { rows } = await deps.pg.query(text, params);
|
175
|
+
if (!rows[0]) return c.json(null, 404);
|
176
|
+
return c.json(rows[0]);
|
177
|
+
} catch (e: any) {
|
178
|
+
log.error("PATCH ${fileTableName} error:", e?.stack ?? e);
|
179
|
+
return c.json({ error: e?.message ?? "Internal error", ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
|
180
|
+
}
|
181
|
+
});
|
182
|
+
|
183
|
+
// DELETE (soft or hard)
|
184
|
+
app.delete(\`\${base}/${pkPath}\`, async (c) => {
|
185
|
+
try {
|
186
|
+
${getPkParams}
|
187
|
+
${softDel
|
188
|
+
? `
|
189
|
+
const text = \`UPDATE "${fileTableName}" SET "${softDel}" = NOW() WHERE ${wherePkSql} RETURNING *\`;
|
190
|
+
log.debug("DELETE (soft) ${fileTableName} SQL:", text, "pk:", pkValues);
|
191
|
+
const { rows } = await deps.pg.query(text, pkValues);
|
192
|
+
if (!rows[0]) return c.json(null, 404);
|
193
|
+
return c.json(rows[0]);`
|
194
|
+
: `
|
195
|
+
const text = \`DELETE FROM "${fileTableName}" WHERE ${wherePkSql} RETURNING *\`;
|
196
|
+
log.debug("DELETE ${fileTableName} SQL:", text, "pk:", pkValues);
|
197
|
+
const { rows } = await deps.pg.query(text, pkValues);
|
198
|
+
if (!rows[0]) return c.json(null, 404);
|
199
|
+
return c.json(rows[0]);`}
|
200
|
+
} catch (e: any) {
|
201
|
+
log.error("DELETE ${fileTableName} error:", e?.stack ?? e);
|
202
|
+
return c.json({ error: e?.message ?? "Internal error", ...(DEBUG ? { stack: e?.stack } : {}) }, 500);
|
203
|
+
}
|
204
|
+
});
|
205
|
+
}
|
206
|
+
`;
|
207
|
+
}
|
208
|
+
//# sourceMappingURL=emit-routes.js.map
|
@@ -0,0 +1 @@
|
|
1
|
+
{"version":3,"file":"emit-routes.js","sourceRoot":"","sources":["../../src/emit-routes.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,MAAM,EAAE,MAAM,SAAS,CAAC;AAEjC;;;;;;;;;;;;;GAaG;AACH,MAAM,UAAU,UAAU,CACxB,KAAY,EACZ,MAAa,EACb,IAAoE;IAEpE,MAAM,aAAa,GAAG,KAAK,CAAC,IAAI,CAAC,CAAC,gCAAgC;IAClE,MAAM,IAAI,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,8BAA8B;IAE/D,2DAA2D;IAC3D,MAAM,KAAK,GAAI,KAAa,CAAC,EAAE,CAAC;IAChC,MAAM,MAAM,GAAa,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IAC7E,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;IAEnD,MAAM,cAAc,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;IAC7C,MAAM,MAAM,GAAG,cAAc,CAAC,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC;IAE/F,MAAM,OAAO,GACX,IAAI,CAAC,gBAAgB,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,IAAI,CAAC;IAEtH,uFAAuF;IACvF,MAAM,UAAU,GAAG,cAAc;QAC/B,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC;QAC9D,CAAC,CAAC,IAAI,UAAU,CAAC,CAAC,CAAC,QAAQ,CAAC;IAE9B,MAAM,WAAW,GAAG,cAAc;QAChC,CAAC,CAAC,qBAAqB,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI;QAClF,CAAC,CAAC,kCAAkC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC;IAE1D,4EAA4E;IAC5E,4FAA4F;IAC5F,MAAM,YAAY,GAAG,cAAc;QACjC,CAAC,CAAC,6DAA6D,UAAU,CAAC,MAAM,qBAAqB;QACrG,CAAC,CAAC,4EAA4E,CAAC;IAEjF,8BAA8B;IAC9B,MAAM,QAAQ,GAAG,UAAU,CAAC,MAAM;QAChC,CAAC,CAAC,8FAA8F,IAAI,CAAC,SAAS,CAC1G,UAAU,CACX,aAAa;QAChB,CAAC,CAAC,iCAAiC,CAAC;IAEtC,OAAO;;;iBAGQ,IAAI,iBAAiB,IAAI,yBAAyB,aAAa;;;;;;;;;;;;;;;;0BAgBtD,IAAI;sBACR,aAAa;;;;;;wBAMX,aAAa;6BACR,IAAI;;;0BAGP,aAAa;;;;;;;;;;oCAUH,aAAa;;;;;;;wBAOzB,aAAa;;;;;;uBAMd,MAAM;;QAErB,WAAW;sCACmB,aAAa,WAAW,UAAU;uBACjD,aAAa;;;;;uBAKb,aAAa;;
;;;;;;;;;0BAWV,aAAa;;;;;sBAKjB,OAAO,CAAC,CAAC,CAAC,YAAY,OAAO,aAAa,CAAC,CAAC,CAAC,IAAI;sCACjC,aAAa;wBAC3B,aAAa;;;;0BAIX,aAAa;;;;;wBAKf,aAAa;;+CAEU,aAAa,8BAA8B,IAAI,CAAC,iBAAiB;0BACtF,aAAa;;;;;;0BAMb,aAAa;;;;;;;;;wBASf,aAAa;;;;;;yBAMZ,MAAM;;QAEvB,WAAW;;yBAEM,aAAa;6BACT,IAAI;;;2BAGN,aAAa;;;;QAIhC,QAAQ;;;uBAGO,YAAY;+BACJ,aAAa,0BAA0B,UAAU;uBAExE,cAAc,CAAC,CAAC,CAAC,6CAA6C,CAAC,CAAC,CAAC,6CACnE;yBACmB,aAAa;;;;;yBAKb,aAAa;;;;;;0BAMZ,MAAM;;QAExB,WAAW;QAEX,OAAO;QACL,CAAC,CAAC;+BACmB,aAAa,UAAU,OAAO,mBAAmB,UAAU;iCACzD,aAAa;;;8BAGhB;QACpB,CAAC,CAAC;oCACwB,aAAa,WAAW,UAAU;0BAC5C,aAAa;;;8BAIjC;;0BAEoB,aAAa;;;;;CAKtC,CAAC;AACF,CAAC"}
|