@m6d/cortex-server 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +64 -0
- package/dist/index.d.ts +1 -0
- package/dist/src/adapters/database.d.ts +27 -0
- package/dist/src/adapters/minio.d.ts +10 -0
- package/dist/src/adapters/mssql.d.ts +3 -0
- package/dist/src/adapters/storage.d.ts +6 -0
- package/dist/src/ai/fetch.d.ts +2 -0
- package/dist/src/ai/helpers.d.ts +5 -0
- package/dist/src/ai/index.d.ts +4 -0
- package/dist/src/ai/interceptors/resolve-captured-files.d.ts +11 -0
- package/dist/src/ai/prompt.d.ts +4 -0
- package/dist/src/ai/tools/call-endpoint.tool.d.ts +7 -0
- package/dist/src/ai/tools/capture-files.tool.d.ts +6 -0
- package/dist/src/ai/tools/execute-code.tool.d.ts +4 -0
- package/dist/src/ai/tools/query-graph.tool.d.ts +5 -0
- package/dist/src/auth/middleware.d.ts +4 -0
- package/dist/src/cli/extract-endpoints.d.ts +6 -0
- package/dist/src/config.d.ts +145 -0
- package/dist/src/db/migrate.d.ts +1 -0
- package/dist/src/db/schema.d.ts +345 -0
- package/dist/src/factory.d.ts +17 -0
- package/dist/src/graph/generate-cypher.d.ts +22 -0
- package/dist/src/graph/helpers.d.ts +60 -0
- package/dist/src/graph/index.d.ts +11 -0
- package/dist/src/graph/neo4j.d.ts +18 -0
- package/dist/src/graph/resolver.d.ts +51 -0
- package/dist/src/graph/seed.d.ts +19 -0
- package/dist/src/graph/types.d.ts +104 -0
- package/dist/src/graph/validate.d.ts +2 -0
- package/dist/src/index.d.ts +10 -0
- package/dist/src/routes/chat.d.ts +3 -0
- package/dist/src/routes/files.d.ts +3 -0
- package/dist/src/routes/index.d.ts +4 -0
- package/dist/src/routes/threads.d.ts +3 -0
- package/dist/src/routes/ws.d.ts +3 -0
- package/dist/src/types.d.ts +56 -0
- package/dist/src/ws/connections.d.ts +4 -0
- package/dist/src/ws/events.d.ts +8 -0
- package/dist/src/ws/index.d.ts +3 -0
- package/dist/src/ws/notify.d.ts +2 -0
- package/index.ts +1 -0
- package/package.json +57 -0
- package/src/adapters/database.ts +33 -0
- package/src/adapters/minio.ts +89 -0
- package/src/adapters/mssql.ts +203 -0
- package/src/adapters/storage.ts +6 -0
- package/src/ai/fetch.ts +39 -0
- package/src/ai/helpers.ts +36 -0
- package/src/ai/index.ts +145 -0
- package/src/ai/interceptors/resolve-captured-files.ts +64 -0
- package/src/ai/prompt.ts +120 -0
- package/src/ai/tools/call-endpoint.tool.ts +96 -0
- package/src/ai/tools/capture-files.tool.ts +22 -0
- package/src/ai/tools/execute-code.tool.ts +108 -0
- package/src/ai/tools/query-graph.tool.ts +35 -0
- package/src/auth/middleware.ts +63 -0
- package/src/cli/extract-endpoints.ts +588 -0
- package/src/config.ts +155 -0
- package/src/db/migrate.ts +21 -0
- package/src/db/migrations/20260309012148_cloudy_maria_hill/migration.sql +36 -0
- package/src/db/migrations/20260309012148_cloudy_maria_hill/snapshot.json +305 -0
- package/src/db/schema.ts +77 -0
- package/src/factory.ts +159 -0
- package/src/graph/generate-cypher.ts +179 -0
- package/src/graph/helpers.ts +68 -0
- package/src/graph/index.ts +47 -0
- package/src/graph/neo4j.ts +117 -0
- package/src/graph/resolver.ts +357 -0
- package/src/graph/seed.ts +172 -0
- package/src/graph/types.ts +152 -0
- package/src/graph/validate.ts +80 -0
- package/src/index.ts +27 -0
- package/src/routes/chat.ts +38 -0
- package/src/routes/files.ts +105 -0
- package/src/routes/index.ts +4 -0
- package/src/routes/threads.ts +69 -0
- package/src/routes/ws.ts +33 -0
- package/src/types.ts +50 -0
- package/src/ws/connections.ts +23 -0
- package/src/ws/events.ts +6 -0
- package/src/ws/index.ts +7 -0
- package/src/ws/notify.ts +9 -0
|
@@ -0,0 +1,588 @@
|
|
|
1
|
+
import * as fs from "fs";
|
|
2
|
+
import * as path from "path";
|
|
3
|
+
|
|
4
|
+
/** HTTP verbs the extractor understands (mirrors METHODS below). */
type HttpMethod = "GET" | "POST" | "PUT" | "DELETE";
/** Shape classification of a successful response body. */
type ResponseKind = "object" | "array" | "paginated" | "file" | "none";
/** Loose JSON object. */
type Obj = Record<string, unknown>;
/** One extracted parameter/body/response property (recursive for objects). */
type Prop = {
  name: string;
  required: boolean;
  type: string;
  // Set (true) when the property is an array of `type`.
  isArray?: boolean;
  // Child properties when type === "object" and children were resolved.
  properties?: Prop[];
};
/** Normalized description of one Swagger operation. */
type Endpoint = {
  // "<METHOD>:<normalized path>" — the lookup key used throughout.
  key: string;
  method: HttpMethod;
  path: string;
  // Path + query parameters (merged and deduped).
  params: Prop[];
  body: Prop[];
  response: Prop[];
  responseKind: ResponseKind;
  successStatus: number;
  errorStatuses: number[];
};

export type ExtractEndpointsOptions = {
  swaggerUrl: string;
  domainsDir: string;
  // When false (the default), runs as a dry-run report only.
  write?: boolean;
};

// OpenAPI operation keys -> HTTP verbs, in iteration order.
const METHODS: Array<[string, HttpMethod]> = [
  ["get", "GET"],
  ["post", "POST"],
  ["put", "PUT"],
  ["delete", "DELETE"],
];
// Markers delimiting the generated section inside *.endpoint.ts files.
const AUTO_START = "// @auto-generated-start";
const AUTO_END = "// @auto-generated-end";
// Recursion guard against deeply nested / pathological schemas.
const MAX_DEPTH = 8;
|
|
41
|
+
|
|
42
|
+
const toCamelCase = (s: string): string =>
|
|
43
|
+
s
|
|
44
|
+
.split(".")
|
|
45
|
+
.map((seg) => seg.replace(/^[A-Z]/, (c) => c.toLowerCase()))
|
|
46
|
+
.join(".");
|
|
47
|
+
|
|
48
|
+
const isObj = (v: unknown): v is Obj =>
|
|
49
|
+
typeof v === "object" && v !== null && !Array.isArray(v);
|
|
50
|
+
const obj = (v: unknown): Obj => (isObj(v) ? v : {});
|
|
51
|
+
const strings = (v: unknown): string[] =>
|
|
52
|
+
Array.isArray(v) ? v.filter((x): x is string => typeof x === "string") : [];
|
|
53
|
+
const dedupe = (items: Prop[]): Prop[] => {
|
|
54
|
+
const seen = new Set<string>();
|
|
55
|
+
return items.filter((x) =>
|
|
56
|
+
seen.has(x.name) ? false : (seen.add(x.name), true),
|
|
57
|
+
);
|
|
58
|
+
};
|
|
59
|
+
|
|
60
|
+
function normalizePath(raw: string): string {
|
|
61
|
+
let p = raw.startsWith("/") ? raw : `/${raw}`;
|
|
62
|
+
p =
|
|
63
|
+
p
|
|
64
|
+
.replace(/\{([^}:?]+)(?::[^}]+)?\??\}/g, "{$1}")
|
|
65
|
+
.replace(/^\/api(?=\/|$)/i, "") || "/";
|
|
66
|
+
return p.length > 1 ? p.replace(/\/+$/, "") : p;
|
|
67
|
+
}
|
|
68
|
+
|
|
69
|
+
/**
 * Recursively collects file paths under `dir` that satisfy `accept`.
 * Returns [] when the root directory does not exist. Synchronous walk;
 * symlinked directories are not followed as directories (Dirent check).
 */
function walkFiles(
  dir: string,
  accept: (filePath: string) => boolean,
): string[] {
  if (!fs.existsSync(dir)) return [];
  const out: string[] = [];
  function walk(d: string) {
    for (const e of fs.readdirSync(d, { withFileTypes: true })) {
      const p = path.join(d, e.name);
      if (e.isDirectory()) walk(p);
      else if (accept(p)) out.push(p);
    }
  }
  walk(dir);
  return out;
}
|
|
85
|
+
|
|
86
|
+
/**
 * Scans `root` for *.endpoint.ts files and indexes them by
 * "<METHOD>:<normalized path>", scraping the first line-leading
 * `method: '...'` and `path: '...'` string literals out of each file
 * with regexes. Files lacking either literal are silently skipped.
 */
function endpointFiles(root: string): Map<string, string> {
  const out = new Map<string, string>();
  for (const filePath of walkFiles(root, (p) => p.endsWith(".endpoint.ts"))) {
    const c = fs.readFileSync(filePath, "utf8");
    // `m` flag: match at the start of any line; group 2 is the value.
    const method = c.match(/^\s*method:\s*(['"])([^'"]+)\1/m)?.[2];
    const route = c.match(/^\s*path:\s*(['"])([^'"]+)\1/m)?.[2];
    if (method && route) out.set(`${method}:${normalizePath(route)}`, filePath);
  }
  return out;
}
|
|
96
|
+
|
|
97
|
+
function resolvePointer(doc: unknown, ref: string): unknown {
|
|
98
|
+
if (!ref.startsWith("#/")) return null;
|
|
99
|
+
let cur: unknown = doc;
|
|
100
|
+
for (const part of ref.slice(2).split("/")) {
|
|
101
|
+
const key = part.replace(/~1/g, "/").replace(/~0/g, "~");
|
|
102
|
+
if (!isObj(cur) || !(key in cur)) return null;
|
|
103
|
+
cur = cur[key];
|
|
104
|
+
}
|
|
105
|
+
return cur;
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
function deref(doc: unknown, v: unknown): Obj {
|
|
109
|
+
if (!isObj(v)) return {};
|
|
110
|
+
let x = v;
|
|
111
|
+
const seen = new Set<string>();
|
|
112
|
+
while (typeof x.$ref === "string") {
|
|
113
|
+
if (seen.has(x.$ref)) return {};
|
|
114
|
+
seen.add(x.$ref);
|
|
115
|
+
x = obj(resolvePointer(doc, x.$ref));
|
|
116
|
+
}
|
|
117
|
+
return x;
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
function schemaType(s: Obj): string {
|
|
121
|
+
const t = Array.isArray(s.type)
|
|
122
|
+
? s.type.find((x) => typeof x === "string" && x !== "null")
|
|
123
|
+
: s.type;
|
|
124
|
+
if (typeof t === "string") return t;
|
|
125
|
+
if (isObj(s.properties)) return "object";
|
|
126
|
+
if (isObj(s.items)) return "array";
|
|
127
|
+
return "";
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
const isJsonNodeRef = (v: unknown) =>
|
|
131
|
+
isObj(v) &&
|
|
132
|
+
typeof v.$ref === "string" &&
|
|
133
|
+
/\/System\.Text\.Json\.Nodes\.JsonNode$/.test(v.$ref);
|
|
134
|
+
|
|
135
|
+
function isJsonNodeSchema(s: Obj) {
|
|
136
|
+
const p = obj(s.properties);
|
|
137
|
+
return (
|
|
138
|
+
schemaType(s) === "object" &&
|
|
139
|
+
"options" in p &&
|
|
140
|
+
"parent" in p &&
|
|
141
|
+
"root" in p &&
|
|
142
|
+
isJsonNodeRef(p.parent) &&
|
|
143
|
+
isJsonNodeRef(p.root)
|
|
144
|
+
);
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
function scalarType(s: Obj): string {
|
|
148
|
+
if (Array.isArray(s.enum) && s.enum.every((v) => typeof v === "string")) {
|
|
149
|
+
return s.enum.map((v) => `'${String(v).replace(/'/g, "\\'")}'`).join(" | ");
|
|
150
|
+
}
|
|
151
|
+
const t = schemaType(s);
|
|
152
|
+
const f = typeof s.format === "string" ? s.format : "";
|
|
153
|
+
if (f === "uuid") return "uuid";
|
|
154
|
+
if (f === "date") return "date";
|
|
155
|
+
if (f === "date-time") return "datetime";
|
|
156
|
+
if (t === "integer" || t === "number") return "number";
|
|
157
|
+
if (t === "boolean") return "boolean";
|
|
158
|
+
if (t === "string") return "string";
|
|
159
|
+
if (t === "object") return "object";
|
|
160
|
+
return "unknown";
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
/**
 * Converts an object schema's `properties` into Prop[], honoring the
 * schema's `required` list and capping recursion at MAX_DEPTH.
 * JsonNode refs/schemas are treated as opaque (no child properties).
 */
function toProps(doc: unknown, schemaIn: unknown, depth = 0): Prop[] {
  if (depth > MAX_DEPTH) return [];
  const s = deref(doc, schemaIn);
  if (isJsonNodeRef(schemaIn) || isJsonNodeSchema(s)) return [];
  const req = new Set(strings(s.required));
  return dedupe(
    Object.entries(obj(s.properties)).map(([name, child]) =>
      toProp(doc, name, child, req.has(name), depth + 1),
    ),
  );
}

/**
 * Converts a single named schema into a Prop, recursing into arrays and
 * objects (mutually recursive with toProps). A `nullable: true` schema
 * downgrades `required`. JsonNode refs/schemas collapse to a bare
 * "object" Prop with no children.
 */
function toProp(
  doc: unknown,
  name: string,
  schemaIn: unknown,
  required: boolean,
  depth = 0,
): Prop {
  // Check the raw (pre-deref) value: the ref itself marks JsonNode.
  if (isJsonNodeRef(schemaIn)) return { name, required, type: "object" };

  const s = deref(doc, schemaIn);
  // OpenAPI 3.0-style nullability relaxes requiredness.
  const req = required && s.nullable !== true;

  if (isJsonNodeSchema(s)) return { name, required: req, type: "object" };

  const t = schemaType(s);
  if (t === "array") {
    const item = deref(doc, s.items);
    if (schemaType(item) === "object") {
      const properties = toProps(doc, item, depth + 1);
      // `properties` key is omitted entirely when the item has none.
      return {
        name,
        required: req,
        type: "object",
        isArray: true,
        ...(properties.length ? { properties } : {}),
      };
    }
    return { name, required: req, type: scalarType(item), isArray: true };
  }

  if (t === "object") {
    const properties = toProps(doc, s, depth + 1);
    return {
      name,
      required: req,
      type: "object",
      ...(properties.length ? { properties } : {}),
    };
  }

  return { name, required: req, type: scalarType(s) };
}
|
|
217
|
+
|
|
218
|
+
function pickContent(
|
|
219
|
+
contentIn: unknown,
|
|
220
|
+
): { schema: unknown; mimes: string[] } | null {
|
|
221
|
+
const content = obj(contentIn);
|
|
222
|
+
const mimes = Object.keys(content);
|
|
223
|
+
if (!mimes.length) return null;
|
|
224
|
+
const mime =
|
|
225
|
+
mimes.find((x) => x.toLowerCase() === "application/json") ??
|
|
226
|
+
mimes.find((x) => x.toLowerCase().includes("json")) ??
|
|
227
|
+
mimes[0];
|
|
228
|
+
return { schema: obj(content[mime!]).schema, mimes };
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
/**
 * Classifies an operation's `responses` and extracts the success body.
 * Picks the lowest 2xx status (defaulting to 200), collects >=400
 * statuses as errors, and returns one of the ResponseKind shapes:
 *  - "none": 204, no content, or an empty schema
 *  - "file": file-ish media types or binary/base64 string schemas
 *  - "array": top-level array (props describe its item)
 *  - "paginated": object with paging fields wrapping an item list
 *  - "object": any other schema
 */
function parseResponse(
  doc: unknown,
  responsesIn: unknown,
): {
  successStatus: number;
  errorStatuses: number[];
  responseKind: ResponseKind;
  response: Prop[];
} {
  const responses = obj(responsesIn);
  // Numeric status keys only, ascending; keys like "default" are dropped.
  const statuses = Object.keys(responses)
    .map(Number)
    .filter(Number.isInteger)
    .sort((a, b) => a - b);
  const successStatus = statuses.find((s) => s >= 200 && s < 300) ?? 200;
  const errorStatuses = [...new Set(statuses.filter((s) => s >= 400))];

  // 204 No Content: nothing to extract.
  if (successStatus === 204)
    return {
      successStatus,
      errorStatuses,
      responseKind: "none",
      response: [],
    };

  const success = deref(doc, responses[String(successStatus)]);
  const content = pickContent(success.content);
  if (!content)
    return {
      successStatus,
      errorStatuses,
      responseKind: "none",
      response: [],
    };

  const s = deref(doc, content.schema);
  const t = schemaType(s);
  // File detection: any file-ish media type on the response, or a
  // string schema whose format marks raw bytes.
  const isFile =
    content.mimes.some((m) =>
      /(application\/octet-stream|application\/pdf|application\/vnd\.|text\/csv)/i.test(
        m,
      ),
    ) ||
    (t === "string" && ["binary", "base64"].includes(String(s.format ?? "")));

  if (isFile)
    return {
      successStatus,
      errorStatuses,
      responseKind: "file",
      response: [],
    };

  if (t === "array") {
    const item = deref(doc, s.items);
    return {
      successStatus,
      errorStatuses,
      responseKind: "array",
      // Scalar items are wrapped as a single synthetic "item" prop.
      response:
        schemaType(item) === "object"
          ? toProps(doc, item)
          : [toProp(doc, "item", item, true)],
    };
  }

  if (t === "object") {
    const props = obj(s.properties);
    // Paging-envelope heuristic: any of these keys marks a paginated list.
    const hasPaging = [
      "totalCount",
      "pageNumber",
      "pageSize",
      "totalPages",
    ].some((k) => k in props);
    // The wrapped list may live under items/data/results — first match wins.
    const list = deref(doc, props.items ?? props.data ?? props.results);

    if (hasPaging && schemaType(list) === "array") {
      const item = deref(doc, list.items);
      return {
        successStatus,
        errorStatuses,
        responseKind: "paginated",
        response:
          schemaType(item) === "object"
            ? toProps(doc, item)
            : [toProp(doc, "item", item, true)],
      };
    }

    return {
      successStatus,
      errorStatuses,
      responseKind: "object",
      response: toProps(doc, s),
    };
  }

  // Non-empty scalar schema: wrap it as a single "value" prop.
  if (Object.keys(s).length) {
    return {
      successStatus,
      errorStatuses,
      responseKind: "object",
      response: [toProp(doc, "value", s, true)],
    };
  }

  return { successStatus, errorStatuses, responseKind: "none", response: [] };
}
|
|
339
|
+
|
|
340
|
+
/**
 * Builds the normalized Endpoint for one Swagger operation: merges
 * path-item- and operation-level parameters (path/query only),
 * collapses JsonNode-expanded dot-notation params into single object
 * params, extracts the request body, and attaches the parsed response
 * classification.
 */
function extractEndpoint(
  doc: unknown,
  route: string,
  method: HttpMethod,
  pathItemIn: unknown,
  operationIn: unknown,
): Endpoint {
  const pathItem = obj(pathItemIn);
  const operation = obj(operationIn);

  // Path-item parameters apply to every method; operation-level ones
  // follow (dedupe later keeps the first occurrence per name).
  const rawParams = [
    ...(Array.isArray(pathItem.parameters) ? pathItem.parameters : []),
    ...(Array.isArray(operation.parameters) ? operation.parameters : []),
  ]
    .map((x) => deref(doc, x))
    .filter(
      (x) =>
        ["path", "query"].includes(String(x.in ?? "")) &&
        typeof x.name === "string",
    );

  // Collapse JsonNode-expanded dot-notation query params into a single
  // object param. Swagger expands JsonNode properties (Options, Parent,
  // Root, …) into many individual params like "CustomFilter.Parent.Root".
  const jsonNodeRoots = new Set<string>();
  for (const x of rawParams) {
    const name = String(x.name);
    if (!name.includes(".")) continue;
    const root = name.split(".")[0]!;
    const rest = name.slice(root.length + 1);
    if (/^(Options|Parent|Root)\b/.test(rest)) jsonNodeRoots.add(root);
  }

  const params = dedupe(
    rawParams
      .filter((x) => {
        // Drop every dotted param whose root was identified as JsonNode.
        const name = String(x.name);
        if (!name.includes(".")) return true;
        const root = name.split(".")[0]!;
        return !jsonNodeRoots.has(root);
      })
      .map((x) =>
        toProp(
          doc,
          toCamelCase(String(x.name)),
          x.schema,
          // Path params are always required; query params only if flagged.
          String(x.in) === "path" || x.required === true,
        ),
      )
      .concat(
        // Re-add each collapsed JsonNode root as one optional object param.
        [...jsonNodeRoots].map((root) => ({
          name: toCamelCase(root),
          required: false,
          type: "object",
        })),
      ),
  );

  const body: Prop[] = [];
  const requestBody = deref(doc, operation.requestBody);
  const requestContent = pickContent(requestBody.content);
  if (requestContent) {
    const s = deref(doc, requestContent.schema);
    const t = schemaType(s);
    // Object bodies are flattened into their properties; arrays and
    // scalars become one synthetic "items"/"body" prop.
    if (t === "object") body.push(...toProps(doc, s));
    else if (t === "array")
      body.push(toProp(doc, "items", s, requestBody.required === true));
    else body.push(toProp(doc, "body", s, requestBody.required === true));
  }

  const parsed = parseResponse(doc, operation.responses);
  const normalizedPath = normalizePath(route);

  return {
    key: `${method}:${normalizedPath}`,
    method,
    path: normalizedPath,
    params,
    body: dedupe(body),
    response: dedupe(parsed.response),
    responseKind: parsed.responseKind,
    successStatus: parsed.successStatus,
    errorStatuses: parsed.errorStatuses,
  };
}
|
|
425
|
+
|
|
426
|
+
function parseSwaggerEndpoints(swagger: unknown): Map<string, Endpoint> {
|
|
427
|
+
const out = new Map<string, Endpoint>();
|
|
428
|
+
const doc = obj(swagger);
|
|
429
|
+
for (const [route, pathItem] of Object.entries(obj(doc.paths))) {
|
|
430
|
+
for (const [openApiMethod, method] of METHODS) {
|
|
431
|
+
const operation = obj(pathItem)[openApiMethod];
|
|
432
|
+
if (!isObj(operation)) continue;
|
|
433
|
+
const endpoint = extractEndpoint(doc, route, method, pathItem, operation);
|
|
434
|
+
out.set(endpoint.key, endpoint);
|
|
435
|
+
}
|
|
436
|
+
}
|
|
437
|
+
return out;
|
|
438
|
+
}
|
|
439
|
+
|
|
440
|
+
function serializeProps(props: Prop[], depth: number): string {
|
|
441
|
+
return props
|
|
442
|
+
.map((p) => {
|
|
443
|
+
const indent = " ".repeat(depth);
|
|
444
|
+
const childIndent = " ".repeat(depth + 1);
|
|
445
|
+
const typeLiteral = p.type.includes("'")
|
|
446
|
+
? `"${p.type.replace(/"/g, '\\"')}"`
|
|
447
|
+
: `'${p.type}'`;
|
|
448
|
+
|
|
449
|
+
const parts = [
|
|
450
|
+
`name: '${p.name}'`,
|
|
451
|
+
`required: ${p.required}`,
|
|
452
|
+
`type: ${typeLiteral}`,
|
|
453
|
+
];
|
|
454
|
+
if (p.isArray) parts.push("isArray: true");
|
|
455
|
+
if (p.properties?.length)
|
|
456
|
+
parts.push(
|
|
457
|
+
`properties: [\n${serializeProps(p.properties, depth + 1)}\n${childIndent}]`,
|
|
458
|
+
);
|
|
459
|
+
|
|
460
|
+
return `${indent} { ${parts.join(", ")} },`;
|
|
461
|
+
})
|
|
462
|
+
.join("\n");
|
|
463
|
+
}
|
|
464
|
+
|
|
465
|
+
function blockFor(endpoint: Endpoint): string {
|
|
466
|
+
return ` autoGenerated: {
|
|
467
|
+
params: [
|
|
468
|
+
${serializeProps(endpoint.params, 2)}
|
|
469
|
+
] as const,
|
|
470
|
+
body: [
|
|
471
|
+
${serializeProps(endpoint.body, 2)}
|
|
472
|
+
] as const,
|
|
473
|
+
response: [
|
|
474
|
+
${serializeProps(endpoint.response, 2)}
|
|
475
|
+
] as const,
|
|
476
|
+
successStatus: ${endpoint.successStatus},
|
|
477
|
+
errorStatuses: [${endpoint.errorStatuses.join(", ")}],
|
|
478
|
+
responseKind: '${endpoint.responseKind}',
|
|
479
|
+
} as const,`;
|
|
480
|
+
}
|
|
481
|
+
|
|
482
|
+
function formatGeneratedFiles(cwd: string, filePaths: string[]) {
|
|
483
|
+
if (!filePaths.length) return;
|
|
484
|
+
try {
|
|
485
|
+
Bun.spawnSync(["bunx", "prettier", "--write", ...filePaths], {
|
|
486
|
+
cwd,
|
|
487
|
+
stdio: ["ignore", "ignore", "ignore"],
|
|
488
|
+
});
|
|
489
|
+
} catch (error) {
|
|
490
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
491
|
+
console.warn(`WARN failed to format generated files: ${message}`);
|
|
492
|
+
}
|
|
493
|
+
}
|
|
494
|
+
|
|
495
|
+
async function fetchJson(url: string): Promise<unknown> {
|
|
496
|
+
const response = await fetch(url, {
|
|
497
|
+
headers: { Accept: "application/json" },
|
|
498
|
+
});
|
|
499
|
+
if (!response.ok) {
|
|
500
|
+
throw new Error(`Swagger request failed (${response.status})`);
|
|
501
|
+
}
|
|
502
|
+
return response.json();
|
|
503
|
+
}
|
|
504
|
+
|
|
505
|
+
function resolveEndpoint(
|
|
506
|
+
key: string,
|
|
507
|
+
extracted: Map<string, Endpoint>,
|
|
508
|
+
): Endpoint | undefined {
|
|
509
|
+
const [method, route] = key.split(":");
|
|
510
|
+
if (!method || !route) return undefined;
|
|
511
|
+
return (
|
|
512
|
+
extracted.get(key) ??
|
|
513
|
+
extracted.get(`${method}:${normalizePath(`/api${route}`)}`) ??
|
|
514
|
+
(route.startsWith("/api/")
|
|
515
|
+
? extracted.get(`${method}:${normalizePath(route.slice(4))}`)
|
|
516
|
+
: undefined)
|
|
517
|
+
);
|
|
518
|
+
}
|
|
519
|
+
|
|
520
|
+
/**
 * CLI entry point: fetches the Swagger document, matches each local
 * *.endpoint.ts file to a backend operation, and — when `write` is
 * true — splices a regenerated `autoGenerated` block into each matched
 * file, formats the touched files, and prints a summary. Without
 * `write` it is a dry-run report only.
 */
export async function extractEndpoints(
  options: ExtractEndpointsOptions,
): Promise<void> {
  const { swaggerUrl, domainsDir, write = false } = options;

  console.log(`Fetching Swagger from ${swaggerUrl}`);

  const extracted = parseSwaggerEndpoints(await fetchJson(swaggerUrl));
  const files = endpointFiles(domainsDir);

  console.log(`Parsed ${extracted.size} endpoints from Swagger`);
  console.log(`Found ${files.size} graph endpoint files`);

  let matched = 0;
  let missing = 0;
  let updated = 0;
  const updatedFiles: string[] = [];

  for (const [key, filePath] of files.entries()) {
    const endpoint = resolveEndpoint(key, extracted);
    if (!endpoint) {
      missing++;
      console.log(
        `MISSING backend endpoint for ${path.relative(domainsDir, filePath)} (${key})`,
      );
      continue;
    }

    matched++;
    if (!write) continue;

    const current = fs.readFileSync(filePath, "utf8");
    const start = current.indexOf(AUTO_START);
    const end = current.indexOf(AUTO_END);

    let next: string;
    if (start >= 0 && end > start) {
      // Markers exist — replace content between them
      next =
        current.slice(0, start + AUTO_START.length) +
        `\n${blockFor(endpoint)}\n ` +
        current.slice(end);
    } else {
      // No markers — insert before the closing `});`
      const closingIndex = current.lastIndexOf("});");
      // File without a trailing `});` cannot take an insert; skip it.
      if (closingIndex < 0) continue;
      next =
        current.slice(0, closingIndex) +
        `\n ${AUTO_START}\n${blockFor(endpoint)}\n ${AUTO_END}\n});` +
        current.slice(closingIndex + 3);
    }

    // Skip no-op rewrites so mtimes and the summary stay honest.
    if (next === current) continue;

    fs.writeFileSync(filePath, next);
    updatedFiles.push(filePath);
    updated++;
    console.log(`UPDATED ${path.relative(domainsDir, filePath)}`);
  }

  if (write) formatGeneratedFiles(domainsDir, updatedFiles);

  console.log("\n--- Summary ---");
  console.log(`Matched: ${matched}/${files.size}`);
  console.log(`Missing: ${missing}`);
  console.log(
    write ? `Updated: ${updated}` : "Run with --write to update files",
  );
}
|