@struktur/sdk 2.1.1 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. package/dist/index.js +4111 -0
  2. package/dist/index.js.map +1 -0
  3. package/dist/parsers.js +492 -0
  4. package/dist/parsers.js.map +1 -0
  5. package/dist/strategies.js +2435 -0
  6. package/dist/strategies.js.map +1 -0
  7. package/package.json +25 -13
  8. package/src/agent-cli-integration.test.ts +0 -47
  9. package/src/agent-export.test.ts +0 -17
  10. package/src/agent-tool-labels.test.ts +0 -50
  11. package/src/artifacts/AGENTS.md +0 -16
  12. package/src/artifacts/fileToArtifact.test.ts +0 -37
  13. package/src/artifacts/fileToArtifact.ts +0 -44
  14. package/src/artifacts/input.test.ts +0 -243
  15. package/src/artifacts/input.ts +0 -360
  16. package/src/artifacts/providers.test.ts +0 -19
  17. package/src/artifacts/providers.ts +0 -7
  18. package/src/artifacts/urlToArtifact.test.ts +0 -23
  19. package/src/artifacts/urlToArtifact.ts +0 -19
  20. package/src/auth/AGENTS.md +0 -11
  21. package/src/auth/config.test.ts +0 -132
  22. package/src/auth/config.ts +0 -186
  23. package/src/auth/tokens.test.ts +0 -58
  24. package/src/auth/tokens.ts +0 -229
  25. package/src/chunking/AGENTS.md +0 -11
  26. package/src/chunking/ArtifactBatcher.test.ts +0 -22
  27. package/src/chunking/ArtifactBatcher.ts +0 -110
  28. package/src/chunking/ArtifactSplitter.test.ts +0 -38
  29. package/src/chunking/ArtifactSplitter.ts +0 -151
  30. package/src/debug/AGENTS.md +0 -79
  31. package/src/debug/logger.test.ts +0 -244
  32. package/src/debug/logger.ts +0 -211
  33. package/src/extract.test.ts +0 -22
  34. package/src/extract.ts +0 -150
  35. package/src/fields.test.ts +0 -681
  36. package/src/fields.ts +0 -246
  37. package/src/index.test.ts +0 -20
  38. package/src/index.ts +0 -110
  39. package/src/llm/AGENTS.md +0 -9
  40. package/src/llm/LLMClient.test.ts +0 -394
  41. package/src/llm/LLMClient.ts +0 -264
  42. package/src/llm/RetryingRunner.test.ts +0 -174
  43. package/src/llm/RetryingRunner.ts +0 -270
  44. package/src/llm/message.test.ts +0 -42
  45. package/src/llm/message.ts +0 -47
  46. package/src/llm/models.test.ts +0 -82
  47. package/src/llm/models.ts +0 -190
  48. package/src/llm/resolveModel.ts +0 -86
  49. package/src/merge/AGENTS.md +0 -6
  50. package/src/merge/Deduplicator.test.ts +0 -108
  51. package/src/merge/Deduplicator.ts +0 -45
  52. package/src/merge/SmartDataMerger.test.ts +0 -177
  53. package/src/merge/SmartDataMerger.ts +0 -56
  54. package/src/parsers/AGENTS.md +0 -58
  55. package/src/parsers/collect.test.ts +0 -56
  56. package/src/parsers/collect.ts +0 -31
  57. package/src/parsers/index.ts +0 -6
  58. package/src/parsers/mime.test.ts +0 -91
  59. package/src/parsers/mime.ts +0 -137
  60. package/src/parsers/npm.ts +0 -26
  61. package/src/parsers/pdf.test.ts +0 -394
  62. package/src/parsers/pdf.ts +0 -194
  63. package/src/parsers/runner.test.ts +0 -95
  64. package/src/parsers/runner.ts +0 -177
  65. package/src/parsers/types.ts +0 -29
  66. package/src/prompts/AGENTS.md +0 -8
  67. package/src/prompts/DeduplicationPrompt.test.ts +0 -41
  68. package/src/prompts/DeduplicationPrompt.ts +0 -37
  69. package/src/prompts/ExtractorPrompt.test.ts +0 -21
  70. package/src/prompts/ExtractorPrompt.ts +0 -72
  71. package/src/prompts/ParallelMergerPrompt.test.ts +0 -8
  72. package/src/prompts/ParallelMergerPrompt.ts +0 -37
  73. package/src/prompts/SequentialExtractorPrompt.test.ts +0 -24
  74. package/src/prompts/SequentialExtractorPrompt.ts +0 -82
  75. package/src/prompts/formatArtifacts.test.ts +0 -39
  76. package/src/prompts/formatArtifacts.ts +0 -46
  77. package/src/strategies/AGENTS.md +0 -6
  78. package/src/strategies/DoublePassAutoMergeStrategy.test.ts +0 -53
  79. package/src/strategies/DoublePassAutoMergeStrategy.ts +0 -410
  80. package/src/strategies/DoublePassStrategy.test.ts +0 -48
  81. package/src/strategies/DoublePassStrategy.ts +0 -266
  82. package/src/strategies/ParallelAutoMergeStrategy.test.ts +0 -152
  83. package/src/strategies/ParallelAutoMergeStrategy.ts +0 -345
  84. package/src/strategies/ParallelStrategy.test.ts +0 -61
  85. package/src/strategies/ParallelStrategy.ts +0 -208
  86. package/src/strategies/SequentialAutoMergeStrategy.test.ts +0 -66
  87. package/src/strategies/SequentialAutoMergeStrategy.ts +0 -325
  88. package/src/strategies/SequentialStrategy.test.ts +0 -53
  89. package/src/strategies/SequentialStrategy.ts +0 -142
  90. package/src/strategies/SimpleStrategy.test.ts +0 -46
  91. package/src/strategies/SimpleStrategy.ts +0 -94
  92. package/src/strategies/concurrency.test.ts +0 -16
  93. package/src/strategies/concurrency.ts +0 -14
  94. package/src/strategies/index.test.ts +0 -20
  95. package/src/strategies/index.ts +0 -7
  96. package/src/strategies/utils.test.ts +0 -76
  97. package/src/strategies/utils.ts +0 -95
  98. package/src/tokenization.test.ts +0 -119
  99. package/src/tokenization.ts +0 -71
  100. package/src/types.test.ts +0 -25
  101. package/src/types.ts +0 -174
  102. package/src/validation/AGENTS.md +0 -7
  103. package/src/validation/validator.test.ts +0 -204
  104. package/src/validation/validator.ts +0 -90
  105. package/tsconfig.json +0 -22
@@ -0,0 +1,492 @@
1
var __getOwnPropNames = Object.getOwnPropertyNames;
// Bundler lazy-init helper: `fn` is an object with exactly one method (the
// module initializer). The returned function runs it at most once, caches
// the result in `res`, and returns the cached value on every later call.
var __esm = (fn, res) => function __init() {
  if (fn) {
    const initFn = fn[__getOwnPropNames(fn)[0]];
    fn = 0; // mark as initialized before running, mirroring esbuild's runtime
    res = initFn(fn);
  }
  return res;
};
5
+
6
// src/parsers/collect.ts
/**
 * Drains a ReadableStream<Uint8Array> and returns all bytes as one Buffer.
 * The reader lock is always released, even if reading throws.
 */
async function collectStream(stream) {
  const reader = stream.getReader();
  const parts = [];
  try {
    for (;;) {
      const { done, value } = await reader.read();
      if (done) break;
      parts.push(value);
    }
  } finally {
    reader.releaseLock();
  }
  let total = 0;
  for (const part of parts) {
    total += part.length;
  }
  const merged = new Uint8Array(total);
  let cursor = 0;
  for (const part of parts) {
    merged.set(part, cursor);
    cursor += part.length;
  }
  return Buffer.from(merged);
}
30
// Lazy-init registration for src/parsers/collect.ts; the module body has
// no side effects beyond enabling strict mode.
var init_collect = __esm({
  "src/parsers/collect.ts"() {
    "use strict";
  }
});
35
+
36
// src/parsers/pdf.ts
/**
 * Built-in PDF parser backed by the pdf-parse package.
 *
 * Accepts a Buffer or ReadableStream<Uint8Array>, extracts per-page text,
 * optionally extracts embedded images (on by default; pass
 * options.includeImages === false to skip) and per-page screenshots
 * (options.screenshots === true), and returns an artifact object of type
 * "pdf". Image extraction and screenshot rendering are best-effort: their
 * failures are swallowed and the text-only result is returned.
 */
async function parsePdf(input, options) {
  const buffer = Buffer.isBuffer(input) ? input : await collectStream(input);

  // pdf-parse is loaded lazily to avoid bundling issues.
  const { PDFParse } = await import("pdf-parse");
  const parser = new PDFParse({ data: buffer });
  const textResult = await parser.getText();

  // Strips the "data:<mime>;base64," prefix, leaving raw base64.
  const stripDataUrl = (dataUrl) => dataUrl.replace(/^data:[^;]+;base64,/, "");

  // page number -> page text (whitespace-only pages are dropped)
  const textByPage = new Map();
  for (const page of textResult.pages) {
    if (page.text && page.text.trim().length > 0) {
      textByPage.set(page.num, page.text);
    }
  }

  // Embedded images (imageBuffer=false: only the data URL is needed).
  let imageResult;
  if (options?.includeImages !== false) {
    try {
      imageResult = await parser.getImage({ imageBuffer: false, imageDataUrl: true });
    } catch {
      // optional — continue without embedded images
    }
  }

  // Page screenshots, only when explicitly requested.
  let screenshotResult;
  if (options?.screenshots === true) {
    try {
      const params = { imageBuffer: false, imageDataUrl: true };
      if (options.screenshotWidth === void 0) {
        params.scale = options.screenshotScale ?? 1.5;
      } else {
        // An explicit target width takes precedence over the scale factor.
        params.desiredWidth = options.screenshotWidth;
      }
      screenshotResult = await parser.getScreenshot(params);
    } catch {
      // optional — continue without screenshots
    }
  }

  // page number -> ArtifactImage[] (embedded first, screenshots appended)
  const imagesByPage = new Map();
  if (imageResult) {
    for (const pageImages of imageResult.pages) {
      const embedded = [];
      for (const img of pageImages.images) {
        if (!img.dataUrl) continue;
        embedded.push({
          type: "image",
          base64: stripDataUrl(img.dataUrl),
          width: img.width,
          height: img.height,
          imageType: "embedded"
        });
      }
      if (embedded.length > 0) {
        imagesByPage.set(pageImages.pageNumber, embedded);
      }
    }
  }
  if (screenshotResult) {
    for (const shot of screenshotResult.pages) {
      if (!shot.dataUrl) continue;
      const shotImage = {
        type: "image",
        base64: stripDataUrl(shot.dataUrl),
        width: shot.width,
        height: shot.height,
        imageType: "screenshot"
      };
      const prior = imagesByPage.get(shot.pageNumber) ?? [];
      imagesByPage.set(shot.pageNumber, [...prior, shotImage]);
    }
  }

  let contents;
  if (textResult.pages.length > 0) {
    // One content entry per page that has text and/or images, in page order.
    const pageNumbers = [...new Set([...textByPage.keys(), ...imagesByPage.keys()])]
      .sort((a, b) => a - b);
    contents = pageNumbers.map((pageNum) => {
      const entry = { page: pageNum };
      const pageText = textByPage.get(pageNum);
      if (pageText) entry.text = pageText;
      const pageMedia = imagesByPage.get(pageNum);
      if (pageMedia) entry.media = pageMedia;
      return entry;
    });
  } else {
    // No per-page breakdown — fall back to the concatenated text, plus the
    // first page's images when any were extracted.
    const entry = { text: textResult.text };
    const [firstImages] = imagesByPage.values();
    if (firstImages) entry.media = firstImages;
    contents = [entry];
  }
  if (contents.length === 0) {
    contents = [{ text: "" }];
  }

  // Document metadata is optional too.
  let infoResult;
  try {
    infoResult = await parser.getInfo();
  } catch {
    // continue without info
  }
  await parser.destroy();

  const metadata = { numpages: textResult.total };
  if (infoResult) metadata.info = infoResult;

  return {
    id: `artifact-${crypto.randomUUID()}`,
    type: "pdf",
    raw: async () => buffer,
    contents,
    metadata
  };
}
145
// Lazy-init registration for src/parsers/pdf.ts; initializing it also
// initializes its collect.ts dependency.
var init_pdf = __esm({
  "src/parsers/pdf.ts"() {
    "use strict";
    init_collect();
  }
});
151
+
152
+ // src/parsers/runner.ts
153
+ import os from "os";
154
+ import path2 from "path";
155
+ import { rm, writeFile, readFile as readFile2 } from "fs/promises";
156
+ import { exec } from "child_process";
157
+ import { promisify } from "util";
158
+
159
+ // src/validation/validator.ts
160
+ import Ajv from "ajv";
161
+ import addFormats from "ajv-formats";
162
/**
 * Error thrown when data fails JSON-schema validation. Exposes the raw
 * Ajv error-object list on `errors` for callers to inspect.
 */
var SchemaValidationError = class extends Error {
  errors;
  constructor(message, errors) {
    super(message);
    this.errors = errors;
    this.name = "SchemaValidationError";
  }
};
170
// Matches ids of the form "artifact:<name>/images/image<N>.<ext>".
var ARTIFACT_ID_PATTERN = /^artifact:[^/]+\/images\/image\d+\.\w+$/;
/**
 * Builds an Ajv instance with standard formats registered plus the custom
 * "artifact-id" string format (validated against ARTIFACT_ID_PATTERN).
 */
var createAjv = () => {
  const ajv = new Ajv({ allErrors: true, strict: false, allowUnionTypes: true });
  addFormats(ajv);
  ajv.addFormat("artifact-id", {
    type: "string",
    validate: (value) => ARTIFACT_ID_PATTERN.test(value)
  });
  return ajv;
};
184
/**
 * Compiles `schema` on the given Ajv instance and validates `data`.
 * Returns `data` unchanged when valid; otherwise throws a
 * SchemaValidationError carrying Ajv's error list.
 */
var validateOrThrow = (ajv, schema, data) => {
  const check = ajv.compile(schema);
  if (check(data)) {
    return data;
  }
  throw new SchemaValidationError("Schema validation failed", check.errors ?? []);
};
194
+
195
+ // src/artifacts/input.ts
196
+ import { readFile } from "fs/promises";
197
+
198
+ // src/parsers/mime.ts
199
+ import path from "path";
200
// Magic-byte signature table consulted by detectFromMagicBytes. Each entry
// pairs a MIME type with the leading bytes that identify it.
var MAGIC_BYTES = [
  { mimeType: "application/pdf", bytes: [0x25, 0x50, 0x44, 0x46] }, // "%PDF"
  { mimeType: "image/png", bytes: [0x89, 0x50, 0x4e, 0x47] },       // \x89 "PNG"
  { mimeType: "image/jpeg", bytes: [0xff, 0xd8, 0xff] },            // JPEG SOI
  { mimeType: "image/gif", bytes: [0x47, 0x49, 0x46, 0x38] },       // "GIF8"
  // ZIP container — DOCX/XLSX/PPTX all start with "PK\x03\x04" too.
  { mimeType: "application/zip", bytes: [0x50, 0x4b, 0x03, 0x04] }
];
215
// WebP needs a two-part signature check: a "RIFF" container header at
// offset 0 and the "WEBP" tag at offset 8.
var isWebP = (header) => {
  if (header.length < 12) return false;
  const RIFF = [0x52, 0x49, 0x46, 0x46];
  const WEBP = [0x57, 0x45, 0x42, 0x50];
  return (
    RIFF.every((b, i) => header[i] === b) &&
    WEBP.every((b, i) => header[8 + i] === b)
  );
};
221
// True when `header` contains exactly `bytes` starting at `offset`.
var matchesMagicBytes = (header, bytes, offset = 0) => {
  if (header.length < offset + bytes.length) return false;
  for (let i = 0; i < bytes.length; i += 1) {
    if (header[offset + i] !== bytes[i]) return false;
  }
  return true;
};
225
// Sniffs a MIME type from leading bytes. WebP is handled first because its
// two-part signature doesn't fit the flat MAGIC_BYTES table. Returns null
// when nothing matches.
var detectFromMagicBytes = (header) => {
  if (isWebP(header)) return "image/webp";
  const hit = MAGIC_BYTES.find(
    ({ bytes, offset }) => matchesMagicBytes(header, bytes, offset ?? 0)
  );
  return hit ? hit.mimeType : null;
};
234
// Extension → MIME type fallback table, used when magic-byte sniffing is
// inconclusive. Keys are lowercase extensions including the leading dot.
var EXTENSION_MIME_MAP = {
  // text & markup
  ".txt": "text/plain",
  ".md": "text/markdown",
  ".markdown": "text/markdown",
  ".html": "text/html",
  ".htm": "text/html",
  ".xml": "application/xml",
  ".svg": "image/svg+xml",
  // structured data
  ".json": "application/json",
  ".csv": "text/csv",
  ".yaml": "application/yaml",
  ".yml": "application/yaml",
  ".toml": "application/toml",
  // documents
  ".pdf": "application/pdf",
  ".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
  ".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
  ".pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
  // images
  ".png": "image/png",
  ".jpg": "image/jpeg",
  ".jpeg": "image/jpeg",
  ".gif": "image/gif",
  ".webp": "image/webp",
  // audio / video
  ".mp4": "video/mp4",
  ".mp3": "audio/mpeg",
  ".wav": "audio/wav",
  ".ogg": "audio/ogg",
  // source code served as text
  ".ts": "text/plain",
  ".tsx": "text/plain",
  ".js": "text/javascript",
  ".jsx": "text/javascript",
  ".css": "text/css"
};
266
/**
 * Resolves a MIME type for an input, in priority order:
 *   1. an explicit mimeOverride,
 *   2. magic-byte sniffing of the buffer's first 512 bytes,
 *   3. `detectFileType(header)` hooks exported by registered npm parsers,
 *   4. the file extension via EXTENSION_MIME_MAP.
 * Returns null when nothing matches.
 */
async function detectMimeType(options) {
  const { buffer, filePath, mimeOverride, npmParsers } = options;

  // An explicit override always wins.
  if (mimeOverride) {
    return mimeOverride;
  }

  if (buffer && buffer.length > 0) {
    const header = buffer.subarray(0, 512);
    const sniffed = detectFromMagicBytes(header);
    if (sniffed) {
      return sniffed;
    }
    // Let registered npm parser packages inspect the header; a package
    // that fails to load simply doesn't get a vote.
    for (const entry of npmParsers ?? []) {
      try {
        const mod = await import(entry.def.package);
        if (typeof mod.detectFileType === "function" && mod.detectFileType(header)) {
          return entry.mimeType;
        }
      } catch {
        // ignore and try the next parser
      }
    }
  }

  // Fall back to the file extension.
  if (filePath) {
    const ext = path.extname(filePath).toLowerCase();
    if (ext && ext in EXTENSION_MIME_MAP) {
      return EXTENSION_MIME_MAP[ext] ?? null;
    }
  }
  return null;
}
297
+
298
+ // src/artifacts/input.ts
299
// JSON schema for one serialized image attachment. An image must carry
// either a `url` or inline `base64` data (enforced by anyOf).
var serializedArtifactImageSchema = {
  type: "object",
  required: ["type"],
  properties: {
    type: { const: "image" },
    url: { type: "string", minLength: 1 },
    base64: { type: "string", minLength: 1 },
    text: { type: "string" },
    x: { type: "number" },
    y: { type: "number" },
    width: { type: "number" },
    height: { type: "number" },
    imageType: { enum: ["embedded", "screenshot"] }
  },
  additionalProperties: false,
  anyOf: [{ required: ["url"] }, { required: ["base64"] }]
};
// JSON schema for one content block: requires text and/or media.
var serializedArtifactContentSchema = {
  type: "object",
  properties: {
    page: { type: "number" },
    text: { type: "string" },
    media: { type: "array", items: serializedArtifactImageSchema }
  },
  additionalProperties: false,
  anyOf: [{ required: ["text"] }, { required: ["media"] }]
};
// JSON schema for a whole serialized artifact.
var serializedArtifactSchema = {
  type: "object",
  required: ["id", "type", "contents"],
  properties: {
    id: { type: "string", minLength: 1 },
    type: { enum: ["text", "image", "pdf", "file"] },
    contents: { type: "array", items: serializedArtifactContentSchema },
    metadata: { type: "object", additionalProperties: true },
    tokens: { type: "number" }
  },
  additionalProperties: false
};
// Accepts either a single serialized artifact or an array of them.
var serializedArtifactsSchema = {
  anyOf: [
    serializedArtifactSchema,
    { type: "array", items: serializedArtifactSchema }
  ]
};
344
// Validates unknown JSON against the serialized-artifacts schema (throws
// SchemaValidationError on failure) and normalizes the result to an array.
var validateSerializedArtifacts = (data) => {
  const parsed = validateOrThrow(createAjv(), serializedArtifactsSchema, data);
  return Array.isArray(parsed) ? parsed : [parsed];
};
353
// Re-attaches a `raw` accessor to each serialized artifact. The payload is
// the JSON-encoded contents array (an empty array when contents is absent).
var hydrateSerializedArtifacts = (items) =>
  items.map((item) => ({
    ...item,
    raw: async () => Buffer.from(JSON.stringify(item.contents ?? []))
  }));
359
+
360
+ // src/parsers/runner.ts
361
+ // Promise-returning wrapper around child_process.exec; used by
+ // spawnAndCapture below to run external parser commands.
+ var execAsync = promisify(exec);
362
// Parses a command's stdout as JSON, validates it as serialized artifacts,
// and hydrates the result. Invalid JSON produces a descriptive error that
// includes the first 200 characters of the raw output.
var parseCommandOutput = (stdout) => {
  let parsed;
  try {
    parsed = JSON.parse(stdout);
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    throw new Error(`Parser command produced invalid JSON: ${message}\nOutput: ${stdout.slice(0, 200)}`);
  }
  return hydrateSerializedArtifacts(validateSerializedArtifacts(parsed));
};
374
/**
 * Runs a shell command and resolves with its stdout.
 *
 * Fix: child_process.exec does not accept an `input` option (only the
 * *Sync variants do), so the previous code never delivered stdinBuffer to
 * the child — commands that read stdin hung forever on the open,
 * never-written pipe. promisify(exec) exposes the spawned ChildProcess on
 * the returned promise's `child` property, so we write the buffer to the
 * child's stdin directly. This also removes the lossy Buffer→string
 * conversion, keeping binary payloads (e.g. PDFs) intact, and closes
 * stdin even when no buffer is given so stdin-reading commands see EOF.
 *
 * @param {string} command shell command line to execute
 * @param {Buffer} [stdinBuffer] optional bytes to pipe to the child's stdin
 * @returns {Promise<string>} the command's stdout
 * @throws {Error} on an empty command, or a wrapped error carrying up to
 *   500 characters of stderr when the command fails
 */
var spawnAndCapture = async (command, stdinBuffer) => {
  if (!command.trim()) {
    throw new Error(`Empty command: ${command}`);
  }
  try {
    const pending = execAsync(command, { maxBuffer: 50 * 1024 * 1024 });
    const child = pending.child;
    if (child?.stdin) {
      // Ignore EPIPE if the child exits before consuming its stdin.
      child.stdin.once("error", () => {});
      if (stdinBuffer) {
        child.stdin.write(stdinBuffer);
      }
      child.stdin.end();
    }
    const { stdout } = await pending;
    return stdout;
  } catch (error) {
    if (error instanceof Error && "stderr" in error) {
      const stderr = error.stderr;
      throw new Error(
        `Parser command failed: ${command}\nStderr: ${stderr?.slice(0, 500) ?? ""}`
      );
    }
    throw error;
  }
};
393
/**
 * Runs an npm-package parser. The package must export parseFile and/or
 * parseStream; file inputs prefer parseFile (zero-copy), buffer inputs
 * prefer parseStream, and each falls back to the other (via a read stream
 * or a temp file that is always cleaned up).
 */
var runNpmParser = async (pkg, input, mimeType) => {
  const mod = await import(pkg);
  const canParseFile = typeof mod.parseFile === "function";
  const canParseStream = typeof mod.parseStream === "function";
  if (!canParseFile && !canParseStream) {
    throw new Error(
      `npm parser package "${pkg}" exports neither parseFile nor parseStream`
    );
  }

  if (input.kind === "file") {
    if (canParseFile) {
      return mod.parseFile(input.path, mimeType);
    }
    // Fallback: expose the file as a Web ReadableStream.
    const { createReadStream } = await import("fs");
    const { Readable } = await import("stream");
    return mod.parseStream(Readable.toWeb(createReadStream(input.path)), mimeType);
  }

  // input.kind === "buffer"
  if (canParseStream) {
    const webStream = new ReadableStream({
      start(controller) {
        controller.enqueue(input.buffer);
        controller.close();
      }
    });
    return mod.parseStream(webStream, mimeType);
  }

  // Fallback: spill the buffer to a temp file for parseFile, then clean up.
  const tmpFile = path2.join(os.tmpdir(), `struktur-parse-${crypto.randomUUID()}`);
  try {
    await writeFile(tmpFile, input.buffer);
    return await mod.parseFile(tmpFile, mimeType);
  } finally {
    await rm(tmpFile, { force: true });
  }
};
429
/**
 * Runs a command-line parser that takes a file path. Buffer inputs are
 * spilled to a temp file (removed afterwards); the literal FILE_PATH token
 * in the command is replaced with the actual path.
 */
var runCommandFileParser = async (command, input) => {
  const tempFile = input.kind === "file"
    ? null
    : path2.join(os.tmpdir(), `struktur-parse-${crypto.randomUUID()}`);
  if (tempFile) {
    await writeFile(tempFile, input.buffer);
  }
  const filePath = tempFile ?? input.path;
  try {
    const interpolated = command.replace(/FILE_PATH/g, filePath);
    return parseCommandOutput(await spawnAndCapture(interpolated));
  } finally {
    if (tempFile) {
      await rm(tempFile, { force: true });
    }
  }
};
449
/**
 * Runs a command-line parser that reads its input from stdin. File inputs
 * are read into memory first; the command's stdout is parsed as serialized
 * artifacts.
 */
var runCommandStdinParser = async (command, input) => {
  const payload = input.kind === "file" ? await readFile2(input.path) : input.buffer;
  return parseCommandOutput(await spawnAndCapture(command, payload));
};
459
/**
 * Dispatches a parser definition to the matching runner:
 *   "npm"           → runNpmParser
 *   "command-file"  → runCommandFileParser
 *   "command-stdin" → runCommandStdinParser
 *   "inline"        → calls def.handler with the input bytes
 * Throws for any other def.type.
 */
var runParser = async (def, input, mimeType) => {
  if (def.type === "npm") {
    return runNpmParser(def.package, input, mimeType);
  }
  if (def.type === "command-file") {
    return runCommandFileParser(def.command, input);
  }
  if (def.type === "command-stdin") {
    return runCommandStdinParser(def.command, input);
  }
  if (def.type === "inline") {
    const payload = input.kind === "file" ? await readFile2(input.path) : input.buffer;
    return [await def.handler(payload)];
  }
  throw new Error(`Unknown parser type: ${def.type}`);
};
482
+
483
+ // src/parsers/index.ts
484
+ init_collect();
485
+ init_pdf();
486
+ export {
487
+ collectStream,
488
+ detectMimeType,
489
+ parsePdf,
490
+ runParser
491
+ };
492
+ //# sourceMappingURL=parsers.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/parsers/collect.ts","../src/parsers/pdf.ts","../src/parsers/runner.ts","../src/validation/validator.ts","../src/artifacts/input.ts","../src/parsers/mime.ts","../src/parsers/index.ts"],"sourcesContent":["/**\n * Collects a ReadableStream<Uint8Array> into a Buffer.\n * Uses Web Streams API — compatible with Bun and Node 18+.\n * Exported as a public utility for npm parser authors.\n */\nexport async function collectStream(stream: ReadableStream<Uint8Array>): Promise<Buffer> {\n const reader = stream.getReader();\n const chunks: Uint8Array[] = [];\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n if (done) {\n break;\n }\n chunks.push(value);\n }\n } finally {\n reader.releaseLock();\n }\n\n const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);\n const result = new Uint8Array(totalLength);\n let offset = 0;\n for (const chunk of chunks) {\n result.set(chunk, offset);\n offset += chunk.length;\n }\n\n return Buffer.from(result);\n}\n","import type { Artifact, ArtifactContent, ArtifactImage } from \"../types\";\nimport { collectStream } from \"./collect\";\n\nexport type ParsePdfOptions = {\n /**\n * Whether to extract embedded images from each page and include them as\n * base64-encoded ArtifactImage entries in the media field.\n * Defaults to true. Pass false to skip image extraction entirely.\n */\n includeImages?: boolean;\n /**\n * Whether to render page screenshots and include them as ArtifactImage entries.\n * When true, each page is rendered to a PNG image and added to the media field.\n * Defaults to false.\n */\n screenshots?: boolean;\n /**\n * Scale factor for screenshots. Higher values produce larger, higher-quality images.\n * Defaults to 1.5.\n */\n screenshotScale?: number;\n /**\n * Target width in pixels for screenshots. 
If specified, takes precedence over screenshotScale.\n * Height is calculated to maintain aspect ratio.\n */\n screenshotWidth?: number;\n};\n\n/**\n * Built-in PDF parser using pdf-parse.\n *\n * Accepts a Buffer or ReadableStream<Uint8Array> and extracts text per-page\n * into ArtifactContent[] with page numbers set. Embedded images on each page\n * are extracted and included as base64-encoded ArtifactImage entries in the\n * media field of the corresponding content block (unless includeImages is\n * false). Returns an Artifact with type: \"pdf\".\n */\nexport async function parsePdf(\n input: Buffer | ReadableStream<Uint8Array>,\n options?: ParsePdfOptions,\n): Promise<Artifact> {\n const buffer = Buffer.isBuffer(input) ? input : await collectStream(input);\n\n // Dynamic import to avoid bundling issues\n const { PDFParse } = await import(\"pdf-parse\");\n\n const parser = new PDFParse({ data: buffer });\n const textResult = await parser.getText();\n\n // Build a page-number → text map from per-page results\n const pageTextMap = new Map<number, string>();\n if (textResult.pages.length > 0) {\n for (const page of textResult.pages) {\n if (page.text && page.text.trim().length > 0) {\n pageTextMap.set(page.num, page.text);\n }\n }\n }\n\n // Extract embedded images unless the caller opted out.\n // imageBuffer=false saves memory (we only need the data URL).\n let imageResult;\n if (options?.includeImages !== false) {\n try {\n imageResult = await parser.getImage({ imageBuffer: false, imageDataUrl: true });\n } catch {\n // Image extraction is optional — continue without images if it fails\n }\n }\n\n // Render page screenshots if requested\n let screenshotResult;\n if (options?.screenshots === true) {\n try {\n const screenshotParams: {\n imageBuffer: boolean;\n imageDataUrl: boolean;\n scale?: number;\n desiredWidth?: number;\n } = { imageBuffer: false, imageDataUrl: true };\n \n if (options.screenshotWidth !== undefined) {\n screenshotParams.desiredWidth = 
options.screenshotWidth;\n } else {\n screenshotParams.scale = options.screenshotScale ?? 1.5;\n }\n \n screenshotResult = await parser.getScreenshot(screenshotParams);\n } catch {\n // Screenshot rendering is optional — continue without screenshots if it fails\n }\n }\n\n // Build a page-number → ArtifactImage[] map from extracted images\n const pageImageMap = new Map<number, ArtifactImage[]>();\n if (imageResult) {\n for (const pageImages of imageResult.pages) {\n const artifactImages: ArtifactImage[] = pageImages.images\n .filter((img) => img.dataUrl)\n .map((img) => {\n // Strip the \"data:<mime>;base64,\" prefix to get the raw base64 string\n const base64 = img.dataUrl.replace(/^data:[^;]+;base64,/, \"\");\n const artifactImage: ArtifactImage = {\n type: \"image\",\n base64,\n width: img.width,\n height: img.height,\n imageType: \"embedded\",\n };\n return artifactImage;\n });\n if (artifactImages.length > 0) {\n pageImageMap.set(pageImages.pageNumber, artifactImages);\n }\n }\n }\n\n // Add screenshots to the pageImageMap\n if (screenshotResult) {\n for (const screenshot of screenshotResult.pages) {\n if (screenshot.dataUrl) {\n // Strip the \"data:<mime>;base64,\" prefix to get the raw base64 string\n const base64 = screenshot.dataUrl.replace(/^data:[^;]+;base64,/, \"\");\n const artifactImage: ArtifactImage = {\n type: \"image\",\n base64,\n width: screenshot.width,\n height: screenshot.height,\n imageType: \"screenshot\",\n };\n // Append to existing images for this page, or create new entry\n const existing = pageImageMap.get(screenshot.pageNumber) ?? 
[];\n pageImageMap.set(screenshot.pageNumber, [...existing, artifactImage]);\n }\n }\n }\n\n let contents: ArtifactContent[];\n\n if (textResult.pages.length > 0) {\n // Collect all page numbers that have text or images\n const allPageNums = new Set<number>([\n ...pageTextMap.keys(),\n ...pageImageMap.keys(),\n ]);\n\n contents = Array.from(allPageNums)\n .sort((a, b) => a - b)\n .map((pageNum) => {\n const entry: ArtifactContent = { page: pageNum };\n const text = pageTextMap.get(pageNum);\n if (text) entry.text = text;\n const media = pageImageMap.get(pageNum);\n if (media) entry.media = media;\n return entry;\n });\n } else {\n // Fallback: no per-page info — use full concatenated text\n const entry: ArtifactContent = { text: textResult.text };\n // Attach any images from the first page if available\n const firstPageImages = pageImageMap.size > 0\n ? pageImageMap.values().next().value\n : undefined;\n if (firstPageImages) entry.media = firstPageImages;\n contents = [entry];\n }\n\n // Ensure we have at least one content entry\n if (contents.length === 0) {\n contents = [{ text: \"\" }];\n }\n\n let infoResult;\n try {\n infoResult = await parser.getInfo();\n } catch {\n // Info extraction is optional\n }\n\n await parser.destroy();\n\n return {\n id: `artifact-${crypto.randomUUID()}`,\n type: \"pdf\",\n raw: async () => buffer,\n contents,\n metadata: infoResult\n ? 
{\n numpages: textResult.total,\n info: infoResult,\n }\n : { numpages: textResult.total },\n };\n}\n","import os from \"node:os\";\nimport path from \"node:path\";\nimport { rm, writeFile, readFile } from \"node:fs/promises\";\nimport { exec } from \"node:child_process\";\nimport { promisify } from \"node:util\";\nimport type { Artifact } from \"../types\";\nimport type { ParserDef, ParserInput } from \"./types\";\nimport type { NpmParserModule } from \"./npm\";\nimport {\n hydrateSerializedArtifacts,\n validateSerializedArtifacts,\n} from \"../artifacts/input\";\n\nconst execAsync = promisify(exec);\n\nconst parseCommandOutput = (stdout: string): Artifact[] => {\n let parsed: unknown;\n try {\n parsed = JSON.parse(stdout);\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n throw new Error(`Parser command produced invalid JSON: ${message}\\nOutput: ${stdout.slice(0, 200)}`);\n }\n const serialized = validateSerializedArtifacts(parsed);\n return hydrateSerializedArtifacts(serialized);\n};\n\nconst spawnAndCapture = async (command: string, stdinBuffer?: Buffer): Promise<string> => {\n if (!command.trim()) {\n throw new Error(`Empty command: ${command}`);\n }\n\n try {\n const options = stdinBuffer\n ? { input: stdinBuffer.toString(), maxBuffer: 50 * 1024 * 1024 }\n : { maxBuffer: 50 * 1024 * 1024 };\n const { stdout } = await execAsync(command, options);\n return stdout;\n } catch (error) {\n if (error instanceof Error && \"stderr\" in error) {\n const stderr = (error as { stderr: string }).stderr;\n throw new Error(\n `Parser command failed: ${command}\\nStderr: ${stderr?.slice(0, 500) ?? 
\"\"}`\n );\n }\n throw error;\n }\n};\n\nconst runNpmParser = async (\n pkg: string,\n input: ParserInput,\n mimeType: string,\n): Promise<Artifact[]> => {\n const mod = (await import(pkg)) as NpmParserModule;\n\n const hasParseFile = typeof mod.parseFile === \"function\";\n const hasParseStream = typeof mod.parseStream === \"function\";\n\n if (!hasParseFile && !hasParseStream) {\n throw new Error(\n `npm parser package \"${pkg}\" exports neither parseFile nor parseStream`\n );\n }\n\n if (input.kind === \"file\") {\n // Prefer parseFile for zero-copy\n if (hasParseFile) {\n return mod.parseFile!(input.path, mimeType);\n }\n // Fallback: open file as stream\n const { createReadStream } = await import(\"node:fs\");\n const { Readable } = await import(\"node:stream\");\n const nodeStream = createReadStream(input.path);\n const stream = Readable.toWeb(nodeStream) as ReadableStream<Uint8Array>;\n return mod.parseStream!(stream, mimeType);\n }\n\n // input.kind === \"buffer\"\n if (hasParseStream) {\n // Prefer parseStream for buffers\n const stream = new ReadableStream<Uint8Array>({\n start(controller) {\n controller.enqueue(input.buffer);\n controller.close();\n },\n });\n return mod.parseStream!(stream, mimeType);\n }\n\n // Fallback: write buffer to temp file, call parseFile, clean up\n const tmpFile = path.join(os.tmpdir(), `struktur-parse-${crypto.randomUUID()}`);\n try {\n await writeFile(tmpFile, input.buffer);\n return await mod.parseFile!(tmpFile, mimeType);\n } finally {\n await rm(tmpFile, { force: true });\n }\n};\n\nconst runCommandFileParser = async (\n command: string,\n input: ParserInput,\n): Promise<Artifact[]> => {\n let filePath: string;\n let tempFile: string | null = null;\n\n if (input.kind === \"file\") {\n filePath = input.path;\n } else {\n // Write buffer to temp file\n tempFile = path.join(os.tmpdir(), `struktur-parse-${crypto.randomUUID()}`);\n await writeFile(tempFile, input.buffer);\n filePath = tempFile;\n }\n\n try {\n const 
interpolated = command.replace(/FILE_PATH/g, filePath);\n const stdout = await spawnAndCapture(interpolated);\n return parseCommandOutput(stdout);\n } finally {\n if (tempFile) {\n await rm(tempFile, { force: true });\n }\n }\n};\n\nconst runCommandStdinParser = async (\n command: string,\n input: ParserInput,\n): Promise<Artifact[]> => {\n let buffer: Buffer;\n\n if (input.kind === \"file\") {\n buffer = await readFile(input.path);\n } else {\n buffer = input.buffer;\n }\n\n const stdout = await spawnAndCapture(command, buffer);\n return parseCommandOutput(stdout);\n};\n\nexport const runParser = async (\n def: ParserDef,\n input: ParserInput,\n mimeType: string,\n): Promise<Artifact[]> => {\n switch (def.type) {\n case \"npm\":\n return runNpmParser(def.package, input, mimeType);\n case \"command-file\":\n return runCommandFileParser(def.command, input);\n case \"command-stdin\":\n return runCommandStdinParser(def.command, input);\n case \"inline\": {\n let buffer: Buffer;\n if (input.kind === \"file\") {\n buffer = await readFile(input.path);\n } else {\n buffer = input.buffer;\n }\n return [await def.handler(buffer)];\n }\n default: {\n const _exhaustive: never = def;\n throw new Error(`Unknown parser type: ${(_exhaustive as { type: string }).type}`);\n }\n }\n};\n","import Ajv, { type AnySchema, type ErrorObject, type JSONSchemaType } from \"ajv\";\nimport addFormats from \"ajv-formats\";\n\nexport type ValidationErrors = ErrorObject[];\n\nexport type ValidationMode = 'strict' | 'lenient';\n\nexport type ValidationResult<T> = \n | { valid: true; data: T }\n | { valid: false; errors: ErrorObject[] };\n\nexport class SchemaValidationError extends Error {\n public readonly errors: ValidationErrors;\n\n constructor(message: string, errors: ValidationErrors) {\n super(message);\n this.name = \"SchemaValidationError\";\n this.errors = errors;\n }\n}\n\nconst ARTIFACT_ID_PATTERN = /^artifact:[^/]+\\/images\\/image\\d+\\.\\w+$/;\n\nexport const createAjv = () => {\n 
const ajv = new Ajv({\n allErrors: true,\n strict: false,\n allowUnionTypes: true,\n });\n addFormats(ajv);\n \n ajv.addFormat(\"artifact-id\", {\n type: \"string\",\n validate: (data: string) => ARTIFACT_ID_PATTERN.test(data),\n });\n \n return ajv;\n};\n\nexport type SchemaInput<T> = JSONSchemaType<T> | AnySchema;\n\nexport const validateOrThrow = <T>(\n ajv: Ajv,\n schema: SchemaInput<T>,\n data: unknown\n): T => {\n const validate = ajv.compile<T>(schema);\n const valid = validate(data);\n\n if (!valid) {\n const errors = validate.errors ?? [];\n const message = \"Schema validation failed\";\n throw new SchemaValidationError(message, errors);\n }\n\n return data as T;\n};\n\nexport const isRequiredError = (error: ErrorObject): boolean => {\n return error.keyword === \"required\";\n};\n\nexport const validateAllowingMissingRequired = <T>(\n ajv: Ajv,\n schema: SchemaInput<T>,\n data: unknown,\n isFinalAttempt: boolean = true\n): ValidationResult<T> => {\n const validate = ajv.compile<T>(schema);\n const valid = validate(data);\n\n if (valid) {\n return { valid: true, data: data as T };\n }\n\n const errors = validate.errors ?? 
[];\n const nonRequiredErrors = errors.filter((error) => !isRequiredError(error));\n\n if (nonRequiredErrors.length === 0) {\n // Only required field errors\n // On final attempt, accept partial data\n // On non-final attempts, return invalid to trigger retry\n if (isFinalAttempt) {\n return { valid: true, data: data as T };\n }\n return { valid: false, errors };\n }\n\n return { valid: false, errors: nonRequiredErrors };\n};\n","import type { Artifact, ArtifactContent, ArtifactImage, ArtifactType } from \"../types\";\nimport { createAjv, validateOrThrow } from \"../validation/validator\";\nimport { defaultArtifactProviders, type ArtifactProviders } from \"./providers\";\nimport type { ParsersConfig } from \"../parsers/types\";\nimport { runParser } from \"../parsers/runner\";\nimport type { ParsePdfOptions } from \"../parsers/pdf\";\nimport { readFile } from \"node:fs/promises\";\nimport { detectMimeType } from \"../parsers/mime\";\n\nexport type SerializedArtifactImage = Omit<ArtifactImage, \"contents\"> & {\n contents?: never;\n};\n\nexport type SerializedArtifactContent = Omit<ArtifactContent, \"media\"> & {\n media?: SerializedArtifactImage[];\n};\n\nexport type SerializedArtifact = Omit<Artifact, \"raw\" | \"contents\"> & {\n contents: SerializedArtifactContent[];\n raw?: never;\n};\n\nexport type SerializedArtifacts = SerializedArtifact | SerializedArtifact[];\n\nexport type ArtifactInput =\n | { kind: \"artifact-json\"; data: unknown }\n | { kind: \"text\"; text: string; id?: string }\n | { kind: \"file\"; path: string; mimeType?: string; id?: string }\n | { kind: \"buffer\"; buffer: Buffer; mimeType: string; id?: string };\n\nexport type ArtifactInputParser = {\n name: string;\n canParse: (input: ArtifactInput) => boolean;\n parse: (\n input: ArtifactInput,\n options?: { \n providers?: ArtifactProviders; \n parsers?: ParsersConfig; \n includeImages?: boolean;\n screenshots?: boolean;\n screenshotScale?: number;\n screenshotWidth?: number;\n }\n ) => 
Promise<Artifact[]>;\n};\n\nconst serializedArtifactImageSchema = {\n type: \"object\",\n required: [\"type\"],\n properties: {\n type: { const: \"image\" },\n url: { type: \"string\", minLength: 1 },\n base64: { type: \"string\", minLength: 1 },\n text: { type: \"string\" },\n x: { type: \"number\" },\n y: { type: \"number\" },\n width: { type: \"number\" },\n height: { type: \"number\" },\n imageType: { enum: [\"embedded\", \"screenshot\"] },\n },\n additionalProperties: false,\n anyOf: [{ required: [\"url\"] }, { required: [\"base64\"] }],\n};\n\nconst serializedArtifactContentSchema = {\n type: \"object\",\n properties: {\n page: { type: \"number\" },\n text: { type: \"string\" },\n media: { type: \"array\", items: serializedArtifactImageSchema },\n },\n additionalProperties: false,\n anyOf: [{ required: [\"text\"] }, { required: [\"media\"] }],\n};\n\nconst serializedArtifactSchema = {\n type: \"object\",\n required: [\"id\", \"type\", \"contents\"],\n properties: {\n id: { type: \"string\", minLength: 1 },\n type: { enum: [\"text\", \"image\", \"pdf\", \"file\"] as ArtifactType[] },\n contents: { type: \"array\", items: serializedArtifactContentSchema },\n metadata: { type: \"object\", additionalProperties: true },\n tokens: { type: \"number\" },\n },\n additionalProperties: false,\n};\n\nconst serializedArtifactsSchema = {\n anyOf: [\n serializedArtifactSchema,\n { type: \"array\", items: serializedArtifactSchema },\n ],\n};\n\nconst inputParsers: ArtifactInputParser[] = [];\n\nexport const registerArtifactInputParser = (parser: ArtifactInputParser) => {\n inputParsers.push(parser);\n};\n\nexport const clearArtifactInputParsers = () => {\n inputParsers.length = 0;\n};\n\nexport const validateSerializedArtifacts = (data: unknown): SerializedArtifact[] => {\n const ajv = createAjv();\n const parsed = validateOrThrow<SerializedArtifacts>(\n ajv,\n serializedArtifactsSchema,\n data\n );\n return Array.isArray(parsed) ? 
parsed : [parsed];\n};\n\nexport const hydrateSerializedArtifacts = (items: SerializedArtifact[]): Artifact[] => {\n return items.map((item) => ({\n ...item,\n raw: async () => Buffer.from(JSON.stringify(item.contents ?? [])),\n }));\n};\n\nexport const parseSerializedArtifacts = (text: string): SerializedArtifact[] => {\n const parsed = JSON.parse(text) as unknown;\n return validateSerializedArtifacts(parsed);\n};\n\nexport const splitTextIntoContents = (text: string): ArtifactContent[] => {\n const blocks = text\n .split(/\\n\\s*\\n/g)\n .map((block) => block.trim())\n .filter((block) => block.length > 0);\n\n if (blocks.length === 0) {\n return [{ text }];\n }\n\n return blocks.map((block) => ({ text: block }));\n};\n\nconst bufferToTextArtifact = (buffer: Buffer, id?: string): Artifact => {\n const text = buffer.toString();\n return {\n id: id ?? `artifact-${crypto.randomUUID()}`,\n type: \"text\",\n raw: async () => buffer,\n contents: splitTextIntoContents(text),\n };\n};\n\nconst bufferToImageArtifact = (buffer: Buffer, id?: string): Artifact => {\n return {\n id: id ?? `artifact-${crypto.randomUUID()}`,\n type: \"image\",\n raw: async () => buffer,\n contents: [\n {\n media: [{ type: \"image\", contents: buffer }],\n },\n ],\n };\n};\n\nconst parseBufferInput = async (\n buffer: Buffer,\n mimeType: string,\n id?: string,\n providers?: ArtifactProviders,\n parsers?: ParsersConfig,\n includeImages?: boolean,\n screenshots?: boolean,\n screenshotScale?: number,\n screenshotWidth?: number,\n): Promise<Artifact[]> => {\n // Resolution order:\n // 1. parsers config (custom ParserDef) — if MIME type has a configured parser, use it\n if (parsers) {\n const parserDef = parsers[mimeType];\n if (parserDef) {\n return runParser(parserDef, { kind: \"buffer\", buffer }, mimeType);\n }\n }\n\n // 2. providers registry (user-registered ArtifactProvider functions)\n const registry = providers ?? 
defaultArtifactProviders;\n const provider = registry[mimeType];\n if (provider) {\n return [await provider(buffer)];\n }\n\n // JSON auto-detection: if MIME is application/json, try to parse as SerializedArtifact[]\n if (mimeType === \"application/json\") {\n try {\n const parsed = JSON.parse(buffer.toString()) as unknown;\n const serialized = validateSerializedArtifacts(parsed);\n return hydrateSerializedArtifacts(serialized);\n } catch {\n // If no custom parser is configured for application/json, throw clear error\n throw new Error(\n \"Input is JSON but not in SerializedArtifact format. To parse arbitrary JSON files, configure a parser: struktur config parsers add --mime application/json ...\"\n );\n }\n }\n\n // 3. Built-in PDF → pdf artifact\n if (mimeType === \"application/pdf\") {\n const { parsePdf } = await import(\"../parsers/pdf\");\n const pdfOptions: ParsePdfOptions = { \n includeImages, \n screenshots, \n screenshotScale, \n screenshotWidth \n };\n return [await parsePdf(buffer, pdfOptions)];\n }\n\n // 4. Built-in text/* → text artifact\n if (mimeType.startsWith(\"text/\")) {\n return [bufferToTextArtifact(buffer, id)];\n }\n\n // 5. 
Built-in image/* → image artifact\n if (mimeType.startsWith(\"image/\")) {\n return [bufferToImageArtifact(buffer, id)];\n }\n\n throw new Error(`Unsupported MIME type: ${mimeType}`);\n};\n\nconst artifactJsonParser: ArtifactInputParser = {\n name: \"artifact-json\",\n canParse: (input) => input.kind === \"artifact-json\",\n parse: async (input) => {\n if (input.kind !== \"artifact-json\") {\n return [];\n }\n const serialized = validateSerializedArtifacts(input.data);\n return hydrateSerializedArtifacts(serialized);\n },\n};\n\nconst textParser: ArtifactInputParser = {\n name: \"text\",\n canParse: (input) => input.kind === \"text\",\n parse: async (input) => {\n if (input.kind !== \"text\") {\n return [];\n }\n const buffer = Buffer.from(input.text);\n return [bufferToTextArtifact(buffer, input.id)];\n },\n};\n\nconst fileParser: ArtifactInputParser = {\n name: \"file\",\n canParse: (input) => input.kind === \"file\",\n parse: async (input, options) => {\n if (input.kind !== \"file\") {\n return [];\n }\n const mimeType = input.mimeType ?? (await detectMimeType({ filePath: input.path })) ?? \"application/octet-stream\";\n\n // JSON auto-detection: if MIME type is application/json, first try to validate as SerializedArtifact[]\n if (mimeType === \"application/json\") {\n const text = await readFile(input.path, \"utf-8\");\n try {\n const parsed = JSON.parse(text) as unknown;\n const serialized = validateSerializedArtifacts(parsed);\n return hydrateSerializedArtifacts(serialized);\n } catch {\n // Not valid artifact JSON — try custom parser or throw\n if (options?.parsers) {\n const parserDef = options.parsers[mimeType];\n if (parserDef) {\n return runParser(parserDef, { kind: \"file\", path: input.path }, mimeType);\n }\n }\n throw new Error(\n `File \"${input.path}\" is JSON but not in SerializedArtifact format. 
To parse arbitrary JSON files, configure a parser: struktur config parsers add --mime application/json ...`\n );\n }\n }\n\n const buffer = await readFile(input.path);\n return parseBufferInput(\n buffer, \n mimeType, \n input.id, \n options?.providers, \n options?.parsers, \n options?.includeImages,\n options?.screenshots,\n options?.screenshotScale,\n options?.screenshotWidth,\n );\n },\n};\n\nconst bufferParser: ArtifactInputParser = {\n name: \"buffer\",\n canParse: (input) => input.kind === \"buffer\",\n parse: async (input, options) => {\n if (input.kind !== \"buffer\") {\n return [];\n }\n return parseBufferInput(\n input.buffer,\n input.mimeType,\n input.id,\n options?.providers,\n options?.parsers,\n options?.includeImages,\n options?.screenshots,\n options?.screenshotScale,\n options?.screenshotWidth,\n );\n },\n};\n\nexport const parse = async (\n input: ArtifactInput,\n options?: { \n parsers?: ArtifactInputParser[]; \n providers?: ArtifactProviders; \n parserConfig?: ParsersConfig; \n includeImages?: boolean;\n screenshots?: boolean;\n screenshotScale?: number;\n screenshotWidth?: number;\n }\n): Promise<Artifact[]> => {\n const parsers =\n options?.parsers ??\n [\n ...inputParsers,\n artifactJsonParser,\n textParser,\n fileParser,\n bufferParser,\n ];\n const parser = parsers.find((candidate) => candidate.canParse(input));\n\n if (!parser) {\n throw new Error(`No artifact input parser available for ${input.kind}`);\n }\n\n return parser.parse(input, { \n providers: options?.providers, \n parsers: options?.parserConfig, \n includeImages: options?.includeImages,\n screenshots: options?.screenshots,\n screenshotScale: options?.screenshotScale,\n screenshotWidth: options?.screenshotWidth,\n });\n};\n","import path from \"node:path\";\nimport type { NpmParserDef } from \"./types\";\n\n// Magic byte signatures for common file types\nconst MAGIC_BYTES: Array<{ mimeType: string; bytes: number[]; offset?: number }> = [\n // PDF: %PDF\n { mimeType: 
\"application/pdf\", bytes: [0x25, 0x50, 0x44, 0x46] },\n // PNG: 89 50 4E 47\n { mimeType: \"image/png\", bytes: [0x89, 0x50, 0x4e, 0x47] },\n // JPEG: FF D8 FF\n { mimeType: \"image/jpeg\", bytes: [0xff, 0xd8, 0xff] },\n // GIF: GIF8\n { mimeType: \"image/gif\", bytes: [0x47, 0x49, 0x46, 0x38] },\n // ZIP / Office Open XML (DOCX/XLSX/PPTX all start with PK\\x03\\x04)\n {\n mimeType: \"application/zip\",\n bytes: [0x50, 0x4b, 0x03, 0x04],\n },\n];\n\n// WebP has RIFF at offset 0 and WEBP at offset 8\nconst isWebP = (header: Uint8Array): boolean => {\n if (header.length < 12) return false;\n const riff =\n header[0] === 0x52 && header[1] === 0x49 && header[2] === 0x46 && header[3] === 0x46;\n const webp =\n header[8] === 0x57 && header[9] === 0x45 && header[10] === 0x42 && header[11] === 0x50;\n return riff && webp;\n};\n\nconst matchesMagicBytes = (header: Uint8Array, bytes: number[], offset = 0): boolean => {\n if (header.length < offset + bytes.length) return false;\n return bytes.every((b, i) => header[offset + i] === b);\n};\n\nconst detectFromMagicBytes = (header: Uint8Array): string | null => {\n if (isWebP(header)) return \"image/webp\";\n\n for (const { mimeType, bytes, offset } of MAGIC_BYTES) {\n if (matchesMagicBytes(header, bytes, offset ?? 
0)) {\n return mimeType;\n }\n }\n\n return null;\n};\n\n// Extension → MIME type lookup\nconst EXTENSION_MIME_MAP: Record<string, string> = {\n \".txt\": \"text/plain\",\n \".md\": \"text/markdown\",\n \".markdown\": \"text/markdown\",\n \".html\": \"text/html\",\n \".htm\": \"text/html\",\n \".json\": \"application/json\",\n \".pdf\": \"application/pdf\",\n \".png\": \"image/png\",\n \".jpg\": \"image/jpeg\",\n \".jpeg\": \"image/jpeg\",\n \".gif\": \"image/gif\",\n \".webp\": \"image/webp\",\n \".csv\": \"text/csv\",\n \".xml\": \"application/xml\",\n \".yaml\": \"application/yaml\",\n \".yml\": \"application/yaml\",\n \".docx\":\n \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\",\n \".xlsx\":\n \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\",\n \".pptx\":\n \"application/vnd.openxmlformats-officedocument.presentationml.presentation\",\n \".mp4\": \"video/mp4\",\n \".mp3\": \"audio/mpeg\",\n \".wav\": \"audio/wav\",\n \".ogg\": \"audio/ogg\",\n \".svg\": \"image/svg+xml\",\n \".ts\": \"text/plain\",\n \".tsx\": \"text/plain\",\n \".js\": \"text/javascript\",\n \".jsx\": \"text/javascript\",\n \".css\": \"text/css\",\n \".toml\": \"application/toml\",\n};\n\nexport type NpmParserEntry = {\n mimeType: string;\n def: NpmParserDef;\n};\n\nexport async function detectMimeType(options: {\n buffer?: Buffer;\n filePath?: string;\n mimeOverride?: string;\n npmParsers?: NpmParserEntry[];\n}): Promise<string | null> {\n const { buffer, filePath, mimeOverride, npmParsers } = options;\n\n // --mime override takes precedence\n if (mimeOverride) {\n return mimeOverride;\n }\n\n // Layer 1: magic bytes (authoritative)\n if (buffer && buffer.length > 0) {\n const header = buffer.subarray(0, 512);\n const magicMime = detectFromMagicBytes(header);\n if (magicMime) {\n return magicMime;\n }\n\n // Layer 3: npm parser detectFileType callbacks (after built-ins)\n if (npmParsers && npmParsers.length > 0) {\n for (const entry of 
npmParsers) {\n try {\n const mod = await import(entry.def.package) as {\n detectFileType?: (header: Uint8Array) => boolean;\n };\n if (typeof mod.detectFileType === \"function\" && mod.detectFileType(header)) {\n return entry.mimeType;\n }\n } catch {\n // If the package fails to load, skip it\n }\n }\n }\n }\n\n // Layer 2: extension database (for file inputs)\n if (filePath) {\n const ext = path.extname(filePath).toLowerCase();\n if (ext && ext in EXTENSION_MIME_MAP) {\n return EXTENSION_MIME_MAP[ext] ?? null;\n }\n }\n\n return null;\n}\n","export type { ParserDef, ParsersConfig, NpmParserDef, CommandFileDef, CommandStdinDef, InlineParserDef, ParserInput } from \"./types\";\nexport { runParser } from \"./runner\";\nexport { detectMimeType } from \"./mime\";\nexport { collectStream } from \"./collect\";\nexport { parsePdf } from \"./pdf\";\nexport type { ParsePdfOptions } from \"./pdf\";\n"],"mappings":";;;;;;AAKA,eAAsB,cAAc,QAAqD;AACvF,QAAM,SAAS,OAAO,UAAU;AAChC,QAAM,SAAuB,CAAC;AAE9B,MAAI;AACF,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,MAAM;AACR;AAAA,MACF;AACA,aAAO,KAAK,KAAK;AAAA,IACnB;AAAA,EACF,UAAE;AACA,WAAO,YAAY;AAAA,EACrB;AAEA,QAAM,cAAc,OAAO,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,QAAQ,CAAC;AACvE,QAAM,SAAS,IAAI,WAAW,WAAW;AACzC,MAAI,SAAS;AACb,aAAW,SAAS,QAAQ;AAC1B,WAAO,IAAI,OAAO,MAAM;AACxB,cAAU,MAAM;AAAA,EAClB;AAEA,SAAO,OAAO,KAAK,MAAM;AAC3B;AA9BA;AAAA;AAAA;AAAA;AAAA;;;ACqCA,eAAsB,SACpB,OACA,SACmB;AACnB,QAAM,SAAS,OAAO,SAAS,KAAK,IAAI,QAAQ,MAAM,cAAc,KAAK;AAGzE,QAAM,EAAE,SAAS,IAAI,MAAM,OAAO,WAAW;AAE7C,QAAM,SAAS,IAAI,SAAS,EAAE,MAAM,OAAO,CAAC;AAC5C,QAAM,aAAa,MAAM,OAAO,QAAQ;AAGxC,QAAM,cAAc,oBAAI,IAAoB;AAC5C,MAAI,WAAW,MAAM,SAAS,GAAG;AAC/B,eAAW,QAAQ,WAAW,OAAO;AACnC,UAAI,KAAK,QAAQ,KAAK,KAAK,KAAK,EAAE,SAAS,GAAG;AAC5C,oBAAY,IAAI,KAAK,KAAK,KAAK,IAAI;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AAIA,MAAI;AACJ,MAAI,SAAS,kBAAkB,OAAO;AACpC,QAAI;AACF,oBAAc,MAAM,OAAO,SAAS,EAAE,aAAa,OAAO,cAAc,KAAK,CAAC;AAAA,IAChF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI;AACJ,MAAI,SAAS,gBAAgB,MAAM;AACjC,Q
AAI;AACF,YAAM,mBAKF,EAAE,aAAa,OAAO,cAAc,KAAK;AAE7C,UAAI,QAAQ,oBAAoB,QAAW;AACzC,yBAAiB,eAAe,QAAQ;AAAA,MAC1C,OAAO;AACL,yBAAiB,QAAQ,QAAQ,mBAAmB;AAAA,MACtD;AAEA,yBAAmB,MAAM,OAAO,cAAc,gBAAgB;AAAA,IAChE,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,QAAM,eAAe,oBAAI,IAA6B;AACtD,MAAI,aAAa;AACf,eAAW,cAAc,YAAY,OAAO;AAC1C,YAAM,iBAAkC,WAAW,OAChD,OAAO,CAAC,QAAQ,IAAI,OAAO,EAC3B,IAAI,CAAC,QAAQ;AAEZ,cAAM,SAAS,IAAI,QAAQ,QAAQ,uBAAuB,EAAE;AAC5D,cAAM,gBAA+B;AAAA,UACnC,MAAM;AAAA,UACN;AAAA,UACA,OAAO,IAAI;AAAA,UACX,QAAQ,IAAI;AAAA,UACZ,WAAW;AAAA,QACb;AACA,eAAO;AAAA,MACT,CAAC;AACH,UAAI,eAAe,SAAS,GAAG;AAC7B,qBAAa,IAAI,WAAW,YAAY,cAAc;AAAA,MACxD;AAAA,IACF;AAAA,EACF;AAGA,MAAI,kBAAkB;AACpB,eAAW,cAAc,iBAAiB,OAAO;AAC/C,UAAI,WAAW,SAAS;AAEtB,cAAM,SAAS,WAAW,QAAQ,QAAQ,uBAAuB,EAAE;AACnE,cAAM,gBAA+B;AAAA,UACnC,MAAM;AAAA,UACN;AAAA,UACA,OAAO,WAAW;AAAA,UAClB,QAAQ,WAAW;AAAA,UACnB,WAAW;AAAA,QACb;AAEA,cAAM,WAAW,aAAa,IAAI,WAAW,UAAU,KAAK,CAAC;AAC7D,qBAAa,IAAI,WAAW,YAAY,CAAC,GAAG,UAAU,aAAa,CAAC;AAAA,MACtE;AAAA,IACF;AAAA,EACF;AAEA,MAAI;AAEJ,MAAI,WAAW,MAAM,SAAS,GAAG;AAE/B,UAAM,cAAc,oBAAI,IAAY;AAAA,MAClC,GAAG,YAAY,KAAK;AAAA,MACpB,GAAG,aAAa,KAAK;AAAA,IACvB,CAAC;AAED,eAAW,MAAM,KAAK,WAAW,EAC9B,KAAK,CAAC,GAAG,MAAM,IAAI,CAAC,EACpB,IAAI,CAAC,YAAY;AAChB,YAAM,QAAyB,EAAE,MAAM,QAAQ;AAC/C,YAAM,OAAO,YAAY,IAAI,OAAO;AACpC,UAAI,KAAM,OAAM,OAAO;AACvB,YAAM,QAAQ,aAAa,IAAI,OAAO;AACtC,UAAI,MAAO,OAAM,QAAQ;AACzB,aAAO;AAAA,IACT,CAAC;AAAA,EACL,OAAO;AAEL,UAAM,QAAyB,EAAE,MAAM,WAAW,KAAK;AAEvD,UAAM,kBAAkB,aAAa,OAAO,IACxC,aAAa,OAAO,EAAE,KAAK,EAAE,QAC7B;AACJ,QAAI,gBAAiB,OAAM,QAAQ;AACnC,eAAW,CAAC,KAAK;AAAA,EACnB;AAGA,MAAI,SAAS,WAAW,GAAG;AACzB,eAAW,CAAC,EAAE,MAAM,GAAG,CAAC;AAAA,EAC1B;AAEA,MAAI;AACJ,MAAI;AACF,iBAAa,MAAM,OAAO,QAAQ;AAAA,EACpC,QAAQ;AAAA,EAER;AAEA,QAAM,OAAO,QAAQ;AAErB,SAAO;AAAA,IACL,IAAI,YAAY,OAAO,WAAW,CAAC;AAAA,IACnC,MAAM;AAAA,IACN,KAAK,YAAY;AAAA,IACjB;AAAA,IACA,UAAU,aACN;AAAA,MACE,UAAU,WAAW;AAAA,MACrB,MAAM;AAAA,IACR,IACA,EAAE,UAAU,WAAW,MAAM;AAAA,EACnC;AACF;AAjMA;AAAA;AAAA;AACA;AAAA;AAAA;;;ACDA,OAAO,QAAQ;AACf,OAAOA,WAAU;AACjB,SAAS,IAAI,WAAW,YAAAC,iBAA
gB;AACxC,SAAS,YAAY;AACrB,SAAS,iBAAiB;;;ACJ1B,OAAO,SAAoE;AAC3E,OAAO,gBAAgB;AAUhB,IAAM,wBAAN,cAAoC,MAAM;AAAA,EAC/B;AAAA,EAEhB,YAAY,SAAiB,QAA0B;AACrD,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,SAAS;AAAA,EAChB;AACF;AAEA,IAAM,sBAAsB;AAErB,IAAM,YAAY,MAAM;AAC7B,QAAM,MAAM,IAAI,IAAI;AAAA,IAClB,WAAW;AAAA,IACX,QAAQ;AAAA,IACR,iBAAiB;AAAA,EACnB,CAAC;AACD,aAAW,GAAG;AAEd,MAAI,UAAU,eAAe;AAAA,IAC3B,MAAM;AAAA,IACN,UAAU,CAAC,SAAiB,oBAAoB,KAAK,IAAI;AAAA,EAC3D,CAAC;AAED,SAAO;AACT;AAIO,IAAM,kBAAkB,CAC7B,KACA,QACA,SACM;AACN,QAAM,WAAW,IAAI,QAAW,MAAM;AACtC,QAAM,QAAQ,SAAS,IAAI;AAE3B,MAAI,CAAC,OAAO;AACV,UAAM,SAAS,SAAS,UAAU,CAAC;AACnC,UAAM,UAAU;AAChB,UAAM,IAAI,sBAAsB,SAAS,MAAM;AAAA,EACjD;AAEA,SAAO;AACT;;;AClDA,SAAS,gBAAgB;;;ACNzB,OAAO,UAAU;AAIjB,IAAM,cAA6E;AAAA;AAAA,EAEjF,EAAE,UAAU,mBAAmB,OAAO,CAAC,IAAM,IAAM,IAAM,EAAI,EAAE;AAAA;AAAA,EAE/D,EAAE,UAAU,aAAa,OAAO,CAAC,KAAM,IAAM,IAAM,EAAI,EAAE;AAAA;AAAA,EAEzD,EAAE,UAAU,cAAc,OAAO,CAAC,KAAM,KAAM,GAAI,EAAE;AAAA;AAAA,EAEpD,EAAE,UAAU,aAAa,OAAO,CAAC,IAAM,IAAM,IAAM,EAAI,EAAE;AAAA;AAAA,EAEzD;AAAA,IACE,UAAU;AAAA,IACV,OAAO,CAAC,IAAM,IAAM,GAAM,CAAI;AAAA,EAChC;AACF;AAGA,IAAM,SAAS,CAAC,WAAgC;AAC9C,MAAI,OAAO,SAAS,GAAI,QAAO;AAC/B,QAAM,OACJ,OAAO,CAAC,MAAM,MAAQ,OAAO,CAAC,MAAM,MAAQ,OAAO,CAAC,MAAM,MAAQ,OAAO,CAAC,MAAM;AAClF,QAAM,OACJ,OAAO,CAAC,MAAM,MAAQ,OAAO,CAAC,MAAM,MAAQ,OAAO,EAAE,MAAM,MAAQ,OAAO,EAAE,MAAM;AACpF,SAAO,QAAQ;AACjB;AAEA,IAAM,oBAAoB,CAAC,QAAoB,OAAiB,SAAS,MAAe;AACtF,MAAI,OAAO,SAAS,SAAS,MAAM,OAAQ,QAAO;AAClD,SAAO,MAAM,MAAM,CAAC,GAAG,MAAM,OAAO,SAAS,CAAC,MAAM,CAAC;AACvD;AAEA,IAAM,uBAAuB,CAAC,WAAsC;AAClE,MAAI,OAAO,MAAM,EAAG,QAAO;AAE3B,aAAW,EAAE,UAAU,OAAO,OAAO,KAAK,aAAa;AACrD,QAAI,kBAAkB,QAAQ,OAAO,UAAU,CAAC,GAAG;AACjD,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAGA,IAAM,qBAA6C;AAAA,EACjD,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,aAAa;AAAA,EACb,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SACE;AAAA,EACF,SACE;AAAA,EACF,SACE;AAAA,EACF,QAAQ;A
AAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AACX;AAOA,eAAsB,eAAe,SAKV;AACzB,QAAM,EAAE,QAAQ,UAAU,cAAc,WAAW,IAAI;AAGvD,MAAI,cAAc;AAChB,WAAO;AAAA,EACT;AAGA,MAAI,UAAU,OAAO,SAAS,GAAG;AAC/B,UAAM,SAAS,OAAO,SAAS,GAAG,GAAG;AACrC,UAAM,YAAY,qBAAqB,MAAM;AAC7C,QAAI,WAAW;AACb,aAAO;AAAA,IACT;AAGA,QAAI,cAAc,WAAW,SAAS,GAAG;AACvC,iBAAW,SAAS,YAAY;AAC9B,YAAI;AACF,gBAAM,MAAM,MAAM,OAAO,MAAM,IAAI;AAGnC,cAAI,OAAO,IAAI,mBAAmB,cAAc,IAAI,eAAe,MAAM,GAAG;AAC1E,mBAAO,MAAM;AAAA,UACf;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,MAAI,UAAU;AACZ,UAAM,MAAM,KAAK,QAAQ,QAAQ,EAAE,YAAY;AAC/C,QAAI,OAAO,OAAO,oBAAoB;AACpC,aAAO,mBAAmB,GAAG,KAAK;AAAA,IACpC;AAAA,EACF;AAEA,SAAO;AACT;;;AD1FA,IAAM,gCAAgC;AAAA,EACpC,MAAM;AAAA,EACN,UAAU,CAAC,MAAM;AAAA,EACjB,YAAY;AAAA,IACV,MAAM,EAAE,OAAO,QAAQ;AAAA,IACvB,KAAK,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,IACpC,QAAQ,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,IACvC,MAAM,EAAE,MAAM,SAAS;AAAA,IACvB,GAAG,EAAE,MAAM,SAAS;AAAA,IACpB,GAAG,EAAE,MAAM,SAAS;AAAA,IACpB,OAAO,EAAE,MAAM,SAAS;AAAA,IACxB,QAAQ,EAAE,MAAM,SAAS;AAAA,IACzB,WAAW,EAAE,MAAM,CAAC,YAAY,YAAY,EAAE;AAAA,EAChD;AAAA,EACA,sBAAsB;AAAA,EACtB,OAAO,CAAC,EAAE,UAAU,CAAC,KAAK,EAAE,GAAG,EAAE,UAAU,CAAC,QAAQ,EAAE,CAAC;AACzD;AAEA,IAAM,kCAAkC;AAAA,EACtC,MAAM;AAAA,EACN,YAAY;AAAA,IACV,MAAM,EAAE,MAAM,SAAS;AAAA,IACvB,MAAM,EAAE,MAAM,SAAS;AAAA,IACvB,OAAO,EAAE,MAAM,SAAS,OAAO,8BAA8B;AAAA,EAC/D;AAAA,EACA,sBAAsB;AAAA,EACtB,OAAO,CAAC,EAAE,UAAU,CAAC,MAAM,EAAE,GAAG,EAAE,UAAU,CAAC,OAAO,EAAE,CAAC;AACzD;AAEA,IAAM,2BAA2B;AAAA,EAC/B,MAAM;AAAA,EACN,UAAU,CAAC,MAAM,QAAQ,UAAU;AAAA,EACnC,YAAY;AAAA,IACV,IAAI,EAAE,MAAM,UAAU,WAAW,EAAE;AAAA,IACnC,MAAM,EAAE,MAAM,CAAC,QAAQ,SAAS,OAAO,MAAM,EAAoB;AAAA,IACjE,UAAU,EAAE,MAAM,SAAS,OAAO,gCAAgC;AAAA,IAClE,UAAU,EAAE,MAAM,UAAU,sBAAsB,KAAK;AAAA,IACvD,QAAQ,EAAE,MAAM,SAAS;AAAA,EAC3B;AAAA,EACA,sBAAsB;AACxB;AAEA,IAAM,4BAA4B;AAAA,EAChC,OAAO;AAAA,IACL;AAAA,IACA,EAAE,MAAM,SAAS,OAAO,yBAAyB;AAAA,EACnD;AACF;AAYO,IAAM,8BAA8B,CAAC,SAAwC;AAClF,QA
AM,MAAM,UAAU;AACtB,QAAM,SAAS;AAAA,IACb;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACA,SAAO,MAAM,QAAQ,MAAM,IAAI,SAAS,CAAC,MAAM;AACjD;AAEO,IAAM,6BAA6B,CAAC,UAA4C;AACrF,SAAO,MAAM,IAAI,CAAC,UAAU;AAAA,IAC1B,GAAG;AAAA,IACH,KAAK,YAAY,OAAO,KAAK,KAAK,UAAU,KAAK,YAAY,CAAC,CAAC,CAAC;AAAA,EAClE,EAAE;AACJ;;;AF3GA,IAAM,YAAY,UAAU,IAAI;AAEhC,IAAM,qBAAqB,CAAC,WAA+B;AACzD,MAAI;AACJ,MAAI;AACF,aAAS,KAAK,MAAM,MAAM;AAAA,EAC5B,SAAS,OAAO;AACd,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AACrE,UAAM,IAAI,MAAM,yCAAyC,OAAO;AAAA,UAAa,OAAO,MAAM,GAAG,GAAG,CAAC,EAAE;AAAA,EACrG;AACA,QAAM,aAAa,4BAA4B,MAAM;AACrD,SAAO,2BAA2B,UAAU;AAC9C;AAEA,IAAM,kBAAkB,OAAO,SAAiB,gBAA0C;AACxF,MAAI,CAAC,QAAQ,KAAK,GAAG;AACnB,UAAM,IAAI,MAAM,kBAAkB,OAAO,EAAE;AAAA,EAC7C;AAEA,MAAI;AACF,UAAM,UAAU,cACZ,EAAE,OAAO,YAAY,SAAS,GAAG,WAAW,KAAK,OAAO,KAAK,IAC7D,EAAE,WAAW,KAAK,OAAO,KAAK;AAClC,UAAM,EAAE,OAAO,IAAI,MAAM,UAAU,SAAS,OAAO;AACnD,WAAO;AAAA,EACT,SAAS,OAAO;AACd,QAAI,iBAAiB,SAAS,YAAY,OAAO;AAC/C,YAAM,SAAU,MAA6B;AAC7C,YAAM,IAAI;AAAA,QACR,0BAA0B,OAAO;AAAA,UAAa,QAAQ,MAAM,GAAG,GAAG,KAAK,EAAE;AAAA,MAC3E;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAEA,IAAM,eAAe,OACnB,KACA,OACA,aACwB;AACxB,QAAM,MAAO,MAAM,OAAO;AAE1B,QAAM,eAAe,OAAO,IAAI,cAAc;AAC9C,QAAM,iBAAiB,OAAO,IAAI,gBAAgB;AAElD,MAAI,CAAC,gBAAgB,CAAC,gBAAgB;AACpC,UAAM,IAAI;AAAA,MACR,uBAAuB,GAAG;AAAA,IAC5B;AAAA,EACF;AAEA,MAAI,MAAM,SAAS,QAAQ;AAEzB,QAAI,cAAc;AAChB,aAAO,IAAI,UAAW,MAAM,MAAM,QAAQ;AAAA,IAC5C;AAEA,UAAM,EAAE,iBAAiB,IAAI,MAAM,OAAO,IAAS;AACnD,UAAM,EAAE,SAAS,IAAI,MAAM,OAAO,QAAa;AAC/C,UAAM,aAAa,iBAAiB,MAAM,IAAI;AAC9C,UAAM,SAAS,SAAS,MAAM,UAAU;AACxC,WAAO,IAAI,YAAa,QAAQ,QAAQ;AAAA,EAC1C;AAGA,MAAI,gBAAgB;AAElB,UAAM,SAAS,IAAI,eAA2B;AAAA,MAC5C,MAAM,YAAY;AAChB,mBAAW,QAAQ,MAAM,MAAM;AAC/B,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AACD,WAAO,IAAI,YAAa,QAAQ,QAAQ;AAAA,EAC1C;AAGA,QAAM,UAAUC,MAAK,KAAK,GAAG,OAAO,GAAG,kBAAkB,OAAO,WAAW,CAAC,EAAE;AAC9E,MAAI;AACF,UAAM,UAAU,SAAS,MAAM,MAAM;AACrC,WAAO,MAAM,IAAI,UAAW,SAAS,QAAQ;AAAA,EAC/C,UAAE;AACA,UAAM,GAAG,SAAS,EAAE,OAAO,KAAK,CAAC;AAAA,EACnC;AACF;AAEA,IAAM,uBAAuB,OAC3B,SACA,UACwB;AACxB
,MAAI;AACJ,MAAI,WAA0B;AAE9B,MAAI,MAAM,SAAS,QAAQ;AACzB,eAAW,MAAM;AAAA,EACnB,OAAO;AAEL,eAAWA,MAAK,KAAK,GAAG,OAAO,GAAG,kBAAkB,OAAO,WAAW,CAAC,EAAE;AACzE,UAAM,UAAU,UAAU,MAAM,MAAM;AACtC,eAAW;AAAA,EACb;AAEA,MAAI;AACF,UAAM,eAAe,QAAQ,QAAQ,cAAc,QAAQ;AAC3D,UAAM,SAAS,MAAM,gBAAgB,YAAY;AACjD,WAAO,mBAAmB,MAAM;AAAA,EAClC,UAAE;AACA,QAAI,UAAU;AACZ,YAAM,GAAG,UAAU,EAAE,OAAO,KAAK,CAAC;AAAA,IACpC;AAAA,EACF;AACF;AAEA,IAAM,wBAAwB,OAC5B,SACA,UACwB;AACxB,MAAI;AAEJ,MAAI,MAAM,SAAS,QAAQ;AACzB,aAAS,MAAMC,UAAS,MAAM,IAAI;AAAA,EACpC,OAAO;AACL,aAAS,MAAM;AAAA,EACjB;AAEA,QAAM,SAAS,MAAM,gBAAgB,SAAS,MAAM;AACpD,SAAO,mBAAmB,MAAM;AAClC;AAEO,IAAM,YAAY,OACvB,KACA,OACA,aACwB;AACxB,UAAQ,IAAI,MAAM;AAAA,IAChB,KAAK;AACH,aAAO,aAAa,IAAI,SAAS,OAAO,QAAQ;AAAA,IAClD,KAAK;AACH,aAAO,qBAAqB,IAAI,SAAS,KAAK;AAAA,IAChD,KAAK;AACH,aAAO,sBAAsB,IAAI,SAAS,KAAK;AAAA,IACjD,KAAK,UAAU;AACb,UAAI;AACJ,UAAI,MAAM,SAAS,QAAQ;AACzB,iBAAS,MAAMA,UAAS,MAAM,IAAI;AAAA,MACpC,OAAO;AACL,iBAAS,MAAM;AAAA,MACjB;AACA,aAAO,CAAC,MAAM,IAAI,QAAQ,MAAM,CAAC;AAAA,IACnC;AAAA,IACA,SAAS;AACP,YAAM,cAAqB;AAC3B,YAAM,IAAI,MAAM,wBAAyB,YAAiC,IAAI,EAAE;AAAA,IAClF;AAAA,EACF;AACF;;;AItKA;AACA;","names":["path","readFile","path","readFile"]}