mdmeld 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 MDMeld Contributors
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,100 @@
1
+ # mdmeld
2
+
3
+ **Pack Once. Share Anywhere.** — The markdown-based file archive format designed for seamless collaboration between humans and AI assistants.
4
+
5
+ MDMeld encodes files into a human-readable markdown archive with YAML metadata, position indexing for efficient AI navigation, and integrity hashes for verification.
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ npm install mdmeld
11
+ ```
12
+
13
+ ## CLI
14
+
15
+ ```bash
16
+ # Pack a directory into an archive
17
+ npx mdmeld pack ./my-project -o archive.mdmeld
18
+
19
+ # Unpack an archive
20
+ npx mdmeld unpack archive.mdmeld -o ./output
21
+
22
+ # Finalize hashes in an AI-generated archive
23
+ npx mdmeld finalize archive.mdmeld
24
+ ```
25
+
26
+ ## API
27
+
28
+ ```typescript
29
+ import { pack, unpack, check } from "mdmeld";
30
+
31
+ // Pack files into an archive string
32
+ const archive = await pack([
33
+ {
34
+ path: "src/index.ts",
35
+ content: new TextEncoder().encode("export const x = 1;"),
36
+ },
37
+ { path: "README.md", content: new TextEncoder().encode("# Hello") },
38
+ ]);
39
+
40
+ // Unpack an archive
41
+ const { files, manifest } = await unpack(archive);
42
+
43
+ // Validate integrity
44
+ const { valid, errors } = await check(archive);
45
+ ```
46
+
47
+ ## Web
48
+
49
+ A static web tool for packing folders via drag-and-drop is included in `web/`. Run locally:
50
+
51
+ ```bash
52
+ npm run dev
53
+ ```
54
+
55
+ ## Format
56
+
57
+ An MDMeld archive is a markdown file with:
58
+
59
+ 1. A self-describing HTML comment header
60
+ 2. YAML frontmatter with file metadata and position indexes
61
+ 3. File contents in fenced code blocks
62
+
63
+ ````markdown
64
+ <!-- MDMeld v1.0 archive -->
65
+ ---
66
+ mdmeld:
67
+   version: "1.0.0"
68
+   hash_algorithm: xxh64
69
+   backtick_count: 4
70
+   files:
71
+     - path: src/index.ts
72
+       type: text
73
+       size: 42
74
+       hash: "abc123"
75
+       syntax: typescript
76
+       position:
77
+         start: 20
78
+         fence: 22
79
+         content: 23
80
+         length: 3
81
+ integrity:
82
+   manifest_hash: "..."
83
+   content_hash: "..."
84
+ ---
85
+
86
+ ### src/index.ts
87
+
88
+ ```typescript
89
+ export const x = 1;
90
+ ```
91
+ ````
92
+
93
+
94
+
95
+ Position metadata enables AI assistants to navigate directly to specific files without reading the entire archive.
96
+
97
+ ## License
98
+
99
+ MIT
100
+
@@ -0,0 +1,426 @@
1
+ #!/usr/bin/env node
2
+ import {
3
+ ARCHIVE_HEADER,
4
+ FORMAT_VERSION,
5
+ HASH_PLACEHOLDER,
6
+ MAX_BACKTICK_COUNT,
7
+ MIN_BACKTICK_COUNT,
8
+ computeHash,
9
+ computeHashBytes,
10
+ decodeBase64,
11
+ dump,
12
+ encodeBase64,
13
+ load
14
+ } from "./chunk-UJZ43GZC.js";
15
+
16
// src/core/backticks.ts
/**
 * Return the length of the longest run of consecutive backticks in `content`.
 * The packer uses this to size archive fences wider than any run the
 * content itself contains.
 */
function scanForMaxBackticks(content) {
  let longest = 0;
  for (const run of content.matchAll(/`+/g)) {
    const size = run[0].length;
    if (size > longest) longest = size;
  }
  return longest;
}
30
/**
 * Pick the fence width needed to safely wrap content whose longest backtick
 * run is `maxFoundInContent`: one more than that run, but never below
 * MIN_BACKTICK_COUNT. Returns null when the required width would exceed
 * MAX_BACKTICK_COUNT and the content cannot be fenced at all.
 */
function resolveBacktickCount(maxFoundInContent) {
  const width = Math.max(MIN_BACKTICK_COUNT, maxFoundInContent + 1);
  return width > MAX_BACKTICK_COUNT ? null : width;
}
35
/** Build a fence string of `count` backticks. */
function generateBackticks(count) {
  const tick = "`";
  return tick.repeat(count);
}
38
+
39
// src/core/detect.ts
/**
 * Heuristically decide whether a Uint8Array holds text rather than binary
 * data, by byte distribution over (at most) the first 8KB:
 *   - any NUL byte => binary
 *   - more than 5% non-whitespace control bytes => binary
 *   - for samples longer than 32 bytes, less than 80% printable
 *     ASCII / tab / LF / CR => binary
 * Bytes 0x80-0xFF are neither counted for nor against (possible UTF-8).
 * Empty input counts as text.
 */
function isText(data) {
  if (data.length === 0) return true;
  const sample = data.subarray(0, Math.min(data.length, 8192));
  const len = sample.length;
  let nulls = 0;
  let controls = 0;
  let printable = 0;
  for (const byte of sample) {
    if (byte === 0) {
      nulls += 1;
    } else if (byte === 9 || byte === 10 || byte === 13) {
      printable += 1;
    } else if (byte < 32) {
      controls += 1;
    } else if (byte <= 126) {
      printable += 1;
    }
  }
  if (nulls > 0) return false;
  if (controls / len > 0.05) return false;
  return !(len > 32 && printable / len < 0.8);
}
64
+
65
// src/core/syntax.ts
/**
 * File extension (lowercased, including the leading dot) → markdown fence
 * language identifier. getSyntax lowercases the extension before lookup,
 * so every key here must be lowercase.
 */
var EXT_TO_SYNTAX = {
  // Web
  ".html": "html",
  ".htm": "html",
  ".css": "css",
  ".js": "javascript",
  ".mjs": "javascript",
  ".cjs": "javascript",
  ".jsx": "jsx",
  ".ts": "typescript",
  ".tsx": "tsx",
  ".mts": "typescript",
  ".cts": "typescript",
  ".vue": "vue",
  ".svelte": "svelte",
  ".astro": "astro",
  // Data
  ".json": "json",
  ".jsonc": "jsonc",
  ".yaml": "yaml",
  ".yml": "yaml",
  ".toml": "toml",
  ".xml": "xml",
  ".csv": "csv",
  ".graphql": "graphql",
  ".gql": "graphql",
  // Config
  ".env": "dotenv",
  ".ini": "ini",
  ".conf": "conf",
  ".cfg": "ini",
  // Shell
  ".sh": "bash",
  ".bash": "bash",
  ".zsh": "zsh",
  ".fish": "fish",
  ".ps1": "powershell",
  ".bat": "batch",
  ".cmd": "batch",
  // Systems
  ".c": "c",
  ".h": "c",
  ".cpp": "cpp",
  ".cc": "cpp",
  ".cxx": "cpp",
  ".hpp": "cpp",
  ".rs": "rust",
  ".go": "go",
  ".java": "java",
  ".kt": "kotlin",
  ".kts": "kotlin",
  ".swift": "swift",
  ".cs": "csharp",
  ".fs": "fsharp",
  ".zig": "zig",
  // Scripting
  ".py": "python",
  ".rb": "ruby",
  ".php": "php",
  ".pl": "perl",
  ".lua": "lua",
  // ".R" entry removed: getSyntax lowercases extensions before lookup, so
  // an uppercase key was unreachable dead code; ".r" covers both cases.
  ".r": "r",
  ".jl": "julia",
  ".ex": "elixir",
  ".exs": "elixir",
  ".erl": "erlang",
  ".clj": "clojure",
  ".scala": "scala",
  ".dart": "dart",
  // Markup / Docs
  ".md": "markdown",
  ".mdx": "mdx",
  ".rst": "rst",
  ".tex": "latex",
  ".typ": "typst",
  // DevOps
  ".dockerfile": "dockerfile",
  ".tf": "terraform",
  ".hcl": "hcl",
  ".nix": "nix",
  // SQL
  ".sql": "sql",
  // Misc
  ".diff": "diff",
  ".patch": "diff",
  ".prisma": "prisma",
  ".proto": "protobuf",
  ".wasm": "wasm",
  ".lock": "text",
  ".log": "text",
  ".txt": "text"
};
159
/**
 * Exact, case-sensitive file names (no extension logic) → fence language
 * identifier. Consulted before the extension table in getSyntax.
 */
var NAME_TO_SYNTAX = Object.fromEntries([
  ["Dockerfile", "dockerfile"],
  ["Makefile", "makefile"],
  ["Containerfile", "dockerfile"],
  ["Justfile", "makefile"],
  ["Vagrantfile", "ruby"],
  ["Gemfile", "ruby"],
  ["Rakefile", "ruby"],
  [".gitignore", "gitignore"],
  [".gitattributes", "gitattributes"],
  [".editorconfig", "editorconfig"],
  [".prettierrc", "json"],
  [".eslintrc", "json"]
]);
173
/**
 * Resolve the fence language identifier for a file path: well-known file
 * names first (NAME_TO_SYNTAX), then the lowercased extension
 * (EXT_TO_SYNTAX), falling back to "text".
 */
function getSyntax(filePath) {
  const fileName = filePath.split("/").pop() ?? "";
  // Own-property guard: with a bare truthy lookup, a file literally named
  // "constructor" or "toString" would hit Object.prototype and return a
  // function instead of a language string.
  if (Object.prototype.hasOwnProperty.call(NAME_TO_SYNTAX, fileName)) {
    return NAME_TO_SYNTAX[fileName];
  }
  const dotIdx = fileName.lastIndexOf(".");
  if (dotIdx === -1) return "text";
  const ext = fileName.slice(dotIdx).toLowerCase();
  // Extension keys all start with "." and cannot collide with
  // Object.prototype properties, so a plain lookup is safe here.
  return EXT_TO_SYNTAX[ext] ?? "text";
}
182
+
183
// src/core/pack.ts
var TEXT_DECODER = new TextDecoder();

/**
 * Pack a list of { path, content: Uint8Array } files into an MDMeld archive
 * string: the self-describing ARCHIVE_HEADER, YAML frontmatter (manifest
 * plus integrity hashes), and one fenced block per file.
 *
 * options.hashAlgorithm defaults to "xxh64"; options.created defaults to
 * the current ISO-8601 timestamp. Throws when any file contains a backtick
 * run too long to fence (see resolveBacktickCount).
 */
async function pack(files, options = {}) {
  const hashAlgorithm = options.hashAlgorithm ?? "xxh64";
  const created = options.created ?? (/* @__PURE__ */ new Date()).toISOString();
  // Pass 1: classify each file as text or binary, and track the longest
  // backtick run across all text content so the shared fence can be sized
  // to contain it.
  let maxBackticks = 0;
  const fileInfos = [];
  for (const file of files) {
    const textFile = isText(file.content);
    if (textFile) {
      const text = TEXT_DECODER.decode(file.content);
      const found = scanForMaxBackticks(text);
      if (found > maxBackticks) maxBackticks = found;
      fileInfos.push({ file, isTextFile: true, textContent: text, base64Content: null });
    } else {
      // Binary payloads are stored base64-encoded inside the fence.
      const b64 = encodeBase64(file.content);
      fileInfos.push({ file, isTextFile: false, textContent: null, base64Content: b64 });
    }
  }
  const backtickCount = resolveBacktickCount(maxBackticks);
  if (backtickCount === null) {
    throw new Error(
      "Cannot pack: file content contains a backtick run of 8 or more characters, which exceeds the maximum allowed fence width. Remove the excessive backtick sequences or exclude the file."
    );
  }
  const fence = generateBackticks(backtickCount);
  // Pass 2: render each file's fenced block and its manifest entry.
  const contentBlocks = [];
  const fileEntries = [];
  for (const info of fileInfos) {
    const path = info.file.path;
    let contentStr;
    let fenceLang;
    let type;
    let syntax;
    let encoding;
    // Hash is computed over the original raw bytes, not the (possibly
    // base64-encoded) serialized form.
    const hash = await computeHashBytes(info.file.content, hashAlgorithm);
    if (info.textContent !== null) {
      contentStr = info.textContent;
      type = "text";
      syntax = getSyntax(path);
      fenceLang = syntax;
      encoding = void 0;
    } else {
      const b64 = info.base64Content;
      contentStr = b64;
      // This branch only runs when textContent is null, which pass 1 pairs
      // with isTextFile === false — so this always yields "binary"; the
      // conditional is vestigial.
      type = info.isTextFile ? "text" : "binary";
      syntax = getSyntax(path);
      fenceLang = "base64";
      encoding = "base64";
    }
    // Template continuation lines are flush-left on purpose: they are
    // emitted verbatim into the archive.
    const block = `### ${path}

${fence}${fenceLang}
${contentStr}
${fence}`;
    contentBlocks.push(block);
    fileEntries.push({
      path,
      type,
      size: info.file.content.length,
      hash,
      syntax,
      ...encoding ? { encoding } : {},
      position: { start: 0, fence: 0, content: 0, length: 0 }
      // placeholder
    });
  }
  const contentSection = contentBlocks.join("\n\n");
  const manifest = {
    mdmeld: {
      version: FORMAT_VERSION,
      created,
      hash_algorithm: hashAlgorithm,
      backtick_count: backtickCount,
      files: fileEntries
    },
    integrity: {
      manifest_hash: HASH_PLACEHOLDER,
      content_hash: HASH_PLACEHOLDER
    }
  };
  // Positions depend on the frontmatter's own height, which in turn grows
  // when position blocks are added to the YAML — a fixed point computed in
  // two rounds below.
  const entriesNoPos = fileEntries.map((f) => {
    const { position: _, ...rest } = f;
    return rest;
  });
  // Round 1: estimate the frontmatter height without any position blocks.
  const prelimYaml = dump({
    ...manifest,
    mdmeld: { ...manifest.mdmeld, files: entriesNoPos }
  });
  const prelimFrontmatter = `${ARCHIVE_HEADER}
---
${prelimYaml}---
`;
  const prelimLines = countLines(prelimFrontmatter);
  const positions1 = calculatePositions(contentSection, prelimLines);
  applyPositions(fileEntries, positions1);
  // Round 2: re-measure with positions included; each position always
  // serializes to the same number of YAML lines regardless of its digit
  // values, so the count is stable after this round.
  const withPosYaml = dump(manifest);
  const withPosFrontmatter = `${ARCHIVE_HEADER}
---
${withPosYaml}---
`;
  const finalLines = countLines(withPosFrontmatter);
  if (finalLines !== prelimLines) {
    const positions2 = calculatePositions(contentSection, finalLines);
    applyPositions(fileEntries, positions2);
  }
  // content_hash covers exactly what follows the closing "---" in the
  // output: a blank line then the content section.
  const rawContentForHash = `

${contentSection}`;
  const contentHash = await computeHash(rawContentForHash, hashAlgorithm);
  manifest.integrity.content_hash = contentHash;
  // manifest_hash covers only the mdmeld section (not integrity itself,
  // which would be circular).
  const manifestYamlForHash = dump({ mdmeld: manifest.mdmeld });
  const manifestHash = await computeHash(manifestYamlForHash, hashAlgorithm);
  manifest.integrity.manifest_hash = manifestHash;
  const outputYaml = dump(manifest);
  return `${ARCHIVE_HEADER}
---
${outputYaml}---

${contentSection}`;
}
304
/**
 * Walk the rendered content section and record, for each "### path" heading
 * followed by a fenced block, the 1-based archive line numbers of the
 * heading (start), the opening fence, the first content line, and the
 * number of content lines. `manifestLineCount` is how many lines the
 * frontmatter occupies; +1 accounts for the blank separator line before
 * the content section.
 */
function calculatePositions(contentSection, manifestLineCount) {
  const isFence = (text) => /^`{4,}/.test(text.trim());
  const rows = contentSection.split("\n");
  const offset = manifestLineCount + 1;
  const positions = [];
  let idx = 0;
  while (idx < rows.length) {
    if (!rows[idx].startsWith("### ")) {
      idx += 1;
      continue;
    }
    const headingLine = offset + idx + 1;
    idx += 1;
    // At most one blank line may sit between heading and fence.
    if (idx < rows.length && rows[idx].trim() === "") {
      idx += 1;
    }
    if (idx < rows.length && isFence(rows[idx])) {
      const fenceLineNo = offset + idx + 1;
      idx += 1;
      const firstContentLine = offset + idx + 1;
      let bodyLines = 0;
      while (idx < rows.length && !isFence(rows[idx])) {
        bodyLines += 1;
        idx += 1;
      }
      if (idx < rows.length) idx += 1; // step past the closing fence
      positions.push({
        start: headingLine,
        fence: fenceLineNo,
        content: firstContentLine,
        length: bodyLines
      });
    }
  }
  return positions;
}
340
/**
 * Copy computed positions onto manifest entries, pairing by index
 * (entry i gets positions[i]).
 */
function applyPositions(entries, positions) {
  entries.forEach((entry, idx) => {
    entry.position = positions[idx];
  });
}
345
/**
 * Count the lines in `s`. A trailing newline does not create an extra
 * (phantom) empty line; the empty string has zero lines.
 */
function countLines(s) {
  if (s.length === 0) return 0;
  const segments = s.split("\n");
  const endsWithNewline = segments[segments.length - 1] === "";
  return endsWithNewline ? segments.length - 1 : segments.length;
}
354
/**
 * True when `line` (after trimming) begins with at least `minRun` backticks.
 *
 * BUG (latent): callers in this file use the default of 4, but an archive
 * may legally use a wider fence (backtick_count > 4) precisely because the
 * packed content itself contains runs of 4+ backticks — such a content line
 * would be misclassified as a fence here. Pass the archive's actual
 * backtick_count via `minRun` to detect fences exactly; the default keeps
 * existing callers' behavior unchanged.
 */
function isFenceLine(line, minRun = 4) {
  const trimmed = line.trim();
  let run = 0;
  while (run < trimmed.length && trimmed[run] === "`") run += 1;
  return run >= minRun;
}
357
+
358
// src/core/unpack.ts
var TEXT_ENCODER = new TextEncoder();

/**
 * Parse an MDMeld archive string and materialize its files.
 * Returns the decoded files together with the parsed manifest.
 */
async function unpack(content) {
  const parsed = parseArchive(content);
  const files = extractFiles(parsed.contentSection, parsed.manifest);
  return { files, manifest: parsed.manifest };
}
365
/**
 * Split an archive into its parsed YAML manifest and the raw content
 * section.
 *
 * Accepts an optional leading HTML comment (the self-describing archive
 * banner); everything up to and including "-->" is discarded. Then expects
 * "---\n<yaml>\n---" frontmatter. Throws on a missing delimiter, a manifest
 * without an "mdmeld" key, or a major-version mismatch with FORMAT_VERSION.
 */
function parseArchive(content) {
  let input = content;
  // Strip the self-describing comment header, if present.
  if (input.startsWith("<!--")) {
    const commentEnd = input.indexOf("-->");
    if (commentEnd !== -1) {
      input = input.slice(commentEnd + 3).trimStart();
    }
  }
  if (!input.startsWith("---")) {
    throw new Error("Missing YAML frontmatter (expected --- delimiter)");
  }
  // Search from index 3 so the opening "---" itself cannot match.
  const secondDelim = input.indexOf("\n---", 3);
  if (secondDelim === -1) {
    throw new Error("Missing closing --- delimiter for YAML frontmatter");
  }
  // slice(4, ...) skips the opening "---\n".
  // NOTE(review): assumes LF line endings; a CRLF archive ("---\r\n") would
  // leave a stray "\r" at the start of the YAML — confirm inputs are
  // normalized upstream.
  const yamlStr = input.slice(4, secondDelim);
  // +4 consumes "\n---"; leading newlines after the closing delimiter are
  // stripped so contentSection starts at the first file heading.
  const rawContentAfterDelimiter = input.slice(secondDelim + 4);
  const contentSection = rawContentAfterDelimiter.replace(/^\n+/, "");
  const parsed = load(yamlStr);
  if (!parsed || typeof parsed !== "object" || !("mdmeld" in parsed)) {
    throw new Error("Invalid manifest: missing 'mdmeld' key");
  }
  const manifest = parsed;
  // Only the major version must match; minor/patch drift is accepted.
  const archiveMajor = String(manifest.mdmeld.version).split(".")[0];
  const supportedMajor = FORMAT_VERSION.split(".")[0];
  if (archiveMajor !== supportedMajor) {
    throw new Error(
      `Unsupported archive version: ${manifest.mdmeld.version} (this parser supports major version ${supportedMajor})`
    );
  }
  return { manifest, contentSection, rawContentAfterDelimiter };
}
397
/**
 * Extract file blocks from the content section using the manifest's fence
 * width. Each block is "### path", an opening fence with an optional
 * language tag, the raw content, and a closing fence. Content tagged
 * base64 (by the fence language or the manifest entry's encoding field) is
 * decoded to bytes; everything else is UTF-8 encoded as-is.
 */
function extractFiles(contentSection, manifest) {
  const backtickCount = manifest.mdmeld.backtick_count;
  const fence = "`".repeat(backtickCount);
  // Index manifest entries by path once, instead of re-scanning the
  // entries array for every matched block (was O(files × matches)).
  const entriesByPath = new Map(manifest.mdmeld.files.map((f) => [f.path, f]));
  const files = [];
  // Fence is backticks only, so interpolating it into the pattern cannot
  // inject regex metacharacters.
  const pattern = new RegExp(
    `### (.+?)\\s*\\n\\s*${fence}([^\\n]*)\\n([\\s\\S]*?)\\n?${fence}(?=\\s|$)`,
    "g"
  );
  for (const match of contentSection.matchAll(pattern)) {
    const path = match[1].trim();
    const lang = match[2].trim();
    const rawContent = match[3];
    const entry = entriesByPath.get(path);
    const isEncoded = lang === "base64" || entry?.encoding === "base64";
    const content = isEncoded ? decodeBase64(rawContent) : TEXT_ENCODER.encode(rawContent);
    files.push({ path, content });
  }
  return files;
}
421
+
422
+ export {
423
+ pack,
424
+ unpack
425
+ };
426
+ //# sourceMappingURL=chunk-UF532LMZ.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/core/backticks.ts","../src/core/detect.ts","../src/core/syntax.ts","../src/core/pack.ts","../src/core/unpack.ts"],"sourcesContent":["import { MAX_BACKTICK_COUNT, MIN_BACKTICK_COUNT } from \"./constants.js\";\n\n/**\n * Scan a string and return the maximum run of consecutive backticks found.\n */\nexport function scanForMaxBackticks(content: string): number {\n\tlet max = 0;\n\tlet current = 0;\n\n\tfor (let i = 0; i < content.length; i++) {\n\t\tif (content[i] === \"`\") {\n\t\t\tcurrent++;\n\t\t\tif (current > max) max = current;\n\t\t} else {\n\t\t\tcurrent = 0;\n\t\t}\n\t}\n\n\treturn max;\n}\n\n/**\n * Determine the backtick count needed to safely fence all file contents.\n * Returns the count, or null if the content cannot be safely fenced\n * (exceeds MAX_BACKTICK_COUNT).\n */\nexport function resolveBacktickCount(maxFoundInContent: number): number | null {\n\tconst needed = Math.max(maxFoundInContent + 1, MIN_BACKTICK_COUNT);\n\tif (needed > MAX_BACKTICK_COUNT) return null;\n\treturn needed;\n}\n\n/** Generate a string of `count` backticks. */\nexport function generateBackticks(count: number): string {\n\treturn \"`\".repeat(count);\n}\n","/**\n * Detect whether a Uint8Array likely contains text or binary data.\n * Uses byte distribution analysis — no filesystem or MIME dependencies.\n */\nexport function isText(data: Uint8Array): boolean {\n\tif (data.length === 0) return true;\n\n\t// Sample up to 8KB for analysis\n\tconst sample = data.length > 8192 ? 
data.subarray(0, 8192) : data;\n\tconst len = sample.length;\n\n\tlet nullBytes = 0;\n\tlet controlChars = 0;\n\tlet printableOrWhitespace = 0;\n\n\tfor (let i = 0; i < len; i++) {\n\t\tconst byte = sample[i]!;\n\n\t\tif (byte === 0x00) {\n\t\t\tnullBytes++;\n\t\t} else if (byte === 0x09 || byte === 0x0a || byte === 0x0d) {\n\t\t\t// Common whitespace (tab, LF, CR)\n\t\t\tprintableOrWhitespace++;\n\t\t} else if (byte < 0x20) {\n\t\t\t// Non-whitespace control char\n\t\t\tcontrolChars++;\n\t\t} else if (byte <= 0x7e) {\n\t\t\t// Printable ASCII (0x20–0x7E, includes space)\n\t\t\tprintableOrWhitespace++;\n\t\t}\n\t\t// High bytes (0x80-0xFF) are allowed — could be UTF-8 continuation\n\t}\n\n\t// Any null bytes → binary\n\tif (nullBytes > 0) return false;\n\n\t// More than 5% control characters → binary\n\tif (controlChars / len > 0.05) return false;\n\n\t// At least 80% printable ASCII or common whitespace (per spec)\n\tif (len > 32 && printableOrWhitespace / len < 0.8) return false;\n\n\treturn true;\n}\n","/** Map file extensions to markdown code fence language identifiers. 
*/\nconst EXT_TO_SYNTAX: Record<string, string> = {\n\t// Web\n\t\".html\": \"html\",\n\t\".htm\": \"html\",\n\t\".css\": \"css\",\n\t\".js\": \"javascript\",\n\t\".mjs\": \"javascript\",\n\t\".cjs\": \"javascript\",\n\t\".jsx\": \"jsx\",\n\t\".ts\": \"typescript\",\n\t\".tsx\": \"tsx\",\n\t\".mts\": \"typescript\",\n\t\".cts\": \"typescript\",\n\t\".vue\": \"vue\",\n\t\".svelte\": \"svelte\",\n\t\".astro\": \"astro\",\n\n\t// Data\n\t\".json\": \"json\",\n\t\".jsonc\": \"jsonc\",\n\t\".yaml\": \"yaml\",\n\t\".yml\": \"yaml\",\n\t\".toml\": \"toml\",\n\t\".xml\": \"xml\",\n\t\".csv\": \"csv\",\n\t\".graphql\": \"graphql\",\n\t\".gql\": \"graphql\",\n\n\t// Config\n\t\".env\": \"dotenv\",\n\t\".ini\": \"ini\",\n\t\".conf\": \"conf\",\n\t\".cfg\": \"ini\",\n\n\t// Shell\n\t\".sh\": \"bash\",\n\t\".bash\": \"bash\",\n\t\".zsh\": \"zsh\",\n\t\".fish\": \"fish\",\n\t\".ps1\": \"powershell\",\n\t\".bat\": \"batch\",\n\t\".cmd\": \"batch\",\n\n\t// Systems\n\t\".c\": \"c\",\n\t\".h\": \"c\",\n\t\".cpp\": \"cpp\",\n\t\".cc\": \"cpp\",\n\t\".cxx\": \"cpp\",\n\t\".hpp\": \"cpp\",\n\t\".rs\": \"rust\",\n\t\".go\": \"go\",\n\t\".java\": \"java\",\n\t\".kt\": \"kotlin\",\n\t\".kts\": \"kotlin\",\n\t\".swift\": \"swift\",\n\t\".cs\": \"csharp\",\n\t\".fs\": \"fsharp\",\n\t\".zig\": \"zig\",\n\n\t// Scripting\n\t\".py\": \"python\",\n\t\".rb\": \"ruby\",\n\t\".php\": \"php\",\n\t\".pl\": \"perl\",\n\t\".lua\": \"lua\",\n\t\".r\": \"r\",\n\t\".R\": \"r\",\n\t\".jl\": \"julia\",\n\t\".ex\": \"elixir\",\n\t\".exs\": \"elixir\",\n\t\".erl\": \"erlang\",\n\t\".clj\": \"clojure\",\n\t\".scala\": \"scala\",\n\t\".dart\": \"dart\",\n\n\t// Markup / Docs\n\t\".md\": \"markdown\",\n\t\".mdx\": \"mdx\",\n\t\".rst\": \"rst\",\n\t\".tex\": \"latex\",\n\t\".typ\": \"typst\",\n\n\t// DevOps\n\t\".dockerfile\": \"dockerfile\",\n\t\".tf\": \"terraform\",\n\t\".hcl\": \"hcl\",\n\t\".nix\": \"nix\",\n\n\t// SQL\n\t\".sql\": \"sql\",\n\n\t// Misc\n\t\".diff\": \"diff\",\n\t\".patch\": 
\"diff\",\n\t\".prisma\": \"prisma\",\n\t\".proto\": \"protobuf\",\n\t\".wasm\": \"wasm\",\n\t\".lock\": \"text\",\n\t\".log\": \"text\",\n\t\".txt\": \"text\",\n};\n\n/** Well-known filenames that map to a syntax */\nconst NAME_TO_SYNTAX: Record<string, string> = {\n\tDockerfile: \"dockerfile\",\n\tMakefile: \"makefile\",\n\tContainerfile: \"dockerfile\",\n\tJustfile: \"makefile\",\n\tVagrantfile: \"ruby\",\n\tGemfile: \"ruby\",\n\tRakefile: \"ruby\",\n\t\".gitignore\": \"gitignore\",\n\t\".gitattributes\": \"gitattributes\",\n\t\".editorconfig\": \"editorconfig\",\n\t\".prettierrc\": \"json\",\n\t\".eslintrc\": \"json\",\n};\n\n/** Get the syntax language identifier for a file path. Defaults to \"text\" per spec. */\nexport function getSyntax(filePath: string): string {\n\t// Check full filename first\n\tconst fileName = filePath.split(\"/\").pop() ?? \"\";\n\tconst byName = NAME_TO_SYNTAX[fileName];\n\tif (byName) return byName;\n\n\t// Then check extension\n\tconst dotIdx = fileName.lastIndexOf(\".\");\n\tif (dotIdx === -1) return \"text\";\n\tconst ext = fileName.slice(dotIdx).toLowerCase();\n\treturn EXT_TO_SYNTAX[ext] ?? \"text\";\n}\n","import { generateBackticks, resolveBacktickCount, scanForMaxBackticks } from \"./backticks.js\";\nimport { encodeBase64 } from \"./base64.js\";\nimport { ARCHIVE_HEADER, FORMAT_VERSION, HASH_PLACEHOLDER } from \"./constants.js\";\nimport { isText } from \"./detect.js\";\nimport { computeHash, computeHashBytes } from \"./hash.js\";\nimport { getSyntax } from \"./syntax.js\";\nimport type { FileEntry, HashAlgorithm, Manifest, PackOptions, VirtualFile } from \"./types.js\";\nimport * as yaml from \"./yaml.js\";\n\nconst TEXT_DECODER = new TextDecoder();\n\n/**\n * Pack an array of virtual files into an mdmeld archive string.\n */\nexport async function pack(files: VirtualFile[], options: PackOptions = {}): Promise<string> {\n\tconst hashAlgorithm: HashAlgorithm = options.hashAlgorithm ?? 
\"xxh64\";\n\tconst created = options.created ?? new Date().toISOString();\n\n\t// Determine backtick count by scanning all text content\n\tlet maxBackticks = 0;\n\tconst fileInfos: Array<{\n\t\tfile: VirtualFile;\n\t\tisTextFile: boolean;\n\t\ttextContent: string | null;\n\t\tbase64Content: string | null;\n\t}> = [];\n\n\tfor (const file of files) {\n\t\tconst textFile = isText(file.content);\n\n\t\tif (textFile) {\n\t\t\tconst text = TEXT_DECODER.decode(file.content);\n\t\t\tconst found = scanForMaxBackticks(text);\n\t\t\tif (found > maxBackticks) maxBackticks = found;\n\t\t\tfileInfos.push({ file, isTextFile: true, textContent: text, base64Content: null });\n\t\t} else {\n\t\t\tconst b64 = encodeBase64(file.content);\n\t\t\tfileInfos.push({ file, isTextFile: false, textContent: null, base64Content: b64 });\n\t\t}\n\t}\n\n\tconst backtickCount = resolveBacktickCount(maxBackticks);\n\tif (backtickCount === null) {\n\t\tthrow new Error(\n\t\t\t\"Cannot pack: file content contains a backtick run of 8 or more characters, \" +\n\t\t\t\t\"which exceeds the maximum allowed fence width. 
\" +\n\t\t\t\t\"Remove the excessive backtick sequences or exclude the file.\",\n\t\t);\n\t}\n\tconst fence = generateBackticks(backtickCount);\n\n\t// Build content blocks and manifest entries\n\tconst contentBlocks: string[] = [];\n\tconst fileEntries: FileEntry[] = [];\n\n\tfor (const info of fileInfos) {\n\t\tconst path = info.file.path;\n\t\tlet contentStr: string;\n\t\tlet fenceLang: string;\n\t\tlet type: \"text\" | \"binary\";\n\t\tlet syntax: string;\n\t\tlet encoding: \"base64\" | undefined;\n\n\t\t// File hash is always computed on the original raw bytes (per spec)\n\t\tconst hash = await computeHashBytes(info.file.content, hashAlgorithm);\n\n\t\tif (info.textContent !== null) {\n\t\t\t// Plain text file\n\t\t\tcontentStr = info.textContent;\n\t\t\ttype = \"text\";\n\t\t\tsyntax = getSyntax(path);\n\t\t\tfenceLang = syntax;\n\t\t\tencoding = undefined;\n\t\t} else {\n\t\t\t// Binary or base64-encoded text\n\t\t\tconst b64 = info.base64Content!;\n\t\t\tcontentStr = b64;\n\t\t\ttype = info.isTextFile ? \"text\" : \"binary\";\n\t\t\tsyntax = getSyntax(path);\n\t\t\tfenceLang = \"base64\";\n\t\t\tencoding = \"base64\";\n\t\t}\n\n\t\tconst block = `### ${path}\\n\\n${fence}${fenceLang}\\n${contentStr}\\n${fence}`;\n\t\tcontentBlocks.push(block);\n\n\t\tfileEntries.push({\n\t\t\tpath,\n\t\t\ttype,\n\t\t\tsize: info.file.content.length,\n\t\t\thash,\n\t\t\tsyntax,\n\t\t\t...(encoding ? 
{ encoding } : {}),\n\t\t\tposition: { start: 0, fence: 0, content: 0, length: 0 }, // placeholder\n\t\t});\n\t}\n\n\tconst contentSection = contentBlocks.join(\"\\n\\n\");\n\n\t// Multi-pass position calculation\n\tconst manifest: Manifest = {\n\t\tmdmeld: {\n\t\t\tversion: FORMAT_VERSION,\n\t\t\tcreated,\n\t\t\thash_algorithm: hashAlgorithm,\n\t\t\tbacktick_count: backtickCount,\n\t\t\tfiles: fileEntries,\n\t\t},\n\t\tintegrity: {\n\t\t\tmanifest_hash: HASH_PLACEHOLDER,\n\t\t\tcontent_hash: HASH_PLACEHOLDER,\n\t\t},\n\t};\n\n\t// Pass 1: Estimate manifest size without position data\n\tconst entriesNoPos = fileEntries.map((f) => {\n\t\tconst { position: _, ...rest } = f;\n\t\treturn rest;\n\t});\n\tconst prelimYaml = yaml.dump({\n\t\t...manifest,\n\t\tmdmeld: { ...manifest.mdmeld, files: entriesNoPos },\n\t});\n\tconst prelimFrontmatter = `${ARCHIVE_HEADER}\\n---\\n${prelimYaml}---\\n`;\n\tconst prelimLines = countLines(prelimFrontmatter);\n\n\t// Pass 2: Calculate positions with estimated offset\n\tconst positions1 = calculatePositions(contentSection, prelimLines);\n\tapplyPositions(fileEntries, positions1);\n\n\t// Pass 3: Re-serialize with real positions, check if line count changed\n\tconst withPosYaml = yaml.dump(manifest);\n\tconst withPosFrontmatter = `${ARCHIVE_HEADER}\\n---\\n${withPosYaml}---\\n`;\n\tconst finalLines = countLines(withPosFrontmatter);\n\n\t// Pass 4: If line count changed, recalculate with actual offset\n\tif (finalLines !== prelimLines) {\n\t\tconst positions2 = calculatePositions(contentSection, finalLines);\n\t\tapplyPositions(fileEntries, positions2);\n\t}\n\n\t// Compute integrity hashes\n\t// Content hash covers everything after closing \\n--- delimiter\n\t// In the output format: ...yaml\\n---\\n\\n{content}, so after \\n--- we get \\n\\n{content}\n\tconst rawContentForHash = `\\n\\n${contentSection}`;\n\tconst contentHash = await computeHash(rawContentForHash, hashAlgorithm);\n\tmanifest.integrity.content_hash = 
contentHash;\n\n\t// Manifest hash includes the `mdmeld:` key line per spec\n\tconst manifestYamlForHash = yaml.dump({ mdmeld: manifest.mdmeld });\n\tconst manifestHash = await computeHash(manifestYamlForHash, hashAlgorithm);\n\tmanifest.integrity.manifest_hash = manifestHash;\n\n\t// Final serialization\n\tconst outputYaml = yaml.dump(manifest);\n\treturn `${ARCHIVE_HEADER}\\n---\\n${outputYaml}---\\n\\n${contentSection}`;\n}\n\n/**\n * Parse the content section and calculate position metadata for each file block.\n * Returns an array of positions, one per file block found.\n */\nfunction calculatePositions(\n\tcontentSection: string,\n\tmanifestLineCount: number,\n): Array<{ start: number; fence: number; content: number; length: number }> {\n\tconst lines = contentSection.split(\"\\n\");\n\tconst positions: Array<{ start: number; fence: number; content: number; length: number }> = [];\n\t// Content starts after manifest + 1 blank line separator\n\tconst offset = manifestLineCount + 1;\n\n\tlet i = 0;\n\twhile (i < lines.length) {\n\t\tconst line = lines[i]!;\n\n\t\tif (line.startsWith(\"### \")) {\n\t\t\tconst start = offset + i + 1; // 1-indexed\n\n\t\t\t// Skip blank line after header\n\t\t\ti++;\n\t\t\tif (i < lines.length && lines[i]!.trim() === \"\") {\n\t\t\t\ti++;\n\t\t\t}\n\n\t\t\t// Expect fence line\n\t\t\tif (i < lines.length && isFenceLine(lines[i]!)) {\n\t\t\t\tconst fenceLine = offset + i + 1;\n\t\t\t\ti++;\n\t\t\t\tconst contentStart = offset + i + 1;\n\n\t\t\t\t// Count content lines until closing fence\n\t\t\t\tlet contentLength = 0;\n\t\t\t\twhile (i < lines.length && !isFenceLine(lines[i]!)) {\n\t\t\t\t\tcontentLength++;\n\t\t\t\t\ti++;\n\t\t\t\t}\n\t\t\t\t// Skip closing fence\n\t\t\t\tif (i < lines.length) i++;\n\n\t\t\t\tpositions.push({\n\t\t\t\t\tstart,\n\t\t\t\t\tfence: fenceLine,\n\t\t\t\t\tcontent: contentStart,\n\t\t\t\t\tlength: contentLength,\n\t\t\t\t});\n\t\t\t}\n\t\t} else {\n\t\t\ti++;\n\t\t}\n\t}\n\n\treturn positions;\n}\n\n/** 
Apply calculated positions to file entries. */\nfunction applyPositions(\n\tentries: FileEntry[],\n\tpositions: Array<{ start: number; fence: number; content: number; length: number }>,\n): void {\n\tfor (let i = 0; i < entries.length; i++) {\n\t\tentries[i]!.position = positions[i]!;\n\t}\n}\n\n/** Count the number of lines in a string (trailing newline doesn't add extra). */\nfunction countLines(s: string): number {\n\tif (s.length === 0) return 0;\n\tlet count = 0;\n\tfor (let i = 0; i < s.length; i++) {\n\t\tif (s[i] === \"\\n\") count++;\n\t}\n\t// If the string doesn't end with a newline, add 1 for the last line\n\tif (s[s.length - 1] !== \"\\n\") count++;\n\treturn count;\n}\n\n/** Check if a line is a code fence (4+ backticks). */\nfunction isFenceLine(line: string): boolean {\n\treturn /^`{4,}/.test(line.trim());\n}\n","import { decodeBase64 } from \"./base64.js\";\nimport { FORMAT_VERSION, HASH_PLACEHOLDER } from \"./constants.js\";\nimport { computeHash, computeHashBytes } from \"./hash.js\";\nimport type { CheckResult, Manifest, UnpackResult, VirtualFile } from \"./types.js\";\nimport * as yaml from \"./yaml.js\";\n\nconst TEXT_ENCODER = new TextEncoder();\n\n/**\n * Unpack an mdmeld archive string into virtual files.\n */\nexport async function unpack(content: string): Promise<UnpackResult> {\n\tconst { manifest, contentSection } = parseArchive(content);\n\tconst files = extractFiles(contentSection, manifest);\n\treturn { files, manifest };\n}\n\n/**\n * Validate an mdmeld archive's integrity without extracting files.\n */\nexport async function check(content: string): Promise<CheckResult> {\n\tconst errors: string[] = [];\n\n\tlet manifest: Manifest;\n\tlet contentSection: string;\n\tlet rawContentAfterDelimiter: string;\n\ttry {\n\t\tconst parsed = parseArchive(content);\n\t\tmanifest = parsed.manifest;\n\t\tcontentSection = parsed.contentSection;\n\t\trawContentAfterDelimiter = parsed.rawContentAfterDelimiter;\n\t} catch (e) {\n\t\treturn { valid: 
false, errors: [`Parse error: ${(e as Error).message}`] };\n\t}\n\n\tconst algorithm = manifest.mdmeld.hash_algorithm;\n\n\t// Verify content hash (hashed on raw content after closing ---, per spec)\n\tif (\n\t\tmanifest.integrity.content_hash !== HASH_PLACEHOLDER &&\n\t\tmanifest.integrity.content_hash !== \"\"\n\t) {\n\t\tconst actual = await computeHash(rawContentAfterDelimiter, algorithm);\n\t\tif (actual !== manifest.integrity.content_hash) {\n\t\t\terrors.push(\n\t\t\t\t`Content hash mismatch: expected ${manifest.integrity.content_hash}, got ${actual}`,\n\t\t\t);\n\t\t}\n\t}\n\n\t// Verify manifest hash\n\tif (\n\t\tmanifest.integrity.manifest_hash !== HASH_PLACEHOLDER &&\n\t\tmanifest.integrity.manifest_hash !== \"\"\n\t) {\n\t\t// Manifest hash includes the `mdmeld:` key line per spec\n\t\tconst manifestYaml = yaml.dump({ mdmeld: manifest.mdmeld });\n\t\tconst actual = await computeHash(manifestYaml, algorithm);\n\t\tif (actual !== manifest.integrity.manifest_hash) {\n\t\t\terrors.push(\n\t\t\t\t`Manifest hash mismatch: expected ${manifest.integrity.manifest_hash}, got ${actual}`,\n\t\t\t);\n\t\t}\n\t}\n\n\t// Verify individual file hashes (hashed on original bytes per spec)\n\tconst files = extractFiles(contentSection, manifest);\n\tfor (let i = 0; i < manifest.mdmeld.files.length; i++) {\n\t\tconst entry = manifest.mdmeld.files[i]!;\n\t\tconst file = files[i];\n\n\t\tif (!file) {\n\t\t\terrors.push(`Missing content block for file: ${entry.path}`);\n\t\t\tcontinue;\n\t\t}\n\n\t\tif (entry.hash === HASH_PLACEHOLDER) continue;\n\n\t\tconst actual = await computeHashBytes(file.content, algorithm);\n\t\tif (actual !== entry.hash) {\n\t\t\terrors.push(`File hash mismatch for ${entry.path}: expected ${entry.hash}, got ${actual}`);\n\t\t}\n\t}\n\n\treturn { valid: errors.length === 0, errors };\n}\n\n/**\n * Parse the archive into its manifest and content section.\n * Returns both the raw content after --- (for hashing) and the normalized\n * content section 
(leading newlines stripped, for file extraction).\n */\nfunction parseArchive(content: string): {\n\tmanifest: Manifest;\n\tcontentSection: string;\n\trawContentAfterDelimiter: string;\n} {\n\t// Strip optional header comment (<!-- ... -->)\n\tlet input = content;\n\tif (input.startsWith(\"<!--\")) {\n\t\tconst commentEnd = input.indexOf(\"-->\");\n\t\tif (commentEnd !== -1) {\n\t\t\tinput = input.slice(commentEnd + 3).trimStart();\n\t\t}\n\t}\n\n\t// Find YAML frontmatter\n\tif (!input.startsWith(\"---\")) {\n\t\tthrow new Error(\"Missing YAML frontmatter (expected --- delimiter)\");\n\t}\n\n\tconst secondDelim = input.indexOf(\"\\n---\", 3);\n\tif (secondDelim === -1) {\n\t\tthrow new Error(\"Missing closing --- delimiter for YAML frontmatter\");\n\t}\n\n\tconst yamlStr = input.slice(4, secondDelim); // skip initial \"---\\n\"\n\tconst rawContentAfterDelimiter = input.slice(secondDelim + 4); // skip \"\\n---\"\n\tconst contentSection = rawContentAfterDelimiter.replace(/^\\n+/, \"\");\n\n\tconst parsed = yaml.load(yamlStr) as Record<string, unknown>;\n\tif (!parsed || typeof parsed !== \"object\" || !(\"mdmeld\" in parsed)) {\n\t\tthrow new Error(\"Invalid manifest: missing 'mdmeld' key\");\n\t}\n\n\t// Reject unsupported major versions per spec\n\tconst manifest = parsed as unknown as Manifest;\n\tconst archiveMajor = String(manifest.mdmeld.version).split(\".\")[0];\n\tconst supportedMajor = FORMAT_VERSION.split(\".\")[0];\n\tif (archiveMajor !== supportedMajor) {\n\t\tthrow new Error(\n\t\t\t`Unsupported archive version: ${manifest.mdmeld.version} (this parser supports major version ${supportedMajor})`,\n\t\t);\n\t}\n\n\treturn { manifest, contentSection, rawContentAfterDelimiter };\n}\n\n/**\n * Extract files from the content section using the manifest metadata.\n */\nfunction extractFiles(contentSection: string, manifest: Manifest): VirtualFile[] {\n\tconst backtickCount = manifest.mdmeld.backtick_count;\n\tconst fence = \"`\".repeat(backtickCount);\n\tconst 
files: VirtualFile[] = [];\n\n\t// Build regex to extract file blocks\n\t// Format: ### path\\n\\n````lang\\ncontent\\n````\n\tconst pattern = new RegExp(\n\t\t`### (.+?)\\\\s*\\\\n\\\\s*${fence}([^\\\\n]*)\\\\n([\\\\s\\\\S]*?)\\\\n?${fence}(?=\\\\s|$)`,\n\t\t\"g\",\n\t);\n\n\tfor (const match of contentSection.matchAll(pattern)) {\n\t\tconst path = match[1]!.trim();\n\t\tconst lang = match[2]!.trim();\n\t\tconst rawContent = match[3]!;\n\n\t\t// Find the corresponding manifest entry\n\t\tconst entry = manifest.mdmeld.files.find((f) => f.path === path);\n\t\tconst isEncoded = lang === \"base64\" || entry?.encoding === \"base64\";\n\n\t\tlet content: Uint8Array;\n\t\tif (isEncoded) {\n\t\t\tcontent = decodeBase64(rawContent);\n\t\t} else {\n\t\t\tcontent = TEXT_ENCODER.encode(rawContent);\n\t\t}\n\n\t\tfiles.push({ path, content });\n\t}\n\n\treturn files;\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAKO,SAAS,oBAAoB,SAAyB;AAC5D,MAAI,MAAM;AACV,MAAI,UAAU;AAEd,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACxC,QAAI,QAAQ,CAAC,MAAM,KAAK;AACvB;AACA,UAAI,UAAU,IAAK,OAAM;AAAA,IAC1B,OAAO;AACN,gBAAU;AAAA,IACX;AAAA,EACD;AAEA,SAAO;AACR;AAOO,SAAS,qBAAqB,mBAA0C;AAC9E,QAAM,SAAS,KAAK,IAAI,oBAAoB,GAAG,kBAAkB;AACjE,MAAI,SAAS,mBAAoB,QAAO;AACxC,SAAO;AACR;AAGO,SAAS,kBAAkB,OAAuB;AACxD,SAAO,IAAI,OAAO,KAAK;AACxB;;;AC/BO,SAAS,OAAO,MAA2B;AACjD,MAAI,KAAK,WAAW,EAAG,QAAO;AAG9B,QAAM,SAAS,KAAK,SAAS,OAAO,KAAK,SAAS,GAAG,IAAI,IAAI;AAC7D,QAAM,MAAM,OAAO;AAEnB,MAAI,YAAY;AAChB,MAAI,eAAe;AACnB,MAAI,wBAAwB;AAE5B,WAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC7B,UAAM,OAAO,OAAO,CAAC;AAErB,QAAI,SAAS,GAAM;AAClB;AAAA,IACD,WAAW,SAAS,KAAQ,SAAS,MAAQ,SAAS,IAAM;AAE3D;AAAA,IACD,WAAW,OAAO,IAAM;AAEvB;AAAA,IACD,WAAW,QAAQ,KAAM;AAExB;AAAA,IACD;AAAA,EAED;AAGA,MAAI,YAAY,EAAG,QAAO;AAG1B,MAAI,eAAe,MAAM,KAAM,QAAO;AAGtC,MAAI,MAAM,MAAM,wBAAwB,MAAM,IAAK,QAAO;AAE1D,SAAO;AACR;;;AC1CA,IAAM,gBAAwC;AAAA;AAAA,EAE7C,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;A
AAA,EACR,QAAQ;AAAA,EACR,WAAW;AAAA,EACX,UAAU;AAAA;AAAA,EAGV,SAAS;AAAA,EACT,UAAU;AAAA,EACV,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,YAAY;AAAA,EACZ,QAAQ;AAAA;AAAA,EAGR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA;AAAA,EAGR,OAAO;AAAA,EACP,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA;AAAA,EAGR,MAAM;AAAA,EACN,MAAM;AAAA,EACN,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,OAAO;AAAA,EACP,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA;AAAA,EAGR,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO;AAAA,EACP,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,SAAS;AAAA;AAAA,EAGT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA;AAAA,EAGR,eAAe;AAAA,EACf,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,QAAQ;AAAA;AAAA,EAGR,QAAQ;AAAA;AAAA,EAGR,SAAS;AAAA,EACT,UAAU;AAAA,EACV,WAAW;AAAA,EACX,UAAU;AAAA,EACV,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,QAAQ;AACT;AAGA,IAAM,iBAAyC;AAAA,EAC9C,YAAY;AAAA,EACZ,UAAU;AAAA,EACV,eAAe;AAAA,EACf,UAAU;AAAA,EACV,aAAa;AAAA,EACb,SAAS;AAAA,EACT,UAAU;AAAA,EACV,cAAc;AAAA,EACd,kBAAkB;AAAA,EAClB,iBAAiB;AAAA,EACjB,eAAe;AAAA,EACf,aAAa;AACd;AAGO,SAAS,UAAU,UAA0B;AAEnD,QAAM,WAAW,SAAS,MAAM,GAAG,EAAE,IAAI,KAAK;AAC9C,QAAM,SAAS,eAAe,QAAQ;AACtC,MAAI,OAAQ,QAAO;AAGnB,QAAM,SAAS,SAAS,YAAY,GAAG;AACvC,MAAI,WAAW,GAAI,QAAO;AAC1B,QAAM,MAAM,SAAS,MAAM,MAAM,EAAE,YAAY;AAC/C,SAAO,cAAc,GAAG,KAAK;AAC9B;;;AC3HA,IAAM,eAAe,IAAI,YAAY;AAKrC,eAAsB,KAAK,OAAsB,UAAuB,CAAC,GAAoB;AAC5F,QAAM,gBAA+B,QAAQ,iBAAiB;AAC9D,QAAM,UAAU,QAAQ,YAAW,oBAAI,KAAK,GAAE,YAAY;AAG1D,MAAI,eAAe;AACnB,QAAM,YAKD,CAAC;AAEN,aAAW,QAAQ,OAAO;AACzB,UAAM,WAAW,OAAO,KAAK,OAAO;AAEpC,QAAI,UAAU;AACb,YAAM,OAAO,aAAa,OAAO,KAAK,OAAO;AAC7C,YAAM,QAAQ,oBAAoB,IAAI;AACtC,UAAI,QAAQ,aAAc,gBAAe;AACzC,gBAAU,KAAK,EAAE,MAAM,YAAY,MAAM,aAAa,MAAM,eAAe,KAAK,CAAC;AAAA,IAClF,OAAO;AACN,YAAM,MAAM,aAAa,KAAK,OAAO;A
ACrC,gBAAU,KAAK,EAAE,MAAM,YAAY,OAAO,aAAa,MAAM,eAAe,IAAI,CAAC;AAAA,IAClF;AAAA,EACD;AAEA,QAAM,gBAAgB,qBAAqB,YAAY;AACvD,MAAI,kBAAkB,MAAM;AAC3B,UAAM,IAAI;AAAA,MACT;AAAA,IAGD;AAAA,EACD;AACA,QAAM,QAAQ,kBAAkB,aAAa;AAG7C,QAAM,gBAA0B,CAAC;AACjC,QAAM,cAA2B,CAAC;AAElC,aAAW,QAAQ,WAAW;AAC7B,UAAM,OAAO,KAAK,KAAK;AACvB,QAAI;AACJ,QAAI;AACJ,QAAI;AACJ,QAAI;AACJ,QAAI;AAGJ,UAAM,OAAO,MAAM,iBAAiB,KAAK,KAAK,SAAS,aAAa;AAEpE,QAAI,KAAK,gBAAgB,MAAM;AAE9B,mBAAa,KAAK;AAClB,aAAO;AACP,eAAS,UAAU,IAAI;AACvB,kBAAY;AACZ,iBAAW;AAAA,IACZ,OAAO;AAEN,YAAM,MAAM,KAAK;AACjB,mBAAa;AACb,aAAO,KAAK,aAAa,SAAS;AAClC,eAAS,UAAU,IAAI;AACvB,kBAAY;AACZ,iBAAW;AAAA,IACZ;AAEA,UAAM,QAAQ,OAAO,IAAI;AAAA;AAAA,EAAO,KAAK,GAAG,SAAS;AAAA,EAAK,UAAU;AAAA,EAAK,KAAK;AAC1E,kBAAc,KAAK,KAAK;AAExB,gBAAY,KAAK;AAAA,MAChB;AAAA,MACA;AAAA,MACA,MAAM,KAAK,KAAK,QAAQ;AAAA,MACxB;AAAA,MACA;AAAA,MACA,GAAI,WAAW,EAAE,SAAS,IAAI,CAAC;AAAA,MAC/B,UAAU,EAAE,OAAO,GAAG,OAAO,GAAG,SAAS,GAAG,QAAQ,EAAE;AAAA;AAAA,IACvD,CAAC;AAAA,EACF;AAEA,QAAM,iBAAiB,cAAc,KAAK,MAAM;AAGhD,QAAM,WAAqB;AAAA,IAC1B,QAAQ;AAAA,MACP,SAAS;AAAA,MACT;AAAA,MACA,gBAAgB;AAAA,MAChB,gBAAgB;AAAA,MAChB,OAAO;AAAA,IACR;AAAA,IACA,WAAW;AAAA,MACV,eAAe;AAAA,MACf,cAAc;AAAA,IACf;AAAA,EACD;AAGA,QAAM,eAAe,YAAY,IAAI,CAAC,MAAM;AAC3C,UAAM,EAAE,UAAU,GAAG,GAAG,KAAK,IAAI;AACjC,WAAO;AAAA,EACR,CAAC;AACD,QAAM,aAAkB,KAAK;AAAA,IAC5B,GAAG;AAAA,IACH,QAAQ,EAAE,GAAG,SAAS,QAAQ,OAAO,aAAa;AAAA,EACnD,CAAC;AACD,QAAM,oBAAoB,GAAG,cAAc;AAAA;AAAA,EAAU,UAAU;AAAA;AAC/D,QAAM,cAAc,WAAW,iBAAiB;AAGhD,QAAM,aAAa,mBAAmB,gBAAgB,WAAW;AACjE,iBAAe,aAAa,UAAU;AAGtC,QAAM,cAAmB,KAAK,QAAQ;AACtC,QAAM,qBAAqB,GAAG,cAAc;AAAA;AAAA,EAAU,WAAW;AAAA;AACjE,QAAM,aAAa,WAAW,kBAAkB;AAGhD,MAAI,eAAe,aAAa;AAC/B,UAAM,aAAa,mBAAmB,gBAAgB,UAAU;AAChE,mBAAe,aAAa,UAAU;AAAA,EACvC;AAKA,QAAM,oBAAoB;AAAA;AAAA,EAAO,cAAc;AAC/C,QAAM,cAAc,MAAM,YAAY,mBAAmB,aAAa;AACtE,WAAS,UAAU,eAAe;AAGlC,QAAM,sBAA2B,KAAK,EAAE,QAAQ,SAAS,OAAO,CAAC;AACjE,QAAM,eAAe,MAAM,YAAY,qBAAqB,aAAa;AACzE,WAAS,UAAU,gBAAgB;AAGnC,QAAM,aAAkB,KAAK,QAAQ;AACrC,SAAO,GAAG,cAAc;AAAA;AAAA,EAAU,UAAU;AAAA;AAAA,EAAU,cAAc;AACrE;A
AMA,SAAS,mBACR,gBACA,mBAC2E;AAC3E,QAAM,QAAQ,eAAe,MAAM,IAAI;AACvC,QAAM,YAAsF,CAAC;AAE7F,QAAM,SAAS,oBAAoB;AAEnC,MAAI,IAAI;AACR,SAAO,IAAI,MAAM,QAAQ;AACxB,UAAM,OAAO,MAAM,CAAC;AAEpB,QAAI,KAAK,WAAW,MAAM,GAAG;AAC5B,YAAM,QAAQ,SAAS,IAAI;AAG3B;AACA,UAAI,IAAI,MAAM,UAAU,MAAM,CAAC,EAAG,KAAK,MAAM,IAAI;AAChD;AAAA,MACD;AAGA,UAAI,IAAI,MAAM,UAAU,YAAY,MAAM,CAAC,CAAE,GAAG;AAC/C,cAAM,YAAY,SAAS,IAAI;AAC/B;AACA,cAAM,eAAe,SAAS,IAAI;AAGlC,YAAI,gBAAgB;AACpB,eAAO,IAAI,MAAM,UAAU,CAAC,YAAY,MAAM,CAAC,CAAE,GAAG;AACnD;AACA;AAAA,QACD;AAEA,YAAI,IAAI,MAAM,OAAQ;AAEtB,kBAAU,KAAK;AAAA,UACd;AAAA,UACA,OAAO;AAAA,UACP,SAAS;AAAA,UACT,QAAQ;AAAA,QACT,CAAC;AAAA,MACF;AAAA,IACD,OAAO;AACN;AAAA,IACD;AAAA,EACD;AAEA,SAAO;AACR;AAGA,SAAS,eACR,SACA,WACO;AACP,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACxC,YAAQ,CAAC,EAAG,WAAW,UAAU,CAAC;AAAA,EACnC;AACD;AAGA,SAAS,WAAW,GAAmB;AACtC,MAAI,EAAE,WAAW,EAAG,QAAO;AAC3B,MAAI,QAAQ;AACZ,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AAClC,QAAI,EAAE,CAAC,MAAM,KAAM;AAAA,EACpB;AAEA,MAAI,EAAE,EAAE,SAAS,CAAC,MAAM,KAAM;AAC9B,SAAO;AACR;AAGA,SAAS,YAAY,MAAuB;AAC3C,SAAO,SAAS,KAAK,KAAK,KAAK,CAAC;AACjC;;;ACzOA,IAAM,eAAe,IAAI,YAAY;AAKrC,eAAsB,OAAO,SAAwC;AACpE,QAAM,EAAE,UAAU,eAAe,IAAI,aAAa,OAAO;AACzD,QAAM,QAAQ,aAAa,gBAAgB,QAAQ;AACnD,SAAO,EAAE,OAAO,SAAS;AAC1B;AA6EA,SAAS,aAAa,SAIpB;AAED,MAAI,QAAQ;AACZ,MAAI,MAAM,WAAW,MAAM,GAAG;AAC7B,UAAM,aAAa,MAAM,QAAQ,KAAK;AACtC,QAAI,eAAe,IAAI;AACtB,cAAQ,MAAM,MAAM,aAAa,CAAC,EAAE,UAAU;AAAA,IAC/C;AAAA,EACD;AAGA,MAAI,CAAC,MAAM,WAAW,KAAK,GAAG;AAC7B,UAAM,IAAI,MAAM,mDAAmD;AAAA,EACpE;AAEA,QAAM,cAAc,MAAM,QAAQ,SAAS,CAAC;AAC5C,MAAI,gBAAgB,IAAI;AACvB,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACrE;AAEA,QAAM,UAAU,MAAM,MAAM,GAAG,WAAW;AAC1C,QAAM,2BAA2B,MAAM,MAAM,cAAc,CAAC;AAC5D,QAAM,iBAAiB,yBAAyB,QAAQ,QAAQ,EAAE;AAElE,QAAM,SAAc,KAAK,OAAO;AAChC,MAAI,CAAC,UAAU,OAAO,WAAW,YAAY,EAAE,YAAY,SAAS;AACnE,UAAM,IAAI,MAAM,wCAAwC;AAAA,EACzD;AAGA,QAAM,WAAW;AACjB,QAAM,eAAe,OAAO,SAAS,OAAO,OAAO,EAAE,MAAM,GAAG,EAAE,CAAC;AACjE,QAAM,iBAAiB,eAAe,MAAM,GAAG,EAAE,CAAC;AAClD,MAAI,iBAAiB,gBAAgB;AACpC,UAAM,IAAI;AAAA,MACT,gCAAgC,SAAS,OAAO,OAAO,
wCAAwC,cAAc;AAAA,IAC9G;AAAA,EACD;AAEA,SAAO,EAAE,UAAU,gBAAgB,yBAAyB;AAC7D;AAKA,SAAS,aAAa,gBAAwB,UAAmC;AAChF,QAAM,gBAAgB,SAAS,OAAO;AACtC,QAAM,QAAQ,IAAI,OAAO,aAAa;AACtC,QAAM,QAAuB,CAAC;AAI9B,QAAM,UAAU,IAAI;AAAA,IACnB,uBAAuB,KAAK,+BAA+B,KAAK;AAAA,IAChE;AAAA,EACD;AAEA,aAAW,SAAS,eAAe,SAAS,OAAO,GAAG;AACrD,UAAM,OAAO,MAAM,CAAC,EAAG,KAAK;AAC5B,UAAM,OAAO,MAAM,CAAC,EAAG,KAAK;AAC5B,UAAM,aAAa,MAAM,CAAC;AAG1B,UAAM,QAAQ,SAAS,OAAO,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AAC/D,UAAM,YAAY,SAAS,YAAY,OAAO,aAAa;AAE3D,QAAI;AACJ,QAAI,WAAW;AACd,gBAAU,aAAa,UAAU;AAAA,IAClC,OAAO;AACN,gBAAU,aAAa,OAAO,UAAU;AAAA,IACzC;AAEA,UAAM,KAAK,EAAE,MAAM,QAAQ,CAAC;AAAA,EAC7B;AAEA,SAAO;AACR;","names":[]}