@cloudflare/sandbox 0.0.0-02ee8fe

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. package/CHANGELOG.md +311 -0
  2. package/Dockerfile +143 -0
  3. package/README.md +162 -0
  4. package/dist/chunk-BFVUNTP4.js +104 -0
  5. package/dist/chunk-BFVUNTP4.js.map +1 -0
  6. package/dist/chunk-EKSWCBCA.js +86 -0
  7. package/dist/chunk-EKSWCBCA.js.map +1 -0
  8. package/dist/chunk-JXZMAU2C.js +559 -0
  9. package/dist/chunk-JXZMAU2C.js.map +1 -0
  10. package/dist/chunk-UJ3TV4M6.js +7 -0
  11. package/dist/chunk-UJ3TV4M6.js.map +1 -0
  12. package/dist/chunk-YE265ASX.js +2484 -0
  13. package/dist/chunk-YE265ASX.js.map +1 -0
  14. package/dist/chunk-Z532A7QC.js +78 -0
  15. package/dist/chunk-Z532A7QC.js.map +1 -0
  16. package/dist/file-stream.d.ts +43 -0
  17. package/dist/file-stream.js +9 -0
  18. package/dist/file-stream.js.map +1 -0
  19. package/dist/index.d.ts +9 -0
  20. package/dist/index.js +67 -0
  21. package/dist/index.js.map +1 -0
  22. package/dist/interpreter.d.ts +33 -0
  23. package/dist/interpreter.js +8 -0
  24. package/dist/interpreter.js.map +1 -0
  25. package/dist/request-handler.d.ts +18 -0
  26. package/dist/request-handler.js +13 -0
  27. package/dist/request-handler.js.map +1 -0
  28. package/dist/sandbox-CLZWpfGc.d.ts +613 -0
  29. package/dist/sandbox.d.ts +4 -0
  30. package/dist/sandbox.js +13 -0
  31. package/dist/sandbox.js.map +1 -0
  32. package/dist/security.d.ts +31 -0
  33. package/dist/security.js +13 -0
  34. package/dist/security.js.map +1 -0
  35. package/dist/sse-parser.d.ts +28 -0
  36. package/dist/sse-parser.js +11 -0
  37. package/dist/sse-parser.js.map +1 -0
  38. package/dist/version.d.ts +8 -0
  39. package/dist/version.js +7 -0
  40. package/dist/version.js.map +1 -0
  41. package/package.json +44 -0
  42. package/src/clients/base-client.ts +280 -0
  43. package/src/clients/command-client.ts +115 -0
  44. package/src/clients/file-client.ts +295 -0
  45. package/src/clients/git-client.ts +92 -0
  46. package/src/clients/index.ts +64 -0
  47. package/src/clients/interpreter-client.ts +329 -0
  48. package/src/clients/port-client.ts +105 -0
  49. package/src/clients/process-client.ts +177 -0
  50. package/src/clients/sandbox-client.ts +41 -0
  51. package/src/clients/types.ts +84 -0
  52. package/src/clients/utility-client.ts +119 -0
  53. package/src/errors/adapter.ts +180 -0
  54. package/src/errors/classes.ts +469 -0
  55. package/src/errors/index.ts +105 -0
  56. package/src/file-stream.ts +164 -0
  57. package/src/index.ts +93 -0
  58. package/src/interpreter.ts +159 -0
  59. package/src/request-handler.ts +180 -0
  60. package/src/sandbox.ts +1045 -0
  61. package/src/security.ts +104 -0
  62. package/src/sse-parser.ts +143 -0
  63. package/src/version.ts +6 -0
  64. package/startup.sh +3 -0
  65. package/tests/base-client.test.ts +328 -0
  66. package/tests/command-client.test.ts +407 -0
  67. package/tests/file-client.test.ts +719 -0
  68. package/tests/file-stream.test.ts +306 -0
  69. package/tests/get-sandbox.test.ts +149 -0
  70. package/tests/git-client.test.ts +328 -0
  71. package/tests/port-client.test.ts +301 -0
  72. package/tests/process-client.test.ts +658 -0
  73. package/tests/request-handler.test.ts +240 -0
  74. package/tests/sandbox.test.ts +554 -0
  75. package/tests/sse-parser.test.ts +290 -0
  76. package/tests/utility-client.test.ts +332 -0
  77. package/tests/version.test.ts +16 -0
  78. package/tests/wrangler.jsonc +35 -0
  79. package/tsconfig.json +11 -0
  80. package/vitest.config.ts +31 -0
@@ -0,0 +1,104 @@
// src/file-stream.ts
/**
 * Parse Server-Sent Events (SSE) from a byte stream.
 *
 * Yields one parsed JSON object per `data: ...` line. Lines that are not
 * data lines or that carry malformed JSON are skipped so one bad event
 * cannot kill the whole stream.
 *
 * @param {ReadableStream<Uint8Array>} stream - raw SSE byte stream
 * @yields {object} parsed event objects
 */
async function* parseSSE(stream) {
  const reader = stream.getReader();
  const decoder = new TextDecoder();
  let buffer = "";

  // Parse a single SSE line; returns undefined for non-data lines or
  // malformed JSON. (JSON.parse can never itself produce undefined, so the
  // sentinel cannot collide with a real event.)
  const parseLine = (line) => {
    if (!line.startsWith("data: ")) return undefined;
    try {
      return JSON.parse(line.slice(6));
    } catch {
      return undefined; // skip invalid JSON events and continue processing
    }
  };

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        break;
      }
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      // Keep the last incomplete line in the buffer
      buffer = lines.pop() || "";
      for (const line of lines) {
        const event = parseLine(line);
        if (event !== undefined) {
          yield event;
        }
      }
    }
    // Flush any bytes still pending inside the streaming decoder, then parse
    // a final event that arrived without a trailing newline. Previously such
    // a trailing event was silently dropped; the sibling parseSSEStream in
    // src/sse-parser.ts already handles this case, so this restores
    // consistency between the two parsers.
    buffer += decoder.decode();
    const tail = parseLine(buffer);
    if (tail !== undefined) {
      yield tail;
    }
  } finally {
    reader.releaseLock();
  }
}
/**
 * Stream a file from the sandbox, decoding base64 chunks for binary files.
 *
 * Yields strings for text files and Uint8Array for binary files; returns the
 * file metadata (from the "metadata" event) when the server signals
 * completion. Throws if chunks arrive before metadata, on a server-reported
 * error, or if the stream ends without a "complete" event.
 *
 * @param {ReadableStream<Uint8Array>} stream - SSE stream from readFileStream()
 */
async function* streamFile(stream) {
  let metadata = null;

  // Convert a base64 payload into raw bytes.
  const base64ToBytes = (b64) => {
    const raw = atob(b64);
    const out = new Uint8Array(raw.length);
    for (let idx = 0; idx < raw.length; idx++) {
      out[idx] = raw.charCodeAt(idx);
    }
    return out;
  };

  for await (const event of parseSSE(stream)) {
    if (event.type === "metadata") {
      const { mimeType, size, isBinary, encoding } = event;
      metadata = { mimeType, size, isBinary, encoding };
    } else if (event.type === "chunk") {
      if (!metadata) {
        throw new Error("Received chunk before metadata");
      }
      const needsDecode = metadata.isBinary && metadata.encoding === "base64";
      yield needsDecode ? base64ToBytes(event.data) : event.data;
    } else if (event.type === "complete") {
      if (!metadata) {
        throw new Error("Stream completed without metadata");
      }
      return metadata;
    } else if (event.type === "error") {
      throw new Error(`File streaming error: ${event.error}`);
    }
  }

  throw new Error("Stream ended unexpectedly");
}
/**
 * Read an entire streamed file into memory.
 *
 * Drains streamFile() and returns { content, metadata }, where content is a
 * single Uint8Array for binary files and a concatenated string for text
 * files. Throws if the generator finishes without yielding metadata.
 *
 * @param {ReadableStream<Uint8Array>} stream - SSE stream from readFileStream()
 */
async function collectFile(stream) {
  const pieces = [];
  const gen = streamFile(stream);

  // Drain the generator; its return value carries the metadata.
  let step = await gen.next();
  while (!step.done) {
    pieces.push(step.value);
    step = await gen.next();
  }

  const metadata = step.value;
  if (!metadata) {
    throw new Error("Failed to get file metadata");
  }

  if (!metadata.isBinary) {
    // Text file: concatenate the string chunks.
    const text = pieces.filter((p) => typeof p === "string").join("");
    return { content: text, metadata };
  }

  // Binary file: splice the Uint8Array chunks into one contiguous buffer.
  const byteChunks = pieces.filter((p) => p instanceof Uint8Array);
  const total = byteChunks.reduce((sum, c) => sum + c.length, 0);
  const merged = new Uint8Array(total);
  let cursor = 0;
  for (const c of byteChunks) {
    merged.set(c, cursor);
    cursor += c.length;
  }
  return { content: merged, metadata };
}
99
+
100
+ export {
101
+ streamFile,
102
+ collectFile
103
+ };
104
+ //# sourceMappingURL=chunk-BFVUNTP4.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/file-stream.ts"],"sourcesContent":["import type { FileChunk, FileMetadata, FileStreamEvent } from '@repo/shared';\n\n/**\n * Parse SSE (Server-Sent Events) lines from a stream\n */\nasync function* parseSSE(stream: ReadableStream<Uint8Array>): AsyncGenerator<FileStreamEvent> {\n const reader = stream.getReader();\n const decoder = new TextDecoder();\n let buffer = '';\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n if (done) {\n break;\n }\n\n buffer += decoder.decode(value, { stream: true });\n const lines = buffer.split('\\n');\n\n // Keep the last incomplete line in the buffer\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n if (line.startsWith('data: ')) {\n const data = line.slice(6); // Remove 'data: ' prefix\n try {\n const event = JSON.parse(data) as FileStreamEvent;\n yield event;\n } catch {\n // Skip invalid JSON events and continue processing\n }\n }\n }\n }\n } finally {\n reader.releaseLock();\n }\n}\n\n/**\n * Stream a file from the sandbox with automatic base64 decoding for binary files\n *\n * @param stream - The ReadableStream from readFileStream()\n * @returns AsyncGenerator that yields FileChunk (string for text, Uint8Array for binary)\n *\n * @example\n * ```ts\n * const stream = await sandbox.readFileStream('/path/to/file.png');\n * for await (const chunk of streamFile(stream)) {\n * if (chunk instanceof Uint8Array) {\n * // Binary chunk\n * console.log('Binary chunk:', chunk.length, 'bytes');\n * } else {\n * // Text chunk\n * console.log('Text chunk:', chunk);\n * }\n * }\n * ```\n */\nexport async function* streamFile(stream: ReadableStream<Uint8Array>): AsyncGenerator<FileChunk, FileMetadata> {\n let metadata: FileMetadata | null = null;\n\n for await (const event of parseSSE(stream)) {\n switch (event.type) {\n case 'metadata':\n metadata = {\n mimeType: event.mimeType,\n size: event.size,\n isBinary: event.isBinary,\n encoding: event.encoding,\n };\n 
break;\n\n case 'chunk':\n if (!metadata) {\n throw new Error('Received chunk before metadata');\n }\n\n if (metadata.isBinary && metadata.encoding === 'base64') {\n // Decode base64 to Uint8Array for binary files\n const binaryString = atob(event.data);\n const bytes = new Uint8Array(binaryString.length);\n for (let i = 0; i < binaryString.length; i++) {\n bytes[i] = binaryString.charCodeAt(i);\n }\n yield bytes;\n } else {\n // Text files - yield as-is\n yield event.data;\n }\n break;\n\n case 'complete':\n if (!metadata) {\n throw new Error('Stream completed without metadata');\n }\n return metadata;\n\n case 'error':\n throw new Error(`File streaming error: ${event.error}`);\n }\n }\n\n throw new Error('Stream ended unexpectedly');\n}\n\n/**\n * Collect an entire file into memory from a stream\n *\n * @param stream - The ReadableStream from readFileStream()\n * @returns Object containing the file content and metadata\n *\n * @example\n * ```ts\n * const stream = await sandbox.readFileStream('/path/to/file.txt');\n * const { content, metadata } = await collectFile(stream);\n * console.log('Content:', content);\n * console.log('MIME type:', metadata.mimeType);\n * ```\n */\nexport async function collectFile(stream: ReadableStream<Uint8Array>): Promise<{\n content: string | Uint8Array;\n metadata: FileMetadata;\n}> {\n const chunks: Array<string | Uint8Array> = [];\n\n // Iterate through the generator and get the return value (metadata)\n const generator = streamFile(stream);\n let result = await generator.next();\n\n while (!result.done) {\n chunks.push(result.value);\n result = await generator.next();\n }\n\n const metadata = result.value;\n\n if (!metadata) {\n throw new Error('Failed to get file metadata');\n }\n\n // Combine chunks based on type\n if (metadata.isBinary) {\n // Binary file - combine Uint8Arrays\n const totalLength = chunks.reduce((sum, chunk) =>\n sum + (chunk instanceof Uint8Array ? 
chunk.length : 0), 0\n );\n const combined = new Uint8Array(totalLength);\n let offset = 0;\n for (const chunk of chunks) {\n if (chunk instanceof Uint8Array) {\n combined.set(chunk, offset);\n offset += chunk.length;\n }\n }\n return { content: combined, metadata };\n } else {\n // Text file - combine strings\n const combined = chunks.filter(c => typeof c === 'string').join('');\n return { content: combined, metadata };\n }\n}\n"],"mappings":";AAKA,gBAAgB,SAAS,QAAqE;AAC5F,QAAM,SAAS,OAAO,UAAU;AAChC,QAAM,UAAU,IAAI,YAAY;AAChC,MAAI,SAAS;AAEb,MAAI;AACF,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAE1C,UAAI,MAAM;AACR;AAAA,MACF;AAEA,gBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAChD,YAAM,QAAQ,OAAO,MAAM,IAAI;AAG/B,eAAS,MAAM,IAAI,KAAK;AAExB,iBAAW,QAAQ,OAAO;AACxB,YAAI,KAAK,WAAW,QAAQ,GAAG;AAC7B,gBAAM,OAAO,KAAK,MAAM,CAAC;AACzB,cAAI;AACF,kBAAM,QAAQ,KAAK,MAAM,IAAI;AAC7B,kBAAM;AAAA,UACR,QAAQ;AAAA,UAER;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF,UAAE;AACA,WAAO,YAAY;AAAA,EACrB;AACF;AAsBA,gBAAuB,WAAW,QAA6E;AAC7G,MAAI,WAAgC;AAEpC,mBAAiB,SAAS,SAAS,MAAM,GAAG;AAC1C,YAAQ,MAAM,MAAM;AAAA,MAClB,KAAK;AACH,mBAAW;AAAA,UACT,UAAU,MAAM;AAAA,UAChB,MAAM,MAAM;AAAA,UACZ,UAAU,MAAM;AAAA,UAChB,UAAU,MAAM;AAAA,QAClB;AACA;AAAA,MAEF,KAAK;AACH,YAAI,CAAC,UAAU;AACb,gBAAM,IAAI,MAAM,gCAAgC;AAAA,QAClD;AAEA,YAAI,SAAS,YAAY,SAAS,aAAa,UAAU;AAEvD,gBAAM,eAAe,KAAK,MAAM,IAAI;AACpC,gBAAM,QAAQ,IAAI,WAAW,aAAa,MAAM;AAChD,mBAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK;AAC5C,kBAAM,CAAC,IAAI,aAAa,WAAW,CAAC;AAAA,UACtC;AACA,gBAAM;AAAA,QACR,OAAO;AAEL,gBAAM,MAAM;AAAA,QACd;AACA;AAAA,MAEF,KAAK;AACH,YAAI,CAAC,UAAU;AACb,gBAAM,IAAI,MAAM,mCAAmC;AAAA,QACrD;AACA,eAAO;AAAA,MAET,KAAK;AACH,cAAM,IAAI,MAAM,yBAAyB,MAAM,KAAK,EAAE;AAAA,IAC1D;AAAA,EACF;AAEA,QAAM,IAAI,MAAM,2BAA2B;AAC7C;AAgBA,eAAsB,YAAY,QAG/B;AACD,QAAM,SAAqC,CAAC;AAG5C,QAAM,YAAY,WAAW,MAAM;AACnC,MAAI,SAAS,MAAM,UAAU,KAAK;AAElC,SAAO,CAAC,OAAO,MAAM;AACnB,WAAO,KAAK,OAAO,KAAK;AACxB,aAAS,MAAM,UAAU,KAAK;AAAA,EAChC;AAEA,QAAM,WAAW,OAAO;AAExB,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC/C;AAGA,MA
AI,SAAS,UAAU;AAErB,UAAM,cAAc,OAAO;AAAA,MAAO,CAAC,KAAK,UACtC,OAAO,iBAAiB,aAAa,MAAM,SAAS;AAAA,MAAI;AAAA,IAC1D;AACA,UAAM,WAAW,IAAI,WAAW,WAAW;AAC3C,QAAI,SAAS;AACb,eAAW,SAAS,QAAQ;AAC1B,UAAI,iBAAiB,YAAY;AAC/B,iBAAS,IAAI,OAAO,MAAM;AAC1B,kBAAU,MAAM;AAAA,MAClB;AAAA,IACF;AACA,WAAO,EAAE,SAAS,UAAU,SAAS;AAAA,EACvC,OAAO;AAEL,UAAM,WAAW,OAAO,OAAO,OAAK,OAAO,MAAM,QAAQ,EAAE,KAAK,EAAE;AAClE,WAAO,EAAE,SAAS,UAAU,SAAS;AAAA,EACvC;AACF;","names":[]}
@@ -0,0 +1,86 @@
// src/sse-parser.ts
/**
 * Parse a ReadableStream of SSE bytes into a typed async iterable.
 *
 * Yields one parsed JSON object per `data: ...` line; `[DONE]` markers,
 * blank payloads, and malformed JSON are skipped. A final data line that
 * arrives without a trailing newline is still parsed. Throws if the
 * optional signal is aborted.
 *
 * @param {ReadableStream<Uint8Array>} stream - SSE byte stream
 * @param {AbortSignal} [signal] - optional cancellation signal
 */
async function* parseSSEStream(stream, signal) {
  const reader = stream.getReader();
  const decoder = new TextDecoder();
  let pending = "";

  // Extract the JSON payload of one SSE line, or undefined when the line is
  // not a data line, is a [DONE] marker, is blank, or holds malformed JSON.
  // (JSON.parse never produces undefined, so the sentinel is unambiguous.)
  const parseLine = (line) => {
    if (!line.startsWith("data: ")) return undefined;
    const payload = line.substring(6);
    if (payload === "[DONE]" || payload.trim() === "") return undefined;
    try {
      return JSON.parse(payload);
    } catch {
      return undefined; // skip malformed events, keep the stream going
    }
  };

  try {
    for (;;) {
      // Honor cancellation between reads.
      if (signal?.aborted) {
        throw new Error("Operation was aborted");
      }
      const { done, value } = await reader.read();
      if (done) break;

      pending += decoder.decode(value, { stream: true });
      const segments = pending.split("\n");
      // The last segment may be an incomplete line; carry it over.
      pending = segments.pop() || "";

      for (const segment of segments) {
        const event = parseLine(segment);
        if (event !== undefined) yield event;
      }
    }

    // The stream may end without a trailing newline; parse the leftover.
    const tail = parseLine(pending);
    if (tail !== undefined) yield tail;
  } finally {
    reader.releaseLock();
  }
}
/**
 * Convert a fetch Response carrying an SSE stream into a typed async
 * iterable by delegating to parseSSEStream.
 *
 * @param {Response} response - response whose body is an SSE stream
 * @param {AbortSignal} [signal] - optional cancellation signal
 * @throws {Error} when the response is not ok or has no body
 */
async function* responseToAsyncIterable(response, signal) {
  // Guard clauses: surface HTTP failures and empty bodies early.
  if (!response.ok) {
    throw new Error(`Response not ok: ${response.status} ${response.statusText}`);
  }
  const body = response.body;
  if (body === null || body === undefined) {
    throw new Error("No response body");
  }
  yield* parseSSEStream(body, signal);
}
/**
 * Create an SSE-formatted ReadableStream from an AsyncIterable.
 *
 * Each event is serialized (JSON.stringify by default) as a `data:` frame;
 * a final `data: [DONE]` marker is appended on successful completion. If the
 * optional signal aborts or the iterable throws, the stream is errored.
 *
 * @param {AsyncIterable} events - events to forward
 * @param {{signal?: AbortSignal, serialize?: Function}} [options]
 * @returns {ReadableStream<Uint8Array>} SSE byte stream
 */
function asyncIterableToSSEStream(events, options) {
  const encoder = new TextEncoder();
  const serialize = options?.serialize || JSON.stringify;
  return new ReadableStream({
    async start(controller) {
      try {
        for await (const event of events) {
          if (options?.signal?.aborted) {
            // Error the stream and stop. Do NOT fall through to close():
            // close() on an errored controller throws "Invalid state"
            // (the old finally-based close relied on the stream machinery
            // swallowing that rejection).
            controller.error(new Error("Operation was aborted"));
            return;
          }
          const data = serialize(event);
          const sseEvent = `data: ${data}\n\n`;
          controller.enqueue(encoder.encode(sseEvent));
        }
        // Send completion marker, then close only on the success path.
        controller.enqueue(encoder.encode("data: [DONE]\n\n"));
        controller.close();
      } catch (error) {
        controller.error(error);
      }
    },
    cancel() {
      // Consumer cancelled the stream; nothing to clean up.
    }
  });
}
80
+
81
+ export {
82
+ parseSSEStream,
83
+ responseToAsyncIterable,
84
+ asyncIterableToSSEStream
85
+ };
86
+ //# sourceMappingURL=chunk-EKSWCBCA.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/sse-parser.ts"],"sourcesContent":["/**\n * Server-Sent Events (SSE) parser for streaming responses\n * Converts ReadableStream<Uint8Array> to typed AsyncIterable<T>\n */\n\n/**\n * Parse a ReadableStream of SSE events into typed AsyncIterable\n * @param stream - The ReadableStream from fetch response\n * @param signal - Optional AbortSignal for cancellation\n */\nexport async function* parseSSEStream<T>(\n stream: ReadableStream<Uint8Array>,\n signal?: AbortSignal\n): AsyncIterable<T> {\n const reader = stream.getReader();\n const decoder = new TextDecoder();\n let buffer = '';\n\n try {\n while (true) {\n // Check for cancellation\n if (signal?.aborted) {\n throw new Error('Operation was aborted');\n }\n\n const { done, value } = await reader.read();\n if (done) break;\n\n // Decode chunk and add to buffer\n buffer += decoder.decode(value, { stream: true });\n\n // Process complete SSE events in buffer\n const lines = buffer.split('\\n');\n\n // Keep the last incomplete line in buffer\n buffer = lines.pop() || '';\n\n for (const line of lines) {\n // Skip empty lines\n if (line.trim() === '') continue;\n\n // Process SSE data lines\n if (line.startsWith('data: ')) {\n const data = line.substring(6);\n\n // Skip [DONE] markers or empty data\n if (data === '[DONE]' || data.trim() === '') continue;\n\n try {\n const event = JSON.parse(data) as T;\n yield event;\n } catch {\n // Skip invalid JSON events and continue processing\n }\n }\n // Handle other SSE fields if needed (event:, id:, retry:)\n // For now, we only care about data: lines\n }\n }\n\n // Process any remaining data in buffer\n if (buffer.trim() && buffer.startsWith('data: ')) {\n const data = buffer.substring(6);\n if (data !== '[DONE]' && data.trim()) {\n try {\n const event = JSON.parse(data) as T;\n yield event;\n } catch {\n // Skip invalid JSON in final event\n }\n }\n }\n } finally {\n // Clean up resources\n reader.releaseLock();\n }\n}\n\n\n/**\n * Helper to 
convert a Response with SSE stream directly to AsyncIterable\n * @param response - Response object with SSE stream\n * @param signal - Optional AbortSignal for cancellation\n */\nexport async function* responseToAsyncIterable<T>(\n response: Response,\n signal?: AbortSignal\n): AsyncIterable<T> {\n if (!response.ok) {\n throw new Error(`Response not ok: ${response.status} ${response.statusText}`);\n }\n\n if (!response.body) {\n throw new Error('No response body');\n }\n\n yield* parseSSEStream<T>(response.body, signal);\n}\n\n/**\n * Create an SSE-formatted ReadableStream from an AsyncIterable\n * (Useful for Worker endpoints that need to forward AsyncIterable as SSE)\n * @param events - AsyncIterable of events\n * @param options - Stream options\n */\nexport function asyncIterableToSSEStream<T>(\n events: AsyncIterable<T>,\n options?: {\n signal?: AbortSignal;\n serialize?: (event: T) => string;\n }\n): ReadableStream<Uint8Array> {\n const encoder = new TextEncoder();\n const serialize = options?.serialize || JSON.stringify;\n\n return new ReadableStream({\n async start(controller) {\n try {\n for await (const event of events) {\n if (options?.signal?.aborted) {\n controller.error(new Error('Operation was aborted'));\n break;\n }\n\n const data = serialize(event);\n const sseEvent = `data: ${data}\\n\\n`;\n controller.enqueue(encoder.encode(sseEvent));\n }\n\n // Send completion marker\n controller.enqueue(encoder.encode('data: [DONE]\\n\\n'));\n } catch (error) {\n controller.error(error);\n } finally {\n controller.close();\n }\n },\n\n cancel() {\n // Handle stream cancellation\n }\n 
});\n}"],"mappings":";AAUA,gBAAuB,eACrB,QACA,QACkB;AAClB,QAAM,SAAS,OAAO,UAAU;AAChC,QAAM,UAAU,IAAI,YAAY;AAChC,MAAI,SAAS;AAEb,MAAI;AACF,WAAO,MAAM;AAEX,UAAI,QAAQ,SAAS;AACnB,cAAM,IAAI,MAAM,uBAAuB;AAAA,MACzC;AAEA,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI,KAAM;AAGV,gBAAU,QAAQ,OAAO,OAAO,EAAE,QAAQ,KAAK,CAAC;AAGhD,YAAM,QAAQ,OAAO,MAAM,IAAI;AAG/B,eAAS,MAAM,IAAI,KAAK;AAExB,iBAAW,QAAQ,OAAO;AAExB,YAAI,KAAK,KAAK,MAAM,GAAI;AAGxB,YAAI,KAAK,WAAW,QAAQ,GAAG;AAC7B,gBAAM,OAAO,KAAK,UAAU,CAAC;AAG7B,cAAI,SAAS,YAAY,KAAK,KAAK,MAAM,GAAI;AAE7C,cAAI;AACF,kBAAM,QAAQ,KAAK,MAAM,IAAI;AAC7B,kBAAM;AAAA,UACR,QAAQ;AAAA,UAER;AAAA,QACF;AAAA,MAGF;AAAA,IACF;AAGA,QAAI,OAAO,KAAK,KAAK,OAAO,WAAW,QAAQ,GAAG;AAChD,YAAM,OAAO,OAAO,UAAU,CAAC;AAC/B,UAAI,SAAS,YAAY,KAAK,KAAK,GAAG;AACpC,YAAI;AACF,gBAAM,QAAQ,KAAK,MAAM,IAAI;AAC7B,gBAAM;AAAA,QACR,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAAA,EACF,UAAE;AAEA,WAAO,YAAY;AAAA,EACrB;AACF;AAQA,gBAAuB,wBACrB,UACA,QACkB;AAClB,MAAI,CAAC,SAAS,IAAI;AAChB,UAAM,IAAI,MAAM,oBAAoB,SAAS,MAAM,IAAI,SAAS,UAAU,EAAE;AAAA,EAC9E;AAEA,MAAI,CAAC,SAAS,MAAM;AAClB,UAAM,IAAI,MAAM,kBAAkB;AAAA,EACpC;AAEA,SAAO,eAAkB,SAAS,MAAM,MAAM;AAChD;AAQO,SAAS,yBACd,QACA,SAI4B;AAC5B,QAAM,UAAU,IAAI,YAAY;AAChC,QAAM,YAAY,SAAS,aAAa,KAAK;AAE7C,SAAO,IAAI,eAAe;AAAA,IACxB,MAAM,MAAM,YAAY;AACtB,UAAI;AACF,yBAAiB,SAAS,QAAQ;AAChC,cAAI,SAAS,QAAQ,SAAS;AAC5B,uBAAW,MAAM,IAAI,MAAM,uBAAuB,CAAC;AACnD;AAAA,UACF;AAEA,gBAAM,OAAO,UAAU,KAAK;AAC5B,gBAAM,WAAW,SAAS,IAAI;AAAA;AAAA;AAC9B,qBAAW,QAAQ,QAAQ,OAAO,QAAQ,CAAC;AAAA,QAC7C;AAGA,mBAAW,QAAQ,QAAQ,OAAO,kBAAkB,CAAC;AAAA,MACvD,SAAS,OAAO;AACd,mBAAW,MAAM,KAAK;AAAA,MACxB,UAAE;AACA,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF;AAAA,IAEA,SAAS;AAAA,IAET;AAAA,EACF,CAAC;AACH;","names":[]}