@joinezco/codeblock 0.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. package/LICENSE +661 -0
  2. package/dist/assets/clike-C8IJ2oj_.js +1 -0
  3. package/dist/assets/cmake-BQqOBYOt.js +1 -0
  4. package/dist/assets/dockerfile-C_y-rIpk.js +1 -0
  5. package/dist/assets/fs.worker-BwEqZcql.ts +109 -0
  6. package/dist/assets/go-CTD25R5P.js +1 -0
  7. package/dist/assets/haskell-BWDZoCOh.js +1 -0
  8. package/dist/assets/index-9HdhmM_Y.js +1 -0
  9. package/dist/assets/index-C-QhPFHP.js +3 -0
  10. package/dist/assets/index-C3BnE2cG.js +222 -0
  11. package/dist/assets/index-CGx5MZO7.js +6 -0
  12. package/dist/assets/index-CIuq3uTk.js +1 -0
  13. package/dist/assets/index-CXFONXS8.js +1 -0
  14. package/dist/assets/index-D5Z27j1C.js +1 -0
  15. package/dist/assets/index-DWOBdRjn.js +1 -0
  16. package/dist/assets/index-Dvu-FFzd.js +1 -0
  17. package/dist/assets/index-Dx_VuNNd.js +1 -0
  18. package/dist/assets/index-I0dlv-r3.js +1 -0
  19. package/dist/assets/index-MGle_v2x.js +1 -0
  20. package/dist/assets/index-N-GE7HTU.js +1 -0
  21. package/dist/assets/index-aEsF5o-7.js +2 -0
  22. package/dist/assets/index-as7ELo0J.js +1 -0
  23. package/dist/assets/index-gUUzXNuP.js +1 -0
  24. package/dist/assets/index-pGm0qkrJ.js +13 -0
  25. package/dist/assets/javascript.worker-C1zGArKk.js +527 -0
  26. package/dist/assets/lua-BgMRiT3U.js +1 -0
  27. package/dist/assets/perl-CdXCOZ3F.js +1 -0
  28. package/dist/assets/process-Dw9K5EnD.js +1357 -0
  29. package/dist/assets/properties-C78fOPTZ.js +1 -0
  30. package/dist/assets/ruby-B2Rjki9n.js +1 -0
  31. package/dist/assets/shell-CjFT_Tl9.js +1 -0
  32. package/dist/assets/swift-BzpIVaGY.js +1 -0
  33. package/dist/assets/toml-BXUEaScT.js +1 -0
  34. package/dist/assets/vb-CmGdzxic.js +1 -0
  35. package/dist/e2e/example.spec.d.ts +5 -0
  36. package/dist/e2e/example.spec.js +44 -0
  37. package/dist/editor.d.ts +53 -0
  38. package/dist/editor.js +248 -0
  39. package/dist/index.d.ts +6 -0
  40. package/dist/index.html +16 -0
  41. package/dist/index.js +6 -0
  42. package/dist/lsps/index.d.ts +96 -0
  43. package/dist/lsps/index.js +198 -0
  44. package/dist/lsps/typescript.d.ts +55 -0
  45. package/dist/lsps/typescript.js +48 -0
  46. package/dist/panels/toolbar.d.ts +20 -0
  47. package/dist/panels/toolbar.js +453 -0
  48. package/dist/panels/toolbar.test.d.ts +1 -0
  49. package/dist/panels/toolbar.test.js +146 -0
  50. package/dist/resources/config.json +13 -0
  51. package/dist/rpc/serde.d.ts +11 -0
  52. package/dist/rpc/serde.js +49 -0
  53. package/dist/rpc/transport.d.ts +11 -0
  54. package/dist/rpc/transport.js +38 -0
  55. package/dist/snapshot.bin +0 -0
  56. package/dist/styles.css +7 -0
  57. package/dist/themes/index.d.ts +1 -0
  58. package/dist/themes/index.js +169 -0
  59. package/dist/themes/util.d.ts +24 -0
  60. package/dist/themes/util.js +63 -0
  61. package/dist/themes/vscode.d.ts +6 -0
  62. package/dist/themes/vscode.js +187 -0
  63. package/dist/types.d.ts +64 -0
  64. package/dist/types.js +1 -0
  65. package/dist/utils/fs.d.ts +29 -0
  66. package/dist/utils/fs.js +310 -0
  67. package/dist/utils/indent.d.ts +1 -0
  68. package/dist/utils/indent.js +38 -0
  69. package/dist/utils/index.d.ts +2 -0
  70. package/dist/utils/index.js +2 -0
  71. package/dist/utils/lsp.d.ts +26 -0
  72. package/dist/utils/lsp.js +74 -0
  73. package/dist/utils/search.d.ts +30 -0
  74. package/dist/utils/search.js +68 -0
  75. package/dist/utils/snapshot.d.ts +60 -0
  76. package/dist/utils/snapshot.js +299 -0
  77. package/dist/workers/fs.worker.d.ts +11 -0
  78. package/dist/workers/fs.worker.js +93 -0
  79. package/dist/workers/javascript.worker.d.ts +1 -0
  80. package/dist/workers/javascript.worker.js +20 -0
  81. package/package.json +95 -0
@@ -0,0 +1,299 @@
1
+ import fsPromises from 'fs/promises';
2
+ import multimatch from 'multimatch';
3
+ import { CborEncoder } from '@jsonjoy.com/json-pack/lib/cbor/CborEncoder';
4
+ import { CborDecoder } from '@jsonjoy.com/json-pack/lib/cbor/CborDecoder';
5
+ import { Writer } from '@jsonjoy.com/util/lib/buffers/Writer';
6
// Shared CBOR codec instances; the encoder reuses a single 32 KiB writer buffer.
export const writer = new Writer(32 * 1024);
export const encoder = new CborEncoder(writer);
export const decoder = new CborDecoder();
// Cross-platform compression utilities
// True when running under Node.js (picks zlib vs. Web Streams below).
const isNode = typeof process !== 'undefined' && process.versions?.node;
11
/**
 * Compress data using gzip compression.
 * Uses Node.js zlib in a Node.js environment and the browser
 * CompressionStream API otherwise. On any failure, or when no
 * compression facility exists, the input is returned uncompressed —
 * callers must treat the result as "possibly compressed".
 *
 * @param {Uint8Array} data - Raw bytes to compress.
 * @returns {Promise<Uint8Array>} Gzip bytes, or `data` unchanged on fallback.
 */
export const compress = async (data) => {
    // Detect Node.js locally so the function is self-contained.
    const runningInNode = typeof process !== 'undefined' && !!process.versions?.node;
    if (runningInNode) {
        // Node.js environment
        try {
            const { gzip } = await import('zlib');
            const { promisify } = await import('util');
            const gzipAsync = promisify(gzip);
            return new Uint8Array(await gzipAsync(data));
        }
        catch (error) {
            console.warn('Node.js compression failed, returning uncompressed data:', error);
            return data;
        }
    }
    // Browser environment
    if (typeof CompressionStream === 'undefined') {
        // Fallback: return uncompressed data if CompressionStream is not available
        console.warn('CompressionStream not available, returning uncompressed data');
        return data;
    }
    try {
        const stream = new CompressionStream('gzip');
        // BUGFIX: renamed from `writer` (shadowed the module-level export) and
        // keep the write/close promise instead of dropping it — a dropped
        // promise surfaces as an unhandled rejection, and writing without
        // draining the readable side can deadlock on large inputs.
        const streamWriter = stream.writable.getWriter();
        const reader = stream.readable.getReader();
        const writeDone = streamWriter.write(new Uint8Array(data)).then(() => streamWriter.close());
        const chunks = [];
        for (;;) {
            const { value, done } = await reader.read();
            if (value)
                chunks.push(value);
            if (done)
                break;
        }
        await writeDone;
        // Concatenate all chunks
        const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
        const result = new Uint8Array(totalLength);
        let offset = 0;
        for (const chunk of chunks) {
            result.set(chunk, offset);
            offset += chunk.length;
        }
        return result;
    }
    catch (error) {
        console.warn('Browser compression failed, returning uncompressed data:', error);
        return data;
    }
};
66
/**
 * Check if data appears to be gzip compressed by looking at the magic
 * bytes: every gzip stream begins with 0x1f 0x8b.
 *
 * @param {Uint8Array} data - Bytes to inspect.
 * @returns {boolean} True when the buffer carries the gzip signature.
 */
const isGzipCompressed = (data) => {
    if (data.length < 2)
        return false;
    return data[0] === 0x1f && data[1] === 0x8b;
};
76
/**
 * Decompress gzip-compressed data.
 * Uses Node.js zlib in Node.js environment, browser DecompressionStream in browser.
 * Data that does not carry the gzip magic number is returned unchanged,
 * so callers may pass either compressed or raw buffers.
 *
 * @param {Uint8Array} data - Possibly gzip-compressed bytes.
 * @returns {Promise<Uint8Array>} The decompressed (or passed-through) bytes.
 */
export const decompress = async (data) => {
    console.debug('decompressData: Starting decompression, data length:', data.length);
    // Gzip streams always begin with the two magic bytes 0x1f 0x8b.
    // Checked inline so this function has no sibling dependencies.
    if (!(data.length >= 2 && data[0] === 0x1f && data[1] === 0x8b)) {
        console.debug('decompressData: Data does not appear to be gzip compressed, returning as-is');
        return data;
    }
    const runningInNode = typeof process !== 'undefined' && !!process.versions?.node;
    if (runningInNode) {
        // Node.js environment; corrupt input surfaces as a rejection,
        // matching the original behavior (no try/catch on this path).
        const { gunzip } = await import('zlib');
        const { promisify } = await import('util');
        const gunzipAsync = promisify(gunzip);
        const result = new Uint8Array(await gunzipAsync(data));
        console.debug('decompressData: Node.js decompression successful, result length:', result.length);
        return result;
    }
    // Browser environment
    if (typeof DecompressionStream === 'undefined') {
        // Fallback: assume data is uncompressed if DecompressionStream is not available
        console.warn('decompressData: DecompressionStream not available, assuming uncompressed data');
        return data;
    }
    try {
        const stream = new DecompressionStream('gzip');
        const streamWriter = stream.writable.getWriter();
        const reader = stream.readable.getReader();
        // BUGFIX: the write/close promises were previously dropped, which
        // could surface as unhandled rejections and could deadlock on inputs
        // larger than the stream's internal buffer. Start the write, drain
        // the readable side concurrently, then settle the write promise.
        const writeDone = streamWriter.write(new Uint8Array(data)).then(() => streamWriter.close());
        const chunks = [];
        for (;;) {
            const { value, done } = await reader.read();
            if (value)
                chunks.push(value);
            if (done)
                break;
        }
        await writeDone;
        // Concatenate all chunks
        const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
        const result = new Uint8Array(totalLength);
        let offset = 0;
        for (const chunk of chunks) {
            result.set(chunk, offset);
            offset += chunk.length;
        }
        console.debug('decompressData: Browser decompression successful');
        return result;
    }
    catch (error) {
        console.warn('decompressData: Browser decompression failed, returning original data:', error);
        return data;
    }
};
139
/**
 * Build a path predicate from optional `include` / `exclude` glob lists
 * (matched with multimatch). With neither list, everything passes.
 * Includes use partial matching so parent directories are kept.
 *
 * @returns {(path: string) => boolean}
 */
export const buildFilter = ({ include, exclude }) => (path) => {
    if (!include && !exclude)
        return true;
    const isIncluded = include ? multimatch(path, include, { partial: true }).length > 0 : true;
    const isExcluded = exclude ? multimatch(path, exclude).length > 0 : false;
    return isIncluded && !isExcluded;
};
148
/**
 * Read a .gitignore-style file and return its ignore patterns.
 *
 * BUGFIX: the default for `fs` was `typeof fsPromises` (the string
 * 'object'), so calling without an fs argument always crashed; and
 * `parse` was referenced but never imported. Pattern extraction is now
 * done inline: blank lines and `#` comment lines are dropped.
 *
 * @param {string} path - Path of the ignore file to read.
 * @param {{readFile: Function}} [fs=fsPromises] - fs/promises-compatible API.
 * @returns {Promise<string[]>} The ignore patterns, in file order.
 */
export const getGitignored = async (path, fs = fsPromises) => {
    const content = await fs.readFile(path, 'utf-8');
    return content
        .split(/\r?\n/)
        .map((line) => line.trim())
        .filter((line) => line.length > 0 && !line.startsWith('#'));
};
154
// Fallback configuration for takeSnapshot: snapshot the current working
// directory (or './' outside Node) and include every path.
export const snapshotDefaults = {
    root: typeof process === 'undefined' ? './' : process.cwd(),
    filter: async () => true,
};
158
/**
 * Takes a snapshot of the file system based on the provided properties.
 * The snapshot is encoded with CBOR and compressed with gzip.
 *
 * @param props - The properties to configure the snapshot.
 */
export const takeSnapshot = async (props = {}) => {
    const { root, filter } = { ...snapshotDefaults, ...props };
    console.debug('Taking snapshot of filesystem', { root, filter });
    const tree = await Snapshot.take({ fs: fsPromises, path: root, filter });
    const encoded = encoder.encode(tree);
    return compress(encoded);
};
173
export var Snapshot;
(function (Snapshot) {
    /**
     * Recursively capture the tree rooted at `path` using the supplied
     * async fs. Nodes are encoded as tuples: `[0, meta, entries]` for
     * folders, `[1, stats, bytes]` for files, `[2, {target}]` for
     * symlinks. Filtered-out or unsupported entries yield `null`.
     */
    Snapshot.take = async ({ fs, path, filter, separator = '/' }) => {
        if (filter && !(await filter(path)))
            return null;
        // TODO: think about handling snapshotting symlinks better
        // for now we just resolve and include
        const stats = await fs.stat(path);
        if (stats.isDirectory()) {
            const prefix = path.endsWith(separator) ? path : `${path}${separator}`;
            const entries = {};
            for (const name of await fs.readdir(path)) {
                const child = await Snapshot.take({ fs, path: `${prefix}${name}`, separator, filter });
                if (child)
                    entries[name] = child;
            }
            return [0 /* Folder */, {}, entries];
        }
        if (stats.isFile()) {
            const buf = await fs.readFile(path);
            // View the file bytes without copying the underlying buffer.
            return [1 /* File */, stats, new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength)];
        }
        if (stats.isSymbolicLink()) {
            // TODO: branch never actually reached as `fs.stat` doesn't return symlinks
            const target = await fs.readlink(path, { encoding: 'utf8' });
            return [2 /* Symlink */, { target }];
        }
        return null;
    };
    /**
     * Decompress + CBOR-decode a snapshot buffer and materialize it into
     * the given filesystem at `path`. Decompression failures fall back to
     * treating the buffer as already-uncompressed; decode/mount failures
     * are rethrown.
     */
    Snapshot.mount = async (buffer, { fs, path = '/', separator = '/' }) => {
        try {
            console.debug('Snapshot.mount: Starting mount process');
            console.debug('Snapshot.mount: Buffer type:', typeof buffer);
            console.debug('Snapshot.mount: Buffer length:', buffer?.byteLength || buffer?.length || 'unknown');
            console.debug('Snapshot.mount: Buffer constructor:', buffer?.constructor?.name);
            // Convert buffer to Uint8Array if needed
            const uint8Buffer = buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer);
            console.debug('Snapshot.mount: Converted to Uint8Array, length:', uint8Buffer.length);
            console.debug('Snapshot.mount: Data appears compressed:', isGzipCompressed(uint8Buffer));
            let decompressed;
            try {
                decompressed = await decompress(uint8Buffer);
                console.debug('Snapshot.mount: Successfully processed buffer, decompressed length:', decompressed.length);
            }
            catch (decompressError) {
                console.warn('Snapshot.mount: Decompression failed, using original buffer:', decompressError);
                // Fallback: assume the buffer is already uncompressed (for backward compatibility)
                decompressed = uint8Buffer;
            }
            console.debug('Snapshot.mount: Attempting to decode CBOR data...');
            const snapshot = await decoder.decode(decompressed);
            console.debug('Snapshot.mount: Successfully decoded snapshot, type:', typeof snapshot);
            console.debug('Snapshot.mount: Snapshot structure:', Array.isArray(snapshot) ? `Array[${snapshot.length}]` : snapshot);
            if (!snapshot) {
                console.warn('Snapshot.mount: Decoded snapshot is null or undefined');
                return;
            }
            console.debug('Snapshot.mount: Starting fromSnapshot process...');
            await fromSnapshot(snapshot, { fs, path, separator });
            console.debug('Snapshot.mount: Successfully mounted snapshot');
        }
        catch (error) {
            console.error('Snapshot.mount: Failed to mount snapshot:', error);
            throw error;
        }
    };
    /**
     * Load and mount a snapshot directly from a URL in a web worker environment.
     * This is more efficient for large snapshots as it avoids transferring data through the main thread.
     */
    Snapshot.loadAndMount = async (url, { fs, path = '/', separator = '/' }) => {
        try {
            console.debug('Snapshot.loadAndMount: Starting direct load from URL:', url);
            // Fetch the snapshot data directly in the worker
            const response = await fetch(url);
            if (!response.ok)
                throw new Error(`Failed to fetch snapshot: ${response.status} ${response.statusText}`);
            console.debug('Snapshot.loadAndMount: Response received, content-length:', response.headers.get('content-length'));
            // Get the response as ArrayBuffer for better performance
            const uint8Buffer = new Uint8Array(await response.arrayBuffer());
            console.debug('Snapshot.loadAndMount: Downloaded buffer length:', uint8Buffer.length);
            // Use the existing mount logic
            await Snapshot.mount(uint8Buffer, { fs, path, separator });
        }
        catch (error) {
            console.error('Snapshot.loadAndMount: Failed to load and mount snapshot:', error);
            throw error;
        }
    };
})(Snapshot || (Snapshot = {}));
275
/**
 * Materialize a decoded snapshot tuple into `fs` (sync API) at `path`:
 * folders become mkdirSync + recursive children, files writeFileSync,
 * symlinks symlinkSync. Null/undefined snapshots are ignored.
 */
export const fromSnapshot = async (snapshot, { fs, path = '/', separator = '/' }) => {
    if (!snapshot)
        return;
    const [kind] = snapshot;
    if (kind === 0) {
        const dir = path.endsWith(separator) ? path : path + separator;
        const [, , entries] = snapshot;
        fs.mkdirSync(dir, { recursive: true });
        for (const [name, child] of Object.entries(entries)) {
            await fromSnapshot(child, { fs, path: `${dir}${name}`, separator });
        }
    }
    else if (kind === 1) {
        fs.writeFileSync(path, snapshot[2]);
    }
    else if (kind === 2) {
        fs.symlinkSync(snapshot[1].target, path);
    }
};
@@ -0,0 +1,11 @@
1
import { MountArgs, MountResult } from "../types";
/**
 * Mount a filesystem snapshot from an in-memory buffer.
 * NOTE(review): presumably mounts at `mountPoint` within the worker's
 * memfs instance — confirm against workers/fs.worker.js.
 */
export declare const mount: ({ buffer, mountPoint }: MountArgs) => Promise<MountResult>;
/**
 * Optimized mount function that loads snapshots directly from URLs.
 * This is much more efficient for large snapshots as it avoids transferring
 * data through the main thread.
 */
export declare const mountFromUrl: ({ url, mountPoint }: {
    url: string;
    mountPoint?: string;
}) => Promise<MountResult>;
@@ -0,0 +1,93 @@
1
+ import * as Comlink from "comlink";
2
+ import { watchOptionsTransferHandler, asyncGeneratorTransferHandler } from '../rpc/serde';
3
+ import { Snapshot } from "../utils";
4
// Register custom Comlink transfer handlers so watch options and async
// generators can cross the worker RPC boundary.
Comlink.transferHandlers.set('asyncGenerator', asyncGeneratorTransferHandler);
Comlink.transferHandlers.set('watchOptions', watchOptionsTransferHandler);
// Keep every mounted filesystem proxy alive for the worker's lifetime.
let filesystems = [];
7
/**
 * Mount a filesystem in this worker. With `buffer`, the snapshot bytes
 * are mounted into memfs via Snapshot.mount; without it, the in-memory
 * fs is returned as-is (the OPFS-clearing path is currently disabled).
 *
 * @param {{buffer?: Uint8Array|ArrayBuffer, mountPoint?: string}} args
 * @returns {Promise<object>} A Comlink proxy wrapping `{ fs }`.
 * @throws Re-throws any import or mount failure to the caller.
 */
export const mount = async ({ buffer, mountPoint = '/' }) => {
    let filesystem;
    try {
        console.log('Importing memfs after FS mount...');
        const { fs } = await import('@joinezco/memfs');
        console.log("FS imported");
        try {
            if (buffer) {
                console.log(`Mounting filesystem snapshot at [${mountPoint}]...`, buffer);
                // Normalize whatever arrived over the RPC boundary into a
                // Uint8Array backed by an exactly-sized ArrayBuffer.
                const uint8 = buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer);
                const aligned = uint8.byteOffset === 0 && uint8.byteLength === uint8.buffer.byteLength
                    ? uint8.buffer
                    : uint8.buffer.slice(uint8.byteOffset, uint8.byteOffset + uint8.byteLength);
                console.log('Aligned ArrayBuffer:', aligned);
                await Snapshot.mount(new Uint8Array(aligned), {
                    // @ts-ignore
                    fs,
                });
            }
            else {
                console.log('Getting storage directory...');
                // const handle = await navigator.storage.getDirectory();
                console.log('Got storage directory');
                console.log('Attempting to remove directory...');
                try {
                    // TODO: clear storage button
                    // @ts-ignore
                    // await handle.remove({ recursive: true });
                    console.log('Successfully removed directory');
                }
                catch (removeErr) {
                    console.error('Error removing directory:', removeErr);
                    // Continue anyway, this might not be critical
                }
            }
            console.log('Returning proxy from worker', fs);
            filesystem = Comlink.proxy({ fs });
            filesystems.push(filesystem);
        }
        catch (e) {
            console.error('Worker initialization failed with error:', e);
            throw e; // Make sure error propagates
        }
    }
    catch (e) {
        console.error('Error importing memfs:', e);
        // BUGFIX: this catch previously swallowed every error — including the
        // inner rethrow — and the function silently returned `undefined`.
        // Rethrow so callers see the failure instead of a missing proxy.
        throw e;
    }
    console.log('mounting fs', { buffer, mountPoint });
    return filesystem;
};
58
/**
 * Optimized mount function that loads snapshots directly from URLs.
 * This is much more efficient for large snapshots as it avoids transferring
 * data through the main thread.
 */
export const mountFromUrl = async ({ url, mountPoint = '/' }) => {
    try {
        const { fs } = await import('@joinezco/memfs');
        console.log(`Loading and mounting filesystem snapshot from URL: ${url} at [${mountPoint}]...`);
        const t0 = performance.now();
        await Snapshot.loadAndMount(url, {
            // @ts-ignore
            fs,
            path: mountPoint
        });
        const elapsed = Math.round(performance.now() - t0);
        console.log(`Snapshot loaded and mounted in ${elapsed}ms`);
        console.log('Returning proxy from worker', fs);
        const filesystem = Comlink.proxy({ fs });
        filesystems.push(filesystem);
        return filesystem;
    }
    catch (e) {
        console.error('Error loading snapshot from URL:', e);
        throw e;
    }
};
86
// SharedWorker entry point: expose the mount API over Comlink on every
// connecting port.
onconnect = async (event) => {
    const port = event.ports[0];
    console.log('workers/fs connected on port: ', port);
    port.addEventListener('close', () => {
        console.log('fs port closed');
    });
    Comlink.expose({ mount, mountFromUrl }, port);
};
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,20 @@
1
+ import * as Comlink from 'comlink';
2
+ import { createLanguageServer } from '../lsps/typescript';
3
+ import { createConnection } from 'vscode-languageserver/browser';
4
+ import { BrowserMessageReader, BrowserMessageWriter } from '@volar/language-server/browser';
5
// TODO: get rid of this
// instead, create language specific workers (with a smarter client)
// i.e typescript.worker.ts / rust.worker.ts / ...
onconnect = async (event) => {
    const port = event.ports[0];
    console.debug('LSP worker connected on port: ', port);
    // Wire an LSP connection over the shared-worker port.
    const reader = new BrowserMessageReader(port);
    const writer = new BrowserMessageWriter(port);
    const connection = createConnection(reader, writer);
    connection.listen();
    // Comlink-exposed factory: the client supplies its fs proxy and we
    // boot the language server against it and this connection.
    const proxy = async ({ fs }) => {
        console.log('creating language server');
        await createLanguageServer({ fs, connection });
    };
    Comlink.expose({ createLanguageServer: proxy }, port);
};
package/package.json ADDED
@@ -0,0 +1,95 @@
1
+ {
2
+ "name": "@joinezco/codeblock",
3
+ "version": "0.0.8",
4
+ "type": "module",
5
+ "module": "dist/index.js",
6
+ "main": "dist/index.js",
7
+ "publishConfig": {
8
+ "access": "public"
9
+ },
10
+ "exports": {
11
+ ".": {
12
+ "types": "./dist/index.d.ts",
13
+ "import": "./dist/index.js",
14
+ "require": "./dist/index.js"
15
+ }
16
+ },
17
+ "files": [
18
+ "dist"
19
+ ],
20
+ "devDependencies": {
21
+ "@playwright/test": "^1.52.0",
22
+ "@types/lodash": "^4.17.16",
23
+ "@types/markdown-it": "^14.1.2",
24
+ "@types/node": "^24.3.1",
25
+ "@types/parse-gitignore": "^1.0.2",
26
+ "@types/sharedworker": "^0.0.181",
27
+ "multimatch": "^7.0.0",
28
+ "process": "^0.11.10",
29
+ "tsx": "^4.19.4",
30
+ "vite": "^7.0.6",
31
+ "vitest": "^3.2.4"
32
+ },
33
+ "dependencies": {
34
+ "@babel/runtime": "^7.27.1",
35
+ "@codemirror/autocomplete": "^6.18.6",
36
+ "@codemirror/commands": "^6.8.1",
37
+ "@codemirror/lang-cpp": "^6.0.3",
38
+ "@codemirror/lang-css": "^6.3.1",
39
+ "@codemirror/lang-html": "^6.4.9",
40
+ "@codemirror/lang-java": "^6.0.2",
41
+ "@codemirror/lang-javascript": "^6.2.3",
42
+ "@codemirror/lang-json": "^6.0.2",
43
+ "@codemirror/lang-less": "^6.0.2",
44
+ "@codemirror/lang-markdown": "^6.3.4",
45
+ "@codemirror/lang-php": "^6.0.2",
46
+ "@codemirror/lang-python": "^6.2.0",
47
+ "@codemirror/lang-rust": "^6.0.1",
48
+ "@codemirror/lang-sass": "^6.0.2",
49
+ "@codemirror/lang-sql": "^6.9.1",
50
+ "@codemirror/lang-xml": "^6.1.0",
51
+ "@codemirror/lang-yaml": "^6.1.2",
52
+ "@codemirror/language": "^6.11.0",
53
+ "@codemirror/legacy-modes": "^6.5.1",
54
+ "@codemirror/lint": "^6.8.5",
55
+ "@codemirror/search": "^6.5.10",
56
+ "@codemirror/state": "^6.5.2",
57
+ "@codemirror/view": "^6.36.7",
58
+ "@jsonjoy.com/json-pack": "^1.2.0",
59
+ "@jsonjoy.com/util": "^1.6.0",
60
+ "@lezer/highlight": "^1.2.1",
61
+ "@open-rpc/client-js": "^1.8.1",
62
+ "@typescript/vfs": "^1.6.1",
63
+ "@uiw/codemirror-theme-vscode": "^4.23.12",
64
+ "@volar/language-server": "2.4.13",
65
+ "@volar/language-service": "2.4.13",
66
+ "@volar/typescript": "2.4.13",
67
+ "comlink": "^4.4.2",
68
+ "events": "^3.3.0",
69
+ "ignore": "^7.0.4",
70
+ "lodash": "^4.17.21",
71
+ "lz-string": "^1.5.0",
72
+ "markdown-it": "^14.1.0",
73
+ "minisearch": "^7.1.2",
74
+ "parse-gitignore": "^2.0.0",
75
+ "path-browserify": "^1.0.1",
76
+ "style-mod": "^4.1.2",
77
+ "typescript": "~5.9.2",
78
+ "uuid": "^11.1.0",
79
+ "vite-plugin-node-polyfills": "^0.24.0",
80
+ "volar-service-typescript": "0.0.65",
81
+ "vscode-languageserver": "^9.0.1",
82
+ "vscode-languageserver-protocol": "^3.17.5",
83
+ "vscode-languageserver-textdocument": "^1.0.12",
84
+ "vscode-uri": "^3.1.0",
85
+ "@marimo-team/codemirror-languageserver": "1.16.0",
86
+ "@joinezco/jswasi": "0.0.1",
87
+ "@joinezco/memfs": "4.23.0"
88
+ },
89
+ "scripts": {
90
+ "dev": "vite",
91
+ "build": "tsc",
92
+ "build:preview": "vite build",
93
+ "preview": "vite preview"
94
+ }
95
+ }