@genart-dev/core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,1819 @@
1
+ // src/index.ts
2
+ export * from "@genart-dev/format";
3
+
4
// src/sketch/adapters/p5.ts

// Pinned p5.js release served from the cdnjs CDN; used both as a runtime
// dependency descriptor and as the <script> src in standalone HTML exports.
const P5_CDN_VERSION = "1.11.3";
const P5_CDN_URL = `https://cdnjs.cloudflare.com/ajax/libs/p5.js/${P5_CDN_VERSION}/p5.min.js`;
7
/**
 * Renderer adapter that runs algorithms as p5.js instance-mode sketches.
 *
 * Algorithms must define `function sketch(p, state)` and assign `p.setup`.
 * The sketch may return a module object exposing `initializeSystem()` so the
 * adapter can ask it to rebuild after a state change.
 */
var P5RendererAdapter = class {
  type = "p5";
  displayName = "p5.js";
  algorithmLanguage = "javascript";

  /**
   * Statically checks algorithm source for the required shape.
   * @param {string} algorithm - raw algorithm source code
   * @returns {{ valid: boolean, errors: string[] }}
   */
  validate(algorithm) {
    const errors = [];
    if (!algorithm || algorithm.trim().length === 0) {
      return { valid: false, errors: ["Algorithm source is empty"] };
    }
    // Accept a function declaration, function expression, or arrow function
    // named `sketch` taking (p, state).
    const hasSketchFn = /function\s+sketch\s*\(\s*p\s*,\s*state\s*\)/.test(algorithm) || /(?:const|let|var)\s+sketch\s*=\s*(?:function\s*)?\(\s*p\s*,\s*state\s*\)/.test(algorithm) || /(?:const|let|var)\s+sketch\s*=\s*\(\s*p\s*,\s*state\s*\)\s*=>/.test(algorithm);
    if (!hasSketchFn) {
      errors.push(
        "p5 algorithms must export a function with signature: function sketch(p, state)"
      );
    }
    const hasSetup = /p\s*\.\s*setup\s*=/.test(algorithm);
    if (!hasSetup) {
      errors.push("p5 algorithms must assign p.setup");
    }
    return { valid: errors.length === 0, errors };
  }

  /**
   * Validates then evaluates the algorithm, returning its `sketch` factory.
   * NOTE: source is evaluated via `new Function`; only feed trusted,
   * user-owned sketch code.
   * @param {string} algorithm - raw algorithm source code
   * @returns {Promise<{ source: string, factory: Function }>}
   * @throws {Error} when validation or evaluation fails
   */
  async compile(algorithm) {
    const validation = this.validate(algorithm);
    if (!validation.valid) {
      throw new Error(
        `p5 compilation failed: ${validation.errors.join("; ")}`
      );
    }
    const wrappedSource = `
return (function() {
${algorithm}
return sketch;
})();
`;
    try {
      const factory = new Function(wrappedSource);
      const compiled = {
        source: algorithm,
        factory: factory()
      };
      return compiled;
    } catch (err) {
      throw new Error(
        `p5 compilation failed: ${err instanceof Error ? err.message : String(err)}`
      );
    }
  }

  /**
   * Builds a mountable sketch instance. `canvas` is accepted for interface
   * parity with the other adapters but is unused here: the p5 sketch creates
   * its own canvas via p.createCanvas.
   */
  createInstance(compiled, state, canvas) {
    const { factory } = compiled;
    let p5Instance = null;
    let container = null;
    // Stable object reference: the sketch closure receives this exact object
    // at mount, so later updates must mutate it rather than rebind it.
    const currentState = { ...state };
    let animating = true;
    let sketchModule = null;
    const instance = {
      mount(el) {
        container = el;
        const P5Constructor = globalThis["p5"];
        if (!P5Constructor) {
          throw new Error(
            "p5.js is not loaded. Include p5.js before mounting a p5 sketch."
          );
        }
        p5Instance = new P5Constructor((p) => {
          sketchModule = factory(p, currentState);
        }, el);
      },
      unmount() {
        if (p5Instance && typeof p5Instance["remove"] === "function") {
          p5Instance["remove"]();
        }
        p5Instance = null;
        sketchModule = null;
        container = null;
      },
      updateState(newState) {
        // BUGFIX: the original rebound `currentState` to a fresh object, so
        // the running sketch (holding the old reference from mount) never
        // observed new values and initializeSystem() rebuilt from stale
        // state. Mutate the shared object in place instead.
        for (const key of Object.keys(currentState)) {
          delete currentState[key];
        }
        Object.assign(currentState, newState);
        if (sketchModule && typeof sketchModule["initializeSystem"] === "function") {
          sketchModule["initializeSystem"]();
        }
      },
      redraw() {
        if (p5Instance && typeof p5Instance["redraw"] === "function") {
          p5Instance["redraw"]();
        }
      },
      pause() {
        animating = false;
        if (p5Instance && typeof p5Instance["noLoop"] === "function") {
          p5Instance["noLoop"]();
        }
      },
      resume() {
        animating = true;
        if (p5Instance && typeof p5Instance["loop"] === "function") {
          p5Instance["loop"]();
        }
      },
      get isAnimating() {
        return animating;
      },
      /**
       * Captures the current frame as a data URL (png/jpeg/webp).
       */
      async captureFrame(options) {
        if (!container) throw new Error("Sketch is not mounted");
        const canvasEl = container.querySelector("canvas");
        if (!canvasEl) throw new Error("No canvas element found");
        const format = options?.format ?? "png";
        const quality = options?.quality ?? 1;
        const mimeType = format === "jpeg" ? "image/jpeg" : format === "webp" ? "image/webp" : "image/png";
        return canvasEl.toDataURL(mimeType, quality);
      },
      /**
       * Reads raw pixels from the sketch canvas. Requires the canvas to be
       * 2D-readable (fails for WebGL-backed p5 canvases).
       */
      async captureImageData() {
        if (!container) throw new Error("Sketch is not mounted");
        const canvasEl = container.querySelector("canvas");
        if (!canvasEl) throw new Error("No canvas element found");
        const ctx = canvasEl.getContext("2d");
        if (!ctx) throw new Error("Cannot get 2D context from canvas");
        return ctx.getImageData(0, 0, canvasEl.width, canvasEl.height);
      },
      dispose() {
        instance.unmount();
      }
    };
    return instance;
  }

  /**
   * Renders one frame into a hidden container and returns encoded image
   * bytes. The 100ms delay gives p5 time to run setup/draw before capture.
   * @returns {Promise<Uint8Array>} encoded image bytes
   */
  async renderOffscreen(compiled, state, canvas, options) {
    const el = document.createElement("div");
    el.style.position = "absolute";
    el.style.left = "-9999px";
    document.body.appendChild(el);
    const instance = this.createInstance(compiled, state, canvas);
    try {
      instance.mount(el);
      await new Promise((resolve) => setTimeout(resolve, 100));
      const dataUrl = await instance.captureFrame(options);
      const base64 = dataUrl.split(",")[1] ?? "";
      const binary = atob(base64);
      const bytes = new Uint8Array(binary.length);
      for (let i = 0; i < binary.length; i++) {
        bytes[i] = binary.charCodeAt(i);
      }
      return bytes;
    } finally {
      // BUGFIX: dispose in finally so a failed capture does not leak the
      // p5 instance and its animation loop.
      instance.dispose();
      document.body.removeChild(el);
    }
  }

  /**
   * Produces a self-contained HTML document that loads p5 from the CDN and
   * runs the sketch with its saved state inlined.
   */
  generateStandaloneHTML(sketch) {
    const { width, height } = sketch.canvas;
    const pixelDensity = sketch.canvas.pixelDensity ?? 1;
    const stateJson = JSON.stringify(sketch.state, null, 2);
    // Map declared color keys to the saved palette, position-wise.
    const colorsMap = {};
    for (let i = 0; i < sketch.colors.length; i++) {
      const colorDef = sketch.colors[i];
      if (colorDef) {
        colorsMap[colorDef.key] = sketch.state.colorPalette[i] ?? "#000000";
      }
    }
    const colorsJson = JSON.stringify(colorsMap);
    return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>${escapeHtml(sketch.title)}</title>
<script src="${P5_CDN_URL}"></script>
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
body { display: flex; justify-content: center; align-items: center; min-height: 100vh; background: #111; }
#canvas-container canvas { display: block; max-width: 100vw; max-height: 100vh; }
</style>
</head>
<body>
<div id="canvas-container"></div>
<script>
const state = ${stateJson};
state.canvas = { width: ${width}, height: ${height}, pixelDensity: ${pixelDensity} };
// Convenience aliases \u2014 algorithms may use either naming convention
state.WIDTH = ${width};
state.HEIGHT = ${height};
state.SEED = state.seed;
state.PARAMS = state.params;
state.COLORS = ${colorsJson};

${sketch.algorithm}

new p5(function(p) {
sketch(p, state);
}, document.getElementById('canvas-container'));
</script>
</body>
</html>`;
  }

  /** Starter source shown to users creating a new p5 sketch. */
  getAlgorithmTemplate() {
    return `function sketch(p, state) {
const { WIDTH, HEIGHT, SEED, PARAMS, COLORS } = state;
p.setup = () => {
p.createCanvas(WIDTH, HEIGHT);
p.randomSeed(SEED);
};
p.draw = () => {
p.background(COLORS.background);
// generative algorithm here
};
return { initializeSystem() { /* rebuild from state */ } };
}`;
  }

  /** @returns {{ name: string, version: string, cdnUrl: string }[]} */
  getRuntimeDependencies() {
    return [
      {
        name: "p5",
        version: P5_CDN_VERSION,
        cdnUrl: P5_CDN_URL
      }
    ];
  }
};
223
/**
 * Escapes the four HTML-significant characters so a string can be safely
 * interpolated into markup text or a double-quoted attribute.
 * @param {string} str
 * @returns {string}
 */
function escapeHtml(str) {
  return str
    .replaceAll("&", "&amp;")
    .replaceAll("<", "&lt;")
    .replaceAll(">", "&gt;")
    .replaceAll('"', "&quot;");
}
226
+
227
+ // src/sketch/adapters/canvas2d.ts
228
/**
 * Renderer adapter that runs algorithms against a raw
 * CanvasRenderingContext2D.
 *
 * Algorithms must define `function sketch(ctx, state)`. The returned module
 * may expose `initializeSystem()` (rebuild/redraw from state) and `draw()`
 * (per-frame step used by resume()).
 */
var Canvas2DRendererAdapter = class {
  type = "canvas2d";
  displayName = "Canvas 2D";
  algorithmLanguage = "javascript";

  /**
   * Statically checks algorithm source for the required sketch signature.
   * @param {string} algorithm - raw algorithm source code
   * @returns {{ valid: boolean, errors: string[] }}
   */
  validate(algorithm) {
    const errors = [];
    if (!algorithm || algorithm.trim().length === 0) {
      return { valid: false, errors: ["Algorithm source is empty"] };
    }
    // Accept a function declaration, function expression, or arrow function
    // named `sketch` taking (ctx, state).
    const hasSketchFn = /function\s+sketch\s*\(\s*ctx\s*,\s*state\s*\)/.test(algorithm) || /(?:const|let|var)\s+sketch\s*=\s*(?:function\s*)?\(\s*ctx\s*,\s*state\s*\)/.test(algorithm) || /(?:const|let|var)\s+sketch\s*=\s*\(\s*ctx\s*,\s*state\s*\)\s*=>/.test(algorithm);
    if (!hasSketchFn) {
      errors.push(
        "Canvas 2D algorithms must export a function with signature: function sketch(ctx, state)"
      );
    }
    return { valid: errors.length === 0, errors };
  }

  /**
   * Validates then evaluates the algorithm, returning its `sketch` factory.
   * NOTE: source is evaluated via `new Function`; only feed trusted,
   * user-owned sketch code.
   * @param {string} algorithm - raw algorithm source code
   * @returns {Promise<{ source: string, factory: Function }>}
   * @throws {Error} when validation or evaluation fails
   */
  async compile(algorithm) {
    const validation = this.validate(algorithm);
    if (!validation.valid) {
      throw new Error(
        `Canvas 2D compilation failed: ${validation.errors.join("; ")}`
      );
    }
    const wrappedSource = `
return (function() {
${algorithm}
return sketch;
})();
`;
    try {
      const factory = new Function(wrappedSource);
      const compiled = {
        source: algorithm,
        factory: factory()
      };
      return compiled;
    } catch (err) {
      throw new Error(
        `Canvas 2D compilation failed: ${err instanceof Error ? err.message : String(err)}`
      );
    }
  }

  /**
   * Builds a mountable sketch instance that owns its <canvas> element,
   * honoring `canvas.pixelDensity` by upscaling the backing store.
   */
  createInstance(compiled, state, canvas) {
    const { factory } = compiled;
    let canvasEl = null;
    let ctx = null;
    let container = null;
    // Stable object reference: the sketch receives this exact object at
    // mount, so later updates must mutate it rather than rebind it.
    const currentState = { ...state };
    let animating = false;
    let animationFrameId = null;
    let sketchModule = null;
    const instance = {
      mount(el) {
        container = el;
        canvasEl = document.createElement("canvas");
        canvasEl.width = canvas.width;
        canvasEl.height = canvas.height;
        const density = canvas.pixelDensity ?? 1;
        if (density !== 1) {
          // Hi-DPI: larger backing store, CSS-scaled down to logical size.
          canvasEl.width = canvas.width * density;
          canvasEl.height = canvas.height * density;
          canvasEl.style.width = `${canvas.width}px`;
          canvasEl.style.height = `${canvas.height}px`;
        }
        el.appendChild(canvasEl);
        ctx = canvasEl.getContext("2d");
        if (!ctx) throw new Error("Failed to get 2D rendering context");
        if (density !== 1) {
          ctx.scale(density, density);
        }
        sketchModule = factory(ctx, currentState);
        if (sketchModule && typeof sketchModule["initializeSystem"] === "function") {
          sketchModule["initializeSystem"]();
        }
      },
      unmount() {
        if (animationFrameId !== null) {
          cancelAnimationFrame(animationFrameId);
          animationFrameId = null;
        }
        if (canvasEl && container) {
          container.removeChild(canvasEl);
        }
        canvasEl = null;
        ctx = null;
        container = null;
        sketchModule = null;
        animating = false;
      },
      updateState(newState) {
        // BUGFIX: the original rebound `currentState` to a fresh object, so
        // the sketch (holding the old reference from mount) never observed
        // new values and initializeSystem() rebuilt from stale state.
        // Mutate the shared object in place instead.
        for (const key of Object.keys(currentState)) {
          delete currentState[key];
        }
        Object.assign(currentState, newState);
        if (sketchModule && typeof sketchModule["initializeSystem"] === "function") {
          sketchModule["initializeSystem"]();
        }
      },
      redraw() {
        if (sketchModule && typeof sketchModule["initializeSystem"] === "function") {
          sketchModule["initializeSystem"]();
        }
      },
      pause() {
        animating = false;
        if (animationFrameId !== null) {
          cancelAnimationFrame(animationFrameId);
          animationFrameId = null;
        }
      },
      resume() {
        animating = true;
        // Only animates if the sketch module exposes a per-frame draw().
        if (sketchModule && typeof sketchModule["draw"] === "function") {
          const loop = () => {
            if (!animating) return;
            sketchModule["draw"]();
            animationFrameId = requestAnimationFrame(loop);
          };
          loop();
        }
      },
      get isAnimating() {
        return animating;
      },
      /**
       * Captures the current frame as a data URL (png/jpeg/webp).
       */
      async captureFrame(options) {
        if (!canvasEl) throw new Error("Sketch is not mounted");
        const format = options?.format ?? "png";
        const quality = options?.quality ?? 1;
        const mimeType = format === "jpeg" ? "image/jpeg" : format === "webp" ? "image/webp" : "image/png";
        return canvasEl.toDataURL(mimeType, quality);
      },
      /**
       * Reads the full backing-store pixels (including pixel-density
       * upscaling).
       */
      async captureImageData() {
        if (!canvasEl || !ctx) throw new Error("Sketch is not mounted");
        return ctx.getImageData(0, 0, canvasEl.width, canvasEl.height);
      },
      dispose() {
        instance.unmount();
      }
    };
    return instance;
  }

  /**
   * Renders one frame into a hidden container and returns encoded image
   * bytes. The 50ms delay lets any async drawing settle before capture.
   * @returns {Promise<Uint8Array>} encoded image bytes
   */
  async renderOffscreen(compiled, state, canvas, options) {
    const el = document.createElement("div");
    el.style.position = "absolute";
    el.style.left = "-9999px";
    document.body.appendChild(el);
    const instance = this.createInstance(compiled, state, canvas);
    try {
      instance.mount(el);
      await new Promise((resolve) => setTimeout(resolve, 50));
      const dataUrl = await instance.captureFrame(options);
      const base64 = dataUrl.split(",")[1] ?? "";
      const binary = atob(base64);
      const bytes = new Uint8Array(binary.length);
      for (let i = 0; i < binary.length; i++) {
        bytes[i] = binary.charCodeAt(i);
      }
      return bytes;
    } finally {
      // BUGFIX: dispose in finally so a failed capture does not leak the
      // canvas and any running animation loop.
      instance.dispose();
      document.body.removeChild(el);
    }
  }

  /**
   * Produces a self-contained HTML document that runs the sketch with its
   * saved state inlined; errors are surfaced inline in the page body.
   */
  generateStandaloneHTML(sketch) {
    const { width, height } = sketch.canvas;
    const pixelDensity = sketch.canvas.pixelDensity ?? 1;
    const stateJson = JSON.stringify(sketch.state, null, 2);
    return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>${escapeHtml2(sketch.title)}</title>
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
html, body { width: 100%; height: 100%; overflow: hidden; background: #111; }
body { display: flex; justify-content: center; align-items: center; }
canvas { display: block; }
</style>
</head>
<body>
<canvas id="canvas" width="${width * pixelDensity}" height="${height * pixelDensity}" style="width:${width}px;height:${height}px;"></canvas>
<script>
try {
const state = ${stateJson};
state.canvas = { width: ${width}, height: ${height}, pixelDensity: ${pixelDensity} };

${sketch.algorithm}

const canvas = document.getElementById('canvas');
const ctx = canvas.getContext('2d');
${pixelDensity !== 1 ? `ctx.scale(${pixelDensity}, ${pixelDensity});` : ""}
const module = sketch(ctx, state);
if (module && module.initializeSystem) module.initializeSystem();
} catch (e) {
document.body.style.background = '#300';
document.body.style.color = '#f88';
document.body.style.fontFamily = 'monospace';
document.body.style.fontSize = '12px';
document.body.style.padding = '16px';
document.body.textContent = 'Sketch error: ' + e.message;
}
</script>
</body>
</html>`;
  }

  /** Starter source shown to users creating a new Canvas 2D sketch. */
  getAlgorithmTemplate() {
    return `function sketch(ctx, state) {
const { width, height } = state.canvas;
function initializeSystem() {
ctx.clearRect(0, 0, width, height);
// generative algorithm here
}
return { initializeSystem };
}`;
  }

  /** Canvas 2D needs no external runtime libraries. */
  getRuntimeDependencies() {
    return [];
  }
};
446
/**
 * Escapes the four HTML-significant characters for safe interpolation into
 * markup text and double-quoted attributes (single pass over the string).
 * @param {string} str
 * @returns {string}
 */
function escapeHtml2(str) {
  const entities = { "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': "&quot;" };
  return str.replace(/[&<>"]/g, (ch) => entities[ch]);
}
449
+
450
// src/sketch/adapters/three.ts

// Pinned Three.js release served as an ES module from jsDelivr; used both as
// a runtime dependency descriptor and as the import URL in HTML exports.
const THREE_CDN_VERSION = "0.172.0";
const THREE_CDN_URL = `https://cdn.jsdelivr.net/npm/three@${THREE_CDN_VERSION}/build/three.module.min.js`;
453
/**
 * Renderer adapter that runs algorithms against the Three.js library.
 *
 * Algorithms must define `function sketch(THREE, state, container)` and may
 * return a module exposing `initializeSystem()`, `pause()`, `resume()`, and
 * `dispose()` which this adapter delegates to.
 */
var ThreeRendererAdapter = class {
  type = "three";
  displayName = "Three.js";
  algorithmLanguage = "javascript";

  /**
   * Statically checks algorithm source for the required sketch signature.
   * @param {string} algorithm - raw algorithm source code
   * @returns {{ valid: boolean, errors: string[] }}
   */
  validate(algorithm) {
    if (!algorithm || algorithm.trim().length === 0) {
      return { valid: false, errors: ["Algorithm source is empty"] };
    }
    // Accept a function declaration, function expression, or arrow function
    // named `sketch` taking (THREE, state, container).
    const hasSketchFn = /function\s+sketch\s*\(\s*THREE\s*,\s*state\s*,\s*container\s*\)/.test(algorithm) || /(?:const|let|var)\s+sketch\s*=\s*(?:function\s*)?\(\s*THREE\s*,\s*state\s*,\s*container\s*\)/.test(algorithm) || /(?:const|let|var)\s+sketch\s*=\s*\(\s*THREE\s*,\s*state\s*,\s*container\s*\)\s*=>/.test(algorithm);
    if (!hasSketchFn) {
      return {
        valid: false,
        errors: [
          "Three.js algorithms must export a function with signature: function sketch(THREE, state, container)"
        ]
      };
    }
    return { valid: true, errors: [] };
  }

  /**
   * Validates then evaluates the algorithm, returning its `sketch` factory.
   * NOTE: source is evaluated via `new Function`; only feed trusted,
   * user-owned sketch code.
   * @param {string} algorithm - raw algorithm source code
   * @returns {Promise<{ source: string, factory: Function }>}
   * @throws {Error} when validation or evaluation fails
   */
  async compile(algorithm) {
    const validation = this.validate(algorithm);
    if (!validation.valid) {
      throw new Error(
        `Three.js compilation failed: ${validation.errors.join("; ")}`
      );
    }
    const wrappedSource = `
return (function() {
${algorithm}
return sketch;
})();
`;
    try {
      const factory = new Function(wrappedSource);
      const compiled = {
        source: algorithm,
        factory: factory()
      };
      return compiled;
    } catch (err) {
      throw new Error(
        `Three.js compilation failed: ${err instanceof Error ? err.message : String(err)}`
      );
    }
  }

  /**
   * Builds a mountable sketch instance. Requires a global `THREE` at mount
   * time; the sketch manages its own renderer/canvas inside the container.
   */
  createInstance(compiled, state, canvas) {
    const { factory } = compiled;
    let container = null;
    // Stable object reference: the sketch receives this exact object at
    // mount, so later updates must mutate it rather than rebind it. The
    // canvas config is injected alongside the user state.
    const currentState = { ...state, canvas };
    let sketchModule = null;
    let animating = false;
    const instance = {
      mount(el) {
        container = el;
        const THREEGlobal = globalThis["THREE"];
        if (!THREEGlobal) {
          throw new Error(
            "Three.js is not loaded. Include Three.js before mounting a Three.js sketch."
          );
        }
        sketchModule = factory(THREEGlobal, currentState, el);
        animating = true;
      },
      unmount() {
        if (sketchModule && typeof sketchModule["dispose"] === "function") {
          sketchModule["dispose"]();
        }
        if (container) {
          container.innerHTML = "";
        }
        sketchModule = null;
        container = null;
        animating = false;
      },
      updateState(newState) {
        // BUGFIX: the original rebound `currentState` to a fresh object, so
        // the sketch (holding the old reference from mount) never observed
        // new values and initializeSystem() rebuilt from stale state.
        // Mutate the shared object in place, re-injecting the canvas config.
        for (const key of Object.keys(currentState)) {
          delete currentState[key];
        }
        Object.assign(currentState, newState);
        currentState.canvas = canvas;
        if (sketchModule && typeof sketchModule["initializeSystem"] === "function") {
          sketchModule["initializeSystem"]();
        }
      },
      redraw() {
        if (sketchModule && typeof sketchModule["initializeSystem"] === "function") {
          sketchModule["initializeSystem"]();
        }
      },
      pause() {
        animating = false;
        if (sketchModule && typeof sketchModule["pause"] === "function") {
          sketchModule["pause"]();
        }
      },
      resume() {
        animating = true;
        if (sketchModule && typeof sketchModule["resume"] === "function") {
          sketchModule["resume"]();
        }
      },
      get isAnimating() {
        return animating;
      },
      /**
       * Captures the current frame as a data URL. For this to show content,
       * the sketch's WebGLRenderer should use preserveDrawingBuffer: true.
       */
      async captureFrame(options) {
        if (!container) throw new Error("Sketch is not mounted");
        const canvasEl = container.querySelector("canvas");
        if (!canvasEl) throw new Error("No canvas element found");
        const format = options?.format ?? "png";
        const quality = options?.quality ?? 1;
        const mimeType = format === "jpeg" ? "image/jpeg" : format === "webp" ? "image/webp" : "image/png";
        return canvasEl.toDataURL(mimeType, quality);
      },
      /**
       * Reads raw pixels via a 2D context. NOTE(review): this fails on a
       * WebGL-backed canvas (getContext("2d") returns null there).
       */
      async captureImageData() {
        if (!container) throw new Error("Sketch is not mounted");
        const canvasEl = container.querySelector("canvas");
        if (!canvasEl) throw new Error("No canvas element found");
        const ctx = canvasEl.getContext("2d");
        if (!ctx) throw new Error("Cannot get 2D context from canvas");
        return ctx.getImageData(0, 0, canvasEl.width, canvasEl.height);
      },
      dispose() {
        instance.unmount();
      }
    };
    return instance;
  }

  /**
   * Renders one frame into a hidden container and returns encoded image
   * bytes. The 200ms delay gives the WebGL pipeline time to produce a frame.
   * @returns {Promise<Uint8Array>} encoded image bytes
   */
  async renderOffscreen(compiled, state, canvas, options) {
    const el = document.createElement("div");
    el.style.position = "absolute";
    el.style.left = "-9999px";
    document.body.appendChild(el);
    const instance = this.createInstance(compiled, state, canvas);
    try {
      instance.mount(el);
      await new Promise((resolve) => setTimeout(resolve, 200));
      const dataUrl = await instance.captureFrame(options);
      const base64 = dataUrl.split(",")[1] ?? "";
      const binary = atob(base64);
      const bytes = new Uint8Array(binary.length);
      for (let i = 0; i < binary.length; i++) {
        bytes[i] = binary.charCodeAt(i);
      }
      return bytes;
    } finally {
      // BUGFIX: dispose in finally so a failed capture does not leak the
      // WebGL renderer and its animation loop.
      instance.dispose();
      document.body.removeChild(el);
    }
  }

  /**
   * Produces a self-contained HTML document that imports Three.js as an ES
   * module from the CDN and runs the sketch with its saved state inlined.
   */
  generateStandaloneHTML(sketch) {
    const { width, height } = sketch.canvas;
    const pixelDensity = sketch.canvas.pixelDensity ?? 1;
    const stateJson = JSON.stringify(sketch.state, null, 2);
    return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>${escapeHtml3(sketch.title)}</title>
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
body { display: flex; justify-content: center; align-items: center; min-height: 100vh; background: #111; overflow: hidden; }
#canvas-container { width: ${width}px; height: ${height}px; }
#canvas-container canvas { display: block; max-width: 100vw; max-height: 100vh; }
</style>
</head>
<body>
<div id="canvas-container"></div>
<script type="module">
import * as THREE from '${THREE_CDN_URL}';

const state = ${stateJson};
state.canvas = { width: ${width}, height: ${height}, pixelDensity: ${pixelDensity} };

${sketch.algorithm}

sketch(THREE, state, document.getElementById('canvas-container'));
</script>
</body>
</html>`;
  }

  /** Starter source shown to users creating a new Three.js sketch. */
  getAlgorithmTemplate() {
    return `function sketch(THREE, state, container) {
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(75, state.canvas.width / state.canvas.height, 0.1, 1000);
const renderer = new THREE.WebGLRenderer({ antialias: true, preserveDrawingBuffer: true });
renderer.setSize(state.canvas.width, state.canvas.height);
container.appendChild(renderer.domElement);
camera.position.z = 5;

let animating = true;
let animId = null;

function initializeSystem() {
// Rebuild scene from state.params and state.seed
while (scene.children.length > 0) scene.remove(scene.children[0]);
const geometry = new THREE.BoxGeometry();
const material = new THREE.MeshBasicMaterial({ color: 0x22d3ee });
const cube = new THREE.Mesh(geometry, material);
scene.add(cube);
}

function animate() {
if (!animating) return;
animId = requestAnimationFrame(animate);
renderer.render(scene, camera);
}

function pause() {
animating = false;
if (animId !== null) cancelAnimationFrame(animId);
}

function resume() {
animating = true;
animate();
}

function dispose() {
pause();
renderer.dispose();
container.removeChild(renderer.domElement);
}

initializeSystem();
animate();

return { initializeSystem, dispose, pause, resume };
}`;
  }

  /** @returns {{ name: string, version: string, cdnUrl: string }[]} */
  getRuntimeDependencies() {
    return [
      {
        name: "three",
        version: THREE_CDN_VERSION,
        cdnUrl: THREE_CDN_URL
      }
    ];
  }
};
689
/**
 * Escapes the four HTML-significant characters for safe interpolation into
 * markup text and double-quoted attributes.
 * @param {string} str
 * @returns {string}
 */
function escapeHtml3(str) {
  let escaped = "";
  for (const ch of str) {
    if (ch === "&") escaped += "&amp;";
    else if (ch === "<") escaped += "&lt;";
    else if (ch === ">") escaped += "&gt;";
    else if (ch === '"') escaped += "&quot;";
    else escaped += ch;
  }
  return escaped;
}
692
+
693
+ // src/sketch/adapters/glsl.ts
694
// Uniforms the GLSL runtime provides automatically; user-declared uniforms
// with these names are never exposed as params or palette colors.
var BUILTIN_UNIFORMS = /* @__PURE__ */ new Set(["u_resolution", "u_time", "u_seed"]);
699
// Minimal GLSL ES 3.00 vertex stage: passes the fullscreen-quad corners
// straight through; all drawing happens in the user's fragment shader.
var FULLSCREEN_QUAD_VERTEX = [
  "#version 300 es",
  "in vec2 a_position;",
  "void main() {",
  "gl_Position = vec4(a_position, 0.0, 1.0);",
  "}"
].join("\n");
704
/**
 * Converts a hex color string to normalized [r, g, b] components in [0, 1].
 * @param {string} hex - "#rrggbb" or shorthand "#rgb" (leading "#" optional)
 * @returns {[number, number, number]}
 */
function hexToVec3(hex) {
  let clean = hex.replace(/^#/, "");
  // Generalization: expand 3-digit shorthand (#abc -> aabbcc). The original
  // produced NaN components for shorthand palette entries.
  if (clean.length === 3) {
    clean = clean.split("").map((c) => c + c).join("");
  }
  const r = parseInt(clean.substring(0, 2), 16) / 255;
  const g = parseInt(clean.substring(2, 4), 16) / 255;
  const b = parseInt(clean.substring(4, 6), 16) / 255;
  return [r, g, b];
}
711
/**
 * Scans GLSL source for user-declared `u_*` uniforms and buckets them into
 * scalar params and palette colors (u_colorN), skipping runtime built-ins.
 * @param {string} source - fragment shader source
 * @returns {{ params: string[], colors: string[] }}
 */
function extractUniforms(source) {
  const params = [];
  const colors = [];
  const declaration = /uniform\s+(?:float|vec[234]|int|mat[234])\s+(u_\w+)/g;
  for (const found of source.matchAll(declaration)) {
    const uniformName = found[1];
    if (BUILTIN_UNIFORMS.has(uniformName)) {
      continue;
    }
    const isPaletteColor = /^u_color\d+$/.test(uniformName);
    (isPaletteColor ? colors : params).push(uniformName);
  }
  return { params, colors };
}
727
+ var GLSLRendererAdapter = class {
728
+ type = "glsl";
729
+ displayName = "GLSL Shader";
730
+ algorithmLanguage = "glsl";
731
+ validate(algorithm) {
732
+ if (!algorithm || algorithm.trim().length === 0) {
733
+ return { valid: false, errors: ["Algorithm source is empty"] };
734
+ }
735
+ const hasVersion = /#version\s+\d+\s+es/.test(algorithm);
736
+ const hasMain = /void\s+main\s*\(\s*\)/.test(algorithm);
737
+ const hasFragColor = /fragColor/.test(algorithm) || /gl_FragColor/.test(algorithm);
738
+ const errors = [];
739
+ if (!hasVersion) {
740
+ errors.push("GLSL shaders should start with a #version directive (e.g., #version 300 es)");
741
+ }
742
+ if (!hasMain) {
743
+ errors.push("GLSL shaders must contain a void main() function");
744
+ }
745
+ if (!hasFragColor) {
746
+ errors.push("GLSL shaders must write to fragColor or gl_FragColor");
747
+ }
748
+ return { valid: errors.length === 0, errors };
749
+ }
750
+ async compile(algorithm) {
751
+ const validation = this.validate(algorithm);
752
+ if (!validation.valid) {
753
+ throw new Error(
754
+ `GLSL compilation failed: ${validation.errors.join("; ")}`
755
+ );
756
+ }
757
+ const uniforms = extractUniforms(algorithm);
758
+ const compiled = {
759
+ fragmentSource: algorithm,
760
+ vertexSource: FULLSCREEN_QUAD_VERTEX,
761
+ uniformNames: uniforms
762
+ };
763
+ return compiled;
764
+ }
765
+ createInstance(compiled, state, canvas) {
766
+ const { fragmentSource, vertexSource, uniformNames } = compiled;
767
+ let canvasEl = null;
768
+ let gl = null;
769
+ let program = null;
770
+ let vao = null;
771
+ let positionBuffer = null;
772
+ let container = null;
773
+ let currentState = { ...state };
774
+ let animating = false;
775
+ let animationFrameId = null;
776
+ let startTime = 0;
777
+ function compileShader(glCtx, type, source) {
778
+ const shader = glCtx.createShader(type);
779
+ if (!shader) throw new Error("Failed to create shader");
780
+ glCtx.shaderSource(shader, source);
781
+ glCtx.compileShader(shader);
782
+ if (!glCtx.getShaderParameter(shader, glCtx.COMPILE_STATUS)) {
783
+ const info = glCtx.getShaderInfoLog(shader);
784
+ glCtx.deleteShader(shader);
785
+ throw new Error(`Shader compilation error: ${info}`);
786
+ }
787
+ return shader;
788
+ }
789
+ function bindUniforms(glCtx, prog) {
790
+ const resLoc = glCtx.getUniformLocation(prog, "u_resolution");
791
+ if (resLoc) {
792
+ glCtx.uniform2f(resLoc, canvas.width, canvas.height);
793
+ }
794
+ const seedLoc = glCtx.getUniformLocation(prog, "u_seed");
795
+ if (seedLoc) {
796
+ glCtx.uniform1f(seedLoc, currentState.seed);
797
+ }
798
+ const timeLoc = glCtx.getUniformLocation(prog, "u_time");
799
+ if (timeLoc) {
800
+ const elapsed = (performance.now() - startTime) / 1e3;
801
+ glCtx.uniform1f(timeLoc, elapsed);
802
+ }
803
+ for (const uName of uniformNames.params) {
804
+ const loc = glCtx.getUniformLocation(prog, uName);
805
+ if (!loc) continue;
806
+ const paramKey = uName.substring(2);
807
+ const value = currentState.params[paramKey];
808
+ if (value !== void 0) {
809
+ glCtx.uniform1f(loc, value);
810
+ }
811
+ }
812
+ for (const uName of uniformNames.colors) {
813
+ const loc = glCtx.getUniformLocation(prog, uName);
814
+ if (!loc) continue;
815
+ const idx = parseInt(uName.replace("u_color", ""), 10) - 1;
816
+ const hex = currentState.colorPalette[idx];
817
+ if (hex) {
818
+ const [r, g, b] = hexToVec3(hex);
819
+ glCtx.uniform3f(loc, r, g, b);
820
+ }
821
+ }
822
+ }
823
+ function renderFrame() {
824
+ if (!gl || !program || !vao) return;
825
+ gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
826
+ gl.clearColor(0, 0, 0, 1);
827
+ gl.clear(gl.COLOR_BUFFER_BIT);
828
+ gl.useProgram(program);
829
+ bindUniforms(gl, program);
830
+ gl.bindVertexArray(vao);
831
+ gl.drawArrays(gl.TRIANGLES, 0, 6);
832
+ gl.bindVertexArray(null);
833
+ }
834
+ function animationLoop() {
835
+ if (!animating) return;
836
+ renderFrame();
837
+ animationFrameId = requestAnimationFrame(animationLoop);
838
+ }
839
+ const instance = {
840
+ mount(el) {
841
+ container = el;
842
+ canvasEl = document.createElement("canvas");
843
+ canvasEl.width = canvas.width;
844
+ canvasEl.height = canvas.height;
845
+ const density = canvas.pixelDensity ?? 1;
846
+ if (density !== 1) {
847
+ canvasEl.width = canvas.width * density;
848
+ canvasEl.height = canvas.height * density;
849
+ canvasEl.style.width = `${canvas.width}px`;
850
+ canvasEl.style.height = `${canvas.height}px`;
851
+ }
852
+ el.appendChild(canvasEl);
853
+ gl = canvasEl.getContext("webgl2", {
854
+ preserveDrawingBuffer: true
855
+ });
856
+ if (!gl) throw new Error("WebGL2 is not supported");
857
+ const vertShader = compileShader(gl, gl.VERTEX_SHADER, vertexSource);
858
+ const fragShader = compileShader(
859
+ gl,
860
+ gl.FRAGMENT_SHADER,
861
+ fragmentSource
862
+ );
863
+ program = gl.createProgram();
864
+ if (!program) throw new Error("Failed to create WebGL program");
865
+ gl.attachShader(program, vertShader);
866
+ gl.attachShader(program, fragShader);
867
+ gl.linkProgram(program);
868
+ if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
869
+ const info = gl.getProgramInfoLog(program);
870
+ throw new Error(`Program linking error: ${info}`);
871
+ }
872
+ gl.deleteShader(vertShader);
873
+ gl.deleteShader(fragShader);
874
+ const positions = new Float32Array([
875
+ -1,
876
+ -1,
877
+ 1,
878
+ -1,
879
+ -1,
880
+ 1,
881
+ -1,
882
+ 1,
883
+ 1,
884
+ -1,
885
+ 1,
886
+ 1
887
+ ]);
888
+ positionBuffer = gl.createBuffer();
889
+ gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
890
+ gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
891
+ vao = gl.createVertexArray();
892
+ gl.bindVertexArray(vao);
893
+ const aPosition = gl.getAttribLocation(program, "a_position");
894
+ gl.enableVertexAttribArray(aPosition);
895
+ gl.vertexAttribPointer(aPosition, 2, gl.FLOAT, false, 0, 0);
896
+ gl.bindVertexArray(null);
897
+ startTime = performance.now();
898
+ animating = true;
899
+ animationLoop();
900
+ },
901
+ unmount() {
902
+ if (animationFrameId !== null) {
903
+ cancelAnimationFrame(animationFrameId);
904
+ animationFrameId = null;
905
+ }
906
+ animating = false;
907
+ if (gl) {
908
+ if (vao) gl.deleteVertexArray(vao);
909
+ if (positionBuffer) gl.deleteBuffer(positionBuffer);
910
+ if (program) gl.deleteProgram(program);
911
+ }
912
+ if (canvasEl && container) {
913
+ container.removeChild(canvasEl);
914
+ }
915
+ gl = null;
916
+ program = null;
917
+ vao = null;
918
+ positionBuffer = null;
919
+ canvasEl = null;
920
+ container = null;
921
+ },
922
+ updateState(newState) {
923
+ currentState = { ...newState };
924
+ if (!animating) {
925
+ renderFrame();
926
+ }
927
+ },
928
+ redraw() {
929
+ renderFrame();
930
+ },
931
+ pause() {
932
+ animating = false;
933
+ if (animationFrameId !== null) {
934
+ cancelAnimationFrame(animationFrameId);
935
+ animationFrameId = null;
936
+ }
937
+ },
938
+ resume() {
939
+ if (!animating) {
940
+ animating = true;
941
+ animationLoop();
942
+ }
943
+ },
944
+ get isAnimating() {
945
+ return animating;
946
+ },
947
+ async captureFrame(options) {
948
+ if (!canvasEl) throw new Error("Sketch is not mounted");
949
+ const format = options?.format ?? "png";
950
+ const quality = options?.quality ?? 1;
951
+ const mimeType = format === "jpeg" ? "image/jpeg" : format === "webp" ? "image/webp" : "image/png";
952
+ return canvasEl.toDataURL(mimeType, quality);
953
+ },
954
+ async captureImageData() {
955
+ if (!canvasEl || !gl) throw new Error("Sketch is not mounted");
956
+ const width = gl.drawingBufferWidth;
957
+ const height = gl.drawingBufferHeight;
958
+ const pixels = new Uint8Array(width * height * 4);
959
+ gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
960
+ const flipped = new Uint8Array(width * height * 4);
961
+ const rowSize = width * 4;
962
+ for (let y = 0; y < height; y++) {
963
+ const srcOffset = y * rowSize;
964
+ const dstOffset = (height - 1 - y) * rowSize;
965
+ flipped.set(pixels.subarray(srcOffset, srcOffset + rowSize), dstOffset);
966
+ }
967
+ return new ImageData(
968
+ new Uint8ClampedArray(flipped.buffer),
969
+ width,
970
+ height
971
+ );
972
+ },
973
+ dispose() {
974
+ instance.unmount();
975
+ }
976
+ };
977
+ return instance;
978
+ }
979
+ async renderOffscreen(compiled, state, canvas, options) {
980
+ const el = document.createElement("div");
981
+ el.style.position = "absolute";
982
+ el.style.left = "-9999px";
983
+ document.body.appendChild(el);
984
+ try {
985
+ const instance = this.createInstance(compiled, state, canvas);
986
+ instance.mount(el);
987
+ await new Promise((resolve) => setTimeout(resolve, 50));
988
+ const dataUrl = await instance.captureFrame(options);
989
+ instance.dispose();
990
+ const base64 = dataUrl.split(",")[1] ?? "";
991
+ const binary = atob(base64);
992
+ const bytes = new Uint8Array(binary.length);
993
+ for (let i = 0; i < binary.length; i++) {
994
+ bytes[i] = binary.charCodeAt(i);
995
+ }
996
+ return bytes;
997
+ } finally {
998
+ document.body.removeChild(el);
999
+ }
1000
+ }
1001
  /**
   * Build a fully self-contained HTML page that runs this GLSL sketch:
   * the exported document inlines the frozen state, compiles the user's
   * fragment shader against a fullscreen quad, binds built-in and
   * state-derived uniforms each frame, and animates via
   * requestAnimationFrame.
   */
  generateStandaloneHTML(sketch) {
    const { width, height } = sketch.canvas;
    const pixelDensity = sketch.canvas.pixelDensity ?? 1;
    const stateJson = JSON.stringify(sketch.state, null, 2);
    // Static scan of the shader source for u_* uniform declarations so the
    // exported page can bind state.params / state.colorPalette to them.
    const uniforms = extractUniforms(sketch.algorithm);
    const paramBindings = uniforms.params.map((u) => {
      // "u_foo" -> state.params["foo"]
      const key = u.substring(2);
      return ` { const loc = gl.getUniformLocation(program, "${u}"); if (loc && state.params["${key}"] !== undefined) gl.uniform1f(loc, state.params["${key}"]); }`;
    }).join("\n");
    const colorBindings = uniforms.colors.map((u) => {
      // "u_color1" -> state.colorPalette[0] (uniforms are 1-based, palette 0-based);
      // hex "#rrggbb" is split into normalized RGB floats in the generated code.
      const idx = parseInt(u.replace("u_color", ""), 10) - 1;
      return ` { const loc = gl.getUniformLocation(program, "${u}"); if (loc && state.colorPalette[${idx}]) { const c = state.colorPalette[${idx}].replace('#',''); gl.uniform3f(loc, parseInt(c.substring(0,2),16)/255, parseInt(c.substring(2,4),16)/255, parseInt(c.substring(4,6),16)/255); } }`;
    }).join("\n");
    return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>${escapeHtml4(sketch.title)}</title>
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
body { display: flex; justify-content: center; align-items: center; min-height: 100vh; background: #111; }
canvas { display: block; max-width: 100vw; max-height: 100vh; }
</style>
</head>
<body>
<canvas id="canvas" width="${width * pixelDensity}" height="${height * pixelDensity}" style="width:${width}px;height:${height}px;"></canvas>
<script>
const state = ${stateJson};

const canvas = document.getElementById('canvas');
const gl = canvas.getContext('webgl2', { preserveDrawingBuffer: true });
if (!gl) { document.body.textContent = 'WebGL2 not supported'; throw new Error('No WebGL2'); }

// Vertex shader: fullscreen quad
const vertSrc = \`${FULLSCREEN_QUAD_VERTEX}\`;

// Fragment shader
const fragSrc = \`${sketch.algorithm.replace(/\\/g, "\\\\").replace(/`/g, "\\`").replace(/\$/g, "\\$")}\`;

function createShader(type, src) {
const s = gl.createShader(type);
gl.shaderSource(s, src);
gl.compileShader(s);
if (!gl.getShaderParameter(s, gl.COMPILE_STATUS)) throw new Error(gl.getShaderInfoLog(s));
return s;
}

const program = gl.createProgram();
gl.attachShader(program, createShader(gl.VERTEX_SHADER, vertSrc));
gl.attachShader(program, createShader(gl.FRAGMENT_SHADER, fragSrc));
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) throw new Error(gl.getProgramInfoLog(program));

const positions = new Float32Array([-1,-1, 1,-1, -1,1, -1,1, 1,-1, 1,1]);
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
const aPos = gl.getAttribLocation(program, 'a_position');
gl.enableVertexAttribArray(aPos);
gl.vertexAttribPointer(aPos, 2, gl.FLOAT, false, 0, 0);
gl.bindVertexArray(null);

const startTime = performance.now();
function render() {
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
gl.clearColor(0, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(program);

// Built-in uniforms
{ const loc = gl.getUniformLocation(program, 'u_resolution'); if (loc) gl.uniform2f(loc, ${width}, ${height}); }
{ const loc = gl.getUniformLocation(program, 'u_seed'); if (loc) gl.uniform1f(loc, state.seed); }
{ const loc = gl.getUniformLocation(program, 'u_time'); if (loc) gl.uniform1f(loc, (performance.now() - startTime) / 1000); }

// Parameter uniforms
${paramBindings}

// Color uniforms
${colorBindings}

gl.bindVertexArray(vao);
gl.drawArrays(gl.TRIANGLES, 0, 6);
gl.bindVertexArray(null);
requestAnimationFrame(render);
}
render();
</script>
</body>
</html>`;
  }
1094
  /**
   * Starter fragment-shader source shown when creating a new GLSL sketch.
   * Declares the built-in uniforms the runtime binds (u_resolution,
   * u_time, u_seed) plus example param/color uniforms, and draws a simple
   * animated UV gradient.
   */
  getAlgorithmTemplate() {
    return `#version 300 es
precision highp float;
uniform vec2 u_resolution;
uniform float u_time;
uniform float u_seed;
uniform float u_noiseScale; // mapped from state.params.noiseScale
uniform vec3 u_color1; // mapped from state.colorPalette[0]
out vec4 fragColor;
void main() {
vec2 uv = gl_FragCoord.xy / u_resolution;
fragColor = vec4(uv, 0.5 + 0.5 * sin(u_time), 1.0);
}`;
  }
1108
+ getRuntimeDependencies() {
1109
+ return [];
1110
+ }
1111
+ };
1112
/**
 * Escape a string for safe interpolation into HTML text or attribute
 * values. Replaces &, <, >, double quote, and single quote.
 *
 * FIX: the single quote was previously left unescaped, which is unsafe
 * if the value ever lands inside a single-quoted attribute. `&` is
 * replaced first so later substitutions are not double-escaped.
 *
 * @param {string} str - Untrusted text to embed in HTML.
 * @returns {string} Entity-escaped text.
 */
function escapeHtml4(str) {
  return str
    .replaceAll("&", "&amp;")
    .replaceAll("<", "&lt;")
    .replaceAll(">", "&gt;")
    .replaceAll('"', "&quot;")
    .replaceAll("'", "&#39;");
}
1115
+
1116
+ // src/sketch/adapters/svg.ts
1117
/**
 * Renderer adapter for SVG sketches.
 *
 * An SVG algorithm is a JavaScript source string defining
 * `function sketch(state)` that returns a module object with a
 * `generate()` method producing SVG markup. The adapter evaluates that
 * source, injects the markup into a container element, and re-runs the
 * sketch whenever state changes. SVG output is static, so pause/resume
 * are no-ops and isAnimating is always false.
 */
var SVGRendererAdapter = class {
  type = "svg";
  displayName = "SVG";
  algorithmLanguage = "javascript";
  /**
   * Statically check an algorithm source string.
   * @returns {{valid: boolean, errors: string[]}} Never throws.
   */
  validate(algorithm) {
    if (!algorithm || algorithm.trim().length === 0) {
      return { valid: false, errors: ["Algorithm source is empty"] };
    }
    // Accept a declaration, function expression, or arrow function named
    // `sketch` taking a single `state` parameter.
    const hasSketchFn = /function\s+sketch\s*\(\s*state\s*\)/.test(algorithm) || /(?:const|let|var)\s+sketch\s*=\s*(?:function\s*)?\(\s*state\s*\)/.test(algorithm) || /(?:const|let|var)\s+sketch\s*=\s*\(\s*state\s*\)\s*=>/.test(algorithm);
    if (!hasSketchFn) {
      return {
        valid: false,
        errors: [
          "SVG algorithms must export a function with signature: function sketch(state)"
        ]
      };
    }
    return { valid: true, errors: [] };
  }
  /**
   * Compile the algorithm source into `{ source, factory }`, where
   * `factory` is the user's sketch(state) function.
   * NOTE: the source is evaluated with `new Function` — only trusted
   * sketch code should reach this point.
   * @throws {Error} when validation or evaluation fails.
   */
  async compile(algorithm) {
    const validation = this.validate(algorithm);
    if (!validation.valid) {
      throw new Error(
        `SVG compilation failed: ${validation.errors.join("; ")}`
      );
    }
    // Wrap so the user's `sketch` binding is returned from the evaluated body.
    const wrappedSource = `
      return (function() {
        ${algorithm}
        return sketch;
      })();
    `;
    try {
      const factory = new Function(wrappedSource);
      const compiled = {
        source: algorithm,
        factory: factory()
      };
      return compiled;
    } catch (err) {
      throw new Error(
        `SVG compilation failed: ${err instanceof Error ? err.message : String(err)}`
      );
    }
  }
  /**
   * Create a mountable live instance of a compiled SVG sketch.
   * The sketch is re-instantiated (and its markup regenerated) on mount
   * and on every updateState call.
   */
  createInstance(compiled, state, canvas) {
    const { factory } = compiled;
    let container = null;
    let currentState = { ...state, canvas };
    let sketchModule = null;
    // Re-run generate() and replace the container's markup.
    function regenerate() {
      if (!container || !sketchModule) return;
      if (typeof sketchModule["generate"] === "function") {
        const svgString = sketchModule["generate"]();
        container.innerHTML = svgString;
      }
    }
    const instance = {
      mount(el) {
        container = el;
        sketchModule = factory(currentState);
        regenerate();
      },
      unmount() {
        if (container) {
          container.innerHTML = "";
        }
        container = null;
        sketchModule = null;
      },
      updateState(newState) {
        currentState = { ...newState, canvas };
        // Re-instantiate so the sketch sees the new state snapshot.
        sketchModule = factory(currentState);
        regenerate();
      },
      redraw() {
        regenerate();
      },
      pause() {
        // No-op: SVG output is static.
      },
      resume() {
        // No-op: SVG output is static.
      },
      get isAnimating() {
        return false;
      },
      /** Serialize the mounted SVG to a base64 data URL (options are ignored). */
      async captureFrame(_options) {
        if (!container) throw new Error("Sketch is not mounted");
        const svgEl = container.querySelector("svg");
        if (!svgEl) throw new Error("No SVG element found");
        const serializer = new XMLSerializer();
        const svgString = serializer.serializeToString(svgEl);
        // encodeURIComponent/unescape round-trip makes btoa UTF-8 safe.
        const base64 = btoa(unescape(encodeURIComponent(svgString)));
        return `data:image/svg+xml;base64,${base64}`;
      },
      /**
       * Rasterize the mounted SVG into an ImageData at the canvas size.
       * BUGFIX: the object URL is now revoked in a finally block — it
       * previously leaked when image decoding or rasterization threw.
       */
      async captureImageData() {
        if (!container) throw new Error("Sketch is not mounted");
        const svgEl = container.querySelector("svg");
        if (!svgEl) throw new Error("No SVG element found");
        const serializer = new XMLSerializer();
        const svgString = serializer.serializeToString(svgEl);
        const blob = new Blob([svgString], { type: "image/svg+xml" });
        const url = URL.createObjectURL(blob);
        try {
          const img = new Image();
          await new Promise((resolve, reject) => {
            img.onload = () => resolve();
            img.onerror = () => reject(new Error("Failed to load SVG as image"));
            img.src = url;
          });
          const offscreen = document.createElement("canvas");
          offscreen.width = canvas.width;
          offscreen.height = canvas.height;
          const ctx = offscreen.getContext("2d");
          if (!ctx) throw new Error("Cannot get 2D context");
          ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
          return ctx.getImageData(0, 0, canvas.width, canvas.height);
        } finally {
          URL.revokeObjectURL(url);
        }
      },
      dispose() {
        instance.unmount();
      }
    };
    return instance;
  }
  /**
   * Headless render: run the sketch once and return its SVG markup as
   * UTF-8 bytes. Requires the sketch module to expose generate().
   * @throws {Error} if the module has no generate() method.
   */
  async renderOffscreen(compiled, state, canvas, _options) {
    const { factory } = compiled;
    const stateWithCanvas = { ...state, canvas };
    const module = factory(stateWithCanvas);
    let svgString;
    if (typeof module["generate"] === "function") {
      svgString = module["generate"]();
    } else {
      throw new Error("SVG sketch module must have a generate() method");
    }
    const encoder = new TextEncoder();
    return encoder.encode(svgString);
  }
  /**
   * Build a self-contained HTML page that embeds the sketch source and
   * regenerates the SVG inline on load. Falls back to the legacy
   * initializeSystem() entry point when generate() is absent.
   */
  generateStandaloneHTML(sketch) {
    const { width, height } = sketch.canvas;
    const stateJson = JSON.stringify(sketch.state, null, 2);
    return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>${escapeHtml5(sketch.title)}</title>
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
body { display: flex; justify-content: center; align-items: center; min-height: 100vh; background: #111; }
#svg-container svg { display: block; width: 100%; height: auto; max-width: 100vw; max-height: 100vh; }
</style>
</head>
<body>
<div id="svg-container"></div>
<script>
const state = ${stateJson};
state.canvas = { width: ${width}, height: ${height} };

${sketch.algorithm}

const module = sketch(state);
const svgString = module.generate ? module.generate() : module.initializeSystem();
document.getElementById('svg-container').innerHTML = svgString;
</script>
</body>
</html>`;
  }
  /** Starter source shown when creating a new SVG sketch. */
  getAlgorithmTemplate() {
    return `function sketch(state) {
const { width, height } = state.canvas;
function generate() {
return \`<svg xmlns="http://www.w3.org/2000/svg" width="\${width}" height="\${height}" viewBox="0 0 \${width} \${height}">
<!-- generated elements -->
</svg>\`;
}
return { generate, initializeSystem: generate };
}`;
  }
  /** SVG sketches have no external runtime script dependencies. */
  getRuntimeDependencies() {
    return [];
  }
};
1298
/**
 * Escape a string for safe interpolation into HTML text or attribute
 * values. Replaces &, <, >, double quote, and single quote.
 *
 * FIX: the single quote was previously left unescaped, which is unsafe
 * if the value ever lands inside a single-quoted attribute. `&` is
 * replaced first so later substitutions are not double-escaped.
 *
 * @param {string} str - Untrusted text to embed in HTML.
 * @returns {string} Entity-escaped text.
 */
function escapeHtml5(str) {
  return str
    .replaceAll("&", "&amp;")
    .replaceAll("<", "&lt;")
    .replaceAll(">", "&gt;")
    .replaceAll('"', "&quot;")
    .replaceAll("'", "&#39;");
}
1301
+
1302
+ // src/sketch/registry.ts
1303
/**
 * Mutable lookup table mapping renderer type strings to their adapter
 * instances. The default type is "p5" for v1.0 compatibility.
 */
var RendererRegistry = class {
  // type string -> adapter instance (insertion order preserved by Map)
  adapters = /* @__PURE__ */ new Map();
  defaultType = "p5";
  /**
   * Register an adapter under its own `type`; an adapter already
   * registered for that type is replaced.
   */
  register(adapter) {
    this.adapters.set(adapter.type, adapter);
  }
  /**
   * Look up an adapter by type, falling back to the default type ("p5")
   * when `type` is null/undefined.
   * @throws Error listing the registered types when lookup fails.
   */
  resolve(type) {
    const wanted = type ?? this.defaultType;
    const found = this.adapters.get(wanted);
    if (found) {
      return found;
    }
    const registered = [...this.adapters.keys()].join(", ");
    throw new Error(
      `Unknown renderer type "${wanted}". Registered types: ${registered || "(none)"}`
    );
  }
  /** All registered renderer types, in registration order. */
  list() {
    return Array.from(this.adapters.keys());
  }
  /** The adapter registered for the default type ("p5"); throws if absent. */
  getDefault() {
    return this.resolve(this.defaultType);
  }
  /** Whether an adapter is registered for `type`. */
  has(type) {
    return this.adapters.has(type);
  }
};
1348
/**
 * Build a registry pre-populated with every built-in renderer adapter
 * (p5, canvas2d, three, glsl, svg). Registration order is preserved in
 * list(); p5 remains the registry's default type.
 */
function createDefaultRegistry() {
  const registry = new RendererRegistry();
  const builtins = [
    new P5RendererAdapter(),
    new Canvas2DRendererAdapter(),
    new ThreeRendererAdapter(),
    new GLSLRendererAdapter(),
    new SVGRendererAdapter()
  ];
  for (const adapter of builtins) {
    registry.register(adapter);
  }
  return registry;
}
1357
+
1358
+ // src/skill/skills.ts
1359
// Curated composition-skill catalog. Each entry carries: id/category/
// complexity for lookup and filtering, description + theory as teaching
// text, principles as actionable guidelines, references to the source
// literature, and suggestedParameters as slider-ready defaults for
// sketches that apply the skill.
var COMPOSITION_SKILLS = [
  // Phi-based proportioning and golden-spiral layout.
  {
    id: "golden-ratio",
    name: "Golden Ratio",
    category: "composition",
    complexity: "intermediate",
    description: "Use the golden ratio (1:1.618) to create naturally harmonious proportions and spiral compositions.",
    theory: `The golden ratio (phi, approximately 1.618) appears throughout nature and has been used in art and architecture for millennia. In generative art, it provides a mathematical foundation for creating compositions that feel naturally balanced.

The golden rectangle can be recursively subdivided, creating a spiral that guides the viewer's eye. Elements placed along this spiral or at golden section points create visual harmony without the rigidity of symmetric layouts.

Key applications in generative art:
- Divide the canvas at golden ratio points (61.8% / 38.2%) for element placement
- Use the golden spiral as a path for distributing elements
- Scale recursive elements by phi for self-similar patterns
- Apply phi to spacing, margins, and proportional relationships`,
    principles: [
      "Divide compositions at 61.8% / 38.2% rather than halves",
      "Place focal elements at golden section intersections",
      "Use the golden spiral to guide element distribution",
      "Scale nested or recursive elements by phi (1.618) or 1/phi (0.618)",
      "Apply golden proportions to negative space as well as positive forms"
    ],
    references: [
      { title: "Art and Visual Perception", author: "Rudolf Arnheim", year: 1954 },
      { title: "Principles of Form and Design", author: "Wucius Wong", year: 1993 }
    ],
    suggestedParameters: [
      { key: "phi", label: "Golden Ratio", min: 1, max: 2, step: 1e-3, default: 1.618 },
      { key: "subdivisions", label: "Subdivisions", min: 1, max: 12, step: 1, default: 6 }
    ]
  },
  // 3x3 grid placement with focal points at the intersections.
  {
    id: "rule-of-thirds",
    name: "Rule of Thirds",
    category: "composition",
    complexity: "beginner",
    description: "Divide the canvas into a 3x3 grid and place key elements at intersection points for dynamic balance.",
    theory: `The rule of thirds is one of the most fundamental composition techniques. By dividing the canvas into nine equal sections with two horizontal and two vertical lines, artists create four "power points" at the intersections where the viewer's eye naturally rests.

In generative art, this grid serves as an anchor system. Rather than centering elements (which creates static compositions), placing density clusters, color accents, or focal geometries at third-line intersections produces dynamic, engaging layouts.

This rule also applies to the distribution of visual weight: roughly two-thirds of the canvas can be one tone or density, with the remaining third providing contrast.`,
    principles: [
      "Place primary focal elements at one of the four intersection points",
      "Align dominant lines (horizons, divisions) with the third lines, not center",
      "Distribute visual weight asymmetrically: 2/3 to 1/3 ratio",
      "Use the grid to create tension between filled and empty areas",
      "Multiple elements should occupy different intersection points"
    ],
    references: [
      { title: "Art and Visual Perception", author: "Rudolf Arnheim", year: 1954 },
      { title: "Principles of Form and Design", author: "Wucius Wong", year: 1993 }
    ],
    suggestedParameters: [
      { key: "gridDivisions", label: "Grid Divisions", min: 2, max: 8, step: 1, default: 3 },
      { key: "focalStrength", label: "Focal Strength", min: 0.1, max: 1, step: 0.1, default: 0.7 }
    ]
  },
  // Balancing size, contrast, saturation, density, and position.
  {
    id: "visual-weight",
    name: "Visual Weight & Balance",
    category: "composition",
    complexity: "intermediate",
    description: "Create equilibrium through the distribution of visual weight \u2014 size, density, color intensity, and position.",
    theory: `Every visual element carries "weight" determined by its size, color intensity, texture density, and position relative to the canvas center. Rudolf Arnheim's research showed that viewers perceive compositions as balanced or imbalanced based on this aggregate weight distribution.

In generative art, visual weight manifests through:
- **Size**: Larger elements carry more weight
- **Value contrast**: High-contrast elements are heavier than low-contrast ones
- **Color saturation**: Saturated colors are heavier than muted ones
- **Density**: Clusters of small elements can balance a single large one
- **Position**: Elements farther from center exert more leverage (like a seesaw)

Balance can be symmetrical (formal, stable), asymmetrical (dynamic, interesting), or radial (emanating from center).`,
    principles: [
      "A small, high-contrast element can balance a large, low-contrast one",
      "Elements farther from the center carry more compositional weight",
      "Asymmetrical balance creates more visual interest than symmetry",
      "Distribute density gradients to create directional visual flow",
      "Empty space (negative space) has visual weight and must be balanced too",
      "Color saturation and value contribute independently to perceived weight"
    ],
    references: [
      { title: "Art and Visual Perception", author: "Rudolf Arnheim", year: 1954 }
    ],
    suggestedParameters: [
      { key: "balancePoint", label: "Balance Point", min: 0.2, max: 0.8, step: 0.05, default: 0.5 },
      { key: "weightContrast", label: "Weight Contrast", min: 0.1, max: 2, step: 0.1, default: 1 }
    ]
  },
  // Proximity, similarity, closure, continuity, figure-ground grouping.
  {
    id: "gestalt-grouping",
    name: "Gestalt Grouping",
    category: "composition",
    complexity: "intermediate",
    description: "Apply Gestalt principles \u2014 proximity, similarity, closure, continuity \u2014 to organize elements into perceived wholes.",
    theory: `Gestalt psychology reveals how humans perceive visual elements as organized groups rather than isolated parts. These principles are powerful tools for generative artists:

- **Proximity**: Elements close together are perceived as a group. Control spacing to create clusters or separations.
- **Similarity**: Elements sharing color, size, or shape are grouped. Use gradual variation to create sub-groups.
- **Closure**: The mind completes incomplete shapes. Leave gaps in patterns \u2014 viewers will fill them in.
- **Continuity**: Elements arranged along a line or curve are perceived as related. Use flow fields or parametric curves.
- **Figure-Ground**: The relationship between foreground elements and background space defines the composition.

In generative art, these principles help create readable structure from potentially chaotic algorithms.`,
    principles: [
      "Use proximity (spacing) as the primary grouping mechanism",
      "Vary one property (color, size) while keeping others constant to create similarity groups",
      "Leave intentional gaps for the viewer's mind to complete (closure)",
      "Arrange elements along implied lines or curves for continuity",
      "Ensure clear figure-ground separation at all parameter settings",
      "Layer multiple Gestalt principles for complex but readable compositions"
    ],
    references: [
      { title: "Art and Visual Perception", author: "Rudolf Arnheim", year: 1954 },
      { title: "Principles of Form and Design", author: "Wucius Wong", year: 1993 }
    ],
    suggestedParameters: [
      { key: "groupSpacing", label: "Group Spacing", min: 0.5, max: 5, step: 0.1, default: 2 },
      { key: "similarity", label: "Similarity", min: 0, max: 1, step: 0.05, default: 0.8 },
      { key: "elementCount", label: "Element Count", min: 10, max: 500, step: 10, default: 100 }
    ]
  },
  // Positive-form vs. negative-space control and deliberate ambiguity.
  {
    id: "figure-ground",
    name: "Figure-Ground Relationship",
    category: "composition",
    complexity: "beginner",
    description: "Control the interplay between positive forms (figure) and negative space (ground) for clarity and ambiguity.",
    theory: `The figure-ground relationship is the most fundamental perceptual organization. Every composition consists of figures (perceived objects) and ground (the space around and between them).

Strong figure-ground contrast creates clarity and focus. Deliberate ambiguity \u2014 where figure and ground are interchangeable \u2014 creates visual puzzles that engage viewers (as in M.C. Escher's tessellations).

In generative art, controlling figure-ground means managing:
- Value contrast between elements and background
- Edge definition (sharp vs. soft boundaries)
- Density distribution (clustered elements read as figure)
- Reversibility (can the viewer flip perception?)`,
    principles: [
      "Maintain sufficient contrast between figure and ground for readability",
      "Use value (light/dark) as the primary figure-ground separator",
      "Create deliberate ambiguity for visual interest when appropriate",
      "Small enclosed areas tend to be perceived as figure; large areas as ground",
      "Convex shapes are more likely perceived as figure than concave ones"
    ],
    references: [
      { title: "Art and Visual Perception", author: "Rudolf Arnheim", year: 1954 }
    ],
    suggestedParameters: [
      { key: "contrast", label: "Figure-Ground Contrast", min: 0.1, max: 1, step: 0.05, default: 0.7 },
      { key: "density", label: "Figure Density", min: 0.1, max: 0.9, step: 0.05, default: 0.4 }
    ]
  },
  // Repetition, variation, and progression to direct the viewer's eye.
  {
    id: "rhythm-movement",
    name: "Rhythm & Movement",
    category: "composition",
    complexity: "advanced",
    description: "Create visual rhythm through repetition, variation, and progression to guide the viewer's eye across the composition.",
    theory: `Rhythm in visual art functions like rhythm in music \u2014 it creates patterns of emphasis and rest that move the viewer's eye through the composition. Wucius Wong identifies several types of visual rhythm:

- **Regular rhythm**: Consistent repetition of identical elements at equal intervals. Creates order and predictability.
- **Alternating rhythm**: Two or more elements or spacings alternate. Creates more complex patterns.
- **Progressive rhythm**: Elements gradually change in size, color, spacing, or orientation. Creates directional movement.
- **Flowing rhythm**: Elements follow organic, curving paths. Creates natural, fluid movement.
- **Random rhythm**: Irregular repetition with controlled variation. Creates controlled chaos.

In generative art, rhythm emerges from the interplay between your algorithm's regularity and its stochastic variation. The seed controls the specific instance; the parameters control the type and intensity of rhythm.`,
    principles: [
      "Establish a base rhythm through regular repetition before introducing variation",
      "Use progressive changes in size or spacing to create directional movement",
      "Alternate between tension (clustering) and release (spacing) zones",
      "Let flow fields or parametric curves create natural movement paths",
      "Control the ratio of regularity to randomness via parameters",
      "Use acceleration/deceleration in element distribution for energy"
    ],
    references: [
      { title: "Principles of Form and Design", author: "Wucius Wong", year: 1993 },
      { title: "Art and Visual Perception", author: "Rudolf Arnheim", year: 1954 }
    ],
    suggestedParameters: [
      { key: "frequency", label: "Rhythm Frequency", min: 1, max: 20, step: 1, default: 8 },
      { key: "variation", label: "Variation", min: 0, max: 1, step: 0.05, default: 0.3 },
      { key: "flow", label: "Flow Strength", min: 0, max: 2, step: 0.1, default: 0.5 }
    ]
  }
];
1547
+ var COLOR_SKILLS = [
1548
+ {
1549
+ id: "color-harmony",
1550
+ name: "Color Harmony Systems",
1551
+ category: "color",
1552
+ complexity: "beginner",
1553
+ description: "Use systematic color relationships \u2014 complementary, analogous, triadic, split-complementary \u2014 for harmonious palettes.",
1554
+ theory: `Color harmony describes combinations of colors that are aesthetically pleasing. Johannes Itten formalized these relationships using the color wheel:
1555
+
1556
+ - **Complementary**: Colors opposite on the wheel (e.g., blue/orange). Maximum contrast, vibrant when juxtaposed.
1557
+ - **Analogous**: 2-4 adjacent colors on the wheel. Harmonious and calm, low contrast.
1558
+ - **Triadic**: Three colors equally spaced (120\xB0 apart). Balanced and vibrant.
1559
+ - **Split-complementary**: A color plus the two colors adjacent to its complement. High contrast with less tension.
1560
+ - **Tetradic**: Two pairs of complementary colors. Rich and complex.
1561
+
1562
+ In generative art, these systems provide algorithmic palette generation. Start with a base hue, then calculate harmonious companions using angular relationships on the HSL color wheel.`,
1563
+ principles: [
1564
+ "Choose one dominant color and use harmonics as accents (60-30-10 rule)",
1565
+ "Complementary pairs create maximum vibrance \u2014 use for focal contrast",
1566
+ "Analogous schemes work well for atmospheric, mood-driven pieces",
1567
+ "Vary saturation and value within a harmonic scheme for depth",
1568
+ "Use triadic schemes when you need balanced variety without chaos"
1569
+ ],
1570
+ references: [
1571
+ { title: "The Art of Color", author: "Johannes Itten", year: 1961 },
1572
+ { title: "Interaction of Color", author: "Josef Albers", year: 1963 }
1573
+ ],
1574
+ suggestedParameters: [
1575
+ { key: "baseHue", label: "Base Hue", min: 0, max: 360, step: 1, default: 200 },
1576
+ { key: "harmonyAngle", label: "Harmony Angle", min: 15, max: 180, step: 5, default: 120 }
1577
+ ],
1578
+ suggestedColors: [
1579
+ { key: "base", label: "Base Color", default: "#2196f3" },
1580
+ { key: "accent", label: "Accent Color", default: "#ff9800" }
1581
+ ]
1582
+ },
1583
+ {
1584
+ id: "simultaneous-contrast",
1585
+ name: "Simultaneous Contrast",
1586
+ category: "color",
1587
+ complexity: "advanced",
1588
+ description: "Exploit how adjacent colors alter each other's perceived hue, value, and saturation \u2014 the core of Albers' teaching.",
1589
+ theory: `Josef Albers spent decades demonstrating that color is the most relative medium in art. The same color appears dramatically different depending on its surroundings:
1590
+
1591
+ - A gray square on a black background appears lighter than the same gray on white
1592
+ - A neutral color surrounded by red appears to shift toward green (and vice versa)
1593
+ - Small color areas are more affected by their surroundings than large ones
1594
+
1595
+ This phenomenon \u2014 simultaneous contrast \u2014 means that in generative art, you cannot choose colors in isolation. The same palette will look different depending on element sizes, spacing, and layering order.
1596
+
1597
+ Practical implications:
1598
+ - Test colors in context, not in isolation
1599
+ - Adjacent complementary colors intensify each other
1600
+ - Adjacent similar colors reduce each other's saturation
1601
+ - Background color dramatically shifts perception of all foreground elements`,
1602
+ principles: [
1603
+ "The same color will appear different depending on what surrounds it",
1604
+ "Adjacent complementary colors intensify each other (vibration effect)",
1605
+ "Small color areas are dominated by surrounding colors",
1606
+ "Background color shifts the perceived hue of all foreground elements",
1607
+ "Use simultaneous contrast deliberately to create optical effects",
1608
+ "Test palette choices at various element sizes \u2014 effects change with scale"
1609
+ ],
1610
+ references: [
1611
+ { title: "Interaction of Color", author: "Josef Albers", year: 1963 }
1612
+ ],
1613
+ suggestedParameters: [
1614
+ { key: "elementSize", label: "Element Size", min: 2, max: 100, step: 1, default: 20 },
1615
+ { key: "borderWidth", label: "Border Width", min: 0, max: 10, step: 0.5, default: 0 }
1616
+ ]
1617
+ },
1618
+ {
1619
+ id: "color-temperature",
1620
+ name: "Color Temperature",
1621
+ category: "color",
1622
+ complexity: "beginner",
1623
+ description: "Use warm (red/orange/yellow) and cool (blue/green/violet) color relationships to create depth and emotional tone.",
1624
+ theory: `Color temperature divides the spectrum into warm colors (reds, oranges, yellows) and cool colors (blues, greens, violets). This division has profound perceptual and emotional effects:
1625
+
1626
+ **Spatial effects**: Warm colors appear to advance (come toward the viewer) while cool colors recede. This creates an automatic sense of depth without perspective geometry.
1627
+
1628
+ **Emotional associations**: Warm colors evoke energy, passion, urgency. Cool colors evoke calm, distance, contemplation.
1629
+
1630
+ **Temperature contrast**: The tension between warm and cool areas creates visual energy. A predominantly cool composition with a warm accent immediately draws the eye to the accent.
1631
+
1632
+ In generative art, map temperature to depth layers: cool backgrounds, warm foregrounds. Or use temperature gradients across the composition to create directional flow.`,
1633
+ principles: [
1634
+ "Warm colors advance, cool colors recede \u2014 use for spatial depth",
1635
+ "A small warm accent in a cool composition creates a strong focal point",
1636
+ "Temperature contrast is as important as value contrast for composition",
1637
+ "Map color temperature to z-depth or layering order",
1638
+ "Use temperature gradients to create directional energy flow"
1639
+ ],
1640
+ references: [
1641
+ { title: "The Art of Color", author: "Johannes Itten", year: 1961 },
1642
+ { title: "Interaction of Color", author: "Josef Albers", year: 1963 }
1643
+ ],
1644
+ suggestedParameters: [
1645
+ { key: "warmth", label: "Warmth", min: 0, max: 1, step: 0.05, default: 0.5 },
1646
+ { key: "temperatureRange", label: "Temperature Range", min: 30, max: 180, step: 10, default: 90 }
1647
+ ],
1648
+ suggestedColors: [
1649
+ { key: "warm", label: "Warm Tone", default: "#ff6b35" },
1650
+ { key: "cool", label: "Cool Tone", default: "#4a90d9" }
1651
+ ]
1652
+ },
1653
+ {
1654
+ id: "itten-contrasts",
1655
+ name: "Itten's Seven Contrasts",
1656
+ category: "color",
1657
+ complexity: "advanced",
1658
+ description: "Apply Itten's systematic framework of seven color contrasts to create specific visual effects.",
1659
+ theory: `Johannes Itten identified seven fundamental types of color contrast, each producing distinct visual effects:
1660
+
1661
+ 1. **Contrast of Hue**: Pure hues side by side (red, blue, yellow). Maximum color variety.
1662
+ 2. **Light-Dark Contrast**: Value differences. The strongest structural contrast.
1663
+ 3. **Cold-Warm Contrast**: Temperature opposition. Creates spatial depth.
1664
+ 4. **Complementary Contrast**: Opposite hues. Each intensifies the other.
1665
+ 5. **Simultaneous Contrast**: Perceived color shift caused by adjacency.
1666
+ 6. **Contrast of Saturation**: Pure vs. muted colors. Creates focus hierarchy.
1667
+ 7. **Contrast of Extension**: Relative area sizes of colors. Balances visual weight.
1668
+
1669
+ Each contrast type can be parameterized in generative art. A single piece might employ multiple contrasts simultaneously, with parameters controlling the intensity of each.`,
1670
+ principles: [
1671
+ "Light-dark contrast provides the structural backbone of any composition",
1672
+ "Use saturation contrast to create hierarchy: saturated = focal, muted = ambient",
1673
+ "Complementary contrast creates energy; analogous reduces it",
1674
+ "Contrast of extension: balance area ratios to color intensity (bright colors need less area)",
1675
+ "Layer multiple contrast types for visual complexity",
1676
+ "Control contrast intensity through parameters for exploration"
1677
+ ],
1678
+ references: [
1679
+ { title: "The Art of Color", author: "Johannes Itten", year: 1961 }
1680
+ ],
1681
+ suggestedParameters: [
1682
+ { key: "hueContrast", label: "Hue Contrast", min: 0, max: 1, step: 0.05, default: 0.5 },
1683
+ { key: "valueContrast", label: "Value Contrast", min: 0.1, max: 1, step: 0.05, default: 0.7 },
1684
+ { key: "saturationContrast", label: "Saturation Contrast", min: 0, max: 1, step: 0.05, default: 0.4 }
1685
+ ]
1686
+ },
1687
+ {
1688
+ id: "value-structure",
1689
+ name: "Value Structure",
1690
+ category: "color",
1691
+ complexity: "intermediate",
1692
+ description: "Organize compositions through light-dark value patterns \u2014 the foundation that underlies all color decisions.",
1693
+ theory: `Value (lightness/darkness) is the most important property of color for composition. A piece that works in grayscale will work in any palette; a piece that fails in grayscale cannot be saved by color alone.
1694
+
1695
+ Value structure means planning the distribution of lights, mid-tones, and darks across the composition. Classic approaches include:
1696
+
1697
+ - **High-key**: Predominantly light values. Airy, ethereal, optimistic.
1698
+ - **Low-key**: Predominantly dark values. Dramatic, mysterious, intense.
1699
+ - **Full-range**: Full spectrum from near-white to near-black. Maximum contrast and visual impact.
1700
+ - **Limited-range**: Narrow value band. Subtle, atmospheric, unified.
1701
+
1702
+ In generative art, map value to depth, density, or importance. Establish a value plan (the notan \u2014 simplified light/dark pattern) before adding color complexity.`,
1703
+ principles: [
1704
+ "Squint at your output: if the value pattern is unclear, color won't help",
1705
+ "Limit your palette to 3-5 value steps for strong structure",
1706
+ "Reserve highest contrast for the focal area",
1707
+ "Use mid-tones for transitions and atmosphere",
1708
+ "Dark values carry more visual weight than light ones at equal size"
1709
+ ],
1710
+ references: [
1711
+ { title: "Interaction of Color", author: "Josef Albers", year: 1963 },
1712
+ { title: "The Art of Color", author: "Johannes Itten", year: 1961 }
1713
+ ],
1714
+ suggestedParameters: [
1715
+ { key: "valueRange", label: "Value Range", min: 0.1, max: 1, step: 0.05, default: 0.8 },
1716
+ { key: "keyValue", label: "Key Value", min: 0, max: 1, step: 0.05, default: 0.3 }
1717
+ ]
1718
+ },
1719
+ {
1720
+ id: "palette-generation",
1721
+ name: "Algorithmic Palette Generation",
1722
+ category: "color",
1723
+ complexity: "intermediate",
1724
+ description: "Generate cohesive color palettes algorithmically using perceptual color spaces and mathematical relationships.",
1725
+ theory: `Traditional color theory works with the HSL/HSV color wheel, but perceptual uniformity is better achieved in modern color spaces like OKLCH (Oklab Lightness-Chroma-Hue).
1726
+
1727
+ In OKLCH:
1728
+ - **L** (lightness, 0-1): Perceptually uniform brightness steps
1729
+ - **C** (chroma, 0-0.4): Saturation/vibrancy
1730
+ - **H** (hue, 0-360): Hue angle
1731
+
1732
+ Algorithmic palette strategies:
1733
+ - **Fixed hue, varying L/C**: Monochromatic palette with perceptual uniformity
1734
+ - **Fixed L/C, varying H**: Evenly-spaced hues at consistent brightness (true equiluminant palette)
1735
+ - **Seed-based generation**: Use the sketch seed to derive a base hue, then calculate harmonics
1736
+ - **Gradient interpolation**: Interpolate between anchor colors in OKLCH for smooth transitions
1737
+
1738
+ The advantage of working in OKLCH over HSL is that equal mathematical steps produce equal perceptual steps \u2014 a gradient from dark blue to light blue will look evenly spaced rather than having a perceptual "jump" in the middle.`,
1739
+ principles: [
1740
+ "Use OKLCH or Oklab for perceptually uniform color operations",
1741
+ "Generate palettes from seed + base hue for deterministic, explorable results",
1742
+ "Keep chroma (saturation) consistent across a palette for cohesion",
1743
+ "Vary lightness for value structure, hue for variety, chroma for energy",
1744
+ "Map parameter ranges to hue angles for intuitive color exploration",
1745
+ "Limit generated palettes to 3-7 colors for coherence"
1746
+ ],
1747
+ references: [
1748
+ { title: "Interaction of Color", author: "Josef Albers", year: 1963 }
1749
+ ],
1750
+ suggestedParameters: [
1751
+ { key: "baseHue", label: "Base Hue", min: 0, max: 360, step: 1, default: 220 },
1752
+ { key: "chroma", label: "Chroma", min: 0.05, max: 0.35, step: 0.01, default: 0.15 },
1753
+ { key: "paletteSize", label: "Palette Size", min: 3, max: 7, step: 1, default: 5 }
1754
+ ]
1755
+ }
1756
+ ];
1757
+
1758
+ // src/skill/registry.ts
1759
var SkillRegistry = class {
  // Backing store: skill id -> skill definition. Later registrations
  // with the same id overwrite earlier ones (Map.set semantics).
  skills = /* @__PURE__ */ new Map();
  /** Store a skill definition under its `id`, replacing any prior entry. */
  register(skill) {
    this.skills.set(skill.id, skill);
  }
  /** Look up a skill by ID; throws when no such skill is registered. */
  resolve(id) {
    const found = this.skills.get(id);
    if (!found) {
      throw new Error(`Unknown skill: '${id}'`);
    }
    return found;
  }
  /** Look up a skill by ID without throwing; undefined when absent. */
  get(id) {
    return this.skills.get(id);
  }
  /** All registered skills; when `category` is given, only those in it. */
  list(category) {
    const everything = [...this.skills.values()];
    return category ? everything.filter((skill) => skill.category === category) : everything;
  }
  /** Whether a skill with this ID has been registered. */
  has(id) {
    return this.skills.has(id);
  }
  /** Sorted array of the distinct categories among registered skills. */
  categories() {
    const unique = new Set(
      [...this.skills.values()].map((skill) => skill.category)
    );
    return [...unique].sort();
  }
};
1798
/**
 * Build a SkillRegistry pre-populated with every bundled skill
 * definition (composition skills followed by color skills).
 */
function createDefaultSkillRegistry() {
  const registry = new SkillRegistry();
  [...COMPOSITION_SKILLS, ...COLOR_SKILLS].forEach((skill) => {
    registry.register(skill);
  });
  return registry;
}
1805
+ export {
1806
+ COLOR_SKILLS,
1807
+ COMPOSITION_SKILLS,
1808
+ Canvas2DRendererAdapter,
1809
+ GLSLRendererAdapter,
1810
+ P5RendererAdapter,
1811
+ RendererRegistry,
1812
+ SVGRendererAdapter,
1813
+ SkillRegistry,
1814
+ ThreeRendererAdapter,
1815
+ createDefaultRegistry,
1816
+ createDefaultSkillRegistry,
1817
+ hexToVec3
1818
+ };
1819
+ //# sourceMappingURL=index.js.map