@viji-dev/core 0.1.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1169 -0
- package/dist/assets/viji.worker-Cozsmke0.js +1287 -0
- package/dist/assets/viji.worker-Cozsmke0.js.map +1 -0
- package/dist/index.d.ts +568 -0
- package/dist/index.js +2466 -0
- package/dist/index.js.map +1 -0
- package/package.json +53 -0
package/README.md
ADDED

@@ -0,0 +1,1169 @@

# Viji Core Package (`@viji-dev/core`)

**Universal execution engine for Viji Creative scenes**

A powerful, secure, and feature-rich JavaScript/TypeScript library that provides the foundation for creative scene execution across all Viji platform contexts. The core provides the same IFrame + WebWorker execution model in every context, with comprehensive parameter management, audio/video analysis, user interaction handling, and performance optimization.

## Features

### Core Execution Engine
- **Secure IFrame + WebWorker Architecture**: Complete isolation with controlled communication
- **Multi-Instance Support**: Concurrent instances for main scenes and previews
- **Automatic Resource Management**: Memory leak prevention and cleanup

### Parameter System
- **Declarative Parameter Definition**: Define parameters once with automatic UI generation
- **Proxy-Based Access**: Fast parameter access in render loops
- **Category-Based Organization**: Audio, video, interaction, and general parameters
- **Real-time Validation**: Type safety and range checking
- **Capability-Aware UI**: Parameters shown based on active features

### Audio Analysis
- **Real-time Audio Processing**: Volume, frequency analysis, and beat detection
- **Custom Frequency Bands**: Bass, mid, treble, and custom band analysis
- **Multiple Input Sources**: Microphone, audio files, and screen capture
- **Audio-Reactive Scenes**: Make scenes respond to audio input

### Video Analysis
- **Real-time Video Processing**: Frame analysis in a separate WebWorker
- **Multiple Input Sources**: Camera, video files, and screen capture
- **Video-Reactive Scenes**: Make scenes respond to video motion and brightness
- **Frame Data Access**: Raw video frame data for custom analysis

### User Interaction
- **Mouse Tracking**: Position, buttons, movement, and scroll wheel
- **Keyboard Input**: Key states, modifiers, and event handling
- **Touch Support**: Multi-touch with gesture detection
- **Canvas-Coordinate Mapping**: Accurate input positioning

### Performance Optimization
- **Configurable Frame Rates**: Full (60fps) or half (30fps) modes
- **Resolution Scaling**: Fractional or explicit canvas dimensions
- **Adaptive Performance**: Automatic optimization based on hardware
- **Memory Management**: Efficient resource pooling and cleanup

## Installation

```bash
npm install @viji-dev/core
```

## Quick Start

### Basic Scene Creation

```typescript
import { VijiCore } from '@viji-dev/core';

// Artist scene code
const sceneCode = `
  // Define parameters using helper functions
  const color = viji.color('#ff6b6b', {
    label: 'Shape Color',
    description: 'Color of the animated shape',
    group: 'appearance'
  });

  const size = viji.slider(50, {
    min: 10,
    max: 150,
    step: 5,
    label: 'Shape Size',
    description: 'Size of the animated shape',
    group: 'appearance'
  });

  const speed = viji.slider(1.0, {
    min: 0.1,
    max: 3.0,
    step: 0.1,
    label: 'Animation Speed',
    description: 'Speed of the animation',
    group: 'animation'
  });

  // Main render function
  function render(viji) {
    const ctx = viji.useContext('2d');

    // Clear canvas
    ctx.fillStyle = '#2c3e50';
    ctx.fillRect(0, 0, viji.width, viji.height);

    // Animated shape
    const time = viji.time * speed.value;
    const x = viji.width / 2 + Math.sin(time) * 100;
    const y = viji.height / 2 + Math.cos(time) * 100;

    ctx.fillStyle = color.value;
    ctx.beginPath();
    ctx.arc(x, y, size.value / 2, 0, Math.PI * 2);
    ctx.fill();
  }
`;

// Create core instance
const core = new VijiCore({
  hostContainer: document.getElementById('scene-container'),
  sceneCode: sceneCode,
  frameRateMode: 'full',
  allowUserInteraction: true
});

// Initialize and start rendering
await core.initialize();
console.log('Scene is running!');
```

## Integration API

### Core Configuration

The `VijiCoreConfig` interface defines all available configuration options:

```typescript
interface VijiCoreConfig {
  // Required configuration
  hostContainer: HTMLElement;         // Container element for the scene
  sceneCode: string;                  // Artist JavaScript code with render function

  // Performance configuration
  frameRateMode?: 'full' | 'half';    // 'full' = 60fps, 'half' = 30fps
  autoOptimize?: boolean;             // Enable automatic performance optimization

  // Input streams
  audioStream?: MediaStream;          // Audio input for analysis
  videoStream?: MediaStream;          // Video input for analysis

  // Audio analysis configuration
  analysisConfig?: {
    fftSize?: number;                 // FFT size for frequency analysis (default: 2048)
    smoothing?: number;               // Smoothing factor 0-1 (default: 0.8)
    frequencyBands?: FrequencyBand[]; // Custom frequency bands
    beatDetection?: boolean;          // Enable beat detection
    onsetDetection?: boolean;         // Enable onset detection
  };

  // Parameter system
  parameters?: ParameterGroup[];      // Initial parameter values

  // Feature toggles
  noInputs?: boolean;                 // Disable all input processing
  allowUserInteraction?: boolean;     // Enable mouse/keyboard/touch events
}
```
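
For reference, here is one way the options above can be combined into a single configuration. This is an illustrative sketch: `container`, `sceneCode`, and `micStream` are placeholders assumed to exist in the host application, the values shown are not library defaults, and it assumes `VijiCoreConfig` is exported alongside `VijiCore`.

```typescript
import { VijiCore, type VijiCoreConfig } from '@viji-dev/core';

// Illustrative only: a fully specified config using the fields documented above.
const config: VijiCoreConfig = {
  hostContainer: container,      // required: where the scene is mounted
  sceneCode: sceneCode,          // required: artist code containing render()

  frameRateMode: 'half',         // trade smoothness for lower CPU/GPU load
  autoOptimize: true,

  audioStream: micStream,        // activates the 'audio' parameter category

  analysisConfig: {
    fftSize: 1024,               // lower FFT size = faster, coarser spectrum
    smoothing: 0.6,
    beatDetection: true
  },

  allowUserInteraction: true
};

const core = new VijiCore(config);
await core.initialize();
```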

### Instance Management

#### Creation and Initialization

```typescript
// Create core instance
const core = new VijiCore({
  hostContainer: document.getElementById('scene-container'),
  sceneCode: sceneCode,
  frameRateMode: 'full',
  allowUserInteraction: true
});

// Initialize the core (required before use)
await core.initialize();

// Check if core is ready for operations
if (core.ready) {
  console.log('Core is ready for use');
}

// Get current configuration
const config = core.configuration;
console.log('Current frame rate mode:', config.frameRateMode);
```

#### Performance Control

```typescript
// Frame rate control
await core.setFullFrameRate();   // Set to 60fps mode
await core.setHalfFrameRate();   // Set to 30fps mode

// Resolution control
await core.setResolution(0.75);  // Set to 75% of container size
await core.setResolution(0.5);   // Set to 50% for performance
await core.updateResolution();   // Auto-detect container size changes

// Get performance statistics
const stats = core.getStats();
console.log('Current FPS:', stats.frameRate.effectiveRefreshRate);
console.log('Canvas size:', stats.resolution);
console.log('Scale factor:', stats.scale);
console.log('Parameter count:', stats.parameterCount);
```
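
The calls above can be combined into a simple adaptive-quality loop on the host side. The sketch below is only one possible policy; the thresholds and the 5-second interval are arbitrary assumptions, not part of the API, and it relies solely on the documented `getStats()`, `setResolution()`, and frame-rate methods.

```typescript
// Illustrative policy: step quality down when the measured FPS drops.
let scale = 1.0;

setInterval(async () => {
  if (!core.ready) return;

  const fps = core.getStats().frameRate.effectiveRefreshRate;

  if (fps < 24 && scale > 0.5) {
    scale -= 0.25;                   // render at a lower resolution first
    await core.setResolution(scale);
  } else if (fps < 15) {
    await core.setHalfFrameRate();   // then fall back to 30fps mode
  } else if (fps > 55 && scale < 1.0) {
    scale += 0.25;                   // recover quality when there is headroom
    await core.setResolution(scale);
  }
}, 5000);
```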

#### Debug and Development

```typescript
// Enable debug logging
core.setDebugMode(true);

// Check debug mode status
const isDebugEnabled = core.getDebugMode();

// Debug mode provides detailed logging for:
// - Initialization process
// - Communication between components
// - Parameter system operations
// - Audio/video stream processing
// - Performance statistics
```

### Parameter Management

The parameter system provides a powerful way to create interactive scenes with automatic UI generation.

#### Parameter Definition and Access

```typescript
// Listen for parameter definitions from artist code
core.onParametersDefined((groups) => {
  console.log('Parameters available:', groups);

  // Each group contains:
  // - groupName: string
  // - category: 'audio' | 'video' | 'interaction' | 'general'
  // - description: string
  // - parameters: Record<string, ParameterDefinition>

  // Generate UI based on parameter groups
  generateParameterUI(groups);
});

// Set individual parameter values
await core.setParameter('color', '#ff0000');
await core.setParameter('size', 75);
await core.setParameter('enabled', true);

// Set multiple parameters efficiently
await core.setParameters({
  'color': '#00ff00',
  'size': 100,
  'speed': 2.0,
  'enabled': false
});

// Get current parameter values
const values = core.getParameterValues();
const color = core.getParameter('color');

// Listen for parameter changes
core.onParameterChange('size', (value) => {
  console.log('Size parameter changed to:', value);
});

// Listen for parameter errors
core.onParameterError((error) => {
  console.error('Parameter error:', error.message);
  console.error('Error code:', error.code);
});
```
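
`generateParameterUI` above is left to the host application. A minimal sketch is shown below; it assumes `core` is in scope, that groups have the `groupName`/`parameters` shape documented in the comments, and that each definition exposes a `label` and a `default` value, which may differ from the actual `ParameterDefinition` type.

```typescript
// Minimal, illustrative UI generator for the host page (not part of the package).
function generateParameterUI(groups: any[]) {
  const panel = document.getElementById('parameter-panel')!;   // hypothetical host element
  panel.innerHTML = '';

  for (const group of groups) {
    const fieldset = document.createElement('fieldset');
    fieldset.innerHTML = `<legend>${group.groupName}</legend>`;

    for (const [name, def] of Object.entries<any>(group.parameters)) {
      const input = document.createElement('input');
      input.value = String(def.default ?? '');   // assumed field name
      input.title = def.label ?? name;

      // Push edits back into the running scene
      input.addEventListener('change', () => core.setParameter(name, input.value));
      fieldset.appendChild(input);
    }

    panel.appendChild(fieldset);
  }
}
```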

#### Capability-Aware Parameters

```typescript
// Get all parameter groups
const allGroups = core.getParameterGroups();

// Get parameter groups filtered by active capabilities
const visibleGroups = core.getVisibleParameterGroups();

// Check current capabilities
const capabilities = core.getCapabilities();
console.log('Audio available:', capabilities.hasAudio);
console.log('Video available:', capabilities.hasVideo);
console.log('Interaction enabled:', capabilities.hasInteraction);

// Check if specific parameter category is active
const isAudioActive = core.isCategoryActive('audio');
const isVideoActive = core.isCategoryActive('video');

// Parameters are automatically categorized:
// - 'audio': Only shown when audio stream is connected
// - 'video': Only shown when video stream is connected
// - 'interaction': Only shown when user interaction is enabled
// - 'general': Always available
```

### Audio and Video Integration

#### Audio Stream Management

```typescript
// Set audio stream for analysis
const audioStream = await navigator.mediaDevices.getUserMedia({
  audio: {
    echoCancellation: false,
    noiseSuppression: false,
    autoGainControl: false
  }
});
await core.setAudioStream(audioStream);

// Configure audio analysis
await core.setAudioAnalysisConfig({
  fftSize: 2048,   // Higher values = better frequency resolution
  smoothing: 0.8   // 0 = no smoothing, 1 = maximum smoothing
});

// Get current audio stream
const currentStream = core.getAudioStream();

// Disconnect audio
await core.setAudioStream(null);
```
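
The microphone is not the only possible source. Because `setAudioStream()` accepts any `MediaStream`, an audio file can be analyzed by playing it through an `<audio>` element and capturing its output. The sketch below uses the standard `captureStream()` browser API (still prefixed as `mozCaptureStream` in some Firefox versions) and a placeholder file URL; playback typically requires a prior user gesture.

```typescript
// Illustrative: analyze an audio file instead of the microphone.
// '/music/track.mp3' is a placeholder URL in the host application.
const player = new Audio('/music/track.mp3');
player.crossOrigin = 'anonymous';
await player.play();

// captureStream() turns the element's output into a MediaStream
const fileStream = (player as any).captureStream
  ? (player as any).captureStream()
  : (player as any).mozCaptureStream();

await core.setAudioStream(fileStream);
```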

#### Video Stream Management

```typescript
// Set video stream for analysis
const videoStream = await navigator.mediaDevices.getUserMedia({
  video: {
    width: { ideal: 640 },
    height: { ideal: 480 },
    frameRate: { ideal: 30 }
  }
});
await core.setVideoStream(videoStream);

// Video analysis includes:
// - Real-time frame processing
// - Frame data access for custom analysis
// - Brightness and motion detection
// - Custom computer vision processing

// Disconnect video
await core.setVideoStream(null);
```
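
Screen capture works the same way: any `MediaStream` can be passed to `setVideoStream()`. This sketch relies on the standard `getDisplayMedia()` API; the constraint values are arbitrary.

```typescript
// Illustrative: drive the scene from a screen capture instead of a camera.
const screenStream = await navigator.mediaDevices.getDisplayMedia({
  video: { frameRate: { ideal: 30 } }
});
await core.setVideoStream(screenStream);

// Stop analysis when the user ends the capture
screenStream.getVideoTracks()[0].addEventListener('ended', () => {
  core.setVideoStream(null);
});
```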

#### Capability Change Monitoring

```typescript
// Listen for capability changes
core.onCapabilitiesChange((capabilities) => {
  console.log('Capabilities updated:', capabilities);

  // Update UI based on new capabilities
  if (capabilities.hasAudio) {
    showAudioControls();
  } else {
    hideAudioControls();
  }

  if (capabilities.hasVideo) {
    showVideoControls();
  } else {
    hideVideoControls();
  }
});
```

### Event Handling and Lifecycle

#### Core Lifecycle Events

```typescript
// Core is ready for operations
if (core.ready) {
  // All systems initialized and running
  console.log('Core is fully operational');
}

// Check if parameters are initialized
if (core.parametersReady) {
  // Parameter system is ready
  console.log('Parameters are available');
}
```

#### Cleanup and Resource Management

```typescript
// Destroy instance and clean up all resources
await core.destroy();

// This automatically:
// - Stops all rendering loops
// - Disconnects audio/video streams
// - Cleans up WebWorker and IFrame
// - Releases all event listeners
// - Clears parameter system
// - Frees memory resources
```
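
Putting the lifecycle together, a host page typically creates, initializes, and eventually destroys an instance around its own lifetime. The sketch below is one possible arrangement using only the documented `initialize()`, `ready`, and `destroy()` members; the `pagehide` hook is an assumption about the host environment, not part of the core API.

```typescript
// Illustrative host-side lifecycle wrapper.
async function runScene(container: HTMLElement, sceneCode: string) {
  const core = new VijiCore({ hostContainer: container, sceneCode });

  await core.initialize();
  if (!core.ready) {
    throw new Error('Core failed to become ready');
  }

  // Release the IFrame/WebWorker and streams when the page goes away
  window.addEventListener('pagehide', () => {
    core.destroy();
  }, { once: true });

  return core;
}
```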

## Artist API

The artist API provides a comprehensive set of tools for creating interactive, audio-reactive, and video-responsive scenes.

### Canvas and Rendering

```typescript
function render(viji) {
  // Get canvas contexts
  const ctx = viji.useContext('2d');     // 2D rendering context
  const gl = viji.useContext('webgl');   // WebGL rendering context

  // Canvas properties
  viji.canvas;       // OffscreenCanvas object
  viji.width;        // Canvas width in pixels
  viji.height;       // Canvas height in pixels
  viji.pixelRatio;   // Device pixel ratio for crisp rendering

  // Example: Draw a responsive circle
  const centerX = viji.width / 2;
  const centerY = viji.height / 2;
  const radius = Math.min(viji.width, viji.height) * 0.1;

  ctx.fillStyle = '#ff6b6b';
  ctx.beginPath();
  ctx.arc(centerX, centerY, radius, 0, Math.PI * 2);
  ctx.fill();
}
```

### Timing Information

The timing system provides FPS-independent timing data for smooth animations:

```typescript
function render(viji) {
  const ctx = viji.useContext('2d');

  // Timing data (FPS independent)
  viji.time;         // Elapsed time in seconds since scene start
  viji.deltaTime;    // Time since last frame in seconds
  viji.frameCount;   // Total number of frames rendered
  viji.fps;          // Current frames per second

  // Example: Smooth animation regardless of frame rate
  const animationSpeed = 2.0; // rotations per second
  const rotation = (viji.time * animationSpeed * Math.PI * 2) % (Math.PI * 2);

  ctx.save();
  ctx.translate(viji.width / 2, viji.height / 2);
  ctx.rotate(rotation);
  ctx.fillRect(-25, -25, 50, 50);
  ctx.restore();
}
```
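
`viji.time` suits absolute animations like the rotation above; `viji.deltaTime` is the natural choice for state that accumulates frame by frame, such as a moving position. A small sketch (the pixel speed and wrap-around are arbitrary choices, not library behavior):

```typescript
// Illustrative: frame-rate-independent movement accumulated with deltaTime.
let xPos = 0;

function render(viji) {
  const ctx = viji.useContext('2d');

  const pixelsPerSecond = 120;
  // Same apparent speed whether the core runs in 'full' or 'half' frame rate mode
  xPos = (xPos + pixelsPerSecond * viji.deltaTime) % viji.width;

  ctx.clearRect(0, 0, viji.width, viji.height);
  ctx.fillStyle = '#ff6b6b';
  ctx.fillRect(xPos, viji.height / 2 - 10, 20, 20);
}
```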

### Parameter System

The parameter system allows artists to define interactive parameters that automatically generate UI controls.

#### Parameter Definition

```typescript
// Define parameters (call once outside render loop)
const color = viji.color('#ff6b6b', {
  label: 'Primary Color',
  description: 'Main color for shapes',
  group: 'appearance',
  category: 'general'
});

const size = viji.slider(50, {
  min: 10,
  max: 150,
  step: 5,
  label: 'Shape Size',
  description: 'Size of shapes in pixels',
  group: 'appearance',
  category: 'general'
});

const speed = viji.slider(1.0, {
  min: 0.1,
  max: 3.0,
  step: 0.1,
  label: 'Animation Speed',
  description: 'Speed of animation in rotations per second',
  group: 'animation',
  category: 'general'
});

const useAudio = viji.toggle(false, {
  label: 'Audio Reactive',
  description: 'Make shapes react to audio input',
  group: 'audio',
  category: 'audio'
});

const shapeType = viji.select('circle', {
  options: ['circle', 'square', 'triangle', 'star'],
  label: 'Shape Type',
  description: 'Type of shape to draw',
  group: 'appearance',
  category: 'general'
});

const title = viji.text('My Scene', {
  label: 'Scene Title',
  description: 'Title displayed in the scene',
  group: 'text',
  category: 'general',
  maxLength: 50
});

const particleCount = viji.number(5, {
  min: 1,
  max: 20,
  step: 1,
  label: 'Particle Count',
  description: 'Number of particles to render',
  group: 'animation',
  category: 'general'
});
```

#### Parameter Usage in Render Loop

```typescript
function render(viji) {
  const ctx = viji.useContext('2d');

  // Fast parameter access (proxy-based)
  ctx.fillStyle = color.value;          // Get current color value
  const radius = size.value / 2;        // Get current size value
  const animationSpeed = speed.value;   // Get current speed value

  // Clear canvas
  ctx.fillStyle = '#2c3e50';
  ctx.fillRect(0, 0, viji.width, viji.height);

  // Draw title
  ctx.fillStyle = 'white';
  ctx.font = '20px Arial';
  ctx.textAlign = 'center';
  ctx.fillText(title.value, viji.width / 2, 30);

  // Draw particles
  for (let i = 0; i < particleCount.value; i++) {
    const angle = (i / particleCount.value) * Math.PI * 2 + (viji.time * animationSpeed);
    const x = viji.width / 2 + Math.cos(angle) * 100;
    const y = viji.height / 2 + Math.sin(angle) * 100;

    ctx.fillStyle = color.value;
    ctx.beginPath();

    switch (shapeType.value) {
      case 'circle':
        ctx.arc(x, y, radius, 0, Math.PI * 2);
        break;
      case 'square':
        ctx.rect(x - radius, y - radius, radius * 2, radius * 2);
        break;
      case 'triangle':
        ctx.moveTo(x, y - radius);
        ctx.lineTo(x - radius, y + radius);
        ctx.lineTo(x + radius, y + radius);
        ctx.closePath();
        break;
      default: // 'star' (and any other option) falls back to a circle here
        ctx.arc(x, y, radius, 0, Math.PI * 2);
        break;
    }

    ctx.fill();
  }
}
```

### Audio Analysis

The audio system provides real-time analysis of audio input with comprehensive frequency and volume data.

#### Audio API Overview

```typescript
function render(viji) {
  const ctx = viji.useContext('2d');
  const audio = viji.audio;

  if (audio.isConnected) {
    // Volume analysis
    const volume = audio.volume.rms;   // 0-1 RMS volume (true volume)
    const peak = audio.volume.peak;    // 0-1 peak volume (maximum amplitude)

    // Frequency bands (0-1 values)
    const bass = audio.bands.bass;       // 60-250 Hz
    const mid = audio.bands.mid;         // 500-2000 Hz
    const treble = audio.bands.treble;   // 2000-20000 Hz

    // Extended frequency bands
    const subBass = audio.bands.subBass;         // 20-60 Hz
    const lowMid = audio.bands.lowMid;           // 250-500 Hz
    const highMid = audio.bands.highMid;         // 2000-4000 Hz
    const presence = audio.bands.presence;       // 4000-6000 Hz
    const brilliance = audio.bands.brilliance;   // 6000-20000 Hz

    // Beat detection
    if (audio.beat?.isKick) {
      // Kick drum detected
      console.log('Kick detected!');
    }

    // Raw frequency data (0-255 values)
    const frequencyData = audio.getFrequencyData();

    // Example: Audio-reactive animation
    const scale = 1 + (volume * 2);   // Scale based on volume
    const hue = (bass * 360);         // Color based on bass

    ctx.save();
    ctx.translate(viji.width / 2, viji.height / 2);
    ctx.scale(scale, scale);
    ctx.fillStyle = `hsl(${hue}, 70%, 60%)`;
    ctx.fillRect(-25, -25, 50, 50);
    ctx.restore();
  }
}
```

#### Audio-Reactive Scene Example

```typescript
// Define audio-reactive parameters
const audioReactive = viji.toggle(true, {
  label: 'Audio Reactive',
  description: 'Make shapes react to audio',
  group: 'audio',
  category: 'audio'
});

const volumeSensitivity = viji.slider(1.0, {
  min: 0.1,
  max: 5.0,
  step: 0.1,
  label: 'Volume Sensitivity',
  description: 'How sensitive shapes are to volume',
  group: 'audio',
  category: 'audio'
});

const bassReactivity = viji.slider(1.0, {
  min: 0,
  max: 3.0,
  step: 0.1,
  label: 'Bass Reactivity',
  description: 'How much shapes react to bass',
  group: 'audio',
  category: 'audio'
});

function render(viji) {
  const ctx = viji.useContext('2d');
  const audio = viji.audio;

  // Clear canvas
  ctx.fillStyle = '#2c3e50';
  ctx.fillRect(0, 0, viji.width, viji.height);

  if (audioReactive.value && audio.isConnected) {
    // Audio-reactive animation
    const volume = audio.volume.rms * volumeSensitivity.value;
    const bass = audio.bands.bass * bassReactivity.value;

    // Scale based on volume
    const scale = 1 + volume;

    // Color based on bass
    const hue = 200 + (bass * 160); // Blue to purple range

    // Position based on frequency distribution
    const x = viji.width * (audio.bands.mid + audio.bands.treble) / 2;
    const y = viji.height * (1 - audio.bands.bass);

    ctx.save();
    ctx.translate(x, y);
    ctx.scale(scale, scale);
    ctx.fillStyle = `hsl(${hue}, 80%, 60%)`;
    ctx.beginPath();
    ctx.arc(0, 0, 30, 0, Math.PI * 2);
    ctx.fill();
    ctx.restore();
  }
}
```

### Video Analysis

The video system provides real-time video frame analysis with frame data access for custom processing.

#### Video API Overview

```typescript
function render(viji) {
  const ctx = viji.useContext('2d');
  const video = viji.video;

  if (video.isConnected) {
    // Video properties
    const frameWidth = video.frameWidth;
    const frameHeight = video.frameHeight;
    const frameRate = video.frameRate;

    // Current video frame (OffscreenCanvas)
    if (video.currentFrame) {
      // Draw video frame as background
      ctx.globalAlpha = 0.3;
      ctx.drawImage(video.currentFrame, 0, 0, viji.width, viji.height);
      ctx.globalAlpha = 1.0;
    }

    // Frame data for custom analysis
    const frameData = video.getFrameData();

    // Example: Custom video analysis
    if (frameData) {
      // Access raw pixel data for custom processing
      const imageData = frameData.data;
      const width = frameData.width;
      const height = frameData.height;

      // Example: Calculate average brightness
      let totalBrightness = 0;
      for (let i = 0; i < imageData.length; i += 4) {
        const r = imageData[i];
        const g = imageData[i + 1];
        const b = imageData[i + 2];
        totalBrightness += (r + g + b) / 3;
      }
      const averageBrightness = totalBrightness / (imageData.length / 4);

      // Use brightness for effects
      const brightness = averageBrightness / 255; // Normalize to 0-1

      // Create brightness-reactive animation
      ctx.fillStyle = `rgba(255, 255, 255, ${brightness * 0.5})`;
      ctx.fillRect(0, 0, viji.width, viji.height);
    }
  }
}
```

#### Video-Reactive Scene Example

```typescript
// Define video-reactive parameters
const videoReactive = viji.toggle(true, {
  label: 'Video Reactive',
  description: 'Make shapes react to video',
  group: 'video',
  category: 'video'
});

const motionSensitivity = viji.slider(1.0, {
  min: 0.1,
  max: 3.0,
  step: 0.1,
  label: 'Motion Sensitivity',
  description: 'How sensitive shapes are to video changes',
  group: 'video',
  category: 'video'
});

function render(viji) {
  const ctx = viji.useContext('2d');
  const video = viji.video;

  if (videoReactive.value && video.isConnected) {
    // Video-reactive animation using frame data
    const frameData = video.getFrameData();

    if (frameData) {
      // Simple motion detection (compare with previous frame)
      // This is a basic example - you can implement more sophisticated analysis
      const imageData = frameData.data;
      let motionEnergy = 0;

      // Calculate motion energy (simplified)
      for (let i = 0; i < imageData.length; i += 4) {
        const brightness = (imageData[i] + imageData[i + 1] + imageData[i + 2]) / 3;
        motionEnergy += brightness;
      }

      const normalizedMotion = (motionEnergy / (imageData.length / 4)) / 255;
      const scale = 1 + (normalizedMotion * motionSensitivity.value);

      // Create motion-reactive shapes
      ctx.save();
      ctx.translate(viji.width / 2, viji.height / 2);
      ctx.scale(scale, scale);
      ctx.fillStyle = `hsl(${normalizedMotion * 360}, 70%, 60%)`;
      ctx.beginPath();
      ctx.arc(0, 0, 30, 0, Math.PI * 2);
      ctx.fill();
      ctx.restore();
    }
  }
}
```

### User Interaction

The interaction system provides comprehensive support for mouse, keyboard, and touch input.

#### Mouse Interaction

```typescript
function render(viji) {
  const ctx = viji.useContext('2d');
  const mouse = viji.mouse;

  // Mouse position (canvas coordinates)
  if (mouse.isInCanvas) {
    const x = mouse.x;   // Current X coordinate
    const y = mouse.y;   // Current Y coordinate

    // Mouse movement
    const deltaX = mouse.deltaX;       // X movement since last frame
    const deltaY = mouse.deltaY;       // Y movement since last frame
    const velocity = mouse.velocity;   // Smoothed velocity { x, y }

    // Mouse buttons
    const isPressed = mouse.isPressed;         // Any button currently pressed
    const leftButton = mouse.leftButton;       // Left button state
    const rightButton = mouse.rightButton;     // Right button state
    const middleButton = mouse.middleButton;   // Middle button state

    // Frame-based events
    const wasPressed = mouse.wasPressed;     // Button was pressed this frame
    const wasReleased = mouse.wasReleased;   // Button was released this frame
    const wasMoved = mouse.wasMoved;         // Mouse moved this frame

    // Scroll wheel
    const wheelDelta = mouse.wheelDelta;   // Combined wheel delta
    const wheelX = mouse.wheelX;           // Horizontal wheel delta
    const wheelY = mouse.wheelY;           // Vertical wheel delta

    // Example: Mouse-reactive animation
    ctx.fillStyle = leftButton ? 'red' : 'blue';
    ctx.beginPath();
    ctx.arc(x, y, 20 + Math.abs(velocity.x + velocity.y), 0, Math.PI * 2);
    ctx.fill();
  }
}
```

#### Keyboard Interaction

```typescript
function render(viji) {
  const ctx = viji.useContext('2d');
  const keyboard = viji.keyboard;

  // Key state queries
  if (keyboard.isPressed('w')) {
    // W key is currently pressed
    console.log('W key is held down');
  }

  if (keyboard.wasPressed('space')) {
    // Space was pressed this frame
    console.log('Space was pressed!');
  }

  if (keyboard.wasReleased('escape')) {
    // Escape was released this frame
    console.log('Escape was released!');
  }

  // Active key tracking
  const activeKeys = keyboard.activeKeys;                 // Set of currently pressed keys
  const pressedThisFrame = keyboard.pressedThisFrame;     // Set of keys pressed this frame
  const releasedThisFrame = keyboard.releasedThisFrame;   // Set of keys released this frame

  // Modifier keys
  const shift = keyboard.shift;   // Shift key is held
  const ctrl = keyboard.ctrl;     // Ctrl key is held
  const alt = keyboard.alt;       // Alt key is held
  const meta = keyboard.meta;     // Meta/Cmd key is held

  // Recent activity
  const lastKeyPressed = keyboard.lastKeyPressed;     // Last key that was pressed
  const lastKeyReleased = keyboard.lastKeyReleased;   // Last key that was released

  // Example: Keyboard-controlled movement
  let moveX = 0;
  let moveY = 0;

  if (keyboard.isPressed('w') || keyboard.isPressed('W')) moveY -= 5;
  if (keyboard.isPressed('s') || keyboard.isPressed('S')) moveY += 5;
  if (keyboard.isPressed('a') || keyboard.isPressed('A')) moveX -= 5;
  if (keyboard.isPressed('d') || keyboard.isPressed('D')) moveX += 5;

  // Apply movement
  ctx.save();
  ctx.translate(moveX, moveY);
  ctx.fillStyle = 'green';
  ctx.fillRect(0, 0, 50, 50);
  ctx.restore();
}
```

#### Touch Interaction

```typescript
function render(viji) {
  const ctx = viji.useContext('2d');
  const touches = viji.touches;

  // Touch points
  for (const touch of touches.points) {
    const x = touch.x;                 // Touch X coordinate
    const y = touch.y;                 // Touch Y coordinate
    const pressure = touch.pressure;   // Pressure (0-1)
    const radius = touch.radius;       // Touch radius
    const id = touch.id;               // Unique touch ID

    // Movement
    const deltaX = touch.deltaX;       // X movement since last frame
    const deltaY = touch.deltaY;       // Y movement since last frame
    const velocity = touch.velocity;   // Movement velocity { x, y }

    // Lifecycle
    const isNew = touch.isNew;         // Touch started this frame
    const isActive = touch.isActive;   // Touch is currently active
    const isEnding = touch.isEnding;   // Touch ending this frame

    // Draw touch point
    ctx.fillStyle = isNew ? 'red' : isEnding ? 'yellow' : 'blue';
    ctx.beginPath();
    ctx.arc(x, y, radius * 2, 0, Math.PI * 2);
    ctx.fill();
  }

  // Touch events
  const started = touches.started;   // Touches that started this frame
  const moved = touches.moved;       // Touches that moved this frame
  const ended = touches.ended;       // Touches that ended this frame

  // Primary touch (first touch point)
  const primary = touches.primary;   // Primary touch point or null

  // Touch gestures
  const gestures = touches.gestures;

  if (gestures.isPinching) {
    const scale = gestures.pinchScale;   // Current pinch scale
    const delta = gestures.pinchDelta;   // Scale change since last frame

    // React to pinch gesture
    ctx.save();
    ctx.scale(scale, scale);
    ctx.fillStyle = 'purple';
    ctx.fillRect(0, 0, 100, 100);
    ctx.restore();
  }

  if (gestures.isRotating) {
    const angle = gestures.rotationAngle;   // Current rotation angle
    const delta = gestures.rotationDelta;   // Rotation change since last frame

    // React to rotation gesture
    ctx.save();
    ctx.rotate(angle);
    ctx.fillStyle = 'orange';
    ctx.fillRect(-25, -25, 50, 50);
    ctx.restore();
  }

  if (gestures.isPanning) {
    const panDelta = gestures.panDelta;   // Pan movement { x, y }

    // React to pan gesture
    ctx.save();
    ctx.translate(panDelta.x, panDelta.y);
    ctx.fillStyle = 'cyan';
    ctx.fillRect(0, 0, 50, 50);
    ctx.restore();
  }

  if (gestures.isTapping) {
    const tapCount = gestures.tapCount;         // Number of taps
    const tapPosition = gestures.tapPosition;   // { x, y } tap position

    // React to tap gesture
    if (tapPosition) {
      ctx.fillStyle = 'lime';
      ctx.beginPath();
      ctx.arc(tapPosition.x, tapPosition.y, 30, 0, Math.PI * 2);
      ctx.fill();
    }
  }
}
```

## Architecture

### Security Model

The core implements a comprehensive security model to ensure safe execution of artist code:

- **IFrame Isolation**: Complete separation from host environment with sandboxed execution
- **WebWorker Sandboxing**: Artist code runs with controlled API access only
- **Blob URL Creation**: Secure worker and IFrame creation from blob URLs
- **Resource Protection**: Memory leaks and errors cannot affect main application
- **Controlled Communication**: Optimized message passing with validation
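
For readers unfamiliar with the pattern, the sketch below shows the general blob-URL plus sandbox technique these bullets refer to, using plain browser APIs. It illustrates the idea only; the core's actual internals, sandbox attributes, and message protocol are not exposed by this package.

```typescript
// Generic illustration of the blob-URL sandbox pattern (not the core's internal code).
const workerSource = `self.onmessage = (e) => self.postMessage(e.data * 2);`;
const workerUrl = URL.createObjectURL(new Blob([workerSource], { type: 'text/javascript' }));
const worker = new Worker(workerUrl);

const iframe = document.createElement('iframe');
iframe.sandbox.add('allow-scripts');   // no same-origin access to the host page
iframe.src = URL.createObjectURL(new Blob(['<!doctype html><body></body>'], { type: 'text/html' }));
document.body.appendChild(iframe);

// All communication happens through explicit, validated messages
worker.postMessage(21);
worker.onmessage = (e) => console.log('worker replied:', e.data);
```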

### Performance Features

The core provides extensive performance optimization capabilities:

- **Configurable Frame Rates**: Full (60fps) or half (30fps) modes for performance tuning
- **Resolution Scaling**: Fractional (0.1-1.0) or explicit canvas dimensions
- **Adaptive Optimization**: Automatic performance tuning based on hardware capabilities
- **Efficient Communication**: Optimized message passing between components
- **Memory Management**: Automatic resource cleanup and memory leak prevention
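
A host application might combine these options up front on weaker hardware. The heuristic below (using `hardwareConcurrency` as a rough capability signal) is an assumption of the host, not something the core requires; the fields themselves come from `VijiCoreConfig` and the multi-instance examples that follow.

```typescript
// Illustrative: pick conservative settings when creating the instance on low-end devices.
const lowEnd = (navigator.hardwareConcurrency ?? 4) <= 4;

const perfCore = new VijiCore({
  hostContainer: document.getElementById('scene-container')!,
  sceneCode: sceneCode,
  frameRateMode: lowEnd ? 'half' : 'full',   // 30fps vs 60fps
  resolution: lowEnd ? 0.5 : 1.0,            // fractional scaling, as in the preview example below
  autoOptimize: true                         // let the core keep tuning at runtime
});
await perfCore.initialize();
```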

### Multi-Instance Support

The core supports multiple concurrent instances for complex applications:

```typescript
// Main scene with full features
const mainCore = new VijiCore({
  hostContainer: document.getElementById('main-scene'),
  sceneCode: sceneCode,
  resolution: { width: 1920, height: 1080 },
  frameRateMode: 'full',
  allowUserInteraction: true,
  audioStream: sharedAudioStream,
  videoStream: sharedVideoStream
});

// Preview instance with reduced features
const previewCore = new VijiCore({
  hostContainer: document.getElementById('preview'),
  sceneCode: sceneCode,
  resolution: 0.25,   // 25% resolution for performance
  frameRateMode: 'half',
  noInputs: true,
  allowUserInteraction: false,
  audioStream: sharedAudioStream   // Shared efficiently across instances
});

// Thumbnail instance for gallery view
const thumbnailCore = new VijiCore({
  hostContainer: document.getElementById('thumbnail'),
  sceneCode: sceneCode,
  resolution: 0.1,    // 10% resolution
  frameRateMode: 'half',
  noInputs: true,
  allowUserInteraction: false
});

// To change scenes, create a new instance and destroy the old one
const newCore = new VijiCore({
  hostContainer: document.getElementById('scene-host'),
  sceneCode: newSceneCode,
  audioStream: sharedAudioStream,
  videoStream: sharedVideoStream
});

// Automatic comprehensive cleanup when destroyed (oldCore is the instance being replaced)
await oldCore.destroy();
```

## Error Handling

The core provides comprehensive error handling with detailed error information:

```typescript
import { VijiCoreError } from '@viji-dev/core';

try {
  const core = new VijiCore(config);
  await core.initialize();
} catch (error) {
  if (error instanceof VijiCoreError) {
    console.error(`Core error [${error.code}]:`, error.message);
    console.error('Error context:', error.context);

    // Handle specific error types
    switch (error.code) {
      case 'INVALID_CONFIG':
        console.error('Configuration is invalid:', error.context);
        break;
      case 'INITIALIZATION_ERROR':
        console.error('Failed to initialize core:', error.context);
        break;
      case 'CORE_NOT_READY':
        console.error('Core is not ready for operations');
        break;
      case 'INSTANCE_DESTROYED':
        console.error('Core instance has been destroyed');
        break;
      case 'PARAMETERS_NOT_INITIALIZED':
        console.error('Parameter system not yet initialized');
        break;
      case 'UNKNOWN_PARAMETER':
        console.error('Parameter not found:', error.context);
        break;
    }
  } else {
    console.error('Unexpected error:', error);
  }
}
```

**Common Error Codes:**
- `INVALID_CONFIG` - Configuration validation failed
- `INITIALIZATION_ERROR` - Failed to initialize core components
- `CORE_NOT_READY` - Operation attempted before ready
- `INSTANCE_DESTROYED` - Operation attempted after destroy
- `PARAMETERS_NOT_INITIALIZED` - Parameters not yet available
- `UNKNOWN_PARAMETER` - Parameter not found
- `CONCURRENT_INITIALIZATION` - Multiple initialization attempts
- `MANAGER_NOT_READY` - Internal component not available
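
The same pattern applies to runtime calls; for example, a host UI can swallow `UNKNOWN_PARAMETER` while re-throwing anything else. A small sketch, built only on the documented `setParameter()` and error codes (the helper name is ours, not part of the API):

```typescript
// Illustrative helper: ignore stale parameter names, surface real failures.
async function safeSetParameter(name: string, value: unknown): Promise<boolean> {
  try {
    await core.setParameter(name, value);
    return true;
  } catch (error) {
    if (error instanceof VijiCoreError && error.code === 'UNKNOWN_PARAMETER') {
      console.warn(`Scene has no parameter named "${name}", ignoring`);
      return false;   // stale UI control, safe to ignore
    }
    throw error;      // anything else is a real problem
  }
}
```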

## Development

```bash
# Install dependencies
npm install

# Build the package
npm run build

# Run tests
npm test

# Development build (watch mode)
npm run dev

# Type checking
npm run type-check

# Linting
npm run lint
```

## Examples

The package includes comprehensive examples in the `/example` directory:

- **Basic Scene Creation**: Simple animated shapes with parameters
- **Audio-Reactive Scenes**: Scenes that respond to audio input
- **Video-Reactive Scenes**: Scenes that respond to video analysis
- **Interactive Scenes**: Mouse, keyboard, and touch interaction
- **Parameter System**: Complete parameter definition and UI generation
- **Multi-Instance**: Multiple concurrent scene instances

## Use Cases

### Platform Integration
The core integrates seamlessly with the Viji platform, providing scene execution while the platform handles UI, user management, and social features.

### SDK Development
The core serves as the execution foundation for the Viji SDK, ensuring identical behavior between development and platform environments.

### Standalone Applications
Use the core directly in custom applications for creative scene rendering with full feature support.

## License

MIT License - see LICENSE file for details.

## Contributing

The Viji Core package is part of the larger Viji Creative SDK ecosystem. For contribution guidelines and development setup, please refer to the main Viji project documentation.

---

**Viji Core** - Universal execution engine for creative scenes across all contexts.