databender 1.0.3 → 2.0.0-alpha.0
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +1 -1
- package/README.md +93 -12
- package/dist/databender.js +196 -2624
- package/index.js +193 -110
- package/package.json +4 -8
- package/rollup.config.js +12 -10
- package/tags +611 -2534
- package/effects/biquad.js +0 -27
- package/effects/bitcrusher.js +0 -7
- package/effects/chorus.js +0 -8
- package/effects/convolver.js +0 -10
- package/effects/detune.js +0 -20
- package/effects/gain.js +0 -5
- package/effects/index.js +0 -13
- package/effects/phaser.js +0 -9
- package/effects/pingPong.js +0 -8
- package/effects/playbackRate.js +0 -14
- package/random.js +0 -4
package/index.js
CHANGED
@@ -1,122 +1,205 @@
-var effects = require('./effects');
-var Tuna = require('tunajs');
-
-// Create a Databender instance
-module.exports = function (config, audioCtx) {
-  this.audioCtx = audioCtx ? audioCtx : new AudioContext();
-  this.channels = 1;
-  this.config = config;
-  this.configKeys = Object.keys(this.config);
-  this.previousConfig = this.config;
-
-  this.convert = function (image) {
-    if (image instanceof Image || image instanceof HTMLVideoElement) {
-      var canvas = document.createElement('canvas');
-      canvas.width = window.innerWidth;
-      canvas.height = window.innerHeight;
-      var context = canvas.getContext('2d');
-      context.drawImage(image, 0, 0, canvas.width, canvas.height);
-      var imageData = context.getImageData(0, 0, canvas.width, canvas.height);
-    }
-    this.imageData = imageData || image;
-    var bufferSize = this.imageData.data.length / this.channels;
-
-    // Make an audioBuffer on the audioContext to pass to the offlineAudioCtx AudioBufferSourceNode
-    var audioBuffer = this.audioCtx.createBuffer(this.channels, bufferSize, this.audioCtx.sampleRate);
-
-    // This gives us the actual ArrayBuffer that contains the data
-    var nowBuffering = audioBuffer.getChannelData(0);
-
-    nowBuffering.set(this.imageData.data);
-
-    return Promise.resolve(audioBuffer);
-  }
-
-  this.configHasChanged = function () {
-    return JSON.stringify(this.previousConfig) !== JSON.stringify(this.config);
-  }
-
-  this.updateConfig = function (effect, param, value) {
-    this.config[effect][param] = value;
-  }

-
+const isFunction = (candidate) => typeof candidate === 'function';

-
-  var offlineAudioCtx = new OfflineAudioContext(this.channels, buffer.length * this.channels, this.audioCtx.sampleRate);
+const isConnectable = (candidate) => candidate && typeof candidate.connect === 'function';

-
+const isPromise = (candidate) => candidate && typeof candidate.then === 'function';

-
-
+const normalizeEffectNode = (candidate) => {
+  if (!candidate) {
+    return null;
+  }

-
-
+  const input = isConnectable(candidate.input) ? candidate.input : candidate;
+  const output = isConnectable(candidate.output) ? candidate.output : input;

-
-
-
-    }, {});
-    var activeEffectsIndex = Object.keys(activeEffects);
+  if (!isConnectable(input) || !isConnectable(output)) {
+    return null;
+  }

-
+  return { input, output };
+};

-
-
-
-      effects[effect](this.config, tuna, bufferSource);
-      activeEffectsIndex.pop();
-    }
-  });
+const asArray = (value) => {
+  if (!value) {
+    return [];
   }

-
-
-  } else {
-    var nodes = activeEffectsIndex.map((effect) => {
-      const context = effect === 'biquad' ? offlineAudioCtx : tuna
-      return effects[effect](this.config, context, bufferSource);
-    }).filter(Boolean);
-
-    nodes.forEach((node) => {
-      bufferSource.connect(node);
-      node.connect(offlineAudioCtx.destination);
-    });
-  }
+  return Array.isArray(value) ? value : [value];
+};

-  … (old lines 87-121 elided in the source diff view)
+export default class Databender {
+  constructor({
+    config = {},
+    effectsChain = null,
+    chainMode = 'series',
+    audioCtx = null
+  } = {}) {
+    this.audioCtx = audioCtx ? audioCtx : new AudioContext();
+    this.channels = 1;
+    this.config = config || {};
+    this.configKeys = Object.keys(this.config);
+    this.previousConfig = this.config;
+    this.effectsChain = effectsChain ? asArray(effectsChain) : null;
+    this.chainMode = chainMode === 'parallel' ? 'parallel' : 'series';
+
+    this.convert = function(image) {
+      if (image instanceof Image || image instanceof HTMLVideoElement) {
+        const canvas = typeof OffscreenCanvas !== 'undefined'
+          ? new OffscreenCanvas(window.innerWidth, window.innerHeight)
+          : (() => {
+              const element = document.createElement('canvas');
+              element.width = window.innerWidth;
+              element.height = window.innerHeight;
+              return element;
+            })();
+        var context = canvas.getContext('2d');
+        context.drawImage(image, 0, 0, canvas.width, canvas.height);
+        var imageData = context.getImageData(0, 0, canvas.width, canvas.height);
+      }
+      this.imageData = imageData || image;
+      var bufferSize = this.imageData.data.length / this.channels;
+
+      // Make an audioBuffer on the audioContext to pass to the offlineAudioCtx AudioBufferSourceNode
+      var audioBuffer = this.audioCtx.createBuffer(this.channels, bufferSize, this.audioCtx.sampleRate);
+
+      // This gives us the actual ArrayBuffer that contains the data
+      var nowBuffering = audioBuffer.getChannelData(0);
+
+      for (var i = 0; i < nowBuffering.length; i++) {
+        nowBuffering[i] = (this.imageData.data[i] / 128) - 1;
+      }
+
+      return Promise.resolve(audioBuffer);
+    };
+
+    this.configHasChanged = function() {
+      if (!this.configKeys.length) {
+        return false;
+      }
+      return JSON.stringify(this.previousConfig) !== JSON.stringify(this.config);
+    };
+
+    this.updateConfig = function(effect, param, value) {
+      if (!this.configKeys.length || !this.config[effect]) {
+        return;
+      }
+      this.config[effect][param] = value;
+    };
+
+    this.render = async function(buffer, bypass = false) {
+
+      // Create offlineAudioCtx that will house our rendered buffer
+      var offlineAudioCtx = new OfflineAudioContext(this.channels, buffer.length * this.channels, this.audioCtx.sampleRate);
+
+      // Create an AudioBufferSourceNode, which represents an audio source consisting of in-memory audio data
+      var bufferSource = offlineAudioCtx.createBufferSource();
+
+      // Set buffer to audio buffer containing image data
+      bufferSource.buffer = buffer;
+
+      var resolveEffectsChain = async function() {
+        if (bypass) {
+          return [];
+        }
+
+        var chainDefinition = null;
+
+        if (this.effectsChain) {
+          chainDefinition = this.effectsChain;
+        }
+
+        if (isPromise(chainDefinition)) {
+          chainDefinition = await chainDefinition;
+        }
+
+        var candidates = asArray(chainDefinition);
+        var resolvedNodes = [];
+
+        for (var i = 0; i < candidates.length; i++) {
+          var nodeCandidate = candidates[i];
+          var resolvedNode = nodeCandidate;
+
+          if (isFunction(resolvedNode)) {
+            resolvedNode = resolvedNode({ context: offlineAudioCtx, source: bufferSource, config: this.config });
+          }
+
+          if (isPromise(resolvedNode)) {
+            resolvedNode = await resolvedNode;
+          }
+
+          var normalizedNodes = asArray(resolvedNode);
+
+          for (var j = 0; j < normalizedNodes.length; j++) {
+            var node = normalizedNodes[j];
+            resolvedNodes.push(isPromise(node) ? await node : node);
+          }
+        }
+
+        return resolvedNodes;
+      }.bind(this);
+
+      var effectNodes = (await resolveEffectsChain()).map(normalizeEffectNode).filter(Boolean);
+
+      if (!effectNodes.length) {
+        bufferSource.connect(offlineAudioCtx.destination);
+      } else if (this.chainMode === 'parallel') {
+        effectNodes.forEach((node) => {
+          bufferSource.connect(node.input);
+          node.output.connect(offlineAudioCtx.destination);
+        });
+      } else {
+        var previousNode = bufferSource;
+        effectNodes.forEach((node) => {
+          previousNode.connect(node.input);
+          previousNode = node.output;
+        });
+        previousNode.connect(offlineAudioCtx.destination);
+      }
+
+      bufferSource.start();
+
+      this.previousConfig = this.config;
+      // Kick off the render, callback will contain rendered buffer in event
+      return offlineAudioCtx.startRendering();
+    };
+
+    this.draw = function(buffer, context, sourceX = 0, sourceY = 0, x = 0, y = 0, sourceWidth = this.imageData.width, sourceHeight = this.imageData.height, targetWidth = window.innerWidth, targetHeight = window.innerHeight) {
+      // Get buffer data
+      var bufferData = buffer.getChannelData(0);
+
+      // ImageData expects a Uint8ClampedArray so we need to make a typed array from our buffer
+      // @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer
+      var clampedDataArray = new Uint8ClampedArray(buffer.length);
+
+      for (var k = 0; k < bufferData.length; k++) {
+        var value = ((bufferData[k] + 1) * 128);
+        clampedDataArray[k] = value < 0 ? 0 : (value > 255 ? 255 : value);
+      }
+
+      // putImageData requires an ImageData Object
+      // @see https://developer.mozilla.org/en-US/docs/Web/API/ImageData
+      const transformedImageData = new ImageData(this.imageData.width, this.imageData.height);
+      transformedImageData.data.set(clampedDataArray);
+
+      const tmpCanvas = typeof OffscreenCanvas !== 'undefined'
+        ? new OffscreenCanvas(this.imageData.width, this.imageData.height)
+        : (() => {
+            const element = document.createElement('canvas');
+            element.width = this.imageData.width;
+            element.height = this.imageData.height;
+            return element;
+          })();
+      tmpCanvas.getContext('2d').putImageData(transformedImageData, sourceX, sourceY);
+      context.drawImage(tmpCanvas, sourceX, sourceY, sourceWidth, sourceHeight, x, y, targetWidth, targetHeight);
+    };
+
+    this.bend = function(data, context, sourceX = 0, sourceY = 0, x = 0, y = 0, targetWidth = window.innerWidth, targetHeight = window.innerHeight) {
+      return this.convert(data)
+        .then((buffer) => this.render(buffer))
+        .then((buffer) => this.draw(buffer, context, sourceX, sourceY, x, y, this.imageData.width, this.imageData.height, targetWidth, targetHeight));
+    };
+
+    return this;
+  }
 };
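The new entry point drops the bundled tunajs effects in favor of a caller-supplied effectsChain whose entries may be factories called with { context, source, config } and are expected to return something with a .connect() method. A minimal usage sketch of that surface follows; the image element, canvas context, and GainNode factory below are illustrative assumptions, not part of the package, and the example assumes the IIFE build has exposed a global Databender (or that index.js is imported as an ES module default export).

// Assumed host-page elements, for illustration only.
const image = document.querySelector('img');
const screen = document.querySelector('canvas').getContext('2d');

// An effectsChain entry may be a factory; render() invokes it with
// { context, source, config } and wires up whatever connectable node it returns.
const softGain = ({ context }) => {
  const gain = context.createGain(); // plain Web Audio GainNode, purely illustrative
  gain.gain.value = 0.8;
  return gain;
};

const databender = new Databender({ effectsChain: [softGain], chainMode: 'series' });
databender.bend(image, screen); // convert -> render -> draw onto the 2d context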
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "databender",
-  "version": "1.0.3",
+  "version": "2.0.0-alpha.0",
   "description": "Create interesting visuals by misusing the Web Audio API",
   "main": "index.js",
   "scripts": {
@@ -17,13 +17,9 @@
   ],
   "author": "Mike Vattuone",
   "license": "MIT",
-  "dependencies": {
-    "tunajs": "1.0.11"
-  },
   "devDependencies": {
-    "rollup": "0.
-    "rollup
-    "rollup
-    "rollup-plugin-node-resolve": "3.3.0"
+    "@rollup/plugin-commonjs": "28.0.8",
+    "@rollup/plugin-node-resolve": "16.0.3",
+    "rollup": "4.52.4"
   }
 }
package/rollup.config.js
CHANGED
@@ -1,21 +1,23 @@
 // rollup.config.js
-import
-import
-import json from 'rollup-plugin-json';
+import resolve from '@rollup/plugin-node-resolve';
+import commonjs from '@rollup/plugin-commonjs';

 export default {
   input: 'index.js',
   output: {
     file: 'dist/databender.js',
     name: 'Databender',
-    format: 'iife'
+    format: 'iife',
+    sourcemap: false,
   },
   plugins: [
-    … (old lines 14-19 elided in the source diff view)
+    resolve({
+      browser: true,
+      preferBuiltins: false,
+    }),
+    commonjs({
+      requireReturnsDefault: 'auto',
+    }),
   ]
 };
+