@shaderfrog/core 2.0.1 → 3.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/engine.d.ts +41 -4
- package/engine.js +7 -2
- package/graph/context.d.ts +6 -5
- package/graph/context.js +38 -48
- package/graph/data-nodes.d.ts +15 -6
- package/graph/data-nodes.js +1 -1
- package/graph/graph-types.d.ts +26 -6
- package/graph/graph-types.js +69 -0
- package/graph/graph.d.ts +28 -3
- package/graph/graph.js +87 -23
- package/graph/graph.test.js +18 -4
- package/graph/parsers.d.ts +2 -1
- package/graph/parsers.js +9 -5
- package/graph/shader-sections.d.ts +30 -13
- package/graph/shader-sections.js +92 -18
- package/package.json +3 -3
- package/plugins/babylon/bablyengine.js +5 -9
- package/plugins/babylon/importers.js +1 -1
- package/plugins/playcanvas/importers.js +1 -1
- package/plugins/playcanvas/playengine.js +5 -9
- package/plugins/three/importers.d.ts +1 -1
- package/plugins/three/importers.js +35 -12
- package/plugins/three/threngine.d.ts +6 -4
- package/plugins/three/threngine.js +54 -37
- package/plugins/three/threngine.test.js +1 -2
- package/strategy/assignemntTo.js +3 -4
- package/strategy/assignmentTo.d.ts +10 -0
- package/strategy/assignmentTo.js +35 -0
- package/strategy/index.d.ts +1 -1
- package/strategy/index.js +1 -1
- package/strategy/strategy.d.ts +2 -2
- package/strategy/strategy.js +1 -1
- package/util/ast.d.ts +3 -2
- package/util/ast.js +8 -0
- package/util/indexByid.d.ts +4 -0
- package/util/indexByid.js +18 -0
- package/util/math.d.ts +2 -0
- package/util/math.js +6 -0
- package/util/whitespace.js +4 -11
package/graph/graph.js
CHANGED

@@ -73,12 +73,12 @@ var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
 import { renameBindings, renameFunctions, } from '@shaderfrog/glsl-parser/parser/utils';
 import { computeGraphContext, isError, } from './context';
 import { shaderSectionsCons, findShaderSections, mergeShaderSections, shaderSectionsToProgram, } from './shader-sections';
-import { backfillAst, makeExpression
+import { backfillAst, makeExpression } from '../util/ast';
 import { ensure } from '../util/ensure';
 import { SourceType } from './code-nodes';
 import { nodeInput } from './base-node';
 import { makeId } from '../util/id';
-import { coreParsers } from './parsers';
+import { alphabet, coreParsers } from './parsers';
 import { toGlsl } from './evaluate';
 import { EdgeLink, MAGIC_OUTPUT_STMTS, NodeType, } from './graph-types';
 import { generate } from '@shaderfrog/glsl-parser';
@@ -173,9 +173,58 @@ export var resetGraphIds = function (graph) {
         edges: graph.edges.map(function (e) { return (__assign(__assign({}, e), { id: map(e.id), from: map(e.from), to: map(e.to) })); }),
     };
 };
-
-
-
+/**
+ * A binary node automatically adds/removes inputs based on how many edges
+ * connect to it. If a binary node has edges to "a" and "b", removing the edge
+ * to "a" means the edge to "b" needs to be moved down to the "a" one. This
+ * function essentially groups edges by target node id, and resets the edge
+ * target to its index. This doesn't feel good to do here but I don't have a
+ * better idea at the moment. One reason the inputs to binary nodes are
+ * automatically updated after compile, but the edges are updated here
+ * at the editor layer, before compile. This also hard codes assumptions about
+ * (binary) node inputs into the graph, namely they can't have blank inputs.
+ */
+export var collapseBinaryGraphEdges = function (graph) {
+    // Find all edges that flow into a binary node, grouped by the target node's
+    // id, since we need to know the total number of edges per node first
+    var binaryEdges = graph.edges.reduce(function (acc, edge) {
+        var _a;
+        var toNode = findNode(graph, edge.to);
+        return toNode.type === NodeType.BINARY
+            ? __assign(__assign({}, acc), (_a = {}, _a[toNode.id] = __spreadArray(__spreadArray([], __read((acc[toNode.id] || [])), false), [edge], false), _a)) : acc;
+    }, {});
+    // Then collapse them
+    var updatedEdges = graph.edges.map(function (edge) {
+        return edge.to in binaryEdges
+            ? __assign(__assign({}, edge), { input: alphabet.charAt(binaryEdges[edge.to].indexOf(edge)) }) : edge;
+    });
+    return __assign(__assign({}, graph), { edges: updatedEdges });
+};
+/**
+ * Restrict edges so that an input handle can't have multiple edges going to it
+ */
+export var addEdgeAndPruneRestrictions = function (edges, newEdge) {
+    return edges
+        .filter(function (edge) {
+        // Prevent one input handle from having multiple inputs
+        return !(edge.to === newEdge.to && edge.input === newEdge.input);
+    })
+        .concat(newEdge);
+};
+/**
+ * Adds an edge to the graph and enforces graph edge business logic rules:
+ * - Makes sure "binary" (add/multiply) nodes edges are collapsed
+ * - Makes sure two edges can't flow into the same input.
+ * See also editor/flow-helpers.ts
+ */
+export var addGraphEdge = function (graph, newEdge) {
+    return collapseBinaryGraphEdges(__assign(__assign({}, graph), { edges: addEdgeAndPruneRestrictions(graph.edges, newEdge) }));
+};
+export var findLinkedNode = function (graph, nodeId) {
+    var edgeLink = graph.edges.find(function (e) {
+        return e.type === EdgeLink.NEXT_STAGE && (e.from === nodeId || e.to === nodeId);
+    });
+    var otherId = (edgeLink === null || edgeLink === void 0 ? void 0 : edgeLink.from) === nodeId ? edgeLink === null || edgeLink === void 0 ? void 0 : edgeLink.to : edgeLink === null || edgeLink === void 0 ? void 0 : edgeLink.from;
     // Only source nodes can be linked, so cast it
     return graph.nodes.find(function (node) { return node.id === otherId; });
 };
@@ -204,12 +253,12 @@ export var findLinkedVertexNodes = function (graph, existingIds) {
 export var consSearchResult = function () { return ({
     nodes: {},
     inputs: {},
-    edges:
+    edges: {},
 }); };
 export var mergeSearchResults = function (a, b) { return ({
     nodes: __assign(__assign({}, a.nodes), b.nodes),
     inputs: __assign(__assign({}, a.inputs), b.inputs),
-    edges:
+    edges: __assign(__assign({}, a.edges), b.edges),
 }); };
 /**
  * Create the inputs on a node from the properties. This used to be done at
@@ -237,23 +286,24 @@ export var filterGraphFromNode = function (graph, node, predicates, depth, lastR
         ? (_a = {}, _a[node.id] = node, _a) : {}));
     var accumulatedResult = __assign(__assign({}, lastResult), { nodes: __assign(__assign({}, lastResult.nodes), nodeAcc) });
     return inputEdges.reduce(function (acc, inputEdge) {
-        var _a;
+        var _a, _b;
        var input = inputs.find(function (i) { return i.id === inputEdge.input; });
        var fromNode = inputEdge ? ensureFromNode(graph, inputEdge) : undefined;
        var inputAcc = __assign(__assign({}, acc.inputs), (input &&
            predicates.input &&
            predicates.input(input, node, inputEdge, fromNode, lastResult)
            ? (_a = {}, _a[node.id] = __spreadArray(__spreadArray([], __read((acc.inputs[node.id] || [])), false), [input], false), _a) : {}));
-        var edgeAcc =
+        var edgeAcc = __assign(__assign({}, acc.edges), (predicates.edge &&
            predicates.edge(input, node, inputEdge, fromNode, lastResult)
-            ? [inputEdge]
-            : [])), false);
+            ? (_b = {}, _b[inputEdge.id] = inputEdge, _b) : {}));
        // Add in the latest result of edges and inputs so that when we recurse into
        // the next node, it has the latest accumulator
        var intermediateAcc = __assign(__assign({}, acc), { inputs: inputAcc, edges: edgeAcc });
        if (inputEdge && fromNode && depth > 1) {
            var result = filterGraphFromNode(graph, fromNode, predicates, depth - 1, intermediateAcc);
-
+            // The result is automatically the combination of the current acc and the
+            // result of the recursed data
+            return result;
        }
        else {
            return intermediateAcc;
@@ -263,13 +313,20 @@ export var filterGraphFromNode = function (graph, node, predicates, depth, lastR
 export var collectConnectedNodes = function (graph, node) {
     return filterGraphFromNode(graph, node, { node: function () { return true; } }).nodes;
 };
+var merge = function (a, b) {
+    var keys = new Set(__spreadArray(__spreadArray([], __read(Object.keys(a)), false), __read(Object.keys(b)), false));
+    return Array.from(keys).reduce(function (acc, key) {
+        var _a;
+        return (__assign(__assign({}, acc), (_a = {}, _a[key] = __spreadArray(__spreadArray([], __read((a[key] || [])), false), __read((b[key] || [])), false), _a)));
+    }, {});
+};
 export var filterGraphNodes = function (graph, nodes, filter, depth) {
     if (depth === void 0) { depth = Infinity; }
     return nodes.reduce(function (acc, node) {
         var result = filterGraphFromNode(graph, node, filter, depth);
         return {
             nodes: __assign(__assign({}, acc.nodes), result.nodes),
-            inputs:
+            inputs: merge(acc.inputs, result.inputs),
             edges: __assign(__assign({}, acc.edges), result.edges),
         };
     }, consSearchResult());
@@ -294,9 +351,9 @@ export var compileNode = function (engine, graph, edges, engineContext, node, ac
     var nodeContext = isDataNode(node)
         ? null
         : ensure(engineContext.nodes[node.id], "No node context found for \"".concat(node.name, "\" (id ").concat(node.id, ")!"));
-    var _b =
-    if (!
-        throw new Error("I'm drunk and I think this case should be impossible");
+    var _b = nodeContext || {}, ast = _b.ast, inputFillers = _b.inputFillers;
+    if (!ast || !inputFillers) {
+        throw new Error("I'm drunk while writing this code, and I think this case should be impossible, so I'm going to throw an error and ignore it");
     }
     var compiledIds = activeIds;
     var inputEdges = edges.filter(function (edge) { return edge.to === node.id; });
@@ -324,7 +381,6 @@ export var compileNode = function (engine, graph, edges, engineContext, node, ac
         .forEach(function (_a) {
         var _b, _c;
         var fromNode = _a.fromNode, input = _a.input;
-        // const [inputSections, fillerFn, childIds, childDeps] = compileNode(
         var _d = __read(compileNode(engine, graph, edges, engineContext, fromNode, activeIds), 3), inputSections = _d[0], fillerFn = _d[1], childIds = _d[2];
         if (!fillerFn) {
             throw new TypeError("Expected a filler ast from node ID ".concat(fromNode.id, " (").concat(fromNode.type, ") but none was returned"));
@@ -352,7 +408,12 @@ export var compileNode = function (engine, graph, edges, engineContext, node, ac
                 inputFillers: inputFillers,
                 fillerName: fillerName,
             });
-
+            return;
+            // throw new Error(
+            //   `Node "${node.name}"${
+            //     (node as SourceNode).stage ? ` (${(node as SourceNode).stage})` : ''
+            //   } has no filler for input "${input.displayName}" named ${fillerName}`,
+            // );
         }
         // Test if it needs to be backfilled - this only goes one level deep
         // because we're only backfilling fromNode
@@ -380,7 +441,7 @@ export var compileNode = function (engine, graph, edges, engineContext, node, ac
         codeNode.sourceType === SourceType.EXPRESSION ||
         codeNode.sourceType === SourceType.FN_BODY_FRAGMENT
         ? shaderSectionsCons()
-        : findShaderSections(ast));
+        : findShaderSections(node.id, ast));
     var filler = isDataNode(node)
         ? function () { return makeExpression(toGlsl(node)); }
         : parser.produceFiller(node, ast);
@@ -452,7 +513,7 @@ export var collectNodeProperties = function (graph) {
     };
 };
 export var compileSource = function (graph, engine, ctx) { return __awaiter(void 0, void 0, void 0, function () {
-    var result, compileResult, fragmentResult, vertexResult, dataInputs, dataNodes;
+    var result, updatedNodeContext, updatedContext, compileResult, fragmentResult, vertexResult, dataInputs, dataNodes;
    return __generator(this, function (_a) {
        switch (_a.label) {
            case 0: return [4 /*yield*/, computeGraphContext(ctx, engine, graph)];
@@ -461,9 +522,10 @@ export var compileSource = function (graph, engine, ctx) { return __awaiter(void
                if (isError(result)) {
                    return [2 /*return*/, result];
                }
-
-
-
+                updatedNodeContext = __assign(__assign({}, ctx.nodes), result);
+                updatedContext = __assign(__assign({}, ctx), { nodes: updatedNodeContext });
+                compileResult = compileGraph(updatedContext, engine, graph);
+                fragmentResult = generate(shaderSectionsToProgram(compileResult.fragment, engine.mergeOptions).program);
                vertexResult = generate(shaderSectionsToProgram(compileResult.vertex, engine.mergeOptions).program);
                dataInputs = filterGraphNodes(graph, [compileResult.outputFrag, compileResult.outputVert], { input: isDataInput }).inputs;
                dataNodes = Object.entries(dataInputs).reduce(function (acc, _a) {
@@ -476,11 +538,13 @@ export var compileSource = function (graph, engine, ctx) { return __awaiter(void
                    }, acc);
                }, {});
                return [2 /*return*/, {
+                        updatedNodeContext: updatedNodeContext,
                        compileResult: compileResult,
                        fragmentResult: fragmentResult,
                        vertexResult: vertexResult,
                        dataNodes: dataNodes,
                        dataInputs: dataInputs,
+                        compileMs: '',
                    }];
        }
    });
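The collapse rule documented in the new collapseBinaryGraphEdges comment is easier to see in isolation: group edges by their binary target node, then re-letter each edge's input by its index within the group. A minimal standalone TypeScript sketch, with simplified stand-ins for the package's Edge and Graph shapes (not the library's actual API surface):

// Simplified stand-in for the package's Edge shape.
type Edge = { id: string; from: string; to: string; input: string };

const alphabet = 'abcdefghijklmnopqrstuvwxyz';

// Re-letter all edges flowing into the given binary nodes so their inputs
// are contiguous: "a", "b", "c", ... with no gaps.
const collapse = (edges: Edge[], binaryNodeIds: Set<string>): Edge[] => {
  const grouped = new Map<string, Edge[]>();
  for (const edge of edges) {
    if (binaryNodeIds.has(edge.to)) {
      grouped.set(edge.to, [...(grouped.get(edge.to) || []), edge]);
    }
  }
  return edges.map((edge) =>
    grouped.has(edge.to)
      ? { ...edge, input: alphabet.charAt(grouped.get(edge.to)!.indexOf(edge)) }
      : edge
  );
};

// After deleting the edge into "a", collapsing moves the "b" edge down:
const remaining: Edge[] = [{ id: 'e2', from: 'tex', to: 'sum', input: 'b' }];
console.log(collapse(remaining, new Set(['sum'])));
// => [{ id: 'e2', from: 'tex', to: 'sum', input: 'a' }]

addGraphEdge composes this with addEdgeAndPruneRestrictions, so inserting an edge first evicts any edge already occupying the same input handle and then re-letters whatever remains.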
package/graph/graph.test.js
CHANGED

@@ -50,7 +50,7 @@ import util from 'util';
 import { parser } from '@shaderfrog/glsl-parser';
 import { generate } from '@shaderfrog/glsl-parser';
 import { addNode, outputNode, sourceNode } from './graph-node';
-import { shaderSectionsToProgram, mergeShaderSections, findShaderSections, } from './shader-sections';
+import { shaderSectionsToProgram, mergeShaderSections, findShaderSections, extractSource, filterUniformNames, filterQualifiedStatements, } from './shader-sections';
 import { numberNode } from './data-nodes';
 import { linkFromVertToFrag, makeEdge } from './edge';
 import { evaluateNode } from './evaluate';
@@ -64,8 +64,8 @@ var inspect = function (thing) {
     return console.log(util.inspect(thing, false, null, true));
 };
 var mergeBlocks = function (ast1, ast2) {
-    var s1 = findShaderSections(ast1);
-    var s2 = findShaderSections(ast2);
+    var s1 = findShaderSections('', ast1);
+    var s2 = findShaderSections('', ast2);
     var merged = mergeShaderSections(s1, s2);
     return generate(shaderSectionsToProgram(merged, {
         includePrecisions: true,
@@ -73,7 +73,7 @@ var mergeBlocks = function (ast1, ast2) {
     }));
 };
 var dedupe = function (code) {
-    return generate(shaderSectionsToProgram(findShaderSections(parser.parse(code)), {
+    return generate(shaderSectionsToProgram(findShaderSections('', parser.parse(code)), {
         includePrecisions: true,
         includeVersion: true,
     }));
@@ -435,3 +435,17 @@ it('should merge uniforms with interface blocks', function () {
     // Verify these lines are preserved (they go through dedupeUniforms)
     expect(dedupe("layout(std140,column_major) uniform;")).toEqual("layout(std140,column_major) uniform;");
 });
+it('filterUniformNames', function () {
+    var stmts = parser
+        .parse("uniform vec4 x,y;\nuniform vec2 x, y[5];\nuniform Light0 { vec4 y; } x;\nuniform Light0 { vec4 x; } y;\n")
+        .program.filter(function (s) { return s.type === 'declaration_statement'; });
+    var filtered = filterUniformNames(stmts.map(function (x) { return ({ nodeId: '', source: x }); }), function (name) { return name !== 'x'; });
+    expect(generate(extractSource(filtered))).toEqual("uniform vec4 y;\nuniform vec2 y[5];\nuniform Light0 { vec4 x; } y;\n");
+});
+it('filterQualifiedStatements', function () {
+    var stmts = parser
+        .parse("in vec2 x, y;\nout vec2 x;\n")
+        .program.filter(function (s) { return s.type === 'declaration_statement'; });
+    var filtered = filterQualifiedStatements(stmts.map(function (x) { return ({ nodeId: '', source: x }); }), function (name) { return name !== 'x'; });
+    expect(generate(extractSource(filtered))).toEqual("in vec2 y;\n");
+});
package/graph/parsers.d.ts
CHANGED

@@ -5,9 +5,10 @@ import { Edge } from './edge';
 import { SourceNode } from './code-nodes';
 import { Graph } from './graph-types';
 import { Evaluate } from './evaluate';
+import { NodeContext } from './context';
 export declare const alphabet = "abcdefghijklmnopqrstuvwxyz";
 export type ProduceAst = (engineContext: EngineContext, engine: Engine, graph: Graph, node: SourceNode, inputEdges: Edge[]) => AstNode | Program;
-export type OnBeforeCompile = (graph: Graph, engineContext: EngineContext, node: SourceNode, sibling?: SourceNode) => Promise<void>;
+export type OnBeforeCompile = (graph: Graph, engineContext: EngineContext, node: SourceNode, sibling?: SourceNode) => Promise<Partial<NodeContext> | void>;
 export type ManipulateAst = (engineContext: EngineContext, engine: Engine, graph: Graph, ast: AstNode | Program, inputEdges: Edge[], node: SourceNode, sibling: SourceNode) => AstNode | Program;
 export type NodeParser = {
     onBeforeCompile?: OnBeforeCompile;
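The widened return type pairs with the produceAst change in parsers.js below: a node parser's onBeforeCompile can now resolve with a partial NodeContext, and a computedSource carried in it takes precedence over node.source when the node is parsed. A hedged sketch of the contract, with simplified stand-ins for the package's types (the real hook also receives the graph, the engine context, and an optional sibling node):

// Simplified stand-ins; the real SourceNode/NodeContext types are richer.
type SourceNode = { id: string; source: string };
type NodeContext = { computedSource?: string };

// An onBeforeCompile-style hook that hands back engine-computed GLSL.
const onBeforeCompile = async (
  node: SourceNode
): Promise<Partial<NodeContext> | void> => {
  // Pretend the engine regenerated this node's source at compile time.
  return { computedSource: `// engine-computed\n${node.source}` };
};

// produceAst then prefers the computed source, mirroring parsers.js below:
const pickSource = (ctx: NodeContext | undefined, node: SourceNode) =>
  ctx?.computedSource || node.source;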
package/graph/parsers.js
CHANGED

@@ -51,7 +51,11 @@ export var alphabet = 'abcdefghijklmnopqrstuvwxyz';
 export var coreParsers = (_a = {},
     _a[NodeType.SOURCE] = {
         produceAst: function (engineContext, engine, graph, node, inputEdges) {
+            var _a;
             var ast;
+            // Load the source either from the computed source at runtime, or the
+            // node's source code itself
+            var source = ((_a = engineContext.nodes[node.id]) === null || _a === void 0 ? void 0 : _a.computedSource) || node.source;
             // @ts-ignore
             if (node.expressionOnly) {
                 node.sourceType = SourceType.EXPRESSION;
@@ -59,7 +63,7 @@ export var coreParsers = (_a = {},
                 delete node.expressionOnly;
             }
             if (node.sourceType === SourceType.FN_BODY_FRAGMENT) {
-                var
+                var _b = makeFnBodyStatementWithScopes(source), statements = _b.statements, scope = _b.scope;
                 ast = {
                     type: 'program',
                     scopes: [scope],
@@ -68,7 +72,7 @@ export var coreParsers = (_a = {},
                 };
             }
             else if (node.sourceType === SourceType.EXPRESSION) {
-                var
+                var _c = makeExpressionWithScopes(source), expression = _c.expression, scope = _c.scope;
                 ast = {
                     type: 'program',
                     scopes: [scope],
@@ -78,13 +82,13 @@
             }
             else {
                 var preprocessed = node.config.preprocess === false
-                    ?
-                    : preprocess(
+                    ? source
+                    : preprocess(source, {
                     preserve: {
                         version: function () { return true; },
                     },
                 });
-                ast = parser.parse(preprocessed);
+                ast = parser.parse(preprocessed, { stage: node.stage });
                 if (node.config.version === 2 && node.stage) {
                     from2To3(ast, node.stage);
                 }
package/graph/shader-sections.d.ts
CHANGED

@@ -1,18 +1,26 @@
 /**
  * Categorizing / deduping parts of shaders to help merge them together
  */
-import {
+import { DeclarationStatementNode, PreprocessorNode, ProgramStatement } from '@shaderfrog/glsl-parser/ast';
 import { Program } from '@shaderfrog/glsl-parser/ast';
+export type LineAndSource<T = any> = {
+    nodeId: string;
+    source: T;
+};
+export declare function extractSource<T>(lineAndSource: LineAndSource<T>): T;
+export declare function extractSource<T>(lineAndSource: LineAndSource<T>[]): T[];
 export interface ShaderSections {
-    precision: DeclarationStatementNode[];
-    version:
-    preprocessor: PreprocessorNode[];
-    structs:
-    inStatements: DeclarationStatementNode[];
-    outStatements: DeclarationStatementNode[];
-    uniforms: DeclarationStatementNode[];
-    program:
+    precision: LineAndSource<DeclarationStatementNode>[];
+    version: LineAndSource<PreprocessorNode>[];
+    preprocessor: LineAndSource<PreprocessorNode>[];
+    structs: LineAndSource<DeclarationStatementNode>[];
+    inStatements: LineAndSource<DeclarationStatementNode>[];
+    outStatements: LineAndSource<DeclarationStatementNode>[];
+    uniforms: LineAndSource<DeclarationStatementNode>[];
+    program: LineAndSource<ProgramStatement>[];
 }
+export declare const filterSections: (filter: (s: LineAndSource) => boolean, sections: ShaderSections) => ShaderSections;
+export declare const mapSections: (map: (s: LineAndSource) => LineAndSource, sections: ShaderSections) => ShaderSections;
 export declare const shaderSectionsCons: () => ShaderSections;
 declare enum Precision {
     highp = 2,
@@ -20,9 +28,18 @@ declare enum Precision {
     lowp = 0
 }
 export declare const higherPrecision: (p1: Precision, p2: Precision) => Precision;
-export declare const dedupeVersions: (nodes:
+export declare const dedupeVersions: (nodes: PreprocessorNode[]) => PreprocessorNode;
 export declare const highestPrecisions: (nodes: DeclarationStatementNode[]) => DeclarationStatementNode[];
-export declare const
+export declare const extractDeclarationNameAndType: (stmt: DeclarationStatementNode) => {
+    type: string;
+    names: string[];
+};
+export declare const filterQualifiedStatements: (statements: LineAndSource<DeclarationStatementNode>[], filter: (name: string) => boolean) => LineAndSource<DeclarationStatementNode>[];
+export declare const dedupeQualifiedStatements: (statements: DeclarationStatementNode[], qualifier: string) => ProgramStatement[];
+/**
+ * Remove uniform declarations by the variable names they declare
+ */
+export declare const filterUniformNames: (declarations: LineAndSource<DeclarationStatementNode>[], filter: (name: string) => boolean) => LineAndSource<DeclarationStatementNode>[];
 /**
  * Merge uniforms together into lists of identifiers under the same type.
  * There's special case handling for mixing of uniforms with "interface blocks"
@@ -32,7 +49,7 @@ export declare const dedupeQualifiedStatements: (statements: DeclarationStatemen
  * This function consumes uniforms as found by findShaderSections, so the
  * definitions must line up
  */
-export declare const dedupeUniforms: (statements: DeclarationStatementNode[]) =>
+export declare const dedupeUniforms: (statements: DeclarationStatementNode[]) => DeclarationStatementNode[];
 export declare const mergeShaderSections: (s1: ShaderSections, s2: ShaderSections) => ShaderSections;
 export type MergeOptions = {
     includePrecisions: boolean;
@@ -43,5 +60,5 @@ export declare const shaderSectionsToProgram: (sections: ShaderSections, mergeOp
  * Group an AST into logical sections. The output of this function is consumed
  * by the dedupe methods, namely dedupeUniforms, so the data shapes are coupled
  */
-export declare const findShaderSections: (ast: Program) => ShaderSections;
+export declare const findShaderSections: (nodeId: string, ast: Program) => ShaderSections;
 export {};
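Every bucket in ShaderSections now stores LineAndSource entries rather than bare AST nodes, so each statement remembers which graph node contributed it; extractSource unwraps a single entry or a whole list, and filterSections/mapSections apply a predicate or transform across all eight buckets at once. A standalone sketch of the wrapper, with the AST payload simplified to a string for brevity:

// Simplified: the real T is a glsl-parser AST node, not a string.
type LineAndSource<T = any> = { nodeId: string; source: T };

function extractSource<T>(l: LineAndSource<T>): T;
function extractSource<T>(l: LineAndSource<T>[]): T[];
function extractSource<T>(l: LineAndSource<T> | LineAndSource<T>[]) {
  return Array.isArray(l) ? l.map((x) => x.source) : l.source;
}

const uniforms: LineAndSource<string>[] = [
  { nodeId: 'n1', source: 'uniform float time;' },
  { nodeId: 'n2', source: 'uniform vec2 mouse;' },
];

// Drop everything node "n2" contributed, then unwrap for code generation.
const kept = uniforms.filter((u) => u.nodeId !== 'n2');
console.log(extractSource(kept)); // ['uniform float time;']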
package/graph/shader-sections.js
CHANGED

@@ -36,6 +36,31 @@ var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
 };
 import { generate } from '@shaderfrog/glsl-parser';
 import { makeStatement } from '../util/ast';
+export function extractSource(lineAndSource) {
+    return Array.isArray(lineAndSource)
+        ? lineAndSource.map(function (l) { return l.source; })
+        : lineAndSource.source;
+}
+export var filterSections = function (filter, sections) { return ({
+    precision: sections.precision.filter(filter),
+    version: sections.version.filter(filter),
+    preprocessor: sections.preprocessor.filter(filter),
+    structs: sections.structs.filter(filter),
+    inStatements: sections.inStatements.filter(filter),
+    outStatements: sections.outStatements.filter(filter),
+    uniforms: sections.uniforms.filter(filter),
+    program: sections.program.filter(filter),
+}); };
+export var mapSections = function (map, sections) { return ({
+    precision: sections.precision.map(map),
+    version: sections.version.map(map),
+    preprocessor: sections.preprocessor.map(map),
+    structs: sections.structs.map(map),
+    inStatements: sections.inStatements.map(map),
+    outStatements: sections.outStatements.map(map),
+    uniforms: sections.uniforms.map(map),
+    program: sections.program.map(map),
+}); };
 export var shaderSectionsCons = function () { return ({
     precision: [],
     preprocessor: [],
@@ -65,18 +90,65 @@ export var highestPrecisions = function (nodes) {
         return makeStatement("precision ".concat(precision, " ").concat(typeName))[0];
     });
 };
+export var extractDeclarationNameAndType = function (stmt) {
+    var dec = stmt.declaration;
+    return {
+        type: dec.specified_type.specifier.specifier.token,
+        names: dec.declarations.map(function (decl) { return decl.identifier.identifier; }),
+    };
+};
+export var filterQualifiedStatements = function (statements, filter) {
+    return statements.reduce(function (acc, line) {
+        var stmt = line.source;
+        var dec = stmt.declaration;
+        var filtered = dec.declarations.filter(function (decl) {
+            return filter(decl.identifier.identifier);
+        });
+        return filtered.length
+            ? acc.concat(__assign(__assign({}, line), { source: __assign(__assign({}, line.source), { declaration: __assign(__assign({}, dec), { declarations: filtered }) }) }))
+            : acc;
+    }, []);
+};
 export var dedupeQualifiedStatements = function (statements, qualifier) {
-    return Object.entries(statements.reduce(function (
+    return Object.entries(statements.reduce(function (indexed, stmt) {
         var _a;
-
-
-        return (__assign(__assign({}, types), (_a = {}, _a[decl.identifier.identifier] = true, _a)));
-    }, {})), _a)));
+        var _b = extractDeclarationNameAndType(stmt), type = _b.type, names = _b.names;
+        return __assign(__assign({}, indexed), (_a = {}, _a[type] = new Set(__spreadArray(__spreadArray([], __read((indexed[type] || new Set())), false), __read(names), false)), _a));
     }, {})).map(function (_a) {
         var _b = __read(_a, 2), type = _b[0], varNames = _b[1];
-        return makeStatement("".concat(qualifier, " ").concat(type, " ").concat(
+        return makeStatement("".concat(qualifier, " ").concat(type, " ").concat(Array.from(varNames).join(', ')))[0];
     });
 };
+/**
+ * Remove uniform declarations by the variable names they declare
+ */
+export var filterUniformNames = function (declarations, filter) {
+    return declarations.reduce(function (acc, line) {
+        var _a, _b;
+        var decl = line.source.declaration;
+        // Struct declarations like "uniform Light0 { vec4 y; } x;"
+        if (decl.type === 'interface_declarator') {
+            var identifier = (_b = (_a = decl.identifier) === null || _a === void 0 ? void 0 : _a.identifier) === null || _b === void 0 ? void 0 : _b.identifier;
+            // If there are no remaining declarations, remove the whole line
+            return !identifier || !filter(identifier) ? acc : __spreadArray(__spreadArray([], __read(acc), false), [line], false);
+            // Standard uniform declaration, like "uniform vec4 x, y;"
+        }
+        else if (decl.type === 'declarator_list') {
+            var filtered = decl.declarations.filter(function (d) {
+                return filter(d.identifier.identifier);
+            });
+            // If there are no remaining declarations, remove the whole line.
+            // Otherwise, update the line to remove the filtered out names
+            return filtered.length
+                ? acc.concat(__assign(__assign({}, line), { source: __assign(__assign({}, line.source), { declaration: __assign(__assign({}, decl), { declarations: filtered }) }) }))
+                : acc;
+        }
+        else {
+            console.error('Unknown uniform declaration type to filter:', decl);
+            throw new Error("Unknown uniform declarationt type to filter: \"".concat(decl.type, "\""));
+        }
+    }, []);
+};
 /**
  * Merge uniforms together into lists of identifiers under the same type.
  * There's special case handling for mixing of uniforms with "interface blocks"
@@ -183,15 +255,17 @@ export var mergeShaderSections = function (s1, s2) {
 export var shaderSectionsToProgram = function (sections, mergeOptions) { return ({
     type: 'program',
     scopes: [],
-    program: __spreadArray(__spreadArray(__spreadArray(__spreadArray(__spreadArray(__spreadArray(__spreadArray(__spreadArray([], __read((mergeOptions.includeVersion
-        ?
-        : [])), false), __read(
+    program: __spreadArray(__spreadArray(__spreadArray(__spreadArray(__spreadArray(__spreadArray(__spreadArray(__spreadArray([], __read((mergeOptions.includeVersion
+        ? [dedupeVersions(extractSource(sections.version))]
+        : [])), false), __read((mergeOptions.includePrecisions
+        ? highestPrecisions(extractSource(sections.precision))
+        : [])), false), __read(extractSource(sections.preprocessor)), false), __read(extractSource(sections.structs)), false), __read(dedupeQualifiedStatements(extractSource(sections.inStatements), 'in')), false), __read(dedupeQualifiedStatements(extractSource(sections.outStatements), 'out')), false), __read(dedupeUniforms(extractSource(sections.uniforms))), false), __read(extractSource(sections.program)), false),
 }); };
 /**
  * Group an AST into logical sections. The output of this function is consumed
  * by the dedupe methods, namely dedupeUniforms, so the data shapes are coupled
  */
-export var findShaderSections = function (ast) {
+export var findShaderSections = function (nodeId, ast) {
     var initialValue = {
         precision: [],
         preprocessor: [],
@@ -205,19 +279,19 @@ export var findShaderSections = function (ast) {
     return ast.program.reduce(function (sections, node) {
         var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q;
         if (node.type === 'preprocessor' && node.line.startsWith('#version')) {
-            return __assign(__assign({}, sections), { version: sections.version.concat(node) });
+            return __assign(__assign({}, sections), { version: sections.version.concat({ nodeId: nodeId, source: node }) });
         }
         else if (node.type === 'declaration_statement' &&
             node.declaration.type === 'precision') {
-            return __assign(__assign({}, sections), { precision: sections.precision.concat(node) });
+            return __assign(__assign({}, sections), { precision: sections.precision.concat({ nodeId: nodeId, source: node }) });
         }
         else if (node.type === 'preprocessor') {
-            return __assign(__assign({}, sections), { preprocessor: sections.preprocessor.concat(node) });
+            return __assign(__assign({}, sections), { preprocessor: sections.preprocessor.concat({ nodeId: nodeId, source: node }) });
         }
         else if (node.type === 'declaration_statement' &&
             node.declaration.type === 'declarator_list' &&
             ((_d = (_c = (_b = (_a = node.declaration) === null || _a === void 0 ? void 0 : _a.specified_type) === null || _b === void 0 ? void 0 : _b.specifier) === null || _c === void 0 ? void 0 : _c.specifier) === null || _d === void 0 ? void 0 : _d.type) === 'struct') {
-            return __assign(__assign({}, sections), { structs: sections.structs.concat(node) });
+            return __assign(__assign({}, sections), { structs: sections.structs.concat({ nodeId: nodeId, source: node }) });
             // This definition of a uniform lines up with the processing we do in
             // dedupeUniforms
         }
@@ -232,20 +306,20 @@ export var findShaderSections = function (ast) {
             ((_g = node.declaration.specified_type.qualifiers) === null || _g === void 0 ? void 0 : _g.find(function (n) { return 'token' in n && n.token === 'uniform'; }))) ||
             ('qualifiers' in node.declaration &&
                 ((_j = (_h = node.declaration) === null || _h === void 0 ? void 0 : _h.qualifiers) === null || _j === void 0 ? void 0 : _j.find(function (n) { return 'token' in n && n.token === 'uniform'; })))) {
-            return __assign(__assign({}, sections), { uniforms: sections.uniforms.concat(node) });
+            return __assign(__assign({}, sections), { uniforms: sections.uniforms.concat({ nodeId: nodeId, source: node }) });
         }
         else if (node.type === 'declaration_statement' &&
             'specified_type' in node.declaration &&
             ((_m = (_l = (_k = node.declaration) === null || _k === void 0 ? void 0 : _k.specified_type) === null || _l === void 0 ? void 0 : _l.qualifiers) === null || _m === void 0 ? void 0 : _m.find(function (n) { return 'token' in n && n.token === 'in'; }))) {
-            return __assign(__assign({}, sections), { inStatements: sections.inStatements.concat(node) });
+            return __assign(__assign({}, sections), { inStatements: sections.inStatements.concat({ nodeId: nodeId, source: node }) });
         }
         else if (node.type === 'declaration_statement' &&
             'specified_type' in node.declaration &&
             ((_q = (_p = (_o = node.declaration) === null || _o === void 0 ? void 0 : _o.specified_type) === null || _p === void 0 ? void 0 : _p.qualifiers) === null || _q === void 0 ? void 0 : _q.find(function (n) { return 'token' in n && n.token === 'out'; }))) {
-            return __assign(__assign({}, sections), { outStatements: sections.outStatements.concat(node) });
+            return __assign(__assign({}, sections), { outStatements: sections.outStatements.concat({ nodeId: nodeId, source: node }) });
         }
         else {
-            return __assign(__assign({}, sections), { program: sections.program.concat(node) });
+            return __assign(__assign({}, sections), { program: sections.program.concat({ nodeId: nodeId, source: node }) });
         }
     }, initialValue);
 };
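Since findShaderSections now tags every statement with the contributing node's id, one node's lines can be stripped from a merged program before generation. A sketch of that flow using the functions from this file, with the relative import path as an assumption for illustration:

import { parser, generate } from '@shaderfrog/glsl-parser';
import {
  findShaderSections,
  mergeShaderSections,
  filterSections,
  shaderSectionsToProgram,
} from './shader-sections';

const a = findShaderSections('nodeA', parser.parse('uniform float time;'));
const b = findShaderSections('nodeB', parser.parse('uniform vec2 mouse;'));
const merged = mergeShaderSections(a, b);

// Remove everything "nodeB" contributed, then emit the deduped program.
const withoutB = filterSections((l) => l.nodeId !== 'nodeB', merged);
console.log(generate(shaderSectionsToProgram(withoutB, {
  includePrecisions: true,
  includeVersion: true,
})));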
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@shaderfrog/core",
-  "version": "2.0.1",
+  "version": "3.0.1",
   "description": "Shaderfrog core",
   "type": "module",
   "files": [
@@ -34,7 +34,7 @@
     "@babel/core": "^7.21.8",
     "@babel/preset-env": "^7.21.5",
     "@babel/preset-typescript": "^7.21.5",
-    "@shaderfrog/glsl-parser": "^
+    "@shaderfrog/glsl-parser": "^6.0.0-beta.7",
     "@swc/core": "^1.6.7",
     "@types/lodash.groupby": "^4.6.7",
     "@types/three": "^0.169.0",
@@ -48,7 +48,7 @@
     "lodash.groupby": "^4.6.0"
   },
   "peerDependencies": {
-    "@shaderfrog/glsl-parser": "^
+    "@shaderfrog/glsl-parser": "^6.0.0-beta.7",
     "babylonjs": ">=4",
     "playcanvas": "^1.65.3",
     "three": ">=0.50"