@shaderfrog/core 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.json +3 -0
- package/.prettierrc.js +3 -0
- package/README.md +3 -0
- package/babel.config.js +6 -0
- package/package.json +47 -0
- package/src/ast/manipulate.ts +392 -0
- package/src/ast/shader-sections.ts +323 -0
- package/src/core/engine.ts +214 -0
- package/src/core/file.js +53 -0
- package/src/core/graph.ts +1007 -0
- package/src/core/nodes/code-nodes.ts +66 -0
- package/src/core/nodes/core-node.ts +48 -0
- package/src/core/nodes/data-nodes.ts +344 -0
- package/src/core/nodes/edge.ts +23 -0
- package/src/core/nodes/engine-node.ts +266 -0
- package/src/core/strategy.ts +520 -0
- package/src/core.test.ts +312 -0
- package/src/plugins/babylon/bablyengine.ts +670 -0
- package/src/plugins/babylon/examples.ts +512 -0
- package/src/plugins/babylon/importers.ts +69 -0
- package/src/plugins/babylon/index.ts +6 -0
- package/src/plugins/three/examples.ts +680 -0
- package/src/plugins/three/importers.ts +18 -0
- package/src/plugins/three/index.ts +6 -0
- package/src/plugins/three/threngine.tsx +571 -0
- package/src/util/ensure.ts +10 -0
- package/src/util/id.ts +2 -0
|
@@ -0,0 +1,323 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Categorizing / deduping parts of shaders to help merge them together
|
|
3
|
+
*/
|
|
4
|
+
import {
|
|
5
|
+
AstNode,
|
|
6
|
+
DeclarationStatementNode,
|
|
7
|
+
PreprocessorNode,
|
|
8
|
+
} from '@shaderfrog/glsl-parser/ast';
|
|
9
|
+
import { generate } from '@shaderfrog/glsl-parser';
|
|
10
|
+
import { makeStatement } from './manipulate';
|
|
11
|
+
import { Program } from '@shaderfrog/glsl-parser/ast';
|
|
12
|
+
/**
 * The categorized parts of one shader program. findShaderSections produces
 * this shape, and the dedupe/merge helpers below consume it to combine
 * multiple shaders into a single program.
 */
export interface ShaderSections {
  // "precision highp float;" style statements
  precision: DeclarationStatementNode[];
  // "#version" preprocessor directives
  version: AstNode[];
  // All other preprocessor lines (#define, #if, ...)
  preprocessor: PreprocessorNode[];
  // struct definitions — emitted before ins/uniforms since those may reference them
  structs: AstNode[];
  // "in"-qualified declaration statements
  inStatements: DeclarationStatementNode[];
  // "out"-qualified declaration statements
  outStatements: DeclarationStatementNode[];
  // uniform declarations (both plain and interface-block forms)
  uniforms: DeclarationStatementNode[];
  // Everything else: the main body of the program
  program: AstNode[];
}
|
|
22
|
+
|
|
23
|
+
export const emptyShaderSections = (): ShaderSections => ({
|
|
24
|
+
precision: [],
|
|
25
|
+
preprocessor: [],
|
|
26
|
+
version: [],
|
|
27
|
+
structs: [],
|
|
28
|
+
program: [],
|
|
29
|
+
inStatements: [],
|
|
30
|
+
outStatements: [],
|
|
31
|
+
uniforms: [],
|
|
32
|
+
});
|
|
33
|
+
|
|
34
|
+
// Numeric rank for each GLSL precision qualifier: higher value = higher precision.
enum Precision {
  highp = 2,
  mediump = 1,
  lowp = 0,
}

/**
 * Return whichever of two precision qualifiers is the higher one.
 * NOTE(review): despite the Precision typing, the call site (highestPrecisions)
 * passes the qualifier *token strings* ("highp" etc.), so Precision[p1] is the
 * string-keyed lookup of the numeric rank. When p1 is undefined (first time a
 * type is seen) the comparison is false and p2 wins.
 */
export const higherPrecision = (p1: Precision, p2: Precision): Precision =>
  Precision[p1] > Precision[p2] ? p1 : p2;

// Collapse duplicate #version directives by keeping only the first one.
// Returns undefined when the list is empty — callers must guard for that.
export const dedupeVersions = (nodes: AstNode[]): AstNode => nodes[0];
|
|
44
|
+
export const highestPrecisions = (
|
|
45
|
+
nodes: DeclarationStatementNode[]
|
|
46
|
+
): DeclarationStatementNode[] =>
|
|
47
|
+
Object.entries(
|
|
48
|
+
nodes.reduce(
|
|
49
|
+
(precisions, stmt) => ({
|
|
50
|
+
...precisions,
|
|
51
|
+
// Like "float"
|
|
52
|
+
[stmt.declaration.specifier.specifier.token]: higherPrecision(
|
|
53
|
+
precisions[stmt.declaration.specifier.specifier.token],
|
|
54
|
+
stmt.declaration.qualifier.token
|
|
55
|
+
),
|
|
56
|
+
}),
|
|
57
|
+
{} as { [type: string]: Precision }
|
|
58
|
+
)
|
|
59
|
+
).map(
|
|
60
|
+
([typeName, precision]) =>
|
|
61
|
+
makeStatement(
|
|
62
|
+
`precision ${precision} ${typeName}`
|
|
63
|
+
) as DeclarationStatementNode
|
|
64
|
+
);
|
|
65
|
+
|
|
66
|
+
export const dedupeQualifiedStatements = (
|
|
67
|
+
statements: DeclarationStatementNode[],
|
|
68
|
+
qualifier: string
|
|
69
|
+
): any =>
|
|
70
|
+
Object.entries(
|
|
71
|
+
statements.reduce(
|
|
72
|
+
(stmts, stmt) => ({
|
|
73
|
+
...stmts,
|
|
74
|
+
// Like "vec2"
|
|
75
|
+
[stmt.declaration.specified_type.specifier.specifier.token]: {
|
|
76
|
+
...(stmts[
|
|
77
|
+
stmt.declaration.specified_type.specifier.specifier.token
|
|
78
|
+
] || {}),
|
|
79
|
+
...stmt.declaration.declarations.reduce(
|
|
80
|
+
(types: { [typeName: string]: string }, decl: any) => ({
|
|
81
|
+
...types,
|
|
82
|
+
[decl.identifier.identifier]: true,
|
|
83
|
+
}),
|
|
84
|
+
{} as { [typeName: string]: string }
|
|
85
|
+
),
|
|
86
|
+
},
|
|
87
|
+
}),
|
|
88
|
+
{} as { [key: string]: AstNode }
|
|
89
|
+
)
|
|
90
|
+
).map(([type, varNames]) =>
|
|
91
|
+
makeStatement(`${qualifier} ${type} ${Object.keys(varNames).join(', ')}`)
|
|
92
|
+
);
|
|
93
|
+
|
|
94
|
+
// Per-type map of uniform variable name → the source text to re-emit for it
// ("generated") and whether that text came from an interface block definition
type UniformName = Record<string, { generated: string; hasInterface: boolean }>;
// UniformName maps grouped by GLSL type name (or interface block name)
type UniformGroup = Record<string, UniformName>;

/**
 * Merge uniforms together into lists of identifiers under the same type.
 * There's special case handling for mixing of uniforms with "interface blocks"
 * and those without when merging to make sure the interface block definition is
 * preserved. Check out the tests for more.
 *
 * This function consumes uniforms as found by findShaderSections, so the
 * definitions must line up
 */
export const dedupeUniforms = (statements: DeclarationStatementNode[]): any => {
  const groupedByTypeName = Object.entries(
    statements.reduce<UniformGroup>((stmts, stmt) => {
      const { specified_type } = stmt.declaration;
      const { identifier, interface_type } = stmt.declaration;

      // This is the standard case, a uniform like "uniform vec2 x"
      if (specified_type) {
        const { specifier } = specified_type.specifier;
        // Token is for "vec2", "identifier" is for custom names like struct
        const type = (specifier.token || specifier.identifier) as string;

        // Groups uniforms into their return type, and for each type, collapses
        // uniform names into an object where the keys determine uniqueness
        // "vec2": { x: x[1] }
        const grouped = (
          stmt.declaration.declarations as any[]
        ).reduce<UniformName>(
          (types, decl) => ({
            ...types,
            // There's probably a bug here where one shader declares x[1],
            // another declares x[2], they both get collapsed under "x",
            // and one is wrong
            // An existing interface-block entry for this name wins over a
            // plain redeclaration, so the block definition is preserved
            [decl.identifier.identifier as string]: stmts[type]?.[
              decl.identifier.identifier as string
            ]?.hasInterface
              ? stmts[type]?.[decl.identifier.identifier as string]
              : {
                  hasInterface: false,
                  // Re-emit the name, re-attaching an array quantifier like
                  // "[3]" when the declaration had one
                  generated:
                    decl.identifier.identifier +
                    (decl.quantifier
                      ? `[${decl.quantifier.specifiers[0].expression.token}]`
                      : ''),
                },
          }),
          {}
        );

        return {
          ...stmts,
          [type]: {
            ...(stmts[type] || {}),
            ...grouped,
          },
        };
        // This is the less common case, a uniform like "uniform Light { vec3 position; } name"
      } else if (interface_type) {
        // If this is an interface block only, like uniform Scene { mat4 view; };
        // then group the interface block declaration under ''
        const interfaceDeclaredUniform =
          (identifier?.identifier?.identifier as string) || '';
        return {
          ...stmts,
          [interface_type.identifier as string]: {
            [interfaceDeclaredUniform]: {
              // Regenerate the "{ ... } name" body so it can be emitted after
              // "uniform <BlockName>" by the final map below
              generated: `${generate({
                type: 'interface_declarator',
                lp: stmt.declaration.lp,
                declarations: stmt.declaration.declarations,
                qualifiers: null,
                interface_type: null,
                rp: stmt.declaration.rp,
              })}${interfaceDeclaredUniform}`,
              hasInterface: true,
            },
          },
        };
      } else {
        // Neither a plain typed uniform nor an interface block — bail loudly
        console.error('Unknown uniform AST', { stmt, code: generate(stmt) });
        throw new Error(
          'Unknown uniform AST encountered when merging uniforms'
        );
      }
    }, {})
  );

  // Emit one "uniform <type> a, b, c" statement per type group
  return groupedByTypeName.map(([type, variables]) => {
    return makeStatement(
      `uniform ${type} ${Object.values(variables)
        .map((v) => v.generated)
        .join(', ')}`
    );
  });
};
|
|
191
|
+
|
|
192
|
+
export const mergeShaderSections = (
|
|
193
|
+
s1: ShaderSections,
|
|
194
|
+
s2: ShaderSections
|
|
195
|
+
): ShaderSections => {
|
|
196
|
+
return {
|
|
197
|
+
version: [...s1.version, ...s2.version],
|
|
198
|
+
precision: [...s1.precision, ...s2.precision],
|
|
199
|
+
preprocessor: [...s1.preprocessor, ...s2.preprocessor],
|
|
200
|
+
inStatements: [...s1.inStatements, ...s2.inStatements],
|
|
201
|
+
outStatements: [...s1.outStatements, ...s2.outStatements],
|
|
202
|
+
structs: [...s1.structs, ...s2.structs],
|
|
203
|
+
uniforms: [...s1.uniforms, ...s2.uniforms],
|
|
204
|
+
program: [...s1.program, ...s2.program],
|
|
205
|
+
};
|
|
206
|
+
};
|
|
207
|
+
|
|
208
|
+
// Flags controlling which prelude statements shaderSectionsToProgram emits
export type MergeOptions = {
  // Whether to emit the merged (highest-wins) precision statements
  includePrecisions: boolean;
  // Whether to emit the deduped (first-wins) "#version" directive
  includeVersion: boolean;
};
|
|
212
|
+
|
|
213
|
+
export const shaderSectionsToProgram = (
|
|
214
|
+
sections: ShaderSections,
|
|
215
|
+
mergeOptions: MergeOptions
|
|
216
|
+
): Program => ({
|
|
217
|
+
type: 'program',
|
|
218
|
+
scopes: [],
|
|
219
|
+
program: [
|
|
220
|
+
...(mergeOptions.includeVersion ? [dedupeVersions(sections.version)] : []),
|
|
221
|
+
...(mergeOptions.includePrecisions
|
|
222
|
+
? highestPrecisions(sections.precision)
|
|
223
|
+
: []),
|
|
224
|
+
...sections.preprocessor,
|
|
225
|
+
// Structs before ins and uniforms as they can reference structs
|
|
226
|
+
...sections.structs,
|
|
227
|
+
...dedupeQualifiedStatements(sections.inStatements, 'in'),
|
|
228
|
+
...dedupeQualifiedStatements(sections.outStatements, 'out'),
|
|
229
|
+
...dedupeUniforms(sections.uniforms),
|
|
230
|
+
...sections.program,
|
|
231
|
+
],
|
|
232
|
+
});
|
|
233
|
+
|
|
234
|
+
/**
|
|
235
|
+
* Group an AST into logical sections. The output of this funciton is consumed
|
|
236
|
+
* by the dedupe methods, namely dedupeUniforms, so the data shapes are coupled
|
|
237
|
+
*/
|
|
238
|
+
export const findShaderSections = (ast: Program): ShaderSections => {
|
|
239
|
+
const initialValue: ShaderSections = {
|
|
240
|
+
precision: [],
|
|
241
|
+
preprocessor: [],
|
|
242
|
+
version: [],
|
|
243
|
+
structs: [],
|
|
244
|
+
inStatements: [],
|
|
245
|
+
outStatements: [],
|
|
246
|
+
uniforms: [],
|
|
247
|
+
program: [],
|
|
248
|
+
};
|
|
249
|
+
|
|
250
|
+
return ast.program.reduce((sections, node) => {
|
|
251
|
+
if (node.type === 'preprocessor' && node.line.startsWith('#version')) {
|
|
252
|
+
return {
|
|
253
|
+
...sections,
|
|
254
|
+
version: sections.version.concat(node),
|
|
255
|
+
};
|
|
256
|
+
} else if (
|
|
257
|
+
node.type === 'declaration_statement' &&
|
|
258
|
+
node.declaration.type === 'precision'
|
|
259
|
+
) {
|
|
260
|
+
return {
|
|
261
|
+
...sections,
|
|
262
|
+
precision: sections.precision.concat(node),
|
|
263
|
+
};
|
|
264
|
+
} else if (node.type === 'preprocessor') {
|
|
265
|
+
return {
|
|
266
|
+
...sections,
|
|
267
|
+
preprocessor: sections.preprocessor.concat(node),
|
|
268
|
+
};
|
|
269
|
+
} else if (
|
|
270
|
+
node.type === 'declaration_statement' &&
|
|
271
|
+
node.declaration?.specified_type?.specifier?.specifier?.type === 'struct'
|
|
272
|
+
) {
|
|
273
|
+
return {
|
|
274
|
+
...sections,
|
|
275
|
+
structs: sections.structs.concat(node),
|
|
276
|
+
};
|
|
277
|
+
// This definition of a uniform lines up with the processing we do in
|
|
278
|
+
// dedupeUniforms
|
|
279
|
+
} else if (
|
|
280
|
+
node.type === 'declaration_statement' &&
|
|
281
|
+
// Ignore lines like "layout(std140,column_major) uniform;"
|
|
282
|
+
!node.declaration?.qualifiers?.find(
|
|
283
|
+
(q: any) => q.layout?.token === 'layout'
|
|
284
|
+
) &&
|
|
285
|
+
// One of these checks is for a uniform with an interface block, and the
|
|
286
|
+
// other is for vanilla uniforms. I don't remember which is which
|
|
287
|
+
(node.declaration?.specified_type?.qualifiers?.find(
|
|
288
|
+
(n: any) => n.token === 'uniform'
|
|
289
|
+
) ||
|
|
290
|
+
node.declaration?.qualifiers?.find((n: any) => n.token === 'uniform'))
|
|
291
|
+
) {
|
|
292
|
+
return {
|
|
293
|
+
...sections,
|
|
294
|
+
uniforms: sections.uniforms.concat(node),
|
|
295
|
+
};
|
|
296
|
+
} else if (
|
|
297
|
+
node.type === 'declaration_statement' &&
|
|
298
|
+
node.declaration?.specified_type?.qualifiers?.find(
|
|
299
|
+
(n: any) => n.token === 'in'
|
|
300
|
+
)
|
|
301
|
+
) {
|
|
302
|
+
return {
|
|
303
|
+
...sections,
|
|
304
|
+
inStatements: sections.inStatements.concat(node),
|
|
305
|
+
};
|
|
306
|
+
} else if (
|
|
307
|
+
node.type === 'declaration_statement' &&
|
|
308
|
+
node.declaration?.specified_type?.qualifiers?.find(
|
|
309
|
+
(n: any) => n.token === 'out'
|
|
310
|
+
)
|
|
311
|
+
) {
|
|
312
|
+
return {
|
|
313
|
+
...sections,
|
|
314
|
+
outStatements: sections.outStatements.concat(node),
|
|
315
|
+
};
|
|
316
|
+
} else {
|
|
317
|
+
return {
|
|
318
|
+
...sections,
|
|
319
|
+
program: sections.program.concat(node),
|
|
320
|
+
};
|
|
321
|
+
}
|
|
322
|
+
}, initialValue);
|
|
323
|
+
};
|
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
import { Program } from '@shaderfrog/glsl-parser/ast';
|
|
2
|
+
import { AstNode } from '@shaderfrog/glsl-parser/ast';
|
|
3
|
+
import { MergeOptions } from '../ast/shader-sections';
|
|
4
|
+
import { Graph, NodeParser } from './graph';
|
|
5
|
+
import preprocess from '@shaderfrog/glsl-parser/preprocessor';
|
|
6
|
+
import { generate, parser } from '@shaderfrog/glsl-parser';
|
|
7
|
+
import { ShaderStage, GraphNode, NodeType } from './graph';
|
|
8
|
+
import { CoreNode, NodeInput, NodePosition } from './nodes/core-node';
|
|
9
|
+
import { DataNode, UniformDataType } from './nodes/data-nodes';
|
|
10
|
+
import { CodeNode, SourceNode } from './nodes/code-nodes';
|
|
11
|
+
import { Edge } from './nodes/edge';
|
|
12
|
+
import groupBy from 'lodash.groupby';
|
|
13
|
+
|
|
14
|
+
const log = (...args: any[]) =>
|
|
15
|
+
console.log.call(console, '\x1b[32m(core)\x1b[0m', ...args);
|
|
16
|
+
|
|
17
|
+
// Node types an engine can provide. String values matching the keys let
// plain `node.type in EngineNodeType` membership checks work (see
// convertToEngine below).
export enum EngineNodeType {
  toon = 'toon',
  phong = 'phong',
  physical = 'physical',
  shader = 'shader',
  binary = 'binary',
}
|
|
24
|
+
|
|
25
|
+
/**
 * Factory an engine implements to build its "physical" material node.
 * NOTE(review): identical signature to ToonNodeConstructor — presumably kept
 * as two names so they can diverge later; confirm before collapsing.
 */
export type PhysicalNodeConstructor = (
  id: string,
  name: string,
  groupId: string | null | undefined,
  position: NodePosition,
  uniforms: UniformDataType[],
  stage: ShaderStage | undefined,
  nextStageNodeId?: string
) => CodeNode;

/**
 * Factory an engine implements to build its "toon" material node.
 */
export type ToonNodeConstructor = (
  id: string,
  name: string,
  groupId: string | null | undefined,
  position: NodePosition,
  uniforms: UniformDataType[],
  stage: ShaderStage | undefined,
  nextStageNodeId?: string
) => CodeNode;
|
|
44
|
+
|
|
45
|
+
/**
 * Everything a rendering engine plugin (three/babylon/...) provides to the
 * core graph compiler.
 */
export interface Engine {
  // Unique engine name — used as the key when other engines look up an
  // importer for this engine (see convertToEngine)
  name: string;
  // Identifier names to leave untouched — presumably during AST processing;
  // TODO(review): confirm against graph.ts usage
  preserve: Set<string>;
  // How shader sections are merged for this engine (version/precision flags)
  mergeOptions: MergeOptions;
  // Component: FunctionComponent<{ engine: Engine; parsers: NodeParsers }>;
  // nodes: NodeParsers;
  // Node parsers keyed by node type
  parsers: Record<string, NodeParser>;
  // Importers for converting graphs authored for other engines, keyed by the
  // source engine's name
  importers: EngineImporters;
  // Evaluate a data node to an engine-specific runtime value
  evaluateNode: (node: DataNode) => any;
  // Factories for this engine's built-in material node types
  constructors: {
    [EngineNodeType.physical]: PhysicalNodeConstructor;
    [EngineNodeType.toon]: ToonNodeConstructor;
  };
}
|
|
59
|
+
|
|
60
|
+
// Per-node state built while compiling a graph: the node's parsed AST and
// (optionally) its generated source and discovered inputs
export type NodeContext = {
  ast: AstNode | Program;
  source?: string;
  // Inputs are determined at parse time and should probably be in the graph,
  // not here on the runtime context for the node
  inputs?: NodeInput[];
  id?: string;
  name?: string;
};
|
|
69
|
+
|
|
70
|
+
// The context an engine builds as it evaluates. It can manage its own state
// as the generic "RuntimeContext" which is passed to implemented engine methods
export type EngineContext = {
  // Name of the engine that produced this context
  engine: string;
  // Per-node contexts — presumably keyed by node id; verify against graph.ts
  nodes: Record<string, NodeContext>;
  // Engine-specific runtime state, opaque to core (hence `any`)
  runtime: any;
  // Scratch space for captured shader source at various compile stages,
  // kept only for debugging
  debuggingNonsense: {
    vertexSource?: string;
    vertexPreprocessed?: string;
    fragmentPreprocessed?: string;
    fragmentSource?: string;
  };
};
|
|
83
|
+
|
|
84
|
+
// Converts graphs authored for another engine into this engine's format
export type EngineImporter = {
  // Rewrite a parsed GLSL AST for this engine. Mutates the AST in place
  // (void return; callers regenerate source from the same ast — see convertNode)
  convertAst(ast: Program, type?: ShaderStage): void;
  // Per engine-node-type map from the old engine's input names to this
  // engine's input names. NOTE(review): convertToEngine applies a null
  // mapping value as-is via a non-null assertion — confirm intent
  nodeInputMap: Partial<Record<EngineNodeType, Record<string, string | null>>>;
  // Map from the old engine's edge input names to this engine's names
  edgeMap: { [oldInput: string]: string };
};
// Importers keyed by the name of the source engine they import from
export type EngineImporters = {
  [engine: string]: EngineImporter;
};
|
|
92
|
+
|
|
93
|
+
// NOTE(review): currently unused — convertToEngine declares its own local
// edgeUpdates map of a different shape; this alias matches a commented-out
// earlier approach there
type EdgeUpdates = { [edgeId: string]: { oldInput: string; newInput: string } };
|
|
94
|
+
|
|
95
|
+
export const convertNode = (
|
|
96
|
+
node: SourceNode,
|
|
97
|
+
converter: EngineImporter
|
|
98
|
+
): SourceNode => {
|
|
99
|
+
log(`Converting ${node.name} (${node.id})`);
|
|
100
|
+
const preprocessed = preprocess(node.source, {
|
|
101
|
+
preserveComments: true,
|
|
102
|
+
preserve: {
|
|
103
|
+
version: () => true,
|
|
104
|
+
define: () => true,
|
|
105
|
+
},
|
|
106
|
+
});
|
|
107
|
+
const ast = parser.parse(preprocessed);
|
|
108
|
+
converter.convertAst(ast, node.stage);
|
|
109
|
+
const source = generate(ast);
|
|
110
|
+
|
|
111
|
+
return {
|
|
112
|
+
...node,
|
|
113
|
+
source,
|
|
114
|
+
};
|
|
115
|
+
};
|
|
116
|
+
|
|
117
|
+
export const convertToEngine = (
|
|
118
|
+
oldEngine: Engine,
|
|
119
|
+
newEngine: Engine,
|
|
120
|
+
graph: Graph
|
|
121
|
+
): Graph => {
|
|
122
|
+
const converter = newEngine.importers[oldEngine.name];
|
|
123
|
+
if (!converter) {
|
|
124
|
+
throw new Error(
|
|
125
|
+
`The engine ${newEngine.name} has no importer for ${oldEngine.name}`
|
|
126
|
+
);
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
log(`Attempting to convert from ${newEngine.name} to ${oldEngine.name}`);
|
|
130
|
+
|
|
131
|
+
// const edgeUpdates: EdgeUpdates = {};
|
|
132
|
+
|
|
133
|
+
const edgesByNodeId = groupBy(graph.edges, 'to');
|
|
134
|
+
const edgeUpdates: Record<string, Edge | null> = {};
|
|
135
|
+
const nodeUpdates: Record<string, GraphNode | null> = {};
|
|
136
|
+
|
|
137
|
+
graph.nodes.forEach((node) => {
|
|
138
|
+
// Convert engine nodes
|
|
139
|
+
if (node.type in EngineNodeType) {
|
|
140
|
+
if (node.type in newEngine.constructors) {
|
|
141
|
+
const source = node as SourceNode;
|
|
142
|
+
nodeUpdates[source.id] = // @ts-ignore
|
|
143
|
+
(newEngine.constructors[source.type] as PhysicalNodeConstructor)(
|
|
144
|
+
source.id,
|
|
145
|
+
source.name,
|
|
146
|
+
source.groupId,
|
|
147
|
+
source.position,
|
|
148
|
+
source.config.uniforms,
|
|
149
|
+
source.stage,
|
|
150
|
+
source.nextStageNodeId
|
|
151
|
+
);
|
|
152
|
+
// Bail if no conversion
|
|
153
|
+
} else {
|
|
154
|
+
throw new Error(
|
|
155
|
+
`Can't convert ${oldEngine.name} to ${newEngine.name} because ${newEngine.name} does not have a "${node.type}" constructor`
|
|
156
|
+
);
|
|
157
|
+
}
|
|
158
|
+
} else if (NodeType.SOURCE === node.type) {
|
|
159
|
+
nodeUpdates[node.id] = convertNode(node, converter);
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
// Then update input edges. We only care about engine nodes
|
|
163
|
+
if (node.type in converter.nodeInputMap) {
|
|
164
|
+
const map = converter.nodeInputMap[node.type as EngineNodeType]!;
|
|
165
|
+
|
|
166
|
+
(edgesByNodeId[node.id] || []).forEach((edge) => {
|
|
167
|
+
if (edge.input in map) {
|
|
168
|
+
const mapped = map[edge.input]!;
|
|
169
|
+
log('Converting edge', edge.input, 'to', map[edge.input]);
|
|
170
|
+
edgeUpdates[edge.id] = {
|
|
171
|
+
...edge,
|
|
172
|
+
input: mapped,
|
|
173
|
+
};
|
|
174
|
+
} else {
|
|
175
|
+
log(
|
|
176
|
+
'Discarding',
|
|
177
|
+
edge.input,
|
|
178
|
+
'as there is no edge mapping in the',
|
|
179
|
+
newEngine.name,
|
|
180
|
+
'importer'
|
|
181
|
+
);
|
|
182
|
+
edgeUpdates[edge.id] = null;
|
|
183
|
+
}
|
|
184
|
+
});
|
|
185
|
+
}
|
|
186
|
+
});
|
|
187
|
+
|
|
188
|
+
graph.edges = graph.edges.reduce<Edge[]>((edges, edge) => {
|
|
189
|
+
if (edge.id in edgeUpdates) {
|
|
190
|
+
const res = edgeUpdates[edge.id];
|
|
191
|
+
if (res === null) {
|
|
192
|
+
return edges;
|
|
193
|
+
} else {
|
|
194
|
+
return [...edges, res];
|
|
195
|
+
}
|
|
196
|
+
}
|
|
197
|
+
return [...edges, edge];
|
|
198
|
+
}, []);
|
|
199
|
+
|
|
200
|
+
graph.nodes = graph.nodes.reduce<GraphNode[]>((nodes, node) => {
|
|
201
|
+
if (node.id in nodeUpdates) {
|
|
202
|
+
const res = nodeUpdates[node.id];
|
|
203
|
+
if (res === null) {
|
|
204
|
+
return nodes;
|
|
205
|
+
} else {
|
|
206
|
+
return [...nodes, res];
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
return [...nodes, node];
|
|
210
|
+
}, []);
|
|
211
|
+
|
|
212
|
+
log('Created converted graph', graph);
|
|
213
|
+
return graph;
|
|
214
|
+
};
|
package/src/core/file.js
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
// Sample data: items sharing a pairId belong to the same pair
const items = [
  {
    pairId: 1,
    name: 'abc',
    count: 10,
  },
  {
    pairId: 1,
    name: 'xyz',
    count: 20,
  },
  {
    pairId: 2,
    name: 'abc',
    count: 15,
  },
  {
    pairId: 2,
    name: 'xyz',
    count: 25,
  },
  {
    pairId: 3,
    name: 'xyz',
    count: 25,
  },
];

// Group the items by pairId
const groups = items.reduce((byPairId, item) => {
  return {
    ...byPairId,
    [item.pairId]: [...(byPairId[item.pairId] || []), item],
  };
}, {});

// For each group with more than one element, append a summary entry copying
// the group's first item but carrying the sum of the group's counts.
// Object.values() iterates integer-like keys in ascending numeric order,
// which matches the original item order here since pairIds appear ascending.
const result = Object.values(groups).flatMap((group) => {
  if (group.length <= 1) {
    return group;
  }
  const total = group.reduce((sum, item) => sum + item.count, 0);
  return [...group, { ...group[0], count: total }];
});

console.log(result);
|