@highstate/backend 0.9.8 → 0.9.10
This diff shows the publicly released contents of these package versions as published to their public registries; it is provided for informational purposes only.
- package/dist/{chunk-NMGIUI6X.js → chunk-DQDXRDUA.js} +228 -96
- package/dist/chunk-DQDXRDUA.js.map +1 -0
- package/dist/{chunk-EQ4LMS7B.js → chunk-WXDYCRTT.js} +1 -57
- package/dist/chunk-WXDYCRTT.js.map +1 -0
- package/dist/highstate.manifest.json +3 -3
- package/dist/index.js +117 -118
- package/dist/index.js.map +1 -1
- package/dist/library/worker/main.js +1 -1
- package/dist/shared/index.js +11 -13
- package/package.json +3 -3
- package/src/common/utils.ts +0 -74
- package/src/library/abstractions.ts +0 -5
- package/src/orchestrator/operation-workset.ts +64 -59
- package/src/orchestrator/operation.ts +3 -19
- package/src/project/manager.ts +66 -61
- package/src/shared/async-batcher.ts +73 -0
- package/src/shared/index.ts +1 -0
- package/src/shared/resolvers/graph-resolver.ts +146 -79
- package/src/shared/resolvers/input-hash.ts +22 -17
- package/src/shared/resolvers/input.ts +29 -26
- package/src/shared/resolvers/registry.ts +19 -9
- package/src/shared/resolvers/validation.ts +12 -18
- package/src/state/abstractions.ts +2 -3
- package/src/state/local.ts +13 -6
- package/src/terminal/manager.ts +2 -2
- package/dist/chunk-EQ4LMS7B.js.map +0 -1
- package/dist/chunk-NMGIUI6X.js.map +0 -1
package/src/project/manager.ts
CHANGED
@@ -6,16 +6,16 @@ import type { ProjectLockManager } from "./lock"
 import { EventEmitter, on } from "node:events"
 import { isUnitModel, type InstanceModel } from "@highstate/contract"
 import {
-
-
-  type InputResolverInput,
-  type InputHashResolverInput,
+  type InputResolverNode,
+  type InputHashNode,
   type InstanceModelPatch,
   type LibraryUpdate,
   createInstanceState,
   type CompositeInstance,
   type ResolvedInstanceInput,
   type HubModel,
+  InputResolver,
+  InputHashResolver,
 } from "../shared"

 type CompositeInstanceEvent =
@@ -31,6 +31,10 @@ type CompositeInstanceEvent =
       type: "deleted"
       instanceId: string
     }
+  | {
+      type: "failed"
+      instanceId: string
+    }

 type CompositeInstanceEvents = {
   [K in string]: [CompositeInstanceEvent]
@@ -98,7 +102,7 @@ export class ProjectManager {

   async createInstance(projectId: string, instance: InstanceModel): Promise<InstanceModel> {
     const createdInstance = await this.projectBackend.createInstance(projectId, instance)
-    await this.
+    await this.evaluateChangedCompositeInstances(projectId)

     return createdInstance
   }
@@ -109,7 +113,7 @@ export class ProjectManager {
     patch: InstanceModelPatch,
   ): Promise<InstanceModel> {
     const instance = await this.projectBackend.updateInstance(projectId, instanceId, patch)
-    await this.
+    await this.evaluateChangedCompositeInstances(projectId)

     return instance
   }
@@ -120,7 +124,7 @@ export class ProjectManager {
     newName: string,
   ): Promise<InstanceModel> {
     const instance = await this.projectBackend.renameInstance(projectId, instanceId, newName)
-    await this.
+    await this.evaluateChangedCompositeInstances(projectId)

     return instance
   }
@@ -132,32 +136,22 @@ export class ProjectManager {
     ])
   }

-  private async
-    const {
-
-    const component = library.components[instance.type]
-    if (!component) {
-      return
-    }
-
-    if (isUnitModel(component)) {
-      return
-    }
-
-    const { inputHash: expectedInputHash } = await resolveInputHash(instance.id)
-    const inputHash = await this.stateBackend.getCompositeInstanceInputHash(projectId, instance.id)
+  private async evaluateChangedCompositeInstances(projectId: string): Promise<void> {
+    const { inputHashResolver, instances, library, evaluatedInputHashes } =
+      await this.prepareResolvers(projectId)

-
-
-      projectId,
-      instanceId: instance.id,
-    })
+    inputHashResolver.addAllNodesToWorkset()
+    await inputHashResolver.process()

-
-
-
+    const instanceIds = instances
+      .filter(instance => !isUnitModel(library.components[instance.type]))
+      .filter(
+        instance =>
+          evaluatedInputHashes[instance.id] !==
+          inputHashResolver.requireOutput(instance.id).inputHash,
+      )
+      .map(instance => instance.id)

-  private async evaluateCompositeInstances(projectId: string, instanceIds: string[]) {
     await this.projectLockManager.getLock(projectId).lockInstances(instanceIds, async () => {
       this.logger.debug({ instanceIds }, "evaluating composite instances")

@@ -166,10 +160,10 @@ export class ProjectManager {
       }

       const [
-        { instances, resolvedInputs, stateMap,
+        { instances, resolvedInputs, stateMap, inputHashResolver },
         topLevelCompositeChildrenIds,
       ] = await Promise.all([
-        this.
+        this.prepareResolvers(projectId),
         this.stateBackend.getTopLevelCompositeChildrenIds(projectId, instanceIds),
       ])

@@ -188,18 +182,19 @@ export class ProjectManager {
         return newState
       })

-
-
-        const { inputHash } = await resolveInputHash(instanceId)
-        inputHashes.set(instanceId, inputHash)
-      }
+      inputHashResolver.addAllNodesToWorkset()
+      await inputHashResolver.process()

       const compositeInstances = results
         .filter(result => result.success)
         .flatMap(result =>
           result.compositeInstances.map(instance => ({
             ...instance,
-            inputHash:
+            inputHash:
+              // only store inputHash for top-level composite instances
+              instance.instance.id === result.instanceId
+                ? inputHashResolver.requireOutput(instance.instance.id).inputHash
+                : "",
           })),
         )

@@ -235,6 +230,18 @@ export class ProjectManager {

       for (const state of newStates) {
         this.stateManager.emitStatePatch(projectId, state)
+
+        if (state.evaluationError) {
+          this.logger.error(
+            { projectId, instanceId: state.id, error: state.evaluationError },
+            "instance evaluation failed",
+          )
+
+          this.compositeInstanceEE.emit(projectId, {
+            type: "failed",
+            instanceId: state.id,
+          })
+        }
       }

       for (const instance of compositeInstances) {
@@ -270,16 +277,18 @@ export class ProjectManager {
     })
   }

-  private async
-    const { instances, hubs } = await
+  private async prepareResolvers(projectId: string) {
+    const [{ instances, hubs }, states, library, evaluatedInputHashes] = await Promise.all([
+      this.projectBackend.getProject(projectId),
+      this.stateBackend.getAllInstanceStates(projectId),
+      this.libraryBackend.loadLibrary(),
+      this.stateBackend.getCompositeInstanceInputHashes(projectId),
+    ])

-    const library = await this.libraryBackend.loadLibrary()
     const filteredInstances = instances.filter(instance => instance.type in library.components)
-
-    const states = await this.stateBackend.getAllInstanceStates(projectId)
     const stateMap = new Map(states.map(state => [state.id, state]))

-    const inputResolverNodes = new Map<string,
+    const inputResolverNodes = new Map<string, InputResolverNode>()

     for (const instance of filteredInstances) {
       inputResolverNodes.set(`instance:${instance.id}`, {
@@ -293,13 +302,16 @@ export class ProjectManager {
       inputResolverNodes.set(`hub:${hub.id}`, { kind: "hub", hub })
     }

-    const
+    const inputResolver = new InputResolver(inputResolverNodes, this.logger)
+    inputResolver.addAllNodesToWorkset()

-    const inputHashInputs = new Map<string,
+    const inputHashInputs = new Map<string, InputHashNode>()
     const resolvedInputs: Record<string, Record<string, ResolvedInstanceInput[]>> = {}

+    await inputResolver.process()
+
     for (const instance of filteredInstances) {
-      const output =
+      const output = inputResolver.requireOutput(`instance:${instance.id}`)
       if (output.kind !== "instance") {
         throw new Error("Expected instance node")
       }
@@ -328,14 +340,16 @@ export class ProjectManager {
       resolvedInputs[instance.id] = output.resolvedInputs
     }

-    const
+    const inputHashResolver = new InputHashResolver(inputHashInputs, this.logger)

     return {
-
+      inputHashInputs,
+      inputHashResolver,
       library,
-      instances,
+      instances: filteredInstances,
       stateMap,
       resolvedInputs,
+      evaluatedInputHashes,
     }
   }

@@ -377,7 +391,7 @@ export class ProjectManager {

     const projects = await this.projectBackend.getProjectIds()
     for (const projectId of projects) {
-      const {
+      const { instances } = await this.prepareResolvers(projectId)

       const filteredInstances = instances.filter(
         instance =>
@@ -391,17 +405,8 @@ export class ProjectManager {
         "updating composite instances for project",
       )

-      const inputHashMap = new Map<string, string>()
-      for (const instance of filteredInstances) {
-        const { inputHash } = await resolveInputHash(instance.id)
-        inputHashMap.set(instance.id, inputHash)
-      }
-
      try {
-        await this.
-          projectId,
-          filteredInstances.map(instance => instance.id),
-        )
+        await this.evaluateChangedCompositeInstances(projectId)
      } catch (error) {
        this.logger.error({ error }, "failed to evaluate composite instances")
      }
package/src/shared/async-batcher.ts
ADDED
@@ -0,0 +1,73 @@
+export type AsyncBatcherOptions = {
+  waitMs?: number
+  maxWaitTimeMs?: number
+}
+
+export function createAsyncBatcher<T>(
+  fn: (items: T[]) => Promise<void> | void,
+  { waitMs = 100, maxWaitTimeMs = 1000 }: AsyncBatcherOptions = {},
+) {
+  let batch: T[] = []
+  let activeTimeout: NodeJS.Timeout | null = null
+  let maxWaitTimeout: NodeJS.Timeout | null = null
+  let firstCallTimestamp: number | null = null
+
+  async function processBatch() {
+    if (batch.length === 0) return
+
+    const currentBatch = batch
+    batch = [] // Reset batch before async call
+
+    await fn(currentBatch)
+
+    // Clear max wait timer since batch has been processed
+    if (maxWaitTimeout) {
+      clearTimeout(maxWaitTimeout)
+      maxWaitTimeout = null
+    }
+    firstCallTimestamp = null
+  }
+
+  function schedule() {
+    if (activeTimeout) clearTimeout(activeTimeout)
+    activeTimeout = setTimeout(() => {
+      activeTimeout = null
+      void processBatch()
+    }, waitMs)
+
+    // Ensure batch is executed within maxWaitTimeMs
+    if (!firstCallTimestamp) {
+      firstCallTimestamp = Date.now()
+      maxWaitTimeout = setTimeout(() => {
+        if (activeTimeout) clearTimeout(activeTimeout)
+        activeTimeout = null
+        void processBatch()
+      }, maxWaitTimeMs)
+    }
+  }
+
+  return {
+    /**
+     * Add an item to the batch.
+     */
+    call(item: T): void {
+      batch.push(item)
+      schedule()
+    },
+
+    /**
+     * Immediately flush the pending batch (if any).
+     */
+    async flush(): Promise<void> {
+      if (activeTimeout) {
+        clearTimeout(activeTimeout)
+        activeTimeout = null
+      }
+      if (maxWaitTimeout) {
+        clearTimeout(maxWaitTimeout)
+        maxWaitTimeout = null
+      }
+      await processBatch()
+    },
+  }
+}
package/src/shared/resolvers/graph-resolver.ts
CHANGED
@@ -1,111 +1,178 @@
 import type { Logger } from "pino"
 import { unique } from "remeda"

-export type
-  name: string
+export type ResolverOutputHandler<TOutput> = (id: string, value: TOutput) => void

-
+export abstract class GraphResolver<TNode, TOutput> {
+  private readonly workset: Set<string> = new Set()
+  private readonly dependentMap: Map<string, Set<string>> = new Map()
+  private readonly outputMap: Map<string, TOutput> = new Map()

-
+  constructor(
+    private readonly nodes: ReadonlyMap<string, TNode>,
+    protected readonly logger: Logger,
+    private readonly outputHandler?: ResolverOutputHandler<TOutput>,
+  ) {}

-
-
-
-  logger: Logger,
-): TOutput | Promise<TOutput>
-}
-
-export interface GraphResolverBackend<TOutput> {
-  promiseCache: Map<string, Promise<TOutput>>
-
-  setOutput?(id: string, value: TOutput): void
-  setDependencies?(id: string, dependencies: string[]): void
-}
-
-export type GraphResolverFactory<TNode, TOutput> = {
-  factoryName: string
-} & ((
-  nodes: ReadonlyMap<string, TNode>,
-  logger: Logger,
-  backend?: GraphResolverBackend<TOutput>,
-) => GraphResolver<TOutput>)
+  addToWorkset(nodeId: string): void {
+    this.workset.add(nodeId)
+  }

-
+  addAllNodesToWorkset(): void {
+    for (const nodeId of this.nodes.keys()) {
+      this.workset.add(nodeId)
+    }
+  }

-
-
-
-
+  /**
+   * The map of calculated outputs.
+   */
+  get outputs(): ReadonlyMap<string, TOutput> {
+    return this.outputMap
   }
-}

-
-
+  requireOutput(nodeId: string): TOutput {
+    const output = this.outputMap.get(nodeId)
+    if (!output) {
+      throw new Error(`Output for node ${nodeId} is not available`)
+    }

-
-    promiseCache,
+    return output
   }
-}
-
-export function defineGraphResolver<TInput, TOutput>(
-  options: GraphResolverOptions<TInput, TOutput>,
-): GraphResolverFactory<TInput, TOutput> {
-  const factory: GraphResolverFactory<TInput, TOutput> = (nodes, logger, backend) => {
-    backend ??= createDefaultGraphResolverBackend<TOutput>()
-    logger = logger.child({ resolver: options.name })

-
+  /**
+   * Gets the list of the identifiers of the dependencies for the node.
+   *
+   * Used to produce the dependency graph.
+   */
+  protected abstract getNodeDependencies(node: TNode): string[]
+
+  /**
+   * Processes the node and returns the output.
+   */
+  protected abstract processNode(node: TNode, logger: Logger): TOutput | Promise<TOutput>
+
+  /**
+   * Invalidates the node and all nodes that depend on it.
+   *
+   * Also adds the node to the work set for processing.
+   */
+  invalidate(nodeId: string): void {
+    const stack = [nodeId]
+
+    while (stack.length > 0) {
+      const nodeId = stack.pop()!
+      if (!this.nodes.has(nodeId)) {
+        // it is ok to invalidate deleted nodes
+        continue
+      }

-
-
+      // remove the node from the output map
+      this.outputMap.delete(nodeId)
+      this.workset.add(nodeId)

-    const
-    if (
-
+      const dependents = this.dependentMap.get(nodeId)
+      if (!dependents) {
+        continue
      }

-
-
+      for (const dependentId of dependents) {
+        if (this.outputMap.has(dependentId)) {
+          // add the dependent to the stack for further processing
+          stack.push(dependentId)
+        }
      }

-
-
-
-
+      // clear the dependent map for the node
+      this.dependentMap.delete(nodeId)
+    }
+  }

-
-
+  /**
+   * Resolves all not-resolved or invalidated nodes in the graph.
+   *
+   * The abort signal of the previous operation must be called before calling this method again.
+   */
+  async process(signal?: AbortSignal): Promise<void> {
+    while (this.workset.size > 0) {
+      const rootNodeId = this.workset.values().next().value!
+      const stack = [{ nodeId: rootNodeId, resolved: false, dependencies: [] as string[] }]
+
+      while (stack.length > 0) {
+        const stackItem = stack[stack.length - 1]
+        const { nodeId, resolved } = stackItem
+
+        const node = this.nodes.get(nodeId)
+        if (!node) {
+          this.logger.warn({ nodeId }, "node not found in the graph, skipping")
+          stack.pop()
+          continue
+        }

-
-
+        if (this.outputMap.has(nodeId)) {
+          // already processed
+          stack.pop()
+          continue
+        }

-
+        if (!resolved) {
+          stackItem.dependencies = unique(this.getNodeDependencies(node))
+
+          let hasUnresolvedDependencies = false
+
+          for (const depId of stackItem.dependencies) {
+            if (!this.nodes.has(depId)) {
+              this.logger.warn({ depId, nodeId }, "dependency not found in the graph, skipping")
+              continue
+            }
+
+            // if (stack.some(item => item.nodeId === depId)) {
+            //   this.logger.warn(
+            //     { depId, nodeId, stack },
+            //     "dependency is already in the stack, looks like a circular dependency, skipping",
+            //   )
+            //   continue
+            // }
+
+            if (!this.outputMap.has(depId)) {
+              stack.push({ nodeId: depId, resolved: false, dependencies: [] })
+              hasUnresolvedDependencies = true
+            }
+          }
+
+          if (hasUnresolvedDependencies) {
+            // wait for dependencies to be resolved
+            stackItem.resolved = true
+            continue
+          }
+        }

-
-
+        // all dependencies are resolved, process the node
+        const output = await this.processNode(node, this.logger)
+
+        if (signal?.aborted) {
+          this.logger.warn({ nodeId }, "processing aborted, skipping output")
+          return
        }

-
-
+        // update the dependent map
+        for (const depId of stackItem.dependencies) {
+          let dependantSet = this.dependentMap.get(depId)
+          if (!dependantSet) {
+            dependantSet = new Set()
+            this.dependentMap.set(depId, dependantSet)
+          }

-
-      if (backend.promiseCache.get(itemId) === promise) {
-        // persist the result to the state if provided
-        // ignore if the promise has been replaced (which means this calculation was invalidated)
-        backend.setOutput?.(itemId, result)
-        outputs.set(itemId, result)
+          dependantSet.add(nodeId)
        }

-
-
+        this.outputMap.set(nodeId, output)
+        this.outputHandler?.(nodeId, output)

-
-
-      }
+        stack.pop()
+      }

-
+      this.workset.delete(rootNodeId)
+    }
   }
-
-  factory.factoryName = options.name
-  return factory
 }
package/src/shared/resolvers/input-hash.ts
CHANGED
@@ -1,10 +1,10 @@
-import type { ComponentModel, InstanceModel } from "@highstate/contract"
 import type { InstanceState } from "../state"
 import type { ResolvedInstanceInput } from "./input"
+import { isUnitModel, type ComponentModel, type InstanceModel } from "@highstate/contract"
 import { sha256 } from "crypto-hash"
-import {
+import { GraphResolver } from "./graph-resolver"

-export type
+export type InputHashNode = {
   instance: InstanceModel
   component: ComponentModel
   resolvedInputs: Record<string, ResolvedInstanceInput[]>
@@ -12,7 +12,7 @@ export type InputHashResolverInput = {
   sourceHash: string | undefined
 }

-export type
+export type InputHashOutput = {
   inputHash: string
   outputHash: string
 }
@@ -20,14 +20,8 @@ export type InputHashResolverOutput = {
 /**
  * Resolves the hash of the instance based on its args, resolved input hashes, source hash, and the output hash.
  */
-export
-
-  InputHashResolverOutput
->({
-  name: "input-hash-resolver",
-  getNodeId: node => node.instance.id,
-
-  getNodeDependencies({ resolvedInputs }) {
+export class InputHashResolver extends GraphResolver<InputHashNode, InputHashOutput> {
+  getNodeDependencies({ resolvedInputs }: InputHashNode): string[] {
     const dependencies: string[] = []

     for (const inputs of Object.values(resolvedInputs ?? {})) {
@@ -37,13 +31,24 @@ export const createInputHashResolver = defineGraphResolver<
     }

     return dependencies
-  }
+  }

-  async
+  async processNode({
+    instance,
+    component,
+    resolvedInputs,
+    sourceHash,
+    state,
+  }: InputHashNode): Promise<InputHashOutput> {
     let sink = component.definitionHash + JSON.stringify(instance.args ?? {})

     if (sourceHash) {
       sink += sourceHash
+    } else if (isUnitModel(component)) {
+      this.logger.warn(
+        { instanceId: instance.id },
+        "missing source hash for unit model, this may lead to incorrect input hash",
+      )
     }

     const sortedInputs = Object.entries(resolvedInputs)
@@ -61,7 +66,7 @@ export const createInputHashResolver = defineGraphResolver<
     instanceIds.sort()

     for (const instanceId of instanceIds) {
-      const dependency =
+      const dependency = this.outputs.get(instanceId)
      if (!dependency) continue

      sink += dependency.inputHash
@@ -73,5 +78,5 @@ export const createInputHashResolver = defineGraphResolver<
     inputHash: await sha256(sink),
     outputHash: state?.outputHash ?? "",
   }
-  }
-  }
+  }
+}
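The ProjectManager diff above shows how this class is now driven; condensed into a standalone sketch, the pattern looks roughly like the following, where inputHashInputs, storedHashes, and the logger are placeholders for values the manager builds in prepareResolvers.

import pino from "pino"
import { InputHashResolver, type InputHashNode } from "../shared"

declare const inputHashInputs: Map<string, InputHashNode> // one node per instance, keyed by instance id
declare const storedHashes: Record<string, string> // hashes persisted after the previous evaluation

const inputHashResolver = new InputHashResolver(inputHashInputs, pino())

inputHashResolver.addAllNodesToWorkset()
await inputHashResolver.process()

// an instance needs re-evaluation when its freshly computed hash differs from the stored one
const changedIds = [...inputHashInputs.keys()].filter(
  id => inputHashResolver.requireOutput(id).inputHash !== storedHashes[id],
)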