@highstate/backend 0.9.33 → 0.9.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +415 -194
- package/dist/index.js.map +1 -1
- package/package.json +5 -3
- package/src/artifact/local.ts +10 -7
- package/src/business/artifact.test.ts +7 -337
- package/src/business/artifact.ts +5 -56
- package/src/business/instance-state.test.ts +6 -1
- package/src/business/instance-state.ts +16 -0
- package/src/business/unit-extra.test.ts +113 -0
- package/src/business/unit-extra.ts +40 -0
- package/src/library/local.ts +431 -141
- package/src/orchestrator/operation.ts +8 -3
- package/src/runner/abstractions.ts +7 -0
- package/src/runner/artifact-env.ts +6 -4
- package/src/runner/local.ts +15 -2
package/dist/index.js
CHANGED
@@ -3,21 +3,21 @@ import { codebaseConfig, stringArrayType, createProjectLogger, isAbortErrorLike,
import { __using, __callDispose } from './chunk-I7BWSAN6.js';
import { randomBytes, createHash } from 'node:crypto';
import { createId } from '@paralleldrive/cuid2';
- import { hubModelSchema, instanceModelSchema, parseInstanceId, isUnitModel, HighstateConfigKey, unitArtifactSchema, unitWorkerSchema, unitTriggerSchema, unitPageSchema, unitTerminalSchema, instanceStatusFieldSchema, getInstanceId } from '@highstate/contract';
- import { omit, isNonNullish, groupBy,
+ import { hubModelSchema, instanceModelSchema, parseInstanceId, isUnitModel, HighstateConfigKey, unitArtifactSchema, unitArtifactId, unitWorkerSchema, unitTriggerSchema, unitPageSchema, unitTerminalSchema, instanceStatusFieldSchema, getInstanceId } from '@highstate/contract';
+ import { omit, isNonNullish, groupBy, mapValues, unique, join as join$1, omitBy } from 'remeda';
import z3, { z } from 'zod';
import * as os from 'node:os';
import { tmpdir, hostname } from 'node:os';
import { PrismaLibSQL } from '@prisma/adapter-libsql';
import { generateIdentity, armor, Decrypter, Encrypter, identityToRecipient } from 'age-encryption';
import * as path4 from 'node:path';
- import path4__default, { join, resolve as resolve$1 } from 'node:path';
+ import path4__default, { join, resolve as resolve$1, dirname } from 'node:path';
import { fileURLToPath } from 'node:url';
import * as runtime2 from '@prisma/client/runtime/client';
import { PrismaClientKnownRequestError } from '@prisma/client/runtime/client';
import { execa } from 'execa';
import { resolve } from 'import-meta-resolve';
- import { detectPackageManager,
+ import { detectPackageManager, runScript, addDependency, ensureDependencyInstalled } from 'nypm';
import { findCredentialsAsync, AsyncEntry } from '@napi-rs/keyring';
import { mkdir, readFile, writeFile, access, unlink, readdir, rm, mkdtemp } from 'node:fs/promises';
import { stringify, parse } from 'yaml';
@@ -30,7 +30,9 @@ import { EventEmitter, on } from 'node:events';
import { Worker } from 'node:worker_threads';
import { decode, encode } from '@msgpack/msgpack';
import { BetterLock } from 'better-lock';
- import {
+ import { glob } from 'glob';
+ import PQueue from 'p-queue';
+ import { readPackageJSON, resolvePackageJSON } from 'pkg-types';
import Watcher from 'watcher';
import { crc32 } from 'node:zlib';
import { sha256 } from '@noble/hashes/sha2';
@@ -80,8 +82,6 @@ var ApiKeyService = class {
return apiKey;
}
};
-
- // src/business/artifact.ts
var artifactChunkSize = 1024 * 1024;
var ArtifactService = class {
constructor(database, artifactBackend, logger) {
@@ -104,6 +104,7 @@ var ArtifactService = class {
async store(projectId, hash, size, meta, content, track) {
const database = await this.database.forProject(projectId);
const existingArtifact = await database.artifact.findUnique({ where: { hash } });
+ const artifactId = existingArtifact?.id ?? createId();
if (!existingArtifact || !await this.artifactBackend.exists(projectId, hash)) {
if (existingArtifact) {
this.logger.warn(
@@ -111,12 +112,12 @@ var ArtifactService = class {
hash
);
}
- await this.artifactBackend.store(projectId,
+ await this.artifactBackend.store(projectId, artifactId, artifactChunkSize, content);
}
return await database.$transaction(async (tx) => {
const artifact = await tx.artifact.upsert({
- where: {
- create: { hash, size, meta, chunkSize: artifactChunkSize },
+ where: { id: artifactId },
+ create: { id: artifactId, hash, size, meta, chunkSize: artifactChunkSize },
update: { meta }
});
await track(tx, artifact);
@@ -129,49 +130,6 @@ var ArtifactService = class {
return artifact;
});
}
- /**
- * Clears all artifact references for a specific instance and runs garbage collection.
- * This removes all associations between the instance and its artifacts, then
- * cleans up any unreferenced artifacts.
- *
- * @param projectId The project ID.
- * @param instanceId The instance ID to clear artifact references for.
- */
- async clearInstanceArtifactReferences(projectId, instanceId) {
- const database = await this.database.forProject(projectId);
- this.logger.info({ projectId, instanceId }, "clearing instance artifact references");
- await database.$transaction(async (tx) => {
- const artifactsConnectedToInstance = await tx.artifact.findMany({
- where: {
- instances: {
- some: {
- id: instanceId
- }
- }
- },
- select: {
- id: true
- }
- });
- await Promise.all(
- artifactsConnectedToInstance.map(
- (artifact) => tx.artifact.update({
- where: { id: artifact.id },
- data: {
- instances: {
- disconnect: { id: instanceId }
- }
- }
- })
- )
- );
- });
- this.logger.info(
- { projectId, instanceId },
- "cleared instance artifact references, running garbage collection"
- );
- await this.collectGarbage(projectId);
- }
/**
* Removes artifacts with no references and cleans up backend storage.
*
@@ -694,8 +652,8 @@ config.compilerWasm = {
return await decodeBase64AsWasm(wasm);
}
};
- function getPrismaClientClass(
- config.dirname =
+ function getPrismaClientClass(dirname5) {
+ config.dirname = dirname5;
return runtime2.getPrismaClient(config);
}
runtime2.Public.validator;
@@ -1040,8 +998,8 @@ config2.compilerWasm = {
return await decodeBase64AsWasm2(wasm);
}
};
- function getPrismaClientClass2(
- config2.dirname =
+ function getPrismaClientClass2(dirname5) {
+ config2.dirname = dirname5;
return runtime2.getPrismaClient(config2);
}
runtime2.Public.validator;
@@ -1137,8 +1095,8 @@ config3.compilerWasm = {
return await decodeBase64AsWasm3(wasm);
}
};
- function getPrismaClientClass3(
- config3.dirname =
+ function getPrismaClientClass3(dirname5) {
+ config3.dirname = dirname5;
return runtime2.getPrismaClient(config3);
}
runtime2.Public.validator;
@@ -1685,6 +1643,9 @@ var InstanceStateService = class {
this.workerService.updateUnitRegistrations(tx, projectId, stateId, unitExtra.workers)
]);
unitExtraData = { pageIds, terminalIds, triggerIds, secretNames };
+ if (unitExtra.artifactIds !== void 0) {
+ await this.unitExtraService.pruneInstanceArtifacts(tx, stateId, unitExtra.artifactIds);
+ }
}
if (instanceState) {
await tx.instanceState.update({
@@ -1694,6 +1655,9 @@ var InstanceStateService = class {
}
return { updatedOperationState, unitExtraData };
});
+ if (options.unitExtra?.artifactIds !== void 0) {
+ await this.artifactService.collectGarbage(projectId);
+ }
const patch = {
...instanceState,
...result.unitExtraData,
@@ -3549,6 +3513,39 @@ var UnitExtraService = class {
});
return triggerIds;
}
+ /**
+ * Disconnects artifacts that are no longer referenced by the instance.
+ *
+ * @param tx The database transaction to use.
+ * @param stateId The ID of the instance state.
+ * @param artifactIds The artifact IDs that should remain connected to the instance.
+ */
+ async pruneInstanceArtifacts(tx, stateId, artifactIds) {
+ const staleArtifacts = await tx.artifact.findMany({
+ where: {
+ instances: {
+ some: {
+ id: stateId
+ }
+ },
+ id: artifactIds.length > 0 ? { notIn: artifactIds } : void 0
+ },
+ select: {
+ id: true
+ }
+ });
+ if (staleArtifacts.length === 0) {
+ return;
+ }
+ await tx.instanceState.update({
+ where: { id: stateId },
+ data: {
+ artifacts: {
+ disconnect: staleArtifacts.map((artifact) => ({ id: artifact.id }))
+ }
+ }
+ });
+ }
/**
* Gets all triggers for a specific instance.
*
@@ -3842,13 +3839,13 @@ var LocalArtifactBackend = class _LocalArtifactBackend {
file.end();
logger.info({ artifactId, fileName }, "artifact stored");
}
- async retrieve(projectId,
+ async retrieve(projectId, artifactId, chunkSize) {
const logger = createProjectLogger(this.logger, projectId);
- const [, fileName] = this.getArtifactPath(projectId,
+ const [, fileName] = this.getArtifactPath(projectId, artifactId);
try {
return Promise.resolve(createReadStream(fileName, { highWaterMark: chunkSize }));
} catch (error) {
- logger.debug({ hash, error }, "artifact retrieval failed");
+ logger.debug({ hash: artifactId, error }, "artifact retrieval failed");
return null;
}
}
@@ -3876,8 +3873,8 @@ var LocalArtifactBackend = class _LocalArtifactBackend {
logger.error({ dirPath, error }, "failed to delete directory");
}
}
- async exists(projectId,
- const [, fileName] = this.getArtifactPath(projectId,
+ async exists(projectId, artifactId) {
+ const [, fileName] = this.getArtifactPath(projectId, artifactId);
try {
await access(fileName);
return true;
@@ -3885,9 +3882,9 @@ var LocalArtifactBackend = class _LocalArtifactBackend {
return false;
}
}
- getArtifactPath(projectId,
+ getArtifactPath(projectId, artifactId) {
const baseDir = resolve$1(this.hsCodebasePath, "projects", projectId, "artifacts");
- const fileName = join(baseDir, `${
+ const fileName = join(baseDir, `${artifactId}${this.extension}`);
return [baseDir, fileName];
}
static async create(config4, extension, logger) {
@@ -3916,17 +3913,27 @@ async function createArtifactBackend(config4, database, logger) {
return backend;
}
var localLibraryBackendConfig = z.object({
-
-
+ HIGHSTATE_LIBRARY_BACKEND_LOCAL_LIBRARY_PACKAGES: stringArrayType.default(() => [
+ "@highstate/library"
+ ]),
+ HIGHSTATE_LIBRARY_BACKEND_LOCAL_WORKSPACE_PATH: z.string().optional(),
+ HIGHSTATE_LIBRARY_BACKEND_LOCAL_BUILD_CONCURRENCY: z.coerce.number().int().positive().default(3),
+ HIGHSTATE_LIBRARY_BACKEND_LOCAL_BUILD_ON_STARTUP: z.stringbool().default(true),
+ HIGHSTATE_LIBRARY_BACKEND_LOCAL_BUILD_ON_CHANGES: z.stringbool().default(true)
});
var LocalLibraryBackend = class _LocalLibraryBackend {
- constructor(libraryPackages,
+ constructor(libraryPackages, workspacePath, workspacePatterns, buildConcurrency, buildOnStartup, buildOnChanges, logger) {
this.libraryPackages = libraryPackages;
this.logger = logger;
- this.
+ this.workspacePath = workspacePath;
+ this.workspacePatterns = workspacePatterns;
+ this.buildQueue = new PQueue({ concurrency: buildConcurrency });
+ this.configBuildOnStartup = buildOnStartup;
+ this.configBuildOnChanges = buildOnChanges;
+ this.watcher = new Watcher([workspacePath], {
recursive: true,
ignoreInitial: true,
- ignore: /\.git|node_modules|dist|\.highstate/
+ ignore: /\.git|node_modules|dist|\.highstate|\.nx/
});
this.watcher.on("all", (event, path5) => {
this.logger.debug({ event, path: path5 }, "library event");
@@ -3937,12 +3944,18 @@ var LocalLibraryBackend = class _LocalLibraryBackend {
});
}
watcher;
+ workspacePath;
+ workspacePatterns;
lock = new BetterLock();
eventEmitter = new EventEmitter();
library = null;
packages = /* @__PURE__ */ new Map();
resolvedUnitSources = /* @__PURE__ */ new Map();
rebuildStates = /* @__PURE__ */ new Map();
+ ignoredPackages = /* @__PURE__ */ new Set();
+ buildQueue;
+ configBuildOnStartup;
+ configBuildOnChanges;
async loadLibrary() {
return await this.lock.acquire(async () => await this.getLibrary());
}
@@ -4065,72 +4078,89 @@ var LocalLibraryBackend = class _LocalLibraryBackend {
await this.loadLibraryPackages(packagesToLoad, installIfNotFound);
}
}
-
- if (
+ logIgnoredPackage(identifier, message) {
+ if (this.ignoredPackages.has(identifier)) {
return;
}
-
-
-
-
-
-
-
+ this.logger.debug({ package: identifier }, message);
+ this.ignoredPackages.add(identifier);
+ }
+ isHighstateManagedPackage(packageName, packageJson) {
+ if (!packageJson.devDependencies?.["@highstate/cli"]) {
+ this.logIgnoredPackage(packageName, "skipping package without @highstate/cli dev dependency");
+ return false;
}
-
-
-
-
+ const buildScript = packageJson.scripts?.build ?? "";
+ if (!buildScript.includes("highstate build")) {
+ this.logIgnoredPackage(
+ packageName,
+ 'skipping package without "highstate build" in build script'
+ );
+ return false;
}
-
+ this.ignoredPackages.delete(packageName);
+ return true;
}
- async
-
-
-
-
-
-
-
-
- const sameScopeDependencies = dependencyPackageNames.filter(
- (dep) => dep.scope === parsedName.scope && dep.name !== parsedName.name
- );
- await this.ensureLibraryPackagesLoaded(sameScopeDependencies.map((dep) => dep.name));
- for (const dependency of sameScopeDependencies) {
- const dependencyPackage = this.packages.get(dependency.name);
- if (!dependencyPackage) {
- this.logger.warn(`dependency package not found for graph update: "%s"`, dependency.name);
- continue;
+ async registerLibraryPackage(packageRootPath, declaredName, existingPackageJson) {
+ let packageJson = existingPackageJson;
+ if (!packageJson) {
+ try {
+ packageJson = await readPackageJSON(packageRootPath);
+ } catch (error) {
+ this.logger.debug({ error }, `failed to read package.json at path: "%s"`, packageRootPath);
+ this.logIgnoredPackage(packageRootPath, "skipping package without readable package.json");
+ return null;
}
- libraryPackage.dependencies.add(dependency.name);
- dependencyPackage.dependents.add(libraryPackage.name);
}
-
-
-
- for (const dependent of libraryPackage.dependents) {
- const dependentPackage = this.packages.get(dependent);
- if (!dependentPackage) {
- this.logger.warn(`dependent package not found for rebuild: "%s"`, dependent);
- continue;
- }
- promises.push(this.rebuildLibraryPackage(dependentPackage, false, false, rebuiltPackages));
+ if (!packageJson.name) {
+ this.logIgnoredPackage(packageRootPath, "skipping package without name in package.json");
+ return null;
}
-
-
-
-
-
-
-
-
+ if (declaredName && declaredName !== packageJson.name) {
+ this.logger.warn(
+ {
+ declaredName,
+ packageJsonName: packageJson.name,
+ packageRootPath
+ },
+ "resolved package name does not match declared name"
+ );
+ }
+ const isHighstateManaged = this.isHighstateManagedPackage(packageJson.name, packageJson);
+ let libraryPackage = this.packages.get(packageJson.name);
+ if (libraryPackage) {
+ libraryPackage.rootPath = packageRootPath;
+ libraryPackage.isHighstateManaged = isHighstateManaged;
+ } else {
+ libraryPackage = {
+ name: packageJson.name,
+ rootPath: packageRootPath,
+ isHighstateManaged
};
+ this.packages.set(packageJson.name, libraryPackage);
+ }
+ this.ignoredPackages.delete(packageRootPath);
+ if (libraryPackage.isHighstateManaged && this.library && !this.libraryPackages.includes(libraryPackage.name) && libraryPackage.name !== "@highstate/contract") {
+ await this.reloadUnitManifest(libraryPackage);
+ }
+ return libraryPackage;
+ }
+ async rebuildLibraryPackage(libraryPackage) {
+ if (!libraryPackage.isHighstateManaged) {
+ this.logIgnoredPackage(
+ libraryPackage.name,
+ "skipping rebuild for non Highstate-managed package"
+ );
+ return;
+ }
+ const now = Date.now();
+ this.logger.info(`rebuilding "%s"`, libraryPackage.name);
+ await runScript("build", { cwd: libraryPackage.rootPath });
+ const duration = Date.now() - now;
+ this.logger.info(`built "%s" in %dms`, libraryPackage.name, duration);
+ if (this.library && !this.libraryPackages.includes(libraryPackage.name)) {
+ await this.reloadUnitManifest(libraryPackage);
}
- return {
- name: dependency,
- scope: null
- };
}
async readLibraryPackageManifest(libraryPackage) {
const manifestPath = resolve$1(libraryPackage.rootPath, "dist", "highstate.manifest.json");
@@ -4164,7 +4194,6 @@ var LocalLibraryBackend = class _LocalLibraryBackend {
async loadLibraryPackages(names, installIfNotFound = false) {
this.logger.debug("loading library packages: %s", names.join(", "));
const missingPackages = [];
- const packagesToUpdate = [];
const worker = this.createPackageResolutionWorker({ packageNames: names });
for await (const [event] of on(worker, "message")) {
const eventData = event;
@@ -4173,15 +4202,13 @@ var LocalLibraryBackend = class _LocalLibraryBackend {
}
for (const result of eventData.results) {
if (result.type === "success") {
- const libraryPackage =
-
-
-
-
-
-
- packagesToUpdate.push(libraryPackage);
- this.logger.info(`loaded library package: "%s"`, result.packageName);
+ const libraryPackage = await this.registerLibraryPackage(
+ result.packageRootPath,
+ result.packageName
+ );
+ if (libraryPackage) {
+ this.logger.info(`loaded library package: "%s"`, libraryPackage.name);
+ }
} else if (result.type === "not-found") {
missingPackages.push(result.packageName);
} else {
@@ -4194,26 +4221,57 @@ var LocalLibraryBackend = class _LocalLibraryBackend {
}
break;
}
- for (const libraryPackage of packagesToUpdate) {
- await this.updateLibraryPackageDependencies(libraryPackage);
- if (!this.libraryPackages.includes(libraryPackage.name) && libraryPackage.name !== "@highstate/contract") {
- await this.reloadUnitManifest(libraryPackage);
- }
- }
if (installIfNotFound && missingPackages.length > 0) {
this.logger.info("installing missing library packages: %s", missingPackages.join(", "));
await addDependency(missingPackages);
await this.loadLibraryPackages(missingPackages);
}
}
- handleFileEvent(path5) {
-
-
-
+ async handleFileEvent(path5) {
+ try {
+ const libraryPackage = await this.resolveLibraryPackageForPath(path5);
+ if (!libraryPackage) {
+ return;
+ }
+ if (!libraryPackage.isHighstateManaged) {
+ this.logIgnoredPackage(
+ libraryPackage.name,
+ "skipping file event for non Highstate-managed package"
+ );
+ return;
+ }
+ if (!this.configBuildOnChanges) {
+ return;
+ }
+ this.schedulePackageRebuild(libraryPackage.name);
+ } catch (error) {
+ this.logger.error({ error, path: path5 }, "failed to schedule library package rebuild");
+ }
+ }
+ async resolveLibraryPackageForPath(path5) {
+ const existingPackage = Array.from(this.packages.values()).find(
+ (pkg) => path5.startsWith(pkg.rootPath)
+ );
+ if (existingPackage) {
+ return existingPackage;
+ }
+ let packageJsonPath;
+ try {
+ packageJsonPath = await resolvePackageJSON(path5);
+ } catch (error) {
+ this.logger.debug({ error }, `failed to resolve package.json for path: "%s"`, path5);
+ return null;
+ }
+ const packageRootPath = dirname(packageJsonPath);
+ const alreadyLoaded = Array.from(this.packages.values()).find(
+ (pkg) => pkg.rootPath === packageRootPath
+ );
+ if (alreadyLoaded) {
+ return alreadyLoaded;
}
-
+ return await this.registerLibraryPackage(packageRootPath);
}
-
+ schedulePackageRebuild(packageName) {
const state = this.rebuildStates.get(packageName) ?? { inProgress: false, pending: false };
if (state.inProgress && state.pending) {
this.logger.debug(`rebuild in progress and already pending for "%s", discarding`, packageName);
@@ -4225,36 +4283,178 @@ var LocalLibraryBackend = class _LocalLibraryBackend {
this.logger.debug(`rebuild in progress, scheduling pending rebuild for "%s"`, packageName);
return;
}
-
-
-
- const state = this.rebuildStates.get(packageName) ?? { inProgress: false, pending: false };
- do {
- state.inProgress = true;
- state.pending = false;
- this.rebuildStates.set(packageName, state);
+ state.inProgress = true;
+ this.rebuildStates.set(packageName, state);
+ void this.buildQueue.add(async () => {
try {
- await this.
- const libraryPackage = this.packages.get(packageName);
- if (!libraryPackage) {
- this.logger.warn(`package not found for rebuild: "%s"`, packageName);
- return;
- }
- const builtPackages = /* @__PURE__ */ new Set();
- await this.rebuildLibraryPackage(libraryPackage, false, false, builtPackages);
- if (this.libraryPackages.some((pkg) => builtPackages.has(pkg))) {
- this.logger.info(
- "reloading library due to file change in package: %s",
- libraryPackage.name
- );
- await this.reloadLibrary();
- }
- });
+ await this.executePackageRebuild(packageName);
} finally {
state.inProgress = false;
+ if (state.pending) {
+ state.pending = false;
+ this.schedulePackageRebuild(packageName);
+ } else {
+ this.rebuildStates.delete(packageName);
+ }
+ }
+ });
+ }
+ async executePackageRebuild(packageName) {
+ const libraryPackage = this.packages.get(packageName);
+ if (!libraryPackage) {
+ this.logger.warn(`package not found for rebuild: "%s"`, packageName);
+ return;
+ }
+ await this.rebuildLibraryPackage(libraryPackage);
+ if (this.libraryPackages.includes(libraryPackage.name)) {
+ this.logger.info("reloading library due to file change in package: %s", libraryPackage.name);
+ await this.reloadLibrary();
+ }
+ }
+ async initialize() {
+ const workspacePackages = await this.collectWorkspacePackages();
+ if (workspacePackages.length === 0) {
+ return;
+ }
+ const managedPackageNames = /* @__PURE__ */ new Set();
+ await this.lock.acquire(async () => {
+ for (const workspacePackage of workspacePackages) {
+ const registered = await this.registerLibraryPackage(
+ workspacePackage.rootPath,
+ workspacePackage.packageJson.name,
+ workspacePackage.packageJson
+ );
+ if (registered?.isHighstateManaged) {
+ managedPackageNames.add(registered.name);
+ }
+ }
+ });
+ if (!this.configBuildOnStartup) {
+ return;
+ }
+ for (const packageName of managedPackageNames) {
+ this.schedulePackageRebuild(packageName);
+ }
+ }
+ async collectWorkspacePackages() {
+ if (this.workspacePatterns.length === 0) {
+ this.logger.warn(
+ { workspaceRoot: this.workspacePath },
+ "workspace root does not define workspaces; skipping startup builds"
+ );
+ return [];
+ }
+ return await this.expandWorkspacePatterns(this.workspacePath, this.workspacePatterns);
+ }
+ async expandWorkspacePatterns(rootPath, patterns) {
+ const includePatterns = patterns.filter((pattern) => !pattern.startsWith("!"));
+ const excludePatterns = patterns.filter((pattern) => pattern.startsWith("!")).map((pattern) => pattern.slice(1));
+ if (includePatterns.length === 0) {
+ return [];
+ }
+ let matches = [];
+ try {
+ matches = await glob(includePatterns, {
+ cwd: rootPath,
+ absolute: true,
+ dot: false,
+ ignore: excludePatterns
+ });
+ } catch (error) {
+ this.logger.error({ error, rootPath }, "failed to expand workspace patterns");
+ return [];
+ }
+ const results = [];
+ const seen = /* @__PURE__ */ new Set();
+ for (const match of matches) {
+ const packageDir = match.endsWith("package.json") ? dirname(match) : match;
+ if (seen.has(packageDir)) {
+ continue;
+ }
+ const packageJson = await this.tryReadPackageJson(packageDir);
+ if (!packageJson) {
+ continue;
+ }
+ seen.add(packageDir);
+ results.push({ rootPath: packageDir, packageJson });
+ }
+ return results;
+ }
+ async tryReadPackageJson(directory) {
+ try {
+ return await readPackageJSON(directory);
+ } catch (error) {
+ this.logger.trace(
+ { error, directory },
+ "failed to read package.json while resolving workspaces"
+ );
+ return null;
+ }
+ }
+ static extractWorkspacePatterns(packageJson) {
+ const workspaces = packageJson.workspaces;
+ if (!workspaces) {
+ return [];
+ }
+ if (Array.isArray(workspaces)) {
+ return workspaces;
+ }
+ if (typeof workspaces === "object" && Array.isArray(workspaces.packages)) {
+ return workspaces.packages;
+ }
+ return [];
+ }
+ static async resolveWorkspaceConfig(explicitWorkspace, projectRoot, logger) {
+ if (explicitWorkspace) {
+ const workspaceRoot = resolve$1(explicitWorkspace);
+ try {
+ const packageJson = await readPackageJSON(workspaceRoot);
+ const patterns = _LocalLibraryBackend.extractWorkspacePatterns(packageJson);
+ if (patterns.length === 0) {
+ logger.warn(
+ { workspaceRoot },
+ "configured workspace does not define workspaces; skipping startup builds"
+ );
+ }
+ return { root: workspaceRoot, patterns };
+ } catch (error) {
+ logger.warn(
+ { error, workspaceRoot },
+ "failed to read configured workspace package.json; skipping startup builds"
+ );
+ return { root: workspaceRoot, patterns: [] };
+ }
+ }
+ const detected = await _LocalLibraryBackend.findWorkspaceRootForPath(projectRoot);
+ if (detected) {
+ return detected;
+ }
+ logger.warn(
+ { projectRoot },
+ 'no workspace root with "workspaces" field detected; skipping startup builds'
+ );
+ return { root: projectRoot, patterns: [] };
+ }
+ static async findWorkspaceRootForPath(startPath) {
+ let current = startPath;
+ const visited = /* @__PURE__ */ new Set();
+ while (!visited.has(current)) {
+ visited.add(current);
+ try {
+ const packageJson = await readPackageJSON(current);
+ const patterns = _LocalLibraryBackend.extractWorkspacePatterns(packageJson);
+ if (patterns.length > 0) {
+ return { root: current, patterns };
+ }
+ } catch {
}
-
-
+ const parent = dirname(current);
+ if (parent === current) {
+ break;
+ }
+ current = parent;
+ }
+ return null;
}
createLibraryWorker(workerData) {
const workerPathUrl = resolve(`@highstate/backend/library-worker`, import.meta.url);
@@ -4270,16 +4470,23 @@ var LocalLibraryBackend = class _LocalLibraryBackend {
return new Worker(workerPath, { workerData });
}
static async create(config4, logger) {
-
-
-
-
-
-
-
-
+ const [projectPath] = await resolveMainLocalProject();
+ const workspaceConfig = await _LocalLibraryBackend.resolveWorkspaceConfig(
+ config4.HIGHSTATE_LIBRARY_BACKEND_LOCAL_WORKSPACE_PATH,
+ projectPath,
+ logger
+ );
+ const backend = new _LocalLibraryBackend(
+ config4.HIGHSTATE_LIBRARY_BACKEND_LOCAL_LIBRARY_PACKAGES,
+ workspaceConfig.root,
+ workspaceConfig.patterns,
+ config4.HIGHSTATE_LIBRARY_BACKEND_LOCAL_BUILD_CONCURRENCY,
+ config4.HIGHSTATE_LIBRARY_BACKEND_LOCAL_BUILD_ON_STARTUP,
+ config4.HIGHSTATE_LIBRARY_BACKEND_LOCAL_BUILD_ON_CHANGES,
logger.child({ backend: "LibraryBackend", service: "LocalLibraryBackend" })
);
+ void backend.initialize().catch((error) => logger.error({ error }, "failed to initialize LocalLibraryBackend"));
+ return backend;
}
};

@@ -4456,13 +4663,13 @@ async function setupArtifactEnvironment(projectId, artifacts, artifactBackend, t
await Promise.all(
artifacts.map(async (artifact) => {
const artifactPath = join(readDir, `${artifact.hash}.tgz`);
- const stream = await artifactBackend.retrieve(projectId, artifact.
+ const stream = await artifactBackend.retrieve(projectId, artifact.id, artifact.chunkSize);
if (!stream) {
throw new Error(`artifact not found: ${artifact.hash}`);
}
const writeStream = createWriteStream(artifactPath);
await pipeline(stream, writeStream);
- logger.debug({ msg: "downloaded artifact",
+ logger.debug({ msg: "downloaded artifact", id: artifact.id, path: artifactPath });
})
);
} else {
@@ -4511,23 +4718,24 @@ async function collectAndStoreArtifacts(writePath, projectId, stateId, artifactM
return;
}
const readStream = __using(_stack, createReadStream(filePath), true);
- await artifactManager.store(
+ const storedArtifact = await artifactManager.store(
projectId,
expectedHash,
fileSize,
artifact.meta ?? { title: `Artifact for "${stateId}"` },
readStream,
- async (tx,
+ async (tx, storedArtifact2) => {
await tx.instanceState.update({
where: { id: stateId },
data: {
artifacts: {
- connect: { id:
+ connect: { id: storedArtifact2.id }
}
}
});
}
);
+ artifact[unitArtifactId] = storedArtifact.id;
logger.debug({ msg: "stored artifact", hash: expectedHash, file: filePath });
} catch (_) {
var _error = _, _hasError = true;
@@ -4826,7 +5034,7 @@ var LocalRunnerBackend = class _LocalRunnerBackend {
let unitTempPath = null;
let artifactEnv = null;
try {
- unitTempPath = await mkdtemp(join(tmpdir(), `highstate-unit-${
+ unitTempPath = await mkdtemp(join(tmpdir(), `highstate-unit-${options.stateId}-`));
childLogger.debug({ msg: "created unit temp directory", unitTempPath });
options.signal?.throwIfAborted();
artifactEnv = await setupArtifactEnvironment(
@@ -4915,6 +5123,16 @@ var LocalRunnerBackend = class _LocalRunnerBackend {
Object.values(artifacts).flat(),
childLogger
);
+ completionUpdate.exportedArtifactIds = mapValues(artifacts, (artifacts2) => {
+ return artifacts2.map((artifact) => {
+ if (artifact[unitArtifactId]) {
+ return artifact[unitArtifactId];
+ }
+ throw new Error(
+ `Failed to determine artifact ID for artifact with hash ${artifact.hash}`
+ );
+ });
+ });
} else if (preview && outputs["$artifacts"]) {
childLogger.debug({ msg: "skipping artifact persistence for preview" });
}
@@ -6205,13 +6423,13 @@ var OperationContext = class _OperationContext {
* @returns A map of instance IDs to state IDs.
*/
getInstanceIdToStateIdMap(instanceId) {
- const
+ const map = {};
const dependencies = this.getDependencies(instanceId).map((i) => i.id);
for (const dep of dependencies) {
const state = this.getState(dep);
-
+ map[dep] = state.id;
}
- return
+ return map;
}
getUnfinishedOperationStates() {
const unfinishedStates = [];
@@ -7431,7 +7649,6 @@ ${errors.join("\n")}`
});
logger.debug("destroy request sent");
await this.watchStateStream(state, type, name, logger);
- await this.artifactService.clearInstanceArtifactReferences(this.project.id, instance.id);
logger.info("unit destroyed");
});
}
@@ -7557,12 +7774,14 @@
status: this.workset.getNextStableInstanceStatus(instance.id),
statusFields: update.statusFields ?? null
};
+ const artifactIds = update.exportedArtifactIds ? Object.values(update.exportedArtifactIds).flat() : [];
if (update.operationType !== "destroy") {
state.outputHash = update.outputHash ?? null;
const { inputHash, dependencyOutputHash } = await this.context.getUpToDateInputHashOutput(instance);
data.inputHash = inputHash;
data.dependencyOutputHash = dependencyOutputHash;
data.outputHash = update.outputHash;
+ data.exportedArtifactIds = update.exportedArtifactIds;
if (instance.parentId) {
const parentState = this.context.getState(instance.parentId);
data.parentId = parentState.id;
@@ -7576,6 +7795,7 @@ ${errors.join("\n")}`
data.parentId = null;
data.model = null;
data.resolvedInputs = null;
+ data.exportedArtifactIds = null;
}
await this.workset.updateState(instance.id, {
// TODO: honestly, it is not correct
@@ -7593,7 +7813,8 @@ ${errors.join("\n")}`
terminals: update.terminals ?? [],
triggers: update.triggers ?? [],
workers: update.workers ?? [],
- secrets: update.secrets ?? {}
+ secrets: update.secrets ?? {},
+ artifactIds
} : void 0
});
}