vector-framework 1.2.1 → 1.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -6
- package/dist/auth/protected.d.ts +4 -4
- package/dist/auth/protected.d.ts.map +1 -1
- package/dist/auth/protected.js +10 -7
- package/dist/auth/protected.js.map +1 -1
- package/dist/cache/manager.d.ts +2 -0
- package/dist/cache/manager.d.ts.map +1 -1
- package/dist/cache/manager.js +21 -4
- package/dist/cache/manager.js.map +1 -1
- package/dist/checkpoint/artifacts/compressor.d.ts +5 -0
- package/dist/checkpoint/artifacts/compressor.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/compressor.js +24 -0
- package/dist/checkpoint/artifacts/compressor.js.map +1 -0
- package/dist/checkpoint/artifacts/decompress-worker.d.ts +2 -0
- package/dist/checkpoint/artifacts/decompress-worker.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/decompress-worker.js +31 -0
- package/dist/checkpoint/artifacts/decompress-worker.js.map +1 -0
- package/dist/checkpoint/artifacts/hasher.d.ts +2 -0
- package/dist/checkpoint/artifacts/hasher.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/hasher.js +7 -0
- package/dist/checkpoint/artifacts/hasher.js.map +1 -0
- package/dist/checkpoint/artifacts/manifest.d.ts +6 -0
- package/dist/checkpoint/artifacts/manifest.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/manifest.js +55 -0
- package/dist/checkpoint/artifacts/manifest.js.map +1 -0
- package/dist/checkpoint/artifacts/materializer.d.ts +16 -0
- package/dist/checkpoint/artifacts/materializer.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/materializer.js +168 -0
- package/dist/checkpoint/artifacts/materializer.js.map +1 -0
- package/dist/checkpoint/artifacts/packager.d.ts +12 -0
- package/dist/checkpoint/artifacts/packager.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/packager.js +82 -0
- package/dist/checkpoint/artifacts/packager.js.map +1 -0
- package/dist/checkpoint/artifacts/repository.d.ts +11 -0
- package/dist/checkpoint/artifacts/repository.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/repository.js +29 -0
- package/dist/checkpoint/artifacts/repository.js.map +1 -0
- package/dist/checkpoint/artifacts/store.d.ts +13 -0
- package/dist/checkpoint/artifacts/store.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/store.js +85 -0
- package/dist/checkpoint/artifacts/store.js.map +1 -0
- package/dist/checkpoint/artifacts/types.d.ts +21 -0
- package/dist/checkpoint/artifacts/types.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/types.js +2 -0
- package/dist/checkpoint/artifacts/types.js.map +1 -0
- package/dist/checkpoint/artifacts/worker-decompressor.d.ts +17 -0
- package/dist/checkpoint/artifacts/worker-decompressor.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/worker-decompressor.js +148 -0
- package/dist/checkpoint/artifacts/worker-decompressor.js.map +1 -0
- package/dist/checkpoint/asset-store.d.ts +10 -0
- package/dist/checkpoint/asset-store.d.ts.map +1 -0
- package/dist/checkpoint/asset-store.js +46 -0
- package/dist/checkpoint/asset-store.js.map +1 -0
- package/dist/checkpoint/bundler.d.ts +15 -0
- package/dist/checkpoint/bundler.d.ts.map +1 -0
- package/dist/checkpoint/bundler.js +45 -0
- package/dist/checkpoint/bundler.js.map +1 -0
- package/dist/checkpoint/cli.d.ts +2 -0
- package/dist/checkpoint/cli.d.ts.map +1 -0
- package/dist/checkpoint/cli.js +157 -0
- package/dist/checkpoint/cli.js.map +1 -0
- package/dist/checkpoint/entrypoint-generator.d.ts +17 -0
- package/dist/checkpoint/entrypoint-generator.d.ts.map +1 -0
- package/dist/checkpoint/entrypoint-generator.js +251 -0
- package/dist/checkpoint/entrypoint-generator.js.map +1 -0
- package/dist/checkpoint/forwarder.d.ts +6 -0
- package/dist/checkpoint/forwarder.d.ts.map +1 -0
- package/dist/checkpoint/forwarder.js +74 -0
- package/dist/checkpoint/forwarder.js.map +1 -0
- package/dist/checkpoint/gateway.d.ts +11 -0
- package/dist/checkpoint/gateway.d.ts.map +1 -0
- package/dist/checkpoint/gateway.js +30 -0
- package/dist/checkpoint/gateway.js.map +1 -0
- package/dist/checkpoint/ipc.d.ts +12 -0
- package/dist/checkpoint/ipc.d.ts.map +1 -0
- package/dist/checkpoint/ipc.js +96 -0
- package/dist/checkpoint/ipc.js.map +1 -0
- package/dist/checkpoint/manager.d.ts +20 -0
- package/dist/checkpoint/manager.d.ts.map +1 -0
- package/dist/checkpoint/manager.js +214 -0
- package/dist/checkpoint/manager.js.map +1 -0
- package/dist/checkpoint/process-manager.d.ts +35 -0
- package/dist/checkpoint/process-manager.d.ts.map +1 -0
- package/dist/checkpoint/process-manager.js +203 -0
- package/dist/checkpoint/process-manager.js.map +1 -0
- package/dist/checkpoint/resolver.d.ts +25 -0
- package/dist/checkpoint/resolver.d.ts.map +1 -0
- package/dist/checkpoint/resolver.js +95 -0
- package/dist/checkpoint/resolver.js.map +1 -0
- package/dist/checkpoint/socket-path.d.ts +2 -0
- package/dist/checkpoint/socket-path.d.ts.map +1 -0
- package/dist/checkpoint/socket-path.js +51 -0
- package/dist/checkpoint/socket-path.js.map +1 -0
- package/dist/checkpoint/types.d.ts +54 -0
- package/dist/checkpoint/types.d.ts.map +1 -0
- package/dist/checkpoint/types.js +2 -0
- package/dist/checkpoint/types.js.map +1 -0
- package/dist/cli/index.js +10 -2
- package/dist/cli/index.js.map +1 -1
- package/dist/cli/option-resolution.d.ts +1 -1
- package/dist/cli/option-resolution.d.ts.map +1 -1
- package/dist/cli/option-resolution.js.map +1 -1
- package/dist/cli.js +3817 -350
- package/dist/core/config-loader.d.ts +1 -0
- package/dist/core/config-loader.d.ts.map +1 -1
- package/dist/core/config-loader.js +10 -2
- package/dist/core/config-loader.js.map +1 -1
- package/dist/core/router.d.ts +24 -3
- package/dist/core/router.d.ts.map +1 -1
- package/dist/core/router.js +398 -249
- package/dist/core/router.js.map +1 -1
- package/dist/core/server.d.ts +3 -0
- package/dist/core/server.d.ts.map +1 -1
- package/dist/core/server.js +35 -10
- package/dist/core/server.js.map +1 -1
- package/dist/core/vector.d.ts +3 -0
- package/dist/core/vector.d.ts.map +1 -1
- package/dist/core/vector.js +51 -1
- package/dist/core/vector.js.map +1 -1
- package/dist/dev/route-scanner.d.ts.map +1 -1
- package/dist/dev/route-scanner.js +2 -1
- package/dist/dev/route-scanner.js.map +1 -1
- package/dist/errors/index.cjs +2 -0
- package/dist/http.d.ts +32 -7
- package/dist/http.d.ts.map +1 -1
- package/dist/http.js +144 -13
- package/dist/http.js.map +1 -1
- package/dist/index.cjs +2657 -0
- package/dist/index.d.ts +3 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +12 -1433
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1301 -77
- package/dist/middleware/manager.d.ts +3 -3
- package/dist/middleware/manager.d.ts.map +1 -1
- package/dist/middleware/manager.js +9 -8
- package/dist/middleware/manager.js.map +1 -1
- package/dist/openapi/docs-ui.d.ts.map +1 -1
- package/dist/openapi/docs-ui.js +1097 -61
- package/dist/openapi/docs-ui.js.map +1 -1
- package/dist/openapi/generator.d.ts +2 -1
- package/dist/openapi/generator.d.ts.map +1 -1
- package/dist/openapi/generator.js +332 -16
- package/dist/openapi/generator.js.map +1 -1
- package/dist/types/index.d.ts +71 -28
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/index.js +24 -1
- package/dist/types/index.js.map +1 -1
- package/dist/utils/validation.d.ts.map +1 -1
- package/dist/utils/validation.js +3 -2
- package/dist/utils/validation.js.map +1 -1
- package/package.json +9 -14
- package/src/auth/protected.ts +11 -8
- package/src/cache/manager.ts +23 -4
- package/src/checkpoint/artifacts/compressor.ts +30 -0
- package/src/checkpoint/artifacts/decompress-worker.ts +49 -0
- package/src/checkpoint/artifacts/hasher.ts +6 -0
- package/src/checkpoint/artifacts/manifest.ts +72 -0
- package/src/checkpoint/artifacts/materializer.ts +211 -0
- package/src/checkpoint/artifacts/packager.ts +100 -0
- package/src/checkpoint/artifacts/repository.ts +36 -0
- package/src/checkpoint/artifacts/store.ts +102 -0
- package/src/checkpoint/artifacts/types.ts +24 -0
- package/src/checkpoint/artifacts/worker-decompressor.ts +192 -0
- package/src/checkpoint/asset-store.ts +61 -0
- package/src/checkpoint/bundler.ts +64 -0
- package/src/checkpoint/cli.ts +177 -0
- package/src/checkpoint/entrypoint-generator.ts +275 -0
- package/src/checkpoint/forwarder.ts +84 -0
- package/src/checkpoint/gateway.ts +40 -0
- package/src/checkpoint/ipc.ts +107 -0
- package/src/checkpoint/manager.ts +254 -0
- package/src/checkpoint/process-manager.ts +250 -0
- package/src/checkpoint/resolver.ts +124 -0
- package/src/checkpoint/socket-path.ts +61 -0
- package/src/checkpoint/types.ts +63 -0
- package/src/cli/index.ts +11 -2
- package/src/cli/option-resolution.ts +5 -1
- package/src/core/config-loader.ts +11 -2
- package/src/core/router.ts +505 -264
- package/src/core/server.ts +51 -11
- package/src/core/vector.ts +60 -1
- package/src/dev/route-scanner.ts +2 -1
- package/src/http.ts +219 -19
- package/src/index.ts +3 -2
- package/src/middleware/manager.ts +10 -10
- package/src/openapi/docs-ui.ts +1097 -61
- package/src/openapi/generator.ts +380 -13
- package/src/types/index.ts +83 -30
- package/src/utils/validation.ts +5 -3
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
import { existsSync, promises as fs } from 'node:fs';
|
|
2
|
+
import { dirname, extname, join, relative } from 'node:path';
|
|
3
|
+
import type { CheckpointAssetRecord, CheckpointManifest } from '../types';
|
|
4
|
+
import { sha256Hex } from './hasher';
|
|
5
|
+
import { computeAssetFingerprint, isAbsolutePathPortable, normalizeLogicalPath } from './manifest';
|
|
6
|
+
import type { CheckpointArtifactMaterializerOptions } from './types';
|
|
7
|
+
import { CheckpointWorkerDecompressor } from './worker-decompressor';
|
|
8
|
+
|
|
9
|
+
// Subdirectory (inside each version dir) that receives materialized assets.
const DEFAULT_MATERIALIZED_DIR = '_materialized';
// How long materialize() waits for a competing process's lock before failing.
const DEFAULT_LOCK_TIMEOUT_MS = 15_000;
// Poll interval while waiting for the lock directory to be released.
const DEFAULT_LOCK_POLL_MS = 50;

// Shape of the `.assets.ready.json` marker written after a successful
// materialization. `fingerprint` ties the marker to a specific asset set so a
// changed manifest invalidates previous work.
interface MaterializedMarker {
  // computeAssetFingerprint() over the manifest's asset records.
  fingerprint: string;
  // ISO-8601 timestamp of when materialization completed.
  createdAt: string;
}
|
|
17
|
+
|
|
18
|
+
/**
 * Materializes the compressed asset blobs referenced by a checkpoint manifest
 * into plain files under `<storageDir>/<version>/<materializedDirName>`, so a
 * restored process can read assets at their logical paths.
 *
 * Concurrency: a mkdir-based lock directory (`.assets.lock`) serializes
 * competing materializers, and a marker file (`.assets.ready.json`) carrying
 * the asset-set fingerprint makes completed work detectable, so repeated
 * calls are cheap no-ops.
 */
export class CheckpointArtifactMaterializer {
  private verifyChecksums: boolean;     // verify blob/content sha256 digests (default true)
  private materializedDirName: string;  // output subdirectory inside the version dir
  private lockTimeoutMs: number;        // max wait for a competing holder's lock

  constructor(options: CheckpointArtifactMaterializerOptions = {}) {
    this.verifyChecksums = options.verifyChecksums ?? true;
    this.materializedDirName = options.materializedDirName ?? DEFAULT_MATERIALIZED_DIR;
    this.lockTimeoutMs = options.lockTimeoutMs ?? DEFAULT_LOCK_TIMEOUT_MS;
  }

  /**
   * Ensures every asset in `manifest` is decompressed and linked into the
   * materialized directory, then writes the ready marker.
   *
   * Side effect: mutates each asset record's `materializedPath` (relative to
   * the version dir, forward-slash form). Fast-exits — both before and after
   * taking the lock (double-checked) — when the marker already matches the
   * manifest fingerprint and all expected files exist.
   */
  async materialize(manifest: CheckpointManifest, storageDir: string): Promise<void> {
    const versionDir = join(storageDir, manifest.version);
    const markerPath = join(versionDir, '.assets.ready.json');
    const lockPath = join(versionDir, '.assets.lock');
    const fingerprint = computeAssetFingerprint(manifest.assets);

    // Cheap pre-lock check: most calls find the work already done.
    if (await this.isReady(markerPath, fingerprint, manifest.assets, join(versionDir, this.materializedDirName))) {
      return;
    }

    await this.acquireLock(lockPath);
    try {
      // Re-check under the lock: another process may have finished while we waited.
      if (await this.isReady(markerPath, fingerprint, manifest.assets, join(versionDir, this.materializedDirName))) {
        return;
      }

      // Start from a clean output directory so stale files never survive.
      const materializedRoot = join(versionDir, this.materializedDirName);
      await fs.rm(materializedRoot, { recursive: true, force: true });
      await fs.mkdir(materializedRoot, { recursive: true });

      const decompressor = new CheckpointWorkerDecompressor();
      try {
        for (const asset of manifest.assets) {
          const result = await this.materializeAsset(asset, storageDir, versionDir, materializedRoot, decompressor);
          asset.materializedPath = result;
        }
      } finally {
        // Always tear the worker pool down, even on a mid-loop failure.
        await decompressor.dispose();
      }

      // Marker is written last: its presence implies all assets are in place.
      const marker: MaterializedMarker = {
        fingerprint,
        createdAt: new Date().toISOString(),
      };
      await fs.writeFile(markerPath, JSON.stringify(marker, null, 2), 'utf-8');
    } finally {
      await fs.rm(lockPath, { recursive: true, force: true });
    }
  }

  /**
   * Decompresses one asset blob (with optional checksum verification on both
   * the compressed and decompressed bytes), caches the decompressed content,
   * and links it into `root` at the asset's normalized logical path.
   * Returns the materialized path relative to `versionDir` (forward slashes).
   */
  private async materializeAsset(
    asset: CheckpointAssetRecord,
    storageDir: string,
    versionDir: string,
    root: string,
    decompressor: CheckpointWorkerDecompressor
  ): Promise<string> {
    const sourcePath = this.resolveSourcePath(asset, storageDir);
    if (!existsSync(sourcePath)) {
      throw new Error(`Checkpoint asset blob not found: ${sourcePath}`);
    }

    const blob = await fs.readFile(sourcePath);
    const blobBytes = new Uint8Array(blob.buffer, blob.byteOffset, blob.byteLength);
    const expectedBlobHash = asset.blobHash;
    // Verify the stored (compressed) bytes before spending time decompressing.
    if (this.verifyChecksums && expectedBlobHash && sha256Hex(blobBytes) !== expectedBlobHash) {
      throw new Error(`Checkpoint asset blob checksum mismatch for ${asset.logicalPath}`);
    }

    // Legacy records lack `codec`: a present blobHash implies a gzip blob,
    // otherwise the bytes are assumed uncompressed.
    const codec = asset.codec ?? (asset.blobHash ? 'gzip' : 'none');
    const contentBytes = await decompressor.decompress(blobBytes, codec);
    const expectedContentHash = asset.contentHash ?? asset.hash;
    if (this.verifyChecksums && expectedContentHash && sha256Hex(contentBytes) !== expectedContentHash) {
      throw new Error(`Checkpoint asset content checksum mismatch for ${asset.logicalPath}`);
    }

    const cachedFile = await this.writeDecompressedCache(asset, storageDir, contentBytes);
    const safeLogicalPath = normalizeLogicalPath(asset.logicalPath);
    const destinationPath = join(root, safeLogicalPath);
    await fs.mkdir(dirname(destinationPath), { recursive: true });
    // Remove any leftover target so link/symlink cannot fail on EEXIST.
    await fs.rm(destinationPath, { force: true });
    await this.linkWithFallback(cachedFile, destinationPath);

    return normalizePath(relative(versionDir, destinationPath));
  }

  /**
   * Writes the decompressed bytes into a content-addressed cache shared
   * across versions (`<storageDir>/_assets/cache/<hash><ext>`). An existing
   * cache entry is reused; when checksum verification is on, a corrupted
   * entry is detected and overwritten.
   *
   * NOTE(review): `hash` is undefined when the record has neither
   * `contentHash` nor `hash`, producing an "undefined<ext>" cache name shared
   * by all such assets — confirm records always carry a content hash.
   */
  private async writeDecompressedCache(
    asset: CheckpointAssetRecord,
    storageDir: string,
    bytes: Uint8Array
  ): Promise<string> {
    const hash = asset.contentHash ?? asset.hash;
    const extension = extname(asset.logicalPath) || '.bin';
    const cacheFile = join(storageDir, '_assets/cache', `${hash}${extension}`);
    await fs.mkdir(dirname(cacheFile), { recursive: true });
    if (existsSync(cacheFile)) {
      if (!this.verifyChecksums) {
        return cacheFile;
      }
      const existing = await fs.readFile(cacheFile);
      const existingBytes = new Uint8Array(existing.buffer, existing.byteOffset, existing.byteLength);
      if (sha256Hex(existingBytes) === hash) {
        return cacheFile;
      }
      // Cache entry is corrupt: fall through and rewrite it.
    }
    await fs.writeFile(cacheFile, bytes);
    return cacheFile;
  }

  // Resolves the asset's blob location: absolute paths are used verbatim,
  // relative ones are rooted at the storage directory.
  private resolveSourcePath(asset: CheckpointAssetRecord, storageDir: string): string {
    const rawPath = asset.blobPath ?? asset.storedPath;
    if (isAbsolutePathPortable(rawPath)) {
      return rawPath;
    }
    return join(storageDir, rawPath);
  }

  // Places the cached file at the destination: hard link first (cheapest),
  // then symlink, then a full copy as the last resort (presumably for
  // cross-device or permission failures — errors are intentionally swallowed).
  private async linkWithFallback(sourcePath: string, destinationPath: string): Promise<void> {
    try {
      await fs.link(sourcePath, destinationPath);
      return;
    } catch {
      // Fall through to symlink/copy.
    }

    try {
      await fs.symlink(sourcePath, destinationPath);
      return;
    } catch {
      // Fall through to copy.
    }

    await fs.copyFile(sourcePath, destinationPath);
  }

  /**
   * mkdir-based mutex: creating the lock directory succeeds for exactly one
   * process; the rest poll until it disappears or `lockTimeoutMs` elapses.
   * NOTE(review): a crashed holder leaves a stale lock dir — waiters then sit
   * out the full timeout before erroring; confirm that is acceptable.
   */
  private async acquireLock(lockPath: string): Promise<void> {
    const deadline = Date.now() + this.lockTimeoutMs;
    while (Date.now() < deadline) {
      try {
        await fs.mkdir(lockPath);
        return;
      } catch (error) {
        if (!isAlreadyExists(error)) {
          throw error;
        }
      }
      await sleep(DEFAULT_LOCK_POLL_MS);
    }

    throw new Error(`Timed out waiting for checkpoint asset lock: ${lockPath}`);
  }

  /**
   * True when a previous materialization is valid: the marker file exists,
   * its fingerprint matches the current asset set, and every asset's file is
   * present under `materializedRoot`. Any read/parse failure counts as
   * not-ready rather than throwing.
   */
  private async isReady(
    markerPath: string,
    fingerprint: string,
    assets: CheckpointAssetRecord[],
    materializedRoot: string
  ): Promise<boolean> {
    if (!existsSync(markerPath)) {
      return false;
    }

    try {
      const marker = JSON.parse(await fs.readFile(markerPath, 'utf-8')) as MaterializedMarker;
      if (marker.fingerprint !== fingerprint) {
        return false;
      }
      for (const asset of assets) {
        const expectedPath = join(materializedRoot, normalizeLogicalPath(asset.logicalPath));
        if (!existsSync(expectedPath)) {
          return false;
        }
      }
      return true;
    } catch {
      return false;
    }
  }
}
|
|
198
|
+
|
|
199
|
+
function normalizePath(path: string): string {
|
|
200
|
+
return path.replace(/\\/g, '/');
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
function isAlreadyExists(error: unknown): boolean {
|
|
204
|
+
return (
|
|
205
|
+
typeof error === 'object' && error !== null && 'code' in error && (error as { code?: string }).code === 'EEXIST'
|
|
206
|
+
);
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
function sleep(ms: number): Promise<void> {
|
|
210
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
211
|
+
}
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import { promises as fs } from 'node:fs';
|
|
2
|
+
import { join, relative } from 'node:path';
|
|
3
|
+
import type { CheckpointCompressionCodec } from '../types';
|
|
4
|
+
import { sha256Hex } from './hasher';
|
|
5
|
+
import type { CheckpointArtifactPackageRecord } from './types';
|
|
6
|
+
|
|
7
|
+
const ARCHIVE_DIR = '_archives';
|
|
8
|
+
|
|
9
|
+
export class CheckpointPackager {
|
|
10
|
+
private storageDir: string;
|
|
11
|
+
private codec: CheckpointCompressionCodec;
|
|
12
|
+
|
|
13
|
+
constructor(storageDir: string, codec: CheckpointCompressionCodec = 'gzip') {
|
|
14
|
+
this.storageDir = storageDir;
|
|
15
|
+
this.codec = codec;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
async packageVersion(version: string): Promise<CheckpointArtifactPackageRecord> {
|
|
19
|
+
const versionDir = join(this.storageDir, version);
|
|
20
|
+
const archiveRelPath = join(ARCHIVE_DIR, `${version}${this.archiveSuffix()}`).replace(/\\/g, '/');
|
|
21
|
+
const archivePath = join(this.storageDir, archiveRelPath);
|
|
22
|
+
await fs.mkdir(join(this.storageDir, ARCHIVE_DIR), { recursive: true });
|
|
23
|
+
|
|
24
|
+
const files = await collectFiles(versionDir);
|
|
25
|
+
const archiveBytes = await this.buildArchiveBytes(versionDir, archivePath, files);
|
|
26
|
+
|
|
27
|
+
return {
|
|
28
|
+
archivePath: archiveRelPath,
|
|
29
|
+
archiveHash: sha256Hex(archiveBytes),
|
|
30
|
+
archiveSize: archiveBytes.byteLength,
|
|
31
|
+
codec: this.codec,
|
|
32
|
+
};
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
private async buildArchiveBytes(versionDir: string, archivePath: string, files: string[]): Promise<Uint8Array> {
|
|
36
|
+
const ArchiveCtor = (Bun as any).Archive;
|
|
37
|
+
if (typeof ArchiveCtor === 'function') {
|
|
38
|
+
const archiveEntries = Object.fromEntries(
|
|
39
|
+
files.map((filePath) => {
|
|
40
|
+
const rel = relative(versionDir, filePath).replace(/\\/g, '/');
|
|
41
|
+
return [rel, Bun.file(filePath)];
|
|
42
|
+
})
|
|
43
|
+
);
|
|
44
|
+
|
|
45
|
+
const archive = new ArchiveCtor(archiveEntries);
|
|
46
|
+
const tarBytes = new Uint8Array(await archive.bytes());
|
|
47
|
+
const archiveBytes = this.codec === 'gzip' ? Bun.gzipSync(tarBytes) : tarBytes;
|
|
48
|
+
await Bun.write(archivePath, archiveBytes);
|
|
49
|
+
return archiveBytes;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
await this.buildArchiveWithTar(versionDir, archivePath, files);
|
|
53
|
+
const bytes = await fs.readFile(archivePath);
|
|
54
|
+
return new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
private async buildArchiveWithTar(versionDir: string, archivePath: string, files: string[]): Promise<void> {
|
|
58
|
+
const relFiles = files.map((filePath) => relative(versionDir, filePath));
|
|
59
|
+
if (relFiles.length === 0) {
|
|
60
|
+
throw new Error(`Cannot package checkpoint: no files found in "${versionDir}"`);
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
const tarArgs = this.codec === 'gzip' ? ['-czf', archivePath] : ['-cf', archivePath];
|
|
64
|
+
const proc = Bun.spawn(['tar', ...tarArgs, '-C', versionDir, ...relFiles], {
|
|
65
|
+
stdout: 'pipe',
|
|
66
|
+
stderr: 'pipe',
|
|
67
|
+
});
|
|
68
|
+
const exitCode = await proc.exited;
|
|
69
|
+
if (exitCode === 0) {
|
|
70
|
+
return;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
const stderr = await new Response(proc.stderr).text();
|
|
74
|
+
throw new Error(`Failed to package checkpoint archive with tar (exit ${exitCode}): ${stderr.trim()}`);
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
private archiveSuffix(): '.tar' | '.tar.gz' {
|
|
78
|
+
return this.codec === 'gzip' ? '.tar.gz' : '.tar';
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
async function collectFiles(root: string): Promise<string[]> {
|
|
83
|
+
const files: string[] = [];
|
|
84
|
+
await walk(root, files);
|
|
85
|
+
return files.filter((filePath) => relative(root, filePath).replace(/\\/g, '/') !== 'manifest.json');
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
async function walk(dir: string, files: string[]): Promise<void> {
|
|
89
|
+
const entries = await fs.readdir(dir, { withFileTypes: true });
|
|
90
|
+
for (const entry of entries) {
|
|
91
|
+
const fullPath = join(dir, entry.name);
|
|
92
|
+
if (entry.isDirectory()) {
|
|
93
|
+
await walk(fullPath, files);
|
|
94
|
+
continue;
|
|
95
|
+
}
|
|
96
|
+
if (entry.isFile()) {
|
|
97
|
+
files.push(fullPath);
|
|
98
|
+
}
|
|
99
|
+
}
|
|
100
|
+
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { existsSync, promises as fs } from 'node:fs';
|
|
2
|
+
import { dirname, isAbsolute, join } from 'node:path';
|
|
3
|
+
import type { CheckpointAssetRecord } from '../types';
|
|
4
|
+
import type { CheckpointArtifactRepository } from './types';
|
|
5
|
+
|
|
6
|
+
export class LocalCheckpointArtifactRepository implements CheckpointArtifactRepository {
|
|
7
|
+
private storageDir: string;
|
|
8
|
+
|
|
9
|
+
constructor(storageDir: string) {
|
|
10
|
+
this.storageDir = storageDir;
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
async writeBlob(record: CheckpointAssetRecord, bytes: Uint8Array): Promise<void> {
|
|
14
|
+
const targetPath = this.resolveBlobPath(record);
|
|
15
|
+
await fs.mkdir(dirname(targetPath), { recursive: true });
|
|
16
|
+
if (existsSync(targetPath)) {
|
|
17
|
+
return;
|
|
18
|
+
}
|
|
19
|
+
await fs.writeFile(targetPath, bytes);
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
async hasBlob(record: CheckpointAssetRecord): Promise<boolean> {
|
|
23
|
+
return existsSync(this.resolveBlobPath(record));
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
async readBlob(record: CheckpointAssetRecord): Promise<Uint8Array> {
|
|
27
|
+
const targetPath = this.resolveBlobPath(record);
|
|
28
|
+
const bytes = await fs.readFile(targetPath);
|
|
29
|
+
return new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
resolveBlobPath(record: CheckpointAssetRecord): string {
|
|
33
|
+
const rawPath = record.blobPath ?? record.storedPath;
|
|
34
|
+
return rawPath.startsWith('/') ? rawPath : join(this.storageDir, rawPath);
|
|
35
|
+
}
|
|
36
|
+
}
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
import { existsSync, promises as fs } from 'node:fs';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import type { CheckpointAssetRecord, CheckpointCompressionCodec } from '../types';
|
|
4
|
+
import { compressBytes, DEFAULT_ASSET_CODEC } from './compressor';
|
|
5
|
+
import { sha256Hex } from './hasher';
|
|
6
|
+
import { normalizeLogicalPath, normalizeRelativePath } from './manifest';
|
|
7
|
+
import type { CheckpointArtifactStoreOptions } from './types';
|
|
8
|
+
|
|
9
|
+
const BLOB_DIR = '_assets/blobs';
|
|
10
|
+
|
|
11
|
+
export class CheckpointArtifactStore {
|
|
12
|
+
private storageDir: string;
|
|
13
|
+
private codec: CheckpointCompressionCodec;
|
|
14
|
+
|
|
15
|
+
constructor(storageDir: string, options: CheckpointArtifactStoreOptions = {}) {
|
|
16
|
+
this.storageDir = storageDir;
|
|
17
|
+
this.codec = options.assetCodec ?? DEFAULT_ASSET_CODEC;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
async addEmbedded(logicalPath: string, sourcePath: string): Promise<CheckpointAssetRecord> {
|
|
21
|
+
return this.addAsset('embedded', logicalPath, sourcePath);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
async addSidecar(logicalPath: string, sourcePath: string): Promise<CheckpointAssetRecord> {
|
|
25
|
+
return this.addAsset('sidecar', logicalPath, sourcePath);
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
async collect(embeddedPaths: string[], sidecarPaths: string[]): Promise<CheckpointAssetRecord[]> {
|
|
29
|
+
const records: CheckpointAssetRecord[] = [];
|
|
30
|
+
|
|
31
|
+
for (const sourcePath of embeddedPaths) {
|
|
32
|
+
records.push(await this.addEmbedded(sourcePath, sourcePath));
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
for (const sourcePath of sidecarPaths) {
|
|
36
|
+
records.push(await this.addSidecar(sourcePath, sourcePath));
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
return records;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
private async addAsset(
|
|
43
|
+
type: CheckpointAssetRecord['type'],
|
|
44
|
+
logicalPath: string,
|
|
45
|
+
sourcePath: string
|
|
46
|
+
): Promise<CheckpointAssetRecord> {
|
|
47
|
+
const content = await fs.readFile(sourcePath);
|
|
48
|
+
const contentBytes = new Uint8Array(content.buffer, content.byteOffset, content.byteLength);
|
|
49
|
+
const contentHash = sha256Hex(contentBytes);
|
|
50
|
+
const compressed = compressBytes(contentBytes, this.codec);
|
|
51
|
+
const blobHash = sha256Hex(compressed);
|
|
52
|
+
const blobPath = normalizeRelativePath(join(BLOB_DIR, `${blobHash}${this.codec === 'gzip' ? '.gz' : ''}`));
|
|
53
|
+
const storedPath = join(this.storageDir, blobPath);
|
|
54
|
+
|
|
55
|
+
await fs.mkdir(join(this.storageDir, BLOB_DIR), { recursive: true });
|
|
56
|
+
if (!existsSync(storedPath)) {
|
|
57
|
+
await this.writeAtomically(storedPath, compressed);
|
|
58
|
+
} else {
|
|
59
|
+
const existing = await fs.readFile(storedPath);
|
|
60
|
+
const existingBytes = new Uint8Array(existing.buffer, existing.byteOffset, existing.byteLength);
|
|
61
|
+
if (sha256Hex(existingBytes) !== blobHash) {
|
|
62
|
+
await this.writeAtomically(storedPath, compressed);
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
return {
|
|
67
|
+
type,
|
|
68
|
+
logicalPath: normalizeLogicalPath(logicalPath),
|
|
69
|
+
storedPath,
|
|
70
|
+
hash: contentHash,
|
|
71
|
+
size: content.byteLength,
|
|
72
|
+
contentHash,
|
|
73
|
+
contentSize: content.byteLength,
|
|
74
|
+
blobHash,
|
|
75
|
+
blobSize: compressed.byteLength,
|
|
76
|
+
blobPath,
|
|
77
|
+
codec: this.codec,
|
|
78
|
+
};
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
private async writeAtomically(path: string, bytes: Uint8Array): Promise<void> {
|
|
82
|
+
const tempPath = `${path}.tmp.${process.pid}.${Date.now()}`;
|
|
83
|
+
await fs.writeFile(tempPath, bytes);
|
|
84
|
+
try {
|
|
85
|
+
await fs.rename(tempPath, path);
|
|
86
|
+
} catch (error) {
|
|
87
|
+
if (!isAlreadyExists(error)) {
|
|
88
|
+
throw error;
|
|
89
|
+
}
|
|
90
|
+
await fs.rm(path, { force: true });
|
|
91
|
+
await fs.rename(tempPath, path);
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
function isAlreadyExists(error: unknown): boolean {
|
|
97
|
+
if (typeof error !== 'object' || error === null || !('code' in error)) {
|
|
98
|
+
return false;
|
|
99
|
+
}
|
|
100
|
+
const code = (error as { code?: string }).code;
|
|
101
|
+
return code === 'EEXIST' || code === 'EPERM';
|
|
102
|
+
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import type { CheckpointAssetRecord, CheckpointCompressionCodec } from '../types';
|
|
2
|
+
|
|
3
|
+
/** Construction options for CheckpointArtifactStore. */
export interface CheckpointArtifactStoreOptions {
  /** Codec used to compress stored blobs; the store falls back to its default codec when omitted. */
  assetCodec?: CheckpointCompressionCodec;
}

/** Construction options for CheckpointArtifactMaterializer. */
export interface CheckpointArtifactMaterializerOptions {
  /** Verify sha256 checksums of blobs and decompressed content (default true). */
  verifyChecksums?: boolean;
  /** Name of the per-version output directory for materialized files (default '_materialized'). */
  materializedDirName?: string;
  /** Max wait for a competing materializer's lock before failing (default 15 000 ms). */
  lockTimeoutMs?: number;
}

/** Result of packaging one checkpoint version into a single archive. */
export interface CheckpointArtifactPackageRecord {
  /** Archive location relative to the storage dir, forward-slash separated. */
  archivePath: string;
  /** sha256 hex digest of the archive bytes. */
  archiveHash: string;
  /** Archive size in bytes. */
  archiveSize: number;
  /** Compression codec the archive was written with. */
  codec: CheckpointCompressionCodec;
}

/** Abstraction over blob storage for checkpoint assets (local disk today). */
export interface CheckpointArtifactRepository {
  /** Persists `bytes` for `record`; implementations may skip when the blob already exists. */
  writeBlob(record: CheckpointAssetRecord, bytes: Uint8Array): Promise<void>;
  /** True when the blob for `record` is already stored. */
  hasBlob(record: CheckpointAssetRecord): Promise<boolean>;
  /** Reads the stored (compressed) blob bytes for `record`. */
  readBlob(record: CheckpointAssetRecord): Promise<Uint8Array>;
}
|
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
import { availableParallelism, cpus } from 'node:os';
|
|
2
|
+
import type { CheckpointCompressionCodec } from '../types';
|
|
3
|
+
|
|
4
|
+
/**
 * Message posted to a decompression worker. The `input` buffer is transferred
 * (not copied) to the worker, so the sender must not reuse it afterwards.
 */
interface DecompressRequest {
  /** Job id, correlates this request with the worker's response. */
  id: number;
  /** Codec to decode with; 'none' is short-circuited before reaching a worker. */
  codec: CheckpointCompressionCodec;
  /** Compressed bytes (ownership moves to the worker via transfer). */
  input: ArrayBuffer;
}

/** Message a worker posts back: `output` on success, `error` on failure. */
interface DecompressResponse {
  /** Echoes DecompressRequest.id. */
  id: number;
  /** Decompressed bytes when decoding succeeded. */
  output?: ArrayBuffer;
  /** Human-readable failure message when decoding failed. */
  error?: string;
}

/** A queued decompression job together with its promise settlement callbacks. */
interface DecompressJob {
  id: number;
  request: DecompressRequest;
  resolve: (output: Uint8Array) => void;
  reject: (error: Error) => void;
}

// Cap on the default worker-pool size — presumably applied by
// resolveDefaultWorkerCount() (definition not visible here); TODO confirm.
const DEFAULT_MAX_WORKERS = 4;
|
|
24
|
+
|
|
25
|
+
/**
 * Pool of workers that decompress checkpoint payloads off the main thread.
 *
 * Jobs are queued FIFO and dispatched to idle workers; each input buffer is
 * copied once, then transferred (zero-copy) to the worker. A single worker
 * crash rejects every pending job and tears down the entire pool.
 */
export class CheckpointWorkerDecompressor {
  // All workers ever created (until dispose), busy or idle.
  private workers: Worker[] = [];
  // Workers currently waiting for a job.
  private idleWorkers: Worker[] = [];
  // Jobs waiting for an idle worker, in submission order.
  private queue: DecompressJob[] = [];
  // The in-flight job for each busy worker.
  private activeJobsByWorker: Map<Worker, DecompressJob> = new Map();
  // Monotonic id used to correlate requests with responses.
  private nextJobId = 1;
  // Once true, new decompress() calls throw and all workers are terminated.
  private disposed = false;

  /**
   * @param workerCount Desired pool size; non-finite or non-positive values
   *   are clamped to 1. Defaults to cores-minus-one capped at
   *   DEFAULT_MAX_WORKERS.
   */
  constructor(workerCount: number = resolveDefaultWorkerCount()) {
    const normalizedCount = normalizeWorkerCount(workerCount);
    const workerUrl = resolveWorkerModuleUrl();

    for (let i = 0; i < normalizedCount; i++) {
      // NOTE(review): uses the Web-Worker-style API (onmessage/onerror) with a
      // URL string — assumes a runtime (e.g. Bun/browser) that provides it;
      // confirm this is not run under Node's worker_threads.
      const worker = new Worker(workerUrl.href);
      worker.onmessage = (event) => this.handleWorkerMessage(worker, event);
      worker.onerror = (event) => this.handleWorkerError(worker, event);
      this.workers.push(worker);
      this.idleWorkers.push(worker);
    }
  }

  /**
   * Decompresses `input` on a pooled worker.
   *
   * @param input Compressed bytes; never mutated (a copy is transferred).
   * @param codec Codec the bytes were written with. 'none' short-circuits to a
   *   plain copy without touching a worker.
   * @returns The decompressed bytes.
   * @throws Error if the pool is already disposed, if the worker reports a
   *   decompression failure, or if a worker crashes.
   */
  async decompress(input: Uint8Array, codec: CheckpointCompressionCodec): Promise<Uint8Array> {
    if (codec === 'none') {
      // Still copy so the caller's buffer and the result are independent.
      return new Uint8Array(input);
    }

    if (this.disposed) {
      throw new Error('Checkpoint worker decompressor is disposed');
    }

    // Copy into a fresh buffer: the copy's buffer is transferred (detached)
    // when the job is dispatched, leaving the caller's view intact.
    const copied = new Uint8Array(input);

    return await new Promise<Uint8Array>((resolve, reject) => {
      const id = this.nextJobId++;
      this.queue.push({
        id,
        request: {
          id,
          codec,
          input: copied.buffer,
        },
        resolve,
        reject,
      });
      this.pump();
    });
  }

  /**
   * Idempotently shuts the pool down: rejects all queued and in-flight jobs,
   * then terminates every worker (termination failures are ignored).
   */
  async dispose(): Promise<void> {
    if (this.disposed) {
      return;
    }
    this.disposed = true;

    const error = new Error('Checkpoint worker decompressor disposed');
    this.failAll(error);

    for (const worker of this.workers) {
      try {
        worker.terminate();
      } catch {
        // Ignore termination failures.
      }
    }

    this.workers = [];
    this.idleWorkers = [];
    this.activeJobsByWorker.clear();
  }

  /** Dispatches queued jobs to idle workers until one side runs out. */
  private pump(): void {
    while (this.idleWorkers.length > 0 && this.queue.length > 0) {
      const worker = this.idleWorkers.pop()!;
      const job = this.queue.shift()!;
      this.activeJobsByWorker.set(worker, job);
      // Transfer list hands buffer ownership to the worker (zero-copy).
      worker.postMessage(job.request, [job.request.input]);
    }
  }

  /** Settles the worker's active job from its response and recycles the worker. */
  private handleWorkerMessage(worker: Worker, event: MessageEvent<DecompressResponse>): void {
    const job = this.activeJobsByWorker.get(worker);
    this.activeJobsByWorker.delete(worker);

    // Don't return a worker to the idle pool after teardown.
    if (!this.disposed) {
      this.idleWorkers.push(worker);
    }

    // No active job (e.g. a late message after failAll) — just keep pumping.
    if (!job) {
      this.pump();
      return;
    }

    const message = event.data;
    if (message.error) {
      job.reject(new Error(message.error));
    } else if (message.output instanceof ArrayBuffer) {
      job.resolve(new Uint8Array(message.output));
    } else {
      job.reject(new Error('Worker returned no output'));
    }

    this.pump();
  }

  /**
   * Handles a worker crash: rejects its active job, then fails every other
   * pending job and disposes the whole pool (a crashed worker is not trusted
   * to keep serving, and the pool does not respawn workers).
   */
  private handleWorkerError(worker: Worker, event: ErrorEvent): void {
    const job = this.activeJobsByWorker.get(worker);
    this.activeJobsByWorker.delete(worker);
    this.idleWorkers = this.idleWorkers.filter((candidate) => candidate !== worker);

    const message = event.message?.trim() || 'Checkpoint decompression worker crashed';
    const error = new Error(message);

    if (job) {
      job.reject(error);
    }

    this.failAll(error);
    this.dispose().catch(() => {
      // Ignore cleanup failures after worker error.
    });
  }

  /** Rejects every queued and in-flight job with `error` and clears both sets. */
  private failAll(error: Error): void {
    const queued = this.queue.splice(0, this.queue.length);
    for (const job of queued) {
      job.reject(error);
    }

    for (const job of this.activeJobsByWorker.values()) {
      job.reject(error);
    }
    this.activeJobsByWorker.clear();
  }
}
|
|
159
|
+
|
|
160
|
+
function resolveDefaultWorkerCount(): number {
|
|
161
|
+
const cores = resolveCoreCount();
|
|
162
|
+
const reserveForMainThread = Math.max(1, cores - 1);
|
|
163
|
+
return Math.max(1, Math.min(DEFAULT_MAX_WORKERS, reserveForMainThread));
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
function resolveCoreCount(): number {
|
|
167
|
+
try {
|
|
168
|
+
const parallelism = availableParallelism();
|
|
169
|
+
if (Number.isFinite(parallelism) && parallelism > 0) {
|
|
170
|
+
return parallelism;
|
|
171
|
+
}
|
|
172
|
+
} catch {
|
|
173
|
+
// Fall through to cpus().
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
const cpuCount = cpus().length;
|
|
177
|
+
return Number.isFinite(cpuCount) && cpuCount > 0 ? cpuCount : 1;
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
function normalizeWorkerCount(value: number): number {
|
|
181
|
+
if (!Number.isFinite(value) || value <= 0) {
|
|
182
|
+
return 1;
|
|
183
|
+
}
|
|
184
|
+
return Math.max(1, Math.floor(value));
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
function resolveWorkerModuleUrl(): URL {
|
|
188
|
+
if (import.meta.url.endsWith('.ts')) {
|
|
189
|
+
return new URL('./decompress-worker.ts', import.meta.url);
|
|
190
|
+
}
|
|
191
|
+
return new URL('./decompress-worker.js', import.meta.url);
|
|
192
|
+
}
|