vector-framework 1.2.2 → 1.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -6
- package/dist/auth/protected.d.ts +4 -4
- package/dist/auth/protected.d.ts.map +1 -1
- package/dist/auth/protected.js +10 -7
- package/dist/auth/protected.js.map +1 -1
- package/dist/cache/manager.d.ts +2 -0
- package/dist/cache/manager.d.ts.map +1 -1
- package/dist/cache/manager.js +21 -4
- package/dist/cache/manager.js.map +1 -1
- package/dist/checkpoint/artifacts/compressor.d.ts +5 -0
- package/dist/checkpoint/artifacts/compressor.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/compressor.js +24 -0
- package/dist/checkpoint/artifacts/compressor.js.map +1 -0
- package/dist/checkpoint/artifacts/decompress-worker.d.ts +2 -0
- package/dist/checkpoint/artifacts/decompress-worker.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/decompress-worker.js +31 -0
- package/dist/checkpoint/artifacts/decompress-worker.js.map +1 -0
- package/dist/checkpoint/artifacts/hasher.d.ts +2 -0
- package/dist/checkpoint/artifacts/hasher.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/hasher.js +7 -0
- package/dist/checkpoint/artifacts/hasher.js.map +1 -0
- package/dist/checkpoint/artifacts/manifest.d.ts +6 -0
- package/dist/checkpoint/artifacts/manifest.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/manifest.js +55 -0
- package/dist/checkpoint/artifacts/manifest.js.map +1 -0
- package/dist/checkpoint/artifacts/materializer.d.ts +16 -0
- package/dist/checkpoint/artifacts/materializer.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/materializer.js +168 -0
- package/dist/checkpoint/artifacts/materializer.js.map +1 -0
- package/dist/checkpoint/artifacts/packager.d.ts +12 -0
- package/dist/checkpoint/artifacts/packager.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/packager.js +82 -0
- package/dist/checkpoint/artifacts/packager.js.map +1 -0
- package/dist/checkpoint/artifacts/repository.d.ts +11 -0
- package/dist/checkpoint/artifacts/repository.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/repository.js +29 -0
- package/dist/checkpoint/artifacts/repository.js.map +1 -0
- package/dist/checkpoint/artifacts/store.d.ts +13 -0
- package/dist/checkpoint/artifacts/store.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/store.js +85 -0
- package/dist/checkpoint/artifacts/store.js.map +1 -0
- package/dist/checkpoint/artifacts/types.d.ts +21 -0
- package/dist/checkpoint/artifacts/types.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/types.js +2 -0
- package/dist/checkpoint/artifacts/types.js.map +1 -0
- package/dist/checkpoint/artifacts/worker-decompressor.d.ts +17 -0
- package/dist/checkpoint/artifacts/worker-decompressor.d.ts.map +1 -0
- package/dist/checkpoint/artifacts/worker-decompressor.js +148 -0
- package/dist/checkpoint/artifacts/worker-decompressor.js.map +1 -0
- package/dist/checkpoint/asset-store.d.ts +10 -0
- package/dist/checkpoint/asset-store.d.ts.map +1 -0
- package/dist/checkpoint/asset-store.js +46 -0
- package/dist/checkpoint/asset-store.js.map +1 -0
- package/dist/checkpoint/bundler.d.ts +15 -0
- package/dist/checkpoint/bundler.d.ts.map +1 -0
- package/dist/checkpoint/bundler.js +45 -0
- package/dist/checkpoint/bundler.js.map +1 -0
- package/dist/checkpoint/cli.d.ts +2 -0
- package/dist/checkpoint/cli.d.ts.map +1 -0
- package/dist/checkpoint/cli.js +157 -0
- package/dist/checkpoint/cli.js.map +1 -0
- package/dist/checkpoint/entrypoint-generator.d.ts +17 -0
- package/dist/checkpoint/entrypoint-generator.d.ts.map +1 -0
- package/dist/checkpoint/entrypoint-generator.js +251 -0
- package/dist/checkpoint/entrypoint-generator.js.map +1 -0
- package/dist/checkpoint/forwarder.d.ts +6 -0
- package/dist/checkpoint/forwarder.d.ts.map +1 -0
- package/dist/checkpoint/forwarder.js +74 -0
- package/dist/checkpoint/forwarder.js.map +1 -0
- package/dist/checkpoint/gateway.d.ts +11 -0
- package/dist/checkpoint/gateway.d.ts.map +1 -0
- package/dist/checkpoint/gateway.js +30 -0
- package/dist/checkpoint/gateway.js.map +1 -0
- package/dist/checkpoint/ipc.d.ts +12 -0
- package/dist/checkpoint/ipc.d.ts.map +1 -0
- package/dist/checkpoint/ipc.js +96 -0
- package/dist/checkpoint/ipc.js.map +1 -0
- package/dist/checkpoint/manager.d.ts +20 -0
- package/dist/checkpoint/manager.d.ts.map +1 -0
- package/dist/checkpoint/manager.js +214 -0
- package/dist/checkpoint/manager.js.map +1 -0
- package/dist/checkpoint/process-manager.d.ts +35 -0
- package/dist/checkpoint/process-manager.d.ts.map +1 -0
- package/dist/checkpoint/process-manager.js +203 -0
- package/dist/checkpoint/process-manager.js.map +1 -0
- package/dist/checkpoint/resolver.d.ts +25 -0
- package/dist/checkpoint/resolver.d.ts.map +1 -0
- package/dist/checkpoint/resolver.js +95 -0
- package/dist/checkpoint/resolver.js.map +1 -0
- package/dist/checkpoint/socket-path.d.ts +2 -0
- package/dist/checkpoint/socket-path.d.ts.map +1 -0
- package/dist/checkpoint/socket-path.js +51 -0
- package/dist/checkpoint/socket-path.js.map +1 -0
- package/dist/checkpoint/types.d.ts +54 -0
- package/dist/checkpoint/types.d.ts.map +1 -0
- package/dist/checkpoint/types.js +2 -0
- package/dist/checkpoint/types.js.map +1 -0
- package/dist/cli/index.js +10 -2
- package/dist/cli/index.js.map +1 -1
- package/dist/cli/option-resolution.d.ts +1 -1
- package/dist/cli/option-resolution.d.ts.map +1 -1
- package/dist/cli/option-resolution.js.map +1 -1
- package/dist/cli.js +3709 -328
- package/dist/core/config-loader.d.ts +1 -0
- package/dist/core/config-loader.d.ts.map +1 -1
- package/dist/core/config-loader.js +10 -2
- package/dist/core/config-loader.js.map +1 -1
- package/dist/core/router.d.ts +24 -3
- package/dist/core/router.d.ts.map +1 -1
- package/dist/core/router.js +398 -249
- package/dist/core/router.js.map +1 -1
- package/dist/core/server.d.ts +2 -0
- package/dist/core/server.d.ts.map +1 -1
- package/dist/core/server.js +22 -8
- package/dist/core/server.js.map +1 -1
- package/dist/core/vector.d.ts +3 -0
- package/dist/core/vector.d.ts.map +1 -1
- package/dist/core/vector.js +51 -1
- package/dist/core/vector.js.map +1 -1
- package/dist/dev/route-scanner.d.ts.map +1 -1
- package/dist/dev/route-scanner.js +2 -1
- package/dist/dev/route-scanner.js.map +1 -1
- package/dist/http.d.ts +32 -7
- package/dist/http.d.ts.map +1 -1
- package/dist/http.js +144 -13
- package/dist/http.js.map +1 -1
- package/dist/index.cjs +1297 -74
- package/dist/index.d.ts +3 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -2
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1296 -73
- package/dist/middleware/manager.d.ts +3 -3
- package/dist/middleware/manager.d.ts.map +1 -1
- package/dist/middleware/manager.js +9 -8
- package/dist/middleware/manager.js.map +1 -1
- package/dist/openapi/docs-ui.d.ts.map +1 -1
- package/dist/openapi/docs-ui.js +1097 -61
- package/dist/openapi/docs-ui.js.map +1 -1
- package/dist/openapi/generator.d.ts +2 -1
- package/dist/openapi/generator.d.ts.map +1 -1
- package/dist/openapi/generator.js +240 -7
- package/dist/openapi/generator.js.map +1 -1
- package/dist/types/index.d.ts +71 -28
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/index.js +24 -1
- package/dist/types/index.js.map +1 -1
- package/dist/utils/validation.d.ts.map +1 -1
- package/dist/utils/validation.js +3 -2
- package/dist/utils/validation.js.map +1 -1
- package/package.json +2 -1
- package/src/auth/protected.ts +11 -8
- package/src/cache/manager.ts +23 -4
- package/src/checkpoint/artifacts/compressor.ts +30 -0
- package/src/checkpoint/artifacts/decompress-worker.ts +49 -0
- package/src/checkpoint/artifacts/hasher.ts +6 -0
- package/src/checkpoint/artifacts/manifest.ts +72 -0
- package/src/checkpoint/artifacts/materializer.ts +211 -0
- package/src/checkpoint/artifacts/packager.ts +100 -0
- package/src/checkpoint/artifacts/repository.ts +36 -0
- package/src/checkpoint/artifacts/store.ts +102 -0
- package/src/checkpoint/artifacts/types.ts +24 -0
- package/src/checkpoint/artifacts/worker-decompressor.ts +192 -0
- package/src/checkpoint/asset-store.ts +61 -0
- package/src/checkpoint/bundler.ts +64 -0
- package/src/checkpoint/cli.ts +177 -0
- package/src/checkpoint/entrypoint-generator.ts +275 -0
- package/src/checkpoint/forwarder.ts +84 -0
- package/src/checkpoint/gateway.ts +40 -0
- package/src/checkpoint/ipc.ts +107 -0
- package/src/checkpoint/manager.ts +254 -0
- package/src/checkpoint/process-manager.ts +250 -0
- package/src/checkpoint/resolver.ts +124 -0
- package/src/checkpoint/socket-path.ts +61 -0
- package/src/checkpoint/types.ts +63 -0
- package/src/cli/index.ts +11 -2
- package/src/cli/option-resolution.ts +5 -1
- package/src/core/config-loader.ts +11 -2
- package/src/core/router.ts +505 -264
- package/src/core/server.ts +36 -9
- package/src/core/vector.ts +60 -1
- package/src/dev/route-scanner.ts +2 -1
- package/src/http.ts +219 -19
- package/src/index.ts +3 -2
- package/src/middleware/manager.ts +10 -10
- package/src/openapi/docs-ui.ts +1097 -61
- package/src/openapi/generator.ts +265 -6
- package/src/types/index.ts +83 -30
- package/src/utils/validation.ts +5 -3
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import { promises as fs } from 'node:fs';
|
|
2
|
+
import { join, relative } from 'node:path';
|
|
3
|
+
import type { CheckpointCompressionCodec } from '../types';
|
|
4
|
+
import { sha256Hex } from './hasher';
|
|
5
|
+
import type { CheckpointArtifactPackageRecord } from './types';
|
|
6
|
+
|
|
7
|
+
const ARCHIVE_DIR = '_archives';
|
|
8
|
+
|
|
9
|
+
export class CheckpointPackager {
|
|
10
|
+
private storageDir: string;
|
|
11
|
+
private codec: CheckpointCompressionCodec;
|
|
12
|
+
|
|
13
|
+
constructor(storageDir: string, codec: CheckpointCompressionCodec = 'gzip') {
|
|
14
|
+
this.storageDir = storageDir;
|
|
15
|
+
this.codec = codec;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
async packageVersion(version: string): Promise<CheckpointArtifactPackageRecord> {
|
|
19
|
+
const versionDir = join(this.storageDir, version);
|
|
20
|
+
const archiveRelPath = join(ARCHIVE_DIR, `${version}${this.archiveSuffix()}`).replace(/\\/g, '/');
|
|
21
|
+
const archivePath = join(this.storageDir, archiveRelPath);
|
|
22
|
+
await fs.mkdir(join(this.storageDir, ARCHIVE_DIR), { recursive: true });
|
|
23
|
+
|
|
24
|
+
const files = await collectFiles(versionDir);
|
|
25
|
+
const archiveBytes = await this.buildArchiveBytes(versionDir, archivePath, files);
|
|
26
|
+
|
|
27
|
+
return {
|
|
28
|
+
archivePath: archiveRelPath,
|
|
29
|
+
archiveHash: sha256Hex(archiveBytes),
|
|
30
|
+
archiveSize: archiveBytes.byteLength,
|
|
31
|
+
codec: this.codec,
|
|
32
|
+
};
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
private async buildArchiveBytes(versionDir: string, archivePath: string, files: string[]): Promise<Uint8Array> {
|
|
36
|
+
const ArchiveCtor = (Bun as any).Archive;
|
|
37
|
+
if (typeof ArchiveCtor === 'function') {
|
|
38
|
+
const archiveEntries = Object.fromEntries(
|
|
39
|
+
files.map((filePath) => {
|
|
40
|
+
const rel = relative(versionDir, filePath).replace(/\\/g, '/');
|
|
41
|
+
return [rel, Bun.file(filePath)];
|
|
42
|
+
})
|
|
43
|
+
);
|
|
44
|
+
|
|
45
|
+
const archive = new ArchiveCtor(archiveEntries);
|
|
46
|
+
const tarBytes = new Uint8Array(await archive.bytes());
|
|
47
|
+
const archiveBytes = this.codec === 'gzip' ? Bun.gzipSync(tarBytes) : tarBytes;
|
|
48
|
+
await Bun.write(archivePath, archiveBytes);
|
|
49
|
+
return archiveBytes;
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
await this.buildArchiveWithTar(versionDir, archivePath, files);
|
|
53
|
+
const bytes = await fs.readFile(archivePath);
|
|
54
|
+
return new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
private async buildArchiveWithTar(versionDir: string, archivePath: string, files: string[]): Promise<void> {
|
|
58
|
+
const relFiles = files.map((filePath) => relative(versionDir, filePath));
|
|
59
|
+
if (relFiles.length === 0) {
|
|
60
|
+
throw new Error(`Cannot package checkpoint: no files found in "${versionDir}"`);
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
const tarArgs = this.codec === 'gzip' ? ['-czf', archivePath] : ['-cf', archivePath];
|
|
64
|
+
const proc = Bun.spawn(['tar', ...tarArgs, '-C', versionDir, ...relFiles], {
|
|
65
|
+
stdout: 'pipe',
|
|
66
|
+
stderr: 'pipe',
|
|
67
|
+
});
|
|
68
|
+
const exitCode = await proc.exited;
|
|
69
|
+
if (exitCode === 0) {
|
|
70
|
+
return;
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
const stderr = await new Response(proc.stderr).text();
|
|
74
|
+
throw new Error(`Failed to package checkpoint archive with tar (exit ${exitCode}): ${stderr.trim()}`);
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
private archiveSuffix(): '.tar' | '.tar.gz' {
|
|
78
|
+
return this.codec === 'gzip' ? '.tar.gz' : '.tar';
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
async function collectFiles(root: string): Promise<string[]> {
|
|
83
|
+
const files: string[] = [];
|
|
84
|
+
await walk(root, files);
|
|
85
|
+
return files.filter((filePath) => relative(root, filePath).replace(/\\/g, '/') !== 'manifest.json');
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
async function walk(dir: string, files: string[]): Promise<void> {
|
|
89
|
+
const entries = await fs.readdir(dir, { withFileTypes: true });
|
|
90
|
+
for (const entry of entries) {
|
|
91
|
+
const fullPath = join(dir, entry.name);
|
|
92
|
+
if (entry.isDirectory()) {
|
|
93
|
+
await walk(fullPath, files);
|
|
94
|
+
continue;
|
|
95
|
+
}
|
|
96
|
+
if (entry.isFile()) {
|
|
97
|
+
files.push(fullPath);
|
|
98
|
+
}
|
|
99
|
+
}
|
|
100
|
+
}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { existsSync, promises as fs } from 'node:fs';
import { dirname, isAbsolute, join } from 'node:path';
import type { CheckpointAssetRecord } from '../types';
import type { CheckpointArtifactRepository } from './types';
|
|
5
|
+
|
|
6
|
+
export class LocalCheckpointArtifactRepository implements CheckpointArtifactRepository {
|
|
7
|
+
private storageDir: string;
|
|
8
|
+
|
|
9
|
+
constructor(storageDir: string) {
|
|
10
|
+
this.storageDir = storageDir;
|
|
11
|
+
}
|
|
12
|
+
|
|
13
|
+
async writeBlob(record: CheckpointAssetRecord, bytes: Uint8Array): Promise<void> {
|
|
14
|
+
const targetPath = this.resolveBlobPath(record);
|
|
15
|
+
await fs.mkdir(dirname(targetPath), { recursive: true });
|
|
16
|
+
if (existsSync(targetPath)) {
|
|
17
|
+
return;
|
|
18
|
+
}
|
|
19
|
+
await fs.writeFile(targetPath, bytes);
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
async hasBlob(record: CheckpointAssetRecord): Promise<boolean> {
|
|
23
|
+
return existsSync(this.resolveBlobPath(record));
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
async readBlob(record: CheckpointAssetRecord): Promise<Uint8Array> {
|
|
27
|
+
const targetPath = this.resolveBlobPath(record);
|
|
28
|
+
const bytes = await fs.readFile(targetPath);
|
|
29
|
+
return new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
resolveBlobPath(record: CheckpointAssetRecord): string {
|
|
33
|
+
const rawPath = record.blobPath ?? record.storedPath;
|
|
34
|
+
return rawPath.startsWith('/') ? rawPath : join(this.storageDir, rawPath);
|
|
35
|
+
}
|
|
36
|
+
}
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
import { existsSync, promises as fs } from 'node:fs';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import type { CheckpointAssetRecord, CheckpointCompressionCodec } from '../types';
|
|
4
|
+
import { compressBytes, DEFAULT_ASSET_CODEC } from './compressor';
|
|
5
|
+
import { sha256Hex } from './hasher';
|
|
6
|
+
import { normalizeLogicalPath, normalizeRelativePath } from './manifest';
|
|
7
|
+
import type { CheckpointArtifactStoreOptions } from './types';
|
|
8
|
+
|
|
9
|
+
const BLOB_DIR = '_assets/blobs';
|
|
10
|
+
|
|
11
|
+
export class CheckpointArtifactStore {
|
|
12
|
+
private storageDir: string;
|
|
13
|
+
private codec: CheckpointCompressionCodec;
|
|
14
|
+
|
|
15
|
+
constructor(storageDir: string, options: CheckpointArtifactStoreOptions = {}) {
|
|
16
|
+
this.storageDir = storageDir;
|
|
17
|
+
this.codec = options.assetCodec ?? DEFAULT_ASSET_CODEC;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
async addEmbedded(logicalPath: string, sourcePath: string): Promise<CheckpointAssetRecord> {
|
|
21
|
+
return this.addAsset('embedded', logicalPath, sourcePath);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
async addSidecar(logicalPath: string, sourcePath: string): Promise<CheckpointAssetRecord> {
|
|
25
|
+
return this.addAsset('sidecar', logicalPath, sourcePath);
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
async collect(embeddedPaths: string[], sidecarPaths: string[]): Promise<CheckpointAssetRecord[]> {
|
|
29
|
+
const records: CheckpointAssetRecord[] = [];
|
|
30
|
+
|
|
31
|
+
for (const sourcePath of embeddedPaths) {
|
|
32
|
+
records.push(await this.addEmbedded(sourcePath, sourcePath));
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
for (const sourcePath of sidecarPaths) {
|
|
36
|
+
records.push(await this.addSidecar(sourcePath, sourcePath));
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
return records;
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
private async addAsset(
|
|
43
|
+
type: CheckpointAssetRecord['type'],
|
|
44
|
+
logicalPath: string,
|
|
45
|
+
sourcePath: string
|
|
46
|
+
): Promise<CheckpointAssetRecord> {
|
|
47
|
+
const content = await fs.readFile(sourcePath);
|
|
48
|
+
const contentBytes = new Uint8Array(content.buffer, content.byteOffset, content.byteLength);
|
|
49
|
+
const contentHash = sha256Hex(contentBytes);
|
|
50
|
+
const compressed = compressBytes(contentBytes, this.codec);
|
|
51
|
+
const blobHash = sha256Hex(compressed);
|
|
52
|
+
const blobPath = normalizeRelativePath(join(BLOB_DIR, `${blobHash}${this.codec === 'gzip' ? '.gz' : ''}`));
|
|
53
|
+
const storedPath = join(this.storageDir, blobPath);
|
|
54
|
+
|
|
55
|
+
await fs.mkdir(join(this.storageDir, BLOB_DIR), { recursive: true });
|
|
56
|
+
if (!existsSync(storedPath)) {
|
|
57
|
+
await this.writeAtomically(storedPath, compressed);
|
|
58
|
+
} else {
|
|
59
|
+
const existing = await fs.readFile(storedPath);
|
|
60
|
+
const existingBytes = new Uint8Array(existing.buffer, existing.byteOffset, existing.byteLength);
|
|
61
|
+
if (sha256Hex(existingBytes) !== blobHash) {
|
|
62
|
+
await this.writeAtomically(storedPath, compressed);
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
return {
|
|
67
|
+
type,
|
|
68
|
+
logicalPath: normalizeLogicalPath(logicalPath),
|
|
69
|
+
storedPath,
|
|
70
|
+
hash: contentHash,
|
|
71
|
+
size: content.byteLength,
|
|
72
|
+
contentHash,
|
|
73
|
+
contentSize: content.byteLength,
|
|
74
|
+
blobHash,
|
|
75
|
+
blobSize: compressed.byteLength,
|
|
76
|
+
blobPath,
|
|
77
|
+
codec: this.codec,
|
|
78
|
+
};
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
private async writeAtomically(path: string, bytes: Uint8Array): Promise<void> {
|
|
82
|
+
const tempPath = `${path}.tmp.${process.pid}.${Date.now()}`;
|
|
83
|
+
await fs.writeFile(tempPath, bytes);
|
|
84
|
+
try {
|
|
85
|
+
await fs.rename(tempPath, path);
|
|
86
|
+
} catch (error) {
|
|
87
|
+
if (!isAlreadyExists(error)) {
|
|
88
|
+
throw error;
|
|
89
|
+
}
|
|
90
|
+
await fs.rm(path, { force: true });
|
|
91
|
+
await fs.rename(tempPath, path);
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
function isAlreadyExists(error: unknown): boolean {
|
|
97
|
+
if (typeof error !== 'object' || error === null || !('code' in error)) {
|
|
98
|
+
return false;
|
|
99
|
+
}
|
|
100
|
+
const code = (error as { code?: string }).code;
|
|
101
|
+
return code === 'EEXIST' || code === 'EPERM';
|
|
102
|
+
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import type { CheckpointAssetRecord, CheckpointCompressionCodec } from '../types';
|
|
2
|
+
|
|
3
|
+
/** Construction options for the checkpoint artifact store. */
export interface CheckpointArtifactStoreOptions {
  /** Compression codec for stored asset blobs; the store supplies a default when omitted. */
  assetCodec?: CheckpointCompressionCodec;
}

/**
 * Options for the checkpoint artifact materializer.
 * NOTE(review): the materializer implementation is not in view here; field
 * meanings below are inferred from names — confirm against materializer.ts.
 */
export interface CheckpointArtifactMaterializerOptions {
  /** Presumably re-verifies blob hashes during materialization. */
  verifyChecksums?: boolean;
  /** Presumably the directory name used for materialized output. */
  materializedDirName?: string;
  /** Presumably how long to wait on a materialization lock, in milliseconds. */
  lockTimeoutMs?: number;
}

/** Metadata describing a packaged checkpoint archive (produced by CheckpointPackager). */
export interface CheckpointArtifactPackageRecord {
  /** Archive location relative to the storage dir, with forward slashes. */
  archivePath: string;
  /** SHA-256 hex digest of the archive bytes. */
  archiveHash: string;
  /** Archive size in bytes. */
  archiveSize: number;
  /** Codec used for the archive ('gzip' yields .tar.gz, otherwise .tar). */
  codec: CheckpointCompressionCodec;
}

/** Storage backend abstraction for content-addressed checkpoint blobs. */
export interface CheckpointArtifactRepository {
  /** Persist the blob bytes for a record (implementations may no-op when already present). */
  writeBlob(record: CheckpointAssetRecord, bytes: Uint8Array): Promise<void>;
  /** Whether the record's blob is already stored. */
  hasBlob(record: CheckpointAssetRecord): Promise<boolean>;
  /** Read the record's blob bytes back. */
  readBlob(record: CheckpointAssetRecord): Promise<Uint8Array>;
}
|
|
@@ -0,0 +1,192 @@
|
|
|
1
|
+
import { availableParallelism, cpus } from 'node:os';
|
|
2
|
+
import type { CheckpointCompressionCodec } from '../types';
|
|
3
|
+
|
|
4
|
+
/**
 * Message posted to a decompression worker. The `input` buffer is passed in
 * the postMessage transfer list (see CheckpointWorkerDecompressor.pump), so
 * the sender loses access to it after posting.
 */
interface DecompressRequest {
  id: number;
  codec: CheckpointCompressionCodec;
  input: ArrayBuffer;
}

/**
 * Message received back from a worker. Carries `output` on success or
 * `error` on failure; the consumer checks `error` first.
 */
interface DecompressResponse {
  id: number;
  output?: ArrayBuffer;
  error?: string;
}

/** A pending decompression job: the request plus its promise callbacks. */
interface DecompressJob {
  id: number;
  request: DecompressRequest;
  resolve: (output: Uint8Array) => void;
  reject: (error: Error) => void;
}

/** Hard cap on the worker pool size, regardless of detected CPU count. */
const DEFAULT_MAX_WORKERS = 4;
|
|
24
|
+
|
|
25
|
+
/**
 * Fixed-size pool of Web Workers that decompress checkpoint payloads off the
 * main thread. Jobs are queued FIFO and dispatched one-at-a-time per worker;
 * after any worker-level error the entire pool is torn down via dispose().
 */
export class CheckpointWorkerDecompressor {
  // All workers ever created; used only for termination in dispose().
  private workers: Worker[] = [];
  // Workers with no in-flight job, available for dispatch.
  private idleWorkers: Worker[] = [];
  // Jobs waiting for an idle worker (FIFO).
  private queue: DecompressJob[] = [];
  // At most one in-flight job per worker; responses are matched by worker,
  // not by message id (a worker is assumed to answer its current job).
  private activeJobsByWorker: Map<Worker, DecompressJob> = new Map();
  private nextJobId = 1;
  private disposed = false;

  constructor(workerCount: number = resolveDefaultWorkerCount()) {
    const normalizedCount = normalizeWorkerCount(workerCount);
    const workerUrl = resolveWorkerModuleUrl();

    for (let i = 0; i < normalizedCount; i++) {
      const worker = new Worker(workerUrl.href);
      worker.onmessage = (event) => this.handleWorkerMessage(worker, event);
      worker.onerror = (event) => this.handleWorkerError(worker, event);
      this.workers.push(worker);
      this.idleWorkers.push(worker);
    }
  }

  /**
   * Decompress `input` with `codec` on a pool worker.
   * Codec 'none' returns a copy of the input without touching the pool.
   *
   * @throws Error when the pool has already been disposed.
   */
  async decompress(input: Uint8Array, codec: CheckpointCompressionCodec): Promise<Uint8Array> {
    if (codec === 'none') {
      return new Uint8Array(input);
    }

    if (this.disposed) {
      throw new Error('Checkpoint worker decompressor is disposed');
    }

    // Copy so the caller's buffer survives: pump() transfers ownership of
    // the request's ArrayBuffer to the worker.
    const copied = new Uint8Array(input);

    return await new Promise<Uint8Array>((resolve, reject) => {
      const id = this.nextJobId++;
      this.queue.push({
        id,
        request: {
          id,
          codec,
          input: copied.buffer,
        },
        resolve,
        reject,
      });
      this.pump();
    });
  }

  /**
   * Tear down the pool: reject every queued and in-flight job, terminate all
   * workers, and clear internal state. Idempotent.
   */
  async dispose(): Promise<void> {
    if (this.disposed) {
      return;
    }
    this.disposed = true;

    const error = new Error('Checkpoint worker decompressor disposed');
    this.failAll(error);

    for (const worker of this.workers) {
      try {
        worker.terminate();
      } catch {
        // Ignore termination failures.
      }
    }

    this.workers = [];
    this.idleWorkers = [];
    this.activeJobsByWorker.clear();
  }

  // Dispatch queued jobs onto idle workers until one of the lists empties.
  private pump(): void {
    while (this.idleWorkers.length > 0 && this.queue.length > 0) {
      const worker = this.idleWorkers.pop()!;
      const job = this.queue.shift()!;
      this.activeJobsByWorker.set(worker, job);
      // Second argument is the transfer list: the buffer moves to the worker.
      worker.postMessage(job.request, [job.request.input]);
    }
  }

  private handleWorkerMessage(worker: Worker, event: MessageEvent<DecompressResponse>): void {
    const job = this.activeJobsByWorker.get(worker);
    this.activeJobsByWorker.delete(worker);

    // Return the worker to the idle set BEFORE settling the job, so code run
    // from the job's promise callbacks can immediately reuse the pool —
    // unless dispose() has started, in which case the worker is dropped.
    if (!this.disposed) {
      this.idleWorkers.push(worker);
    }

    // No matched job (e.g. it was already rejected via failAll): just keep
    // the queue moving.
    if (!job) {
      this.pump();
      return;
    }

    const message = event.data;
    if (message.error) {
      job.reject(new Error(message.error));
    } else if (message.output instanceof ArrayBuffer) {
      job.resolve(new Uint8Array(message.output));
    } else {
      job.reject(new Error('Worker returned no output'));
    }

    this.pump();
  }

  // A worker-level error is treated as fatal for the whole pool: reject the
  // worker's own job, fail everything else, then dispose.
  private handleWorkerError(worker: Worker, event: ErrorEvent): void {
    const job = this.activeJobsByWorker.get(worker);
    // Deleting the job first means failAll() below cannot reject it twice.
    this.activeJobsByWorker.delete(worker);
    this.idleWorkers = this.idleWorkers.filter((candidate) => candidate !== worker);

    const message = event.message?.trim() || 'Checkpoint decompression worker crashed';
    const error = new Error(message);

    if (job) {
      job.reject(error);
    }

    this.failAll(error);
    this.dispose().catch(() => {
      // Ignore cleanup failures after worker error.
    });
  }

  // Reject every queued and in-flight job with `error` and clear both sets.
  private failAll(error: Error): void {
    const queued = this.queue.splice(0, this.queue.length);
    for (const job of queued) {
      job.reject(error);
    }

    for (const job of this.activeJobsByWorker.values()) {
      job.reject(error);
    }
    this.activeJobsByWorker.clear();
  }
}
|
|
159
|
+
|
|
160
|
+
function resolveDefaultWorkerCount(): number {
|
|
161
|
+
const cores = resolveCoreCount();
|
|
162
|
+
const reserveForMainThread = Math.max(1, cores - 1);
|
|
163
|
+
return Math.max(1, Math.min(DEFAULT_MAX_WORKERS, reserveForMainThread));
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
function resolveCoreCount(): number {
|
|
167
|
+
try {
|
|
168
|
+
const parallelism = availableParallelism();
|
|
169
|
+
if (Number.isFinite(parallelism) && parallelism > 0) {
|
|
170
|
+
return parallelism;
|
|
171
|
+
}
|
|
172
|
+
} catch {
|
|
173
|
+
// Fall through to cpus().
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
const cpuCount = cpus().length;
|
|
177
|
+
return Number.isFinite(cpuCount) && cpuCount > 0 ? cpuCount : 1;
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
function normalizeWorkerCount(value: number): number {
|
|
181
|
+
if (!Number.isFinite(value) || value <= 0) {
|
|
182
|
+
return 1;
|
|
183
|
+
}
|
|
184
|
+
return Math.max(1, Math.floor(value));
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
function resolveWorkerModuleUrl(): URL {
|
|
188
|
+
if (import.meta.url.endsWith('.ts')) {
|
|
189
|
+
return new URL('./decompress-worker.ts', import.meta.url);
|
|
190
|
+
}
|
|
191
|
+
return new URL('./decompress-worker.js', import.meta.url);
|
|
192
|
+
}
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import { promises as fs } from 'node:fs';
|
|
2
|
+
import type { CheckpointAssetRecord } from './types';
|
|
3
|
+
import { CheckpointArtifactStore } from './artifacts/store';
|
|
4
|
+
|
|
5
|
+
const EMBEDDED_PER_FILE_BUDGET = 64 * 1024; // 64 KB per file
|
|
6
|
+
const EMBEDDED_TOTAL_BUDGET = 512 * 1024; // 512 KB total
|
|
7
|
+
|
|
8
|
+
export class AssetStore {
|
|
9
|
+
private artifactStore: CheckpointArtifactStore;
|
|
10
|
+
|
|
11
|
+
constructor(storageDir: string) {
|
|
12
|
+
this.artifactStore = new CheckpointArtifactStore(storageDir);
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
async addEmbedded(logicalPath: string, sourcePath: string): Promise<CheckpointAssetRecord> {
|
|
16
|
+
const content = await fs.readFile(sourcePath);
|
|
17
|
+
if (content.byteLength > EMBEDDED_PER_FILE_BUDGET) {
|
|
18
|
+
throw new Error(
|
|
19
|
+
`Embedded asset "${logicalPath}" is ${formatBytes(content.byteLength)} — exceeds ${formatBytes(EMBEDDED_PER_FILE_BUDGET)} per-file budget. Use sidecar instead.`
|
|
20
|
+
);
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
return await this.artifactStore.addEmbedded(logicalPath, sourcePath);
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
async addSidecar(logicalPath: string, sourcePath: string): Promise<CheckpointAssetRecord> {
|
|
27
|
+
return await this.artifactStore.addSidecar(logicalPath, sourcePath);
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
async collect(embeddedPaths: string[], sidecarPaths: string[]): Promise<CheckpointAssetRecord[]> {
|
|
31
|
+
const records: CheckpointAssetRecord[] = [];
|
|
32
|
+
|
|
33
|
+
for (const p of embeddedPaths) {
|
|
34
|
+
records.push(await this.addEmbedded(p, p));
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
for (const p of sidecarPaths) {
|
|
38
|
+
records.push(await this.addSidecar(p, p));
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
return records;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
validateBudgets(records: CheckpointAssetRecord[]): void {
|
|
45
|
+
const embeddedTotal = records
|
|
46
|
+
.filter((r) => r.type === 'embedded')
|
|
47
|
+
.reduce((acc, r) => acc + (r.contentSize ?? r.size), 0);
|
|
48
|
+
|
|
49
|
+
if (embeddedTotal > EMBEDDED_TOTAL_BUDGET) {
|
|
50
|
+
throw new Error(
|
|
51
|
+
`Total embedded asset size ${formatBytes(embeddedTotal)} exceeds ${formatBytes(EMBEDDED_TOTAL_BUDGET)} budget.`
|
|
52
|
+
);
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
function formatBytes(bytes: number): string {
|
|
58
|
+
if (bytes < 1024) return `${bytes} B`;
|
|
59
|
+
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
|
|
60
|
+
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
|
|
61
|
+
}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import { existsSync } from 'node:fs';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
|
|
4
|
+
/** Input for CheckpointBundler.bundle(). */
export interface BundleOptions {
  /** Module path handed to Bun.build as the sole entrypoint. */
  entrypointPath: string;
  /** Directory Bun.build writes the bundle into. */
  outputDir: string;
  /** Output file name; defaults to 'checkpoint.js' when omitted. */
  outputFile?: string;
}

/** Result of a successful bundle. */
export interface BundleResult {
  /** Absolute/joined path of the emitted bundle file. */
  outputPath: string;
  /** SHA-256 hex digest of the bundle contents. */
  hash: string;
  /** Bundle size in bytes. */
  size: number;
}
|
|
15
|
+
|
|
16
|
+
export class CheckpointBundler {
|
|
17
|
+
async bundle(options: BundleOptions): Promise<BundleResult> {
|
|
18
|
+
const outfile = options.outputFile ?? 'checkpoint.js';
|
|
19
|
+
|
|
20
|
+
const result = await Bun.build({
|
|
21
|
+
entrypoints: [options.entrypointPath],
|
|
22
|
+
outdir: options.outputDir,
|
|
23
|
+
target: 'bun',
|
|
24
|
+
format: 'esm',
|
|
25
|
+
minify: true,
|
|
26
|
+
naming: { entry: outfile },
|
|
27
|
+
});
|
|
28
|
+
|
|
29
|
+
if (!result.success) {
|
|
30
|
+
const messages = result.logs.map((l) => l.message ?? String(l)).join('\n');
|
|
31
|
+
throw new Error(`Checkpoint bundle failed:\n${messages}`);
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
const outputPath = join(options.outputDir, outfile);
|
|
35
|
+
|
|
36
|
+
if (!existsSync(outputPath)) {
|
|
37
|
+
// Bun.build may output with a different name; find the actual output
|
|
38
|
+
const actualOutput = result.outputs.find((o) => o.kind === 'entry-point');
|
|
39
|
+
if (actualOutput) {
|
|
40
|
+
const actualPath = actualOutput.path;
|
|
41
|
+
if (existsSync(actualPath)) {
|
|
42
|
+
return this.hashFile(actualPath);
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
throw new Error(`Bundle output not found at expected path: ${outputPath}`);
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
return this.hashFile(outputPath);
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
private async hashFile(path: string): Promise<BundleResult> {
|
|
52
|
+
const file = Bun.file(path);
|
|
53
|
+
const content = await file.arrayBuffer();
|
|
54
|
+
const hashBuffer = Bun.SHA256.hash(new Uint8Array(content));
|
|
55
|
+
const hashBytes = new Uint8Array(hashBuffer.buffer, hashBuffer.byteOffset, hashBuffer.byteLength);
|
|
56
|
+
const hash = Buffer.from(hashBytes).toString('hex');
|
|
57
|
+
|
|
58
|
+
return {
|
|
59
|
+
outputPath: path,
|
|
60
|
+
hash,
|
|
61
|
+
size: content.byteLength,
|
|
62
|
+
};
|
|
63
|
+
}
|
|
64
|
+
}
|