@f0rbit/corpus 0.1.3 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/backend/cloudflare.d.ts +41 -1
- package/dist/backend/cloudflare.d.ts.map +1 -1
- package/dist/backend/cloudflare.js +69 -36
- package/dist/backend/file.d.ts +36 -1
- package/dist/backend/file.d.ts.map +1 -1
- package/dist/backend/file.js +55 -21
- package/dist/backend/layered.d.ts +38 -0
- package/dist/backend/layered.d.ts.map +1 -1
- package/dist/backend/layered.js +38 -0
- package/dist/backend/memory.d.ts +31 -1
- package/dist/backend/memory.d.ts.map +1 -1
- package/dist/backend/memory.js +41 -11
- package/dist/backends.d.ts +11 -0
- package/dist/backends.d.ts.map +1 -0
- package/dist/backends.js +9 -0
- package/dist/cloudflare.d.ts +2 -5
- package/dist/cloudflare.d.ts.map +1 -1
- package/dist/cloudflare.js +2 -5
- package/dist/codecs.d.ts +8 -0
- package/dist/codecs.d.ts.map +1 -0
- package/dist/codecs.js +6 -0
- package/dist/core.d.ts +9 -0
- package/dist/core.d.ts.map +1 -0
- package/dist/core.js +7 -0
- package/dist/corpus.d.ts +68 -1
- package/dist/corpus.d.ts.map +1 -1
- package/dist/corpus.js +194 -1
- package/dist/index.d.ts +3 -6
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +2 -5
- package/dist/schema.d.ts +27 -0
- package/dist/schema.d.ts.map +1 -1
- package/dist/schema.js +27 -0
- package/dist/sst.d.ts +38 -0
- package/dist/sst.d.ts.map +1 -1
- package/dist/sst.js +38 -0
- package/dist/types.d.ts +229 -1
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js +91 -2
- package/dist/utils.d.ts +133 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +174 -0
- package/package.json +5 -2
- package/dist/codec.d.ts +0 -9
- package/dist/codec.d.ts.map +0 -1
- package/dist/codec.js +0 -21
- package/dist/hash.d.ts +0 -2
- package/dist/hash.d.ts.map +0 -1
- package/dist/hash.js +0 -5
- package/dist/store.d.ts +0 -3
- package/dist/store.d.ts.map +0 -1
- package/dist/store.js +0 -125
- package/dist/version.d.ts +0 -7
- package/dist/version.d.ts.map +0 -1
- package/dist/version.js +0 -31
package/dist/utils.js
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @module Utilities
|
|
3
|
+
* @description Utility functions for hashing, versioning, and codecs.
|
|
4
|
+
*/
|
|
5
|
+
/**
 * Computes the SHA-256 hash of binary data.
 * @category Utilities
 * @group Hashing
 *
 * Produces a 64-character lowercase hexadecimal digest. Used internally
 * for content-addressable storage and deduplication.
 *
 * @param data - The binary data to hash
 * @returns A lowercase hex string of the SHA-256 hash
 *
 * @example
 * ```ts
 * const hash = await compute_hash(new TextEncoder().encode('Hello, world!'))
 * // => '315f5bdb76d078c43b8ac0064e4a0164612b1fce77c869345bfc94c75894edd3'
 * ```
 */
export async function compute_hash(data) {
    const digest = await crypto.subtle.digest('SHA-256', data);
    // build the hex string byte-by-byte, zero-padding each octet to two digits
    let hex = '';
    for (const byte of new Uint8Array(digest)) {
        hex += byte.toString(16).padStart(2, '0');
    }
    return hex;
}
|
|
28
|
+
// Module-level state for same-millisecond collision handling:
// last_timestamp remembers the ms of the previous call, sequence counts
// how many versions have been issued within that same millisecond.
let last_timestamp = 0;
let sequence = 0;
/**
 * Generates a unique, time-sortable version string.
 *
 * Format: base64url-encoded timestamp, with optional `.N` suffix when multiple
 * versions are generated within the same millisecond.
 *
 * Versions are intended to sort lexicographically in roughly chronological
 * order, making them suitable for use as database keys where ordering matters.
 *
 * NOTE(review): the ordering guarantee has two known edge cases — confirm
 * whether they matter for callers before relying on strict sort order:
 * 1. `btoa` uses the standard base64 alphabet, which is NOT in ASCII order
 *    (digits sort before uppercase letters, and the '-'/'_' substitutions sort
 *    out of value order), so two timestamps whose encodings first differ at an
 *    alphabet-run boundary (e.g. 'z' vs '0') can compare out of chronological
 *    order.
 * 2. The `.N` sequence suffix is not zero-padded, so within one millisecond
 *    '.10' sorts lexicographically before '.2'.
 * Uniqueness within a single process is unaffected by either caveat.
 *
 * @category Utilities
 * @group Versioning
 * @returns A unique version string like `AZJx4vM` or `AZJx4vM.1`
 *
 * @example
 * ```ts
 * const v1 = generate_version() // => 'AZJx4vM'
 * const v2 = generate_version() // => 'AZJx4vM.1' (same millisecond)
 * const v3 = generate_version() // => 'AZJx4vN' (next millisecond)
 * ```
 */
export function generate_version() {
    const now = Date.now();
    if (now === last_timestamp) {
        // same millisecond as the previous call: disambiguate with a counter
        sequence++;
    }
    else {
        last_timestamp = now;
        sequence = 0;
    }
    // base64url encode the timestamp (no padding, url-safe)
    const timestamp_bytes = new Uint8Array(8);
    const view = new DataView(timestamp_bytes.buffer);
    view.setBigUint64(0, BigInt(now), false); // big-endian for lexicographic sorting
    // trim leading zeros for compactness
    // (start < 7, not < 8, so at least one byte is always kept even for now === 0)
    let start = 0;
    while (start < 7 && timestamp_bytes[start] === 0)
        start++;
    const trimmed = timestamp_bytes.slice(start);
    // standard base64 via btoa, then converted to the url-safe variant
    const base64 = btoa(String.fromCharCode(...trimmed))
        .replace(/\+/g, '-')
        .replace(/\//g, '_')
        .replace(/=/g, '');
    return sequence > 0 ? `${base64}.${sequence}` : base64;
}
|
|
77
|
+
/**
 * Creates a JSON codec with schema validation.
 *
 * Values are serialized to JSON on encode; on decode the parsed JSON is run
 * through the schema's `parse` method. Works with both Zod 3.x and 4.x (the
 * schema is typed structurally — anything with a `parse` method qualifies).
 *
 * Note: validation only happens on decode. Invalid data passed to encode will
 * serialize but may fail validation when decoded later.
 *
 * @category Codecs
 * @group Codec Factories
 * @param schema - A Zod schema (or any object with a `parse` method)
 * @returns A Codec for JSON serialization with validation
 *
 * @example
 * ```ts
 * import { z } from 'zod'
 *
 * const UserSchema = z.object({
 *   id: z.string().uuid(),
 *   name: z.string(),
 *   createdAt: z.coerce.date()
 * })
 *
 * const codec = json_codec(UserSchema)
 * const bytes = codec.encode({ id: '...', name: 'Alice', createdAt: '2024-01-01' })
 * const user = codec.decode(bytes) // createdAt is now a Date object
 * ```
 */
export function json_codec(schema) {
    const encoder = new TextEncoder();
    const decoder = new TextDecoder();
    return {
        content_type: "application/json",
        encode(value) {
            return encoder.encode(JSON.stringify(value));
        },
        decode(bytes) {
            const parsed = JSON.parse(decoder.decode(bytes));
            return schema.parse(parsed);
        },
    };
}
|
|
116
|
+
/**
 * Creates a plain text codec using UTF-8 encoding.
 *
 * No validation is performed — any string can be encoded and any valid
 * UTF-8 bytes can be decoded.
 *
 * @category Codecs
 * @group Codec Factories
 * @returns A Codec for plain text strings
 *
 * @example
 * ```ts
 * const notes = define_store('notes', text_codec())
 *
 * await corpus.stores.notes.put('Meeting notes for 2024-01-15...')
 *
 * const result = await corpus.stores.notes.get_latest()
 * if (result.ok) {
 *   console.log(result.value.data) // string
 * }
 * ```
 */
export function text_codec() {
    const encoder = new TextEncoder();
    const decoder = new TextDecoder();
    return {
        content_type: "text/plain",
        encode(value) {
            return encoder.encode(value);
        },
        decode(bytes) {
            return decoder.decode(bytes);
        },
    };
}
|
|
145
|
+
/**
 * Creates a pass-through codec for raw binary data.
 *
 * No transformation is performed — bytes are stored and retrieved as-is.
 * Use for images, PDFs, pre-serialized data, or any binary content.
 *
 * @category Codecs
 * @group Codec Factories
 * @returns A Codec for raw binary data
 *
 * @example
 * ```ts
 * const images = define_store('images', binary_codec())
 *
 * // Store an image
 * const imageData = await fetch('photo.png').then(r => r.arrayBuffer())
 * await corpus.stores.images.put(new Uint8Array(imageData))
 * ```
 */
export function binary_codec() {
    return {
        content_type: "application/octet-stream",
        // both directions are the identity: the caller's bytes pass through untouched
        encode(value) {
            return value;
        },
        decode(bytes) {
            return bytes;
        },
    };
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@f0rbit/corpus",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.5",
|
|
4
4
|
"description": "A functional snapshotting library for TypeScript with versioned data storage, lineage tracking, and multiple backend support",
|
|
5
5
|
"module": "dist/index.js",
|
|
6
6
|
"main": "dist/index.js",
|
|
@@ -31,7 +31,10 @@
|
|
|
31
31
|
"build": "tsc -p tsconfig.build.json",
|
|
32
32
|
"test": "bun test",
|
|
33
33
|
"typecheck": "tsc --noEmit",
|
|
34
|
-
"prepublishOnly": "npm run build"
|
|
34
|
+
"prepublishOnly": "npm run build",
|
|
35
|
+
"docs:dev": "cd docs && bun run dev",
|
|
36
|
+
"docs:build": "cd docs && bun run build",
|
|
37
|
+
"docs:preview": "cd docs && bun run preview"
|
|
35
38
|
},
|
|
36
39
|
"repository": {
|
|
37
40
|
"type": "git",
|
package/dist/codec.d.ts
DELETED
|
@@ -1,9 +0,0 @@
|
|
|
1
|
-
import type { Codec } from "./types";
|
|
2
|
-
type ZodLike<T> = {
|
|
3
|
-
parse: (data: unknown) => T;
|
|
4
|
-
};
|
|
5
|
-
export declare function json_codec<T>(schema: ZodLike<T>): Codec<T>;
|
|
6
|
-
export declare function text_codec(): Codec<string>;
|
|
7
|
-
export declare function binary_codec(): Codec<Uint8Array>;
|
|
8
|
-
export {};
|
|
9
|
-
//# sourceMappingURL=codec.d.ts.map
|
package/dist/codec.d.ts.map
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"codec.d.ts","sourceRoot":"","sources":["../codec.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAGrC,KAAK,OAAO,CAAC,CAAC,IAAI;IAAE,KAAK,EAAE,CAAC,IAAI,EAAE,OAAO,KAAK,CAAC,CAAA;CAAE,CAAC;AAElD,wBAAgB,UAAU,CAAC,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAM1D;AAED,wBAAgB,UAAU,IAAI,KAAK,CAAC,MAAM,CAAC,CAM1C;AAED,wBAAgB,YAAY,IAAI,KAAK,CAAC,UAAU,CAAC,CAMhD"}
|
package/dist/codec.js
DELETED
|
@@ -1,21 +0,0 @@
|
|
|
1
|
-
export function json_codec(schema) {
|
|
2
|
-
return {
|
|
3
|
-
content_type: "application/json",
|
|
4
|
-
encode: (value) => new TextEncoder().encode(JSON.stringify(value)),
|
|
5
|
-
decode: (bytes) => schema.parse(JSON.parse(new TextDecoder().decode(bytes))),
|
|
6
|
-
};
|
|
7
|
-
}
|
|
8
|
-
export function text_codec() {
|
|
9
|
-
return {
|
|
10
|
-
content_type: "text/plain",
|
|
11
|
-
encode: (value) => new TextEncoder().encode(value),
|
|
12
|
-
decode: (bytes) => new TextDecoder().decode(bytes),
|
|
13
|
-
};
|
|
14
|
-
}
|
|
15
|
-
export function binary_codec() {
|
|
16
|
-
return {
|
|
17
|
-
content_type: "application/octet-stream",
|
|
18
|
-
encode: (value) => value,
|
|
19
|
-
decode: (bytes) => bytes,
|
|
20
|
-
};
|
|
21
|
-
}
|
package/dist/hash.d.ts
DELETED
package/dist/hash.d.ts.map
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"hash.d.ts","sourceRoot":"","sources":["../hash.ts"],"names":[],"mappings":"AAAA,wBAAsB,YAAY,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,MAAM,CAAC,CAIpE"}
|
package/dist/hash.js
DELETED
package/dist/store.d.ts
DELETED
package/dist/store.d.ts.map
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"store.d.ts","sourceRoot":"","sources":["../store.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,KAAK,EAAE,eAAe,EAA8C,MAAM,SAAS,CAAA;AAK1G,wBAAgB,YAAY,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,eAAe,CAAC,MAAM,EAAE,CAAC,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CA+IlG"}
|
package/dist/store.js
DELETED
|
@@ -1,125 +0,0 @@
|
|
|
1
|
-
import { ok, err } from './types';
|
|
2
|
-
import { compute_hash } from './hash';
|
|
3
|
-
import { generate_version } from './version';
|
|
4
|
-
export function create_store(backend, definition) {
|
|
5
|
-
const { id, codec } = definition;
|
|
6
|
-
function emit(event) {
|
|
7
|
-
backend.on_event?.(event);
|
|
8
|
-
}
|
|
9
|
-
function make_data_key(store_id, content_hash) {
|
|
10
|
-
return `${store_id}/${content_hash}`;
|
|
11
|
-
}
|
|
12
|
-
return {
|
|
13
|
-
id,
|
|
14
|
-
codec,
|
|
15
|
-
async put(data, opts) {
|
|
16
|
-
const version = generate_version();
|
|
17
|
-
let bytes;
|
|
18
|
-
try {
|
|
19
|
-
bytes = codec.encode(data);
|
|
20
|
-
}
|
|
21
|
-
catch (cause) {
|
|
22
|
-
const error = { kind: 'encode_error', cause: cause };
|
|
23
|
-
emit({ type: 'error', error });
|
|
24
|
-
return err(error);
|
|
25
|
-
}
|
|
26
|
-
const content_hash = await compute_hash(bytes);
|
|
27
|
-
// deduplication: reuse existing data_key if content already exists
|
|
28
|
-
const existing = await backend.metadata.find_by_hash(id, content_hash);
|
|
29
|
-
const deduplicated = existing !== null;
|
|
30
|
-
const data_key = deduplicated ? existing.data_key : make_data_key(id, content_hash);
|
|
31
|
-
if (!deduplicated) {
|
|
32
|
-
const data_result = await backend.data.put(data_key, bytes);
|
|
33
|
-
if (!data_result.ok) {
|
|
34
|
-
emit({ type: 'error', error: data_result.error });
|
|
35
|
-
return data_result;
|
|
36
|
-
}
|
|
37
|
-
}
|
|
38
|
-
emit({ type: 'data_put', store_id: id, version, size_bytes: bytes.length, deduplicated });
|
|
39
|
-
const meta = {
|
|
40
|
-
store_id: id,
|
|
41
|
-
version,
|
|
42
|
-
parents: opts?.parents ?? [],
|
|
43
|
-
created_at: new Date(),
|
|
44
|
-
invoked_at: opts?.invoked_at,
|
|
45
|
-
content_hash,
|
|
46
|
-
content_type: codec.content_type,
|
|
47
|
-
size_bytes: bytes.length,
|
|
48
|
-
data_key,
|
|
49
|
-
tags: opts?.tags,
|
|
50
|
-
};
|
|
51
|
-
const meta_result = await backend.metadata.put(meta);
|
|
52
|
-
if (!meta_result.ok) {
|
|
53
|
-
emit({ type: 'error', error: meta_result.error });
|
|
54
|
-
return meta_result;
|
|
55
|
-
}
|
|
56
|
-
emit({ type: 'snapshot_put', store_id: id, version, content_hash, deduplicated });
|
|
57
|
-
return ok(meta);
|
|
58
|
-
},
|
|
59
|
-
async get(version) {
|
|
60
|
-
const meta_result = await backend.metadata.get(id, version);
|
|
61
|
-
if (!meta_result.ok) {
|
|
62
|
-
emit({ type: 'snapshot_get', store_id: id, version, found: false });
|
|
63
|
-
return meta_result;
|
|
64
|
-
}
|
|
65
|
-
const meta = meta_result.value;
|
|
66
|
-
const data_result = await backend.data.get(meta.data_key);
|
|
67
|
-
if (!data_result.ok) {
|
|
68
|
-
emit({ type: 'error', error: data_result.error });
|
|
69
|
-
return data_result;
|
|
70
|
-
}
|
|
71
|
-
const bytes = await data_result.value.bytes();
|
|
72
|
-
let data;
|
|
73
|
-
try {
|
|
74
|
-
data = codec.decode(bytes);
|
|
75
|
-
}
|
|
76
|
-
catch (cause) {
|
|
77
|
-
const error = { kind: 'decode_error', cause: cause };
|
|
78
|
-
emit({ type: 'error', error });
|
|
79
|
-
return err(error);
|
|
80
|
-
}
|
|
81
|
-
emit({ type: 'snapshot_get', store_id: id, version, found: true });
|
|
82
|
-
return ok({ meta, data });
|
|
83
|
-
},
|
|
84
|
-
async get_latest() {
|
|
85
|
-
const meta_result = await backend.metadata.get_latest(id);
|
|
86
|
-
if (!meta_result.ok) {
|
|
87
|
-
return meta_result;
|
|
88
|
-
}
|
|
89
|
-
const meta = meta_result.value;
|
|
90
|
-
const data_result = await backend.data.get(meta.data_key);
|
|
91
|
-
if (!data_result.ok) {
|
|
92
|
-
return data_result;
|
|
93
|
-
}
|
|
94
|
-
const bytes = await data_result.value.bytes();
|
|
95
|
-
let data;
|
|
96
|
-
try {
|
|
97
|
-
data = codec.decode(bytes);
|
|
98
|
-
}
|
|
99
|
-
catch (cause) {
|
|
100
|
-
const error = { kind: 'decode_error', cause: cause };
|
|
101
|
-
emit({ type: 'error', error });
|
|
102
|
-
return err(error);
|
|
103
|
-
}
|
|
104
|
-
return ok({ meta, data });
|
|
105
|
-
},
|
|
106
|
-
async get_meta(version) {
|
|
107
|
-
return backend.metadata.get(id, version);
|
|
108
|
-
},
|
|
109
|
-
list(opts) {
|
|
110
|
-
return backend.metadata.list(id, opts);
|
|
111
|
-
},
|
|
112
|
-
async delete(version) {
|
|
113
|
-
const meta_result = await backend.metadata.get(id, version);
|
|
114
|
-
if (!meta_result.ok) {
|
|
115
|
-
return meta_result;
|
|
116
|
-
}
|
|
117
|
-
const delete_meta_result = await backend.metadata.delete(id, version);
|
|
118
|
-
if (!delete_meta_result.ok) {
|
|
119
|
-
return delete_meta_result;
|
|
120
|
-
}
|
|
121
|
-
emit({ type: 'meta_delete', store_id: id, version });
|
|
122
|
-
return ok(undefined);
|
|
123
|
-
},
|
|
124
|
-
};
|
|
125
|
-
}
|
package/dist/version.d.ts
DELETED
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Generates a unique, sortable version string.
|
|
3
|
-
* Format: base64url-encoded timestamp with optional sequence suffix for same-millisecond calls.
|
|
4
|
-
* Versions sort lexicographically in chronological order.
|
|
5
|
-
*/
|
|
6
|
-
export declare function generate_version(): string;
|
|
7
|
-
//# sourceMappingURL=version.d.ts.map
|
package/dist/version.d.ts.map
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"version.d.ts","sourceRoot":"","sources":["../version.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,wBAAgB,gBAAgB,IAAI,MAAM,CA0BzC"}
|
package/dist/version.js
DELETED
|
@@ -1,31 +0,0 @@
|
|
|
1
|
-
let last_timestamp = 0;
|
|
2
|
-
let sequence = 0;
|
|
3
|
-
/**
|
|
4
|
-
* Generates a unique, sortable version string.
|
|
5
|
-
* Format: base64url-encoded timestamp with optional sequence suffix for same-millisecond calls.
|
|
6
|
-
* Versions sort lexicographically in chronological order.
|
|
7
|
-
*/
|
|
8
|
-
export function generate_version() {
|
|
9
|
-
const now = Date.now();
|
|
10
|
-
if (now === last_timestamp) {
|
|
11
|
-
sequence++;
|
|
12
|
-
}
|
|
13
|
-
else {
|
|
14
|
-
last_timestamp = now;
|
|
15
|
-
sequence = 0;
|
|
16
|
-
}
|
|
17
|
-
// base64url encode the timestamp (no padding, url-safe)
|
|
18
|
-
const timestamp_bytes = new Uint8Array(8);
|
|
19
|
-
const view = new DataView(timestamp_bytes.buffer);
|
|
20
|
-
view.setBigUint64(0, BigInt(now), false); // big-endian for lexicographic sorting
|
|
21
|
-
// trim leading zeros for compactness
|
|
22
|
-
let start = 0;
|
|
23
|
-
while (start < 7 && timestamp_bytes[start] === 0)
|
|
24
|
-
start++;
|
|
25
|
-
const trimmed = timestamp_bytes.slice(start);
|
|
26
|
-
const base64 = btoa(String.fromCharCode(...trimmed))
|
|
27
|
-
.replace(/\+/g, '-')
|
|
28
|
-
.replace(/\//g, '_')
|
|
29
|
-
.replace(/=/g, '');
|
|
30
|
-
return sequence > 0 ? `${base64}.${sequence}` : base64;
|
|
31
|
-
}
|