@backloghq/opslog 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +169 -0
- package/dist/archive.d.ts +3 -0
- package/dist/archive.js +34 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +1 -0
- package/dist/manifest.d.ts +4 -0
- package/dist/manifest.js +34 -0
- package/dist/snapshot.d.ts +5 -0
- package/dist/snapshot.js +23 -0
- package/dist/store.d.ts +38 -0
- package/dist/store.js +295 -0
- package/dist/types.d.ts +52 -0
- package/dist/types.js +1 -0
- package/dist/wal.d.ts +5 -0
- package/dist/wal.js +44 -0
- package/package.json +44 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 backloghq
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
# opslog
|
|
2
|
+
|
|
3
|
+
Embedded event-sourced document store for Node.js. Zero native dependencies.
|
|
4
|
+
|
|
5
|
+
Every mutation is recorded as an operation in an append-only log. Current state is derived by replaying operations from the latest snapshot. You get crash safety, undo, audit trails, and sync-readiness without a database server.
|
|
6
|
+
|
|
7
|
+
## Install
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npm install @backloghq/opslog
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Usage
|
|
14
|
+
|
|
15
|
+
```typescript
|
|
16
|
+
import { Store } from "opslog";
|
|
17
|
+
|
|
18
|
+
const store = new Store<{ name: string; status: string }>();
|
|
19
|
+
await store.open("./data");
|
|
20
|
+
|
|
21
|
+
// Create
|
|
22
|
+
await store.set("task-1", { name: "Build API", status: "active" });
|
|
23
|
+
|
|
24
|
+
// Read
|
|
25
|
+
const task = store.get("task-1");
|
|
26
|
+
|
|
27
|
+
// Update
|
|
28
|
+
await store.set("task-1", { ...task, status: "done" });
|
|
29
|
+
|
|
30
|
+
// Delete
|
|
31
|
+
await store.delete("task-1");
|
|
32
|
+
|
|
33
|
+
// Undo the delete
|
|
34
|
+
await store.undo();
|
|
35
|
+
|
|
36
|
+
// Query
|
|
37
|
+
const active = store.filter((r) => r.status === "active");
|
|
38
|
+
const count = store.count((r) => r.status === "done");
|
|
39
|
+
|
|
40
|
+
// Batch (single disk write for multiple operations)
|
|
41
|
+
await store.batch(() => {
|
|
42
|
+
store.set("a", { name: "A", status: "active" });
|
|
43
|
+
store.set("b", { name: "B", status: "active" });
|
|
44
|
+
store.set("c", { name: "C", status: "active" });
|
|
45
|
+
});
|
|
46
|
+
|
|
47
|
+
// Close (checkpoints automatically)
|
|
48
|
+
await store.close();
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
State survives restarts — reopen the same directory and everything is there.
|
|
52
|
+
|
|
53
|
+
## How It Works
|
|
54
|
+
|
|
55
|
+
```
|
|
56
|
+
data/
|
|
57
|
+
manifest.json # Points to current snapshot + ops file
|
|
58
|
+
snapshots/
|
|
59
|
+
snap-<timestamp>.json # Immutable full-state capture
|
|
60
|
+
ops/
|
|
61
|
+
ops-<timestamp>.jsonl # Append-only operation log
|
|
62
|
+
archive/
|
|
63
|
+
archive-<period>.json # Old records, lazy-loaded
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
**Writes** append an operation (one JSON line) to the ops file. **Reads** come from an in-memory map built from the latest snapshot + ops replay. **Checkpoints** materialize current state as a new immutable snapshot.
|
|
67
|
+
|
|
68
|
+
Every operation records the previous value, so **undo** pops the last operation and restores the old state. The operations log doubles as an **audit trail** and a natural unit for **sync** between nodes.
|
|
69
|
+
|
|
70
|
+
## API
|
|
71
|
+
|
|
72
|
+
### Lifecycle
|
|
73
|
+
|
|
74
|
+
```typescript
|
|
75
|
+
await store.open(dir, options?) // Load state from directory
|
|
76
|
+
await store.close() // Checkpoint and close
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
### CRUD
|
|
80
|
+
|
|
81
|
+
```typescript
|
|
82
|
+
store.get(id) // Get record by ID
|
|
83
|
+
await store.set(id, value) // Create or update
|
|
84
|
+
await store.delete(id) // Remove (throws if not found)
|
|
85
|
+
store.has(id) // Check existence
|
|
86
|
+
```
|
|
87
|
+
|
|
88
|
+
### Query
|
|
89
|
+
|
|
90
|
+
```typescript
|
|
91
|
+
store.all() // All records
|
|
92
|
+
store.entries() // All [id, record] pairs
|
|
93
|
+
store.filter(predicate) // Records matching predicate
|
|
94
|
+
store.count(predicate?) // Count (all or matching)
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### Batch
|
|
98
|
+
|
|
99
|
+
```typescript
|
|
100
|
+
await store.batch(() => { // Multiple ops, single disk write
|
|
101
|
+
store.set("a", valueA); // Rolls back all on error
|
|
102
|
+
store.set("b", valueB);
|
|
103
|
+
});
|
|
104
|
+
```
|
|
105
|
+
|
|
106
|
+
### History
|
|
107
|
+
|
|
108
|
+
```typescript
|
|
109
|
+
await store.undo() // Undo last operation
|
|
110
|
+
store.getHistory(id) // All operations for a record
|
|
111
|
+
store.getOps(since?) // Operations since timestamp
|
|
112
|
+
```
|
|
113
|
+
|
|
114
|
+
### Maintenance
|
|
115
|
+
|
|
116
|
+
```typescript
|
|
117
|
+
await store.compact() // Create new snapshot, clear ops
|
|
118
|
+
await store.archive(predicate) // Move matching records to archive
|
|
119
|
+
await store.loadArchive(segment) // Lazy-load archived records
|
|
120
|
+
store.listArchiveSegments() // List available archive files
|
|
121
|
+
store.stats() // { activeRecords, opsCount, archiveSegments }
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
## Options
|
|
125
|
+
|
|
126
|
+
```typescript
|
|
127
|
+
await store.open(dir, {
|
|
128
|
+
checkpointThreshold: 100, // Auto-checkpoint after N ops (default: 100)
|
|
129
|
+
checkpointOnClose: true, // Checkpoint when close() is called (default: true)
|
|
130
|
+
version: 1, // Schema version
|
|
131
|
+
migrate: (record, fromVersion) => record, // Migration function
|
|
132
|
+
});
|
|
133
|
+
```
|
|
134
|
+
|
|
135
|
+
## Crash Safety
|
|
136
|
+
|
|
137
|
+
- **Ops file**: append-only writes. A crash mid-append loses at most the last operation. Malformed lines are skipped on recovery.
|
|
138
|
+
- **Snapshots**: immutable. Written to a temp file, then atomically renamed.
|
|
139
|
+
- **Manifest**: atomically replaced via temp-file-rename. Always points to a valid snapshot.
|
|
140
|
+
|
|
141
|
+
No data corruption on crash. At most one in-flight operation is lost.
|
|
142
|
+
|
|
143
|
+
## Schema Migration
|
|
144
|
+
|
|
145
|
+
```typescript
|
|
146
|
+
const store = new Store();
|
|
147
|
+
await store.open(dir, {
|
|
148
|
+
version: 2,
|
|
149
|
+
migrate: (record, fromVersion) => {
|
|
150
|
+
if (fromVersion < 2) return { ...record, newField: "default" };
|
|
151
|
+
return record;
|
|
152
|
+
},
|
|
153
|
+
});
|
|
154
|
+
```
|
|
155
|
+
|
|
156
|
+
Records are migrated in memory on open. Next checkpoint persists the migrated state.
|
|
157
|
+
|
|
158
|
+
## Development
|
|
159
|
+
|
|
160
|
+
```bash
|
|
161
|
+
npm run build # Compile TypeScript
|
|
162
|
+
npm run lint # ESLint
|
|
163
|
+
npm test # Run tests
|
|
164
|
+
npm run test:coverage # Tests with coverage
|
|
165
|
+
```
|
|
166
|
+
|
|
167
|
+
## License
|
|
168
|
+
|
|
169
|
+
MIT
|
|
@@ -0,0 +1,3 @@
|
|
|
1
|
+
/**
 * Persist `records` as an immutable archive segment named
 * `archive-<period>.json` under `<dir>/archive/` (temp-file write + rename).
 * Resolves with the segment path relative to `dir`.
 */
export declare function writeArchiveSegment<T>(dir: string, period: string, records: Map<string, T>): Promise<string>;
/**
 * Read a previously written archive segment (path relative to `dir`) and
 * return its records as a Map keyed by record ID.
 */
export declare function loadArchiveSegment<T>(dir: string, relativePath: string): Promise<Map<string, T>>;
/**
 * List archive segment files under `<dir>/archive/` as `archive/<file>` paths.
 * Resolves with an empty array when the archive directory does not exist.
 */
export declare function listArchiveSegments(dir: string): Promise<string[]>;
|
package/dist/archive.js
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { readFile, readdir, rename, writeFile } from "node:fs/promises";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
/**
 * Persist a set of records as an immutable archive segment file.
 *
 * The segment is written to `<dir>/archive/archive-<period>.json` via a
 * temp-file-then-rename sequence so a crash mid-write never leaves a
 * partially written segment at the final path.
 *
 * @param {string} dir - Store root directory (must already contain `archive/`).
 * @param {string} period - Segment label (e.g. "2026-Q1"); becomes part of the filename.
 * @param {Map<string, *>} records - Records to archive, keyed by record ID.
 * @returns {Promise<string>} Segment path relative to `dir`,
 *   e.g. "archive/archive-2026-Q1.json" — the form loadArchiveSegment expects.
 */
export async function writeArchiveSegment(dir, period, records) {
    const filename = `archive-${period}.json`;
    const path = join(dir, "archive", filename);
    const segment = {
        version: 1,
        period,
        timestamp: new Date().toISOString(),
        records: Object.fromEntries(records),
    };
    // Stage to a temp file, then atomically rename into place.
    const tmpPath = path + ".tmp";
    await writeFile(tmpPath, JSON.stringify(segment, null, 2), "utf-8");
    await rename(tmpPath, path);
    // BUG FIX: published code returned the broken literal `archive/$(unknown)`,
    // so the path stored in the manifest never matched the file on disk.
    return `archive/${filename}`;
}
|
|
17
|
+
/**
 * Read an archive segment file and return its records as a Map.
 *
 * @param {string} dir - Store root directory.
 * @param {string} relativePath - Segment path relative to `dir`
 *   (as returned by writeArchiveSegment).
 * @returns {Promise<Map<string, *>>} Archived records keyed by record ID.
 */
export async function loadArchiveSegment(dir, relativePath) {
    const raw = await readFile(join(dir, relativePath), "utf-8");
    const { records } = JSON.parse(raw);
    return new Map(Object.entries(records));
}
|
|
23
|
+
/**
 * Enumerate archive segment files in `<dir>/archive/`.
 *
 * @param {string} dir - Store root directory.
 * @returns {Promise<string[]>} Segment paths relative to `dir`
 *   (`archive/<file>`); empty when the archive directory is absent.
 */
export async function listArchiveSegments(dir) {
    let names;
    try {
        names = await readdir(join(dir, "archive"));
    }
    catch {
        // No archive directory yet — an empty store has no segments.
        return [];
    }
    const segments = [];
    for (const name of names) {
        if (name.startsWith("archive-") && name.endsWith(".json")) {
            segments.push(`archive/${name}`);
        }
    }
    return segments;
}
|
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { Store } from "./store.js";
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
import type { Manifest } from "./types.js";
|
|
2
|
+
/**
 * Read and parse `<dir>/manifest.json`. Resolves with `null` when the file is
 * missing or unparsable (i.e. a fresh or uninitialized store directory).
 */
export declare function readManifest(dir: string): Promise<Manifest | null>;
/**
 * Atomically replace `<dir>/manifest.json` via a temp-file write + rename.
 */
export declare function writeManifest(dir: string, manifest: Manifest): Promise<void>;
/**
 * Build a fresh manifest pointing at the given snapshot and ops files, with
 * zeroed stats and `created`/`lastCheckpoint` set to the current time.
 */
export declare function createDefaultManifest(snapshotPath: string, opsPath: string): Manifest;
|
package/dist/manifest.js
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { readFile, rename, writeFile } from "node:fs/promises";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
// Name of the manifest file kept at the root of the store directory.
const MANIFEST_FILE = "manifest.json";
/**
 * Read and parse the store manifest from `dir`.
 *
 * @param {string} dir - Store root directory.
 * @returns {Promise<object|null>} Parsed manifest, or null when the file is
 *   missing or unparsable — callers treat null as "fresh store".
 */
export async function readManifest(dir) {
    try {
        const raw = await readFile(join(dir, MANIFEST_FILE), "utf-8");
        return JSON.parse(raw);
    }
    catch {
        // Missing or corrupt manifest — signal "no manifest" rather than throw.
        return null;
    }
}
|
|
13
|
+
/**
 * Atomically replace the store manifest on disk.
 *
 * The manifest is staged to a `.tmp` sibling and then renamed into place, so
 * a crash mid-write never leaves a torn manifest at the final path.
 *
 * @param {string} dir - Store root directory.
 * @param {object} manifest - Manifest object to serialize.
 * @returns {Promise<void>}
 */
export async function writeManifest(dir, manifest) {
    const target = join(dir, MANIFEST_FILE);
    const staged = `${target}.tmp`;
    await writeFile(staged, JSON.stringify(manifest, null, 2), "utf-8");
    await rename(staged, target);
}
|
|
19
|
+
/**
 * Build the manifest for a brand-new store.
 *
 * @param {string} snapshotPath - Path of the initial (empty) snapshot, relative to the store dir.
 * @param {string} opsPath - Path of the initial (empty) ops file, relative to the store dir.
 * @returns {object} Manifest with zeroed stats; `created` and `lastCheckpoint`
 *   both set to the current time.
 */
export function createDefaultManifest(snapshotPath, opsPath) {
    const timestamp = new Date().toISOString();
    const stats = {
        activeRecords: 0,
        archivedRecords: 0,
        opsCount: 0,
        created: timestamp,
        lastCheckpoint: timestamp,
    };
    return {
        version: 1,
        currentSnapshot: snapshotPath,
        activeOps: opsPath,
        archiveSegments: [],
        stats,
    };
}
|
package/dist/snapshot.js
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { readFile, rename, writeFile } from "node:fs/promises";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
/**
 * Materialize the current record set as a new immutable snapshot file.
 *
 * Written to `<dir>/snapshots/snap-<Date.now()>.json` via a temp-file write
 * followed by an atomic rename, so a crash mid-write cannot corrupt an
 * existing snapshot.
 *
 * @param {string} dir - Store root directory (must already contain `snapshots/`).
 * @param {Map<string, *>} records - Current records keyed by record ID.
 * @param {number} version - Schema version to stamp into the snapshot.
 * @returns {Promise<string>} Snapshot path relative to `dir`
 *   (e.g. "snapshots/snap-1700000000000.json") — the form loadSnapshot expects.
 */
export async function writeSnapshot(dir, records, version) {
    const timestamp = new Date().toISOString();
    const filename = `snap-${Date.now()}.json`;
    const path = join(dir, "snapshots", filename);
    const snapshot = {
        version,
        timestamp,
        records: Object.fromEntries(records),
    };
    // Stage to a temp file, then atomically rename into place.
    const tmpPath = path + ".tmp";
    await writeFile(tmpPath, JSON.stringify(snapshot, null, 2), "utf-8");
    await rename(tmpPath, path);
    // BUG FIX: published code returned the broken literal `snapshots/$(unknown)`,
    // so the manifest's currentSnapshot never pointed at the real file.
    return `snapshots/${filename}`;
}
|
|
17
|
+
/**
 * Load a snapshot file and rebuild the record map from it.
 *
 * @param {string} dir - Store root directory.
 * @param {string} relativePath - Snapshot path relative to `dir`
 *   (as stored in the manifest).
 * @returns {Promise<{records: Map<string, *>, version: number}>}
 *   Records keyed by ID plus the schema version stamped in the snapshot.
 */
export async function loadSnapshot(dir, relativePath) {
    const raw = await readFile(join(dir, relativePath), "utf-8");
    const parsed = JSON.parse(raw);
    return {
        records: new Map(Object.entries(parsed.records)),
        version: parsed.version,
    };
}
|
package/dist/store.d.ts
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import type { Operation, StoreOptions, StoreStats } from "./types.js";
|
|
2
|
+
/**
 * Embedded event-sourced document store.
 *
 * Current state is an in-memory Map derived from the latest snapshot plus a
 * replay of the append-only operation log (see store.js for the implementation).
 */
export declare class Store<T = Record<string, unknown>> {
    private dir;
    private records;
    private ops;
    private archiveSegments;
    private opened;
    private version;
    private activeOpsPath;
    private created;
    private options;
    private batching;
    private batchOps;
    /** Load (or initialize) store state from `dir`; must precede all other calls. */
    open(dir: string, options?: StoreOptions): Promise<void>;
    /** Close the store; checkpoints first when `checkpointOnClose` is set and ops are pending. */
    close(): Promise<void>;
    /** Get a record by ID, or undefined when absent. */
    get(id: string): T | undefined;
    /** Create or update a record; returns a promise unless inside batch(). */
    set(id: string, value: T): Promise<void> | void;
    /** Remove a record; throws when the ID does not exist. */
    delete(id: string): Promise<void> | void;
    /** True when a record with this ID exists. */
    has(id: string): boolean;
    /** All current record values. */
    all(): T[];
    /** All current [id, record] pairs. */
    entries(): [string, T][];
    /** Records for which the predicate returns true. */
    filter(predicate: (value: T, id: string) => boolean): T[];
    /** Record count, optionally restricted by a predicate. */
    count(predicate?: (value: T, id: string) => boolean): number;
    /** Run several set/delete calls as one unit with a single disk write; rolls back in-memory state on error. */
    batch(fn: () => void): Promise<void>;
    /** Undo the most recent operation; resolves false when there is nothing to undo. */
    undo(): Promise<boolean>;
    /** Operations recorded for a record ID (since the last checkpoint). */
    getHistory(id: string): Operation<T>[];
    /** Operations with timestamps after `since` (all pending ops when omitted). */
    getOps(since?: string): Operation<T>[];
    /** Write a new snapshot, switch to a fresh ops file, and clear pending ops. */
    compact(): Promise<void>;
    /** Move matching records into an archive segment; resolves with the number archived. */
    archive(predicate: (value: T, id: string) => boolean, segment?: string): Promise<number>;
    /** Lazy-load an archive segment's records by (partial) segment name. */
    loadArchive(segment: string): Promise<Map<string, T>>;
    /** Known archive segment paths (relative to the store directory). */
    listArchiveSegments(): string[];
    /** Counters for active records, pending ops, and archive segments. */
    stats(): StoreStats;
    private ensureOpen;
    private applyOp;
    private reverseOp;
    private persistOp;
    private defaultPeriod;
}
|
package/dist/store.js
ADDED
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
import { mkdir, writeFile } from "node:fs/promises";
|
|
2
|
+
import { join } from "node:path";
|
|
3
|
+
import { appendOp, appendOps, readOps, truncateLastOp } from "./wal.js";
|
|
4
|
+
import { loadSnapshot, writeSnapshot } from "./snapshot.js";
|
|
5
|
+
import { createDefaultManifest, readManifest, writeManifest, } from "./manifest.js";
|
|
6
|
+
import { loadArchiveSegment, writeArchiveSegment, } from "./archive.js";
|
|
7
|
+
/**
 * Embedded event-sourced document store.
 *
 * State model: `records` is the authoritative in-memory Map; every mutation
 * is recorded as an operation (with the previous value) in `ops` and appended
 * to the active ops file. On open, state is rebuilt from the latest snapshot
 * plus an ops replay. compact() materializes a new snapshot and clears `ops`,
 * which is why undo()/getHistory() only cover operations since the last
 * checkpoint.
 */
export class Store {
    // Root data directory; set by open().
    dir = "";
    // Current records, keyed by record ID.
    records = new Map();
    // Operations since the last checkpoint (mirrors the active ops file).
    ops = [];
    // Archive segment paths (relative to dir), persisted via the manifest.
    archiveSegments = [];
    // Guards every public method via ensureOpen().
    opened = false;
    // Schema version of the in-memory state.
    version = 1;
    // Path of the active ops file, relative to dir.
    activeOpsPath = "";
    // ISO timestamp of store creation, carried through manifests.
    created = "";
    // Defaults; shallow-merged with the options passed to open().
    options = {
        checkpointThreshold: 100,
        checkpointOnClose: true,
        version: 1,
        migrate: (r) => r,
    };
    // True while inside batch(); set()/delete() then buffer into batchOps.
    batching = false;
    batchOps = [];
    /**
     * Load (or initialize) store state from `dir`.
     * Creates the directory layout, then either bootstraps a fresh store
     * (empty snapshot + empty ops file + manifest) or loads the snapshot,
     * applies schema migration, and replays pending ops.
     */
    async open(dir, options) {
        this.dir = dir;
        if (options) {
            this.options = { ...this.options, ...options };
        }
        await mkdir(join(dir, "snapshots"), { recursive: true });
        await mkdir(join(dir, "ops"), { recursive: true });
        await mkdir(join(dir, "archive"), { recursive: true });
        const manifest = await readManifest(dir);
        if (!manifest) {
            // Fresh store — create empty snapshot and manifest
            const snapshotPath = await writeSnapshot(dir, new Map(), this.options.version);
            const opsFilename = `ops-${Date.now()}.jsonl`;
            const opsPath = `ops/${opsFilename}`;
            await writeFile(join(dir, opsPath), "", "utf-8");
            const newManifest = createDefaultManifest(snapshotPath, opsPath);
            await writeManifest(dir, newManifest);
            this.version = this.options.version;
            this.activeOpsPath = opsPath;
            this.created = newManifest.stats.created;
            this.archiveSegments = [];
        }
        else {
            // Load existing state
            const { records, version: storedVersion } = await loadSnapshot(dir, manifest.currentSnapshot);
            this.records = records;
            this.version = storedVersion;
            this.activeOpsPath = manifest.activeOps;
            this.created = manifest.stats.created;
            this.archiveSegments = manifest.archiveSegments;
            // Migrate if needed
            if (storedVersion < this.options.version) {
                for (const [id, record] of this.records) {
                    this.records.set(id, this.options.migrate(record, storedVersion));
                }
                this.version = this.options.version;
            }
            // Replay ops
            // NOTE(review): replayed op payloads are NOT passed through migrate(),
            // so ops written under an older schema bypass migration — confirm intended.
            const ops = await readOps(join(dir, manifest.activeOps));
            for (const op of ops) {
                this.applyOp(op);
            }
            this.ops = ops;
        }
        this.opened = true;
    }
    /** Checkpoint (if configured and ops are pending) and mark the store closed. */
    async close() {
        this.ensureOpen();
        if (this.options.checkpointOnClose && this.ops.length > 0) {
            await this.compact();
        }
        this.opened = false;
    }
    /** Return the record for `id`, or undefined when absent. */
    get(id) {
        this.ensureOpen();
        return this.records.get(id);
    }
    /**
     * Create or update a record. Records the previous value (null for a
     * create) so the op can be undone. Inside batch() the op is buffered
     * and nothing is written; otherwise it is persisted immediately.
     */
    set(id, value) {
        this.ensureOpen();
        const prev = this.records.get(id) ?? null;
        const op = {
            ts: new Date().toISOString(),
            op: "set",
            id,
            data: value,
            prev,
        };
        this.records.set(id, value);
        if (this.batching) {
            this.batchOps.push(op);
            return;
        }
        return this.persistOp(op);
    }
    /**
     * Remove a record; throws when `id` does not exist. The removed value is
     * kept as `prev` on the op so undo() can restore it.
     */
    delete(id) {
        this.ensureOpen();
        const prev = this.records.get(id);
        if (prev === undefined) {
            throw new Error(`Record '${id}' not found`);
        }
        const op = {
            ts: new Date().toISOString(),
            op: "delete",
            id,
            prev,
        };
        this.records.delete(id);
        if (this.batching) {
            this.batchOps.push(op);
            return;
        }
        return this.persistOp(op);
    }
    /** True when a record with this ID exists. */
    has(id) {
        this.ensureOpen();
        return this.records.has(id);
    }
    /** All current record values. */
    all() {
        this.ensureOpen();
        return Array.from(this.records.values());
    }
    /** All current [id, record] pairs. */
    entries() {
        this.ensureOpen();
        return Array.from(this.records.entries());
    }
    /** Records for which `predicate(value, id)` is true. */
    filter(predicate) {
        this.ensureOpen();
        const results = [];
        for (const [id, value] of this.records) {
            if (predicate(value, id))
                results.push(value);
        }
        return results;
    }
    /** Record count, optionally restricted by a predicate. */
    count(predicate) {
        this.ensureOpen();
        if (!predicate)
            return this.records.size;
        let n = 0;
        for (const [id, value] of this.records) {
            if (predicate(value, id))
                n++;
        }
        return n;
    }
    /**
     * Run `fn` (synchronous set/delete calls) as one unit: buffered ops are
     * written with a single appendOps call. On error the buffered ops are
     * reversed in LIFO order so in-memory state matches disk again.
     */
    async batch(fn) {
        this.ensureOpen();
        this.batching = true;
        this.batchOps = [];
        try {
            fn();
            if (this.batchOps.length > 0) {
                await appendOps(join(this.dir, this.activeOpsPath), this.batchOps);
                this.ops.push(...this.batchOps);
                if (this.ops.length >= this.options.checkpointThreshold) {
                    await this.compact();
                }
            }
        }
        catch (err) {
            // Rollback in-memory changes on failure
            for (const op of this.batchOps.reverse()) {
                this.reverseOp(op);
            }
            throw err;
        }
        finally {
            this.batching = false;
            this.batchOps = [];
        }
    }
    /**
     * Undo the most recent operation (in memory and in the ops file).
     * Only ops since the last checkpoint are undoable, since compact()
     * clears `ops`. Returns false when there is nothing to undo.
     */
    async undo() {
        this.ensureOpen();
        if (this.ops.length === 0)
            return false;
        const lastOp = this.ops[this.ops.length - 1];
        this.reverseOp(lastOp);
        this.ops.pop();
        await truncateLastOp(join(this.dir, this.activeOpsPath));
        return true;
    }
    /** Operations recorded for `id` since the last checkpoint. */
    getHistory(id) {
        this.ensureOpen();
        return this.ops.filter((op) => op.id === id);
    }
    /** Pending ops, optionally only those with `ts` strictly after `since`. */
    getOps(since) {
        this.ensureOpen();
        if (!since)
            return [...this.ops];
        return this.ops.filter((op) => op.ts > since);
    }
    /**
     * Checkpoint: write a new snapshot of current state, start a fresh empty
     * ops file, point the manifest at both, and clear pending ops.
     */
    async compact() {
        this.ensureOpen();
        const snapshotPath = await writeSnapshot(this.dir, this.records, this.version);
        const opsFilename = `ops-${Date.now()}.jsonl`;
        const opsPath = `ops/${opsFilename}`;
        await writeFile(join(this.dir, opsPath), "", "utf-8");
        const updatedManifest = {
            version: this.version,
            currentSnapshot: snapshotPath,
            activeOps: opsPath,
            archiveSegments: this.archiveSegments,
            stats: {
                activeRecords: this.records.size,
                // NOTE(review): archivedRecords is always written as 0 here even
                // after archive() has run — confirm whether it should be tracked.
                archivedRecords: 0,
                opsCount: 0,
                created: this.created,
                lastCheckpoint: new Date().toISOString(),
            },
        };
        await writeManifest(this.dir, updatedManifest);
        this.activeOpsPath = opsPath;
        this.ops = [];
    }
    /**
     * Move matching records into an archive segment file, remove them from
     * the active set, then checkpoint so the manifest reflects the change.
     * Returns the number of records archived (0 when nothing matched).
     */
    async archive(predicate, segment) {
        this.ensureOpen();
        const toArchive = new Map();
        for (const [id, value] of this.records) {
            if (predicate(value, id))
                toArchive.set(id, value);
        }
        if (toArchive.size === 0)
            return 0;
        const period = segment ?? this.defaultPeriod();
        const segmentPath = await writeArchiveSegment(this.dir, period, toArchive);
        if (!this.archiveSegments.includes(segmentPath)) {
            this.archiveSegments.push(segmentPath);
        }
        for (const id of toArchive.keys()) {
            this.records.delete(id);
        }
        await this.compact();
        return toArchive.size;
    }
    /**
     * Lazy-load an archive segment's records. Matches by substring against
     * known segment paths, so a period like "2026-Q1" is enough.
     */
    async loadArchive(segment) {
        this.ensureOpen();
        const segmentPath = this.archiveSegments.find((s) => s.includes(segment));
        if (!segmentPath)
            throw new Error(`Archive segment '${segment}' not found`);
        return loadArchiveSegment(this.dir, segmentPath);
    }
    /** Known archive segment paths (defensive copy). */
    listArchiveSegments() {
        this.ensureOpen();
        return [...this.archiveSegments];
    }
    /** Counters: active records, pending ops, archive segments. */
    stats() {
        this.ensureOpen();
        return {
            activeRecords: this.records.size,
            opsCount: this.ops.length,
            archiveSegments: this.archiveSegments.length,
        };
    }
    /** Throws unless open() has completed and close() has not been called. */
    ensureOpen() {
        if (!this.opened)
            throw new Error("Store is not open. Call open() first.");
    }
    /** Apply a logged operation to the in-memory map (used during replay). */
    applyOp(op) {
        if (op.op === "set" && op.data !== undefined) {
            this.records.set(op.id, op.data);
        }
        else if (op.op === "delete") {
            this.records.delete(op.id);
        }
    }
    /** Invert an operation using its recorded previous value. */
    reverseOp(op) {
        if (op.prev === null) {
            // Was a create — reverse by deleting
            this.records.delete(op.id);
        }
        else if (op.op === "delete") {
            // Was a delete — reverse by restoring
            this.records.set(op.id, op.prev);
        }
        else {
            // Was an update — reverse by restoring prev
            this.records.set(op.id, op.prev);
        }
    }
    /** Append one op to the active ops file and auto-checkpoint at the threshold. */
    async persistOp(op) {
        await appendOp(join(this.dir, this.activeOpsPath), op);
        this.ops.push(op);
        if (this.ops.length >= this.options.checkpointThreshold) {
            await this.compact();
        }
    }
    /** Default archive segment label for the current date, e.g. "2026-Q1". */
    defaultPeriod() {
        const now = new Date();
        const q = Math.ceil((now.getMonth() + 1) / 3);
        return `${now.getFullYear()}-Q${q}`;
    }
}
|
package/dist/types.d.ts
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
/** One logged mutation in the append-only operation log. */
export interface Operation<T = Record<string, unknown>> {
    /** ISO 8601 timestamp */
    ts: string;
    /** Operation type */
    op: "set" | "delete";
    /** Record ID */
    id: string;
    /** New value (present for set operations) */
    data?: T;
    /** Previous value (null for creates, full record for updates/deletes) */
    prev: T | null;
}
/** Immutable full-state capture written by a checkpoint. */
export interface Snapshot<T = Record<string, unknown>> {
    /** Schema version the records were written under */
    version: number;
    /** ISO 8601 time the snapshot was written */
    timestamp: string;
    /** All records at snapshot time, keyed by record ID */
    records: Record<string, T>;
}
/** Root metadata file pointing at the current snapshot and ops log. */
export interface Manifest {
    /** Schema version of the store's records */
    version: number;
    /** Path of the current snapshot, relative to the store directory */
    currentSnapshot: string;
    /** Path of the active ops file, relative to the store directory */
    activeOps: string;
    /** Known archive segment paths, relative to the store directory */
    archiveSegments: string[];
    stats: ManifestStats;
}
/** Counters and timestamps persisted alongside the manifest. */
export interface ManifestStats {
    activeRecords: number;
    archivedRecords: number;
    opsCount: number;
    /** ISO 8601 time the store was created */
    created: string;
    /** ISO 8601 time of the most recent checkpoint */
    lastCheckpoint: string;
}
/** On-disk shape of one archive file of old records. */
export interface ArchiveSegment<T = Record<string, unknown>> {
    version: number;
    /** Segment label, e.g. "2026-Q1" */
    period: string;
    /** ISO 8601 time the segment was written */
    timestamp: string;
    /** Archived records, keyed by record ID */
    records: Record<string, T>;
}
/** Options accepted by Store.open(). */
export interface StoreOptions {
    /** Auto-checkpoint after this many operations (default: 100) */
    checkpointThreshold?: number;
    /** Checkpoint on close (default: true) */
    checkpointOnClose?: boolean;
    /** Schema version for migration (default: 1) */
    version?: number;
    /** Migration function: called if stored version < current version */
    migrate?: (record: unknown, fromVersion: number) => unknown;
}
/** Counters returned by Store.stats(). */
export interface StoreStats {
    activeRecords: number;
    /** Operations since the last checkpoint */
    opsCount: number;
    /** Number of archive segment files */
    archiveSegments: number;
}
|
package/dist/types.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/dist/wal.d.ts
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
import type { Operation } from "./types.js";
|
|
2
|
+
/** Append a single operation as one JSON line to the log at `path`. */
export declare function appendOp<T>(path: string, op: Operation<T>): Promise<void>;
/** Append several operations to the log in a single write (one JSON line each). */
export declare function appendOps<T>(path: string, ops: Operation<T>[]): Promise<void>;
/**
 * Read all operations from the log. A missing file yields an empty array and
 * malformed lines (e.g. a torn write after a crash) are skipped.
 */
export declare function readOps<T>(path: string): Promise<Operation<T>[]>;
/**
 * Remove the last operation line from the log (used by undo). Resolves false
 * when the file is missing or already empty.
 */
export declare function truncateLastOp(path: string): Promise<boolean>;
|
package/dist/wal.js
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import { appendFile, readFile, writeFile } from "node:fs/promises";
|
|
2
|
+
/**
 * Append a single operation to the log as one JSON line.
 *
 * @param {string} path - Ops file path.
 * @param {object} op - Operation to record.
 * @returns {Promise<void>}
 */
export async function appendOp(path, op) {
    const line = `${JSON.stringify(op)}\n`;
    await appendFile(path, line, "utf-8");
}
|
|
5
|
+
/**
 * Append several operations to the log in a single write (one JSON line each).
 *
 * @param {string} path - Ops file path.
 * @param {object[]} ops - Operations to record.
 * @returns {Promise<void>}
 */
export async function appendOps(path, ops) {
    // BUG FIX: with an empty array the original appended `"" + "\n"` — a stray
    // blank line in the WAL. Nothing to write means nothing to append.
    if (ops.length === 0)
        return;
    const lines = ops.map((op) => JSON.stringify(op)).join("\n") + "\n";
    await appendFile(path, lines, "utf-8");
}
|
|
9
|
+
/**
 * Read all operations from the log at `path`.
 *
 * A missing/unreadable file is treated as an empty log, and lines that fail
 * to parse (e.g. a torn write after a crash mid-append) are silently skipped
 * — this is the crash-recovery path.
 *
 * @param {string} path - Ops file path.
 * @returns {Promise<object[]>} Parsed operations in file order.
 */
export async function readOps(path) {
    let raw;
    try {
        raw = await readFile(path, "utf-8");
    }
    catch {
        // No log file yet — nothing to replay.
        return [];
    }
    const parsed = [];
    for (const line of raw.trim().split("\n")) {
        if (!line)
            continue;
        try {
            parsed.push(JSON.parse(line));
        }
        catch {
            // Skip malformed lines (crash recovery)
        }
    }
    return parsed;
}
|
|
29
|
+
/**
 * Remove the last operation line from the log at `path` (used by undo).
 *
 * The file is rewritten with all but the final line; an empty result writes
 * an empty file rather than deleting it.
 *
 * @param {string} path - Ops file path.
 * @returns {Promise<boolean>} True when a line was removed; false when the
 *   file is missing or already empty.
 */
export async function truncateLastOp(path) {
    let raw;
    try {
        raw = await readFile(path, "utf-8");
    }
    catch {
        // Missing file — nothing to truncate.
        return false;
    }
    const entries = raw.trim().split("\n").filter(Boolean);
    if (entries.length === 0)
        return false;
    const kept = entries.slice(0, -1);
    const rewritten = kept.length > 0 ? kept.join("\n") + "\n" : "";
    await writeFile(path, rewritten, "utf-8");
    return true;
}
|
package/package.json
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@backloghq/opslog",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Embedded event-sourced document store. Append-only operation log with immutable snapshots, zero native dependencies.",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"types": "dist/index.d.ts",
|
|
7
|
+
"scripts": {
|
|
8
|
+
"build": "tsc",
|
|
9
|
+
"dev": "tsc --watch",
|
|
10
|
+
"lint": "eslint src/ tests/",
|
|
11
|
+
"test": "vitest run",
|
|
12
|
+
"test:watch": "vitest",
|
|
13
|
+
"test:coverage": "vitest run --coverage"
|
|
14
|
+
},
|
|
15
|
+
"keywords": [
|
|
16
|
+
"event-sourcing",
|
|
17
|
+
"append-only",
|
|
18
|
+
"wal",
|
|
19
|
+
"document-store",
|
|
20
|
+
"embedded",
|
|
21
|
+
"jsonl",
|
|
22
|
+
"snapshots"
|
|
23
|
+
],
|
|
24
|
+
"author": "mbocevski",
|
|
25
|
+
"license": "MIT",
|
|
26
|
+
"type": "module",
|
|
27
|
+
"engines": {
|
|
28
|
+
"node": ">=20"
|
|
29
|
+
},
|
|
30
|
+
"files": [
|
|
31
|
+
"dist/",
|
|
32
|
+
"README.md",
|
|
33
|
+
"LICENSE"
|
|
34
|
+
],
|
|
35
|
+
"devDependencies": {
|
|
36
|
+
"@eslint/js": "*",
|
|
37
|
+
"@types/node": "*",
|
|
38
|
+
"@vitest/coverage-v8": "^4.1.2",
|
|
39
|
+
"eslint": "*",
|
|
40
|
+
"typescript": "*",
|
|
41
|
+
"typescript-eslint": "*",
|
|
42
|
+
"vitest": "*"
|
|
43
|
+
}
|
|
44
|
+
}
|