@femtomc/mu-core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +50 -0
- package/dist/browser/idb.d.ts +14 -0
- package/dist/browser/idb.d.ts.map +1 -0
- package/dist/browser/idb.js +158 -0
- package/dist/browser/index.d.ts +5 -0
- package/dist/browser/index.d.ts.map +1 -0
- package/dist/browser/index.js +2 -0
- package/dist/browser/local_storage.d.ts +12 -0
- package/dist/browser/local_storage.d.ts.map +1 -0
- package/dist/browser/local_storage.js +38 -0
- package/dist/dag.d.ts +13 -0
- package/dist/dag.d.ts.map +1 -0
- package/dist/dag.js +140 -0
- package/dist/events.d.ts +44 -0
- package/dist/events.d.ts.map +1 -0
- package/dist/events.js +72 -0
- package/dist/ids.d.ts +6 -0
- package/dist/ids.d.ts.map +1 -0
- package/dist/ids.js +34 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/node/index.d.ts +14 -0
- package/dist/node/index.d.ts.map +1 -0
- package/dist/node/index.js +20 -0
- package/dist/node/jsonl.d.ts +13 -0
- package/dist/node/jsonl.d.ts.map +1 -0
- package/dist/node/jsonl.js +80 -0
- package/dist/node/run_context.d.ts +5 -0
- package/dist/node/run_context.d.ts.map +1 -0
- package/dist/node/run_context.js +8 -0
- package/dist/node/spec.d.ts +3 -0
- package/dist/node/spec.d.ts.map +1 -0
- package/dist/node/spec.js +27 -0
- package/dist/node/store.d.ts +13 -0
- package/dist/node/store.d.ts.map +1 -0
- package/dist/node/store.js +51 -0
- package/dist/persistence.d.ts +13 -0
- package/dist/persistence.d.ts.map +1 -0
- package/dist/persistence.js +15 -0
- package/dist/spec.d.ts +49 -0
- package/dist/spec.d.ts.map +1 -0
- package/dist/spec.js +38 -0
- package/package.json +25 -0
package/README.md
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
# @femtomc/mu-core
|
|
2
|
+
|
|
3
|
+
Core types and utilities shared across mu packages: JSONL store abstractions, event logging, DAG helpers, and spec schemas.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
After publishing:
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npm install @femtomc/mu-core
|
|
11
|
+
# or: bun add @femtomc/mu-core
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
From this repo:
|
|
15
|
+
|
|
16
|
+
```bash
|
|
17
|
+
cd mu
|
|
18
|
+
bun install
|
|
19
|
+
bun run build
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
## Usage
|
|
23
|
+
|
|
24
|
+
```ts
|
|
25
|
+
import { EventLog, InMemoryJsonlStore, JsonlEventSink, newRunId, runContext } from "@femtomc/mu-core/node";
|
|
26
|
+
|
|
27
|
+
const jsonl = new InMemoryJsonlStore();
|
|
28
|
+
const events = new EventLog(new JsonlEventSink(jsonl));
|
|
29
|
+
|
|
30
|
+
await runContext({ runId: newRunId() }, async () => {
|
|
31
|
+
await events.emit("demo.event", { source: "readme", payload: { ok: true } });
|
|
32
|
+
});
|
|
33
|
+
|
|
34
|
+
console.log(await jsonl.read());
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
## Tests / Typecheck
|
|
38
|
+
|
|
39
|
+
From the `mu/` repo root:
|
|
40
|
+
|
|
41
|
+
```bash
|
|
42
|
+
bun test packages/core
|
|
43
|
+
bun run typecheck
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
## Runtime
|
|
47
|
+
|
|
48
|
+
- `@femtomc/mu-core` is runtime-agnostic (no Node builtins).
|
|
49
|
+
- `@femtomc/mu-core/node` is **Node-only** (`node:fs`, `node:async_hooks`).
|
|
50
|
+
- `@femtomc/mu-core/browser` is **browser-only** (IndexedDB/localStorage).
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import type { JsonlStore } from "../persistence.js";
|
|
2
|
+
export type IndexedDbJsonlStoreOpts = {
|
|
3
|
+
dbName: string;
|
|
4
|
+
storeName: string;
|
|
5
|
+
ensureStores?: readonly string[];
|
|
6
|
+
};
|
|
7
|
+
export declare class IndexedDbJsonlStore<T = unknown> implements JsonlStore<T> {
|
|
8
|
+
#private;
|
|
9
|
+
constructor(opts: IndexedDbJsonlStoreOpts);
|
|
10
|
+
read(): Promise<T[]>;
|
|
11
|
+
write(rows: readonly T[]): Promise<void>;
|
|
12
|
+
append(row: T): Promise<void>;
|
|
13
|
+
}
|
|
14
|
+
//# sourceMappingURL=idb.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"idb.d.ts","sourceRoot":"","sources":["../../src/browser/idb.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAmGpD,MAAM,MAAM,uBAAuB,GAAG;IAErC,MAAM,EAAE,MAAM,CAAC;IAGf,SAAS,EAAE,MAAM,CAAC;IAIlB,YAAY,CAAC,EAAE,SAAS,MAAM,EAAE,CAAC;CACjC,CAAC;AAEF,qBAAa,mBAAmB,CAAC,CAAC,GAAG,OAAO,CAAE,YAAW,UAAU,CAAC,CAAC,CAAC;;gBAMlD,IAAI,EAAE,uBAAuB;IAoBnC,IAAI,IAAI,OAAO,CAAC,CAAC,EAAE,CAAC;IAepB,KAAK,CAAC,IAAI,EAAE,SAAS,CAAC,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAiBxC,MAAM,CAAC,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;CAa1C"}
|
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
// Throw `msg` as an Error unless `cond` is truthy.
function invariant(cond, msg) {
    if (cond) {
        return;
    }
    throw new Error(msg);
}
|
|
6
|
+
// Resolve the environment's IndexedDB factory, or null when unavailable
// (e.g. plain Node without a polyfill).
function getIndexedDb() {
    const idb = globalThis.indexedDB;
    return idb === undefined ? null : idb;
}
|
|
9
|
+
function requestToPromise(req) {
|
|
10
|
+
return new Promise((resolve, reject) => {
|
|
11
|
+
req.onsuccess = () => resolve(req.result);
|
|
12
|
+
req.onerror = () => reject(req.error ?? new Error("indexeddb request failed"));
|
|
13
|
+
});
|
|
14
|
+
}
|
|
15
|
+
function transactionDone(tx) {
|
|
16
|
+
return new Promise((resolve, reject) => {
|
|
17
|
+
tx.oncomplete = () => resolve();
|
|
18
|
+
tx.onabort = () => reject(tx.error ?? new Error("indexeddb transaction aborted"));
|
|
19
|
+
tx.onerror = () => reject(tx.error ?? new Error("indexeddb transaction failed"));
|
|
20
|
+
});
|
|
21
|
+
}
|
|
22
|
+
function hasObjectStore(db, name) {
|
|
23
|
+
const names = db.objectStoreNames;
|
|
24
|
+
if (names && typeof names.contains === "function") {
|
|
25
|
+
return names.contains(name);
|
|
26
|
+
}
|
|
27
|
+
// Fallback for older DOMStringList implementations.
|
|
28
|
+
try {
|
|
29
|
+
for (const n of names) {
|
|
30
|
+
if (n === name) {
|
|
31
|
+
return true;
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
}
|
|
35
|
+
catch { }
|
|
36
|
+
return false;
|
|
37
|
+
}
|
|
38
|
+
// Open `opts.dbName` and make sure every store in `opts.stores` exists.
// A fresh DB is created at v1 with the stores; if an existing DB predates
// some stores, the version is bumped by one to run another upgrade pass.
// NOTE(review): there is no `onblocked` handling — if another connection
// holds the DB open, the version bump can stall; confirm acceptable.
async function ensureDb(opts) {
    const idb = getIndexedDb();
    invariant(idb, "IndexedDB is not available in this environment");
    const openWithUpgrade = (version) => {
        const req = version === undefined
            ? idb.open(opts.dbName)
            : idb.open(opts.dbName, version);
        req.onupgradeneeded = () => {
            const db = req.result;
            for (const store of opts.stores) {
                if (!hasObjectStore(db, store)) {
                    db.createObjectStore(store, { autoIncrement: true });
                }
            }
        };
        return requestToPromise(req);
    };
    // First open creates a v1 DB when missing; stores are added on create.
    const db = await openWithUpgrade(undefined);
    if (opts.stores.every((s) => hasObjectStore(db, s))) {
        return db;
    }
    // Upgrade by one and create missing stores.
    const nextVersion = Math.max(1, Math.trunc(db.version ?? 1)) + 1;
    db.close();
    return await openWithUpgrade(nextVersion);
}
|
|
72
|
+
async function readAllFromStore(store) {
|
|
73
|
+
const out = [];
|
|
74
|
+
return await new Promise((resolve, reject) => {
|
|
75
|
+
const req = store.openCursor();
|
|
76
|
+
req.onerror = () => reject(req.error ?? new Error("indexeddb cursor failed"));
|
|
77
|
+
req.onsuccess = () => {
|
|
78
|
+
const cursor = req.result;
|
|
79
|
+
if (!cursor) {
|
|
80
|
+
resolve(out);
|
|
81
|
+
return;
|
|
82
|
+
}
|
|
83
|
+
out.push(cursor.value);
|
|
84
|
+
cursor.continue();
|
|
85
|
+
};
|
|
86
|
+
});
|
|
87
|
+
}
|
|
88
|
+
// JsonlStore backed by a single IndexedDB object store.
// Rows keep insertion order via auto-increment keys. Every operation is
// serialized through an in-instance promise chain, and the DB is opened
// and closed per call so other connections are not starved.
export class IndexedDbJsonlStore {
    #dbName;
    #storeName;
    #stores;
    #lock = Promise.resolve();
    constructor(opts) {
        this.#dbName = opts.dbName;
        this.#storeName = opts.storeName;
        this.#stores = opts.ensureStores ?? [opts.storeName];
    }
    // Queue `fn` behind every previously scheduled operation.
    async #withLock(fn) {
        const previous = this.#lock;
        let release = () => { };
        this.#lock = new Promise((resolve) => {
            release = resolve;
        });
        await previous;
        try {
            return await fn();
        }
        finally {
            release();
        }
    }
    // Open the DB, run `fn` against a transaction-scoped object store,
    // wait for the transaction to settle, then close the DB.
    async #inTransaction(mode, fn) {
        const db = await ensureDb({ dbName: this.#dbName, stores: this.#stores });
        try {
            const tx = db.transaction(this.#storeName, mode);
            const result = await fn(tx.objectStore(this.#storeName));
            await transactionDone(tx);
            return result;
        }
        finally {
            db.close();
        }
    }
    async read() {
        return await this.#withLock(() =>
            this.#inTransaction("readonly", (store) => readAllFromStore(store)));
    }
    async write(rows) {
        await this.#withLock(() =>
            this.#inTransaction("readwrite", async (store) => {
                store.clear();
                for (const row of rows) {
                    store.add(row);
                }
            }));
    }
    async append(row) {
        await this.#withLock(() =>
            this.#inTransaction("readwrite", async (store) => {
                store.add(row);
            }));
    }
}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
export type { IndexedDbJsonlStoreOpts } from "./idb.js";
|
|
2
|
+
export { IndexedDbJsonlStore } from "./idb.js";
|
|
3
|
+
export type { LocalStorageJsonlStoreOpts } from "./local_storage.js";
|
|
4
|
+
export { LocalStorageJsonlStore } from "./local_storage.js";
|
|
5
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/browser/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,uBAAuB,EAAE,MAAM,UAAU,CAAC;AACxD,OAAO,EAAE,mBAAmB,EAAE,MAAM,UAAU,CAAC;AAC/C,YAAY,EAAE,0BAA0B,EAAE,MAAM,oBAAoB,CAAC;AACrE,OAAO,EAAE,sBAAsB,EAAE,MAAM,oBAAoB,CAAC"}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import type { JsonlStore } from "../persistence.js";
|
|
2
|
+
export type LocalStorageJsonlStoreOpts = {
|
|
3
|
+
key: string;
|
|
4
|
+
};
|
|
5
|
+
export declare class LocalStorageJsonlStore<T = unknown> implements JsonlStore<T> {
|
|
6
|
+
#private;
|
|
7
|
+
constructor(opts: LocalStorageJsonlStoreOpts);
|
|
8
|
+
read(): Promise<T[]>;
|
|
9
|
+
write(rows: readonly T[]): Promise<void>;
|
|
10
|
+
append(row: T): Promise<void>;
|
|
11
|
+
}
|
|
12
|
+
//# sourceMappingURL=local_storage.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"local_storage.d.ts","sourceRoot":"","sources":["../../src/browser/local_storage.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAYpD,MAAM,MAAM,0BAA0B,GAAG;IACxC,GAAG,EAAE,MAAM,CAAC;CACZ,CAAC;AAEF,qBAAa,sBAAsB,CAAC,CAAC,GAAG,OAAO,CAAE,YAAW,UAAU,CAAC,CAAC,CAAC;;gBAGrD,IAAI,EAAE,0BAA0B;IAUtC,IAAI,IAAI,OAAO,CAAC,CAAC,EAAE,CAAC;IAYpB,KAAK,CAAC,IAAI,EAAE,SAAS,CAAC,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAIxC,MAAM,CAAC,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;CAK1C"}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
// Throw `msg` as an Error unless `cond` is truthy.
function invariant(cond, msg) {
    if (cond) {
        return;
    }
    throw new Error(msg);
}
|
|
6
|
+
// Resolve the environment's localStorage, or null when unavailable.
function getLocalStorage() {
    const ls = globalThis.localStorage;
    return ls === undefined ? null : ls;
}
|
|
9
|
+
// JsonlStore persisted as one JSON array under a single localStorage key.
// read/write/append are async only to satisfy the JsonlStore contract;
// localStorage itself is synchronous.
export class LocalStorageJsonlStore {
    #key;
    constructor(opts) {
        this.#key = opts.key;
    }
    // Fetch localStorage, throwing when the environment lacks it.
    #ls() {
        const ls = getLocalStorage();
        invariant(ls, "localStorage is not available in this environment");
        return ls;
    }
    async read() {
        const raw = this.#ls().getItem(this.#key);
        if (!raw) {
            return [];
        }
        const parsed = JSON.parse(raw);
        if (Array.isArray(parsed)) {
            return parsed;
        }
        throw new Error(`localStorage key ${JSON.stringify(this.#key)} did not contain an array`);
    }
    async write(rows) {
        this.#ls().setItem(this.#key, JSON.stringify(rows));
    }
    async append(row) {
        const existing = await this.read();
        existing.push(row);
        await this.write(existing);
    }
}
|
package/dist/dag.d.ts
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import type { Issue } from "./spec.js";
|
|
2
|
+
export type ValidationResult = {
|
|
3
|
+
is_final: boolean;
|
|
4
|
+
reason: string;
|
|
5
|
+
};
|
|
6
|
+
export declare function subtreeIds(issues: readonly Issue[], rootId: string): string[];
|
|
7
|
+
export declare function readyLeaves(issues: readonly Issue[], opts?: {
|
|
8
|
+
root_id?: string;
|
|
9
|
+
tags?: readonly string[];
|
|
10
|
+
}): Issue[];
|
|
11
|
+
export declare function collapsible(issues: readonly Issue[], rootId: string): Issue[];
|
|
12
|
+
export declare function validateDag(issues: readonly Issue[], rootId: string): ValidationResult;
|
|
13
|
+
//# sourceMappingURL=dag.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dag.d.ts","sourceRoot":"","sources":["../src/dag.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,WAAW,CAAC;AAEvC,MAAM,MAAM,gBAAgB,GAAG;IAC9B,QAAQ,EAAE,OAAO,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;CACf,CAAC;AAiBF,wBAAgB,UAAU,CAAC,MAAM,EAAE,SAAS,KAAK,EAAE,EAAE,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,CAuB7E;AAED,wBAAgB,WAAW,CAC1B,MAAM,EAAE,SAAS,KAAK,EAAE,EACxB,IAAI,GAAE;IAAE,OAAO,CAAC,EAAE,MAAM,CAAC;IAAC,IAAI,CAAC,EAAE,SAAS,MAAM,EAAE,CAAA;CAAO,GACvD,KAAK,EAAE,CAuCT;AAED,wBAAgB,WAAW,CAAC,MAAM,EAAE,SAAS,KAAK,EAAE,EAAE,MAAM,EAAE,MAAM,GAAG,KAAK,EAAE,CA2B7E;AAED,wBAAgB,WAAW,CAAC,MAAM,EAAE,SAAS,KAAK,EAAE,EAAE,MAAM,EAAE,MAAM,GAAG,gBAAgB,CAqDtF"}
|
package/dist/dag.js
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
function childrenByParent(issues) {
|
|
2
|
+
const byParent = new Map();
|
|
3
|
+
for (const issue of issues) {
|
|
4
|
+
for (const dep of issue.deps) {
|
|
5
|
+
if (dep.type !== "parent") {
|
|
6
|
+
continue;
|
|
7
|
+
}
|
|
8
|
+
const list = byParent.get(dep.target) ?? [];
|
|
9
|
+
list.push(issue);
|
|
10
|
+
byParent.set(dep.target, list);
|
|
11
|
+
}
|
|
12
|
+
}
|
|
13
|
+
return byParent;
|
|
14
|
+
}
|
|
15
|
+
// BFS over parent links starting at `rootId`; returns ids in visit order
// (root first). Cycles are tolerated via the visited set.
export function subtreeIds(issues, rootId) {
    const children = childrenByParent(issues);
    const visited = new Set();
    const order = [];
    const queue = [rootId];
    while (queue.length > 0) {
        const current = queue.shift();
        if (!current || visited.has(current)) {
            continue;
        }
        visited.add(current);
        order.push(current);
        for (const kid of children.get(current) ?? []) {
            queue.push(kid.id);
        }
    }
    return order;
}
|
|
36
|
+
// Open issues that are workable right now: not blocked, all children closed,
// carrying every requested tag, optionally limited to the subtree under
// opts.root_id. Sorted by priority ascending (missing priority counts as 3).
export function readyLeaves(issues, opts = {}) {
    const byId = new Map(issues.map((issue) => [issue.id, issue]));
    const scope = opts.root_id ? subtreeIds(issues, opts.root_id) : byId.keys();
    const idsInScope = new Set(scope);
    // A target is blocked while any "blocks" blocker is not terminally
    // closed (a close with outcome "expanded" still blocks).
    const blocked = new Set();
    for (const issue of issues) {
        const stillBlocking = issue.status !== "closed" || issue.outcome === "expanded";
        if (!stillBlocking) {
            continue;
        }
        for (const dep of issue.deps) {
            if (dep.type === "blocks") {
                blocked.add(dep.target);
            }
        }
    }
    const children = childrenByParent(issues);
    const ready = [];
    for (const issueId of idsInScope) {
        const issue = byId.get(issueId);
        if (!issue || issue.status !== "open" || blocked.has(issueId)) {
            continue;
        }
        const kids = children.get(issueId) ?? [];
        if (!kids.every((kid) => kid.status === "closed")) {
            continue;
        }
        if (opts.tags && !opts.tags.every((tag) => issue.tags.includes(tag))) {
            continue;
        }
        ready.push(issue);
    }
    ready.sort((a, b) => (a.priority ?? 3) - (b.priority ?? 3));
    return ready;
}
|
|
72
|
+
// "Expanded" closed issues within the subtree under rootId whose children
// all closed with a terminal outcome (success or skipped). Sorted by id.
export function collapsible(issues, rootId) {
    const byId = new Map(issues.map((issue) => [issue.id, issue]));
    const children = childrenByParent(issues);
    const terminalOutcomes = new Set(["success", "skipped"]);
    const isTerminal = (kid) =>
        kid.status === "closed" && kid.outcome != null && terminalOutcomes.has(kid.outcome);
    const out = [];
    for (const issueId of new Set(subtreeIds(issues, rootId))) {
        const node = byId.get(issueId);
        if (!node || node.status !== "closed" || node.outcome !== "expanded") {
            continue;
        }
        const kids = children.get(issueId) ?? [];
        if (kids.length > 0 && kids.every(isTerminal)) {
            out.push(node);
        }
    }
    out.sort((a, b) => a.id.localeCompare(b.id));
    return out;
}
|
|
97
|
+
// Decide whether the subtree under rootId is finished.
// Checks in order: failed/needs_work closures, "expanded" closures with no
// children, then remaining open work. `is_final: true` means no further
// orchestration is needed (including the degenerate missing-root case).
export function validateDag(issues, rootId) {
    const byId = new Map(issues.map((issue) => [issue.id, issue]));
    const ids = new Set(subtreeIds(issues, rootId));
    if (!byId.has(rootId)) {
        return { is_final: true, reason: "root not found" };
    }
    const children = childrenByParent(issues);
    const sortedMatches = (pred) =>
        [...ids].filter((issueId) => pred(byId.get(issueId), issueId)).sort();
    const needsReorch = sortedMatches((issue) =>
        issue?.status === "closed" &&
        (issue.outcome === "failure" || issue.outcome === "needs_work"));
    if (needsReorch.length > 0) {
        return { is_final: false, reason: `needs work: ${needsReorch.join(",")}` };
    }
    const badExpanded = sortedMatches((issue, issueId) =>
        issue?.status === "closed" &&
        issue.outcome === "expanded" &&
        (children.get(issueId)?.length ?? 0) === 0);
    if (badExpanded.length > 0) {
        return { is_final: false, reason: `expanded without children: ${badExpanded.join(",")}` };
    }
    const pending = [...ids].filter((issueId) => {
        const issue = byId.get(issueId);
        if (!issue) {
            return false;
        }
        // An "expanded" closure is not pending: its children carry the work.
        if (issue.status === "closed" && issue.outcome === "expanded") {
            return false;
        }
        return issue.status !== "closed";
    });
    if (pending.length === 0) {
        return { is_final: true, reason: "all work completed" };
    }
    if (pending.length === 1 && pending[0] === rootId && ids.size > 1) {
        return { is_final: false, reason: "all children closed, root still open" };
    }
    return { is_final: false, reason: "in progress" };
}
|
package/dist/events.d.ts
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import type { JsonlStore } from "./persistence.js";
|
|
2
|
+
export declare const EVENT_VERSION = 1;
|
|
3
|
+
export type EventEnvelope = {
|
|
4
|
+
v: number;
|
|
5
|
+
ts_ms: number;
|
|
6
|
+
type: string;
|
|
7
|
+
source: string;
|
|
8
|
+
payload: Record<string, unknown>;
|
|
9
|
+
run_id?: string;
|
|
10
|
+
issue_id?: string;
|
|
11
|
+
};
|
|
12
|
+
export type RunIdProvider = () => string | null;
|
|
13
|
+
export type EventSink = {
|
|
14
|
+
emit(event: EventEnvelope): Promise<void>;
|
|
15
|
+
};
|
|
16
|
+
export declare class NullEventSink implements EventSink {
|
|
17
|
+
emit(_event: EventEnvelope): Promise<void>;
|
|
18
|
+
}
|
|
19
|
+
export declare class JsonlEventSink implements EventSink {
|
|
20
|
+
#private;
|
|
21
|
+
constructor(store: Pick<JsonlStore<EventEnvelope>, "append">);
|
|
22
|
+
emit(event: EventEnvelope): Promise<void>;
|
|
23
|
+
}
|
|
24
|
+
export declare function currentRunId(): string | null;
|
|
25
|
+
export declare function runContext<T>(opts: {
|
|
26
|
+
runId: string | null;
|
|
27
|
+
}, fn: () => T): T;
|
|
28
|
+
export declare function runContext<T>(opts: {
|
|
29
|
+
runId: string | null;
|
|
30
|
+
}, fn: () => Promise<T>): Promise<T>;
|
|
31
|
+
export declare class EventLog {
|
|
32
|
+
#private;
|
|
33
|
+
constructor(sink: EventSink, opts?: {
|
|
34
|
+
runIdProvider?: RunIdProvider;
|
|
35
|
+
});
|
|
36
|
+
emit(eventType: string, opts: {
|
|
37
|
+
source: string;
|
|
38
|
+
payload?: Record<string, unknown>;
|
|
39
|
+
issueId?: string;
|
|
40
|
+
runId?: string | null;
|
|
41
|
+
tsMs?: number;
|
|
42
|
+
}): Promise<EventEnvelope>;
|
|
43
|
+
}
|
|
44
|
+
//# sourceMappingURL=events.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"events.d.ts","sourceRoot":"","sources":["../src/events.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAEnD,eAAO,MAAM,aAAa,IAAI,CAAC;AAE/B,MAAM,MAAM,aAAa,GAAG;IAC3B,CAAC,EAAE,MAAM,CAAC;IACV,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACjC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;CAClB,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG,MAAM,MAAM,GAAG,IAAI,CAAC;AAEhD,MAAM,MAAM,SAAS,GAAG;IACvB,IAAI,CAAC,KAAK,EAAE,aAAa,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;CAC1C,CAAC;AAEF,qBAAa,aAAc,YAAW,SAAS;IACjC,IAAI,CAAC,MAAM,EAAE,aAAa,GAAG,OAAO,CAAC,IAAI,CAAC;CACvD;AAED,qBAAa,cAAe,YAAW,SAAS;;gBAG5B,KAAK,EAAE,IAAI,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE,QAAQ,CAAC;IAItD,IAAI,CAAC,KAAK,EAAE,aAAa,GAAG,OAAO,CAAC,IAAI,CAAC;CAGtD;AAID,wBAAgB,YAAY,IAAI,MAAM,GAAG,IAAI,CAK5C;AAED,wBAAgB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE;IAAE,KAAK,EAAE,MAAM,GAAG,IAAI,CAAA;CAAE,EAAE,EAAE,EAAE,MAAM,CAAC,GAAG,CAAC,CAAC;AAC9E,wBAAgB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE;IAAE,KAAK,EAAE,MAAM,GAAG,IAAI,CAAA;CAAE,EAAE,EAAE,EAAE,MAAM,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;AAwBhG,qBAAa,QAAQ;;gBAID,IAAI,EAAE,SAAS,EAAE,IAAI,GAAE;QAAE,aAAa,CAAC,EAAE,aAAa,CAAA;KAAO;IAKnE,IAAI,CAChB,SAAS,EAAE,MAAM,EACjB,IAAI,EAAE;QACL,MAAM,EAAE,MAAM,CAAC;QACf,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAClC,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,KAAK,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;QACtB,IAAI,CAAC,EAAE,MAAM,CAAC;KACd,GACC,OAAO,CAAC,aAAa,CAAC;CAwBzB"}
|
package/dist/events.js
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
export const EVENT_VERSION = 1;
|
|
2
|
+
export class NullEventSink {
|
|
3
|
+
async emit(_event) { }
|
|
4
|
+
}
|
|
5
|
+
export class JsonlEventSink {
|
|
6
|
+
#store;
|
|
7
|
+
constructor(store) {
|
|
8
|
+
this.#store = store;
|
|
9
|
+
}
|
|
10
|
+
async emit(event) {
|
|
11
|
+
await this.#store.append(event);
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
const runIdStack = [];
|
|
15
|
+
export function currentRunId() {
|
|
16
|
+
if (runIdStack.length === 0) {
|
|
17
|
+
return null;
|
|
18
|
+
}
|
|
19
|
+
return runIdStack[runIdStack.length - 1] ?? null;
|
|
20
|
+
}
|
|
21
|
+
export function runContext(opts, fn) {
|
|
22
|
+
runIdStack.push(opts.runId);
|
|
23
|
+
let popped = false;
|
|
24
|
+
const pop = () => {
|
|
25
|
+
if (!popped) {
|
|
26
|
+
popped = true;
|
|
27
|
+
runIdStack.pop();
|
|
28
|
+
}
|
|
29
|
+
};
|
|
30
|
+
try {
|
|
31
|
+
const out = fn();
|
|
32
|
+
if (out && typeof out.then === "function") {
|
|
33
|
+
return out.finally(pop);
|
|
34
|
+
}
|
|
35
|
+
pop();
|
|
36
|
+
return out;
|
|
37
|
+
}
|
|
38
|
+
catch (err) {
|
|
39
|
+
pop();
|
|
40
|
+
throw err;
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
// Builds event envelopes and forwards them to a sink.
// run_id resolution: an explicit opts.runId wins; otherwise the provider
// (default: currentRunId) is consulted.
// NOTE(review): passing runId: null falls through to the provider rather
// than suppressing run_id — confirm that is the intended contract.
export class EventLog {
    #sink;
    #runIdProvider;
    constructor(sink, opts = {}) {
        this.#sink = sink;
        this.#runIdProvider = opts.runIdProvider ?? currentRunId;
    }
    // Emit one event and return the envelope that was written.
    // Throws TypeError when payload is not a plain object.
    async emit(eventType, opts) {
        const payload = opts.payload ?? {};
        const isPlainObject =
            typeof payload === "object" && payload !== null && !Array.isArray(payload);
        if (!isPlainObject) {
            throw new TypeError("payload must be an object");
        }
        const runId = opts.runId != null ? opts.runId : this.#runIdProvider();
        const event = {
            v: EVENT_VERSION,
            ts_ms: Math.trunc(opts.tsMs ?? Date.now()),
            type: eventType,
            source: opts.source,
            payload,
        };
        if (runId != null) {
            event.run_id = runId;
        }
        if (opts.issueId != null) {
            event.issue_id = opts.issueId;
        }
        await this.#sink.emit(event);
        return event;
    }
}
|
package/dist/ids.d.ts
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
export declare function randomHex(bytes: number): string;
|
|
2
|
+
export declare function shortId(): string;
|
|
3
|
+
export declare function newRunId(): string;
|
|
4
|
+
export declare function nowTs(): number;
|
|
5
|
+
export declare function nowTsMs(): number;
|
|
6
|
+
//# sourceMappingURL=ids.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ids.d.ts","sourceRoot":"","sources":["../src/ids.ts"],"names":[],"mappings":"AAmBA,wBAAgB,SAAS,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAE/C;AAED,wBAAgB,OAAO,IAAI,MAAM,CAGhC;AAED,wBAAgB,QAAQ,IAAI,MAAM,CAGjC;AAED,wBAAgB,KAAK,IAAI,MAAM,CAG9B;AAED,wBAAgB,OAAO,IAAI,MAAM,CAGhC"}
|
package/dist/ids.js
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
// Render one byte (0-255) as two lowercase hex digits.
function hexByte(n) {
    return n.toString(16).padStart(2, "0");
}
// Fill `len` bytes from Web Crypto when present, else Math.random.
// NOTE(review): the Math.random fallback is not cryptographically secure;
// confirm these ids are never used for security-sensitive purposes.
function randomBytes(len) {
    const out = new Uint8Array(len);
    const cryptoApi = globalThis.crypto;
    if (cryptoApi?.getRandomValues) {
        cryptoApi.getRandomValues(out);
        return out;
    }
    for (let i = 0; i < out.length; i += 1) {
        out[i] = Math.floor(Math.random() * 256);
    }
    return out;
}
// `bytes` random bytes rendered as a 2*bytes-char lowercase hex string.
export function randomHex(bytes) {
    return Array.from(randomBytes(bytes), hexByte).join("");
}
export function shortId() {
    // 8 hex chars for compact, human-scannable IDs.
    return randomHex(4);
}
export function newRunId() {
    // Matches Python's uuid4().hex shape: 32 hex chars, no dashes.
    return randomHex(16);
}
export function nowTs() {
    // Seconds since the Unix epoch.
    return Math.floor(Date.now() / 1000);
}
export function nowTsMs() {
    // Milliseconds since the Unix epoch.
    return Date.now();
}
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,UAAU,CAAC;AACzB,cAAc,aAAa,CAAC;AAC5B,cAAc,UAAU,CAAC;AACzB,cAAc,kBAAkB,CAAC;AACjC,cAAc,WAAW,CAAC"}
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
export { appendJsonl, FsJsonlStore, readJsonl, streamJsonl, writeJsonl } from "./jsonl.js";
|
|
2
|
+
export { currentRunId, runContext } from "./run_context.js";
|
|
3
|
+
export { executionSpecFromDict } from "./spec.js";
|
|
4
|
+
export type { StorePaths } from "./store.js";
|
|
5
|
+
export { findRepoRoot, getStorePaths } from "./store.js";
|
|
6
|
+
import { EventLog } from "../events.js";
|
|
7
|
+
export declare function fsEventLog(path: string): EventLog;
|
|
8
|
+
export declare function fsEventLogFromRepoRoot(repoRoot: string): EventLog;
|
|
9
|
+
export * from "../dag.js";
|
|
10
|
+
export { EVENT_VERSION, type EventEnvelope, EventLog, type EventSink, JsonlEventSink, NullEventSink, type RunIdProvider, } from "../events.js";
|
|
11
|
+
export { newRunId, nowTs, nowTsMs, randomHex, shortId } from "../ids.js";
|
|
12
|
+
export { InMemoryJsonlStore, type JsonlStore } from "../persistence.js";
|
|
13
|
+
export * from "../spec.js";
|
|
14
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/node/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,YAAY,EAAE,SAAS,EAAE,WAAW,EAAE,UAAU,EAAE,MAAM,YAAY,CAAC;AAC3F,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAC5D,OAAO,EAAE,qBAAqB,EAAE,MAAM,WAAW,CAAC;AAClD,YAAY,EAAE,UAAU,EAAE,MAAM,YAAY,CAAC;AAC7C,OAAO,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,YAAY,CAAC;AAEzD,OAAO,EAAE,QAAQ,EAAkB,MAAM,cAAc,CAAC;AAKxD,wBAAgB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ,CAEjD;AAED,wBAAgB,sBAAsB,CAAC,QAAQ,EAAE,MAAM,GAAG,QAAQ,CAEjE;AAED,cAAc,WAAW,CAAC;AAE1B,OAAO,EACN,aAAa,EACb,KAAK,aAAa,EAClB,QAAQ,EACR,KAAK,SAAS,EACd,cAAc,EACd,aAAa,EACb,KAAK,aAAa,GAClB,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AACzE,OAAO,EAAE,kBAAkB,EAAE,KAAK,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACxE,cAAc,YAAY,CAAC"}
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
export { appendJsonl, FsJsonlStore, readJsonl, streamJsonl, writeJsonl } from "./jsonl.js";
|
|
2
|
+
export { currentRunId, runContext } from "./run_context.js";
|
|
3
|
+
export { executionSpecFromDict } from "./spec.js";
|
|
4
|
+
export { findRepoRoot, getStorePaths } from "./store.js";
|
|
5
|
+
import { EventLog, JsonlEventSink } from "../events.js";
|
|
6
|
+
import { FsJsonlStore } from "./jsonl.js";
|
|
7
|
+
import { currentRunId } from "./run_context.js";
|
|
8
|
+
import { getStorePaths } from "./store.js";
|
|
9
|
+
export function fsEventLog(path) {
|
|
10
|
+
return new EventLog(new JsonlEventSink(new FsJsonlStore(path)), { runIdProvider: currentRunId });
|
|
11
|
+
}
|
|
12
|
+
export function fsEventLogFromRepoRoot(repoRoot) {
|
|
13
|
+
return fsEventLog(getStorePaths(repoRoot).eventsPath);
|
|
14
|
+
}
|
|
15
|
+
export * from "../dag.js";
|
|
16
|
+
// Re-export the node-free surface so node code can import from a single place.
|
|
17
|
+
export { EVENT_VERSION, EventLog, JsonlEventSink, NullEventSink, } from "../events.js";
|
|
18
|
+
export { newRunId, nowTs, nowTsMs, randomHex, shortId } from "../ids.js";
|
|
19
|
+
export { InMemoryJsonlStore } from "../persistence.js";
|
|
20
|
+
export * from "../spec.js";
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import type { JsonlStore } from "../persistence.js";
|
|
2
|
+
export declare function streamJsonl(path: string): AsyncGenerator<unknown>;
|
|
3
|
+
export declare function readJsonl(path: string): Promise<unknown[]>;
|
|
4
|
+
export declare function writeJsonl(path: string, rows: readonly unknown[]): Promise<void>;
|
|
5
|
+
export declare function appendJsonl(path: string, row: unknown): Promise<void>;
|
|
6
|
+
export declare class FsJsonlStore<T = unknown> implements JsonlStore<T> {
|
|
7
|
+
readonly path: string;
|
|
8
|
+
constructor(path: string);
|
|
9
|
+
read(): Promise<T[]>;
|
|
10
|
+
write(rows: readonly T[]): Promise<void>;
|
|
11
|
+
append(row: T): Promise<void>;
|
|
12
|
+
}
|
|
13
|
+
//# sourceMappingURL=jsonl.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"jsonl.d.ts","sourceRoot":"","sources":["../../src/node/jsonl.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAmBpD,wBAAuB,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,cAAc,CAAC,OAAO,CAAC,CAmBxE;AAED,wBAAsB,SAAS,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC,CAMhE;AAED,wBAAsB,UAAU,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,SAAS,OAAO,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAOtF;AAED,wBAAsB,WAAW,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC,CAU3E;AAED,qBAAa,YAAY,CAAC,CAAC,GAAG,OAAO,CAAE,YAAW,UAAU,CAAC,CAAC,CAAC;IAC9D,SAAgB,IAAI,EAAE,MAAM,CAAC;gBAEV,IAAI,EAAE,MAAM;IAIlB,IAAI,IAAI,OAAO,CAAC,CAAC,EAAE,CAAC;IAIpB,KAAK,CAAC,IAAI,EAAE,SAAS,CAAC,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAIxC,MAAM,CAAC,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;CAG1C"}
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
import { createReadStream } from "node:fs";
|
|
2
|
+
import { mkdir, open, rename, stat, writeFile } from "node:fs/promises";
|
|
3
|
+
import { dirname, join, parse as parsePath } from "node:path";
|
|
4
|
+
import { createInterface } from "node:readline";
|
|
5
|
+
/**
 * Derives the temp-file path used for atomic writes: same directory as
 * `path`, full file name (including extension) plus a `.tmp` suffix.
 * The original used `parsed.name` (stem only), so two files sharing a
 * stem but differing in extension (e.g. `issues.jsonl` vs `issues.md`)
 * collided on the same temp file; `parsed.base` keeps them distinct.
 */
function tmpPathFor(path) {
    const parsed = parsePath(path);
    return join(parsed.dir, `${parsed.base}.tmp`);
}
|
|
9
|
+
/**
 * Returns true when `path` exists (any file type), false on ENOENT.
 * Any other stat failure (e.g. EACCES) is re-thrown to the caller.
 */
async function exists(path) {
    try {
        await stat(path);
    }
    catch (err) {
        const code = err instanceof Error && "code" in err ? err.code : undefined;
        if (code === "ENOENT") {
            return false;
        }
        throw err;
    }
    return true;
}
|
|
21
|
+
/**
 * Async generator over a JSONL file: yields JSON.parse of every
 * non-blank line, skipping whitespace-only lines. Yields nothing when
 * the file does not exist. The readline interface and the underlying
 * stream are closed when iteration finishes, including early
 * termination by the consumer.
 */
export async function* streamJsonl(path) {
    if (!(await exists(path))) {
        return;
    }
    const input = createReadStream(path, { encoding: "utf8" });
    // crlfDelay: Infinity treats \r\n as a single line break.
    const lines = createInterface({ input, crlfDelay: Number.POSITIVE_INFINITY });
    try {
        for await (const raw of lines) {
            const text = raw.trim();
            if (text) {
                yield JSON.parse(text);
            }
        }
    }
    finally {
        lines.close();
        input.close();
    }
}
|
|
41
|
+
/** Eagerly collects every row of the JSONL file into an array (empty for a missing file). */
export async function readJsonl(path) {
    const collected = [];
    for await (const parsed of streamJsonl(path)) {
        collected.push(parsed);
    }
    return collected;
}
|
|
48
|
+
/**
 * Atomically replaces the file with one JSON line per row: the full
 * payload is written to a sibling temp file first, then renamed over
 * `path`. Parent directories are created as needed.
 */
export async function writeJsonl(path, rows) {
    await mkdir(dirname(path), { recursive: true });
    const tmp = tmpPathFor(path);
    let payload = "";
    for (const row of rows) {
        payload += `${JSON.stringify(row)}\n`;
    }
    await writeFile(tmp, payload, "utf8");
    await rename(tmp, path);
}
|
|
55
|
+
/**
 * Appends `row` as a single JSON line, creating parent directories as
 * needed. The file handle is always closed, even when the write fails.
 */
export async function appendJsonl(path, row) {
    await mkdir(dirname(path), { recursive: true });
    const fh = await open(path, "a");
    try {
        await fh.writeFile(`${JSON.stringify(row)}\n`, { encoding: "utf8" });
    }
    finally {
        await fh.close();
    }
}
|
|
66
|
+
/**
 * JsonlStore backed by a single file on disk. Every call delegates to
 * the module-level JSONL helpers; nothing is cached between calls.
 */
export class FsJsonlStore {
    path;
    constructor(path) {
        this.path = path;
    }
    /** Loads every row currently stored in the file. */
    async read() {
        return await readJsonl(this.path);
    }
    /** Atomically replaces the file's contents with `rows`. */
    async write(rows) {
        await writeJsonl(this.path, rows);
    }
    /** Appends one row to the end of the file. */
    async append(row) {
        await appendJsonl(this.path, row);
    }
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"run_context.d.ts","sourceRoot":"","sources":["../../src/node/run_context.ts"],"names":[],"mappings":"AAIA,wBAAgB,YAAY,IAAI,MAAM,GAAG,IAAI,CAE5C;AAED,wBAAgB,UAAU,CAAC,CAAC,EAAE,IAAI,EAAE;IAAE,KAAK,EAAE,MAAM,GAAG,IAAI,CAAA;CAAE,EAAE,EAAE,EAAE,MAAM,CAAC,GAAG,CAAC,CAE5E"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"spec.d.ts","sourceRoot":"","sources":["../../src/node/spec.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,YAAY,CAAC;AAMhD,wBAAgB,qBAAqB,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,EAAE,MAAM,GAAG,aAAa,CAwBlG"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { existsSync } from "node:fs";
|
|
2
|
+
import { isAbsolute, join } from "node:path";
|
|
3
|
+
/** Normalizes a loosely-typed field: non-empty strings pass through, everything else becomes null. */
function emptyStringToNull(value) {
    if (typeof value !== "string") {
        return null;
    }
    return value.length > 0 ? value : null;
}
|
|
6
|
+
/**
 * Builds an ExecutionSpec from an untyped dict, normalizing empty or
 * non-string fields to null. When `repoRoot` is provided: a missing
 * prompt_path is auto-resolved from the role name as
 * `.mu/roles/<role>.md` (only if that file exists), and a relative
 * prompt_path is resolved against repoRoot.
 */
export function executionSpecFromDict(d, repoRoot) {
    const role = emptyStringToNull(d.role);
    let prompt_path = emptyStringToNull(d.prompt_path);
    if (!prompt_path && role && repoRoot) {
        // Auto-resolve prompt_path from the role name.
        const rolePrompt = join(repoRoot, ".mu", "roles", `${role}.md`);
        if (existsSync(rolePrompt)) {
            prompt_path = rolePrompt;
        }
    }
    if (prompt_path && repoRoot && !isAbsolute(prompt_path)) {
        // Relative prompt paths are interpreted relative to the repo root.
        prompt_path = join(repoRoot, prompt_path);
    }
    return {
        role,
        prompt_path,
        cli: emptyStringToNull(d.cli),
        model: emptyStringToNull(d.model),
        reasoning: emptyStringToNull(d.reasoning),
    };
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
export type StorePaths = {
|
|
2
|
+
repoRoot: string;
|
|
3
|
+
storeDir: string;
|
|
4
|
+
issuesPath: string;
|
|
5
|
+
forumPath: string;
|
|
6
|
+
eventsPath: string;
|
|
7
|
+
logsDir: string;
|
|
8
|
+
rolesDir: string;
|
|
9
|
+
orchestratorPath: string;
|
|
10
|
+
};
|
|
11
|
+
export declare function findRepoRoot(start?: string): string;
|
|
12
|
+
export declare function getStorePaths(repoRoot: string): StorePaths;
|
|
13
|
+
//# sourceMappingURL=store.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"store.d.ts","sourceRoot":"","sources":["../../src/node/store.ts"],"names":[],"mappings":"AAwBA,MAAM,MAAM,UAAU,GAAG;IACxB,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,MAAM,CAAC;IAChB,QAAQ,EAAE,MAAM,CAAC;IACjB,gBAAgB,EAAE,MAAM,CAAC;CACzB,CAAC;AAEF,wBAAgB,YAAY,CAAC,KAAK,GAAE,MAAsB,GAAG,MAAM,CAgBlE;AAED,wBAAgB,aAAa,CAAC,QAAQ,EAAE,MAAM,GAAG,UAAU,CAY1D"}
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { statSync } from "node:fs";
|
|
2
|
+
import { dirname, join, resolve } from "node:path";
|
|
3
|
+
/** True when `path` names an existing directory; false for files and any stat failure. */
function isDirectory(path) {
    try {
        const st = statSync(path);
        return st.isDirectory();
    }
    catch {
        return false;
    }
}
|
|
11
|
+
/**
 * Coerces `path` to a directory: directories pass through unchanged,
 * files are replaced by their parent directory, and a non-existent
 * path is returned as-is (treated as a directory-like string).
 */
function toDirectory(path) {
    let isDir;
    try {
        isDir = statSync(path).isDirectory();
    }
    catch {
        return path;
    }
    return isDir ? path : dirname(path);
}
|
|
24
|
+
/**
 * Searches from `start` upward for the nearest directory containing a
 * `.mu` store. Returns the resolved start directory when no ancestor
 * has one (i.e. after reaching the filesystem root).
 */
export function findRepoRoot(start = process.cwd()) {
    const origin = resolve(toDirectory(start));
    for (let dir = origin; ; dir = dirname(dir)) {
        if (isDirectory(join(dir, ".mu"))) {
            return dir;
        }
        if (dirname(dir) === dir) {
            // Filesystem root reached without finding a store.
            return origin;
        }
    }
}
|
|
39
|
+
/** Derives every well-known store location under `<repoRoot>/.mu`. */
export function getStorePaths(repoRoot) {
    const storeDir = join(repoRoot, ".mu");
    const inStore = (...parts) => join(storeDir, ...parts);
    return {
        repoRoot,
        storeDir,
        issuesPath: inStore("issues.jsonl"),
        forumPath: inStore("forum.jsonl"),
        eventsPath: inStore("events.jsonl"),
        logsDir: inStore("logs"),
        rolesDir: inStore("roles"),
        orchestratorPath: inStore("orchestrator.md"),
    };
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
export type JsonlStore<T = unknown> = {
|
|
2
|
+
read(): Promise<T[]>;
|
|
3
|
+
write(rows: readonly T[]): Promise<void>;
|
|
4
|
+
append(row: T): Promise<void>;
|
|
5
|
+
};
|
|
6
|
+
export declare class InMemoryJsonlStore<T = unknown> implements JsonlStore<T> {
|
|
7
|
+
#private;
|
|
8
|
+
constructor(initial?: readonly T[]);
|
|
9
|
+
read(): Promise<T[]>;
|
|
10
|
+
write(rows: readonly T[]): Promise<void>;
|
|
11
|
+
append(row: T): Promise<void>;
|
|
12
|
+
}
|
|
13
|
+
//# sourceMappingURL=persistence.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"persistence.d.ts","sourceRoot":"","sources":["../src/persistence.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,UAAU,CAAC,CAAC,GAAG,OAAO,IAAI;IACrC,IAAI,IAAI,OAAO,CAAC,CAAC,EAAE,CAAC,CAAC;IACrB,KAAK,CAAC,IAAI,EAAE,SAAS,CAAC,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IACzC,MAAM,CAAC,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;CAC9B,CAAC;AAEF,qBAAa,kBAAkB,CAAC,CAAC,GAAG,OAAO,CAAE,YAAW,UAAU,CAAC,CAAC,CAAC;;gBAGjD,OAAO,GAAE,SAAS,CAAC,EAAO;IAIhC,IAAI,IAAI,OAAO,CAAC,CAAC,EAAE,CAAC;IAIpB,KAAK,CAAC,IAAI,EAAE,SAAS,CAAC,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAIxC,MAAM,CAAC,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC;CAG1C"}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
/**
 * JsonlStore backed by a private in-memory array. Rows are copied on
 * the way in and out, so callers never alias the internal state.
 */
export class InMemoryJsonlStore {
    #rows;
    constructor(initial = []) {
        this.#rows = Array.from(initial);
    }
    /** Returns a defensive copy of the current rows. */
    async read() {
        return this.#rows.slice();
    }
    /** Replaces all rows with a copy of `rows`. */
    async write(rows) {
        this.#rows = Array.from(rows);
    }
    /** Adds one row to the end. */
    async append(row) {
        this.#rows.push(row);
    }
}
|
package/dist/spec.d.ts
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import { z } from "zod";
|
|
2
|
+
export type ExecutionSpec = {
|
|
3
|
+
role: string | null;
|
|
4
|
+
prompt_path: string | null;
|
|
5
|
+
cli: string | null;
|
|
6
|
+
model: string | null;
|
|
7
|
+
reasoning: string | null;
|
|
8
|
+
};
|
|
9
|
+
export declare const DepSchema: z.ZodObject<{
|
|
10
|
+
type: z.ZodString;
|
|
11
|
+
target: z.ZodString;
|
|
12
|
+
}, z.core.$loose>;
|
|
13
|
+
export type Dep = z.infer<typeof DepSchema>;
|
|
14
|
+
export declare const IssueSchema: z.ZodObject<{
|
|
15
|
+
id: z.ZodString;
|
|
16
|
+
title: z.ZodString;
|
|
17
|
+
body: z.ZodString;
|
|
18
|
+
status: z.ZodEnum<{
|
|
19
|
+
open: "open";
|
|
20
|
+
in_progress: "in_progress";
|
|
21
|
+
closed: "closed";
|
|
22
|
+
}>;
|
|
23
|
+
outcome: z.ZodNullable<z.ZodString>;
|
|
24
|
+
tags: z.ZodArray<z.ZodString>;
|
|
25
|
+
deps: z.ZodArray<z.ZodObject<{
|
|
26
|
+
type: z.ZodString;
|
|
27
|
+
target: z.ZodString;
|
|
28
|
+
}, z.core.$loose>>;
|
|
29
|
+
execution_spec: z.ZodNullable<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
|
|
30
|
+
priority: z.ZodNumber;
|
|
31
|
+
created_at: z.ZodNumber;
|
|
32
|
+
updated_at: z.ZodNumber;
|
|
33
|
+
}, z.core.$loose>;
|
|
34
|
+
export type Issue = z.infer<typeof IssueSchema>;
|
|
35
|
+
export declare const ForumMessageSchema: z.ZodObject<{
|
|
36
|
+
topic: z.ZodString;
|
|
37
|
+
body: z.ZodString;
|
|
38
|
+
author: z.ZodString;
|
|
39
|
+
created_at: z.ZodNumber;
|
|
40
|
+
}, z.core.$loose>;
|
|
41
|
+
export type ForumMessage = z.infer<typeof ForumMessageSchema>;
|
|
42
|
+
export declare const PromptFrontmatterSchema: z.ZodObject<{
|
|
43
|
+
cli: z.ZodOptional<z.ZodString>;
|
|
44
|
+
model: z.ZodOptional<z.ZodString>;
|
|
45
|
+
reasoning: z.ZodOptional<z.ZodString>;
|
|
46
|
+
description: z.ZodOptional<z.ZodString>;
|
|
47
|
+
}, z.core.$loose>;
|
|
48
|
+
export type PromptFrontmatter = z.infer<typeof PromptFrontmatterSchema>;
|
|
49
|
+
//# sourceMappingURL=spec.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"spec.d.ts","sourceRoot":"","sources":["../src/spec.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAExB,MAAM,MAAM,aAAa,GAAG;IAC3B,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;IACpB,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;IACnB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;IACrB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB,CAAC;AAEF,eAAO,MAAM,SAAS;;;iBAKP,CAAC;AAChB,MAAM,MAAM,GAAG,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,SAAS,CAAC,CAAC;AAE5C,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;iBAcT,CAAC;AAChB,MAAM,MAAM,KAAK,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,WAAW,CAAC,CAAC;AAEhD,eAAO,MAAM,kBAAkB;;;;;iBAOhB,CAAC;AAChB,MAAM,MAAM,YAAY,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,kBAAkB,CAAC,CAAC;AAE9D,eAAO,MAAM,uBAAuB;;;;;iBAOrB,CAAC;AAChB,MAAM,MAAM,iBAAiB,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,uBAAuB,CAAC,CAAC"}
|
package/dist/spec.js
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { z } from "zod";
|
|
2
|
+
// One dependency edge on an issue; unknown keys survive parsing (passthrough).
export const DepSchema = z.object({
    type: z.string().min(1),
    target: z.string().min(1),
}).passthrough();
|
|
8
|
+
// Persisted issue record; unknown keys survive parsing (passthrough).
export const IssueSchema = z.object({
    id: z.string().min(1),
    title: z.string(),
    body: z.string(),
    status: z.enum(["open", "in_progress", "closed"]),
    outcome: z.string().nullable(),
    tags: z.array(z.string()),
    deps: z.array(DepSchema),
    execution_spec: z.record(z.string(), z.unknown()).nullable(),
    priority: z.number().int(),
    created_at: z.number().int(),
    updated_at: z.number().int(),
}).passthrough();
|
|
23
|
+
// One forum message; unknown keys survive parsing (passthrough).
export const ForumMessageSchema = z.object({
    topic: z.string().min(1),
    body: z.string(),
    author: z.string().min(1),
    created_at: z.number().int(),
}).passthrough();
|
|
31
|
+
// Prompt-file frontmatter; every key optional, unknown keys survive parsing (passthrough).
export const PromptFrontmatterSchema = z.object({
    cli: z.string().optional(),
    model: z.string().optional(),
    reasoning: z.string().optional(),
    description: z.string().optional(),
}).passthrough();
|
package/package.json
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@femtomc/mu-core",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"main": "./dist/index.js",
|
|
6
|
+
"types": "./dist/index.d.ts",
|
|
7
|
+
"exports": {
|
|
8
|
+
".": {
|
|
9
|
+
"types": "./dist/index.d.ts",
|
|
10
|
+
"default": "./dist/index.js"
|
|
11
|
+
},
|
|
12
|
+
"./browser": {
|
|
13
|
+
"types": "./dist/browser/index.d.ts",
|
|
14
|
+
"default": "./dist/browser/index.js"
|
|
15
|
+
},
|
|
16
|
+
"./node": {
|
|
17
|
+
"types": "./dist/node/index.d.ts",
|
|
18
|
+
"default": "./dist/node/index.js"
|
|
19
|
+
}
|
|
20
|
+
},
|
|
21
|
+
"files": ["dist/**"],
|
|
22
|
+
"dependencies": {
|
|
23
|
+
"zod": "^4.1.9"
|
|
24
|
+
}
|
|
25
|
+
}
|