flingit 0.0.65 → 0.0.66
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/commands/db.d.ts +10 -0
- package/dist/cli/commands/db.d.ts.map +1 -1
- package/dist/cli/commands/db.js +17 -0
- package/dist/cli/commands/db.js.map +1 -1
- package/dist/cli/commands/dev.d.ts.map +1 -1
- package/dist/cli/commands/dev.js +13 -2
- package/dist/cli/commands/dev.js.map +1 -1
- package/dist/cli/commands/init.js +1 -1
- package/dist/cli/commands/init.js.map +1 -1
- package/dist/cli/commands/push.d.ts +1 -0
- package/dist/cli/commands/push.d.ts.map +1 -1
- package/dist/cli/commands/push.js +3 -3
- package/dist/cli/commands/push.js.map +1 -1
- package/dist/cli/commands/storage.d.ts +0 -3
- package/dist/cli/commands/storage.d.ts.map +1 -1
- package/dist/cli/commands/storage.js +1 -4
- package/dist/cli/commands/storage.js.map +1 -1
- package/dist/cli/utils/cli-io-impl.d.ts +24 -2
- package/dist/cli/utils/cli-io-impl.d.ts.map +1 -1
- package/dist/cli/utils/cli-io-impl.js +87 -3
- package/dist/cli/utils/cli-io-impl.js.map +1 -1
- package/dist/cli/utils/cli-io.d.ts +25 -1
- package/dist/cli/utils/cli-io.d.ts.map +1 -1
- package/dist/cli/utils/project-name.d.ts +9 -0
- package/dist/cli/utils/project-name.d.ts.map +1 -1
- package/dist/cli/utils/project-name.js +28 -0
- package/dist/cli/utils/project-name.js.map +1 -1
- package/dist/cli/utils/project.d.ts +14 -1
- package/dist/cli/utils/project.d.ts.map +1 -1
- package/dist/cli/utils/project.js +16 -0
- package/dist/cli/utils/project.js.map +1 -1
- package/dist/worker-runtime/d1-event-store.d.ts +2 -1
- package/dist/worker-runtime/d1-event-store.d.ts.map +1 -1
- package/dist/worker-runtime/d1-event-store.js +26 -0
- package/dist/worker-runtime/d1-event-store.js.map +1 -1
- package/dist/worker-runtime/entry.d.ts.map +1 -1
- package/dist/worker-runtime/entry.js +17 -16
- package/dist/worker-runtime/entry.js.map +1 -1
- package/dist/workflow/runtime.d.ts +3 -0
- package/dist/workflow/runtime.d.ts.map +1 -1
- package/dist/workflow/runtime.js +37 -4
- package/dist/workflow/runtime.js.map +1 -1
- package/node_modules/flingflow/README.md +10 -0
- package/node_modules/flingflow/SPEC.md +0 -4
- package/node_modules/flingflow/TESTING.md +2 -0
- package/node_modules/flingflow/dist/index.d.ts +1 -1
- package/node_modules/flingflow/dist/store-memory.d.ts +2 -1
- package/node_modules/flingflow/dist/store-memory.js +36 -0
- package/node_modules/flingflow/dist/store-memory.js.map +1 -1
- package/node_modules/flingflow/dist/store-sqlite.d.ts +2 -1
- package/node_modules/flingflow/dist/store-sqlite.js +45 -0
- package/node_modules/flingflow/dist/store-sqlite.js.map +1 -1
- package/node_modules/flingflow/dist/store.d.ts +5 -0
- package/node_modules/flingflow/src/index.ts +1 -1
- package/node_modules/flingflow/src/store-memory.ts +40 -0
- package/node_modules/flingflow/src/store-sqlite.ts +47 -0
- package/node_modules/flingflow/src/store.ts +6 -0
- package/node_modules/flingflow/test/store-conformance.ts +42 -0
- package/package.json +1 -1
- package/templates/default/dot-claude/skills/fling/.hash +1 -1
- package/templates/default/dot-claude/skills/fling/SKILL.md +2 -0
|
@@ -10,6 +10,7 @@ import {
|
|
|
10
10
|
type EventStoreCheck,
|
|
11
11
|
type AppendError,
|
|
12
12
|
type AppendIfNoActiveError,
|
|
13
|
+
type PruneResult,
|
|
13
14
|
} from "./store.js";
|
|
14
15
|
|
|
15
16
|
function toError(e: unknown): Error {
|
|
@@ -375,4 +376,43 @@ export class MemoryEventStore implements EventStore {
|
|
|
375
376
|
|
|
376
377
|
return ok(results);
|
|
377
378
|
}
|
|
379
|
+
|
|
380
|
+
async pruneTerminalRunsOlderThan(cutoff: number, sim: SimulationTask): Promise<Result<PruneResult, Error>> {
|
|
381
|
+
try {
|
|
382
|
+
await sim.failpoint("store.pruneTerminalRunsOlderThan:before", cutoff);
|
|
383
|
+
} catch (e: unknown) {
|
|
384
|
+
return err(toError(e));
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
let result: PruneResult;
|
|
388
|
+
try {
|
|
389
|
+
let prunedRuns = 0;
|
|
390
|
+
let prunedEvents = 0;
|
|
391
|
+
|
|
392
|
+
for (const [runId, map] of this.events) {
|
|
393
|
+
if (map.size === 0) continue;
|
|
394
|
+
const maxSeq = Math.max(...map.keys());
|
|
395
|
+
const last = this.deserialize(defined(map.get(maxSeq)));
|
|
396
|
+
if (!TERMINAL_EVENT_TYPES.includes(last.event_type) || last.created_at >= cutoff) {
|
|
397
|
+
continue;
|
|
398
|
+
}
|
|
399
|
+
|
|
400
|
+
prunedRuns++;
|
|
401
|
+
prunedEvents += map.size;
|
|
402
|
+
this.events.delete(runId);
|
|
403
|
+
}
|
|
404
|
+
|
|
405
|
+
result = { prunedRuns, prunedEvents };
|
|
406
|
+
} catch (e: unknown) {
|
|
407
|
+
return err(toError(e));
|
|
408
|
+
}
|
|
409
|
+
|
|
410
|
+
try {
|
|
411
|
+
await sim.failpoint("store.pruneTerminalRunsOlderThan:after", cutoff);
|
|
412
|
+
} catch (e: unknown) {
|
|
413
|
+
return err(toError(e));
|
|
414
|
+
}
|
|
415
|
+
|
|
416
|
+
return ok(result);
|
|
417
|
+
}
|
|
378
418
|
}
|
|
@@ -10,6 +10,7 @@ import {
|
|
|
10
10
|
type EventStoreCheck,
|
|
11
11
|
type AppendError,
|
|
12
12
|
type AppendIfNoActiveError,
|
|
13
|
+
type PruneResult,
|
|
13
14
|
} from "./store.js";
|
|
14
15
|
|
|
15
16
|
function toError(e: unknown): Error {
|
|
@@ -445,6 +446,52 @@ export class SqliteEventStore implements EventStore {
|
|
|
445
446
|
return ok(results);
|
|
446
447
|
}
|
|
447
448
|
|
|
449
|
+
async pruneTerminalRunsOlderThan(cutoff: number, sim: SimulationTask): Promise<Result<PruneResult, Error>> {
|
|
450
|
+
try {
|
|
451
|
+
await sim.failpoint("store.pruneTerminalRunsOlderThan:before", cutoff);
|
|
452
|
+
} catch (e: unknown) {
|
|
453
|
+
return err(toError(e));
|
|
454
|
+
}
|
|
455
|
+
|
|
456
|
+
let result: PruneResult;
|
|
457
|
+
try {
|
|
458
|
+
const runIds = this.db.prepare(`
|
|
459
|
+
SELECT e.run_id FROM ${this.table} e
|
|
460
|
+
INNER JOIN (
|
|
461
|
+
SELECT run_id, MAX(seq) as max_seq
|
|
462
|
+
FROM ${this.table}
|
|
463
|
+
GROUP BY run_id
|
|
464
|
+
) latest ON e.run_id = latest.run_id AND e.seq = latest.max_seq
|
|
465
|
+
WHERE e.event_type IN ('workflow_completed', 'workflow_failed')
|
|
466
|
+
AND e.created_at < ?
|
|
467
|
+
`).all(cutoff) as { run_id: string }[];
|
|
468
|
+
|
|
469
|
+
if (runIds.length === 0) {
|
|
470
|
+
result = { prunedRuns: 0, prunedEvents: 0 };
|
|
471
|
+
} else {
|
|
472
|
+
const deleteStmt = this.db.prepare(`DELETE FROM ${this.table} WHERE run_id = ?`);
|
|
473
|
+
const transaction = this.db.transaction((ids: string[]): PruneResult => {
|
|
474
|
+
let prunedEvents = 0;
|
|
475
|
+
for (const runId of ids) {
|
|
476
|
+
prunedEvents += deleteStmt.run(runId).changes;
|
|
477
|
+
}
|
|
478
|
+
return { prunedRuns: ids.length, prunedEvents };
|
|
479
|
+
});
|
|
480
|
+
result = transaction(runIds.map((row) => row.run_id));
|
|
481
|
+
}
|
|
482
|
+
} catch (e: unknown) {
|
|
483
|
+
return err(toError(e));
|
|
484
|
+
}
|
|
485
|
+
|
|
486
|
+
try {
|
|
487
|
+
await sim.failpoint("store.pruneTerminalRunsOlderThan:after", cutoff);
|
|
488
|
+
} catch (e: unknown) {
|
|
489
|
+
return err(toError(e));
|
|
490
|
+
}
|
|
491
|
+
|
|
492
|
+
return ok(result);
|
|
493
|
+
}
|
|
494
|
+
|
|
448
495
|
close(): void {
|
|
449
496
|
this.db.close();
|
|
450
497
|
}
|
|
@@ -30,6 +30,11 @@ export type EventStoreCheck = (runId: string, allEvents: WorkflowEvent[]) => voi
|
|
|
30
30
|
export type AppendError = UniqueConstraintError | WorkflowIdMismatchError | Error;
|
|
31
31
|
export type AppendIfNoActiveError = WorkflowAlreadyActiveError | UniqueConstraintError | Error;
|
|
32
32
|
|
|
33
|
+
export interface PruneResult {
|
|
34
|
+
prunedRuns: number;
|
|
35
|
+
prunedEvents: number;
|
|
36
|
+
}
|
|
37
|
+
|
|
33
38
|
/**
|
|
34
39
|
* Event payloads are stored as JSON. This means:
|
|
35
40
|
* - `undefined` values in objects are silently dropped
|
|
@@ -52,4 +57,5 @@ export interface EventStore {
|
|
|
52
57
|
getAllEvents(runId: string, sim: SimulationTask): Promise<Result<WorkflowEvent[], Error>>;
|
|
53
58
|
getActiveWorkflows(sim: SimulationTask): Promise<Result<WorkflowEvent[], Error>>;
|
|
54
59
|
getRecentRuns(limit: number, sim: SimulationTask): Promise<Result<WorkflowEvent[], Error>>;
|
|
60
|
+
pruneTerminalRunsOlderThan(cutoff: number, sim: SimulationTask): Promise<Result<PruneResult, Error>>;
|
|
55
61
|
}
|
|
@@ -326,6 +326,48 @@ export function storeConformanceTests(name: string, createStore: () => Promise<E
|
|
|
326
326
|
expect(runIds).toContain("run-done");
|
|
327
327
|
});
|
|
328
328
|
|
|
329
|
+
it("pruneTerminalRunsOlderThan removes all events for terminal runs older than the cutoff", async () => {
|
|
330
|
+
(await store.appendBatch([
|
|
331
|
+
{ run_id: "run-old-done", workflow_id: "wf-old-done", seq: 0, event_type: "workflow_created", payload: { workflowName: "test", writes: {}, config: TEST_CONFIG }, created_at: 1000 },
|
|
332
|
+
{ run_id: "run-old-done", workflow_id: "wf-old-done", seq: 1, event_type: "step_completed", payload: { step: "start", writes: {} }, created_at: 1100 },
|
|
333
|
+
{ run_id: "run-old-done", workflow_id: "wf-old-done", seq: 2, event_type: "workflow_completed", payload: { result: "ok" }, created_at: 2000 },
|
|
334
|
+
], noSim))._unsafeUnwrap();
|
|
335
|
+
|
|
336
|
+
(await store.appendBatch([
|
|
337
|
+
{ run_id: "run-old-fail", workflow_id: "wf-old-fail", seq: 0, event_type: "workflow_created", payload: { workflowName: "test", writes: {}, config: TEST_CONFIG }, created_at: 1200 },
|
|
338
|
+
{ run_id: "run-old-fail", workflow_id: "wf-old-fail", seq: 1, event_type: "step_failed", payload: { step: "start", attempt: 1, reason: "boom", retryable: false }, created_at: 1300 },
|
|
339
|
+
{ run_id: "run-old-fail", workflow_id: "wf-old-fail", seq: 2, event_type: "workflow_failed", payload: { reason: "boom", cause: "non_retryable" }, created_at: 2100 },
|
|
340
|
+
], noSim))._unsafeUnwrap();
|
|
341
|
+
|
|
342
|
+
const result = (await store.pruneTerminalRunsOlderThan(3000, noSim))._unsafeUnwrap();
|
|
343
|
+
expect(result).toEqual({ prunedRuns: 2, prunedEvents: 6 });
|
|
344
|
+
expect((await store.getAllEvents("run-old-done", noSim))._unsafeUnwrap()).toEqual([]);
|
|
345
|
+
expect((await store.getAllEvents("run-old-fail", noSim))._unsafeUnwrap()).toEqual([]);
|
|
346
|
+
});
|
|
347
|
+
|
|
348
|
+
it("pruneTerminalRunsOlderThan keeps active runs and runs whose terminal event is recent", async () => {
|
|
349
|
+
(await store.appendBatch([
|
|
350
|
+
{ run_id: "run-active", workflow_id: "wf-active", seq: 0, event_type: "workflow_created", payload: { workflowName: "test", writes: {}, config: TEST_CONFIG }, created_at: 1000 },
|
|
351
|
+
{ run_id: "run-active", workflow_id: "wf-active", seq: 1, event_type: "step_started", payload: { step: "start", attempt: 1 }, created_at: 2000 },
|
|
352
|
+
], noSim))._unsafeUnwrap();
|
|
353
|
+
|
|
354
|
+
(await store.appendBatch([
|
|
355
|
+
{ run_id: "run-recent-terminal", workflow_id: "wf-recent-terminal", seq: 0, event_type: "workflow_created", payload: { workflowName: "test", writes: {}, config: TEST_CONFIG }, created_at: 1000 },
|
|
356
|
+
{ run_id: "run-recent-terminal", workflow_id: "wf-recent-terminal", seq: 1, event_type: "workflow_completed", payload: { result: "ok" }, created_at: 4000 },
|
|
357
|
+
], noSim))._unsafeUnwrap();
|
|
358
|
+
|
|
359
|
+
(await store.appendBatch([
|
|
360
|
+
{ run_id: "run-old-created-recent-terminal", workflow_id: "wf-old-created-recent-terminal", seq: 0, event_type: "workflow_created", payload: { workflowName: "test", writes: {}, config: TEST_CONFIG }, created_at: 100 },
|
|
361
|
+
{ run_id: "run-old-created-recent-terminal", workflow_id: "wf-old-created-recent-terminal", seq: 1, event_type: "workflow_completed", payload: { result: "ok" }, created_at: 4500 },
|
|
362
|
+
], noSim))._unsafeUnwrap();
|
|
363
|
+
|
|
364
|
+
const result = (await store.pruneTerminalRunsOlderThan(3000, noSim))._unsafeUnwrap();
|
|
365
|
+
expect(result).toEqual({ prunedRuns: 0, prunedEvents: 0 });
|
|
366
|
+
expect((await store.getAllEvents("run-active", noSim))._unsafeUnwrap()).toHaveLength(2);
|
|
367
|
+
expect((await store.getAllEvents("run-recent-terminal", noSim))._unsafeUnwrap()).toHaveLength(2);
|
|
368
|
+
expect((await store.getAllEvents("run-old-created-recent-terminal", noSim))._unsafeUnwrap()).toHaveLength(2);
|
|
369
|
+
});
|
|
370
|
+
|
|
329
371
|
it("rejects appending event with mismatched workflow_id for existing run_id", async () => {
|
|
330
372
|
(await store.append({
|
|
331
373
|
run_id: "run-1", workflow_id: "wf-1", seq: 0, event_type: "workflow_created",
|
package/package.json
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
|
|
1
|
+
c192df8dd0fe550a80e4c04557abd1f8
|
|
@@ -340,6 +340,8 @@ This gives collaborators:
|
|
|
340
340
|
|
|
341
341
|
**IMPORTANT: Migrations MUST be idempotent** (safe to run multiple times).
|
|
342
342
|
|
|
343
|
+
**IMPORTANT: Tables MUST NOT have more than 100 columns**: There is a hard limit on this in deployed flings, even though it will work locally. Design schemas to stay under 100 columns. Split wide tables into related tables joined by foreign keys, or move rarely-used fields into a JSON column.
|
|
344
|
+
|
|
343
345
|
Use the `migrate` helper for schema changes:
|
|
344
346
|
|
|
345
347
|
```typescript
|