@acpfx/core 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/package.json +13 -0
- package/src/config.ts +130 -0
- package/src/dag.ts +152 -0
- package/src/generated-types.ts +253 -0
- package/src/generated-zod.ts +160 -0
- package/src/index.ts +4 -0
- package/src/manifest.ts +60 -0
package/CHANGELOG.md
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# @acpfx/core
|
|
2
|
+
|
|
3
|
+
## 0.2.0
|
|
4
|
+
|
|
5
|
+
### Minor Changes
|
|
6
|
+
|
|
7
|
+
- d757640: Initial release: type-safe contracts, Rust orchestrator, manifest-driven event filtering
|
|
8
|
+
|
|
9
|
+
- Rust schema crate as canonical event type source of truth with codegen to TypeScript + Zod
|
|
10
|
+
- Node manifests (manifest.yaml) declaring consumes/emits contracts
|
|
11
|
+
- Orchestrator event filtering: nodes only receive declared events
|
|
12
|
+
- Rust orchestrator with ratatui TUI (--ui flag)
|
|
13
|
+
- node-sdk with structured logging helpers
|
|
14
|
+
- CI/CD with GitHub Actions and changesets
|
|
15
|
+
- Platform-specific npm packages for Rust binaries (esbuild-style distribution)
|
package/package.json
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@acpfx/core",
|
|
3
|
+
"version": "0.2.0",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"main": "./dist/index.js",
|
|
6
|
+
"types": "./dist/index.d.ts",
|
|
7
|
+
"dependencies": {
|
|
8
|
+
"yaml": "^2.8.3"
|
|
9
|
+
},
|
|
10
|
+
"scripts": {
|
|
11
|
+
"build": "esbuild src/index.ts --bundle --platform=node --format=esm --outfile=dist/index.js --packages=external"
|
|
12
|
+
}
|
|
13
|
+
}
|
package/src/config.ts
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* YAML config loader and validator for acpfx v2 pipeline configs.
|
|
3
|
+
*
|
|
4
|
+
* Config format:
|
|
5
|
+
* ```yaml
|
|
6
|
+
* nodes:
|
|
7
|
+
* <name>:
|
|
8
|
+
* use: "@acpfx/<impl>"
|
|
9
|
+
* settings: { ... }
|
|
10
|
+
* outputs: [<name>, ...]
|
|
11
|
+
* env:
|
|
12
|
+
* KEY: value
|
|
13
|
+
* ```
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
import { readFileSync } from "node:fs";
|
|
17
|
+
import { parse as parseYaml } from "yaml";
|
|
18
|
+
|
|
19
|
+
// ---- Config types ----
|
|
20
|
+
|
|
21
|
+
/** Configuration of a single pipeline node as declared in the YAML config. */
export type NodeConfig = {
  // Implementation package to run, e.g. "@acpfx/<impl>".
  use: string;
  // Free-form settings forwarded verbatim to the node implementation.
  settings?: Record<string, unknown>;
  // Names of downstream nodes this node's events are routed to.
  outputs?: string[];
};

/** Whole validated pipeline config: the node graph plus optional env vars. */
export type PipelineConfig = {
  nodes: Record<string, NodeConfig>;
  // Environment variables applied to the pipeline; values are always strings.
  env?: Record<string, string>;
};
|
|
31
|
+
|
|
32
|
+
// ---- Validation errors ----
|
|
33
|
+
|
|
34
|
+
/** Error thrown when a pipeline config fails structural validation. */
export class ConfigError extends Error {
  constructor(message: string) {
    super(message);
    // Explicit name so logs/`err.name` read "ConfigError", not generic "Error".
    this.name = "ConfigError";
  }
}
|
|
40
|
+
|
|
41
|
+
// ---- Loader ----
|
|
42
|
+
|
|
43
|
+
/** Load and validate a YAML config file. */
|
|
44
|
+
export function loadConfig(path: string): PipelineConfig {
|
|
45
|
+
const raw = readFileSync(path, "utf-8");
|
|
46
|
+
return parseConfig(raw);
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
/** Parse and validate a YAML string. */
|
|
50
|
+
export function parseConfig(yaml: string): PipelineConfig {
|
|
51
|
+
const doc = parseYaml(yaml);
|
|
52
|
+
return validateConfig(doc);
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
// ---- Validation ----
|
|
56
|
+
|
|
57
|
+
function validateConfig(doc: unknown): PipelineConfig {
|
|
58
|
+
if (!doc || typeof doc !== "object") {
|
|
59
|
+
throw new ConfigError("Config must be a YAML object");
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
const obj = doc as Record<string, unknown>;
|
|
63
|
+
|
|
64
|
+
if (!obj.nodes || typeof obj.nodes !== "object") {
|
|
65
|
+
throw new ConfigError("Config must have a 'nodes' object");
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
const nodes = obj.nodes as Record<string, unknown>;
|
|
69
|
+
const nodeNames = new Set(Object.keys(nodes));
|
|
70
|
+
|
|
71
|
+
if (nodeNames.size === 0) {
|
|
72
|
+
throw new ConfigError("Config must have at least one node");
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
const validatedNodes: Record<string, NodeConfig> = {};
|
|
76
|
+
|
|
77
|
+
for (const [name, raw] of Object.entries(nodes)) {
|
|
78
|
+
if (!raw || typeof raw !== "object") {
|
|
79
|
+
throw new ConfigError(`Node '${name}' must be an object`);
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
const node = raw as Record<string, unknown>;
|
|
83
|
+
|
|
84
|
+
if (typeof node.use !== "string" || node.use.length === 0) {
|
|
85
|
+
throw new ConfigError(`Node '${name}' must have a 'use' string`);
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
let outputs: string[] | undefined;
|
|
89
|
+
if (node.outputs !== undefined) {
|
|
90
|
+
if (!Array.isArray(node.outputs)) {
|
|
91
|
+
throw new ConfigError(`Node '${name}'.outputs must be an array`);
|
|
92
|
+
}
|
|
93
|
+
outputs = [];
|
|
94
|
+
for (const out of node.outputs) {
|
|
95
|
+
if (typeof out !== "string") {
|
|
96
|
+
throw new ConfigError(
|
|
97
|
+
`Node '${name}'.outputs must contain strings`,
|
|
98
|
+
);
|
|
99
|
+
}
|
|
100
|
+
if (!nodeNames.has(out)) {
|
|
101
|
+
throw new ConfigError(
|
|
102
|
+
`Node '${name}' outputs to undefined node '${out}'`,
|
|
103
|
+
);
|
|
104
|
+
}
|
|
105
|
+
outputs.push(out);
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
const settings =
|
|
110
|
+
node.settings && typeof node.settings === "object"
|
|
111
|
+
? (node.settings as Record<string, unknown>)
|
|
112
|
+
: undefined;
|
|
113
|
+
|
|
114
|
+
validatedNodes[name] = { use: node.use, settings, outputs };
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
// Validate env
|
|
118
|
+
let env: Record<string, string> | undefined;
|
|
119
|
+
if (obj.env !== undefined) {
|
|
120
|
+
if (typeof obj.env !== "object" || obj.env === null) {
|
|
121
|
+
throw new ConfigError("'env' must be an object");
|
|
122
|
+
}
|
|
123
|
+
env = {};
|
|
124
|
+
for (const [k, v] of Object.entries(obj.env as Record<string, unknown>)) {
|
|
125
|
+
env[k] = String(v);
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
return { nodes: validatedNodes, env };
|
|
130
|
+
}
|
package/src/dag.ts
ADDED
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Directed graph construction from a PipelineConfig.
|
|
3
|
+
*
|
|
4
|
+
* - Builds an adjacency list from node outputs
|
|
5
|
+
* - Produces a topological ordering (nodes in cycles are appended in config declaration order)
|
|
6
|
+
* - Computes downstream sets for interrupt propagation
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import { PipelineConfig } from "./config.js";
|
|
10
|
+
|
|
11
|
+
/** Error thrown when the pipeline graph cannot be built from the config. */
export class DagError extends Error {
  constructor(message: string) {
    super(message);
    // Explicit name so logs/`err.name` read "DagError", not generic "Error".
    this.name = "DagError";
  }
}
|
|
17
|
+
|
|
18
|
+
/** A single node in the pipeline graph, enriched with manifest data. */
export type DagNode = {
  name: string;
  // Implementation package identifier (mirrors NodeConfig.use).
  use: string;
  settings?: Record<string, unknown>;
  // Names of nodes this node's events are routed to (normalized: never undefined).
  outputs: string[];
  /** Event types this node declares it consumes (from manifest). Empty = accepts all. */
  consumes: string[];
  /** Event types this node declares it emits (from manifest). Empty = emits any. */
  emits: string[];
};

/** The validated pipeline graph plus precomputed routing metadata. */
export type Dag = {
  /** All nodes keyed by name. */
  nodes: Map<string, DagNode>;
  /** Topological order (sources first). */
  order: string[];
  /** For a given node, all nodes downstream (transitive). Used for interrupt propagation. */
  downstream: Map<string, Set<string>>;
};
|
|
37
|
+
|
|
38
|
+
/** Build and validate a DAG from a pipeline config. */
|
|
39
|
+
export function buildDag(config: PipelineConfig): Dag {
|
|
40
|
+
const nodes = new Map<string, DagNode>();
|
|
41
|
+
|
|
42
|
+
for (const [name, nc] of Object.entries(config.nodes)) {
|
|
43
|
+
nodes.set(name, {
|
|
44
|
+
name,
|
|
45
|
+
use: nc.use,
|
|
46
|
+
settings: nc.settings,
|
|
47
|
+
outputs: nc.outputs ?? [],
|
|
48
|
+
consumes: [],
|
|
49
|
+
emits: [],
|
|
50
|
+
});
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
// Validate all output references exist (config.ts already does this, but belt-and-suspenders)
|
|
54
|
+
for (const node of nodes.values()) {
|
|
55
|
+
for (const out of node.outputs) {
|
|
56
|
+
if (!nodes.has(out)) {
|
|
57
|
+
throw new DagError(
|
|
58
|
+
`Node '${node.name}' outputs to undefined node '${out}'`,
|
|
59
|
+
);
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
const order = topologicalSort(nodes);
|
|
65
|
+
const downstream = computeDownstream(nodes);
|
|
66
|
+
|
|
67
|
+
return { nodes, order, downstream };
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
/**
|
|
71
|
+
* Check if a node accepts a given event type based on its manifest.
|
|
72
|
+
* If the node has no consumes list (empty), it accepts everything (permissive mode).
|
|
73
|
+
*/
|
|
74
|
+
export function nodeConsumesEvent(node: DagNode, eventType: string): boolean {
|
|
75
|
+
if (node.consumes.length === 0) return true; // permissive: no manifest or empty consumes
|
|
76
|
+
return node.consumes.includes(eventType);
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
// ---- Topological sort (Kahn's algorithm, cycles allowed) ----
|
|
80
|
+
|
|
81
|
+
function topologicalSort(nodes: Map<string, DagNode>): string[] {
|
|
82
|
+
// Compute in-degrees
|
|
83
|
+
const inDegree = new Map<string, number>();
|
|
84
|
+
for (const name of nodes.keys()) {
|
|
85
|
+
inDegree.set(name, 0);
|
|
86
|
+
}
|
|
87
|
+
for (const node of nodes.values()) {
|
|
88
|
+
for (const out of node.outputs) {
|
|
89
|
+
inDegree.set(out, (inDegree.get(out) ?? 0) + 1);
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
// Start with zero in-degree nodes
|
|
94
|
+
const queue: string[] = [];
|
|
95
|
+
for (const [name, deg] of inDegree) {
|
|
96
|
+
if (deg === 0) queue.push(name);
|
|
97
|
+
}
|
|
98
|
+
// Sort for deterministic order
|
|
99
|
+
queue.sort();
|
|
100
|
+
|
|
101
|
+
const result: string[] = [];
|
|
102
|
+
while (queue.length > 0) {
|
|
103
|
+
const name = queue.shift()!;
|
|
104
|
+
result.push(name);
|
|
105
|
+
const node = nodes.get(name)!;
|
|
106
|
+
for (const out of node.outputs) {
|
|
107
|
+
const newDeg = (inDegree.get(out) ?? 1) - 1;
|
|
108
|
+
inDegree.set(out, newDeg);
|
|
109
|
+
if (newDeg === 0) {
|
|
110
|
+
// Insert sorted for deterministic order
|
|
111
|
+
const idx = queue.findIndex((q) => q > out);
|
|
112
|
+
if (idx === -1) queue.push(out);
|
|
113
|
+
else queue.splice(idx, 0, out);
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
if (result.length !== nodes.size) {
|
|
119
|
+
// Nodes in cycles: append in config declaration order
|
|
120
|
+
const sorted = new Set(result);
|
|
121
|
+
for (const name of nodes.keys()) {
|
|
122
|
+
if (!sorted.has(name)) result.push(name);
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
return result;
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
// ---- Downstream computation (transitive closure) ----
|
|
130
|
+
|
|
131
|
+
function computeDownstream(nodes: Map<string, DagNode>): Map<string, Set<string>> {
|
|
132
|
+
const downstream = new Map<string, Set<string>>();
|
|
133
|
+
|
|
134
|
+
for (const name of nodes.keys()) {
|
|
135
|
+
const visited = new Set<string>();
|
|
136
|
+
const stack = [...(nodes.get(name)?.outputs ?? [])];
|
|
137
|
+
while (stack.length > 0) {
|
|
138
|
+
const cur = stack.pop()!;
|
|
139
|
+
if (visited.has(cur)) continue;
|
|
140
|
+
visited.add(cur);
|
|
141
|
+
const curNode = nodes.get(cur);
|
|
142
|
+
if (curNode) {
|
|
143
|
+
for (const out of curNode.outputs) {
|
|
144
|
+
if (!visited.has(out)) stack.push(out);
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
downstream.set(name, visited);
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
return downstream;
|
|
152
|
+
}
|
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* AUTO-GENERATED by acpfx-codegen from packages/schema (Rust).
|
|
3
|
+
* DO NOT EDIT — re-run `cargo run -p acpfx-schema --bin acpfx-codegen`.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
/** Fields added by the orchestrator to every routed event. */
|
|
7
|
+
export type OrchestratorStamp = {
|
|
8
|
+
ts?: number;
|
|
9
|
+
_from?: string;
|
|
10
|
+
};
|
|
11
|
+
|
|
12
|
+
// ---- Audio ----
|
|
13
|
+
|
|
14
|
+
export type AudioChunkEvent = OrchestratorStamp & {
|
|
15
|
+
type: "audio.chunk";
|
|
16
|
+
trackId: string;
|
|
17
|
+
format: string;
|
|
18
|
+
sampleRate: number;
|
|
19
|
+
channels: number;
|
|
20
|
+
data: string;
|
|
21
|
+
durationMs: number;
|
|
22
|
+
kind?: "speech" | "sfx";
|
|
23
|
+
};
|
|
24
|
+
|
|
25
|
+
export type AudioLevelEvent = OrchestratorStamp & {
|
|
26
|
+
type: "audio.level";
|
|
27
|
+
trackId: string;
|
|
28
|
+
rms: number;
|
|
29
|
+
peak: number;
|
|
30
|
+
dbfs: number;
|
|
31
|
+
};
|
|
32
|
+
|
|
33
|
+
// ---- Speech Recognition ----
|
|
34
|
+
|
|
35
|
+
export type SpeechPartialEvent = OrchestratorStamp & {
|
|
36
|
+
type: "speech.partial";
|
|
37
|
+
trackId: string;
|
|
38
|
+
text: string;
|
|
39
|
+
};
|
|
40
|
+
|
|
41
|
+
export type SpeechDeltaEvent = OrchestratorStamp & {
|
|
42
|
+
type: "speech.delta";
|
|
43
|
+
trackId: string;
|
|
44
|
+
text: string;
|
|
45
|
+
replaces?: string;
|
|
46
|
+
};
|
|
47
|
+
|
|
48
|
+
export type SpeechFinalEvent = OrchestratorStamp & {
|
|
49
|
+
type: "speech.final";
|
|
50
|
+
trackId: string;
|
|
51
|
+
text: string;
|
|
52
|
+
confidence?: number;
|
|
53
|
+
};
|
|
54
|
+
|
|
55
|
+
export type SpeechPauseEvent = OrchestratorStamp & {
|
|
56
|
+
type: "speech.pause";
|
|
57
|
+
trackId: string;
|
|
58
|
+
pendingText: string;
|
|
59
|
+
silenceMs: number;
|
|
60
|
+
};
|
|
61
|
+
|
|
62
|
+
// ---- Agent/LLM ----
|
|
63
|
+
|
|
64
|
+
export type AgentSubmitEvent = OrchestratorStamp & {
|
|
65
|
+
type: "agent.submit";
|
|
66
|
+
requestId: string;
|
|
67
|
+
text: string;
|
|
68
|
+
};
|
|
69
|
+
|
|
70
|
+
export type AgentDeltaEvent = OrchestratorStamp & {
|
|
71
|
+
type: "agent.delta";
|
|
72
|
+
requestId: string;
|
|
73
|
+
delta: string;
|
|
74
|
+
seq: number;
|
|
75
|
+
};
|
|
76
|
+
|
|
77
|
+
export type AgentCompleteEvent = OrchestratorStamp & {
|
|
78
|
+
type: "agent.complete";
|
|
79
|
+
requestId: string;
|
|
80
|
+
text: string;
|
|
81
|
+
tokenUsage?: { input: number; output: number };
|
|
82
|
+
};
|
|
83
|
+
|
|
84
|
+
export type AgentThinkingEvent = OrchestratorStamp & {
|
|
85
|
+
type: "agent.thinking";
|
|
86
|
+
requestId: string;
|
|
87
|
+
};
|
|
88
|
+
|
|
89
|
+
export type AgentToolStartEvent = OrchestratorStamp & {
|
|
90
|
+
type: "agent.tool_start";
|
|
91
|
+
requestId: string;
|
|
92
|
+
toolCallId: string;
|
|
93
|
+
title?: string;
|
|
94
|
+
};
|
|
95
|
+
|
|
96
|
+
export type AgentToolDoneEvent = OrchestratorStamp & {
|
|
97
|
+
type: "agent.tool_done";
|
|
98
|
+
requestId: string;
|
|
99
|
+
toolCallId: string;
|
|
100
|
+
status: string;
|
|
101
|
+
};
|
|
102
|
+
|
|
103
|
+
// ---- Control ----
|
|
104
|
+
|
|
105
|
+
export type ControlInterruptEvent = OrchestratorStamp & {
|
|
106
|
+
type: "control.interrupt";
|
|
107
|
+
reason: string;
|
|
108
|
+
};
|
|
109
|
+
|
|
110
|
+
export type ControlStateEvent = OrchestratorStamp & {
|
|
111
|
+
type: "control.state";
|
|
112
|
+
component: string;
|
|
113
|
+
state: string;
|
|
114
|
+
};
|
|
115
|
+
|
|
116
|
+
export type ControlErrorEvent = OrchestratorStamp & {
|
|
117
|
+
type: "control.error";
|
|
118
|
+
component: string;
|
|
119
|
+
message: string;
|
|
120
|
+
fatal: boolean;
|
|
121
|
+
};
|
|
122
|
+
|
|
123
|
+
// ---- Lifecycle ----
|
|
124
|
+
|
|
125
|
+
export type LifecycleReadyEvent = OrchestratorStamp & {
|
|
126
|
+
type: "lifecycle.ready";
|
|
127
|
+
component: string;
|
|
128
|
+
};
|
|
129
|
+
|
|
130
|
+
export type LifecycleDoneEvent = OrchestratorStamp & {
|
|
131
|
+
type: "lifecycle.done";
|
|
132
|
+
component: string;
|
|
133
|
+
};
|
|
134
|
+
|
|
135
|
+
// ---- Log ----
|
|
136
|
+
|
|
137
|
+
export type LogEvent = OrchestratorStamp & {
|
|
138
|
+
type: "log";
|
|
139
|
+
level: "info" | "warn" | "error" | "debug";
|
|
140
|
+
component: string;
|
|
141
|
+
message: string;
|
|
142
|
+
};
|
|
143
|
+
|
|
144
|
+
// ---- Player ----
|
|
145
|
+
|
|
146
|
+
export type PlayerStatusEvent = OrchestratorStamp & {
|
|
147
|
+
type: "player.status";
|
|
148
|
+
playing: unknown;
|
|
149
|
+
agentState: unknown;
|
|
150
|
+
sfxActive: boolean;
|
|
151
|
+
};
|
|
152
|
+
|
|
153
|
+
// ---- Union types ----
|
|
154
|
+
|
|
155
|
+
export type AudioEvent = AudioChunkEvent | AudioLevelEvent;
|
|
156
|
+
|
|
157
|
+
export type SpeechEvent =
|
|
158
|
+
| SpeechPartialEvent
|
|
159
|
+
| SpeechDeltaEvent
|
|
160
|
+
| SpeechFinalEvent
|
|
161
|
+
| SpeechPauseEvent;
|
|
162
|
+
|
|
163
|
+
export type AgentEvent =
|
|
164
|
+
| AgentSubmitEvent
|
|
165
|
+
| AgentDeltaEvent
|
|
166
|
+
| AgentCompleteEvent
|
|
167
|
+
| AgentThinkingEvent
|
|
168
|
+
| AgentToolStartEvent
|
|
169
|
+
| AgentToolDoneEvent;
|
|
170
|
+
|
|
171
|
+
export type ControlEvent =
|
|
172
|
+
| ControlInterruptEvent
|
|
173
|
+
| ControlStateEvent
|
|
174
|
+
| ControlErrorEvent;
|
|
175
|
+
|
|
176
|
+
export type LifecycleEvent = LifecycleReadyEvent | LifecycleDoneEvent;
|
|
177
|
+
export type LogEventType = LogEvent;
|
|
178
|
+
|
|
179
|
+
export type PlayerEvent = PlayerStatusEvent;
|
|
180
|
+
|
|
181
|
+
export type PipelineEvent =
|
|
182
|
+
| AudioEvent
|
|
183
|
+
| SpeechEvent
|
|
184
|
+
| AgentEvent
|
|
185
|
+
| ControlEvent
|
|
186
|
+
| LifecycleEvent
|
|
187
|
+
| LogEvent
|
|
188
|
+
| PlayerEvent;
|
|
189
|
+
|
|
190
|
+
/** An event with a `type` field that doesn't match a known type. Forwarded unchanged. */
|
|
191
|
+
export type UnknownEvent = OrchestratorStamp & {
|
|
192
|
+
type: string;
|
|
193
|
+
[key: string]: unknown;
|
|
194
|
+
};
|
|
195
|
+
|
|
196
|
+
export type AnyEvent = PipelineEvent | UnknownEvent;
|
|
197
|
+
|
|
198
|
+
// ---- Type discrimination ----
|
|
199
|
+
|
|
200
|
+
// Every event `type` string the codegen produced a concrete schema for.
// Must stay in sync with the PipelineEvent union above and with the Rust
// source of truth in packages/schema.
const KNOWN_TYPES = new Set([
  "audio.chunk",
  "audio.level",
  "speech.partial",
  "speech.delta",
  "speech.final",
  "speech.pause",
  "agent.submit",
  "agent.delta",
  "agent.complete",
  "agent.thinking",
  "agent.tool_start",
  "agent.tool_done",
  "control.interrupt",
  "control.state",
  "control.error",
  "lifecycle.ready",
  "lifecycle.done",
  "log",
  "player.status",
]);

/** True if `type` matches one of the generated event types (see KNOWN_TYPES). */
export function isKnownEventType(type: string): boolean {
  return KNOWN_TYPES.has(type);
}
|
|
225
|
+
|
|
226
|
+
// ---- Serialization ----
|
|
227
|
+
|
|
228
|
+
/**
 * Parse a JSON-encoded event from the wire.
 *
 * Only checks that a string `type` field is present; the payload is NOT
 * schema-validated here (use the generated Zod schemas for that), so the
 * returned value may be an UnknownEvent.
 *
 * @throws SyntaxError if `json` is not valid JSON.
 * @throws Error if the parsed value has no string `type` field.
 */
export function parseEvent(json: string): AnyEvent {
  const obj = JSON.parse(json);
  if (!obj || typeof obj !== "object" || typeof obj.type !== "string") {
    throw new Error("Invalid event: missing 'type' field");
  }
  return obj as AnyEvent;
}
|
|
235
|
+
|
|
236
|
+
/** Serialize an event to its JSON wire format (inverse of parseEvent). */
export function serializeEvent(event: AnyEvent): string {
  return JSON.stringify(event);
}
|
|
239
|
+
|
|
240
|
+
// ---- Helpers ----
|
|
241
|
+
|
|
242
|
+
/**
 * Identity helper: returns `event` unchanged. Exists so callers get
 * contextual type checking and inference against a concrete event type
 * when constructing event literals.
 */
export function createEvent<T extends AnyEvent>(event: T): T {
  return event;
}
|
|
246
|
+
|
|
247
|
+
/**
 * Stamp an event with orchestrator metadata: the current wall-clock time
 * (`ts`) and the originating node name (`_from`). Returns a shallow copy;
 * any pre-existing stamp fields on the event are overwritten.
 */
export function stampEvent<T extends AnyEvent>(
  event: T,
  from: string,
): T & Required<OrchestratorStamp> {
  return { ...event, ts: Date.now(), _from: from };
}
|
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* AUTO-GENERATED by acpfx-codegen from packages/schema (Rust).
|
|
3
|
+
* DO NOT EDIT — re-run `cargo run -p acpfx-schema --bin acpfx-codegen`.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { z } from "zod";
|
|
7
|
+
|
|
8
|
+
export const OrchestratorStampSchema = z.object({
|
|
9
|
+
ts: z.number().optional(),
|
|
10
|
+
_from: z.string().optional(),
|
|
11
|
+
});
|
|
12
|
+
|
|
13
|
+
export const AudioChunkEventSchema = OrchestratorStampSchema.extend({
|
|
14
|
+
type: z.literal("audio.chunk"),
|
|
15
|
+
trackId: z.string(),
|
|
16
|
+
format: z.string(),
|
|
17
|
+
sampleRate: z.number(),
|
|
18
|
+
channels: z.number(),
|
|
19
|
+
data: z.string(),
|
|
20
|
+
durationMs: z.number(),
|
|
21
|
+
kind: z.enum(["speech", "sfx"]).optional(),
|
|
22
|
+
});
|
|
23
|
+
|
|
24
|
+
export const AudioLevelEventSchema = OrchestratorStampSchema.extend({
|
|
25
|
+
type: z.literal("audio.level"),
|
|
26
|
+
trackId: z.string(),
|
|
27
|
+
rms: z.number(),
|
|
28
|
+
peak: z.number(),
|
|
29
|
+
dbfs: z.number(),
|
|
30
|
+
});
|
|
31
|
+
|
|
32
|
+
export const SpeechPartialEventSchema = OrchestratorStampSchema.extend({
|
|
33
|
+
type: z.literal("speech.partial"),
|
|
34
|
+
trackId: z.string(),
|
|
35
|
+
text: z.string(),
|
|
36
|
+
});
|
|
37
|
+
|
|
38
|
+
export const SpeechDeltaEventSchema = OrchestratorStampSchema.extend({
|
|
39
|
+
type: z.literal("speech.delta"),
|
|
40
|
+
trackId: z.string(),
|
|
41
|
+
text: z.string(),
|
|
42
|
+
replaces: z.string().optional(),
|
|
43
|
+
});
|
|
44
|
+
|
|
45
|
+
export const SpeechFinalEventSchema = OrchestratorStampSchema.extend({
|
|
46
|
+
type: z.literal("speech.final"),
|
|
47
|
+
trackId: z.string(),
|
|
48
|
+
text: z.string(),
|
|
49
|
+
confidence: z.number().optional(),
|
|
50
|
+
});
|
|
51
|
+
|
|
52
|
+
export const SpeechPauseEventSchema = OrchestratorStampSchema.extend({
|
|
53
|
+
type: z.literal("speech.pause"),
|
|
54
|
+
trackId: z.string(),
|
|
55
|
+
pendingText: z.string(),
|
|
56
|
+
silenceMs: z.number(),
|
|
57
|
+
});
|
|
58
|
+
|
|
59
|
+
export const AgentSubmitEventSchema = OrchestratorStampSchema.extend({
|
|
60
|
+
type: z.literal("agent.submit"),
|
|
61
|
+
requestId: z.string(),
|
|
62
|
+
text: z.string(),
|
|
63
|
+
});
|
|
64
|
+
|
|
65
|
+
export const AgentDeltaEventSchema = OrchestratorStampSchema.extend({
|
|
66
|
+
type: z.literal("agent.delta"),
|
|
67
|
+
requestId: z.string(),
|
|
68
|
+
delta: z.string(),
|
|
69
|
+
seq: z.number(),
|
|
70
|
+
});
|
|
71
|
+
|
|
72
|
+
export const AgentCompleteEventSchema = OrchestratorStampSchema.extend({
|
|
73
|
+
type: z.literal("agent.complete"),
|
|
74
|
+
requestId: z.string(),
|
|
75
|
+
text: z.string(),
|
|
76
|
+
tokenUsage: z.object({ input: z.number(), output: z.number() }).optional(),
|
|
77
|
+
});
|
|
78
|
+
|
|
79
|
+
export const AgentThinkingEventSchema = OrchestratorStampSchema.extend({
|
|
80
|
+
type: z.literal("agent.thinking"),
|
|
81
|
+
requestId: z.string(),
|
|
82
|
+
});
|
|
83
|
+
|
|
84
|
+
export const AgentToolStartEventSchema = OrchestratorStampSchema.extend({
|
|
85
|
+
type: z.literal("agent.tool_start"),
|
|
86
|
+
requestId: z.string(),
|
|
87
|
+
toolCallId: z.string(),
|
|
88
|
+
title: z.string().optional(),
|
|
89
|
+
});
|
|
90
|
+
|
|
91
|
+
export const AgentToolDoneEventSchema = OrchestratorStampSchema.extend({
|
|
92
|
+
type: z.literal("agent.tool_done"),
|
|
93
|
+
requestId: z.string(),
|
|
94
|
+
toolCallId: z.string(),
|
|
95
|
+
status: z.string(),
|
|
96
|
+
});
|
|
97
|
+
|
|
98
|
+
export const ControlInterruptEventSchema = OrchestratorStampSchema.extend({
|
|
99
|
+
type: z.literal("control.interrupt"),
|
|
100
|
+
reason: z.string(),
|
|
101
|
+
});
|
|
102
|
+
|
|
103
|
+
export const ControlStateEventSchema = OrchestratorStampSchema.extend({
|
|
104
|
+
type: z.literal("control.state"),
|
|
105
|
+
component: z.string(),
|
|
106
|
+
state: z.string(),
|
|
107
|
+
});
|
|
108
|
+
|
|
109
|
+
export const ControlErrorEventSchema = OrchestratorStampSchema.extend({
|
|
110
|
+
type: z.literal("control.error"),
|
|
111
|
+
component: z.string(),
|
|
112
|
+
message: z.string(),
|
|
113
|
+
fatal: z.boolean(),
|
|
114
|
+
});
|
|
115
|
+
|
|
116
|
+
export const LifecycleReadyEventSchema = OrchestratorStampSchema.extend({
|
|
117
|
+
type: z.literal("lifecycle.ready"),
|
|
118
|
+
component: z.string(),
|
|
119
|
+
});
|
|
120
|
+
|
|
121
|
+
export const LifecycleDoneEventSchema = OrchestratorStampSchema.extend({
|
|
122
|
+
type: z.literal("lifecycle.done"),
|
|
123
|
+
component: z.string(),
|
|
124
|
+
});
|
|
125
|
+
|
|
126
|
+
export const LogEventSchema = OrchestratorStampSchema.extend({
|
|
127
|
+
type: z.literal("log"),
|
|
128
|
+
level: z.enum(["info", "warn", "error", "debug"]),
|
|
129
|
+
component: z.string(),
|
|
130
|
+
message: z.string(),
|
|
131
|
+
});
|
|
132
|
+
|
|
133
|
+
export const PlayerStatusEventSchema = OrchestratorStampSchema.extend({
|
|
134
|
+
type: z.literal("player.status"),
|
|
135
|
+
playing: z.unknown(),
|
|
136
|
+
agentState: z.unknown(),
|
|
137
|
+
sfxActive: z.boolean(),
|
|
138
|
+
});
|
|
139
|
+
|
|
140
|
+
export const PipelineEventSchema = z.discriminatedUnion("type", [
|
|
141
|
+
AudioChunkEventSchema,
|
|
142
|
+
AudioLevelEventSchema,
|
|
143
|
+
SpeechPartialEventSchema,
|
|
144
|
+
SpeechDeltaEventSchema,
|
|
145
|
+
SpeechFinalEventSchema,
|
|
146
|
+
SpeechPauseEventSchema,
|
|
147
|
+
AgentSubmitEventSchema,
|
|
148
|
+
AgentDeltaEventSchema,
|
|
149
|
+
AgentCompleteEventSchema,
|
|
150
|
+
AgentThinkingEventSchema,
|
|
151
|
+
AgentToolStartEventSchema,
|
|
152
|
+
AgentToolDoneEventSchema,
|
|
153
|
+
ControlInterruptEventSchema,
|
|
154
|
+
ControlStateEventSchema,
|
|
155
|
+
ControlErrorEventSchema,
|
|
156
|
+
LifecycleReadyEventSchema,
|
|
157
|
+
LifecycleDoneEventSchema,
|
|
158
|
+
LogEventSchema,
|
|
159
|
+
PlayerStatusEventSchema,
|
|
160
|
+
]);
|
package/src/index.ts
ADDED
package/src/manifest.ts
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Manifest utilities for acpfx nodes.
|
|
3
|
+
*
|
|
4
|
+
* Call `handleManifestFlag()` at the top of your node's entry point.
|
|
5
|
+
* If `--manifest` is in argv, it prints the manifest as JSON and exits.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { readFileSync } from "node:fs";
|
|
9
|
+
import { join, dirname } from "node:path";
|
|
10
|
+
|
|
11
|
+
/** Shape of a node's manifest.json: its event contract with the orchestrator. */
export interface NodeManifest {
  name: string;
  description?: string;
  // Event types the node accepts from the orchestrator.
  consumes: string[];
  // Event types the node may publish.
  emits: string[];
}
|
|
17
|
+
|
|
18
|
+
/**
|
|
19
|
+
* If `--manifest` is in process.argv, read the co-located manifest.json,
|
|
20
|
+
* print it to stdout, and exit(0).
|
|
21
|
+
*
|
|
22
|
+
* Resolution order:
|
|
23
|
+
* 1. Explicit `manifestPath` if provided
|
|
24
|
+
* 2. `<script-base>.manifest.json` (bundled: dist/nodes/foo.js -> foo.manifest.json)
|
|
25
|
+
* 3. `manifest.json` in the script's directory
|
|
26
|
+
*/
|
|
27
|
+
export function handleManifestFlag(manifestPath?: string): void {
|
|
28
|
+
if (!process.argv.includes("--manifest")) return;
|
|
29
|
+
|
|
30
|
+
if (!manifestPath) {
|
|
31
|
+
const script = process.argv[1];
|
|
32
|
+
const scriptDir = dirname(script);
|
|
33
|
+
const scriptBase = script.replace(/\.[^.]+$/, "");
|
|
34
|
+
const colocated = `${scriptBase}.manifest.json`;
|
|
35
|
+
try {
|
|
36
|
+
readFileSync(colocated);
|
|
37
|
+
manifestPath = colocated;
|
|
38
|
+
} catch {
|
|
39
|
+
manifestPath = join(scriptDir, "manifest.json");
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
try {
|
|
44
|
+
const content = readFileSync(manifestPath, "utf8");
|
|
45
|
+
// Already JSON — just write it out
|
|
46
|
+
process.stdout.write(content.trim() + "\n");
|
|
47
|
+
process.exit(0);
|
|
48
|
+
} catch (err) {
|
|
49
|
+
process.stderr.write(`Failed to read manifest: ${err}\n`);
|
|
50
|
+
process.exit(1);
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
/**
|
|
55
|
+
* Load a manifest from a JSON file.
|
|
56
|
+
*/
|
|
57
|
+
export function loadManifestJson(path: string): NodeManifest {
|
|
58
|
+
const content = readFileSync(path, "utf8");
|
|
59
|
+
return JSON.parse(content) as NodeManifest;
|
|
60
|
+
}
|