@exaudeus/workrail 0.17.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/application/services/output-normalizer.d.ts +9 -0
- package/dist/application/services/output-normalizer.js +38 -0
- package/dist/di/container.js +8 -0
- package/dist/di/tokens.d.ts +2 -0
- package/dist/di/tokens.js +2 -0
- package/dist/infrastructure/session/HttpServer.d.ts +2 -1
- package/dist/infrastructure/session/HttpServer.js +34 -10
- package/dist/infrastructure/session/SessionManager.js +19 -1
- package/dist/infrastructure/storage/enhanced-multi-source-workflow-storage.js +26 -2
- package/dist/infrastructure/storage/file-workflow-storage.js +4 -4
- package/dist/infrastructure/storage/git-workflow-storage.d.ts +0 -1
- package/dist/infrastructure/storage/git-workflow-storage.js +28 -29
- package/dist/infrastructure/storage/plugin-workflow-storage.js +11 -5
- package/dist/manifest.json +276 -92
- package/dist/mcp/handler-factory.d.ts +7 -0
- package/dist/mcp/handler-factory.js +70 -0
- package/dist/mcp/handlers/v2-execution-helpers.d.ts +4 -4
- package/dist/mcp/handlers/v2-execution-helpers.js +29 -0
- package/dist/mcp/handlers/v2-execution.js +460 -166
- package/dist/mcp/output-schemas.d.ts +350 -37
- package/dist/mcp/output-schemas.js +91 -18
- package/dist/mcp/server.js +32 -130
- package/dist/mcp/tool-descriptions.js +126 -18
- package/dist/mcp/types/workflow-tool-edition.d.ts +28 -0
- package/dist/mcp/types/workflow-tool-edition.js +10 -0
- package/dist/mcp/types.d.ts +2 -6
- package/dist/mcp/v1/tool-registry.d.ts +8 -0
- package/dist/mcp/v1/tool-registry.js +49 -0
- package/dist/mcp/v2/tool-registry.d.ts +2 -5
- package/dist/mcp/v2/tool-registry.js +33 -32
- package/dist/mcp/v2/tools.js +6 -6
- package/dist/mcp/workflow-tool-edition-selector.d.ts +4 -0
- package/dist/mcp/workflow-tool-edition-selector.js +13 -0
- package/dist/utils/storage-security.js +15 -1
- package/dist/v2/durable-core/constants.d.ts +1 -0
- package/dist/v2/durable-core/constants.js +2 -1
- package/dist/v2/durable-core/domain/ack-advance-append-plan.d.ts +14 -7
- package/dist/v2/durable-core/domain/ack-advance-append-plan.js +78 -23
- package/dist/v2/durable-core/domain/blocking-decision.d.ts +32 -0
- package/dist/v2/durable-core/domain/blocking-decision.js +41 -0
- package/dist/v2/durable-core/domain/context-merge.d.ts +8 -0
- package/dist/v2/durable-core/domain/context-merge.js +40 -0
- package/dist/v2/durable-core/domain/function-definition-expander.d.ts +14 -0
- package/dist/v2/durable-core/domain/function-definition-expander.js +66 -0
- package/dist/v2/durable-core/domain/gap-builder.d.ts +19 -0
- package/dist/v2/durable-core/domain/gap-builder.js +24 -0
- package/dist/v2/durable-core/domain/prompt-renderer.d.ts +24 -0
- package/dist/v2/durable-core/domain/prompt-renderer.js +167 -0
- package/dist/v2/durable-core/domain/reason-model.d.ts +94 -0
- package/dist/v2/durable-core/domain/reason-model.js +228 -0
- package/dist/v2/durable-core/domain/recap-recovery.d.ts +24 -0
- package/dist/v2/durable-core/domain/recap-recovery.js +71 -0
- package/dist/v2/durable-core/domain/validation-criteria-validator.d.ts +8 -0
- package/dist/v2/durable-core/domain/validation-criteria-validator.js +16 -0
- package/dist/v2/durable-core/domain/validation-requirements-extractor.d.ts +2 -0
- package/dist/v2/durable-core/domain/validation-requirements-extractor.js +58 -0
- package/dist/v2/durable-core/encoding/base32-lower.d.ts +1 -0
- package/dist/v2/durable-core/encoding/base32-lower.js +28 -0
- package/dist/v2/durable-core/ids/index.d.ts +4 -0
- package/dist/v2/durable-core/ids/index.js +7 -0
- package/dist/v2/durable-core/ids/workflow-hash-ref.d.ts +7 -0
- package/dist/v2/durable-core/ids/workflow-hash-ref.js +23 -0
- package/dist/v2/durable-core/schemas/export-bundle/index.d.ts +206 -0
- package/dist/v2/durable-core/schemas/session/events.d.ts +58 -0
- package/dist/v2/durable-core/schemas/session/events.js +9 -0
- package/dist/v2/durable-core/tokens/binary-payload.d.ts +35 -0
- package/dist/v2/durable-core/tokens/binary-payload.js +279 -0
- package/dist/v2/durable-core/tokens/index.d.ts +9 -4
- package/dist/v2/durable-core/tokens/index.js +17 -7
- package/dist/v2/durable-core/tokens/payloads.d.ts +12 -8
- package/dist/v2/durable-core/tokens/payloads.js +5 -3
- package/dist/v2/durable-core/tokens/token-codec-capabilities.d.ts +4 -0
- package/dist/v2/durable-core/tokens/token-codec-capabilities.js +2 -0
- package/dist/v2/durable-core/tokens/token-codec-ports.d.ts +42 -0
- package/dist/v2/durable-core/tokens/token-codec-ports.js +27 -0
- package/dist/v2/durable-core/tokens/token-codec.d.ts +18 -0
- package/dist/v2/durable-core/tokens/token-codec.js +108 -0
- package/dist/v2/durable-core/tokens/token-signer.d.ts +13 -1
- package/dist/v2/durable-core/tokens/token-signer.js +65 -0
- package/dist/v2/infra/local/base32/index.d.ts +6 -0
- package/dist/v2/infra/local/base32/index.js +44 -0
- package/dist/v2/infra/local/bech32m/index.d.ts +8 -0
- package/dist/v2/infra/local/bech32m/index.js +56 -0
- package/dist/v2/infra/local/data-dir/index.d.ts +1 -0
- package/dist/v2/infra/local/data-dir/index.js +5 -2
- package/dist/v2/infra/local/fs/index.js +3 -0
- package/dist/v2/infra/local/session-store/index.js +38 -4
- package/dist/v2/ports/base32.port.d.ts +16 -0
- package/dist/v2/ports/base32.port.js +2 -0
- package/dist/v2/ports/bech32m.port.d.ts +11 -0
- package/dist/v2/ports/bech32m.port.js +2 -0
- package/dist/v2/projections/run-context.d.ts +22 -0
- package/dist/v2/projections/run-context.js +33 -0
- package/package.json +20 -2
|
@@ -0,0 +1,279 @@
|
|
|
1
|
+
"use strict";
// Fixed-offset binary layout (v1) for workflow tokens.
// All three token kinds share a 66-byte frame:
//   [0] version byte, [1] kind byte, then four 16-byte ID fields.
Object.defineProperty(exports, "__esModule", { value: true });
exports.TOKEN_KIND_CHECKPOINT = exports.TOKEN_KIND_ACK = exports.TOKEN_KIND_STATE = void 0;
exports.packStateTokenPayload = packStateTokenPayload;
exports.packAckTokenPayload = packAckTokenPayload;
exports.packCheckpointTokenPayload = packCheckpointTokenPayload;
exports.unpackTokenPayload = unpackTokenPayload;
const neverthrow_1 = require("neverthrow");
const index_js_1 = require("../ids/index.js");
// Byte offsets for a 'state' token; its fourth field is the workflow hash ref.
const STATE_TOKEN_LAYOUT = {
    VERSION: 0,
    KIND: 1,
    SESSION_ID: 2,
    RUN_ID: 18,
    NODE_ID: 34,
    WORKFLOW_HASH_REF: 50,
    TOTAL_SIZE: 66,
};
// Byte offsets for an 'ack' token; its fourth field is the attempt id.
const ACK_TOKEN_LAYOUT = {
    VERSION: 0,
    KIND: 1,
    SESSION_ID: 2,
    RUN_ID: 18,
    NODE_ID: 34,
    ATTEMPT_ID: 50,
    TOTAL_SIZE: 66,
};
// Byte offsets for a 'checkpoint' token (same shape as 'ack').
const CHECKPOINT_TOKEN_LAYOUT = {
    VERSION: 0,
    KIND: 1,
    SESSION_ID: 2,
    RUN_ID: 18,
    NODE_ID: 34,
    ATTEMPT_ID: 50,
    TOTAL_SIZE: 66,
};
// Wire values for the kind byte at offset 1.
exports.TOKEN_KIND_STATE = 0;
exports.TOKEN_KIND_ACK = 1;
exports.TOKEN_KIND_CHECKPOINT = 2;
// Maps the payload's string kind to its wire byte.
const TOKEN_KIND_BYTES = {
    state: exports.TOKEN_KIND_STATE,
    ack: exports.TOKEN_KIND_ACK,
    checkpoint: exports.TOKEN_KIND_CHECKPOINT,
};
|
|
45
|
+
/**
 * Serializes a v1 'state' token payload into its 66-byte wire frame.
 * Returns err for a wrong version/kind or for any ID that fails to encode.
 */
function packStateTokenPayload(payload, base32) {
    if (payload.tokenVersion !== 1) {
        return (0, neverthrow_1.err)({ code: 'BINARY_INVALID_VERSION', version: payload.tokenVersion });
    }
    if (payload.tokenKind !== 'state') {
        return (0, neverthrow_1.err)({ code: 'BINARY_INVALID_TOKEN_KIND', kind: payload.tokenKind });
    }
    const frame = new Uint8Array(STATE_TOKEN_LAYOUT.TOTAL_SIZE);
    frame[STATE_TOKEN_LAYOUT.VERSION] = payload.tokenVersion;
    frame[STATE_TOKEN_LAYOUT.KIND] = TOKEN_KIND_BYTES[payload.tokenKind];
    // Encode each 16-byte ID lazily, in layout order; the first failure
    // short-circuits so later encoders never run.
    const fields = [
        [STATE_TOKEN_LAYOUT.SESSION_ID, () => sessionIdToBytes(payload.sessionId, base32)],
        [STATE_TOKEN_LAYOUT.RUN_ID, () => runIdToBytes(payload.runId, base32)],
        [STATE_TOKEN_LAYOUT.NODE_ID, () => nodeIdToBytes(payload.nodeId, base32)],
        [STATE_TOKEN_LAYOUT.WORKFLOW_HASH_REF, () => workflowHashRefToBytes(payload.workflowHashRef, base32)],
    ];
    for (const [offset, encode] of fields) {
        const encoded = encode();
        if (encoded.isErr()) {
            return encoded;
        }
        frame.set(encoded.value, offset);
    }
    return (0, neverthrow_1.ok)(frame);
}
|
|
73
|
+
/**
 * Serializes a v1 'ack' token payload into its 66-byte wire frame.
 * Returns err for a wrong version/kind or for any ID that fails to encode.
 */
function packAckTokenPayload(payload, base32) {
    if (payload.tokenVersion !== 1) {
        return (0, neverthrow_1.err)({ code: 'BINARY_INVALID_VERSION', version: payload.tokenVersion });
    }
    if (payload.tokenKind !== 'ack') {
        return (0, neverthrow_1.err)({ code: 'BINARY_INVALID_TOKEN_KIND', kind: payload.tokenKind });
    }
    const frame = new Uint8Array(ACK_TOKEN_LAYOUT.TOTAL_SIZE);
    frame[ACK_TOKEN_LAYOUT.VERSION] = payload.tokenVersion;
    frame[ACK_TOKEN_LAYOUT.KIND] = TOKEN_KIND_BYTES[payload.tokenKind];
    // Encode each 16-byte ID lazily, in layout order; the first failure
    // short-circuits so later encoders never run.
    const fields = [
        [ACK_TOKEN_LAYOUT.SESSION_ID, () => sessionIdToBytes(payload.sessionId, base32)],
        [ACK_TOKEN_LAYOUT.RUN_ID, () => runIdToBytes(payload.runId, base32)],
        [ACK_TOKEN_LAYOUT.NODE_ID, () => nodeIdToBytes(payload.nodeId, base32)],
        [ACK_TOKEN_LAYOUT.ATTEMPT_ID, () => attemptIdToBytes(payload.attemptId, base32)],
    ];
    for (const [offset, encode] of fields) {
        const encoded = encode();
        if (encoded.isErr()) {
            return encoded;
        }
        frame.set(encoded.value, offset);
    }
    return (0, neverthrow_1.ok)(frame);
}
|
|
101
|
+
/**
 * Serializes a v1 'checkpoint' token payload into its 66-byte wire frame.
 * Returns err for a wrong version/kind or for any ID that fails to encode.
 */
function packCheckpointTokenPayload(payload, base32) {
    if (payload.tokenVersion !== 1) {
        return (0, neverthrow_1.err)({ code: 'BINARY_INVALID_VERSION', version: payload.tokenVersion });
    }
    if (payload.tokenKind !== 'checkpoint') {
        return (0, neverthrow_1.err)({ code: 'BINARY_INVALID_TOKEN_KIND', kind: payload.tokenKind });
    }
    const frame = new Uint8Array(CHECKPOINT_TOKEN_LAYOUT.TOTAL_SIZE);
    frame[CHECKPOINT_TOKEN_LAYOUT.VERSION] = payload.tokenVersion;
    frame[CHECKPOINT_TOKEN_LAYOUT.KIND] = TOKEN_KIND_BYTES[payload.tokenKind];
    // Encode each 16-byte ID lazily, in layout order; the first failure
    // short-circuits so later encoders never run.
    const fields = [
        [CHECKPOINT_TOKEN_LAYOUT.SESSION_ID, () => sessionIdToBytes(payload.sessionId, base32)],
        [CHECKPOINT_TOKEN_LAYOUT.RUN_ID, () => runIdToBytes(payload.runId, base32)],
        [CHECKPOINT_TOKEN_LAYOUT.NODE_ID, () => nodeIdToBytes(payload.nodeId, base32)],
        [CHECKPOINT_TOKEN_LAYOUT.ATTEMPT_ID, () => attemptIdToBytes(payload.attemptId, base32)],
    ];
    for (const [offset, encode] of fields) {
        const encoded = encode();
        if (encoded.isErr()) {
            return encoded;
        }
        frame.set(encoded.value, offset);
    }
    return (0, neverthrow_1.ok)(frame);
}
|
|
129
|
+
/**
 * Deserializes a 66-byte v1 token frame into a typed payload object.
 * Validates total length and version, then branches on the kind byte to
 * interpret the fourth 16-byte field (workflowHashRef for 'state',
 * attemptId for 'ack'/'checkpoint'). Returns err on a length/version
 * mismatch, an undecodable ID, or an unknown kind byte.
 *
 * Fix: use the shared layout table instead of repeating the magic
 * numbers 66/0/1/2, and name the 16-byte field size, keeping this
 * function consistent with the pack* functions above (behavior unchanged).
 */
function unpackTokenPayload(bytes, base32) {
    // Every v1 layout has the same TOTAL_SIZE (66 bytes).
    if (bytes.length !== STATE_TOKEN_LAYOUT.TOTAL_SIZE) {
        return (0, neverthrow_1.err)({ code: 'BINARY_INVALID_LENGTH', expected: STATE_TOKEN_LAYOUT.TOTAL_SIZE, actual: bytes.length });
    }
    const tokenVersion = bytes[STATE_TOKEN_LAYOUT.VERSION];
    const tokenKind = bytes[STATE_TOKEN_LAYOUT.KIND];
    if (tokenVersion !== 1) {
        return (0, neverthrow_1.err)({ code: 'BINARY_UNSUPPORTED_VERSION', version: tokenVersion });
    }
    const ID_SIZE = 16; // every ID field is 16 raw bytes (26 base32 chars)
    let offset = STATE_TOKEN_LAYOUT.SESSION_ID;
    const sessionIdRes = bytesToSessionId(bytes.slice(offset, offset + ID_SIZE), base32);
    if (sessionIdRes.isErr())
        return (0, neverthrow_1.err)(sessionIdRes.error);
    const sessionId = sessionIdRes.value;
    offset += ID_SIZE;
    const runIdRes = bytesToRunId(bytes.slice(offset, offset + ID_SIZE), base32);
    if (runIdRes.isErr())
        return (0, neverthrow_1.err)(runIdRes.error);
    const runId = runIdRes.value;
    offset += ID_SIZE;
    const nodeIdRes = bytesToNodeId(bytes.slice(offset, offset + ID_SIZE), base32);
    if (nodeIdRes.isErr())
        return (0, neverthrow_1.err)(nodeIdRes.error);
    const nodeId = nodeIdRes.value;
    offset += ID_SIZE;
    // The meaning of the final field depends on the kind byte.
    const field4 = bytes.slice(offset, offset + ID_SIZE);
    switch (tokenKind) {
        case exports.TOKEN_KIND_STATE: {
            const wfRefRes = bytesToWorkflowHashRef(field4, base32);
            if (wfRefRes.isErr())
                return (0, neverthrow_1.err)(wfRefRes.error);
            return (0, neverthrow_1.ok)({
                tokenVersion: 1,
                tokenKind: 'state',
                sessionId,
                runId,
                nodeId,
                workflowHashRef: wfRefRes.value,
            });
        }
        case exports.TOKEN_KIND_ACK: {
            const attemptIdRes = bytesToAttemptId(field4, base32);
            if (attemptIdRes.isErr())
                return (0, neverthrow_1.err)(attemptIdRes.error);
            return (0, neverthrow_1.ok)({
                tokenVersion: 1,
                tokenKind: 'ack',
                sessionId,
                runId,
                nodeId,
                attemptId: attemptIdRes.value,
            });
        }
        case exports.TOKEN_KIND_CHECKPOINT: {
            const attemptIdRes = bytesToAttemptId(field4, base32);
            if (attemptIdRes.isErr())
                return (0, neverthrow_1.err)(attemptIdRes.error);
            return (0, neverthrow_1.ok)({
                tokenVersion: 1,
                tokenKind: 'checkpoint',
                sessionId,
                runId,
                nodeId,
                attemptId: attemptIdRes.value,
            });
        }
        default:
            return (0, neverthrow_1.err)({ code: 'BINARY_UNKNOWN_TOKEN_KIND', kind: tokenKind });
    }
}
|
|
199
|
+
/**
 * Decodes a prefixed ID string ('<prefix>_<26 base32 chars>') into its
 * 16 raw bytes, validating the prefix, alphabet, and decoded length.
 * All failures share the BINARY_INVALID_ID_FORMAT code with a
 * human-readable reason.
 */
function idStringToBytesOrFail(id, expectedPrefix, base32) {
    const fail = (reason) => (0, neverthrow_1.err)({
        code: 'BINARY_INVALID_ID_FORMAT',
        id,
        reason,
    });
    const parts = id.split('_');
    if (parts.length !== 2) {
        return fail('Expected format: <prefix>_<base32>');
    }
    const [prefix, suffix] = parts;
    if (prefix !== expectedPrefix) {
        return fail(`Expected prefix '${expectedPrefix}_', got '${prefix}_'`);
    }
    if (!/^[a-z2-7]{26}$/.test(suffix)) {
        return fail('ID suffix must be 26 base32 chars [a-z2-7]');
    }
    const decoded = base32.decode(suffix);
    if (decoded.isErr()) {
        return fail(`Base32 decode failed: ${decoded.error.code}`);
    }
    if (decoded.value.length !== 16) {
        return fail(`Decoded to ${decoded.value.length} bytes, expected 16`);
    }
    return (0, neverthrow_1.ok)(decoded.value);
}
|
|
240
|
+
/**
 * Re-encodes 16 raw ID bytes as '<prefix>_<base32>'.
 * Errs with BINARY_INVALID_ID_BYTES when the byte count is wrong.
 */
function idBytesToString(bytes, prefix, base32) {
    if (bytes.length === 16) {
        return (0, neverthrow_1.ok)(`${prefix}_${base32.encode(bytes)}`);
    }
    return (0, neverthrow_1.err)({
        code: 'BINARY_INVALID_ID_BYTES',
        field: prefix,
    });
}
|
|
250
|
+
// --- Typed ID codec wrappers -------------------------------------------
// One encode/decode pair per branded ID type. Encoding validates the
// prefixed string form ('<prefix>_<26 base32 chars>') and yields 16 raw
// bytes; decoding re-encodes the bytes and re-applies the brand via the
// as* constructors from ../ids/index.js.
function sessionIdToBytes(id, base32) {
    return idStringToBytesOrFail(String(id), 'sess', base32);
}
function bytesToSessionId(bytes, base32) {
    return idBytesToString(bytes, 'sess', base32).map(id => (0, index_js_1.asSessionId)(id));
}
function runIdToBytes(id, base32) {
    return idStringToBytesOrFail(String(id), 'run', base32);
}
function bytesToRunId(bytes, base32) {
    return idBytesToString(bytes, 'run', base32).map(id => (0, index_js_1.asRunId)(id));
}
function nodeIdToBytes(id, base32) {
    return idStringToBytesOrFail(String(id), 'node', base32);
}
function bytesToNodeId(bytes, base32) {
    return idBytesToString(bytes, 'node', base32).map(id => (0, index_js_1.asNodeId)(id));
}
function attemptIdToBytes(id, base32) {
    return idStringToBytesOrFail(String(id), 'attempt', base32);
}
function bytesToAttemptId(bytes, base32) {
    return idBytesToString(bytes, 'attempt', base32).map(id => (0, index_js_1.asAttemptId)(id));
}
function workflowHashRefToBytes(hashRef, base32) {
    return idStringToBytesOrFail(String(hashRef), 'wf', base32);
}
function bytesToWorkflowHashRef(bytes, base32) {
    return idBytesToString(bytes, 'wf', base32).map(id => (0, index_js_1.asWorkflowHashRef)(id));
}
|
|
@@ -1,8 +1,13 @@
|
|
|
1
1
|
export { TokenPayloadV1Schema, StateTokenPayloadV1Schema, AckTokenPayloadV1Schema, CheckpointTokenPayloadV1Schema, expectedPrefixForTokenKind, } from './payloads.js';
|
|
2
2
|
export type { TokenPayloadV1, StateTokenPayloadV1, AckTokenPayloadV1, CheckpointTokenPayloadV1 } from './payloads.js';
|
|
3
|
-
export {
|
|
4
|
-
export type { TokenDecodeErrorV2,
|
|
5
|
-
export {
|
|
6
|
-
export type {
|
|
3
|
+
export { encodeTokenPayloadV1Binary, parseTokenV1Binary } from './token-codec.js';
|
|
4
|
+
export type { TokenDecodeErrorV2, ParsedTokenV1Binary } from './token-codec.js';
|
|
5
|
+
export { packStateTokenPayload, packAckTokenPayload, packCheckpointTokenPayload, unpackTokenPayload, TOKEN_KIND_STATE, TOKEN_KIND_ACK, TOKEN_KIND_CHECKPOINT, } from './binary-payload.js';
|
|
6
|
+
export type { BinaryPackError, BinaryUnpackError } from './binary-payload.js';
|
|
7
|
+
export { signTokenV1Binary, verifyTokenSignatureV1Binary, assertTokenScopeMatchesStateBinary, } from './token-signer.js';
|
|
8
|
+
export type { TokenSignErrorV2, TokenVerifyErrorV2 } from './token-signer.js';
|
|
9
|
+
export { createTokenCodecPorts, unsafeTokenCodecPorts } from './token-codec-ports.js';
|
|
10
|
+
export type { TokenCodecPorts, TokenCodecPortsError } from './token-codec-ports.js';
|
|
11
|
+
export type { TokenParsePorts, TokenVerifyPorts, TokenSignPorts } from './token-codec-capabilities.js';
|
|
7
12
|
export type { AttemptId, OutputId } from '../ids/index.js';
|
|
8
13
|
export { asAttemptId, asOutputId } from '../ids/index.js';
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.asOutputId = exports.asAttemptId = exports.
|
|
3
|
+
exports.asOutputId = exports.asAttemptId = exports.unsafeTokenCodecPorts = exports.createTokenCodecPorts = exports.assertTokenScopeMatchesStateBinary = exports.verifyTokenSignatureV1Binary = exports.signTokenV1Binary = exports.TOKEN_KIND_CHECKPOINT = exports.TOKEN_KIND_ACK = exports.TOKEN_KIND_STATE = exports.unpackTokenPayload = exports.packCheckpointTokenPayload = exports.packAckTokenPayload = exports.packStateTokenPayload = exports.parseTokenV1Binary = exports.encodeTokenPayloadV1Binary = exports.expectedPrefixForTokenKind = exports.CheckpointTokenPayloadV1Schema = exports.AckTokenPayloadV1Schema = exports.StateTokenPayloadV1Schema = exports.TokenPayloadV1Schema = void 0;
|
|
4
4
|
var payloads_js_1 = require("./payloads.js");
|
|
5
5
|
Object.defineProperty(exports, "TokenPayloadV1Schema", { enumerable: true, get: function () { return payloads_js_1.TokenPayloadV1Schema; } });
|
|
6
6
|
Object.defineProperty(exports, "StateTokenPayloadV1Schema", { enumerable: true, get: function () { return payloads_js_1.StateTokenPayloadV1Schema; } });
|
|
@@ -8,13 +8,23 @@ Object.defineProperty(exports, "AckTokenPayloadV1Schema", { enumerable: true, ge
|
|
|
8
8
|
Object.defineProperty(exports, "CheckpointTokenPayloadV1Schema", { enumerable: true, get: function () { return payloads_js_1.CheckpointTokenPayloadV1Schema; } });
|
|
9
9
|
Object.defineProperty(exports, "expectedPrefixForTokenKind", { enumerable: true, get: function () { return payloads_js_1.expectedPrefixForTokenKind; } });
|
|
10
10
|
var token_codec_js_1 = require("./token-codec.js");
|
|
11
|
-
Object.defineProperty(exports, "
|
|
12
|
-
Object.defineProperty(exports, "
|
|
13
|
-
|
|
11
|
+
Object.defineProperty(exports, "encodeTokenPayloadV1Binary", { enumerable: true, get: function () { return token_codec_js_1.encodeTokenPayloadV1Binary; } });
|
|
12
|
+
Object.defineProperty(exports, "parseTokenV1Binary", { enumerable: true, get: function () { return token_codec_js_1.parseTokenV1Binary; } });
|
|
13
|
+
var binary_payload_js_1 = require("./binary-payload.js");
|
|
14
|
+
Object.defineProperty(exports, "packStateTokenPayload", { enumerable: true, get: function () { return binary_payload_js_1.packStateTokenPayload; } });
|
|
15
|
+
Object.defineProperty(exports, "packAckTokenPayload", { enumerable: true, get: function () { return binary_payload_js_1.packAckTokenPayload; } });
|
|
16
|
+
Object.defineProperty(exports, "packCheckpointTokenPayload", { enumerable: true, get: function () { return binary_payload_js_1.packCheckpointTokenPayload; } });
|
|
17
|
+
Object.defineProperty(exports, "unpackTokenPayload", { enumerable: true, get: function () { return binary_payload_js_1.unpackTokenPayload; } });
|
|
18
|
+
Object.defineProperty(exports, "TOKEN_KIND_STATE", { enumerable: true, get: function () { return binary_payload_js_1.TOKEN_KIND_STATE; } });
|
|
19
|
+
Object.defineProperty(exports, "TOKEN_KIND_ACK", { enumerable: true, get: function () { return binary_payload_js_1.TOKEN_KIND_ACK; } });
|
|
20
|
+
Object.defineProperty(exports, "TOKEN_KIND_CHECKPOINT", { enumerable: true, get: function () { return binary_payload_js_1.TOKEN_KIND_CHECKPOINT; } });
|
|
14
21
|
var token_signer_js_1 = require("./token-signer.js");
|
|
15
|
-
Object.defineProperty(exports, "
|
|
16
|
-
Object.defineProperty(exports, "
|
|
17
|
-
Object.defineProperty(exports, "
|
|
22
|
+
Object.defineProperty(exports, "signTokenV1Binary", { enumerable: true, get: function () { return token_signer_js_1.signTokenV1Binary; } });
|
|
23
|
+
Object.defineProperty(exports, "verifyTokenSignatureV1Binary", { enumerable: true, get: function () { return token_signer_js_1.verifyTokenSignatureV1Binary; } });
|
|
24
|
+
Object.defineProperty(exports, "assertTokenScopeMatchesStateBinary", { enumerable: true, get: function () { return token_signer_js_1.assertTokenScopeMatchesStateBinary; } });
|
|
25
|
+
var token_codec_ports_js_1 = require("./token-codec-ports.js");
|
|
26
|
+
Object.defineProperty(exports, "createTokenCodecPorts", { enumerable: true, get: function () { return token_codec_ports_js_1.createTokenCodecPorts; } });
|
|
27
|
+
Object.defineProperty(exports, "unsafeTokenCodecPorts", { enumerable: true, get: function () { return token_codec_ports_js_1.unsafeTokenCodecPorts; } });
|
|
18
28
|
var index_js_1 = require("../ids/index.js");
|
|
19
29
|
Object.defineProperty(exports, "asAttemptId", { enumerable: true, get: function () { return index_js_1.asAttemptId; } });
|
|
20
30
|
Object.defineProperty(exports, "asOutputId", { enumerable: true, get: function () { return index_js_1.asOutputId; } });
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
|
-
import type { AttemptId, NodeId, RunId, SessionId, TokenStringV1,
|
|
2
|
+
import type { AttemptId, NodeId, RunId, SessionId, TokenStringV1, WorkflowHashRef } from '../ids/index.js';
|
|
3
3
|
export type TokenVersionV1 = 1;
|
|
4
4
|
export type TokenKindV1 = 'state' | 'ack' | 'checkpoint';
|
|
5
5
|
export declare const AttemptIdSchema: z.ZodEffects<z.ZodString, AttemptId, string>;
|
|
@@ -12,7 +12,7 @@ export declare const StateTokenPayloadV1Schema: z.ZodObject<{
|
|
|
12
12
|
sessionId: z.ZodEffects<z.ZodString, SessionId, string>;
|
|
13
13
|
runId: z.ZodEffects<z.ZodString, RunId, string>;
|
|
14
14
|
nodeId: z.ZodEffects<z.ZodString, NodeId, string>;
|
|
15
|
-
|
|
15
|
+
workflowHashRef: z.ZodEffects<z.ZodString, WorkflowHashRef, string>;
|
|
16
16
|
}, "strip", z.ZodTypeAny, {
|
|
17
17
|
sessionId: string & {
|
|
18
18
|
readonly __brand: "v2.SessionId";
|
|
@@ -23,16 +23,18 @@ export declare const StateTokenPayloadV1Schema: z.ZodObject<{
|
|
|
23
23
|
nodeId: string & {
|
|
24
24
|
readonly __brand: "v2.NodeId";
|
|
25
25
|
};
|
|
26
|
-
workflowHash: never;
|
|
27
26
|
tokenVersion: 1;
|
|
28
27
|
tokenKind: "state";
|
|
28
|
+
workflowHashRef: string & {
|
|
29
|
+
readonly __brand: "v2.WorkflowHashRef";
|
|
30
|
+
};
|
|
29
31
|
}, {
|
|
30
32
|
sessionId: string;
|
|
31
33
|
runId: string;
|
|
32
34
|
nodeId: string;
|
|
33
|
-
workflowHash: string;
|
|
34
35
|
tokenVersion: 1;
|
|
35
36
|
tokenKind: "state";
|
|
37
|
+
workflowHashRef: string;
|
|
36
38
|
}>;
|
|
37
39
|
export type StateTokenPayloadV1 = z.infer<typeof StateTokenPayloadV1Schema> & {
|
|
38
40
|
readonly tokenVersion: TokenVersionV1;
|
|
@@ -40,7 +42,7 @@ export type StateTokenPayloadV1 = z.infer<typeof StateTokenPayloadV1Schema> & {
|
|
|
40
42
|
readonly sessionId: SessionId;
|
|
41
43
|
readonly runId: RunId;
|
|
42
44
|
readonly nodeId: NodeId;
|
|
43
|
-
readonly
|
|
45
|
+
readonly workflowHashRef: WorkflowHashRef;
|
|
44
46
|
};
|
|
45
47
|
export declare const AckTokenPayloadV1Schema: z.ZodObject<{
|
|
46
48
|
tokenVersion: z.ZodLiteral<1>;
|
|
@@ -124,7 +126,7 @@ export declare const TokenPayloadV1Schema: z.ZodDiscriminatedUnion<"tokenKind",
|
|
|
124
126
|
sessionId: z.ZodEffects<z.ZodString, SessionId, string>;
|
|
125
127
|
runId: z.ZodEffects<z.ZodString, RunId, string>;
|
|
126
128
|
nodeId: z.ZodEffects<z.ZodString, NodeId, string>;
|
|
127
|
-
|
|
129
|
+
workflowHashRef: z.ZodEffects<z.ZodString, WorkflowHashRef, string>;
|
|
128
130
|
}, "strip", z.ZodTypeAny, {
|
|
129
131
|
sessionId: string & {
|
|
130
132
|
readonly __brand: "v2.SessionId";
|
|
@@ -135,16 +137,18 @@ export declare const TokenPayloadV1Schema: z.ZodDiscriminatedUnion<"tokenKind",
|
|
|
135
137
|
nodeId: string & {
|
|
136
138
|
readonly __brand: "v2.NodeId";
|
|
137
139
|
};
|
|
138
|
-
workflowHash: never;
|
|
139
140
|
tokenVersion: 1;
|
|
140
141
|
tokenKind: "state";
|
|
142
|
+
workflowHashRef: string & {
|
|
143
|
+
readonly __brand: "v2.WorkflowHashRef";
|
|
144
|
+
};
|
|
141
145
|
}, {
|
|
142
146
|
sessionId: string;
|
|
143
147
|
runId: string;
|
|
144
148
|
nodeId: string;
|
|
145
|
-
workflowHash: string;
|
|
146
149
|
tokenVersion: 1;
|
|
147
150
|
tokenKind: "state";
|
|
151
|
+
workflowHashRef: string;
|
|
148
152
|
}>, z.ZodObject<{
|
|
149
153
|
tokenVersion: z.ZodLiteral<1>;
|
|
150
154
|
tokenKind: z.ZodLiteral<"ack">;
|
|
@@ -5,8 +5,10 @@ exports.expectedPrefixForTokenKind = expectedPrefixForTokenKind;
|
|
|
5
5
|
exports.asTokenString = asTokenString;
|
|
6
6
|
const zod_1 = require("zod");
|
|
7
7
|
const index_js_1 = require("../ids/index.js");
|
|
8
|
-
const
|
|
9
|
-
|
|
8
|
+
const workflowHashRefSchema = zod_1.z
|
|
9
|
+
.string()
|
|
10
|
+
.regex(/^wf_[a-z2-7]{26}$/, 'Expected wf_<26 base32 chars [a-z2-7]>')
|
|
11
|
+
.transform((v) => (0, index_js_1.asWorkflowHashRef)(v));
|
|
10
12
|
const nonEmpty = zod_1.z.string().min(1);
|
|
11
13
|
const delimiterSafeId = nonEmpty.regex(/^[^:\s]+$/, 'Expected a delimiter-safe ID (no ":" or whitespace)');
|
|
12
14
|
exports.AttemptIdSchema = delimiterSafeId.transform(index_js_1.asAttemptId);
|
|
@@ -19,7 +21,7 @@ exports.StateTokenPayloadV1Schema = zod_1.z.object({
|
|
|
19
21
|
sessionId: exports.SessionIdSchema,
|
|
20
22
|
runId: exports.RunIdSchema,
|
|
21
23
|
nodeId: exports.NodeIdSchema,
|
|
22
|
-
|
|
24
|
+
workflowHashRef: workflowHashRefSchema,
|
|
23
25
|
});
|
|
24
26
|
exports.AckTokenPayloadV1Schema = zod_1.z.object({
|
|
25
27
|
tokenVersion: zod_1.z.literal(1),
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
import type { TokenCodecPorts } from './token-codec-ports.js';
/** Ports needed only to parse a token string (bech32m + base32 codecs; no key material). */
export type TokenParsePorts = Pick<TokenCodecPorts, 'bech32m' | 'base32'>;
/** Ports needed to verify a token signature (keyring, HMAC, base64url). */
export type TokenVerifyPorts = Pick<TokenCodecPorts, 'keyring' | 'hmac' | 'base64url'>;
/** Signing requires the full port bundle. */
export type TokenSignPorts = TokenCodecPorts;
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import type { Result } from 'neverthrow';
import type { KeyringV1 } from '../../ports/keyring.port.js';
import type { HmacSha256PortV2 } from '../../ports/hmac-sha256.port.js';
import type { Base64UrlPortV2 } from '../../ports/base64url.port.js';
import type { Base32PortV2 } from '../../ports/base32.port.js';
import type { Bech32mPortV2 } from '../../ports/bech32m.port.js';
declare const tokenCodecPortsBrand: unique symbol;
/**
 * Validated bundle of the five ports the token codec depends on.
 * Branded with a private unique symbol so a plain object literal cannot
 * be passed where a checked bundle is required — callers must go through
 * {@link createTokenCodecPorts} or {@link unsafeTokenCodecPorts}.
 */
export type TokenCodecPorts = Readonly<{
    readonly keyring: KeyringV1;
    readonly hmac: HmacSha256PortV2;
    readonly base64url: Base64UrlPortV2;
    readonly base32: Base32PortV2;
    readonly bech32m: Bech32mPortV2;
}> & {
    readonly [tokenCodecPortsBrand]: 'TokenCodecPorts';
};
/** One MISSING_* code per required port; reported for the first absent port. */
export type TokenCodecPortsError = {
    readonly code: 'TOKEN_CODEC_PORTS_MISSING_KEYRING';
} | {
    readonly code: 'TOKEN_CODEC_PORTS_MISSING_HMAC';
} | {
    readonly code: 'TOKEN_CODEC_PORTS_MISSING_BASE64URL';
} | {
    readonly code: 'TOKEN_CODEC_PORTS_MISSING_BASE32';
} | {
    readonly code: 'TOKEN_CODEC_PORTS_MISSING_BECH32M';
};
/**
 * Validates that every port is present (ports may be optional/null at the
 * call site, e.g. from DI) and returns a frozen, branded bundle.
 */
export declare function createTokenCodecPorts(deps: {
    readonly keyring?: KeyringV1 | null;
    readonly hmac?: HmacSha256PortV2 | null;
    readonly base64url?: Base64UrlPortV2 | null;
    readonly base32?: Base32PortV2 | null;
    readonly bech32m?: Bech32mPortV2 | null;
}): Result<TokenCodecPorts, TokenCodecPortsError>;
/**
 * Skips validation for callers that statically guarantee all ports are
 * present; still freezes the bundle.
 */
export declare function unsafeTokenCodecPorts(deps: {
    readonly keyring: KeyringV1;
    readonly hmac: HmacSha256PortV2;
    readonly base64url: Base64UrlPortV2;
    readonly base32: Base32PortV2;
    readonly bech32m: Bech32mPortV2;
}): TokenCodecPorts;
export {};
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"use strict";
// Factory helpers for the bundled set of ports the token codec needs.
Object.defineProperty(exports, "__esModule", { value: true });
exports.createTokenCodecPorts = createTokenCodecPorts;
exports.unsafeTokenCodecPorts = unsafeTokenCodecPorts;
const neverthrow_1 = require("neverthrow");
|
|
6
|
+
/**
 * Validates that every required port is present and returns a frozen
 * TokenCodecPorts bundle; errs with the matching MISSING_* code for the
 * first absent port.
 */
function createTokenCodecPorts(deps) {
    // Check in declaration order so the first missing port determines the error.
    const required = [
        ['keyring', 'TOKEN_CODEC_PORTS_MISSING_KEYRING'],
        ['hmac', 'TOKEN_CODEC_PORTS_MISSING_HMAC'],
        ['base64url', 'TOKEN_CODEC_PORTS_MISSING_BASE64URL'],
        ['base32', 'TOKEN_CODEC_PORTS_MISSING_BASE32'],
        ['bech32m', 'TOKEN_CODEC_PORTS_MISSING_BECH32M'],
    ];
    for (const [key, code] of required) {
        if (!deps[key]) {
            return (0, neverthrow_1.err)({ code });
        }
    }
    return (0, neverthrow_1.ok)(Object.freeze({
        keyring: deps.keyring,
        hmac: deps.hmac,
        base64url: deps.base64url,
        base32: deps.base32,
        bech32m: deps.bech32m,
    }));
}
|
|
25
|
+
/**
 * Builds a TokenCodecPorts bundle without runtime validation; trusts the
 * caller to have supplied every port. Freezes a shallow copy so the
 * bundle cannot be mutated afterwards.
 */
function unsafeTokenCodecPorts(deps) {
    const ports = { ...deps };
    return Object.freeze(ports);
}
|
|
@@ -2,10 +2,16 @@ import type { Result } from 'neverthrow';
|
|
|
2
2
|
import type { CanonicalBytes } from '../ids/index.js';
|
|
3
3
|
import type { TokenStringV1 } from '../ids/index.js';
|
|
4
4
|
import type { Base64UrlPortV2 } from '../../ports/base64url.port.js';
|
|
5
|
+
import type { Bech32mDecodeError, TokenHrp } from '../../ports/bech32m.port.js';
|
|
6
|
+
import type { Base32PortV2 } from '../../ports/base32.port.js';
|
|
5
7
|
import { type TokenPayloadV1, type TokenPrefixV1 } from './payloads.js';
|
|
8
|
+
import type { TokenParsePorts } from './token-codec-capabilities.js';
|
|
6
9
|
export type TokenDecodeErrorV2 = {
|
|
7
10
|
readonly code: 'TOKEN_INVALID_FORMAT';
|
|
8
11
|
readonly message: string;
|
|
12
|
+
readonly details?: {
|
|
13
|
+
bech32mError?: Bech32mDecodeError;
|
|
14
|
+
};
|
|
9
15
|
} | {
|
|
10
16
|
readonly code: 'TOKEN_UNSUPPORTED_VERSION';
|
|
11
17
|
readonly message: string;
|
|
@@ -30,3 +36,15 @@ export declare function encodeUnsignedTokenV1(payload: TokenPayloadV1, base64url
|
|
|
30
36
|
readonly payloadBytes: CanonicalBytes;
|
|
31
37
|
}, TokenDecodeErrorV2>;
|
|
32
38
|
export declare function parseTokenV1(token: string, base64url: Base64UrlPortV2): Result<ParsedTokenV1, TokenDecodeErrorV2>;
|
|
39
|
+
export interface ParsedTokenV1Binary {
|
|
40
|
+
readonly hrp: TokenHrp;
|
|
41
|
+
readonly version: '1';
|
|
42
|
+
readonly payloadBytes: Uint8Array;
|
|
43
|
+
readonly signatureBytes: Uint8Array;
|
|
44
|
+
readonly payload: TokenPayloadV1;
|
|
45
|
+
}
|
|
46
|
+
export declare function encodeTokenPayloadV1Binary(payload: TokenPayloadV1, base32: Base32PortV2): Result<{
|
|
47
|
+
payloadBytes: Uint8Array;
|
|
48
|
+
hrp: TokenHrp;
|
|
49
|
+
}, TokenDecodeErrorV2>;
|
|
50
|
+
export declare function parseTokenV1Binary(tokenString: string, ports: TokenParsePorts): Result<ParsedTokenV1Binary, TokenDecodeErrorV2>;
|