@unlaxer/tramli 1.4.0 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/data-flow-graph.d.ts +4 -0
- package/dist/data-flow-graph.js +73 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +1 -0
- package/dist/skeleton-generator.d.ts +10 -0
- package/dist/skeleton-generator.js +44 -0
- package/package.json +1 -1
|
@@ -68,6 +68,10 @@ export declare class DataFlowGraph<S extends string> {
|
|
|
68
68
|
toJson(): string;
|
|
69
69
|
/** Generate Mermaid data-flow diagram. */
|
|
70
70
|
toMermaid(): string;
|
|
71
|
+
/** Recommended migration order: processors sorted by dependency (fewest first). */
|
|
72
|
+
migrationOrder(): string[];
|
|
73
|
+
/** Generate Markdown migration checklist. */
|
|
74
|
+
toMarkdown(): string;
|
|
71
75
|
/** Test scaffold: for each processor, list required type names. */
|
|
72
76
|
testScaffold(): Map<string, string[]>;
|
|
73
77
|
/** Generate data-flow invariant assertions as strings. */
|
package/dist/data-flow-graph.js
CHANGED
|
@@ -223,6 +223,79 @@ export class DataFlowGraph {
|
|
|
223
223
|
}
|
|
224
224
|
return lines.join('\n') + '\n';
|
|
225
225
|
}
|
|
226
|
+
/** Recommended migration order: processors sorted by dependency (fewest first). */
|
|
227
|
+
migrationOrder() {
|
|
228
|
+
const nodeReqs = new Map();
|
|
229
|
+
const nodeProds = new Map();
|
|
230
|
+
for (const [t, ns] of this._consumers)
|
|
231
|
+
for (const n of ns) {
|
|
232
|
+
if (!nodeReqs.has(n.name))
|
|
233
|
+
nodeReqs.set(n.name, new Set());
|
|
234
|
+
nodeReqs.get(n.name).add(t);
|
|
235
|
+
}
|
|
236
|
+
for (const [t, ns] of this._producers)
|
|
237
|
+
for (const n of ns) {
|
|
238
|
+
if (!nodeProds.has(n.name))
|
|
239
|
+
nodeProds.set(n.name, new Set());
|
|
240
|
+
nodeProds.get(n.name).add(t);
|
|
241
|
+
}
|
|
242
|
+
const order = [];
|
|
243
|
+
const available = new Set();
|
|
244
|
+
for (const [t, ns] of this._producers) {
|
|
245
|
+
if (ns.some(n => n.name === 'initial'))
|
|
246
|
+
available.add(t);
|
|
247
|
+
}
|
|
248
|
+
const remaining = new Set([...nodeReqs.keys(), ...nodeProds.keys()]);
|
|
249
|
+
remaining.delete('initial');
|
|
250
|
+
while (remaining.size > 0) {
|
|
251
|
+
let next = null;
|
|
252
|
+
for (const name of remaining) {
|
|
253
|
+
const reqs = nodeReqs.get(name) ?? new Set();
|
|
254
|
+
if ([...reqs].every(r => available.has(r))) {
|
|
255
|
+
next = name;
|
|
256
|
+
break;
|
|
257
|
+
}
|
|
258
|
+
}
|
|
259
|
+
if (!next) {
|
|
260
|
+
order.push(...remaining);
|
|
261
|
+
break;
|
|
262
|
+
}
|
|
263
|
+
order.push(next);
|
|
264
|
+
remaining.delete(next);
|
|
265
|
+
for (const p of nodeProds.get(next) ?? [])
|
|
266
|
+
available.add(p);
|
|
267
|
+
}
|
|
268
|
+
return order;
|
|
269
|
+
}
|
|
270
|
+
/** Generate Markdown migration checklist. */
|
|
271
|
+
toMarkdown() {
|
|
272
|
+
const lines = ['# Migration Checklist\n'];
|
|
273
|
+
const order = this.migrationOrder();
|
|
274
|
+
for (let i = 0; i < order.length; i++) {
|
|
275
|
+
const name = order[i];
|
|
276
|
+
const reqs = [];
|
|
277
|
+
for (const [t, ns] of this._consumers)
|
|
278
|
+
if (ns.some(n => n.name === name))
|
|
279
|
+
reqs.push(t);
|
|
280
|
+
const prods = [];
|
|
281
|
+
for (const [t, ns] of this._producers)
|
|
282
|
+
if (ns.some(n => n.name === name))
|
|
283
|
+
prods.push(t);
|
|
284
|
+
let line = `- [ ] **${i + 1}. ${name}**`;
|
|
285
|
+
if (reqs.length)
|
|
286
|
+
line += ` requires: [${reqs.join(', ')}]`;
|
|
287
|
+
if (prods.length)
|
|
288
|
+
line += ` produces: [${prods.join(', ')}]`;
|
|
289
|
+
lines.push(line);
|
|
290
|
+
}
|
|
291
|
+
const dead = this.deadData();
|
|
292
|
+
if (dead.size > 0) {
|
|
293
|
+
lines.push('\n## Dead Data\n');
|
|
294
|
+
for (const d of dead)
|
|
295
|
+
lines.push(`- ${d}`);
|
|
296
|
+
}
|
|
297
|
+
return lines.join('\n') + '\n';
|
|
298
|
+
}
|
|
226
299
|
/** Test scaffold: for each processor, list required type names. */
|
|
227
300
|
testScaffold() {
|
|
228
301
|
const scaffold = new Map();
|
package/dist/index.d.ts
CHANGED
|
@@ -7,6 +7,8 @@ export { FlowError } from './flow-error.js';
|
|
|
7
7
|
export { InMemoryFlowStore } from './in-memory-flow-store.js';
|
|
8
8
|
export type { TransitionRecord } from './in-memory-flow-store.js';
|
|
9
9
|
export { MermaidGenerator } from './mermaid-generator.js';
|
|
10
|
+
export { SkeletonGenerator } from './skeleton-generator.js';
|
|
11
|
+
export type { TargetLanguage } from './skeleton-generator.js';
|
|
10
12
|
export { DataFlowGraph } from './data-flow-graph.js';
|
|
11
13
|
export type { NodeInfo } from './data-flow-graph.js';
|
|
12
14
|
export { flowKey } from './flow-key.js';
|
package/dist/index.js
CHANGED
|
@@ -6,5 +6,6 @@ export { FlowDefinition, Builder, FromBuilder, BranchBuilder, SubFlowBuilder } f
|
|
|
6
6
|
export { FlowError } from './flow-error.js';
|
|
7
7
|
export { InMemoryFlowStore } from './in-memory-flow-store.js';
|
|
8
8
|
export { MermaidGenerator } from './mermaid-generator.js';
|
|
9
|
+
export { SkeletonGenerator } from './skeleton-generator.js';
|
|
9
10
|
export { DataFlowGraph } from './data-flow-graph.js';
|
|
10
11
|
export { flowKey } from './flow-key.js';
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import type { FlowDefinition } from './flow-definition.js';
/** Output languages supported by the skeleton generator. */
export type TargetLanguage = 'java' | 'typescript' | 'rust';
/**
 * Generates Processor skeleton code from a FlowDefinition's requires/produces contracts.
 */
export declare class SkeletonGenerator {
    /**
     * Render skeleton source for every processor and guard referenced by
     * `def`'s transitions, in the requested target language.
     * Returns the generated source as a single string.
     */
    static generate<S extends string>(def: FlowDefinition<S>, lang: TargetLanguage): string;
    private static genProcessor;
    private static genGuard;
}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
/**
 * Generates Processor skeleton code from a FlowDefinition's requires/produces contracts.
 */
export class SkeletonGenerator {
    /**
     * Render skeleton source for every processor and guard referenced by the
     * flow's transitions (each emitted once, keyed by name), in `lang`.
     */
    static generate(def, lang) {
        const lines = [
            `// Skeleton generated from flow: ${def.name}`,
            `// Language: ${lang}`,
            '',
        ];
        const seen = new Set();
        for (const t of def.transitions) {
            if (t.processor && !seen.has(t.processor.name)) {
                seen.add(t.processor.name);
                lines.push(this.genProcessor(t.processor.name, t.processor.requires, t.processor.produces, lang));
            }
            if (t.guard && !seen.has(t.guard.name)) {
                seen.add(t.guard.name);
                lines.push(this.genGuard(t.guard.name, t.guard.requires, t.guard.produces, lang));
            }
        }
        return lines.join('\n');
    }
    /** camelCase/PascalCase -> CONSTANT_CASE, for Java static field names. */
    static constName(name) {
        return name.replace(/([a-z])([A-Z])/g, '$1_$2').toUpperCase();
    }
    /** Emit a StateProcessor skeleton for one processor in the target language. */
    static genProcessor(name, reqs, prods, lang) {
        if (lang === 'typescript') {
            return `const ${lcFirst(name)}: StateProcessor<S> = {\n  name: '${name}',\n  requires: [${reqs.join(', ')}],\n  produces: [${prods.join(', ')}],\n  process(ctx: FlowContext) {\n${reqs.map(r => `    const ${lcFirst(r)} = ctx.get(${r});`).join('\n')}\n    // TODO: implement\n${prods.map(p => `    // ctx.put(${p}, { ... });`).join('\n')}\n  },\n};\n`;
        }
        if (lang === 'java') {
            return `static final StateProcessor ${this.constName(name)} = new StateProcessor() {\n  @Override public String name() { return "${name}"; }\n  @Override public Set<Class<?>> requires() { return Set.of(${reqs.map(r => r + '.class').join(', ')}); }\n  @Override public Set<Class<?>> produces() { return Set.of(${prods.map(p => p + '.class').join(', ')}); }\n  @Override public void process(FlowContext ctx) {\n    // TODO: implement\n  }\n};\n`;
        }
        // rust
        // FIX: the produces() body previously emitted the requires![...] macro
        // (copy-paste); it now emits produces![...] to match the field it fills.
        return `struct ${name};\nimpl StateProcessor<S> for ${name} {\n  fn name(&self) -> &str { "${name}" }\n  fn requires(&self) -> Vec<TypeId> { requires![${reqs.join(', ')}] }\n  fn produces(&self) -> Vec<TypeId> { produces![${prods.join(', ')}] }\n  fn process(&self, ctx: &mut FlowContext) -> Result<(), FlowError> {\n    todo!()\n  }\n}\n`;
    }
    /** Emit a TransitionGuard skeleton for one guard in the target language. */
    static genGuard(name, reqs, prods, lang) {
        if (lang === 'typescript') {
            return `const ${lcFirst(name)}: TransitionGuard<S> = {\n  name: '${name}',\n  requires: [${reqs.join(', ')}],\n  produces: [${prods.join(', ')}],\n  maxRetries: 3,\n  validate(ctx: FlowContext): GuardOutput {\n    // TODO: implement\n    return { type: 'accepted' };\n  },\n};\n`;
        }
        if (lang === 'java') {
            return `static final TransitionGuard ${this.constName(name)} = new TransitionGuard() {\n  @Override public String name() { return "${name}"; }\n  @Override public Set<Class<?>> requires() { return Set.of(${reqs.map(r => r + '.class').join(', ')}); }\n  @Override public Set<Class<?>> produces() { return Set.of(${prods.map(p => p + '.class').join(', ')}); }\n  @Override public int maxRetries() { return 3; }\n  @Override public GuardOutput validate(FlowContext ctx) {\n    return new GuardOutput.Accepted();\n  }\n};\n`;
        }
        // rust (same produces!/requires! fix as genProcessor)
        return `struct ${name};\nimpl TransitionGuard<S> for ${name} {\n  fn name(&self) -> &str { "${name}" }\n  fn requires(&self) -> Vec<TypeId> { requires![${reqs.join(', ')}] }\n  fn produces(&self) -> Vec<TypeId> { produces![${prods.join(', ')}] }\n  fn validate(&self, ctx: &FlowContext) -> GuardOutput {\n    GuardOutput::Accepted { data: HashMap::new() }\n  }\n}\n`;
    }
}
/** Lower-case the first character of an identifier (safe on the empty string). */
function lcFirst(s) { return s.charAt(0).toLowerCase() + s.slice(1); }
|