gabion 0.1.0__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
gabion/cli.py CHANGED
@@ -3,7 +3,7 @@ from __future__ import annotations
  from dataclasses import dataclass
  from datetime import datetime, timezone
  from pathlib import Path
- from typing import List, Optional, Any
+ from typing import List, Optional, Any, Callable
  import argparse
  import json
  import subprocess
@@ -22,56 +22,55 @@ app = typer.Typer(add_completion=False)
  class DataflowAuditRequest:
      ctx: typer.Context
      args: List[str] | None = None
+     runner: Callable[..., dict[str, Any]] | None = None


  def _find_repo_root() -> Path:
      return Path(__file__).resolve().parents[2]


- @app.command()
- def check(
-     paths: List[Path] = typer.Argument(None),
-     report: Optional[Path] = typer.Option(None, "--report"),
-     fail_on_violations: bool = typer.Option(True, "--fail-on-violations/--no-fail-on-violations"),
-     root: Path = typer.Option(Path("."), "--root"),
-     config: Optional[Path] = typer.Option(None, "--config"),
-     baseline: Optional[Path] = typer.Option(
-         None, "--baseline", help="Baseline file of allowed violations."
-     ),
-     baseline_write: bool = typer.Option(
-         False, "--baseline-write", help="Write current violations to baseline."
-     ),
-     exclude: Optional[List[str]] = typer.Option(None, "--exclude"),
-     ignore_params: Optional[str] = typer.Option(None, "--ignore-params"),
-     transparent_decorators: Optional[str] = typer.Option(
-         None, "--transparent-decorators"
-     ),
-     allow_external: Optional[bool] = typer.Option(
-         None, "--allow-external/--no-allow-external"
-     ),
-     strictness: Optional[str] = typer.Option(None, "--strictness"),
-     fail_on_type_ambiguities: bool = typer.Option(
-         True, "--fail-on-type-ambiguities/--no-fail-on-type-ambiguities"
-     ),
- ) -> None:
-     """Run the dataflow grammar audit with strict defaults."""
+ def _split_csv_entries(entries: Optional[List[str]]) -> list[str] | None:
+     if entries is None:
+         return None
+     merged: list[str] = []
+     for entry in entries:
+         merged.extend([part.strip() for part in entry.split(",") if part.strip()])
+     return merged or None
+
+
+ def _split_csv(value: Optional[str]) -> list[str] | None:
+     if value is None:
+         return None
+     items = [part.strip() for part in value.split(",") if part.strip()]
+     return items or None
+
+
+ def build_check_payload(
+     *,
+     paths: Optional[List[Path]],
+     report: Optional[Path],
+     fail_on_violations: bool,
+     root: Path | None,
+     config: Optional[Path],
+     baseline: Optional[Path],
+     baseline_write: bool,
+     exclude: Optional[List[str]],
+     ignore_params: Optional[str],
+     transparent_decorators: Optional[str],
+     allow_external: Optional[bool],
+     strictness: Optional[str],
+     fail_on_type_ambiguities: bool,
+ ) -> dict[str, Any]:
+     # dataflow-bundle: ignore_params, transparent_decorators
      if not paths:
          paths = [Path(".")]
-     exclude_dirs: list[str] | None = None
-     if exclude is not None:
-         exclude_dirs = []
-         for entry in exclude:
-             exclude_dirs.extend([part.strip() for part in entry.split(",") if part.strip()])
-     ignore_list: list[str] | None = None
-     if ignore_params is not None:
-         ignore_list = [p.strip() for p in ignore_params.split(",") if p.strip()]
-     transparent_list: list[str] | None = None
-     if transparent_decorators is not None:
-         transparent_list = [
-             p.strip() for p in transparent_decorators.split(",") if p.strip()
-         ]
      if strictness is not None and strictness not in {"high", "low"}:
          raise typer.BadParameter("strictness must be 'high' or 'low'")
+     exclude_dirs = _split_csv_entries(exclude)
+     ignore_list = _split_csv(ignore_params)
+     transparent_list = _split_csv(transparent_decorators)
+     baseline_write_value: bool | None = baseline_write if baseline is not None else None
+     root = root or Path(".")
      payload = {
          "paths": [str(p) for p in paths],
          "report": str(report) if report is not None else None,
@@ -80,7 +79,7 @@ def check(
          "root": str(root),
          "config": str(config) if config is not None else None,
          "baseline": str(baseline) if baseline is not None else None,
-         "baseline_write": baseline_write if baseline is not None else None,
+         "baseline_write": baseline_write_value,
          "exclude": exclude_dirs,
          "ignore_params": ignore_list,
          "transparent_decorators": transparent_list,
@@ -88,32 +87,18 @@ def check(
          "strictness": strictness,
          "type_audit": True if fail_on_type_ambiguities else None,
      }
-     result = run_command(CommandRequest(DATAFLOW_COMMAND, [payload]))
-     raise typer.Exit(code=int(result.get("exit_code", 0)))
+     return payload


- def _dataflow_audit(
-     request: "DataflowAuditRequest",
- ) -> None:
-     """Run the dataflow grammar audit with explicit options."""
-     argv = list(request.args or []) + list(request.ctx.args)
-     if not argv:
-         argv = []
+ def parse_dataflow_args(argv: list[str]) -> argparse.Namespace:
      parser = dataflow_cli_parser()
-     opts = parser.parse_args(argv)
-     exclude_dirs: list[str] | None = None
-     if opts.exclude is not None:
-         exclude_dirs = []
-         for entry in opts.exclude:
-             exclude_dirs.extend([part.strip() for part in entry.split(",") if part.strip()])
-     ignore_list: list[str] | None = None
-     if opts.ignore_params is not None:
-         ignore_list = [p.strip() for p in opts.ignore_params.split(",") if p.strip()]
-     transparent_list: list[str] | None = None
-     if opts.transparent_decorators is not None:
-         transparent_list = [
-             p.strip() for p in opts.transparent_decorators.split(",") if p.strip()
-         ]
+     return parser.parse_args(argv)
+
+
+ def build_dataflow_payload(opts: argparse.Namespace) -> dict[str, Any]:
+     exclude_dirs = _split_csv_entries(opts.exclude)
+     ignore_list = _split_csv(opts.ignore_params)
+     transparent_list = _split_csv(opts.transparent_decorators)
      payload: dict[str, Any] = {
          "paths": [str(p) for p in opts.paths],
          "root": str(opts.root),
@@ -121,6 +106,7 @@ def _dataflow_audit(
          "report": str(opts.report) if opts.report else None,
          "dot": opts.dot,
          "fail_on_violations": opts.fail_on_violations,
+         "fail_on_type_ambiguities": opts.fail_on_type_ambiguities,
          "baseline": str(opts.baseline) if opts.baseline else None,
          "baseline_write": opts.baseline_write if opts.baseline else None,
          "no_recursive": opts.no_recursive,
@@ -146,8 +132,158 @@ def _dataflow_audit(
          "refactor_plan_json": str(opts.refactor_plan_json)
          if opts.refactor_plan_json
          else None,
+         "synthesis_merge_overlap": opts.synthesis_merge_overlap,
      }
-     result = run_command(CommandRequest(DATAFLOW_COMMAND, [payload]))
+     return payload
+
+
+ def build_refactor_payload(
+     *,
+     input_payload: Optional[dict[str, Any]] = None,
+     protocol_name: Optional[str],
+     bundle: Optional[List[str]],
+     field: Optional[List[str]],
+     target_path: Optional[Path],
+     target_functions: Optional[List[str]],
+     compatibility_shim: bool,
+     rationale: Optional[str],
+ ) -> dict[str, Any]:
+     if input_payload is not None:
+         return input_payload
+     if protocol_name is None or target_path is None:
+         raise typer.BadParameter(
+             "Provide --protocol-name and --target-path or use --input."
+         )
+     field_specs: list[dict[str, str | None]] = []
+     for spec in field or []:
+         name, _, hint = spec.partition(":")
+         name = name.strip()
+         if not name:
+             continue
+         type_hint = hint.strip() or None
+         field_specs.append({"name": name, "type_hint": type_hint})
+     if not bundle and field_specs:
+         bundle = [spec["name"] for spec in field_specs]
+     return {
+         "protocol_name": protocol_name,
+         "bundle": bundle or [],
+         "fields": field_specs,
+         "target_path": str(target_path),
+         "target_functions": target_functions or [],
+         "compatibility_shim": compatibility_shim,
+         "rationale": rationale,
+     }
+
+
+ def dispatch_command(
+     *,
+     command: str,
+     payload: dict[str, Any],
+     root: Path | None = None,
+     runner: Callable[..., dict[str, Any]] = run_command,
+ ) -> dict[str, Any]:
+     request = CommandRequest(command, [payload])
+     return runner(request, root=root)
+
+
+ def run_check(
+     *,
+     paths: Optional[List[Path]],
+     report: Optional[Path],
+     fail_on_violations: bool,
+     root: Path,
+     config: Optional[Path],
+     baseline: Optional[Path],
+     baseline_write: bool,
+     exclude: Optional[List[str]],
+     ignore_params: Optional[str],
+     transparent_decorators: Optional[str],
+     allow_external: Optional[bool],
+     strictness: Optional[str],
+     fail_on_type_ambiguities: bool,
+     runner: Callable[..., dict[str, Any]] = run_command,
+ ) -> dict[str, Any]:
+     # dataflow-bundle: ignore_params, transparent_decorators
+     payload = build_check_payload(
+         paths=paths,
+         report=report,
+         fail_on_violations=fail_on_violations,
+         root=root,
+         config=config,
+         baseline=baseline,
+         baseline_write=baseline_write if baseline is not None else False,
+         exclude=exclude,
+         ignore_params=ignore_params,
+         transparent_decorators=transparent_decorators,
+         allow_external=allow_external,
+         strictness=strictness,
+         fail_on_type_ambiguities=fail_on_type_ambiguities,
+     )
+     return dispatch_command(command=DATAFLOW_COMMAND, payload=payload, root=root, runner=runner)
+
+
+ @app.command()
+ def check(
+     paths: List[Path] = typer.Argument(None),
+     report: Optional[Path] = typer.Option(None, "--report"),
+     fail_on_violations: bool = typer.Option(True, "--fail-on-violations/--no-fail-on-violations"),
+     root: Path = typer.Option(Path("."), "--root"),
+     config: Optional[Path] = typer.Option(None, "--config"),
+     baseline: Optional[Path] = typer.Option(
+         None, "--baseline", help="Baseline file of allowed violations."
+     ),
+     baseline_write: bool = typer.Option(
+         False, "--baseline-write", help="Write current violations to baseline."
+     ),
+     exclude: Optional[List[str]] = typer.Option(None, "--exclude"),
+     ignore_params: Optional[str] = typer.Option(None, "--ignore-params"),
+     transparent_decorators: Optional[str] = typer.Option(
+         None, "--transparent-decorators"
+     ),
+     allow_external: Optional[bool] = typer.Option(
+         None, "--allow-external/--no-allow-external"
+     ),
+     strictness: Optional[str] = typer.Option(None, "--strictness"),
+     fail_on_type_ambiguities: bool = typer.Option(
+         True, "--fail-on-type-ambiguities/--no-fail-on-type-ambiguities"
+     ),
+ ) -> None:
+     # dataflow-bundle: ignore_params, transparent_decorators
+     """Run the dataflow grammar audit with strict defaults."""
+     result = run_check(
+         paths=paths,
+         report=report,
+         fail_on_violations=fail_on_violations,
+         root=root,
+         config=config,
+         baseline=baseline,
+         baseline_write=baseline_write,
+         exclude=exclude,
+         ignore_params=ignore_params,
+         transparent_decorators=transparent_decorators,
+         allow_external=allow_external,
+         strictness=strictness,
+         fail_on_type_ambiguities=fail_on_type_ambiguities,
+     )
+     raise typer.Exit(code=int(result.get("exit_code", 0)))
+
+
+ def _dataflow_audit(
+     request: "DataflowAuditRequest",
+ ) -> None:
+     """Run the dataflow grammar audit with explicit options."""
+     argv = list(request.args or []) + list(request.ctx.args)
+     if not argv:
+         argv = []
+     opts = parse_dataflow_args(argv)
+     payload = build_dataflow_payload(opts)
+     runner = request.runner or run_command
+     result = dispatch_command(
+         command=DATAFLOW_COMMAND,
+         payload=payload,
+         root=Path(opts.root),
+         runner=runner,
+     )
      if opts.type_audit:
          suggestions = result.get("type_suggestions", [])
          ambiguities = result.get("type_ambiguities", [])
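
dispatch_command is now the single seam through which every CLI command reaches the server, and run_check and _dataflow_audit accept an injectable runner. A minimal sketch of how a test might use that seam, assuming DATAFLOW_COMMAND and dispatch_command are importable from gabion.cli as used above (illustrative, not shipped code):

    from gabion.cli import DATAFLOW_COMMAND, dispatch_command

    def fake_runner(request, root=None):
        # Stand-in for lsp_client.run_command; never spawns gabion.server.
        return {"exit_code": 0}

    result = dispatch_command(
        command=DATAFLOW_COMMAND,
        payload={"paths": ["."]},
        runner=fake_runner,
    )
    assert result == {"exit_code": 0}
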
@@ -274,6 +410,12 @@ def dataflow_cli_parser() -> argparse.ArgumentParser:
          action="store_true",
          help="Allow single-field bundles in synthesis plan.",
      )
+     parser.add_argument(
+         "--synthesis-merge-overlap",
+         type=float,
+         default=None,
+         help="Jaccard overlap threshold for merging bundles (0.0-1.0).",
+     )
      parser.add_argument(
          "--refactor-plan",
          action="store_true",
@@ -287,66 +429,64 @@ def dataflow_cli_parser() -> argparse.ArgumentParser:
      return parser


- @app.command("docflow-audit")
- def docflow_audit(
-     root: Path = typer.Option(Path("."), "--root"),
-     fail_on_violations: bool = typer.Option(
-         False, "--fail-on-violations/--no-fail-on-violations"
-     ),
- ) -> None:
-     """Run the docflow audit (governance docs only)."""
+ def _run_docflow_audit(
+     *,
+     root: Path,
+     fail_on_violations: bool,
+     script: Path | None = None,
+ ) -> int:
      repo_root = _find_repo_root()
-     script = repo_root / "scripts" / "docflow_audit.py"
-     if not script.exists():
+     script_path = script or (repo_root / "scripts" / "docflow_audit.py")
+     if not script_path.exists():
          typer.secho(
              "docflow audit script not found; repository layout required",
              err=True,
              fg=typer.colors.RED,
          )
-         raise typer.Exit(code=2)
+         return 2
      args = ["--root", str(root)]
      if fail_on_violations:
          args.append("--fail-on-violations")
-     result = subprocess.run([sys.executable, str(script), *args], check=False)
-     raise typer.Exit(code=result.returncode)
+     result = subprocess.run([sys.executable, str(script_path), *args], check=False)
+     return result.returncode


- @app.command("synth")
- def synth(
-     paths: List[Path] = typer.Argument(None),
+ @app.command("docflow-audit")
+ def docflow_audit(
      root: Path = typer.Option(Path("."), "--root"),
-     out_dir: Path = typer.Option(Path("artifacts/synthesis"), "--out-dir"),
-     no_timestamp: bool = typer.Option(False, "--no-timestamp"),
-     config: Optional[Path] = typer.Option(None, "--config"),
-     exclude: Optional[List[str]] = typer.Option(None, "--exclude"),
-     ignore_params: Optional[str] = typer.Option(None, "--ignore-params"),
-     transparent_decorators: Optional[str] = typer.Option(
-         None, "--transparent-decorators"
-     ),
-     allow_external: Optional[bool] = typer.Option(
-         None, "--allow-external/--no-allow-external"
-     ),
-     strictness: Optional[str] = typer.Option(None, "--strictness"),
-     no_recursive: bool = typer.Option(False, "--no-recursive"),
-     max_components: int = typer.Option(10, "--max-components"),
-     type_audit_report: bool = typer.Option(
-         True, "--type-audit-report/--no-type-audit-report"
-     ),
-     type_audit_max: int = typer.Option(50, "--type-audit-max"),
-     synthesis_max_tier: int = typer.Option(2, "--synthesis-max-tier"),
-     synthesis_min_bundle_size: int = typer.Option(2, "--synthesis-min-bundle-size"),
-     synthesis_allow_singletons: bool = typer.Option(
-         False, "--synthesis-allow-singletons"
-     ),
-     synthesis_protocols_kind: str = typer.Option(
-         "dataclass", "--synthesis-protocols-kind"
-     ),
-     refactor_plan: bool = typer.Option(True, "--refactor-plan/--no-refactor-plan"),
      fail_on_violations: bool = typer.Option(
          False, "--fail-on-violations/--no-fail-on-violations"
      ),
  ) -> None:
-     """Run the dataflow audit and emit synthesis outputs (prototype)."""
+     """Run the docflow audit (governance docs only)."""
+     exit_code = _run_docflow_audit(root=root, fail_on_violations=fail_on_violations)
+     raise typer.Exit(code=exit_code)
+
+
+ def _run_synth(
+     *,
+     paths: List[Path] | None,
+     root: Path,
+     out_dir: Path,
+     no_timestamp: bool,
+     config: Optional[Path],
+     exclude: Optional[List[str]],
+     ignore_params: Optional[str],
+     transparent_decorators: Optional[str],
+     allow_external: Optional[bool],
+     strictness: Optional[str],
+     no_recursive: bool,
+     max_components: int,
+     type_audit_report: bool,
+     type_audit_max: int,
+     synthesis_max_tier: int,
+     synthesis_min_bundle_size: int,
+     synthesis_allow_singletons: bool,
+     synthesis_protocols_kind: str,
+     refactor_plan: bool,
+     fail_on_violations: bool,
+     runner: Callable[..., dict[str, Any]] = run_command,
+ ) -> tuple[dict[str, Any], dict[str, Path], Path | None]:
      if not paths:
          paths = [Path(".")]
      exclude_dirs: list[str] | None = None
@@ -410,15 +550,89 @@ def synth(
          "refactor_plan": refactor_plan,
          "refactor_plan_json": str(refactor_plan_path) if refactor_plan else None,
      }
-     result = run_command(CommandRequest(DATAFLOW_COMMAND, [payload]))
+     result = dispatch_command(
+         command=DATAFLOW_COMMAND,
+         payload=payload,
+         root=root,
+         runner=runner,
+     )
+     paths_out = {
+         "report": report_path,
+         "dot": dot_path,
+         "plan": plan_path,
+         "protocol": protocol_path,
+         "refactor": refactor_plan_path,
+         "output_root": output_root,
+     }
+     return result, paths_out, timestamp
+
+
+ @app.command("synth")
+ def synth(
+     paths: List[Path] = typer.Argument(None),
+     root: Path = typer.Option(Path("."), "--root"),
+     out_dir: Path = typer.Option(Path("artifacts/synthesis"), "--out-dir"),
+     no_timestamp: bool = typer.Option(False, "--no-timestamp"),
+     config: Optional[Path] = typer.Option(None, "--config"),
+     exclude: Optional[List[str]] = typer.Option(None, "--exclude"),
+     ignore_params: Optional[str] = typer.Option(None, "--ignore-params"),
+     transparent_decorators: Optional[str] = typer.Option(
+         None, "--transparent-decorators"
+     ),
+     allow_external: Optional[bool] = typer.Option(
+         None, "--allow-external/--no-allow-external"
+     ),
+     strictness: Optional[str] = typer.Option(None, "--strictness"),
+     no_recursive: bool = typer.Option(False, "--no-recursive"),
+     max_components: int = typer.Option(10, "--max-components"),
+     type_audit_report: bool = typer.Option(
+         True, "--type-audit-report/--no-type-audit-report"
+     ),
+     type_audit_max: int = typer.Option(50, "--type-audit-max"),
+     synthesis_max_tier: int = typer.Option(2, "--synthesis-max-tier"),
+     synthesis_min_bundle_size: int = typer.Option(2, "--synthesis-min-bundle-size"),
+     synthesis_allow_singletons: bool = typer.Option(
+         False, "--synthesis-allow-singletons"
+     ),
+     synthesis_protocols_kind: str = typer.Option(
+         "dataclass", "--synthesis-protocols-kind"
+     ),
+     refactor_plan: bool = typer.Option(True, "--refactor-plan/--no-refactor-plan"),
+     fail_on_violations: bool = typer.Option(
+         False, "--fail-on-violations/--no-fail-on-violations"
+     ),
+ ) -> None:
+     """Run the dataflow audit and emit synthesis outputs (prototype)."""
+     result, paths_out, timestamp = _run_synth(
+         paths=paths,
+         root=root,
+         out_dir=out_dir,
+         no_timestamp=no_timestamp,
+         config=config,
+         exclude=exclude,
+         ignore_params=ignore_params,
+         transparent_decorators=transparent_decorators,
+         allow_external=allow_external,
+         strictness=strictness,
+         no_recursive=no_recursive,
+         max_components=max_components,
+         type_audit_report=type_audit_report,
+         type_audit_max=type_audit_max,
+         synthesis_max_tier=synthesis_max_tier,
+         synthesis_min_bundle_size=synthesis_min_bundle_size,
+         synthesis_allow_singletons=synthesis_allow_singletons,
+         synthesis_protocols_kind=synthesis_protocols_kind,
+         refactor_plan=refactor_plan,
+         fail_on_violations=fail_on_violations,
+     )
      if timestamp:
-         typer.echo(f"Snapshot: {output_root}")
-         typer.echo(f"- {report_path}")
-         typer.echo(f"- {dot_path}")
-         typer.echo(f"- {plan_path}")
-         typer.echo(f"- {protocol_path}")
+         typer.echo(f"Snapshot: {paths_out['output_root']}")
+         typer.echo(f"- {paths_out['report']}")
+         typer.echo(f"- {paths_out['dot']}")
+         typer.echo(f"- {paths_out['plan']}")
+         typer.echo(f"- {paths_out['protocol']}")
      if refactor_plan:
-         typer.echo(f"- {refactor_plan_path}")
+         typer.echo(f"- {paths_out['refactor']}")
      raise typer.Exit(code=int(result.get("exit_code", 0)))


@@ -430,6 +644,16 @@ def synthesis_plan(
      output_path: Optional[Path] = typer.Option(
          None, "--output", help="Write synthesis plan JSON to this path."
      ),
+ ) -> None:
+     """Generate a synthesis plan from a JSON payload (prototype)."""
+     _run_synthesis_plan(input_path=input_path, output_path=output_path)
+
+
+ def _run_synthesis_plan(
+     *,
+     input_path: Optional[Path],
+     output_path: Optional[Path],
+     runner: Callable[..., dict[str, Any]] = run_command,
  ) -> None:
      """Generate a synthesis plan from a JSON payload (prototype)."""
      payload: dict[str, Any] = {}
@@ -438,7 +662,12 @@
              payload = json.loads(input_path.read_text())
          except json.JSONDecodeError as exc:
              raise typer.BadParameter(f"Invalid JSON payload: {exc}") from exc
-     result = run_command(CommandRequest(SYNTHESIS_COMMAND, [payload]))
+     result = dispatch_command(
+         command=SYNTHESIS_COMMAND,
+         payload=payload,
+         root=None,
+         runner=runner,
+     )
      output = json.dumps(result, indent=2, sort_keys=True)
      if output_path is None:
          typer.echo(output)
@@ -463,39 +692,33 @@ def refactor_protocol(
      ),
      target_path: Optional[Path] = typer.Option(None, "--target-path"),
      target_functions: Optional[List[str]] = typer.Option(None, "--target-function"),
+     compatibility_shim: bool = typer.Option(
+         False, "--compat-shim/--no-compat-shim"
+     ),
      rationale: Optional[str] = typer.Option(None, "--rationale"),
  ) -> None:
      """Generate protocol refactor edits from a JSON payload (prototype)."""
-     payload: dict[str, Any] = {}
+     input_payload: dict[str, Any] | None = None
      if input_path is not None:
          try:
-             payload = json.loads(input_path.read_text())
+             input_payload = json.loads(input_path.read_text())
          except json.JSONDecodeError as exc:
              raise typer.BadParameter(f"Invalid JSON payload: {exc}") from exc
-     else:
-         if protocol_name is None or target_path is None:
-             raise typer.BadParameter(
-                 "Provide --protocol-name and --target-path or use --input."
-             )
-         field_specs: list[dict[str, str | None]] = []
-         for spec in field or []:
-             name, _, hint = spec.partition(":")
-             name = name.strip()
-             if not name:
-                 continue
-             type_hint = hint.strip() or None
-             field_specs.append({"name": name, "type_hint": type_hint})
-         if not bundle and field_specs:
-             bundle = [spec["name"] for spec in field_specs]
-         payload = {
-             "protocol_name": protocol_name,
-             "bundle": bundle or [],
-             "fields": field_specs,
-             "target_path": str(target_path),
-             "target_functions": target_functions or [],
-             "rationale": rationale,
-         }
-     result = run_command(CommandRequest(REFACTOR_COMMAND, [payload]))
+     payload = build_refactor_payload(
+         input_payload=input_payload,
+         protocol_name=protocol_name,
+         bundle=bundle,
+         field=field,
+         target_path=target_path,
+         target_functions=target_functions,
+         compatibility_shim=compatibility_shim,
+         rationale=rationale,
+     )
+     result = dispatch_command(
+         command=REFACTOR_COMMAND,
+         payload=payload,
+         root=None,
+     )
      output = json.dumps(result, indent=2, sort_keys=True)
      if output_path is None:
          typer.echo(output)
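
For reference, each --field value parsed by build_refactor_payload above is either "name" or "name:type_hint". A small illustration with made-up values (not shipped code):

    from pathlib import Path
    from gabion.cli import build_refactor_payload

    payload = build_refactor_payload(
        protocol_name="AuditOptions",          # hypothetical example values
        bundle=None,
        field=["ignore_params: str | None", "transparent_decorators"],
        target_path=Path("gabion/cli.py"),
        target_functions=None,
        compatibility_shim=True,
        rationale=None,
    )
    assert payload["bundle"] == ["ignore_params", "transparent_decorators"]
    assert payload["fields"][0] == {"name": "ignore_params", "type_hint": "str | None"}
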
gabion/config.py CHANGED
@@ -36,6 +36,14 @@ def dataflow_defaults(
      return section if isinstance(section, dict) else {}


+ def synthesis_defaults(
+     root: Path | None = None, config_path: Path | None = None
+ ) -> dict[str, Any]:
+     data = load_config(root=root, config_path=config_path)
+     section = data.get("synthesis", {})
+     return section if isinstance(section, dict) else {}
+
+
  def merge_payload(payload: dict[str, Any], defaults: dict[str, Any]) -> dict[str, Any]:
      merged = dict(defaults)
      for key, value in payload.items():
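
synthesis_defaults mirrors dataflow_defaults but reads the "synthesis" section of the loaded config. A usage sketch that stubs out load_config rather than guessing at the on-disk config format, which this diff does not show:

    import gabion.config as config

    def fake_load_config(root=None, config_path=None):
        return {"synthesis": {"max_tier": 3}}        # hypothetical section contents

    config.load_config = fake_load_config            # illustration only
    assert config.synthesis_defaults() == {"max_tier": 3}
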
gabion/lsp_client.py CHANGED
@@ -5,7 +5,7 @@ import subprocess
  import sys
  from pathlib import Path
  from dataclasses import dataclass
- from typing import Any
+ from typing import Any, Callable


  class LspClientError(RuntimeError):
@@ -58,8 +58,9 @@ def run_command(
      *,
      root: Path | None = None,
      timeout: float = 5.0,
+     process_factory: Callable[..., subprocess.Popen] = subprocess.Popen,
  ) -> dict[str, Any]:
-     proc = subprocess.Popen(
+     proc = process_factory(
          [sys.executable, "-m", "gabion.server"],
          stdin=subprocess.PIPE,
          stdout=subprocess.PIPE,
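
The process_factory parameter turns the subprocess launch into an injection point: anything with Popen's calling convention can stand in for subprocess.Popen. A cautious sketch; a real test double would also have to emulate the stdin/stdout framing that run_command uses afterwards, which this hunk does not show, so the actual call is left commented:

    import subprocess

    launched: list[list[str]] = []

    def spying_popen(args, **kwargs) -> subprocess.Popen:
        # Record the command line, then fall through to the real Popen.
        launched.append(list(args))
        return subprocess.Popen(args, **kwargs)

    # run_command(request, process_factory=spying_popen)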