agentsync-cli 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
agentsync/sync.py ADDED
@@ -0,0 +1,164 @@
1
+ """Core sync orchestrator — loads source, deduplicates, generates targets."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from dataclasses import dataclass, field
6
+ from typing import TYPE_CHECKING
7
+
8
+ from agentsync.adapters.base import ServerConfig, WriteResult
9
+ from agentsync.utils.dedup import dedup_servers
10
+ from agentsync.utils.markdown import filter_sections
11
+
12
+ if TYPE_CHECKING:
13
+ from agentsync.adapters.base import SourceAdapter, TargetAdapter
14
+ from agentsync.config import AgentSyncConfig
15
+
16
+
17
@dataclass
class TargetSyncResult:
    """Outcome of syncing a single target."""

    # Name of the target this result belongs to (key into SyncEngine's targets).
    target_name: str
    # False when an exception was raised while processing this target.
    success: bool
    # Per-file write outcomes reported by the target adapter's write().
    writes: list[WriteResult] = field(default_factory=list)
    # Stringified exceptions captured while processing this target.
    errors: list[str] = field(default_factory=list)
25
+
26
+
27
@dataclass
class SyncResult:
    """Aggregate outcome of a full sync run."""

    # True only while every processed target succeeded (and the target
    # filter, if any, named a known target).
    success: bool
    # Echoes the dry_run flag the run was executed with.
    dry_run: bool
    # Per-target results, keyed by target name.
    target_results: dict[str, TargetSyncResult] = field(default_factory=dict)
34
+
35
+
36
class SyncEngine:
    """Orchestrates sync from a single source to multiple targets.

    The engine loads servers and rule sections once from the source
    adapter, then applies per-target filtering and delegates generation
    and writing to each target adapter.
    """

    def __init__(
        self,
        config: AgentSyncConfig,
        source: SourceAdapter,
        targets: dict[str, TargetAdapter],
    ) -> None:
        # Dependencies are injected; the engine holds no adapter-specific
        # logic of its own.
        self._config = config
        self._source = source
        self._targets = targets

    # ------------------------------------------------------------------
    # Public
    # ------------------------------------------------------------------

    def run(
        self,
        *,
        dry_run: bool = False,
        mcp_only: bool = False,
        rules_only: bool = False,
        target_filter: str | None = None,
        quiet: bool = False,
    ) -> SyncResult:
        """Execute the sync pipeline.

        Args:
            dry_run: Forwarded to the logger and each target's ``write``;
                targets are expected to report rather than write.
            mcp_only: Skip loading/generating rules.
            rules_only: Skip loading/generating MCP server configs.
            target_filter: Restrict the run to one named target; an
                unknown name fails the whole run without syncing.
            quiet: Suppress console output from the logger.

        Returns a :class:`SyncResult` summarising what happened.
        """
        # Imported lazily — presumably to avoid a circular import with
        # agentsync.utils.logger; confirm before hoisting to module level.
        from agentsync.utils.logger import SyncLogger

        log = SyncLogger(dry_run=dry_run, quiet=quiet)
        result = SyncResult(success=True, dry_run=dry_run)

        # Determine which targets to process
        target_names = list(self._targets)
        if target_filter:
            if target_filter not in self._targets:
                log.error(f"Unknown target '{target_filter}'")
                result.success = False
                return result
            target_names = [target_filter]

        # --- MCP servers ---
        # Loaded and deduplicated once, shared by all targets; per-target
        # filtering happens later in _filter_servers.
        servers: dict[str, ServerConfig] = {}
        if not rules_only:
            log.section("Loading MCP servers")
            servers = self._source.load_servers()
            servers = dedup_servers(servers, log)
            log.info(f"Total: {len(servers)} unique servers after dedup")

        # --- Rules (sections) ---
        all_sections = []
        if not mcp_only:
            log.section("Loading rules")
            all_sections = self._source.load_rules()
            log.info(f"Loaded {len(all_sections)} sections from source")

        # --- Per-target processing ---
        for name in target_names:
            target = self._targets[name]
            tr = TargetSyncResult(target_name=name, success=True)

            log.section(f"Target: {name}")

            try:
                # MCP: skipped entirely when the source yielded no servers.
                if not rules_only and servers:
                    filtered_servers = self._filter_servers(servers, name)
                    log.info(
                        f"{len(filtered_servers)}/{len(servers)} servers after filtering for {name}"
                    )
                    target.generate_mcp(filtered_servers)

                # Rules: the exclusion set comes from global config and is
                # applied identically for every target.
                if not mcp_only and all_sections:
                    exclude_set = set(self._config.rules.exclude_sections)
                    filtered = filter_sections(all_sections, exclude_set)
                    log.info(
                        f"{len(filtered)}/{len(all_sections)} sections after filtering for {name}"
                    )
                    target.generate_rules(filtered)

                # Write: the dry_run flag is forwarded to the target adapter.
                writes = target.write(dry_run=dry_run)
                tr.writes = writes

            except Exception as exc:  # noqa: BLE001
                # One failing target must not abort the others; record the
                # error and continue with the remaining targets.
                tr.success = False
                tr.errors.append(str(exc))
                log.error(f"{name}: {exc}")

            result.target_results[name] = tr
            if not tr.success:
                result.success = False

        return result

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------

    def _filter_servers(
        self,
        servers: dict[str, ServerConfig],
        target_name: str,
    ) -> dict[str, ServerConfig]:
        """Apply per-target exclude_servers and protocol filtering.

        A target with no config entry receives all servers unchanged.
        An empty ``protocols`` list means "no protocol restriction".
        """
        target_cfg = self._config.targets.get(target_name)
        if target_cfg is None:
            return servers

        # Server-name exclusion is case-insensitive; protocol names are
        # normalised to lowercase before matching.
        exclude = {s.lower() for s in target_cfg.exclude_servers}
        protocols = {p.lower() for p in target_cfg.protocols}

        filtered: dict[str, ServerConfig] = {}
        for key, sc in servers.items():
            if key.lower() in exclude:
                continue
            if protocols:
                # Only "stdio" and "http" are recognised protocol names here.
                matches = ("stdio" in protocols and sc.is_stdio) or (
                    "http" in protocols and sc.is_http
                )
                if not matches:
                    continue
            filtered[key] = sc

        return filtered
@@ -0,0 +1 @@
1
+ """Shared utilities for agentsync."""
@@ -0,0 +1,29 @@
1
+ """File backup utilities."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import shutil
6
+ from datetime import datetime
7
+ from pathlib import Path
8
+ from typing import TYPE_CHECKING
9
+
10
+ if TYPE_CHECKING:
11
+ from agentsync.utils.logger import SyncLogger
12
+
13
+
14
def backup_file(path: Path, backup_dir: Path, log: SyncLogger) -> Path | None:
    """Copy *path* into *backup_dir* under a timestamped name.

    The backup is named ``<original-name>.<YYYYmmdd_HHMMSS>.bak`` and
    preserves file metadata (``shutil.copy2``).

    Returns the path of the new backup, or ``None`` when *path* does
    not exist (nothing to back up).
    """
    if not path.exists():
        return None

    backup_dir.mkdir(parents=True, exist_ok=True)
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    destination = backup_dir / f"{path.name}.{stamp}.bak"
    shutil.copy2(path, destination)
    log.info(f"Backup: {path} -> {destination}")
    return destination
@@ -0,0 +1,32 @@
1
+ """Case-insensitive server deduplication."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import TYPE_CHECKING
6
+
7
+ from agentsync.adapters.base import ServerConfig
8
+
9
+ if TYPE_CHECKING:
10
+ from agentsync.utils.logger import SyncLogger
11
+
12
+
13
def dedup_servers(
    servers: dict[str, ServerConfig],
    log: SyncLogger,
) -> dict[str, ServerConfig]:
    """Deduplicate servers by case-insensitive key comparison.

    When two keys differ only by case (e.g. ``Notion`` vs ``notion``),
    the later entry wins. All returned keys are lowercase.
    """
    merged: dict[str, ServerConfig] = {}
    original_names: dict[str, str] = {}

    for name, config in servers.items():
        folded = name.lower()
        previous = original_names.get(folded)
        if previous is not None and previous != name:
            log.warn(f"Dedup: '{previous}' replaced by '{name}' (case-insensitive merge)")
        original_names[folded] = name
        merged[folded] = config

    return merged
@@ -0,0 +1,44 @@
1
+ """Server diff display utilities."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+ from pathlib import Path
7
+ from typing import TYPE_CHECKING
8
+
9
+ from agentsync.adapters.base import ServerConfig
10
+
11
+ if TYPE_CHECKING:
12
+ from agentsync.utils.logger import SyncLogger
13
+
14
+
15
def show_server_diff(
    target_name: str,
    existing_path: Path,
    new_servers: dict[str, ServerConfig],
    log: SyncLogger,
) -> None:
    """Log which servers are added/removed compared to an existing JSON file.

    Args:
        target_name: Label used as a prefix in every log line.
        existing_path: JSON file expected to hold a top-level
            ``mcpServers`` object.
        new_servers: Servers about to be written (only keys are compared).
        log: Destination for the human-readable diff lines.
    """
    new_names = set(new_servers)

    if not existing_path.exists():
        log.info(
            f"{target_name}: file doesn't exist yet, will create with {len(new_names)} servers"
        )
        return

    try:
        data = json.loads(existing_path.read_text())
        # A file whose top level is not an object (e.g. a bare JSON array)
        # has no .get(); treat it as an empty config instead of crashing
        # with AttributeError.
        if isinstance(data, dict):
            existing_names: set[str] = set(data.get("mcpServers", {}))
        else:
            existing_names = set()
    except (OSError, json.JSONDecodeError, KeyError):
        # Unreadable or malformed file: fall back to "no existing servers".
        existing_names = set()

    added = new_names - existing_names
    removed = existing_names - new_names

    if added:
        log.info(f"{target_name}: +{len(added)} servers ({', '.join(sorted(added))})")
    if removed:
        log.info(f"{target_name}: -{len(removed)} servers ({', '.join(sorted(removed))})")
    if not added and not removed:
        log.info(f"{target_name}: same {len(new_names)} servers")
agentsync/utils/io.py ADDED
@@ -0,0 +1,88 @@
1
+ """File writing utilities that return WriteResult."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+ from pathlib import Path
7
+ from typing import TYPE_CHECKING, Any
8
+
9
+ from agentsync.adapters.base import WriteResult
10
+ from agentsync.utils.backup import backup_file
11
+
12
+ if TYPE_CHECKING:
13
+ from agentsync.utils.logger import SyncLogger
14
+
15
+
16
def write_json(
    path: Path,
    data: Any,
    log: SyncLogger,
    backup_dir: Path | None = None,
    dry_run: bool = False,
) -> WriteResult:
    """Serialize *data* as JSON and write to *path*.

    Args:
        path: Destination file.
        data: Any JSON-serializable object.
        log: Logger for progress and error messages.
        backup_dir: When given, an existing file is backed up there first.
        dry_run: When True, nothing is written; the result only reports
            what would happen.

    Returns a :class:`WriteResult` describing the outcome.
    """
    try:
        # json.dumps raises TypeError for unserializable objects and
        # ValueError for circular references — report those as a failed
        # WriteResult instead of letting them propagate to the caller.
        content = json.dumps(data, indent=2, ensure_ascii=False) + "\n"
        # Round-trip sanity check (JSONDecodeError subclasses ValueError).
        json.loads(content)
    except (TypeError, ValueError) as e:
        msg = f"JSON validation failed for {path}: {e}"
        log.error(msg)
        return WriteResult(path=str(path), written=False, message=msg)

    return _write(path, content, log, backup_dir=backup_dir, dry_run=dry_run)
38
+
39
+
40
def write_text(
    path: Path,
    content: str,
    log: SyncLogger,
    backup_dir: Path | None = None,
    dry_run: bool = False,
) -> WriteResult:
    """Write plain text to *path*.

    Delegates directly to :func:`_write`.

    Returns a :class:`WriteResult` describing the outcome.
    """
    result = _write(path, content, log, backup_dir=backup_dir, dry_run=dry_run)
    return result
52
+
53
+
54
+ # ------------------------------------------------------------------
55
+ # Internal helper
56
+ # ------------------------------------------------------------------
57
+
58
+
59
def _write(
    path: Path,
    content: str,
    log: SyncLogger,
    *,
    backup_dir: Path | None,
    dry_run: bool,
) -> WriteResult:
    """Shared write path for :func:`write_json` / :func:`write_text`.

    Handles dry-run reporting, optional backup of an existing file, and
    creation of the parent directory.
    """
    size = len(content.encode())

    if dry_run:
        # Describe the outcome without touching the filesystem.
        if not path.exists():
            message = f"{path}: WOULD CREATE ({size} bytes)"
        elif path.read_text() == content:
            message = f"{path}: no changes"
        else:
            message = f"{path}: WOULD UPDATE ({size} bytes)"
        log.info(message)
        return WriteResult(path=str(path), written=False, bytes_written=0, message=message)

    if backup_dir is not None:
        backup_file(path, backup_dir, log)

    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(content)
    message = f"Written: {path} ({size} bytes)"
    log.info(message)
    return WriteResult(path=str(path), written=True, bytes_written=size, message=message)
@@ -0,0 +1,56 @@
1
+ """Rich-based logging utilities for agentsync."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from datetime import datetime
6
+ from pathlib import Path
7
+
8
+ from rich.console import Console
9
+ from rich.rule import Rule
10
+
11
+
12
class SyncLogger:
    """Logger with rich console output and optional file flushing.

    Messages are echoed to the console immediately and also buffered in
    memory so they can later be appended to a dated log file via
    :meth:`flush_to_file`.
    """

    def __init__(self, dry_run: bool = False, quiet: bool = False) -> None:
        self.dry_run = dry_run
        self.quiet = quiet
        # quiet only silences the console; the buffer still accumulates.
        self._console = Console(quiet=quiet)
        self._buffer: list[str] = []

    def _record(self, msg: str, level: str = "INFO") -> None:
        """Append *msg* to the in-memory buffer with timestamp and level."""
        prefix = "[DRY-RUN] " if self.dry_run else ""
        timestamp = datetime.now().strftime("%H:%M:%S")
        self._buffer.append(f"[{timestamp}] [{level}] {prefix}{msg}")

    def info(self, msg: str) -> None:
        self._record(msg, "INFO")
        self._console.print(f" [green]INFO[/green] {msg}")

    def warn(self, msg: str) -> None:
        self._record(msg, "WARN")
        self._console.print(f" [yellow]WARN[/yellow] {msg}")

    def error(self, msg: str) -> None:
        self._record(msg, "ERROR")
        self._console.print(f" [red]ERROR[/red] {msg}")

    def section(self, title: str) -> None:
        self._record(f"=== {title} ===")
        self._console.print(Rule(title))

    def flush_to_file(self, log_dir: Path) -> None:
        """Write buffered messages to a dated log file.

        Appends to ``sync-YYYY-MM-DD.log`` inside *log_dir*; no-op when
        nothing has been logged.
        """
        if not self._buffer:
            return
        log_dir.mkdir(parents=True, exist_ok=True)
        log_file = log_dir / f"sync-{datetime.now().strftime('%Y-%m-%d')}.log"
        # Explicit UTF-8: message text is arbitrary and the platform
        # default encoding is not guaranteed to handle non-ASCII.
        with open(log_file, "a", encoding="utf-8") as f:
            f.write("\n".join(self._buffer) + "\n")
50
+
51
+
52
class SilentLogger(SyncLogger):
    """Suppresses console output; buffer is still preserved for flush_to_file."""

    def __init__(self, dry_run: bool = False) -> None:
        # Same as SyncLogger with quiet forced on.
        super().__init__(dry_run=dry_run, quiet=True)
@@ -0,0 +1,91 @@
1
+ """Markdown section parsing and filtering — pure functions, no I/O."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from agentsync.adapters.base import Section
6
+
7
+
8
def parse_markdown_sections(content: str) -> list[Section]:
    """Parse markdown *content* into a flat list of :class:`Section` objects.

    Recognises ``##`` (level 2) and ``###`` (level 3) headers. Content
    before the first header (the preamble) is silently discarded. Each
    section's ``content`` includes its own header line.
    """
    sections: list[Section] = []
    current_header: str | None = None
    current_level = 0
    current_lines: list[str] = []

    def flush() -> None:
        # Emit the section accumulated so far, if any.
        if current_header is not None:
            sections.append(
                Section(
                    header=current_header,
                    level=current_level,
                    content="\n".join(current_lines),
                )
            )

    for line in content.split("\n"):
        # The trailing space in the prefixes keeps "####..." headers and
        # bare "##"/"###" lines from matching.
        if line.startswith("### "):
            flush()
            current_header = line[4:].strip()
            current_level = 3
            current_lines = [line]
        elif line.startswith("## "):
            flush()
            current_header = line[3:].strip()
            current_level = 2
            current_lines = [line]
        elif current_header is not None:
            # Inside a section; preamble lines (no header seen yet) drop.
            current_lines.append(line)

    flush()
    return sections
60
+
61
+
62
def filter_sections(sections: list[Section], exclude_set: set[str]) -> list[Section]:
    """Filter out sections whose headers appear in *exclude_set*.

    When a level-2 (``##``) section is excluded, all its level-3 children
    are excluded too. A level-3 section listed explicitly is removed on
    its own without affecting siblings.
    """
    kept: list[Section] = []
    under_excluded_parent = False

    for sec in sections:
        excluded = sec.header in exclude_set

        if sec.level == 2:
            # Entering a new top-level section resets the parent flag.
            under_excluded_parent = excluded
            if not excluded:
                kept.append(sec)
            continue

        # Children of an excluded parent are dropped wholesale.
        if sec.level == 3 and under_excluded_parent:
            continue
        # Individually excluded section.
        if excluded:
            continue
        kept.append(sec)

    return kept
@@ -0,0 +1,159 @@
1
+ """Rich output helpers for CLI commands."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from pathlib import Path
6
+ from typing import TYPE_CHECKING
7
+
8
+ from rich.console import Console
9
+ from rich.table import Table
10
+
11
+ if TYPE_CHECKING:
12
+ from agentsync.adapters.base import SourceAdapter, TargetAdapter
13
+ from agentsync.config import AgentSyncConfig
14
+ from agentsync.sync import SyncResult
15
+ from agentsync.validate import ValidationReport
16
+
17
+
18
def print_sync_summary(result: SyncResult, console: Console | None = None) -> None:
    """Print a coloured summary of sync results.

    One aggregate header line, then one status line per target, then a
    dry-run notice when applicable.
    """
    con = console or Console()

    # Pre-compute per-target written-file counts so the header and the
    # per-target lines agree.
    written_counts = {
        name: sum(1 for w in tr.writes if w.written)
        for name, tr in result.target_results.items()
    }
    total_files = sum(written_counts.values())
    total_errors = sum(len(tr.errors) for tr in result.target_results.values())

    n_targets = len(result.target_results)
    header = f"Sync complete: {n_targets} target{'s' if n_targets != 1 else ''}"
    header += f", {total_files} file{'s' if total_files != 1 else ''} written"
    if total_errors:
        header += f", [red]{total_errors} error{'s' if total_errors != 1 else ''}[/red]"
    else:
        header += ", 0 errors"

    con.print()
    con.print(header)

    for name, tr in result.target_results.items():
        n_written = written_counts[name]
        if tr.success:
            mark = "[green]\u2713[/green]"
            detail = f"{n_written} file{'s' if n_written != 1 else ''}"
        else:
            mark = "[red]\u2717[/red]"
            detail = "; ".join(tr.errors) if tr.errors else "failed"

        con.print(f" {name:<16} {mark} {detail}")

    if result.dry_run:
        con.print()
        con.print("[yellow]DRY RUN \u2014 no files were written[/yellow]")
54
+
55
+
56
def print_validation_report(
    report: ValidationReport,
    verbose: bool = False,
    console: Console | None = None,
) -> None:
    """Print a coloured validation report.

    Failing checks are always listed; passing checks only when *verbose*
    is set. Ends with a one-line pass/fail tally.
    """
    con = console or Console()

    results = report.results
    passed = sum(1 for r in results if r.passed)
    failed = len(results) - passed

    con.print()

    for r in results:
        if not r.passed or verbose:
            mark = "[green]\u2713[/green]" if r.passed else "[red]\u2717[/red]"
            con.print(f" {mark} {r.name}: {r.message}")

    con.print()
    summary_parts: list[str] = []
    if passed:
        summary_parts.append(f"[green]{passed} passed[/green]")
    if failed:
        summary_parts.append(f"[red]{failed} failed[/red]")
    con.print(f"Validation: {', '.join(summary_parts)}")
82
+
83
+
84
def print_status(
    config: AgentSyncConfig,
    source: SourceAdapter,
    targets: dict[str, TargetAdapter],
    console: Console | None = None,
) -> None:
    """Print current sync status: source info and per-target state."""
    from agentsync.config import resolve_path

    con = console or Console()

    # --- Source ---
    con.print()
    con.print(f"[bold]Source:[/bold] {config.source.type}")

    _print_path_status(
        con, "Global config", resolve_path(config.source.global_config, config.config_dir)
    )

    mcp_path = resolve_path(config.source.project_mcp, config.config_dir)
    count: int | None = None
    if mcp_path.is_file():
        try:
            count = len(source.load_servers())
        except Exception:  # noqa: BLE001
            # Best-effort count: an unreadable source just omits the number.
            count = None
    _print_path_status(con, "Project MCP", mcp_path, extra_count=count)

    _print_path_status(
        con, "Rules", resolve_path(config.source.rules_file, config.config_dir)
    )

    # --- Targets ---
    con.print()
    con.print("[bold]Targets:[/bold]")

    table = Table(show_header=True, show_edge=False, pad_edge=False, box=None)
    table.add_column("Name", style="cyan", min_width=14)
    table.add_column("Status", min_width=10)
    table.add_column("Details")

    for name, target in targets.items():
        try:
            checks = target.validate()
            if not checks:
                mark = "[yellow]\u26a0[/yellow]"
                detail = "no validation checks"
            elif all(c.passed for c in checks):
                mark = "[green]\u2713[/green]"
                detail = f"{len(checks)} check{'s' if len(checks) != 1 else ''} passed"
            else:
                n_fail = sum(1 for c in checks if not c.passed)
                mark = "[red]\u2717[/red]"
                detail = f"{n_fail} check{'s' if n_fail != 1 else ''} failed"
        except Exception as exc:  # noqa: BLE001
            mark = "[red]\u2717[/red]"
            detail = str(exc)

        table.add_row(name, mark, detail)

    con.print(table)
145
+
146
+
147
def _print_path_status(
    con: Console,
    label: str,
    path: Path,
    *,
    extra_count: int | None = None,
) -> None:
    """Print a source path with exists/missing indicator."""
    caption = f"{label}:"
    if not path.is_file():
        con.print(f" {caption:<18} {path} [yellow](missing)[/yellow]")
        return
    suffix = "" if extra_count is None else f" ({extra_count} servers)"
    con.print(f" {caption:<18} {path} [green](exists{suffix})[/green]")