skillchef 0.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. skillchef-0.2.0/.gitignore +7 -0
  2. skillchef-0.2.0/PKG-INFO +14 -0
  3. skillchef-0.2.0/README.md +56 -0
  4. skillchef-0.2.0/pyproject.toml +32 -0
  5. skillchef-0.2.0/src/skillchef/__init__.py +1 -0
  6. skillchef-0.2.0/src/skillchef/cli.py +63 -0
  7. skillchef-0.2.0/src/skillchef/commands/__init__.py +1 -0
  8. skillchef-0.2.0/src/skillchef/commands/common.py +27 -0
  9. skillchef-0.2.0/src/skillchef/commands/cook_cmd.py +37 -0
  10. skillchef-0.2.0/src/skillchef/commands/flavor_cmd.py +32 -0
  11. skillchef-0.2.0/src/skillchef/commands/init_cmd.py +48 -0
  12. skillchef-0.2.0/src/skillchef/commands/list_cmd.py +8 -0
  13. skillchef-0.2.0/src/skillchef/commands/remove_cmd.py +16 -0
  14. skillchef-0.2.0/src/skillchef/commands/sync_cmd.py +115 -0
  15. skillchef-0.2.0/src/skillchef/config.py +50 -0
  16. skillchef-0.2.0/src/skillchef/llm.py +80 -0
  17. skillchef-0.2.0/src/skillchef/merge.py +47 -0
  18. skillchef-0.2.0/src/skillchef/remote.py +106 -0
  19. skillchef-0.2.0/src/skillchef/store.py +138 -0
  20. skillchef-0.2.0/src/skillchef/ui.py +185 -0
  21. skillchef-0.2.0/test/skills/code-reviewer/SKILL.md +23 -0
  22. skillchef-0.2.0/test/skills/code-reviewer/scripts/lint_check.py +9 -0
  23. skillchef-0.2.0/test/skills/hello-chef/SKILL.md +18 -0
  24. skillchef-0.2.0/tests/conftest.py +48 -0
  25. skillchef-0.2.0/tests/test_cli.py +16 -0
  26. skillchef-0.2.0/tests/test_config.py +25 -0
  27. skillchef-0.2.0/tests/test_llm.py +51 -0
  28. skillchef-0.2.0/tests/test_merge.py +46 -0
  29. skillchef-0.2.0/tests/test_remote.py +51 -0
  30. skillchef-0.2.0/tests/test_store.py +86 -0
  31. skillchef-0.2.0/tests/test_sync_cmd.py +108 -0
  32. skillchef-0.2.0/uv.lock +1879 -0
@@ -0,0 +1,7 @@
1
+ __pycache__/
2
+ *.pyc
3
+ *.egg-info/
4
+ dist/
5
+ build/
6
+ .venv/
7
+ .env
@@ -0,0 +1,14 @@
1
+ Metadata-Version: 2.4
2
+ Name: skillchef
3
+ Version: 0.2.0
4
+ Summary: pyenv + git for agent skills. Cook, flavor, and sync skills from any source.
5
+ License-Expression: MIT
6
+ Requires-Python: >=3.11
7
+ Requires-Dist: click>=8.3.1
8
+ Requires-Dist: httpx>=0.28.1
9
+ Requires-Dist: litellm>=1.81.11
10
+ Requires-Dist: questionary>=2.1.0
11
+ Requires-Dist: rich>=14.3.2
12
+ Requires-Dist: tomli-w>=1.2.0
13
+ Provides-Extra: test
14
+ Requires-Dist: pytest>=8.3.0; extra == 'test'
@@ -0,0 +1,56 @@
1
+ # skillchef
2
+
3
+ Cook, flavor & sync agent skills from any source.
4
+
5
+ ## Install
6
+
7
+ ```
8
+ uvx skillchef
9
+ uv tool install skillchef
10
+ ```
11
+
12
+ ## Usage
13
+
14
+ ```
15
+ skillchef init # configure platforms, editor, AI model
16
+ skillchef cook <source> # import a skill (GitHub URL, HTTP URL, local path)
17
+ skillchef sync [skill-name] # pull upstream changes, merge with your flavor
18
+ skillchef flavor [skill-name] # edit local customizations in $EDITOR
19
+ skillchef list # show all managed skills
20
+ skillchef remove <skill-name> # remove a skill and its symlinks
21
+ ```
22
+
23
+ ## How it works
24
+
25
+ Skills are stored in `~/.skillchef/store/<name>/` with three layers:
26
+
27
+ ```
28
+ base/ ← pristine copy from remote
29
+ flavor.md ← your local additions (optional)
30
+ live/ ← merged result (base + flavor), symlinked into platform dirs
31
+ ```
32
+
33
+ `cook` fetches a skill and symlinks it into your configured platform directories (`~/.codex/skills/`, `~/.cursor/skills/`, etc).
34
+
35
+ `sync` checks the remote for changes. If your skill has a flavor, it shows the upstream diff and proposes a semantic merge via LLM (auto-detected from env API keys).
36
+
37
+ `flavor` opens your editor to add local customizations that persist across syncs.
38
+
39
+ ## Remote sources
40
+
41
+ - **GitHub**: `https://github.com/user/repo/tree/main/path/to/skill`
42
+ - **HTTP**: any direct URL to a SKILL.md file
43
+ - **Local**: path to a skill directory or file on disk
44
+
45
+ ## Testing
46
+
47
+ ```
48
+ python -m pip install -e ".[test]"
49
+ pytest -q
50
+ ```
51
+
52
+ The suite lives in `tests/` and focuses on:
53
+ - storage lifecycle (`cook`, `rebuild_live`, `remove`)
54
+ - remote source classification/fetch behavior
55
+ - merge semantics and flavor application
56
+ - key sync command branches (no-flavor update and AI-merge path)
@@ -0,0 +1,32 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [project]
6
+ name = "skillchef"
7
+ version = "0.2.0"
8
+ description = "pyenv + git for agent skills. Cook, flavor, and sync skills from any source."
9
+ requires-python = ">=3.11"
10
+ license = "MIT"
11
+ dependencies = [
12
+ "click>=8.3.1",
13
+ "rich>=14.3.2",
14
+ "httpx>=0.28.1",
15
+ "tomli-w>=1.2.0",
16
+ "litellm>=1.81.11",
17
+ "questionary>=2.1.0",
18
+ ]
19
+
20
+ [project.scripts]
21
+ skillchef = "skillchef.cli:main"
22
+
23
+ [project.optional-dependencies]
24
+ test = [
25
+ "pytest>=8.3.0",
26
+ ]
27
+
28
+ [tool.pytest.ini_options]
29
+ testpaths = ["tests"]
30
+
31
+ [tool.hatch.build.targets.wheel]
32
+ packages = ["src/skillchef"]
@@ -0,0 +1 @@
1
+ __version__ = "0.1.0"
@@ -0,0 +1,63 @@
1
from __future__ import annotations

import click

from skillchef.commands import (
    cook_cmd,
    flavor_cmd,
    init_cmd,
    list_cmd as list_command,
    remove_cmd,
    sync_cmd,
)


@click.group()
def main() -> None:
    """skillchef — cook, flavor & sync your agent skills."""


@main.command()
def init() -> None:
    """First-time setup: platforms, editor, model."""
    init_cmd.run()


@main.command()
@click.argument("source")
def cook(source: str) -> None:
    """Import a skill from a remote source or local path."""
    cook_cmd.run(source)


@main.command()
@click.argument("skill_name", required=False)
@click.option("--no-ai", is_flag=True, help="Disable automatic AI merge proposals.")
def sync(skill_name: str | None, no_ai: bool) -> None:
    """Check remotes for updates and merge."""
    sync_cmd.run(skill_name, no_ai)


@main.command()
@click.argument("skill_name", required=False)
def flavor(skill_name: str | None) -> None:
    """Add or edit a local flavor for a skill."""
    flavor_cmd.run(skill_name)


# Named `list_cmd` locally so the function does not shadow the builtin `list`;
# the user-facing command name stays `list` via the decorator.
@main.command(name="list")
def list_cmd() -> None:
    """List all managed skills."""
    list_command.run()


@main.command()
@click.argument("skill_name")
def remove(skill_name: str) -> None:
    """Remove a managed skill."""
    remove_cmd.run(skill_name)


if __name__ == "__main__":
    main()
@@ -0,0 +1 @@
1
+ """Namespaced command implementations for the Skillchef CLI."""
@@ -0,0 +1,27 @@
1
from __future__ import annotations

import shutil
import subprocess
from pathlib import Path
from typing import Any

from skillchef import config, ui


def ensure_config() -> dict[str, Any]:
    """Return the loaded config, exiting with a hint if setup was never run."""
    loaded = config.load()
    if loaded.get("platforms"):
        return loaded
    ui.warn("No config found. Run [bold]skillchef init[/bold] first.")
    raise SystemExit(1)


def open_editor(path: Path) -> None:
    """Open *path* in the user's configured editor and block until it exits."""
    chosen = config.editor(config.load())
    # List form (shell=False) avoids any shell interpretation of the path.
    subprocess.call([chosen, str(path)])


def cleanup_fetched(fetched_dir: Path) -> None:
    """Best-effort removal of a temporary fetch directory.

    Fetches land in a wrapper dir containing a `skill/` subdir; delete the
    wrapper in that case so no empty parent is left behind.
    """
    target = fetched_dir if fetched_dir.name != "skill" else fetched_dir.parent
    shutil.rmtree(target, ignore_errors=True)
@@ -0,0 +1,37 @@
1
from __future__ import annotations

from skillchef import config, merge, remote, store, ui

from .common import cleanup_fetched, ensure_config


def _frontmatter_name(front: str) -> str | None:
    """Extract the `name:` value from frontmatter text, or None if absent/empty."""
    for line in front.splitlines():
        if line.strip().startswith("name:"):
            value = line.split(":", 1)[1].strip().strip('"').strip("'")
            return value or None
    return None


def run(source: str) -> None:
    """Import a skill from *source* (GitHub/HTTP URL or local path) into the store.

    Prompts for the skill name (defaulting to the SKILL.md frontmatter name,
    falling back to the fetched directory name) and the target platforms, then
    cooks the skill and reports where it was symlinked.
    """
    ui.banner()
    cfg = ensure_config()

    ui.info(f"Fetching from {source}...")
    try:
        fetched_dir, remote_type = remote.fetch(source)
    except Exception as e:
        ui.error(f"Failed to fetch: {e}")
        # Chain the cause so the underlying fetch error stays debuggable.
        raise SystemExit(1) from e

    # Prefer the name declared in SKILL.md frontmatter over the directory name.
    # An empty `name:` value no longer clobbers the directory-name default.
    default_name = fetched_dir.name
    skill_md = fetched_dir / "SKILL.md"
    if skill_md.exists():
        front, _ = merge.split_frontmatter(skill_md.read_text())
        declared = _frontmatter_name(front)
        if declared:
            default_name = declared

    name = ui.ask("Skill name", default=default_name)
    platforms = ui.multi_choose("Target platforms", cfg.get("platforms", list(config.PLATFORMS.keys())))

    store.cook(name, fetched_dir, source, remote_type, platforms)
    cleanup_fetched(fetched_dir)

    ui.success(f"Cooked [bold]{name}[/bold]!")
    for p in platforms:
        ui.info(f"Symlinked → {config.platform_skill_dir(p) / name}")
@@ -0,0 +1,32 @@
1
from __future__ import annotations

from skillchef import merge, store, ui

from .common import ensure_config, open_editor


def run(skill_name: str | None) -> None:
    """Edit a skill's flavor file in the editor, then rebuild and show the diff."""
    ui.banner()
    ensure_config()

    all_skills = store.list_skills()
    if not all_skills:
        ui.info("No skills cooked yet.")
        return

    if not skill_name:
        # No name given on the CLI: let the user pick interactively.
        skill_name = ui.choose("Which skill?", [entry["name"] for entry in all_skills])

    flavor_file = store.flavor_path(skill_name)
    if not flavor_file.exists():
        # Seed the file so the editor opens with a helpful header.
        flavor_file.write_text("# Add your local flavor below\n\n")

    before = store.live_skill_text(skill_name)
    open_editor(flavor_file)
    store.rebuild_live(skill_name)
    after = store.live_skill_text(skill_name)

    ui.show_diff(merge.diff_texts(before, after, "before", "after"))
    ui.success(f"Flavor saved for [bold]{skill_name}[/bold]")
@@ -0,0 +1,48 @@
1
from __future__ import annotations

import os

from skillchef import config, ui
from skillchef.llm import detect_keys


def run() -> None:
    """Interactive first-run setup: platforms, editor, AI model, and API key."""
    ui.banner()
    ui.console.print()

    ui.info("Scanning for agent platforms...\n")
    ui.show_platforms(config.PLATFORMS)
    ui.console.print()

    detected = detect_keys()
    ui.show_detected_keys(detected)
    ui.console.print()

    platforms = ui.multi_choose(
        "Which platforms do you use?",
        list(config.PLATFORMS.keys()),
    )

    editor = ui.ask("Preferred editor", default=os.environ.get("EDITOR", "vim"))

    selected_key_env = ""
    if detected:
        if len(detected) == 1:
            selected_key_env, provider = detected[0]
        else:
            # Several keys present: let the user pick which one powers AI merge.
            labels = {f"{provider} ({env_var})": env_var for env_var, provider in detected}
            picked = ui.choose(
                "Multiple LLM keys found. Which key should AI merge use?",
                list(labels.keys()),
            )
            selected_key_env = labels[picked]
            provider = next(p for env_var, p in detected if env_var == selected_key_env)
        ui.info(f"AI merge will use [bold]{selected_key_env}[/bold] ({provider})")
    model = ui.ask("AI model for semantic merge", default="anthropic/claude-sonnet-4-20250514")

    cfg = {"platforms": platforms, "editor": editor, "model": model, "llm_api_key_env": selected_key_env}
    config.save(cfg)

    ui.console.print()
    ui.show_config_summary(cfg)
    ui.console.print()
    ui.success(f"Config saved to [bold]{config.CONFIG_PATH}[/bold]")
@@ -0,0 +1,8 @@
1
from __future__ import annotations

from skillchef import store, ui


def run() -> None:
    """Render the table of all cooked skills."""
    ui.banner()
    skills = store.list_skills()
    ui.skill_table(skills, has_flavor_fn=store.has_flavor)
@@ -0,0 +1,16 @@
1
from __future__ import annotations

from skillchef import store, ui


def run(skill_name: str) -> None:
    """Delete a managed skill (and its symlinks, via store.remove) after confirming."""
    ui.banner()
    try:
        # Existence check: load_meta raises if the skill is unknown.
        store.load_meta(skill_name)
    except FileNotFoundError:
        ui.error(f"Skill '{skill_name}' not found.")
        raise SystemExit(1)

    if not ui.confirm(f"Remove [bold]{skill_name}[/bold]?", default=False):
        return
    store.remove(skill_name)
    ui.success(f"Removed {skill_name}")
@@ -0,0 +1,115 @@
1
+ from __future__ import annotations
2
+
3
+ from concurrent.futures import Future, ThreadPoolExecutor
4
+ from typing import Any
5
+
6
+ from skillchef import config, merge, remote, store, ui
7
+ from skillchef.llm import selected_key, semantic_merge
8
+
9
+ from .common import cleanup_fetched, ensure_config, open_editor
10
+
11
+
12
+ def run(skill_name: str | None, no_ai: bool) -> None:
13
+ ui.banner()
14
+ ensure_config()
15
+
16
+ cfg = config.load()
17
+ key = selected_key(cfg.get("llm_api_key_env", ""))
18
+ ai_available = key is not None and not no_ai
19
+ if ai_available and key:
20
+ ui.info(f"Using [bold]{key[0]}[/bold] ({key[1]}) for semantic merge")
21
+
22
+ skills = store.list_skills()
23
+ if not skills:
24
+ ui.info("No skills to sync.")
25
+ return
26
+
27
+ if skill_name:
28
+ skills = [s for s in skills if s["name"] == skill_name]
29
+ if not skills:
30
+ ui.error(f"Skill '{skill_name}' not found.")
31
+ raise SystemExit(1)
32
+
33
+ for meta in skills:
34
+ _sync_one(meta, ai_available=ai_available)
35
+
36
+
37
+ def _sync_one(meta: dict[str, Any], ai_available: bool = False) -> None:
38
+ name = meta["name"]
39
+ ui.info(f"Syncing [bold]{name}[/bold]...")
40
+
41
+ try:
42
+ fetched_dir, _ = remote.fetch(meta["remote_url"])
43
+ except Exception as e:
44
+ ui.warn(f" Could not fetch {name}: {e}")
45
+ return
46
+
47
+ new_hash = store.hash_dir(fetched_dir)
48
+ if new_hash == meta.get("base_sha256"):
49
+ ui.success(f" {name}: up to date")
50
+ cleanup_fetched(fetched_dir)
51
+ return
52
+
53
+ old_base = store.base_skill_text(name)
54
+ new_remote = (fetched_dir / "SKILL.md").read_text() if (fetched_dir / "SKILL.md").exists() else ""
55
+ diff_lines = merge.diff_texts(old_base, new_remote, "base (current)", "remote (new)")
56
+
57
+ ai_future: Future[str] | None = None
58
+ flavor_text = ""
59
+ if store.has_flavor(name) and ai_available:
60
+ flavor_text = store.flavor_path(name).read_text()
61
+ executor = ThreadPoolExecutor(max_workers=1)
62
+ ai_future = executor.submit(semantic_merge, old_base, new_remote, flavor_text)
63
+
64
+ ui.show_diff(diff_lines)
65
+
66
+ if not store.has_flavor(name):
67
+ if ui.confirm("Accept update?"):
68
+ store.update_base(name, fetched_dir)
69
+ store.rebuild_live(name)
70
+ ui.success(f" {name}: updated")
71
+ else:
72
+ ui.info(f" {name}: skipped")
73
+ else:
74
+ if not flavor_text:
75
+ flavor_text = store.flavor_path(name).read_text()
76
+
77
+ ai_result = _resolve_ai_future(ai_future)
78
+ choices = ["accept + re-apply flavor", "keep current", "manual edit"]
79
+ if ai_result:
80
+ choices.insert(0, "accept ai merge")
81
+ ui.info("AI proposed a semantic merge:")
82
+ ai_diff = merge.diff_texts(store.live_skill_text(name), ai_result, "current", "ai proposed")
83
+ ui.show_diff(ai_diff)
84
+
85
+ action = ui.choose("How to handle?", choices)
86
+
87
+ if action == "accept ai merge" and ai_result:
88
+ store.update_base(name, fetched_dir)
89
+ live_md = store.skill_dir(name) / "live" / "SKILL.md"
90
+ live_md.write_text(ai_result)
91
+ ui.success(f" {name}: AI merged")
92
+ elif action == "accept + re-apply flavor":
93
+ store.update_base(name, fetched_dir)
94
+ store.rebuild_live(name)
95
+ ui.success(f" {name}: rebased with flavor")
96
+ elif action == "keep current":
97
+ ui.info(f" {name}: skipped")
98
+ elif action == "manual edit":
99
+ store.update_base(name, fetched_dir)
100
+ store.rebuild_live(name)
101
+ open_editor(store.skill_dir(name) / "live" / "SKILL.md")
102
+ ui.success(f" {name}: manually merged")
103
+
104
+ cleanup_fetched(fetched_dir)
105
+
106
+
107
+ def _resolve_ai_future(future: Future[str] | None) -> str | None:
108
+ if future is None:
109
+ return None
110
+ try:
111
+ with ui.spinner("Waiting for AI merge proposal..."):
112
+ return future.result(timeout=60)
113
+ except Exception as e:
114
+ ui.warn(f" AI merge failed: {e}")
115
+ return None
@@ -0,0 +1,50 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ import tomllib
5
+ from pathlib import Path
6
+ from typing import Any
7
+
8
+ import tomli_w
9
+
10
+ SKILLCHEF_HOME = Path.home() / ".skillchef"
11
+ CONFIG_PATH = SKILLCHEF_HOME / "config.toml"
12
+ STORE_DIR = SKILLCHEF_HOME / "store"
13
+
14
+ PLATFORMS: dict[str, Path] = {
15
+ "codex": Path.home() / ".codex" / "skills",
16
+ "cursor": Path.home() / ".cursor" / "skills",
17
+ "claude-code": Path.home() / ".claude" / "skills",
18
+ }
19
+
20
+ DEFAULT_CONFIG: dict[str, Any] = {
21
+ "platforms": [],
22
+ "editor": "",
23
+ "model": "anthropic/claude-sonnet-4-20250514",
24
+ "llm_api_key_env": "",
25
+ }
26
+
27
+
28
+ def load() -> dict[str, Any]:
29
+ if not CONFIG_PATH.exists():
30
+ return dict(DEFAULT_CONFIG)
31
+ return tomllib.loads(CONFIG_PATH.read_text())
32
+
33
+
34
+ def save(cfg: dict[str, Any]) -> None:
35
+ SKILLCHEF_HOME.mkdir(parents=True, exist_ok=True)
36
+ CONFIG_PATH.write_bytes(tomli_w.dumps(cfg).encode())
37
+
38
+
39
+ def editor(cfg: dict[str, Any] | None = None) -> str:
40
+ cfg = cfg or load()
41
+ return cfg.get("editor") or os.environ.get("EDITOR", "vim")
42
+
43
+
44
+ def platform_skill_dir(platform: str) -> Path:
45
+ return PLATFORMS[platform]
46
+
47
+
48
+ def ensure_store() -> Path:
49
+ STORE_DIR.mkdir(parents=True, exist_ok=True)
50
+ return STORE_DIR
@@ -0,0 +1,80 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+
5
+ from litellm import completion
6
+
7
+ from skillchef import config
8
+
9
+ LLM_KEY_MAP = [
10
+ ("ANTHROPIC_API_KEY", "Anthropic"),
11
+ ("OPENAI_API_KEY", "OpenAI"),
12
+ ("GEMINI_API_KEY", "Google Gemini"),
13
+ ("MISTRAL_API_KEY", "Mistral"),
14
+ ("COHERE_API_KEY", "Cohere"),
15
+ ("OLLAMA_API_BASE", "Ollama (local)"),
16
+ ]
17
+
18
+ MERGE_PROMPT = """You are merging an agent skill file. The upstream base has changed.
19
+ The user has a local "flavor" (customization) applied on top of the old base.
20
+
21
+ Your job: produce a single merged SKILL.md that incorporates BOTH the new upstream
22
+ changes AND the user's local flavor. Preserve the intent of both sides.
23
+
24
+ Return ONLY the merged file content, no explanation.
25
+
26
+ === OLD BASE ===
27
+ {old_base}
28
+
29
+ === NEW REMOTE (upstream update) ===
30
+ {new_remote}
31
+
32
+ === USER'S LOCAL FLAVOR ===
33
+ {flavor}
34
+
35
+ === MERGED RESULT ==="""
36
+
37
+
38
+ def detect_keys() -> list[tuple[str, str]]:
39
+ return [(k, v) for k, v in LLM_KEY_MAP if os.environ.get(k)]
40
+
41
+
42
+ def selected_key(preferred_env_var: str | None = None) -> tuple[str, str] | None:
43
+ keys = detect_keys()
44
+ if not keys:
45
+ return None
46
+ if preferred_env_var:
47
+ for env_var, provider in keys:
48
+ if env_var == preferred_env_var:
49
+ return env_var, provider
50
+ return keys[0]
51
+
52
+
53
+ def has_llm() -> bool:
54
+ return len(detect_keys()) > 0
55
+
56
+
57
+ def semantic_merge(old_base: str, new_remote: str, flavor: str, model: str | None = None) -> str:
58
+ cfg = config.load()
59
+ model = model or cfg.get("model", "anthropic/claude-sonnet-4-20250514")
60
+ configured_env = cfg.get("llm_api_key_env", "")
61
+ key = selected_key(configured_env)
62
+
63
+ completion_kwargs: dict[str, str | int | list[dict[str, str]]] = {}
64
+ if key:
65
+ env_var, _provider = key
66
+ value = os.environ.get(env_var, "")
67
+ if value:
68
+ if env_var == "OLLAMA_API_BASE":
69
+ completion_kwargs["api_base"] = value
70
+ else:
71
+ completion_kwargs["api_key"] = value
72
+
73
+ prompt = MERGE_PROMPT.format(old_base=old_base, new_remote=new_remote, flavor=flavor)
74
+ resp = completion(
75
+ model=model,
76
+ messages=[{"role": "user", "content": prompt}],
77
+ temperature=0,
78
+ **completion_kwargs,
79
+ )
80
+ return resp.choices[0].message.content.strip()
@@ -0,0 +1,47 @@
1
+ from __future__ import annotations
2
+
3
+ import difflib
4
+ import re
5
+ from pathlib import Path
6
+
7
+ FRONTMATTER_RE = re.compile(r"^---\s*\n(.*?\n)---\s*\n", re.DOTALL)
8
+ FLAVOR_HEADER = "\n\n## Local Flavor\n\n"
9
+
10
+
11
+ def split_frontmatter(text: str) -> tuple[str, str]:
12
+ m = FRONTMATTER_RE.match(text)
13
+ if m:
14
+ return text[: m.end()], text[m.end() :]
15
+ return "", text
16
+
17
+
18
+ def merge_skill(live_skill_path: Path, flavor_path: Path) -> None:
19
+ base_text = live_skill_path.read_text()
20
+ flavor_text = flavor_path.read_text().strip()
21
+ if not flavor_text:
22
+ return
23
+ front, body = split_frontmatter(base_text)
24
+ body = body.rstrip()
25
+ merged = front + body + FLAVOR_HEADER + flavor_text + "\n"
26
+ live_skill_path.write_text(merged)
27
+
28
+
29
+ def diff_texts(old: str, new: str, label_old: str = "old", label_new: str = "new") -> list[str]:
30
+ return list(difflib.unified_diff(
31
+ old.splitlines(keepends=True),
32
+ new.splitlines(keepends=True),
33
+ fromfile=label_old,
34
+ tofile=label_new,
35
+ ))
36
+
37
+
38
+ def three_way_summary(old_base: str, new_remote: str, flavor: str) -> str:
39
+ lines = []
40
+ base_diff = diff_texts(old_base, new_remote, "base (old)", "remote (new)")
41
+ if base_diff:
42
+ lines.append("=== Upstream changes ===")
43
+ lines.extend(base_diff)
44
+ if flavor.strip():
45
+ lines.append("\n=== Your flavor ===")
46
+ lines.append(flavor)
47
+ return "".join(lines)