mdb-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mdb/__init__.py +0 -0
- mdb/atomic.py +25 -0
- mdb/data/SKILL.md +95 -0
- mdb/data/__init__.py +0 -0
- mdb/discovery.py +70 -0
- mdb/filelock.py +76 -0
- mdb/formatter.py +101 -0
- mdb/init.py +150 -0
- mdb/mdb.py +1214 -0
- mdb/models.py +81 -0
- mdb/parser.py +609 -0
- mdb/puller.py +212 -0
- mdb/validators.py +46 -0
- mdb_cli-0.1.0.dist-info/METADATA +220 -0
- mdb_cli-0.1.0.dist-info/RECORD +18 -0
- mdb_cli-0.1.0.dist-info/WHEEL +4 -0
- mdb_cli-0.1.0.dist-info/entry_points.txt +2 -0
- mdb_cli-0.1.0.dist-info/licenses/LICENSE +21 -0
mdb/__init__.py
ADDED
|
File without changes
|
mdb/atomic.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
"""Atomic file write via temp-file-then-rename."""
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import tempfile
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def atomic_write(filepath: str, content: str) -> None:
    """Write ``content`` to ``filepath`` atomically.

    A temporary file is created in the destination's directory (so the
    final rename never crosses a filesystem boundary), the content is
    written there, and the target is then swapped in via ``os.replace``.
    Should anything fail, the temporary file is removed before the
    exception propagates.
    """
    target_dir = os.path.dirname(filepath) or "."
    handle, tmp_path = tempfile.mkstemp(dir=target_dir, suffix=".tmp")
    try:
        # fdopen takes ownership of the descriptor; the with-block
        # guarantees it is flushed and closed before the rename.
        with os.fdopen(handle, "w", encoding="utf-8") as out:
            out.write(content)
        os.replace(tmp_path, filepath)
    except BaseException:
        # Best-effort cleanup of the orphaned temp file, then re-raise.
        try:
            os.unlink(tmp_path)
        except OSError:
            pass
        raise
mdb/data/SKILL.md
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
---
|
|
2
|
+
name: mdb
|
|
3
|
+
description: Use mdb to query, mutate, and sync markdown table data via SQL. Invoke when working with markdown files containing mdb markers, or when the user asks to add, query, update, or report on tabular data in markdown.
|
|
4
|
+
user-invocable: true
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
# mdb Reference
|
|
8
|
+
|
|
9
|
+
`mdb` makes markdown tables data-driven and queryable via SQL, using co-located `💾 ...` markers with embedded inline SQL queries to define and dynamically view project-level data. Under the hood, datasets are ingested into SQLite backing databases, on which the SQL queries are actually run.
|
|
10
|
+
|
|
11
|
+
Interpret `$ARGUMENTS` as the user's intent and perform the appropriate mdb operation. If no arguments, show: `mdb push | mdb pull "<query>" | mdb --help`
|
|
12
|
+
|
|
13
|
+
## Marker Syntax
|
|
14
|
+
|
|
15
|
+
Markers are backtick-delimited code spans on their own line:
|
|
16
|
+
|
|
17
|
+
```
|
|
18
|
+
`💾 [scope] <directive> <sql-query>`
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
- **scope** (optional): Named scope resolving to `.mdb/<md5(scope)>.db`. Omit for implicit scope `.mdb/_.db`.
|
|
22
|
+
- **🌀** (feed): Include the table below me as part of the dataset.
|
|
23
|
+
- **💎** (tap): Execute my SQL query on the dataset and render the results as the table below me (table auto-generated if absent).
|
|
24
|
+
|
|
25
|
+
For 🌀 markers, a table **must** be present after the marker -- it defines the column schema (source of truth). For 💎 markers, the table is optional -- if absent, `mdb push` auto-generates one from query results. Column types default to TEXT; annotate with `:type` suffix (e.g. `id:integer`, `salary:real`). Valid types: TEXT, INTEGER, REAL, NUMERIC, BLOB.
|
|
26
|
+
|
|
27
|
+
## Commands
|
|
28
|
+
|
|
29
|
+
### `mdb push [query]`
|
|
30
|
+
|
|
31
|
+
Process all 🌀 markers first (pulls), then all 💎 markers after (pushes). With a DML query, mutates data between pull and push phases.
|
|
32
|
+
|
|
33
|
+
```bash
|
|
34
|
+
mdb push # standard pull-then-push
|
|
35
|
+
mdb push "INSERT INTO t VALUES (1, 'x')" # mutate implicit scope
|
|
36
|
+
mdb push "mydb 🌀 DELETE FROM t WHERE id = 1" # mutate named scope
|
|
37
|
+
mdb push -i "docs/" "UPDATE t SET x = 1" # restrict to directory
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
Push-query rules: only DML (INSERT/UPDATE/DELETE); SELECT and DDL rejected. Multi-statement queries run in a single transaction.
|
|
41
|
+
|
|
42
|
+
### `mdb pull "<query>"`
|
|
43
|
+
|
|
44
|
+
Process all 🌀 markers first (pulls), then outputs SQL query results in CSV format.
|
|
45
|
+
|
|
46
|
+
```bash
|
|
47
|
+
mdb pull "SELECT avg(salary) FROM employees" # implicit scope
|
|
48
|
+
mdb pull "company 💎 SELECT avg(salary) FROM employees" # named scope
|
|
49
|
+
mdb pull -i "docs/" "SELECT * FROM t" # restrict to directory
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
Pull rules: only SELECT permitted; DML rejected.
|
|
53
|
+
|
|
54
|
+
### Common flags
|
|
55
|
+
|
|
56
|
+
`-i "path1/ path2/"` — restrict markdown file discovery to specific directories (default: recurse from CWD).
|
|
57
|
+
|
|
58
|
+
## Key Behaviors
|
|
59
|
+
|
|
60
|
+
- 🌀 markers across all files execute first (pull phase), then 💎 markers (push phase)
|
|
61
|
+
- Cross-file write conflict: two 🌀 markers in **different files** targeting the same table → error. Same file → last wins.
|
|
62
|
+
- Push-query mutations update both 🌀 source tables and 💎 derived tables
|
|
63
|
+
- Database must already exist for 💎 markers (created by 🌀 pull)
|
|
64
|
+
|
|
65
|
+
## Example: Mixed Marker File
|
|
66
|
+
|
|
67
|
+
```markdown
|
|
68
|
+
# Employees
|
|
69
|
+
|
|
70
|
+
`💾 company 🌀 SELECT * FROM employees`
|
|
71
|
+
|
|
72
|
+
| id:integer | name | department | salary:real |
|
|
73
|
+
| ---------- | ----- | ----------- | ----------- |
|
|
74
|
+
| 1 | Alice | Engineering | 95000 |
|
|
75
|
+
| 2 | Bob | Marketing | 78000 |
|
|
76
|
+
|
|
77
|
+
## Engineering Report
|
|
78
|
+
|
|
79
|
+
`💾 company 💎 SELECT name, salary FROM employees WHERE department = 'Engineering'`
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
`mdb push` pulls seed data, then auto-generates the report table from query results.
|
|
83
|
+
|
|
84
|
+
## Error Quick Reference
|
|
85
|
+
|
|
86
|
+
| Error | Fix |
|
|
87
|
+
| ------------------------- | --------------------------------------------------------------------------------- |
|
|
88
|
+
| Database not found | 💎 needs an existing DB — run a 🌀 pull first, or check scope name |
|
|
89
|
+
| Write conflict | Two 🌀 markers in different files write same table — consolidate to one file |
|
|
90
|
+
| No table below 🌀 marker | Add markdown table with column headers after the 🌀 marker (required for pull) |
|
|
91
|
+
| No data sources found | No 🌀 markers match the scope — check scope name or use `-i` |
|
|
92
|
+
| Only SELECT/DML permitted | pull accepts SELECT only; push accepts DML only |
|
|
93
|
+
| Invalid column type | Use TEXT, INTEGER, REAL, NUMERIC, or BLOB |
|
|
94
|
+
| Database is locked | Another mdb process holds the lock — wait and retry, or check for stuck processes |
|
|
95
|
+
| mdb: command not found | Run `pipx install mdb-cli` or equivalent from the repo root |
|
mdb/data/__init__.py
ADDED
|
File without changes
|
mdb/discovery.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
"""Path resolution and file discovery for mdb CLI.
|
|
2
|
+
|
|
3
|
+
Pure-function module for resolving CLI glob patterns into a deduplicated,
|
|
4
|
+
sorted list of markdown file paths. This module has no knowledge of markers,
|
|
5
|
+
databases, or processing logic -- it operates purely on filesystem paths.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import glob
|
|
9
|
+
import os
|
|
10
|
+
import os.path
|
|
11
|
+
import pathlib
|
|
12
|
+
import sys
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _has_hidden_segment(path: str) -> bool:
|
|
16
|
+
"""Check if any component of a path is a hidden directory (starts with '.').
|
|
17
|
+
|
|
18
|
+
Args:
|
|
19
|
+
path: File path string (relative or absolute).
|
|
20
|
+
|
|
21
|
+
Returns:
|
|
22
|
+
True if any path component starts with '.' (excluding '.' itself).
|
|
23
|
+
"""
|
|
24
|
+
parts = pathlib.PurePosixPath(path).parts
|
|
25
|
+
return any(p.startswith(".") and p != "." for p in parts)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def resolve_paths(patterns: list[str]) -> tuple[list[str], list[str]]:
    """Resolve glob patterns into deduplicated, sorted markdown file paths.

    Args:
        patterns: List of glob pattern strings (e.g., ["**"], ["docs/*"]).

    Returns:
        Tuple of (files, warnings):
        files: Deduplicated, sorted list of resolved absolute .md file paths.
        warnings: List of warning messages (e.g., patterns matching zero files).
    """
    found: set[str] = set()
    warnings: list[str] = []

    for pattern in patterns:
        hits = 0
        for candidate in glob.glob(pattern, recursive=True):
            # Keep only real markdown files outside hidden directories.
            if (os.path.isfile(candidate)
                    and _is_markdown_file(candidate)
                    and not _has_hidden_segment(candidate)):
                # realpath() collapses symlinks so duplicates dedupe cleanly.
                found.add(os.path.realpath(candidate))
                hits += 1
        if hits == 0:
            warnings.append(f"Pattern matched no markdown files: {pattern}")

    return (sorted(found), warnings)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _is_markdown_file(path: str) -> bool:
|
|
62
|
+
"""Check if a path has a markdown file extension.
|
|
63
|
+
|
|
64
|
+
Args:
|
|
65
|
+
path: File path string.
|
|
66
|
+
|
|
67
|
+
Returns:
|
|
68
|
+
True if the path ends with .md (case-insensitive).
|
|
69
|
+
"""
|
|
70
|
+
return os.path.splitext(path)[1].lower() == ".md"
|
mdb/filelock.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
"""Advisory file locking for concurrency safety."""
|
|
2
|
+
|
|
3
|
+
import sys
|
|
4
|
+
import time
|
|
5
|
+
from contextlib import contextmanager
|
|
6
|
+
|
|
7
|
+
LOCK_TIMEOUT = 3 # seconds
|
|
8
|
+
LOCK_RETRY_INTERVAL = 1 # second
|
|
9
|
+
LOCK_ERROR_MSG = "mdb: file locked by another process"
|
|
10
|
+
|
|
11
|
+
if sys.platform == "win32":
|
|
12
|
+
import msvcrt
|
|
13
|
+
|
|
14
|
+
def _lock(fd):
|
|
15
|
+
"""Acquire non-blocking exclusive lock (Windows)."""
|
|
16
|
+
deadline = time.monotonic() + LOCK_TIMEOUT
|
|
17
|
+
while True:
|
|
18
|
+
try:
|
|
19
|
+
msvcrt.locking(fd.fileno(), msvcrt.LK_NBLCK, 1)
|
|
20
|
+
return
|
|
21
|
+
except OSError:
|
|
22
|
+
if time.monotonic() >= deadline:
|
|
23
|
+
raise OSError(LOCK_ERROR_MSG)
|
|
24
|
+
time.sleep(LOCK_RETRY_INTERVAL)
|
|
25
|
+
|
|
26
|
+
def _unlock(fd):
|
|
27
|
+
"""Release exclusive lock (Windows)."""
|
|
28
|
+
try:
|
|
29
|
+
msvcrt.locking(fd.fileno(), msvcrt.LK_UNLCK, 1)
|
|
30
|
+
except OSError:
|
|
31
|
+
pass
|
|
32
|
+
else:
|
|
33
|
+
import fcntl
|
|
34
|
+
|
|
35
|
+
def _lock(fd):
|
|
36
|
+
"""Acquire non-blocking exclusive lock (Unix/macOS)."""
|
|
37
|
+
deadline = time.monotonic() + LOCK_TIMEOUT
|
|
38
|
+
while True:
|
|
39
|
+
try:
|
|
40
|
+
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
|
|
41
|
+
return
|
|
42
|
+
except OSError:
|
|
43
|
+
if time.monotonic() >= deadline:
|
|
44
|
+
raise OSError(LOCK_ERROR_MSG)
|
|
45
|
+
time.sleep(LOCK_RETRY_INTERVAL)
|
|
46
|
+
|
|
47
|
+
def _unlock(fd):
|
|
48
|
+
"""Release exclusive lock (Unix/macOS)."""
|
|
49
|
+
fcntl.flock(fd, fcntl.LOCK_UN)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
@contextmanager
def batch_lock(filepaths):
    """Context manager: acquire exclusive locks on all filepaths in sorted order.

    On timeout or error, releases all previously acquired locks.
    """
    acquired = []  # open file objects whose locks we currently hold
    try:
        for path in sorted(filepaths):
            handle = open(path, "r")
            try:
                _lock(handle)
            except BaseException:
                # Lock failed: don't leak the file object before re-raising.
                handle.close()
                raise
            acquired.append(handle)
        yield
    finally:
        # Release in reverse acquisition order; ignore unlock errors but
        # always close the underlying file.
        for handle in reversed(acquired):
            try:
                _unlock(handle)
            except OSError:
                pass
            handle.close()
|
mdb/formatter.py
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
"""Markdown table generation from query result data."""
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def format_markdown_table(
    columns: list[str],
    rows: list[list[str]],
    compaction: str = "full",
) -> str:
    """Render columns and rows as a formatted markdown table string.

    Args:
        columns: Column header names.
        rows: Row data (list of cell value lists).
        compaction: "fit" for space-aligned columns; "full" (the default,
            and the fallback for any unrecognized value) for the compact
            form with no alignment padding.

    In "fit" mode, column widths are the max of header and cell widths
    (minimum 3 chars), cells are left-aligned and space-padded. In "full"
    mode, non-empty cells render as ``| value |``, empty cells as ``||``,
    and the separator is ``| --- |`` per column. Neither mode emits a
    trailing newline; zero rows produce header + separator only.
    """
    renderer = _format_fit if compaction == "fit" else _format_full
    return renderer(columns, rows)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _format_fit(columns: list[str], rows: list[list[str]]) -> str:
|
|
36
|
+
"""Format a markdown table with aligned columns (fit compaction)."""
|
|
37
|
+
# Calculate column widths
|
|
38
|
+
widths = [max(3, len(col)) for col in columns]
|
|
39
|
+
for row in rows:
|
|
40
|
+
for i, cell in enumerate(row):
|
|
41
|
+
if i < len(widths):
|
|
42
|
+
widths[i] = max(widths[i], len(cell))
|
|
43
|
+
|
|
44
|
+
# Build header row
|
|
45
|
+
header_cells = [col.ljust(widths[i]) for i, col in enumerate(columns)]
|
|
46
|
+
header = "| " + " | ".join(header_cells) + " |"
|
|
47
|
+
|
|
48
|
+
# Build separator row
|
|
49
|
+
sep_cells = ["-" * widths[i] for i in range(len(columns))]
|
|
50
|
+
separator = "| " + " | ".join(sep_cells) + " |"
|
|
51
|
+
|
|
52
|
+
# Build data rows
|
|
53
|
+
lines = [header, separator]
|
|
54
|
+
for row in rows:
|
|
55
|
+
data_cells = []
|
|
56
|
+
for i in range(len(columns)):
|
|
57
|
+
cell = row[i] if i < len(row) else ""
|
|
58
|
+
data_cells.append(cell.ljust(widths[i]))
|
|
59
|
+
lines.append("| " + " | ".join(data_cells) + " |")
|
|
60
|
+
|
|
61
|
+
return "\n".join(lines)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _format_full(columns: list[str], rows: list[list[str]]) -> str:
|
|
65
|
+
"""Format a markdown table with no alignment padding (full compaction)."""
|
|
66
|
+
# Build header row -- non-empty cells get single space padding
|
|
67
|
+
header_parts = []
|
|
68
|
+
for col in columns:
|
|
69
|
+
if col:
|
|
70
|
+
header_parts.append(f"| {col} ")
|
|
71
|
+
else:
|
|
72
|
+
header_parts.append("||")
|
|
73
|
+
header = "".join(header_parts) + "|" if header_parts else "|"
|
|
74
|
+
# Fix: if last part was a non-empty cell, it already ends with space
|
|
75
|
+
# We just need trailing |
|
|
76
|
+
# Actually, let's build it more carefully
|
|
77
|
+
header_cells = []
|
|
78
|
+
for col in columns:
|
|
79
|
+
if col:
|
|
80
|
+
header_cells.append(f" {col} ")
|
|
81
|
+
else:
|
|
82
|
+
header_cells.append("")
|
|
83
|
+
header = "|" + "|".join(header_cells) + "|"
|
|
84
|
+
|
|
85
|
+
# Build separator row -- exactly 3 dashes per column, space-padded
|
|
86
|
+
sep_cells = [" --- "] * len(columns)
|
|
87
|
+
separator = "|" + "|".join(sep_cells) + "|"
|
|
88
|
+
|
|
89
|
+
# Build data rows
|
|
90
|
+
lines = [header, separator]
|
|
91
|
+
for row in rows:
|
|
92
|
+
data_cells = []
|
|
93
|
+
for i in range(len(columns)):
|
|
94
|
+
cell = row[i] if i < len(row) else ""
|
|
95
|
+
if cell:
|
|
96
|
+
data_cells.append(f" {cell} ")
|
|
97
|
+
else:
|
|
98
|
+
data_cells.append("")
|
|
99
|
+
lines.append("|" + "|".join(data_cells) + "|")
|
|
100
|
+
|
|
101
|
+
return "\n".join(lines)
|
mdb/init.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""Initialize the mdb skill in the current project.
|
|
2
|
+
|
|
3
|
+
Copies the bundled SKILL.md file into <dir>/mdb/SKILL.md relative
|
|
4
|
+
to the current working directory, where <dir> defaults to .mdb/skills.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import importlib.resources
|
|
8
|
+
import os
|
|
9
|
+
import sys
|
|
10
|
+
from dataclasses import dataclass
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass
class InitResult:
    """Outcome of an init operation.

    Returned by init_skill() so callers can branch on what happened
    without parsing the printed output.
    """
    # True when the operation completed without error ("skipped" and
    # "up_to_date" outcomes still count as success).
    success: bool
    # Path of the skill file; may be "" when it could not be determined
    # (e.g. the bundled skill failed to load).
    target_path: str
    action: str  # "created", "updated", "up_to_date", "skipped", "error"
    # Human-readable failure description; None for non-error actions.
    error: str | None = None
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
# ---------------------------------------------------------------------------
|
|
23
|
+
# Helper functions (T004, T005)
|
|
24
|
+
# ---------------------------------------------------------------------------
|
|
25
|
+
|
|
26
|
+
def _load_bundled_skill() -> str | None:
|
|
27
|
+
"""Load the bundled SKILL.md content from the package data directory.
|
|
28
|
+
|
|
29
|
+
Returns the content as a string, or None if the file cannot be loaded.
|
|
30
|
+
"""
|
|
31
|
+
try:
|
|
32
|
+
return (
|
|
33
|
+
importlib.resources.files("mdb.data")
|
|
34
|
+
.joinpath("SKILL.md")
|
|
35
|
+
.read_text(encoding="utf-8")
|
|
36
|
+
)
|
|
37
|
+
except Exception:
|
|
38
|
+
return None
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _prompt_overwrite(rel_path: str) -> bool:
    """Prompt the user for confirmation to overwrite an existing file.

    Returns True only for an explicit yes ("y"/"yes", case-insensitive);
    empty input, anything else, or EOF on stdin counts as no.
    """
    print(f"{rel_path} already exists and differs from the bundled version.")
    try:
        answer = input("Overwrite? [y/N]: ")
    except EOFError:
        # Non-interactive stdin: default to "do not overwrite".
        return False
    return answer.strip().lower() in ("y", "yes")
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
# ---------------------------------------------------------------------------
|
|
59
|
+
# Main orchestration (T006)
|
|
60
|
+
# ---------------------------------------------------------------------------
|
|
61
|
+
|
|
62
|
+
def _fail(target_path: str, error: str) -> InitResult:
    """Print *error* to stderr and build the corresponding failed InitResult."""
    print(f"Error: {error}", file=sys.stderr)
    return InitResult(success=False, target_path=target_path,
                      action="error", error=error)


def init_skill(target_dir: str = ".mdb/skills", force: bool = False) -> InitResult:
    """Install the bundled mdb skill file into the current project.

    Args:
        target_dir: Parent directory for the skill file. The tool always
            appends mdb/SKILL.md to this directory.
        force: If True, overwrite existing file without prompting.

    Returns:
        InitResult with success status and action taken.
    """
    # Step 1: Load bundled skill file
    bundled_content = _load_bundled_skill()
    if bundled_content is None:
        return _fail("", "Could not load bundled skill file: "
                         "package installation may be corrupted")

    # Step 2: Determine target path (absolute for I/O, relative for messages)
    target_path = os.path.join(os.getcwd(), target_dir, "mdb", "SKILL.md")
    display_path = os.path.join(target_dir, "mdb", "SKILL.md")
    target_parent = os.path.dirname(target_path)

    # Step 3: Check existing file
    if os.path.exists(target_path):
        if not os.path.isfile(target_path):
            return _fail(target_path, f"{display_path} exists but is not a file")

        try:
            with open(target_path, "r", encoding="utf-8") as f:
                existing_content = f.read()
        except OSError as e:
            return _fail(target_path, f"Cannot read {display_path}: {e}")

        if existing_content == bundled_content:
            print(f"mdb skill is already up to date at {display_path}")
            return InitResult(success=True, target_path=target_path,
                              action="up_to_date")

        # Content differs: confirm unless forced.
        if not force:
            if not _prompt_overwrite(display_path):
                print("Skipped (no changes made)")
                return InitResult(success=True, target_path=target_path,
                                  action="skipped")

        is_update = True
    else:
        is_update = False

    # Step 4: Create directory structure
    try:
        os.makedirs(target_parent, exist_ok=True)
    except OSError as e:
        return _fail(target_path,
                     f"Cannot create directory {os.path.dirname(display_path)}/: {e}")

    # Step 5: Write skill file
    try:
        with open(target_path, "w", encoding="utf-8") as f:
            f.write(bundled_content)
    except OSError as e:
        return _fail(target_path, f"Cannot write to {display_path}: {e}")

    # Step 6: Print status and return
    if is_update:
        print(f"Updated mdb skill at {display_path}")
        return InitResult(success=True, target_path=target_path,
                          action="updated")
    print(f"Installed mdb skill to {display_path}")
    return InitResult(success=True, target_path=target_path,
                      action="created")
|