contree-mcp 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. contree_mcp/__init__.py +0 -0
  2. contree_mcp/__main__.py +25 -0
  3. contree_mcp/app.py +240 -0
  4. contree_mcp/arguments.py +35 -0
  5. contree_mcp/auth/__init__.py +2 -0
  6. contree_mcp/auth/registry.py +236 -0
  7. contree_mcp/backend_types.py +301 -0
  8. contree_mcp/cache.py +208 -0
  9. contree_mcp/client.py +711 -0
  10. contree_mcp/context.py +53 -0
  11. contree_mcp/docs.py +1203 -0
  12. contree_mcp/file_cache.py +381 -0
  13. contree_mcp/prompts.py +238 -0
  14. contree_mcp/py.typed +0 -0
  15. contree_mcp/resources/__init__.py +17 -0
  16. contree_mcp/resources/guide.py +715 -0
  17. contree_mcp/resources/image_lineage.py +46 -0
  18. contree_mcp/resources/image_ls.py +32 -0
  19. contree_mcp/resources/import_operation.py +52 -0
  20. contree_mcp/resources/instance_operation.py +52 -0
  21. contree_mcp/resources/read_file.py +33 -0
  22. contree_mcp/resources/static.py +12 -0
  23. contree_mcp/server.py +77 -0
  24. contree_mcp/tools/__init__.py +39 -0
  25. contree_mcp/tools/cancel_operation.py +36 -0
  26. contree_mcp/tools/download.py +128 -0
  27. contree_mcp/tools/get_guide.py +54 -0
  28. contree_mcp/tools/get_image.py +30 -0
  29. contree_mcp/tools/get_operation.py +26 -0
  30. contree_mcp/tools/import_image.py +99 -0
  31. contree_mcp/tools/list_files.py +80 -0
  32. contree_mcp/tools/list_images.py +50 -0
  33. contree_mcp/tools/list_operations.py +46 -0
  34. contree_mcp/tools/read_file.py +47 -0
  35. contree_mcp/tools/registry_auth.py +71 -0
  36. contree_mcp/tools/registry_token_obtain.py +80 -0
  37. contree_mcp/tools/rsync.py +46 -0
  38. contree_mcp/tools/run.py +97 -0
  39. contree_mcp/tools/set_tag.py +31 -0
  40. contree_mcp/tools/upload.py +50 -0
  41. contree_mcp/tools/wait_operations.py +79 -0
  42. contree_mcp-0.1.0.dist-info/METADATA +450 -0
  43. contree_mcp-0.1.0.dist-info/RECORD +46 -0
  44. contree_mcp-0.1.0.dist-info/WHEEL +4 -0
  45. contree_mcp-0.1.0.dist-info/entry_points.txt +2 -0
  46. contree_mcp-0.1.0.dist-info/licenses/LICENSE +176 -0
@@ -0,0 +1,381 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import re
5
+ import uuid
6
+ from collections.abc import Iterable
7
+ from dataclasses import dataclass, field
8
+ from datetime import datetime, timedelta, timezone
9
+ from pathlib import Path
10
+
11
+ import aiosqlite
12
+
13
+ from contree_mcp.client import ContreeClient
14
+
15
+
16
@dataclass(frozen=True)
class FileState:
    """Immutable fingerprint of a local file.

    Equality and hashing deliberately ignore ``uuid`` and ``sha256``
    (``compare=False``), so a freshly stat()-ed file compares equal to its
    cached database row whenever size/mtime/inode/mode are unchanged.
    """

    path: Path
    size: int
    mtime_ns: int
    ino: int
    mode: int
    # Only populated for rows loaded from the cache DB; excluded from comparison.
    uuid: str | None = field(default=None, compare=False)
    sha256: str | None = field(default=None, compare=False)

    @classmethod
    def from_path(cls, path: Path) -> FileState:
        """Snapshot *path* via ``stat()``; ``uuid``/``sha256`` stay unset."""
        info = path.stat()
        return cls(
            path=path,
            size=info.st_size,
            mtime_ns=info.st_mtime_ns,
            ino=info.st_ino,
            mode=info.st_mode,
        )

    @classmethod
    def from_row(cls, row: aiosqlite.Row) -> FileState:
        """Rehydrate a FileState from a ``files`` table row.

        The ``mtime`` column holds the nanosecond value written from
        ``mtime_ns`` at upload time.
        """
        return cls(
            path=Path(row["path"]),
            size=row["size"],
            mtime_ns=row["mtime"],
            ino=row["ino"],
            mode=row["mode"],
            uuid=row["uuid"],
            sha256=row["sha256"],
        )
42
+
43
+
44
@dataclass(frozen=True)
class DirectoryState:
    """Metadata for one synced directory (a ``directory_state`` row)."""

    id: int
    name: str | None
    destination: str | None = None
    files: tuple[FileState, ...] = ()

    @classmethod
    def from_row(cls, row: aiosqlite.Row, files: Iterable[FileState] = ()) -> DirectoryState:
        """Build a DirectoryState from a DB row plus an optional file list.

        Rows selected without a ``destination`` column still load: sqlite3.Row
        has no ``.get()`` (and no ``in`` support), so the column list is probed
        via ``.keys()`` before indexing.
        """
        columns = row.keys()
        dest = row["destination"] if "destination" in columns else None
        return cls(id=row["id"], name=row["name"], destination=dest, files=tuple(files))
57
+
58
+
59
@dataclass(frozen=True)
class DirectoryStateFile:
    """A file within a directory state - for run.py compatibility."""

    # UUID of the uploaded blob (matches the `uuid` column in the cache DB).
    file_uuid: str
    # Path the file should appear at inside the container.
    target_path: str
    # Unix mode bits to apply at target_path.
    target_mode: int
66
+
67
+
68
class FileCache:
    """SQLite-backed cache of uploaded files and synced directory states.

    Tracks which local files were already uploaded (fingerprinted by
    size/mtime/inode/mode) so re-syncing an unchanged directory is a no-op.
    Use as an async context manager or via :meth:`open`/:meth:`close`;
    all queries share one aiosqlite connection.
    """

    DEFAULT_PATH = Path.home() / ".cache" / "contree" / "filesync.db"

    SCHEMA = """
    CREATE TABLE IF NOT EXISTS files (
        id INTEGER PRIMARY KEY,
        path TEXT UNIQUE NOT NULL,
        symlink_to TEXT,
        size INTEGER NOT NULL,
        mtime INTEGER NOT NULL,
        ino INTEGER NOT NULL,
        mode INTEGER NOT NULL,
        sha256 TEXT NOT NULL,
        uuid TEXT NOT NULL,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
    );
    CREATE INDEX IF NOT EXISTS idx_files_sha256 ON files(sha256);

    CREATE TABLE IF NOT EXISTS directory_state (
        id INTEGER PRIMARY KEY,
        uuid TEXT NOT NULL,
        name TEXT,
        destination TEXT NOT NULL,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL
    );

    CREATE UNIQUE INDEX IF NOT EXISTS idx_directory_state_uuid ON directory_state(uuid);

    CREATE TABLE IF NOT EXISTS directory_state_file (
        id INTEGER PRIMARY KEY,
        state_id INTEGER NOT NULL REFERENCES directory_state(id) ON DELETE CASCADE,
        uuid TEXT NOT NULL,
        target_path TEXT NOT NULL,
        target_mode INTEGER NOT NULL,
        UNIQUE(state_id, target_path)
    );
    """

    # Maximum number of concurrent uploads to the backend.
    UPLOAD_CONCURRENCY = 10
    # Backward-compatible alias for the historical misspelling; __init__ reads
    # the old name so subclasses that override it keep working.
    UPLAOD_CONCURRENCY = UPLOAD_CONCURRENCY

    def __init__(self, db_path: Path | None = None, retention_days: int = 120) -> None:
        """
        Args:
            db_path: SQLite file location (defaults to DEFAULT_PATH).
            retention_days: Age after which :meth:`retain` purges records;
                values <= 0 disable purging.
        """
        self.db_path = db_path or self.DEFAULT_PATH
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self.retention_days = retention_days
        self.__conn: aiosqlite.Connection | None = None
        self.__lock = asyncio.Lock()
        self.__upload_semaphore = asyncio.Semaphore(self.UPLAOD_CONCURRENCY)

    @property
    def conn(self) -> aiosqlite.Connection:
        """The open connection; raises RuntimeError before open() / after close()."""
        if self.__conn is None:
            raise RuntimeError("FileCache is not opened")
        return self.__conn

    async def open(self) -> None:
        """Open the database, apply pragmas, and create the schema if needed."""
        async with self.__lock:
            if self.__conn is not None:
                raise RuntimeError(f"{self.__class__.__name__} already opened")
            conn = await aiosqlite.connect(str(self.db_path))
            conn.row_factory = aiosqlite.Row
            await conn.execute("PRAGMA journal_mode=WAL")
            # Required for the ON DELETE CASCADE on directory_state_file.
            await conn.execute("PRAGMA foreign_keys=ON")
            await conn.executescript(self.SCHEMA)
            await conn.commit()
            self.__conn = conn

    async def close(self) -> None:
        """Close the connection; safe to call when never opened."""
        async with self.__lock:
            if not self.__conn:
                return
            await self.conn.close()
            self.__conn = None

    async def __aenter__(self) -> FileCache:
        await self.open()
        return self

    async def __aexit__(self, *args: object) -> None:
        await self.close()

    @staticmethod
    def traverse_directory_files(root: Path, excludes: Iterable[str]) -> set[FileState]:
        """
        Traverse directory and return set of file states, excluding patterns.
        Patterns are strings and can contain * and ? for matching.
        """
        patterns = []
        for pattern in excludes:
            # Escape all regex metacharacters first, then restore the two glob
            # wildcards. The previous version only escaped ".", so patterns
            # containing e.g. "+" or "(" produced broken or wrong regexes.
            escaped = re.escape(pattern).replace(r"\*", ".*").replace(r"\?", ".")
            patterns.append(re.compile(escaped, re.IGNORECASE))

        result = []
        for path in root.rglob("*"):
            relative_path = path.relative_to(root)
            # search() instead of the previous anchored match(): a pattern like
            # "__pycache__" must also exclude nested occurrences such as
            # "pkg/__pycache__/x.pyc" (the workflow prompts advise exactly that).
            if any(p.search(str(relative_path)) for p in patterns):
                continue
            if path.is_file():
                result.append(FileState.from_path(path))
        return set(result)

    async def get_synced_directory_files(self, directory_state: int) -> set[FileState]:
        """Return the FileStates recorded for *directory_state* (empty if none)."""
        async with self.conn.execute(
            """
            SELECT f.* FROM directory_state ds
            JOIN directory_state_file dsf ON ds.id = dsf.state_id
            JOIN files f ON dsf.uuid = f.uuid
            WHERE ds.id = ?
            """,
            (directory_state,),
        ) as cursor:
            rows = await cursor.fetchall()
            if not rows:
                return set()
            return {FileState.from_row(row) for row in rows}

    async def _upload_file(self, client: ContreeClient, file_state: FileState) -> FileState:
        """Upload one file and upsert its metadata row; returns the stored state."""
        async with self.__upload_semaphore:
            # Close the handle deterministically once the upload finishes;
            # the previous version leaked it until garbage collection.
            with file_state.path.open("rb") as fh:
                output = await client.upload_file(fh)
            path_str = str(file_state.path)
            await self.conn.execute(
                """
                INSERT INTO files (path, size, mtime, ino, mode, sha256, uuid) VALUES (?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT (path) DO UPDATE SET
                    size = excluded.size,
                    mtime = excluded.mtime,
                    ino = excluded.ino,
                    mode = excluded.mode,
                    sha256 = excluded.sha256,
                    uuid = excluded.uuid
                """,
                (
                    path_str,
                    file_state.size,
                    file_state.mtime_ns,
                    file_state.ino,
                    file_state.mode,
                    output.sha256,
                    output.uuid,
                ),
            )
            await self.conn.commit()
            # Query by unique path instead of lastrowid, which is unreliable with ON CONFLICT
            async with self.conn.execute("""SELECT * FROM files WHERE path = ?""", (path_str,)) as cursor:
                row = await cursor.fetchone()
                if row is None:
                    raise RuntimeError("Failed to retrieve uploaded file from database")
                return FileState.from_row(row)

    async def _update_synced_directory(
        self,
        client: ContreeClient,
        directory_state: int,
        local_files: set[FileState],
        synced_files: set[FileState],
        root: Path,
        destination: str,
    ) -> int:
        """Re-upload changed files and rewrite the file list of an existing state."""
        async with self.__lock:
            to_upload = local_files - synced_files  # New or modified local files
            tasks = []
            for file_state in to_upload:
                tasks.append(self._upload_file(client, file_state))
            uploaded_files = await asyncio.gather(*tasks)
            # Get unchanged files from synced_files (which have uuid populated).
            # Cannot use set.intersection() as it may return elements from local_files
            # (which have uuid=None) depending on set sizes.
            non_changed_files = [f for f in synced_files if f in local_files]

            # Rewriting the whole list also drops files deleted locally.
            await self.conn.execute("""DELETE FROM directory_state_file WHERE state_id = ?""", (directory_state,))
            for file_state in list(uploaded_files) + list(non_changed_files):
                relative_path = file_state.path.relative_to(root)
                target_path = f"{destination}/{relative_path}"
                await self.conn.execute(
                    """
                    INSERT INTO directory_state_file (state_id, uuid, target_path, target_mode) VALUES (?, ?, ?, ?)
                    """,
                    (directory_state, file_state.uuid, target_path, file_state.mode),
                )
            await self.conn.commit()
            return directory_state

    async def _sync_new_directory(
        self,
        client: ContreeClient,
        local_files: set[FileState],
        path_uuid: str,
        root: Path,
        destination: str,
        name: str | None = None,
    ) -> int:
        """Create a new directory state, upload every file, and record the list."""
        async with self.__lock:
            cursor = await self.conn.execute(
                """INSERT INTO directory_state (uuid, name, destination) VALUES (?, ?, ?)""",
                (path_uuid, name, destination),
            )
            directory_state_id = cursor.lastrowid
            if directory_state_id is None:
                raise RuntimeError("Failed to get lastrowid after insert")
            tasks = []
            for file_state in local_files:
                tasks.append(self._upload_file(client, file_state))

            uploaded_files = await asyncio.gather(*tasks)

            for file_state in uploaded_files:
                relative_path = file_state.path.relative_to(root)
                target_path = f"{destination}/{relative_path}"
                await self.conn.execute(
                    """
                    INSERT INTO directory_state_file (state_id, uuid, target_path, target_mode) VALUES (?, ?, ?, ?)
                    """,
                    (directory_state_id, file_state.uuid, target_path, file_state.mode),
                )
            await self.conn.commit()
            return directory_state_id

    async def sync_directory(
        self,
        client: ContreeClient,
        path: Path,
        destination: str,
        excludes: Iterable[str] = (),
        name: str | None = None,
    ) -> int:
        """
        Sync directory files, excluding patterns, and return directory_state_id.
        If the directory paths was not changed since last sync, return last cached state.

        Args:
            client: Contree API client
            path: Local directory path to sync
            destination: Container destination path (e.g., "/app")
            excludes: Patterns to exclude
            name: Optional human-readable label

        Returns:
            Directory state ID (int)
        """
        path = path.resolve()
        destination = destination.rstrip("/")
        excludes_list = frozenset(excludes)
        # Deterministic identity for (path, destination, excludes): the UUID5
        # of a synthetic URL, so the same sync config maps to the same state.
        path_url = f"file://{path.as_posix()}?dest={destination}&" + "&".join(sorted(excludes_list))
        path_uuid = str(uuid.uuid5(uuid.NAMESPACE_URL, path_url))
        local_files = self.traverse_directory_files(path, excludes_list)

        directory_state: int | None = None
        async with self.conn.execute("""SELECT * FROM directory_state WHERE uuid = ?""", (path_uuid,)) as cursor:
            row = await cursor.fetchone()
            directory_state = None if row is None else row["id"]

        if directory_state is not None:
            synced_files = await self.get_synced_directory_files(directory_state)
            if local_files == synced_files:
                # Fingerprints unchanged (FileState equality ignores uuid/sha256).
                return int(directory_state)

            return await self._update_synced_directory(
                client, directory_state, local_files, synced_files, path, destination
            )
        else:
            return await self._sync_new_directory(client, local_files, path_uuid, path, destination, name)

    async def get_directory_state(self, ds_id: int) -> DirectoryState | None:
        """Get directory state metadata.

        Args:
            ds_id: Directory state ID

        Returns:
            DirectoryState if found, None otherwise
        """
        async with self.conn.execute(
            """SELECT id, name, destination FROM directory_state WHERE id = ?""",
            (ds_id,),
        ) as cursor:
            row = await cursor.fetchone()
            if row is None:
                return None
            return DirectoryState.from_row(row)

    async def get_directory_state_files(self, ds_id: int) -> list[DirectoryStateFile]:
        """Get files in a directory state.

        Args:
            ds_id: Directory state ID

        Returns:
            List of files in the directory state
        """
        async with self.conn.execute(
            """
            SELECT uuid, target_path, target_mode
            FROM directory_state_file
            WHERE state_id = ?
            """,
            (ds_id,),
        ) as cursor:
            rows = await cursor.fetchall()
            return [
                DirectoryStateFile(
                    file_uuid=row["uuid"],
                    target_path=row["target_path"],
                    target_mode=row["target_mode"],
                )
                for row in rows
            ]

    async def retain(self) -> None:
        """Delete records older than retention_days. Call explicitly at app startup."""
        if self.retention_days <= 0:
            return
        # ``created_at`` defaults to CURRENT_TIMESTAMP, which SQLite stores as
        # "YYYY-MM-DD HH:MM:SS" in UTC with no zone suffix. Format the cutoff
        # identically; the previous isoformat() value used a "T" separator and
        # a "+00:00" offset, corrupting the lexicographic comparison.
        cutoff = (datetime.now(timezone.utc) - timedelta(days=self.retention_days)).strftime("%Y-%m-%d %H:%M:%S")
        await self.conn.execute("DELETE FROM files WHERE created_at < ?", (cutoff,))
        await self.conn.execute("DELETE FROM directory_state WHERE created_at < ?", (cutoff,))
        await self.conn.commit()
contree_mcp/prompts.py ADDED
@@ -0,0 +1,238 @@
1
+ """Contree MCP prompts for common workflows."""
2
+
3
+ from textwrap import dedent
4
+
5
+
6
+ def _prompt(text: str) -> str:
7
+ return dedent(text).strip()
8
+
9
+
10
def run_python(code: str) -> str:
    """Run Python code in an isolated container.

    Returns prompt text that steers the agent toward reusing cached images
    and treats `import_image` as the last resort.
    """
    return _prompt(
        f"""
        Run this Python code in a container.

        Avoid `import_image` unless no suitable base exists
        (it's the most expensive operation).

        1. Look for Python-ready images:
        `list_images(tag_prefix="common/python")`
        `list_images(tag_prefix="python")`
        2. If none, look for Ubuntu:
        `list_images(tag_prefix="ubuntu")`
        3. If Ubuntu exists, install Python once (disposable=false),
        then tag it. Example sequence:
        `run("apt-get update", image=..., disposable=false)`
        `run("apt-get install -y python3 python3-pip", image=..., disposable=false)`
        `set_tag(..., tag="common/python/ubuntu:22.04")`
        4. Only if no suitable base exists, `import_image`:
        `import_image(registry_url="docker://docker.io/library/ubuntu:22.04", tag="ubuntu:22.04")`
        Then do step 3.
        5. `run` the code with the resulting image (UUID or `tag:...`):

        ```python
        {code}
        ```
        """
    )
39
+
40
+
41
def run_shell(command: str, image: str = "ubuntu:noble") -> str:
    """Run a shell command in an isolated container.

    *image* serves both as the tag prefix to search for and as the
    fallback to import if nothing suitable exists.
    """
    return _prompt(
        f"""
        Run this command in a container.

        1. Prefer existing images:
        `list_images(tag_prefix="{image.split(":")[0]}")`
        2. If a suitable image exists, use it by UUID or `tag:{image}`.
        3. Only if nothing suitable exists, `import_image`:
        `import_image(registry_url="docker://docker.io/library/{image}", tag="{image}")`
        4. Use `run` to execute:

        ```bash
        {command}
        ```
        """
    )
59
+
60
+
61
def sync_and_run(source: str, command: str, image: str = "alpine:latest") -> str:
    """Sync local files to container and run a command.

    *source* must be an absolute local path; it is synced to /app.
    """
    return _prompt(
        f"""
        Sync files and run a command.

        1. `rsync` `{source}` to `/app` (absolute path required).
        Exclude `__pycache__`, `.git`, `node_modules`, `.venv`.
        2. Prefer a Python-ready image (`tag:{image}`)
        or a tagged Ubuntu+Python image.
        3. If only Ubuntu exists, install Python once and tag it.
        4. Only import a base image if none exist.
        5. `run` with the chosen image, `directory_state_id` from rsync,
        and `cwd="/app"`:

        ```bash
        {command}
        ```
        """
    )
81
+
82
+
83
def install_packages(packages: str, image: str = "ubuntu:noble") -> str:
    """Install packages and create a reusable image.

    *packages* is passed verbatim to `pip install`; the result is kept
    (disposable=false) so it can be tagged and reused.
    """
    return _prompt(
        f"""
        Install packages and keep the image.

        1. Prefer existing images; avoid `import_image` if possible.
        2. If you only have Ubuntu, install Python once and tag it.
        3. Use base image `{image}` (or your Ubuntu+Python tag).
        4. `run` `pip install {packages}` with `disposable=false`.
        5. Use `result_image` directly, or `set_tag` for reuse.

        Tagging rules (explicit):
        - Format: `{{scope}}/{{purpose}}/{{base}}` where base includes its tag.
        Example: `common/python/ubuntu:noble`
        - Scope: `common` for reusable deps, or a project name for
        project-specific images.
        - Purpose: describe what you added (e.g., `python-ml`, `web-deps`).
        """
    )
103
+
104
+
105
def parallel_tasks(tasks: str, image: str = "ubuntu:noble") -> str:
    """Run multiple tasks in parallel.

    *tasks* is a newline-separated list embedded verbatim into the prompt;
    the agent is told to fan out with wait=false and then wait_operations.
    """
    return _prompt(
        f"""
        Run these tasks in parallel (one per line):
        {tasks}

        1. Ensure image `{image}` exists (list_images -> import_image as last resort).
        2. Call `run` for each task with `wait=false`.
        3. Collect `operation_id` values and call `wait_operations`.
        """
    )
117
+
118
+
119
def build_project(
    source: str,
    install_cmd: str = "pip install -e .",
    test_cmd: str = "pytest",
) -> str:
    """Build a project: install dependencies and run tests.

    The install step keeps its image (disposable=false) so the test step
    can run on top of the installed dependencies.
    """
    return _prompt(
        f"""
        Build and test the project.

        1. `rsync` `{source}` to `/app` (absolute path required).
        2. Prefer existing Python-ready images.
        If only Ubuntu exists, install Python once and tag it.
        3. Only import a base image if none exist.
        4. `run` `{install_cmd}` with `disposable=false`, `cwd="/app"`.
        5. `run` `{test_cmd}` on the deps image with `cwd="/app"`.
        """
    )
137
+
138
+
139
def debug_failure(operation_id: str) -> str:
    """Diagnose a failed command and suggest fixes.

    Walks the agent through inspecting exit_code/stderr/stdout for the
    given operation and lists the most common remediations.
    """
    return _prompt(
        f"""
        Debug the failed operation `{operation_id}`.

        1. `get_operation` and check `exit_code`
        (state can be SUCCESS even if exit_code != 0).
        2. Read `stderr` and `stdout`; note `timed_out` if present.
        3. Common fixes:
        missing image -> `list_images`/`import_image`
        bad tag -> use `tag:...`
        missing `directory_state_id` -> re-run `rsync`
        timeout -> increase `timeout` or use `wait=false` + poll
        """
    )
155
+
156
+
157
def inspect_image(image: str) -> str:
    """Explore the contents of a container image.

    Prefers the no-VM tools (list_files/read_file); falls back to a
    disposable `run` only when a command must execute.
    """
    return _prompt(
        f"""
        Inspect the container image `{image}`.

        1. Prefer no-VM tools:
        `list_files(image=...)` and `read_file(image=...)`.
        2. If you need commands, use `run` with `disposable=true`
        (e.g., `cat /etc/os-release`, `which python`).
        """
    )
169
+
170
+
171
def multi_stage_build(
    source: str,
    install_cmd: str = "pip install -e .",
    build_cmd: str = "python -m build",
    test_cmd: str = "pytest",
) -> str:
    """Multi-stage build with rollback points.

    Each non-disposable stage yields a `result_image` UUID that can be
    used as a checkpoint to roll back to.
    """
    return _prompt(
        f"""
        Execute a multi-stage build with checkpoints.

        1. `rsync` `{source}` to `/app` (absolute path required).
        2. Prefer existing Python-ready images.
        If only Ubuntu exists, install Python once and tag it.
        3. Only import a base image if none exist.
        4. `run` `{install_cmd}` with `disposable=false`, `cwd="/app"`.
        5. `run` `{build_cmd}` with `disposable=false`, `cwd="/app"`.
        6. `run` `{test_cmd}` on the build image with `cwd="/app"`.
        7. Keep each `result_image` UUID as a rollback point.
        """
    )
192
+
193
+
194
def prepare_environment(
    task: str,
    base: str = "python:3.11-slim",
    project: str | None = None,
    packages: str | None = None,
) -> str:
    """Prepare a container environment for a task, checking for existing images first.

    Args:
        task: Human-readable description of the work to run.
        base: Base image to import only as a last resort.
        project: Tag scope; defaults to the shared "common" scope.
        packages: Space-separated pip requirement strings used to derive
            the tag's purpose segment.

    Returns:
        Prompt text guiding the agent through image reuse, setup, and tagging.
    """
    scope = project if project else "common"
    purpose = "custom-env"
    if packages:
        # Derive purpose from the first package name. Cut at the first extras
        # bracket or version-specifier operator; the previous version only
        # handled "==" and "[", so "numpy>=1.26" produced "numpy>=1.26-env".
        first_pkg = packages.split()[0]
        for sep in "[=<>!~":
            first_pkg = first_pkg.split(sep, 1)[0]
        purpose = f"{first_pkg}-env"

    tag = f"{scope}/{purpose}/{base}"

    packages_info = f"\n    Packages to install: `{packages}`" if packages else ""

    return _prompt(
        f"""
        Prepare an environment for: {task}

        1. Search existing images:
        `list_images(tag_prefix="{scope}/")`
        2. Prefer reusing any existing base (Python-ready or Ubuntu).
        3. If only Ubuntu exists, install Python once and tag it.
        4. Only if no suitable base exists, `import_image`:
        `import_image(registry_url="docker://docker.io/library/{base}")`
        5. `run` `pip install {packages if packages else "<required packages>"}` with `disposable=false`.
        6. `set_tag` the `result_image` as `{tag}`.
        7. Run the task with `run(image="tag:{tag}")`.

        Task details:
        - Task: {task}
        - Base image: `{base}`{packages_info}
        - Scope: `{scope}` ({"project-specific" if project else "common/reusable"})
        - Suggested tag: `{tag}`

        Tagging rules (explicit):
        - Format: `{{scope}}/{{purpose}}/{{base}}` where base includes its tag.
        - Scope: `common` for reusable deps, or a project name for
        project-specific images.
        - Purpose: describe what you added (e.g., `python-ml`, `web-deps`).
        """
    )
contree_mcp/py.typed ADDED
File without changes
@@ -0,0 +1,17 @@
1
+ from .guide import SECTIONS
2
+ from .image_lineage import image_lineage
3
+ from .image_ls import image_ls
4
+ from .import_operation import import_operation
5
+ from .instance_operation import instance_operation
6
+ from .read_file import read_file
7
+ from .static import StaticResource
8
+
9
# Names re-exported by `from contree_mcp.resources import *`.
__all__ = [
    "SECTIONS",
    "StaticResource",
    "read_file",
    "image_lineage",
    "image_ls",
    "import_operation",
    "instance_operation",
]