framework_m_studio-0.2.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,318 @@
1
+ """Documentation Generator for Framework M.
2
+
3
+ Generates API documentation from DocTypes and Controllers.
4
+ Supports markdown output and MkDocs integration.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import json
10
+ from pathlib import Path
11
+ from typing import Any
12
+ from urllib.request import Request, urlopen
13
+
14
+
15
+ def format_field_table(fields: list[dict[str, Any]]) -> str:
16
+ """Format fields as a markdown table.
17
+
18
+ Args:
19
+ fields: List of field dictionaries.
20
+
21
+ Returns:
22
+ Markdown table string.
23
+ """
24
+ if not fields:
25
+ return "_No fields defined._\n"
26
+
27
+ lines = [
28
+ "| Field | Type | Required | Description | Validators |",
29
+ "|-------|------|----------|-------------|------------|",
30
+ ]
31
+
32
+ for field in fields:
33
+ name = field.get("name", "")
34
+ field_type = field.get("type", "str")
35
+ required = "✓" if field.get("required", True) else ""
36
+ description = field.get("description", "") or "-"
37
+
38
+ # Format validators
39
+ validators = field.get("validators", {})
40
+ if validators:
41
+ validator_strs = []
42
+ if validators.get("min_value") is not None:
43
+ validator_strs.append(f"min: {validators['min_value']}")
44
+ if validators.get("max_value") is not None:
45
+ validator_strs.append(f"max: {validators['max_value']}")
46
+ if validators.get("min_length") is not None:
47
+ validator_strs.append(f"minLen: {validators['min_length']}")
48
+ if validators.get("max_length") is not None:
49
+ validator_strs.append(f"maxLen: {validators['max_length']}")
50
+ if validators.get("pattern"):
51
+ validator_strs.append(f"pattern: `{validators['pattern']}`")
52
+ validators_str = ", ".join(validator_strs) if validator_strs else "-"
53
+ else:
54
+ validators_str = "-"
55
+
56
+ lines.append(
57
+ f"| {name} | {field_type} | {required} | {description} | {validators_str} |"
58
+ )
59
+
60
+ return "\n".join(lines) + "\n"
61
+
62
+
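For context, here is what the helper above consumes and emits: a minimal sketch with made-up field data (the dict keys mirror the ones read by format_field_table, and the call assumes the function is in scope, e.g. run from this module):

fields = [
    {
        "name": "title",
        "type": "str",
        "required": True,
        "description": "Document title",
        "validators": {"min_length": 1, "max_length": 140},
    },
    {
        "name": "amount",
        "type": "float",
        "required": False,
        "description": "Total amount",
        "validators": {"min_value": 0},
    },
]
print(format_field_table(fields))
# | Field | Type | Required | Description | Validators |
# |-------|------|----------|-------------|------------|
# | title | str | ✓ | Document title | minLen: 1, maxLen: 140 |
# | amount | float |  | Total amount | min: 0 |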
63
+ def format_meta_section(meta: dict[str, Any]) -> str:
64
+ """Format Meta configuration as markdown.
65
+
66
+ Args:
67
+ meta: Meta configuration dictionary.
68
+
69
+ Returns:
70
+ Markdown string.
71
+ """
72
+ if not meta:
73
+ return ""
74
+
75
+ lines = ["## Configuration", "", "| Setting | Value |", "|---------|-------|"]
76
+
77
+ settings = [
78
+ ("Table Name", meta.get("tablename")),
79
+ ("Naming Pattern", meta.get("name_pattern")),
80
+ ("Submittable", meta.get("is_submittable")),
81
+ ("Track Changes", meta.get("track_changes")),
82
+ ]
83
+
84
+ for setting, value in settings:
85
+ if value is not None:
86
+ lines.append(f"| {setting} | `{value}` |")
87
+
88
+ if len(lines) > 4: # Has at least one setting
89
+ lines.append("")
90
+ return "\n".join(lines)
91
+ return ""
92
+
93
+
94
+ def generate_doctype_markdown(doctype_info: dict[str, Any]) -> str:
95
+ """Generate markdown documentation for a DocType.
96
+
97
+ Args:
98
+ doctype_info: DocType information dictionary.
99
+
100
+ Returns:
101
+ Markdown string.
102
+ """
103
+ name = doctype_info.get("name", "Unknown")
104
+ docstring = doctype_info.get("docstring", "")
105
+ fields = doctype_info.get("fields", [])
106
+ meta = doctype_info.get("meta", {})
107
+
108
+ lines = [
109
+ f"# {name}",
110
+ "",
111
+ ]
112
+
113
+ if docstring:
114
+ lines.extend([docstring, ""])
115
+
116
+ # Fields section
117
+ lines.extend(
118
+ [
119
+ "## Fields",
120
+ "",
121
+ format_field_table(fields),
122
+ ]
123
+ )
124
+
125
+ # Meta configuration section
126
+ meta_section = format_meta_section(meta)
127
+ if meta_section:
128
+ lines.append(meta_section)
129
+
130
+ # Controller info placeholder
131
+ lines.extend(
132
+ [
133
+ "## Controller",
134
+ "",
135
+ "Controller hooks are implemented in `*_controller.py` files.",
136
+ "Available lifecycle hooks:",
137
+ "",
138
+ "- `validate()` - Called before save, raise exceptions for validation errors",
139
+ "- `before_insert()` - Called before inserting a new document",
140
+ "- `after_insert()` - Called after successfully inserting",
141
+ "- `before_save()` - Called before saving (insert or update)",
142
+ "- `after_save()` - Called after saving",
143
+ "- `before_delete()` - Called before deleting",
144
+ "- `after_delete()` - Called after deleting",
145
+ "",
146
+ ]
147
+ )
148
+
149
+ return "\n".join(lines)
150
+
151
+
152
+ def generate_index(doctypes: list[str]) -> str:
153
+ """Generate index.md with links to all DocTypes.
154
+
155
+ Args:
156
+ doctypes: List of DocType names.
157
+
158
+ Returns:
159
+ Index markdown string.
160
+ """
161
+ lines = [
162
+ "# API Reference",
163
+ "",
164
+ "## DocTypes",
165
+ "",
166
+ ]
167
+
168
+ for name in sorted(doctypes):
169
+ filename = name.lower() + ".md"
170
+ lines.append(f"- [{name}]({filename})")
171
+
172
+ lines.append("")
173
+ return "\n".join(lines)
174
+
175
+
176
+ def generate_api_reference(
177
+ doctypes: list[dict[str, Any]],
178
+ output_dir: Path,
179
+ ) -> None:
180
+ """Generate API reference documentation for all DocTypes.
181
+
182
+ Args:
183
+ doctypes: List of DocType info dictionaries.
184
+ output_dir: Output directory for markdown files.
185
+ """
186
+ output_dir.mkdir(parents=True, exist_ok=True)
187
+
188
+ doctype_names = []
189
+
190
+ for doctype in doctypes:
191
+ name = doctype.get("name", "Unknown")
192
+ doctype_names.append(name)
193
+
194
+ markdown = generate_doctype_markdown(doctype)
195
+ filename = name.lower() + ".md"
196
+ (output_dir / filename).write_text(markdown)
197
+
198
+ # Generate index
199
+ index_md = generate_index(doctype_names)
200
+ (output_dir / "index.md").write_text(index_md)
201
+
202
+
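A quick sketch of driving this generator directly with hand-built DocType info dicts (the dict keys mirror those read by generate_doctype_markdown, the output path is illustrative, and generate_api_reference is assumed to be in scope):

from pathlib import Path

doctypes = [
    {
        "name": "Invoice",
        "docstring": "A customer invoice.",
        "fields": [],
        "meta": {"tablename": "invoices"},
    },
    {
        "name": "Customer",
        "docstring": "A customer record.",
        "fields": [],
        "meta": {},
    },
]
generate_api_reference(doctypes, Path("docs/api"))
# Writes docs/api/invoice.md, docs/api/customer.md and docs/api/index.md.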
203
+ def export_openapi_json(
204
+ openapi_url: str,
205
+ output_file: Path,
206
+ *,
207
+ timeout: int = 30,
208
+ ) -> None:
209
+ """Export OpenAPI JSON from a running app.
210
+
211
+ Args:
212
+ openapi_url: URL to fetch OpenAPI schema from.
213
+ output_file: Output file path for JSON.
214
+ timeout: Request timeout in seconds.
215
+ """
216
+ request = Request(openapi_url)
217
+ request.add_header("Accept", "application/json")
218
+
219
+ with urlopen(request, timeout=timeout) as response:
220
+ schema = json.loads(response.read().decode("utf-8"))
221
+
222
+ output_file.parent.mkdir(parents=True, exist_ok=True)
223
+ output_file.write_text(json.dumps(schema, indent=2))
224
+
225
+
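A usage sketch for the export helper; the URL and output path are placeholders, assuming a locally running app that serves its OpenAPI schema at /openapi.json and that export_openapi_json is importable from this module:

from pathlib import Path

# Both values are illustrative; point the URL at your running app.
export_openapi_json(
    "http://localhost:8000/openapi.json",
    Path("docs/api/openapi.json"),
    timeout=10,
)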
226
+ def run_docs_generate(
227
+ output: str = "./docs/api",
228
+ project_root: str | None = None,
229
+ openapi_url: str | None = None,
230
+ build_site: bool = False,
231
+ ) -> None:
232
+ """Run the documentation generator.
233
+
234
+ Args:
235
+ output: Output directory for documentation.
236
+ project_root: Project root directory (for DocType discovery).
237
+ openapi_url: Optional OpenAPI URL to export.
238
+ build_site: If True, run mkdocs build if available.
239
+ """
240
+ from framework_m_studio.discovery import doctype_to_dict, scan_doctypes
241
+
242
+ output_dir = Path(output)
243
+ output_dir.mkdir(parents=True, exist_ok=True)
244
+
245
+ # Determine project root
246
+ root = Path(project_root) if project_root else Path.cwd()
247
+
248
+ # Scan for DocTypes
249
+ doctypes = scan_doctypes(root)
250
+ doctype_infos = [doctype_to_dict(dt) for dt in doctypes]
251
+
252
+ # Generate API reference
253
+ generate_api_reference(doctype_infos, output_dir)
254
+
255
+ print(f"📚 Generated documentation for {len(doctypes)} DocTypes")
256
+ print(f" Output: {output_dir}")
257
+
258
+ # Export OpenAPI if URL provided
259
+ if openapi_url:
260
+ openapi_file = output_dir / "openapi.json"
261
+ try:
262
+ export_openapi_json(openapi_url, openapi_file)
263
+ print(f" OpenAPI: {openapi_file}")
264
+ except Exception as e:
265
+ print(f" ⚠️ Failed to export OpenAPI: {e}")
266
+
267
+ # Run mkdocs build if requested
268
+ if build_site:
269
+ run_mkdocs_build(root)
270
+
271
+
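A sketch of invoking this entry point programmatically; all arguments are illustrative, and the OpenAPI URL assumes an app running locally:

# Generate markdown for every DocType discovered under the project root,
# export the OpenAPI schema from a locally running app, and build the
# site if mkdocs and mkdocs.yml are present.
run_docs_generate(
    output="./docs/api",
    project_root=".",
    openapi_url="http://localhost:8000/openapi.json",
    build_site=True,
)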
272
+ def run_mkdocs_build(project_root: Path) -> bool:
273
+ """Run mkdocs build if mkdocs is installed.
274
+
275
+ Args:
276
+ project_root: Project root directory.
277
+
278
+ Returns:
279
+ True if build succeeded, False otherwise.
280
+ """
281
+ import shutil
282
+ import subprocess
283
+
284
+ # Check if mkdocs is available
285
+ mkdocs_path = shutil.which("mkdocs")
286
+ if not mkdocs_path:
287
+ print(" [i] mkdocs not found, skipping site build")
288
+ print(" Install with: pip install mkdocs-material")
289
+ return False
290
+
291
+ # Check if mkdocs.yml exists
292
+ mkdocs_config = project_root / "mkdocs.yml"
293
+ if not mkdocs_config.exists():
294
+ print(" [i] No mkdocs.yml found, skipping site build")
295
+ return False
296
+
297
+ # Run mkdocs build
298
+ print(" 🔨 Building documentation site...")
299
+ try:
300
+ result = subprocess.run(
301
+ [mkdocs_path, "build"],
302
+ cwd=project_root,
303
+ capture_output=True,
304
+ text=True,
305
+ timeout=120,
306
+ )
307
+ if result.returncode == 0:
308
+ print(" ✅ Site built successfully")
309
+ return True
310
+ else:
311
+ print(f" ❌ mkdocs build failed: {result.stderr}")
312
+ return False
313
+ except subprocess.TimeoutExpired:
314
+ print(" ❌ mkdocs build timed out")
315
+ return False
316
+ except Exception as e:
317
+ print(f" ❌ mkdocs build error: {e}")
318
+ return False
@@ -0,0 +1 @@
1
+ # Git module for Studio Cloud Mode
@@ -0,0 +1,309 @@
1
+ """Git Adapter Implementation.
2
+
3
+ Uses the `git` CLI via asyncio subprocess for all Git operations.
4
+ No external Python dependencies (gitpython, dulwich) required.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import asyncio
10
+ from pathlib import Path
11
+ from urllib.parse import urlparse, urlunparse
12
+
13
+ from .protocol import (
14
+ CommitResult,
15
+ GitAdapterProtocol,
16
+ GitAuthError,
17
+ GitConflictError,
18
+ GitError,
19
+ GitNetworkError,
20
+ GitStatus,
21
+ )
22
+
23
+
24
+ class GitAdapter:
25
+ """Git adapter using the git CLI.
26
+
27
+ Implements GitAdapterProtocol using asyncio subprocess to call git commands.
28
+ This approach adds no third-party Python dependencies and works with whatever git binary is installed on the host.
29
+ """
30
+
31
+ def __init__(self, git_binary: str = "git"):
32
+ """Initialize GitAdapter.
33
+
34
+ Args:
35
+ git_binary: Path to git binary (default: "git" from PATH).
36
+ """
37
+ self.git_binary = git_binary
38
+
39
+ async def _run_git(
40
+ self,
41
+ args: list[str],
42
+ cwd: Path | None = None,
43
+ *,
44
+ check: bool = True,
45
+ env: dict[str, str] | None = None,
46
+ ) -> tuple[str, str]:
47
+ """Run a git command asynchronously.
48
+
49
+ Args:
50
+ args: Git command arguments (without 'git' prefix).
51
+ cwd: Working directory for the command.
52
+ check: If True, raise GitError on non-zero exit.
53
+ env: Additional environment variables.
54
+
55
+ Returns:
56
+ Tuple of (stdout, stderr).
57
+
58
+ Raises:
59
+ GitError: If command fails and check=True.
60
+ """
61
+ import os
62
+
63
+ full_env = os.environ.copy()
64
+ if env:
65
+ full_env.update(env)
66
+
67
+ # Disable interactive prompts
68
+ full_env["GIT_TERMINAL_PROMPT"] = "0"
69
+
70
+ proc = await asyncio.create_subprocess_exec(
71
+ self.git_binary,
72
+ *args,
73
+ cwd=cwd,
74
+ stdout=asyncio.subprocess.PIPE,
75
+ stderr=asyncio.subprocess.PIPE,
76
+ env=full_env,
77
+ )
78
+
79
+ stdout, stderr = await proc.communicate()
80
+ stdout_str = stdout.decode("utf-8", errors="replace").strip()
81
+ stderr_str = stderr.decode("utf-8", errors="replace").strip()
82
+
83
+ if check and proc.returncode != 0:
84
+ error_msg = stderr_str or stdout_str or "Unknown git error"
85
+ raise self._classify_error(error_msg, proc.returncode or 0)
86
+
87
+ return stdout_str, stderr_str
88
+
89
+ def _classify_error(self, message: str, returncode: int) -> GitError:
90
+ """Classify a git error into a specific exception type."""
91
+ lower_msg = message.lower()
92
+
93
+ if "authentication" in lower_msg or "permission denied" in lower_msg:
94
+ return GitAuthError(message, returncode)
95
+ if "conflict" in lower_msg or "merge conflict" in lower_msg:
96
+ return GitConflictError(message, returncode)
97
+ if (
98
+ "could not resolve host" in lower_msg
99
+ or "connection refused" in lower_msg
100
+ or "network" in lower_msg
101
+ ):
102
+ return GitNetworkError(message, returncode)
103
+
104
+ return GitError(message, returncode)
105
+
106
+ def _embed_token_in_url(self, url: str, token: str) -> str:
107
+ """Embed auth token in HTTPS URL.
108
+
109
+ Converts: https://github.com/user/repo.git
110
+ To: https://token@github.com/user/repo.git
111
+ """
112
+ parsed = urlparse(url)
113
+ if parsed.scheme not in ("http", "https"):
114
+ return url # SSH URLs don't use token embedding
115
+
116
+ # Replace or add username with token
117
+ netloc = f"{token}@{parsed.hostname}"
118
+ if parsed.port:
119
+ netloc += f":{parsed.port}"
120
+
121
+ return urlunparse(parsed._replace(netloc=netloc))
122
+
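Two illustrative calls showing the URL rewriting; the token value is a placeholder and should never be hard-coded in real use:

adapter = GitAdapter()
print(adapter._embed_token_in_url("https://github.com/user/repo.git", "TOKEN123"))
# -> https://TOKEN123@github.com/user/repo.git
print(adapter._embed_token_in_url("git@github.com:user/repo.git", "TOKEN123"))
# -> git@github.com:user/repo.git  (non-HTTP(S) URLs are returned unchanged)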
123
+ async def clone(
124
+ self,
125
+ repo_url: str,
126
+ target_dir: Path,
127
+ *,
128
+ auth_token: str | None = None,
129
+ branch: str | None = None,
130
+ ) -> None:
131
+ """Clone a repository to the target directory."""
132
+ url = repo_url
133
+ if auth_token:
134
+ url = self._embed_token_in_url(repo_url, auth_token)
135
+
136
+ args = ["clone", "--depth", "1"] # Shallow clone for speed
137
+ if branch:
138
+ args.extend(["--branch", branch])
139
+ args.extend([url, str(target_dir)])
140
+
141
+ await self._run_git(args)
142
+
143
+ async def commit(
144
+ self,
145
+ workspace: Path,
146
+ message: str,
147
+ *,
148
+ author: str | None = None,
149
+ ) -> CommitResult:
150
+ """Stage all changes and create a commit."""
151
+ # Stage all changes
152
+ await self._run_git(["add", "-A"], cwd=workspace)
153
+
154
+ # Check if there's anything to commit
155
+ status_out, _ = await self._run_git(
156
+ ["status", "--porcelain"], cwd=workspace, check=False
157
+ )
158
+ if not status_out:
159
+ raise GitError("No changes to commit", 0)
160
+
161
+ # Count files
162
+ files_changed = len(status_out.strip().split("\n"))
163
+
164
+ # Build commit command
165
+ args = ["commit", "-m", message]
166
+ if author:
167
+ args.extend(["--author", author])
168
+
169
+ await self._run_git(args, cwd=workspace)
170
+
171
+ # Get commit SHA
172
+ sha, _ = await self._run_git(["rev-parse", "HEAD"], cwd=workspace)
173
+
174
+ return CommitResult(sha=sha, message=message, files_changed=files_changed)
175
+
176
+ async def push(
177
+ self,
178
+ workspace: Path,
179
+ branch: str | None = None,
180
+ *,
181
+ force: bool = False,
182
+ ) -> None:
183
+ """Push commits to remote."""
184
+ args = ["push"]
185
+ if force:
186
+ args.append("--force-with-lease")
187
+ if branch:
188
+ args.extend(["origin", branch])
189
+
190
+ await self._run_git(args, cwd=workspace)
191
+
192
+ async def pull(
193
+ self,
194
+ workspace: Path,
195
+ *,
196
+ rebase: bool = True,
197
+ ) -> None:
198
+ """Pull latest changes from remote."""
199
+ args = ["pull"]
200
+ if rebase:
201
+ args.append("--rebase")
202
+
203
+ await self._run_git(args, cwd=workspace)
204
+
205
+ async def create_branch(
206
+ self,
207
+ workspace: Path,
208
+ name: str,
209
+ *,
210
+ checkout: bool = True,
211
+ ) -> None:
212
+ """Create a new branch."""
213
+ if checkout:
214
+ await self._run_git(["checkout", "-b", name], cwd=workspace)
215
+ else:
216
+ await self._run_git(["branch", name], cwd=workspace)
217
+
218
+ async def checkout(
219
+ self,
220
+ workspace: Path,
221
+ ref: str,
222
+ ) -> None:
223
+ """Checkout a branch or commit."""
224
+ await self._run_git(["checkout", ref], cwd=workspace)
225
+
226
+ async def get_status(
227
+ self,
228
+ workspace: Path,
229
+ ) -> GitStatus:
230
+ """Get the current status of the workspace."""
231
+ # Get current branch
232
+ branch = await self.get_current_branch(workspace)
233
+
234
+ # Get status
235
+ status_out, _ = await self._run_git(
236
+ ["status", "--porcelain"], cwd=workspace, check=False
237
+ )
238
+
239
+ modified: list[str] = []
240
+ staged: list[str] = []
241
+ untracked: list[str] = []
242
+
243
+ for line in status_out.split("\n"):
244
+ if not line:
245
+ continue
246
+ status_code = line[:2]
247
+ filepath = line[3:]
248
+
249
+ if status_code[0] == "?":
250
+ untracked.append(filepath)
251
+ elif status_code[0] != " ":
252
+ staged.append(filepath)
253
+
254
+ if status_code[1] == "M":
255
+ modified.append(filepath)
256
+
257
+ is_clean = not (modified or staged or untracked)
258
+
259
+ # Get ahead/behind counts
260
+ ahead, behind = await self._get_ahead_behind(workspace, branch)
261
+
262
+ return GitStatus(
263
+ branch=branch,
264
+ is_clean=is_clean,
265
+ modified_files=modified,
266
+ staged_files=staged,
267
+ untracked_files=untracked,
268
+ ahead=ahead,
269
+ behind=behind,
270
+ )
271
+
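A usage sketch for the status call; the workspace path is a placeholder, and the GitStatus attribute names are assumed to match the keyword arguments used in the constructor above:

import asyncio
from pathlib import Path

async def show_status() -> None:
    adapter = GitAdapter()
    # Point this at any existing git checkout; the path is illustrative.
    status = await adapter.get_status(Path("/tmp/studio-workspace"))
    print(f"on {status.branch} ({'clean' if status.is_clean else 'dirty'})")
    print("staged:   ", status.staged_files)
    print("modified: ", status.modified_files)
    print("untracked:", status.untracked_files)
    print(f"ahead {status.ahead}, behind {status.behind}")

asyncio.run(show_status())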
272
+ async def get_current_branch(
273
+ self,
274
+ workspace: Path,
275
+ ) -> str:
276
+ """Get the name of the current branch."""
277
+ branch, _ = await self._run_git(
278
+ ["rev-parse", "--abbrev-ref", "HEAD"], cwd=workspace
279
+ )
280
+ return branch
281
+
282
+ async def _get_ahead_behind(self, workspace: Path, branch: str) -> tuple[int, int]:
283
+ """Get number of commits ahead/behind remote."""
284
+ try:
285
+ out, _ = await self._run_git(
286
+ ["rev-list", "--left-right", "--count", f"origin/{branch}...HEAD"],
287
+ cwd=workspace,
288
+ check=False,
289
+ )
290
+ if out:
291
+ parts = out.split()
292
+ if len(parts) == 2:
293
+ behind = int(parts[0])
294
+ ahead = int(parts[1])
295
+ return ahead, behind
296
+ except (ValueError, GitError):
297
+ pass
298
+ return 0, 0
299
+
300
+ async def fetch(
301
+ self,
302
+ workspace: Path,
303
+ ) -> None:
304
+ """Fetch updates from remote without merging."""
305
+ await self._run_git(["fetch", "--quiet"], cwd=workspace)
306
+
307
+
308
+ # Static type check: verify that GitAdapter satisfies GitAdapterProtocol
309
+ _: GitAdapterProtocol = GitAdapter()
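Putting the adapter together, a typical Studio-side flow might look like the sketch below; the repository URL, workspace path, token, and author are placeholders, error handling is omitted, and the CommitResult attributes are assumed to match the constructor call in commit():

import asyncio
from pathlib import Path

async def publish_change() -> None:
    adapter = GitAdapter()
    workspace = Path("/tmp/studio-workspace")  # placeholder path

    # Shallow-clone the target branch (URL and token are placeholders).
    await adapter.clone(
        "https://github.com/example/repo.git",
        workspace,
        auth_token="TOKEN",
        branch="main",
    )

    # ... edit files under the workspace here ...
    (workspace / "README.md").write_text("updated by Studio\n")

    result = await adapter.commit(
        workspace,
        "Update README",
        author="Studio Bot <bot@example.com>",
    )
    print(f"committed {result.sha} ({result.files_changed} file(s) changed)")

    await adapter.push(workspace, "main")

asyncio.run(publish_change())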