framework-m-studio 0.2.2__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
- framework_m_studio/__init__.py +6 -1
- framework_m_studio/app.py +56 -11
- framework_m_studio/checklist_parser.py +421 -0
- framework_m_studio/cli/__init__.py +752 -0
- framework_m_studio/cli/build.py +421 -0
- framework_m_studio/cli/dev.py +214 -0
- framework_m_studio/cli/new.py +754 -0
- framework_m_studio/cli/quality.py +157 -0
- framework_m_studio/cli/studio.py +159 -0
- framework_m_studio/cli/utility.py +50 -0
- framework_m_studio/codegen/generator.py +6 -2
- framework_m_studio/codegen/parser.py +101 -4
- framework_m_studio/codegen/templates/doctype.py.jinja2 +19 -10
- framework_m_studio/codegen/test_generator.py +6 -2
- framework_m_studio/discovery.py +15 -5
- framework_m_studio/docs_generator.py +298 -2
- framework_m_studio/protocol_scanner.py +435 -0
- framework_m_studio/routes.py +39 -11
- {framework_m_studio-0.2.2.dist-info → framework_m_studio-0.3.0.dist-info}/METADATA +7 -2
- framework_m_studio-0.3.0.dist-info/RECORD +32 -0
- framework_m_studio-0.3.0.dist-info/entry_points.txt +18 -0
- framework_m_studio/cli.py +0 -247
- framework_m_studio-0.2.2.dist-info/RECORD +0 -24
- framework_m_studio-0.2.2.dist-info/entry_points.txt +0 -4
- {framework_m_studio-0.2.2.dist-info → framework_m_studio-0.3.0.dist-info}/WHEEL +0 -0
framework_m_studio/discovery.py
CHANGED
@@ -34,6 +34,8 @@ class FieldInfo:
     required: bool = True
     description: str | None = None
     label: str | None = None
+    link_doctype: str | None = None  # For Link field - which DocType to link to
+    table_doctype: str | None = None  # For Table field - which DocType for child table
     validators: dict[str, Any] = field(default_factory=dict)
@@ -79,6 +81,8 @@ def parse_doctype_file(file_path: Path) -> list[DocTypeInfo]:
             required=f.get("required", True),
             description=f.get("description"),
             label=f.get("label"),
+            link_doctype=f.get("link_doctype"),  # Preserve Link field metadata
+            table_doctype=f.get("table_doctype"),  # Preserve Table field metadata
             validators=f.get("validators", {}),
         )
         for f in schema.get("fields", [])
@@ -104,13 +108,12 @@ def scan_doctypes(

     Args:
         root_dir: Root directory to scan
-        exclude_patterns: Glob patterns to exclude (e.g., ["**/test_
+        exclude_patterns: Glob patterns to exclude (e.g., ["**/test_*.py"])

     Returns:
         List of all DocTypeInfo objects found
     """
     exclude_patterns = exclude_patterns or [
-        "**/test_*.py",
         "**/tests/**",
         "**/__pycache__/**",
         "**/.venv/**",
@@ -128,13 +131,18 @@ def scan_doctypes(
         excluded = False

         # Check path segments for excluded directories
-        for part in path_parts:
+        for part in path_parts[
+            :-1
+        ]:  # Exclude last part (filename) from directory check
             if part in ("tests", "__pycache__", ".venv", "node_modules"):
                 excluded = True
                 break
-
+
+        # Check if the file itself is a test file
+        if not excluded and len(path_parts) > 0:
+            filename = path_parts[-1]
+            if filename.startswith("test_") and filename.endswith(".py"):
                 excluded = True
-                break

         # Also check fnmatch patterns
         if not excluded:
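The net effect of this hunk: directory names are checked against the exclude set without the filename, and test files are excluded by an explicit filename check rather than the removed `**/test_*.py` glob. A standalone sketch of the resulting rules (the `is_excluded` helper is illustrative, not part of the package):

```python
from pathlib import Path

# Sketch of the 0.3.0 exclusion rules from this hunk (illustration only):
# - a directory segment named tests/__pycache__/.venv/node_modules excludes the file
# - the filename itself is excluded only when it matches test_*.py
def is_excluded(path: Path) -> bool:
    parts = path.parts
    if any(p in ("tests", "__pycache__", ".venv", "node_modules") for p in parts[:-1]):
        return True
    filename = parts[-1]
    return filename.startswith("test_") and filename.endswith(".py")

assert is_excluded(Path("app/tests/models.py"))            # excluded directory
assert is_excluded(Path("app/test_models.py"))             # test file by name
assert not is_excluded(Path("app/test_utils/models.py"))   # "test_utils" is a dir, not a test file
```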
@@ -176,6 +184,8 @@ def doctype_to_dict(doctype: DocTypeInfo) -> dict[str, Any]:
             "required": f.required,
             "description": f.description,
             "label": f.label,
+            "link_doctype": f.link_doctype,  # Include Link field metadata in API response
+            "table_doctype": f.table_doctype,  # Include Table field metadata in API response
             "validators": f.validators,
         }
         for f in doctype.fields
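Taken together, these discovery.py hunks carry Link/Table metadata through the whole pipeline. A data-shape sketch (field values illustrative; keys match the `f.get(...)` calls above):

```python
# A schema field entry as parse_doctype_file reads it (illustrative values):
field_schema = {"name": "customer", "type": "Link", "link_doctype": "Customer"}

# 0.2.2 dropped link_doctype/table_doctype during discovery; 0.3.0 stores them
# on FieldInfo and doctype_to_dict returns them in the API response, e.g.:
# {"name": "customer", ..., "link_doctype": "Customer", "table_doctype": None}
```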
framework_m_studio/docs_generator.py
CHANGED

@@ -12,6 +12,17 @@ from typing import Any
 from urllib.request import Request, urlopen


+def escape_mdx(text: str) -> str:
+    """Escape MDX special characters like braces in text.
+
+    Docusaurus/MDX treats {braces} as JSX expressions. This function
+    escapes them using HTML entities so they are rendered as literal text.
+    """
+    if not text:
+        return ""
+    return text.replace("{", "&#123;").replace("}", "&#125;")
+
+
 def format_field_table(fields: list[dict[str, Any]]) -> str:
     """Format fields as a markdown table.
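A quick illustration of the escaping (assuming the numeric entities `&#123;`/`&#125;`; the input string is invented):

```python
from framework_m_studio.docs_generator import escape_mdx  # module path per the file list

print(escape_mdx('Returns a dict like {"name": ...}'))
# Returns a dict like &#123;"name": ...&#125;
# MDX renders the entities as literal braces instead of parsing a JSX expression.
```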
@@ -33,7 +44,7 @@ def format_field_table(fields: list[dict[str, Any]]) -> str:
         name = field.get("name", "")
         field_type = field.get("type", "str")
         required = "✓" if field.get("required", True) else ""
-        description = field.get("description", "") or "-"
+        description = escape_mdx(field.get("description", "")) or "-"

         # Format validators
         validators = field.get("validators", {})
@@ -91,6 +102,51 @@ def format_meta_section(meta: dict[str, Any]) -> str:
     return ""


+def format_permission_matrix(
+    permissions: dict[str, list[str]] | None,
+) -> str:
+    """Format permissions as a role-based matrix table.
+
+    Creates a table where rows are roles and columns are permission types.
+    Shows ✓ for granted permissions.
+
+    Args:
+        permissions: Dict of permission_type -> list of roles
+
+    Returns:
+        Markdown table string.
+    """
+    if not permissions:
+        return "_No permissions defined._\n"
+
+    # Collect all roles and permission types
+    all_roles: set[str] = set()
+    for roles in permissions.values():
+        all_roles.update(roles)
+
+    # Sort for consistent output
+    sorted_roles = sorted(all_roles)
+    sorted_actions = sorted(permissions.keys())
+
+    # Build header
+    header_cols = ["Role"] + [action.capitalize() for action in sorted_actions]
+    header = "| " + " | ".join(header_cols) + " |"
+    separator = "|" + "|".join(["------"] * len(header_cols)) + "|"
+
+    lines = [header, separator]
+
+    # Build rows for each role
+    for role in sorted_roles:
+        row = [role]
+        for action in sorted_actions:
+            has_permission = role in permissions.get(action, [])
+            row.append("✓" if has_permission else "")
+        lines.append("| " + " | ".join(row) + " |")
+
+    lines.append("")
+    return "\n".join(lines)
+
+
 def generate_doctype_markdown(doctype_info: dict[str, Any]) -> str:
     """Generate markdown documentation for a DocType.
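For illustration, calling the new helper with a small permissions dict (role names invented) yields a markdown matrix:

```python
from framework_m_studio.docs_generator import format_permission_matrix  # path per the file list

print(format_permission_matrix({"read": ["Admin", "User"], "write": ["Admin"]}))
# | Role | Read | Write |
# |------|------|------|
# | Admin | ✓ | ✓ |
# | User | ✓ |  |
```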
@@ -100,10 +156,13 @@ def generate_doctype_markdown(doctype_info: dict[str, Any]) -> str:
     Returns:
         Markdown string.
     """
+    from pathlib import Path
+
     name = doctype_info.get("name", "Unknown")
-    docstring = doctype_info.get("docstring", "")
+    docstring = escape_mdx(doctype_info.get("docstring", ""))
     fields = doctype_info.get("fields", [])
     meta = doctype_info.get("meta", {})
+    file_path = doctype_info.get("file_path", "")

     lines = [
         f"# {name}",
@@ -113,6 +172,12 @@ def generate_doctype_markdown(doctype_info: dict[str, Any]) -> str:
     if docstring:
         lines.extend([docstring, ""])

+    # Source file link
+    if file_path:
+        filename = Path(file_path).name
+        file_uri = f"file://{file_path}"
+        lines.extend([f"**Source**: [{filename}]({file_uri})", ""])
+
     # Fields section
     lines.extend(
         [
@@ -122,6 +187,17 @@ def generate_doctype_markdown(doctype_info: dict[str, Any]) -> str:
         ]
     )

+    # Permissions section
+    permissions = meta.get("permissions") if meta else None
+    if permissions:
+        lines.extend(
+            [
+                "## Permissions",
+                "",
+                format_permission_matrix(permissions),
+            ]
+        )
+
     # Meta configuration section
     meta_section = format_meta_section(meta)
     if meta_section:
@@ -223,6 +299,107 @@ def export_openapi_json(
     output_file.write_text(json.dumps(schema, indent=2))


+def generate_openapi_markdown(
+    schema: dict[str, Any],
+    *,
+    include_examples: bool = False,
+) -> str:
+    """Generate human-readable markdown from OpenAPI schema.
+
+    Groups endpoints by tags and includes method, path, and summary.
+
+    Args:
+        schema: OpenAPI JSON schema dict
+        include_examples: If True, include request/response examples
+
+    Returns:
+        Markdown string
+    """
+    info = schema.get("info", {})
+    title = info.get("title", "API Reference")
+    version = info.get("version", "")
+    paths = schema.get("paths", {})
+
+    lines = [f"# {title}"]
+    if version:
+        lines.append(f"\n**Version**: {version}")
+    lines.append("")
+
+    # Group endpoints by tag - now store full details if examples needed
+    endpoints_by_tag: dict[str, list[dict[str, Any]]] = {}
+
+    for path, methods in paths.items():
+        for method, details in methods.items():
+            if method in ("get", "post", "put", "patch", "delete"):
+                summary = details.get("summary", details.get("operationId", ""))
+                tags = details.get("tags", ["Other"])
+                for tag in tags:
+                    if tag not in endpoints_by_tag:
+                        endpoints_by_tag[tag] = []
+                    endpoints_by_tag[tag].append(
+                        {
+                            "method": method.upper(),
+                            "path": path,
+                            "summary": summary,
+                            "details": details if include_examples else {},
+                        }
+                    )
+
+    # Generate sections by tag
+    for tag in sorted(endpoints_by_tag.keys()):
+        lines.extend([f"## {tag}", ""])
+
+        lines.extend(
+            ["| Method | Path | Description |", "|--------|------|-------------|"]
+        )
+
+        for endpoint in endpoints_by_tag[tag]:
+            lines.append(
+                f"| {endpoint['method']} | `{endpoint['path']}` | {endpoint['summary']} |"
+            )
+
+        lines.append("")
+
+        # Include examples if requested
+        if include_examples:
+            for endpoint in endpoints_by_tag[tag]:
+                details = endpoint.get("details", {})
+
+                # Request example
+                request_body = details.get("requestBody", {})
+                if request_body:
+                    content = request_body.get("content", {})
+                    json_content = content.get("application/json", {})
+                    example = json_content.get("example")
+                    if example:
+                        lines.append(
+                            f"### {endpoint['method']} {endpoint['path']} - Request"
+                        )
+                        lines.append("")
+                        lines.append("```json")
+                        lines.append(json.dumps(example, indent=2))
+                        lines.append("```")
+                        lines.append("")
+
+                # Response example
+                responses = details.get("responses", {})
+                for status_code, response in responses.items():
+                    content = response.get("content", {})
+                    json_content = content.get("application/json", {})
+                    example = json_content.get("example")
+                    if example:
+                        lines.append(
+                            f"### {endpoint['method']} {endpoint['path']} - Response ({status_code})"
+                        )
+                        lines.append("")
+                        lines.append("```json")
+                        lines.append(json.dumps(example, indent=2))
+                        lines.append("```")
+                        lines.append("")
+
+    return "\n".join(lines)
+
+
 def run_docs_generate(
     output: str = "./docs/api",
     project_root: str | None = None,
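A minimal usage sketch for the new generator (schema values illustrative, shaped like typical OpenAPI output):

```python
from framework_m_studio.docs_generator import generate_openapi_markdown  # path per the file list

schema = {
    "info": {"title": "My API", "version": "0.3.0"},
    "paths": {
        "/api/invoice": {
            "get": {"summary": "List invoices", "tags": ["Invoice"]},
        },
    },
}

md = generate_openapi_markdown(schema)
# md contains "# My API", "**Version**: 0.3.0", a "## Invoice" section, and the row:
# | GET | `/api/invoice` | List invoices |
```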
@@ -316,3 +493,122 @@ def run_mkdocs_build(project_root: Path) -> bool:
     except Exception as e:
         print(f" ❌ mkdocs build error: {e}")
         return False
+
+
+def export_rag_corpus(
+    project_root: Path,
+    output_file: Path,
+    include_tests: bool = False,
+) -> None:
+    """Export documentation and source code as a JSONL corpus for RAG/LLM.
+
+    Args:
+        project_root: Project root directory.
+        output_file: Output file path (.jsonl).
+        include_tests: If True, include test files in the corpus.
+    """
+    import json
+
+    corpus = []
+
+    # 1. Export DocTypes
+    from framework_m_studio.discovery import doctype_to_dict, scan_doctypes
+
+    doctypes = scan_doctypes(project_root)
+    for dt in doctypes:
+        dt_dict = doctype_to_dict(dt)
+        corpus.append(
+            {
+                "id": f"doctype-{dt_dict['name'].lower()}",
+                "type": "doctype",
+                "title": f"DocType: {dt_dict['name']}",
+                "content": generate_doctype_markdown(dt_dict),
+                "metadata": {
+                    "module": dt.__module__,
+                    "name": dt_dict["name"],
+                },
+            }
+        )
+
+    # 2. Export Markdown Files (Docs, ADRs, RFCs, Frontend Docs)
+    doc_paths = list(project_root.glob("docs/**/*.md"))
+    doc_paths.extend(project_root.glob("checklists/*.md"))
+    doc_paths.extend(project_root.glob("frontend/docs/**/*.md"))
+
+    for path in doc_paths:
+        if path.name == "index.md" or "generated" in str(path):
+            continue
+
+        try:
+            content = path.read_text()
+            rel_path = path.relative_to(project_root)
+            doc_type = "doc"
+            if "adr" in str(rel_path):
+                doc_type = "adr"
+            elif "rfc" in str(rel_path):
+                doc_type = "rfc"
+            elif "checklist" in str(rel_path):
+                doc_type = "checklist"
+            elif "frontend/docs" in str(rel_path):
+                doc_type = "frontend-doc"
+
+            corpus.append(
+                {
+                    "id": f"file-{str(rel_path).replace('/', '-')}",
+                    "type": doc_type,
+                    "title": f"Document: {rel_path}",
+                    "content": content,
+                    "metadata": {
+                        "path": str(rel_path),
+                    },
+                }
+            )
+        except Exception as e:
+            print(f" ⚠️ Failed to read {path}: {e}")
+
+    # 3. Export Tests (Optional)
+    if include_tests:
+        test_paths = list(project_root.glob("tests/**/*.py"))
+        for path in test_paths:
+            try:
+                content = path.read_text()
+                rel_path = path.relative_to(project_root)
+                corpus.append(
+                    {
+                        "id": f"test-{str(rel_path).replace('/', '-')}",
+                        "type": "test",
+                        "title": f"Test: {rel_path}",
+                        "content": f"```python\n{content}\n```",
+                        "metadata": {
+                            "path": str(rel_path),
+                        },
+                    }
+                )
+            except Exception as e:
+                print(f" ⚠️ Failed to read {path}: {e}")
+
+    # Write JSONL
+    output_file.parent.mkdir(parents=True, exist_ok=True)
+    with output_file.open("w") as f:
+        for entry in corpus:
+            f.write(json.dumps(entry) + "\n")
+
+
+def run_docs_export(
+    output: str = "./docs/machine/corpus.jsonl",
+    project_root: str | None = None,
+    include_tests: bool = False,
+) -> None:
+    """Run the documentation export.
+
+    Args:
+        output: Output file path for the corpus.
+        project_root: Project root directory.
+        include_tests: If True, include tests in the export.
+    """
+    root = Path(project_root) if project_root else Path.cwd()
+    output_file = Path(output)
+
+    print(f"📤 Exporting RAG corpus to {output_file}...")
+    export_rag_corpus(root, output_file, include_tests=include_tests)
+    print(" ✅ Export complete")