marimo-dev 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,74 @@
1
+ Metadata-Version: 2.3
2
+ Name: marimo-dev
3
+ Version: 0.1.0
4
+ Summary: Build and publish python packages from marimo notebooks
5
+ Author: Deufel
6
+ Author-email: Deufel <MDeufel13@gmail.com>
7
+ License: MIT
8
+ Requires-Python: >=3.12
9
+ Description-Content-Type: text/markdown
10
+
11
+ # m_dev
12
+
13
+ A literate programming build system that converts Marimo notebooks into distributable Python packages.
14
+
15
+ ## What it does
16
+
17
+ Write code in numbered notebook files in a `notebooks/` directory. Mark functions and classes for export by making them purely functional with references from the setup cell—Marimo detects these automatically. Run `md build` to generate a proper Python package with `__init__.py`, module files, and `llms.txt` API documentation.
18
+
19
+ ## Project structure
20
+
21
+ ```
22
+ my-project/
23
+ ├── pyproject.toml
24
+ ├── notebooks/
25
+ │ ├── 00_core.py
26
+ │ ├── 01_read.py
27
+ │ ├── 02_pkg.py
28
+ │ ├── 03_docs.py
29
+ │ └── 04_build.py
30
+ └── src/
31
+ └── my_package/
32
+ ├── __init__.py
33
+ ├── core.py
34
+ ├── read.py
35
+ └── ...
36
+ ```
37
+
38
+ ## How it works
39
+
40
+ The build system parses notebooks via AST, extracts decorated exports (`@app.function`, `@app.class_definition`), and writes clean module files. It reads metadata from `pyproject.toml` and generates `__init__.py` with proper imports and `__all__` exports.
41
+
42
+ The `llms.txt` file contains function signatures with inline documentation extracted from comments, formatted for LLM consumption. This provides a compact API reference.
43
+
44
+ ## CLI usage
45
+
46
+ ```bash
47
+ md build # build package from notebooks/
48
+ md publish # publish to PyPI
49
+ md publish --test # publish to Test PyPI
50
+ ```
51
+
52
+ ## Requirements
53
+
54
+ - Python 3.12+
55
+ - Marimo for notebook management
56
+ - uv for dependency management
57
+ - pyproject.toml with project metadata
58
+
59
+ Marimo manages your `pyproject.toml` through its package tab, making dependencies visible and easy to update.
60
+
61
+ ## Install
62
+
63
+ ```bash
64
+ uv add marimo-dev --index testpypi=https://test.pypi.org/simple --index pypi=https://pypi.org/simple --index-strategy unsafe-best-match
65
+ ```
66
+
67
+ ## Module structure
68
+
69
+ - `core.py` - Data model: `Kind`, `Param`, `Node`
70
+ - `read.py` - Parse notebooks, extract exports, scan project
71
+ - `pkg.py` - Write module files and `__init__.py`
72
+ - `docs.py` - Generate signatures and `llms.txt`
73
+ - `build.py` - Orchestrate the build
74
+ - `cli.py` - Command-line interface
@@ -0,0 +1,64 @@
1
+ # m_dev
2
+
3
+ A literate programming build system that converts Marimo notebooks into distributable Python packages.
4
+
5
+ ## What it does
6
+
7
+ Write code in numbered notebook files in a `notebooks/` directory. Mark functions and classes for export by making them purely functional with references from the setup cell—Marimo detects these automatically. Run `md build` to generate a proper Python package with `__init__.py`, module files, and `llms.txt` API documentation.
8
+
9
+ ## Project structure
10
+
11
+ ```
12
+ my-project/
13
+ ├── pyproject.toml
14
+ ├── notebooks/
15
+ │ ├── 00_core.py
16
+ │ ├── 01_read.py
17
+ │ ├── 02_pkg.py
18
+ │ ├── 03_docs.py
19
+ │ └── 04_build.py
20
+ └── src/
21
+ └── my_package/
22
+ ├── __init__.py
23
+ ├── core.py
24
+ ├── read.py
25
+ └── ...
26
+ ```
27
+
28
+ ## How it works
29
+
30
+ The build system parses notebooks via AST, extracts decorated exports (`@app.function`, `@app.class_definition`), and writes clean module files. It reads metadata from `pyproject.toml` and generates `__init__.py` with proper imports and `__all__` exports.
31
+
32
+ The `llms.txt` file contains function signatures with inline documentation extracted from comments, formatted for LLM consumption. This provides a compact API reference.
33
+
34
+ ## CLI usage
35
+
36
+ ```bash
37
+ md build # build package from notebooks/
38
+ md publish # publish to PyPI
39
+ md publish --test # publish to Test PyPI
40
+ ```
41
+
42
+ ## Requirements
43
+
44
+ - Python 3.12+
45
+ - Marimo for notebook management
46
+ - uv for dependency management
47
+ - pyproject.toml with project metadata
48
+
49
+ Marimo manages your `pyproject.toml` through its package tab, making dependencies visible and easy to update.
50
+
51
+ ## Install
52
+
53
+ ```bash
54
+ uv add marimo-dev --index testpypi=https://test.pypi.org/simple --index pypi=https://pypi.org/simple --index-strategy unsafe-best-match
55
+ ```
56
+
57
+ ## Module structure
58
+
59
+ - `core.py` - Data model: `Kind`, `Param`, `Node`
60
+ - `read.py` - Parse notebooks, extract exports, scan project
61
+ - `pkg.py` - Write module files and `__init__.py`
62
+ - `docs.py` - Generate signatures and `llms.txt`
63
+ - `build.py` - Orchestrate the build
64
+ - `cli.py` - Command-line interface
@@ -0,0 +1,41 @@
1
+ [build-system]
2
+ requires = ["uv_build>=0.9.15,<0.10.0"]
3
+ build-backend = "uv_build"
4
+
5
+ [project]
6
+ name = "marimo-dev"
7
+ version = "0.1.0"
8
+ description = "Build and publish python packages from marimo notebooks"
9
+ readme = "README.md"
10
+ requires-python = ">=3.12"
11
+ dependencies = []
12
+
13
+ [project.license]
14
+ text = "MIT"
15
+
16
+ [[project.authors]]
17
+ name = "Deufel"
18
+ email = "MDeufel13@gmail.com"
19
+
20
+ [project.scripts]
21
+ md = "m_dev.cli:main"
22
+
23
+
24
+ [tool.marimo.runtime]
25
+ pythonpath = ["src"]
26
+
27
+ # Remove this when ready to upload this is a saftey net for not accidentally uploading
28
+ # classifiers = ["Private :: Do Not Upload"]
29
+
30
+ # Development packages
31
+ [dependency-groups]
32
+ dev = ["anthropic>=0.72.0", "marimo[mcp]>=0.18.4" , "pytest>=8.4.2"]
33
+
34
+
35
+ # Not sure that this is required...
36
+ [tool.uv]
37
+ default-groups = ["dev"]
38
+
39
+
40
+
41
+
@@ -0,0 +1,39 @@
1
+ """Build and publish python packages from marimo notebooks"""
2
+ __version__ = '0.1.0'
3
+ __author__ = 'Deufel'
4
+ from .core import Kind, Param, Node
5
+ from .read import inline_doc, parse_params, parse_class_params, parse_ret, src_with_decs, is_export, parse_import, parse_const, parse_export, parse_node, parse_file, read_meta, nb_name, scan
6
+ from .pkg import clean, write, write_mod, write_init
7
+ from .docs import cls_sig, fn_sig, sig, write_llms
8
+ from .build import publish
9
+ from .cli import init, main
10
+ __all__ = [
11
+ "Kind",
12
+ "Node",
13
+ "Param",
14
+ "clean",
15
+ "cls_sig",
16
+ "fn_sig",
17
+ "init",
18
+ "inline_doc",
19
+ "is_export",
20
+ "main",
21
+ "nb_name",
22
+ "parse_class_params",
23
+ "parse_const",
24
+ "parse_export",
25
+ "parse_file",
26
+ "parse_import",
27
+ "parse_node",
28
+ "parse_params",
29
+ "parse_ret",
30
+ "publish",
31
+ "read_meta",
32
+ "scan",
33
+ "sig",
34
+ "src_with_decs",
35
+ "write",
36
+ "write_init",
37
+ "write_llms",
38
+ "write_mod",
39
+ ]
@@ -0,0 +1,33 @@
1
+ from m_dev.core import Kind, Param, Node
2
+ from m_dev.read import scan
3
+ from m_dev.pkg import write_mod, write_init
4
+ from m_dev.docs import write_llms
5
+ from pathlib import Path
6
+ import ast
7
+
8
def publish(
    test:bool=True, # Use Test PyPI if True, real PyPI if False
):
    "Build and publish package to PyPI. Looks for ~/.pypirc for credentials, otherwise prompts."
    import subprocess, configparser, shutil
    from pathlib import Path

    # Start from a clean slate so stale artifacts are never uploaded.
    shutil.rmtree('dist', ignore_errors=True)
    print("Building package...")
    subprocess.run(['uv', 'build'], check=True)

    cmd = ['uv', 'publish']
    pypirc = Path.home() / '.pypirc'
    section = 'testpypi' if test else 'pypi'

    # Select the upload endpoint that matches the chosen index.
    url = 'https://test.pypi.org/legacy/' if test else 'https://upload.pypi.org/legacy/'
    cmd += ['--publish-url', url]

    # Pull credentials from ~/.pypirc when available; otherwise uv prompts interactively.
    if pypirc.exists():
        config = configparser.ConfigParser()
        config.read(pypirc)
        if section in config:
            creds = config[section]
            cmd += ['--username', creds.get('username', '__token__'),
                    '--password', creds.get('password', '')]

    print(f"Publishing to {'Test ' if test else ''}PyPI...")
    subprocess.run(cmd, check=True)
@@ -0,0 +1,36 @@
1
+ from m_dev.build import build
2
+ import sys, subprocess
3
+ from pathlib import Path
4
+
5
def init(
    name:str=None, # project name (defaults to current directory name)
):
    "Initialize a new m-dev project with notebooks dir and pyproject.toml."
    # Let uv create the bare project skeleton (no README, no VCS, no python pin).
    cmd = ['uv', 'init', '--bare', '--no-readme', '--no-pin-python', '--vcs', 'none']
    if name: cmd.append(name)
    subprocess.run(cmd, check=True)
    # BUG FIX: `uv init NAME` creates the project in a NAME/ subdirectory, so the
    # notebooks dir and the pyproject edits must target that directory too
    # (previously they always touched the current working directory).
    root = Path(name) if name else Path('.')
    (root/'notebooks').mkdir(exist_ok=True)
    p = root/'pyproject.toml'
    content = p.read_text()
    additions = '''
[tool.marimo.runtime]
pythonpath = ["src"]

[build-system]
requires = ["uv_build>=0.9.15,<0.10.0"]
build-backend = "uv_build"
'''
    # Append our tables only once; an existing [build-system] table means init already ran.
    if '[build-system]' not in content: p.write_text(content.rstrip() + '\n' + additions)
24
+
25
def main():
    "CLI entry point: dispatch `md` subcommands (init, build, publish)."
    if len(sys.argv) < 2: print("Usage: md [init|build|publish]"); sys.exit(1)
    cmd = sys.argv[1]
    if cmd == 'init': init(sys.argv[2] if len(sys.argv) > 2 else None)
    elif cmd == 'build':
        from m_dev.build import build
        print(f"Built package at: {build()}")
    elif cmd == 'publish':
        test = '--test' in sys.argv or '-t' in sys.argv
        # BUG FIX: there is no m_dev.publish module -- publish() is defined in
        # m_dev.build (the package __init__ also imports it from .build).
        from m_dev.build import publish
        publish(test=test)
    else: print(f"Unknown command: {cmd}"); sys.exit(1)
@@ -0,0 +1,27 @@
1
+ import ast, re, tomllib, json
2
+ from pathlib import Path
3
+ from enum import Enum
4
+ from dataclasses import dataclass, field
5
+
6
class Kind(Enum):
    "Kinds of top-level nodes extracted from a parsed notebook."
    # Values are short string tags; nodes are grouped by Kind when modules are written out.
    IMP='import' # Import statement
    CONST='const' # Constant definition
    EXP='export' # Exported function or class
11
+
12
@dataclass
class Param:
    "A single function or class parameter: name, annotation, default, and inline doc."
    name: str # parameter name
    anno: str|None = None # type annotation (unparsed source text), if present
    default: str|None = None # default value (unparsed source text), if present
    doc: str = '' # inline `# ...` documentation pulled from the source line
18
+
19
@dataclass
class Node:
    "A parsed code node representing an import, constant, or exported function/class."
    kind: Kind # type of node (import/const/export)
    name: str # identifier name ('' for imports)
    src: str # source code, including decorators for exports
    doc: str = '' # docstring text
    params: list[Param] = field(default_factory=list) # function/class parameters
    ret: tuple[str,str]|None = None # return type annotation and doc
@@ -0,0 +1,44 @@
1
+ from m_dev.core import Kind, Param, Node
2
+ from pathlib import Path
3
+ import ast
4
+
5
def cls_sig(
    n:Node, # the node to generate signature for
    dataclass=False, # whether to include @dataclass decorator
)->str: # formatted class signature
    "Generate a class signature string: header, optional docstring, attribute lines."
    lines = []
    if dataclass: lines.append('@dataclass')
    lines.append(f"class {n.name}:")
    if n.doc: lines.append(f'    """{n.doc}"""')
    for p in n.params:
        anno = f': {p.anno}' if p.anno else ''
        dflt = f' = {p.default}' if p.default else ''
        lines.append(f"    {p.name}{anno}{dflt}")
    return '\n'.join(lines)
15
+
16
def fn_sig(
    n:Node, # the node to generate signature for
    is_async=False, # whether function is async
)->str: # formatted function signature
    "Generate a function signature string, with the docstring appended when present."
    rendered = []
    for p in n.params:
        anno = f': {p.anno}' if p.anno else ''
        dflt = f'={p.default}' if p.default else ''
        rendered.append(f"{p.name}{anno}{dflt}")
    kw = 'async def' if is_async else 'def'
    ret = f" -> {n.ret[0]}" if n.ret else ""
    head = f"{kw} {n.name}({', '.join(rendered)}){ret}:"
    if not n.doc: return head
    return f'{head}\n    """{n.doc}"""'
25
+
26
def sig(
    n:Node, # the node to generate signature for
)->str: # formatted signature string
    "Dispatch to cls_sig or fn_sig based on how the node's source text begins."
    head = n.src.lstrip()
    if head.startswith('@dataclass'): return cls_sig(n, dataclass=True)
    if head.startswith('class '): return cls_sig(n)
    return fn_sig(n, is_async=head.startswith('async def'))
34
+
35
def write_llms(
    meta:dict, # project metadata from pyproject.toml
    nodes:list, # list of Node objects to document
    out='docs', # output directory path
):
    "Write API signatures to llms.txt file for LLM consumption."
    # Dunder-named nodes (e.g. __version__) are internal and excluded.
    public = [n for n in nodes if not n.name.startswith('__')]
    sigs = '\n\n'.join(sig(n) for n in public)
    header = f"# {meta['name']}\n\n> {meta['desc']}\n\nVersion: {meta['version']}"
    body = f"{header}\n\n## API\n\n```python\n{sigs}\n```"
    target = Path(out)
    target.mkdir(exist_ok=True)
    (target/'llms.txt').write_text(body)
@@ -0,0 +1,41 @@
1
+ from m_dev.core import Kind, Param, Node
2
+ from pathlib import Path
3
+ import ast
4
+
5
def clean(
    src:str, # source code to clean
)->str: # cleaned source code
    "Drop marimo export-decorator lines, keeping every other line verbatim."
    markers = ('@app.function', '@app.class_definition')
    kept = [line for line in src.splitlines() if not line.strip().startswith(markers)]
    return '\n'.join(kept)
10
+
11
def write(
    p:str, # path to write to
    *parts:str, # content parts to join with blank lines
):
    "Write parts to file, skipping falsy parts, blank-line separated, trailing newline."
    text = '\n\n'.join(part for part in parts if part)
    Path(p).write_text(text + '\n')
17
+
18
def write_mod(
    path, # output file path
    nodes:list, # list of Node objects to write
):
    "Write a module file: imports first, then constants, then cleaned exports."
    by_kind = {k: [n for n in nodes if n.kind == k] for k in Kind}
    imports = '\n'.join(n.src for n in by_kind[Kind.IMP])
    consts = '\n'.join(n.src for n in by_kind[Kind.CONST])
    # Exports get decorator lines stripped and a blank line between definitions.
    exports = '\n\n'.join(clean(n.src) for n in by_kind[Kind.EXP])
    write(path, imports, consts, exports)
26
+
27
def write_init(
    path:str|Path, # path to write __init__.py file
    meta:dict, # metadata dict with desc, version, author
    mods:list, # list of (name, nodes) tuples
):
    "Generate and write __init__.py file with metadata and exports."
    lines = [f'"""{meta["desc"]}"""', f"__version__ = '{meta['version']}'"]
    author = meta['author']
    if author:
        # Keep only the name portion of "Name <email>".
        lines.append(f"__author__ = '{author.split('<')[0].strip()}'")
    exports = []
    for name, nodes in mods:
        # NOTE(review): names from scan() have their digit prefix stripped, so
        # this '00_' guard looks unreachable -- confirm against other callers.
        if name.startswith('00_'): continue
        pub = [n.name for n in nodes if n.kind == Kind.EXP and not n.name.startswith('__')]
        if not pub: continue
        lines.append(f"from .{name} import {', '.join(pub)}")
        exports.extend(pub)
    if exports:
        entries = '\n'.join(f'    "{n}",' for n in sorted(exports))
        lines.append('__all__ = [\n' + entries + '\n]')
    write(path, '\n'.join(lines))
@@ -0,0 +1,125 @@
1
+ from m_dev.core import Kind, Param, Node
2
+ from pathlib import Path
3
+ import ast, re, tomllib
4
+
5
def inline_doc(
    ls:list[str], # source code lines
    ln:int, # line number to search (1-based)
    name:str, # identifier name to match before comment
)->str: # extracted inline comment or empty string
    "Extract the inline `# ...` comment that follows an identifier on a source line."
    if not (0 < ln <= len(ls)): return ''
    pattern = rf'\b{re.escape(name)}\b.*?#\s*(.+)'
    m = re.search(pattern, ls[ln-1])
    return m.group(1).strip() if m else ''
13
+
14
def parse_params(
    fn, # function node to extract parameters from
    ls, # source lines for inline doc extraction
)->list[Param]: # list of parameter objects
    "Extract parameters (minus self/cls) from a function node with inline documentation."
    if not hasattr(fn, 'args'): return []
    args = fn.args.args
    defaults = fn.args.defaults
    # Left-pad defaults so each positional arg lines up with its default (or None).
    padded = [None] * (len(args) - len(defaults)) + list(defaults)
    out = []
    for a, d in zip(args, padded):
        if a.arg in ('self', 'cls'): continue
        anno = ast.unparse(a.annotation) if a.annotation else None
        dflt = ast.unparse(d) if d else None
        out.append(Param(a.arg, anno, dflt, inline_doc(ls, a.lineno, a.arg)))
    return out
23
+
24
def parse_class_params(
    n:ast.ClassDef, # class node to extract params from
    ls:list, # source lines for inline doc
)->list[Param]: # list of class attribute parameters
    "Extract annotated class attributes (AnnAssign with a plain Name target) as parameters."
    out = []
    for stmt in n.body:
        if not isinstance(stmt, ast.AnnAssign): continue
        target = stmt.target
        if not isinstance(target, ast.Name): continue
        anno = ast.unparse(stmt.annotation) if stmt.annotation else None
        out.append(Param(target.id, anno, None, inline_doc(ls, stmt.lineno, target.id)))
    return out
31
+
32
+ def parse_ret(
33
+ fn, # function node to parse return annotation from
34
+ ls, # source code lines
35
+ )->tuple[str,str]|None: # tuple of (return type, inline doc) or None
36
+ "Extract return type annotation and inline documentation from function node."
37
+ if not fn.returns or isinstance(fn.returns, ast.Constant): return None
38
+ return (ast.unparse(fn.returns), inline_doc(ls, fn.returns.lineno, '->') if hasattr(fn.returns, 'lineno') else '')
39
+
40
def src_with_decs(
    n, # AST node with potential decorators
    ls, # source code lines
)->str: # source code including decorators
    "Slice out a node's source lines, starting at its first decorator when present."
    if n.decorator_list:
        first = n.decorator_list[0].lineno
    else:
        first = n.lineno
    # AST linenos are 1-based; end_lineno is inclusive, so it works as a slice stop.
    return '\n'.join(ls[first-1:n.end_lineno])
47
+
48
def is_export(
    d, # decorator node to check
)->bool: # whether decorator marks node for export
    "Check if a decorator is @app.function or @app.class_definition (bare or called)."
    target = d.func if isinstance(d, ast.Call) else d
    return ast.unparse(target) in {'app.function', 'app.class_definition'}
53
+
54
def parse_import(
    n:ast.AST, # AST node to check
    ls:list, # source lines (unused but kept for consistent interface)
)->Node|None: # Node if import statement, else None
    "Wrap an import/import-from statement in a Node; None for anything else."
    if not isinstance(n, (ast.Import, ast.ImportFrom)): return None
    return Node(Kind.IMP, '', ast.unparse(n))
60
+
61
def parse_const(
    n:ast.AST, # AST node to check
    ls:list, # source lines (unused)
)->Node|None: # Node if constant assignment, else None
    "Extract constant definition (dunder-prefixed, non-dunder-suffixed)."
    if not isinstance(n, ast.Assign): return None
    for target in n.targets:
        if not isinstance(target, ast.Name): continue
        # Names like __foo qualify; full dunders like __all__ do not.
        if target.id.startswith('__') and not target.id.endswith('__'):
            return Node(Kind.CONST, target.id, ast.unparse(n))
69
+
70
def parse_export(
    n:ast.AST, # AST node to check
    ls:list, # source lines for inline doc and decorators
)->Node|None: # Node if exported function/class, else None
    "Extract exported function or class decorated with @app.function or @app.class_definition."
    if not isinstance(n, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)): return None
    exported = any(is_export(d) for d in n.decorator_list)
    # Skip undecorated definitions and test functions.
    if not exported or n.name.startswith('test_'): return None
    doc = ast.get_docstring(n) or ''
    src = src_with_decs(n, ls)
    if isinstance(n, ast.ClassDef):
        return Node(Kind.EXP, n.name, src, doc, parse_class_params(n, ls), None)
    return Node(Kind.EXP, n.name, src, doc, parse_params(n, ls), parse_ret(n, ls))
80
+
81
def parse_node(
    n:ast.AST, # AST node to parse
    src:str, # full source code text
): # yields Node objects for imports, constants, and exports
    "Yield extractable nodes: imports/constants inside `with` blocks, plus exports anywhere."
    ls = src.splitlines()
    if isinstance(n, ast.With):
        for stmt in n.body:
            imp = parse_import(stmt, ls)
            if imp: yield imp
            const = parse_const(stmt, ls)
            if const: yield const
    exp = parse_export(n, ls)
    if exp: yield exp
92
+
93
def parse_file(
    p:str|Path, # path to Python file to parse
)->list[Node]: # list of parsed nodes from the file
    "Parse a Python file and extract all nodes."
    src = Path(p).read_text()
    nodes = []
    for stmt in ast.parse(src).body:
        nodes.extend(parse_node(stmt, src))
    return nodes
99
+
100
+ def read_meta(
101
+ root='.', # project root directory containing pyproject.toml
102
+ )->dict: # metadata dict with name, version, desc, license, author
103
+ "Read project metadata from pyproject.toml."
104
+ with open(Path(root)/'pyproject.toml', 'rb') as f: p = tomllib.load(f).get('project', {})
105
+ a = (p.get('authors') or [{}])[0]
106
+ author = f"{a.get('name','')} <{a.get('email','')}>".strip(' <>') if isinstance(a, dict) else str(a)
107
+ lic = p.get('license', {})
108
+ return dict(name=p.get('name',''), version=p.get('version','0.0.0'), desc=p.get('description',''), license=lic.get('text','') if isinstance(lic, dict) else lic, author=author)
109
+
110
+ def nb_name(
111
+ f:Path, # file path to extract notebook name from
112
+ )->str|None: # cleaned notebook name or None if should be skipped
113
+ "Extract notebook name from file path, skipping hidden, test, and XX_ prefixed files."
114
+ if f.name.startswith('.') or f.stem.startswith('XX_'): return None
115
+ name = re.sub(r'^\d+_', '', f.stem)
116
+ return None if name.startswith('test') else name
117
+
118
def scan(
    nbs='notebooks', # directory containing notebook .py files
    root='.', # root directory containing pyproject.toml
): # tuple of (meta dict, list of (name, nodes) tuples)
    "Scan notebooks directory and extract metadata and module definitions."
    meta = read_meta(root)
    mods = []
    # Sorted glob preserves the numeric notebook ordering (00_, 01_, ...).
    for f in sorted(Path(nbs).glob('*.py')):
        name = nb_name(f)
        if name: mods.append((name, parse_file(f)))
    return meta, mods