ai-docify-1.1.2.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 John Spencer
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1,201 @@
+ Metadata-Version: 2.4
+ Name: ai-docify
+ Version: 1.1.2
+ Summary: A CLI tool for generating safe, NumPy/Sphinx style docstrings using AST parsing.
+ Author-email: John Spencer <johnspencer97@live.co.uk>
+ License: MIT
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Classifier: Topic :: Software Development :: Documentation
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: click
+ Requires-Dist: openai
+ Requires-Dist: python-dotenv
+ Requires-Dist: rich
+ Requires-Dist: tiktoken
+ Requires-Dist: astunparse
+ Provides-Extra: dev
+ Requires-Dist: pytest; extra == "dev"
+ Requires-Dist: black; extra == "dev"
+ Requires-Dist: flake8; extra == "dev"
+ Requires-Dist: pytest-cov; extra == "dev"
+ Requires-Dist: build; extra == "dev"
+ Requires-Dist: twine; extra == "dev"
+ Dynamic: license-file
+
+ [![PyPI version](https://badge.fury.io/py/ai-docify.svg)](https://badge.fury.io/py/ai-docify)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
+ # ai-docify 🤖
+
+ **A simple, secure, and cost-aware CLI tool for generating high-quality NumPy/Sphinx style docstrings using AI.**
+
+ `ai-docify` helps you document your Python code instantly using either cloud-based models (OpenAI) or local privacy-focused models (Ollama). It is designed to be **safe** (using AST parsing), **transparent** (pre-flight cost checks), and **non-destructive**.
+
+ ---
+
+ ## ✨ Key Features
+
+ * **💰 Cost-Aware Design**: Calculates and displays the **estimated input token cost** via `tiktoken` *before* you spend a penny.
+ * **🔒 Privacy-First**: Switch seamlessly between **OpenAI** (Cloud) and **Ollama** (Local) with a single flag. Keep proprietary code on your machine when needed.
+ * **🛡️ AST-Powered Safety**: Unlike other tools that "guess" where to put text, `ai-docify` parses your code's Abstract Syntax Tree to surgically inject docstrings without breaking indentation or logic.
+ * **↩️ The "Undo" Button**: Includes a `strip` command to safely remove all docstrings if you change your mind.
+ * **✌️ Dual Generation Modes**: Choose between `rewrite` for speed/coverage, or `inject` for surgical precision.
+ * **🎯 Granular Control**: Document an entire file, or target a single function or class by name.
+ * **⚡ "Lean" Architecture**: Optimized prompt engineering ensures high-quality documentation without wasting tokens on conversational fluff.
+
+ ---
+
+ ## 🚀 Installation
+
+ Install `ai-docify` directly from PyPI:
+
+ ```bash
+ pip install ai-docify
+
+ ```
+
+ ### Prerequisites
+
+ * **Python 3.8+**
+ * **[Ollama](https://ollama.com/)** (Optional, required only if using local models)
+
+ ### Setup (OpenAI Only)
+
+ If you plan to use OpenAI models, set your API key as an environment variable:
+
+ **Mac/Linux:**
+
+ ```bash
+ export OPENAI_API_KEY=sk-your-api-key-here
+
+ ```
+
+ **Windows (PowerShell):**
+
+ ```powershell
+ $env:OPENAI_API_KEY="sk-your-api-key-here"
+
+ ```
+
+ *(Alternatively, you can create a `.env` file in your project root)*
+
+ ---
+
+ ## 💻 VS Code Extension
+
+ Prefer a GUI? This CLI powers the **AI Docify for VS Code** extension.
+ 👉 **[Download AI Docify for VS Code](https://github.com/sunman97-ui/ai-docify-vscode)**
+
+ ---
+
+ ## 📖 Usage
+
+ ### 1. Generating Documentation
+
+ You must specify the **Provider**, **Model**, and **Mode**.
+
+ #### `inject` Mode (Recommended)
+
+ *Best for: Surgical precision and preserving formatting.*
+ Uses AST parsing to find functions and classes, injecting docstrings exactly where they belong without touching a single line of your actual code.
+
+ ```bash
+ ai-docify generate my_script.py --provider openai --model gpt-5-mini --mode inject
+
+ ```
+
+ #### `rewrite` Mode
+
+ *Best for: Speed and heavy refactoring.*
+ Asks the AI to rewrite the file with docs included. Good for initial drafts or undocumented legacy files.
+
+ ```bash
+ # Using a local model (Free)
+ ai-docify generate my_script.py --provider ollama --model llama3 --mode rewrite
+
+ ```
+
+ ### 2. Targeting a Single Function
+
+ *Best for: Quickly documenting a specific piece of code you're working on.*
+
+ Use the `--function` flag to target a single function or class by name. This is faster, cheaper, and automatically uses the safe `inject` mode.
+
+ ```bash
+ # Target only the 'calculate_total' function in the file
+ ai-docify generate utils.py --provider openai --model gpt-5-mini --function calculate_total
+ ```
+
+ ### 3. The Safety Check 🛡️
+
+ Before generating anything, the tool will pause and show you an estimate of the input token count and cost:
+
+ ```text
+ 🤖 ai-docify: Checking my_script.py in INJECT mode
+
+ 📊 Estimation (Input Only):
+ Tokens: 350
+ Est. Cost: $0.00009
+
+ Do you want to proceed? [y/n]:
+
+ ```
+
+ ### 4. Stripping Docstrings (Undo) ↩️
+
+ Need to start over? The `strip` command uses AST parsing to cleanly remove all docstrings from a file, leaving your logic intact. It saves the clean version to a `stripped_scripts/` folder by default.
+
+ ```bash
+ ai-docify strip my_script.py
+
+ ```
+
+ ### 5. Cleaning Output
+
+ To remove all generated files from the default `ai_output/` directory:
+
+ ```bash
+ ai-docify clean
+
+ ```
+
+ ---
+
+ ## ⚙️ Supported Models
+
+ `ai-docify` comes pre-configured with pricing and token limits for popular models.
+
+ **OpenAI:**
+
+ * `o3-2025-04-16`
+ * `gpt-5` / `gpt-5-mini` / `gpt-5-nano`
+ * `gpt-5.2`
+
+ **Ollama (Local):**
+
+ * `llama3.1:8b`
+ * `qwen2.5-coder:7b`
+ * (Any model pulled via `ollama pull` works with the `--provider ollama` flag)
+
+ *Missing a model? Feel free to open an Issue or Pull Request to update the internal pricing configuration!*
+
+ ---
+
+ ## 🤝 Contributing
+
+ We welcome contributions, whether it's a bug fix, a new feature, or a better prompt template:
+
+ 1. Fork the Project
+ 2. Create your Feature Branch (`git checkout -b feature/NewModel`)
+ 3. Commit your Changes (`git commit -m 'Add GPT-6 support'`)
+ 4. Push to the Branch (`git push origin feature/NewModel`)
+ 5. Open a Pull Request
+
+ ---
+
+ ## 📄 License
+
+ Distributed under the MIT License. See `LICENSE` for more information.
@@ -0,0 +1,173 @@
+ [![PyPI version](https://badge.fury.io/py/ai-docify.svg)](https://badge.fury.io/py/ai-docify)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
+ # ai-docify 🤖
+
+ **A simple, secure, and cost-aware CLI tool for generating high-quality NumPy/Sphinx style docstrings using AI.**
+
+ `ai-docify` helps you document your Python code instantly using either cloud-based models (OpenAI) or local privacy-focused models (Ollama). It is designed to be **safe** (using AST parsing), **transparent** (pre-flight cost checks), and **non-destructive**.
+
+ ---
+
+ ## ✨ Key Features
+
+ * **💰 Cost-Aware Design**: Calculates and displays the **estimated input token cost** via `tiktoken` *before* you spend a penny.
+ * **🔒 Privacy-First**: Switch seamlessly between **OpenAI** (Cloud) and **Ollama** (Local) with a single flag. Keep proprietary code on your machine when needed.
+ * **🛡️ AST-Powered Safety**: Unlike other tools that "guess" where to put text, `ai-docify` parses your code's Abstract Syntax Tree to surgically inject docstrings without breaking indentation or logic.
+ * **↩️ The "Undo" Button**: Includes a `strip` command to safely remove all docstrings if you change your mind.
+ * **✌️ Dual Generation Modes**: Choose between `rewrite` for speed/coverage, or `inject` for surgical precision.
+ * **🎯 Granular Control**: Document an entire file, or target a single function or class by name.
+ * **⚡ "Lean" Architecture**: Optimized prompt engineering ensures high-quality documentation without wasting tokens on conversational fluff.
+
+ ---
+
+ ## 🚀 Installation
+
+ Install `ai-docify` directly from PyPI:
+
+ ```bash
+ pip install ai-docify
+
+ ```
+
+ ### Prerequisites
+
+ * **Python 3.8+**
+ * **[Ollama](https://ollama.com/)** (Optional, required only if using local models)
+
+ ### Setup (OpenAI Only)
+
+ If you plan to use OpenAI models, set your API key as an environment variable:
+
+ **Mac/Linux:**
+
+ ```bash
+ export OPENAI_API_KEY=sk-your-api-key-here
+
+ ```
+
+ **Windows (PowerShell):**
+
+ ```powershell
+ $env:OPENAI_API_KEY="sk-your-api-key-here"
+
+ ```
+
+ *(Alternatively, you can create a `.env` file in your project root)*
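+
+ If you go the `.env` route, the key is resolved the same way. Here is a minimal sketch of that lookup, assuming the tool simply calls `python-dotenv` (a declared dependency) before reading the environment:
+
+ ```python
+ import os
+
+ from dotenv import load_dotenv
+
+ load_dotenv()  # reads a .env file from the current directory, if one exists
+ api_key = os.getenv("OPENAI_API_KEY")
+ if not api_key:
+     raise SystemExit("OPENAI_API_KEY is not set (export it or add it to a .env file).")
+ ```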
+
+ ---
+
+ ## 💻 VS Code Extension
+
+ Prefer a GUI? This CLI powers the **AI Docify for VS Code** extension.
+ 👉 **[Download AI Docify for VS Code](https://github.com/sunman97-ui/ai-docify-vscode)**
+
+ ---
+
+ ## 📖 Usage
+
+ ### 1. Generating Documentation
+
+ You must specify the **Provider**, **Model**, and **Mode**.
+
+ #### `inject` Mode (Recommended)
+
+ *Best for: Surgical precision and preserving formatting.*
+ Uses AST parsing to find functions and classes, injecting docstrings exactly where they belong without touching a single line of your actual code.
+
+ ```bash
+ ai-docify generate my_script.py --provider openai --model gpt-5-mini --mode inject
+
+ ```
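+
+ To make the AST claim concrete, here is a minimal sketch of the kind of splice `inject` mode describes: parse the file, find the target node, and insert a docstring line just above the first statement of its body, leaving every existing line untouched. This is an illustration, not the packaged implementation; `inject_docstring` and the hard-coded docstring text are invented for the example.
+
+ ```python
+ import ast
+
+ def inject_docstring(source: str, target_name: str, docstring: str) -> str:
+     """Splice a docstring into the named function/class without touching other lines."""
+     lines = source.splitlines(keepends=True)
+     tree = ast.parse(source)
+     for node in ast.walk(tree):
+         if (isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef))
+                 and node.name == target_name
+                 and ast.get_docstring(node) is None):
+             first = node.body[0]                     # first statement of the body
+             indent = " " * first.col_offset          # match the body's indentation
+             lines.insert(first.lineno - 1, f'{indent}"""{docstring}"""\n')
+             break
+     return "".join(lines)
+
+ print(inject_docstring("def add(a, b):\n    return a + b\n", "add", "Add two numbers."))
+ ```
+
+ Because the parser reports exact line and column positions, this style of edit cannot corrupt indentation the way plain-text insertion can.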
+
+ #### `rewrite` Mode
+
+ *Best for: Speed and heavy refactoring.*
+ Asks the AI to rewrite the file with docs included. Good for initial drafts or undocumented legacy files.
+
+ ```bash
+ # Using a local model (Free)
+ ai-docify generate my_script.py --provider ollama --model llama3 --mode rewrite
+
+ ```
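+
+ A note on how one flag can cover both back ends: Ollama serves an OpenAI-compatible API on `localhost:11434`, so a plausible wiring (not necessarily ai-docify's actual internals) is to reuse the same `openai` client and only swap the base URL:
+
+ ```python
+ from openai import OpenAI
+
+ def make_client(provider: str) -> OpenAI:
+     # Hypothetical provider switch, for illustration only.
+     if provider == "ollama":
+         # Ollama's OpenAI-compatible endpoint; a key is required by the client but ignored locally.
+         return OpenAI(base_url="http://localhost:11434/v1", api_key="ollama")
+     return OpenAI()  # uses OPENAI_API_KEY from the environment
+
+ client = make_client("ollama")
+ reply = client.chat.completions.create(
+     model="llama3",
+     messages=[{"role": "user", "content": "Write a NumPy-style docstring for: def add(a, b): return a + b"}],
+ )
+ print(reply.choices[0].message.content)
+ ```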
+
+ ### 2. Targeting a Single Function
+
+ *Best for: Quickly documenting a specific piece of code you're working on.*
+
+ Use the `--function` flag to target a single function or class by name. This is faster, cheaper, and automatically uses the safe `inject` mode.
+
+ ```bash
+ # Target only the 'calculate_total' function in the file
+ ai-docify generate utils.py --provider openai --model gpt-5-mini --function calculate_total
+ ```
+
+ ### 3. The Safety Check 🛡️
+
+ Before generating anything, the tool will pause and show you an estimate of the input token count and cost:
+
+ ```text
+ 🤖 ai-docify: Checking my_script.py in INJECT mode
+
+ 📊 Estimation (Input Only):
+ Tokens: 350
+ Est. Cost: $0.00009
+
+ Do you want to proceed? [y/n]:
+
+ ```
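+
+ The numbers above are straightforward to reproduce by hand: count the input tokens with `tiktoken` and multiply by the model's input price. A rough sketch (the per-token price below is a placeholder, not the tool's bundled pricing):
+
+ ```python
+ import tiktoken
+
+ source = open("my_script.py").read()
+
+ enc = tiktoken.get_encoding("cl100k_base")   # a widely used OpenAI encoding
+ tokens = len(enc.encode(source))
+
+ price_per_million_input_tokens = 0.25        # placeholder USD figure, for illustration
+ est_cost = tokens / 1_000_000 * price_per_million_input_tokens
+
+ print(f"Tokens: {tokens}")
+ print(f"Est. Cost: ${est_cost:.5f}")
+ ```
+
+ Note that this covers input tokens only, matching the "Estimation (Input Only)" label in the prompt above.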
+
+ ### 4. Stripping Docstrings (Undo) ↩️
+
+ Need to start over? The `strip` command uses AST parsing to cleanly remove all docstrings from a file, leaving your logic intact. It saves the clean version to a `stripped_scripts/` folder by default.
+
+ ```bash
+ ai-docify strip my_script.py
+
+ ```
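+
+ Conceptually, stripping is the inverse operation: walk the tree and drop any leading string-literal statement from a module, class, or function body. A compact sketch (illustrative, not the shipped code; it re-renders the file with `ast.unparse`, whereas the shipped command writes its result to `stripped_scripts/`):
+
+ ```python
+ import ast
+
+ def strip_docstrings(source: str) -> str:
+     tree = ast.parse(source)
+     for node in ast.walk(tree):
+         if isinstance(node, (ast.Module, ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)):
+             body = node.body
+             if (body and isinstance(body[0], ast.Expr)
+                     and isinstance(body[0].value, ast.Constant)
+                     and isinstance(body[0].value.value, str)):
+                 body.pop(0)                  # remove the docstring expression
+                 if not body:
+                     body.append(ast.Pass())  # keep the block syntactically valid
+     return ast.unparse(tree)                 # Python 3.9+; astunparse covers older versions
+ ```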
+
+ ### 5. Cleaning Output
+
+ To remove all generated files from the default `ai_output/` directory:
+
+ ```bash
+ ai-docify clean
+
+ ```
+
+ ---
+
+ ## ⚙️ Supported Models
+
+ `ai-docify` comes pre-configured with pricing and token limits for popular models.
+
+ **OpenAI:**
+
+ * `o3-2025-04-16`
+ * `gpt-5` / `gpt-5-mini` / `gpt-5-nano`
+ * `gpt-5.2`
+
+ **Ollama (Local):**
+
+ * `llama3.1:8b`
+ * `qwen2.5-coder:7b`
+ * (Any model pulled via `ollama pull` works with the `--provider ollama` flag)
+
+ *Missing a model? Feel free to open an Issue or Pull Request to update the internal pricing configuration!*
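+
+ The schema of that configuration isn't documented in this README, but such a contribution presumably amounts to adding one entry per model. A purely hypothetical sketch of what an entry could look like (field names and numbers are invented for illustration):
+
+ ```python
+ # Invented example; not the actual keys or prices in ai-docify's bundled pricing.json.
+ MODEL_PRICING = {
+     "gpt-5-mini": {"input_usd_per_million_tokens": 0.25, "max_input_tokens": 128_000},
+     "llama3":     {"input_usd_per_million_tokens": 0.0,  "max_input_tokens": 8_192},  # local models are free
+ }
+
+ def estimate_cost(model: str, input_tokens: int) -> float:
+     entry = MODEL_PRICING[model]
+     return input_tokens / 1_000_000 * entry["input_usd_per_million_tokens"]
+ ```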
+
+ ---
+
+ ## 🤝 Contributing
+
+ We welcome contributions, whether it's a bug fix, a new feature, or a better prompt template:
+
+ 1. Fork the Project
+ 2. Create your Feature Branch (`git checkout -b feature/NewModel`)
+ 3. Commit your Changes (`git commit -m 'Add GPT-6 support'`)
+ 4. Push to the Branch (`git push origin feature/NewModel`)
+ 5. Open a Pull Request
+
+ ---
+
+ ## 📄 License
+
+ Distributed under the MIT License. See `LICENSE` for more information.
@@ -0,0 +1,65 @@
+ [build-system]
+ requires = ["setuptools>=61.0", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "ai-docify"
+ version = "1.1.2"
+ description = "A CLI tool for generating safe, NumPy/Sphinx style docstrings using AST parsing."
+ readme = "README.md"
+ authors = [
+     { name = "John Spencer", email = "johnspencer97@live.co.uk" },
+ ]
+ license = { text = "MIT" }
+ classifiers = [
+     "Programming Language :: Python :: 3",
+     "License :: OSI Approved :: MIT License",
+     "Operating System :: OS Independent",
+     "Topic :: Software Development :: Documentation",
+ ]
+ requires-python = ">=3.8"
+ dependencies = [
+     "click",
+     "openai",
+     "python-dotenv",
+     "rich",
+     "tiktoken",
+     "astunparse",
+ ]
+
+ [project.optional-dependencies]
+ dev = [
+     "pytest",
+     "black",
+     "flake8",
+     "pytest-cov",
+     "build",  # Added 'build' so you can run 'python -m build' easily
+     "twine",  # Added 'twine' for uploading to PyPI
+ ]
+
+ [project.scripts]
+ ai-docify = "ai_docify.cli:main"
+
+ [tool.setuptools.packages.find]
+ where = ["src"]
+
+ [tool.setuptools.package-data]
+ ai_docify = ["templates/*.json", "pricing.json"]
+
+ [tool.black]
+ line-length = 88
+ target-version = ['py38']
+ include = '\.pyi?$'
+ exclude = '''
+ /(
+     \.git
+   | \.hg
+   | \.mypy_cache
+   | \.tox
+   | \.venv
+   | _build
+   | buck-out
+   | build
+   | dist
+ )/
+ '''
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
@@ -0,0 +1,4 @@
+ __version__ = "1.1.2"  # keep in sync with the version in pyproject.toml
+ from .cli import main  # Expose main CLI entry point
+
+ __all__ = ["main"]
@@ -0,0 +1,4 @@
+ from .cli import main
+
+ if __name__ == "__main__":
+     main()