wikigen-1.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wikigen/__init__.py +7 -0
- wikigen/cli.py +690 -0
- wikigen/config.py +526 -0
- wikigen/defaults.py +78 -0
- wikigen/flows/__init__.py +1 -0
- wikigen/flows/flow.py +38 -0
- wikigen/formatter/help_formatter.py +194 -0
- wikigen/formatter/init_formatter.py +56 -0
- wikigen/formatter/output_formatter.py +290 -0
- wikigen/mcp/__init__.py +12 -0
- wikigen/mcp/chunking.py +127 -0
- wikigen/mcp/embeddings.py +69 -0
- wikigen/mcp/output_resources.py +65 -0
- wikigen/mcp/search_index.py +826 -0
- wikigen/mcp/server.py +232 -0
- wikigen/mcp/vector_index.py +297 -0
- wikigen/metadata/__init__.py +35 -0
- wikigen/metadata/logo.py +28 -0
- wikigen/metadata/project.py +28 -0
- wikigen/metadata/version.py +17 -0
- wikigen/nodes/__init__.py +1 -0
- wikigen/nodes/nodes.py +1080 -0
- wikigen/utils/__init__.py +0 -0
- wikigen/utils/adjust_headings.py +72 -0
- wikigen/utils/call_llm.py +271 -0
- wikigen/utils/crawl_github_files.py +450 -0
- wikigen/utils/crawl_local_files.py +151 -0
- wikigen/utils/llm_providers.py +101 -0
- wikigen/utils/version_check.py +84 -0
- wikigen-1.0.0.dist-info/METADATA +352 -0
- wikigen-1.0.0.dist-info/RECORD +35 -0
- wikigen-1.0.0.dist-info/WHEEL +5 -0
- wikigen-1.0.0.dist-info/entry_points.txt +2 -0
- wikigen-1.0.0.dist-info/licenses/LICENSE +21 -0
- wikigen-1.0.0.dist-info/top_level.txt +1 -0

wikigen/utils/llm_providers.py
@@ -0,0 +1,101 @@
"""
LLM Provider Registry

Defines supported LLM providers, their recommended models, and configuration details.
"""

LLM_PROVIDERS = {
    "gemini": {
        "display_name": "Google Gemini",
        "recommended_models": [
            "gemini-2.5-pro",
            "gemini-2.5-flash",
            "gemini-1.5-pro",
            "gemini-1.5-flash",
        ],
        "api_key_env": "GEMINI_API_KEY",
        "keyring_key": "gemini_api_key",
        "requires_api_key": True,
    },
    "openai": {
        "display_name": "OpenAI",
        "recommended_models": [
            "gpt-4o-mini",
            "gpt-4.1-mini",
            "gpt-5-mini",
            "gpt-5-nano",
        ],
        "api_key_env": "OPENAI_API_KEY",
        "keyring_key": "openai_api_key",
        "requires_api_key": True,
    },
    "anthropic": {
        "display_name": "Anthropic Claude",
        "recommended_models": [
            "claude-3-5-sonnet-20241022",
            "claude-3-5-haiku-20241022",
            "claude-3-7-sonnet-20250219",
            "claude-3-opus-20240229",
        ],
        "api_key_env": "ANTHROPIC_API_KEY",
        "keyring_key": "anthropic_api_key",
        "requires_api_key": True,
    },
    "openrouter": {
        "display_name": "OpenRouter",
        "recommended_models": [
            "google/gemini-2.5-flash:free",
            "meta-llama/llama-3.1-8b-instruct:free",
            "openai/gpt-4o-mini",
            "anthropic/claude-3.5-sonnet",
        ],
        "api_key_env": "OPENROUTER_API_KEY",
        "keyring_key": "openrouter_api_key",
        "requires_api_key": True,
    },
    "ollama": {
        "display_name": "Ollama (Local)",
        "recommended_models": [
            "llama3.2",
            "llama3.1",
            "mistral",
            "codellama",
            "phi3",
        ],
        "api_key_env": None,
        "keyring_key": None,
        "requires_api_key": False,
        "base_url": "http://localhost:11434",
        "base_url_env": "OLLAMA_BASE_URL",
    },
}


def get_provider_info(provider_id: str) -> dict:
    """Get provider information by provider ID."""
    if provider_id not in LLM_PROVIDERS:
        raise ValueError(f"Unknown provider: {provider_id}")
    return LLM_PROVIDERS[provider_id]


def get_recommended_models(provider_id: str) -> list:
    """Get recommended models for a provider."""
    provider_info = get_provider_info(provider_id)
    return provider_info.get("recommended_models", [])


def get_provider_list() -> list:
    """Get list of all provider IDs."""
    return list(LLM_PROVIDERS.keys())


def get_display_name(provider_id: str) -> str:
    """Get display name for a provider."""
    provider_info = get_provider_info(provider_id)
    return provider_info["display_name"]


def requires_api_key(provider_id: str) -> bool:
    """Check if provider requires an API key."""
    provider_info = get_provider_info(provider_id)
    return provider_info.get("requires_api_key", True)
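
The registry above is consumed through its helper functions. A minimal usage sketch, illustrative only; it assumes the package is installed so that `wikigen.utils.llm_providers` is importable:

```python
# Illustrative only: exercises the helpers defined in llm_providers.py above.
from wikigen.utils.llm_providers import (
    get_display_name,
    get_provider_list,
    get_recommended_models,
    requires_api_key,
)

for provider_id in get_provider_list():
    models = ", ".join(get_recommended_models(provider_id))
    key_note = "API key required" if requires_api_key(provider_id) else "no API key needed"
    print(f"{get_display_name(provider_id)} ({key_note}): {models}")
```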

wikigen/utils/version_check.py
@@ -0,0 +1,84 @@
"""
Version checking utilities for WikiGen CLI.
Queries PyPI API to check for available updates.
"""

import requests
from typing import Optional


def fetch_latest_version(
    package_name: str = "wikigen", timeout: float = 5.0
) -> Optional[str]:
    """
    Fetch the latest version from PyPI API.

    Args:
        package_name: Name of the package on PyPI
        timeout: Request timeout in seconds

    Returns:
        Latest version string if successful, None otherwise
    """
    try:
        url = f"https://pypi.org/pypi/{package_name}/json"
        response = requests.get(url, timeout=timeout)
        response.raise_for_status()

        data = response.json()
        return data.get("info", {}).get("version")
    except (requests.RequestException, KeyError, ValueError, Exception):
        # Silently fail - don't interrupt user workflow
        # Catch all exceptions to ensure CLI never breaks due to update check
        return None


def compare_versions(current_version: str, latest_version: str) -> bool:
    """
    Compare two version strings to determine if latest is newer.

    Uses simple semantic version comparison (e.g., "0.1.5" vs "0.1.6").

    Args:
        current_version: Currently installed version
        latest_version: Latest available version from PyPI

    Returns:
        True if latest_version is newer than current_version
    """
    try:
        # Simple tuple comparison works for semantic versions
        current_parts = tuple(map(int, current_version.split(".")))
        latest_parts = tuple(map(int, latest_version.split(".")))

        # Pad shorter version with zeros for comparison
        max_len = max(len(current_parts), len(latest_parts))
        current_parts = current_parts + (0,) * (max_len - len(current_parts))
        latest_parts = latest_parts + (0,) * (max_len - len(latest_parts))

        return latest_parts > current_parts
    except (ValueError, AttributeError):
        # If version format is unexpected, fall back to string comparison
        return latest_version > current_version


def check_for_update(current_version: str, timeout: float = 5.0) -> Optional[str]:
    """
    Check if a newer version is available on PyPI.

    Args:
        current_version: Currently installed version
        timeout: Request timeout in seconds

    Returns:
        Latest version string if update is available, None otherwise
    """
    latest_version = fetch_latest_version(timeout=timeout)

    if not latest_version:
        return None

    if compare_versions(current_version, latest_version):
        return latest_version

    return None
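
A short illustrative sketch of how these helpers compose; the version strings are made-up examples and the import path assumes an installed `wikigen` package:

```python
# Illustrative only: uses the helpers defined in version_check.py above.
from wikigen.utils.version_check import check_for_update, compare_versions

# Pure comparison, no network: "1.0.1" is newer than "1.0.0"; "1.0" pads to (1, 0, 0).
assert compare_versions("1.0.0", "1.0.1") is True
assert compare_versions("1.0", "1.0.0") is False

# Network call to PyPI: returns the newer version string, or None when already
# up to date or when the request fails.
newer = check_for_update("1.0.0", timeout=5.0)
if newer:
    print(f"A newer wikigen release is available: {newer}")
```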

wikigen-1.0.0.dist-info/METADATA
@@ -0,0 +1,352 @@
Metadata-Version: 2.4
Name: wikigen
Version: 1.0.0
Summary: Wiki's for nerds, by nerds
Author: Mithun Ramesh
License-Expression: MIT
Project-URL: Homepage, https://github.com/usesalt/wikigen
Project-URL: Repository, https://github.com/usesalt/wikigen
Project-URL: Issues, https://github.com/usesalt/wikigen/issues
Classifier: Development Status :: 3 - Alpha
Classifier: Intended Audience :: Developers
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3.14
Classifier: Topic :: Software Development :: Documentation
Classifier: Topic :: Text Processing :: Markup
Requires-Python: >=3.12
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: pocketflow>=0.0.1
Requires-Dist: pyyaml>=6.0
Requires-Dist: requests>=2.28.0
Requires-Dist: gitpython>=3.1.0
Requires-Dist: google-genai>=1.9.0
Requires-Dist: pathspec>=0.11.0
Requires-Dist: keyring>=24.0.0
Requires-Dist: mcp>=1.19.0
Requires-Dist: faiss-cpu>=1.7.4
Requires-Dist: sentence-transformers>=2.2.0
Requires-Dist: numpy>=1.24.0
Dynamic: license-file
Dynamic: requires-python



## WIKIGEN

[](https://pypi.org/project/wikigen/) [](https://www.python.org/) [](https://pypi.org/project/wikigen/) [](LICENSE) [](https://github.com/usesalt/wikigen)

**WikiGen** (previously named "salt-docs") is a compact, human-readable documentation generator for codebases that minimizes tokens and makes structure easy for models to follow.
It's intended for **LLM input** as a drop-in, lossless representation of your existing codebase.

## Installation

### Option 1: Install from PyPI
```bash
pip install wikigen
```

### Option 2: Install from source
```bash
git clone https://github.com/usesalt/wikigen.git
cd wikigen
pip install -e .
```

## Quick Start

### 1. Initial Setup
Run the setup wizard to configure your API keys and preferences:

```bash
wikigen init
```

### 2. Generate Documentation

#### Analyze a GitHub repository
```bash
wikigen run https://github.com/username/repo
```

#### Analyze a local directory
```bash
wikigen run /path/to/your/codebase
```

#### With custom options
```bash
wikigen run https://github.com/username/repo --output /custom/path --language spanish --max-abstractions 10
```

## Configuration

WikiGen stores configuration in a per-user config file and uses your system's keyring for secure API key storage.

- macOS/Linux: `~/.config/wikigen/config.json` (or `$XDG_CONFIG_HOME/wikigen/config.json`)
- Windows: `%APPDATA%\wikigen\config.json`

### Configuration Options
- `llm_provider`: LLM provider to use (gemini, openai, anthropic, openrouter, ollama) - default: gemini
- `llm_model`: Model name to use (e.g., "gemini-2.5-flash", "gpt-4o-mini", "claude-3-5-sonnet-20241022") - default: gemini-2.5-flash
- `output_dir`: Default output directory
- `language`: Default language for generated docs
- `max_abstractions`: Default number of abstractions to identify
- `max_file_size`: Maximum file size in bytes
- `use_cache`: Enable/disable LLM response caching
- `include_patterns`: Default file patterns to include
- `exclude_patterns`: Default file patterns to exclude
- `ollama_base_url`: Custom Ollama base URL (optional, default: http://localhost:11434); a sample `config.json` using these keys is shown below
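
For illustration only, a `config.json` assembled from the keys above might look roughly like this. The provider, model, language, abstraction count, and Ollama URL mirror defaults mentioned elsewhere in this README; the `output_dir`, `max_file_size`, and pattern values are made-up examples, and the exact on-disk schema is not documented here:

```json
{
  "llm_provider": "gemini",
  "llm_model": "gemini-2.5-flash",
  "output_dir": "docs/",
  "language": "english",
  "max_abstractions": 10,
  "max_file_size": 100000,
  "use_cache": true,
  "include_patterns": ["*.py", "*.js"],
  "exclude_patterns": ["tests/*", "docs/*"],
  "ollama_base_url": "http://localhost:11434"
}
```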

### Managing Configuration

#### View Current Configuration
```bash
wikigen config show
```

#### Update API Keys
```bash
# Update API key for any provider (interactive)
wikigen config update-api-key gemini
wikigen config update-api-key openai
wikigen config update-api-key anthropic
wikigen config update-api-key openrouter

# Legacy command (still works, redirects to update-api-key)
wikigen config update-gemini-key

# Update GitHub token (interactive)
wikigen config update-github-token

# Update GitHub token directly
wikigen config update-github-token "your-token-here"
```

#### Update Other Settings
```bash
# Change LLM provider
wikigen config set llm-provider openai

# Change LLM model
wikigen config set llm-model gpt-4o-mini

# Change default language
wikigen config set language spanish

# Change max abstractions
wikigen config set max_abstractions 15

# Disable caching
wikigen config set use_cache false

# Update output directory
wikigen config set output_dir /custom/path
```

---
## CI/CD Integration

WikiGen can automatically generate and update documentation in your CI/CD pipeline. Perfect for keeping docs in sync with code changes!

### Quick Setup for GitHub Actions

1. **Add a workflow file** at `.github/workflows/wikigen.yml`:

```yaml
name: WikiGen

on:
  push:
    branches: [main]

jobs:
  docs:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.12'
      - run: pip install wikigen
      - run: wikigen run . --ci --output-path docs/
        env:
          GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
      - uses: peter-evans/create-pull-request@v6
        with:
          commit-message: 'docs: updated documentation for new changes'
          branch: wikigen/update-${{ github.run_number }}
          title: 'Update Documentation'
```

2. **Add your LLM API key** to GitHub Secrets:
   - Go to **Settings** → **Secrets and variables** → **Actions**
   - Add `GEMINI_API_KEY` (or `OPENAI_API_KEY`, `ANTHROPIC_API_KEY`, etc.)

3. **Push to main** - Documentation will be automatically generated and a PR will be created!

### CI-Specific Flags

- `--ci` - Enable CI mode (non-interactive, better error messages)
- `--output-path <path>` - Custom output directory (e.g., `docs/`, `documentation/`)
- `--update` - Merge with existing docs instead of overwriting
- `--check-changes` - Exit with code 1 if docs changed, 0 if unchanged (see the sketch below)
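
As a sketch, `--check-changes` could gate a pipeline step on documentation drift. The snippet below is illustrative rather than an official recipe: it assumes the exit-code behaviour documented in the list above and that the flag composes with `--ci` and `--output-path`:

```bash
# Illustrative drift gate; assumes --check-changes exits 1 when docs would change
# and 0 when they are already up to date, as documented above.
if ! wikigen run . --ci --check-changes --output-path docs/; then
  echo "Documentation is out of date. Regenerate it with: wikigen run . --output-path docs/"
  exit 1
fi
echo "Documentation is up to date."
```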

### Learn More

See the complete [CI/CD Integration Guide](docs/ci-cd-integration.md) for:
- Advanced configuration options
- Multiple LLM provider setup
- Troubleshooting tips
- Best practices
- Future integrations (Confluence, Notion, etc.)

---
## MCP Server Setup

WikiGen includes an MCP (Model Context Protocol) server that exposes your generated documentation to AI assistants in IDEs like Cursor, Continue.dev, and Claude Desktop.

### MCP Tools Available

The MCP server provides these tools:
- `list_docs` - List all available documentation files
- `get_docs` - Fetch the full content of a documentation file (by resource name or absolute path)
- `search_docs` - Full-text search across documentation (paths, names, and resource names)
- `index_directories` - Index directories for fast searching

### Setup Instructions

#### Cursor

1. Open or create your MCP configuration file:
   - **macOS/Linux**: `~/.cursor/mcp.json`
   - **Windows**: `%APPDATA%\Cursor\mcp.json`

2. Add the wikigen server configuration:

```json
{
  "mcpServers": {
    "wikigen": {
      "command": "wikigen",
      "args": ["mcp"]
    }
  }
}
```

3. Restart Cursor to load the MCP server.

4. The AI assistant in Cursor can now access your documentation with prompts like:
   - "What documentation do we have?"
   - "Get me the documentation for the 'WIKIGEN' project"
   - "Read the README documentation"

#### Claude Desktop

1. Open or create your Claude configuration file:
   - **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`
   - **Windows**: `%APPDATA%\Claude\claude_desktop_config.json`
   - **Linux**: `~/.config/Claude/claude_desktop_config.json`

2. Add the wikigen server configuration:

```json
{
  "mcpServers": {
    "wikigen": {
      "command": "wikigen",
      "args": ["mcp"]
    }
  }
}
```

3. Restart Claude Desktop to load the MCP server.

#### Troubleshooting

- **Command not found**: Make sure `wikigen` is in your PATH. You can verify by running `wikigen --version` in your terminal.
- **Server not starting**: Ensure you've run `wikigen init` and have generated at least one documentation project.
- **No docs found**: The MCP server discovers docs from your configured `output_dir`. Run `wikigen config show` to check your output directory.

### Testing the MCP Server

You can test the MCP server directly:

```bash
wikigen mcp
```

This will start the server in stdio mode (for MCP clients). To test locally, you can use the test scripts in the `tests/` directory.

## LLM Provider Support

WikiGen supports multiple LLM providers, allowing you to choose the best option for your needs:

### Supported Providers

1. **Google Gemini** (default)
   - Recommended models: gemini-2.5-pro, gemini-2.5-flash, gemini-1.5-pro, gemini-1.5-flash
   - API key required: Yes (GEMINI_API_KEY)

2. **OpenAI**
   - Recommended models: gpt-4o-mini, gpt-4.1-mini, gpt-5-mini, gpt-5-nano
   - API key required: Yes (OPENAI_API_KEY)
   - Supports o1 models with reasoning capabilities

3. **Anthropic Claude**
   - Recommended models: claude-3-5-sonnet, claude-3-5-haiku, claude-3-7-sonnet (with extended thinking), claude-3-opus
   - API key required: Yes (ANTHROPIC_API_KEY)

4. **OpenRouter**
   - Recommended models: google/gemini-2.5-flash:free, meta-llama/llama-3.1-8b-instruct:free, openai/gpt-4o-mini, anthropic/claude-3.5-sonnet
   - API key required: Yes (OPENROUTER_API_KEY)
   - Access multiple models through a single API

5. **Ollama (Local)**
   - Recommended models: llama3.2, llama3.1, mistral, codellama, phi3
   - API key required: No (runs locally)
   - Default URL: http://localhost:11434
   - Perfect for privacy-sensitive projects or offline usage

### Switching Providers

You can switch between providers at any time:

```bash
# Switch to OpenAI
wikigen config set llm-provider openai
wikigen config set llm-model gpt-4o-mini
wikigen config update-api-key openai

# Switch to Ollama (local)
wikigen config set llm-provider ollama
wikigen config set llm-model llama3.2
# No API key needed for Ollama!
```

## CLI Options

### Required
- `run` - GitHub repo URL, the current directory, or a local directory path
- `--repo` or `--dir` - GitHub repo URL or local directory path (deprecated)

### Optional
- `-n, --name` - Project name (derived from repo/directory if omitted)
- `-t, --token` - GitHub personal access token
- `-o, --output` - Output directory (overrides config default)
- `-i, --include` - File patterns to include (e.g., "*.py", "*.js")
- `-e, --exclude` - File patterns to exclude (e.g., "tests/*", "docs/*")
- `-s, --max-size` - Maximum file size in bytes (default: 100KB)
- `--language` - Language for generated docs (default: "english")
- `--no-cache` - Disable LLM response caching
- `--max-abstractions` - Maximum number of abstractions to identify (default: 10; see the combined example below)
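
An illustrative invocation combining several of the optional flags above; the pattern, size, and language values are examples rather than recommendations, and it is assumed the flags can be freely combined on one command line:

```bash
# Illustrative only: combines documented flags with example values.
wikigen run . --include "*.py" --exclude "tests/*" --max-size 100000 --language english --no-cache --max-abstractions 10
```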

wikigen-1.0.0.dist-info/RECORD
@@ -0,0 +1,35 @@
wikigen/__init__.py,sha256=h8CRJ7LIaIjC5bVAp2y5IGlJ9tiox8bqkdDlAtPj6cU,102
wikigen/cli.py,sha256=n5FhlKhsfgUAkAZ6y_4cjvu2fGkiU5Z8Bb_1Xz2-IQc,23418
wikigen/config.py,sha256=bKFAgNmdfVJ9Ahxqe1WTknihOd4du78VCc-jmlXGQ_A,16942
wikigen/defaults.py,sha256=WJK7Mk9ixL48nNMc3LL0xcqH6fqiL9hACwEIWIa0B4k,1725
wikigen/flows/__init__.py,sha256=T3ZqdOPalymm0MoLupgHiviyjPDTtTdPa2KZjPxPnTU,32
wikigen/flows/flow.py,sha256=MmcFy7P1oR-zqYLMzfi7NVj-HCMr6nJQqAsxXq29_28,1211
wikigen/formatter/help_formatter.py,sha256=oI8OvgVcq4Nnn8UjmAWIUiBp4T-Y0Mwn5YXxrRIT8GA,6924
wikigen/formatter/init_formatter.py,sha256=a5FDq9o1mDxj5Jrk8Gf8g1H03MISN2Jj2m3dOVVPABk,1956
wikigen/formatter/output_formatter.py,sha256=sFtCIbFh5CONgJcqydlca4C0sMEwtjyR9mkWSXVrj2I,8510
wikigen/mcp/__init__.py,sha256=3Y3r4luzIYnrA80jL02N7icSYdvw0o6oGzI0omiaw-4,330
wikigen/mcp/chunking.py,sha256=dFWMGgUW4B12Ks6XilbF6AP0SfAsut_jX6c8SaobKCY,4705
wikigen/mcp/embeddings.py,sha256=58eqEJf4TaRosgfeTpRUqXMeIZtF0ynLBePd69T6zyE,1955
wikigen/mcp/output_resources.py,sha256=nveebupxRETAb9mc7qLW7HrKmtd7f7PrK0Inn7xosUE,2291
wikigen/mcp/search_index.py,sha256=vtIdL7OmGXAArMXMQMdFMOQHnU3joHhJp6e_Trbsk0c,33189
wikigen/mcp/server.py,sha256=ykuW0ol8l7AxgI-Zjxy2Ocn4FQpNA4FDUt145R_CjyM,7703
wikigen/mcp/vector_index.py,sha256=QPu-fdmgZBRjyPTVusSrexN8Gh8r6MJKDq4tOzHDRGs,10317
wikigen/metadata/__init__.py,sha256=uywd-fTYEOmlbSfteQFWrM4ARj2deAo2yFp6Q4CPsmM,647
wikigen/metadata/logo.py,sha256=O19BViYxVG8QW8kjDepo32E4bewFxCM0SSJHcGUYn_U,1624
wikigen/metadata/project.py,sha256=cCjjwYjLLTW6Yw9kjL9XxtIzegDwxowEc9YI4Ru6q3E,659
wikigen/metadata/version.py,sha256=TZOE3bP_yC0H-TAi9lOEwciW2HXKK1uWx8yk6RzrqYU,317
wikigen/nodes/__init__.py,sha256=-WHUQ9mrm2VUvwaMpMHCgDNQlW2a6GOY5HzpDmVJWUY,32
wikigen/nodes/nodes.py,sha256=_6FQ1oX9FR3TIcFcSawpyJXt9kNwPWOnJDH0owJRErk,48414
wikigen/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
wikigen/utils/adjust_headings.py,sha256=T5Dszp5VA0xT3QYkk4A5WbBFXF_mhakG1bR0FxmDHyQ,2142
wikigen/utils/call_llm.py,sha256=Wa2itlARi1Caz70rdxx3P0UpBTc5_SHBOhaytDQHAdw,8913
wikigen/utils/crawl_github_files.py,sha256=VhIaW8qxTk3_jT8bRSBNvlp2RzmeNFOuEdg6SPQ7FKM,18945
wikigen/utils/crawl_local_files.py,sha256=-JGxERtHqNdtoE92CdAtqZRTzHYSk_XcQhfym2qhzik,4943
wikigen/utils/llm_providers.py,sha256=skEes58Lq0RCGbCH7N_fJPXiUNpwJYOZ9k9WD8b6QnA,2973
wikigen/utils/version_check.py,sha256=ayyvtaNBDExrK0_YtQdb1SiQSFQYXcA8IAiT5Ow1Xlg,2624
wikigen-1.0.0.dist-info/licenses/LICENSE,sha256=yJGGto_mc9GKIARlP2lF_j6bQt0P_lLcw3dvbF0U_Dk,1070
wikigen-1.0.0.dist-info/METADATA,sha256=ce9BAD0eWCsAkIy04m1PraQjQLi6F3T06ZBGDzeK3Oo,11061
wikigen-1.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
wikigen-1.0.0.dist-info/entry_points.txt,sha256=BMp4yG5oOC3zeYIRz_wF-L3NB-QbllziBsTubWuJhIQ,45
wikigen-1.0.0.dist-info/top_level.txt,sha256=ytrmD6PyCdXr0GwyqRGsAywy91gOaDhsAEcCVgq9sng,8
wikigen-1.0.0.dist-info/RECORD,,

wikigen-1.0.0.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Mithun Ramesh

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

wikigen-1.0.0.dist-info/top_level.txt
@@ -0,0 +1 @@
wikigen