thonny-codemate 0.1.0__py3-none-any.whl
This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
- thonny_codemate-0.1.0.dist-info/METADATA +307 -0
- thonny_codemate-0.1.0.dist-info/RECORD +27 -0
- thonny_codemate-0.1.0.dist-info/WHEEL +5 -0
- thonny_codemate-0.1.0.dist-info/licenses/LICENSE +21 -0
- thonny_codemate-0.1.0.dist-info/top_level.txt +1 -0
- thonnycontrib/__init__.py +1 -0
- thonnycontrib/thonny_codemate/__init__.py +397 -0
- thonnycontrib/thonny_codemate/api.py +154 -0
- thonnycontrib/thonny_codemate/context_manager.py +296 -0
- thonnycontrib/thonny_codemate/external_providers.py +714 -0
- thonnycontrib/thonny_codemate/i18n.py +506 -0
- thonnycontrib/thonny_codemate/llm_client.py +841 -0
- thonnycontrib/thonny_codemate/message_virtualization.py +136 -0
- thonnycontrib/thonny_codemate/model_manager.py +515 -0
- thonnycontrib/thonny_codemate/performance_monitor.py +141 -0
- thonnycontrib/thonny_codemate/prompts.py +102 -0
- thonnycontrib/thonny_codemate/ui/__init__.py +1 -0
- thonnycontrib/thonny_codemate/ui/chat_view.py +687 -0
- thonnycontrib/thonny_codemate/ui/chat_view_html.py +1299 -0
- thonnycontrib/thonny_codemate/ui/custom_prompt_dialog.py +175 -0
- thonnycontrib/thonny_codemate/ui/markdown_renderer.py +484 -0
- thonnycontrib/thonny_codemate/ui/model_download_dialog.py +355 -0
- thonnycontrib/thonny_codemate/ui/settings_dialog.py +1218 -0
- thonnycontrib/thonny_codemate/utils/__init__.py +25 -0
- thonnycontrib/thonny_codemate/utils/constants.py +138 -0
- thonnycontrib/thonny_codemate/utils/error_messages.py +92 -0
- thonnycontrib/thonny_codemate/utils/unified_error_handler.py +310 -0
@@ -0,0 +1,307 @@
Metadata-Version: 2.4
Name: thonny-codemate
Version: 0.1.0
Summary: A Thonny IDE plugin that provides AI-powered coding assistance using local and cloud LLMs
Author-email: tokoroten <shinta.nakayama@gmail.com>
License: MIT
Project-URL: Homepage, https://github.com/tokoroten/thonny-codemate
Project-URL: Repository, https://github.com/tokoroten/thonny-codemate
Project-URL: Issues, https://github.com/tokoroten/thonny-codemate/issues
Keywords: thonny,plugin,llm,ai,code-assistant,education
Classifier: Development Status :: 3 - Alpha
Classifier: Intended Audience :: Education
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Education
Classifier: Topic :: Software Development :: Code Generators
Requires-Python: >=3.10
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: thonny>=4.0.0
Requires-Dist: huggingface-hub[hf_xet]>=0.16.0
Requires-Dist: tkinterweb[javascript]>=3.24
Requires-Dist: pythonmonkey>=0.2.0
Requires-Dist: markdown>=3.5
Requires-Dist: pygments>=2.17
Requires-Dist: llama-cpp-python>=0.3.9
Provides-Extra: cuda
Provides-Extra: metal
Provides-Extra: external-only
Provides-Extra: dev
Requires-Dist: pytest>=7.0; extra == "dev"
Requires-Dist: pytest-cov>=4.0; extra == "dev"
Requires-Dist: pytest-mock>=3.10.0; extra == "dev"
Requires-Dist: debugpy>=1.6; extra == "dev"
Requires-Dist: black>=23.0; extra == "dev"
Requires-Dist: ruff>=0.1.0; extra == "dev"
Requires-Dist: llama-cpp-python>=0.3.9; extra == "dev"
Provides-Extra: test
Requires-Dist: pytest>=7.0; extra == "test"
Requires-Dist: pytest-cov>=4.0; extra == "test"
Requires-Dist: pytest-mock>=3.10.0; extra == "test"
Dynamic: license-file

# Thonny Local LLM Plugin

A Thonny IDE plugin that integrates local LLM capabilities using llama-cpp-python to provide GitHub Copilot-like features without requiring external API services.



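The plugin embeds the model in-process via llama-cpp-python rather than calling out to a model server. A minimal sketch of that pattern, assuming a GGUF file downloaded as described under Model Setup below (illustrative only, not the plugin's own code):

```python
# Minimal in-process GGUF inference with llama-cpp-python.
# The model path is a placeholder; any chat-tuned GGUF file works.
from llama_cpp import Llama

llm = Llama(
    model_path="models/Llama-3.2-1B-Instruct-Q4_K_M.gguf",
    n_ctx=4096,      # context window size
    n_gpu_layers=0,  # > 0 offloads layers when built with CUDA/Metal
    verbose=False,
)

response = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a helpful coding assistant."},
        {"role": "user", "content": "Explain Python list comprehensions briefly."},
    ],
    max_tokens=256,
)
print(response["choices"][0]["message"]["content"])
```
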
## Features

- 🤖 **Local LLM Integration**: Uses llama-cpp-python to load GGUF models directly (no Ollama server required)
- 🚀 **On-Demand Model Loading**: Models are loaded on first use (not at startup) to avoid slow startup times
- 📝 **Code Generation**: Generate code based on natural language instructions
- 💡 **Code Explanation**: Select code and get AI-powered explanations via context menu
- 🎯 **Context-Aware**: Understands multiple files and project context
- 💬 **Conversation Memory**: Maintains conversation history for contextual responses
- 🎚️ **Skill Level Adaptation**: Adjusts responses based on the user's programming skill level
- 🔌 **External API Support**: Optional support for ChatGPT, Ollama server, and OpenRouter as alternatives
- 📥 **Model Download Manager**: Built-in download manager for recommended models
- 🎨 **Customizable System Prompts**: Tailor AI behavior with custom system prompts
- 📋 **Interactive Code Blocks**: Copy and insert code blocks directly from chat
- 🎨 **Markdown Rendering**: Optional rich text formatting with tkinterweb
- 💾 **USB Portable**: Can be bundled with Thonny and models for portable use
- 🛡️ **Error Resilience**: Advanced error handling with automatic retry and user-friendly messages
- ⚡ **Performance Optimized**: Message virtualization and caching for handling large conversations
- 🔧 **Smart Provider Detection**: Automatically detects Ollama vs LM Studio based on API responses
- 🌐 **Multi-language Support**: Japanese, Chinese (Simplified/Traditional), and English UI

## Installation

### From PyPI
```bash
# Standard installation (includes llama-cpp-python for CPU)
pip install thonny-codemate
```

**For GPU support**, see [INSTALL_GPU.md](INSTALL_GPU.md) for detailed instructions:
- NVIDIA GPUs (CUDA)
- Apple Silicon (Metal)
- Automatic GPU detection

### Development Installation

#### Quick Setup with uv (Recommended)
```bash
# Clone the repository
git clone https://github.com/tokoroten/thonny-codemate.git
cd thonny-codemate

# Install uv if not already installed
# Windows (PowerShell):
powershell -c "irm https://astral.sh/uv/install.ps1 | iex"
# Linux/macOS:
curl -LsSf https://astral.sh/uv/install.sh | sh

# Install all dependencies (including llama-cpp-python)
uv sync --all-extras

# Or install with development dependencies only
uv sync --extra dev

# Markdown rendering support (tkinterweb, markdown, pygments) is included
# in the base dependencies, so no extra install step is needed

# Activate virtual environment
.venv\Scripts\activate     # Windows
source .venv/bin/activate  # macOS/Linux
```

#### Alternative Setup Script
```bash
# Use the setup script for guided installation
python setup_dev.py
```

### Installing with GPU Support

By default, llama-cpp-python is installed with CPU support. For GPU acceleration:

**CUDA support**:
```bash
# Reinstall llama-cpp-python with CUDA support
uv pip uninstall llama-cpp-python
uv pip install llama-cpp-python --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cu124
```

**Metal support (macOS)**:
```bash
# Rebuild with Metal support
uv pip uninstall llama-cpp-python
CMAKE_ARGS="-DLLAMA_METAL=on" uv pip install llama-cpp-python --no-cache-dir
```

## Model Setup

### Download GGUF Models

Recommended models:
- **Qwen2.5-Coder-14B** - Latest high-performance model specialized for programming (8.8GB)
- **Llama-3.2-1B/3B** - Lightweight and fast models (0.8GB/2.0GB)
- **Llama-3-ELYZA-JP-8B** - Japanese-specialized model (4.9GB)

```bash
# Install Hugging Face CLI
pip install -U "huggingface_hub[cli]"

# Qwen2.5 Coder (programming-focused, recommended)
huggingface-cli download bartowski/Qwen2.5-Coder-14B-Instruct-GGUF Qwen2.5-Coder-14B-Instruct-Q4_K_M.gguf --local-dir ./models

# Llama 3.2 1B (lightweight)
huggingface-cli download bartowski/Llama-3.2-1B-Instruct-GGUF Llama-3.2-1B-Instruct-Q4_K_M.gguf --local-dir ./models
```

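Since huggingface-hub is already one of the plugin's dependencies, the same download can also be scripted. A small sketch using the library's `hf_hub_download` helper, with the same repo and filename as the CLI example above:

```python
# Programmatic model download with huggingface_hub
# (equivalent to the huggingface-cli command above).
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="bartowski/Llama-3.2-1B-Instruct-GGUF",
    filename="Llama-3.2-1B-Instruct-Q4_K_M.gguf",
    local_dir="./models",
)
print(f"Model saved to {path}")
```
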
## Usage

1. **Start Thonny** - The plugin will load automatically
2. **Model Setup**:
   - Open Settings → LLM Assistant Settings
   - Choose between local models or external APIs
   - For local models: Select a GGUF file or download recommended models
   - For external APIs: Enter your API key and model name
3. **Code Explanation**:
   - Select code in the editor
   - Right-click and choose "Explain Selection"
   - The AI will explain the code based on your skill level
4. **Code Generation**:
   - Write a comment describing what you want
   - Right-click and choose "Generate from Comment"
   - Or use the AI Assistant panel for interactive chat
5. **Error Fixing**:
   - When you encounter an error, click "Explain Error" in the assistant panel
   - The AI will analyze the error and suggest fixes

### External API Configuration

#### ChatGPT
1. Get an API key from [OpenAI](https://platform.openai.com/)
2. In settings, select "chatgpt" as provider
3. Enter your API key
4. Choose model (e.g., gpt-3.5-turbo, gpt-4)

#### Ollama
1. Install and run [Ollama](https://ollama.ai/)
2. In settings, select "ollama" as provider
3. Set base URL (default: http://localhost:11434)
4. Choose installed model (e.g., llama3, mistral)

#### OpenRouter
1. Get an API key from [OpenRouter](https://openrouter.ai/)
2. In settings, select "openrouter" as provider
3. Enter your API key
4. Choose model (free models available)

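All three providers expose an OpenAI-style chat-completions endpoint, which is why they can sit behind a single provider setting. A hedged sketch of the request shape, using only the standard library; the endpoints are the providers' documented defaults, and this is not the plugin's actual client code:

```python
# Sketch of the OpenAI-style chat request all three providers accept.
# Base URLs (documented defaults):
#   OpenAI:     https://api.openai.com/v1
#   Ollama:     http://localhost:11434/v1   (no API key required)
#   OpenRouter: https://openrouter.ai/api/v1
import json
import urllib.request

def chat(base_url: str, model: str, prompt: str, api_key: str = "") -> str:
    payload = json.dumps({
        "model": model,
        "messages": [{"role": "user", "content": prompt}],
    }).encode("utf-8")
    request = urllib.request.Request(
        f"{base_url}/chat/completions",
        data=payload,
        headers={
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}",
        },
    )
    with urllib.request.urlopen(request) as response:
        body = json.load(response)
    return body["choices"][0]["message"]["content"]

# Example against a local Ollama server:
# print(chat("http://localhost:11434/v1", "llama3", "Hello!"))
```
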
## Development

### Project Structure
```
thonny-codemate/
├── thonnycontrib/
│   └── thonny_codemate/
│       ├── __init__.py            # Plugin entry point
│       ├── llm_client.py          # LLM integration
│       ├── external_providers.py  # ChatGPT/Ollama/OpenRouter clients
│       ├── model_manager.py       # GGUF model download and management
│       ├── ui/                    # Chat panel, settings, and dialogs
│       └── utils/                 # Constants and error handling
├── models/                        # GGUF model storage
├── tests/                         # Unit tests
├── docs_for_ai/                   # AI documentation
└── README.md
```

### Running in Development Mode
```bash
# Normal mode
python run_dev.py

# Debug mode (for VS Code/PyCharm attachment)
python run_dev.py --debug

# Quick run with uv
uv run thonny
```

### Running Tests
```bash
uv run pytest -v
```

## Configuration

The plugin stores its configuration in Thonny's settings system (see the sketch after this list). You can configure:

- **Provider Selection**: Local models or external APIs (ChatGPT, Ollama, OpenRouter)
- **Model Settings**: Model path, context size, generation parameters
- **User Preferences**: Skill level (beginner/intermediate/advanced)
- **System Prompts**: Choose between coding-focused, explanation-focused, or custom prompts
- **Generation Parameters**: Temperature, max tokens, etc.

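Because the settings live in Thonny's standard option store, they can also be inspected programmatically through the workbench API. A sketch, with the caveat that the option keys shown are illustrative placeholders rather than the plugin's real key names:

```python
# Reading options from Thonny's settings system.
# NOTE: "llm.provider" and "llm.skill_level" are hypothetical key names
# used for illustration; the plugin's actual keys may differ.
from thonny import get_workbench

wb = get_workbench()
provider = wb.get_option("llm.provider", "local")
skill_level = wb.get_option("llm.skill_level", "beginner")
print(provider, skill_level)
```
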
## Requirements

- Python 3.10+
- Thonny 4.0+
- llama-cpp-python (automatically installed)
- 4GB+ RAM (depending on model size)
- 5-10GB disk space for models
- uv (for development)
- tkinterweb with JavaScript support (for Markdown rendering and interactive features)
  - Automatically installed with the plugin
  - Includes PythonMonkey for JavaScript-Python communication
  - Enables Copy/Insert buttons with direct Python integration

## Contributing

Contributions are welcome! Please feel free to submit a Pull Request.

1. Fork the repository
2. Create your feature branch (`git checkout -b feature/AmazingFeature`)
3. Commit your changes (`git commit -m 'Add some AmazingFeature'`)
4. Push to the branch (`git push origin feature/AmazingFeature`)
5. Open a Pull Request

## License

This project is licensed under the MIT License - see the LICENSE file for details.

## Acknowledgments

- Inspired by GitHub Copilot's functionality
- Built on top of [llama-cpp-python](https://github.com/abetlen/llama-cpp-python)
- Designed for [Thonny IDE](https://thonny.org/)
- **99% of the code in this project was generated by [Claude Code](https://claude.ai/code)** - This project demonstrates the capabilities of AI-assisted development

## Status

🚧 **Under Development** - This plugin is currently in an early stage of development.

## Roadmap

- [x] Initial project setup
- [x] Development environment with uv
- [x] Basic plugin structure
- [x] LLM integration with llama-cpp-python
- [x] Chat panel UI (right side)
- [x] Context menu for code explanation
- [x] Code generation from comments
- [x] Error fixing assistance
- [x] Configuration UI
- [x] Multi-file context support
- [x] Model download manager
- [x] External API support (ChatGPT, Ollama, OpenRouter)
- [x] Customizable system prompts
- [ ] Inline code completion
- [ ] USB portable packaging
- [ ] PyPI release

## Links

- [Thonny IDE](https://thonny.org/)
- [llama-cpp-python](https://github.com/abetlen/llama-cpp-python)
- [Project Documentation](docs_for_ai/)
@@ -0,0 +1,27 @@
thonny_codemate-0.1.0.dist-info/licenses/LICENSE,sha256=f6YK7MRRtaWl2brMAtg_aGrsLK_i4rVyho0y1PdnFi0,1065
thonnycontrib/__init__.py,sha256=7INAjyU8ojDRVyo0y1OQh3ynMPRGqBJZL4fa--i3elk,33
thonnycontrib/thonny_codemate/__init__.py,sha256=M312z1qN0Pxtl6xUggSFQ0euX2kD8voBCoqFIlLLM6Q,13485
thonnycontrib/thonny_codemate/api.py,sha256=YLXjPsB6cykgRwyjzrUBOGFtw0eu4BQgr1AiMbj8e7c,4259
thonnycontrib/thonny_codemate/context_manager.py,sha256=K83M-VlBy7DR1xzedyPT8NViqnR9CmX95YmXnDBzRFM,11274
thonnycontrib/thonny_codemate/external_providers.py,sha256=PmIRY_lLOILCAZa4mpP3yB9PU7ZWSddWjYPf2S7vkyY,29016
thonnycontrib/thonny_codemate/i18n.py,sha256=tq1pBwNLYgQtJBXH36MmTOl1sRuaPaXgSyMaAXVIIPU,28846
thonnycontrib/thonny_codemate/llm_client.py,sha256=Ojk8oxNI7tKAzVEtaoSwOLVisT1cOYdXf9Qw56eC2L4,33767
thonnycontrib/thonny_codemate/message_virtualization.py,sha256=kShFsujEOm68_VOUGH47Qui4STBaxBjKu5r-lLqUdGE,4909
thonnycontrib/thonny_codemate/model_manager.py,sha256=oS0VNuS_NWggNPfoRmQk3z7GIyQlo0DmQ1Bqks8iApY,21268
thonnycontrib/thonny_codemate/performance_monitor.py,sha256=L6gIin6oBzplRgZB676AkYd3mAtkmc-Qg4K5iM59Q1E,4281
thonnycontrib/thonny_codemate/prompts.py,sha256=bV4Qjr8SjqWBnnUIk-g-R58sRGbyZOauvlWvNO9GkGg,3885
thonnycontrib/thonny_codemate/ui/__init__.py,sha256=SDKZ1vYdsb14_jHIP4wTeM8Vqkog8z0PkWquS3-mQIY,43
thonnycontrib/thonny_codemate/ui/chat_view.py,sha256=veyFSCJUOPVFUt1qJochKTvHQi49u3fzxKQ0Nti0NNc,29203
thonnycontrib/thonny_codemate/ui/chat_view_html.py,sha256=jgAeEc3dp2pcyfDfMg5RsTLROCHffNKFVlnMQNxFNrw,53561
thonnycontrib/thonny_codemate/ui/custom_prompt_dialog.py,sha256=LEemjMmHM_KcidEsUhOOffa1DzCauOXNiUqlH2wGjts,5973
thonnycontrib/thonny_codemate/ui/markdown_renderer.py,sha256=K1vHLTn6EocPRxb7fXMKGsSFD6iY-OhFTqMY_dBkiRw,15649
thonnycontrib/thonny_codemate/ui/model_download_dialog.py,sha256=1qzqCL_2J_cXCh9OvUdaPAyouBm76BgfyUlPRkNCDGw,13701
thonnycontrib/thonny_codemate/ui/settings_dialog.py,sha256=fFbTfhFN-_rkI48oW_5eqCT91OzT1ZnqxCOaCSWvjbU,51147
thonnycontrib/thonny_codemate/utils/__init__.py,sha256=xjA_6r7WFCf3RbCNnZhwFOSapJgcr-4lC3sTI417rDM,523
thonnycontrib/thonny_codemate/utils/constants.py,sha256=l6KKItPClPy7IWZKH53Kpq-PNMfJogTYIZ2OGKTZO8w,3159
thonnycontrib/thonny_codemate/utils/error_messages.py,sha256=kOucGfSwBIOE23GsVAIcQYY7YbKjezIuOxiIM1wTKY4,3601
thonnycontrib/thonny_codemate/utils/unified_error_handler.py,sha256=yvrSiXC3Kzro4KLL6lDLMBRfdff79PLvBF1yjHFhv9E,10769
thonny_codemate-0.1.0.dist-info/METADATA,sha256=ZKugHzfp8cMIdTNUXCnVjfRh25FESOit_I1LdW8R6iE,10870
thonny_codemate-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
thonny_codemate-0.1.0.dist-info/top_level.txt,sha256=thF9WQtY_lcNKOOR4SQeeWLAVWb4i8XhdGC7pEMAOHY,14
thonny_codemate-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 tokoroten

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -0,0 +1 @@
thonnycontrib
@@ -0,0 +1 @@
# thonnycontrib namespace package