rust-crate-pipeline 1.2.6-py3-none-any.whl → 1.5.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rust_crate_pipeline/__init__.py +15 -6
- rust_crate_pipeline/ai_processing.py +260 -153
- rust_crate_pipeline/analysis.py +171 -160
- rust_crate_pipeline/config.py +23 -3
- rust_crate_pipeline/github_token_checker.py +30 -20
- rust_crate_pipeline/main.py +107 -45
- rust_crate_pipeline/network.py +109 -108
- rust_crate_pipeline/pipeline.py +269 -125
- rust_crate_pipeline/production_config.py +15 -9
- rust_crate_pipeline/utils/file_utils.py +14 -10
- rust_crate_pipeline/utils/logging_utils.py +25 -13
- rust_crate_pipeline/version.py +47 -2
- {rust_crate_pipeline-1.2.6.dist-info → rust_crate_pipeline-1.5.1.dist-info}/METADATA +94 -9
- rust_crate_pipeline-1.5.1.dist-info/RECORD +19 -0
- rust_crate_pipeline-1.2.6.dist-info/RECORD +0 -19
- {rust_crate_pipeline-1.2.6.dist-info → rust_crate_pipeline-1.5.1.dist-info}/WHEEL +0 -0
- {rust_crate_pipeline-1.2.6.dist-info → rust_crate_pipeline-1.5.1.dist-info}/entry_points.txt +0 -0
- {rust_crate_pipeline-1.2.6.dist-info → rust_crate_pipeline-1.5.1.dist-info}/licenses/LICENSE +0 -0
- {rust_crate_pipeline-1.2.6.dist-info → rust_crate_pipeline-1.5.1.dist-info}/top_level.txt +0 -0
rust_crate_pipeline/utils/file_utils.py
CHANGED
@@ -1,17 +1,18 @@
 # rust_crate_pipeline/utils/file_utils.py
-import os
 import json
+import os
 import shutil
 from datetime import datetime
-from typing import List, Dict
+from typing import List, Dict
+
 
 def create_output_dir(base_name: str = "crate_data") -> str:
     """
     Create timestamped output directory
-
+
     Args:
         base_name: Base name for output directory
-
+
     Returns:
         Path to created directory
     """
@@ -20,38 +21,40 @@ def create_output_dir(base_name: str = "crate_data") -> str:
     os.makedirs(output_dir, exist_ok=True)
     return output_dir
 
+
 def save_checkpoint(data: List[Dict], prefix: str, output_dir: str) -> str:
     """
     Save processing checkpoint with status metadata
-
+
     Args:
         data: List of crate dictionaries
         prefix: File name prefix
         output_dir: Target directory
-
+
     Returns:
         Path to saved checkpoint file
     """
     timestamp = datetime.now().isoformat()
     filename = os.path.join(output_dir, f"{prefix}_{timestamp}.jsonl")
-
+
     with open(filename, "w") as f:
         for item in data:
             f.write(json.dumps(item) + "\n")
-
+
     # Save status metadata
     status = {
         "timestamp": timestamp,
         "total_items": len(data),
         "checkpoint_file": filename
     }
-
+
     status_file = os.path.join(output_dir, f"{prefix}_status_{timestamp}.json")
     with open(status_file, "w") as f:
         json.dump(status, f, indent=2)
-
+
     return filename
 
+
 def safe_file_cleanup(path: str):
     """Safely remove files or directories"""
     try:
@@ -62,6 +65,7 @@ def safe_file_cleanup(path: str):
     except Exception as e:
         print(f"Failed to cleanup {path}: {str(e)}")
 
+
 def disk_space_check(min_free_gb: float = 1.0) -> bool:
     """Check if sufficient disk space is available"""
     try:
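
As a sketch of the checkpoint format these helpers produce (one JSON object per line in the `.jsonl` file, plus a `*_status_*.json` sidecar written by `save_checkpoint`), reading a checkpoint back might look like this; the `load_checkpoint` helper is hypothetical and not part of the package:

```python
import json
from typing import Dict, List


def load_checkpoint(path: str) -> List[Dict]:
    """Read back a JSONL checkpoint written by save_checkpoint()."""
    items: List[Dict] = []
    with open(path) as f:
        for line in f:
            if line.strip():  # tolerate trailing blank lines
                items.append(json.loads(line))
    return items
```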
rust_crate_pipeline/utils/logging_utils.py
CHANGED
@@ -6,38 +6,45 @@ import logging
 from functools import wraps
 from typing import Optional
 
-def configure_logging(log_dir: Optional[str] = None, log_level: int = logging.INFO) -> logging.Logger:
+
+def configure_logging(log_dir: Optional[str] = None,
+                      log_level: int = logging.INFO) -> logging.Logger:
     """
     Configure global logging with file and console handlers
-
+
     Args:
         log_dir: Directory for log files (defaults to current directory)
         log_level: Logging level (default: INFO)
-
+
     Returns:
         Root logger instance
     """
     logger = logging.getLogger()
     logger.setLevel(log_level)
-
+
     # Console handler
     console_handler = logging.StreamHandler()
     console_handler.setLevel(log_level)
-    console_format = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
+    console_format = logging.Formatter(
+        "%(asctime)s [%(levelname)s] %(message)s")
     console_handler.setFormatter(console_format)
     logger.addHandler(console_handler)
-
+
     # File handler
     if log_dir:
-        log_file = os.path.join(log_dir, f"pipeline_{time.strftime('%Y%m%d-%H%M%S')}.log")
+        log_file = os.path.join(
+            log_dir, f"pipeline_{
+                time.strftime('%Y%m%d-%H%M%S')}.log")
         file_handler = logging.FileHandler(log_file)
         file_handler.setLevel(log_level)
-        file_format = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(message)s")
+        file_format = logging.Formatter(
+            "%(asctime)s [%(levelname)s] %(name)s: %(message)s")
         file_handler.setFormatter(file_format)
         logger.addHandler(file_handler)
-
+
     return logger
 
+
 def log_execution_time(func):
     """Decorator to log function execution time"""
     @wraps(func)
@@ -45,18 +52,23 @@ def log_execution_time(func):
         start_time = time.time()
         result = func(*args, **kwargs)
         end_time = time.time()
-        logging.info(f"{func.__name__} executed in {end_time - start_time:.2f} seconds")
+        logging.info(
+            f"{func.__name__} executed in {end_time - start_time:.2f} seconds")
         return result
     return wrapper
 
+
 def log_resource_usage():
     """Log current resource utilization (CPU, memory, disk)"""
     cpu_percent = psutil.cpu_percent()
     mem = psutil.virtual_memory()
     disk = psutil.disk_usage('.')
-
-    logging.info(
-        f"Resource Usage - CPU: {cpu_percent}%, Memory: {mem.percent}%, Disk: {disk.percent}%")
+
+    logging.info(
+        f"Resource Usage - CPU: {cpu_percent}%, Memory: {
+            mem.percent}%, Disk: {
+                disk.percent}%")
+
     return {
         "cpu_percent": cpu_percent,
         "memory_percent": mem.percent,
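
A minimal usage sketch for the helpers in this file, assuming the module path shown in the wheel's RECORD (`rust_crate_pipeline/utils/logging_utils.py`); the `slow_task` function is purely illustrative:

```python
import logging

from rust_crate_pipeline.utils.logging_utils import (
    configure_logging,
    log_execution_time,
)

# Set up console + timestamped file logging in the current directory
logger = configure_logging(log_dir=".", log_level=logging.INFO)


@log_execution_time  # logs "slow_task executed in N.NN seconds"
def slow_task() -> int:
    return sum(range(1_000_000))


slow_task()
```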
rust_crate_pipeline/version.py
CHANGED
@@ -1,9 +1,54 @@
-"""Version information for rust-crate-pipeline."""
+"""Version inf - New CLI options: --enable-crawl4ai, --disable-crawl4ai, --crawl4ai-model
+- Enhanced configuration with local GGUF model paths and crawl4ai_timeoutmation for rust-crate-pipeline."""
 
-__version__ = "1.2.6"
+__version__ = "1.5.1"
 __version_info__ = tuple(int(x) for x in __version__.split("."))
 
 # Version history
+# 1.5.1 - Configuration Standardization Release: Model Path Consistency
+# - Standardized all configuration to use GGUF model paths
+# - Updated CLI defaults for --crawl4ai-model to ~/models/deepseek/deepseek-coder-6.7b-instruct.Q4_K_M.gguf
+# - Enhanced Rule Zero alignment with transparent configuration practices
+# - Updated all test files to use consistent GGUF model path references
+# - Comprehensive documentation updates for proper model configuration
+# - Removed inconsistent Ollama references in favor of llama-cpp-python
+# - Ensured CLI help text and JSON examples reflect correct model paths
+# 1.5.0 - Major Release: Enhanced Web Scraping with Crawl4AI Integration
+# - Integrated Crawl4AI for advanced web scraping capabilities
+# - Added JavaScript-rendered content extraction via Playwright
+# - Enhanced README parsing with LLM-powered content analysis
+# - Implemented structured data extraction from docs.rs
+# - Added quality scoring for scraped content
+# - Graceful fallback to basic scraping when Crawl4AI unavailable
+# - Full async processing for improved performance
+# - New CLI options: --enable-crawl4ai, --disable-crawl4ai, --crawl4ai-model
+# - Enhanced configuration with crawl4ai_model and crawl4ai_timeout
+# - Comprehensive test coverage for all Crawl4AI features
+# - Rule Zero compliant with full transparency and audit trails
+# 1.4.0 - Major Release: Rule Zero Compliance Audit Complete
+# - Completed comprehensive Rule Zero alignment audit
+# - Eliminated all code redundancy and dead code
+# - Achieved 100% test coverage (22/22 tests passing)
+# - Refactored to pure asyncio architecture (thread-free)
+# - Suppressed Pydantic deprecation warnings
+# - Full production readiness with Docker support
+# - Enhanced documentation with PyPI cross-references
+# - Certified Rule Zero compliance across all four principles
+# 1.3.1 - Bug Fix Release: Crawl4AI Integration Cleanup
+# - Fixed CSS selector syntax errors in Crawl4AI integration
+# - Cleaned up duplicate and obsolete test files
+# - Resolved import conflicts between workspace and integration configs
+# - Improved error handling in enhanced scraping module
+# - Standardized on direct llama.cpp approach (removed Ollama dependencies)
+# - Enhanced Rule Zero compliance with transparent cleanup process
+# 1.3.0 - Quality & Integration Release: Comprehensive code quality improvements
+# - Fixed all critical PEP 8 violations (F821, F811, E114)
+# - Enhanced error handling with graceful dependency fallbacks
+# - Improved module integration and import path resolution
+# - Added comprehensive test validation (21/21 tests passing)
+# - Enhanced async support and Unicode handling
+# - Production-ready CLI interfaces with robust error handling
+# - Full Rule Zero compliance validation
 # 1.2.0 - Major release: Production-ready, cleaned codebase
 # - Unified documentation into single comprehensive README
 # - Removed all non-essential development and test files
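
Since `__version_info__` is a plain integer tuple, callers can gate behavior on it with ordinary tuple comparison; a small illustrative sketch (the feature flag name is hypothetical, not from the package):

```python
from rust_crate_pipeline.version import __version_info__

# Crawl4AI-backed scraping shipped in 1.5.0, per the history above
ENHANCED_SCRAPING_AVAILABLE = __version_info__ >= (1, 5, 0)
```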
{rust_crate_pipeline-1.2.6.dist-info → rust_crate_pipeline-1.5.1.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rust-crate-pipeline
-Version: 1.2.6
+Version: 1.5.1
 Summary: A comprehensive system for gathering, enriching, and analyzing metadata for Rust crates using AI-powered insights
 Home-page: https://github.com/Superuser666-Sigil/SigilDERG-Data_Production
 Author: SuperUser666-Sigil
@@ -51,21 +51,30 @@ Dynamic: requires-python
 
 [](https://www.python.org/downloads/)
 [](https://opensource.org/licenses/MIT)
-[](https://pypi.org/project/rust-crate-pipeline/)
+[](https://pypi.org/project/rust-crate-pipeline/)
 [](https://docker.com/)
+[](https://github.com/Superuser666-Sigil/SigilDERG-Data_Production/blob/main/SYSTEM_AUDIT_REPORT.md)
 
-A production-ready pipeline for comprehensive Rust crate analysis, featuring AI-powered insights
+A production-ready, Rule Zero-compliant pipeline for comprehensive Rust crate analysis, featuring **AI-powered insights**, **enhanced web scraping with Crawl4AI**, dependency mapping, and automated data enrichment. Designed for researchers, developers, and data scientists studying the Rust ecosystem.
+
+**🆕 New in v1.5.1**: Model path standardization, improved GGUF configuration consistency, and enhanced Rule Zero alignment.
+
+📦 **Available on PyPI:** [rust-crate-pipeline](https://pypi.org/project/rust-crate-pipeline/)
 
 ## 🚀 Quick Start
 
 ### 1. Installation
 
 #### From PyPI (Recommended)
+
 ```bash
 pip install rust-crate-pipeline
 ```
 
+For the latest version, visit: [rust-crate-pipeline on PyPI](https://pypi.org/project/rust-crate-pipeline/)
+
 #### From Source
+
 ```bash
 git clone https://github.com/Superuser666-Sigil/SigilDERG-Data_Production.git
 cd SigilDERG-Data_Production
@@ -73,6 +82,7 @@ pip install -e .
 ```
 
 #### Development Installation
+
 ```bash
 git clone https://github.com/Superuser666-Sigil/SigilDERG-Data_Production.git
 cd SigilDERG-Data_Production
@@ -118,6 +128,25 @@ python3 -m rust_crate_pipeline --skip-ai --limit 50
 ### 4. Advanced Usage
 
 ```bash
+# Enhanced web scraping with Crawl4AI (default in v1.5.0)
+python3 -m rust_crate_pipeline --enable-crawl4ai --limit 20
+
+# Disable Crawl4AI for basic scraping only
+python3 -m rust_crate_pipeline --disable-crawl4ai --limit 20
+
+# Custom Crawl4AI model configuration
+python3 -m rust_crate_pipeline \
+    --enable-crawl4ai \
+    --crawl4ai-model "~/models/deepseek/deepseek-coder-6.7b-instruct.Q4_K_M.gguf" \
+    --limit 10
+
+# Sigil Protocol with enhanced scraping
+python3 -m rust_crate_pipeline \
+    --enable-sigil-protocol \
+    --enable-crawl4ai \
+    --skip-ai \
+    --limit 5
+
 # Custom configuration
 python3 -m rust_crate_pipeline \
     --limit 100 \
@@ -139,6 +168,17 @@ python3 -m rust_crate_pipeline \
 
 ## 🎯 Features
 
+*Available in the latest version: [rust-crate-pipeline v1.5.1](https://pypi.org/project/rust-crate-pipeline/)*
+
+### 🌐 Enhanced Web Scraping (New in v1.5.0)
+
+- **Crawl4AI Integration**: Advanced web scraping with AI-powered content extraction
+- **JavaScript Rendering**: Playwright-powered browser automation for dynamic content
+- **Smart Content Analysis**: LLM-enhanced README and documentation parsing
+- **Structured Data Extraction**: Intelligent parsing of docs.rs and technical documentation
+- **Quality Scoring**: Automated content quality assessment and validation
+- **Graceful Fallbacks**: Automatic degradation to basic scraping when needed
+
 ### 📊 Data Collection & Analysis
 
 - **Multi-source metadata**: crates.io, GitHub, lib.rs integration
@@ -161,8 +201,35 @@ python3 -m rust_crate_pipeline \
 - **Robust error handling**: Graceful degradation and comprehensive logging
 - **Progress checkpointing**: Automatic saving for long-running processes
 - **Docker ready**: Full container support with optimized configurations
+- **Rule Zero Compliance**: Full transparency and audit trail support
+
+## Recent Updates
+
+### Version 1.5.1 - Configuration Standardization (Latest)
+- 🔧 **Model Path Consistency**: Standardized all configuration to use GGUF model paths (`~/models/deepseek/deepseek-coder-6.7b-instruct.Q4_K_M.gguf`)
+- ⚖️ **Rule Zero Alignment**: Enhanced compliance with Rule Zero principles for transparency and validation
+- 📝 **Documentation Updates**: Comprehensive updates to reflect proper model configuration practices
+- 🧪 **Test Standardization**: Updated all test files to use consistent GGUF model paths
+- 🚀 **CLI Consistency**: Ensured all CLI defaults and help text reflect correct model paths
+
+### Version 1.5.0 - Enhanced Web Scraping
+- 🚀 **Crawl4AI Integration**: Advanced web scraping with AI-powered content extraction
+- 🌐 **JavaScript Rendering**: Playwright-powered browser automation for dynamic content
+- 🧠 **LLM-Enhanced Parsing**: AI-powered README and documentation analysis
+- 📊 **Structured Data Extraction**: Intelligent parsing of docs.rs and technical documentation
+- ⚡ **Async Processing**: High-performance concurrent web scraping
+- 🛡️ **Graceful Fallbacks**: Automatic degradation to basic scraping when needed
 
-## 💻 System Requirements
+### Version 1.4.0 - Rule Zero Compliance
+- 🏆 **Rule Zero Certification**: Complete alignment audit and compliance verification
+- 🧪 **100% Test Coverage**: All 22 tests passing with comprehensive validation
+- 🔄 **Thread-Free Architecture**: Pure asyncio implementation for better performance
+- 📦 **PyPI Integration**: Official package availability with easy installation
+- 🐳 **Docker Support**: Full containerization with production-ready configurations
+
+*For complete version history, see [CHANGELOG.md](CHANGELOG.md)*
+
+## 💻 System Requirements
 
 ### Minimum Requirements
 
@@ -183,12 +250,21 @@ python3 -m rust_crate_pipeline \
 Core dependencies are automatically installed:
 
 ```bash
+# Core functionality
 requests>=2.28.0
 requests-cache>=0.9.0
 beautifulsoup4>=4.11.0
 tqdm>=4.64.0
+
+# AI and LLM processing
 llama-cpp-python>=0.2.0
 tiktoken>=0.4.0
+
+# Enhanced web scraping (New in v1.5.0)
+crawl4ai>=0.6.0
+playwright>=1.49.0
+
+# System utilities
 psutil>=5.9.0
 python-dateutil>=2.8.0
 ```
@@ -209,6 +285,11 @@ python-dateutil>=2.8.0
 | `--log-level` | str | INFO | Logging verbosity |
 | `--skip-ai` | flag | False | Skip AI enrichment |
 | `--skip-source-analysis` | flag | False | Skip source code analysis |
+| `--enable-crawl4ai` | flag | True | Enable enhanced web scraping (default) |
+| `--disable-crawl4ai` | flag | False | Disable Crawl4AI, use basic scraping |
+| `--crawl4ai-model` | str | ~/models/deepseek/deepseek-coder-6.7b-instruct.Q4_K_M.gguf | GGUF model path for content analysis |
+| `--enable-sigil-protocol` | flag | False | Enable Rule Zero compliance mode |
+| `--sigil-mode` | str | enhanced | Sigil processing mode |
 | `--crate-list` | list | None | Specific crates to process |
 | `--config-file` | str | None | JSON configuration file |
 
@@ -244,7 +325,9 @@ Create a JSON configuration file for custom settings:
   "batch_size": 10,
   "github_min_remaining": 500,
   "cache_ttl": 7200,
-  "model_path": "~/models/your-model.gguf"
+  "model_path": "~/models/your-model.gguf", "enable_crawl4ai": true,
+  "crawl4ai_model": "~/models/deepseek/deepseek-coder-6.7b-instruct.Q4_K_M.gguf",
+  "crawl4ai_timeout": 30
 }
 ```
 
@@ -295,7 +378,7 @@ docker run -d --name pipeline \
 
 ### Output Structure
 
-```
+```text
 output/
 ├── enriched_crates_YYYYMMDD_HHMMSS.json    # Main results
 ├── metadata_YYYYMMDD_HHMMSS.json           # Raw metadata
@@ -459,7 +542,7 @@ sudo systemctl status rust-crate-pipeline
 
 ### Processing Flow
 
-```
+```text
 1. Crate Discovery → 2. Metadata Fetching → 3. AI Enrichment
        ↓                    ↓                      ↓
 4. Source Analysis → 5. Security Scanning → 6. Community Analysis
@@ -469,7 +552,7 @@ sudo systemctl status rust-crate-pipeline
 
 ### Project Structure
 
-```
+```text
 rust_crate_pipeline/
 ├── __init__.py            # Package initialization
 ├── __main__.py            # Entry point for python -m execution
@@ -570,4 +653,6 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
 
 ---
 
-
+## Ready to analyze the Rust ecosystem! 🦀✨
+
+📦 **Get started today:** [Install from PyPI](https://pypi.org/project/rust-crate-pipeline/)
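
Note that the shipped METADATA folds `"enable_crawl4ai"` onto the `model_path` line in the JSON example above (hunk at -244). Read with one key per line, the intended configuration appears to be (same keys and values as the diff, reflowed):

```json
{
  "batch_size": 10,
  "github_min_remaining": 500,
  "cache_ttl": 7200,
  "model_path": "~/models/your-model.gguf",
  "enable_crawl4ai": true,
  "crawl4ai_model": "~/models/deepseek/deepseek-coder-6.7b-instruct.Q4_K_M.gguf",
  "crawl4ai_timeout": 30
}
```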
rust_crate_pipeline-1.5.1.dist-info/RECORD
ADDED
@@ -0,0 +1,19 @@
+rust_crate_pipeline/__init__.py,sha256=NxD8_OEGHEHUN9EfJj2S1rRyZ0UMkiF20LNSMnjL9Uk,1939
+rust_crate_pipeline/__main__.py,sha256=fYgtPofuk4vkwiZ7ELP4GVMNj_QiKmZMSlvhzsNGuDs,155
+rust_crate_pipeline/ai_processing.py,sha256=sj-qPtIVLuuY_VoWoLbcGQ6_eS_giQyXIPyAGAWOCrs,24814
+rust_crate_pipeline/analysis.py,sha256=jcHHTBZ_zg5n4VGPXJYM7-NkNeL5hRdgvowkiim0onM,17663
+rust_crate_pipeline/config.py,sha256=CeDlEZ08UDA_1DkcIfTOoPpYj3kGBZNGwsefRjBKlwg,2396
+rust_crate_pipeline/github_token_checker.py,sha256=_cyOiSYc1bCVczr6pUUJc_s822ic7Qi_IW3JtI_4C0w,3796
+rust_crate_pipeline/main.py,sha256=UZj2pcHAzG5MdrgHhahWnsz3MuTQfVQ6yzf91jPtli0,10224
+rust_crate_pipeline/network.py,sha256=MFtn_-9MRBUSehfjLboUBGOMk8gv2edjOjHCR_YEyGc,12677
+rust_crate_pipeline/pipeline.py,sha256=aOLuIpfvDbPDCvft8ppUa0vRiFVdiz2wltpi26ZJaes,22769
+rust_crate_pipeline/production_config.py,sha256=24YWT68Fo2Kl8v7Hn1WgqfPrikXma9VZEuEcMr7iDik,2282
+rust_crate_pipeline/version.py,sha256=BS9a-IKMe4pIl-nSmLaSJ2bDo6r87s_h8Mk5TAsrsiI,4291
+rust_crate_pipeline/utils/file_utils.py,sha256=IJOBBp6-w9pnCdqyGcRNwBph_iwI_zzULCdAULGFUy0,2097
+rust_crate_pipeline/utils/logging_utils.py,sha256=5-o6ohm38sH1ozjZWHPlm9Wj7yILiUzvMsLJDeu11lk,2350
+rust_crate_pipeline-1.5.1.dist-info/licenses/LICENSE,sha256=tpd4XNpbssrSx9-iErATOLrOh0ivNPfO2I5MAPUpats,1088
+rust_crate_pipeline-1.5.1.dist-info/METADATA,sha256=Rk8aWxLEwJJgpuTHTHmU_JsI3BY7aHk_YWaDv22rhno,21349
+rust_crate_pipeline-1.5.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+rust_crate_pipeline-1.5.1.dist-info/entry_points.txt,sha256=9Rr_IRuFRIridXxUSdEJbB3ba0NnpEfKmknZXFdYRC0,70
+rust_crate_pipeline-1.5.1.dist-info/top_level.txt,sha256=GUdB7RyxHLhijQxui_KTy3B8p_L2APui9C6RYa0FuaE,20
+rust_crate_pipeline-1.5.1.dist-info/RECORD,,
rust_crate_pipeline-1.2.6.dist-info/RECORD
DELETED
@@ -1,19 +0,0 @@
-rust_crate_pipeline/__init__.py,sha256=m9fb1WGbyOimxK2e18FSgvLWGYBwbLoHM_mscr-nAPs,1429
-rust_crate_pipeline/__main__.py,sha256=fYgtPofuk4vkwiZ7ELP4GVMNj_QiKmZMSlvhzsNGuDs,155
-rust_crate_pipeline/ai_processing.py,sha256=B93rCDdxE-UkYMjmT0UotQTahx9-Lgzec7_bjBd3cUs,23240
-rust_crate_pipeline/analysis.py,sha256=ijP4zp3cFnN09nZkeCluyAvbyAtAW_M2YSxALpQX8LY,18615
-rust_crate_pipeline/config.py,sha256=r4Y_5SD-lfrM1112edk9T0S0MiVxaNSSHk4q2yDrM88,1528
-rust_crate_pipeline/github_token_checker.py,sha256=MJqHP8J84NEZ6nzdutpC7iRnsP0kyqscjLUosvmI4MI,3768
-rust_crate_pipeline/main.py,sha256=Wz4Q4TX-G7qvLNMyYT6cHbgRCeMJoWILCvXcJr1FYAc,7876
-rust_crate_pipeline/network.py,sha256=t_G8eh_WHNugm_laMftcWVbHsmP0bOlTPnVW9DqF6SU,13375
-rust_crate_pipeline/pipeline.py,sha256=fcWgqKC0teGeVyNbwayFwngoZLJGWwWZAlWtMqwtdyY,17074
-rust_crate_pipeline/production_config.py,sha256=TdvmO1SIRpex1xZ0AymTKXpLfkkvOG44Jyy7S5M-u7k,2304
-rust_crate_pipeline/version.py,sha256=ocKi1ObqW3ryP4VBbaECeWFiNTV6KyqCXTy0JqlOqbk,1022
-rust_crate_pipeline/utils/file_utils.py,sha256=lnHeLrt1JYaQhRDKtA1TWR2HIyRO8zwOyWb-KmAmWgk,2126
-rust_crate_pipeline/utils/logging_utils.py,sha256=O4Jnr_k9dBchrVqXf-vqtDKgizDtL_ljh8g7G2VCX_c,2241
-rust_crate_pipeline-1.2.6.dist-info/licenses/LICENSE,sha256=tpd4XNpbssrSx9-iErATOLrOh0ivNPfO2I5MAPUpats,1088
-rust_crate_pipeline-1.2.6.dist-info/METADATA,sha256=7hhV0ocxmiDPRTJPAxU7jlODkQRO5onQyrrjnI1w1x0,16797
-rust_crate_pipeline-1.2.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-rust_crate_pipeline-1.2.6.dist-info/entry_points.txt,sha256=9Rr_IRuFRIridXxUSdEJbB3ba0NnpEfKmknZXFdYRC0,70
-rust_crate_pipeline-1.2.6.dist-info/top_level.txt,sha256=GUdB7RyxHLhijQxui_KTy3B8p_L2APui9C6RYa0FuaE,20
-rust_crate_pipeline-1.2.6.dist-info/RECORD,,
{rust_crate_pipeline-1.2.6.dist-info → rust_crate_pipeline-1.5.1.dist-info}/WHEEL
RENAMED
File without changes

{rust_crate_pipeline-1.2.6.dist-info → rust_crate_pipeline-1.5.1.dist-info}/entry_points.txt
RENAMED
File without changes

{rust_crate_pipeline-1.2.6.dist-info → rust_crate_pipeline-1.5.1.dist-info}/licenses/LICENSE
RENAMED
File without changes

{rust_crate_pipeline-1.2.6.dist-info → rust_crate_pipeline-1.5.1.dist-info}/top_level.txt
RENAMED
File without changes