clapp-pm 1.0.16__py3-none-any.whl → 1.0.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cache_manager.py +376 -0
- {clapp_pm-1.0.16.data → clapp_pm-1.0.18.data}/data/version.json +1 -1
- {clapp_pm-1.0.16.dist-info → clapp_pm-1.0.18.dist-info}/METADATA +1 -1
- {clapp_pm-1.0.16.dist-info → clapp_pm-1.0.18.dist-info}/RECORD +13 -9
- {clapp_pm-1.0.16.dist-info → clapp_pm-1.0.18.dist-info}/top_level.txt +4 -0
- main.py +183 -0
- package_signing.py +370 -0
- smart_search.py +451 -0
- version.py +1 -1
- version_manager.py +351 -0
- {clapp_pm-1.0.16.dist-info → clapp_pm-1.0.18.dist-info}/WHEEL +0 -0
- {clapp_pm-1.0.16.dist-info → clapp_pm-1.0.18.dist-info}/entry_points.txt +0 -0
- {clapp_pm-1.0.16.dist-info → clapp_pm-1.0.18.dist-info}/licenses/LICENSE +0 -0
cache_manager.py
ADDED
@@ -0,0 +1,376 @@
+#!/usr/bin/env python3
+"""
+cache_manager.py - Performans Optimizasyonu ve Önbellekleme Sistemi
+
+Bu modül clapp'in performansını artırmak için:
+- Paket meta verilerini önbellekleme
+- Registry verilerini önbellekleme
+- Dosya checksum'larını önbellekleme
+- Akıllı cache yönetimi
+- Paralel indirme desteği
+"""
+
+import os
+import json
+import hashlib
+import pickle
+import threading
+import time
+from pathlib import Path
+from typing import Dict, Any, Optional, List, Tuple
+from datetime import datetime, timedelta
+import concurrent.futures
+import requests
+from functools import wraps
+
+class CacheManager:
+    """Akıllı önbellekleme yöneticisi"""
+
+    def __init__(self, cache_dir: Optional[str] = None):
+        """
+        CacheManager başlatıcısı
+
+        Args:
+            cache_dir: Cache dizini (varsayılan: ~/.clapp/cache)
+        """
+        if cache_dir is None:
+            cache_dir = os.path.join(os.path.expanduser("~"), ".clapp", "cache")
+
+        self.cache_dir = Path(cache_dir)
+        self.cache_dir.mkdir(parents=True, exist_ok=True)
+
+        # Cache türleri
+        self.metadata_cache = self.cache_dir / "metadata"
+        self.registry_cache = self.cache_dir / "registry"
+        self.checksum_cache = self.cache_dir / "checksums"
+        self.download_cache = self.cache_dir / "downloads"
+
+        # Cache dizinlerini oluştur
+        for cache_path in [self.metadata_cache, self.registry_cache,
+                           self.checksum_cache, self.download_cache]:
+            cache_path.mkdir(exist_ok=True)
+
+        # Cache istatistikleri
+        self.stats = {
+            "hits": 0,
+            "misses": 0,
+            "evictions": 0,
+            "size": 0
+        }
+
+        # Thread-safe cache
+        self._lock = threading.Lock()
+
+    def _get_cache_key(self, key: str, cache_type: str = "metadata") -> Path:
+        """Cache anahtarı için dosya yolu oluşturur"""
+        if cache_type == "metadata":
+            return self.metadata_cache / f"{key}.json"
+        elif cache_type == "registry":
+            return self.registry_cache / f"{key}.json"
+        elif cache_type == "checksum":
+            return self.checksum_cache / f"{key}.txt"
+        elif cache_type == "download":
+            return self.download_cache / f"{key}.zip"
+        else:
+            raise ValueError(f"Geçersiz cache türü: {cache_type}")
+
+    def get(self, key: str, cache_type: str = "metadata", max_age: int = 3600) -> Optional[Any]:
+        """
+        Cache'den veri alır
+
+        Args:
+            key: Cache anahtarı
+            cache_type: Cache türü
+            max_age: Maksimum yaş (saniye)
+
+        Returns:
+            Cache'lenmiş veri veya None
+        """
+        cache_file = self._get_cache_key(key, cache_type)
+
+        if not cache_file.exists():
+            self.stats["misses"] += 1
+            return None
+
+        # Dosya yaşını kontrol et
+        file_age = time.time() - cache_file.stat().st_mtime
+        if file_age > max_age:
+            cache_file.unlink()
+            self.stats["misses"] += 1
+            return None
+
+        try:
+            with self._lock:
+                if cache_type in ["metadata", "registry"]:
+                    with open(cache_file, 'r', encoding='utf-8') as f:
+                        data = json.load(f)
+                elif cache_type == "checksum":
+                    with open(cache_file, 'r', encoding='utf-8') as f:
+                        data = f.read().strip()
+                else:
+                    # Binary dosyalar için pickle kullan
+                    with open(cache_file, 'rb') as f:
+                        data = pickle.load(f)
+
+            self.stats["hits"] += 1
+            return data
+
+        except Exception as e:
+            print(f"Cache okuma hatası: {e}")
+            cache_file.unlink()
+            self.stats["misses"] += 1
+            return None
+
+    def set(self, key: str, data: Any, cache_type: str = "metadata") -> bool:
+        """
+        Cache'e veri kaydeder
+
+        Args:
+            key: Cache anahtarı
+            data: Kaydedilecek veri
+            cache_type: Cache türü
+
+        Returns:
+            Başarılıysa True
+        """
+        cache_file = self._get_cache_key(key, cache_type)
+
+        try:
+            with self._lock:
+                if cache_type in ["metadata", "registry"]:
+                    with open(cache_file, 'w', encoding='utf-8') as f:
+                        json.dump(data, f, indent=2, ensure_ascii=False)
+                elif cache_type == "checksum":
+                    with open(cache_file, 'w', encoding='utf-8') as f:
+                        f.write(str(data))
+                else:
+                    # Binary dosyalar için pickle kullan
+                    with open(cache_file, 'wb') as f:
+                        pickle.dump(data, f)
+
+            return True
+
+        except Exception as e:
+            print(f"Cache yazma hatası: {e}")
+            return False
+
+    def delete(self, key: str, cache_type: str = "metadata") -> bool:
+        """Cache'den veri siler"""
+        cache_file = self._get_cache_key(key, cache_type)
+
+        try:
+            if cache_file.exists():
+                cache_file.unlink()
+                return True
+            return False
+        except Exception:
+            return False
+
+    def clear(self, cache_type: Optional[str] = None) -> int:
+        """
+        Cache'i temizler
+
+        Args:
+            cache_type: Temizlenecek cache türü (None ise tümü)
+
+        Returns:
+            Silinen dosya sayısı
+        """
+        deleted_count = 0
+
+        if cache_type:
+            cache_path = self._get_cache_key("", cache_type).parent
+            if cache_path.exists():
+                for file in cache_path.iterdir():
+                    if file.is_file():
+                        file.unlink()
+                        deleted_count += 1
+        else:
+            # Tüm cache'leri temizle
+            for cache_path in [self.metadata_cache, self.registry_cache,
+                               self.checksum_cache, self.download_cache]:
+                if cache_path.exists():
+                    for file in cache_path.iterdir():
+                        if file.is_file():
+                            file.unlink()
+                            deleted_count += 1
+
+        return deleted_count
+
+    def get_stats(self) -> Dict[str, Any]:
+        """Cache istatistiklerini döndürür"""
+        total_size = 0
+
+        # Cache boyutunu hesapla
+        for cache_path in [self.metadata_cache, self.registry_cache,
+                           self.checksum_cache, self.download_cache]:
+            if cache_path.exists():
+                for file in cache_path.iterdir():
+                    if file.is_file():
+                        total_size += file.stat().st_size
+
+        return {
+            **self.stats,
+            "size_bytes": total_size,
+            "size_mb": round(total_size / (1024 * 1024), 2),
+            "hit_rate": round(self.stats["hits"] / max(1, self.stats["hits"] + self.stats["misses"]) * 100, 2)
+        }
+
+    def calculate_checksum(self, file_path: str) -> str:
+        """Dosyanın SHA-256 checksum'unu hesaplar ve cache'ler"""
+        cache_key = hashlib.md5(file_path.encode()).hexdigest()
+        cached_checksum = self.get(cache_key, "checksum", max_age=86400)  # 24 saat
+
+        if cached_checksum:
+            return cached_checksum
+
+        # Checksum hesapla
+        sha256_hash = hashlib.sha256()
+        with open(file_path, "rb") as f:
+            for chunk in iter(lambda: f.read(4096), b""):
+                sha256_hash.update(chunk)
+
+        checksum = sha256_hash.hexdigest()
+        self.set(cache_key, checksum, "checksum")
+
+        return checksum
+
+    def cache_package_metadata(self, package_path: str, metadata: Dict[str, Any]) -> bool:
+        """Paket meta verilerini cache'ler"""
+        cache_key = hashlib.md5(package_path.encode()).hexdigest()
+        return self.set(cache_key, metadata, "metadata")
+
+    def get_cached_package_metadata(self, package_path: str) -> Optional[Dict[str, Any]]:
+        """Cache'lenmiş paket meta verilerini alır"""
+        cache_key = hashlib.md5(package_path.encode()).hexdigest()
+        return self.get(cache_key, "metadata", max_age=3600)  # 1 saat
+
+    def cache_registry_data(self, registry_url: str, data: List[Dict[str, Any]]) -> bool:
+        """Registry verilerini cache'ler"""
+        cache_key = hashlib.md5(registry_url.encode()).hexdigest()
+        return self.set(cache_key, data, "registry")
+
+    def get_cached_registry_data(self, registry_url: str) -> Optional[List[Dict[str, Any]]]:
+        """Cache'lenmiş registry verilerini alır"""
+        cache_key = hashlib.md5(registry_url.encode()).hexdigest()
+        return self.get(cache_key, "registry", max_age=1800)  # 30 dakika
+
+class ParallelDownloader:
+    """Paralel indirme yöneticisi"""
+
+    def __init__(self, max_workers: int = 4):
+        """
+        ParallelDownloader başlatıcısı
+
+        Args:
+            max_workers: Maksimum paralel işçi sayısı
+        """
+        self.max_workers = max_workers
+        self.session = requests.Session()
+        self.session.headers.update({
+            'User-Agent': 'clapp-package-manager/1.0'
+        })
+
+    def download_file(self, url: str, destination: str) -> Tuple[bool, str]:
+        """Tek dosya indirir"""
+        try:
+            response = self.session.get(url, stream=True, timeout=30)
+            response.raise_for_status()
+
+            with open(destination, 'wb') as f:
+                for chunk in response.iter_content(chunk_size=8192):
+                    f.write(chunk)
+
+            return True, f"Dosya indirildi: {destination}"
+
+        except Exception as e:
+            return False, f"İndirme hatası: {str(e)}"
+
+    def download_files_parallel(self, download_tasks: List[Tuple[str, str]]) -> List[Tuple[bool, str]]:
+        """
+        Birden fazla dosyayı paralel indirir
+
+        Args:
+            download_tasks: [(url, destination), ...] listesi
+
+        Returns:
+            [(success, message), ...] listesi
+        """
+        results = []
+
+        with concurrent.futures.ThreadPoolExecutor(max_workers=self.max_workers) as executor:
+            # İndirme görevlerini başlat
+            future_to_task = {
+                executor.submit(self.download_file, url, dest): (url, dest)
+                for url, dest in download_tasks
+            }
+
+            # Sonuçları topla
+            for future in concurrent.futures.as_completed(future_to_task):
+                task = future_to_task[future]
+                try:
+                    result = future.result()
+                    results.append(result)
+                except Exception as e:
+                    results.append((False, f"İndirme hatası: {str(e)}"))
+
+        return results
+
+# Cache decorator
+def cached(max_age: int = 3600, cache_type: str = "metadata"):
+    """Cache decorator'ı"""
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            # Cache anahtarı oluştur
+            cache_key = f"{func.__name__}_{hash(str(args) + str(sorted(kwargs.items())))}"
+
+            # Cache manager oluştur
+            cache_manager = CacheManager()
+
+            # Cache'den kontrol et
+            cached_result = cache_manager.get(cache_key, cache_type, max_age)
+            if cached_result is not None:
+                return cached_result
+
+            # Fonksiyonu çalıştır
+            result = func(*args, **kwargs)
+
+            # Sonucu cache'le
+            cache_manager.set(cache_key, result, cache_type)
+
+            return result
+        return wrapper
+    return decorator
+
+# Yardımcı fonksiyonlar
+def create_cache_manager() -> CacheManager:
+    """Varsayılan ayarlarla CacheManager oluşturur"""
+    return CacheManager()
+
+def create_parallel_downloader(max_workers: int = 4) -> ParallelDownloader:
+    """ParallelDownloader oluşturur"""
+    return ParallelDownloader(max_workers)
+
+def get_cache_stats() -> Dict[str, Any]:
+    """Cache istatistiklerini alır"""
+    cache_manager = create_cache_manager()
+    return cache_manager.get_stats()
+
+def clear_all_caches() -> int:
+    """Tüm cache'leri temizler"""
+    cache_manager = create_cache_manager()
+    return cache_manager.clear()
+
+def download_packages_parallel(package_urls: List[str], destination_dir: str) -> List[Tuple[bool, str]]:
+    """Paketleri paralel indirir"""
+    downloader = create_parallel_downloader()
+
+    # İndirme görevlerini hazırla
+    download_tasks = []
+    for url in package_urls:
+        filename = os.path.basename(url)
+        destination = os.path.join(destination_dir, filename)
+        download_tasks.append((url, destination))
+
+    return downloader.download_files_parallel(download_tasks)
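
For orientation, here is a minimal usage sketch of the new module, based only on the API added above; the registry URL, download URLs, and destination paths are placeholders rather than anything shipped with clapp.

```python
from cache_manager import CacheManager, cached, create_parallel_downloader

# Metadata entries are written as JSON under ~/.clapp/cache/metadata by default.
cache = CacheManager()
cache.set("hello-python", {"name": "hello-python", "version": "1.0.0"}, "metadata")
print(cache.get("hello-python", "metadata", max_age=3600))
print(cache.get_stats())  # hits, misses, size_mb, hit_rate

# The decorator caches a function's return value, keyed on its arguments.
@cached(max_age=1800, cache_type="registry")
def fetch_index(url):
    # Placeholder body; a real caller would fetch and parse the registry here.
    return [{"name": "hello-python", "version": "1.0.0"}]

fetch_index("https://example.com/index.json")  # a repeat call within 30 minutes is served from cache

# Parallel downloads take (url, destination) tuples and return (success, message) tuples.
downloader = create_parallel_downloader(max_workers=4)
for ok, message in downloader.download_files_parallel([
    ("https://example.com/a.zip", "/tmp/a.zip"),
    ("https://example.com/b.zip", "/tmp/b.zip"),
]):
    print(ok, message)
```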

{clapp_pm-1.0.16.dist-info → clapp_pm-1.0.18.dist-info}/RECORD
CHANGED
@@ -1,3 +1,4 @@
+cache_manager.py,sha256=HdQ3vnZO_YP0vyotzjP5wozltVNgkJAxrvYFXhGLGCI,13287
 check_env.py,sha256=9O8CrdWGcxd_WBV5bsAAZG9iAaiO-nGYDYY-gYZ9G3M,4132
 clapp_core.py,sha256=5tkzt7J4GKSv7Chv5Zoe9N0dv_sWlYGqYxmIn3dhEPY,1764
 clean_command.py,sha256=nmzitkdNo0Ovgi-tGPUxa9mkAIFzwEzGNaTm82MUdvw,6806
@@ -8,18 +9,21 @@ info_command.py,sha256=b74Pl1-x_PSezALx-lS3FkEcVNTF7a9McTKk5XTDhjM,5629
 install_command.py,sha256=1Ogy_4nOsnLXzoUBys9yeLf5jKkGv9C40wuGZo2COC0,9445
 installer.py,sha256=EkQ5Z1Cq_Z5u_Ssv2Nc8hryI5E97x4D1QVjSqEHe0dE,10619
 list_command.py,sha256=qbeocvrg2eXRklxbYS3audQhYHGXTlMBk_tNh1cMxd0,8391
-main.py,sha256=
+main.py,sha256=sDbezA6-irQ8QDRvA8wi7rkfyx73Dn7MWMCmlDiTKbI,23590
 manifest_schema.py,sha256=IxfKuYgcIhILJrDMOm5vjSJn2jp7hPpUoxtjPMCPvbE,2201
 manifest_validator.py,sha256=MTI6c_sYfVakQ6aQUu5_qkukTh4H1FcSrT4uRdE6xIg,7990
 package_registry.py,sha256=Rco15cvz-6lpCEDdCzwGZNCKtvVhlFIsEPy-WFAESMM,4233
 package_runner.py,sha256=jbn9OJDU4p-v63dO66C5EBSbT0dlWQIjtp7x2VE6lU0,2758
+package_signing.py,sha256=CdJKLNQLpQJmckgatOnnYDl_4Ve_U1tgWFlzvhXLhq8,12991
 post_install_hint.py,sha256=wjMPCgRurZiGu6hv_se-XA36KqBCdeYdRCD1q7FrJzQ,4918
 publish_command.py,sha256=P05AFbu_mxcc1yAiwShN5Yi9PX1o_7TFXD1mowJcqJE,8589
 remote_registry.py,sha256=rPBIM_ESXUt0br5cARQ4YbzUoTda0G4e1KGzfyYMbpQ,8235
+smart_search.py,sha256=R5O5CDXqlQc-N-6R6D5k36_-arCB-wnKfmWr5zMYWUI,15741
 uninstall_command.py,sha256=rQYbZ-XMw8Xxw1fmgGdDaBQmgBGqyJ_rTBZkvEV5HV0,7066
 validate_command.py,sha256=idaujErzrwuZNT6DYCVTVwZqBDEEi1GTxIXAGBgKMKM,7623
-version.py,sha256=
+version.py,sha256=v2-Js_gwcyS6cS3GUE0svFpltW1QaNxMWqolPkiYckc,224
 version_command.py,sha256=DZuYWtohSeM5PJNYCflBy36_k0vex3tYV2C8ixEA9ho,4259
+version_manager.py,sha256=fpC7jxhIW1wZhJ9IkVwyqkgJN4mhBjUUbaLefXLDMiM,11423
 where_command.py,sha256=TcLoXLGmrPSHQuvlceVuuKBsfeadIwz-E0G_5okH14g,6420
 backup_current/build_index.py,sha256=8yKrUnh1Wre31Jt-kgMRxLiW6AIWpmZoNR4wEwAsP2E,4195
 backup_current/check_env.py,sha256=5DJwdALpRe1TLbzF_Zta-zD312J065-pPmsJZ17xRDg,4425
@@ -56,16 +60,16 @@ clapp-packages-repo/packages/test-app/main.py,sha256=rN4Zo9u53bIVjcUlul059knx6v-
 clapp-packages-repo/packages/test-app/manifest.json,sha256=kJe4sjYdPRNZD5hEeca80jj3lxeEWBMJoZ59RW7tiKI,118
 clapp-packages-repo/packages/test-app2/main.py,sha256=lHkbjTmehFY4VuYYF2dYiVBH7W0oqHHeY0I5W85iPTY,35
 clapp-packages-repo/packages/test-app2/manifest.json,sha256=vshXJrtRxBc_ISM6E8KT5BSmveMbjWszenlgxgSN86w,121
-clapp_pm-1.0.
-clapp_pm-1.0.
+clapp_pm-1.0.18.data/data/version.json,sha256=vjVSIoBpTGhKmQn_whZcLBQvZROYh0bwmu0koOga7GE,239
+clapp_pm-1.0.18.dist-info/licenses/LICENSE,sha256=_hryv9pKR6udRexceUYuoYCJGmYBz7e-vRuFWmm38UY,1075
 packages/hello-python/main.py,sha256=Dy-Ov-Vumj8oQYI6qKWU6fIKD0gCB8b7KzAJVrGyLMg,1429
 packages/hello-python/manifest.json,sha256=fJOVJk_2rwpRJ6IeWMPieklJD3gAR279jvuqRH69s90,179
 packages/test-app/main.py,sha256=rN4Zo9u53bIVjcUlul059knx6v-2Cd1MFftPS57FIRU,33
 packages/test-app/manifest.json,sha256=kJe4sjYdPRNZD5hEeca80jj3lxeEWBMJoZ59RW7tiKI,118
 packages/test-app2/main.py,sha256=lHkbjTmehFY4VuYYF2dYiVBH7W0oqHHeY0I5W85iPTY,35
 packages/test-app2/manifest.json,sha256=vshXJrtRxBc_ISM6E8KT5BSmveMbjWszenlgxgSN86w,121
-clapp_pm-1.0.
-clapp_pm-1.0.
-clapp_pm-1.0.
-clapp_pm-1.0.
-clapp_pm-1.0.
+clapp_pm-1.0.18.dist-info/METADATA,sha256=CV70wSEfsj7lpDkbQqqNBmzwIjqoAy-1Zech_-SRp8c,3980
+clapp_pm-1.0.18.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+clapp_pm-1.0.18.dist-info/entry_points.txt,sha256=7j-3pQVpQfnaVzUV83g7zlCA30ePlnXkhHLAGGz9xrQ,36
+clapp_pm-1.0.18.dist-info/top_level.txt,sha256=WMvIV8nU6pIZGL4xalAcOVmjBDjI6NoCd_Os3rdT9bI,433
+clapp_pm-1.0.18.dist-info/RECORD,,

{clapp_pm-1.0.16.dist-info → clapp_pm-1.0.18.dist-info}/top_level.txt
CHANGED
@@ -1,4 +1,5 @@
 backup_current
+cache_manager
 check_env
 clapp-packages-repo
 clapp_core
@@ -15,12 +16,15 @@ manifest_schema
 manifest_validator
 package_registry
 package_runner
+package_signing
 packages
 post_install_hint
 publish_command
 remote_registry
+smart_search
 uninstall_command
 validate_command
 version
 version_command
+version_manager
 where_command
main.py
CHANGED
@@ -32,6 +32,12 @@ from clean_command import run_clean
 from where_command import locate_app_path, list_all_app_locations
 from version_command import print_version, print_detailed_version

+# Yeni güvenlik ve performans modülleri
+from package_signing import sign_package_file, verify_package_file, check_package_security
+from version_manager import check_app_updates, get_app_latest_version, increment_app_version
+from cache_manager import get_cache_stats, clear_all_caches, download_packages_parallel
+from smart_search import search_packages, get_search_suggestions, get_search_analytics, clear_search_history
+
 # Yeni publish.cursorrules komutları
 from publish_command import publish_app
 from install_command import install_app
@@ -173,6 +179,34 @@ def main():
     version_parser.add_argument('--json', action='store_true', help='JSON formatında')
     version_parser.add_argument('--detailed', action='store_true', help='Detaylı bilgi')

+    # Güvenlik komutları
+    security_parser = subparsers.add_parser('security', help='Paket güvenlik işlemleri')
+    security_parser.add_argument('action', choices=['sign', 'verify', 'check'], help='Güvenlik işlemi')
+    security_parser.add_argument('package_path', help='Paket dosyası yolu')
+    security_parser.add_argument('--signature', help='İmza dosyası yolu (verify için)')
+
+    # Versiyon yönetimi komutları
+    update_parser = subparsers.add_parser('update', help='Versiyon yönetimi')
+    update_parser.add_argument('action', choices=['check', 'increment'], help='İşlem türü')
+    update_parser.add_argument('--app', help='Uygulama adı')
+    update_parser.add_argument('--type', choices=['major', 'minor', 'patch'], default='patch', help='Artırma tipi')
+
+    # Cache yönetimi komutları
+    cache_parser = subparsers.add_parser('cache', help='Cache yönetimi')
+    cache_parser.add_argument('action', choices=['stats', 'clear', 'download'], help='Cache işlemi')
+    cache_parser.add_argument('--urls', nargs='+', help='İndirilecek URL\'ler (download için)')
+    cache_parser.add_argument('--dest', help='Hedef dizin (download için)')
+
+    # Akıllı arama komutları
+    search_parser = subparsers.add_parser('search', help='Akıllı arama')
+    search_parser.add_argument('query', nargs='?', help='Arama sorgusu')
+    search_parser.add_argument('--suggestions', action='store_true', help='Arama önerileri')
+    search_parser.add_argument('--analytics', action='store_true', help='Arama analitikleri')
+    search_parser.add_argument('--clear-history', action='store_true', help='Arama geçmişini temizle')
+    search_parser.add_argument('--language', help='Dil filtresi')
+    search_parser.add_argument('--category', help='Kategori filtresi')
+    search_parser.add_argument('--sort', choices=['relevance', 'name', 'version', 'language'], default='relevance', help='Sıralama')
+
 # dependency komutu (yeni)
     dependency_parser = subparsers.add_parser('dependency', help='Bağımlılık yönetimi')
     dependency_subparsers = dependency_parser.add_subparsers(dest='dependency_command', help='Bağımlılık alt komutları')
@@ -341,6 +375,155 @@ def main():
             else:
                 print("❌ Geçersiz dependency komutu")
                 sys.exit(1)
+
+        elif args.command == 'security':
+            if args.action == 'sign':
+                success, message = sign_package_file(args.package_path)
+                if success:
+                    print(f"✅ {message}")
+                else:
+                    print(f"❌ {message}")
+                    sys.exit(1)
+
+            elif args.action == 'verify':
+                signature_path = args.signature or args.package_path.replace('.zip', '.sig')
+                success, message = verify_package_file(args.package_path, signature_path)
+                print(f"{'✅' if success else '❌'} {message}")
+                sys.exit(0 if success else 1)
+
+            elif args.action == 'check':
+                results = check_package_security(args.package_path)
+                print("🔒 Paket Güvenlik Kontrolü")
+                print("=" * 40)
+                print(f"Bütünlük: {'✅' if results['integrity'] else '❌'}")
+                print(f"İmza: {'✅' if results['signature'] else '❌'}")
+                print(f"Checksum: {results['checksum']}")
+                if results['warnings']:
+                    print("\n⚠️ Uyarılar:")
+                    for warning in results['warnings']:
+                        print(f" - {warning}")
+
+        elif args.command == 'update':
+            if args.action == 'check':
+                if not args.app:
+                    print("❌ --app parametresi gerekli")
+                    sys.exit(1)
+
+                from package_registry import get_manifest
+                manifest = get_manifest(args.app)
+                if not manifest:
+                    print(f"❌ {args.app} uygulaması bulunamadı")
+                    sys.exit(1)
+
+                current_version = manifest.get('version', '0.0.0')
+                update_info = check_app_updates(args.app, current_version)
+
+                print(f"📦 {args.app} Güncelleme Kontrolü")
+                print("=" * 40)
+                print(f"Mevcut: {update_info['current_version']}")
+                print(f"En son: {update_info['latest_version'] or 'Bilinmiyor'}")
+                print(f"Durum: {update_info['message']}")
+
+                if update_info['has_update']:
+                    print(f"Güncelleme tipi: {update_info['update_type']}")
+
+            elif args.action == 'increment':
+                if not args.app:
+                    print("❌ --app parametresi gerekli")
+                    sys.exit(1)
+
+                from package_registry import get_manifest
+                manifest = get_manifest(args.app)
+                if not manifest:
+                    print(f"❌ {args.app} uygulaması bulunamadı")
+                    sys.exit(1)
+
+                current_version = manifest.get('version', '0.0.0')
+                new_version = increment_app_version(current_version, args.type)
+                print(f"📦 {args.app} versiyonu artırıldı")
+                print(f"Eski: {current_version} → Yeni: {new_version}")
+
+        elif args.command == 'cache':
+            if args.action == 'stats':
+                stats = get_cache_stats()
+                print("📊 Cache İstatistikleri")
+                print("=" * 30)
+                print(f"Hit: {stats['hits']}")
+                print(f"Miss: {stats['misses']}")
+                print(f"Hit Rate: {stats['hit_rate']}%")
+                print(f"Boyut: {stats['size_mb']} MB")
+
+            elif args.action == 'clear':
+                deleted_count = clear_all_caches()
+                print(f"✅ {deleted_count} cache dosyası silindi")
+
+            elif args.action == 'download':
+                if not args.urls or not args.dest:
+                    print("❌ --urls ve --dest parametreleri gerekli")
+                    sys.exit(1)
+
+                os.makedirs(args.dest, exist_ok=True)
+                results = download_packages_parallel(args.urls, args.dest)
+
+                print("📥 Paralel İndirme Sonuçları")
+                print("=" * 40)
+                for success, message in results:
+                    print(f"{'✅' if success else '❌'} {message}")
+
+        elif args.command == 'search':
+            if args.suggestions:
+                from package_registry import list_packages
+                packages = list_packages()
+                suggestions = get_search_suggestions(args.query or "", packages)
+                print("💡 Arama Önerileri")
+                print("=" * 20)
+                for suggestion in suggestions:
+                    print(f" • {suggestion}")
+
+            elif args.analytics:
+                analytics = get_search_analytics()
+                print("📈 Arama Analitikleri")
+                print("=" * 25)
+                print(f"Toplam arama: {analytics['total_searches']}")
+                print(f"Benzersiz sorgu: {analytics['unique_queries']}")
+                print(f"Hit rate: {round(analytics['total_searches'] / max(1, analytics['unique_queries']) * 100, 1)}%")
+
+                if analytics['most_popular']:
+                    print("\n🔥 Popüler Aramalar:")
+                    for query, count in analytics['most_popular']:
+                        print(f" • {query} ({count} kez)")
+
+            elif args.clear_history:
+                clear_search_history()
+
+            elif args.query:
+                from package_registry import list_packages
+                packages = list_packages()
+
+                filters = {}
+                if args.language:
+                    filters['language'] = args.language
+                if args.category:
+                    filters['category'] = args.category
+                if args.sort:
+                    filters['sort_by'] = args.sort
+
+                results = search_packages(args.query, packages, filters)
+
+                print(f"🔍 '{args.query}' için {len(results)} sonuç bulundu")
+                print("=" * 50)
+
+                for package in results:
+                    score = package.get('search_score', 0)
+                    print(f"📦 {package['name']} v{package['version']} ({package['language']})")
+                    print(f" {package['description']}")
+                    if score > 0:
+                        print(f" Skor: {score:.2f}")
+                    print()
+
+            else:
+                print("❌ Arama sorgusu gerekli veya --suggestions/--analytics kullanın")
+                sys.exit(1)

     except KeyboardInterrupt:
         print("\n❌ İşlem iptal edildi")
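
Taken together, the main.py changes wire the four new modules into the CLI. Below is a hedged smoke test of the new subcommands, assuming the wheel is installed and exposes a `clapp` console script (per entry_points.txt); the app name and package path are hypothetical examples, not files guaranteed to exist.

```python
# Runs each new subcommand and reports its exit code; the command names and
# options come from the argparse setup in the diff above.
import subprocess

commands = [
    ["clapp", "cache", "stats"],
    ["clapp", "search", "hello", "--sort", "relevance"],
    ["clapp", "update", "check", "--app", "hello-python"],        # example app name
    ["clapp", "security", "check", "packages/hello-python.zip"],  # hypothetical path
]

for cmd in commands:
    result = subprocess.run(cmd, capture_output=True, text=True)
    print(" ".join(cmd), "->", result.returncode)
```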