skillnet-ai 0.0.1__py3-none-any.whl → 0.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,156 @@
+ import requests
+ import os
+ import logging
+ from typing import Optional, List, Dict
+
+ logger = logging.getLogger(__name__)
+
+ class SkillDownloader:
+     """
+     A class to handle downloading specific subdirectories from GitHub repositories
+     and installing them as local skills.
+     """
+
+     def __init__(self, api_token: Optional[str] = None):
+         """
+         Initialize the downloader.
+
+         Args:
+             api_token: Optional GitHub Personal Access Token to avoid rate limits
+                 and access private repositories.
+         """
+         self.api_token = api_token
+         self.session = requests.Session()
+         self.session.headers.update({
+             "Accept": "application/vnd.github.v3+json"
+         })
+         if self.api_token:
+             self.session.headers.update({"Authorization": f"token {self.api_token}"})
+
+     def download(self, folder_url: str, target_dir: str = ".") -> Optional[str]:
+         """
+         Download a specific GitHub subdirectory to the target directory.
+
+         Args:
+             folder_url: The full URL to the GitHub folder.
+             target_dir: The local directory to install the skill into.
+
+         Returns:
+             The absolute path to the installed skill directory, or None on failure.
+         """
+         try:
+             # 1. Parse the URL
+             parsed_info = self._parse_github_url(folder_url)
+             if not parsed_info:
+                 return None
+
+             owner, repo, ref, dir_path, folder_name = parsed_info
+             logger.info(f"Parsed: {owner}/{repo} @ {ref} -> {dir_path}")
+
+             # 2. Get file tree
+             files_to_download = self._get_file_tree(owner, repo, ref, dir_path)
+             if not files_to_download:
+                 logger.warning("No matching files found or API error.")
+                 return None
+
+             logger.info(f"Found {len(files_to_download)} files, starting download...")
+
+             # 3. Download files
+             success_count = 0
+             failed_files = []
+             for file_info in files_to_download:
+                 if self._download_single_file(owner, repo, ref, dir_path, file_info, folder_name, target_dir):
+                     success_count += 1
+                 else:
+                     failed_files.append(file_info['path'])
+
+             if success_count == 0:
+                 logger.error("❌ Failed to download any files. Please check your network settings and ensure your connection to GitHub is working properly.")
+                 return None
+
+             final_path = os.path.abspath(os.path.join(target_dir, folder_name))
+
+             if failed_files:
+                 logger.warning(f"⚠️ Downloaded {success_count} files to {final_path}, but {len(failed_files)} failed.")
+                 logger.warning("The following files could not be downloaded:")
+                 for f in failed_files:
+                     logger.warning(f" - {f}")
+                 logger.warning("Please check your network settings and ensure your connection to GitHub is working properly.")
+             else:
+                 logger.info(f"✅ Skill installed successfully at: {final_path}")
+             return final_path
+
+         except Exception as e:
+             logger.error(f"Critical error during installation: {e}")
+             return None
+
+     def _parse_github_url(self, url: str) -> Optional[tuple]:
+         """
+         Parses a GitHub URL into its components.
+         Expected format: https://github.com/owner/repo/tree/ref/path/to/dir
+         """
+         parts = url.rstrip('/').split('/')
+         # The expected format splits into at least 8 segments:
+         # ['https:', '', 'github.com', owner, repo, 'tree', ref, first path segment, ...]
+         if len(parts) < 8:
+             logger.error(f"Invalid GitHub URL format: {url}")
+             return None
+
+         # parts[2] is usually 'github.com'
+         owner, repo = parts[3], parts[4]
+         ref = parts[6]  # branch or commit hash
+         dir_path = "/".join(parts[7:])
+         folder_name = parts[-1]
+
+         return owner, repo, ref, dir_path, folder_name
+
+     def _get_file_tree(self, owner: str, repo: str, ref: str, dir_path: str) -> List[Dict]:
+         """
+         Fetches the recursive file tree from the GitHub API and filters it for the target directory.
+         """
+         api_url = f"https://api.github.com/repos/{owner}/{repo}/git/trees/{ref}?recursive=1"
+
+         response = self.session.get(api_url)
+         if response.status_code != 200:
+             logger.error(f"GitHub API error: {response.status_code} - {response.text}")
+             return []
+
+         tree = response.json().get('tree', [])
+
+         # Keep only blobs (files, not folders) inside the target dir_path; matching against
+         # "dir_path/" avoids picking up sibling directories that share the same prefix.
+         return [
+             item for item in tree
+             if item['path'].startswith(f"{dir_path}/") and item['type'] == 'blob'
+         ]
+
+     def _download_single_file(self, owner: str, repo: str, ref: str, dir_path: str, file_info: Dict, folder_name: str, target_dir: str) -> bool:
+         """
+         Downloads a single file from raw.githubusercontent.com.
+         """
+         # Construct the raw download URL
+         # Note: private repos might require the API contents endpoint instead of raw.githubusercontent.com
+         raw_url = f"https://raw.githubusercontent.com/{owner}/{repo}/{ref}/{file_info['path']}"
+
+         # Calculate the local path by stripping only the leading dir_path prefix
+         # (str.replace would also remove later occurrences of the same segment)
+         relative_path = file_info['path'][len(dir_path):]
+         # Remove the leading slash so os.path.join does not treat it as absolute
+         relative_path = relative_path.lstrip('/')
+
+         local_file_path = os.path.join(target_dir, folder_name, relative_path)
+
+         # Create parent directories
+         os.makedirs(os.path.dirname(local_file_path), exist_ok=True)
+
+         try:
+             # For a private repo, raw.githubusercontent.com might need the token in the header;
+             # the session already carries it if one was provided.
+             file_resp = self.session.get(raw_url)
+
+             if file_resp.status_code == 200:
+                 with open(local_file_path, "wb") as f:
+                     f.write(file_resp.content)
+                 return True
+             else:
+                 logger.warning(f"Failed to download {raw_url}: {file_resp.status_code}")
+                 return False
+         except Exception as e:
+             logger.error(f"Exception downloading {raw_url}: {e}")
+             return False