afwf_github 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- afwf_github/__init__.py +1 -0
- afwf_github/api.py +2 -0
- afwf_github/cache.py +22 -0
- afwf_github/cli.py +295 -0
- afwf_github/config.py +44 -0
- afwf_github/constants.py +36 -0
- afwf_github/dataset.py +101 -0
- afwf_github/github.py +179 -0
- afwf_github/paths.py +93 -0
- afwf_github/type_hint.py +5 -0
- afwf_github/vendor/__init__.py +2 -0
- afwf_github/vendor/pytest_cov_helper.py +148 -0
- afwf_github-1.0.1.dist-info/METADATA +154 -0
- afwf_github-1.0.1.dist-info/RECORD +19 -0
- afwf_github-1.0.1.dist-info/WHEEL +5 -0
- afwf_github-1.0.1.dist-info/entry_points.txt +2 -0
- afwf_github-1.0.1.dist-info/licenses/AUTHORS.rst +15 -0
- afwf_github-1.0.1.dist-info/licenses/LICENSE.txt +21 -0
- afwf_github-1.0.1.dist-info/top_level.txt +1 -0
afwf_github/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
afwf_github/api.py
ADDED
afwf_github/cache.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
|
|
3
|
+
"""
|
|
4
|
+
Factory for diskcache instances used to store GitHub API responses.
|
|
5
|
+
|
|
6
|
+
Each GitHub user gets their own cache directory under their per-user data
|
|
7
|
+
directory. sayt2 datasets manage their own internal search result cache
|
|
8
|
+
separately — this module only covers GitHub API response caching.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
|
|
13
|
+
from diskcache import Cache
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def make_cache(dir_cache: Path) -> Cache:
    """Create a ``diskcache.Cache`` rooted at *dir_cache*.

    diskcache creates the directory itself when it is missing, so callers
    do not need to ensure the path exists beforehand.
    """
    cache = Cache(dir_cache)
    return cache
|
afwf_github/cli.py
ADDED
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
|
|
3
|
+
import sys
|
|
4
|
+
import json
|
|
5
|
+
import typing as T
|
|
6
|
+
import functools
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from functools import cached_property
|
|
9
|
+
|
|
10
|
+
import fire
|
|
11
|
+
import afwf.api as afwf
|
|
12
|
+
import git_web_url.api as gwu
|
|
13
|
+
from git_web_url.exc import NotGitRepoError
|
|
14
|
+
|
|
15
|
+
from .config import Config
|
|
16
|
+
from .paths import path_enum
|
|
17
|
+
from .dataset import create_repo_dataset
|
|
18
|
+
|
|
19
|
+
# Blank config template written to disk by ``edit-config`` when no
# config.json exists yet. Keys mirror the fields of :class:`Config`.
_CONFIG_TEMPLATE = {
    "pac_token": None,
    "pac_token_home_secret_toml_path": None,
    "cache_expire": 2_592_000,  # 30 days, in seconds
}

# Shared decorator: log unhandled exceptions (traceback capped at 10
# frames) to the workflow's error log file.
_log_error = afwf.log_error(
    log_file=path_enum.path_error_log,
    tb_limit=10,
)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _error_sf(exc: Exception) -> afwf.ScriptFilter:
    """Build a one-item Script Filter describing *exc*; Enter opens the error log."""
    log_path = path_enum.path_error_log
    error_item = afwf.Item(
        title=f"{type(exc).__name__}: {exc}",
        subtitle=f"Press Enter to open the error log: {log_path}",
        icon=afwf.Icon.from_image_file(path=afwf.IconFileEnum.error),
        valid=True,
    )
    error_item.open_file(str(log_path))
    return afwf.ScriptFilter(items=[error_item])
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def _config_error_sf(config_path: Path) -> afwf.ScriptFilter:
    """Build a one-item Script Filter for a missing/bad config file; Enter opens the setup guide."""
    error_item = afwf.Item(
        title=f"Config file not found: {config_path}",
        subtitle="Press Enter to open the setup guide on GitHub",
        icon=afwf.Icon.from_image_file(path=afwf.IconFileEnum.error),
        valid=True,
    )
    error_item.open_url("https://github.com/MacHu-GWU/afwf_github-project")
    return afwf.ScriptFilter(items=[error_item])
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def require_config(method: T.Callable) -> T.Callable:
    """Decorator: resolve and load the workflow config before running *method*.

    Resolution order:

    1. ``self.config_file`` when it was supplied on the command line.
    2. Otherwise the default path (``path_enum.path_config_json``).

    On any failure (file missing, parse error) a single error item that
    opens the project README on Enter is sent as feedback, and the wrapped
    method is never invoked. On success ``self._config`` is populated
    before the wrapped method runs.

    ``functools.wraps`` is still required in Python 3.10+ to copy
    ``__name__``, ``__doc__``, ``__wrapped__``, etc. onto the wrapper —
    ``ParamSpec`` (added in 3.10) improves type-checker inference for
    decorators but does not replace ``functools.wraps``.
    """

    @functools.wraps(method)
    def wrapper(self: "Command", *args, **kwargs):
        # Pick the explicit path when given, otherwise the default location.
        config_path = (
            Path(self.config_file).expanduser().resolve()
            if self.config_file is not None
            else path_enum.path_config_json
        )

        if not config_path.exists():
            _config_error_sf(config_path).send_feedback()
            return

        try:
            self._config = Config.load(config_path)
        except Exception:
            # Parse/validation failure gets the same guided error item.
            _config_error_sf(config_path).send_feedback()
            return

        return method(self, *args, **kwargs)

    return wrapper
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
class Command:
    """Alfred GitHub Workflow CLI.

    All subcommands accept an optional ``--config-file`` argument (absolute
    or relative path). When omitted, config is loaded from the default
    location via ``default_config``.
    """

    def __init__(self, config_file: str | None = None):
        # Optional path to an alternate config.json; ``require_config``
        # falls back to the default location when this is None.
        self.config_file = config_file
        # Populated by the ``require_config`` decorator before a
        # config-dependent subcommand body runs.
        self._config: Config | None = None

    @cached_property
    def default_config(self) -> Config:
        """Load config from the default path (``~/.alfred-afwf/afwf_github/config.json``)."""
        return Config.load(path_enum.path_config_json)

    # NOTE(review): this decorator is called with no arguments, unlike the
    # module-level ``_log_error = afwf.log_error(log_file=..., tb_limit=10)``
    # used everywhere else — confirm ``afwf.log_error`` has usable defaults,
    # otherwise this raises at import time.
    @afwf.log_error(
    )
    def edit_config(self) -> None:
        """Script Filter: open config.json in the default editor.

        Creates a blank template at the default path if the file does not yet
        exist, then opens it via Alfred's Open File action.

        Alfred Script field (dev):
            .venv/bin/afwf-github edit-config

        Alfred Script field (prod):
            ~/.local/bin/uvx --from afwf_github==<ver> afwf-github edit-config
        """
        @_log_error
        def _run():
            config_path = path_enum.path_config_json
            if not config_path.exists():
                # First run: seed a template so the user has keys to fill in.
                config_path.parent.mkdir(parents=True, exist_ok=True)
                config_path.write_text(json.dumps(_CONFIG_TEMPLATE, indent=4))
            item = afwf.Item(
                title="Open and edit config.json",
                subtitle=str(config_path),
                icon=afwf.Icon.from_image_file(path=afwf.IconFileEnum.file),
            )
            item.open_file(str(config_path))
            afwf.ScriptFilter(items=[item]).send_feedback()

        try:
            _run()
        except Exception as e:
            # Always answer Alfred with a feedback item, even on failure.
            _error_sf(e).send_feedback()

    @require_config
    def view_in_browser(self, path: str = "") -> None:
        """Script Filter: given a local file or directory path, open its GitHub URL in the browser.

        Alfred Script field (dev):
            .venv/bin/afwf-github view-in-browser --path '{query}'

        Alfred Script field (prod):
            ~/.local/bin/uvx --from afwf_github==<ver> afwf-github view-in-browser --path '{query}'
        """
        @_log_error
        def _run():
            # Empty query: prompt the user instead of attempting a lookup.
            if not path.strip():
                afwf.ScriptFilter(
                    items=[
                        afwf.Item(
                            title="Type or paste the absolute path of a local file or directory"
                        )
                    ]
                ).send_feedback()
                return

            try:
                url = gwu.get_web_url(Path(path))
                item = afwf.Item(
                    title=f"Open in browser: {url}",
                    subtitle=f"Local path: {path}",
                    icon=afwf.Icon.from_image_file(path=afwf.IconFileEnum.internet),
                )
                item.open_url(url)
            except NotGitRepoError:
                # Expected failure mode: the path is outside any git repo.
                item = afwf.Item(
                    title=f"Not a git repository path: {path}",
                    subtitle="Only paths inside a git repo with a remote can be opened in browser",
                    icon=afwf.Icon.from_image_file(path=afwf.IconFileEnum.error),
                    valid=False,
                )

            afwf.ScriptFilter(items=[item]).send_feedback()

        try:
            _run()
        except Exception as e:
            _error_sf(e).send_feedback()

    @require_config
    def search_repo(self, query: str = "") -> None:
        """Script Filter: search GitHub repositories in the local index.

        Alfred Script field (dev):
            .venv/bin/afwf-github search-repo --query '{query}'

        Alfred Script field (prod):
            ~/.local/bin/uvx --from afwf_github==<ver> afwf-github search-repo --query '{query}'
        """
        @_log_error
        def _run():
            if not query.strip():
                afwf.ScriptFilter(
                    items=[afwf.Item(title="Type to search GitHub repositories ...")]
                ).send_feedback()
                return

            dataset = create_repo_dataset(config=self._config)
            result = dataset.search(query=query, limit=50)

            if not result.hits:
                afwf.ScriptFilter(
                    items=[
                        afwf.Item(
                            title=f"No repository found for: {query!r}",
                            icon=afwf.Icon.from_image_file(path=afwf.IconFileEnum.error),
                            valid=False,
                        )
                    ]
                ).send_feedback()
                return

            # One Alfred item per hit; Enter opens the repo page.
            items = []
            for hit in result.hits:
                repo = hit.source
                account_name = repo["acc"]
                repo_name = repo["repo"]
                repo_description = repo.get("desc", "No description")
                url = f"https://github.com/{account_name}/{repo_name}"
                item = afwf.Item(
                    title=f"{account_name}/{repo_name}",
                    subtitle=repo_description,
                    autocomplete=f"{account_name}/{repo_name}",
                )
                item.open_url(url)
                items.append(item)
            afwf.ScriptFilter(items=items).send_feedback()

        try:
            _run()
        except Exception as e:
            _error_sf(e).send_feedback()

    @require_config
    def rebuild_index(self) -> None:
        """Script Filter: show a single item that triggers ``rebuild-index-action`` on Enter.

        Alfred Script field (dev):
            .venv/bin/afwf-github rebuild-index

        Alfred Script field (prod):
            ~/.local/bin/uvx --from afwf_github==<ver> afwf-github rebuild-index
        """
        @_log_error
        def _run():
            # Invoke the sibling console script from the same environment
            # that is running this process.
            bin_cli = Path(sys.executable).parent / "afwf-github"
            cmd = f"{bin_cli} rebuild-index-action"
            if self.config_file is not None:
                # Forward the explicit config path to the action.
                cmd += f" --config-file {self.config_file!r}"

            item = afwf.Item(
                title="Rebuild Index for GitHub Alfred Workflow",
                subtitle="Hit Enter to rebuild — may take 10–20 seconds",
                icon=afwf.Icon.from_image_file(path=afwf.IconFileEnum.reset),
            )
            item.run_script(cmd)
            afwf.ScriptFilter(items=[item]).send_feedback()

        try:
            _run()
        except Exception as e:
            _error_sf(e).send_feedback()

    @require_config
    @_log_error
    def rebuild_index_action(self) -> None:
        """Rebuild the local repo search index by re-fetching data from GitHub.

        Called by Alfred's Run Script widget — NOT a Script Filter.

        Alfred Run Script field (dev):
            .venv/bin/afwf-github rebuild-index-action

        Alfred Run Script field (prod):
            ~/.local/bin/uvx --from afwf_github==<ver> afwf-github rebuild-index-action
        """
        # ``refresh=True`` forces the dataset to re-run its downloader.
        create_repo_dataset(config=self._config).search(
            query="",
            refresh=True,
        )
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
def run():
    """Console entry point: dispatch ``Command`` subcommands via python-fire."""
    fire.Fire(Command)
|
afwf_github/config.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from functools import cached_property
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel, ConfigDict, model_validator
|
|
8
|
+
from github import Github, Auth
|
|
9
|
+
from home_secret_toml.api import hs
|
|
10
|
+
|
|
11
|
+
from .paths import path_enum
|
|
12
|
+
|
|
13
|
+
# Re-exported convenience alias for the default config file location.
path_config_json = path_enum.path_config_json
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class Config(BaseModel):
    """User configuration for the workflow.

    At least one of ``pac_token`` / ``pac_token_home_secret_toml_path``
    must be provided (enforced by :meth:`check_pac_token`).
    """

    model_config = ConfigDict(extra="forbid")

    # GitHub personal access token, given directly.
    pac_token: str | None = None
    # Alternatively, a home-secret-toml lookup path that resolves to the token.
    pac_token_home_secret_toml_path: str | None = None
    # How long cached GitHub API responses stay valid, in seconds.
    cache_expire: int = 30 * 24 * 3600

    @model_validator(mode="after")
    def check_pac_token(self):
        """Reject configs that provide no token source at all."""
        if self.pac_token is None and self.pac_token_home_secret_toml_path is None:
            raise ValueError("Must provide pac_token or pac_token_home_secret_toml_path")
        return self

    @classmethod
    def load(cls, path: Path) -> "Config":  # pragma: no cover
        """Load and validate a config from the JSON file at *path*."""
        return cls.model_validate(json.loads(path.read_text()))

    def dump(self, path: Path):  # pragma: no cover
        """Write this config to *path* as pretty-printed JSON."""
        path.write_text(self.model_dump_json(indent=4))

    @cached_property
    def gh(self) -> Github:
        """Authenticated PyGithub client, created lazily and cached per instance."""
        if self.pac_token is not None:
            pac_token = self.pac_token
        elif self.pac_token_home_secret_toml_path is not None:
            # Resolve the token from the user's home secret TOML store.
            pac_token = hs.v(self.pac_token_home_secret_toml_path)
        else:
            # Unreachable in practice (check_pac_token enforces a token source).
            # BUG FIX: the message previously said "pac_token_home_secret_toml",
            # which does not match the actual field name.
            raise ValueError("Must provide pac_token or pac_token_home_secret_toml_path")
        return Github(auth=Auth.Token(pac_token))
|
afwf_github/constants.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
|
|
3
|
+
import enum
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class CacheKeyEnum(str, enum.Enum):
    """
    Cache keys for GitHub data, parameterized by GitHub username.

    Each data type lives under its own key (``user@{username}``,
    ``accounts@{username}``, ``repos@{username}``) instead of one combined
    JSON blob. Callers can therefore fetch only what they need: reading
    the authenticated user or the org list is cheap and instantaneous,
    while fetching every repository can be slow for accounts with hundreds
    of repos — keeping the keys separate avoids paying the full download
    cost just to answer a lightweight query.
    """

    user = "user"  # authenticated GitHub user — id and display name
    accounts = "accounts"  # the user themselves plus every org they belong to
    repos = "repos"  # all repositories accessible across all accounts

    def of(self, username: str) -> str:
        """Build the concrete cache key for *username*.

        Example::

            >>> CacheKeyEnum.repos.of("alice")
            'repos@alice'
        """
        key_prefix = self.value
        return "{}@{}".format(key_prefix, username)
|
afwf_github/dataset.py
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
|
|
3
|
+
"""
|
|
4
|
+
Github repo dataset related
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import typing as T
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
from diskcache import Cache
|
|
11
|
+
from github import Github
|
|
12
|
+
from sayt2.api import DataSet, NgramField, TextField
|
|
13
|
+
|
|
14
|
+
from .type_hint import T_RECORD
|
|
15
|
+
from .paths import path_enum
|
|
16
|
+
from .cache import make_cache
|
|
17
|
+
from .config import Config
|
|
18
|
+
from .constants import CacheKeyEnum
|
|
19
|
+
from .github import download_data, get_repos, get_username
|
|
20
|
+
|
|
21
|
+
# Search schema for the repo dataset:
# - "acc"  (account/org name): ngram-matched, base relevance weight
# - "repo" (repository name):  ngram-matched, boosted 2x so repo-name
#   matches outrank account-name matches
# - "desc" (description):      plain full-text field
repo_fields = [
    NgramField(name="acc", min_gram=2, max_gram=10, boost=1.0),
    NgramField(name="repo", min_gram=2, max_gram=10, boost=2.0),
    TextField(name="desc"),
]
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def make_repo_dataset(
    dir_user: Path,
    downloader: T.Callable[[], list[T_RECORD]],
    cache_expire: int,
) -> DataSet:
    """
    Low-level ``DataSet`` constructor.

    Pure function — no GitHub or config dependencies. The caller supplies
    the per-user directory, a ready-made downloader, and the cache expiry
    (seconds).
    """
    dataset = DataSet(
        dir_root=dir_user,
        name="repo",
        fields=repo_fields,
        downloader=downloader,
        cache_expire=cache_expire,
    )
    return dataset
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def make_downloader(
    gh: Github,
    cache: Cache,
    username: str,
    cache_expire: int,
) -> T.Callable[[], list[T_RECORD]]:  # pragma: no cover
    """
    Build the downloader callable used by the repo DataSet.

    Wraps :func:`~afwf_github.github.download_data` with a cache check:
    when repos are already stored under the per-user cache key, they are
    returned directly without hitting the GitHub API.
    """

    def downloader() -> list[T_RECORD]:
        cache_key = CacheKeyEnum.repos.of(username)
        try:
            repos = get_repos(cache, username) if cache_key in cache else None
        except FileNotFoundError:
            # The per-user directory was deleted after the cache was
            # opened; fall through and re-download.
            repos = None

        if repos is None:
            _, _, repos = download_data(
                gh=gh,
                cache=cache,
                username=username,
                expire=cache_expire,
            )

        # Deduplicate on (account, repo) while preserving first-seen order.
        seen = set()
        records = []
        for entry in repos:
            pair = (entry["acc"], entry["repo"])
            if pair in seen:
                continue
            seen.add(pair)
            records.append(
                {"acc": entry["acc"], "repo": entry["repo"], "desc": entry["desc"]}
            )
        return records

    return downloader
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def create_repo_dataset(config: Config) -> DataSet:  # pragma: no cover
    """
    High-level factory: derive the GitHub client, username, per-user
    directory, and API cache entirely from *config*.
    """
    gh = config.gh
    username = get_username(gh)["id"]
    dir_user = path_enum.dir_user(username)
    api_cache = make_cache(dir_user / ".cache")
    downloader = make_downloader(gh, api_cache, username, config.cache_expire)
    return make_repo_dataset(dir_user, downloader, config.cache_expire)
|
afwf_github/github.py
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
|
|
3
|
+
"""
|
|
4
|
+
GitHub data related.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import typing as T
|
|
8
|
+
import json
|
|
9
|
+
import itertools
|
|
10
|
+
|
|
11
|
+
from github import Github, GithubException
|
|
12
|
+
from diskcache import Cache
|
|
13
|
+
|
|
14
|
+
from .constants import CacheKeyEnum
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class UserType(T.TypedDict):
    # "id" is the GitHub login; "name" is the human-readable display name
    # (see get_username, which derives both from the profile URL / API).
    id: str
    name: str


class AccountType(T.TypedDict):
    # An account is either the authenticated user themselves or one of
    # the orgs they belong to (see fetch_data).
    id: str
    name: str


class RepoType(T.TypedDict):
    # acc: owning account login; repo: repository name;
    # desc: repository description ("" when GitHub returns none).
    acc: str
    repo: str
    desc: str
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get_username(gh: Github) -> UserType:
    """Return the id and display name of the authenticated GitHub user.

    A single lightweight API call, used to determine the per-user data
    directory before any heavier operations run. ``id`` is the GitHub
    login (used for directory names and cache keys); ``name`` is the
    human-readable display name.
    """
    try:
        me = gh.get_user()
        # Login is the tail segment of the profile URL.
        login: str = me.html_url.split("/")[-1]
        # Display name may be unset; fall back to the login.
        display_name: str = me.name or login
        return UserType(id=login, name=display_name)
    except GithubException as e:
        raise RuntimeError(
            f"Failed to get GitHub username (status={e.status}): {e.data}"
        ) from e
    except Exception as e:
        raise RuntimeError(f"Unexpected error getting GitHub username: {e}") from e
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def take(iterable: T.Iterable, n: int) -> list:
    """
    Return the first *n* items of *iterable* as a list.

    Example::

        >>> take([0, 1, 2], 2)
        [0, 1]
    """
    prefix = itertools.islice(iterable, n)
    return list(prefix)
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def fetch_data(
    gh: Github,
    page_limit: int = 9999,
    verbose: bool = False,
) -> tuple[
    UserType,
    list[AccountType],
    list[RepoType],
]:  # pragma: no cover
    """
    Pull user, accounts, and repos from the GitHub API. No caching.

    Each API call is treated as potentially failing and raises a descriptive
    RuntimeError on unexpected errors. 403 responses on individual orgs or
    repos are silently skipped — the user simply lacks permission for those.

    :param gh: authenticated PyGithub client
    :param page_limit: safety cap on how many orgs / repos to pull
    :param verbose: print progress to stdout when True
    """
    # Step 1: the authenticated user. Any failure here is fatal.
    try:
        gh_user = gh.get_user()
        # Login is the tail segment of the profile URL.
        user_id: str = gh_user.html_url.split("/")[-1]
        # Display name may be unset; fall back to the login.
        user_name: str = gh_user.name or user_id
    except GithubException as e:
        raise RuntimeError(
            f"Failed to fetch authenticated user from GitHub (status={e.status}): {e.data}"
        ) from e
    except Exception as e:
        raise RuntimeError(f"Unexpected error fetching authenticated user: {e}") from e

    if verbose:
        print(f"Fetched user: {user_name} ({user_id})")

    # Step 2: accounts = the user themselves plus every org they belong to.
    accounts: list[AccountType] = [AccountType(id=user_id, name=user_name)]
    for org in take(gh_user.get_orgs(), page_limit):
        try:
            account_id: str = org.html_url.split("/")[-1]
            account_name: str = org.name or account_id
            if verbose:
                print(f"Fetched org: {account_name} ({account_id})")
            accounts.append(AccountType(id=account_id, name=account_name))
        except GithubException as e:
            if e.status == 403:
                # No permission to read this org's details — skip it.
                if verbose:
                    print(f"Skipped org (no permission, status=403): {e.data}")
            else:
                raise RuntimeError(
                    f"Failed to fetch org details (status={e.status}): {e.data}"
                ) from e
        except Exception as e:
            raise RuntimeError(f"Unexpected error fetching org: {e}") from e

    # Step 3: every repo visible to the user across all accounts.
    repos: list[RepoType] = []
    for repo in take(gh_user.get_repos(visibility="all"), page_limit):
        try:
            # Owner login comes from "owner/name" in full_name.
            account_name = repo.full_name.split("/")[0]
            repo_name: str = repo.name
            repo_desc: str = repo.description or ""
            if verbose:
                print(f"Fetched repo: {account_name}/{repo_name}")
            repos.append(RepoType(acc=account_name, repo=repo_name, desc=repo_desc))
        except GithubException as e:
            if e.status == 403:
                # No permission to read this repo's details — skip it.
                if verbose:
                    print(f"Skipped repo (no permission, status=403): {e.data}")
            else:
                raise RuntimeError(
                    f"Failed to fetch repo details (status={e.status}): {e.data}"
                ) from e
        except Exception as e:
            raise RuntimeError(f"Unexpected error fetching repo: {e}") from e

    user: UserType = UserType(id=user_id, name=user_name)
    return user, accounts, repos
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def download_data(
    gh: Github,
    cache: Cache,
    username: str,
    expire: int,
    page_limit: int = 9999,
    verbose: bool = False,
) -> tuple[
    UserType,
    list[AccountType],
    list[RepoType],
]:  # pragma: no cover
    """
    Fetch user, accounts, and repos via :func:`fetch_data`, then persist
    each result in *cache* under its per-username key (e.g. ``repos@alice``)
    with the given *expire* (seconds).
    """
    user, accounts, repos = fetch_data(gh, page_limit=page_limit, verbose=verbose)
    payloads = [
        (CacheKeyEnum.user, user),
        (CacheKeyEnum.accounts, accounts),
        (CacheKeyEnum.repos, repos),
    ]
    for key_enum, data in payloads:
        cache.set(key_enum.of(username), json.dumps(data), expire=expire)
    return user, accounts, repos
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def get_user(
    cache: Cache,
    username: str,
) -> UserType:
    """Read the cached authenticated-user record for *username* (see download_data)."""
    raw = cache.get(CacheKeyEnum.user.of(username))
    return json.loads(raw)
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def get_accounts(
    cache: Cache,
    username: str,
) -> list[AccountType]:
    """Read the cached account list for *username* (see download_data)."""
    raw = cache.get(CacheKeyEnum.accounts.of(username))
    return json.loads(raw)
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def get_repos(
    cache: Cache,
    username: str,
) -> list[RepoType]:
    """Read the cached repository list for *username* (see download_data)."""
    raw = cache.get(CacheKeyEnum.repos.of(username))
    return json.loads(raw)
|
afwf_github/paths.py
ADDED
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from functools import cached_property
|
|
5
|
+
|
|
6
|
+
_dir_here = Path(__file__).absolute().parent
|
|
7
|
+
PACKAGE_NAME = _dir_here.name
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class PathEnum:
    """
    Centralized enumeration of all project paths with absolute path references.

    Provides IDE-autocomplete-friendly access to all project directories and files using
    absolute paths to eliminate current directory dependencies and ensure consistent path
    resolution across different execution contexts and DevOps workflows.
    """

    @cached_property
    def dir_home(self) -> Path:
        # The user's home directory ($HOME).
        return Path.home()

    # Repository layout (relative to this package's location on disk).
    dir_project_root = _dir_here.parent
    dir_tmp = dir_project_root / "tmp"

    # Source Code
    dir_package = _dir_here
    path_version_py = dir_package / "_version.py"
    path_pyproject_toml = dir_project_root / "pyproject.toml"
    path_requirements_txt = dir_project_root / "requirements.txt"
    path_authors = dir_project_root / "AUTHORS.txt"
    path_license = dir_project_root / "LICENSE.txt"
    path_release_history = dir_project_root / "release-history.rst"

    # Virtual Environment
    dir_venv = dir_project_root / ".venv"
    dir_venv_bin = dir_venv / "bin"
    path_venv_bin_pip = dir_venv_bin / "pip"
    path_venv_bin_python = dir_venv_bin / "python"
    path_venv_bin_pytest = dir_venv_bin / "pytest"
    path_python_interpreter = path_venv_bin_python

    # Test
    dir_htmlcov = dir_project_root / "htmlcov"
    path_cov_index_html = dir_htmlcov / "index.html"
    dir_unit_test = dir_project_root / "tests"
    dir_int_test = dir_project_root / "tests_int"
    dir_load_test = dir_project_root / "tests_load"

    # Documentation
    dir_docs_source = dir_project_root / "docs" / "source"
    dir_docs_build_html = dir_project_root / "docs" / "build" / "html"

    # Build
    dir_build = dir_project_root / "build"
    dir_dist = dir_project_root / "dist"

    # Alfred workflow runtime paths
    @cached_property
    def dir_project_home(self) -> Path:
        # Workflow data root: $HOME/.alfred-afwf/afwf_github/
        # Created on first access so callers never need to mkdir it.
        p = self.dir_home / ".alfred-afwf" / PACKAGE_NAME
        p.mkdir(parents=True, exist_ok=True)
        return p

    @cached_property
    def path_config_json(self) -> Path:
        # User-editable workflow configuration file.
        return self.dir_project_home / "config.json"

    @cached_property
    def path_error_log(self) -> Path:
        # Destination for logged tracebacks (see the _log_error decorator).
        return self.dir_project_home / "error.log"

    def dir_user(self, username: str) -> Path:
        """Per-user data directory for a specific GitHub account.

        All GitHub API cache and search index for this user live under this
        directory. Deleting it resets everything for that user cleanly.

        Layout::

            $HOME/.alfred-afwf/afwf_github/{username}/
                .cache/   # diskcache — GitHub API responses
                repo/     # sayt2 search dataset (index + query cache)
        """
        p = self.dir_project_home / username
        p.mkdir(parents=True, exist_ok=True)
        return p
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
# Module-level singleton — import this instead of instantiating PathEnum.
path_enum = PathEnum()
"""
Single entry point for all project paths with absolute path references.
"""
|
afwf_github/vendor/pytest_cov_helper.py
ADDED
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
|
|
3
|
+
import os
|
|
4
|
+
import sys
|
|
5
|
+
import contextlib
|
|
6
|
+
import subprocess
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
__version__ = "0.2.1"
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@contextlib.contextmanager
def temp_cwd(path: Path):
    """
    Context manager: temporarily switch the current working directory to
    *path*, restoring the previous CWD on exit — even when the body raises.
    """
    previous = os.getcwd()
    os.chdir(str(path))
    try:
        yield path
    finally:
        os.chdir(previous)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def run_unit_test(
    script: str,
    root_dir: str,
):
    """
    Run the ``pytest -s --tb=native /path/to/script.py`` command.

    :param script: the path to the test script
    :param root_dir: the dir you want to temporarily set as cwd
    """
    pytest_executable = Path(sys.executable).parent / "pytest"
    command = [
        str(pytest_executable),
        "-s",
        "--tb=native",
        script,
    ]
    with temp_cwd(Path(root_dir)):
        subprocess.run(command)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def run_cov_test(
    script: str,
    module: str,
    root_dir: str,
    htmlcov_dir: str,
    preview: bool = False,
    is_folder: bool = False,
):
    """
    Run a per-module coverage test.

    The pytest-cov plugin gives you the coverage for the entire project.
    This is a simple wrapper around the pytest + coverage CLI that lets you
    run a coverage test from a Python script and restrict the code coverage
    measurement scope to a single module (or package folder).

    Usage example, in a module's unit test script::

        from my_library.module1 import func1, func2

        def test_func1():
            pass

        if __name__ == "__main__":
            from fixa.pytest_cov_helper import run_cov_test

            run_cov_test(
                script=__file__,
                module="my_library.module1",  # test scope is module1.py
                root_dir="/path/to/dir_git_repo",
                htmlcov_dir="/path/to/dir_git_repo/htmlcov",
            )

    For an all-modules test script, pass ``module="my_library"`` and
    ``is_folder=True`` so the whole package directory is measured.

    :param script: the test script absolute path
    :param module: the dot notation to the python module you want to
        calculate coverage for
    :param root_dir: the dir to dump coverage results binary file
    :param htmlcov_dir: the dir to dump HTML output
    :param preview: whether to open the HTML output in web browser after
        the test
    :param is_folder: whether the module is a folder

    Reference:

    - https://pypi.org/project/pytest-cov/
    """
    bin_pytest = Path(sys.executable).parent / "pytest"
    if is_folder:
        # Run the whole test directory instead of a single script.
        script = f"{Path(script).parent}"
    if module.endswith(".py"):  # pragma: no cover
        # Accept a file path-ish module argument; --cov wants the bare name.
        module = module[:-3]
    args = [
        f"{bin_pytest}",
        "-s",
        "--tb=native",
        f"--rootdir={root_dir}",
        f"--cov={module}",
        "--cov-report",
        "term-missing",
        "--cov-report",
        f"html:{htmlcov_dir}",
        script,
    ]
    with temp_cwd(Path(root_dir)):
        subprocess.run(args)
    if preview:  # pragma: no cover
        platform = sys.platform
        if platform in ["win32", "cygwin"]:
            # NOTE(review): "start" is a cmd.exe builtin, not an executable;
            # subprocess.run without shell=True will likely fail here.
            # os.startfile() is the usual fix — confirm on Windows.
            open_command = "start"
        elif platform == "darwin":
            open_command = "open"
        elif platform == "linux":
            # BUG FIX: "open" is a macOS command; on Linux the portable
            # way to open a file with the default application is xdg-open.
            open_command = "xdg-open"
        else:
            raise NotImplementedError
        subprocess.run([open_command, f"{Path(htmlcov_dir).joinpath('index.html')}"])
|
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: afwf_github
|
|
3
|
+
Version: 1.0.1
|
|
4
|
+
Summary: Alfred GitHub Workflow.
|
|
5
|
+
Author-email: Sanhe Hu <husanhe@email.com>
|
|
6
|
+
Maintainer-email: Sanhe Hu <husanhe@email.com>
|
|
7
|
+
License-Expression: MIT
|
|
8
|
+
Project-URL: Homepage, https://github.com/MacHu-GWU/afwf_github-project
|
|
9
|
+
Project-URL: Documentation, https://afwf-github.readthedocs.io/en/latest/
|
|
10
|
+
Project-URL: Repository, https://github.com/MacHu-GWU/afwf_github-project
|
|
11
|
+
Project-URL: Issues, https://github.com/MacHu-GWU/afwf_github-project/issues
|
|
12
|
+
Project-URL: Changelog, https://github.com/MacHu-GWU/afwf_github-project/blob/main/release-history.rst
|
|
13
|
+
Project-URL: Download, https://pypi.org/pypi/afwf-github#files
|
|
14
|
+
Classifier: Development Status :: 4 - Beta
|
|
15
|
+
Classifier: Intended Audience :: Developers
|
|
16
|
+
Classifier: Natural Language :: English
|
|
17
|
+
Classifier: Operating System :: Microsoft :: Windows
|
|
18
|
+
Classifier: Operating System :: MacOS
|
|
19
|
+
Classifier: Operating System :: Unix
|
|
20
|
+
Classifier: Programming Language :: Python :: 3
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
22
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
23
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
24
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
25
|
+
Classifier: Programming Language :: Python :: 3.14
|
|
26
|
+
Requires-Python: <4.0,>=3.10
|
|
27
|
+
Description-Content-Type: text/x-rst
|
|
28
|
+
License-File: LICENSE.txt
|
|
29
|
+
License-File: AUTHORS.rst
|
|
30
|
+
Requires-Dist: pydantic<3.0.0,>=2.11.10
|
|
31
|
+
Requires-Dist: afwf<2.0.0,>=1.0.2
|
|
32
|
+
Requires-Dist: diskcache<6.0.0,>=5.6.3
|
|
33
|
+
Requires-Dist: PyGithub<3.0.0,>=2.8.1
|
|
34
|
+
Requires-Dist: git-web-url<2.0.0,>=1.0.2
|
|
35
|
+
Requires-Dist: sayt2<1.0.0,>=0.1.2
|
|
36
|
+
Requires-Dist: home-secret-toml<1.0.0,>=0.2.1
|
|
37
|
+
Requires-Dist: fire<1.0.0,>=0.6.0
|
|
38
|
+
Provides-Extra: dev
|
|
39
|
+
Requires-Dist: rich<15.0.0,>=13.8.1; extra == "dev"
|
|
40
|
+
Provides-Extra: test
|
|
41
|
+
Requires-Dist: pytest<9.0.0,>=8.2.2; extra == "test"
|
|
42
|
+
Requires-Dist: pytest-cov<7.0.0,>=6.0.0; extra == "test"
|
|
43
|
+
Provides-Extra: doc
|
|
44
|
+
Requires-Dist: Sphinx<8.0.0,>=7.4.7; extra == "doc"
|
|
45
|
+
Requires-Dist: sphinx-copybutton<1.0.0,>=0.5.2; extra == "doc"
|
|
46
|
+
Requires-Dist: sphinx-design<1.0.0,>=0.6.1; extra == "doc"
|
|
47
|
+
Requires-Dist: sphinx-jinja<3.0.0,>=2.0.2; extra == "doc"
|
|
48
|
+
Requires-Dist: furo==2024.8.6; extra == "doc"
|
|
49
|
+
Requires-Dist: pygments<3.0.0,>=2.18.0; extra == "doc"
|
|
50
|
+
Requires-Dist: ipython<8.19.0,>=8.18.1; extra == "doc"
|
|
51
|
+
Requires-Dist: nbsphinx<1.0.0,>=0.8.12; extra == "doc"
|
|
52
|
+
Requires-Dist: rstobj==2.0.0; extra == "doc"
|
|
53
|
+
Requires-Dist: docfly==3.0.3; extra == "doc"
|
|
54
|
+
Provides-Extra: mise
|
|
55
|
+
Requires-Dist: PyGithub<3.0.0,>=2.8.0; extra == "mise"
|
|
56
|
+
Requires-Dist: httpx<1.0.0,>=0.28.0; extra == "mise"
|
|
57
|
+
Requires-Dist: tomli<3.0.0,>=2.0.0; python_version < "3.11" and extra == "mise"
|
|
58
|
+
Dynamic: license-file
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
.. image:: https://readthedocs.org/projects/afwf-github/badge/?version=latest
|
|
62
|
+
:target: https://afwf-github.readthedocs.io/en/latest/
|
|
63
|
+
:alt: Documentation Status
|
|
64
|
+
|
|
65
|
+
.. image:: https://github.com/MacHu-GWU/afwf_github-project/actions/workflows/main.yml/badge.svg
|
|
66
|
+
:target: https://github.com/MacHu-GWU/afwf_github-project/actions?query=workflow:CI
|
|
67
|
+
|
|
68
|
+
.. image:: https://codecov.io/gh/MacHu-GWU/afwf_github-project/branch/main/graph/badge.svg
|
|
69
|
+
:target: https://codecov.io/gh/MacHu-GWU/afwf_github-project
|
|
70
|
+
|
|
71
|
+
.. image:: https://img.shields.io/pypi/v/afwf-github.svg
|
|
72
|
+
:target: https://pypi.python.org/pypi/afwf-github
|
|
73
|
+
|
|
74
|
+
.. image:: https://img.shields.io/pypi/l/afwf-github.svg
|
|
75
|
+
:target: https://pypi.python.org/pypi/afwf-github
|
|
76
|
+
|
|
77
|
+
.. image:: https://img.shields.io/pypi/pyversions/afwf-github.svg
|
|
78
|
+
:target: https://pypi.python.org/pypi/afwf-github
|
|
79
|
+
|
|
80
|
+
.. image:: https://img.shields.io/badge/✍️_Release_History!--None.svg?style=social&logo=github
|
|
81
|
+
:target: https://github.com/MacHu-GWU/afwf_github-project/blob/main/release-history.rst
|
|
82
|
+
|
|
83
|
+
.. image:: https://img.shields.io/badge/⭐_Star_me_on_GitHub!--None.svg?style=social&logo=github
|
|
84
|
+
:target: https://github.com/MacHu-GWU/afwf_github-project
|
|
85
|
+
|
|
86
|
+
------
|
|
87
|
+
|
|
88
|
+
.. image:: https://img.shields.io/badge/Link-API-blue.svg
|
|
89
|
+
:target: https://afwf-github.readthedocs.io/en/latest/py-modindex.html
|
|
90
|
+
|
|
91
|
+
.. image:: https://img.shields.io/badge/Link-Install-blue.svg
|
|
92
|
+
:target: `install`_
|
|
93
|
+
|
|
94
|
+
.. image:: https://img.shields.io/badge/Link-GitHub-blue.svg
|
|
95
|
+
:target: https://github.com/MacHu-GWU/afwf_github-project
|
|
96
|
+
|
|
97
|
+
.. image:: https://img.shields.io/badge/Link-Submit_Issue-blue.svg
|
|
98
|
+
:target: https://github.com/MacHu-GWU/afwf_github-project/issues
|
|
99
|
+
|
|
100
|
+
.. image:: https://img.shields.io/badge/Link-Request_Feature-blue.svg
|
|
101
|
+
:target: https://github.com/MacHu-GWU/afwf_github-project/issues
|
|
102
|
+
|
|
103
|
+
.. image:: https://img.shields.io/badge/Link-Download-blue.svg
|
|
104
|
+
:target: https://pypi.org/pypi/afwf-github#files
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
Welcome to ``afwf_github`` Documentation
|
|
108
|
+
==============================================================================
|
|
109
|
+
.. image:: https://afwf-github.readthedocs.io/en/latest/_static/afwf_github-logo.png
|
|
110
|
+
:target: https://afwf-github.readthedocs.io/en/latest/
|
|
111
|
+
|
|
112
|
+
It is an `Alfred Workflow <https://www.alfredapp.com/workflows/>`_ for GitHub operations. There already is a PHP `alfred-github-workflow <https://github.com/gharlan/alfred-github-workflow>`_ library for this. But the searching is based on Alfred built-in word level filtering, which doesn't allow any typo, fuzzy, and full text search. This project aims to provide the best searching experience powered by `tantivy <https://github.com/quickwit-oss/tantivy>`_ (via `sayt2 <https://github.com/MacHu-GWU/sayt2-project>`_), a Rust-based full-text search engine.
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
Install
|
|
116
|
+
------------------------------------------------------------------------------
|
|
117
|
+
1. Make sure you have `Alfred 5 + <https://www.alfredapp.com/>`_ installed and bought the `Power Pack <https://www.alfredapp.com/shop/>`_.
|
|
118
|
+
2. Go to `Release <https://github.com/MacHu-GWU/afwf_github-project/releases>`_, download the latest release.
|
|
119
|
+
3. Double click the file to install.
|
|
120
|
+
4. Prepare your GitHub Personal Access Token: go to https://github.com/settings/tokens, create a new token, make sure you checked ``repo -> public_repo``, ``admin:org -> read:org``, ``admin:enterprise -> read:enterprise`` so the workflow can get your public repo name and url information. If you want to get your private repo as well, you should check ``repo (Full control of private repositories)``.
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
Usage
|
|
124
|
+
------------------------------------------------------------------------------
|
|
125
|
+
1. Configuration.
|
|
126
|
+
|
|
127
|
+
In Alfred UI, type ``gh-config``; it should open ``~/.alfred-afwf/afwf_github/config.json``.
|
|
128
|
+
|
|
129
|
+
.. image:: https://github.com/MacHu-GWU/afwf_github-project/assets/6800411/2acff3ad-8a90-4326-8f64-3a54df2da11f
|
|
130
|
+
|
|
131
|
+
2. Build Index
|
|
132
|
+
|
|
133
|
+
In Alfred UI, type ``gh-rebuild-index``, it should start to crawl your GitHub repos. It will take a while to finish. You can check the progress in the ``~/.alfred-afwf/afwf_github/.repo_index/``
|
|
134
|
+
|
|
135
|
+
.. image:: https://github.com/MacHu-GWU/afwf_github-project/assets/6800411/59ce941d-a22a-4fb5-8013-c6a14ec5ca56
|
|
136
|
+
|
|
137
|
+
3. Search GitHub
|
|
138
|
+
|
|
139
|
+
In Alfred UI, type ``gh ${query}``, it should show the following UI:
|
|
140
|
+
|
|
141
|
+
.. image:: https://github.com/MacHu-GWU/afwf_github-project/assets/6800411/57ea7aa5-d2e0-4b73-8e66-632453418d92
|
|
142
|
+
|
|
143
|
+
4. Open Git Repo in Browser
|
|
144
|
+
|
|
145
|
+
Copy any absolute path of a file in any git repo, type ``gh-view-in-browser ${path}`` then hit ``Enter``, it should open the repo in browser.
|
|
146
|
+
|
|
147
|
+
.. image:: https://github.com/MacHu-GWU/afwf_github-project/assets/6800411/e863fac8-e9b0-4301-93c0-d745059e4346
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
Trouble Shooting
|
|
151
|
+
------------------------------------------------------------------------------
|
|
152
|
+
1. ``gh ${query}`` doesn't show any result.
|
|
153
|
+
|
|
154
|
+
Check the ``${HOME}/.alfred-afwf/afwf_github/`` folder. If there is no folder whose name equals your GitHub username (this is where the index is stored), it means the Workflow failed to crawl your GitHub repos. Please double check ``${HOME}/.alfred-afwf/afwf_github/config.json`` to make sure you have the correct GitHub Personal Access Token.
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
afwf_github/__init__.py,sha256=iwhKnzeBJLKxpRVjvzwiRE63_zNpIBfaKLITauVph-0,24
|
|
2
|
+
afwf_github/api.py,sha256=O9CT1B2F-cVB8elT0EoCJbgkcffjvlmqteqavs4giDg,25
|
|
3
|
+
afwf_github/cache.py,sha256=CZPiNLMh_kJoIz1l1hTNlYt1ilIBNopb_rRkEBEB4vA,627
|
|
4
|
+
afwf_github/cli.py,sha256=aYdSzR3dyGMdug8c0LTx8xEgCugvMFJVDKbrlSKh5NI,9998
|
|
5
|
+
afwf_github/config.py,sha256=_haEkNVekN31YOfQ6DqrmugdagVpv-lN14v15ssSyyY,1419
|
|
6
|
+
afwf_github/constants.py,sha256=q20ww6N_znfLpS7CgaauiWOMrNyTxFVjURHMp1WdvPg,1203
|
|
7
|
+
afwf_github/dataset.py,sha256=0THSggu39cRbwNhmui8gc2XeA3iqE0_nmXeo2Utjggw,2935
|
|
8
|
+
afwf_github/github.py,sha256=llv7seIQ3LFrU08w1Q4DZ8aeebstTO0QHCU-n2JvLqA,5481
|
|
9
|
+
afwf_github/paths.py,sha256=RhRLfkOB7-ObxfjlQjp_4bVQ087YIErDVYjW9dt_u6Y,3018
|
|
10
|
+
afwf_github/type_hint.py,sha256=dNePn5NGiTWH9BNyFuNm09qDEdN9MPG__zVYdUzQY5I,73
|
|
11
|
+
afwf_github/vendor/__init__.py,sha256=O9CT1B2F-cVB8elT0EoCJbgkcffjvlmqteqavs4giDg,25
|
|
12
|
+
afwf_github/vendor/pytest_cov_helper.py,sha256=H2BJtFq3bwJHZtt-bxZ1yF9LnXHVDRZZ0NH-OvkRJdE,4063
|
|
13
|
+
afwf_github-1.0.1.dist-info/licenses/AUTHORS.rst,sha256=oo38V9AD_y57Ac69mKmiItWquV29SRi2aTCdKQo531A,767
|
|
14
|
+
afwf_github-1.0.1.dist-info/licenses/LICENSE.txt,sha256=Fx_tQAmqkcvXayTPbicXlPPrnLKRf1JczCthjVuMS-M,1084
|
|
15
|
+
afwf_github-1.0.1.dist-info/METADATA,sha256=tWj6lMcYK5-rwLg4SqRIHXDkL5KviEfVjJVGhLZ78cA,7826
|
|
16
|
+
afwf_github-1.0.1.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
|
|
17
|
+
afwf_github-1.0.1.dist-info/entry_points.txt,sha256=ii-KsfGqMcXk9my_QOIm_o3MP60ad9B8VJneBBYhUmk,52
|
|
18
|
+
afwf_github-1.0.1.dist-info/top_level.txt,sha256=pzLhlQvE2dooTdGmz-qQQBMG-f4sJ0_Ob8oE8QqahZ0,12
|
|
19
|
+
afwf_github-1.0.1.dist-info/RECORD,,
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
.. _about_author:
|
|
2
|
+
|
|
3
|
+
About the Author
|
|
4
|
+
------------------------------------------------------------------------------
|
|
5
|
+
::
|
|
6
|
+
|
|
7
|
+
(\ (\
|
|
8
|
+
( -.-)o
|
|
9
|
+
o_(")(")
|
|
10
|
+
|
|
11
|
+
**Sanhe Hu** is a seasoned software engineer with a deep passion for Python development since 2010. As an author and maintainer of `150+ open-source Python projects <https://pypi.org/user/machugwu/>`_, with over `15 million monthly downloads <https://github.com/MacHu-GWU>`_, I bring a wealth of experience to the table. As a Senior Solution Architect and Subject Matter Expert in AI, Data, Amazon Web Services, Cloud Engineering, DevOps, I thrive on helping clients with platform design, enterprise architecture, and strategic roadmaps.
|
|
12
|
+
|
|
13
|
+
Talk is cheap, show me the code:
|
|
14
|
+
|
|
15
|
+
- My Github: https://github.com/MacHu-GWU
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Sanhe Hu <husanhe@email.com>
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
afwf_github
|