chatmcp-cli 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aider/__init__.py +20 -0
- aider/__main__.py +4 -0
- aider/_version.py +21 -0
- aider/analytics.py +250 -0
- aider/args.py +926 -0
- aider/args_formatter.py +228 -0
- aider/coders/__init__.py +34 -0
- aider/coders/architect_coder.py +48 -0
- aider/coders/architect_prompts.py +40 -0
- aider/coders/ask_coder.py +9 -0
- aider/coders/ask_prompts.py +35 -0
- aider/coders/base_coder.py +2483 -0
- aider/coders/base_prompts.py +60 -0
- aider/coders/chat_chunks.py +64 -0
- aider/coders/context_coder.py +53 -0
- aider/coders/context_prompts.py +75 -0
- aider/coders/editblock_coder.py +657 -0
- aider/coders/editblock_fenced_coder.py +10 -0
- aider/coders/editblock_fenced_prompts.py +143 -0
- aider/coders/editblock_func_coder.py +141 -0
- aider/coders/editblock_func_prompts.py +27 -0
- aider/coders/editblock_prompts.py +174 -0
- aider/coders/editor_diff_fenced_coder.py +9 -0
- aider/coders/editor_diff_fenced_prompts.py +11 -0
- aider/coders/editor_editblock_coder.py +8 -0
- aider/coders/editor_editblock_prompts.py +18 -0
- aider/coders/editor_whole_coder.py +8 -0
- aider/coders/editor_whole_prompts.py +10 -0
- aider/coders/help_coder.py +16 -0
- aider/coders/help_prompts.py +46 -0
- aider/coders/patch_coder.py +706 -0
- aider/coders/patch_prompts.py +161 -0
- aider/coders/search_replace.py +757 -0
- aider/coders/shell.py +37 -0
- aider/coders/single_wholefile_func_coder.py +102 -0
- aider/coders/single_wholefile_func_prompts.py +27 -0
- aider/coders/udiff_coder.py +429 -0
- aider/coders/udiff_prompts.py +115 -0
- aider/coders/udiff_simple.py +14 -0
- aider/coders/udiff_simple_prompts.py +25 -0
- aider/coders/wholefile_coder.py +144 -0
- aider/coders/wholefile_func_coder.py +134 -0
- aider/coders/wholefile_func_prompts.py +27 -0
- aider/coders/wholefile_prompts.py +67 -0
- aider/commands.py +1665 -0
- aider/copypaste.py +72 -0
- aider/deprecated.py +126 -0
- aider/diffs.py +128 -0
- aider/dump.py +29 -0
- aider/editor.py +147 -0
- aider/exceptions.py +107 -0
- aider/format_settings.py +26 -0
- aider/gui.py +545 -0
- aider/help.py +163 -0
- aider/help_pats.py +19 -0
- aider/history.py +143 -0
- aider/io.py +1175 -0
- aider/linter.py +304 -0
- aider/llm.py +47 -0
- aider/main.py +1267 -0
- aider/mdstream.py +243 -0
- aider/models.py +1286 -0
- aider/onboarding.py +428 -0
- aider/openrouter.py +128 -0
- aider/prompts.py +64 -0
- aider/queries/tree-sitter-language-pack/README.md +7 -0
- aider/queries/tree-sitter-language-pack/arduino-tags.scm +5 -0
- aider/queries/tree-sitter-language-pack/c-tags.scm +9 -0
- aider/queries/tree-sitter-language-pack/chatito-tags.scm +16 -0
- aider/queries/tree-sitter-language-pack/commonlisp-tags.scm +122 -0
- aider/queries/tree-sitter-language-pack/cpp-tags.scm +15 -0
- aider/queries/tree-sitter-language-pack/csharp-tags.scm +26 -0
- aider/queries/tree-sitter-language-pack/d-tags.scm +26 -0
- aider/queries/tree-sitter-language-pack/dart-tags.scm +92 -0
- aider/queries/tree-sitter-language-pack/elisp-tags.scm +5 -0
- aider/queries/tree-sitter-language-pack/elixir-tags.scm +54 -0
- aider/queries/tree-sitter-language-pack/elm-tags.scm +19 -0
- aider/queries/tree-sitter-language-pack/gleam-tags.scm +41 -0
- aider/queries/tree-sitter-language-pack/go-tags.scm +42 -0
- aider/queries/tree-sitter-language-pack/java-tags.scm +20 -0
- aider/queries/tree-sitter-language-pack/javascript-tags.scm +88 -0
- aider/queries/tree-sitter-language-pack/lua-tags.scm +34 -0
- aider/queries/tree-sitter-language-pack/ocaml-tags.scm +115 -0
- aider/queries/tree-sitter-language-pack/ocaml_interface-tags.scm +98 -0
- aider/queries/tree-sitter-language-pack/pony-tags.scm +39 -0
- aider/queries/tree-sitter-language-pack/properties-tags.scm +5 -0
- aider/queries/tree-sitter-language-pack/python-tags.scm +14 -0
- aider/queries/tree-sitter-language-pack/r-tags.scm +21 -0
- aider/queries/tree-sitter-language-pack/racket-tags.scm +12 -0
- aider/queries/tree-sitter-language-pack/ruby-tags.scm +64 -0
- aider/queries/tree-sitter-language-pack/rust-tags.scm +60 -0
- aider/queries/tree-sitter-language-pack/solidity-tags.scm +43 -0
- aider/queries/tree-sitter-language-pack/swift-tags.scm +51 -0
- aider/queries/tree-sitter-language-pack/udev-tags.scm +20 -0
- aider/queries/tree-sitter-languages/README.md +23 -0
- aider/queries/tree-sitter-languages/c-tags.scm +9 -0
- aider/queries/tree-sitter-languages/c_sharp-tags.scm +46 -0
- aider/queries/tree-sitter-languages/cpp-tags.scm +15 -0
- aider/queries/tree-sitter-languages/dart-tags.scm +91 -0
- aider/queries/tree-sitter-languages/elisp-tags.scm +8 -0
- aider/queries/tree-sitter-languages/elixir-tags.scm +54 -0
- aider/queries/tree-sitter-languages/elm-tags.scm +19 -0
- aider/queries/tree-sitter-languages/go-tags.scm +30 -0
- aider/queries/tree-sitter-languages/hcl-tags.scm +77 -0
- aider/queries/tree-sitter-languages/java-tags.scm +20 -0
- aider/queries/tree-sitter-languages/javascript-tags.scm +88 -0
- aider/queries/tree-sitter-languages/kotlin-tags.scm +27 -0
- aider/queries/tree-sitter-languages/ocaml-tags.scm +115 -0
- aider/queries/tree-sitter-languages/ocaml_interface-tags.scm +98 -0
- aider/queries/tree-sitter-languages/php-tags.scm +26 -0
- aider/queries/tree-sitter-languages/python-tags.scm +12 -0
- aider/queries/tree-sitter-languages/ql-tags.scm +26 -0
- aider/queries/tree-sitter-languages/ruby-tags.scm +64 -0
- aider/queries/tree-sitter-languages/rust-tags.scm +60 -0
- aider/queries/tree-sitter-languages/scala-tags.scm +65 -0
- aider/queries/tree-sitter-languages/typescript-tags.scm +41 -0
- aider/reasoning_tags.py +82 -0
- aider/repo.py +623 -0
- aider/repomap.py +847 -0
- aider/report.py +200 -0
- aider/resources/__init__.py +3 -0
- aider/resources/model-metadata.json +468 -0
- aider/resources/model-settings.yml +1767 -0
- aider/run_cmd.py +132 -0
- aider/scrape.py +284 -0
- aider/sendchat.py +61 -0
- aider/special.py +203 -0
- aider/urls.py +17 -0
- aider/utils.py +338 -0
- aider/versioncheck.py +113 -0
- aider/voice.py +187 -0
- aider/waiting.py +221 -0
- aider/watch.py +318 -0
- aider/watch_prompts.py +12 -0
- aider/website/Gemfile +8 -0
- aider/website/_includes/blame.md +162 -0
- aider/website/_includes/get-started.md +22 -0
- aider/website/_includes/help-tip.md +5 -0
- aider/website/_includes/help.md +24 -0
- aider/website/_includes/install.md +5 -0
- aider/website/_includes/keys.md +4 -0
- aider/website/_includes/model-warnings.md +67 -0
- aider/website/_includes/multi-line.md +22 -0
- aider/website/_includes/python-m-aider.md +5 -0
- aider/website/_includes/recording.css +228 -0
- aider/website/_includes/recording.md +34 -0
- aider/website/_includes/replit-pipx.md +9 -0
- aider/website/_includes/works-best.md +1 -0
- aider/website/_sass/custom/custom.scss +103 -0
- aider/website/docs/config/adv-model-settings.md +1881 -0
- aider/website/docs/config/aider_conf.md +527 -0
- aider/website/docs/config/api-keys.md +90 -0
- aider/website/docs/config/dotenv.md +478 -0
- aider/website/docs/config/editor.md +127 -0
- aider/website/docs/config/model-aliases.md +103 -0
- aider/website/docs/config/options.md +843 -0
- aider/website/docs/config/reasoning.md +209 -0
- aider/website/docs/config.md +44 -0
- aider/website/docs/faq.md +378 -0
- aider/website/docs/git.md +76 -0
- aider/website/docs/index.md +47 -0
- aider/website/docs/install/codespaces.md +39 -0
- aider/website/docs/install/docker.md +57 -0
- aider/website/docs/install/optional.md +100 -0
- aider/website/docs/install/replit.md +8 -0
- aider/website/docs/install.md +115 -0
- aider/website/docs/languages.md +264 -0
- aider/website/docs/legal/contributor-agreement.md +111 -0
- aider/website/docs/legal/privacy.md +104 -0
- aider/website/docs/llms/anthropic.md +77 -0
- aider/website/docs/llms/azure.md +48 -0
- aider/website/docs/llms/bedrock.md +132 -0
- aider/website/docs/llms/cohere.md +34 -0
- aider/website/docs/llms/deepseek.md +32 -0
- aider/website/docs/llms/gemini.md +49 -0
- aider/website/docs/llms/github.md +105 -0
- aider/website/docs/llms/groq.md +36 -0
- aider/website/docs/llms/lm-studio.md +39 -0
- aider/website/docs/llms/ollama.md +75 -0
- aider/website/docs/llms/openai-compat.md +39 -0
- aider/website/docs/llms/openai.md +58 -0
- aider/website/docs/llms/openrouter.md +78 -0
- aider/website/docs/llms/other.md +103 -0
- aider/website/docs/llms/vertex.md +50 -0
- aider/website/docs/llms/warnings.md +10 -0
- aider/website/docs/llms/xai.md +53 -0
- aider/website/docs/llms.md +54 -0
- aider/website/docs/more/analytics.md +122 -0
- aider/website/docs/more/edit-formats.md +116 -0
- aider/website/docs/more/infinite-output.md +137 -0
- aider/website/docs/more-info.md +8 -0
- aider/website/docs/recordings/auto-accept-architect.md +31 -0
- aider/website/docs/recordings/dont-drop-original-read-files.md +35 -0
- aider/website/docs/recordings/index.md +21 -0
- aider/website/docs/recordings/model-accepts-settings.md +69 -0
- aider/website/docs/recordings/tree-sitter-language-pack.md +80 -0
- aider/website/docs/repomap.md +112 -0
- aider/website/docs/scripting.md +100 -0
- aider/website/docs/troubleshooting/aider-not-found.md +24 -0
- aider/website/docs/troubleshooting/edit-errors.md +76 -0
- aider/website/docs/troubleshooting/imports.md +62 -0
- aider/website/docs/troubleshooting/models-and-keys.md +54 -0
- aider/website/docs/troubleshooting/support.md +79 -0
- aider/website/docs/troubleshooting/token-limits.md +96 -0
- aider/website/docs/troubleshooting/warnings.md +12 -0
- aider/website/docs/troubleshooting.md +11 -0
- aider/website/docs/usage/browser.md +57 -0
- aider/website/docs/usage/caching.md +49 -0
- aider/website/docs/usage/commands.md +132 -0
- aider/website/docs/usage/conventions.md +119 -0
- aider/website/docs/usage/copypaste.md +121 -0
- aider/website/docs/usage/images-urls.md +48 -0
- aider/website/docs/usage/lint-test.md +118 -0
- aider/website/docs/usage/modes.md +211 -0
- aider/website/docs/usage/not-code.md +179 -0
- aider/website/docs/usage/notifications.md +87 -0
- aider/website/docs/usage/tips.md +79 -0
- aider/website/docs/usage/tutorials.md +30 -0
- aider/website/docs/usage/voice.md +121 -0
- aider/website/docs/usage/watch.md +294 -0
- aider/website/docs/usage.md +92 -0
- aider/website/share/index.md +101 -0
- chatmcp_cli-0.1.0.dist-info/METADATA +502 -0
- chatmcp_cli-0.1.0.dist-info/RECORD +228 -0
- chatmcp_cli-0.1.0.dist-info/WHEEL +5 -0
- chatmcp_cli-0.1.0.dist-info/entry_points.txt +3 -0
- chatmcp_cli-0.1.0.dist-info/licenses/LICENSE.txt +202 -0
- chatmcp_cli-0.1.0.dist-info/top_level.txt +1 -0
aider/repomap.py
ADDED
@@ -0,0 +1,847 @@
import colorsys
import math
import os
import random
import shutil
import sqlite3
import sys
import time
import warnings
from collections import Counter, defaultdict, namedtuple
from importlib import resources
from pathlib import Path

from diskcache import Cache
from grep_ast import TreeContext, filename_to_lang
from pygments.lexers import guess_lexer_for_filename
from pygments.token import Token
from tqdm import tqdm

from aider.dump import dump
from aider.special import filter_important_files
from aider.waiting import Spinner

# tree_sitter is throwing a FutureWarning
warnings.simplefilter("ignore", category=FutureWarning)
from grep_ast.tsl import USING_TSL_PACK, get_language, get_parser  # noqa: E402

Tag = namedtuple("Tag", "rel_fname fname line name kind".split())


SQLITE_ERRORS = (sqlite3.OperationalError, sqlite3.DatabaseError, OSError)


CACHE_VERSION = 3
if USING_TSL_PACK:
    CACHE_VERSION = 4

UPDATING_REPO_MAP_MESSAGE = "Updating repo map"

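# RepoMap builds a compact, token-budgeted "map" of a repository: it extracts
# definition/reference tags with tree-sitter, ranks them with PageRank, and
# renders the highest-ranked definitions as a tree for inclusion in the prompt.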
class RepoMap:
    TAGS_CACHE_DIR = f".aider.tags.cache.v{CACHE_VERSION}"

    warned_files = set()

    def __init__(
        self,
        map_tokens=1024,
        root=None,
        main_model=None,
        io=None,
        repo_content_prefix=None,
        verbose=False,
        max_context_window=None,
        map_mul_no_files=8,
        refresh="auto",
    ):
        self.io = io
        self.verbose = verbose
        self.refresh = refresh

        if not root:
            root = os.getcwd()
        self.root = root

        self.load_tags_cache()
        self.cache_threshold = 0.95

        self.max_map_tokens = map_tokens
        self.map_mul_no_files = map_mul_no_files
        self.max_context_window = max_context_window

        self.repo_content_prefix = repo_content_prefix

        self.main_model = main_model

        self.tree_cache = {}
        self.tree_context_cache = {}
        self.map_cache = {}
        self.map_processing_time = 0
        self.last_map = None

        if self.verbose:
            self.io.tool_output(
                f"RepoMap initialized with map_mul_no_files: {self.map_mul_no_files}"
            )

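    # Token counting: short strings are tokenized exactly; longer strings are
    # estimated by tokenizing a sample of evenly spaced lines and scaling the
    # sample's tokens-per-character up to the full text length.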
    def token_count(self, text):
        len_text = len(text)
        if len_text < 200:
            return self.main_model.token_count(text)

        lines = text.splitlines(keepends=True)
        num_lines = len(lines)
        step = num_lines // 100 or 1
        lines = lines[::step]
        sample_text = "".join(lines)
        sample_tokens = self.main_model.token_count(sample_text)
        est_tokens = sample_tokens / len(sample_text) * len_text
        return est_tokens

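    # get_repo_map is the public entry point: it widens the token budget when
    # no files are in the chat, delegates to get_ranked_tags_map, and prefixes
    # the result with repo_content_prefix.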
    def get_repo_map(
        self,
        chat_files,
        other_files,
        mentioned_fnames=None,
        mentioned_idents=None,
        force_refresh=False,
    ):
        if self.max_map_tokens <= 0:
            return
        if not other_files:
            return
        if not mentioned_fnames:
            mentioned_fnames = set()
        if not mentioned_idents:
            mentioned_idents = set()

        max_map_tokens = self.max_map_tokens

        # With no files in the chat, give a bigger view of the entire repo
        padding = 4096
        if max_map_tokens and self.max_context_window:
            target = min(
                int(max_map_tokens * self.map_mul_no_files),
                self.max_context_window - padding,
            )
        else:
            target = 0
        if not chat_files and self.max_context_window and target > 0:
            max_map_tokens = target

        try:
            files_listing = self.get_ranked_tags_map(
                chat_files,
                other_files,
                max_map_tokens,
                mentioned_fnames,
                mentioned_idents,
                force_refresh,
            )
        except RecursionError:
            self.io.tool_error("Disabling repo map, git repo too large?")
            self.max_map_tokens = 0
            return

        if not files_listing:
            return

        if self.verbose:
            num_tokens = self.token_count(files_listing)
            self.io.tool_output(f"Repo-map: {num_tokens / 1024:.1f} k-tokens")

        if chat_files:
            other = "other "
        else:
            other = ""

        if self.repo_content_prefix:
            repo_content = self.repo_content_prefix.format(other=other)
        else:
            repo_content = ""

        repo_content += files_listing

        return repo_content

    def get_rel_fname(self, fname):
        try:
            return os.path.relpath(fname, self.root)
        except ValueError:
            # Issue #1288: ValueError: path is on mount 'C:', start on mount 'D:'
            # Just return the full fname.
            return fname

    def tags_cache_error(self, original_error=None):
        """Handle SQLite errors by trying to recreate cache, falling back to dict if needed"""

        if self.verbose and original_error:
            self.io.tool_warning(f"Tags cache error: {str(original_error)}")

        if isinstance(getattr(self, "TAGS_CACHE", None), dict):
            return

        path = Path(self.root) / self.TAGS_CACHE_DIR

        # Try to recreate the cache
        try:
            # Delete existing cache dir
            if path.exists():
                shutil.rmtree(path)

            # Try to create new cache
            new_cache = Cache(path)

            # Test that it works
            test_key = "test"
            new_cache[test_key] = "test"
            _ = new_cache[test_key]
            del new_cache[test_key]

            # If we got here, the new cache works
            self.TAGS_CACHE = new_cache
            return

        except SQLITE_ERRORS as e:
            # If anything goes wrong, warn and fall back to dict
            self.io.tool_warning(
                f"Unable to use tags cache at {path}, falling back to memory cache"
            )
            if self.verbose:
                self.io.tool_warning(f"Cache recreation error: {str(e)}")

        self.TAGS_CACHE = dict()

    def load_tags_cache(self):
        path = Path(self.root) / self.TAGS_CACHE_DIR
        try:
            self.TAGS_CACHE = Cache(path)
        except SQLITE_ERRORS as e:
            self.tags_cache_error(e)

    def save_tags_cache(self):
        pass

    def get_mtime(self, fname):
        try:
            return os.path.getmtime(fname)
        except FileNotFoundError:
            self.io.tool_warning(f"File not found error: {fname}")

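    # Tag extraction is cached on disk (diskcache) keyed by filename, and a
    # cached entry is reused only while the file's mtime is unchanged.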
    def get_tags(self, fname, rel_fname):
        # Check if the file is in the cache and if the modification time has not changed
        file_mtime = self.get_mtime(fname)
        if file_mtime is None:
            return []

        cache_key = fname
        try:
            val = self.TAGS_CACHE.get(cache_key)  # Issue #1308
        except SQLITE_ERRORS as e:
            self.tags_cache_error(e)
            val = self.TAGS_CACHE.get(cache_key)

        if val is not None and val.get("mtime") == file_mtime:
            try:
                return self.TAGS_CACHE[cache_key]["data"]
            except SQLITE_ERRORS as e:
                self.tags_cache_error(e)
                return self.TAGS_CACHE[cache_key]["data"]

        # miss!
        data = list(self.get_tags_raw(fname, rel_fname))

        # Update the cache
        try:
            self.TAGS_CACHE[cache_key] = {"mtime": file_mtime, "data": data}
            self.save_tags_cache()
        except SQLITE_ERRORS as e:
            self.tags_cache_error(e)
            self.TAGS_CACHE[cache_key] = {"mtime": file_mtime, "data": data}

        return data

    def get_tags_raw(self, fname, rel_fname):
        lang = filename_to_lang(fname)
        if not lang:
            return

        try:
            language = get_language(lang)
            parser = get_parser(lang)
        except Exception as err:
            print(f"Skipping file {fname}: {err}")
            return

        query_scm = get_scm_fname(lang)
        if not query_scm.exists():
            return
        query_scm = query_scm.read_text()

        code = self.io.read_text(fname)
        if not code:
            return
        tree = parser.parse(bytes(code, "utf-8"))

        # Run the tags queries
        query = language.query(query_scm)
        captures = query.captures(tree.root_node)

        saw = set()
        if USING_TSL_PACK:
            all_nodes = []
            for tag, nodes in captures.items():
                all_nodes += [(node, tag) for node in nodes]
        else:
            all_nodes = list(captures)

        for node, tag in all_nodes:
            if tag.startswith("name.definition."):
                kind = "def"
            elif tag.startswith("name.reference."):
                kind = "ref"
            else:
                continue

            saw.add(kind)

            result = Tag(
                rel_fname=rel_fname,
                fname=fname,
                name=node.text.decode("utf-8"),
                kind=kind,
                line=node.start_point[0],
            )

            yield result

        if "ref" in saw:
            return
        if "def" not in saw:
            return

        # We saw defs, without any refs
        # Some tags files only provide defs (cpp, for example)
        # Use pygments to backfill refs

        try:
            lexer = guess_lexer_for_filename(fname, code)
        except Exception:  # On Windows, bad ref to time.clock which is deprecated?
            # self.io.tool_error(f"Error lexing {fname}")
            return

        tokens = list(lexer.get_tokens(code))
        tokens = [token[1] for token in tokens if token[0] in Token.Name]

        for token in tokens:
            yield Tag(
                rel_fname=rel_fname,
                fname=fname,
                name=token,
                kind="ref",
                line=-1,
            )

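    # Ranking: build a graph in which files are nodes and an edge connects a
    # file that references an identifier to each file defining it, then run
    # personalized PageRank (chat and mentioned files get extra weight) and
    # spread each node's rank across its out-edges to score definitions.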
    def get_ranked_tags(
        self, chat_fnames, other_fnames, mentioned_fnames, mentioned_idents, progress=None
    ):
        import networkx as nx

        defines = defaultdict(set)
        references = defaultdict(list)
        definitions = defaultdict(set)

        personalization = dict()

        fnames = set(chat_fnames).union(set(other_fnames))
        chat_rel_fnames = set()

        fnames = sorted(fnames)

        # Default personalization for unspecified files is 1/num_nodes
        # https://networkx.org/documentation/stable/_modules/networkx/algorithms/link_analysis/pagerank_alg.html#pagerank
        personalize = 100 / len(fnames)

        try:
            cache_size = len(self.TAGS_CACHE)
        except SQLITE_ERRORS as e:
            self.tags_cache_error(e)
            cache_size = len(self.TAGS_CACHE)

        if len(fnames) - cache_size > 100:
            self.io.tool_output(
                "Initial repo scan can be slow in larger repos, but only happens once."
            )
            fnames = tqdm(fnames, desc="Scanning repo")
            showing_bar = True
        else:
            showing_bar = False

        for fname in fnames:
            if self.verbose:
                self.io.tool_output(f"Processing {fname}")
            if progress and not showing_bar:
                progress(f"{UPDATING_REPO_MAP_MESSAGE}: {fname}")

            try:
                file_ok = Path(fname).is_file()
            except OSError:
                file_ok = False

            if not file_ok:
                if fname not in self.warned_files:
                    self.io.tool_warning(f"Repo-map can't include {fname}")
                    self.io.tool_output(
                        "Has it been deleted from the file system but not from git?"
                    )
                    self.warned_files.add(fname)
                continue

            # dump(fname)
            rel_fname = self.get_rel_fname(fname)
            current_pers = 0.0  # Start with 0 personalization score

            if fname in chat_fnames:
                current_pers += personalize
                chat_rel_fnames.add(rel_fname)

            if rel_fname in mentioned_fnames:
                # Use max to avoid double counting if in chat_fnames and mentioned_fnames
                current_pers = max(current_pers, personalize)

            # Check path components against mentioned_idents
            path_obj = Path(rel_fname)
            path_components = set(path_obj.parts)
            basename_with_ext = path_obj.name
            basename_without_ext, _ = os.path.splitext(basename_with_ext)
            components_to_check = path_components.union({basename_with_ext, basename_without_ext})

            matched_idents = components_to_check.intersection(mentioned_idents)
            if matched_idents:
                # Add personalization *once* if any path component matches a mentioned ident
                current_pers += personalize

            if current_pers > 0:
                personalization[rel_fname] = current_pers  # Assign the final calculated value

            tags = list(self.get_tags(fname, rel_fname))
            if tags is None:
                continue

            for tag in tags:
                if tag.kind == "def":
                    defines[tag.name].add(rel_fname)
                    key = (rel_fname, tag.name)
                    definitions[key].add(tag)

                elif tag.kind == "ref":
                    references[tag.name].append(rel_fname)

        ##
        # dump(defines)
        # dump(references)
        # dump(personalization)

        if not references:
            references = dict((k, list(v)) for k, v in defines.items())

        idents = set(defines.keys()).intersection(set(references.keys()))

        G = nx.MultiDiGraph()

        # Add a small self-edge for every definition that has no references
        # Helps with tree-sitter 0.23.2 with ruby, where "def greet(name)"
        # isn't counted as a def AND a ref. tree-sitter 0.24.0 does.
        for ident in defines.keys():
            if ident in references:
                continue
            for definer in defines[ident]:
                G.add_edge(definer, definer, weight=0.1, ident=ident)

        for ident in idents:
            if progress:
                progress(f"{UPDATING_REPO_MAP_MESSAGE}: {ident}")

            definers = defines[ident]

            mul = 1.0

            is_snake = ("_" in ident) and any(c.isalpha() for c in ident)
            is_camel = any(c.isupper() for c in ident) and any(c.islower() for c in ident)
            if ident in mentioned_idents:
                mul *= 10
            if (is_snake or is_camel) and len(ident) >= 8:
                mul *= 10
            if ident.startswith("_"):
                mul *= 0.1
            if len(defines[ident]) > 5:
                mul *= 0.1

            for referencer, num_refs in Counter(references[ident]).items():
                for definer in definers:
                    # dump(referencer, definer, num_refs, mul)
                    # if referencer == definer:
                    #     continue

                    use_mul = mul
                    if referencer in chat_rel_fnames:
                        use_mul *= 50

                    # scale down so high freq (low value) mentions don't dominate
                    num_refs = math.sqrt(num_refs)

                    G.add_edge(referencer, definer, weight=use_mul * num_refs, ident=ident)

        if not references:
            pass

        if personalization:
            pers_args = dict(personalization=personalization, dangling=personalization)
        else:
            pers_args = dict()

        try:
            ranked = nx.pagerank(G, weight="weight", **pers_args)
        except ZeroDivisionError:
            # Issue #1536
            try:
                ranked = nx.pagerank(G, weight="weight")
            except ZeroDivisionError:
                return []

        # distribute the rank from each source node, across all of its out edges
        ranked_definitions = defaultdict(float)
        for src in G.nodes:
            if progress:
                progress(f"{UPDATING_REPO_MAP_MESSAGE}: {src}")

            src_rank = ranked[src]
            total_weight = sum(data["weight"] for _src, _dst, data in G.out_edges(src, data=True))
            # dump(src, src_rank, total_weight)
            for _src, dst, data in G.out_edges(src, data=True):
                data["rank"] = src_rank * data["weight"] / total_weight
                ident = data["ident"]
                ranked_definitions[(dst, ident)] += data["rank"]

        ranked_tags = []
        ranked_definitions = sorted(
            ranked_definitions.items(), reverse=True, key=lambda x: (x[1], x[0])
        )

        # dump(ranked_definitions)

        for (fname, ident), rank in ranked_definitions:
            # print(f"{rank:.03f} {fname} {ident}")
            if fname in chat_rel_fnames:
                continue
            ranked_tags += list(definitions.get((fname, ident), []))

        rel_other_fnames_without_tags = set(self.get_rel_fname(fname) for fname in other_fnames)

        fnames_already_included = set(rt[0] for rt in ranked_tags)

        top_rank = sorted([(rank, node) for (node, rank) in ranked.items()], reverse=True)
        for rank, fname in top_rank:
            if fname in rel_other_fnames_without_tags:
                rel_other_fnames_without_tags.remove(fname)
            if fname not in fnames_already_included:
                ranked_tags.append((fname,))

        for fname in rel_other_fnames_without_tags:
            ranked_tags.append((fname,))

        return ranked_tags

    def get_ranked_tags_map(
        self,
        chat_fnames,
        other_fnames=None,
        max_map_tokens=None,
        mentioned_fnames=None,
        mentioned_idents=None,
        force_refresh=False,
    ):
        # Create a cache key
        cache_key = [
            tuple(sorted(chat_fnames)) if chat_fnames else None,
            tuple(sorted(other_fnames)) if other_fnames else None,
            max_map_tokens,
        ]

        if self.refresh == "auto":
            cache_key += [
                tuple(sorted(mentioned_fnames)) if mentioned_fnames else None,
                tuple(sorted(mentioned_idents)) if mentioned_idents else None,
            ]
        cache_key = tuple(cache_key)

        use_cache = False
        if not force_refresh:
            if self.refresh == "manual" and self.last_map:
                return self.last_map

            if self.refresh == "always":
                use_cache = False
            elif self.refresh == "files":
                use_cache = True
            elif self.refresh == "auto":
                use_cache = self.map_processing_time > 1.0

            # Check if the result is in the cache
            if use_cache and cache_key in self.map_cache:
                return self.map_cache[cache_key]

        # If not in cache or force_refresh is True, generate the map
        start_time = time.time()
        result = self.get_ranked_tags_map_uncached(
            chat_fnames, other_fnames, max_map_tokens, mentioned_fnames, mentioned_idents
        )
        end_time = time.time()
        self.map_processing_time = end_time - start_time

        # Store the result in the cache
        self.map_cache[cache_key] = result
        self.last_map = result

        return result

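    # Map rendering: binary-search over how many of the ranked tags to include
    # so the rendered tree uses close to max_map_tokens, stopping early once
    # the size is within roughly 15% of the budget.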
    def get_ranked_tags_map_uncached(
        self,
        chat_fnames,
        other_fnames=None,
        max_map_tokens=None,
        mentioned_fnames=None,
        mentioned_idents=None,
    ):
        if not other_fnames:
            other_fnames = list()
        if not max_map_tokens:
            max_map_tokens = self.max_map_tokens
        if not mentioned_fnames:
            mentioned_fnames = set()
        if not mentioned_idents:
            mentioned_idents = set()

        spin = Spinner(UPDATING_REPO_MAP_MESSAGE)

        ranked_tags = self.get_ranked_tags(
            chat_fnames,
            other_fnames,
            mentioned_fnames,
            mentioned_idents,
            progress=spin.step,
        )

        other_rel_fnames = sorted(set(self.get_rel_fname(fname) for fname in other_fnames))
        special_fnames = filter_important_files(other_rel_fnames)
        ranked_tags_fnames = set(tag[0] for tag in ranked_tags)
        special_fnames = [fn for fn in special_fnames if fn not in ranked_tags_fnames]
        special_fnames = [(fn,) for fn in special_fnames]

        ranked_tags = special_fnames + ranked_tags

        spin.step()

        num_tags = len(ranked_tags)
        lower_bound = 0
        upper_bound = num_tags
        best_tree = None
        best_tree_tokens = 0

        chat_rel_fnames = set(self.get_rel_fname(fname) for fname in chat_fnames)

        self.tree_cache = dict()

        middle = min(int(max_map_tokens // 25), num_tags)
        while lower_bound <= upper_bound:
            # dump(lower_bound, middle, upper_bound)

            if middle > 1500:
                show_tokens = f"{middle / 1000.0:.1f}K"
            else:
                show_tokens = str(middle)
            spin.step(f"{UPDATING_REPO_MAP_MESSAGE}: {show_tokens} tokens")

            tree = self.to_tree(ranked_tags[:middle], chat_rel_fnames)
            num_tokens = self.token_count(tree)

            pct_err = abs(num_tokens - max_map_tokens) / max_map_tokens
            ok_err = 0.15
            if (num_tokens <= max_map_tokens and num_tokens > best_tree_tokens) or pct_err < ok_err:
                best_tree = tree
                best_tree_tokens = num_tokens

                if pct_err < ok_err:
                    break

            if num_tokens < max_map_tokens:
                lower_bound = middle + 1
            else:
                upper_bound = middle - 1

            middle = int((lower_bound + upper_bound) // 2)

        spin.end()
        return best_tree

    tree_cache = dict()

    def render_tree(self, abs_fname, rel_fname, lois):
        mtime = self.get_mtime(abs_fname)
        key = (rel_fname, tuple(sorted(lois)), mtime)

        if key in self.tree_cache:
            return self.tree_cache[key]

        if (
            rel_fname not in self.tree_context_cache
            or self.tree_context_cache[rel_fname]["mtime"] != mtime
        ):
            code = self.io.read_text(abs_fname) or ""
            if not code.endswith("\n"):
                code += "\n"

            context = TreeContext(
                rel_fname,
                code,
                color=False,
                line_number=False,
                child_context=False,
                last_line=False,
                margin=0,
                mark_lois=False,
                loi_pad=0,
                # header_max=30,
                show_top_of_file_parent_scope=False,
            )
            self.tree_context_cache[rel_fname] = {"context": context, "mtime": mtime}

        context = self.tree_context_cache[rel_fname]["context"]
        context.lines_of_interest = set()
        context.add_lines_of_interest(lois)
        context.add_context()
        res = context.format()
        self.tree_cache[key] = res
        return res

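    # to_tree groups the ranked tags by file and uses grep_ast's TreeContext
    # (via render_tree) to show each file's lines of interest with surrounding
    # structural context; files already in the chat are skipped.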
    def to_tree(self, tags, chat_rel_fnames):
        if not tags:
            return ""

        cur_fname = None
        cur_abs_fname = None
        lois = None
        output = ""

        # add a bogus tag at the end so we trip the this_fname != cur_fname...
        dummy_tag = (None,)
        for tag in sorted(tags) + [dummy_tag]:
            this_rel_fname = tag[0]
            if this_rel_fname in chat_rel_fnames:
                continue

            # ... here ... to output the final real entry in the list
            if this_rel_fname != cur_fname:
                if lois is not None:
                    output += "\n"
                    output += cur_fname + ":\n"
                    output += self.render_tree(cur_abs_fname, cur_fname, lois)
                    lois = None
                elif cur_fname:
                    output += "\n" + cur_fname + "\n"
                if type(tag) is Tag:
                    lois = []
                    cur_abs_fname = tag.fname
                cur_fname = this_rel_fname

            if lois is not None:
                lois.append(tag.line)

        # truncate long lines, in case we get minified js or something else crazy
        output = "\n".join([line[:100] for line in output.splitlines()]) + "\n"

        return output


def find_src_files(directory):
    if not os.path.isdir(directory):
        return [directory]

    src_files = []
    for root, dirs, files in os.walk(directory):
        for file in files:
            src_files.append(os.path.join(root, file))
    return src_files


def get_random_color():
    hue = random.random()
    r, g, b = [int(x * 255) for x in colorsys.hsv_to_rgb(hue, 1, 0.75)]
    res = f"#{r:02x}{g:02x}{b:02x}"
    return res


def get_scm_fname(lang):
    # Load the tags queries
    if USING_TSL_PACK:
        subdir = "tree-sitter-language-pack"
        try:
            path = resources.files(__package__).joinpath(
                "queries",
                subdir,
                f"{lang}-tags.scm",
            )
            if path.exists():
                return path
        except KeyError:
            pass

    # Fall back to tree-sitter-languages
    subdir = "tree-sitter-languages"
    try:
        return resources.files(__package__).joinpath(
            "queries",
            subdir,
            f"{lang}-tags.scm",
        )
    except KeyError:
        return


def get_supported_languages_md():
    from grep_ast.parsers import PARSERS

    res = """
| Language | File extension | Repo map | Linter |
|:--------:|:--------------:|:--------:|:------:|
"""
    data = sorted((lang, ex) for ex, lang in PARSERS.items())

    for lang, ext in data:
        fn = get_scm_fname(lang)
        repo_map = "✓" if Path(fn).exists() else ""
        linter_support = "✓"
        res += f"| {lang:20} | {ext:20} | {repo_map:^8} | {linter_support:^6} |\n"

    res += "\n"

    return res


if __name__ == "__main__":
    fnames = sys.argv[1:]

    chat_fnames = []
    other_fnames = []
    for fname in sys.argv[1:]:
        if Path(fname).is_dir():
            chat_fnames += find_src_files(fname)
        else:
            chat_fnames.append(fname)

    rm = RepoMap(root=".")
    repo_map = rm.get_ranked_tags_map(chat_fnames, other_fnames)

    dump(len(repo_map))
    print(repo_map)
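The __main__ block above shows the simplest command-line invocation. For orientation, a minimal sketch of driving RepoMap programmatically follows; it assumes aider's InputOutput and Model helpers from this package and uses a placeholder model name and file path, so treat it as an illustration rather than part of the distributed code:

    from aider.io import InputOutput
    from aider.models import Model
    from aider.repomap import RepoMap

    io = InputOutput()
    model = Model("gpt-4o")  # placeholder model name
    rm = RepoMap(root=".", io=io, main_model=model, map_tokens=1024)
    # With no chat files, the whole token budget goes to mapping other_files
    print(rm.get_repo_map(chat_files=[], other_files=["aider/repomap.py"]))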