Typhon-Language 0.1.3__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. Typhon/Driver/configs.py +14 -0
  2. Typhon/Driver/debugging.py +148 -5
  3. Typhon/Driver/diagnostic.py +4 -3
  4. Typhon/Driver/language_server.py +25 -0
  5. Typhon/Driver/run.py +1 -1
  6. Typhon/Driver/translate.py +14 -10
  7. Typhon/Driver/utils.py +39 -1
  8. Typhon/Grammar/_typhon_parser.py +2738 -2525
  9. Typhon/Grammar/parser.py +80 -53
  10. Typhon/Grammar/parser_helper.py +68 -87
  11. Typhon/Grammar/syntax_errors.py +31 -21
  12. Typhon/Grammar/token_factory_custom.py +541 -485
  13. Typhon/Grammar/tokenizer_custom.py +52 -0
  14. Typhon/Grammar/typhon_ast.py +372 -44
  15. Typhon/Grammar/typhon_ast_error.py +438 -0
  16. Typhon/LanguageServer/__init__.py +3 -0
  17. Typhon/LanguageServer/client/__init__.py +42 -0
  18. Typhon/LanguageServer/client/pyrefly.py +115 -0
  19. Typhon/LanguageServer/client/pyright.py +173 -0
  20. Typhon/LanguageServer/semantic_tokens.py +446 -0
  21. Typhon/LanguageServer/server.py +376 -0
  22. Typhon/LanguageServer/utils.py +65 -0
  23. Typhon/SourceMap/ast_match_based_map.py +199 -152
  24. Typhon/SourceMap/ast_matching.py +102 -87
  25. Typhon/SourceMap/datatype.py +27 -16
  26. Typhon/SourceMap/defined_name_retrieve.py +145 -0
  27. Typhon/Transform/comprehension_to_function.py +2 -5
  28. Typhon/Transform/const_member_to_final.py +12 -7
  29. Typhon/Transform/forbidden_statements.py +1 -0
  30. Typhon/Transform/optional_operators_to_checked.py +14 -6
  31. Typhon/Transform/scope_check_rename.py +44 -18
  32. Typhon/Transform/type_abbrev_desugar.py +11 -15
  33. Typhon/Transform/type_annotation_check_expand.py +2 -2
  34. Typhon/Transform/utils/imports.py +39 -4
  35. Typhon/Transform/utils/make_class.py +18 -23
  36. Typhon/Transform/visitor.py +25 -0
  37. Typhon/Typing/pyrefly.py +145 -0
  38. Typhon/Typing/pyright.py +2 -4
  39. Typhon/__main__.py +15 -1
  40. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/METADATA +7 -5
  41. typhon_language-0.1.4.dist-info/RECORD +65 -0
  42. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/WHEEL +1 -1
  43. typhon_language-0.1.4.dist-info/licenses/LICENSE +201 -0
  44. typhon_language-0.1.3.dist-info/RECORD +0 -53
  45. typhon_language-0.1.3.dist-info/licenses/LICENSE +0 -21
  46. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/entry_points.txt +0 -0
  47. {typhon_language-0.1.3.dist-info → typhon_language-0.1.4.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,376 @@
1
+ import copy
2
+ from lsprotocol.types import SemanticTokens
3
+ from typing import Any, Sequence, override, Protocol, Final
4
+ import ast
5
+ import traceback
6
+ from pathlib import Path
7
+ import asyncio
8
+
9
+ from pygls.lsp.server import LanguageServer as PyglsLanguageServer
10
+ from pygls.lsp.client import LanguageClient
11
+ from pygls.workspace import TextDocument
12
+ from lsprotocol import types
13
+
14
+ from ..Grammar.tokenizer_custom import TokenInfo, tokenizer_for_string
15
+ from ..Grammar.parser import parse_tokenizer
16
+ from ..Grammar.unparse_custom import unparse_custom
17
+ from ..Transform.transform import transform
18
+ from ..Driver.debugging import (
19
+ debug_file_write,
20
+ debug_file_write_verbose,
21
+ is_debug_mode,
22
+ )
23
+ from ..Driver.utils import (
24
+ output_dir_for_server_workspace,
25
+ default_server_output_dir,
26
+ mkdir_and_setup_init_py,
27
+ )
28
+ from ..SourceMap.ast_match_based_map import map_from_translated
29
+ from ..SourceMap.ast_match_based_map import MatchBasedSourceMap
30
+ from .client import create_language_client, start_language_client
31
+ from .semantic_tokens import (
32
+ SemanticToken,
33
+ ast_tokens_to_semantic_tokens,
34
+ encode_semantic_tokens,
35
+ semantic_legend,
36
+ map_semantic_tokens,
37
+ semantic_legends_of_initialized_response,
38
+ semantic_token_capabilities,
39
+ )
40
+ from .utils import (
41
+ uri_to_path,
42
+ path_to_uri,
43
+ clone_and_map_initialize_param,
44
+ )
45
+
46
+
47
class URIContainer(Protocol):
    """Structural type for any LSP object that exposes a document ``uri``."""

    uri: str
49
+
50
+
51
class URIMappableParams(Protocol):
    """Structural type for request params carrying a ``text_document.uri``.

    ``Final`` documents that handlers rewrite only the nested ``uri``
    attribute and never rebind ``text_document`` itself (see
    ``LanguageServer.clone_params_map_uri``).
    """

    text_document: Final[URIContainer]
53
+
54
+
55
+ class LanguageServer(PyglsLanguageServer):
56
+ @override
57
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
58
+ PyglsLanguageServer.__init__(self, *args, **kwargs) # type: ignore
59
+ self.ast_modules: dict[str, ast.Module | None] = {}
60
+ self.token_infos: dict[str, list[TokenInfo]] = {}
61
+ self.semantic_tokens: dict[str, list[SemanticToken]] = {}
62
+ self.semantic_tokens_encoded: dict[str, Sequence[int]] = {}
63
+ self.backend_client: LanguageClient = create_language_client()
64
+ self.mapping: dict[str, MatchBasedSourceMap] = {}
65
+ self.client_semantic_legend: dict[int, str] = {}
66
+
67
+ def reload(self, uri: str) -> str | None:
68
+ doc: TextDocument = self.workspace.get_text_document(uri)
69
+ try:
70
+ source = doc.source
71
+ debug_file_write(f"Reloading document {uri} source: {source[:100]}...")
72
+ tokenizer = tokenizer_for_string(source)
73
+ self.token_infos[uri] = tokenizer.read_all_tokens()
74
+ ast_node = parse_tokenizer(tokenizer)
75
+ if not isinstance(ast_node, ast.Module):
76
+ ast_node = None
77
+ debug_file_write(
78
+ f"Parsed AST for {uri}: {ast.dump(ast_node) if ast_node else 'None'}"
79
+ )
80
+ if ast_node:
81
+ transform(ast_node)
82
+ self.ast_modules[uri] = ast_node
83
+ semantic_tokens, encoded = ast_tokens_to_semantic_tokens(
84
+ ast_node, self.token_infos[uri], doc
85
+ )
86
+ self.semantic_tokens[uri] = semantic_tokens
87
+ self.semantic_tokens_encoded[uri] = encoded
88
+ if not ast_node:
89
+ return None
90
+ # Write translated file to server temporal directory
91
+ translate_file_path, root = self.get_translated_file_path_and_root(
92
+ Path(doc.path)
93
+ )
94
+ unparsed = unparse_custom(ast_node)
95
+ mapping = map_from_translated(ast_node, source, doc.path, unparsed)
96
+ if mapping:
97
+ self.mapping[uri] = mapping
98
+ self.setup_container_directories(translate_file_path, root)
99
+ with open(translate_file_path, "w", encoding="utf-8") as f:
100
+ debug_file_write(
101
+ f"Writing translated file to {translate_file_path}, length={len(unparsed)}"
102
+ )
103
+ f.write(unparsed)
104
+ return unparsed
105
+ except Exception as e:
106
+ self.ast_modules[uri] = None
107
+ self.token_infos[uri] = []
108
+ self.semantic_tokens[uri] = []
109
+ self.semantic_tokens_encoded[uri] = []
110
+ debug_file_write(f"Error reloading document {uri}: {e}")
111
+ # dump traceback to debug log
112
+ traceback_str = traceback.format_exc()
113
+ debug_file_write_verbose(f"Traceback:\n{traceback_str}")
114
+
115
+ def on_initialize_backend(self, init_result: types.InitializeResult) -> None:
116
+ if semantic_token_provider := init_result.capabilities.semantic_tokens_provider:
117
+ legend = semantic_token_provider.legend
118
+ self.client_semantic_legend = semantic_legends_of_initialized_response(
119
+ legend
120
+ )
121
+ debug_file_write(
122
+ f"Client semantic token legend: {self.client_semantic_legend}"
123
+ )
124
+
125
+ def server_workspace_root(self) -> Path | None:
126
+ if self.workspace.root_path:
127
+ return output_dir_for_server_workspace(Path(self.workspace.root_path))
128
+ else:
129
+ return None
130
+
131
+ def get_translated_file_path_and_root(self, src: Path) -> tuple[Path, Path | None]:
132
+ """
133
+ Translated path is obtained by replacing workspace root with server temporal directory.
134
+ """
135
+ for uri, folder in self.workspace.folders.items():
136
+ folder_path = uri_to_path(folder.uri)
137
+ try:
138
+ relative_path = src.relative_to(folder_path)
139
+ py_name = relative_path.parent / (relative_path.stem + ".py")
140
+ server_dir = output_dir_for_server_workspace(folder_path)
141
+ return server_dir / py_name, folder_path
142
+ except ValueError:
143
+ continue
144
+ # No workspace, use default path for single file mode
145
+ server_dir = default_server_output_dir(src.as_posix())
146
+ return server_dir / src.name, None
147
+
148
+ def get_translated_file_uri(self, src_uri: str) -> str:
149
+ src_path = uri_to_path(src_uri)
150
+ translated_path, _ = self.get_translated_file_path_and_root(src_path)
151
+ uri = path_to_uri(translated_path)
152
+ return uri
153
+
154
+ def setup_container_directories(self, file: Path, root: Path | None) -> None:
155
+ """
156
+ Create directory and __init__.py for all parent folders until workspace root.
157
+ """
158
+ if not root:
159
+ return
160
+ workspace_root = Path(root)
161
+ parent = file.parent
162
+ while parent != workspace_root and not parent.exists():
163
+ mkdir_and_setup_init_py(parent)
164
+
165
+ def clone_params_map_uri[T: URIMappableParams](self, param: T) -> T:
166
+ cloned_param = copy.deepcopy(param)
167
+ cloned_param.text_document.uri = self.get_translated_file_uri(
168
+ cloned_param.text_document.uri
169
+ )
170
+ return cloned_param
171
+
172
+
173
# Module-level singletons: the editor-facing server and the backend-facing client.
server = LanguageServer("typhon-language-server", "v0.1.4")
client = server.backend_client
175
+
176
+
177
@server.feature(types.INITIALIZE)
async def lsp_server_initialize(ls: LanguageServer, params: types.InitializeParams):
    """Start the backend and initialize it against the translated workspace."""
    try:
        debug_file_write(f"Initializing with params: {params}\n")
        await start_language_client(ls.backend_client)
        debug_file_write("Backend client started.\n")
        # The backend must look at the translated workspace, not the Typhon one.
        backend_params = clone_and_map_initialize_param(params)
        debug_file_write(f"Initializing backend with cloned params: {backend_params}\n")
        init_result = await ls.backend_client.initialize_async(backend_params)
        debug_file_write(f"Backend initialize result: {init_result}\n")
        ls.on_initialize_backend(init_result)
        # Helpful for diagnosing why semanticTokens requests hang.
        try:
            capabilities = getattr(init_result, "capabilities", None)
            tokens_provider = (
                getattr(capabilities, "semantic_tokens_provider", None)
                if capabilities
                else None
            )
            debug_file_write(f"Backend semanticTokensProvider: {tokens_provider}")
        except Exception as e:
            debug_file_write(f"Failed to inspect backend capabilities: {e}")
    except Exception as e:
        debug_file_write(f"Error during initialization: {e}")
199
+
200
+
201
@server.feature(types.INITIALIZED)
def lsp_client_initialized(ls: LanguageServer, params: types.InitializedParams):
    """Relay the editor's `initialized` notification to the backend."""
    try:
        debug_file_write(f"Sending initialized notification to backend {params}")
        ls.backend_client.initialized(params)
    except Exception as e:
        debug_file_write(f"Error during initialized notification: {e}")
208
+
209
+
210
@client.feature(types.WORKSPACE_CONFIGURATION)  # type: ignore
async def on_workspace_configuration(
    ls_client: LanguageClient, params: types.ConfigurationParams
):
    """Relay the backend's workspace/configuration request to the editor."""
    debug_file_write(f"Backend requested configuration: {params}")
    try:
        return await server.workspace_configuration_async(params)
    except Exception as e:
        debug_file_write(f"Error fetching configuration: {e}")
        # Backends like Pyright expect a list with the same length as params.items.
        return [{}] * len(params.items)
222
+
223
+
224
@server.feature(types.TEXT_DOCUMENT_DIAGNOSTIC)
async def on_text_document_diagnostic(
    ls: LanguageServer, params: types.DocumentDiagnosticParams
):
    """Decline pull-diagnostic requests; always returns None."""
    debug_file_write(f"Server requested diagnostics: {params}")
    # Typhon currently focuses on semantic tokens and does not surface backend diagnostics.
    # Forwarding diagnostics can be expensive and may delay semantic token responses.
    return None
232
+
233
+
234
@client.feature(types.CLIENT_REGISTER_CAPABILITY)  # type: ignore
async def on_register_capability(
    ls_client: LanguageClient, params: types.RegistrationParams
):
    """Forward backend registerCapability requests, minus pull-diagnostic ones.

    Dynamic pull-diagnostic registration makes VS Code immediately issue
    `textDocument/diagnostic` requests, which Typhon does not surface and
    which can starve other requests (like semanticTokens), so those
    registrations are stripped before forwarding.
    """
    debug_file_write(f"Backend requested registerCapability: {params}")
    kept = tuple(
        reg
        for reg in params.registrations
        if reg.method != "textDocument/diagnostic"
    )
    if len(kept) != len(params.registrations):
        debug_file_write("Dropping dynamic registration(s) for textDocument/diagnostic")
    if not kept:
        return None
    forwarded = types.RegistrationParams(registrations=kept)
    return await server.client_register_capability_async(forwarded)
253
+
254
+
255
@client.feature(types.CLIENT_UNREGISTER_CAPABILITY)  # type: ignore
async def on_unregister_capability(
    ls_client: LanguageClient, params: types.UnregistrationParams
):
    """Forward backend unregisterCapability requests, minus pull-diagnostic ones.

    Pull-diagnostic registrations are never forwarded to the editor, so the
    matching unregistrations are dropped here as well. (``unregisterations``
    is the LSP specification's own spelling of the field.)
    """
    debug_file_write(f"Backend requested unregisterCapability: {params}")
    kept = tuple(
        unreg
        for unreg in params.unregisterations
        if unreg.method != "textDocument/diagnostic"
    )
    if len(kept) != len(params.unregisterations):
        debug_file_write(
            "Dropping dynamic unregistration(s) for textDocument/diagnostic"
        )
    if not kept:
        return None
    forwarded = types.UnregistrationParams(unregisterations=kept)
    return await server.client_unregister_capability_async(forwarded)
272
+
273
+
274
@client.feature(types.WORKSPACE_WORKSPACE_FOLDERS)  # type: ignore
async def on_workspace_folders(ls_client: LanguageClient, params: None):
    """Answer the backend's workspace/workspaceFolders request.

    NOTE(review): the editor's folders are returned untranslated here, unlike
    the initialize params (which are remapped to the server workspace) —
    confirm this asymmetry is intended.
    """
    debug_file_write("Backend requested workspace folders")
    return await server.workspace_workspace_folders_async(None)
278
+
279
+
280
@client.feature(types.TEXT_DOCUMENT_PUBLISH_DIAGNOSTICS)  # type: ignore
async def on_publish_diagnostics(
    ls_client: LanguageClient, params: types.PublishDiagnosticsParams
):
    """Forward diagnostics pushed by the backend to the editor.

    NOTE(review): ``params.uri`` and positions still reference the *translated*
    Python file; they are not mapped back to the Typhon source here — confirm
    whether the editor should receive mapped diagnostics instead.
    """
    debug_file_write(f"Backend published diagnostics: {params}")
    server.text_document_publish_diagnostics(params)
286
+
287
+
288
# Define handlers for debug logging from backend
@client.feature(types.WINDOW_LOG_MESSAGE)  # type: ignore
async def on_backend_log(ls_client: LanguageClient, log_params: types.LogMessageParams):
    """Mirror backend window/logMessage notifications into the debug log."""
    debug_file_write(f"[Backend Log] {log_params.message}")
292
+
293
+
294
@server.feature(types.WORKSPACE_DID_CHANGE_WATCHED_FILES)
def did_change_watched_files(
    ls: LanguageServer, params: types.DidChangeWatchedFilesParams
):
    """Translate watched-file events into server-workspace URIs.

    The relay to the backend is currently commented out (see below), so the
    translated params are built and logged only.
    """
    debug_file_write(f"Relaying didChangeWatchedFiles to backend: {params}")
    uri_changed_params = types.DidChangeWatchedFilesParams(
        changes=[
            types.FileEvent(
                uri=ls.get_translated_file_uri(change.uri),
                type=change.type,
            )
            for change in params.changes
        ]
    )
    # NOTE(review): forwarding is switched off — `uri_changed_params` is built
    # but never sent. Re-enable the line below, or drop the construction above,
    # once the intended behavior is decided.
    # ls.backend_client.workspace_did_change_watched_files(uri_changed_params)
310
+
311
+
312
@server.feature(types.TEXT_DOCUMENT_DID_OPEN)
def did_open(ls: LanguageServer, params: types.DidOpenTextDocumentParams):
    """Translate a freshly opened document and open the translation on the backend."""
    try:
        doc_uri = params.text_document.uri
        translated = ls.reload(doc_uri)
        backend_params = ls.clone_params_map_uri(params)
        # The backend only understands Python: swap in the translated source
        # and advertise it under the python language id.
        backend_params.text_document.language_id = "python"
        backend_params.text_document.text = "" if translated is None else translated
        debug_file_write(f"Did open called for {doc_uri}, translated to {backend_params}")
        ls.backend_client.text_document_did_open(backend_params)
    except Exception as e:
        debug_file_write(f"Error during document open: {e}")
324
+
325
+
326
@server.feature(types.TEXT_DOCUMENT_DID_CHANGE)
def did_change(ls: LanguageServer, params: types.DidChangeTextDocumentParams):
    """Re-translate a changed document and notify the backend."""
    try:
        doc_uri = params.text_document.uri
        # Refresh caches and rewrite the translated file on disk first.
        ls.reload(doc_uri)
        backend_params = ls.clone_params_map_uri(params)
        # NOTE(review): content_changes still carry the raw Typhon edits, not
        # the translated Python text — confirm the backend tolerates this or
        # re-reads the translated file from disk.
        debug_file_write(f"Did change called for {doc_uri}, translated to {backend_params}")
        ls.backend_client.text_document_did_change(backend_params)
    except Exception as e:
        debug_file_write(f"Error during document change: {e}")
337
+
338
+
339
@server.feature(types.TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL, semantic_legend())
async def semantic_tokens_full(ls: LanguageServer, params: types.SemanticTokensParams):
    """Answer semanticTokens/full via the backend, mapping positions back.

    Falls back to locally computed (rough) tokens when the backend request
    fails, mapping them too when a source map is available.
    """
    doc_uri = params.text_document.uri
    try:
        debug_file_write(f"Semantic tokens requested {params}")
        backend_params = ls.clone_params_map_uri(params)
        debug_file_write(f"Translated URI for semantic tokens: {backend_params}")
        backend_tokens: SemanticTokens | None = (
            await ls.backend_client.text_document_semantic_tokens_full_async(
                backend_params,
            )
        )
        debug_file_write(f"Received semantic tokens: {backend_tokens}")
        if not backend_tokens:
            return None
        source_map = ls.mapping.get(doc_uri)
        if source_map is None:
            return None
        mapped = map_semantic_tokens(backend_tokens, source_map, ls.client_semantic_legend)
        debug_file_write(f"Mapped semantic tokens: {mapped}")
        return mapped
    except Exception as e:
        debug_file_write(
            f"Error during semantic tokens retrieval: {type(e).__name__}: {e}"
        )
        # Fall back to precomputed rough semantic tokens (encoded).
        fallback_map = ls.mapping.get(doc_uri)
        if fallback_map is not None:
            try:
                fallback = encode_semantic_tokens(ls.semantic_tokens.get(doc_uri, []))
                debug_file_write(f"Fallback: {fallback}")
                return map_semantic_tokens(fallback, fallback_map, ls.client_semantic_legend)
            except Exception as mapping_error:
                debug_file_write(f"Fallback mapping failed: {mapping_error}")
        return encode_semantic_tokens(ls.semantic_tokens.get(doc_uri, []))
@@ -0,0 +1,65 @@
1
+ import copy
2
+ from pathlib import Path
3
+
4
+ from lsprotocol import types
5
+ from pygls import uris
6
+ from pygls.workspace import text_document
7
+
8
+ from ..Driver.debugging import (
9
+ debug_file_write,
10
+ is_debug_mode,
11
+ )
12
+ from ..Driver.utils import output_dir_for_server_workspace
13
+ from .semantic_tokens import semantic_token_capabilities
14
+ from .client import configure_language_client_option
15
+
16
+
17
def uri_to_path(uri: str) -> Path:
    """Convert a file URI into a filesystem :class:`Path`.

    Raises:
        ValueError: if *uri* has no filesystem representation.
    """
    converted = uris.to_fs_path(uri)
    if converted is None:
        raise ValueError(f"Could not convert URI to path: {uri}")
    return Path(converted)
22
+
23
+
24
def path_to_uri(path: Path) -> str:
    """Convert *path* into a canonical ``file://`` URI.

    pathlib's ``as_uri`` yields canonical Windows URIs such as
    ``file:///c:/Users/...`` (not ``file:///c%3A/Users/...``); some LSP
    backends treat those spellings as different URIs and ignore requests.

    Raises:
        ValueError: if *path* cannot be expressed as a URI.
    """
    try:
        absolute = path.resolve()
        return absolute.as_uri()
    except Exception as e:
        raise ValueError(f"Could not convert path to URI: {path}") from e
32
+
33
+
34
def clone_and_map_initialize_param(
    params: types.InitializeParams,
) -> types.InitializeParams:
    """Deep-copy initialize params, remapping workspace roots to the server
    (translated-output) workspace before they are sent to the backend.

    Returns a new ``InitializeParams``; *params* itself is never mutated.
    """
    cloned_params = copy.deepcopy(params)
    cloned_params = configure_language_client_option(cloned_params)
    # Modify workspace folders to server workspace folders.
    if params.workspace_folders:
        debug_file_write(f"Workspace folders: {params.workspace_folders}")
        cloned_params.workspace_folders = [
            types.WorkspaceFolder(
                uri=path_to_uri(output_dir_for_server_workspace(uri_to_path(f.uri))),
                name=f.name,
            )
            for f in params.workspace_folders
        ]
    # Setup the deprecated root_uri and root_path as well.
    if params.root_uri:
        cloned_params.root_uri = path_to_uri(
            output_dir_for_server_workspace(uri_to_path(params.root_uri))
        )
    # NOTE(review): the mapped root_uri computed above is immediately discarded
    # by this unconditional overwrite (same pattern for root_path below) —
    # either the mapping or the None-ing is dead code; confirm which is intended.
    cloned_params.root_uri = None
    if params.root_path:
        # `rootPath` is deprecated but some servers still consult it.
        # Keep it as a native filesystem path on Windows.
        cloned_params.root_path = str(
            output_dir_for_server_workspace(Path(params.root_path))
        )
    # NOTE(review): unconditional overwrite discards the mapped root_path above.
    cloned_params.root_path = None
    # Debug trace handling
    if is_debug_mode():
        cloned_params.trace = types.TraceValue.Verbose
    return cloned_params