Typhon-Language 0.1.2-py3-none-any.whl → 0.1.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. Typhon/Driver/configs.py +14 -0
  2. Typhon/Driver/debugging.py +148 -5
  3. Typhon/Driver/diagnostic.py +4 -3
  4. Typhon/Driver/language_server.py +25 -0
  5. Typhon/Driver/run.py +1 -1
  6. Typhon/Driver/translate.py +16 -11
  7. Typhon/Driver/utils.py +39 -1
  8. Typhon/Grammar/_typhon_parser.py +2920 -2718
  9. Typhon/Grammar/parser.py +80 -53
  10. Typhon/Grammar/parser_helper.py +68 -87
  11. Typhon/Grammar/syntax_errors.py +41 -20
  12. Typhon/Grammar/token_factory_custom.py +541 -485
  13. Typhon/Grammar/tokenizer_custom.py +52 -0
  14. Typhon/Grammar/typhon_ast.py +754 -76
  15. Typhon/Grammar/typhon_ast_error.py +438 -0
  16. Typhon/Grammar/unparse_custom.py +25 -0
  17. Typhon/LanguageServer/__init__.py +3 -0
  18. Typhon/LanguageServer/client/__init__.py +42 -0
  19. Typhon/LanguageServer/client/pyrefly.py +115 -0
  20. Typhon/LanguageServer/client/pyright.py +173 -0
  21. Typhon/LanguageServer/semantic_tokens.py +446 -0
  22. Typhon/LanguageServer/server.py +376 -0
  23. Typhon/LanguageServer/utils.py +65 -0
  24. Typhon/SourceMap/ast_match_based_map.py +199 -152
  25. Typhon/SourceMap/ast_matching.py +102 -87
  26. Typhon/SourceMap/datatype.py +275 -264
  27. Typhon/SourceMap/defined_name_retrieve.py +145 -0
  28. Typhon/Transform/comprehension_to_function.py +2 -5
  29. Typhon/Transform/const_member_to_final.py +12 -7
  30. Typhon/Transform/extended_patterns.py +139 -0
  31. Typhon/Transform/forbidden_statements.py +25 -0
  32. Typhon/Transform/if_while_let.py +122 -11
  33. Typhon/Transform/inline_statement_block_capture.py +22 -15
  34. Typhon/Transform/optional_operators_to_checked.py +14 -6
  35. Typhon/Transform/placeholder_to_function.py +0 -1
  36. Typhon/Transform/record_to_dataclass.py +22 -238
  37. Typhon/Transform/scope_check_rename.py +109 -29
  38. Typhon/Transform/transform.py +16 -12
  39. Typhon/Transform/type_abbrev_desugar.py +11 -15
  40. Typhon/Transform/type_annotation_check_expand.py +2 -2
  41. Typhon/Transform/utils/__init__.py +0 -0
  42. Typhon/Transform/utils/imports.py +83 -0
  43. Typhon/Transform/{utils.py → utils/jump_away.py} +2 -38
  44. Typhon/Transform/utils/make_class.py +135 -0
  45. Typhon/Transform/visitor.py +25 -0
  46. Typhon/Typing/pyrefly.py +145 -0
  47. Typhon/Typing/pyright.py +141 -144
  48. Typhon/Typing/result_diagnostic.py +1 -1
  49. Typhon/__main__.py +15 -1
  50. {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/METADATA +13 -6
  51. typhon_language-0.1.4.dist-info/RECORD +65 -0
  52. {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/WHEEL +1 -1
  53. typhon_language-0.1.4.dist-info/licenses/LICENSE +201 -0
  54. typhon_language-0.1.2.dist-info/RECORD +0 -48
  55. typhon_language-0.1.2.dist-info/licenses/LICENSE +0 -21
  56. {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/entry_points.txt +0 -0
  57. {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/top_level.txt +0 -0

Typhon/Grammar/typhon_ast_error.py
@@ -0,0 +1,438 @@
+import ast
+from typing import Unpack, cast
+from tokenize import TokenInfo
+from .typhon_ast import (
+    PosAttributes,
+    unpack_pos_default,
+    get_pos_attributes,
+    PosNode,
+    make_for_let_pattern,
+    make_function_def,
+    make_class_def,
+    get_invalid_name,
+    unpack_pos_tuple,
+)
+from ..Driver.debugging import debug_print
+from .parser_helper import Parser
+from .syntax_errors import set_syntax_error
+from ..Transform.visitor import TyphonASTRawVisitor
+
+
+_ERROR_NODE = "_typh_error_node"
+
+
+def set_error_node[T: ast.AST](node: T, errors: list[SyntaxError]) -> T:
+    setattr(node, _ERROR_NODE, errors)
+    return node
+
+
+def add_error_node[T: ast.AST](node: T, errors: list[SyntaxError]) -> T:
+    new_errors = get_error_node(node) + errors
+    print(f"Adding errors to node {type(node).__name__}: {new_errors}")
+    return set_error_node(node, new_errors)
+
+
+def get_error_node(node: ast.AST) -> list[SyntaxError]:
+    return getattr(node, _ERROR_NODE, [])
+
+
+def clear_error_node(node: ast.AST) -> None:
+    if hasattr(node, _ERROR_NODE):
+        delattr(node, _ERROR_NODE)
+
+
+class ErrorPositionHolder(PosAttributes):
+    pass
+
+
+def maybe_invalid_block(
+    parser: Parser,
+    open_brace: TokenInfo | None,
+    close_brace: TokenInfo | None,
+    body: list[ast.stmt],
+) -> list[ast.stmt]:
+    if open_brace is None:
+        if body:
+            stmt = body[0]
+            lineno, col_offset, _, _ = unpack_pos_default(get_pos_attributes(stmt))
+            error_open = parser.build_expected_error(
+                "'{'",
+                (lineno, col_offset),
+                (lineno, col_offset + 1),
+            )
+            add_error_node(stmt, [error_open])
+    if close_brace is None:
+        if body:
+            stmt = body[-1]
+            _, _, lineno, col_offset = unpack_pos_default(get_pos_attributes(stmt))
+            error_close = parser.build_expected_error(
+                "'}'",
+                (lineno, col_offset),
+                (lineno, col_offset + 1),
+            )
+            add_error_node(stmt, [error_close])
+    return body
+
+
+def maybe_invalid_braces[T: PosNode](
+    parser: Parser,
+    open_brace: TokenInfo | None,
+    close_brace: TokenInfo | None,
+    node: T,
+    *,
+    open_anchor: PosNode | TokenInfo | None = None,
+) -> T:
+    lineno, col_offset, end_lineno, end_col_offset = unpack_pos_default(
+        get_pos_attributes(node)
+    )
+    if open_anchor:
+        _, _, lineno, col_offset = unpack_pos_default(get_pos_attributes(open_anchor))
+    if open_brace is None:
+        error_open = parser.build_expected_error(
+            "'{'",
+            (lineno, col_offset),
+            (lineno, col_offset + 1),
+        )
+        add_error_node(node, [error_open])
+
+    if close_brace is None:
+        error_close = parser.build_expected_error(
+            "'}'",
+            (end_lineno, end_col_offset),
+            (end_lineno, end_col_offset + 1),
+        )
+        add_error_node(node, [error_close])
+    return node
+
+
+def _pos_of_anchor(
+    anchor: PosNode | TokenInfo,
+) -> tuple[tuple[int, int], tuple[int, int]]:
+    if isinstance(anchor, TokenInfo):
+        return anchor.end, _next_col(anchor.end)
+    else:  # PosNode
+        _, _, e_lineno, e_col = unpack_pos_default(get_pos_attributes(anchor))
+        start_loc = (e_lineno, e_col)
+        end_loc = _next_col(start_loc)
+        return start_loc, end_loc
+
+
+def _next_col(pos: tuple[int, int]) -> tuple[int, int]:
+    return pos[0], pos[1] + 1
+
+
+def maybe_invalid_stmt[T: PosNode](
+    parser: Parser,
+    open_paren: TokenInfo | None,
+    close_paren: TokenInfo | None,
+    *,
+    node: T,
+    open_anchor: PosNode | TokenInfo,
+    close_anchor: PosNode | TokenInfo,
+    message: str | None = None,
+    message_anchor: PosNode | TokenInfo | None = None,
+) -> T:
+    lineno, col_offset, end_lineno, end_col_offset = unpack_pos_default(
+        get_pos_attributes(node)
+    )
+    if message is not None:
+        error = parser.build_syntax_error(
+            message,
+            (lineno, col_offset),
+            (end_lineno, end_col_offset),
+        )
+        add_error_node(node, [error])
+    if open_paren is None:
+        start_loc, end_loc = _pos_of_anchor(open_anchor)
+        debug_print(
+            f"open paren missing: {start_loc} to {end_loc} open anchor: {open_anchor}"
+        )
+        error = parser.build_expected_error("'('", start_loc, end_loc)
+        add_error_node(node, [error])
+    if close_paren is None:
+        start_loc, end_loc = _pos_of_anchor(close_anchor)
+        debug_print(
+            f"close paren missing: {start_loc} to {end_loc} close anchor: {close_anchor}"
+        )
+        error = parser.build_expected_error("')'", start_loc, end_loc)
+        add_error_node(node, [error])
+    return node
+
+
+def recover_invalid_try(
+    parser: Parser,
+    message: str,
+    node: ast.Try,
+) -> ast.Try:
+    lineno, col_offset, end_lineno, end_col_offset = unpack_pos_default(
+        get_pos_attributes(node)
+    )
+    error = parser.build_syntax_error(
+        message,
+        (lineno, col_offset),
+        (end_lineno, end_col_offset),
+    )
+    return add_error_node(node, [error])
+
+
+def recover_invalid_for(
+    parser: Parser,
+    open_paren: TokenInfo | None,
+    close_paren: TokenInfo | None,
+    is_async: bool,
+    decl_keyword: TokenInfo | None,
+    pattern: ast.pattern | None,
+    in_keyword: TokenInfo | None,
+    expression: ast.expr | None,
+    body: list[ast.stmt],
+    *,
+    open_anchor: PosNode | TokenInfo,
+    **kwargs: Unpack[PosAttributes],
+) -> ast.For | ast.AsyncFor:
+    current_anchor: tuple[int, int] = _pos_of_anchor(open_anchor)[0]
+    errors: list[SyntaxError] = []
+
+    def error_expect(mes: str):
+        nonlocal errors
+        nonlocal current_anchor
+        error = parser.build_expected_error(
+            mes,
+            current_anchor,
+            _next_col(current_anchor),
+        )
+        errors.append(error)
+        current_anchor = _next_col(current_anchor)
+
+    if not open_paren:
+        error_expect("'('")
+    else:
+        current_anchor = open_paren.end
+    if not decl_keyword:
+        error_expect("'let/var'")
+        decl = "let"
+    else:
+        current_anchor = decl_keyword.end
+        decl = decl_keyword.string
+    if not pattern:
+        error_expect("pattern")
+        pattern = ast.MatchAs(name=None)
+    else:
+        current_anchor = (pattern.end_lineno, pattern.end_col_offset)
+    if not in_keyword:
+        error_expect("'in'")
+    else:
+        current_anchor = _pos_of_anchor(in_keyword)[1]
+    if not expression:
+        error_expect("expression")
+        expression = ast.Constant(
+            value=Ellipsis,
+            lineno=current_anchor[0],
+            col_offset=current_anchor[1],
+            end_lineno=current_anchor[0],
+            end_col_offset=current_anchor[1] + 1,
+        )
+    else:
+        current_anchor = _pos_of_anchor(expression)[1]
+    if not close_paren:
+        error_expect("')'")
+    else:
+        current_anchor = close_paren.end
+    for_node = make_for_let_pattern(
+        parser,
+        decl_type=decl,
+        pattern=pattern,
+        iter=expression,
+        body=body,
+        orelse=[],
+        type_comment=None,
+        is_async=is_async,
+        **kwargs,
+    )
+    add_error_node(for_node, errors)
+    return for_node
+
+
+def recover_maybe_invalid_function_def_raw(
+    parser: Parser,
+    is_async: bool,
+    is_static: bool,
+    maybe_name: tuple[TokenInfo, bool] | None,
+    open_paren: TokenInfo | None,
+    args: ast.arguments,
+    close_paren: TokenInfo | None,
+    returns: ast.expr | None,
+    body: list[ast.stmt],
+    type_comment: str | None,
+    type_params: list[ast.type_param],
+    *,
+    open_anchor: PosNode | TokenInfo,
+    close_anchor: PosNode | TokenInfo,
+    **kwargs: Unpack[PosAttributes],
+) -> ast.FunctionDef | ast.AsyncFunctionDef:
+    error: SyntaxError | None = None
+    if not maybe_name:
+        start_pos, end_pos = _pos_of_anchor(open_anchor)
+        error = parser.build_expected_error("function name", start_pos, end_pos)
+        name = get_invalid_name()
+    else:
+        name, is_usable = maybe_name
+        if not is_usable:
+            error = parser.build_syntax_error(
+                f"keyword '{name.string}' cannot be used as function name",
+                name.start,
+                name.end,
+            )
+            name = get_invalid_name()
+    result = maybe_invalid_stmt(
+        parser,
+        open_paren,
+        close_paren,
+        node=make_function_def(
+            is_async=is_async,
+            is_static=is_static,
+            name=name,
+            args=args,
+            returns=returns,
+            body=body,
+            type_comment=type_comment,
+            type_params=type_params,
+            **kwargs,
+        ),
+        open_anchor=open_anchor,
+        close_anchor=close_anchor,
+    )
+    if error:
+        add_error_node(result, [error])
+    return result
+
+
+def recover_maybe_invalid_class_def_raw(
+    parser: Parser,
+    maybe_name: tuple[TokenInfo, bool] | None,
+    bases_parens: tuple[
+        TokenInfo, tuple[list[ast.expr], list[ast.keyword]], TokenInfo | None
+    ]
+    | None,
+    body: list[ast.stmt],
+    decorator_list: list[ast.expr],
+    type_params: list[ast.type_param],
+    *,
+    open_anchor: PosNode | TokenInfo,
+    **kwargs: Unpack[PosAttributes],
+) -> ast.ClassDef:
+    open_paren, (bases, keywords), close_paren = bases_parens or (None, ([], []), None)
+    close_anchor = (
+        bases[-1]
+        if bases
+        else (
+            type_params[-1]
+            if type_params
+            else (maybe_name if isinstance(maybe_name, TokenInfo) else open_anchor)
+        )
+    )
+    error: SyntaxError | None = None
+    if not maybe_name:
+        start_pos, end_pos = _pos_of_anchor(open_anchor)
+        error = parser.build_expected_error("class name", start_pos, end_pos)
+        name = get_invalid_name()
+    else:
+        name, is_usable = maybe_name
+        if not is_usable:
+            error = parser.build_syntax_error(
+                f"keyword '{name.string}' cannot be used as class name",
+                name.start,
+                name.end,
+            )
+            name = get_invalid_name()
+    class_def = make_class_def(
+        name=name,
+        bases=bases,
+        keywords=keywords,
+        body=body,
+        decorator_list=decorator_list,
+        type_params=type_params,
+        **kwargs,
+    )
+    if bases:
+        maybe_invalid_stmt(
+            parser,
+            open_paren,
+            close_paren,
+            node=class_def,
+            open_anchor=open_anchor,
+            close_anchor=close_anchor,
+        )
+    if error:
+        add_error_node(class_def, [error])
+    return class_def
+
+
+def _token_position_default(
+    tok: TokenInfo | str, **kwargs: Unpack[PosAttributes]
+) -> tuple[tuple[int, int], tuple[int, int]]:
+    if isinstance(tok, TokenInfo):
+        return (tok.start, tok.end)
+    else:
+        return unpack_pos_tuple(kwargs)
+
+
+def let_pattern_check[T: ast.AST](
+    parser: Parser,
+    decl_type: TokenInfo | str,
+    pattern_subjects: list[tuple[ast.pattern, ast.expr]],
+    node: T,
+    **kwargs: Unpack[PosAttributes],
+) -> T:
+    start_pos, end_pos = _token_position_default(decl_type, **kwargs)
+    decl_type_str = decl_type.string if isinstance(decl_type, TokenInfo) else decl_type
+    if decl_type_str != "let":
+        error = parser.build_syntax_error(
+            "declaration pattern must be 'let' declaration", start_pos, end_pos
+        )
+        add_error_node(node, [error])
+    if len(pattern_subjects) == 0:
+        error = parser.build_syntax_error(
+            "declaration pattern must have at least one pattern", start_pos, end_pos
+        )
+        add_error_node(node, [error])
+    return node
+
+
+def statement_panic_skip(
+    parser: Parser,
+    skip: list[TokenInfo],
+    sync: TokenInfo | str,
+    **kwargs: Unpack[PosAttributes],
+) -> list[ast.stmt]:
+    if isinstance(sync, TokenInfo):
+        skip.append(sync)
+    start_loc, end_loc = unpack_pos_tuple(kwargs)
+    if skip:
+        start_loc = skip[0].start
+        end_loc = skip[-1].end
+    # Record the skipped tokens for error recovery
+    error = parser.build_skip_tokens_error(skip, start_loc, end_loc)
+    result = ast.Pass(**kwargs)  # Error holder
+    add_error_node(result, [error])
+    return [result]
+
+
+class _ErrorGather(TyphonASTRawVisitor):
+    errors: list[SyntaxError]
+
+    def __init__(self):
+        self.errors = []
+        super().__init__()
+
+    def visit(self, node: ast.AST):
+        if errors := get_error_node(node):
+            print(f"Gathered errors from node {type(node).__name__}: {errors}")
+            self.errors.extend(errors)
+        self.generic_visit(node)
+
+
+def gather_errors(node: ast.AST):
+    gather = _ErrorGather()
+    gather.visit(node)
+    parse_errors = sorted(gather.errors, key=lambda e: (e.lineno, e.offset))
+    set_syntax_error(node, parse_errors)
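
The module above never raises during recovery: each helper pins a SyntaxError onto the offending AST node under a private attribute, and gather_errors sweeps the finished tree once, sorts by position, and hands the list to set_syntax_error. A minimal self-contained sketch of that attach-and-gather pattern, using only the standard ast module and hypothetical names (not Typhon's API):

import ast

_ERRORS = "_demo_error_node"  # hypothetical stand-in for _typh_error_node


def attach_error(node: ast.AST, error: SyntaxError) -> ast.AST:
    # Accumulate errors on the node itself; the AST shape is unchanged.
    setattr(node, _ERRORS, getattr(node, _ERRORS, []) + [error])
    return node


class ErrorGather(ast.NodeVisitor):
    def __init__(self) -> None:
        self.errors: list[SyntaxError] = []

    def visit(self, node: ast.AST) -> None:
        # Collect errors from every node, then keep walking.
        self.errors.extend(getattr(node, _ERRORS, []))
        self.generic_visit(node)


tree = ast.parse("x = 1")
attach_error(tree.body[0], SyntaxError("expected '{'"))
gatherer = ErrorGather()
gatherer.visit(tree)
print([str(e) for e in gatherer.errors])  # ["expected '{'"]
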
Typhon/Grammar/unparse_custom.py
@@ -0,0 +1,25 @@
+# type: ignore[all]
+# TODO: Never forget that the implementation here is a temporary hack.
+import ast
+from .typhon_ast import get_type_ignore_comment
+
+
+# Hack the ast._Unparser to create our CustomUnparser.
+# DO make a new class from scratch when we need to change more things.
+class CustomUnparser(ast._Unparser):
+    def __init__(self):
+        super().__init__()
+
+    def visit_match_case(self, node):
+        self.fill("case ")
+        self.traverse(node.pattern)
+        if node.guard:
+            self.write(" if ")
+            self.traverse(node.guard)
+        with self.block(extra=get_type_ignore_comment(node)):
+            self.traverse(node.body)
+
+
+def unparse_custom(node: ast.AST) -> str:
+    unparser = CustomUnparser()
+    return unparser.visit(node)
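
ast._Unparser is the private CPython class that ast.unparse drives, so an override like visit_match_case above can break across Python releases; that is what the TODO warns about. The technique itself is plain method overriding on the unparser; a toy sketch with a hypothetical override (illustrative only, not Typhon code):

import ast


class ParenUnparser(ast._Unparser):  # private API; may change between CPython versions
    def visit_Name(self, node: ast.Name) -> None:
        # Illustrative override: parenthesize every bare name.
        self.write(f"({node.id})")


tree = ast.parse("x = y + z")
print(ParenUnparser().visit(tree))  # (x) = (y) + (z)
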
Typhon/LanguageServer/__init__.py
@@ -0,0 +1,3 @@
+from .server import server
+
+__all__ = ["server"]
Typhon/LanguageServer/client/__init__.py
@@ -0,0 +1,42 @@
+from ...Driver.configs import get_language_backend
+from pygls.lsp.client import LanguageClient
+from lsprotocol import types
+from .pyrefly import (
+    create_pyrefly_client,
+    start_pyrefly_client,
+    configure_pyrefly_client_option,
+)
+from .pyright import (
+    create_pyright_client,
+    start_pyright_client,
+    configure_pyright_client_option,
+)
+
+
+def create_language_client() -> LanguageClient:
+    _lsp_backend = get_language_backend()
+    match _lsp_backend:
+        case "pyrefly":
+            return create_pyrefly_client()
+        case "pyright":
+            return create_pyright_client()
+
+
+async def start_language_client(client: LanguageClient):
+    _lsp_backend = get_language_backend()
+    match _lsp_backend:
+        case "pyrefly":
+            await start_pyrefly_client(client)
+        case "pyright":
+            await start_pyright_client(client)
+
+
+def configure_language_client_option(
+    param: types.InitializeParams,
+) -> types.InitializeParams:
+    _lsp_backend = get_language_backend()
+    match _lsp_backend:
+        case "pyrefly":
+            return configure_pyrefly_client_option(param)
+        case "pyright":
+            return configure_pyright_client_option(param)
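
Each match above covers only the two known backends and has no wildcard arm, so an unrecognized value from get_language_backend() would make create_language_client silently return None despite its LanguageClient annotation. A defensive variant (hypothetical, reusing the module's imports; not what the package ships) would fail loudly instead:

def create_language_client_checked() -> LanguageClient:
    backend = get_language_backend()
    match backend:
        case "pyrefly":
            return create_pyrefly_client()
        case "pyright":
            return create_pyright_client()
        case _:
            # Surface a misconfigured backend immediately instead of returning None.
            raise ValueError(f"unsupported language backend: {backend!r}")
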
Typhon/LanguageServer/client/pyrefly.py
@@ -0,0 +1,115 @@
+from cattrs.converters import Converter
+
+
+import sys
+import json
+import logging
+import traceback
+from typing import Any, Optional, Union
+from lsprotocol import types
+from cattrs.gen import make_dict_structure_fn
+from pygls.lsp.client import LanguageClient
+from pygls.protocol import default_converter
+
+from ...Driver.debugging import is_debug_verbose
+
+
+def client_converter_bugfix(client: LanguageClient) -> None:
+    """
+    Customize the client's cattrs converter to ignore unknown notebook fields.
+    """
+    converter: Converter = client.protocol._converter  # type: ignore
+    # Customize cattrs to ignore unknown fields:
+    # pyrefly may return newer LSP fields than the local lsprotocol supports.
+    # Do NOT override structure hooks (it breaks camelCase<->snake_case mapping,
+    # making the result all None):
+    # converter.register_structure_hook_factory(
+    #     lambda cls: hasattr(cls, "__attrs_attrs__"),
+    #     lambda cls: make_dict_structure_fn(  # type: ignore
+    #         cls,
+    #         converter,
+    #         _cattrs_forbid_extra_keys=False,  # type: ignore
+    #     ),
+    # )
+
+    # lsprotocol 2025.0.0 lacks a structure hook for the optional notebook filter union.
+    # This is required to handle `notebookDocumentSync.notebookSelector[*].notebook`.
+    NotebookDocumentFilterOptional = Optional[
+        Union[
+            str,
+            types.NotebookDocumentFilterNotebookType,
+            types.NotebookDocumentFilterScheme,
+            types.NotebookDocumentFilterPattern,
+        ]
+    ]
+
+    def _notebook_document_filter_hook(object_: Any, _: type) -> Any:
+        if object_ is None:
+            return None
+        if isinstance(object_, str):
+            return object_
+        if isinstance(object_, dict):
+            if "notebookType" in object_:
+                return converter.structure(
+                    object_, types.NotebookDocumentFilterNotebookType
+                )
+            if "scheme" in object_:
+                return converter.structure(object_, types.NotebookDocumentFilterScheme)
+            if "pattern" in object_:
+                return converter.structure(object_, types.NotebookDocumentFilterPattern)
+        return converter.structure(object_, types.NotebookDocumentFilterNotebookType)
+
+    converter.register_structure_hook(
+        NotebookDocumentFilterOptional, _notebook_document_filter_hook
+    )
+
+
+def create_pyrefly_client() -> LanguageClient:
+    client = LanguageClient("pyrefly-language-client", "v0.1.4")
+
+    client_converter_bugfix(client)
+    return client
+
+
+async def start_pyrefly_client(client: LanguageClient):
+    original_structure = client.protocol.structure_message
+    if is_debug_verbose():
+
+        def leaf_structure_message(data: Any):
+            logger = logging.getLogger("pygls.client.pyrefly")
+            try:
+                logger.debug(f"DEBUG: Raw data: {json.dumps(data, indent=2)}")
+                result = original_structure(data)
+                return result
+            except Exception as e:
+                logger.error(f"DEBUG: Failed to deserialize: {data}")
+                logger.error(traceback.format_exc())
+                raise e
+
+        client.protocol.structure_message = leaf_structure_message
+
+    # Also capture outgoing JSON. `structure_message` only sees inbound data.
+    logger = logging.getLogger("pygls.client.pyrefly")
+    original_send_data = client.protocol._send_data  # type: ignore[attr-defined]
+
+    def leaf_send_data(data: Any):
+        try:
+            if is_debug_verbose():
+                logger.debug(f"DEBUG: Outgoing data: {data}")
+        except Exception:
+            logger.error("DEBUG: Failed to serialize outgoing data")
+            logger.error(traceback.format_exc())
+        return original_send_data(data)
+
+    client.protocol._send_data = leaf_send_data  # type: ignore[attr-defined]
+
+    await client.start_io(  # type: ignore
+        sys.executable, "-m", "pyrefly", "lsp", "--verbose"
+    )
+
+
+def configure_pyrefly_client_option(
+    param: types.InitializeParams,
+) -> types.InitializeParams:
+    # No special configuration needed for Pyrefly
+    return param
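
The _notebook_document_filter_hook above is the usual cattrs answer to an untagged union: register one structure hook for the whole union and dispatch on whichever discriminating key the raw payload carries. The same mechanism in isolation, with hypothetical filter classes standing in for the lsprotocol types:

from typing import Any, Optional, Union

import attrs
from cattrs.converters import Converter


@attrs.define
class ByScheme:  # hypothetical stand-in for a discriminated filter class
    scheme: str


@attrs.define
class ByPattern:
    pattern: str


NotebookFilter = Optional[Union[str, ByScheme, ByPattern]]
converter = Converter()


def _filter_hook(obj: Any, _: type) -> Any:
    # Dispatch on the key the raw payload actually carries.
    if obj is None or isinstance(obj, str):
        return obj
    if "scheme" in obj:
        return converter.structure(obj, ByScheme)
    return converter.structure(obj, ByPattern)


converter.register_structure_hook(NotebookFilter, _filter_hook)
print(converter.structure({"scheme": "file"}, NotebookFilter))  # ByScheme(scheme='file')
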