Typhon-Language 0.1.2-py3-none-any.whl → 0.1.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. Typhon/Driver/configs.py +14 -0
  2. Typhon/Driver/debugging.py +148 -5
  3. Typhon/Driver/diagnostic.py +4 -3
  4. Typhon/Driver/language_server.py +25 -0
  5. Typhon/Driver/run.py +1 -1
  6. Typhon/Driver/translate.py +16 -11
  7. Typhon/Driver/utils.py +39 -1
  8. Typhon/Grammar/_typhon_parser.py +2920 -2718
  9. Typhon/Grammar/parser.py +80 -53
  10. Typhon/Grammar/parser_helper.py +68 -87
  11. Typhon/Grammar/syntax_errors.py +41 -20
  12. Typhon/Grammar/token_factory_custom.py +541 -485
  13. Typhon/Grammar/tokenizer_custom.py +52 -0
  14. Typhon/Grammar/typhon_ast.py +754 -76
  15. Typhon/Grammar/typhon_ast_error.py +438 -0
  16. Typhon/Grammar/unparse_custom.py +25 -0
  17. Typhon/LanguageServer/__init__.py +3 -0
  18. Typhon/LanguageServer/client/__init__.py +42 -0
  19. Typhon/LanguageServer/client/pyrefly.py +115 -0
  20. Typhon/LanguageServer/client/pyright.py +173 -0
  21. Typhon/LanguageServer/semantic_tokens.py +446 -0
  22. Typhon/LanguageServer/server.py +376 -0
  23. Typhon/LanguageServer/utils.py +65 -0
  24. Typhon/SourceMap/ast_match_based_map.py +199 -152
  25. Typhon/SourceMap/ast_matching.py +102 -87
  26. Typhon/SourceMap/datatype.py +275 -264
  27. Typhon/SourceMap/defined_name_retrieve.py +145 -0
  28. Typhon/Transform/comprehension_to_function.py +2 -5
  29. Typhon/Transform/const_member_to_final.py +12 -7
  30. Typhon/Transform/extended_patterns.py +139 -0
  31. Typhon/Transform/forbidden_statements.py +25 -0
  32. Typhon/Transform/if_while_let.py +122 -11
  33. Typhon/Transform/inline_statement_block_capture.py +22 -15
  34. Typhon/Transform/optional_operators_to_checked.py +14 -6
  35. Typhon/Transform/placeholder_to_function.py +0 -1
  36. Typhon/Transform/record_to_dataclass.py +22 -238
  37. Typhon/Transform/scope_check_rename.py +109 -29
  38. Typhon/Transform/transform.py +16 -12
  39. Typhon/Transform/type_abbrev_desugar.py +11 -15
  40. Typhon/Transform/type_annotation_check_expand.py +2 -2
  41. Typhon/Transform/utils/__init__.py +0 -0
  42. Typhon/Transform/utils/imports.py +83 -0
  43. Typhon/Transform/{utils.py → utils/jump_away.py} +2 -38
  44. Typhon/Transform/utils/make_class.py +135 -0
  45. Typhon/Transform/visitor.py +25 -0
  46. Typhon/Typing/pyrefly.py +145 -0
  47. Typhon/Typing/pyright.py +141 -144
  48. Typhon/Typing/result_diagnostic.py +1 -1
  49. Typhon/__main__.py +15 -1
  50. {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/METADATA +13 -6
  51. typhon_language-0.1.4.dist-info/RECORD +65 -0
  52. {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/WHEEL +1 -1
  53. typhon_language-0.1.4.dist-info/licenses/LICENSE +201 -0
  54. typhon_language-0.1.2.dist-info/RECORD +0 -48
  55. typhon_language-0.1.2.dist-info/licenses/LICENSE +0 -21
  56. {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/entry_points.txt +0 -0
  57. {typhon_language-0.1.2.dist-info → typhon_language-0.1.4.dist-info}/top_level.txt +0 -0

Typhon/Grammar/tokenizer_custom.py
@@ -2,10 +2,13 @@ from typing import NamedTuple
 from tokenize import TokenInfo, OP, NAME
 import tokenize
 import token
+import io
 from pegen.tokenizer import Tokenizer as PegenTokenizer
 from typing import override
 from .line_break import line_breakable_after, line_breakable_before
 from .typhon_ast import get_postfix_operator_temp_name
+from ..Driver.debugging import debug_verbose_print
+from .token_factory_custom import token_stream_factory, generate_tokens_ignore_error
 
 
 # Combine sequencial 2 tokens (optionally without space between) into 1
@@ -186,6 +189,7 @@ class TokenizerCustom(PegenTokenizer):
         while True:
             tok = next(self._tokengen)
             if self._is_token_to_skip(tok):
+                self._all_tokens.append(tok)
                 continue
             if tok.type == token.ENDMARKER:
                 self._end_tok = tok
@@ -209,6 +213,7 @@ class TokenizerCustom(PegenTokenizer):
 
     def _commit_token(self, tok: TokenInfo) -> None:
         self._tokens.append(tok)
+        self._all_tokens.append(tok)
         if not self._path and tok.start[0] not in self._lines:
            self._lines[tok.start[0]] = tok.line
 
@@ -295,3 +300,50 @@ class TokenizerCustom(PegenTokenizer):
                 continue
             self._commit_token(tok)
         return self._tokens[self._index]
+
+    def read_all_tokens(self) -> list[TokenInfo]:
+        """Return all tokens including comments."""
+        # Force to consume all tokens
+        debug_verbose_print("Reading all tokens for tokenizer.")
+        while tok := self.getnext():
+            debug_verbose_print(f"  Token: {tok}")
+            if tok.type == token.ENDMARKER:
+                break
+        debug_verbose_print("Finished reading all tokens.")
+        self.reset(0)
+        return sorted(self._all_tokens, key=lambda t: t.start)
+
+
+def tokenizer_for_file(file_path: str) -> TokenizerCustom:
+    """Tokenize the specified file."""
+    with open(file_path) as f:
+        tok_stream = token_stream_factory(f.readline)
+        tokenizer = TokenizerCustom(tok_stream, path=file_path)
+    return tokenizer
+
+
+def tokenizer_for_string(source: str) -> TokenizerCustom:
+    """Tokenize the specified string."""
+    tok_stream = token_stream_factory(io.StringIO(source).readline)
+    tokenizer = TokenizerCustom(tok_stream)
+    return tokenizer
+
+
+def show_token(
+    source: str, show_typhon_token: bool = True, show_python_token: bool = True
+):
+    if show_python_token:
+        print("Tokens of Python tokenizer:")
+        for tok in generate_tokens_ignore_error(io.StringIO(source).readline):
+            print(f"  {tok}")
+    if show_typhon_token:
+        print("Tokens of Typhon Token Factory:")
+        tok_stream = token_stream_factory(io.StringIO(source).readline)
+        for tok in tok_stream:
+            print(f"  {tok}")
+        print("Tokens of Typhon Custom tokenizer:")
+        tok_stream = token_stream_factory(io.StringIO(source).readline)
+        tokenizer = TokenizerCustom(tok_stream, verbose=True)
+        tokens = tokenizer.read_all_tokens()
+        for tok in tokens:
+            print(f"  {tok}")