jaclang 0.4.6__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (152)
  1. jaclang/__init__.py +5 -2
  2. jaclang/cli/cli.py +57 -10
  3. jaclang/cli/cmdreg.py +16 -9
  4. jaclang/compiler/__jac_gen__/jac_parser.py +11 -15
  5. jaclang/compiler/absyntree.py +53 -19
  6. jaclang/compiler/codeloc.py +3 -1
  7. jaclang/compiler/{transpiler.py → compile.py} +3 -2
  8. jaclang/compiler/constant.py +4 -0
  9. jaclang/compiler/parser.py +156 -108
  10. jaclang/compiler/passes/ir_pass.py +1 -0
  11. jaclang/compiler/passes/main/__init__.py +2 -1
  12. jaclang/compiler/passes/main/def_impl_match_pass.py +1 -0
  13. jaclang/compiler/passes/main/def_use_pass.py +1 -0
  14. jaclang/compiler/passes/main/import_pass.py +18 -18
  15. jaclang/compiler/passes/main/pyast_gen_pass.py +1228 -853
  16. jaclang/compiler/passes/main/pyast_load_pass.py +3 -1
  17. jaclang/compiler/passes/main/pybc_gen_pass.py +46 -0
  18. jaclang/compiler/passes/main/pyout_pass.py +6 -7
  19. jaclang/compiler/passes/main/schedules.py +5 -9
  20. jaclang/compiler/passes/main/sub_node_tab_pass.py +1 -0
  21. jaclang/compiler/passes/main/sym_tab_build_pass.py +21 -9
  22. jaclang/compiler/passes/main/tests/test_decl_def_match_pass.py +2 -1
  23. jaclang/compiler/passes/main/tests/test_def_use_pass.py +2 -1
  24. jaclang/compiler/passes/main/tests/test_import_pass.py +2 -1
  25. jaclang/compiler/passes/main/tests/test_pyast_build_pass.py +1 -0
  26. jaclang/compiler/passes/main/tests/test_pyast_gen_pass.py +15 -38
  27. jaclang/compiler/passes/main/tests/test_pybc_gen_pass.py +25 -0
  28. jaclang/compiler/passes/main/tests/test_sub_node_pass.py +1 -1
  29. jaclang/compiler/passes/main/tests/test_sym_tab_build_pass.py +2 -1
  30. jaclang/compiler/passes/main/tests/test_type_check_pass.py +17 -1
  31. jaclang/compiler/passes/main/type_check_pass.py +9 -6
  32. jaclang/compiler/passes/tool/__init__.py +1 -0
  33. jaclang/compiler/passes/tool/ast_printer_pass.py +1 -0
  34. jaclang/compiler/passes/tool/fuse_comments_pass.py +1 -1
  35. jaclang/compiler/passes/tool/jac_formatter_pass.py +69 -32
  36. jaclang/compiler/passes/tool/schedules.py +1 -0
  37. jaclang/compiler/passes/tool/sym_tab_printer_pass.py +1 -0
  38. jaclang/compiler/passes/tool/tests/test_ast_print_pass.py +2 -1
  39. jaclang/compiler/passes/tool/tests/test_fuse_comments_pass.py +1 -0
  40. jaclang/compiler/passes/tool/tests/test_jac_format_pass.py +4 -3
  41. jaclang/compiler/passes/tool/tests/test_symtab_print_pass.py +2 -1
  42. jaclang/compiler/passes/transform.py +1 -0
  43. jaclang/compiler/passes/utils/mypy_ast_build.py +203 -17
  44. jaclang/compiler/symtable.py +1 -0
  45. jaclang/compiler/tests/test_importer.py +3 -2
  46. jaclang/compiler/tests/test_parser.py +1 -0
  47. jaclang/compiler/tests/test_workspace.py +1 -0
  48. jaclang/compiler/workspace.py +18 -5
  49. jaclang/core/construct.py +9 -32
  50. jaclang/{compiler → core}/importer.py +95 -85
  51. jaclang/core/utils.py +17 -12
  52. jaclang/plugin/__init__.py +1 -0
  53. jaclang/plugin/default.py +145 -43
  54. jaclang/plugin/feature.py +65 -19
  55. jaclang/plugin/spec.py +56 -34
  56. jaclang/plugin/tests/test_features.py +9 -0
  57. jaclang/utils/helpers.py +1 -0
  58. jaclang/utils/lang_tools.py +13 -19
  59. jaclang/utils/tests/test_lang_tools.py +2 -1
  60. jaclang/utils/treeprinter.py +2 -1
  61. jaclang/vendor/lark/common.py +3 -1
  62. jaclang/vendor/lark/lexer.py +6 -12
  63. jaclang/vendor/lark/parsers/lalr_parser.py +1 -0
  64. jaclang/vendor/mypy/applytype.py +2 -1
  65. jaclang/vendor/mypy/binder.py +1 -1
  66. jaclang/vendor/mypy/build.py +7 -9
  67. jaclang/vendor/mypy/checker.py +57 -33
  68. jaclang/vendor/mypy/checkexpr.py +42 -29
  69. jaclang/vendor/mypy/checkmember.py +13 -1
  70. jaclang/vendor/mypy/checkpattern.py +1 -1
  71. jaclang/vendor/mypy/checkstrformat.py +2 -4
  72. jaclang/vendor/mypy/constraints.py +10 -5
  73. jaclang/vendor/mypy/dmypy_server.py +3 -3
  74. jaclang/vendor/mypy/dmypy_util.py +62 -3
  75. jaclang/vendor/mypy/errors.py +1 -1
  76. jaclang/vendor/mypy/evalexpr.py +1 -0
  77. jaclang/vendor/mypy/expandtype.py +29 -29
  78. jaclang/vendor/mypy/fastparse.py +51 -31
  79. jaclang/vendor/mypy/inspections.py +5 -3
  80. jaclang/vendor/mypy/join.py +4 -4
  81. jaclang/vendor/mypy/main.py +6 -6
  82. jaclang/vendor/mypy/message_registry.py +1 -2
  83. jaclang/vendor/mypy/messages.py +31 -23
  84. jaclang/vendor/mypy/metastore.py +1 -2
  85. jaclang/vendor/mypy/modulefinder.py +2 -22
  86. jaclang/vendor/mypy/nodes.py +22 -20
  87. jaclang/vendor/mypy/options.py +4 -0
  88. jaclang/vendor/mypy/parse.py +6 -2
  89. jaclang/vendor/mypy/patterns.py +6 -6
  90. jaclang/vendor/mypy/plugin.py +3 -1
  91. jaclang/vendor/mypy/plugins/attrs.py +52 -10
  92. jaclang/vendor/mypy/plugins/common.py +2 -1
  93. jaclang/vendor/mypy/plugins/enums.py +3 -2
  94. jaclang/vendor/mypy/plugins/functools.py +1 -0
  95. jaclang/vendor/mypy/renaming.py +1 -1
  96. jaclang/vendor/mypy/report.py +15 -15
  97. jaclang/vendor/mypy/semanal.py +22 -13
  98. jaclang/vendor/mypy/semanal_enum.py +1 -1
  99. jaclang/vendor/mypy/semanal_namedtuple.py +1 -2
  100. jaclang/vendor/mypy/semanal_shared.py +3 -6
  101. jaclang/vendor/mypy/semanal_typeddict.py +16 -5
  102. jaclang/vendor/mypy/server/astdiff.py +15 -9
  103. jaclang/vendor/mypy/server/astmerge.py +5 -5
  104. jaclang/vendor/mypy/stats.py +0 -5
  105. jaclang/vendor/mypy/stubdoc.py +1 -1
  106. jaclang/vendor/mypy/stubgen.py +12 -21
  107. jaclang/vendor/mypy/stubgenc.py +16 -8
  108. jaclang/vendor/mypy/stubtest.py +57 -48
  109. jaclang/vendor/mypy/stubutil.py +28 -15
  110. jaclang/vendor/mypy/subtypes.py +4 -4
  111. jaclang/vendor/mypy/test/helpers.py +2 -2
  112. jaclang/vendor/mypy/test/meta/test_parse_data.py +1 -0
  113. jaclang/vendor/mypy/test/meta/test_update_data.py +1 -0
  114. jaclang/vendor/mypy/test/testargs.py +1 -0
  115. jaclang/vendor/mypy/test/testcheck.py +4 -1
  116. jaclang/vendor/mypy/test/testconstraints.py +25 -7
  117. jaclang/vendor/mypy/test/testerrorstream.py +1 -0
  118. jaclang/vendor/mypy/test/testformatter.py +2 -2
  119. jaclang/vendor/mypy/test/testparse.py +6 -4
  120. jaclang/vendor/mypy/test/testpythoneval.py +1 -0
  121. jaclang/vendor/mypy/test/testreports.py +1 -0
  122. jaclang/vendor/mypy/test/teststubgen.py +1 -2
  123. jaclang/vendor/mypy/test/teststubtest.py +98 -4
  124. jaclang/vendor/mypy/test/testtypes.py +1 -1
  125. jaclang/vendor/mypy/test/testutil.py +22 -0
  126. jaclang/vendor/mypy/typeanal.py +302 -158
  127. jaclang/vendor/mypy/typeops.py +22 -13
  128. jaclang/vendor/mypy/types.py +33 -34
  129. jaclang/vendor/mypy/typestate.py +2 -2
  130. jaclang/vendor/mypy/util.py +7 -6
  131. jaclang/vendor/mypy/version.py +1 -1
  132. jaclang/vendor/mypyc/analysis/ircheck.py +1 -0
  133. jaclang/vendor/mypyc/codegen/emitfunc.py +5 -3
  134. jaclang/vendor/mypyc/codegen/emitmodule.py +12 -12
  135. jaclang/vendor/mypyc/codegen/emitwrapper.py +2 -2
  136. jaclang/vendor/mypyc/ir/class_ir.py +10 -6
  137. jaclang/vendor/mypyc/irbuild/builder.py +3 -4
  138. jaclang/vendor/mypyc/irbuild/function.py +5 -3
  139. jaclang/vendor/mypyc/irbuild/nonlocalcontrol.py +1 -2
  140. jaclang/vendor/mypyc/irbuild/prepare.py +6 -6
  141. jaclang/vendor/mypyc/primitives/registry.py +15 -5
  142. jaclang/vendor/mypyc/test/test_run.py +1 -2
  143. jaclang/vendor/mypyc/transform/uninit.py +3 -3
  144. jaclang/vendor/pluggy/_callers.py +1 -0
  145. jaclang/vendor/pluggy/_hooks.py +6 -10
  146. jaclang/vendor/pluggy/_result.py +1 -0
  147. jaclang/vendor/pluggy/_tracing.py +1 -0
  148. {jaclang-0.4.6.dist-info → jaclang-0.5.0.dist-info}/METADATA +1 -1
  149. {jaclang-0.4.6.dist-info → jaclang-0.5.0.dist-info}/RECORD +152 -150
  150. {jaclang-0.4.6.dist-info → jaclang-0.5.0.dist-info}/WHEEL +0 -0
  151. {jaclang-0.4.6.dist-info → jaclang-0.5.0.dist-info}/entry_points.txt +0 -0
  152. {jaclang-0.4.6.dist-info → jaclang-0.5.0.dist-info}/top_level.txt +0 -0

jaclang/vendor/mypy/parse.py

@@ -9,8 +9,9 @@ def parse(
     source: str | bytes,
     fnam: str,
     module: str | None,
-    errors: Errors | None,
+    errors: Errors,
     options: Options,
+    raise_on_error: bool = False,
 ) -> MypyFile:
     """Parse a source file, without doing any semantic analysis.

@@ -23,6 +24,9 @@ def parse(
     source = options.transform_source(source)
     import mypy.fastparse

-    return mypy.fastparse.parse(
+    tree = mypy.fastparse.parse(
         source, fnam=fnam, module=module, errors=errors, options=options
     )
+    if raise_on_error and errors.is_errors():
+        errors.raise_error()
+    return tree

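For context, the hunk above threads a new raise_on_error flag through mypy's parse() entry point. A minimal illustrative sketch of calling it (upstream mypy import paths are shown for brevity; the wheel vendors these modules under jaclang/vendor/mypy, and the Errors(options) constructor is an assumption about this vendored version):

    from mypy.errors import CompileError, Errors
    from mypy.options import Options
    from mypy.parse import parse

    options = Options()
    errors = Errors(options)
    try:
        # With raise_on_error=True, collected parse errors are raised as
        # CompileError instead of only being recorded on the Errors instance.
        tree = parse(
            b"def broken(:\n",
            fnam="demo.py",
            module="demo",
            errors=errors,
            options=options,
            raise_on_error=True,
        )
    except CompileError as err:
        print(err.messages)
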
jaclang/vendor/mypy/patterns.py

@@ -60,7 +60,7 @@ class ValuePattern(Pattern):

     expr: Expression

-    def __init__(self, expr: Expression):
+    def __init__(self, expr: Expression) -> None:
         super().__init__()
         self.expr = expr

@@ -72,7 +72,7 @@ class SingletonPattern(Pattern):
     # This can be exactly True, False or None
     value: bool | None

-    def __init__(self, value: bool | None):
+    def __init__(self, value: bool | None) -> None:
         super().__init__()
         self.value = value

@@ -85,7 +85,7 @@ class SequencePattern(Pattern):

     patterns: list[Pattern]

-    def __init__(self, patterns: list[Pattern]):
+    def __init__(self, patterns: list[Pattern]) -> None:
         super().__init__()
         self.patterns = patterns

@@ -98,7 +98,7 @@ class StarredPattern(Pattern):
     # a name.
     capture: NameExpr | None

-    def __init__(self, capture: NameExpr | None):
+    def __init__(self, capture: NameExpr | None) -> None:
         super().__init__()
         self.capture = capture

@@ -113,7 +113,7 @@ class MappingPattern(Pattern):

     def __init__(
         self, keys: list[Expression], values: list[Pattern], rest: NameExpr | None
-    ):
+    ) -> None:
         super().__init__()
         assert len(keys) == len(values)
         self.keys = keys
@@ -138,7 +138,7 @@ class ClassPattern(Pattern):
         positionals: list[Pattern],
         keyword_keys: list[str],
         keyword_values: list[Pattern],
-    ):
+    ) -> None:
         super().__init__()
         assert len(keyword_keys) == len(keyword_values)
         self.class_ref = class_ref

jaclang/vendor/mypy/plugin.py

@@ -515,7 +515,9 @@ class AttributeContext(NamedTuple):
 # A context for a class hook that modifies the class definition.
 class ClassDefContext(NamedTuple):
     cls: ClassDef  # The class definition
-    reason: Expression  # The expression being applied (decorator, metaclass, base class)
+    reason: (
+        Expression  # The expression being applied (decorator, metaclass, base class)
+    )
     api: SemanticAnalyzerPluginInterface

jaclang/vendor/mypy/plugins/attrs.py

@@ -123,6 +123,7 @@ class Attribute:
     def __init__(
         self,
         name: str,
+        alias: str | None,
         info: TypeInfo,
         has_default: bool,
         init: bool,
@@ -132,6 +133,7 @@ class Attribute:
         init_type: Type | None,
     ) -> None:
         self.name = name
+        self.alias = alias
         self.info = info
         self.has_default = has_default
         self.init = init
@@ -192,21 +194,23 @@ class Attribute:
         arg_kind = ARG_OPT if self.has_default else ARG_POS

         # Attrs removes leading underscores when creating the __init__ arguments.
-        return Argument(
-            Var(self.name.lstrip("_"), init_type), init_type, None, arg_kind
-        )
+        name = self.alias or self.name.lstrip("_")
+        return Argument(Var(name, init_type), init_type, None, arg_kind)

     def serialize(self) -> JsonDict:
         """Serialize this object so it can be saved and restored."""
         return {
             "name": self.name,
+            "alias": self.alias,
             "has_default": self.has_default,
             "init": self.init,
             "kw_only": self.kw_only,
             "has_converter": self.converter is not None,
-            "converter_init_type": self.converter.init_type.serialize()
-            if self.converter and self.converter.init_type
-            else None,
+            "converter_init_type": (
+                self.converter.init_type.serialize()
+                if self.converter and self.converter.init_type
+                else None
+            ),
             "context_line": self.context.line,
             "context_column": self.context.column,
             "init_type": self.init_type.serialize() if self.init_type else None,
@@ -230,6 +234,7 @@ class Attribute:

         return Attribute(
             data["name"],
+            data["alias"],
             info,
             data["has_default"],
             data["init"],
@@ -332,6 +337,8 @@ def attr_class_maker_callback(
     it will add an __init__ or all the compare methods.
     For frozen=True it will turn the attrs into properties.

+    Hashability will be set according to https://www.attrs.org/en/stable/hashing.html.
+
     See https://www.attrs.org/en/stable/how-does-it-work.html for information on how attrs works.

     If this returns False, some required metadata was not ready yet and we need another
@@ -343,6 +350,9 @@ def attr_class_maker_callback(
     frozen = _get_frozen(ctx, frozen_default)
     order = _determine_eq_order(ctx)
     slots = _get_decorator_bool_argument(ctx, "slots", slots_default)
+    hashable = _get_decorator_bool_argument(
+        ctx, "hash", False
+    ) or _get_decorator_bool_argument(ctx, "unsafe_hash", False)

     auto_attribs = _get_decorator_optional_bool_argument(
         ctx, "auto_attribs", auto_attribs_default
@@ -385,10 +395,13 @@ def attr_class_maker_callback(
     adder = MethodAdder(ctx)
     # If __init__ is not being generated, attrs still generates it as __attrs_init__ instead.
     _add_init(ctx, attributes, adder, "__init__" if init else ATTRS_INIT_NAME)
+
     if order:
         _add_order(ctx, adder)
     if frozen:
         _make_frozen(ctx, attributes)
+    elif not hashable:
+        _remove_hashability(ctx)

     return True

@@ -534,6 +547,7 @@ def _attributes_from_assignment(
     or if auto_attribs is enabled also like this:
         x: type
         x: type = default_value
+        x: type = attr.ib(...)
     """
     for lvalue in stmt.lvalues:
         lvalues, rvalues = _parse_assignments(lvalue, stmt)
@@ -604,7 +618,9 @@ def _attribute_from_auto_attrib(
     has_rhs = not isinstance(rvalue, TempNode)
     sym = ctx.cls.info.names.get(name)
     init_type = sym.type if sym else None
-    return Attribute(name, ctx.cls.info, has_rhs, True, kw_only, None, stmt, init_type)
+    return Attribute(
+        name, None, ctx.cls.info, has_rhs, True, kw_only, None, stmt, init_type
+    )


 def _attribute_from_attrib_maker(
@@ -670,9 +686,21 @@ def _attribute_from_attrib_maker(
         converter = convert
     converter_info = _parse_converter(ctx, converter)

+    # Custom alias might be defined:
+    alias = None
+    alias_expr = _get_argument(rvalue, "alias")
+    if alias_expr:
+        alias = ctx.api.parse_str_literal(alias_expr)
+        if alias is None:
+            ctx.api.fail(
+                '"alias" argument to attrs field must be a string literal',
+                rvalue,
+                code=LITERAL_REQ,
+            )
     name = unmangle(lhs.name)
     return Attribute(
         name,
+        alias,
         ctx.cls.info,
         attr_has_default,
         init,
@@ -989,6 +1017,18 @@ def _add_match_args(
     )


+def _remove_hashability(ctx: mypy.plugin.ClassDefContext) -> None:
+    """Remove hashability from a class."""
+    add_attribute_to_class(
+        ctx.api,
+        ctx.cls,
+        "__hash__",
+        NoneType(),
+        is_classvar=True,
+        overwrite_existing=True,
+    )
+
+
 class MethodAdder:
     """Helper to add methods to a TypeInfo.

@@ -1110,9 +1150,11 @@ def _meet_fields(types: list[Mapping[str, Type]]) -> Mapping[str, Type]:
         field_to_types[name].append(typ)

     return {
-        name: get_proper_type(reduce(meet_types, f_types))
-        if len(f_types) == len(types)
-        else UninhabitedType()
+        name: (
+            get_proper_type(reduce(meet_types, f_types))
+            if len(f_types) == len(types)
+            else UninhabitedType()
+        )
         for name, f_types in field_to_types.items()
     }

jaclang/vendor/mypy/plugins/common.py

@@ -417,6 +417,7 @@ def add_attribute_to_class(
     override_allow_incompatible: bool = False,
     fullname: str | None = None,
     is_classvar: bool = False,
+    overwrite_existing: bool = False,
 ) -> Var:
     """
     Adds a new attribute to a class definition.
@@ -426,7 +427,7 @@ def add_attribute_to_class(

     # NOTE: we would like the plugin generated node to dominate, but we still
     # need to keep any existing definitions so they get semantically analyzed.
-    if name in info.names:
+    if name in info.names and not overwrite_existing:
         # Get a nice unique name instead.
         r_name = get_unique_redefinition_name(name, info.names)
         info.names[r_name] = info.names[name]

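For context, the attrs-plugin hunks above model two behaviors of the attrs library itself: the alias argument of attrs.field() (attrs >= 22.2) becomes the __init__ parameter name, and non-frozen classes without hash/unsafe_hash are unhashable, which the new _remove_hashability helper mirrors by overwriting __hash__ via the overwrite_existing flag added to add_attribute_to_class(). An illustrative sketch of the runtime behavior being modeled, not part of the diff:

    import attrs

    @attrs.define
    class Point:
        _x: int = attrs.field(alias="x_coord")  # alias replaces the usual underscore-stripped name
        y: int = 0

    p = Point(x_coord=1, y=2)  # without the alias, the parameter would have been "x"
    # attrs.define defaults to eq=True, frozen=False, so attrs sets __hash__ = None:
    # hash(p) raises TypeError, which the plugin now reflects on the analyzed class.
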
jaclang/vendor/mypy/plugins/enums.py

@@ -10,6 +10,7 @@ Note that this file does *not* contain all special-cased logic related to enums:
 we actually bake some of it directly in to the semantic analysis layer (see
 semanal_enum.py).
 """
+
 from __future__ import annotations

 from typing import Final, Iterable, Sequence, TypeVar, cast
@@ -177,11 +178,11 @@ def enum_value_callback(ctx: mypy.plugin.AttributeContext) -> Type:
         for n in stnodes
         if n is None or not n.implicit
     )
-    proper_types = list(
+    proper_types = [
         _infer_value_type_with_auto_fallback(ctx, t)
         for t in node_types
         if t is None or not isinstance(t, CallableType)
-    )
+    ]
     underlying_type = _first(proper_types)
     if underlying_type is None:
         return ctx.default_attr_type

jaclang/vendor/mypy/plugins/functools.py

@@ -1,4 +1,5 @@
 """Plugin for supporting the functools standard library module."""
+
 from __future__ import annotations

 from typing import Final, NamedTuple

jaclang/vendor/mypy/renaming.py

@@ -270,7 +270,7 @@ class VariableRenameVisitor(TraverserVisitor):
         This will be called at the end of a scope.
         """
         is_func = self.scope_kinds[-1] == FUNCTION
-        for name, refs in self.refs[-1].items():
+        for refs in self.refs[-1].values():
             if len(refs) == 1:
                 # Only one definition -- no renaming needed.
                 continue

jaclang/vendor/mypy/report.py

@@ -31,18 +31,18 @@ try:
 except ImportError:
     LXML_INSTALLED = False

-type_of_any_name_map: Final[
-    collections.OrderedDict[int, str]
-] = collections.OrderedDict(
-    [
-        (TypeOfAny.unannotated, "Unannotated"),
-        (TypeOfAny.explicit, "Explicit"),
-        (TypeOfAny.from_unimported_type, "Unimported"),
-        (TypeOfAny.from_omitted_generics, "Omitted Generics"),
-        (TypeOfAny.from_error, "Error"),
-        (TypeOfAny.special_form, "Special Form"),
-        (TypeOfAny.implementation_artifact, "Implementation Artifact"),
-    ]
+type_of_any_name_map: Final[collections.OrderedDict[int, str]] = (
+    collections.OrderedDict(
+        [
+            (TypeOfAny.unannotated, "Unannotated"),
+            (TypeOfAny.explicit, "Explicit"),
+            (TypeOfAny.from_unimported_type, "Unimported"),
+            (TypeOfAny.from_omitted_generics, "Omitted Generics"),
+            (TypeOfAny.from_error, "Error"),
+            (TypeOfAny.special_form, "Special Form"),
+            (TypeOfAny.implementation_artifact, "Implementation Artifact"),
+        ]
+    )
 )

 ReporterClasses: _TypeAlias = Dict[
@@ -101,7 +101,7 @@ class AbstractReporter(metaclass=ABCMeta):
     def __init__(self, reports: Reports, output_dir: str) -> None:
         self.output_dir = output_dir
         if output_dir != "<memory>":
-            stats.ensure_dir_exists(output_dir)
+            os.makedirs(output_dir, exist_ok=True)

     @abstractmethod
     def on_file(
@@ -766,7 +766,7 @@ class XmlReporter(AbstractXmlReporter):
         if path.startswith(".."):
             return
         out_path = os.path.join(self.output_dir, "xml", path + ".xml")
-        stats.ensure_dir_exists(os.path.dirname(out_path))
+        os.makedirs(os.path.dirname(out_path), exist_ok=True)
         last_xml.write(out_path, encoding="utf-8")

     def on_finish(self) -> None:
@@ -811,7 +811,7 @@ class XsltHtmlReporter(AbstractXmlReporter):
         if path.startswith(".."):
             return
         out_path = os.path.join(self.output_dir, "html", path + ".html")
-        stats.ensure_dir_exists(os.path.dirname(out_path))
+        os.makedirs(os.path.dirname(out_path), exist_ok=True)
         transformed_html = bytes(self.xslt_html(last_xml, ext=self.param_html))
         with open(out_path, "wb") as out_file:
             out_file.write(transformed_html)

jaclang/vendor/mypy/semanal.py

@@ -234,9 +234,9 @@ from mypy.semanal_typeddict import TypedDictAnalyzer
 from mypy.tvar_scope import TypeVarLikeScope
 from mypy.typeanal import (
     SELF_TYPE_NAMES,
+    FindTypeVarVisitor,
     TypeAnalyser,
     TypeVarLikeList,
-    TypeVarLikeQuery,
     analyze_type_alias,
     check_for_explicit_any,
     detect_diverging_alias,
@@ -2147,6 +2147,11 @@ class SemanticAnalyzer(
         assert isinstance(sym.node, TypeVarExpr)
         return t.name, sym.node

+    def find_type_var_likes(self, t: Type) -> TypeVarLikeList:
+        visitor = FindTypeVarVisitor(self, self.tvar_scope)
+        t.accept(visitor)
+        return visitor.type_var_likes
+
     def get_all_bases_tvars(
         self, base_type_exprs: list[Expression], removed: list[int]
     ) -> TypeVarLikeList:
@@ -2159,7 +2164,7 @@ class SemanticAnalyzer(
             except TypeTranslationError:
                 # This error will be caught later.
                 continue
-            base_tvars = base.accept(TypeVarLikeQuery(self, self.tvar_scope))
+            base_tvars = self.find_type_var_likes(base)
             tvars.extend(base_tvars)
         return remove_dups(tvars)

@@ -2179,7 +2184,7 @@ class SemanticAnalyzer(
             except TypeTranslationError:
                 # This error will be caught later.
                 continue
-            base_tvars = base.accept(TypeVarLikeQuery(self, self.tvar_scope))
+            base_tvars = self.find_type_var_likes(base)
             tvars.extend(base_tvars)
         tvars = remove_dups(
             tvars
@@ -2281,8 +2286,16 @@ class SemanticAnalyzer(
             if (
                 isinstance(base_expr, RefExpr)
                 and base_expr.fullname in TYPED_NAMEDTUPLE_NAMES + TPDICT_NAMES
+            ) or (
+                isinstance(base_expr, CallExpr)
+                and isinstance(base_expr.callee, RefExpr)
+                and base_expr.callee.fullname in TPDICT_NAMES
             ):
                 # Ignore magic bases for now.
+                # For example:
+                # class Foo(TypedDict): ...  # RefExpr
+                # class Foo(NamedTuple): ...  # RefExpr
+                # class Foo(TypedDict("Foo", {"a": int})): ...  # CallExpr
                 continue

             try:
@@ -3721,7 +3734,7 @@ class SemanticAnalyzer(
             )
             return None, [], set(), [], False

-        found_type_vars = typ.accept(TypeVarLikeQuery(self, self.tvar_scope))
+        found_type_vars = self.find_type_var_likes(typ)
         tvar_defs: list[TypeVarLikeType] = []
         namespace = self.qualified_name(name)
         with self.tvar_scope_frame(self.tvar_scope.class_frame(namespace)):
@@ -4437,7 +4450,7 @@ class SemanticAnalyzer(
         if len(call.args) < 1:
             self.fail(f"Too few arguments for {typevarlike_type}()", context)
             return False
-        if not isinstance(call.args[0], StrExpr) or not call.arg_kinds[0] == ARG_POS:
+        if not isinstance(call.args[0], StrExpr) or call.arg_kinds[0] != ARG_POS:
             self.fail(
                 f"{typevarlike_type}() expects a string literal as first argument",
                 context,
@@ -5306,9 +5319,7 @@ class SemanticAnalyzer(
         """Bind name expression to a symbol table node."""
         if isinstance(sym.node, TypeVarExpr) and self.tvar_scope.get_binding(sym):
             self.fail(
-                '"{}" is a type variable and only valid in type '
-                "context".format(expr.name),
-                expr,
+                f'"{expr.name}" is a type variable and only valid in type context', expr
             )
         elif isinstance(sym.node, PlaceholderNode):
             self.process_placeholder(expr.name, "name", expr)
@@ -6645,9 +6656,7 @@ class SemanticAnalyzer(
         line = (
             debug_context.line
             if debug_context
-            else self.statement.line
-            if self.statement
-            else -1
+            else self.statement.line if self.statement else -1
         )
         self.deferral_debug_context.append((self.cur_mod_id, line))

@@ -7273,13 +7282,13 @@ class SemanticAnalyzer(
     ) -> tuple[str, ...]:
         if not isinstance(arg, TupleExpr):
             self.fail('"field_specifiers" argument must be a tuple literal', arg)
-            return tuple()
+            return ()

         names = []
         for specifier in arg.items:
             if not isinstance(specifier, RefExpr):
                 self.fail('"field_specifiers" must only contain identifiers', specifier)
-                return tuple()
+                return ()
             names.append(specifier.fullname)
         return tuple(names)

jaclang/vendor/mypy/semanal_enum.py

@@ -148,7 +148,7 @@ class EnumCallAnalyzer:
         Return a tuple of fields, values, was there an error.
         """
         args = call.args
-        if not all([arg_kind in [ARG_POS, ARG_NAMED] for arg_kind in call.arg_kinds]):
+        if not all(arg_kind in [ARG_POS, ARG_NAMED] for arg_kind in call.arg_kinds):
             return self.fail_enum_call_arg(
                 f"Unexpected arguments to {class_name}()", call
             )

jaclang/vendor/mypy/semanal_namedtuple.py

@@ -85,8 +85,7 @@ NAMEDTUPLE_PROHIBITED_NAMES: Final = (
 )

 NAMEDTUP_CLASS_ERROR: Final = (
-    "Invalid statement in NamedTuple definition; "
-    'expected "field_name: field_type [= default]"'
+    'Invalid statement in NamedTuple definition; expected "field_name: field_type [= default]"'
 )

 SELF_TVAR_NAME: Final = "_NT"

jaclang/vendor/mypy/semanal_shared.py

@@ -318,8 +318,7 @@ def calculate_tuple_fallback(typ: TupleType) -> None:
 class _NamedTypeCallback(Protocol):
     def __call__(
         self, fully_qualified_name: str, args: list[Type] | None = None
-    ) -> Instance:
-        ...
+    ) -> Instance: ...


 def paramspec_args(
@@ -469,8 +468,7 @@ require_bool_literal_argument(
     expression: Expression,
     name: str,
     default: Literal[True] | Literal[False],
-) -> bool:
-    ...
+) -> bool: ...


 @overload
@@ -479,8 +477,7 @@
     expression: Expression,
     name: str,
     default: None = None,
-) -> bool | None:
-    ...
+) -> bool | None: ...


 def require_bool_literal_argument(

jaclang/vendor/mypy/semanal_typeddict.py

@@ -50,7 +50,7 @@ from mypy.types import (
 )

 TPDICT_CLASS_ERROR: Final = (
-    "Invalid statement in TypedDict definition; " 'expected "field_name: field_type"'
+    'Invalid statement in TypedDict definition; expected "field_name: field_type"'
 )


@@ -81,6 +81,8 @@ class TypedDictAnalyzer:
         """
         possible = False
         for base_expr in defn.base_type_exprs:
+            if isinstance(base_expr, CallExpr):
+                base_expr = base_expr.callee
             if isinstance(base_expr, IndexExpr):
                 base_expr = base_expr.base
             if isinstance(base_expr, RefExpr):
@@ -126,7 +128,13 @@ class TypedDictAnalyzer:
         typeddict_bases: list[Expression] = []
         typeddict_bases_set = set()
         for expr in defn.base_type_exprs:
-            if isinstance(expr, RefExpr) and expr.fullname in TPDICT_NAMES:
+            ok, maybe_type_info, _ = self.check_typeddict(expr, None, False)
+            if ok and maybe_type_info is not None:
+                # expr is a CallExpr
+                info = maybe_type_info
+                typeddict_bases_set.add(info.fullname)
+                typeddict_bases.append(expr)
+            elif isinstance(expr, RefExpr) and expr.fullname in TPDICT_NAMES:
                 if "TypedDict" not in typeddict_bases_set:
                     typeddict_bases_set.add("TypedDict")
                 else:
@@ -185,12 +193,11 @@ class TypedDictAnalyzer:
         required_keys: set[str],
         ctx: Context,
     ) -> None:
+        base_args: list[Type] = []
         if isinstance(base, RefExpr):
             assert isinstance(base.node, TypeInfo)
             info = base.node
-            base_args: list[Type] = []
-        else:
-            assert isinstance(base, IndexExpr)
+        elif isinstance(base, IndexExpr):
             assert isinstance(base.base, RefExpr)
             assert isinstance(base.base.node, TypeInfo)
             info = base.base.node
@@ -198,6 +205,10 @@ class TypedDictAnalyzer:
             if args is None:
                 return
             base_args = args
+        else:
+            assert isinstance(base, CallExpr)
+            assert isinstance(base.analyzed, TypedDictExpr)
+            info = base.analyzed.info

         assert info.typeddict_type is not None
         base_typed_dict = info.typeddict_type

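For context, the semanal.py and semanal_typeddict.py hunks above let the analyzer accept a TypedDict created with the functional (call) syntax directly in a base-class list, matching the CallExpr example in the added comment. An illustrative sketch of the pattern that is now analyzed (the names below are made up for the example):

    from typing import TypedDict

    # A RefExpr base (a name bound to a TypedDict) was already handled;
    # the inline CallExpr base below is the newly supported form.
    class Movie(TypedDict("_MovieBase", {"name": str})):
        year: int

    m: Movie = {"name": "Blade Runner", "year": 1982}
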
jaclang/vendor/mypy/server/astdiff.py

@@ -261,9 +261,11 @@ def snapshot_definition(
             node.is_static,
             signature,
             is_trivial_body,
-            dataclass_transform_spec.serialize()
-            if dataclass_transform_spec is not None
-            else None,
+            (
+                dataclass_transform_spec.serialize()
+                if dataclass_transform_spec is not None
+                else None
+            ),
         )
     elif isinstance(node, Var):
         return ("Var", common, snapshot_optional_type(node.type), node.is_final)
@@ -309,9 +311,11 @@ def snapshot_definition(
             tuple(snapshot_type(tdef) for tdef in node.defn.type_vars),
             [snapshot_type(base) for base in node.bases],
             [snapshot_type(p) for p in node._promote],
-            dataclass_transform_spec.serialize()
-            if dataclass_transform_spec is not None
-            else None,
+            (
+                dataclass_transform_spec.serialize()
+                if dataclass_transform_spec is not None
+                else None
+            ),
         )
         prefix = node.fullname
         symbol_table = snapshot_symbol_table(prefix, node.names)
@@ -397,9 +401,11 @@ class SnapshotTypeVisitor(TypeVisitor[SnapshotItem]):
             "Instance",
             encode_optional_str(typ.type.fullname),
             snapshot_types(typ.args),
-            ("None",)
-            if typ.last_known_value is None
-            else snapshot_type(typ.last_known_value),
+            (
+                ("None",)
+                if typ.last_known_value is None
+                else snapshot_type(typ.last_known_value)
+            ),
         )

     def visit_type_var(self, typ: TypeVarType) -> SnapshotItem:

jaclang/vendor/mypy/server/astmerge.py

@@ -175,9 +175,9 @@ def replacement_map_from_symbol_table(
                 )
                 replacements.update(type_repl)
                 if node.node.special_alias and new_node.node.special_alias:
-                    replacements[
-                        new_node.node.special_alias
-                    ] = node.node.special_alias
+                    replacements[new_node.node.special_alias] = (
+                        node.node.special_alias
+                    )
     return replacements


@@ -398,7 +398,7 @@ class NodeReplaceVisitor(TraverserVisitor):
         # have bodies in the AST so we need to iterate over their symbol
         # tables separately, unlike normal classes.
         self.process_type_info(info)
-        for name, node in info.names.items():
+        for node in info.names.values():
             if node.node:
                 node.node.accept(self)

@@ -553,7 +553,7 @@ class TypeReplaceVisitor(SyntheticTypeVisitor[None]):
 def replace_nodes_in_symbol_table(
     symbols: SymbolTable, replacements: dict[SymbolNode, SymbolNode]
 ) -> None:
-    for name, node in symbols.items():
+    for node in symbols.values():
         if node.node:
             if node.node in replacements:
                 new = replacements[node.node]

jaclang/vendor/mypy/stats.py

@@ -495,11 +495,6 @@ def is_complex(t: Type) -> bool:
     return is_generic(t) or isinstance(t, (FunctionLike, TupleType, TypeVarType))


-def ensure_dir_exists(dir: str) -> None:
-    if not os.path.exists(dir):
-        os.makedirs(dir)
-
-
 def is_special_form_any(t: AnyType) -> bool:
     return get_original_any(t).type_of_any == TypeOfAny.special_form