betterproto2-compiler 0.0.3__tar.gz → 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/PKG-INFO +1 -1
  2. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/pyproject.toml +12 -8
  3. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/compile/importing.py +9 -14
  4. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/pydantic/google/protobuf/__init__.py +3 -3
  5. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/std/google/protobuf/__init__.py +1 -2
  6. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/compiler.py +5 -2
  7. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/models.py +54 -52
  8. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/module_validation.py +2 -7
  9. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/parser.py +11 -16
  10. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/typing_compiler.py +8 -12
  11. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/templates/header.py.j2 +2 -0
  12. betterproto2_compiler-0.0.3/src/betterproto2_compiler/_types.py +0 -13
  13. betterproto2_compiler-0.0.3/src/betterproto2_compiler/enum.py +0 -180
  14. betterproto2_compiler-0.0.3/src/betterproto2_compiler/grpc/grpclib_client.py +0 -172
  15. betterproto2_compiler-0.0.3/src/betterproto2_compiler/grpc/grpclib_server.py +0 -32
  16. betterproto2_compiler-0.0.3/src/betterproto2_compiler/grpc/util/async_channel.py +0 -190
  17. betterproto2_compiler-0.0.3/src/betterproto2_compiler/lib/std/__init__.py +0 -0
  18. betterproto2_compiler-0.0.3/src/betterproto2_compiler/lib/std/google/__init__.py +0 -0
  19. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/LICENSE.md +0 -0
  20. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/README.md +0 -0
  21. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/__init__.py +0 -0
  22. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/casing.py +0 -0
  23. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/compile/__init__.py +0 -0
  24. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/compile/naming.py +0 -0
  25. {betterproto2_compiler-0.0.3/src/betterproto2_compiler/grpc → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib}/__init__.py +0 -0
  26. {betterproto2_compiler-0.0.3/src/betterproto2_compiler/grpc/util → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib/google}/__init__.py +0 -0
  27. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/google/protobuf/__init__.py +0 -0
  28. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/google/protobuf/compiler/__init__.py +0 -0
  29. {betterproto2_compiler-0.0.3/src/betterproto2_compiler/lib → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib/pydantic}/__init__.py +0 -0
  30. {betterproto2_compiler-0.0.3/src/betterproto2_compiler/lib → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib/pydantic}/google/__init__.py +0 -0
  31. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/pydantic/google/protobuf/compiler/__init__.py +0 -0
  32. {betterproto2_compiler-0.0.3/src/betterproto2_compiler/lib/pydantic → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib/std}/__init__.py +0 -0
  33. {betterproto2_compiler-0.0.3/src/betterproto2_compiler/lib/pydantic → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib/std}/google/__init__.py +0 -0
  34. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py +0 -0
  35. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/__init__.py +0 -0
  36. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/__main__.py +0 -0
  37. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/main.py +0 -0
  38. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/plugin.bat +0 -0
  39. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/py.typed +0 -0
  40. {betterproto2_compiler-0.0.3 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/templates/template.py.j2 +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: betterproto2_compiler
3
- Version: 0.0.3
3
+ Version: 0.1.0
4
4
  Summary: Compiler for betterproto2
5
5
  Home-page: https://github.com/betterproto/python-betterproto2-compiler
6
6
  License: MIT
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "betterproto2_compiler"
3
- version = "0.0.3"
3
+ version = "0.1.0"
4
4
  description = "Compiler for betterproto2"
5
5
  authors = ["Adrien Vannson <adrien.vannson@protonmail.com>", "Daniel G. Taylor <danielgtaylor@gmail.com>"]
6
6
  readme = "README.md"
@@ -34,10 +34,10 @@ pytest = "^6.2.5"
34
34
  protobuf = "^4"
35
35
 
36
36
  [tool.poetry.scripts]
37
- protoc-gen-python_betterproto = "betterproto2_compiler.plugin:main"
37
+ protoc-gen-python_betterproto2 = "betterproto2_compiler.plugin:main"
38
38
 
39
39
  [tool.ruff]
40
- extend-exclude = ["tests/output_*"]
40
+ extend-exclude = ["tests/output_*", "src/betterproto2_compiler/lib"]
41
41
  target-version = "py310"
42
42
  line-length = 120
43
43
 
@@ -52,7 +52,11 @@ select = [
52
52
  "SIM102", # Simplify return or yield statements
53
53
  "SIM103", # Simplify list/set/dict comprehensions
54
54
 
55
+ "UP",
56
+
55
57
  "I",
58
+
59
+ "COM812", # Trailing commas
56
60
  ]
57
61
 
58
62
 
@@ -78,8 +82,8 @@ sequence = ["_format", "_sort-imports"]
78
82
  help = "Format the source code, and sort the imports"
79
83
 
80
84
  [tool.poe.tasks.check]
81
- sequence = ["_check-format", "_check-imports"]
82
- help = "Check that the source code is formatted and the imports sorted"
85
+ sequence = ["_check-format", "_check-ruff-lint"]
86
+ help = "Check that the source code is formatted and the code passes the linter"
83
87
 
84
88
  [tool.poe.tasks._format]
85
89
  cmd = "ruff format src tests"
@@ -93,9 +97,9 @@ help = "Sort the imports"
93
97
  cmd = "ruff format --diff src tests"
94
98
  help = "Check that the source code is formatted"
95
99
 
96
- [tool.poe.tasks._check-imports]
97
- cmd = "ruff check --select I src tests"
98
- help = "Check that the imports are sorted"
100
+ [tool.poe.tasks._check-ruff-lint]
101
+ cmd = "ruff check src tests"
102
+ help = "Check the code with the Ruff linter"
99
103
 
100
104
  [tool.poe.tasks.generate_lib]
101
105
  cmd = """
@@ -3,11 +3,6 @@ from __future__ import annotations
3
3
  import os
4
4
  from typing import (
5
5
  TYPE_CHECKING,
6
- Dict,
7
- List,
8
- Set,
9
- Tuple,
10
- Type,
11
6
  )
12
7
 
13
8
  from ..casing import safe_snake_case
@@ -18,7 +13,7 @@ if TYPE_CHECKING:
18
13
  from ..plugin.models import PluginRequestCompiler
19
14
  from ..plugin.typing_compiler import TypingCompiler
20
15
 
21
- WRAPPER_TYPES: Dict[str, Type] = {
16
+ WRAPPER_TYPES: dict[str, type] = {
22
17
  ".google.protobuf.DoubleValue": google_protobuf.DoubleValue,
23
18
  ".google.protobuf.FloatValue": google_protobuf.FloatValue,
24
19
  ".google.protobuf.Int32Value": google_protobuf.Int32Value,
@@ -31,7 +26,7 @@ WRAPPER_TYPES: Dict[str, Type] = {
31
26
  }
32
27
 
33
28
 
34
- def parse_source_type_name(field_type_name: str, request: "PluginRequestCompiler") -> Tuple[str, str]:
29
+ def parse_source_type_name(field_type_name: str, request: PluginRequestCompiler) -> tuple[str, str]:
35
30
  """
36
31
  Split full source type name into package and type name.
37
32
  E.g. 'root.package.Message' -> ('root.package', 'Message')
@@ -77,7 +72,7 @@ def get_type_reference(
77
72
  imports: set,
78
73
  source_type: str,
79
74
  typing_compiler: TypingCompiler,
80
- request: "PluginRequestCompiler",
75
+ request: PluginRequestCompiler,
81
76
  unwrap: bool = True,
82
77
  pydantic: bool = False,
83
78
  ) -> str:
@@ -98,8 +93,8 @@ def get_type_reference(
98
93
 
99
94
  source_package, source_type = parse_source_type_name(source_type, request)
100
95
 
101
- current_package: List[str] = package.split(".") if package else []
102
- py_package: List[str] = source_package.split(".") if source_package else []
96
+ current_package: list[str] = package.split(".") if package else []
97
+ py_package: list[str] = source_package.split(".") if source_package else []
103
98
  py_type: str = pythonize_class_name(source_type)
104
99
 
105
100
  compiling_google_protobuf = current_package == ["google", "protobuf"]
@@ -122,7 +117,7 @@ def get_type_reference(
122
117
  return reference_cousin(current_package, imports, py_package, py_type)
123
118
 
124
119
 
125
- def reference_absolute(imports: Set[str], py_package: List[str], py_type: str) -> str:
120
+ def reference_absolute(imports: set[str], py_package: list[str], py_type: str) -> str:
126
121
  """
127
122
  Returns a reference to a python type located in the root, i.e. sys.path.
128
123
  """
@@ -139,7 +134,7 @@ def reference_sibling(py_type: str) -> str:
139
134
  return f"{py_type}"
140
135
 
141
136
 
142
- def reference_descendent(current_package: List[str], imports: Set[str], py_package: List[str], py_type: str) -> str:
137
+ def reference_descendent(current_package: list[str], imports: set[str], py_package: list[str], py_type: str) -> str:
143
138
  """
144
139
  Returns a reference to a python type in a package that is a descendent of the
145
140
  current package, and adds the required import that is aliased to avoid name
@@ -157,7 +152,7 @@ def reference_descendent(current_package: List[str], imports: Set[str], py_packa
157
152
  return f"{string_import}.{py_type}"
158
153
 
159
154
 
160
- def reference_ancestor(current_package: List[str], imports: Set[str], py_package: List[str], py_type: str) -> str:
155
+ def reference_ancestor(current_package: list[str], imports: set[str], py_package: list[str], py_type: str) -> str:
161
156
  """
162
157
  Returns a reference to a python type in a package which is an ancestor to the
163
158
  current package, and adds the required import that is aliased (if possible) to avoid
@@ -178,7 +173,7 @@ def reference_ancestor(current_package: List[str], imports: Set[str], py_package
178
173
  return string_alias
179
174
 
180
175
 
181
- def reference_cousin(current_package: List[str], imports: Set[str], py_package: List[str], py_type: str) -> str:
176
+ def reference_cousin(current_package: list[str], imports: set[str], py_package: list[str], py_type: str) -> str:
182
177
  """
183
178
  Returns a reference to a python type in a package that is not descendent, ancestor
184
179
  or sibling, and adds the required import that is aliased to avoid name conflicts.
@@ -2401,13 +2401,13 @@ class Value(betterproto2_compiler.Message):
2401
2401
  )
2402
2402
  """Represents a null value."""
2403
2403
 
2404
- number_value: Optional[float] = betterproto2_compiler.double_field(2, optional=True, group="kind")
2404
+ number_value: float | None = betterproto2_compiler.double_field(2, optional=True, group="kind")
2405
2405
  """Represents a double value."""
2406
2406
 
2407
- string_value: Optional[str] = betterproto2_compiler.string_field(3, optional=True, group="kind")
2407
+ string_value: str | None = betterproto2_compiler.string_field(3, optional=True, group="kind")
2408
2408
  """Represents a string value."""
2409
2409
 
2410
- bool_value: Optional[bool] = betterproto2_compiler.bool_field(4, optional=True, group="kind")
2410
+ bool_value: bool | None = betterproto2_compiler.bool_field(4, optional=True, group="kind")
2411
2411
  """Represents a boolean value."""
2412
2412
 
2413
2413
  struct_value: Optional["Struct"] = betterproto2_compiler.message_field(5, optional=True, group="kind")
@@ -78,7 +78,6 @@ from typing import (
78
78
  Dict,
79
79
  List,
80
80
  Mapping,
81
- Optional,
82
81
  )
83
82
 
84
83
  import betterproto2
@@ -1022,7 +1021,7 @@ class FieldDescriptorProto(betterproto2.Message):
1022
1021
  TODO(kenton): Base-64 encode?
1023
1022
  """
1024
1023
 
1025
- oneof_index: Optional[int] = betterproto2.int32_field(9, optional=True)
1024
+ oneof_index: int | None = betterproto2.int32_field(9, optional=True)
1026
1025
  """
1027
1026
  If set, gives the index of a oneof in the containing type's oneof_decl
1028
1027
  list. This field is a member of that oneof.
@@ -1,6 +1,7 @@
1
1
  import os.path
2
2
  import subprocess
3
3
  import sys
4
+ from importlib import metadata
4
5
 
5
6
  from .module_validation import ModuleValidator
6
7
 
@@ -14,7 +15,7 @@ except ImportError as err:
14
15
  "Please ensure that you've installed betterproto as "
15
16
  '`pip install "betterproto[compiler]"` so that compiler dependencies '
16
17
  "are included."
17
- "\033[0m"
18
+ "\033[0m",
18
19
  )
19
20
  raise SystemExit(1)
20
21
 
@@ -24,6 +25,8 @@ from .models import OutputTemplate
24
25
  def outputfile_compiler(output_file: OutputTemplate) -> str:
25
26
  templates_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "templates"))
26
27
 
28
+ version = metadata.version("betterproto2_compiler")
29
+
27
30
  env = jinja2.Environment(
28
31
  trim_blocks=True,
29
32
  lstrip_blocks=True,
@@ -35,7 +38,7 @@ def outputfile_compiler(output_file: OutputTemplate) -> str:
35
38
  header_template = env.get_template("header.py.j2")
36
39
 
37
40
  code = body_template.render(output_file=output_file)
38
- code = header_template.render(output_file=output_file) + "\n" + code
41
+ code = header_template.render(output_file=output_file, version=version) + "\n" + code
39
42
 
40
43
  # Sort imports, delete unused ones
41
44
  code = subprocess.check_output(
@@ -31,18 +31,12 @@ reference to `A` to `B`'s `fields` attribute.
31
31
 
32
32
  import builtins
33
33
  import re
34
+ from collections.abc import Iterable, Iterator
34
35
  from dataclasses import (
35
36
  dataclass,
36
37
  field,
37
38
  )
38
39
  from typing import (
39
- Dict,
40
- Iterable,
41
- Iterator,
42
- List,
43
- Optional,
44
- Set,
45
- Type,
46
40
  Union,
47
41
  )
48
42
 
@@ -59,6 +53,7 @@ from betterproto2_compiler.lib.google.protobuf import (
59
53
  FieldDescriptorProto,
60
54
  FieldDescriptorProtoLabel,
61
55
  FieldDescriptorProtoType,
56
+ FieldDescriptorProtoType as FieldType,
62
57
  FileDescriptorProto,
63
58
  MethodDescriptorProto,
64
59
  )
@@ -146,7 +141,7 @@ PROTO_PACKED_TYPES = (
146
141
 
147
142
  def get_comment(
148
143
  proto_file: "FileDescriptorProto",
149
- path: List[int],
144
+ path: list[int],
150
145
  ) -> str:
151
146
  for sci_loc in proto_file.source_code_info.location:
152
147
  if list(sci_loc.path) == path:
@@ -182,10 +177,10 @@ class ProtoContentBase:
182
177
 
183
178
  source_file: FileDescriptorProto
184
179
  typing_compiler: TypingCompiler
185
- path: List[int]
180
+ path: list[int]
186
181
  parent: Union["betterproto2.Message", "OutputTemplate"]
187
182
 
188
- __dataclass_fields__: Dict[str, object]
183
+ __dataclass_fields__: dict[str, object]
189
184
 
190
185
  def __post_init__(self) -> None:
191
186
  """Checks that no fake default fields were left as placeholders."""
@@ -225,10 +220,10 @@ class ProtoContentBase:
225
220
  @dataclass
226
221
  class PluginRequestCompiler:
227
222
  plugin_request_obj: CodeGeneratorRequest
228
- output_packages: Dict[str, "OutputTemplate"] = field(default_factory=dict)
223
+ output_packages: dict[str, "OutputTemplate"] = field(default_factory=dict)
229
224
 
230
225
  @property
231
- def all_messages(self) -> List["MessageCompiler"]:
226
+ def all_messages(self) -> list["MessageCompiler"]:
232
227
  """All of the messages in this request.
233
228
 
234
229
  Returns
@@ -250,11 +245,11 @@ class OutputTemplate:
250
245
 
251
246
  parent_request: PluginRequestCompiler
252
247
  package_proto_obj: FileDescriptorProto
253
- input_files: List[str] = field(default_factory=list)
254
- imports_end: Set[str] = field(default_factory=set)
255
- messages: Dict[str, "MessageCompiler"] = field(default_factory=dict)
256
- enums: Dict[str, "EnumDefinitionCompiler"] = field(default_factory=dict)
257
- services: Dict[str, "ServiceCompiler"] = field(default_factory=dict)
248
+ input_files: list[str] = field(default_factory=list)
249
+ imports_end: set[str] = field(default_factory=set)
250
+ messages: dict[str, "MessageCompiler"] = field(default_factory=dict)
251
+ enums: dict[str, "EnumDefinitionCompiler"] = field(default_factory=dict)
252
+ services: dict[str, "ServiceCompiler"] = field(default_factory=dict)
258
253
  pydantic_dataclasses: bool = False
259
254
  output: bool = True
260
255
  typing_compiler: TypingCompiler = field(default_factory=DirectImportTypingCompiler)
@@ -290,9 +285,9 @@ class MessageCompiler(ProtoContentBase):
290
285
  typing_compiler: TypingCompiler
291
286
  parent: Union["MessageCompiler", OutputTemplate] = PLACEHOLDER
292
287
  proto_obj: DescriptorProto = PLACEHOLDER
293
- path: List[int] = PLACEHOLDER
294
- fields: List[Union["FieldCompiler", "MessageCompiler"]] = field(default_factory=list)
295
- builtins_types: Set[str] = field(default_factory=set)
288
+ path: list[int] = PLACEHOLDER
289
+ fields: list[Union["FieldCompiler", "MessageCompiler"]] = field(default_factory=list)
290
+ builtins_types: set[str] = field(default_factory=set)
296
291
 
297
292
  def __post_init__(self) -> None:
298
293
  # Add message to output file
@@ -328,11 +323,9 @@ class MessageCompiler(ProtoContentBase):
328
323
  @property
329
324
  def has_message_field(self) -> bool:
330
325
  return any(
331
- (
332
- field.proto_obj.type in PROTO_MESSAGE_TYPES
333
- for field in self.fields
334
- if isinstance(field.proto_obj, FieldDescriptorProto)
335
- )
326
+ field.proto_obj.type in PROTO_MESSAGE_TYPES
327
+ for field in self.fields
328
+ if isinstance(field.proto_obj, FieldDescriptorProto)
336
329
  )
337
330
 
338
331
 
@@ -347,7 +340,7 @@ def is_map(proto_field_obj: FieldDescriptorProto, parent_message: DescriptorProt
347
340
  map_entry = f"{proto_field_obj.name.replace('_', '').lower()}entry"
348
341
  if message_type == map_entry:
349
342
  for nested in parent_message.nested_type: # parent message
350
- if nested.name.replace("_", "").lower() == map_entry and nested.options.map_entry:
343
+ if nested.name.replace("_", "").lower() == map_entry and nested.options and nested.options.map_entry:
351
344
  return True
352
345
  return False
353
346
 
@@ -374,8 +367,8 @@ def is_oneof(proto_field_obj: FieldDescriptorProto) -> bool:
374
367
  class FieldCompiler(ProtoContentBase):
375
368
  source_file: FileDescriptorProto
376
369
  typing_compiler: TypingCompiler
377
- path: List[int] = PLACEHOLDER
378
- builtins_types: Set[str] = field(default_factory=set)
370
+ path: list[int] = PLACEHOLDER
371
+ builtins_types: set[str] = field(default_factory=set)
379
372
 
380
373
  parent: MessageCompiler = PLACEHOLDER
381
374
  proto_obj: FieldDescriptorProto = PLACEHOLDER
@@ -390,13 +383,16 @@ class FieldCompiler(ProtoContentBase):
390
383
  """Construct string representation of this field as a field."""
391
384
  name = f"{self.py_name}"
392
385
  field_args = ", ".join(([""] + self.betterproto_field_args) if self.betterproto_field_args else [])
393
- betterproto_field_type = f"betterproto2.{self.field_type}_field({self.proto_obj.number}{field_args})"
386
+
387
+ betterproto_field_type = (
388
+ f"betterproto2.field({self.proto_obj.number}, betterproto2.{str(self.field_type)}{field_args})"
389
+ )
394
390
  if self.py_name in dir(builtins):
395
391
  self.parent.builtins_types.add(self.py_name)
396
392
  return f'{name}: "{self.annotation}" = {betterproto_field_type}'
397
393
 
398
394
  @property
399
- def betterproto_field_args(self) -> List[str]:
395
+ def betterproto_field_args(self) -> list[str]:
400
396
  args = []
401
397
  if self.field_wraps:
402
398
  args.append(f"wraps={self.field_wraps}")
@@ -404,9 +400,9 @@ class FieldCompiler(ProtoContentBase):
404
400
  args.append("optional=True")
405
401
  if self.repeated:
406
402
  args.append("repeated=True")
407
- if self.field_type == "enum":
403
+ if self.field_type == FieldType.TYPE_ENUM:
408
404
  t = self.py_type
409
- args.append(f"enum_default_value=lambda: {t}.try_value(0)")
405
+ args.append(f"default_factory=lambda: {t}.try_value(0)")
410
406
  return args
411
407
 
412
408
  @property
@@ -416,7 +412,7 @@ class FieldCompiler(ProtoContentBase):
416
412
  )
417
413
 
418
414
  @property
419
- def field_wraps(self) -> Optional[str]:
415
+ def field_wraps(self) -> str | None:
420
416
  """Returns betterproto wrapped field type or None."""
421
417
  match_wrapper = re.match(r"\.google\.protobuf\.(.+)Value$", self.proto_obj.type_name)
422
418
  if match_wrapper:
@@ -428,17 +424,19 @@ class FieldCompiler(ProtoContentBase):
428
424
  @property
429
425
  def repeated(self) -> bool:
430
426
  return self.proto_obj.label == FieldDescriptorProtoLabel.LABEL_REPEATED and not is_map(
431
- self.proto_obj, self.parent
427
+ self.proto_obj,
428
+ self.parent,
432
429
  )
433
430
 
434
431
  @property
435
432
  def optional(self) -> bool:
436
- return self.proto_obj.proto3_optional or (self.field_type == "message" and not self.repeated)
433
+ # TODO not for maps
434
+ return self.proto_obj.proto3_optional or (self.field_type == FieldType.TYPE_MESSAGE and not self.repeated)
437
435
 
438
436
  @property
439
- def field_type(self) -> str:
440
- """String representation of proto field type."""
441
- return FieldDescriptorProtoType(self.proto_obj.type).name.lower().replace("type_", "")
437
+ def field_type(self) -> FieldType:
438
+ # TODO it should be possible to remove constructor
439
+ return FieldType(self.proto_obj.type)
442
440
 
443
441
  @property
444
442
  def packed(self) -> bool:
@@ -500,7 +498,7 @@ class OneOfFieldCompiler(FieldCompiler):
500
498
  return True
501
499
 
502
500
  @property
503
- def betterproto_field_args(self) -> List[str]:
501
+ def betterproto_field_args(self) -> list[str]:
504
502
  args = super().betterproto_field_args
505
503
  group = self.parent.proto_obj.oneof_decl[self.proto_obj.oneof_index].name
506
504
  args.append(f'group="{group}"')
@@ -509,8 +507,8 @@ class OneOfFieldCompiler(FieldCompiler):
509
507
 
510
508
  @dataclass
511
509
  class MapEntryCompiler(FieldCompiler):
512
- py_k_type: Optional[Type] = None
513
- py_v_type: Optional[Type] = None
510
+ py_k_type: type | None = None
511
+ py_v_type: type | None = None
514
512
  proto_k_type: str = ""
515
513
  proto_v_type: str = ""
516
514
 
@@ -547,13 +545,17 @@ class MapEntryCompiler(FieldCompiler):
547
545
 
548
546
  raise ValueError("can't find enum")
549
547
 
550
- @property
551
- def betterproto_field_args(self) -> List[str]:
552
- return [f"betterproto2.{self.proto_k_type}", f"betterproto2.{self.proto_v_type}"]
553
-
554
- @property
555
- def field_type(self) -> str:
556
- return "map"
548
+ def get_field_string(self) -> str:
549
+ """Construct string representation of this field as a field."""
550
+ betterproto_field_type = (
551
+ f"betterproto2.field({self.proto_obj.number}, "
552
+ "betterproto2.TYPE_MAP, "
553
+ f"map_types=(betterproto2.{self.proto_k_type}, "
554
+ f"betterproto2.{self.proto_v_type}))"
555
+ )
556
+ if self.py_name in dir(builtins):
557
+ self.parent.builtins_types.add(self.py_name)
558
+ return f'{self.py_name}: "{self.annotation}" = {betterproto_field_type}'
557
559
 
558
560
  @property
559
561
  def annotation(self) -> str:
@@ -569,7 +571,7 @@ class EnumDefinitionCompiler(MessageCompiler):
569
571
  """Representation of a proto Enum definition."""
570
572
 
571
573
  proto_obj: EnumDescriptorProto = PLACEHOLDER
572
- entries: List["EnumDefinitionCompiler.EnumEntry"] = PLACEHOLDER
574
+ entries: list["EnumDefinitionCompiler.EnumEntry"] = PLACEHOLDER
573
575
 
574
576
  @dataclass(unsafe_hash=True)
575
577
  class EnumEntry:
@@ -597,8 +599,8 @@ class ServiceCompiler(ProtoContentBase):
597
599
  source_file: FileDescriptorProto
598
600
  parent: OutputTemplate = PLACEHOLDER
599
601
  proto_obj: DescriptorProto = PLACEHOLDER
600
- path: List[int] = PLACEHOLDER
601
- methods: List["ServiceMethodCompiler"] = field(default_factory=list)
602
+ path: list[int] = PLACEHOLDER
603
+ methods: list["ServiceMethodCompiler"] = field(default_factory=list)
602
604
 
603
605
  def __post_init__(self) -> None:
604
606
  # Add service to output file
@@ -619,7 +621,7 @@ class ServiceMethodCompiler(ProtoContentBase):
619
621
  source_file: FileDescriptorProto
620
622
  parent: ServiceCompiler
621
623
  proto_obj: MethodDescriptorProto
622
- path: List[int] = PLACEHOLDER
624
+ path: list[int] = PLACEHOLDER
623
625
 
624
626
  def __post_init__(self) -> None:
625
627
  # Add method to service
@@ -1,15 +1,10 @@
1
1
  import re
2
2
  from collections import defaultdict
3
+ from collections.abc import Iterator
3
4
  from dataclasses import (
4
5
  dataclass,
5
6
  field,
6
7
  )
7
- from typing import (
8
- Dict,
9
- Iterator,
10
- List,
11
- Tuple,
12
- )
13
8
 
14
9
 
15
10
  @dataclass
@@ -17,7 +12,7 @@ class ModuleValidator:
17
12
  line_iterator: Iterator[str]
18
13
  line_number: int = field(init=False, default=0)
19
14
 
20
- collisions: Dict[str, List[Tuple[int, str]]] = field(init=False, default_factory=lambda: defaultdict(list))
15
+ collisions: dict[str, list[tuple[int, str]]] = field(init=False, default_factory=lambda: defaultdict(list))
21
16
 
22
17
  def add_import(self, imp: str, number: int, full_line: str):
23
18
  """
@@ -1,12 +1,6 @@
1
1
  import pathlib
2
2
  import sys
3
- from typing import (
4
- Generator,
5
- List,
6
- Set,
7
- Tuple,
8
- Union,
9
- )
3
+ from collections.abc import Generator
10
4
 
11
5
  from betterproto2_compiler.lib.google.protobuf import (
12
6
  DescriptorProto,
@@ -45,13 +39,13 @@ from .typing_compiler import (
45
39
 
46
40
  def traverse(
47
41
  proto_file: FileDescriptorProto,
48
- ) -> Generator[Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None]:
42
+ ) -> Generator[tuple[EnumDescriptorProto | DescriptorProto, list[int]], None, None]:
49
43
  # Todo: Keep information about nested hierarchy
50
44
  def _traverse(
51
- path: List[int],
52
- items: Union[List[EnumDescriptorProto], List[DescriptorProto]],
45
+ path: list[int],
46
+ items: list[EnumDescriptorProto] | list[DescriptorProto],
53
47
  prefix: str = "",
54
- ) -> Generator[Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None]:
48
+ ) -> Generator[tuple[EnumDescriptorProto | DescriptorProto, list[int]], None, None]:
55
49
  for i, item in enumerate(items):
56
50
  # Adjust the name since we flatten the hierarchy.
57
51
  # Todo: don't change the name, but include full name in returned tuple
@@ -82,7 +76,8 @@ def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
82
76
  if output_package_name not in request_data.output_packages:
83
77
  # Create a new output if there is no output for this package
84
78
  request_data.output_packages[output_package_name] = OutputTemplate(
85
- parent_request=request_data, package_proto_obj=proto_file
79
+ parent_request=request_data,
80
+ package_proto_obj=proto_file,
86
81
  )
87
82
  # Add this input file to the output corresponding to this package
88
83
  request_data.output_packages[output_package_name].input_files.append(proto_file)
@@ -144,7 +139,7 @@ def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
144
139
  service.ready()
145
140
 
146
141
  # Generate output files
147
- output_paths: Set[pathlib.Path] = set()
142
+ output_paths: set[pathlib.Path] = set()
148
143
  for output_package_name, output_package in request_data.output_packages.items():
149
144
  if not output_package.output:
150
145
  continue
@@ -158,7 +153,7 @@ def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
158
153
  name=str(output_path),
159
154
  # Render and then format the output file
160
155
  content=outputfile_compiler(output_file=output_package),
161
- )
156
+ ),
162
157
  )
163
158
 
164
159
  # Make each output directory a package with __init__ file
@@ -183,7 +178,7 @@ def _make_one_of_field_compiler(
183
178
  source_file: "FileDescriptorProto",
184
179
  parent: MessageCompiler,
185
180
  proto_obj: "FieldDescriptorProto",
186
- path: List[int],
181
+ path: list[int],
187
182
  ) -> FieldCompiler:
188
183
  return OneOfFieldCompiler(
189
184
  source_file=source_file,
@@ -196,7 +191,7 @@ def _make_one_of_field_compiler(
196
191
 
197
192
  def read_protobuf_type(
198
193
  item: DescriptorProto,
199
- path: List[int],
194
+ path: list[int],
200
195
  source_file: "FileDescriptorProto",
201
196
  output_package: OutputTemplate,
202
197
  ) -> None:
@@ -1,15 +1,11 @@
1
1
  import abc
2
+ import builtins
2
3
  from collections import defaultdict
4
+ from collections.abc import Iterator
3
5
  from dataclasses import (
4
6
  dataclass,
5
7
  field,
6
8
  )
7
- from typing import (
8
- Dict,
9
- Iterator,
10
- Optional,
11
- Set,
12
- )
13
9
 
14
10
 
15
11
  class TypingCompiler(metaclass=abc.ABCMeta):
@@ -42,7 +38,7 @@ class TypingCompiler(metaclass=abc.ABCMeta):
42
38
  raise NotImplementedError
43
39
 
44
40
  @abc.abstractmethod
45
- def imports(self) -> Dict[str, Optional[Set[str]]]:
41
+ def imports(self) -> builtins.dict[str, set[str] | None]:
46
42
  """
47
43
  Returns either the direct import as a key with none as value, or a set of
48
44
  values to import from the key.
@@ -63,7 +59,7 @@ class TypingCompiler(metaclass=abc.ABCMeta):
63
59
 
64
60
  @dataclass
65
61
  class DirectImportTypingCompiler(TypingCompiler):
66
- _imports: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set))
62
+ _imports: dict[str, set[str]] = field(default_factory=lambda: defaultdict(set))
67
63
 
68
64
  def optional(self, type_: str) -> str:
69
65
  self._imports["typing"].add("Optional")
@@ -93,7 +89,7 @@ class DirectImportTypingCompiler(TypingCompiler):
93
89
  self._imports["typing"].add("AsyncIterator")
94
90
  return f"AsyncIterator[{type_}]"
95
91
 
96
- def imports(self) -> Dict[str, Optional[Set[str]]]:
92
+ def imports(self) -> builtins.dict[str, set[str] | None]:
97
93
  return {k: v if v else None for k, v in self._imports.items()}
98
94
 
99
95
 
@@ -129,7 +125,7 @@ class TypingImportTypingCompiler(TypingCompiler):
129
125
  self._imported = True
130
126
  return f"typing.AsyncIterator[{type_}]"
131
127
 
132
- def imports(self) -> Dict[str, Optional[Set[str]]]:
128
+ def imports(self) -> builtins.dict[str, set[str] | None]:
133
129
  if self._imported:
134
130
  return {"typing": None}
135
131
  return {}
@@ -137,7 +133,7 @@ class TypingImportTypingCompiler(TypingCompiler):
137
133
 
138
134
  @dataclass
139
135
  class NoTyping310TypingCompiler(TypingCompiler):
140
- _imports: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set))
136
+ _imports: dict[str, set[str]] = field(default_factory=lambda: defaultdict(set))
141
137
 
142
138
  def optional(self, type_: str) -> str:
143
139
  return f"{type_} | None"
@@ -163,5 +159,5 @@ class NoTyping310TypingCompiler(TypingCompiler):
163
159
  self._imports["collections.abc"].add("AsyncIterator")
164
160
  return f"AsyncIterator[{type_}]"
165
161
 
166
- def imports(self) -> Dict[str, Optional[Set[str]]]:
162
+ def imports(self) -> builtins.dict[str, set[str] | None]:
167
163
  return {k: v if v else None for k, v in self._imports.items()}
@@ -48,3 +48,5 @@ if TYPE_CHECKING:
48
48
  import grpclib.server
49
49
  from betterproto2.grpc.grpclib_client import MetadataLike
50
50
  from grpclib.metadata import Deadline
51
+
52
+ betterproto2.check_compiler_version("{{ version }}")