betterproto2-compiler 0.0.2__tar.gz → 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/PKG-INFO +1 -1
  2. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/pyproject.toml +16 -8
  3. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/compile/importing.py +11 -16
  4. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/pydantic/google/protobuf/__init__.py +3 -3
  5. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/std/google/protobuf/__init__.py +1 -2
  6. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/compiler.py +5 -2
  7. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/models.py +58 -55
  8. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/module_validation.py +2 -7
  9. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/parser.py +11 -16
  10. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/typing_compiler.py +8 -12
  11. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/templates/header.py.j2 +2 -0
  12. betterproto2_compiler-0.0.2/src/betterproto2_compiler/_types.py +0 -13
  13. betterproto2_compiler-0.0.2/src/betterproto2_compiler/enum.py +0 -180
  14. betterproto2_compiler-0.0.2/src/betterproto2_compiler/grpc/grpclib_client.py +0 -172
  15. betterproto2_compiler-0.0.2/src/betterproto2_compiler/grpc/grpclib_server.py +0 -32
  16. betterproto2_compiler-0.0.2/src/betterproto2_compiler/grpc/util/async_channel.py +0 -190
  17. betterproto2_compiler-0.0.2/src/betterproto2_compiler/lib/std/__init__.py +0 -0
  18. betterproto2_compiler-0.0.2/src/betterproto2_compiler/lib/std/google/__init__.py +0 -0
  19. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/LICENSE.md +0 -0
  20. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/README.md +0 -0
  21. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/__init__.py +0 -0
  22. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/casing.py +0 -0
  23. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/compile/__init__.py +0 -0
  24. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/compile/naming.py +0 -0
  25. {betterproto2_compiler-0.0.2/src/betterproto2_compiler/grpc → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib}/__init__.py +0 -0
  26. {betterproto2_compiler-0.0.2/src/betterproto2_compiler/grpc/util → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib/google}/__init__.py +0 -0
  27. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/google/protobuf/__init__.py +0 -0
  28. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/google/protobuf/compiler/__init__.py +0 -0
  29. {betterproto2_compiler-0.0.2/src/betterproto2_compiler/lib → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib/pydantic}/__init__.py +0 -0
  30. {betterproto2_compiler-0.0.2/src/betterproto2_compiler/lib → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib/pydantic}/google/__init__.py +0 -0
  31. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/pydantic/google/protobuf/compiler/__init__.py +0 -0
  32. {betterproto2_compiler-0.0.2/src/betterproto2_compiler/lib/pydantic → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib/std}/__init__.py +0 -0
  33. {betterproto2_compiler-0.0.2/src/betterproto2_compiler/lib/pydantic → betterproto2_compiler-0.1.0/src/betterproto2_compiler/lib/std}/google/__init__.py +0 -0
  34. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py +0 -0
  35. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/__init__.py +0 -0
  36. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/__main__.py +0 -0
  37. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/main.py +0 -0
  38. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/plugin/plugin.bat +0 -0
  39. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/py.typed +0 -0
  40. {betterproto2_compiler-0.0.2 → betterproto2_compiler-0.1.0}/src/betterproto2_compiler/templates/template.py.j2 +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: betterproto2_compiler
- Version: 0.0.2
+ Version: 0.1.0
  Summary: Compiler for betterproto2
  Home-page: https://github.com/betterproto/python-betterproto2-compiler
  License: MIT
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "betterproto2_compiler"
- version = "0.0.2"
+ version = "0.1.0"
  description = "Compiler for betterproto2"
  authors = ["Adrien Vannson <adrien.vannson@protonmail.com>", "Daniel G. Taylor <danielgtaylor@gmail.com>"]
  readme = "README.md"
@@ -34,10 +34,10 @@ pytest = "^6.2.5"
  protobuf = "^4"

  [tool.poetry.scripts]
- protoc-gen-python_betterproto = "betterproto2_compiler.plugin:main"
+ protoc-gen-python_betterproto2 = "betterproto2_compiler.plugin:main"

  [tool.ruff]
- extend-exclude = ["tests/output_*"]
+ extend-exclude = ["tests/output_*", "src/betterproto2_compiler/lib"]
  target-version = "py310"
  line-length = 120

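
The renamed console script above changes how protoc finds the plugin: protoc resolves an executable named protoc-gen-<name> for a --<name>_out flag, so generated code is now requested with --python_betterproto2_out instead of --python_betterproto_out. A minimal sketch of such an invocation (the proto file and output directory are made up for illustration):

    # Assumes protoc and betterproto2_compiler are installed and on PATH.
    import subprocess

    subprocess.run(
        [
            "protoc",
            "--python_betterproto2_out=generated",  # served by protoc-gen-python_betterproto2
            "-I",
            "protos",
            "protos/example.proto",
        ],
        check=True,
    )
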
@@ -52,7 +52,11 @@ select = [
      "SIM102", # Simplify return or yield statements
      "SIM103", # Simplify list/set/dict comprehensions

+     "UP",
+
      "I",
+
+     "COM812", # Trailing commas
  ]


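
For context on the new lint selections: "UP" enables Ruff's pyupgrade rules and "COM812" flags missing trailing commas in multi-line constructs, which together account for most of the mechanical edits later in this diff (typing.Dict/List/Optional replaced by builtin generics and X | None unions, plus added trailing commas). An illustrative before/after, not taken from the package sources:

    # Before: typing aliases
    from typing import Dict, Optional

    def lookup_old(table: Dict[str, int], key: str) -> Optional[int]:
        return table.get(key)

    # After: PEP 585/604 style, as the "UP" rules require on Python 3.10+
    def lookup_new(table: dict[str, int], key: str) -> int | None:
        return table.get(key)
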
@@ -61,6 +65,10 @@ combine-as-imports = true

  # Dev workflow tasks

+ [tool.poe.tasks.test]
+ cmd = "pytest"
+ help = "Run tests"
+
  [tool.poe.tasks.generate]
  script = "tests.generate:main"
  help = "Generate test cases"
@@ -74,8 +82,8 @@ sequence = ["_format", "_sort-imports"]
  help = "Format the source code, and sort the imports"

  [tool.poe.tasks.check]
- sequence = ["_check-format", "_check-imports"]
- help = "Check that the source code is formatted and the imports sorted"
+ sequence = ["_check-format", "_check-ruff-lint"]
+ help = "Check that the source code is formatted and the code passes the linter"

  [tool.poe.tasks._format]
  cmd = "ruff format src tests"
@@ -89,9 +97,9 @@ help = "Sort the imports"
  cmd = "ruff format --diff src tests"
  help = "Check that the source code is formatted"

- [tool.poe.tasks._check-imports]
- cmd = "ruff check --select I src tests"
- help = "Check that the imports are sorted"
+ [tool.poe.tasks._check-ruff-lint]
+ cmd = "ruff check src tests"
+ help = "Check the code with the Ruff linter"

  [tool.poe.tasks.generate_lib]
  cmd = """
@@ -3,11 +3,6 @@ from __future__ import annotations
  import os
  from typing import (
      TYPE_CHECKING,
-     Dict,
-     List,
-     Set,
-     Tuple,
-     Type,
  )

  from ..casing import safe_snake_case
@@ -18,7 +13,7 @@ if TYPE_CHECKING:
      from ..plugin.models import PluginRequestCompiler
      from ..plugin.typing_compiler import TypingCompiler

- WRAPPER_TYPES: Dict[str, Type] = {
+ WRAPPER_TYPES: dict[str, type] = {
      ".google.protobuf.DoubleValue": google_protobuf.DoubleValue,
      ".google.protobuf.FloatValue": google_protobuf.FloatValue,
      ".google.protobuf.Int32Value": google_protobuf.Int32Value,
@@ -31,7 +26,7 @@ WRAPPER_TYPES: Dict[str, Type] = {
  }


- def parse_source_type_name(field_type_name: str, request: "PluginRequestCompiler") -> Tuple[str, str]:
+ def parse_source_type_name(field_type_name: str, request: PluginRequestCompiler) -> tuple[str, str]:
      """
      Split full source type name into package and type name.
      E.g. 'root.package.Message' -> ('root.package', 'Message')
@@ -77,7 +72,7 @@ def get_type_reference(
      imports: set,
      source_type: str,
      typing_compiler: TypingCompiler,
-     request: "PluginRequestCompiler",
+     request: PluginRequestCompiler,
      unwrap: bool = True,
      pydantic: bool = False,
  ) -> str:
@@ -98,16 +93,16 @@

      source_package, source_type = parse_source_type_name(source_type, request)

-     current_package: List[str] = package.split(".") if package else []
-     py_package: List[str] = source_package.split(".") if source_package else []
+     current_package: list[str] = package.split(".") if package else []
+     py_package: list[str] = source_package.split(".") if source_package else []
      py_type: str = pythonize_class_name(source_type)

      compiling_google_protobuf = current_package == ["google", "protobuf"]
      importing_google_protobuf = py_package == ["google", "protobuf"]
      if importing_google_protobuf and not compiling_google_protobuf:
-         py_package = ["betterproto", "lib"] + (["pydantic"] if pydantic else []) + py_package
+         py_package = ["betterproto2", "lib"] + (["pydantic"] if pydantic else []) + py_package

-     if py_package[:1] == ["betterproto"]:
+     if py_package[:1] == ["betterproto2"]:
          return reference_absolute(imports, py_package, py_type)

      if py_package == current_package:
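
The two renamed literals above retarget the generated imports for Google well-known types from the old betterproto runtime package to betterproto2. Illustratively, generated code that references a wrapper type would now import it roughly like this (the exact alias produced by reference_absolute is not shown in this diff):

    # Assumed runtime layout: betterproto2 bundles the well-known types under lib.
    from betterproto2.lib.google.protobuf import DoubleValue

    wrapped = DoubleValue(value=1.5)
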
@@ -122,7 +117,7 @@ def get_type_reference(
      return reference_cousin(current_package, imports, py_package, py_type)


- def reference_absolute(imports: Set[str], py_package: List[str], py_type: str) -> str:
+ def reference_absolute(imports: set[str], py_package: list[str], py_type: str) -> str:
      """
      Returns a reference to a python type located in the root, i.e. sys.path.
      """
@@ -139,7 +134,7 @@ def reference_sibling(py_type: str) -> str:
      return f"{py_type}"


- def reference_descendent(current_package: List[str], imports: Set[str], py_package: List[str], py_type: str) -> str:
+ def reference_descendent(current_package: list[str], imports: set[str], py_package: list[str], py_type: str) -> str:
      """
      Returns a reference to a python type in a package that is a descendent of the
      current package, and adds the required import that is aliased to avoid name
@@ -157,7 +152,7 @@ def reference_descendent(current_package: List[str], imports: Set[str], py_packa
      return f"{string_import}.{py_type}"


- def reference_ancestor(current_package: List[str], imports: Set[str], py_package: List[str], py_type: str) -> str:
+ def reference_ancestor(current_package: list[str], imports: set[str], py_package: list[str], py_type: str) -> str:
      """
      Returns a reference to a python type in a package which is an ancestor to the
      current package, and adds the required import that is aliased (if possible) to avoid
@@ -178,7 +173,7 @@ def reference_ancestor(current_package: List[str], imports: Set[str], py_package
      return string_alias


- def reference_cousin(current_package: List[str], imports: Set[str], py_package: List[str], py_type: str) -> str:
+ def reference_cousin(current_package: list[str], imports: set[str], py_package: list[str], py_type: str) -> str:
      """
      Returns a reference to a python type in a package that is not descendent, ancestor
      or sibling, and adds the required import that is aliased to avoid name conflicts.
@@ -2401,13 +2401,13 @@ class Value(betterproto2_compiler.Message):
      )
      """Represents a null value."""

-     number_value: Optional[float] = betterproto2_compiler.double_field(2, optional=True, group="kind")
+     number_value: float | None = betterproto2_compiler.double_field(2, optional=True, group="kind")
      """Represents a double value."""

-     string_value: Optional[str] = betterproto2_compiler.string_field(3, optional=True, group="kind")
+     string_value: str | None = betterproto2_compiler.string_field(3, optional=True, group="kind")
      """Represents a string value."""

-     bool_value: Optional[bool] = betterproto2_compiler.bool_field(4, optional=True, group="kind")
+     bool_value: bool | None = betterproto2_compiler.bool_field(4, optional=True, group="kind")
      """Represents a boolean value."""

      struct_value: Optional["Struct"] = betterproto2_compiler.message_field(5, optional=True, group="kind")
@@ -78,7 +78,6 @@ from typing import (
      Dict,
      List,
      Mapping,
-     Optional,
  )

  import betterproto2
@@ -1022,7 +1021,7 @@ class FieldDescriptorProto(betterproto2.Message):
      TODO(kenton): Base-64 encode?
      """

-     oneof_index: Optional[int] = betterproto2.int32_field(9, optional=True)
+     oneof_index: int | None = betterproto2.int32_field(9, optional=True)
      """
      If set, gives the index of a oneof in the containing type's oneof_decl
      list. This field is a member of that oneof.
@@ -1,6 +1,7 @@
  import os.path
  import subprocess
  import sys
+ from importlib import metadata

  from .module_validation import ModuleValidator

@@ -14,7 +15,7 @@ except ImportError as err:
          "Please ensure that you've installed betterproto as "
          '`pip install "betterproto[compiler]"` so that compiler dependencies '
          "are included."
-         "\033[0m"
+         "\033[0m",
      )
      raise SystemExit(1)

@@ -24,6 +25,8 @@ from .models import OutputTemplate
  def outputfile_compiler(output_file: OutputTemplate) -> str:
      templates_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "templates"))

+     version = metadata.version("betterproto2_compiler")
+
      env = jinja2.Environment(
          trim_blocks=True,
          lstrip_blocks=True,
@@ -35,7 +38,7 @@ def outputfile_compiler(output_file: OutputTemplate) -> str:
      header_template = env.get_template("header.py.j2")

      code = body_template.render(output_file=output_file)
-     code = header_template.render(output_file=output_file) + "\n" + code
+     code = header_template.render(output_file=output_file, version=version) + "\n" + code

      # Sort imports, delete unused ones
      code = subprocess.check_output(
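
The compiler now stamps its own installed version into the generated header. A self-contained sketch of the same plumbing, with an invented one-line template standing in for templates/header.py.j2 (whose real content is not part of this diff):

    from importlib import metadata

    import jinja2

    env = jinja2.Environment(
        loader=jinja2.DictLoader({"header.py.j2": "# Generated by betterproto2_compiler {{ version }}"}),
    )
    version = metadata.version("betterproto2_compiler")
    print(env.get_template("header.py.j2").render(version=version))
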
@@ -31,22 +31,17 @@ reference to `A` to `B`'s `fields` attribute.

  import builtins
  import re
+ from collections.abc import Iterable, Iterator
  from dataclasses import (
      dataclass,
      field,
  )
  from typing import (
-     Dict,
-     Iterable,
-     Iterator,
-     List,
-     Optional,
-     Set,
-     Type,
      Union,
  )

- import betterproto2_compiler
+ import betterproto2
+
  from betterproto2_compiler.compile.naming import (
      pythonize_class_name,
      pythonize_field_name,
@@ -58,6 +53,7 @@ from betterproto2_compiler.lib.google.protobuf import (
      FieldDescriptorProto,
      FieldDescriptorProtoLabel,
      FieldDescriptorProtoType,
+     FieldDescriptorProtoType as FieldType,
      FileDescriptorProto,
      MethodDescriptorProto,
  )
@@ -145,7 +141,7 @@ PROTO_PACKED_TYPES = (

  def get_comment(
      proto_file: "FileDescriptorProto",
-     path: List[int],
+     path: list[int],
  ) -> str:
      for sci_loc in proto_file.source_code_info.location:
          if list(sci_loc.path) == path:
@@ -181,10 +177,10 @@ class ProtoContentBase:

      source_file: FileDescriptorProto
      typing_compiler: TypingCompiler
-     path: List[int]
-     parent: Union["betterproto2_compiler.Message", "OutputTemplate"]
+     path: list[int]
+     parent: Union["betterproto2.Message", "OutputTemplate"]

-     __dataclass_fields__: Dict[str, object]
+     __dataclass_fields__: dict[str, object]

      def __post_init__(self) -> None:
          """Checks that no fake default fields were left as placeholders."""
@@ -224,10 +220,10 @@ class ProtoContentBase:

  @dataclass
  class PluginRequestCompiler:
      plugin_request_obj: CodeGeneratorRequest
-     output_packages: Dict[str, "OutputTemplate"] = field(default_factory=dict)
+     output_packages: dict[str, "OutputTemplate"] = field(default_factory=dict)

      @property
-     def all_messages(self) -> List["MessageCompiler"]:
+     def all_messages(self) -> list["MessageCompiler"]:
          """All of the messages in this request.

@@ -249,11 +245,11 @@ class OutputTemplate:

      parent_request: PluginRequestCompiler
      package_proto_obj: FileDescriptorProto
-     input_files: List[str] = field(default_factory=list)
-     imports_end: Set[str] = field(default_factory=set)
-     messages: Dict[str, "MessageCompiler"] = field(default_factory=dict)
-     enums: Dict[str, "EnumDefinitionCompiler"] = field(default_factory=dict)
-     services: Dict[str, "ServiceCompiler"] = field(default_factory=dict)
+     input_files: list[str] = field(default_factory=list)
+     imports_end: set[str] = field(default_factory=set)
+     messages: dict[str, "MessageCompiler"] = field(default_factory=dict)
+     enums: dict[str, "EnumDefinitionCompiler"] = field(default_factory=dict)
+     services: dict[str, "ServiceCompiler"] = field(default_factory=dict)
      pydantic_dataclasses: bool = False
      output: bool = True
      typing_compiler: TypingCompiler = field(default_factory=DirectImportTypingCompiler)
@@ -289,9 +285,9 @@ class MessageCompiler(ProtoContentBase):
      typing_compiler: TypingCompiler
      parent: Union["MessageCompiler", OutputTemplate] = PLACEHOLDER
      proto_obj: DescriptorProto = PLACEHOLDER
-     path: List[int] = PLACEHOLDER
-     fields: List[Union["FieldCompiler", "MessageCompiler"]] = field(default_factory=list)
-     builtins_types: Set[str] = field(default_factory=set)
+     path: list[int] = PLACEHOLDER
+     fields: list[Union["FieldCompiler", "MessageCompiler"]] = field(default_factory=list)
+     builtins_types: set[str] = field(default_factory=set)

      def __post_init__(self) -> None:
          # Add message to output file
@@ -327,11 +323,9 @@ class MessageCompiler(ProtoContentBase):
      @property
      def has_message_field(self) -> bool:
          return any(
-             (
-                 field.proto_obj.type in PROTO_MESSAGE_TYPES
-                 for field in self.fields
-                 if isinstance(field.proto_obj, FieldDescriptorProto)
-             )
+             field.proto_obj.type in PROTO_MESSAGE_TYPES
+             for field in self.fields
+             if isinstance(field.proto_obj, FieldDescriptorProto)
          )


@@ -346,7 +340,7 @@ def is_map(proto_field_obj: FieldDescriptorProto, parent_message: DescriptorProt
          map_entry = f"{proto_field_obj.name.replace('_', '').lower()}entry"
          if message_type == map_entry:
              for nested in parent_message.nested_type:  # parent message
-                 if nested.name.replace("_", "").lower() == map_entry and nested.options.map_entry:
+                 if nested.name.replace("_", "").lower() == map_entry and nested.options and nested.options.map_entry:
                      return True
      return False

@@ -373,8 +367,8 @@ def is_oneof(proto_field_obj: FieldDescriptorProto) -> bool:
  class FieldCompiler(ProtoContentBase):
      source_file: FileDescriptorProto
      typing_compiler: TypingCompiler
-     path: List[int] = PLACEHOLDER
-     builtins_types: Set[str] = field(default_factory=set)
+     path: list[int] = PLACEHOLDER
+     builtins_types: set[str] = field(default_factory=set)

      parent: MessageCompiler = PLACEHOLDER
      proto_obj: FieldDescriptorProto = PLACEHOLDER
@@ -389,13 +383,16 @@ class FieldCompiler(ProtoContentBase):
          """Construct string representation of this field as a field."""
          name = f"{self.py_name}"
          field_args = ", ".join(([""] + self.betterproto_field_args) if self.betterproto_field_args else [])
-         betterproto_field_type = f"betterproto2.{self.field_type}_field({self.proto_obj.number}{field_args})"
+
+         betterproto_field_type = (
+             f"betterproto2.field({self.proto_obj.number}, betterproto2.{str(self.field_type)}{field_args})"
+         )
          if self.py_name in dir(builtins):
              self.parent.builtins_types.add(self.py_name)
          return f'{name}: "{self.annotation}" = {betterproto_field_type}'

      @property
-     def betterproto_field_args(self) -> List[str]:
+     def betterproto_field_args(self) -> list[str]:
          args = []
          if self.field_wraps:
              args.append(f"wraps={self.field_wraps}")
@@ -403,9 +400,9 @@ class FieldCompiler(ProtoContentBase):
              args.append("optional=True")
          if self.repeated:
              args.append("repeated=True")
-         if self.field_type == "enum":
+         if self.field_type == FieldType.TYPE_ENUM:
              t = self.py_type
-             args.append(f"enum_default_value=lambda: {t}.try_value(0)")
+             args.append(f"default_factory=lambda: {t}.try_value(0)")
          return args

      @property
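
Together, the get_field_string and betterproto_field_args changes above switch the emitted field declarations from per-type helpers such as betterproto2.int32_field(...) to a single betterproto2.field(number, type, ...) call, with enum defaults passed as default_factory. A runnable mock of the string the compiler now builds, using made-up values (and assuming str() of the FieldType enum renders as names like TYPE_INT32):

    number = 1
    field_type = "TYPE_INT32"  # stand-in for str(FieldType.TYPE_INT32)
    field_args = ""            # e.g. ", optional=True" when extra arguments apply
    print(f'count: "int" = betterproto2.field({number}, betterproto2.{field_type}{field_args})')
    # -> count: "int" = betterproto2.field(1, betterproto2.TYPE_INT32)
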
@@ -415,29 +412,31 @@ class FieldCompiler(ProtoContentBase):
          )

      @property
-     def field_wraps(self) -> Optional[str]:
+     def field_wraps(self) -> str | None:
          """Returns betterproto wrapped field type or None."""
          match_wrapper = re.match(r"\.google\.protobuf\.(.+)Value$", self.proto_obj.type_name)
          if match_wrapper:
              wrapped_type = "TYPE_" + match_wrapper.group(1).upper()
-             if hasattr(betterproto2_compiler, wrapped_type):
+             if hasattr(betterproto2, wrapped_type):
                  return f"betterproto2.{wrapped_type}"
          return None

      @property
      def repeated(self) -> bool:
          return self.proto_obj.label == FieldDescriptorProtoLabel.LABEL_REPEATED and not is_map(
-             self.proto_obj, self.parent
+             self.proto_obj,
+             self.parent,
          )

      @property
      def optional(self) -> bool:
-         return self.proto_obj.proto3_optional or (self.field_type == "message" and not self.repeated)
+         # TODO not for maps
+         return self.proto_obj.proto3_optional or (self.field_type == FieldType.TYPE_MESSAGE and not self.repeated)

      @property
-     def field_type(self) -> str:
-         """String representation of proto field type."""
-         return FieldDescriptorProtoType(self.proto_obj.type).name.lower().replace("type_", "")
+     def field_type(self) -> FieldType:
+         # TODO it should be possible to remove constructor
+         return FieldType(self.proto_obj.type)

      @property
      def packed(self) -> bool:
@@ -499,7 +498,7 @@ class OneOfFieldCompiler(FieldCompiler):
          return True

      @property
-     def betterproto_field_args(self) -> List[str]:
+     def betterproto_field_args(self) -> list[str]:
          args = super().betterproto_field_args
          group = self.parent.proto_obj.oneof_decl[self.proto_obj.oneof_index].name
          args.append(f'group="{group}"')
@@ -508,8 +507,8 @@ class OneOfFieldCompiler(FieldCompiler):

  @dataclass
  class MapEntryCompiler(FieldCompiler):
-     py_k_type: Optional[Type] = None
-     py_v_type: Optional[Type] = None
+     py_k_type: type | None = None
+     py_v_type: type | None = None
      proto_k_type: str = ""
      proto_v_type: str = ""

@@ -546,13 +545,17 @@ class MapEntryCompiler(FieldCompiler):

          raise ValueError("can't find enum")

-     @property
-     def betterproto_field_args(self) -> List[str]:
-         return [f"betterproto2.{self.proto_k_type}", f"betterproto2.{self.proto_v_type}"]
-
-     @property
-     def field_type(self) -> str:
-         return "map"
+     def get_field_string(self) -> str:
+         """Construct string representation of this field as a field."""
+         betterproto_field_type = (
+             f"betterproto2.field({self.proto_obj.number}, "
+             "betterproto2.TYPE_MAP, "
+             f"map_types=(betterproto2.{self.proto_k_type}, "
+             f"betterproto2.{self.proto_v_type}))"
+         )
+         if self.py_name in dir(builtins):
+             self.parent.builtins_types.add(self.py_name)
+         return f'{self.py_name}: "{self.annotation}" = {betterproto_field_type}'

      @property
      def annotation(self) -> str:
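
Map fields follow the same pattern: instead of a dedicated "map" field type with positional key/value arguments, MapEntryCompiler now emits betterproto2.field(number, betterproto2.TYPE_MAP, map_types=(...)). A runnable mock of the string it builds, with invented key/value types:

    number, proto_k_type, proto_v_type = 3, "TYPE_STRING", "TYPE_INT32"
    declaration = (
        f"betterproto2.field({number}, "
        "betterproto2.TYPE_MAP, "
        f"map_types=(betterproto2.{proto_k_type}, "
        f"betterproto2.{proto_v_type}))"
    )
    print(declaration)
    # -> betterproto2.field(3, betterproto2.TYPE_MAP, map_types=(betterproto2.TYPE_STRING, betterproto2.TYPE_INT32))
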
@@ -568,7 +571,7 @@ class EnumDefinitionCompiler(MessageCompiler):
      """Representation of a proto Enum definition."""

      proto_obj: EnumDescriptorProto = PLACEHOLDER
-     entries: List["EnumDefinitionCompiler.EnumEntry"] = PLACEHOLDER
+     entries: list["EnumDefinitionCompiler.EnumEntry"] = PLACEHOLDER

      @dataclass(unsafe_hash=True)
      class EnumEntry:
@@ -596,8 +599,8 @@ class ServiceCompiler(ProtoContentBase):
      source_file: FileDescriptorProto
      parent: OutputTemplate = PLACEHOLDER
      proto_obj: DescriptorProto = PLACEHOLDER
-     path: List[int] = PLACEHOLDER
-     methods: List["ServiceMethodCompiler"] = field(default_factory=list)
+     path: list[int] = PLACEHOLDER
+     methods: list["ServiceMethodCompiler"] = field(default_factory=list)

      def __post_init__(self) -> None:
          # Add service to output file
@@ -618,7 +621,7 @@ class ServiceMethodCompiler(ProtoContentBase):
      source_file: FileDescriptorProto
      parent: ServiceCompiler
      proto_obj: MethodDescriptorProto
-     path: List[int] = PLACEHOLDER
+     path: list[int] = PLACEHOLDER

      def __post_init__(self) -> None:
          # Add method to service
@@ -1,15 +1,10 @@
  import re
  from collections import defaultdict
+ from collections.abc import Iterator
  from dataclasses import (
      dataclass,
      field,
  )
- from typing import (
-     Dict,
-     Iterator,
-     List,
-     Tuple,
- )


  @dataclass
@@ -17,7 +12,7 @@ class ModuleValidator:
      line_iterator: Iterator[str]
      line_number: int = field(init=False, default=0)

-     collisions: Dict[str, List[Tuple[int, str]]] = field(init=False, default_factory=lambda: defaultdict(list))
+     collisions: dict[str, list[tuple[int, str]]] = field(init=False, default_factory=lambda: defaultdict(list))

      def add_import(self, imp: str, number: int, full_line: str):
          """
@@ -1,12 +1,6 @@
  import pathlib
  import sys
- from typing import (
-     Generator,
-     List,
-     Set,
-     Tuple,
-     Union,
- )
+ from collections.abc import Generator

  from betterproto2_compiler.lib.google.protobuf import (
      DescriptorProto,
@@ -45,13 +39,13 @@ from .typing_compiler import (

  def traverse(
      proto_file: FileDescriptorProto,
- ) -> Generator[Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None]:
+ ) -> Generator[tuple[EnumDescriptorProto | DescriptorProto, list[int]], None, None]:
      # Todo: Keep information about nested hierarchy
      def _traverse(
-         path: List[int],
-         items: Union[List[EnumDescriptorProto], List[DescriptorProto]],
+         path: list[int],
+         items: list[EnumDescriptorProto] | list[DescriptorProto],
          prefix: str = "",
-     ) -> Generator[Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None]:
+     ) -> Generator[tuple[EnumDescriptorProto | DescriptorProto, list[int]], None, None]:
          for i, item in enumerate(items):
              # Adjust the name since we flatten the hierarchy.
              # Todo: don't change the name, but include full name in returned tuple
@@ -82,7 +76,8 @@ def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
          if output_package_name not in request_data.output_packages:
              # Create a new output if there is no output for this package
              request_data.output_packages[output_package_name] = OutputTemplate(
-                 parent_request=request_data, package_proto_obj=proto_file
+                 parent_request=request_data,
+                 package_proto_obj=proto_file,
              )
          # Add this input file to the output corresponding to this package
          request_data.output_packages[output_package_name].input_files.append(proto_file)
@@ -144,7 +139,7 @@ def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
              service.ready()

      # Generate output files
-     output_paths: Set[pathlib.Path] = set()
+     output_paths: set[pathlib.Path] = set()
      for output_package_name, output_package in request_data.output_packages.items():
          if not output_package.output:
              continue
@@ -158,7 +153,7 @@ def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
                  name=str(output_path),
                  # Render and then format the output file
                  content=outputfile_compiler(output_file=output_package),
-             )
+             ),
          )

      # Make each output directory a package with __init__ file
@@ -183,7 +178,7 @@ def _make_one_of_field_compiler(
      source_file: "FileDescriptorProto",
      parent: MessageCompiler,
      proto_obj: "FieldDescriptorProto",
-     path: List[int],
+     path: list[int],
  ) -> FieldCompiler:
      return OneOfFieldCompiler(
          source_file=source_file,
@@ -196,7 +191,7 @@

  def read_protobuf_type(
      item: DescriptorProto,
-     path: List[int],
+     path: list[int],
      source_file: "FileDescriptorProto",
      output_package: OutputTemplate,
  ) -> None: