betterproto2-compiler 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. betterproto2_compiler/__init__.py +0 -0
  2. betterproto2_compiler/_types.py +13 -0
  3. betterproto2_compiler/casing.py +140 -0
  4. betterproto2_compiler/compile/__init__.py +0 -0
  5. betterproto2_compiler/compile/importing.py +193 -0
  6. betterproto2_compiler/compile/naming.py +21 -0
  7. betterproto2_compiler/enum.py +180 -0
  8. betterproto2_compiler/grpc/__init__.py +0 -0
  9. betterproto2_compiler/grpc/grpclib_client.py +172 -0
  10. betterproto2_compiler/grpc/grpclib_server.py +32 -0
  11. betterproto2_compiler/grpc/util/__init__.py +0 -0
  12. betterproto2_compiler/grpc/util/async_channel.py +190 -0
  13. betterproto2_compiler/lib/__init__.py +0 -0
  14. betterproto2_compiler/lib/google/__init__.py +0 -0
  15. betterproto2_compiler/lib/google/protobuf/__init__.py +1 -0
  16. betterproto2_compiler/lib/google/protobuf/compiler/__init__.py +1 -0
  17. betterproto2_compiler/lib/pydantic/__init__.py +0 -0
  18. betterproto2_compiler/lib/pydantic/google/__init__.py +0 -0
  19. betterproto2_compiler/lib/pydantic/google/protobuf/__init__.py +2690 -0
  20. betterproto2_compiler/lib/pydantic/google/protobuf/compiler/__init__.py +209 -0
  21. betterproto2_compiler/lib/std/__init__.py +0 -0
  22. betterproto2_compiler/lib/std/google/__init__.py +0 -0
  23. betterproto2_compiler/lib/std/google/protobuf/__init__.py +2517 -0
  24. betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py +197 -0
  25. betterproto2_compiler/plugin/__init__.py +3 -0
  26. betterproto2_compiler/plugin/__main__.py +3 -0
  27. betterproto2_compiler/plugin/compiler.py +59 -0
  28. betterproto2_compiler/plugin/main.py +52 -0
  29. betterproto2_compiler/plugin/models.py +709 -0
  30. betterproto2_compiler/plugin/module_validation.py +161 -0
  31. betterproto2_compiler/plugin/parser.py +263 -0
  32. betterproto2_compiler/plugin/plugin.bat +2 -0
  33. betterproto2_compiler/plugin/typing_compiler.py +167 -0
  34. betterproto2_compiler/py.typed +0 -0
  35. betterproto2_compiler/templates/header.py.j2 +50 -0
  36. betterproto2_compiler/templates/template.py.j2 +243 -0
  37. betterproto2_compiler-0.0.1.dist-info/LICENSE.md +22 -0
  38. betterproto2_compiler-0.0.1.dist-info/METADATA +35 -0
  39. betterproto2_compiler-0.0.1.dist-info/RECORD +41 -0
  40. betterproto2_compiler-0.0.1.dist-info/WHEEL +4 -0
  41. betterproto2_compiler-0.0.1.dist-info/entry_points.txt +3 -0
@@ -0,0 +1,161 @@
1
+ import re
2
+ from collections import defaultdict
3
+ from dataclasses import (
4
+ dataclass,
5
+ field,
6
+ )
7
+ from typing import (
8
+ Dict,
9
+ Iterator,
10
+ List,
11
+ Tuple,
12
+ )
13
+
14
+
15
@dataclass
class ModuleValidator:
    """Scan a Python module's source lines and detect top-level name collisions.

    Every top-level binding (import, class, function, assignment) is recorded;
    :meth:`validate` reports whether any name is bound more than once.
    """

    # Iterator over the module's source lines (including trailing newlines).
    line_iterator: Iterator[str]
    # 0-based index of the line currently being processed.
    line_number: int = field(init=False, default=0)

    # Name -> list of (line number, full source line) where the name is bound.
    # After ``validate`` runs, only names bound more than once remain.
    collisions: Dict[str, List[Tuple[int, str]]] = field(init=False, default_factory=lambda: defaultdict(list))

    def add_import(self, imp: str, number: int, full_line: str):
        """Record that *imp* is bound at line *number* by *full_line*."""
        self.collisions[imp].append((number, full_line))

    def process_import(self, imp: str) -> str:
        """Reduce an import fragment to the name it actually binds.

        Honors ``... as alias`` by keeping only the alias.
        """
        if " as " in imp:
            imp = imp[imp.index(" as ") + 4 :]

        imp = imp.strip()
        assert " " not in imp, imp
        return imp

    def evaluate_multiline_import(self, line: str):
        """Consume and record an import statement spanning several lines."""
        # Remove anything before the import statement on the first line.
        full_line = line
        line = line.split("import", 1)[1]

        # Choose the continuation test based on how the import is wrapped:
        # parenthesized imports run until ")", backslash-continued imports
        # run while a line still contains "\".  Named functions are used
        # instead of assigned lambdas (PEP 8 / E731).
        if "(" in line:

            def unfinished(current: str) -> bool:
                return ")" not in current

            # Drop the opening parenthesis so it is not parsed as a name.
            line = line[line.index("(") + 1 :]
        else:

            def unfinished(current: str) -> bool:
                return "\\" in current

        while unfinished(line):
            # Record every name on this line.  The trailing "\" of a
            # continuation line is stripped so it is not mistaken for a name.
            for imp in line.rstrip().rstrip("\\").split(","):
                imp = self.process_import(imp)
                if imp:
                    self.add_import(imp, self.line_number, full_line)
            # Advance to the next physical line of the statement.
            full_line = line = next(self.line_iterator)
            self.line_number += 1

        # Record the names on the final line (everything before a closing
        # parenthesis, when present).  This also runs for backslash-continued
        # imports, whose last line was previously dropped entirely.
        if ")" in line:
            line = line[: line.index(")")]
        for imp in line.split(","):
            imp = self.process_import(imp)
            if imp:
                self.add_import(imp, self.line_number, full_line)

    def evaluate_import(self, line: str):
        """Extract and record the names bound by a single-line import."""
        whole_line = line
        line = line[line.index("import") + 6 :]
        values = line.split(",")
        for v in values:
            self.add_import(self.process_import(v), self.line_number, whole_line)

    def next(self):
        """Consume one line (or one logical block) and record any names it binds."""
        line = next(self.line_iterator)

        # Skip lines with indentation or comments
        if (
            # Skip indents and whitespace.
            line.startswith(" ")
            or line == "\n"
            or line.startswith("\t")
            or
            # Skip comments
            line.startswith("#")
            or
            # Skip decorators
            line.startswith("@")
        ):
            self.line_number += 1
            return

        # Skip docstrings.
        if line.startswith('"""') or line.startswith("'''"):
            quote = line[0] * 3
            line = line[3:]
            while quote not in line:
                line = next(self.line_iterator)
                self.line_number += 1
            # Count the line that opened the docstring as well; previously the
            # opening line was never counted, skewing all later line numbers.
            self.line_number += 1
            return

        # Evaluate Imports.
        if line.startswith("from ") or line.startswith("import "):
            if "(" in line or "\\" in line:
                self.evaluate_multiline_import(line)
            else:
                self.evaluate_import(line)

        # Evaluate Classes.  Check the match object before calling ``.group``;
        # calling ``.group`` first would raise AttributeError on a miss.
        elif line.startswith("class "):
            match = re.search(r"class (\w+)", line)
            if match:
                self.add_import(match.group(1), self.line_number, line)

        # Evaluate Functions.
        elif line.startswith("def "):
            match = re.search(r"def (\w+)", line)
            if match:
                self.add_import(match.group(1), self.line_number, line)

        # Evaluate direct assignments.
        elif "=" in line:
            match = re.search(r"(\w+)\s*=", line)
            if match:
                self.add_import(match.group(1), self.line_number, line)

        self.line_number += 1

    def validate(self) -> bool:
        """Run validation over every line; return True when no name collides."""
        try:
            while True:
                self.next()
        except StopIteration:
            pass

        # Filter collisions for those with more than one value.
        self.collisions = {k: v for k, v in self.collisions.items() if len(v) > 1}

        # Return True if no collisions are found.
        return not bool(self.collisions)
@@ -0,0 +1,263 @@
1
+ import pathlib
2
+ import sys
3
+ from typing import (
4
+ Generator,
5
+ List,
6
+ Set,
7
+ Tuple,
8
+ Union,
9
+ )
10
+
11
+ from betterproto2_compiler.lib.google.protobuf import (
12
+ DescriptorProto,
13
+ EnumDescriptorProto,
14
+ FieldDescriptorProto,
15
+ FileDescriptorProto,
16
+ ServiceDescriptorProto,
17
+ )
18
+ from betterproto2_compiler.lib.google.protobuf.compiler import (
19
+ CodeGeneratorRequest,
20
+ CodeGeneratorResponse,
21
+ CodeGeneratorResponseFeature,
22
+ CodeGeneratorResponseFile,
23
+ )
24
+
25
+ from .compiler import outputfile_compiler
26
+ from .models import (
27
+ EnumDefinitionCompiler,
28
+ FieldCompiler,
29
+ MapEntryCompiler,
30
+ MessageCompiler,
31
+ OneOfFieldCompiler,
32
+ OutputTemplate,
33
+ PluginRequestCompiler,
34
+ ServiceCompiler,
35
+ ServiceMethodCompiler,
36
+ is_map,
37
+ is_oneof,
38
+ )
39
+ from .typing_compiler import (
40
+ DirectImportTypingCompiler,
41
+ NoTyping310TypingCompiler,
42
+ TypingImportTypingCompiler,
43
+ )
44
+
45
+
46
def traverse(
    proto_file: FileDescriptorProto,
) -> Generator[Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None]:
    """Depth-first traversal of every enum and message in *proto_file*.

    Yields ``(descriptor, path)`` pairs, where *path* is the proto
    source-code-info path of the descriptor.  Nested definitions are
    flattened by rewriting their name to ``Parent.Child``.
    """

    # Todo: Keep information about nested hierarchy
    def _walk(
        path: List[int],
        descriptors: Union[List[EnumDescriptorProto], List[DescriptorProto]],
        prefix: str = "",
    ) -> Generator[Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None]:
        for index, descriptor in enumerate(descriptors):
            # Flattening the hierarchy means nested names must be prefixed.
            # Map-entry messages keep their original name.
            # Todo: don't change the name, but include full name in returned tuple
            rename = not (isinstance(descriptor, DescriptorProto) and descriptor.options and descriptor.options.map_entry)
            if prefix and rename:
                descriptor.name = f"{prefix}.{descriptor.name}"
            child_prefix = descriptor.name

            yield descriptor, [*path, index]

            if isinstance(descriptor, DescriptorProto):
                # Nested enums live at field 4, nested messages at field 3.
                yield from _walk([*path, index, 4], descriptor.enum_type, child_prefix)
                yield from _walk([*path, index, 3], descriptor.nested_type, child_prefix)

    # Top-level enums are field 5 of FileDescriptorProto, messages field 4.
    yield from _walk([5], proto_file.enum_type)
    yield from _walk([4], proto_file.message_type)
70
+
71
+
72
def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse:
    """Plugin entry point: compile *request* into a CodeGeneratorResponse.

    One output module (``__init__.py``) is produced per proto package.
    Messages and enums are read before services so that service methods can
    resolve references to their input/output message types.
    """
    response = CodeGeneratorResponse()

    # Plugin options arrive as a single comma-separated string on the request.
    plugin_options = request.parameter.split(",") if request.parameter else []
    response.supported_features = CodeGeneratorResponseFeature.FEATURE_PROTO3_OPTIONAL

    request_data = PluginRequestCompiler(plugin_request_obj=request)
    # Gather output packages
    for proto_file in request.proto_file:
        output_package_name = proto_file.package
        if output_package_name not in request_data.output_packages:
            # Create a new output if there is no output for this package
            request_data.output_packages[output_package_name] = OutputTemplate(
                parent_request=request_data, package_proto_obj=proto_file
            )
        # Add this input file to the output corresponding to this package
        request_data.output_packages[output_package_name].input_files.append(proto_file)

        if proto_file.package == "google.protobuf" and "INCLUDE_GOOGLE" not in plugin_options:
            # If not INCLUDE_GOOGLE,
            # skip outputting Google's well-known types
            request_data.output_packages[output_package_name].output = False

        if "pydantic_dataclasses" in plugin_options:
            request_data.output_packages[output_package_name].pydantic_dataclasses = True

        # Gather any typing generation options.
        typing_opts = [opt[len("typing.") :] for opt in plugin_options if opt.startswith("typing.")]

        if len(typing_opts) > 1:
            raise ValueError("Multiple typing options provided")
        # Set the compiler type.
        typing_opt = typing_opts[0] if typing_opts else "direct"
        if typing_opt == "direct":
            request_data.output_packages[output_package_name].typing_compiler = DirectImportTypingCompiler()
        elif typing_opt == "root":
            request_data.output_packages[output_package_name].typing_compiler = TypingImportTypingCompiler()
        elif typing_opt == "310":
            request_data.output_packages[output_package_name].typing_compiler = NoTyping310TypingCompiler()
        # NOTE(review): an unrecognized "typing.*" option falls through
        # silently, keeping the package's default compiler — confirm whether
        # this should raise instead.

    # Read Messages and Enums
    # We need to read Messages before Services in so that we can
    # get the references to input/output messages for each service
    for output_package_name, output_package in request_data.output_packages.items():
        for proto_input_file in output_package.input_files:
            for item, path in traverse(proto_input_file):
                read_protobuf_type(
                    source_file=proto_input_file,
                    item=item,
                    path=path,
                    output_package=output_package,
                )

    # Read Services
    for output_package_name, output_package in request_data.output_packages.items():
        for proto_input_file in output_package.input_files:
            for index, service in enumerate(proto_input_file.service):
                read_protobuf_service(proto_input_file, service, index, output_package)

    # All the hierarchy is ready. We can perform pre-computations before generating the output files
    for package in request_data.output_packages.values():
        for message in package.messages.values():
            for field in message.fields:
                field.ready()
            message.ready()
        for enum in package.enums.values():
            for variant in enum.fields:
                variant.ready()
            enum.ready()
        for service in package.services.values():
            for method in service.methods:
                method.ready()
            service.ready()

    # Generate output files
    output_paths: Set[pathlib.Path] = set()
    for output_package_name, output_package in request_data.output_packages.items():
        if not output_package.output:
            continue

        # Add files to the response object
        output_path = pathlib.Path(*output_package_name.split("."), "__init__.py")
        output_paths.add(output_path)

        response.file.append(
            CodeGeneratorResponseFile(
                name=str(output_path),
                # Render and then format the output file
                content=outputfile_compiler(output_file=output_package),
            )
        )

    # Make each output directory a package with __init__ file
    init_files = {
        directory.joinpath("__init__.py")
        for path in output_paths
        for directory in path.parents
        if not directory.joinpath("__init__.py").exists()
    } - output_paths

    for init_file in init_files:
        response.file.append(CodeGeneratorResponseFile(name=str(init_file)))

    # Progress report goes to stderr: stdout is reserved for the serialized
    # CodeGeneratorResponse consumed by protoc.
    for output_package_name in sorted(output_paths.union(init_files)):
        print(f"Writing {output_package_name}", file=sys.stderr)

    return response
179
+
180
+
181
def _make_one_of_field_compiler(
    output_package: OutputTemplate,
    source_file: "FileDescriptorProto",
    parent: MessageCompiler,
    proto_obj: "FieldDescriptorProto",
    path: List[int],
) -> FieldCompiler:
    """Build the specialized field compiler used for oneof member fields."""
    kwargs = dict(
        source_file=source_file,
        parent=parent,
        proto_obj=proto_obj,
        path=path,
        typing_compiler=output_package.typing_compiler,
    )
    return OneOfFieldCompiler(**kwargs)
195
+
196
+
197
def read_protobuf_type(
    item: Union[DescriptorProto, EnumDescriptorProto],
    path: List[int],
    source_file: "FileDescriptorProto",
    output_package: OutputTemplate,
) -> None:
    """Compile a single message or enum descriptor into *output_package*.

    Messages produce a MessageCompiler plus one field compiler per field
    (map fields and oneof members get specialized compilers); enums produce
    an EnumDefinitionCompiler.  Synthetic map-entry messages are skipped
    because maps are represented as plain dicts.

    The ``item`` annotation previously claimed ``DescriptorProto`` even
    though enum descriptors are dispatched on below; it is now the accurate
    union of both descriptor kinds (backward compatible for all callers).
    """
    if isinstance(item, DescriptorProto):
        if item.options and item.options.map_entry:
            # Skip generated map entry messages since we just use dicts
            return
        # Process Message
        message_data = MessageCompiler(
            source_file=source_file,
            parent=output_package,
            proto_obj=item,
            path=path,
            typing_compiler=output_package.typing_compiler,
        )
        for index, field in enumerate(item.field):
            if is_map(field, item):
                # Map fields use a dedicated compiler that renders a dict.
                MapEntryCompiler(
                    source_file=source_file,
                    parent=message_data,
                    proto_obj=field,
                    path=path + [2, index],
                    typing_compiler=output_package.typing_compiler,
                )
            elif is_oneof(field):
                _make_one_of_field_compiler(output_package, source_file, message_data, field, path + [2, index])
            else:
                FieldCompiler(
                    source_file=source_file,
                    parent=message_data,
                    proto_obj=field,
                    path=path + [2, index],
                    typing_compiler=output_package.typing_compiler,
                )
    elif isinstance(item, EnumDescriptorProto):
        # Enum
        EnumDefinitionCompiler(
            source_file=source_file,
            parent=output_package,
            proto_obj=item,
            path=path,
            typing_compiler=output_package.typing_compiler,
        )
243
+
244
+
245
def read_protobuf_service(
    source_file: FileDescriptorProto,
    service: ServiceDescriptorProto,
    index: int,
    output_package: OutputTemplate,
) -> None:
    """Compile *service* (the *index*-th service of *source_file*) and all of
    its methods into *output_package*."""
    # Services are field 6 of FileDescriptorProto; methods are field 2 of
    # ServiceDescriptorProto — hence the source-code-info paths below.
    service_compiler = ServiceCompiler(
        source_file=source_file,
        parent=output_package,
        proto_obj=service,
        path=[6, index],
    )
    for method_index, method in enumerate(service.method):
        ServiceMethodCompiler(
            source_file=source_file,
            parent=service_compiler,
            proto_obj=method,
            path=[6, index, 2, method_index],
        )
@@ -0,0 +1,2 @@
1
@REM Resolve the directory containing this script (%~dp0) so the plugin can
@REM be launched from any working directory, then forward all arguments on.
@SET plugin_dir=%~dp0
@python -m %plugin_dir% %*
@@ -0,0 +1,167 @@
1
+ import abc
2
+ from collections import defaultdict
3
+ from dataclasses import (
4
+ dataclass,
5
+ field,
6
+ )
7
+ from typing import (
8
+ Dict,
9
+ Iterator,
10
+ Optional,
11
+ Set,
12
+ )
13
+
14
+
15
class TypingCompiler(metaclass=abc.ABCMeta):
    """Abstract strategy that renders typing constructs as source-code strings.

    Concrete subclasses decide how generated code spells ``Optional``,
    ``List`` etc. and keep track of whatever imports that spelling requires.
    """

    @abc.abstractmethod
    def optional(self, type_: str) -> str:
        raise NotImplementedError

    @abc.abstractmethod
    def list(self, type_: str) -> str:
        raise NotImplementedError

    @abc.abstractmethod
    def dict(self, key: str, value: str) -> str:
        raise NotImplementedError

    @abc.abstractmethod
    def union(self, *types: str) -> str:
        raise NotImplementedError

    @abc.abstractmethod
    def iterable(self, type_: str) -> str:
        raise NotImplementedError

    @abc.abstractmethod
    def async_iterable(self, type_: str) -> str:
        raise NotImplementedError

    @abc.abstractmethod
    def async_iterator(self, type_: str) -> str:
        raise NotImplementedError

    @abc.abstractmethod
    def imports(self) -> Dict[str, Optional[Set[str]]]:
        """Map each module name to either ``None`` (plain ``import module``)
        or the set of names to import from that module."""
        raise NotImplementedError

    def import_lines(self) -> Iterator:
        """Yield ready-to-emit import statements for everything recorded."""
        for module, names in self.imports().items():
            if names is None:
                yield f"import {module}"
                continue
            yield f"from {module} import ("
            yield from (f"    {name}," for name in sorted(names))
            yield ")"
62
+
63
+
64
@dataclass
class DirectImportTypingCompiler(TypingCompiler):
    """Renders bare typing names (``Optional[...]``) and collects the
    matching ``from typing import ...`` statements as it goes."""

    # Module name -> set of names used from it (only "typing" in practice).
    _imports: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set))

    def _typing(self, name: str) -> str:
        # Remember that *name* must be imported from ``typing``.
        self._imports["typing"].add(name)
        return name

    def optional(self, type_: str) -> str:
        return f"{self._typing('Optional')}[{type_}]"

    def list(self, type_: str) -> str:
        return f"{self._typing('List')}[{type_}]"

    def dict(self, key: str, value: str) -> str:
        return f"{self._typing('Dict')}[{key}, {value}]"

    def union(self, *types: str) -> str:
        return f"{self._typing('Union')}[{', '.join(types)}]"

    def iterable(self, type_: str) -> str:
        return f"{self._typing('Iterable')}[{type_}]"

    def async_iterable(self, type_: str) -> str:
        return f"{self._typing('AsyncIterable')}[{type_}]"

    def async_iterator(self, type_: str) -> str:
        return f"{self._typing('AsyncIterator')}[{type_}]"

    def imports(self) -> Dict[str, Optional[Set[str]]]:
        # Collapse empty sets to ``None`` (plain import), per the interface.
        return {module: names or None for module, names in self._imports.items()}
98
+
99
+
100
@dataclass
class TypingImportTypingCompiler(TypingCompiler):
    """Renders fully qualified names (``typing.Optional[...]``); a single
    ``import typing`` is reported once anything has been rendered."""

    # Set to True as soon as any typing construct is rendered.
    _imported: bool = False

    def _qualified(self, name: str) -> str:
        # Any rendered construct means ``import typing`` is required.
        self._imported = True
        return f"typing.{name}"

    def optional(self, type_: str) -> str:
        return f"{self._qualified('Optional')}[{type_}]"

    def list(self, type_: str) -> str:
        return f"{self._qualified('List')}[{type_}]"

    def dict(self, key: str, value: str) -> str:
        return f"{self._qualified('Dict')}[{key}, {value}]"

    def union(self, *types: str) -> str:
        return f"{self._qualified('Union')}[{', '.join(types)}]"

    def iterable(self, type_: str) -> str:
        return f"{self._qualified('Iterable')}[{type_}]"

    def async_iterable(self, type_: str) -> str:
        return f"{self._qualified('AsyncIterable')}[{type_}]"

    def async_iterator(self, type_: str) -> str:
        return f"{self._qualified('AsyncIterator')}[{type_}]"

    def imports(self) -> Dict[str, Optional[Set[str]]]:
        return {"typing": None} if self._imported else {}
136
+
137
+
138
@dataclass
class NoTyping310TypingCompiler(TypingCompiler):
    """Renders PEP 604/585 syntax (``X | None``, ``list[X]``); only the
    ``collections.abc`` iterator types require an import."""

    # Module name -> set of names used from it ("collections.abc" only).
    _imports: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set))

    def _abc(self, name: str) -> str:
        # Remember that *name* must be imported from ``collections.abc``.
        self._imports["collections.abc"].add(name)
        return name

    def optional(self, type_: str) -> str:
        return f"{type_} | None"

    def list(self, type_: str) -> str:
        return f"list[{type_}]"

    def dict(self, key: str, value: str) -> str:
        return f"dict[{key}, {value}]"

    def union(self, *types: str) -> str:
        return " | ".join(types)

    def iterable(self, type_: str) -> str:
        return f"{self._abc('Iterable')}[{type_}]"

    def async_iterable(self, type_: str) -> str:
        return f"{self._abc('AsyncIterable')}[{type_}]"

    def async_iterator(self, type_: str) -> str:
        return f"{self._abc('AsyncIterator')}[{type_}]"

    def imports(self) -> Dict[str, Optional[Set[str]]]:
        # Collapse empty sets to ``None`` (plain import), per the interface.
        return {module: names or None for module, names in self._imports.items()}
File without changes
@@ -0,0 +1,50 @@
1
{# All the imports needed for this file. The useless imports will be removed by Ruff. #}

# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: {{ ', '.join(output_file.input_filenames) }}
# plugin: python-betterproto
# This file has been @generated

{# Explicit public API: every generated enum, message and service stub/base. #}
__all__ = (
{% for _, enum in output_file.enums|dictsort(by="key") %}
    "{{ enum.py_name }}",
{%- endfor -%}
{% for _, message in output_file.messages|dictsort(by="key") %}
    "{{ message.py_name }}",
{%- endfor -%}
{% for _, service in output_file.services|dictsort(by="key") %}
    "{{ service.py_name }}Stub",
    "{{ service.py_name }}Base",
{%- endfor -%}
)

import builtins
import datetime
import warnings

{% if output_file.pydantic_dataclasses %}
from pydantic.dataclasses import dataclass
from pydantic import model_validator
{%- else -%}
from dataclasses import dataclass
{% endif %}

{# Import lines required by the typing compiler selected via plugin options. #}
{% set typing_imports = output_file.typing_compiler.imports() %}
{% if typing_imports %}
{% for line in output_file.typing_compiler.import_lines() %}
{{ line }}
{% endfor %}
{% endif %}

import betterproto
{% if output_file.services %}
from betterproto.grpc.grpclib_server import ServiceBase
import grpclib
{% endif %}

{# gRPC helper types are only needed for type checking, never at runtime. #}
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    import grpclib.server
    from betterproto.grpc.grpclib_client import MetadataLike
    from grpclib.metadata import Deadline