structurize 2.16.2__py3-none-any.whl → 2.16.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- avrotize/__init__.py +63 -63
- avrotize/__main__.py +5 -5
- avrotize/_version.py +34 -34
- avrotize/asn1toavro.py +160 -160
- avrotize/avrotize.py +152 -152
- avrotize/avrotocpp.py +483 -483
- avrotize/avrotocsharp.py +992 -992
- avrotize/avrotocsv.py +121 -121
- avrotize/avrotodatapackage.py +173 -173
- avrotize/avrotodb.py +1383 -1383
- avrotize/avrotogo.py +476 -476
- avrotize/avrotographql.py +197 -197
- avrotize/avrotoiceberg.py +210 -210
- avrotize/avrotojava.py +1023 -1023
- avrotize/avrotojs.py +250 -250
- avrotize/avrotojsons.py +481 -481
- avrotize/avrotojstruct.py +345 -345
- avrotize/avrotokusto.py +363 -363
- avrotize/avrotomd.py +137 -137
- avrotize/avrotools.py +168 -168
- avrotize/avrotoparquet.py +208 -208
- avrotize/avrotoproto.py +358 -358
- avrotize/avrotopython.py +622 -622
- avrotize/avrotorust.py +435 -435
- avrotize/avrotots.py +598 -598
- avrotize/avrotoxsd.py +344 -344
- avrotize/commands.json +2493 -2433
- avrotize/common.py +828 -828
- avrotize/constants.py +4 -4
- avrotize/csvtoavro.py +131 -131
- avrotize/datapackagetoavro.py +76 -76
- avrotize/dependency_resolver.py +348 -348
- avrotize/jsonstoavro.py +1698 -1698
- avrotize/jsonstostructure.py +2642 -2642
- avrotize/jstructtoavro.py +878 -878
- avrotize/kstructtoavro.py +93 -93
- avrotize/kustotoavro.py +455 -455
- avrotize/parquettoavro.py +157 -157
- avrotize/proto2parser.py +497 -497
- avrotize/proto3parser.py +402 -402
- avrotize/prototoavro.py +382 -382
- avrotize/structuretocsharp.py +2005 -2005
- avrotize/structuretojsons.py +498 -498
- avrotize/structuretopython.py +772 -772
- avrotize/structuretots.py +653 -0
- avrotize/xsdtoavro.py +413 -413
- structurize-2.16.6.dist-info/METADATA +107 -0
- structurize-2.16.6.dist-info/RECORD +52 -0
- {structurize-2.16.2.dist-info → structurize-2.16.6.dist-info}/licenses/LICENSE +200 -200
- structurize-2.16.2.dist-info/METADATA +0 -805
- structurize-2.16.2.dist-info/RECORD +0 -51
- {structurize-2.16.2.dist-info → structurize-2.16.6.dist-info}/WHEEL +0 -0
- {structurize-2.16.2.dist-info → structurize-2.16.6.dist-info}/entry_points.txt +0 -0
- {structurize-2.16.2.dist-info → structurize-2.16.6.dist-info}/top_level.txt +0 -0
avrotize/avrotots.py
CHANGED
|
@@ -1,598 +1,598 @@
|
|
|
1
|
-
# pylint: disable=missing-module-docstring,missing-class-docstring,missing-function-docstring, line-too-long, too-many-locals, too-many-branches, too-many-statements
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
import os
|
|
5
|
-
from typing import Dict, List, Set, Union
|
|
6
|
-
|
|
7
|
-
from avrotize.common import build_flat_type_dict, fullname, inline_avro_references, is_generic_avro_type, is_type_with_alternate, pascal, process_template, strip_alternate_type
|
|
8
|
-
from numpy import full
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
def is_typescript_reserved_word(word: str) -> bool:
|
|
12
|
-
"""Check if word is a TypeScript reserved word."""
|
|
13
|
-
reserved_words = [
|
|
14
|
-
'break', 'case', 'catch', 'class', 'const', 'continue', 'debugger',
|
|
15
|
-
'default', 'delete', 'do', 'else', 'export', 'extends', 'finally',
|
|
16
|
-
'for', 'function', 'if', 'import', 'in', 'instanceof', 'new', 'return',
|
|
17
|
-
'super', 'switch', 'this', 'throw', 'try', 'typeof', 'var', 'void',
|
|
18
|
-
'while', 'with', 'yield', 'enum', 'string', 'number', 'boolean', 'symbol',
|
|
19
|
-
'type', 'namespace', 'module', 'declare', 'abstract', 'readonly',
|
|
20
|
-
]
|
|
21
|
-
return word in reserved_words
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
class AvroToTypeScript:
|
|
25
|
-
"""Converts Avro schema to TypeScript classes using templates with namespace support."""
|
|
26
|
-
|
|
27
|
-
def __init__(self, base_package: str = '', typed_json_annotation=False, avro_annotation=False) -> None:
|
|
28
|
-
self.base_package = base_package
|
|
29
|
-
self.typed_json_annotation = typed_json_annotation
|
|
30
|
-
self.avro_annotation = avro_annotation
|
|
31
|
-
self.output_dir = os.getcwd()
|
|
32
|
-
self.src_dir = os.path.join(self.output_dir, "src")
|
|
33
|
-
self.generated_types: Dict[str, str] = {}
|
|
34
|
-
self.main_schema = None
|
|
35
|
-
self.type_dict = None
|
|
36
|
-
self.INDENT = ' ' * 4
|
|
37
|
-
|
|
38
|
-
def map_primitive_to_typescript(self, avro_type: str) -> str:
|
|
39
|
-
"""Map Avro primitive type to TypeScript type."""
|
|
40
|
-
mapping = {
|
|
41
|
-
'null': 'null',
|
|
42
|
-
'boolean': 'boolean',
|
|
43
|
-
'int': 'number',
|
|
44
|
-
'long': 'number',
|
|
45
|
-
'float': 'number',
|
|
46
|
-
'double': 'number',
|
|
47
|
-
'bytes': 'string',
|
|
48
|
-
'string': 'string',
|
|
49
|
-
}
|
|
50
|
-
return mapping.get(avro_type, avro_type)
|
|
51
|
-
|
|
52
|
-
def convert_logical_type_to_typescript(self, avro_type: Dict) -> str:
|
|
53
|
-
"""Convert Avro logical type to TypeScript type."""
|
|
54
|
-
if 'logicalType' in avro_type:
|
|
55
|
-
if avro_type['logicalType'] in ['decimal', 'uuid']:
|
|
56
|
-
return 'string'
|
|
57
|
-
if avro_type['logicalType'] in ['date', 'time-millis', 'time-micros', 'timestamp-millis', 'timestamp-micros']:
|
|
58
|
-
return 'Date'
|
|
59
|
-
if avro_type['logicalType'] == 'duration':
|
|
60
|
-
return 'string'
|
|
61
|
-
return 'any'
|
|
62
|
-
|
|
63
|
-
def strip_nullable(self, ts_type: str) -> str:
|
|
64
|
-
"""Strip nullable type from TypeScript type."""
|
|
65
|
-
if ts_type.endswith('?'):
|
|
66
|
-
return ts_type[:-1]
|
|
67
|
-
return ts_type
|
|
68
|
-
|
|
69
|
-
def is_typescript_primitive(self, ts_type: str) -> bool:
|
|
70
|
-
"""Check if TypeScript type is a primitive."""
|
|
71
|
-
ts_type = self.strip_nullable(ts_type)
|
|
72
|
-
return ts_type in ['null', 'boolean', 'number', 'string', 'Date', 'any']
|
|
73
|
-
|
|
74
|
-
def is_enum_type(self, ts_type: str, namespace: str) -> bool:
|
|
75
|
-
"""Check if TypeScript type is an enum."""
|
|
76
|
-
ts_type = self.strip_nullable(ts_type)
|
|
77
|
-
fn_type = fullname(ts_type, namespace)
|
|
78
|
-
return not self.is_typescript_primitive(ts_type) and fn_type in self.generated_types and self.generated_types[fn_type] == 'enum'
|
|
79
|
-
|
|
80
|
-
def safe_name(self, name: str) -> str:
|
|
81
|
-
"""Converts a name to a safe TypeScript name."""
|
|
82
|
-
if is_typescript_reserved_word(name):
|
|
83
|
-
return name + "_"
|
|
84
|
-
return name
|
|
85
|
-
|
|
86
|
-
def convert_avro_type_to_typescript(self, avro_type: Union[str, Dict, List], parent_namespace: str, import_types: Set[str], class_name: str = '', field_name: str = '') -> str:
|
|
87
|
-
"""Convert Avro type to TypeScript type with namespace support."""
|
|
88
|
-
if isinstance(avro_type, str):
|
|
89
|
-
mapped_type = self.map_primitive_to_typescript(avro_type)
|
|
90
|
-
if mapped_type == avro_type and not self.is_typescript_primitive(mapped_type):
|
|
91
|
-
full_name = self.concat_namespace(self.base_package,fullname(avro_type, parent_namespace))
|
|
92
|
-
import_types.add(full_name)
|
|
93
|
-
return pascal(avro_type.split('.')[-1])
|
|
94
|
-
return mapped_type
|
|
95
|
-
elif isinstance(avro_type, list):
|
|
96
|
-
if is_generic_avro_type(avro_type):
|
|
97
|
-
return '{ [key: string]: any }'
|
|
98
|
-
if 'null' in avro_type:
|
|
99
|
-
if len(avro_type) == 2:
|
|
100
|
-
return f'{self.convert_avro_type_to_typescript([t for t in avro_type if t != "null"][0], parent_namespace, import_types, class_name, field_name)}?'
|
|
101
|
-
return f'{self.generate_embedded_union(class_name, field_name, avro_type, parent_namespace, import_types)}?'
|
|
102
|
-
return self.generate_embedded_union(class_name, field_name, avro_type, parent_namespace, import_types)
|
|
103
|
-
elif isinstance(avro_type, dict):
|
|
104
|
-
if avro_type['type'] == 'record':
|
|
105
|
-
class_ref = self.generate_class(avro_type, parent_namespace, write_file=True)
|
|
106
|
-
import_types.add(class_ref)
|
|
107
|
-
return pascal(class_ref.split('.')[-1])
|
|
108
|
-
elif avro_type['type'] == 'enum':
|
|
109
|
-
enum_ref = self.generate_enum(avro_type, parent_namespace, write_file=True)
|
|
110
|
-
import_types.add(enum_ref)
|
|
111
|
-
return pascal(enum_ref.split('.')[-1])
|
|
112
|
-
elif avro_type['type'] == 'array':
|
|
113
|
-
return f'{self.convert_avro_type_to_typescript(avro_type["items"], parent_namespace, import_types, class_name, field_name)}[]'
|
|
114
|
-
elif avro_type['type'] == 'map':
|
|
115
|
-
return f'{{ [key: string]: {self.convert_avro_type_to_typescript(avro_type["values"], parent_namespace, import_types, class_name, field_name)} }}'
|
|
116
|
-
elif 'logicalType' in avro_type:
|
|
117
|
-
return self.convert_logical_type_to_typescript(avro_type)
|
|
118
|
-
return self.convert_avro_type_to_typescript(avro_type['type'], parent_namespace, import_types, class_name, field_name)
|
|
119
|
-
return 'any'
|
|
120
|
-
|
|
121
|
-
def get_qualified_name(self, namespace: str, name: str) -> str:
|
|
122
|
-
"""Concatenates namespace and name with a dot separator."""
|
|
123
|
-
return f"{namespace}.{name}" if namespace != '' else name
|
|
124
|
-
|
|
125
|
-
def concat_namespace(self, namespace: str, name: str) -> str:
|
|
126
|
-
"""Concatenates namespace and name with a dot separator."""
|
|
127
|
-
if namespace and name:
|
|
128
|
-
return f"{namespace}.{name}"
|
|
129
|
-
return namespace or name
|
|
130
|
-
|
|
131
|
-
def generate_class_or_enum(self, avro_schema: Dict, parent_namespace: str, write_file: bool = True) -> str:
|
|
132
|
-
"""Generates a Class or Enum."""
|
|
133
|
-
if avro_schema['type'] == 'record':
|
|
134
|
-
return self.generate_class(avro_schema, parent_namespace, write_file)
|
|
135
|
-
elif avro_schema['type'] == 'enum':
|
|
136
|
-
return self.generate_enum(avro_schema, parent_namespace, write_file)
|
|
137
|
-
return ''
|
|
138
|
-
|
|
139
|
-
def generate_class(self, avro_schema: Dict, parent_namespace: str, write_file: bool = True) -> str:
|
|
140
|
-
"""Generate TypeScript class from Avro record using templates with namespace support."""
|
|
141
|
-
import_types: Set[str] = set()
|
|
142
|
-
class_name = pascal(avro_schema['name'])
|
|
143
|
-
namespace = self.concat_namespace(self.base_package, avro_schema.get('namespace', parent_namespace))
|
|
144
|
-
ts_qualified_name = self.get_qualified_name(namespace, class_name)
|
|
145
|
-
if ts_qualified_name in self.generated_types:
|
|
146
|
-
return ts_qualified_name
|
|
147
|
-
|
|
148
|
-
fields = [{
|
|
149
|
-
'definition': self.generate_field(field, avro_schema.get('namespace', parent_namespace), import_types, class_name),
|
|
150
|
-
'docstring': field.get('doc', '')
|
|
151
|
-
} for field in avro_schema.get('fields', [])]
|
|
152
|
-
|
|
153
|
-
fields = [{
|
|
154
|
-
'name': self.safe_name(field['definition']['name']),
|
|
155
|
-
'original_name': field['definition']['name'],
|
|
156
|
-
'type': field['definition']['type'],
|
|
157
|
-
'type_no_null': self.strip_nullable(field['definition']['type']),
|
|
158
|
-
'is_primitive': field['definition']['is_primitive'],
|
|
159
|
-
'is_enum': field['definition']['is_enum'],
|
|
160
|
-
'is_array': field['definition']['is_array'],
|
|
161
|
-
'is_union': field['definition']['is_union'],
|
|
162
|
-
'docstring': field['docstring'],
|
|
163
|
-
} for field in fields]
|
|
164
|
-
|
|
165
|
-
imports_with_paths: Dict[str, str] = {}
|
|
166
|
-
for import_type in import_types:
|
|
167
|
-
if import_type == ts_qualified_name:
|
|
168
|
-
continue
|
|
169
|
-
import_is_enum = import_type in self.generated_types and self.generated_types[import_type] == 'enum'
|
|
170
|
-
import_type_parts = import_type.split('.')
|
|
171
|
-
import_type_name = pascal(import_type_parts[-1])
|
|
172
|
-
import_path = '/'.join(import_type_parts)
|
|
173
|
-
current_path = '/'.join(namespace.split('.'))
|
|
174
|
-
relative_import_path = os.path.relpath(import_path, current_path).replace(os.sep, '/')
|
|
175
|
-
if not relative_import_path.startswith('.'):
|
|
176
|
-
relative_import_path = f'./{relative_import_path}'
|
|
177
|
-
if import_is_enum:
|
|
178
|
-
import_type_name_and_util = f"{import_type_name}, {import_type_name}Utils"
|
|
179
|
-
imports_with_paths[import_type_name_and_util] = relative_import_path + '.js'
|
|
180
|
-
else:
|
|
181
|
-
imports_with_paths[import_type_name] = relative_import_path + '.js'
|
|
182
|
-
|
|
183
|
-
# Inline the schema
|
|
184
|
-
local_avro_schema = inline_avro_references(avro_schema.copy(), self.type_dict, parent_namespace)
|
|
185
|
-
avro_schema_json = json.dumps(local_avro_schema)
|
|
186
|
-
|
|
187
|
-
class_definition = process_template(
|
|
188
|
-
"avrotots/class_core.ts.jinja",
|
|
189
|
-
namespace=namespace,
|
|
190
|
-
class_name=class_name,
|
|
191
|
-
docstring=avro_schema.get('doc', '').strip() if 'doc' in avro_schema else f'A {class_name} record.',
|
|
192
|
-
fields=fields,
|
|
193
|
-
imports=imports_with_paths,
|
|
194
|
-
base_package=self.base_package,
|
|
195
|
-
avro_annotation=self.avro_annotation,
|
|
196
|
-
typed_json_annotation=self.typed_json_annotation,
|
|
197
|
-
avro_schema_json=avro_schema_json,
|
|
198
|
-
get_is_json_match_clause=self.get_is_json_match_clause,
|
|
199
|
-
)
|
|
200
|
-
|
|
201
|
-
if write_file:
|
|
202
|
-
self.write_to_file(namespace, class_name, class_definition)
|
|
203
|
-
self.generated_types[ts_qualified_name] = 'class'
|
|
204
|
-
return ts_qualified_name
|
|
205
|
-
|
|
206
|
-
def generate_enum(self, avro_schema: Dict, parent_namespace: str, write_file: bool = True) -> str:
|
|
207
|
-
"""Generate TypeScript enum from Avro enum using templates with namespace support."""
|
|
208
|
-
enum_name = pascal(avro_schema['name'])
|
|
209
|
-
namespace = self.concat_namespace(self.base_package, avro_schema.get('namespace', parent_namespace))
|
|
210
|
-
ts_qualified_name = self.get_qualified_name(namespace, enum_name)
|
|
211
|
-
if ts_qualified_name in self.generated_types:
|
|
212
|
-
return ts_qualified_name
|
|
213
|
-
|
|
214
|
-
symbols = avro_schema.get('symbols', [])
|
|
215
|
-
enum_definition = process_template(
|
|
216
|
-
"avrotots/enum_core.ts.jinja",
|
|
217
|
-
namespace=namespace,
|
|
218
|
-
enum_name=enum_name,
|
|
219
|
-
docstring=avro_schema.get('doc', '').strip() if 'doc' in avro_schema else f'A {enum_name} enum.',
|
|
220
|
-
symbols=symbols,
|
|
221
|
-
)
|
|
222
|
-
|
|
223
|
-
if write_file:
|
|
224
|
-
self.write_to_file(namespace, enum_name, enum_definition)
|
|
225
|
-
self.generated_types[ts_qualified_name] = 'enum'
|
|
226
|
-
return ts_qualified_name
|
|
227
|
-
|
|
228
|
-
def generate_field(self, field: Dict, parent_namespace: str, import_types: Set[str], class_name: str) -> Dict:
|
|
229
|
-
"""Generates a field for a TypeScript class."""
|
|
230
|
-
import_types_this = set()
|
|
231
|
-
field_type = self.convert_avro_type_to_typescript(
|
|
232
|
-
field['type'], parent_namespace, import_types_this, class_name, field['name'])
|
|
233
|
-
import_types.update(import_types_this)
|
|
234
|
-
field_name = field['name']
|
|
235
|
-
import_name = import_types_this.pop() if len(import_types_this) > 0 else ''
|
|
236
|
-
return {
|
|
237
|
-
'name': field_name,
|
|
238
|
-
'type': field_type,
|
|
239
|
-
'is_primitive': self.is_typescript_primitive(field_type.replace('[]', '')),
|
|
240
|
-
'is_array': field_type.endswith('[]'),
|
|
241
|
-
'is_union': self.generated_types.get(import_name, '') == 'union',
|
|
242
|
-
'is_enum': self.generated_types.get(import_name, '') == 'enum',
|
|
243
|
-
}
|
|
244
|
-
|
|
245
|
-
def get_is_json_match_clause(self, field_name: str, field_type: str, field_is_enum: bool) -> str:
|
|
246
|
-
"""Generates the isJsonMatch clause for a field."""
|
|
247
|
-
field_name_js = field_name.rstrip('_')
|
|
248
|
-
is_optional = field_type.endswith('?')
|
|
249
|
-
field_type = self.strip_nullable(field_type)
|
|
250
|
-
|
|
251
|
-
if '|' in field_type:
|
|
252
|
-
union_types = [t.strip() for t in field_type.split('|')]
|
|
253
|
-
union_clauses = [self.get_is_json_match_clause(field_name, union_type, False) for union_type in union_types]
|
|
254
|
-
clause = f"({' || '.join(union_clauses)})"
|
|
255
|
-
return clause
|
|
256
|
-
|
|
257
|
-
clause = f"(element.hasOwnProperty('{field_name_js}') && "
|
|
258
|
-
|
|
259
|
-
if field_is_enum:
|
|
260
|
-
clause += f"(typeof element['{field_name_js}'] === 'string' || typeof element['{field_name_js}'] === 'number')"
|
|
261
|
-
else:
|
|
262
|
-
if field_type == 'string':
|
|
263
|
-
clause += f"typeof element['{field_name_js}'] === 'string'"
|
|
264
|
-
elif field_type == 'number':
|
|
265
|
-
clause += f"typeof element['{field_name_js}'] === 'number'"
|
|
266
|
-
elif field_type == 'boolean':
|
|
267
|
-
clause += f"typeof element['{field_name_js}'] === 'boolean'"
|
|
268
|
-
elif field_type == 'Date':
|
|
269
|
-
clause += f"typeof element['{field_name_js}'] === 'string' && !isNaN(Date.parse(element['{field_name_js}']))"
|
|
270
|
-
elif field_type.startswith('{ [key: string]:'):
|
|
271
|
-
clause += f"typeof element['{field_name_js}'] === 'object' && !Array.isArray(element['{field_name_js}'])"
|
|
272
|
-
elif field_type.endswith('[]'):
|
|
273
|
-
clause += f"Array.isArray(element['{field_name_js}'])"
|
|
274
|
-
else:
|
|
275
|
-
clause += f"{field_type}.isJsonMatch(element['{field_name_js}'])"
|
|
276
|
-
|
|
277
|
-
if is_optional:
|
|
278
|
-
clause += f") || element['{field_name_js}'] === null"
|
|
279
|
-
else:
|
|
280
|
-
clause += ")"
|
|
281
|
-
|
|
282
|
-
return clause
|
|
283
|
-
|
|
284
|
-
def generate_embedded_union(self, class_name: str, field_name: str, avro_type: List, parent_namespace: str, parent_import_types: Set[str], write_file: bool = True) -> str:
|
|
285
|
-
"""Generate embedded Union class for a field with namespace support."""
|
|
286
|
-
union_class_name = pascal(field_name) + 'Union' if field_name else pascal(class_name) + 'Union'
|
|
287
|
-
namespace = self.concat_namespace(self.base_package, parent_namespace)
|
|
288
|
-
import_types:Set[str] = set()
|
|
289
|
-
union_types = [self.convert_avro_type_to_typescript( t, parent_namespace, import_types) for t in avro_type if t != 'null']
|
|
290
|
-
if not import_types:
|
|
291
|
-
return '|'.join(union_types)
|
|
292
|
-
class_definition = ''
|
|
293
|
-
for import_type in import_types:
|
|
294
|
-
if import_type == union_class_name:
|
|
295
|
-
continue # Avoid importing itself
|
|
296
|
-
import_type_parts = import_type.split('.')
|
|
297
|
-
import_type_name = pascal(import_type_parts[-1])
|
|
298
|
-
import_path = '/'.join(import_type_parts)
|
|
299
|
-
current_path = '/'.join(namespace.split('.'))
|
|
300
|
-
relative_import_path = os.path.relpath(import_path, current_path).replace(os.sep, '/')
|
|
301
|
-
if not relative_import_path.startswith('.'):
|
|
302
|
-
relative_import_path = f'./{relative_import_path}'
|
|
303
|
-
class_definition += f"import {{ {import_type_name} }} from '{relative_import_path}.js';\n"
|
|
304
|
-
|
|
305
|
-
if self.typed_json_annotation:
|
|
306
|
-
class_definition += "import 'reflect-metadata';\n"
|
|
307
|
-
class_definition += "import { CustomDeserializerParams, CustomSerializerParams } from 'typedjson/lib/types/metadata.js';\n"
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
class_definition += f"\nexport class {union_class_name} {{\n"
|
|
311
|
-
|
|
312
|
-
class_definition += f"{self.INDENT}private value: any;\n\n"
|
|
313
|
-
|
|
314
|
-
# Constructor
|
|
315
|
-
class_definition += f"{self.INDENT}constructor(value: { ' | '.join(union_types) }) {{\n"
|
|
316
|
-
class_definition += f"{self.INDENT*2}this.value = value;\n"
|
|
317
|
-
class_definition += f"{self.INDENT}}}\n\n"
|
|
318
|
-
|
|
319
|
-
# Method to check which type is set
|
|
320
|
-
for union_type in union_types:
|
|
321
|
-
type_check_method = f"{self.INDENT}public is{pascal(union_type)}(): boolean {{\n"
|
|
322
|
-
if union_type.strip() in ['string', 'number', 'boolean']:
|
|
323
|
-
type_check_method += f"{self.INDENT*2}return typeof this.value === '{union_type.strip()}';\n"
|
|
324
|
-
elif union_type.strip() == 'Date':
|
|
325
|
-
type_check_method += f"{self.INDENT*2}return this.value instanceof Date;\n"
|
|
326
|
-
else:
|
|
327
|
-
type_check_method += f"{self.INDENT*2}return this.value instanceof {union_type.strip()};\n"
|
|
328
|
-
type_check_method += f"{self.INDENT}}}\n\n"
|
|
329
|
-
class_definition += type_check_method
|
|
330
|
-
|
|
331
|
-
# Method to return the current value
|
|
332
|
-
class_definition += f"{self.INDENT}public toJSON(): string {{\n"
|
|
333
|
-
class_definition += f"{self.INDENT*2}let rawJson : Uint8Array = this.value.toByteArray('application/json');\n"
|
|
334
|
-
class_definition += f"{self.INDENT*2}return new TextDecoder().decode(rawJson);\n"
|
|
335
|
-
class_definition += f"{self.INDENT}}}\n\n"
|
|
336
|
-
|
|
337
|
-
# Method to check if JSON matches any of the union types
|
|
338
|
-
class_definition += f"{self.INDENT}public static isJsonMatch(element: any): boolean {{\n"
|
|
339
|
-
match_clauses = []
|
|
340
|
-
for union_type in union_types:
|
|
341
|
-
match_clauses.append(f"({self.get_is_json_match_clause('value', union_type, False)})")
|
|
342
|
-
class_definition += f"{self.INDENT*2}return {' || '.join(match_clauses)};\n"
|
|
343
|
-
class_definition += f"{self.INDENT}}}\n\n"
|
|
344
|
-
|
|
345
|
-
# Method to deserialize from JSON
|
|
346
|
-
class_definition += f"{self.INDENT}public static fromData(element: any, contentTypeString: string): {union_class_name} {{\n"
|
|
347
|
-
class_definition += f"{self.INDENT*2}const unionTypes = [{', '.join([t.strip() for t in union_types if not self.is_typescript_primitive(t.strip())])}];\n"
|
|
348
|
-
class_definition += f"{self.INDENT*2}for (const type of unionTypes) {{\n"
|
|
349
|
-
class_definition += f"{self.INDENT*3}if (type.isJsonMatch(element)) {{\n"
|
|
350
|
-
class_definition += f"{self.INDENT*4}return new {union_class_name}(type.fromData(element, contentTypeString));\n"
|
|
351
|
-
class_definition += f"{self.INDENT*3}}}\n"
|
|
352
|
-
class_definition += f"{self.INDENT*2}}}\n"
|
|
353
|
-
class_definition += f"{self.INDENT*2}throw new Error('No matching type for union');\n"
|
|
354
|
-
class_definition += f"{self.INDENT}}}\n"
|
|
355
|
-
|
|
356
|
-
# Method to deserialize from JSON with custom deserializer params
|
|
357
|
-
class_definition += f"{self.INDENT}public static fromJSON(json: any, params: CustomDeserializerParams): {union_class_name} {{\n"
|
|
358
|
-
class_definition += f"{self.INDENT*2}try {{\n"
|
|
359
|
-
class_definition += f"{self.INDENT*3}return {union_class_name}.fromData(json, 'application/json');\n"
|
|
360
|
-
class_definition += f"{self.INDENT*2}}} catch (error) {{\n"
|
|
361
|
-
class_definition += f"{self.INDENT*3}return params.fallback(json, {union_class_name});\n"
|
|
362
|
-
class_definition += f"{self.INDENT*2}}}\n"
|
|
363
|
-
class_definition += f"{self.INDENT}}}\n\n"
|
|
364
|
-
|
|
365
|
-
# Method to serialize to JSON with custom serializer params
|
|
366
|
-
class_definition += f"{self.INDENT}public static toJSON(obj: any, params: CustomSerializerParams): any {{\n"
|
|
367
|
-
class_definition += f"{self.INDENT*2}try {{\n"
|
|
368
|
-
class_definition += f"{self.INDENT*3}const val = new {union_class_name}(obj);\n"
|
|
369
|
-
class_definition += f"{self.INDENT*3}return val.toJSON();\n"
|
|
370
|
-
class_definition += f"{self.INDENT*2}}} catch (error) {{\n"
|
|
371
|
-
class_definition += f"{self.INDENT*3}return params.fallback(this, {union_class_name});\n"
|
|
372
|
-
class_definition += f"{self.INDENT*2}}}\n"
|
|
373
|
-
class_definition += f"{self.INDENT}}}\n\n"
|
|
374
|
-
|
|
375
|
-
class_definition += "}\n"
|
|
376
|
-
|
|
377
|
-
if write_file:
|
|
378
|
-
self.write_to_file(namespace, union_class_name, class_definition)
|
|
379
|
-
|
|
380
|
-
parent_import_types.add(f"{namespace}.{union_class_name}")
|
|
381
|
-
self.generated_types[f"{namespace}.{union_class_name}"] = 'union'
|
|
382
|
-
return f"{union_class_name}"
|
|
383
|
-
|
|
384
|
-
def write_to_file(self, namespace: str, name: str, content: str):
|
|
385
|
-
"""Write TypeScript class to file in the correct namespace directory."""
|
|
386
|
-
directory_path = os.path.join(self.src_dir, *namespace.split('.'))
|
|
387
|
-
if not os.path.exists(directory_path):
|
|
388
|
-
os.makedirs(directory_path, exist_ok=True)
|
|
389
|
-
|
|
390
|
-
file_path = os.path.join(directory_path, f"{name}.ts")
|
|
391
|
-
with open(file_path, 'w', encoding='utf-8') as file:
|
|
392
|
-
file.write(content)
|
|
393
|
-
|
|
394
|
-
def generate_index_file(self):
|
|
395
|
-
"""Generate a root index.ts file that exports all types with aliases scoped to their modules."""
|
|
396
|
-
exports = []
|
|
397
|
-
|
|
398
|
-
for class_name in self.generated_types:
|
|
399
|
-
# Split the class_name into parts
|
|
400
|
-
parts = class_name.split('.')
|
|
401
|
-
file_name = parts[-1] # The actual type name (e.g., 'FareRules')
|
|
402
|
-
module_path = parts[:-1] # The module path excluding the type (e.g., ['gtfs_dash_data', 'GeneralTransitFeedStatic'])
|
|
403
|
-
|
|
404
|
-
# Construct the relative path to the .js file
|
|
405
|
-
# Exclude 'gtfs_dash_data' from the module path for the file path
|
|
406
|
-
file_relative_path = os.path.join(*(module_path[0:] + [f"{file_name}.js"])).replace(os.sep, '/')
|
|
407
|
-
if not file_relative_path.startswith('.'):
|
|
408
|
-
file_relative_path = './' + file_relative_path
|
|
409
|
-
|
|
410
|
-
# Construct the alias name by joining module parts with underscores
|
|
411
|
-
# Exclude 'gtfs_dash_data' for brevity
|
|
412
|
-
alias_parts = [pascal(part) for part in parts]
|
|
413
|
-
alias_name = '_'.join(alias_parts)
|
|
414
|
-
|
|
415
|
-
# Generate the export statement with alias
|
|
416
|
-
exports.append(f"export {{ {file_name} as {alias_name} }} from '{file_relative_path}';\n")
|
|
417
|
-
|
|
418
|
-
# Write the root index.ts file
|
|
419
|
-
index_file_path = os.path.join(self.src_dir, 'index.ts')
|
|
420
|
-
with open(index_file_path, 'w', encoding='utf-8') as f:
|
|
421
|
-
f.writelines(exports)
|
|
422
|
-
|
|
423
|
-
def generate_project_files(self, output_dir: str):
|
|
424
|
-
"""Generate project files using templates."""
|
|
425
|
-
tsconfig_content = process_template(
|
|
426
|
-
"avrotots/tsconfig.json.jinja",
|
|
427
|
-
)
|
|
428
|
-
|
|
429
|
-
package_json_content = process_template(
|
|
430
|
-
"avrotots/package.json.jinja",
|
|
431
|
-
package_name=self.base_package,
|
|
432
|
-
)
|
|
433
|
-
|
|
434
|
-
gitignore_content = process_template(
|
|
435
|
-
"avrotots/gitignore.jinja",
|
|
436
|
-
)
|
|
437
|
-
|
|
438
|
-
tsconfig_path = os.path.join(output_dir, 'tsconfig.json')
|
|
439
|
-
package_json_path = os.path.join(output_dir, 'package.json')
|
|
440
|
-
gitignore_path = os.path.join(output_dir, '.gitignore')
|
|
441
|
-
|
|
442
|
-
with open(tsconfig_path, 'w', encoding='utf-8') as file:
|
|
443
|
-
file.write(tsconfig_content)
|
|
444
|
-
|
|
445
|
-
with open(package_json_path, 'w', encoding='utf-8') as file:
|
|
446
|
-
file.write(package_json_content)
|
|
447
|
-
|
|
448
|
-
with open(gitignore_path, 'w', encoding='utf-8') as file:
|
|
449
|
-
file.write(gitignore_content)
|
|
450
|
-
|
|
451
|
-
# Generate TypeScript type definitions for avro-js when using Avro annotations
|
|
452
|
-
if self.avro_annotation:
|
|
453
|
-
self.generate_avro_js_types(output_dir)
|
|
454
|
-
|
|
455
|
-
def generate_avro_js_types(self, output_dir: str):
|
|
456
|
-
"""Generate TypeScript type declaration file for avro-js module."""
|
|
457
|
-
avro_js_types = '''declare module 'avro-js' {
|
|
458
|
-
/**
|
|
459
|
-
* Avro Type representation.
|
|
460
|
-
* Provides methods for encoding, decoding, and validating Avro data.
|
|
461
|
-
*/
|
|
462
|
-
export class Type {
|
|
463
|
-
/**
|
|
464
|
-
* Create a Type instance from an Avro schema.
|
|
465
|
-
* @param schema - Avro schema object or JSON string
|
|
466
|
-
* @returns Type instance
|
|
467
|
-
*/
|
|
468
|
-
static forSchema(schema: any): Type;
|
|
469
|
-
|
|
470
|
-
/**
|
|
471
|
-
* Encode a value to a Buffer.
|
|
472
|
-
* @param obj - Value to encode
|
|
473
|
-
* @returns Encoded Buffer
|
|
474
|
-
*/
|
|
475
|
-
toBuffer(obj: any): Buffer;
|
|
476
|
-
|
|
477
|
-
/**
|
|
478
|
-
* Decode a value from a Buffer.
|
|
479
|
-
* @param buffer - Buffer to decode
|
|
480
|
-
* @returns Decoded value
|
|
481
|
-
*/
|
|
482
|
-
fromBuffer(buffer: Buffer | Uint8Array): any;
|
|
483
|
-
|
|
484
|
-
/**
|
|
485
|
-
* Get string representation of the type or encode a value to JSON string.
|
|
486
|
-
* @param value - Optional value to encode
|
|
487
|
-
* @returns String representation
|
|
488
|
-
*/
|
|
489
|
-
toString(value?: any): string;
|
|
490
|
-
|
|
491
|
-
/**
|
|
492
|
-
* Clone a value using the type's schema.
|
|
493
|
-
* @param value - Value to clone
|
|
494
|
-
* @param options - Clone options
|
|
495
|
-
* @returns Cloned value
|
|
496
|
-
*/
|
|
497
|
-
clone(value: any, options?: any): any;
|
|
498
|
-
|
|
499
|
-
/**
|
|
500
|
-
* Compare two values according to Avro sort order.
|
|
501
|
-
* @param a - First value
|
|
502
|
-
* @param b - Second value
|
|
503
|
-
* @returns -1, 0, or 1
|
|
504
|
-
*/
|
|
505
|
-
compare(a: any, b: any): number;
|
|
506
|
-
|
|
507
|
-
/**
|
|
508
|
-
* Check if a value is valid for this type.
|
|
509
|
-
* @param value - Value to validate
|
|
510
|
-
* @param options - Validation options
|
|
511
|
-
* @returns true if valid
|
|
512
|
-
*/
|
|
513
|
-
isValid(value: any, options?: any): boolean;
|
|
514
|
-
|
|
515
|
-
/**
|
|
516
|
-
* Decode a value from a buffer.
|
|
517
|
-
* @param buffer - Buffer to decode
|
|
518
|
-
* @param resolver - Optional resolver for schema evolution
|
|
519
|
-
* @param noCheck - Skip validation
|
|
520
|
-
* @returns Decoded value
|
|
521
|
-
*/
|
|
522
|
-
decode(buffer: Buffer, resolver?: any, noCheck?: boolean): any;
|
|
523
|
-
|
|
524
|
-
/**
|
|
525
|
-
* Encode a value to a buffer.
|
|
526
|
-
* @param value - Value to encode
|
|
527
|
-
* @param bufferSize - Optional buffer size
|
|
528
|
-
* @returns Encoded buffer
|
|
529
|
-
*/
|
|
530
|
-
encode(value: any, bufferSize?: number): Buffer;
|
|
531
|
-
|
|
532
|
-
/**
|
|
533
|
-
* Create a resolver for schema evolution.
|
|
534
|
-
* @param writerType - Writer's type
|
|
535
|
-
* @returns Resolver
|
|
536
|
-
*/
|
|
537
|
-
createResolver(writerType: Type): any;
|
|
538
|
-
}
|
|
539
|
-
|
|
540
|
-
/**
|
|
541
|
-
* Parse an Avro schema.
|
|
542
|
-
* @param schema - Schema as string or object
|
|
543
|
-
* @param options - Parse options
|
|
544
|
-
* @returns Type instance
|
|
545
|
-
*/
|
|
546
|
-
export function parse(schema: string | any, options?: any): Type;
|
|
547
|
-
}
|
|
548
|
-
'''
|
|
549
|
-
|
|
550
|
-
# Place type definitions in src directory so TypeScript can find them
|
|
551
|
-
src_dir = os.path.join(output_dir, 'src')
|
|
552
|
-
if not os.path.exists(src_dir):
|
|
553
|
-
os.makedirs(src_dir, exist_ok=True)
|
|
554
|
-
|
|
555
|
-
types_file_path = os.path.join(src_dir, 'avro-js.d.ts')
|
|
556
|
-
with open(types_file_path, 'w', encoding='utf-8') as file:
|
|
557
|
-
file.write(avro_js_types)
|
|
558
|
-
|
|
559
|
-
def convert_schema(self, schema: Union[List[Dict], Dict], output_dir: str, write_file: bool = True):
|
|
560
|
-
"""Convert Avro schema to TypeScript classes with namespace support."""
|
|
561
|
-
self.output_dir = output_dir
|
|
562
|
-
self.src_dir = os.path.join(self.output_dir, "src")
|
|
563
|
-
if isinstance(schema, dict):
|
|
564
|
-
schema = [schema]
|
|
565
|
-
self.main_schema = schema
|
|
566
|
-
self.type_dict = build_flat_type_dict(schema)
|
|
567
|
-
for avro_schema in schema:
|
|
568
|
-
if avro_schema['type'] == 'record':
|
|
569
|
-
self.generate_class(avro_schema, '', write_file)
|
|
570
|
-
elif avro_schema['type'] == 'enum':
|
|
571
|
-
self.generate_enum(avro_schema, '', write_file)
|
|
572
|
-
self.generate_index_file()
|
|
573
|
-
self.generate_project_files(output_dir)
|
|
574
|
-
|
|
575
|
-
def convert(self, avro_schema_path: str, output_dir: str):
|
|
576
|
-
"""Convert Avro schema to TypeScript classes."""
|
|
577
|
-
with open(avro_schema_path, 'r', encoding='utf-8') as file:
|
|
578
|
-
schema = json.load(file)
|
|
579
|
-
self.convert_schema(schema, output_dir)
|
|
580
|
-
self.generate_project_files(output_dir)
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
def convert_avro_to_typescript(avro_schema_path, js_dir_path, package_name='', typedjson_annotation=False, avro_annotation=False):
|
|
584
|
-
"""Convert Avro schema to TypeScript classes."""
|
|
585
|
-
if not package_name:
|
|
586
|
-
package_name = os.path.splitext(os.path.basename(avro_schema_path))[0].lower().replace('-', '_')
|
|
587
|
-
|
|
588
|
-
converter = AvroToTypeScript(package_name, typed_json_annotation=typedjson_annotation,
|
|
589
|
-
avro_annotation=avro_annotation)
|
|
590
|
-
converter.convert(avro_schema_path, js_dir_path)
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
def convert_avro_schema_to_typescript(avro_schema, js_dir_path, package_name='', typedjson_annotation=False, avro_annotation=False):
|
|
594
|
-
"""Convert Avro schema to TypeScript classes."""
|
|
595
|
-
converter = AvroToTypeScript(package_name, typed_json_annotation=typedjson_annotation,
|
|
596
|
-
avro_annotation=avro_annotation)
|
|
597
|
-
converter.convert_schema(avro_schema, js_dir_path)
|
|
598
|
-
converter.generate_project_files(js_dir_path)
|
|
1
|
+
# pylint: disable=missing-module-docstring,missing-class-docstring,missing-function-docstring, line-too-long, too-many-locals, too-many-branches, too-many-statements
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
from typing import Dict, List, Set, Union
|
|
6
|
+
|
|
7
|
+
from avrotize.common import build_flat_type_dict, fullname, inline_avro_references, is_generic_avro_type, is_type_with_alternate, pascal, process_template, strip_alternate_type
|
|
8
|
+
from numpy import full
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
# Frozen set gives O(1) membership tests instead of scanning a list on every
# field/name check during generation.
_TS_RESERVED_WORDS = frozenset({
    'break', 'case', 'catch', 'class', 'const', 'continue', 'debugger',
    'default', 'delete', 'do', 'else', 'export', 'extends', 'finally',
    'for', 'function', 'if', 'import', 'in', 'instanceof', 'new', 'return',
    'super', 'switch', 'this', 'throw', 'try', 'typeof', 'var', 'void',
    'while', 'with', 'yield', 'enum', 'string', 'number', 'boolean', 'symbol',
    'type', 'namespace', 'module', 'declare', 'abstract', 'readonly',
})


def is_typescript_reserved_word(word: str) -> bool:
    """Check if word is a TypeScript reserved word.

    The check is case-sensitive, matching TypeScript's keyword rules.
    """
    return word in _TS_RESERVED_WORDS
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class AvroToTypeScript:
|
|
25
|
+
"""Converts Avro schema to TypeScript classes using templates with namespace support."""
|
|
26
|
+
|
|
27
|
+
def __init__(self, base_package: str = '', typed_json_annotation=False, avro_annotation=False) -> None:
|
|
28
|
+
self.base_package = base_package
|
|
29
|
+
self.typed_json_annotation = typed_json_annotation
|
|
30
|
+
self.avro_annotation = avro_annotation
|
|
31
|
+
self.output_dir = os.getcwd()
|
|
32
|
+
self.src_dir = os.path.join(self.output_dir, "src")
|
|
33
|
+
self.generated_types: Dict[str, str] = {}
|
|
34
|
+
self.main_schema = None
|
|
35
|
+
self.type_dict = None
|
|
36
|
+
self.INDENT = ' ' * 4
|
|
37
|
+
|
|
38
|
+
def map_primitive_to_typescript(self, avro_type: str) -> str:
|
|
39
|
+
"""Map Avro primitive type to TypeScript type."""
|
|
40
|
+
mapping = {
|
|
41
|
+
'null': 'null',
|
|
42
|
+
'boolean': 'boolean',
|
|
43
|
+
'int': 'number',
|
|
44
|
+
'long': 'number',
|
|
45
|
+
'float': 'number',
|
|
46
|
+
'double': 'number',
|
|
47
|
+
'bytes': 'string',
|
|
48
|
+
'string': 'string',
|
|
49
|
+
}
|
|
50
|
+
return mapping.get(avro_type, avro_type)
|
|
51
|
+
|
|
52
|
+
def convert_logical_type_to_typescript(self, avro_type: Dict) -> str:
|
|
53
|
+
"""Convert Avro logical type to TypeScript type."""
|
|
54
|
+
if 'logicalType' in avro_type:
|
|
55
|
+
if avro_type['logicalType'] in ['decimal', 'uuid']:
|
|
56
|
+
return 'string'
|
|
57
|
+
if avro_type['logicalType'] in ['date', 'time-millis', 'time-micros', 'timestamp-millis', 'timestamp-micros']:
|
|
58
|
+
return 'Date'
|
|
59
|
+
if avro_type['logicalType'] == 'duration':
|
|
60
|
+
return 'string'
|
|
61
|
+
return 'any'
|
|
62
|
+
|
|
63
|
+
def strip_nullable(self, ts_type: str) -> str:
|
|
64
|
+
"""Strip nullable type from TypeScript type."""
|
|
65
|
+
if ts_type.endswith('?'):
|
|
66
|
+
return ts_type[:-1]
|
|
67
|
+
return ts_type
|
|
68
|
+
|
|
69
|
+
def is_typescript_primitive(self, ts_type: str) -> bool:
|
|
70
|
+
"""Check if TypeScript type is a primitive."""
|
|
71
|
+
ts_type = self.strip_nullable(ts_type)
|
|
72
|
+
return ts_type in ['null', 'boolean', 'number', 'string', 'Date', 'any']
|
|
73
|
+
|
|
74
|
+
def is_enum_type(self, ts_type: str, namespace: str) -> bool:
|
|
75
|
+
"""Check if TypeScript type is an enum."""
|
|
76
|
+
ts_type = self.strip_nullable(ts_type)
|
|
77
|
+
fn_type = fullname(ts_type, namespace)
|
|
78
|
+
return not self.is_typescript_primitive(ts_type) and fn_type in self.generated_types and self.generated_types[fn_type] == 'enum'
|
|
79
|
+
|
|
80
|
+
def safe_name(self, name: str) -> str:
|
|
81
|
+
"""Converts a name to a safe TypeScript name."""
|
|
82
|
+
if is_typescript_reserved_word(name):
|
|
83
|
+
return name + "_"
|
|
84
|
+
return name
|
|
85
|
+
|
|
86
|
+
    def convert_avro_type_to_typescript(self, avro_type: Union[str, Dict, List], parent_namespace: str, import_types: Set[str], class_name: str = '', field_name: str = '') -> str:
        """Convert Avro type to TypeScript type with namespace support.

        Named (non-primitive) types encountered along the way are added to
        import_types as fully qualified dotted names so the caller can emit
        import statements. A trailing '?' marks a nullable type.
        """
        if isinstance(avro_type, str):
            mapped_type = self.map_primitive_to_typescript(avro_type)
            if mapped_type == avro_type and not self.is_typescript_primitive(mapped_type):
                # Unmapped name: a reference to another named schema type.
                full_name = self.concat_namespace(self.base_package,fullname(avro_type, parent_namespace))
                import_types.add(full_name)
                # Only the unqualified, PascalCased leaf name appears in code.
                return pascal(avro_type.split('.')[-1])
            return mapped_type
        elif isinstance(avro_type, list):
            if is_generic_avro_type(avro_type):
                return '{ [key: string]: any }'
            if 'null' in avro_type:
                # ['null', T] is just an optional T; larger unions with null
                # become an optional generated union wrapper class.
                if len(avro_type) == 2:
                    return f'{self.convert_avro_type_to_typescript([t for t in avro_type if t != "null"][0], parent_namespace, import_types, class_name, field_name)}?'
                return f'{self.generate_embedded_union(class_name, field_name, avro_type, parent_namespace, import_types)}?'
            return self.generate_embedded_union(class_name, field_name, avro_type, parent_namespace, import_types)
        elif isinstance(avro_type, dict):
            if avro_type['type'] == 'record':
                # Nested record: generate its class file now and import it.
                class_ref = self.generate_class(avro_type, parent_namespace, write_file=True)
                import_types.add(class_ref)
                return pascal(class_ref.split('.')[-1])
            elif avro_type['type'] == 'enum':
                enum_ref = self.generate_enum(avro_type, parent_namespace, write_file=True)
                import_types.add(enum_ref)
                return pascal(enum_ref.split('.')[-1])
            elif avro_type['type'] == 'array':
                return f'{self.convert_avro_type_to_typescript(avro_type["items"], parent_namespace, import_types, class_name, field_name)}[]'
            elif avro_type['type'] == 'map':
                return f'{{ [key: string]: {self.convert_avro_type_to_typescript(avro_type["values"], parent_namespace, import_types, class_name, field_name)} }}'
            elif 'logicalType' in avro_type:
                return self.convert_logical_type_to_typescript(avro_type)
            # Wrapped type such as {'type': 'string'}: unwrap and recurse.
            return self.convert_avro_type_to_typescript(avro_type['type'], parent_namespace, import_types, class_name, field_name)
        return 'any'
|
|
120
|
+
|
|
121
|
+
def get_qualified_name(self, namespace: str, name: str) -> str:
|
|
122
|
+
"""Concatenates namespace and name with a dot separator."""
|
|
123
|
+
return f"{namespace}.{name}" if namespace != '' else name
|
|
124
|
+
|
|
125
|
+
def concat_namespace(self, namespace: str, name: str) -> str:
|
|
126
|
+
"""Concatenates namespace and name with a dot separator."""
|
|
127
|
+
if namespace and name:
|
|
128
|
+
return f"{namespace}.{name}"
|
|
129
|
+
return namespace or name
|
|
130
|
+
|
|
131
|
+
def generate_class_or_enum(self, avro_schema: Dict, parent_namespace: str, write_file: bool = True) -> str:
|
|
132
|
+
"""Generates a Class or Enum."""
|
|
133
|
+
if avro_schema['type'] == 'record':
|
|
134
|
+
return self.generate_class(avro_schema, parent_namespace, write_file)
|
|
135
|
+
elif avro_schema['type'] == 'enum':
|
|
136
|
+
return self.generate_enum(avro_schema, parent_namespace, write_file)
|
|
137
|
+
return ''
|
|
138
|
+
|
|
139
|
+
    def generate_class(self, avro_schema: Dict, parent_namespace: str, write_file: bool = True) -> str:
        """Generate TypeScript class from Avro record using templates with namespace support.

        Returns the fully qualified (dotted) name of the generated class and
        registers it in self.generated_types so each type is emitted once.
        """
        import_types: Set[str] = set()
        class_name = pascal(avro_schema['name'])
        namespace = self.concat_namespace(self.base_package, avro_schema.get('namespace', parent_namespace))
        ts_qualified_name = self.get_qualified_name(namespace, class_name)
        # Already generated (e.g. referenced from another record): reuse it.
        if ts_qualified_name in self.generated_types:
            return ts_qualified_name

        # First pass: convert every Avro field; this may recursively generate
        # nested record/enum types and accumulates their names in import_types.
        fields = [{
            'definition': self.generate_field(field, avro_schema.get('namespace', parent_namespace), import_types, class_name),
            'docstring': field.get('doc', '')
        } for field in avro_schema.get('fields', [])]

        # Second pass: flatten each definition into the shape expected by the
        # class template.
        fields = [{
            'name': self.safe_name(field['definition']['name']),
            'original_name': field['definition']['name'],
            'type': field['definition']['type'],
            'type_no_null': self.strip_nullable(field['definition']['type']),
            'is_primitive': field['definition']['is_primitive'],
            'is_enum': field['definition']['is_enum'],
            'is_array': field['definition']['is_array'],
            'is_union': field['definition']['is_union'],
            'docstring': field['docstring'],
        } for field in fields]

        # Build relative ES-module import paths from this class's namespace
        # directory to each referenced type's generated file.
        imports_with_paths: Dict[str, str] = {}
        for import_type in import_types:
            if import_type == ts_qualified_name:
                continue
            import_is_enum = import_type in self.generated_types and self.generated_types[import_type] == 'enum'
            import_type_parts = import_type.split('.')
            import_type_name = pascal(import_type_parts[-1])
            import_path = '/'.join(import_type_parts)
            current_path = '/'.join(namespace.split('.'))
            relative_import_path = os.path.relpath(import_path, current_path).replace(os.sep, '/')
            if not relative_import_path.startswith('.'):
                relative_import_path = f'./{relative_import_path}'
            if import_is_enum:
                # Enums ship a companion <Name>Utils helper; import both names.
                import_type_name_and_util = f"{import_type_name}, {import_type_name}Utils"
                imports_with_paths[import_type_name_and_util] = relative_import_path + '.js'
            else:
                imports_with_paths[import_type_name] = relative_import_path + '.js'

        # Inline the schema so the embedded JSON is self-contained.
        local_avro_schema = inline_avro_references(avro_schema.copy(), self.type_dict, parent_namespace)
        avro_schema_json = json.dumps(local_avro_schema)

        class_definition = process_template(
            "avrotots/class_core.ts.jinja",
            namespace=namespace,
            class_name=class_name,
            docstring=avro_schema.get('doc', '').strip() if 'doc' in avro_schema else f'A {class_name} record.',
            fields=fields,
            imports=imports_with_paths,
            base_package=self.base_package,
            avro_annotation=self.avro_annotation,
            typed_json_annotation=self.typed_json_annotation,
            avro_schema_json=avro_schema_json,
            get_is_json_match_clause=self.get_is_json_match_clause,
        )

        if write_file:
            self.write_to_file(namespace, class_name, class_definition)
        self.generated_types[ts_qualified_name] = 'class'
        return ts_qualified_name
|
|
205
|
+
|
|
206
|
+
    def generate_enum(self, avro_schema: Dict, parent_namespace: str, write_file: bool = True) -> str:
        """Generate TypeScript enum from Avro enum using templates with namespace support.

        Returns the fully qualified (dotted) name of the enum and records it
        in self.generated_types so it is emitted only once.
        """
        enum_name = pascal(avro_schema['name'])
        namespace = self.concat_namespace(self.base_package, avro_schema.get('namespace', parent_namespace))
        ts_qualified_name = self.get_qualified_name(namespace, enum_name)
        # Already generated elsewhere in the schema graph: reuse it.
        if ts_qualified_name in self.generated_types:
            return ts_qualified_name

        symbols = avro_schema.get('symbols', [])
        enum_definition = process_template(
            "avrotots/enum_core.ts.jinja",
            namespace=namespace,
            enum_name=enum_name,
            docstring=avro_schema.get('doc', '').strip() if 'doc' in avro_schema else f'A {enum_name} enum.',
            symbols=symbols,
        )

        if write_file:
            self.write_to_file(namespace, enum_name, enum_definition)
        self.generated_types[ts_qualified_name] = 'enum'
        return ts_qualified_name
|
|
227
|
+
|
|
228
|
+
    def generate_field(self, field: Dict, parent_namespace: str, import_types: Set[str], class_name: str) -> Dict:
        """Generates a field for a TypeScript class.

        Returns a dict with the field's name, TypeScript type, and the
        classification flags consumed by the class template.
        """
        # Collect imports triggered by this field in a local set so they can
        # be inspected below, then merge them into the caller's set.
        import_types_this = set()
        field_type = self.convert_avro_type_to_typescript(
            field['type'], parent_namespace, import_types_this, class_name, field['name'])
        import_types.update(import_types_this)
        field_name = field['name']
        # NOTE(review): set.pop() returns an arbitrary element; if a field
        # pulls in more than one import, the union/enum flags below are based
        # on an unspecified one of them — confirm single-import is intended.
        import_name = import_types_this.pop() if len(import_types_this) > 0 else ''
        return {
            'name': field_name,
            'type': field_type,
            # Element type without the array suffix decides primitiveness.
            'is_primitive': self.is_typescript_primitive(field_type.replace('[]', '')),
            'is_array': field_type.endswith('[]'),
            'is_union': self.generated_types.get(import_name, '') == 'union',
            'is_enum': self.generated_types.get(import_name, '') == 'enum',
        }
|
|
244
|
+
|
|
245
|
+
    def get_is_json_match_clause(self, field_name: str, field_type: str, field_is_enum: bool) -> str:
        """Generates the isJsonMatch clause for a field.

        Builds a TypeScript boolean expression (as a string) that tests
        whether `element` carries a JSON value compatible with the field.
        """
        # Trailing underscores were added only to dodge TS keywords; the JSON
        # property keeps the original name.
        field_name_js = field_name.rstrip('_')
        is_optional = field_type.endswith('?')
        field_type = self.strip_nullable(field_type)

        # Inline union ('A|B'): OR together a clause for each member.
        if '|' in field_type:
            union_types = [t.strip() for t in field_type.split('|')]
            union_clauses = [self.get_is_json_match_clause(field_name, union_type, False) for union_type in union_types]
            clause = f"({' || '.join(union_clauses)})"
            return clause

        clause = f"(element.hasOwnProperty('{field_name_js}') && "

        if field_is_enum:
            # Enums may be serialized either by symbol name or ordinal.
            clause += f"(typeof element['{field_name_js}'] === 'string' || typeof element['{field_name_js}'] === 'number')"
        else:
            if field_type == 'string':
                clause += f"typeof element['{field_name_js}'] === 'string'"
            elif field_type == 'number':
                clause += f"typeof element['{field_name_js}'] === 'number'"
            elif field_type == 'boolean':
                clause += f"typeof element['{field_name_js}'] === 'boolean'"
            elif field_type == 'Date':
                # Dates arrive as parseable strings in JSON.
                clause += f"typeof element['{field_name_js}'] === 'string' && !isNaN(Date.parse(element['{field_name_js}']))"
            elif field_type.startswith('{ [key: string]:'):
                # Map type: any non-array object matches.
                clause += f"typeof element['{field_name_js}'] === 'object' && !Array.isArray(element['{field_name_js}'])"
            elif field_type.endswith('[]'):
                clause += f"Array.isArray(element['{field_name_js}'])"
            else:
                # Generated class/union: delegate to its own static matcher.
                clause += f"{field_type}.isJsonMatch(element['{field_name_js}'])"

        if is_optional:
            clause += f") || element['{field_name_js}'] === null"
        else:
            clause += ")"

        return clause
|
|
283
|
+
|
|
284
|
+
    def generate_embedded_union(self, class_name: str, field_name: str, avro_type: List, parent_namespace: str, parent_import_types: Set[str], write_file: bool = True) -> str:
        """Generate embedded Union class for a field with namespace support.

        For unions of primitives only, returns the inline 'A|B' type string.
        Otherwise emits a wrapper class file and returns its (unqualified)
        class name, adding the qualified name to parent_import_types.
        """
        union_class_name = pascal(field_name) + 'Union' if field_name else pascal(class_name) + 'Union'
        namespace = self.concat_namespace(self.base_package, parent_namespace)
        import_types:Set[str] = set()
        union_types = [self.convert_avro_type_to_typescript( t, parent_namespace, import_types) for t in avro_type if t != 'null']
        # No named types referenced -> all-primitive union, no wrapper needed.
        if not import_types:
            return '|'.join(union_types)
        class_definition = ''
        # Emit one ES-module import per referenced named type.
        for import_type in import_types:
            if import_type == union_class_name:
                continue  # Avoid importing itself
            import_type_parts = import_type.split('.')
            import_type_name = pascal(import_type_parts[-1])
            import_path = '/'.join(import_type_parts)
            current_path = '/'.join(namespace.split('.'))
            relative_import_path = os.path.relpath(import_path, current_path).replace(os.sep, '/')
            if not relative_import_path.startswith('.'):
                relative_import_path = f'./{relative_import_path}'
            class_definition += f"import {{ {import_type_name} }} from '{relative_import_path}.js';\n"

        if self.typed_json_annotation:
            class_definition += "import 'reflect-metadata';\n"
            class_definition += "import { CustomDeserializerParams, CustomSerializerParams } from 'typedjson/lib/types/metadata.js';\n"


        class_definition += f"\nexport class {union_class_name} {{\n"

        class_definition += f"{self.INDENT}private value: any;\n\n"

        # Constructor
        class_definition += f"{self.INDENT}constructor(value: { ' | '.join(union_types) }) {{\n"
        class_definition += f"{self.INDENT*2}this.value = value;\n"
        class_definition += f"{self.INDENT}}}\n\n"

        # Method to check which type is set
        for union_type in union_types:
            type_check_method = f"{self.INDENT}public is{pascal(union_type)}(): boolean {{\n"
            if union_type.strip() in ['string', 'number', 'boolean']:
                type_check_method += f"{self.INDENT*2}return typeof this.value === '{union_type.strip()}';\n"
            elif union_type.strip() == 'Date':
                type_check_method += f"{self.INDENT*2}return this.value instanceof Date;\n"
            else:
                type_check_method += f"{self.INDENT*2}return this.value instanceof {union_type.strip()};\n"
            type_check_method += f"{self.INDENT}}}\n\n"
            class_definition += type_check_method

        # Method to return the current value
        class_definition += f"{self.INDENT}public toJSON(): string {{\n"
        class_definition += f"{self.INDENT*2}let rawJson : Uint8Array = this.value.toByteArray('application/json');\n"
        class_definition += f"{self.INDENT*2}return new TextDecoder().decode(rawJson);\n"
        class_definition += f"{self.INDENT}}}\n\n"

        # Method to check if JSON matches any of the union types
        class_definition += f"{self.INDENT}public static isJsonMatch(element: any): boolean {{\n"
        match_clauses = []
        for union_type in union_types:
            match_clauses.append(f"({self.get_is_json_match_clause('value', union_type, False)})")
        class_definition += f"{self.INDENT*2}return {' || '.join(match_clauses)};\n"
        class_definition += f"{self.INDENT}}}\n\n"

        # Method to deserialize from JSON
        class_definition += f"{self.INDENT}public static fromData(element: any, contentTypeString: string): {union_class_name} {{\n"
        class_definition += f"{self.INDENT*2}const unionTypes = [{', '.join([t.strip() for t in union_types if not self.is_typescript_primitive(t.strip())])}];\n"
        class_definition += f"{self.INDENT*2}for (const type of unionTypes) {{\n"
        class_definition += f"{self.INDENT*3}if (type.isJsonMatch(element)) {{\n"
        class_definition += f"{self.INDENT*4}return new {union_class_name}(type.fromData(element, contentTypeString));\n"
        class_definition += f"{self.INDENT*3}}}\n"
        class_definition += f"{self.INDENT*2}}}\n"
        class_definition += f"{self.INDENT*2}throw new Error('No matching type for union');\n"
        class_definition += f"{self.INDENT}}}\n"

        # Method to deserialize from JSON with custom deserializer params
        # NOTE(review): fromJSON/toJSON below reference CustomDeserializerParams
        # and CustomSerializerParams, which are only imported above when
        # self.typed_json_annotation is set — confirm the generated file
        # compiles when that flag is off.
        class_definition += f"{self.INDENT}public static fromJSON(json: any, params: CustomDeserializerParams): {union_class_name} {{\n"
        class_definition += f"{self.INDENT*2}try {{\n"
        class_definition += f"{self.INDENT*3}return {union_class_name}.fromData(json, 'application/json');\n"
        class_definition += f"{self.INDENT*2}}} catch (error) {{\n"
        class_definition += f"{self.INDENT*3}return params.fallback(json, {union_class_name});\n"
        class_definition += f"{self.INDENT*2}}}\n"
        class_definition += f"{self.INDENT}}}\n\n"

        # Method to serialize to JSON with custom serializer params
        # NOTE(review): the emitted code uses 'this' inside a static method —
        # verify this is the intended TypeScript output.
        class_definition += f"{self.INDENT}public static toJSON(obj: any, params: CustomSerializerParams): any {{\n"
        class_definition += f"{self.INDENT*2}try {{\n"
        class_definition += f"{self.INDENT*3}const val = new {union_class_name}(obj);\n"
        class_definition += f"{self.INDENT*3}return val.toJSON();\n"
        class_definition += f"{self.INDENT*2}}} catch (error) {{\n"
        class_definition += f"{self.INDENT*3}return params.fallback(this, {union_class_name});\n"
        class_definition += f"{self.INDENT*2}}}\n"
        class_definition += f"{self.INDENT}}}\n\n"

        class_definition += "}\n"

        if write_file:
            self.write_to_file(namespace, union_class_name, class_definition)

        parent_import_types.add(f"{namespace}.{union_class_name}")
        self.generated_types[f"{namespace}.{union_class_name}"] = 'union'
        return f"{union_class_name}"
|
|
383
|
+
|
|
384
|
+
def write_to_file(self, namespace: str, name: str, content: str):
|
|
385
|
+
"""Write TypeScript class to file in the correct namespace directory."""
|
|
386
|
+
directory_path = os.path.join(self.src_dir, *namespace.split('.'))
|
|
387
|
+
if not os.path.exists(directory_path):
|
|
388
|
+
os.makedirs(directory_path, exist_ok=True)
|
|
389
|
+
|
|
390
|
+
file_path = os.path.join(directory_path, f"{name}.ts")
|
|
391
|
+
with open(file_path, 'w', encoding='utf-8') as file:
|
|
392
|
+
file.write(content)
|
|
393
|
+
|
|
394
|
+
    def generate_index_file(self):
        """Generate a root index.ts file that exports all types with aliases scoped to their modules."""
        exports = []

        for class_name in self.generated_types:
            # Split the fully qualified dotted name into namespace parts plus
            # the type name itself.
            parts = class_name.split('.')
            file_name = parts[-1]  # The actual type name (e.g., 'FareRules')
            module_path = parts[:-1]  # Namespace path the file lives under

            # Construct the relative path to the generated .js file.
            file_relative_path = os.path.join(*(module_path[0:] + [f"{file_name}.js"])).replace(os.sep, '/')
            if not file_relative_path.startswith('.'):
                file_relative_path = './' + file_relative_path

            # Alias is the full dotted name flattened with underscores so
            # same-named types from different namespaces cannot collide.
            alias_parts = [pascal(part) for part in parts]
            alias_name = '_'.join(alias_parts)

            # Generate the export statement with alias.
            exports.append(f"export {{ {file_name} as {alias_name} }} from '{file_relative_path}';\n")

        # Write the root index.ts barrel file.
        index_file_path = os.path.join(self.src_dir, 'index.ts')
        with open(index_file_path, 'w', encoding='utf-8') as f:
            f.writelines(exports)
|
|
422
|
+
|
|
423
|
+
    def generate_project_files(self, output_dir: str):
        """Generate project files using templates.

        Writes tsconfig.json, package.json, and .gitignore into output_dir;
        when Avro annotations are enabled, also emits type declarations for
        the avro-js module.
        """
        tsconfig_content = process_template(
            "avrotots/tsconfig.json.jinja",
        )

        package_json_content = process_template(
            "avrotots/package.json.jinja",
            package_name=self.base_package,
        )

        gitignore_content = process_template(
            "avrotots/gitignore.jinja",
        )

        tsconfig_path = os.path.join(output_dir, 'tsconfig.json')
        package_json_path = os.path.join(output_dir, 'package.json')
        gitignore_path = os.path.join(output_dir, '.gitignore')

        with open(tsconfig_path, 'w', encoding='utf-8') as file:
            file.write(tsconfig_content)

        with open(package_json_path, 'w', encoding='utf-8') as file:
            file.write(package_json_content)

        with open(gitignore_path, 'w', encoding='utf-8') as file:
            file.write(gitignore_content)

        # Generate TypeScript type definitions for avro-js when using Avro annotations
        if self.avro_annotation:
            self.generate_avro_js_types(output_dir)
|
|
454
|
+
|
|
455
|
+
def generate_avro_js_types(self, output_dir: str):
|
|
456
|
+
"""Generate TypeScript type declaration file for avro-js module."""
|
|
457
|
+
avro_js_types = '''declare module 'avro-js' {
|
|
458
|
+
/**
|
|
459
|
+
* Avro Type representation.
|
|
460
|
+
* Provides methods for encoding, decoding, and validating Avro data.
|
|
461
|
+
*/
|
|
462
|
+
export class Type {
|
|
463
|
+
/**
|
|
464
|
+
* Create a Type instance from an Avro schema.
|
|
465
|
+
* @param schema - Avro schema object or JSON string
|
|
466
|
+
* @returns Type instance
|
|
467
|
+
*/
|
|
468
|
+
static forSchema(schema: any): Type;
|
|
469
|
+
|
|
470
|
+
/**
|
|
471
|
+
* Encode a value to a Buffer.
|
|
472
|
+
* @param obj - Value to encode
|
|
473
|
+
* @returns Encoded Buffer
|
|
474
|
+
*/
|
|
475
|
+
toBuffer(obj: any): Buffer;
|
|
476
|
+
|
|
477
|
+
/**
|
|
478
|
+
* Decode a value from a Buffer.
|
|
479
|
+
* @param buffer - Buffer to decode
|
|
480
|
+
* @returns Decoded value
|
|
481
|
+
*/
|
|
482
|
+
fromBuffer(buffer: Buffer | Uint8Array): any;
|
|
483
|
+
|
|
484
|
+
/**
|
|
485
|
+
* Get string representation of the type or encode a value to JSON string.
|
|
486
|
+
* @param value - Optional value to encode
|
|
487
|
+
* @returns String representation
|
|
488
|
+
*/
|
|
489
|
+
toString(value?: any): string;
|
|
490
|
+
|
|
491
|
+
/**
|
|
492
|
+
* Clone a value using the type's schema.
|
|
493
|
+
* @param value - Value to clone
|
|
494
|
+
* @param options - Clone options
|
|
495
|
+
* @returns Cloned value
|
|
496
|
+
*/
|
|
497
|
+
clone(value: any, options?: any): any;
|
|
498
|
+
|
|
499
|
+
/**
|
|
500
|
+
* Compare two values according to Avro sort order.
|
|
501
|
+
* @param a - First value
|
|
502
|
+
* @param b - Second value
|
|
503
|
+
* @returns -1, 0, or 1
|
|
504
|
+
*/
|
|
505
|
+
compare(a: any, b: any): number;
|
|
506
|
+
|
|
507
|
+
/**
|
|
508
|
+
* Check if a value is valid for this type.
|
|
509
|
+
* @param value - Value to validate
|
|
510
|
+
* @param options - Validation options
|
|
511
|
+
* @returns true if valid
|
|
512
|
+
*/
|
|
513
|
+
isValid(value: any, options?: any): boolean;
|
|
514
|
+
|
|
515
|
+
/**
|
|
516
|
+
* Decode a value from a buffer.
|
|
517
|
+
* @param buffer - Buffer to decode
|
|
518
|
+
* @param resolver - Optional resolver for schema evolution
|
|
519
|
+
* @param noCheck - Skip validation
|
|
520
|
+
* @returns Decoded value
|
|
521
|
+
*/
|
|
522
|
+
decode(buffer: Buffer, resolver?: any, noCheck?: boolean): any;
|
|
523
|
+
|
|
524
|
+
/**
|
|
525
|
+
* Encode a value to a buffer.
|
|
526
|
+
* @param value - Value to encode
|
|
527
|
+
* @param bufferSize - Optional buffer size
|
|
528
|
+
* @returns Encoded buffer
|
|
529
|
+
*/
|
|
530
|
+
encode(value: any, bufferSize?: number): Buffer;
|
|
531
|
+
|
|
532
|
+
/**
|
|
533
|
+
* Create a resolver for schema evolution.
|
|
534
|
+
* @param writerType - Writer's type
|
|
535
|
+
* @returns Resolver
|
|
536
|
+
*/
|
|
537
|
+
createResolver(writerType: Type): any;
|
|
538
|
+
}
|
|
539
|
+
|
|
540
|
+
/**
|
|
541
|
+
* Parse an Avro schema.
|
|
542
|
+
* @param schema - Schema as string or object
|
|
543
|
+
* @param options - Parse options
|
|
544
|
+
* @returns Type instance
|
|
545
|
+
*/
|
|
546
|
+
export function parse(schema: string | any, options?: any): Type;
|
|
547
|
+
}
|
|
548
|
+
'''
|
|
549
|
+
|
|
550
|
+
# Place type definitions in src directory so TypeScript can find them
|
|
551
|
+
src_dir = os.path.join(output_dir, 'src')
|
|
552
|
+
if not os.path.exists(src_dir):
|
|
553
|
+
os.makedirs(src_dir, exist_ok=True)
|
|
554
|
+
|
|
555
|
+
types_file_path = os.path.join(src_dir, 'avro-js.d.ts')
|
|
556
|
+
with open(types_file_path, 'w', encoding='utf-8') as file:
|
|
557
|
+
file.write(avro_js_types)
|
|
558
|
+
|
|
559
|
+
    def convert_schema(self, schema: Union[List[Dict], Dict], output_dir: str, write_file: bool = True):
        """Convert Avro schema to TypeScript classes with namespace support.

        Accepts a single schema dict or a list of them, generates one .ts
        file per top-level record/enum under <output_dir>/src, then writes
        the index.ts barrel file and the project scaffolding files.
        """
        self.output_dir = output_dir
        self.src_dir = os.path.join(self.output_dir, "src")
        if isinstance(schema, dict):
            schema = [schema]
        self.main_schema = schema
        # Flat name -> definition lookup used when inlining cross-references.
        self.type_dict = build_flat_type_dict(schema)
        for avro_schema in schema:
            if avro_schema['type'] == 'record':
                self.generate_class(avro_schema, '', write_file)
            elif avro_schema['type'] == 'enum':
                self.generate_enum(avro_schema, '', write_file)
        self.generate_index_file()
        self.generate_project_files(output_dir)
|
|
574
|
+
|
|
575
|
+
def convert(self, avro_schema_path: str, output_dir: str):
|
|
576
|
+
"""Convert Avro schema to TypeScript classes."""
|
|
577
|
+
with open(avro_schema_path, 'r', encoding='utf-8') as file:
|
|
578
|
+
schema = json.load(file)
|
|
579
|
+
self.convert_schema(schema, output_dir)
|
|
580
|
+
self.generate_project_files(output_dir)
|
|
581
|
+
|
|
582
|
+
|
|
583
|
+
def convert_avro_to_typescript(avro_schema_path, js_dir_path, package_name='', typedjson_annotation=False, avro_annotation=False):
    """Convert an Avro schema file to TypeScript classes on disk.

    When no package name is given, one is derived from the schema file's
    base name (lowercased, dashes replaced by underscores).
    """
    if not package_name:
        stem = os.path.splitext(os.path.basename(avro_schema_path))[0]
        package_name = stem.lower().replace('-', '_')

    converter = AvroToTypeScript(
        package_name,
        typed_json_annotation=typedjson_annotation,
        avro_annotation=avro_annotation,
    )
    converter.convert(avro_schema_path, js_dir_path)
|
|
591
|
+
|
|
592
|
+
|
|
593
|
+
def convert_avro_schema_to_typescript(avro_schema, js_dir_path, package_name='', typedjson_annotation=False, avro_annotation=False):
    """Convert an in-memory Avro schema (dict or list) to TypeScript classes."""
    converter = AvroToTypeScript(
        package_name,
        typed_json_annotation=typedjson_annotation,
        avro_annotation=avro_annotation,
    )
    converter.convert_schema(avro_schema, js_dir_path)
    converter.generate_project_files(js_dir_path)
|