avrotize-2.21.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- avrotize/__init__.py +66 -0
- avrotize/__main__.py +6 -0
- avrotize/_version.py +34 -0
- avrotize/asn1toavro.py +160 -0
- avrotize/avrotize.py +152 -0
- avrotize/avrotocpp/CMakeLists.txt.jinja +77 -0
- avrotize/avrotocpp/build.bat.jinja +7 -0
- avrotize/avrotocpp/build.sh.jinja +7 -0
- avrotize/avrotocpp/dataclass_body.jinja +108 -0
- avrotize/avrotocpp/vcpkg.json.jinja +21 -0
- avrotize/avrotocpp.py +483 -0
- avrotize/avrotocsharp/README.md.jinja +166 -0
- avrotize/avrotocsharp/class_test.cs.jinja +266 -0
- avrotize/avrotocsharp/dataclass_core.jinja +293 -0
- avrotize/avrotocsharp/enum_test.cs.jinja +20 -0
- avrotize/avrotocsharp/project.csproj.jinja +30 -0
- avrotize/avrotocsharp/project.sln.jinja +34 -0
- avrotize/avrotocsharp/run_coverage.ps1.jinja +98 -0
- avrotize/avrotocsharp/run_coverage.sh.jinja +149 -0
- avrotize/avrotocsharp/testproject.csproj.jinja +19 -0
- avrotize/avrotocsharp.py +1180 -0
- avrotize/avrotocsv.py +121 -0
- avrotize/avrotodatapackage.py +173 -0
- avrotize/avrotodb.py +1383 -0
- avrotize/avrotogo/go_enum.jinja +12 -0
- avrotize/avrotogo/go_helpers.jinja +31 -0
- avrotize/avrotogo/go_struct.jinja +151 -0
- avrotize/avrotogo/go_test.jinja +47 -0
- avrotize/avrotogo/go_union.jinja +38 -0
- avrotize/avrotogo.py +476 -0
- avrotize/avrotographql.py +197 -0
- avrotize/avrotoiceberg.py +210 -0
- avrotize/avrotojava/class_test.java.jinja +212 -0
- avrotize/avrotojava/enum_test.java.jinja +21 -0
- avrotize/avrotojava/testproject.pom.jinja +54 -0
- avrotize/avrotojava.py +2156 -0
- avrotize/avrotojs.py +250 -0
- avrotize/avrotojsons.py +481 -0
- avrotize/avrotojstruct.py +345 -0
- avrotize/avrotokusto.py +364 -0
- avrotize/avrotomd/README.md.jinja +49 -0
- avrotize/avrotomd.py +137 -0
- avrotize/avrotools.py +168 -0
- avrotize/avrotoparquet.py +208 -0
- avrotize/avrotoproto.py +359 -0
- avrotize/avrotopython/dataclass_core.jinja +241 -0
- avrotize/avrotopython/enum_core.jinja +87 -0
- avrotize/avrotopython/pyproject_toml.jinja +18 -0
- avrotize/avrotopython/test_class.jinja +97 -0
- avrotize/avrotopython/test_enum.jinja +23 -0
- avrotize/avrotopython.py +626 -0
- avrotize/avrotorust/dataclass_enum.rs.jinja +74 -0
- avrotize/avrotorust/dataclass_struct.rs.jinja +204 -0
- avrotize/avrotorust/dataclass_union.rs.jinja +105 -0
- avrotize/avrotorust.py +435 -0
- avrotize/avrotots/class_core.ts.jinja +140 -0
- avrotize/avrotots/class_test.ts.jinja +77 -0
- avrotize/avrotots/enum_core.ts.jinja +46 -0
- avrotize/avrotots/gitignore.jinja +34 -0
- avrotize/avrotots/index.ts.jinja +0 -0
- avrotize/avrotots/package.json.jinja +23 -0
- avrotize/avrotots/tsconfig.json.jinja +21 -0
- avrotize/avrotots.py +687 -0
- avrotize/avrotoxsd.py +344 -0
- avrotize/cddltostructure.py +1841 -0
- avrotize/commands.json +3496 -0
- avrotize/common.py +834 -0
- avrotize/constants.py +87 -0
- avrotize/csvtoavro.py +132 -0
- avrotize/datapackagetoavro.py +76 -0
- avrotize/dependencies/cpp/vcpkg/vcpkg.json +19 -0
- avrotize/dependencies/cs/net90/dependencies.csproj +29 -0
- avrotize/dependencies/go/go121/go.mod +6 -0
- avrotize/dependencies/java/jdk21/pom.xml +91 -0
- avrotize/dependencies/python/py312/requirements.txt +13 -0
- avrotize/dependencies/rust/stable/Cargo.toml +17 -0
- avrotize/dependencies/typescript/node22/package.json +16 -0
- avrotize/dependency_resolver.py +348 -0
- avrotize/dependency_version.py +432 -0
- avrotize/generic/generic.avsc +57 -0
- avrotize/jsonstoavro.py +2167 -0
- avrotize/jsonstostructure.py +2864 -0
- avrotize/jstructtoavro.py +878 -0
- avrotize/kstructtoavro.py +93 -0
- avrotize/kustotoavro.py +455 -0
- avrotize/openapitostructure.py +717 -0
- avrotize/parquettoavro.py +157 -0
- avrotize/proto2parser.py +498 -0
- avrotize/proto3parser.py +403 -0
- avrotize/prototoavro.py +382 -0
- avrotize/prototypes/any.avsc +19 -0
- avrotize/prototypes/api.avsc +106 -0
- avrotize/prototypes/duration.avsc +20 -0
- avrotize/prototypes/field_mask.avsc +18 -0
- avrotize/prototypes/struct.avsc +60 -0
- avrotize/prototypes/timestamp.avsc +20 -0
- avrotize/prototypes/type.avsc +253 -0
- avrotize/prototypes/wrappers.avsc +117 -0
- avrotize/structuretocddl.py +597 -0
- avrotize/structuretocpp/CMakeLists.txt.jinja +76 -0
- avrotize/structuretocpp/build.bat.jinja +3 -0
- avrotize/structuretocpp/build.sh.jinja +3 -0
- avrotize/structuretocpp/dataclass_body.jinja +50 -0
- avrotize/structuretocpp/vcpkg.json.jinja +11 -0
- avrotize/structuretocpp.py +697 -0
- avrotize/structuretocsharp/class_test.cs.jinja +180 -0
- avrotize/structuretocsharp/dataclass_core.jinja +156 -0
- avrotize/structuretocsharp/enum_test.cs.jinja +36 -0
- avrotize/structuretocsharp/json_structure_converters.cs.jinja +399 -0
- avrotize/structuretocsharp/program.cs.jinja +49 -0
- avrotize/structuretocsharp/project.csproj.jinja +17 -0
- avrotize/structuretocsharp/project.sln.jinja +34 -0
- avrotize/structuretocsharp/testproject.csproj.jinja +18 -0
- avrotize/structuretocsharp/tuple_converter.cs.jinja +121 -0
- avrotize/structuretocsharp.py +2295 -0
- avrotize/structuretocsv.py +365 -0
- avrotize/structuretodatapackage.py +659 -0
- avrotize/structuretodb.py +1125 -0
- avrotize/structuretogo/go_enum.jinja +12 -0
- avrotize/structuretogo/go_helpers.jinja +26 -0
- avrotize/structuretogo/go_interface.jinja +18 -0
- avrotize/structuretogo/go_struct.jinja +187 -0
- avrotize/structuretogo/go_test.jinja +70 -0
- avrotize/structuretogo.py +729 -0
- avrotize/structuretographql.py +502 -0
- avrotize/structuretoiceberg.py +355 -0
- avrotize/structuretojava/choice_core.jinja +34 -0
- avrotize/structuretojava/class_core.jinja +23 -0
- avrotize/structuretojava/enum_core.jinja +18 -0
- avrotize/structuretojava/equals_hashcode.jinja +30 -0
- avrotize/structuretojava/pom.xml.jinja +26 -0
- avrotize/structuretojava/tuple_core.jinja +49 -0
- avrotize/structuretojava.py +938 -0
- avrotize/structuretojs/class_core.js.jinja +33 -0
- avrotize/structuretojs/enum_core.js.jinja +10 -0
- avrotize/structuretojs/package.json.jinja +12 -0
- avrotize/structuretojs/test_class.js.jinja +84 -0
- avrotize/structuretojs/test_enum.js.jinja +58 -0
- avrotize/structuretojs/test_runner.js.jinja +45 -0
- avrotize/structuretojs.py +657 -0
- avrotize/structuretojsons.py +498 -0
- avrotize/structuretokusto.py +639 -0
- avrotize/structuretomd/README.md.jinja +204 -0
- avrotize/structuretomd.py +322 -0
- avrotize/structuretoproto.py +764 -0
- avrotize/structuretopython/dataclass_core.jinja +363 -0
- avrotize/structuretopython/enum_core.jinja +45 -0
- avrotize/structuretopython/map_alias.jinja +21 -0
- avrotize/structuretopython/pyproject_toml.jinja +23 -0
- avrotize/structuretopython/test_class.jinja +103 -0
- avrotize/structuretopython/test_enum.jinja +34 -0
- avrotize/structuretopython.py +799 -0
- avrotize/structuretorust/dataclass_enum.rs.jinja +63 -0
- avrotize/structuretorust/dataclass_struct.rs.jinja +121 -0
- avrotize/structuretorust/dataclass_union.rs.jinja +81 -0
- avrotize/structuretorust.py +714 -0
- avrotize/structuretots/class_core.ts.jinja +78 -0
- avrotize/structuretots/enum_core.ts.jinja +6 -0
- avrotize/structuretots/gitignore.jinja +8 -0
- avrotize/structuretots/index.ts.jinja +1 -0
- avrotize/structuretots/package.json.jinja +39 -0
- avrotize/structuretots/test_class.ts.jinja +35 -0
- avrotize/structuretots/tsconfig.json.jinja +21 -0
- avrotize/structuretots.py +740 -0
- avrotize/structuretoxsd.py +679 -0
- avrotize/xsdtoavro.py +413 -0
- avrotize-2.21.1.dist-info/METADATA +1319 -0
- avrotize-2.21.1.dist-info/RECORD +171 -0
- avrotize-2.21.1.dist-info/WHEEL +4 -0
- avrotize-2.21.1.dist-info/entry_points.txt +3 -0
- avrotize-2.21.1.dist-info/licenses/LICENSE +201 -0
avrotize/avrotoiceberg.py
@@ -0,0 +1,210 @@

"""Convert an Avro schema to an Iceberg schema."""

import json
import sys
from typing import Dict, List
import pyarrow as pa
from pyiceberg.schema import Schema, NestedField
from pyiceberg.types import (
    BooleanType,
    IntegerType,
    LongType,
    FloatType,
    DoubleType,
    StringType,
    BinaryType,
    DateType,
    TimestampType,
    DecimalType,
    FixedType,
    ListType,
    MapType,
    StructType
)
from pyiceberg.io.pyarrow import PyArrowFileIO, schema_to_pyarrow

JsonNode = Dict[str, 'JsonNode'] | List['JsonNode'] | str | bool | int | None


class AvroToIcebergConverter:
    """Class to convert Avro schema to Iceberg schema."""

    def __init__(self: 'AvroToIcebergConverter'):
        self.named_type_cache: Dict[str, JsonNode] = {}
        self.id_counter = 0

    def get_id(self) -> int:
        """Get a unique ID for a record type."""
        self.id_counter += 1
        return self.id_counter

    def get_fullname(self, namespace: str, name: str) -> str:
        """Get the full name of a record type."""
        return f"{namespace}.{name}" if namespace else name

    def convert_avro_to_iceberg(self, avro_schema_path: str, avro_record_type: str, output_path: str, emit_cloudevents_columns: bool = False):
        """Convert an Avro schema to an Iceberg schema."""
        schema_file = avro_schema_path
        if not schema_file:
            print("Please specify the avro schema file")
            sys.exit(1)
        with open(schema_file, "r", encoding="utf-8") as f:
            schema_json = f.read()

        # Parse the schema as a JSON object
        schema = json.loads(schema_json)
        self.cache_named_types(schema)

        if isinstance(schema, list) and avro_record_type:
            schema = next(
                (x for x in schema if x["name"] == avro_record_type or x["namespace"] + "." + x["name"] == avro_record_type), None)
            if schema is None:
                print(
                    f"No top-level record type {avro_record_type} found in the Avro schema")
                sys.exit(1)
        elif not isinstance(schema, dict):
            print(
                "Expected a single Avro schema as a JSON object, or a list of schema records")
            sys.exit(1)

        # Get the name and fields of the top-level record
        table_name = schema["name"]
        fields = schema["fields"]

        # Create a list to store the iceberg schema
        iceberg_fields: List[NestedField] = []

        # Append the iceberg schema with the column names and types
        for i, field in enumerate(fields):
            column_name = field["name"]
            column_type = self.convert_avro_type_to_iceberg_type(field["type"])
            iceberg_fields.append(
                NestedField(field_id=self.get_id(), name=column_name, type=column_type))

        if emit_cloudevents_columns:
            iceberg_fields.extend([
                NestedField(field_id=self.get_id(),
                            name="___type", type=StringType()),
                NestedField(field_id=self.get_id(),
                            name="___source", type=StringType()),
                NestedField(field_id=self.get_id(),
                            name="___id", type=StringType()),
                NestedField(field_id=self.get_id(),
                            name="___time", type=TimestampType()),
                NestedField(field_id=self.get_id(),
                            name="___subject", type=StringType())
            ])

        iceberg_schema = Schema(*iceberg_fields)
        arrow_schema = schema_to_pyarrow(iceberg_schema)
        print(f"Iceberg schema created: {arrow_schema}")

        # Write to Iceberg table (for demonstration, using local file system)
        file_io = PyArrowFileIO()
        output_file = file_io.new_output("file://" + output_path)
        with output_file.create(overwrite=True) as f:
            pa.output_stream(f).write(arrow_schema.serialize().to_pybytes())

    def convert_avro_type_to_iceberg_type(self, avro_type):
        """Convert an Avro type to an Iceberg type."""
        if isinstance(avro_type, list):
            item_count = len(avro_type)
            if item_count == 1:
                return self.convert_avro_type_to_iceberg_type(avro_type[0])
            elif item_count == 2:
                first, second = avro_type[0], avro_type[1]
                if first == "null":
                    return self.convert_avro_type_to_iceberg_type(second)
                elif second == "null":
                    return self.convert_avro_type_to_iceberg_type(first)
                else:
                    return StructType(fields=[NestedField(field_id=self.get_id(), name=f'field_{i}', type=self.convert_avro_type_to_iceberg_type(t)) for i, t in enumerate(avro_type)])
            elif item_count > 0:
                return StructType(fields=[NestedField(field_id=self.get_id(), name=f'field_{i}', type=self.convert_avro_type_to_iceberg_type(t)) for i, t in enumerate(avro_type)])
            else:
                print(f"WARNING: Empty union type {avro_type}")
                return StringType()
        elif isinstance(avro_type, dict):
            type_name = avro_type.get("type")
            if type_name == "array":
                return ListType(element_id=self.get_id(), element=self.convert_avro_type_to_iceberg_type(avro_type.get("items")))
            elif type_name == "map":
                return MapType(key_id=self.get_id(), key_type=StringType(), value_id=self.get_id(), value_type=self.convert_avro_type_to_iceberg_type(avro_type.get("values")))
            elif type_name == "record":
                fields = avro_type.get("fields")
                return StructType(fields=[NestedField(field_id=self.get_id(), name=field["name"], type=self.convert_avro_type_to_iceberg_type(field["type"])) for i, field in enumerate(fields)])
            elif type_name == "enum":
                return StringType()
            elif type_name == "fixed":
                return FixedType(avro_type.get("size"))
            elif type_name == "string":
                logical_type = avro_type.get("logicalType")
                if logical_type == "uuid":
                    return StringType()
                return StringType()
            elif type_name == "bytes":
                logical_type = avro_type.get("logicalType")
                if logical_type == "decimal":
                    return DecimalType(38, 18)
                return BinaryType()
            elif type_name == "long":
                logical_type = avro_type.get("logicalType")
                if logical_type in ["timestamp-millis", "timestamp-micros"]:
                    return TimestampType()
                if logical_type in ["time-millis", "time-micros"]:
                    return LongType()
                return LongType()
            elif type_name == "int":
                logical_type = avro_type.get("logicalType")
                if logical_type == "date":
                    return DateType()
                return IntegerType()
            else:
                return self.map_iceberg_scalar_type(type_name)
        elif isinstance(avro_type, str):
            if avro_type in self.named_type_cache:
                return self.convert_avro_type_to_iceberg_type(self.named_type_cache[avro_type])
            return self.map_iceberg_scalar_type(avro_type)

        return StringType()

    def cache_named_types(self, avro_type):
        """Add an encountered type to the list of types."""
        if isinstance(avro_type, list):
            for item in avro_type:
                self.cache_named_types(item)
        if isinstance(avro_type, dict) and avro_type.get("name"):
            self.named_type_cache[self.get_fullname(avro_type.get(
                "namespace"), avro_type.get("name"))] = avro_type
            if "fields" in avro_type:
                for field in avro_type.get("fields"):
                    if "type" in field:
                        self.cache_named_types(field.get("type"))

    def map_iceberg_scalar_type(self, type_name: str):
        """Map an Avro scalar type to an Iceberg scalar type."""
        if type_name == "null":
            return StringType()
        elif type_name == "int":
            return IntegerType()
        elif type_name == "long":
            return LongType()
        elif type_name == "float":
            return FloatType()
        elif type_name == "double":
            return DoubleType()
        elif type_name == "boolean":
            return BooleanType()
        elif type_name == "bytes":
            return BinaryType()
        elif type_name == "string":
            return StringType()
        else:
            return StringType()


def convert_avro_to_iceberg(avro_schema_path, avro_record_type, output_path, emit_cloudevents_columns=False):
    """Convert an Avro schema to an Iceberg schema."""
    converter = AvroToIcebergConverter()
    converter.convert_avro_to_iceberg(
        avro_schema_path, avro_record_type, output_path, emit_cloudevents_columns)
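As a quick orientation for the module above, here is a minimal usage sketch of the module-level `convert_avro_to_iceberg` helper it defines. The schema path, record type name, and output path are placeholder assumptions rather than values from the package; note that the converter writes the PyArrow-serialized schema to the output path, not table data.

```python
# Minimal sketch: calling the helper defined at the end of avrotoiceberg.py.
# The paths and the record type name below are illustrative placeholders.
from avrotize.avrotoiceberg import convert_avro_to_iceberg

convert_avro_to_iceberg(
    avro_schema_path="telemetry.avsc",     # Avro schema document to read (placeholder)
    avro_record_type="TelemetryEvent",     # record to select when the file holds a list of schemas (placeholder)
    output_path="/tmp/telemetry.iceberg",  # target file for the serialized PyArrow schema (placeholder)
    emit_cloudevents_columns=True,         # append the ___type/___source/___id/___time/___subject columns
)
```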
avrotize/avrotojava/class_test.java.jinja
@@ -0,0 +1,212 @@

package {{ package }};

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.BeforeEach;
import static org.junit.jupiter.api.Assertions.*;

{% if imports %}
{% for import_stmt in imports %}
{{ import_stmt }}
{% endfor %}
{% endif %}

/**
 * Test class for {{ class_name }}
 */
public class {{ test_class_name }} {
    private {{ class_name }} instance;

    /**
     * Set up test instance
     */
    @BeforeEach
    public void setUp() {
        instance = createInstance();
    }

    /**
     * Create instance of {{ class_name }}
     */
    public static {{ class_name }} createInstance() {
        return {{ class_name }}.createTestInstance();
    }

    /**
     * Testing Equals and GetHashCode - positive case
     */
    @Test
    public void testEqualsAndHashCode_PositiveCase() {
        {{ class_name }} instance1 = createInstance();
        {{ class_name }} instance2 = createInstance();

        // Test equality
        assertEquals(instance1, instance2, "Two instances with same values should be equal");

        // Test hash codes
        assertEquals(instance1.hashCode(), instance2.hashCode(),
            "Equal instances should have equal hash codes");
    }

    /**
     * Testing Equals and GetHashCode - negative case
     */
    @Test
    public void testEqualsAndHashCode_NegativeCase() {
        {{ class_name }} instance1 = createInstance();
        {{ class_name }} instance2 = createInstance();

        {%- for field in fields if not field.is_const %}
        {%- if loop.first %}
        // Modify {{ field.field_name }} and verify inequality
        {%- set base_type = field.base_type %}
        {%- if base_type == "String" %}
        instance2.set{{ field.field_name | pascal }}((instance2.get{{ field.field_name | pascal }}() != null ? instance2.get{{ field.field_name | pascal }}() : "test") + "_modified");
        {%- elif base_type == "byte[]" %}
        byte[] modifiedBytes = instance2.get{{ field.field_name | pascal }}();
        if (modifiedBytes != null && modifiedBytes.length > 0) {
            modifiedBytes = java.util.Arrays.copyOf(modifiedBytes, modifiedBytes.length);
            modifiedBytes[0] = (byte)(modifiedBytes[0] + 1);
        } else {
            modifiedBytes = new byte[] { 0x00, 0x01, 0x02, 0x03 };
        }
        instance2.set{{ field.field_name | pascal }}(modifiedBytes);
        {%- elif base_type in ["int", "Integer", "long", "Long", "short", "Short", "byte", "Byte"] %}
        {%- if base_type in ["Integer", "Long", "Short", "Byte"] %}
        instance2.set{{ field.field_name | pascal }}((instance2.get{{ field.field_name | pascal }}() != null ? instance2.get{{ field.field_name | pascal }}() : 0) + 1);
        {%- else %}
        instance2.set{{ field.field_name | pascal }}(instance2.get{{ field.field_name | pascal }}() + 1);
        {%- endif %}
        {%- elif base_type in ["float", "Float", "double", "Double"] %}
        {%- if base_type in ["Float", "Double"] %}
        instance2.set{{ field.field_name | pascal }}((instance2.get{{ field.field_name | pascal }}() != null ? instance2.get{{ field.field_name | pascal }}() : 0.0{{ "f" if base_type == "Float" else "" }}) + 1.0{{ "f" if base_type == "Float" else "" }});
        {%- else %}
        instance2.set{{ field.field_name | pascal }}(instance2.get{{ field.field_name | pascal }}() + 1.0{{ "f" if base_type == "float" else "" }});
        {%- endif %}
        {%- elif base_type in ["boolean", "Boolean"] %}
        {%- if base_type == "Boolean" %}
        instance2.set{{ field.field_name | pascal }}(!(instance2.get{{ field.field_name | pascal }}() != null ? instance2.get{{ field.field_name | pascal }}() : false));
        {%- else %}
        instance2.set{{ field.field_name | pascal }}(!instance2.get{{ field.field_name | pascal }}());
        {%- endif %}
        {%- elif base_type in ["List", "Set", "ArrayList", "HashSet", "LinkedList"] %}
        // For collection types, add a dummy element to make them different
        {%- if base_type in ["Set", "HashSet"] %}
        java.util.Set tempSet = new java.util.HashSet(instance2.get{{ field.field_name | pascal }}());
        tempSet.add(new Object());
        instance2.set{{ field.field_name | pascal }}(tempSet);
        {%- else %}
        java.util.List tempList = new java.util.ArrayList(instance2.get{{ field.field_name | pascal }}());
        tempList.add(new Object());
        instance2.set{{ field.field_name | pascal }}(tempList);
        {%- endif %}
        {%- elif base_type in ["Map", "HashMap", "LinkedHashMap"] %}
        // For map types, add a dummy entry to make them different
        java.util.Map tempMap = new java.util.HashMap(instance2.get{{ field.field_name | pascal }}());
        tempMap.put(new Object(), new Object());
        instance2.set{{ field.field_name | pascal }}(tempMap);
        {%- else %}
        // For enum or complex types, set to a different value
        {%- if "Union" in base_type %}
        // For union types, set to null to create a different value
        instance2.set{{ field.field_name | pascal }}(null);
        assertNotEquals(instance1, instance2,
            "Instances with different {{ field.field_name }} should not be equal");
        {%- elif field.is_enum %}
        // Enum - try to get a different enum value if more than one value exists
        {%- set simple_type = base_type.split('.')[-1] if '.' in base_type else base_type %}
        {{ simple_type }}[] enumValues = {{ simple_type }}.values();
        if (enumValues.length > 1) {
            instance2.set{{ field.field_name | pascal }}(enumValues[(instance2.get{{ field.field_name | pascal }}().ordinal() + 1) % enumValues.length]);
            assertNotEquals(instance1, instance2,
                "Instances with different {{ field.field_name }} should not be equal");
        }
        // If enum has only one value, skip the inequality test for this field
        {%- elif base_type == "Object" %}
        // For Object type, create a new unique instance using a timestamp-based hash
        instance2.set{{ field.field_name | pascal }}(new Object());
        assertNotEquals(instance1, instance2,
            "Instances with different {{ field.field_name }} should not be equal");
        {%- elif '.' in base_type %}
        // For complex object types, set to null to create a different value
        instance2.set{{ field.field_name | pascal }}(null);
        assertNotEquals(instance1, instance2,
            "Instances with different {{ field.field_name }} should not be equal");
        {%- else %}
        instance2.set{{ field.field_name | pascal }}({{ field.test_value }});
        assertNotEquals(instance1, instance2,
            "Instances with different {{ field.field_name }} should not be equal");
        {%- endif %}
        {%- endif %}
        {%- endif %}
        {%- endfor %}
    }

    {%- for field in fields %}
    /**
     * Testing property {{ field.field_name }}
     */
    @Test
    public void test{{ field.field_name | pascal }}Property() {
        {%- if '<' in field.field_type %}
        {{ field.field_type }} testValue = {{ field.test_value }};
        {%- else %}
        {%- set simple_field_type = field.field_type.split('.')[-1] if '.' in field.field_type else field.field_type %}
        {{ simple_field_type }} testValue = {{ field.test_value }};
        {%- endif %}
        {%- if not field.is_const %}
        instance.set{{ field.field_name | pascal }}(testValue);
        {%- endif %}
        {%- if field.is_discriminator %}
        assertEquals(testValue, instance.get{{ field.field_name | pascal }}Value());
        {%- else %}
        assertEquals(testValue, instance.get{{ field.field_name | pascal }}());
        {%- endif %}
    }
    {%- endfor %}
    {%- if avro_annotation %}
    /**
     * Testing Avro serializer
     */
    @Test
    public void test_ToByteArray_FromData_Avro() throws Exception {
        String mediaType = "application/vnd.apache.avro+avro";
        byte[] bytes = instance.toByteArray(mediaType);
        {{ class_name }} newInstance = {{ class_name }}.fromData(bytes, mediaType);
        assertEquals(instance, newInstance);
    }

    /**
     * Testing Avro serializer with gzip compression
     */
    @Test
    public void test_ToByteArray_FromData_Avro_Gzip() throws Exception {
        String mediaType = "application/vnd.apache.avro+avro+gzip";
        byte[] bytes = instance.toByteArray(mediaType);
        {{ class_name }} newInstance = {{ class_name }}.fromData(bytes, mediaType);
        assertEquals(instance, newInstance);
    }
    {%- endif %}
    {%- if jackson_annotation %}
    /**
     * Testing JSON serializer
     */
    @Test
    public void test_ToByteArray_FromData_Json() throws Exception {
        String mediaType = "application/json";
        byte[] bytes = instance.toByteArray(mediaType);
        {{ class_name }} newInstance = {{ class_name }}.fromData(bytes, mediaType);
        assertEquals(instance, newInstance);
    }

    /**
     * Testing JSON serializer with gzip compression
     */
    @Test
    public void test_ToByteArray_FromData_Json_Gzip() throws Exception {
        String mediaType = "application/json+gzip";
        byte[] bytes = instance.toByteArray(mediaType);
        {{ class_name }} newInstance = {{ class_name }}.fromData(bytes, mediaType);
        assertEquals(instance, newInstance);
    }
    {%- endif %}
}
avrotize/avrotojava/enum_test.java.jinja
@@ -0,0 +1,21 @@

package {{ package }};

import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;

/**
 * Test class for {{ enum_name }}
 */
public class {{ test_class_name }} {
    /**
     * Test {{ enum_name }} Enum
     */
    @Test
    public void test_{{ enum_name }}_Enum() {
        {{ enum_name }}[] values = {{ enum_name }}.values();
        {%- for symbol in symbols %}
        assertNotNull({{ enum_name }}.valueOf("{{ symbol }}"));
        {%- endfor %}
        assertEquals({{ symbols|length }}, values.length);
    }
}
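To show how a template like `enum_test.java.jinja` above turns into a concrete JUnit class, here is a small stand-alone rendering sketch using the jinja2 API directly; the template directory, package name, enum name, and symbols are illustrative assumptions, since avrotize normally drives these templates through its own Java generator rather than a hand-rolled environment.

```python
# Hypothetical, stand-alone rendering of enum_test.java.jinja with jinja2.
# All context values below are illustrative placeholders, not avrotize defaults.
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader("avrotize/avrotojava"))  # assumes the template dir from the file list
template = env.get_template("enum_test.java.jinja")

java_source = template.render(
    package="com.example.model",       # placeholder Java package
    enum_name="Color",                 # placeholder enum under test
    test_class_name="ColorTest",       # placeholder generated test class name
    symbols=["RED", "GREEN", "BLUE"],  # placeholder enum symbols
)
print(java_source)  # JUnit 5 test: one assertNotNull per symbol plus a values().length check
```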
avrotize/avrotojava/testproject.pom.jinja
@@ -0,0 +1,54 @@

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>{{ groupid }}</groupId>
    <artifactId>{{ artifactid }}-test</artifactId>
    <version>1.0-SNAPSHOT</version>
    <properties>
        <maven.compiler.source>{{ JDK_VERSION }}</maven.compiler.source>
        <maven.compiler.target>{{ JDK_VERSION }}</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency>
            <groupId>{{ groupid }}</groupId>
            <artifactId>{{ artifactid }}</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
            <version>{{ JUNIT_VERSION }}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-engine</artifactId>
            <version>{{ JUNIT_VERSION }}</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.avro</groupId>
            <artifactId>avro</artifactId>
            <version>{{ AVRO_VERSION }}</version>
        </dependency>
        <dependency>
            <groupId>com.fasterxml.jackson</groupId>
            <artifactId>jackson-bom</artifactId>
            <version>{{ JACKSON_VERSION }}</version>
            <type>pom</type>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>{{ MAVEN_SUREFIRE_VERSION }}</version>
            </plugin>
        </plugins>
    </build>
</project>