avrotize 3.0.2__py3-none-any.whl → 3.1.0__py3-none-any.whl

@@ -0,0 +1,151 @@
+ """Infers schema from JSON files and converts to Avro or JSON Structure format.
+
+ This module provides:
+ - json2a: Infer Avro schema from JSON files
+ - json2s: Infer JSON Structure schema from JSON files
+ """
+
+ import json
+ import os
+ from typing import Any, Dict, List
+
+ from avrotize.schema_inference import (
+     AvroSchemaInferrer,
+     JsonStructureSchemaInferrer,
+     JsonNode
+ )
+
+
+ def convert_json_to_avro(
+     input_files: List[str],
+     avro_schema_file: str,
+     type_name: str = 'Document',
+     avro_namespace: str = '',
+     sample_size: int = 0
+ ) -> None:
+     """Infers Avro schema from JSON files.
+
+     Reads JSON files, analyzes their structure, and generates an Avro schema
+     that can represent all the data. Multiple files are analyzed together to
+     produce a unified schema.
+
+     Args:
+         input_files: List of JSON file paths to analyze
+         avro_schema_file: Output path for the Avro schema
+         type_name: Name for the root type
+         avro_namespace: Namespace for generated Avro types
+         sample_size: Maximum number of records to sample (0 = all)
+     """
+     if not input_files:
+         raise ValueError("At least one input file is required")
+
+     values = _load_json_values(input_files, sample_size)
+
+     if not values:
+         raise ValueError("No valid JSON data found in input files")
+
+     inferrer = AvroSchemaInferrer(namespace=avro_namespace)
+     schema = inferrer.infer_from_json_values(type_name, values)
+
+     # Ensure output directory exists
+     output_dir = os.path.dirname(avro_schema_file)
+     if output_dir and not os.path.exists(output_dir):
+         os.makedirs(output_dir)
+
+     with open(avro_schema_file, 'w', encoding='utf-8') as f:
+         json.dump(schema, f, indent=2)
+
+
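For orientation, here is a minimal usage sketch for the Avro path. The import location is an assumption (the hunk header does not show the new file's path), and the file names, type name, and namespace are purely illustrative.

# Hypothetical usage of convert_json_to_avro; the module path below is an
# assumption, and all file names and namespaces are illustrative only.
from avrotize.json_to_schema import convert_json_to_avro  # path assumed

convert_json_to_avro(
    input_files=['orders-2024.json', 'orders-2025.json'],
    avro_schema_file='out/orders.avsc',
    type_name='Order',
    avro_namespace='com.example.orders',
    sample_size=1000,  # only the first 1000 records are sampled; 0 = all
)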
+ def convert_json_to_jstruct(
+     input_files: List[str],
+     jstruct_schema_file: str,
+     type_name: str = 'Document',
+     base_id: str = 'https://example.com/',
+     sample_size: int = 0
+ ) -> None:
+     """Infers JSON Structure schema from JSON files.
+
+     Reads JSON files, analyzes their structure, and generates a JSON Structure
+     schema that validates with the official JSON Structure SDK.
+
+     Args:
+         input_files: List of JSON file paths to analyze
+         jstruct_schema_file: Output path for the JSON Structure schema
+         type_name: Name for the root type
+         base_id: Base URI for $id generation
+         sample_size: Maximum number of records to sample (0 = all)
+     """
+     if not input_files:
+         raise ValueError("At least one input file is required")
+
+     values = _load_json_values(input_files, sample_size)
+
+     if not values:
+         raise ValueError("No valid JSON data found in input files")
+
+     inferrer = JsonStructureSchemaInferrer(base_id=base_id)
+     schema = inferrer.infer_from_json_values(type_name, values)
+
+     # Ensure output directory exists
+     output_dir = os.path.dirname(jstruct_schema_file)
+     if output_dir and not os.path.exists(output_dir):
+         os.makedirs(output_dir)
+
+     with open(jstruct_schema_file, 'w', encoding='utf-8') as f:
+         json.dump(schema, f, indent=2)
+
+
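A matching sketch for the JSON Structure path, under the same assumptions: the import path is guessed from the diff and the file names are illustrative. The base_id argument supplies the base URI used for $id generation.

# Hypothetical usage of convert_json_to_jstruct; module path assumed.
from avrotize.json_to_schema import convert_json_to_jstruct  # path assumed

convert_json_to_jstruct(
    input_files=['events.jsonl'],
    jstruct_schema_file='out/events.struct.json',
    type_name='Event',
    base_id='https://schemas.example.com/',  # base URI for generated $id values
    sample_size=0,  # 0 means every record is analyzed
)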
+ def _load_json_values(input_files: List[str], sample_size: int) -> List[Any]:
+     """Loads JSON values from files.
+
+     Handles both single JSON documents and JSON Lines (JSONL) files.
+     Arrays at the root level are flattened into individual values.
+
+     Args:
+         input_files: List of file paths
+         sample_size: Maximum values to load (0 = all)
+
+     Returns:
+         List of parsed JSON values
+     """
+     values: List[Any] = []
+
+     for file_path in input_files:
+         if sample_size > 0 and len(values) >= sample_size:
+             break
+
+         with open(file_path, 'r', encoding='utf-8') as f:
+             content = f.read().strip()
+
+         if not content:
+             continue
+
+         # Try parsing as a single JSON document first
+         try:
+             data = json.loads(content)
+             if isinstance(data, list):
+                 # Root-level array: each element is a separate value
+                 for item in data:
+                     values.append(item)
+                     if sample_size > 0 and len(values) >= sample_size:
+                         break
+             else:
+                 values.append(data)
+             continue
+         except json.JSONDecodeError:
+             pass
+
+         # Try parsing as JSON Lines (JSONL)
+         for line in content.split('\n'):
+             line = line.strip()
+             if not line:
+                 continue
+             try:
+                 data = json.loads(line)
+                 values.append(data)
+                 if sample_size > 0 and len(values) >= sample_size:
+                     break
+             except json.JSONDecodeError:
+                 pass
+
+     return values
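To make the loader's fallback order concrete, a small self-contained sketch follows: a root-level JSON array is flattened element by element, a file that fails whole-document parsing is retried as JSON Lines, and sample_size caps the total number of values. _load_json_values is a private helper and its module path is assumed, so treat this as illustration rather than supported API.

# Illustration only: _load_json_values is a private helper and the module
# path is an assumption based on this diff.
import json
import os
import tempfile

from avrotize.json_to_schema import _load_json_values  # path assumed

tmp = tempfile.mkdtemp()

# Root-level array: each element becomes a separate value.
array_file = os.path.join(tmp, 'batch.json')
with open(array_file, 'w', encoding='utf-8') as f:
    json.dump([{'id': 1}, {'id': 2}], f)

# Two JSON documents on separate lines: whole-document parsing fails,
# so the JSON Lines fallback parses them line by line.
jsonl_file = os.path.join(tmp, 'stream.jsonl')
with open(jsonl_file, 'w', encoding='utf-8') as f:
    f.write('{"id": 3}\n{"id": 4}\n')

values = _load_json_values([array_file, jsonl_file], sample_size=3)
print(values)  # expected: [{'id': 1}, {'id': 2}, {'id': 3}] (capped at 3)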