schemez 1.0.0__tar.gz → 1.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: schemez
-Version: 1.0.0
+Version: 1.1.0
 Summary: Pydantic shim for config stuff
 Keywords:
 Author: Philipp Temminghoff
@@ -1,6 +1,6 @@
 [project]
 name = "schemez"
-version = "1.0.0"
+version = "1.1.0"
 description = "Pydantic shim for config stuff"
 readme = "README.md"
 requires-python = ">=3.13"
@@ -48,6 +48,7 @@ dev = [
     "devtools",
     "pyreadline3",
     "pytest",
+    "pytest-asyncio>=1.2.0",
    "pytest-cov",
     # Only add below (Copier)
 ]
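The new pytest-asyncio pin lines up with the async helpers added further down in this diff. A minimal sketch of how such a test might look; the test module, model, and assertion below are illustrative, not taken from the package, and assume `Schema` is exported at the package root and datamodel-codegen is on PATH:

    import pytest

    from schemez import Schema  # assumption: Schema is re-exported at the package root


    class ExampleConfig(Schema):  # hypothetical test model
        name: str = "demo"


    @pytest.mark.asyncio  # marker provided by the new pytest-asyncio dev dependency
    async def test_to_python_code_generates_source() -> None:
        code = await ExampleConfig.to_python_code()
        assert "class" in code  # generated module should define a class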
@@ -2,8 +2,14 @@
 
 from __future__ import annotations
 
+import asyncio
 import importlib
+import json
 import os
+from pathlib import Path
+import subprocess
+import sys
+import tempfile
 from typing import TYPE_CHECKING, Any
 
 from pydantic import BaseModel
@@ -169,3 +175,90 @@ def resolve_type_string(type_string: str, safe: bool = True) -> type:
     except Exception as e:
         msg = f"Failed to resolve type {type_string} in unsafe mode"
         raise ValueError(msg) from e
+
+
+async def model_to_python_code(
+    model: type[BaseModel],
+    *,
+    class_name: str | None = None,
+    target_python_version: str | None = None,
+) -> str:
+    """Convert a BaseModel to Python code asynchronously.
+
+    Args:
+        model: The BaseModel class to convert
+        class_name: Optional custom class name for the generated code
+        target_python_version: Target Python version for code generation.
+            Defaults to current system Python version.
+
+    Returns:
+        Generated Python code as string
+
+    Raises:
+        RuntimeError: If datamodel-codegen is not available
+        subprocess.CalledProcessError: If code generation fails
+    """
+    try:
+        # Check if datamodel-codegen is available
+        proc = await asyncio.create_subprocess_exec(
+            "datamodel-codegen",
+            "--version",
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE,
+        )
+        await proc.communicate()
+        if proc.returncode != 0:
+            raise subprocess.CalledProcessError(
+                proc.returncode or -1, "datamodel-codegen"
+            )
+    except FileNotFoundError as e:
+        msg = "datamodel-codegen not available"
+        raise RuntimeError(msg) from e
+
+    # Get model schema
+    schema = model.model_json_schema()
+    name = class_name or model.__name__
+    python_version = (
+        target_python_version or f"{sys.version_info.major}.{sys.version_info.minor}"
+    )
+
+    # Create temporary file with schema
+    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f:
+        json.dump(schema, f)
+        schema_file = Path(f.name)
+
+    try:
+        # Generate model using datamodel-codegen
+        proc = await asyncio.create_subprocess_exec(
+            "datamodel-codegen",
+            "--input",
+            str(schema_file),
+            "--input-file-type",
+            "jsonschema",
+            "--output-model-type",
+            "pydantic.BaseModel",
+            "--class-name",
+            name,
+            "--disable-timestamp",
+            "--use-union-operator",
+            "--use-schema-description",
+            "--enum-field-as-literal",
+            "all",
+            "--target-python-version",
+            python_version,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE,
+        )
+        stdout, stderr = await proc.communicate()
+
+        if proc.returncode != 0:
+            msg = f"datamodel-codegen failed: {stderr.decode()}"
+            raise subprocess.CalledProcessError(
+                proc.returncode or -1, "datamodel-codegen"
+            )
+
+        return stdout.decode().strip()
+
+    finally:
+        # Cleanup temp file
+        schema_file.unlink(missing_ok=True)
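For orientation, a hedged sketch of driving the new helper directly. The example model and variable names are hypothetical; the import path matches the `from schemez.helpers import model_to_python_code` statement that appears later in this diff, and datamodel-codegen must be on PATH:

    import asyncio

    from pydantic import BaseModel

    from schemez.helpers import model_to_python_code


    class ExampleModel(BaseModel):  # hypothetical input model
        name: str
        retries: int = 3


    # Shells out to datamodel-codegen and returns the generated module source
    source = asyncio.run(model_to_python_code(ExampleModel, class_name="Regenerated"))
    print(source)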
@@ -254,3 +254,27 @@ class Schema(BaseModel):
         except Exception as exc:
             msg = f"Failed to save configuration to {path}"
             raise ValueError(msg) from exc
+
+    @classmethod
+    async def to_python_code(
+        cls,
+        *,
+        class_name: str | None = None,
+        target_python_version: str | None = None,
+    ) -> str:
+        """Convert this model to Python code asynchronously.
+
+        Args:
+            class_name: Optional custom class name for the generated code
+            target_python_version: Target Python version for code generation
+
+        Returns:
+            Generated Python code as string
+        """
+        from schemez.helpers import model_to_python_code
+
+        return await model_to_python_code(
+            cls,
+            class_name=class_name,
+            target_python_version=target_python_version,
+        )
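The classmethod is a thin wrapper over the helper above, so callers can stay on the schema class itself. A short usage sketch; the subclass and field names are illustrative, and the top-level import of `Schema` is an assumption:

    import asyncio

    from schemez import Schema  # assumption: Schema is re-exported at the package root


    class ServerConfig(Schema):  # hypothetical Schema subclass
        host: str = "localhost"
        port: int = 8080


    # Requires datamodel-codegen on PATH; raises RuntimeError otherwise
    print(asyncio.run(ServerConfig.to_python_code(class_name="ServerConfigModel")))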
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+from collections.abc import Callable
 from enum import Enum
 from typing import Annotated, Any, Literal
 
@@ -213,29 +214,22 @@ class InlineSchemaDef(BaseSchemaDef):
             # Handle enum type
             if field.type == "enum":
                 if not field.values:
-                    msg = f"Field '{name}' has type 'enum' but no values defined"
+                    msg = f"Field {name!r} has type 'enum' but no values defined"
                     raise ValueError(msg)
 
-                # Create dynamic Enum class
-                enum_name = f"{name.capitalize()}Enum"
-
-                # Create enum members dictionary
-                enum_members = {}
+                enum_name = f"{name.capitalize()}Enum"  # Create dynamic Enum class
+                enum_members = {}  # Create enum members dictionary
                 for i, value in enumerate(field.values):
                     if isinstance(value, str) and value.isidentifier():
-                        # If value is a valid Python identifier, use it as is
-                        key = value
+                        key = value  # If value is a valid Python identifier, use as is
                     else:
-                        # Otherwise, create a synthetic name
-                        key = f"VALUE_{i}"
+                        key = f"VALUE_{i}"  # Otherwise, create a synthetic name
                     enum_members[key] = value
 
-                # Create the enum class
-                enum_class = Enum(enum_name, enum_members)
+                enum_class = Enum(enum_name, enum_members)  # Create the enum class
                 python_type: Any = enum_class
 
-                # Handle enum default value specially
-                if field.default is not None:
+                if field.default is not None:  # Handle enum default value specially
                     # Store default value as the enum value string
                     # Pydantic v2 will convert it to the enum instance
                     if field.default in list(field.values):
@@ -252,18 +246,13 @@ class InlineSchemaDef(BaseSchemaDef):
                 msg = f"Unsupported field type: {field.type}"
                 raise ValueError(msg)
 
-            # Handle literal constraint if provided
-            if field.literal_value is not None:
-                from typing import Literal as LiteralType
-
-                python_type = LiteralType[field.literal_value]
+            if field.literal_value is not None:  # Handle literal constraint if provided
+                python_type = Literal[field.literal_value]
 
-            # Handle optional fields (allowing None)
-            if field.optional:
+            if field.optional:  # Handle optional fields (allowing None)
                 python_type = python_type | None  # type: ignore
 
-            # Add standard Pydantic constraints
-            # Collect all constraint values
+            # Add standard Pydantic constraints. Collect all constraint values
             for constraint in [
                 "default",
                 "title",
@@ -282,23 +271,19 @@ class InlineSchemaDef(BaseSchemaDef):
                 if value is not None:
                     field_constraints[constraint] = value
 
-            # Handle examples separately (Pydantic v2 way)
             if field.examples:
                 if field.json_schema_extra is None:
                     field.json_schema_extra = {}
                 field.json_schema_extra["examples"] = field.examples
 
-            # Add json_schema_extra if provided
             if field.json_schema_extra:
                 field_constraints["json_schema_extra"] = field.json_schema_extra
 
-            # Handle field dependencies
             if field.dependent_required or field.dependent_schema:
                 if field.json_schema_extra is None:
                     field_constraints["json_schema_extra"] = {}
 
                 json_extra = field_constraints.get("json_schema_extra", {})
-
                 if field.dependent_required:
                     if "dependentRequired" not in json_extra:
                         json_extra["dependentRequired"] = {}
@@ -311,9 +296,7 @@ class InlineSchemaDef(BaseSchemaDef):
 
                 field_constraints["json_schema_extra"] = json_extra
 
-            # Add any additional constraints
-            field_constraints.update(field.constraints)
-
+            field_constraints.update(field.constraints)  # Add any additional constraints
             field_info = Field(description=field.description, **field_constraints)
             fields[name] = (python_type, field_info)
 
@@ -321,25 +304,18 @@ class InlineSchemaDef(BaseSchemaDef):
             if field.dependent_required or field.dependent_schema:
                 if not model_dependencies:
                     model_dependencies = {"json_schema_extra": {}}
-
+                extra = model_dependencies["json_schema_extra"]
                 if field.dependent_required:
-                    if "dependentRequired" not in model_dependencies["json_schema_extra"]:
-                        model_dependencies["json_schema_extra"]["dependentRequired"] = {}
-                    model_dependencies["json_schema_extra"]["dependentRequired"].update(
-                        field.dependent_required
-                    )
-
+                    if "dependentRequired" not in extra:
+                        extra["dependentRequired"] = {}
+                    extra["dependentRequired"].update(field.dependent_required)
                 if field.dependent_schema:
-                    if "dependentSchemas" not in model_dependencies["json_schema_extra"]:
-                        model_dependencies["json_schema_extra"]["dependentSchemas"] = {}
-                    model_dependencies["json_schema_extra"]["dependentSchemas"].update(
-                        field.dependent_schema
-                    )
-
-        # Create the model class with field definitions
-        cls_name = self.description or "ResponseType"
-        model = create_model(
-            cls_name,
+                    if "dependentSchemas" not in extra:
+                        extra["dependentSchemas"] = {}
+                    extra["dependentSchemas"].update(field.dependent_schema)
+
+        model = create_model(  # Create the model class
+            self.description or "ResponseType",
             **fields,
             __base__=BaseModel,
             __doc__=self.description,
@@ -347,23 +323,22 @@ class InlineSchemaDef(BaseSchemaDef):
 
         # Add model-level JSON Schema extras for dependencies
         if model_dependencies:
-            if not hasattr(model, "model_config") or not model.model_config:
-                model.model_config = {}
-
-            if "json_schema_extra" not in model.model_config:
-                model.model_config["json_schema_extra"] = {}
-
-            schema_extra = model.model_config["json_schema_extra"]
-
-            if "dependentRequired" in model_dependencies["json_schema_extra"]:
-                schema_extra["dependentRequired"] = model_dependencies[
-                    "json_schema_extra"
-                ]["dependentRequired"]
-
-            if "dependentSchemas" in model_dependencies["json_schema_extra"]:
-                schema_extra["dependentSchemas"] = model_dependencies[
-                    "json_schema_extra"
-                ]["dependentSchemas"]
+            existing_extra = model.model_config.get("json_schema_extra")
+            deps_extra = model_dependencies["json_schema_extra"]
+
+            match existing_extra:
+                case None:
+                    model.model_config["json_schema_extra"] = deps_extra
+                case dict() as schema_extra:
+                    schema_extra.update(deps_extra)
+                case Callable() as callable_func:
+
+                    def wrapped_extra(*args: Any) -> None:
+                        callable_func(*args)
+                        schema = args[0]
+                        schema.update(deps_extra)
+
+                    model.model_config["json_schema_extra"] = wrapped_extra
 
         # Return the created model
         return model
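The match statement covers the three shapes Pydantic accepts for model_config["json_schema_extra"]: unset, a dict, or a callable that mutates the schema in place. A standalone sketch of the same merge idea, with illustrative names that are not part of the schemez codebase:

    from collections.abc import Callable
    from typing import Any


    def merge_json_schema_extra(config: dict[str, Any], deps: dict[str, Any]) -> None:
        """Fold dependency keys into json_schema_extra regardless of its current shape."""
        match config.get("json_schema_extra"):
            case None:
                config["json_schema_extra"] = deps  # nothing set yet: store deps directly
            case dict() as existing:
                existing.update(deps)  # merge into the existing mapping
            case Callable() as func:

                def wrapped(schema: dict[str, Any], *args: Any) -> None:
                    func(schema, *args)  # run the original callable first
                    schema.update(deps)  # then layer the dependency keys on top

                config["json_schema_extra"] = wrapped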