idfpy 25.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67) hide show
  1. idfpy-25.2.0/.github/workflows/publish.yml +25 -0
  2. idfpy-25.2.0/.github/workflows/update-models.yml +123 -0
  3. idfpy-25.2.0/.gitignore +9 -0
  4. idfpy-25.2.0/LICENSE +21 -0
  5. idfpy-25.2.0/PKG-INFO +77 -0
  6. idfpy-25.2.0/README.md +54 -0
  7. idfpy-25.2.0/idfpy/__init__.py +15 -0
  8. idfpy-25.2.0/idfpy/cli.py +47 -0
  9. idfpy-25.2.0/idfpy/codegen/__init__.py +4 -0
  10. idfpy-25.2.0/idfpy/codegen/field_parser.py +248 -0
  11. idfpy-25.2.0/idfpy/codegen/model_generator.py +630 -0
  12. idfpy-25.2.0/idfpy/codegen/schema_parser.py +322 -0
  13. idfpy-25.2.0/idfpy/codegen/template_filters.py +542 -0
  14. idfpy-25.2.0/idfpy/codegen/templates/idf_model_py.jinja2 +79 -0
  15. idfpy-25.2.0/idfpy/idf.py +510 -0
  16. idfpy-25.2.0/idfpy/models/__init__.py +17713 -0
  17. idfpy-25.2.0/idfpy/models/_base.py +110 -0
  18. idfpy-25.2.0/idfpy/models/_refs.py +994 -0
  19. idfpy-25.2.0/idfpy/models/advanced_construction.py +2598 -0
  20. idfpy-25.2.0/idfpy/models/air_distribution.py +428 -0
  21. idfpy-25.2.0/idfpy/models/availability_managers.py +492 -0
  22. idfpy-25.2.0/idfpy/models/coils.py +10592 -0
  23. idfpy-25.2.0/idfpy/models/condensers.py +2437 -0
  24. idfpy-25.2.0/idfpy/models/constructions.py +3814 -0
  25. idfpy-25.2.0/idfpy/models/curves.py +1126 -0
  26. idfpy-25.2.0/idfpy/models/daylighting.py +330 -0
  27. idfpy-25.2.0/idfpy/models/demand_limiting.py +342 -0
  28. idfpy-25.2.0/idfpy/models/economics.py +1298 -0
  29. idfpy-25.2.0/idfpy/models/electric_load.py +2668 -0
  30. idfpy-25.2.0/idfpy/models/ems.py +333 -0
  31. idfpy-25.2.0/idfpy/models/evap_coolers.py +394 -0
  32. idfpy-25.2.0/idfpy/models/external_interface.py +262 -0
  33. idfpy-25.2.0/idfpy/models/fans.py +618 -0
  34. idfpy-25.2.0/idfpy/models/faults.py +593 -0
  35. idfpy-25.2.0/idfpy/models/fluids.py +1453 -0
  36. idfpy-25.2.0/idfpy/models/hvac_design.py +1040 -0
  37. idfpy-25.2.0/idfpy/models/hvac_templates.py +7207 -0
  38. idfpy-25.2.0/idfpy/models/internal_gains.py +1351 -0
  39. idfpy-25.2.0/idfpy/models/location.py +1386 -0
  40. idfpy-25.2.0/idfpy/models/misc.py +6304 -0
  41. idfpy-25.2.0/idfpy/models/node_branch.py +602 -0
  42. idfpy-25.2.0/idfpy/models/outputs.py +1555 -0
  43. idfpy-25.2.0/idfpy/models/plant_control.py +1964 -0
  44. idfpy-25.2.0/idfpy/models/plant_equipment.py +4318 -0
  45. idfpy-25.2.0/idfpy/models/pumps.py +572 -0
  46. idfpy-25.2.0/idfpy/models/python_plugins.py +231 -0
  47. idfpy-25.2.0/idfpy/models/refrigeration.py +1863 -0
  48. idfpy-25.2.0/idfpy/models/room_air.py +876 -0
  49. idfpy-25.2.0/idfpy/models/schedules.py +348 -0
  50. idfpy-25.2.0/idfpy/models/setpoint_managers.py +1013 -0
  51. idfpy-25.2.0/idfpy/models/simulation.py +627 -0
  52. idfpy-25.2.0/idfpy/models/solar.py +537 -0
  53. idfpy-25.2.0/idfpy/models/thermal_zones.py +2405 -0
  54. idfpy-25.2.0/idfpy/models/unitary.py +1724 -0
  55. idfpy-25.2.0/idfpy/models/user_defined.py +414 -0
  56. idfpy-25.2.0/idfpy/models/water_heaters.py +1797 -0
  57. idfpy-25.2.0/idfpy/models/water_systems.py +310 -0
  58. idfpy-25.2.0/idfpy/models/zone_airflow.py +1264 -0
  59. idfpy-25.2.0/idfpy/models/zone_controls.py +622 -0
  60. idfpy-25.2.0/idfpy/models/zone_equipment.py +407 -0
  61. idfpy-25.2.0/idfpy/models/zone_forced_air.py +2845 -0
  62. idfpy-25.2.0/idfpy/models/zone_radiative.py +1345 -0
  63. idfpy-25.2.0/idfpy/models/zone_terminals.py +1375 -0
  64. idfpy-25.2.0/idfpy/py.typed +0 -0
  65. idfpy-25.2.0/pyproject.toml +70 -0
  66. idfpy-25.2.0/tests/__init__.py +0 -0
  67. idfpy-25.2.0/tests/test_models.py +185 -0
@@ -0,0 +1,25 @@
1
+ name: Publish to PyPI
2
+
3
+ on:
4
+ push:
5
+ tags:
6
+ - "v*"
7
+ - "!latest" # Exclude latest tag
8
+
9
+ permissions:
10
+ id-token: write # Required for trusted publishing
11
+
12
+ jobs:
13
+ publish:
14
+ runs-on: ubuntu-latest
15
+ environment: pypi
16
+ steps:
17
+ - uses: actions/checkout@v4
18
+
19
+ - uses: astral-sh/setup-uv@v4
20
+
21
+ - name: Build
22
+ run: uv build
23
+
24
+ - name: Publish to PyPI
25
+ uses: pypa/gh-action-pypi-publish@release/v1
@@ -0,0 +1,123 @@
1
+ name: Update EnergyPlus Models
2
+
3
+ on:
4
+ schedule:
5
+ - cron: "0 6 * * 1" # Weekly Monday 6AM UTC
6
+ workflow_dispatch:
7
+
8
+ permissions:
9
+ contents: write
10
+
11
+ jobs:
12
+ update-models:
13
+ runs-on: ubuntu-latest
14
+ steps:
15
+ - uses: actions/checkout@v4
16
+ with:
17
+ fetch-depth: 0 # Need full history for tags
18
+
19
+ - name: Get latest EnergyPlus release
20
+ id: ep
21
+ env:
22
+ GH_TOKEN: ${{ github.token }}
23
+ run: |
24
+ RELEASE_JSON=$(gh api repos/NREL/EnergyPlus/releases/latest)
25
+ TAG=$(echo "$RELEASE_JSON" | jq -r '.tag_name')
26
+ VERSION=${TAG#v}
27
+ echo "version=$VERSION" >> "$GITHUB_OUTPUT"
28
+
29
+ # Pick Ubuntu 24.04 x86_64 tar.gz
30
+ ASSET_URL=$(echo "$RELEASE_JSON" | jq -r '
31
+ .assets[]
32
+ | select(.name | test("Linux-Ubuntu24\\.04-x86_64\\.tar\\.gz$"))
33
+ | .browser_download_url
34
+ ')
35
+ if [ -z "$ASSET_URL" ]; then
36
+ # Fallback: any Linux x86_64 tar.gz
37
+ ASSET_URL=$(echo "$RELEASE_JSON" | jq -r '
38
+ .assets[]
39
+ | select(.name | test("Linux.*x86_64\\.tar\\.gz$"))
40
+ | .browser_download_url
41
+ ' | head -1)
42
+ fi
43
+ echo "asset_url=$ASSET_URL" >> "$GITHUB_OUTPUT"
44
+ echo "EnergyPlus $VERSION -> $ASSET_URL"
45
+
46
+ - name: Check if update needed
47
+ id: check
48
+ run: |
49
+ EP="${{ steps.ep.outputs.version }}"
50
+ TAG="v${EP}"
51
+ if git tag -l "$TAG" | grep -q "$TAG"; then
52
+ echo "Tag $TAG already exists, skipping"
53
+ echo "skip=true" >> "$GITHUB_OUTPUT"
54
+ else
55
+ echo "Tag $TAG not found, will update"
56
+ echo "skip=false" >> "$GITHUB_OUTPUT"
57
+ fi
58
+
59
+ - name: Download and extract schema
60
+ if: steps.check.outputs.skip == 'false'
61
+ run: |
62
+ curl -L -o ep.tar.gz "${{ steps.ep.outputs.asset_url }}"
63
+ tar -xzf ep.tar.gz --wildcards '*/Energy+.schema.epJSON' --strip-components=1
64
+ rm ep.tar.gz
65
+ ls -lh 'Energy+.schema.epJSON'
66
+
67
+ - uses: astral-sh/setup-uv@v4
68
+ if: steps.check.outputs.skip == 'false'
69
+
70
+ - name: Generate models
71
+ if: steps.check.outputs.skip == 'false'
72
+ run: |
73
+ uv sync --all-groups
74
+ uv run idfpy codegen --schema 'Energy+.schema.epJSON' --output idfpy/models
75
+ uv run ruff check --fix .
76
+ uv run ruff format .
77
+ uv run ty check .
78
+ rm 'Energy+.schema.epJSON'
79
+
80
+ - name: Run tests
81
+ if: steps.check.outputs.skip == 'false'
82
+ run: uv run pytest tests/ -v
83
+
84
+ - name: Update version and README
85
+ if: steps.check.outputs.skip == 'false'
86
+ run: |
87
+ EP_VERSION="${{ steps.ep.outputs.version }}"
88
+
89
+ # Update pyproject.toml version
90
+ sed -i "s/^version = .*/version = \"${EP_VERSION}\"/" pyproject.toml
91
+
92
+ # Count object types and ref types from generated code
93
+ OBJ_COUNT=$(grep -r '_idf_object_type: ClassVar' idfpy/models/*.py | grep -cv '_base.py')
94
+ REF_COUNT=$(grep -c 'Ref = Annotated' idfpy/models/_refs.py)
95
+
96
+ # Update README
97
+ sed -i "s/version \*\*[0-9.]*\*\*/version **${EP_VERSION}**/" README.md
98
+ sed -i "s/\*\*[0-9]* object types\*\*/\*\*${OBJ_COUNT} object types\*\*/" README.md
99
+ sed -i "s/\*\*[0-9]* reference types\*\*/\*\*${REF_COUNT} reference types\*\*/" README.md
100
+
101
+ - name: Commit, tag and push
102
+ if: steps.check.outputs.skip == 'false'
103
+ run: |
104
+ EP_VERSION="${{ steps.ep.outputs.version }}"
105
+
106
+ git config user.name "github-actions[bot]"
107
+ git config user.email "github-actions[bot]@users.noreply.github.com"
108
+
109
+ git add -A
110
+ if git diff --cached --quiet; then
111
+ echo "No changes detected"
112
+ exit 0
113
+ fi
114
+
115
+ git commit -m "feat(models): update to EnergyPlus ${EP_VERSION}"
116
+
117
+ TAG="v${EP_VERSION}"
118
+ git tag -a "$TAG" -m "EnergyPlus ${EP_VERSION} models"
119
+ git tag -f latest -m "Latest EnergyPlus models (${EP_VERSION})"
120
+
121
+ git push origin HEAD
122
+ git push origin "$TAG"
123
+ git push origin latest --force
@@ -0,0 +1,9 @@
1
+ __pycache__/
2
+ *.py[cod]
3
+ *.egg-info/
4
+ dist/
5
+ build/
6
+ .venv/
7
+ *.lock
8
+
9
+ .claude/
idfpy-25.2.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 ITOTI-Y
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
idfpy-25.2.0/PKG-INFO ADDED
@@ -0,0 +1,77 @@
1
+ Metadata-Version: 2.4
2
+ Name: idfpy
3
+ Version: 25.2.0
4
+ Summary: Type-safe Pydantic models for all EnergyPlus IDF objects
5
+ Project-URL: Repository, https://github.com/ITOTI-Y/idfpy
6
+ Project-URL: Issues, https://github.com/ITOTI-Y/idfpy/issues
7
+ Author: ITOTI-Y
8
+ License-Expression: MIT
9
+ License-File: LICENSE
10
+ Keywords: building-simulation,energyplus,idf,pydantic
11
+ Classifier: Development Status :: 3 - Alpha
12
+ Classifier: Intended Audience :: Science/Research
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3.12
15
+ Classifier: Topic :: Scientific/Engineering
16
+ Classifier: Typing :: Typed
17
+ Requires-Python: >=3.12
18
+ Requires-Dist: jinja2>=3.1.6
19
+ Requires-Dist: loguru>=0.7.3
20
+ Requires-Dist: pydantic>=2.0
21
+ Requires-Dist: typer>=0.21.1
22
+ Description-Content-Type: text/markdown
23
+
24
+ # idfpy
25
+
26
+ Type-safe [Pydantic](https://docs.pydantic.dev/) models for **all** [EnergyPlus](https://energyplus.net/) IDF object types, plus IDF file read/write and simulation execution.
27
+
28
+ Auto-generated from `Energy+.schema.epJSON` version **25.2.0**.
29
+
30
+ ## Features
31
+
32
+ - **858 object types** as Pydantic v2 models with full validation
33
+ - **275 reference types** with cross-object validation
34
+ - **Case-insensitive** Literal field matching (EnergyPlus IDF is case-insensitive)
35
+ - **Extensible field** support (vertices, schedule data, etc.)
36
+ - **IDF read/write** with positional field ordering
37
+ - **EnergyPlus simulation** execution with ExpandObjects support
38
+ - Accepts both `snake_case` and original EnergyPlus schema key names
39
+
40
+ ## Installation
41
+
42
+ ```bash
43
+ pip install idfpy
44
+ ```
45
+
46
+ ## Quick Start
47
+
48
+ ```python
49
+ from pathlib import Path
50
+ from idfpy import IDF
51
+ from idfpy.models.simulation import Version, Building
52
+ from idfpy.models.thermal_zones import Zone
53
+
54
+ # Create an IDF
55
+ idf = IDF()
56
+ idf.add(Version())
57
+ idf.add(Building(name='MyBuilding', north_axis=0.0))
58
+ idf.add(Zone(name='Zone1'))
59
+
60
+ # Save
61
+ idf.save(Path('output.idf'))
62
+
63
+ # Load existing IDF
64
+ idf = IDF.load(Path('existing.idf'))
65
+ print(f'{len(idf)} objects loaded')
66
+
67
+ # Run simulation
68
+ idf.run(
69
+ idf_path=Path('model.idf'),
70
+ weather_path=Path('weather.epw'),
71
+ output_dir=Path('results/'),
72
+ )
73
+ ```
74
+
75
+ ## License
76
+
77
+ MIT
idfpy-25.2.0/README.md ADDED
@@ -0,0 +1,54 @@
1
+ # idfpy
2
+
3
+ Type-safe [Pydantic](https://docs.pydantic.dev/) models for **all** [EnergyPlus](https://energyplus.net/) IDF object types, plus IDF file read/write and simulation execution.
4
+
5
+ Auto-generated from `Energy+.schema.epJSON` version **25.2.0**.
6
+
7
+ ## Features
8
+
9
+ - **858 object types** as Pydantic v2 models with full validation
10
+ - **275 reference types** with cross-object validation
11
+ - **Case-insensitive** Literal field matching (EnergyPlus IDF is case-insensitive)
12
+ - **Extensible field** support (vertices, schedule data, etc.)
13
+ - **IDF read/write** with positional field ordering
14
+ - **EnergyPlus simulation** execution with ExpandObjects support
15
+ - Accepts both `snake_case` and original EnergyPlus schema key names
16
+
17
+ ## Installation
18
+
19
+ ```bash
20
+ pip install idfpy
21
+ ```
22
+
23
+ ## Quick Start
24
+
25
+ ```python
26
+ from pathlib import Path
27
+ from idfpy import IDF
28
+ from idfpy.models.simulation import Version, Building
29
+ from idfpy.models.thermal_zones import Zone
30
+
31
+ # Create an IDF
32
+ idf = IDF()
33
+ idf.add(Version())
34
+ idf.add(Building(name='MyBuilding', north_axis=0.0))
35
+ idf.add(Zone(name='Zone1'))
36
+
37
+ # Save
38
+ idf.save(Path('output.idf'))
39
+
40
+ # Load existing IDF
41
+ idf = IDF.load(Path('existing.idf'))
42
+ print(f'{len(idf)} objects loaded')
43
+
44
+ # Run simulation
45
+ idf.run(
46
+ idf_path=Path('model.idf'),
47
+ weather_path=Path('weather.epw'),
48
+ output_dir=Path('results/'),
49
+ )
50
+ ```
51
+
52
+ ## License
53
+
54
+ MIT
@@ -0,0 +1,15 @@
1
+ """idfpy - EnergyPlus IDF models and file handling.
2
+
3
+ Type-safe Pydantic models for all EnergyPlus IDF object types,
4
+ plus IDF file read/write functionality.
5
+
6
+ Generated from Energy+.schema.epJSON version 25.1.
7
+ """
8
+
9
+ from importlib.metadata import version
10
+
11
+ from idfpy.idf import IDF
12
+ from idfpy.models._base import IDFBaseModel
13
+
14
+ __version__ = version('idfpy')
15
+ __all__ = ['IDF', 'IDFBaseModel', '__version__']
@@ -0,0 +1,47 @@
1
+ """idfpy command-line interface."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from pathlib import Path
6
+ from typing import Annotated
7
+
8
+ from typer import Option, Typer
9
+
10
+ app = Typer(name='idfpy', help='EnergyPlus IDF toolkit')
11
+
12
+
13
@app.command()
def codegen(
    schema: Annotated[
        Path, Option('--schema', '-s', help='Path to Energy+.schema.epJSON')
    ],
    output: Annotated[
        Path, Option('--output', '-o', help='Output directory for generated models')
    ] = Path('generated_models'),
) -> None:
    """Generate type-safe Pydantic models from an EnergyPlus epJSON schema."""
    # Imported lazily so unrelated subcommands don't pay the codegen import cost.
    from idfpy.codegen import ModelGenerator, SchemaParser

    schema_reader = SchemaParser(schema_path=schema)
    object_specs = schema_reader.parse()
    detected_version = schema_reader.get_version()
    ModelGenerator(output_dir=output).generate_all(
        object_specs, schema_version=detected_version
    )
30
+
31
+
32
@app.command()
def run(
    idf: Annotated[Path, Option('--idf', '-i', help='Path to IDF file')],
    weather: Annotated[
        Path, Option('--weather', '-w', help='Path to EPW weather file')
    ],
    output: Annotated[
        Path | None, Option('--output', '-o', help='Output directory')
    ] = None,
) -> None:
    """Run an EnergyPlus simulation and exit with its return code."""
    # Lazy import keeps CLI startup (e.g. `idfpy --help`) fast.
    from idfpy.idf import IDF

    exit_code = IDF().run(idf_path=idf, weather_path=weather, output_dir=output)
    raise SystemExit(exit_code)
@@ -0,0 +1,4 @@
1
"""idfpy code generation: parse the EnergyPlus schema and emit Pydantic models."""

from .model_generator import ModelGenerator
from .schema_parser import SchemaParser

__all__ = ['ModelGenerator', 'SchemaParser']
@@ -0,0 +1,248 @@
1
+ """EnergyPlus JSON Schema parser for code generation.
2
+
3
+ This module parses Energy+.schema.epJSON to extract field and object
4
+ specifications for generating type-safe Pydantic models.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import re
10
+ from dataclasses import dataclass
11
+ from typing import Any
12
+
13
# Sentinel distinguishing "schema provides no default" from an explicit
# ``None``/falsy default value.
_UNSET = object()


@dataclass
class FieldSpec:
    """EnergyPlus field specification.

    Represents a single field definition extracted from the schema,
    including type information, constraints, and metadata.

    Attributes:
        name: Original field name from schema (e.g., "direction_of_relative_north").
        python_name: Python-compatible name in snake_case.
        field_type: JSON schema type ("number", "string", "integer", "array").
        default: Default value if specified; ``_UNSET`` when the schema
            defines no default.
        required: Whether the field is required.
        enum_values: List of allowed values for enum fields.
        units: Physical units (e.g., "m", "deg", "W").
        minimum: Minimum allowed value (inclusive).
        maximum: Maximum allowed value (inclusive).
        exclusive_minimum: Exclusive minimum value.
        exclusive_maximum: Exclusive maximum value.
        object_list: Reference to other object types (for object_list fields).
        items_spec: Nested FieldSpec for array item types.
        item_class_name: Class name for generated array-item models; never
            set by FieldParser itself (presumably filled in by the model
            generator — confirm against model_generator.py).
        note: Field description/note from schema.
        data_type: Additional data type hint (e.g., "object_list").
        anyof_specs: List of alternative type specs for anyOf fields.
        nested_fields: List of nested field specs for object fields.
    """

    name: str
    python_name: str
    field_type: str
    default: Any = _UNSET
    required: bool = False
    enum_values: list[str] | None = None
    units: str | None = None
    minimum: float | None = None
    maximum: float | None = None
    exclusive_minimum: float | None = None
    exclusive_maximum: float | None = None
    object_list: list[str] | None = None
    items_spec: FieldSpec | None = None
    item_class_name: str | None = None
    note: str | None = None
    data_type: str | None = None
    anyof_specs: list[FieldSpec] | None = None
    nested_fields: list[FieldSpec] | None = None
61
+
62
+
63
class FieldParser:
    """Parser for EnergyPlus schema field definitions.

    Extracts FieldSpec instances from JSON schema field definitions,
    handling various field types including nested objects and arrays.
    """

    # Inserts "_" before each uppercase letter not at the start of the
    # string, splitting CamelCase runs before everything is lowercased.
    _CAMEL_TO_SNAKE_PATTERN = re.compile(r'(?<!^)(?=[A-Z])')
    # Any character that cannot appear in a Python identifier (e.g. "%").
    _INVALID_IDENT_PATTERN = re.compile(r'[^0-9a-zA-Z_]')

    def parse_field(self, name: str, field_schema: dict[str, Any]) -> FieldSpec:
        """Parse a single field definition from schema.

        Args:
            name: Original field name.
            field_schema: Field definition dictionary from schema.

        Returns:
            Parsed FieldSpec instance.
        """
        python_name = self._to_python_name(name)

        # Fields with type alternatives (e.g. number-or-"Autosize") are
        # handled separately so each alternative gets its own spec.
        if 'anyOf' in field_schema:
            return self._parse_anyof_field(name, python_name, field_schema)

        field_type = field_schema.get('type', 'string')

        spec = FieldSpec(
            name=name,
            python_name=python_name,
            field_type=field_type,
            default=field_schema.get('default', _UNSET),
            enum_values=field_schema.get('enum'),
            units=field_schema.get('units'),
            minimum=field_schema.get('minimum'),
            maximum=field_schema.get('maximum'),
            exclusive_minimum=field_schema.get('exclusiveMinimum'),
            exclusive_maximum=field_schema.get('exclusiveMaximum'),
            object_list=field_schema.get('object_list'),
            note=field_schema.get('note'),
            data_type=field_schema.get('data_type'),
        )

        if field_type == 'array' and 'items' in field_schema:
            spec.items_spec = self._parse_array_items(field_schema['items'])

        return spec

    def _parse_anyof_field(
        self, name: str, python_name: str, field_schema: dict[str, Any]
    ) -> FieldSpec:
        """Parse a field with anyOf type alternatives.

        Args:
            name: Original field name.
            python_name: Python-compatible field name.
            field_schema: Field definition with anyOf.

        Returns:
            FieldSpec with anyof_specs populated.
        """
        anyof_specs = []
        # 'string' doubles as the "not yet assigned" sentinel: any later
        # non-null, non-string alternative overwrites it, so a numeric
        # alternative wins over a string one regardless of schema order.
        primary_type = 'string'

        for alt_schema in field_schema.get('anyOf', []):
            alt_type = alt_schema.get('type', 'string')

            if alt_type != 'null' and primary_type == 'string':
                primary_type = alt_type

            alt_spec = FieldSpec(
                name=name,
                python_name=python_name,
                field_type=alt_type,
                enum_values=alt_schema.get('enum'),
                minimum=alt_schema.get('minimum'),
                maximum=alt_schema.get('maximum'),
            )
            anyof_specs.append(alt_spec)

        return FieldSpec(
            name=name,
            python_name=python_name,
            field_type=primary_type,
            default=field_schema.get('default', _UNSET),
            units=field_schema.get('units'),
            note=field_schema.get('note'),
            anyof_specs=anyof_specs,
        )

    def _parse_array_items(self, items_schema: dict[str, Any]) -> FieldSpec:
        """Parse array items specification.

        Handles nested object definitions within array items,
        such as vertices with x/y/z coordinates.

        Args:
            items_schema: Array items definition from schema.

        Returns:
            FieldSpec representing the array item type.
        """
        item_type = items_schema.get('type', 'object')

        if item_type == 'object' and 'properties' in items_schema:
            # Structured items (e.g. vertex records): recurse into each
            # property and record per-property required-ness.
            nested_fields = []
            required_set = set(items_schema.get('required', []))
            for prop_name, prop_schema in items_schema['properties'].items():
                nested_spec = self.parse_field(prop_name, prop_schema)
                nested_spec.required = prop_name in required_set
                nested_fields.append(nested_spec)

            return FieldSpec(
                name='_item',
                python_name='_item',
                field_type='object',
                nested_fields=nested_fields,
            )

        # Scalar items: carry over enum/bounds constraints directly.
        return FieldSpec(
            name='_item',
            python_name='_item',
            field_type=item_type,
            enum_values=items_schema.get('enum'),
            minimum=items_schema.get('minimum'),
            maximum=items_schema.get('maximum'),
        )

    def _to_python_name(self, name: str) -> str:
        """Convert field name to Python snake_case.

        Separators (spaces, hyphens) and any other non-identifier
        characters (e.g. "%") become underscores, CamelCase runs are
        split, and a leading digit is prefixed with 'n' so the result
        is always a valid Python identifier.

        Args:
            name: Original field name (may contain spaces, hyphens, etc.).

        Returns:
            Python-compatible snake_case name.

        Examples:
            >>> parser = FieldParser()
            >>> parser._to_python_name('direction_of_relative_north')
            'direction_of_relative_north'
            >>> parser._to_python_name('X Origin')
            'x_origin'
            >>> parser._to_python_name('vertex-x-coordinate')
            'vertex_x_coordinate'
            >>> parser._to_python_name('100% Outdoor Air in Cooling')
            'n100_outdoor_air_in_cooling'
        """
        result = name.replace(' ', '_').replace('-', '_')

        # Fix: characters invalid in identifiers (like '%') were previously
        # left in place, producing names such as 'n100%_outdoor_air_in_cooling'.
        # Replace them with '_' so adjacent words stay separated.
        result = self._INVALID_IDENT_PATTERN.sub('_', result)

        result = self._CAMEL_TO_SNAKE_PATTERN.sub('_', result)

        result = result.lower()

        result = re.sub(r'_+', '_', result)

        result = result.strip('_')

        # Python identifiers cannot start with a digit - prefix with 'n'
        if result and result[0].isdigit():
            result = 'n' + result

        return result

    def parse_fields_from_properties(
        self,
        properties: dict[str, Any],
        required_fields: list[str] | None = None,
    ) -> list[FieldSpec]:
        """Parse all fields from a properties dictionary.

        Args:
            properties: Dictionary of field name to field schema.
            required_fields: List of required field names.

        Returns:
            List of parsed FieldSpec instances.
        """
        required_set = set(required_fields or [])
        fields = []

        for name, schema in properties.items():
            spec = self.parse_field(name, schema)
            spec.required = name in required_set
            fields.append(spec)

        return fields