etlplus 0.11.12__tar.gz → 0.12.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {etlplus-0.11.12/etlplus.egg-info → etlplus-0.12.9}/PKG-INFO +94 -1
- {etlplus-0.11.12 → etlplus-0.12.9}/README.md +88 -0
- etlplus-0.12.9/etlplus/file/_imports.py +141 -0
- etlplus-0.12.9/etlplus/file/_io.py +121 -0
- etlplus-0.12.9/etlplus/file/avro.py +164 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/file/core.py +119 -84
- etlplus-0.12.9/etlplus/file/csv.py +67 -0
- etlplus-0.12.9/etlplus/file/dat.py +66 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/file/enums.py +114 -15
- etlplus-0.12.9/etlplus/file/feather.py +99 -0
- etlplus-0.12.9/etlplus/file/fwf.py +66 -0
- etlplus-0.12.9/etlplus/file/gz.py +123 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/file/json.py +13 -2
- etlplus-0.12.9/etlplus/file/ndjson.py +102 -0
- etlplus-0.12.9/etlplus/file/orc.py +99 -0
- etlplus-0.12.9/etlplus/file/parquet.py +101 -0
- etlplus-0.12.9/etlplus/file/psv.py +66 -0
- etlplus-0.12.9/etlplus/file/stub.py +84 -0
- etlplus-0.12.9/etlplus/file/tab.py +82 -0
- etlplus-0.12.9/etlplus/file/tsv.py +67 -0
- etlplus-0.12.9/etlplus/file/txt.py +92 -0
- etlplus-0.12.9/etlplus/file/xls.py +88 -0
- etlplus-0.12.9/etlplus/file/xlsx.py +99 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/file/xml.py +12 -3
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/file/yaml.py +12 -41
- etlplus-0.12.9/etlplus/file/zip.py +175 -0
- {etlplus-0.11.12 → etlplus-0.12.9/etlplus.egg-info}/PKG-INFO +94 -1
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus.egg-info/SOURCES.txt +7 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus.egg-info/requires.txt +5 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/pyproject.toml +5 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/setup.py +5 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/database/test_u_database_ddl.py +3 -2
- etlplus-0.12.9/tests/unit/file/test_u_file_core.py +533 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/file/test_u_file_enums.py +28 -19
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/file/test_u_file_yaml.py +8 -9
- etlplus-0.11.12/etlplus/file/avro.py +0 -59
- etlplus-0.11.12/etlplus/file/csv.py +0 -82
- etlplus-0.11.12/etlplus/file/feather.py +0 -59
- etlplus-0.11.12/etlplus/file/gz.py +0 -59
- etlplus-0.11.12/etlplus/file/ndjson.py +0 -59
- etlplus-0.11.12/etlplus/file/orc.py +0 -59
- etlplus-0.11.12/etlplus/file/parquet.py +0 -59
- etlplus-0.11.12/etlplus/file/tsv.py +0 -59
- etlplus-0.11.12/etlplus/file/txt.py +0 -59
- etlplus-0.11.12/etlplus/file/xls.py +0 -59
- etlplus-0.11.12/etlplus/file/xlsx.py +0 -59
- etlplus-0.11.12/etlplus/file/zip.py +0 -49
- etlplus-0.11.12/tests/unit/file/test_u_file_core.py +0 -318
- {etlplus-0.11.12 → etlplus-0.12.9}/.coveragerc +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/.editorconfig +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/.gitattributes +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/.github/actions/python-bootstrap/action.yml +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/.github/workflows/ci.yml +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/.gitignore +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/.pre-commit-config.yaml +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/.ruff.toml +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/CODE_OF_CONDUCT.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/CONTRIBUTING.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/DEMO.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/LICENSE +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/MANIFEST.in +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/Makefile +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/REFERENCES.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/SECURITY.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/SUPPORT.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/docs/README.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/docs/pipeline-guide.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/docs/snippets/installation_version.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/README.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/__main__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/__version__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/README.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/auth.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/config.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/endpoint_client.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/errors.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/pagination/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/pagination/client.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/pagination/config.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/pagination/paginator.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/rate_limiting/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/rate_limiting/config.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/rate_limiting/rate_limiter.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/request_manager.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/retry_manager.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/transport.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/api/types.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/cli/README.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/cli/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/cli/commands.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/cli/constants.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/cli/handlers.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/cli/io.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/cli/main.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/cli/options.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/cli/state.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/cli/types.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/config/README.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/config/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/config/connector.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/config/jobs.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/config/pipeline.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/config/profile.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/config/types.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/config/utils.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/database/README.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/database/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/database/ddl.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/database/engine.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/database/orm.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/database/schema.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/database/types.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/enums.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/extract.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/file/README.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/file/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/load.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/mixins.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/py.typed +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/run.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/run_helpers.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/templates/README.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/templates/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/templates/ddl.sql.j2 +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/templates/view.sql.j2 +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/transform.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/types.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/utils.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/validate.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/validation/README.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/validation/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus/validation/utils.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus.egg-info/dependency_links.txt +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus.egg-info/entry_points.txt +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/etlplus.egg-info/top_level.txt +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/examples/README.md +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/examples/configs/ddl_spec.yml +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/examples/configs/pipeline.yml +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/examples/data/sample.csv +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/examples/data/sample.json +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/examples/data/sample.xml +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/examples/data/sample.xsd +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/examples/data/sample.yaml +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/examples/quickstart_python.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/pytest.ini +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/setup.cfg +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/__init__.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/conftest.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/integration/conftest.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/integration/test_i_cli.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/integration/test_i_examples_data_parity.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/integration/test_i_pagination_strategy.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/integration/test_i_pipeline_smoke.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/integration/test_i_pipeline_yaml_load.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/integration/test_i_run.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/integration/test_i_run_profile_pagination_defaults.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/integration/test_i_run_profile_rate_limit_defaults.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/conftest.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_auth.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_config.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_endpoint_client.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_mocks.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_pagination_client.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_pagination_config.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_paginator.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_rate_limit_config.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_rate_limiter.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_request_manager.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_retry_manager.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_transport.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/api/test_u_types.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/cli/conftest.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/cli/test_u_cli_handlers.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/cli/test_u_cli_io.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/cli/test_u_cli_main.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/cli/test_u_cli_state.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/config/test_u_config_utils.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/config/test_u_connector.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/config/test_u_jobs.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/config/test_u_pipeline.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/conftest.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/database/test_u_database_engine.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/database/test_u_database_orm.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/database/test_u_database_schema.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_enums.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_extract.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_load.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_main.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_mixins.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_run.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_run_helpers.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_transform.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_utils.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_validate.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/test_u_version.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tests/unit/validation/test_u_validation_utils.py +0 -0
- {etlplus-0.11.12 → etlplus-0.12.9}/tools/update_demo_snippets.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: etlplus
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.12.9
|
|
4
4
|
Summary: A Swiss Army knife for simple ETL operations
|
|
5
5
|
Home-page: https://github.com/Dagitali/ETLPlus
|
|
6
6
|
Author: ETLPlus Team
|
|
@@ -17,8 +17,11 @@ Classifier: Programming Language :: Python :: 3.14
|
|
|
17
17
|
Requires-Python: >=3.13,<3.15
|
|
18
18
|
Description-Content-Type: text/markdown
|
|
19
19
|
License-File: LICENSE
|
|
20
|
+
Requires-Dist: fastavro>=1.12.1
|
|
20
21
|
Requires-Dist: jinja2>=3.1.6
|
|
22
|
+
Requires-Dist: openpyxl>=3.1.5
|
|
21
23
|
Requires-Dist: pyodbc>=5.3.0
|
|
24
|
+
Requires-Dist: pyarrow>=22.0.0
|
|
22
25
|
Requires-Dist: python-dotenv>=1.2.1
|
|
23
26
|
Requires-Dist: pandas>=2.3.3
|
|
24
27
|
Requires-Dist: pydantic>=2.12.5
|
|
@@ -26,6 +29,8 @@ Requires-Dist: PyYAML>=6.0.3
|
|
|
26
29
|
Requires-Dist: requests>=2.32.5
|
|
27
30
|
Requires-Dist: SQLAlchemy>=2.0.45
|
|
28
31
|
Requires-Dist: typer>=0.21.0
|
|
32
|
+
Requires-Dist: xlrd>=2.0.2
|
|
33
|
+
Requires-Dist: xlwt>=1.3.0
|
|
29
34
|
Provides-Extra: dev
|
|
30
35
|
Requires-Dist: black>=25.9.0; extra == "dev"
|
|
31
36
|
Requires-Dist: build>=1.2.2; extra == "dev"
|
|
@@ -63,6 +68,17 @@ package and command-line interface for data extraction, validation, transformati
|
|
|
63
68
|
- [Features](#features)
|
|
64
69
|
- [Installation](#installation)
|
|
65
70
|
- [Quickstart](#quickstart)
|
|
71
|
+
- [Data Connectors](#data-connectors)
|
|
72
|
+
- [REST APIs (`api`)](#rest-apis-api)
|
|
73
|
+
- [Databases (`database`)](#databases-database)
|
|
74
|
+
- [Files (`file`)](#files-file)
|
|
75
|
+
- [Stubbed / Placeholder](#stubbed--placeholder)
|
|
76
|
+
- [Tabular \& Delimited Text](#tabular--delimited-text)
|
|
77
|
+
- [Semi-Structured Text](#semi-structured-text)
|
|
78
|
+
- [Columnar / Analytics-Friendly](#columnar--analytics-friendly)
|
|
79
|
+
- [Binary Serialization and Interchange](#binary-serialization-and-interchange)
|
|
80
|
+
- [Spreadsheets](#spreadsheets)
|
|
81
|
+
- [Data Archives](#data-archives)
|
|
66
82
|
- [Usage](#usage)
|
|
67
83
|
- [Command Line Interface](#command-line-interface)
|
|
68
84
|
- [Argument Order and Required Options](#argument-order-and-required-options)
|
|
@@ -186,6 +202,83 @@ assert validate(filtered, rules)["valid"]
|
|
|
186
202
|
load(filtered, "file", "temp/sample_output.json", file_format="json")
|
|
187
203
|
```
|
|
188
204
|
|
|
205
|
+
## Data Connectors
|
|
206
|
+
|
|
207
|
+
Data connectors abstract sources from which to extract data and targets to which to load data. They
|
|
208
|
+
are differentiated by their types, each of which is represented in the subsections below.
|
|
209
|
+
|
|
210
|
+
### REST APIs (`api`)
|
|
211
|
+
|
|
212
|
+
ETLPlus can extract from REST APIs and load results via common HTTP methods. Supported operations
|
|
213
|
+
include GET for extract and PATCH/POST/PUT for load.
|
|
214
|
+
|
|
215
|
+
### Databases (`database`)
|
|
216
|
+
|
|
217
|
+
Database connectors use connection strings for extraction and loading, and
|
|
218
|
+
DDL can be rendered from table specs for migrations or schema checks.
|
|
219
|
+
|
|
220
|
+
### Files (`file`)
|
|
221
|
+
|
|
222
|
+
File formats are grouped as in `FileFormat`. Support is marked as:
|
|
223
|
+
|
|
224
|
+
- **Y**: implemented (may require optional dependencies)
|
|
225
|
+
- **N**: stubbed or not yet implemented
|
|
226
|
+
|
|
227
|
+
#### Stubbed / Placeholder
|
|
228
|
+
|
|
229
|
+
| Format | Supported | Description |
|
|
230
|
+
| --- | --- | --- |
|
|
231
|
+
| `stub` | N | Placeholder format for tests and future connectors. |
|
|
232
|
+
|
|
233
|
+
#### Tabular & Delimited Text
|
|
234
|
+
|
|
235
|
+
| Format | Supported | Description |
|
|
236
|
+
| --- | --- | --- |
|
|
237
|
+
| `csv` | Y | Comma-Separated Values |
|
|
238
|
+
| `fwf` | N | Fixed-Width Fields |
|
|
239
|
+
| `dat` | N | Generic data file, often delimited or fixed-width |
|
|
240
|
+
| `psv` | N | Pipe-Separated Values |
|
|
241
|
+
| `tab` | N | Often synonymous with TSV |
|
|
242
|
+
| `tsv` | Y | Tab-Separated Values |
|
|
243
|
+
| `txt` | Y | Plain text, often delimited or fixed-width |
|
|
244
|
+
|
|
245
|
+
#### Semi-Structured Text
|
|
246
|
+
|
|
247
|
+
| Format | Supported | Description |
|
|
248
|
+
| --- | --- | --- |
|
|
249
|
+
| `json` | Y | JavaScript Object Notation |
|
|
250
|
+
| `ndjson` | Y | Newline-Delimited JSON |
|
|
251
|
+
| `xml` | Y | Extensible Markup Language |
|
|
252
|
+
| `yaml` | Y | YAML Ain't Markup Language |
|
|
253
|
+
|
|
254
|
+
#### Columnar / Analytics-Friendly
|
|
255
|
+
|
|
256
|
+
| Format | Supported | Description |
|
|
257
|
+
| --- | --- | --- |
|
|
258
|
+
| `feather` | Y | Apache Arrow Feather |
|
|
259
|
+
| `orc` | Y | Optimized Row Columnar; common in Hadoop |
|
|
260
|
+
| `parquet` | Y | Apache Parquet; common in Big Data |
|
|
261
|
+
|
|
262
|
+
#### Binary Serialization and Interchange
|
|
263
|
+
|
|
264
|
+
| Format | Supported | Description |
|
|
265
|
+
| --- | --- | --- |
|
|
266
|
+
| `avro` | Y | Apache Avro |
|
|
267
|
+
|
|
268
|
+
#### Spreadsheets
|
|
269
|
+
|
|
270
|
+
| Format | Supported | Description |
|
|
271
|
+
| --- | --- | --- |
|
|
272
|
+
| `xls` | Y | Microsoft Excel (BIFF); read-only |
|
|
273
|
+
| `xlsx` | Y | Microsoft Excel (Open XML) |
|
|
274
|
+
|
|
275
|
+
#### Data Archives
|
|
276
|
+
|
|
277
|
+
| Format | Supported | Description |
|
|
278
|
+
| --- | --- | --- |
|
|
279
|
+
| `gz` | Y | Gzip-compressed file |
|
|
280
|
+
| `zip` | Y | ZIP archive |
|
|
281
|
+
|
|
189
282
|
## Usage
|
|
190
283
|
|
|
191
284
|
### Command Line Interface
|
|
@@ -18,6 +18,17 @@ package and command-line interface for data extraction, validation, transformati
|
|
|
18
18
|
- [Features](#features)
|
|
19
19
|
- [Installation](#installation)
|
|
20
20
|
- [Quickstart](#quickstart)
|
|
21
|
+
- [Data Connectors](#data-connectors)
|
|
22
|
+
- [REST APIs (`api`)](#rest-apis-api)
|
|
23
|
+
- [Databases (`database`)](#databases-database)
|
|
24
|
+
- [Files (`file`)](#files-file)
|
|
25
|
+
- [Stubbed / Placeholder](#stubbed--placeholder)
|
|
26
|
+
- [Tabular \& Delimited Text](#tabular--delimited-text)
|
|
27
|
+
- [Semi-Structured Text](#semi-structured-text)
|
|
28
|
+
- [Columnar / Analytics-Friendly](#columnar--analytics-friendly)
|
|
29
|
+
- [Binary Serialization and Interchange](#binary-serialization-and-interchange)
|
|
30
|
+
- [Spreadsheets](#spreadsheets)
|
|
31
|
+
- [Data Archives](#data-archives)
|
|
21
32
|
- [Usage](#usage)
|
|
22
33
|
- [Command Line Interface](#command-line-interface)
|
|
23
34
|
- [Argument Order and Required Options](#argument-order-and-required-options)
|
|
@@ -141,6 +152,83 @@ assert validate(filtered, rules)["valid"]
|
|
|
141
152
|
load(filtered, "file", "temp/sample_output.json", file_format="json")
|
|
142
153
|
```
|
|
143
154
|
|
|
155
|
+
## Data Connectors
|
|
156
|
+
|
|
157
|
+
Data connectors abstract sources from which to extract data and targets to which to load data. They
|
|
158
|
+
are differentiated by their types, each of which is represented in the subsections below.
|
|
159
|
+
|
|
160
|
+
### REST APIs (`api`)
|
|
161
|
+
|
|
162
|
+
ETLPlus can extract from REST APIs and load results via common HTTP methods. Supported operations
|
|
163
|
+
include GET for extract and PATCH/POST/PUT for load.
|
|
164
|
+
|
|
165
|
+
### Databases (`database`)
|
|
166
|
+
|
|
167
|
+
Database connectors use connection strings for extraction and loading, and
|
|
168
|
+
DDL can be rendered from table specs for migrations or schema checks.
|
|
169
|
+
|
|
170
|
+
### Files (`file`)
|
|
171
|
+
|
|
172
|
+
File formats are grouped as in `FileFormat`. Support is marked as:
|
|
173
|
+
|
|
174
|
+
- **Y**: implemented (may require optional dependencies)
|
|
175
|
+
- **N**: stubbed or not yet implemented
|
|
176
|
+
|
|
177
|
+
#### Stubbed / Placeholder
|
|
178
|
+
|
|
179
|
+
| Format | Supported | Description |
|
|
180
|
+
| --- | --- | --- |
|
|
181
|
+
| `stub` | N | Placeholder format for tests and future connectors. |
|
|
182
|
+
|
|
183
|
+
#### Tabular & Delimited Text
|
|
184
|
+
|
|
185
|
+
| Format | Supported | Description |
|
|
186
|
+
| --- | --- | --- |
|
|
187
|
+
| `csv` | Y | Comma-Separated Values |
|
|
188
|
+
| `fwf` | N | Fixed-Width Fields |
|
|
189
|
+
| `dat` | N | Generic data file, often delimited or fixed-width |
|
|
190
|
+
| `psv` | N | Pipe-Separated Values |
|
|
191
|
+
| `tab` | N | Often synonymous with TSV |
|
|
192
|
+
| `tsv` | Y | Tab-Separated Values |
|
|
193
|
+
| `txt` | Y | Plain text, often delimited or fixed-width |
|
|
194
|
+
|
|
195
|
+
#### Semi-Structured Text
|
|
196
|
+
|
|
197
|
+
| Format | Supported | Description |
|
|
198
|
+
| --- | --- | --- |
|
|
199
|
+
| `json` | Y | JavaScript Object Notation |
|
|
200
|
+
| `ndjson` | Y | Newline-Delimited JSON |
|
|
201
|
+
| `xml` | Y | Extensible Markup Language |
|
|
202
|
+
| `yaml` | Y | YAML Ain't Markup Language |
|
|
203
|
+
|
|
204
|
+
#### Columnar / Analytics-Friendly
|
|
205
|
+
|
|
206
|
+
| Format | Supported | Description |
|
|
207
|
+
| --- | --- | --- |
|
|
208
|
+
| `feather` | Y | Apache Arrow Feather |
|
|
209
|
+
| `orc` | Y | Optimized Row Columnar; common in Hadoop |
|
|
210
|
+
| `parquet` | Y | Apache Parquet; common in Big Data |
|
|
211
|
+
|
|
212
|
+
#### Binary Serialization and Interchange
|
|
213
|
+
|
|
214
|
+
| Format | Supported | Description |
|
|
215
|
+
| --- | --- | --- |
|
|
216
|
+
| `avro` | Y | Apache Avro |
|
|
217
|
+
|
|
218
|
+
#### Spreadsheets
|
|
219
|
+
|
|
220
|
+
| Format | Supported | Description |
|
|
221
|
+
| --- | --- | --- |
|
|
222
|
+
| `xls` | Y | Microsoft Excel (BIFF); read-only |
|
|
223
|
+
| `xlsx` | Y | Microsoft Excel (Open XML) |
|
|
224
|
+
|
|
225
|
+
#### Data Archives
|
|
226
|
+
|
|
227
|
+
| Format | Supported | Description |
|
|
228
|
+
| --- | --- | --- |
|
|
229
|
+
| `gz` | Y | Gzip-compressed file |
|
|
230
|
+
| `zip` | Y | ZIP archive |
|
|
231
|
+
|
|
144
232
|
## Usage
|
|
145
233
|
|
|
146
234
|
### Command Line Interface
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
"""
|
|
2
|
+
:mod:`etlplus.file._imports` module.
|
|
3
|
+
|
|
4
|
+
Shared helpers for optional dependency imports.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from importlib import import_module
|
|
10
|
+
from typing import Any
|
|
11
|
+
|
|
12
|
+
# SECTION: INTERNAL CONSTANTS =============================================== #
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
_MODULE_CACHE: dict[str, Any] = {}
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
# SECTION: INTERNAL FUNCTIONS =============================================== #
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _error_message(
|
|
22
|
+
module_name: str,
|
|
23
|
+
format_name: str,
|
|
24
|
+
) -> str:
|
|
25
|
+
"""
|
|
26
|
+
Build an import error message for an optional dependency.
|
|
27
|
+
|
|
28
|
+
Parameters
|
|
29
|
+
----------
|
|
30
|
+
module_name : str
|
|
31
|
+
Module name to look up.
|
|
32
|
+
format_name : str
|
|
33
|
+
Human-readable format name for templated messages.
|
|
34
|
+
|
|
35
|
+
Returns
|
|
36
|
+
-------
|
|
37
|
+
str
|
|
38
|
+
Formatted error message.
|
|
39
|
+
"""
|
|
40
|
+
return (
|
|
41
|
+
f'{format_name} support requires '
|
|
42
|
+
f'optional dependency "{module_name}".\n'
|
|
43
|
+
f'Install with: pip install {module_name}'
|
|
44
|
+
)
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
# SECTION: FUNCTIONS ======================================================== #
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def get_optional_module(
|
|
51
|
+
module_name: str,
|
|
52
|
+
*,
|
|
53
|
+
error_message: str,
|
|
54
|
+
) -> Any:
|
|
55
|
+
"""
|
|
56
|
+
Return an optional dependency module, caching on first import.
|
|
57
|
+
|
|
58
|
+
Parameters
|
|
59
|
+
----------
|
|
60
|
+
module_name : str
|
|
61
|
+
Name of the module to import.
|
|
62
|
+
error_message : str
|
|
63
|
+
Error message to surface when the module is missing.
|
|
64
|
+
|
|
65
|
+
Returns
|
|
66
|
+
-------
|
|
67
|
+
Any
|
|
68
|
+
The imported module.
|
|
69
|
+
|
|
70
|
+
Raises
|
|
71
|
+
------
|
|
72
|
+
ImportError
|
|
73
|
+
If the optional dependency is missing.
|
|
74
|
+
"""
|
|
75
|
+
cached = _MODULE_CACHE.get(module_name)
|
|
76
|
+
if cached is not None: # pragma: no cover - tiny branch
|
|
77
|
+
return cached
|
|
78
|
+
try:
|
|
79
|
+
module = import_module(module_name)
|
|
80
|
+
except ImportError as e: # pragma: no cover
|
|
81
|
+
raise ImportError(error_message) from e
|
|
82
|
+
_MODULE_CACHE[module_name] = module
|
|
83
|
+
return module
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def get_fastavro() -> Any:
|
|
87
|
+
"""
|
|
88
|
+
Return the fastavro module, importing it on first use.
|
|
89
|
+
|
|
90
|
+
Raises an informative ImportError if the optional dependency is missing.
|
|
91
|
+
|
|
92
|
+
Notes
|
|
93
|
+
-----
|
|
94
|
+
Prefer :func:`get_optional_module` for new call sites.
|
|
95
|
+
"""
|
|
96
|
+
return get_optional_module(
|
|
97
|
+
'fastavro',
|
|
98
|
+
error_message=_error_message('fastavro', format_name='AVRO'),
|
|
99
|
+
)
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def get_pandas(
|
|
103
|
+
format_name: str,
|
|
104
|
+
) -> Any:
|
|
105
|
+
"""
|
|
106
|
+
Return the pandas module, importing it on first use.
|
|
107
|
+
|
|
108
|
+
Parameters
|
|
109
|
+
----------
|
|
110
|
+
format_name : str
|
|
111
|
+
Human-readable format name for error messages.
|
|
112
|
+
|
|
113
|
+
Returns
|
|
114
|
+
-------
|
|
115
|
+
Any
|
|
116
|
+
The pandas module.
|
|
117
|
+
|
|
118
|
+
Notes
|
|
119
|
+
-----
|
|
120
|
+
Prefer :func:`get_optional_module` for new call sites.
|
|
121
|
+
"""
|
|
122
|
+
return get_optional_module(
|
|
123
|
+
'pandas',
|
|
124
|
+
error_message=_error_message('pandas', format_name=format_name),
|
|
125
|
+
)
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def get_yaml() -> Any:
|
|
129
|
+
"""
|
|
130
|
+
Return the PyYAML module, importing it on first use.
|
|
131
|
+
|
|
132
|
+
Raises an informative ImportError if the optional dependency is missing.
|
|
133
|
+
|
|
134
|
+
Notes
|
|
135
|
+
-----
|
|
136
|
+
Prefer :func:`get_optional_module` for new call sites.
|
|
137
|
+
"""
|
|
138
|
+
return get_optional_module(
|
|
139
|
+
'yaml',
|
|
140
|
+
error_message=_error_message('PyYAML', format_name='YAML'),
|
|
141
|
+
)
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
"""
|
|
2
|
+
:mod:`etlplus.file._io` module.
|
|
3
|
+
|
|
4
|
+
Shared helpers for record normalization and delimited text formats.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import csv
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import cast
|
|
12
|
+
|
|
13
|
+
from ..types import JSONData
|
|
14
|
+
from ..types import JSONDict
|
|
15
|
+
from ..types import JSONList
|
|
16
|
+
|
|
17
|
+
# SECTION: FUNCTIONS ======================================================== #
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def normalize_records(
|
|
21
|
+
data: JSONData,
|
|
22
|
+
format_name: str,
|
|
23
|
+
) -> JSONList:
|
|
24
|
+
"""
|
|
25
|
+
Normalize payloads into a list of dictionaries.
|
|
26
|
+
|
|
27
|
+
Parameters
|
|
28
|
+
----------
|
|
29
|
+
data : JSONData
|
|
30
|
+
Input payload to normalize.
|
|
31
|
+
format_name : str
|
|
32
|
+
Human-readable format name for error messages.
|
|
33
|
+
|
|
34
|
+
Returns
|
|
35
|
+
-------
|
|
36
|
+
JSONList
|
|
37
|
+
Normalized list of dictionaries.
|
|
38
|
+
|
|
39
|
+
Raises
|
|
40
|
+
------
|
|
41
|
+
TypeError
|
|
42
|
+
If a list payload contains non-dict items.
|
|
43
|
+
"""
|
|
44
|
+
if isinstance(data, list):
|
|
45
|
+
if not all(isinstance(item, dict) for item in data):
|
|
46
|
+
raise TypeError(
|
|
47
|
+
f'{format_name} payloads must contain only objects (dicts)',
|
|
48
|
+
)
|
|
49
|
+
return cast(JSONList, data)
|
|
50
|
+
return [cast(JSONDict, data)]
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def read_delimited(path: Path, *, delimiter: str) -> JSONList:
|
|
54
|
+
"""
|
|
55
|
+
Read delimited content from ``path``.
|
|
56
|
+
|
|
57
|
+
Parameters
|
|
58
|
+
----------
|
|
59
|
+
path : Path
|
|
60
|
+
Path to the delimited file on disk.
|
|
61
|
+
delimiter : str
|
|
62
|
+
Delimiter character for parsing.
|
|
63
|
+
|
|
64
|
+
Returns
|
|
65
|
+
-------
|
|
66
|
+
JSONList
|
|
67
|
+
The list of dictionaries read from the delimited file.
|
|
68
|
+
"""
|
|
69
|
+
with path.open('r', encoding='utf-8', newline='') as handle:
|
|
70
|
+
reader: csv.DictReader[str] = csv.DictReader(
|
|
71
|
+
handle,
|
|
72
|
+
delimiter=delimiter,
|
|
73
|
+
)
|
|
74
|
+
rows: JSONList = []
|
|
75
|
+
for row in reader:
|
|
76
|
+
if not any(row.values()):
|
|
77
|
+
continue
|
|
78
|
+
rows.append(cast(JSONDict, dict(row)))
|
|
79
|
+
return rows
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def write_delimited(path: Path, data: JSONData, *, delimiter: str) -> int:
|
|
83
|
+
"""
|
|
84
|
+
Write ``data`` to a delimited file and return record count.
|
|
85
|
+
|
|
86
|
+
Parameters
|
|
87
|
+
----------
|
|
88
|
+
path : Path
|
|
89
|
+
Path to the delimited file on disk.
|
|
90
|
+
data : JSONData
|
|
91
|
+
Data to write as delimited rows.
|
|
92
|
+
delimiter : str
|
|
93
|
+
Delimiter character for writing.
|
|
94
|
+
|
|
95
|
+
Returns
|
|
96
|
+
-------
|
|
97
|
+
int
|
|
98
|
+
The number of rows written.
|
|
99
|
+
"""
|
|
100
|
+
rows: list[JSONDict]
|
|
101
|
+
if isinstance(data, list):
|
|
102
|
+
rows = [row for row in data if isinstance(row, dict)]
|
|
103
|
+
else:
|
|
104
|
+
rows = [data]
|
|
105
|
+
|
|
106
|
+
if not rows:
|
|
107
|
+
return 0
|
|
108
|
+
|
|
109
|
+
fieldnames = sorted({key for row in rows for key in row})
|
|
110
|
+
path.parent.mkdir(parents=True, exist_ok=True)
|
|
111
|
+
with path.open('w', encoding='utf-8', newline='') as handle:
|
|
112
|
+
writer = csv.DictWriter(
|
|
113
|
+
handle,
|
|
114
|
+
fieldnames=fieldnames,
|
|
115
|
+
delimiter=delimiter,
|
|
116
|
+
)
|
|
117
|
+
writer.writeheader()
|
|
118
|
+
for row in rows:
|
|
119
|
+
writer.writerow({field: row.get(field) for field in fieldnames})
|
|
120
|
+
|
|
121
|
+
return len(rows)
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
"""
|
|
2
|
+
:mod:`etlplus.file.avro` module.
|
|
3
|
+
|
|
4
|
+
Helpers for reading/writing Avro files.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any
|
|
11
|
+
from typing import cast
|
|
12
|
+
|
|
13
|
+
from etlplus.file._imports import get_fastavro
|
|
14
|
+
|
|
15
|
+
from ..types import JSONData
|
|
16
|
+
from ..types import JSONDict
|
|
17
|
+
from ..types import JSONList
|
|
18
|
+
from ._io import normalize_records
|
|
19
|
+
|
|
20
|
+
# SECTION: EXPORTS ========================================================== #
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
__all__ = [
|
|
24
|
+
'read',
|
|
25
|
+
'write',
|
|
26
|
+
]
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
# SECTION: INTERNAL CONSTANTS =============================================== #
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# Python types accepted as Avro primitive field values; anything outside
# this tuple (including dict/list containers) is rejected when inferring
# a schema. ``bool`` is listed explicitly even though it subclasses
# ``int`` so the intent is clear at a glance.
_PRIMITIVE_TYPES: tuple[type, ...] = (
    bool,
    int,
    float,
    str,
    bytes,
    bytearray,
)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
# SECTION: INTERNAL FUNCTIONS =============================================== #
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def _infer_schema(records: JSONList) -> dict[str, Any]:
    """
    Infer a basic Avro record schema from record payloads.

    Parameters
    ----------
    records : JSONList
        Record dictionaries whose field values must all be primitives.

    Returns
    -------
    dict[str, Any]
        An Avro ``record`` schema covering the union of all field names.

    Raises
    ------
    TypeError
        If any field value is not a supported primitive type.
    """
    field_names = sorted({key for record in records for key in record})
    fields: list[dict[str, Any]] = []
    for name in field_names:
        types: list[str] = []
        for record in records:
            value = record.get(name)
            if value is None:
                # Missing keys and explicit None both map to Avro ``null``.
                types.append('null')
                continue
            # A single membership test suffices: dict and list are not in
            # _PRIMITIVE_TYPES, so containers fail here too (the original
            # code performed a redundant dict/list pre-check raising the
            # identical TypeError).
            if not isinstance(value, _PRIMITIVE_TYPES):
                raise TypeError(
                    'AVRO payloads must contain only primitive values',
                )
            types.append(cast(str, _infer_value_type(value)))
        fields.append({'name': name, 'type': _merge_types(types)})

    return {
        'name': 'etlplus_record',
        'type': 'record',
        'fields': fields,
    }
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _infer_value_type(value: object) -> str | list[str]:
|
|
79
|
+
"""
|
|
80
|
+
Infer the Avro type for a primitive value.
|
|
81
|
+
|
|
82
|
+
Raises TypeError for unsupported types.
|
|
83
|
+
"""
|
|
84
|
+
if value is None:
|
|
85
|
+
return 'null'
|
|
86
|
+
if isinstance(value, bool):
|
|
87
|
+
return 'boolean'
|
|
88
|
+
if isinstance(value, int):
|
|
89
|
+
return 'long'
|
|
90
|
+
if isinstance(value, float):
|
|
91
|
+
return 'double'
|
|
92
|
+
if isinstance(value, str):
|
|
93
|
+
return 'string'
|
|
94
|
+
if isinstance(value, (bytes, bytearray)):
|
|
95
|
+
return 'bytes'
|
|
96
|
+
raise TypeError('AVRO payloads must contain only primitive values')
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def _merge_types(types: list[str]) -> str | list[str]:
|
|
100
|
+
"""Return a stable Avro type union for a list of types."""
|
|
101
|
+
unique = list(dict.fromkeys(types))
|
|
102
|
+
if len(unique) == 1:
|
|
103
|
+
return unique[0]
|
|
104
|
+
ordered = ['null'] + sorted(t for t in unique if t != 'null')
|
|
105
|
+
return ordered
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
# SECTION: FUNCTIONS ======================================================== #
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def read(
    path: Path,
) -> JSONList:
    """
    Read AVRO content from ``path``.

    Parameters
    ----------
    path : Path
        Path to the AVRO file on disk.

    Returns
    -------
    JSONList
        The list of dictionaries read from the AVRO file.
    """
    fastavro = get_fastavro()
    records: JSONList = []
    with path.open('rb') as stream:
        for record in fastavro.reader(stream):
            records.append(cast(JSONDict, record))
    return records
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def write(
    path: Path,
    data: JSONData,
) -> int:
    """
    Write ``data`` to AVRO at ``path`` and return record count.

    Parameters
    ----------
    path : Path
        Path to the AVRO file on disk.
    data : JSONData
        Data to write.

    Returns
    -------
    int
        Number of records written.
    """
    records = normalize_records(data, 'AVRO')
    if not records:
        # Nothing to serialize; no file is created for empty input.
        return 0

    fastavro = get_fastavro()
    parsed_schema = fastavro.parse_schema(_infer_schema(records))

    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open('wb') as sink:
        fastavro.writer(sink, parsed_schema, records)

    return len(records)
|