datacompose 0.2.4.1__tar.gz → 0.2.6.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of datacompose might be problematic.
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/CHANGELOG.md +86 -3
- datacompose-0.2.6.0/PKG-INFO +94 -0
- datacompose-0.2.6.0/README.md +44 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/cli/__init__.py +1 -1
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/cli/commands/add.py +49 -21
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/cli/commands/init.py +35 -9
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/cli/commands/list.py +2 -2
- datacompose-0.2.6.0/datacompose/cli/config.py +80 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/cli/main.py +3 -3
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/generators/base.py +15 -14
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/generators/pyspark/generator.py +5 -10
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/operators/__init__.py +1 -1
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/operators/primitives.py +57 -19
- {datacompose-0.2.4.1/datacompose/transformers/text/clean_addresses → datacompose-0.2.6.0/datacompose/transformers/text/addresses}/pyspark/pyspark_primitives.py +68 -13
- {datacompose-0.2.4.1/datacompose/transformers/text/clean_emails → datacompose-0.2.6.0/datacompose/transformers/text/emails}/pyspark/pyspark_primitives.py +53 -1
- {datacompose-0.2.4.1/datacompose/transformers/text/clean_phone_numbers → datacompose-0.2.6.0/datacompose/transformers/text/phone_numbers}/pyspark/pyspark_primitives.py +416 -366
- datacompose-0.2.6.0/datacompose.egg-info/PKG-INFO +94 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose.egg-info/SOURCES.txt +12 -14
- datacompose-0.2.6.0/datacompose.egg-info/requires.txt +21 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/pyproject.toml +11 -8
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/integration/test_end_to_end.py +29 -27
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/integration/test_full_workflow.py +49 -48
- datacompose-0.2.6.0/tests/integration/test_generated_imports.py +149 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/test_add_command.py +15 -5
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/test_add_command_complete.py +85 -66
- datacompose-0.2.6.0/tests/unit/cli/test_add_default_target.py +327 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/test_add_validation.py +5 -5
- datacompose-0.2.6.0/tests/unit/cli/test_config.py +249 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/test_init_command.py +1 -1
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/test_init_command_complete.py +13 -13
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/generators/test_base_generator.py +7 -7
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/generators/test_spark_generator.py +17 -17
- datacompose-0.2.6.0/tests/unit/operators/test_compose_conditions.py +594 -0
- datacompose-0.2.6.0/tests/unit/operators/test_conditional_auto_detection.py +192 -0
- datacompose-0.2.6.0/tests/unit/operators/test_conditional_core.py +733 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/operators/test_conditional_real_world.py +142 -7
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/operators/test_operators.py +13 -13
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/operators/test_primitives_complete.py +67 -65
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/test_discovery.py +6 -6
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_addresses/test_building_unit_extraction.py +2 -2
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_addresses/test_city_state_extraction.py +13 -13
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_addresses/test_clean_addresses.py +4 -4
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_addresses/test_country_extraction.py +1 -1
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_addresses/test_data_addresses.py +1 -1
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_addresses/test_po_box_extraction.py +1 -1
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_addresses/test_street_extraction.py +1 -1
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_addresses/test_zip_code_extraction.py +28 -28
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_emails/test_debug_long_emails.py +1 -1
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_emails/test_email_extraction.py +1 -1
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_emails/test_email_optimized.py +1 -1
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_phone_numbers/test_phone_extraction.py +84 -65
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/test_phone_numbers/test_phone_formatting.py +56 -32
- datacompose-0.2.4.1/PKG-INFO +0 -449
- datacompose-0.2.4.1/README.md +0 -402
- datacompose-0.2.4.1/datacompose.egg-info/PKG-INFO +0 -449
- datacompose-0.2.4.1/datacompose.egg-info/requires.txt +0 -18
- datacompose-0.2.4.1/tests/integration/test_generated_imports.py +0 -219
- datacompose-0.2.4.1/tests/unit/operators/conditional_tests_common.py +0 -26
- datacompose-0.2.4.1/tests/unit/operators/conftest.py +0 -61
- datacompose-0.2.4.1/tests/unit/operators/test_conditional_complex_logic.py +0 -200
- datacompose-0.2.4.1/tests/unit/operators/test_conditional_data_driven.py +0 -117
- datacompose-0.2.4.1/tests/unit/operators/test_conditional_edge_cases.py +0 -150
- datacompose-0.2.4.1/tests/unit/operators/test_conditional_error_handling.py +0 -67
- datacompose-0.2.4.1/tests/unit/operators/test_conditional_parameters.py +0 -94
- datacompose-0.2.4.1/tests/unit/operators/test_conditional_performance.py +0 -106
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/LICENSE +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/MANIFEST.in +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/cli/colors.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/cli/commands/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/cli/validation.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/generators/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/generators/pyspark/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/transformers/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/transformers/discovery.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose/transformers/text/__init__.py +0 -0
- {datacompose-0.2.4.1/datacompose/transformers/text/clean_addresses → datacompose-0.2.6.0/datacompose/transformers/text/addresses}/__init__.py +0 -0
- {datacompose-0.2.4.1/datacompose/transformers/text/clean_emails → datacompose-0.2.6.0/datacompose/transformers/text/emails}/__init__.py +0 -0
- {datacompose-0.2.4.1/datacompose/transformers/text/clean_phone_numbers → datacompose-0.2.6.0/datacompose/transformers/text/phone_numbers}/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose.egg-info/dependency_links.txt +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose.egg-info/entry_points.txt +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/datacompose.egg-info/top_level.txt +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/setup.cfg +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/integration/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/.venv/bin/activate_this.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/.venv/lib/python3.12/site-packages/_virtualenv.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/build/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/build/postgres/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/build/postgres/clean_emails/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/build/postgres/clean_emails/email_cleaner_udf_spec.yaml +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/build/postgres/clean_emails/test_email_cleaner_udf.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/build/spark/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/build/spark/clean_emails/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/build/spark/clean_emails/email_cleaner_udf.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/build/spark/clean_emails/email_cleaner_udf_spec.yaml +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/build/spark/clean_emails/test_email_cleaner_udf.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/test_list_command.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/test_main.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/test_main_complete.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/cli/test_validation_complete.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/generators/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/__init__.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/unit/transformers/text/common/test_common.py +0 -0
- {datacompose-0.2.4.1 → datacompose-0.2.6.0}/tests/yaml_specs/__init__.py +0 -0
CHANGELOG.md:

```diff
@@ -7,6 +7,89 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [0.2.6.0] - 2025-08-24
+
+### Added
+- **Automatic Conditional Detection**: Smart detection of conditional operators based on naming patterns
+  - Functions starting with `is_`, `has_`, `needs_`, `should_`, `can_`, `contains_`, `matches_`, `equals_`, `starts_with_`, `ends_with_` are automatically detected as conditionals
+  - Eliminates need for explicit `is_conditional=True` in most cases
+  - Explicit override still available when needed via `is_conditional` parameter
+- **Phone Number Processing Pipeline**: Complete phone number validation and formatting example
+  - Letter-to-number conversion (1-800-FLOWERS)
+  - NANP validation and formatting
+  - Toll-free number detection
+  - E.164 and parentheses formatting
+
+### Changed
+- **Conditional Operator Registration**: `is_conditional` parameter now optional with smart defaults
+- **Test Organization**: Consolidated conditional tests into three focused files:
+  - `test_conditional_core.py` - Core functionality, logic, errors, parameters, and performance
+  - `test_conditional_real_world.py` - Real-world pipeline scenarios
+  - `test_conditional_auto_detection.py` - Auto-detection feature tests
+
+### Fixed
+- **Phone Number Validation**: Updated NANP validation to be more flexible for testing scenarios
+
+## [0.2.5.3] - 2025-08-23
+
+### Added
+- **Compose Decorator Enhancement**: Auto-detection of PrimitiveRegistry instances in function globals
+  - Compose decorator now automatically discovers all namespace instances without explicit passing
+  - Improved namespace resolution using function's global scope instead of module globals
+  - Better support for multiple namespaces in composed functions
+
+### Fixed
+- **Namespace Resolution**: Fixed global namespace lookups to use function's own globals
+  - PipelineCompiler now correctly resolves namespaces from the decorated function's scope
+  - Fallback compose mode uses function globals for namespace discovery
+  - Prevents namespace resolution errors when registries are defined in different modules
+
+### Changed
+- **Phone Number Tests**: Updated test imports and formatting for phone number primitives
+- **Test Organization**: Added comprehensive conditional composition tests
+
+## [0.2.5.2] - 2025-08-22
+
+### Fixed
+- **Import Paths**: Updated import paths in phone_numbers pyspark primitives for clarity and consistency
+- **Documentation**: Improved docstring documentation across primitives
+
+## [0.2.5.1] - 2025-08-22
+
+### Changed
+- **Import Paths**: Renamed imports to be more transparent and clear
+
+### Added
+- **Documentation**: Added clear module-level docstrings throughout the codebase
+- **Unit Tests**: Added comprehensive unit tests for default initialization and datacompose.json configuration
+  - Tests for default target auto-selection with single target
+  - Tests for explicit target override behavior
+  - Tests for configuration file validation
+  - Tests for output path resolution from config
+
+### Fixed
+- **CLI Tests**: Fixed all failing default target configuration tests
+  - Added proper validation mocks for non-existent platforms in tests
+  - Fixed error message assertion for invalid platform validation
+  - Properly mocked generator class hierarchy for output path testing
+  - All 13 CLI default target tests now passing (100% pass rate)
+
+## [0.2.5] - 2025-08-21
+
+### Changed
+- **Documentation**: Streamlined README to be more concise
+  - Removed extensive code examples (now on website)
+  - Reduced from 390 lines to 44 lines
+  - Focused on core features and philosophy
+  - Added link to datacompose.io for detailed documentation
+
+### Fixed
+- **Test Suite**: Fixed failing CLI tests for `add` command
+  - Tests now properly mock ConfigLoader for isolated filesystem environments
+  - `test_add_invalid_transformer` correctly validates transformer not found error
+  - `test_complete_transformer_success` updated to match actual transformer names
+  - All CLI command tests passing with proper configuration mocking
+
 ## [0.2.4] - 2025-08-13
 
 ### Added
@@ -41,9 +124,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Removed `validate` command completely
 - **Import Strategy**: Primitives now try local utils import first, fall back to datacompose package
 - **File Naming**: Generated files use plural form with primitives suffix
-  - `
-  - `
-  - `
+  - `emails` → `email_primitives.py`
+  - `addresses` → `address_primitives.py`
+  - `phone_numbers` → `phone_primitives.py`
 
 ### Fixed
 - **Critical**: Fixed utils/primitives.py output location to be shared across all transformers
```
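The naming-based auto-detection described in the 0.2.6.0 entry can be illustrated with a small standalone sketch. This is not the datacompose implementation, only the idea the changelog describes: the prefix list is taken from the entry above, and `infer_is_conditional` is a hypothetical helper name.

```python
from typing import Optional

# Prefixes listed in the 0.2.6.0 changelog entry above.
CONDITIONAL_PREFIXES = (
    "is_", "has_", "needs_", "should_", "can_",
    "contains_", "matches_", "equals_", "starts_with_", "ends_with_",
)


def infer_is_conditional(func_name: str, explicit: Optional[bool] = None) -> bool:
    """Hypothetical helper: honor an explicit flag, otherwise infer from the name."""
    if explicit is not None:
        return explicit
    return func_name.startswith(CONDITIONAL_PREFIXES)


assert infer_is_conditional("is_valid_email")                   # auto-detected as conditional
assert not infer_is_conditional("clean_whitespace")             # regular transformation
assert infer_is_conditional("clean_whitespace", explicit=True)  # explicit override still wins
```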
PKG-INFO:

````diff
@@ -0,0 +1,94 @@
+Metadata-Version: 2.4
+Name: datacompose
+Version: 0.2.6.0
+Summary: Copy-pasteable data transformation primitives for PySpark. Inspired by shadcn-svelte.
+Author: Datacompose Contributors
+Maintainer: Datacompose Contributors
+License: MIT
+Project-URL: Homepage, https://github.com/tc-cole/datacompose
+Project-URL: Documentation, https://github.com/tc-cole/datacompose/tree/main/docs
+Project-URL: Repository, https://github.com/tc-cole/datacompose.git
+Project-URL: Issues, https://github.com/tc-cole/datacompose/issues
+Project-URL: Changelog, https://github.com/tc-cole/datacompose/blob/main/CHANGELOG.md
+Keywords: data-cleaning,data-quality,udf,spark,postgres,code-generation,data-pipeline,etl
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Code Generators
+Classifier: Topic :: Database
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: jinja2>=3.0.0
+Requires-Dist: pyyaml>=6.0
+Requires-Dist: click>=8.0.0
+Provides-Extra: dev
+Requires-Dist: pytest>=7.0.0; extra == "dev"
+Requires-Dist: black>=23.0.0; extra == "dev"
+Requires-Dist: mypy>=1.0.0; extra == "dev"
+Requires-Dist: ruff>=0.1.0; extra == "dev"
+Provides-Extra: docs
+Requires-Dist: mkdocs>=1.5.3; extra == "docs"
+Requires-Dist: mkdocs-material>=9.5.0; extra == "docs"
+Requires-Dist: mkdocs-material-extensions>=1.3; extra == "docs"
+Requires-Dist: mkdocs-minify-plugin>=0.7.1; extra == "docs"
+Requires-Dist: mkdocs-redirects>=1.2.1; extra == "docs"
+Requires-Dist: mike>=2.0.0; extra == "docs"
+Requires-Dist: pymdown-extensions>=10.5; extra == "docs"
+Requires-Dist: pygments>=2.17.0; extra == "docs"
+Requires-Dist: mkdocs-git-revision-date-localized-plugin>=1.2.2; extra == "docs"
+Requires-Dist: mkdocs-glightbox>=0.3.5; extra == "docs"
+Dynamic: license-file
+
+# Datacompose
+
+[](https://pypi.org/project/datacompose/)
+[](https://www.python.org/downloads/)
+[](https://github.com/your-username/datacompose)
+[](https://opensource.org/licenses/MIT)
+
+A powerful data transformation framework for building reusable, composable data cleaning pipelines in PySpark.
+
+## Installation
+
+```bash
+pip install datacompose
+```
+
+## What is Datacompose?
+
+Datacompose provides production-ready PySpark data transformation primitives that become part of YOUR codebase. Inspired by [shadcn](https://ui.shadcn.com/)'s approach to components, we believe in giving you full ownership and control over your code.
+
+### Key Features
+
+- **No Runtime Dependencies**: Standalone PySpark code that runs without Datacompose
+- **Composable Primitives**: Build complex transformations from simple, reusable functions
+- **Smart Partial Application**: Pre-configure transformations with parameters for reuse
+- **Optimized Operations**: Efficient Spark transformations with minimal overhead
+- **Comprehensive Libraries**: Pre-built primitives for emails, addresses, and phone numbers
+
+### Available Transformers
+
+- **Emails**: Validation, extraction, standardization, typo correction
+- **Addresses**: Street parsing, state/zip validation, PO Box detection
+- **Phone Numbers**: NANP/international validation, formatting, toll-free detection
+
+## Documentation
+
+For detailed documentation, examples, and API reference, visit [datacompose.io](https://datacompose.io).
+
+## Philosophy
+
+This is NOT a traditional library - it gives you production-ready data transformation primitives that you can modify to fit your exact needs. You own the code, with no external dependencies to manage or worry about breaking changes.
+
+## License
+
+MIT License - see LICENSE file for details
````
README.md:

````diff
@@ -0,0 +1,44 @@
+# Datacompose
+
+[](https://pypi.org/project/datacompose/)
+[](https://www.python.org/downloads/)
+[](https://github.com/your-username/datacompose)
+[](https://opensource.org/licenses/MIT)
+
+A powerful data transformation framework for building reusable, composable data cleaning pipelines in PySpark.
+
+## Installation
+
+```bash
+pip install datacompose
+```
+
+## What is Datacompose?
+
+Datacompose provides production-ready PySpark data transformation primitives that become part of YOUR codebase. Inspired by [shadcn](https://ui.shadcn.com/)'s approach to components, we believe in giving you full ownership and control over your code.
+
+### Key Features
+
+- **No Runtime Dependencies**: Standalone PySpark code that runs without Datacompose
+- **Composable Primitives**: Build complex transformations from simple, reusable functions
+- **Smart Partial Application**: Pre-configure transformations with parameters for reuse
+- **Optimized Operations**: Efficient Spark transformations with minimal overhead
+- **Comprehensive Libraries**: Pre-built primitives for emails, addresses, and phone numbers
+
+### Available Transformers
+
+- **Emails**: Validation, extraction, standardization, typo correction
+- **Addresses**: Street parsing, state/zip validation, PO Box detection
+- **Phone Numbers**: NANP/international validation, formatting, toll-free detection
+
+## Documentation
+
+For detailed documentation, examples, and API reference, visit [datacompose.io](https://datacompose.io).
+
+## Philosophy
+
+This is NOT a traditional library - it gives you production-ready data transformation primitives that you can modify to fit your exact needs. You own the code, with no external dependencies to manage or worry about breaking changes.
+
+## License
+
+MIT License - see LICENSE file for details
````
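The phone-number transformer listed above includes the letter-to-number conversion mentioned in the changelog (e.g. 1-800-FLOWERS). A minimal sketch of that normalization, using the standard telephone keypad mapping rather than datacompose's actual primitive:

```python
# Illustrative sketch, not datacompose code: map keypad letters to digits.
KEYPAD = {
    "ABC": "2", "DEF": "3", "GHI": "4", "JKL": "5",
    "MNO": "6", "PQRS": "7", "TUV": "8", "WXYZ": "9",
}
LETTER_TO_DIGIT = {ch: digit for letters, digit in KEYPAD.items() for ch in letters}


def vanity_to_digits(number: str) -> str:
    """Replace keypad letters with digits, leaving digits and punctuation untouched."""
    return "".join(LETTER_TO_DIGIT.get(ch.upper(), ch) for ch in number)


print(vanity_to_digits("1-800-FLOWERS"))  # -> 1-800-3569377
```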
datacompose/cli/commands/add.py:

```diff
@@ -7,6 +7,7 @@ from pathlib import Path
 import click
 
 from datacompose.cli.colors import dim, error, highlight, info, success
+from datacompose.cli.config import ConfigLoader
 from datacompose.cli.validation import validate_platform, validate_type_for_platform
 from datacompose.transformers.discovery import TransformerDiscovery
 
@@ -85,28 +86,48 @@ _MODULE_DIR = Path(__file__).parent
 @click.option(
     "--target",
     "-t",
-    default=
+    default=None,
     shell_complete=complete_target,
-    help="Target platform (e.g., 'pyspark', 'postgres', 'snowflake').
+    help="Target platform (e.g., 'pyspark', 'postgres', 'snowflake'). Uses default from datacompose.json if not specified",
 )
 @click.option(
     "--type",
     shell_complete=complete_type,
     help="UDF type for the platform (e.g., 'pandas_udf', 'sql_udf'). Uses platform default if not specified",
 )
-@click.option("--output", "-o", help="Output directory (default: build/{target})")
 @click.option(
-    "--
-
-    help="
+    "--output",
+    "-o",
+    help="Output directory (default: from config or transformers/{target})",
 )
 @click.option("--verbose", "-v", is_flag=True, help="Verbose output")
 @click.pass_context
-def add(ctx, transformer, target, type, output,
+def add(ctx, transformer, target, type, output, verbose):
     """Add UDFs for transformers.
 
-    TRANSFORMER: Transformer to add UDF for (e.g., '
+    TRANSFORMER: Transformer to add UDF for (e.g., 'emails')
     """
+    # Load config to get default target if not specified
+    config = ConfigLoader.load_config()
+
+    if target is None:
+        # Try to get default target from config
+        target = ConfigLoader.get_default_target(config)
+        if target is None:
+            print(
+                error(
+                    "Error: No target specified and no default target found in datacompose.json"
+                )
+            )
+            print(
+                info(
+                    "Please specify a target with --target or run 'datacompose init' to set up defaults"
+                )
+            )
+            ctx.exit(1)
+        elif verbose:
+            print(dim(f"Using default target from config: {target}"))
+
     # Initialize discovery for validation
     discovery = TransformerDiscovery()
 
@@ -119,12 +140,12 @@ def add(ctx, transformer, target, type, output, template_dir, verbose):
         ctx.exit(1)
 
     # Combine target and type into generator reference
-    exit_code = _run_add(transformer, target, output,
+    exit_code = _run_add(transformer, target, output, verbose)
     if exit_code != 0:
         ctx.exit(exit_code)
 
 
-def _run_add(transformer, target, output,
+def _run_add(transformer, target, output, verbose) -> int:
     """Execute the add command."""
     # Initialize discovery
     discovery = TransformerDiscovery()
@@ -135,9 +156,7 @@ def _run_add(transformer, target, output, template_dir, verbose) -> int:
     if not transformer_path:
         print(error(f"Error: Transformer not found: {transformer}"))
         print(
-            info(
-                f"Available transformers: {', '.join(discovery.list_transformers())}"
-            )
+            info(f"Available transformers: {', '.join(discovery.list_transformers())}")
         )
         return 1
     else:
@@ -154,18 +173,28 @@ def _run_add(transformer, target, output, template_dir, verbose) -> int:
         print(info(f"Available generators: {', '.join(discovery.list_generators())}"))
         return 1
 
-    # Determine output directory
+    # Determine output directory
     if not output:
-
+        # Try to get output from config first
+        config = ConfigLoader.load_config()
+        config_output = ConfigLoader.get_target_output(config, target)
+        if config_output:
+            # Config output already includes 'transformers/pyspark', so use it directly
+            output_dir = config_output
+        else:
+            output_dir = f"transformers/{target}"
     else:
-        output_dir =
+        output_dir = output
 
     try:
         # Create generator instance
+        # Note: template_dir is required by base class but not used by current generators
        generator = generator_class(
-            template_dir=Path(
+            template_dir=Path("."),  # Placeholder - not actually used
+            output_dir=Path(output_dir),
+            verbose=verbose,
        )
-
+
         # Generate the UDF
         result = generator.generate(
             transformer_name, force=False, transformer_dir=transformer_dir
@@ -178,14 +207,14 @@ def _run_add(transformer, target, output, template_dir, verbose) -> int:
             print(dim(f" Hash: {result.get('hash', 'N/A')}"))
     else:
         print(success(f"✓ UDF generated: {result['output_path']}"))
-        if result.get(
+        if result.get("test_path"):
             print(success(f"✓ Test created: {result['test_path']}"))
         print(highlight(f"Function name: {result['function_name']}"))
         if verbose:
             print(dim(f" Target: {target}"))
         print(highlight("\nGenerated package contents:"))
         print(f" - UDF code: {result['output_path']}")
-        if result.get(
+        if result.get("test_path"):
             print(f" - Test file: {result['test_path']}")
 
     return 0
@@ -197,4 +226,3 @@ def _run_add(transformer, target, output, template_dir, verbose) -> int:
 
         traceback.print_exc()
         return 1
-
```
datacompose/cli/commands/init.py:

```diff
@@ -18,10 +18,11 @@ from datacompose.cli.colors import dim, error, highlight, info, success
 
 DEFAULT_CONFIG = {
     "version": "1.0",
+    "default_target": "pyspark",
     "aliases": {"utils": "./src/utils"},
     "targets": {
         "pyspark": {
-            "output": "./
+            "output": "./transformers/pyspark",
         }
     },
 }
@@ -57,7 +58,7 @@ class InitCommand:
     def get_config_template(template_name: str) -> Dict[str, Any]:
         """Get configuration template by name."""
         if template_name == "minimal":
-            return {"version": "1.0", "targets": {"pyspark": {"output": "./
+            return {"version": "1.0", "default_target": "pyspark", "targets": {"pyspark": {"output": "./transformers/pyspark"}}}
         elif template_name == "advanced":
             config = DEFAULT_CONFIG.copy()
             config.update(
@@ -65,10 +66,10 @@ class InitCommand:
                     "style": "custom",
                     "aliases": {
                         "utils": "./src/utils",
-                        "
+                        "transformers": "./transformers",
                     },
                     "include": ["src/**/*"],
-                    "exclude": ["__pycache__", "
+                    "exclude": ["__pycache__", "transformers", "*.pyc", ".pytest_cache"],
                     "testing": {"framework": "pytest", "test_dir": "./tests"},
                 }
             )
@@ -184,7 +185,7 @@ class InitCommand:
 
         # Select targets with multi-select
         available_targets = {
-            "pyspark": {"output": "./
+            "pyspark": {"output": "./transformers/pyspark", "name": "PySpark (Apache Spark)"},
         }
 
         selected_targets = InitCommand.prompt_for_targets(available_targets)
@@ -199,6 +200,31 @@ class InitCommand:
 
         # Update targets with user selections
         config["targets"] = selected_targets
+
+        # Set default target to the first selected target (or only target if single)
+        target_keys = list(selected_targets.keys())
+        if len(target_keys) == 1:
+            config["default_target"] = target_keys[0]
+        elif len(target_keys) > 1:
+            # Ask user to select default target
+            print(highlight("\nSelect Default Target"))
+            print(dim("Which platform should be used by default when running 'datacompose add'?\n"))
+            for i, key in enumerate(target_keys, 1):
+                print(f" {i}. {key}")
+            print()
+
+            while True:
+                choice = input(f"Select default target (1-{len(target_keys)}): ").strip()
+                try:
+                    choice_idx = int(choice) - 1
+                    if 0 <= choice_idx < len(target_keys):
+                        config["default_target"] = target_keys[choice_idx]
+                        print(dim(f"Default target set to: {target_keys[choice_idx]}\n"))
+                        break
+                    else:
+                        print(error("Invalid selection. Please try again."))
+                except ValueError:
+                    print(error("Please enter a number."))
 
         print()  # Add spacing
         return config
@@ -403,11 +429,11 @@ def _run_init(force, output, verbose, yes, skip_completion) -> int:
             "2. Source your shell config or restart terminal for tab completion"
         )
         print(
-            "3. Add your first transformer: datacompose add
+            "3. Add your first transformer: datacompose add emails"
         )
     else:
         print(
-            "2. Add your first transformer: datacompose add
+            "2. Add your first transformer: datacompose add emails"
         )
     if not skip_completion:
         print(
@@ -419,7 +445,7 @@ def _run_init(force, output, verbose, yes, skip_completion) -> int:
         print(success("✓ Tab completion configured"))
         print(
             highlight(
-                "\nRun 'datacompose add
+                "\nRun 'datacompose add emails' to get started"
             )
         )
         print(
@@ -430,7 +456,7 @@ def _run_init(force, output, verbose, yes, skip_completion) -> int:
     else:
         print(
             highlight(
-                "\nRun 'datacompose add
+                "\nRun 'datacompose add emails' to get started"
             )
         )
         if not skip_completion and not yes:
```
datacompose/cli/commands/list.py:

```diff
@@ -95,7 +95,7 @@ class ListCommand:
             print(f" • {transformer_name}")
 
         print("\nUsage: datacompose add <transformer> --target <platform> [--type <type>]")
-        print("Example: datacompose add
+        print("Example: datacompose add emails --target pyspark")
         return 0
 
     @staticmethod
@@ -114,5 +114,5 @@ class ListCommand:
             print(f" • {gen_type} ({gen_class.__name__})")
 
         print("\nUsage: datacompose add <transformer> --target <platform> [--type <type>]")
-        print("Example: datacompose add
+        print("Example: datacompose add emails --target pyspark")
         return 0
```
datacompose/cli/config.py:

```diff
@@ -0,0 +1,80 @@
+"""
+Configuration management for Datacompose CLI.
+"""
+
+import json
+from pathlib import Path
+from typing import Any, Dict, Optional
+
+
+class ConfigLoader:
+    """Load and manage Datacompose configuration."""
+
+    DEFAULT_CONFIG_FILE = "datacompose.json"
+
+    @staticmethod
+    def load_config(config_path: Optional[Path] = None) -> Optional[Dict[str, Any]]:
+        """Load configuration from datacompose.json.
+
+        Args:
+            config_path: Optional path to config file. Defaults to ./datacompose.json
+
+        Returns:
+            Config dictionary or None if not found
+        """
+        if config_path is None:
+            config_path = Path(ConfigLoader.DEFAULT_CONFIG_FILE)
+
+        if not config_path.exists():
+            return None
+
+        try:
+            with open(config_path, 'r') as f:
+                return json.load(f)
+        except (json.JSONDecodeError, IOError):
+            return None
+
+    @staticmethod
+    def get_default_target(config: Optional[Dict[str, Any]] = None) -> Optional[str]:
+        """Get the default target from config.
+
+        Args:
+            config: Optional config dict. If None, will load from file.
+
+        Returns:
+            Default target name or None
+        """
+        if config is None:
+            config = ConfigLoader.load_config()
+
+        if not config:
+            return None
+
+        # Check for explicit default_target setting
+        if "default_target" in config:
+            return config["default_target"]
+
+        # Otherwise use the first target if only one exists
+        targets = config.get("targets", {})
+        if len(targets) == 1:
+            return list(targets.keys())[0]
+
+        return None
+
+    @staticmethod
+    def get_target_output(config: Optional[Dict[str, Any]], target: str) -> Optional[str]:
+        """Get the output directory for a specific target.
+
+        Args:
+            config: Config dictionary
+            target: Target name
+
+        Returns:
+            Output directory path or None
+        """
+        if not config:
+            return None
+
+        targets = config.get("targets", {})
+        target_config = targets.get(target, {})
+        return target_config.get("output")
```
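A short usage sketch for the new ConfigLoader shown above (assuming datacompose 0.2.6.0 is installed and the snippet runs from a project directory; the sample config mirrors the DEFAULT_CONFIG written by `datacompose init`):

```python
import json
from pathlib import Path

from datacompose.cli.config import ConfigLoader

# Write a sample datacompose.json using the keys from the init command's default config.
sample = {
    "version": "1.0",
    "default_target": "pyspark",
    "targets": {"pyspark": {"output": "./transformers/pyspark"}},
}
Path("datacompose.json").write_text(json.dumps(sample, indent=2))

config = ConfigLoader.load_config()                       # reads ./datacompose.json
print(ConfigLoader.get_default_target(config))            # -> pyspark
print(ConfigLoader.get_target_output(config, "pyspark"))  # -> ./transformers/pyspark
```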
datacompose/cli/main.py:

```diff
@@ -25,9 +25,9 @@ def cli(ctx):
     """Generate data cleaning UDFs for various platforms.
 
     Examples:
-        datacompose init
-        datacompose add
-        datacompose add
+        datacompose init                 # Set up project with default target
+        datacompose add emails           # Uses default target from config
+        datacompose add emails --target snowflake --output sql/udfs/
         datacompose list targets
     """
     pass
```