datacompose-0.2.8.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datacompose-0.2.8/CHANGELOG.md +227 -0
- datacompose-0.2.8/LICENSE +21 -0
- datacompose-0.2.8/MANIFEST.in +38 -0
- datacompose-0.2.8/PKG-INFO +176 -0
- datacompose-0.2.8/README.md +126 -0
- datacompose-0.2.8/datacompose/__init__.py +1 -0
- datacompose-0.2.8/datacompose/cli/__init__.py +5 -0
- datacompose-0.2.8/datacompose/cli/colors.py +80 -0
- datacompose-0.2.8/datacompose/cli/commands/__init__.py +3 -0
- datacompose-0.2.8/datacompose/cli/commands/add.py +242 -0
- datacompose-0.2.8/datacompose/cli/commands/init.py +477 -0
- datacompose-0.2.8/datacompose/cli/commands/list.py +118 -0
- datacompose-0.2.8/datacompose/cli/config.py +82 -0
- datacompose-0.2.8/datacompose/cli/main.py +59 -0
- datacompose-0.2.8/datacompose/cli/validation.py +72 -0
- datacompose-0.2.8/datacompose/generators/__init__.py +3 -0
- datacompose-0.2.8/datacompose/generators/base.py +181 -0
- datacompose-0.2.8/datacompose/generators/pyspark/__init__.py +1 -0
- datacompose-0.2.8/datacompose/generators/pyspark/generator.py +46 -0
- datacompose-0.2.8/datacompose/operators/__init__.py +21 -0
- datacompose-0.2.8/datacompose/operators/primitives.py +633 -0
- datacompose-0.2.8/datacompose/transformers/__init__.py +0 -0
- datacompose-0.2.8/datacompose/transformers/discovery.py +186 -0
- datacompose-0.2.8/datacompose/transformers/text/__init__.py +1 -0
- datacompose-0.2.8/datacompose/transformers/text/addresses/__init__.py +1 -0
- datacompose-0.2.8/datacompose/transformers/text/addresses/pyspark/pyspark_primitives.py +2024 -0
- datacompose-0.2.8/datacompose/transformers/text/datetimes/pyspark/pyspark_primitives.py +1383 -0
- datacompose-0.2.8/datacompose/transformers/text/emails/__init__.py +1 -0
- datacompose-0.2.8/datacompose/transformers/text/emails/pyspark/pyspark_primitives.py +850 -0
- datacompose-0.2.8/datacompose/transformers/text/phone_numbers/__init__.py +0 -0
- datacompose-0.2.8/datacompose/transformers/text/phone_numbers/pyspark/pyspark_primitives.py +1006 -0
- datacompose-0.2.8/datacompose.egg-info/PKG-INFO +176 -0
- datacompose-0.2.8/datacompose.egg-info/SOURCES.txt +100 -0
- datacompose-0.2.8/datacompose.egg-info/dependency_links.txt +1 -0
- datacompose-0.2.8/datacompose.egg-info/entry_points.txt +2 -0
- datacompose-0.2.8/datacompose.egg-info/requires.txt +21 -0
- datacompose-0.2.8/datacompose.egg-info/top_level.txt +1 -0
- datacompose-0.2.8/pyproject.toml +92 -0
- datacompose-0.2.8/setup.cfg +5 -0
- datacompose-0.2.8/tests/__init__.py +0 -0
- datacompose-0.2.8/tests/conftest.py +53 -0
- datacompose-0.2.8/tests/integration/__init__.py +1 -0
- datacompose-0.2.8/tests/integration/test_end_to_end.py +189 -0
- datacompose-0.2.8/tests/integration/test_full_workflow.py +298 -0
- datacompose-0.2.8/tests/integration/test_generated_imports.py +149 -0
- datacompose-0.2.8/tests/unit/cli/.venv/bin/activate_this.py +59 -0
- datacompose-0.2.8/tests/unit/cli/.venv/lib/python3.12/site-packages/_virtualenv.py +101 -0
- datacompose-0.2.8/tests/unit/cli/__init__.py +1 -0
- datacompose-0.2.8/tests/unit/cli/build/__init__.py +0 -0
- datacompose-0.2.8/tests/unit/cli/build/postgres/__init__.py +0 -0
- datacompose-0.2.8/tests/unit/cli/build/postgres/clean_emails/__init__.py +0 -0
- datacompose-0.2.8/tests/unit/cli/build/postgres/clean_emails/email_cleaner_udf_spec.yaml +67 -0
- datacompose-0.2.8/tests/unit/cli/build/postgres/clean_emails/test_email_cleaner_udf.py +102 -0
- datacompose-0.2.8/tests/unit/cli/build/spark/__init__.py +0 -0
- datacompose-0.2.8/tests/unit/cli/build/spark/clean_emails/__init__.py +0 -0
- datacompose-0.2.8/tests/unit/cli/build/spark/clean_emails/email_cleaner_udf.py +196 -0
- datacompose-0.2.8/tests/unit/cli/build/spark/clean_emails/email_cleaner_udf_spec.yaml +67 -0
- datacompose-0.2.8/tests/unit/cli/build/spark/clean_emails/test_email_cleaner_udf.py +102 -0
- datacompose-0.2.8/tests/unit/cli/test_add_command.py +187 -0
- datacompose-0.2.8/tests/unit/cli/test_add_command_complete.py +451 -0
- datacompose-0.2.8/tests/unit/cli/test_add_default_target.py +327 -0
- datacompose-0.2.8/tests/unit/cli/test_add_validation.py +96 -0
- datacompose-0.2.8/tests/unit/cli/test_config.py +249 -0
- datacompose-0.2.8/tests/unit/cli/test_init_command.py +123 -0
- datacompose-0.2.8/tests/unit/cli/test_init_command_complete.py +654 -0
- datacompose-0.2.8/tests/unit/cli/test_list_command.py +239 -0
- datacompose-0.2.8/tests/unit/cli/test_main.py +45 -0
- datacompose-0.2.8/tests/unit/cli/test_main_complete.py +372 -0
- datacompose-0.2.8/tests/unit/cli/test_validation_complete.py +400 -0
- datacompose-0.2.8/tests/unit/generators/__init__.py +1 -0
- datacompose-0.2.8/tests/unit/generators/test_base_generator.py +203 -0
- datacompose-0.2.8/tests/unit/generators/test_spark_generator.py +265 -0
- datacompose-0.2.8/tests/unit/operators/test_compose_conditions.py +560 -0
- datacompose-0.2.8/tests/unit/operators/test_conditional_auto_detection.py +192 -0
- datacompose-0.2.8/tests/unit/operators/test_conditional_core.py +751 -0
- datacompose-0.2.8/tests/unit/operators/test_conditional_real_world.py +340 -0
- datacompose-0.2.8/tests/unit/operators/test_operators.py +936 -0
- datacompose-0.2.8/tests/unit/operators/test_primitives_complete.py +329 -0
- datacompose-0.2.8/tests/unit/transformers/__init__.py +1 -0
- datacompose-0.2.8/tests/unit/transformers/test_discovery.py +171 -0
- datacompose-0.2.8/tests/unit/transformers/text/common/test_common.py +0 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_addresses/test_building_unit_extraction.py +750 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_addresses/test_city_state_extraction.py +790 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_addresses/test_clean_addresses.py +270 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_addresses/test_country_extraction.py +206 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_addresses/test_data_addresses.py +627 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_addresses/test_po_box_extraction.py +276 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_addresses/test_street_extraction.py +997 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_addresses/test_zip_code_extraction.py +955 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_datetimes/test_datetime_data_quality.py +531 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_datetimes/test_datetime_extraction.py +997 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_datetimes/test_datetime_integration.py +480 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_datetimes/test_datetime_performance.py +396 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_datetimes/test_datetime_regression.py +452 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_datetimes/test_datetime_timezones.py +439 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_emails/test_debug_long_emails.py +38 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_emails/test_email_extraction.py +936 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_emails/test_email_optimized.py +204 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_phone_numbers/test_phone_extraction.py +719 -0
- datacompose-0.2.8/tests/unit/transformers/text/test_phone_numbers/test_phone_formatting.py +487 -0
- datacompose-0.2.8/tests/yaml_specs/__init__.py +1 -0
+++ datacompose-0.2.8/CHANGELOG.md
@@ -0,0 +1,227 @@
+# Changelog
+
+All notable changes to Datacompose will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [Unreleased]
+
+## [0.2.7.0] - 2025-09-11
+
+### Fixed
+- **SHA256 Transformer Memory Issues**: Fixed Java heap space OutOfMemoryError in email and phone number SHA256 hashing
+  - Set `standardize_first=False` by default in tests to avoid complex Spark query planning issues
+  - All SHA256 hashing tests now pass without memory errors
+
+- **CLI Configuration Handling**: Improved config file error handling in add command
+  - Add command now properly fails with helpful error message when no config file exists
+  - Add command correctly handles malformed JSON config files
+  - "pyspark" is now the default target when explicitly called without config
+
+- **Test Fixtures**: Added missing `diverse_test_data` fixture for conditional operator tests
+  - Created comprehensive test dataset with category, value, size, id, and text columns
+  - Fixed all conditional logic tests in `test_conditional_core.py`
+  - Fixed all real-world scenario tests in `test_conditional_real_world.py`
+
+- **Test Assertions**: Updated test expectations to match actual behavior
+  - Fixed init command test to expect full command in error message ("datacompose init --force")
+  - Updated conditional test assertions for non-standardized hashing behavior
+
+### Changed
+- **Default Target Behavior**: ConfigLoader now returns "pyspark" as fallback when no config is provided programmatically
+
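The 0.2.7.0 fallback is easy to picture. A minimal sketch, assuming a ConfigLoader shaped roughly like the one in `datacompose/cli/config.py`; the method and key names here are illustrative, not the shipped API:

```python
from typing import Optional

# Assumed sketch of the 0.2.7.0 fallback; the real ConfigLoader lives in
# datacompose/cli/config.py, and the method/key names here are hypothetical.
class ConfigLoader:
    def __init__(self, config: Optional[dict] = None):
        self.config = config

    def get_default_target(self) -> str:
        if not self.config:          # no config provided programmatically
            return "pyspark"         # documented fallback
        targets = self.config.get("targets", {})
        # first configured target wins; otherwise fall back again
        return next(iter(targets), "pyspark")
```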
+## [0.2.6.0] - 2025-08-24
+
+### Added
+- **Automatic Conditional Detection**: Smart detection of conditional operators based on naming patterns
+  - Functions starting with `is_`, `has_`, `needs_`, `should_`, `can_`, `contains_`, `matches_`, `equals_`, `starts_with_`, `ends_with_` are automatically detected as conditionals
+  - Eliminates need for explicit `is_conditional=True` in most cases
+  - Explicit override still available when needed via `is_conditional` parameter
+- **Phone Number Processing Pipeline**: Complete phone number validation and formatting example
+  - Letter-to-number conversion (1-800-FLOWERS)
+  - NANP validation and formatting
+  - Toll-free number detection
+  - E.164 and parentheses formatting
+
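The naming-pattern detection above can be sketched in a few lines. This is an assumed illustration, not the shipped logic (the real registration code lives in `datacompose/operators/primitives.py`, and `looks_conditional` is a hypothetical helper name):

```python
# Assumed sketch of conditional auto-detection; the prefixes are the ones
# listed in the 0.2.6.0 notes above, the helper name is hypothetical.
CONDITIONAL_PREFIXES = (
    "is_", "has_", "needs_", "should_", "can_",
    "contains_", "matches_", "equals_", "starts_with_", "ends_with_",
)


def looks_conditional(func, is_conditional=None) -> bool:
    if is_conditional is not None:   # explicit override always wins
        return is_conditional
    return func.__name__.startswith(CONDITIONAL_PREFIXES)
```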
+### Changed
+- **Conditional Operator Registration**: `is_conditional` parameter now optional with smart defaults
+- **Test Organization**: Consolidated conditional tests into three focused files:
+  - `test_conditional_core.py` - Core functionality, logic, errors, parameters, and performance
+  - `test_conditional_real_world.py` - Real-world pipeline scenarios
+  - `test_conditional_auto_detection.py` - Auto-detection feature tests
+
+### Fixed
+- **Phone Number Validation**: Updated NANP validation to be more flexible for testing scenarios
+
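For reference, a NANP check reduces to a shape test on the digits. An illustrative PySpark version, assuming the common rule that area code and exchange start with 2-9 (the shipped rules in the phone primitives may be stricter or looser):

```python
from pyspark.sql import Column
from pyspark.sql import functions as F

# Illustrative NANP shape check: optional leading 1, then an area code and
# exchange that each start with 2-9. Not the shipped implementation.
_NANP_RE = r"^1?[2-9][0-9]{2}[2-9][0-9]{2}[0-9]{4}$"


def is_valid_nanp(col: Column) -> Column:
    digits = F.regexp_replace(col, r"[^0-9]", "")  # strip all separators
    return digits.rlike(_NANP_RE)
```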
+## [0.2.5.3] - 2025-08-23
+
+### Added
+- **Compose Decorator Enhancement**: Auto-detection of PrimitiveRegistry instances in function globals
+  - Compose decorator now automatically discovers all namespace instances without explicit passing
+  - Improved namespace resolution using function's global scope instead of module globals
+  - Better support for multiple namespaces in composed functions
+
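The globals-based discovery in 0.2.5.3 amounts to scanning the decorated function's own module scope. A minimal sketch; `discover_registries` is a hypothetical name and the import path is assumed from the package layout:

```python
# Minimal sketch of namespace auto-discovery via function globals; the real
# compose decorator does more, and the helper name here is hypothetical.
from datacompose.operators.primitives import PrimitiveRegistry  # path assumed


def discover_registries(func) -> dict:
    """Find every PrimitiveRegistry visible from the function's own module."""
    return {
        name: obj
        for name, obj in func.__globals__.items()
        if isinstance(obj, PrimitiveRegistry)
    }
```

Using `func.__globals__` rather than the compose module's own globals is what lets registries defined in a different module resolve correctly.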
+### Fixed
+- **Namespace Resolution**: Fixed global namespace lookups to use function's own globals
+  - PipelineCompiler now correctly resolves namespaces from the decorated function's scope
+  - Fallback compose mode uses function globals for namespace discovery
+  - Prevents namespace resolution errors when registries are defined in different modules
+
+### Changed
+- **Phone Number Tests**: Updated test imports and formatting for phone number primitives
+- **Test Organization**: Added comprehensive conditional composition tests
+
+## [0.2.5.2] - 2025-08-22
+
+### Fixed
+- **Import Paths**: Updated import paths in phone_numbers pyspark primitives for clarity and consistency
+- **Documentation**: Improved docstring documentation across primitives
+
+## [0.2.5.1] - 2025-08-22
+
+### Changed
+- **Import Paths**: Renamed imports to be more transparent and clear
+
+### Added
+- **Documentation**: Added clear module-level docstrings throughout the codebase
+- **Unit Tests**: Added comprehensive unit tests for default initialization and datacompose.json configuration
+  - Tests for default target auto-selection with single target
+  - Tests for explicit target override behavior
+  - Tests for configuration file validation
+  - Tests for output path resolution from config
+
+### Fixed
+- **CLI Tests**: Fixed all failing default target configuration tests
+  - Added proper validation mocks for non-existent platforms in tests
+  - Fixed error message assertion for invalid platform validation
+  - Properly mocked generator class hierarchy for output path testing
+  - All 13 CLI default target tests now passing (100% pass rate)
+
+## [0.2.5] - 2025-08-21
+
+### Changed
+- **Documentation**: Streamlined README to be more concise
+  - Removed extensive code examples (now on website)
+  - Reduced from 390 lines to 44 lines
+  - Focused on core features and philosophy
+  - Added link to datacompose.io for detailed documentation
+
+### Fixed
+- **Test Suite**: Fixed failing CLI tests for `add` command
+  - Tests now properly mock ConfigLoader for isolated filesystem environments
+  - `test_add_invalid_transformer` correctly validates transformer not found error
+  - `test_complete_transformer_success` updated to match actual transformer names
+  - All CLI command tests passing with proper configuration mocking
+
+## [0.2.4] - 2025-08-13
+
+### Added
+- **Published to PyPI**: Package is now available via `pip install datacompose`
+- **Phone Number Primitives**: Complete set of 45+ phone number transformation functions
+  - NANP validation and formatting (North American Numbering Plan)
+  - International phone support with E.164 formatting
+  - Extension handling and toll-free detection
+  - Phone number extraction from text
+  - Letter-to-number conversion (1-800-FLOWERS support)
+- **Address Improvements**: Enhanced street extraction and standardization
+  - Fixed numbered street extraction ("5th Avenue" correctly returns "5th")
+  - Improved null handling in street extraction
+  - Custom mapping support for street suffix standardization
+- **Utils Export**: Generated code now includes `utils/primitives.py` for standalone deployment
+  - PrimitiveRegistry class embedded with generated code
+  - No runtime dependency on datacompose package
+  - Fallback imports for maximum compatibility
+- **Comprehensive Test Coverage**: Improved test coverage from 87% to 92%
+  - Added 18 new tests for primitives.py module (70% → 86% coverage)
+  - Created comprehensive test suites for all CLI commands
+  - Added full end-to-end integration tests (init → add → transform)
+  - validation.py achieved 100% coverage
+  - add.py improved to 99% coverage
+
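Of the phone primitives listed above, letter-to-number conversion is the most self-contained. A sketch using the standard keypad mapping; illustrative only, the shipped primitive lives in `transformers/text/phone_numbers/pyspark/pyspark_primitives.py`:

```python
from pyspark.sql import Column
from pyspark.sql import functions as F

# Standard keypad mapping: ABC->2, DEF->3, GHI->4, JKL->5, MNO->6,
# PQRS->7, TUV->8, WXYZ->9. Illustrative sketch only.
_LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
_DIGITS = "22233344455566677778889999"


def convert_letters_to_numbers(col: Column) -> Column:
    """'1-800-FLOWERS' -> '1-800-3569377'."""
    return F.translate(F.upper(col), _LETTERS, _DIGITS)
```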
+### Changed
+- **BREAKING**: Renamed `PrimitiveNameSpace` to `PrimitiveRegistry` throughout codebase
+- **Major Architecture Shift**: Removed YAML/spec file system entirely
+  - No more YAML specifications or JSON replacements
+  - Direct primitive file copying instead of template rendering
+  - Simplified discovery system works with transformer directories
+  - Removed `validate` command completely
+- **Import Strategy**: Primitives now try local utils import first, fall back to datacompose package
+- **File Naming**: Generated files use plural form with primitives suffix
+  - `emails` → `email_primitives.py`
+  - `addresses` → `address_primitives.py`
+  - `phone_numbers` → `phone_primitives.py`
+
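The import strategy above is a plain try/except at the top of each generated file. Roughly, with the local module path assumed from the generated `build/utils/` layout:

```python
# Sketch of the fallback import pattern in generated primitives; the exact
# local module path depends on where your build output lives.
try:
    from utils.primitives import PrimitiveRegistry   # local copy (build/utils/)
except ImportError:
    from datacompose.operators.primitives import PrimitiveRegistry
```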
+### Fixed
+- **Critical**: Fixed utils/primitives.py output location to be shared across all transformers
+  - Utils module now generates at top-level build/utils/ instead of per-transformer
+  - All transformers share the same PrimitiveRegistry implementation
+  - Prevents duplicate utils modules and ensures consistency
+- Phone `normalize_separators` now correctly handles parentheses: `(555)123-4567` → `555-123-4567`
+- Street extraction for numbered streets ("5th Avenue" issue)
+- Compose decorator now requires namespace to be passed explicitly for proper method resolution
+- `standardize_street_suffix` applies both custom and default mappings correctly
+- Test failures due to namespace resolution in compose decorator
+- Generator initialization error handling in add command
+
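The `normalize_separators` fix above is essentially a one-regex change. An illustrative equivalent of the corrected behavior, not the shipped implementation:

```python
from pyspark.sql import Column
from pyspark.sql import functions as F


def normalize_separators(col: Column) -> Column:
    """Sketch of the corrected behavior: '(555)123-4567' -> '555-123-4567'."""
    # Spark uses Java regex, so the replacement backreference is $1.
    opened = F.regexp_replace(col, r"\((\d{3})\)\s*", "$1-")
    # Unify remaining dot/space separators to dashes: '555.987.6543' -> '555-987-6543'.
    return F.regexp_replace(opened, r"[.\s]+", "-")
```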
+### Removed
+- All YAML/spec file functionality
+- PostgreSQL generator references
+- Jinja2 template dependencies
+- `validate` command from CLI
+- Old Spark integration tests (replaced with end-to-end tests)
+
+## [0.2.0] - 2024-XX-XX
+
+### Added
+- **Primitive System**: New composable primitive architecture for building data pipelines
+  - `SmartPrimitive` class for partial application and parameter binding
+  - `PrimitiveRegistry` (originally PrimitiveNameSpace) for organizing related transformations
+  - Support for conditional primitives (boolean-returning functions)
+- **Conditional Compilation**: AST-based pipeline compilation with if/else support
+  - `PipelineCompiler` for parsing and compiling conditional logic
+  - `StablePipeline` for executing compiled pipelines
+  - Full support for nested conditionals and complex branching
+- **Comprehensive Testing**: 44+ tests covering conditional compilation scenarios
+  - Edge cases and null handling
+  - Complex nested logic
+  - Data-driven conditions
+  - Performance optimization tests
+  - Real-world use cases
+  - Parameter handling
+  - Error handling
+- **Improved Architecture**: Dual approach for different runtime constraints
+  - Primitives for flexible runtimes (Python, Spark, Scala)
+  - Templates for rigid targets (SQL, PostgreSQL)
+
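The partial application described for `SmartPrimitive` is the core idea of the 0.2.0 primitive system. An assumed sketch of the calling convention; the real class is in `datacompose/operators/primitives.py` and does more (registration, conditionals):

```python
# Assumed sketch of SmartPrimitive's partial application; not the shipped class.
class SmartPrimitive:
    def __init__(self, func):
        self.func = func
        self.__name__ = func.__name__

    def __call__(self, col=None, **params):
        if col is None:
            # Parameter binding: return a configured, reusable transformation.
            return lambda c: self.func(c, **params)
        return self.func(col, **params)
```

Calling with only keyword arguments binds parameters and returns a ready-to-apply function; calling with a column applies the transformation immediately.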
+### Changed
+- Made PySpark an optional dependency
+- Reorganized test structure with focused test files and shared fixtures
+- Refined architecture to support both template-based and primitive-based approaches
+
+### Fixed
+- Import paths for pipeline compilation modules
+- Missing return statements in pipeline execution
+- Conditional logic to use accumulated results correctly
+
+## [0.1.4] - 2024-XX-XX
+
+### Added
+- Initial release of Datacompose
+- Core framework for generating data cleaning UDFs
+- Support for Spark, PostgreSQL, and Pandas targets
+- Built-in specifications for common data cleaning tasks:
+  - Email address cleaning
+  - Phone number normalization
+  - Address standardization
+  - Job title standardization
+  - Date/time parsing
+- CLI interface with commands:
+  - `datacompose init` - Initialize project
+  - `datacompose add` - Generate UDFs from specs
+  - `datacompose list` - List available targets and specs
+  - `datacompose validate` - Validate specification files
+- YAML-based specification format
+- Jinja2 templating for code generation
+- Comprehensive test suite
+- Documentation with Sphinx and Furo theme
+++ datacompose-0.2.8/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2025 datacompose
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+++ datacompose-0.2.8/MANIFEST.in
@@ -0,0 +1,38 @@
+# Include documentation
+include README.md
+include LICENSE
+include CHANGELOG.md
+
+# Include all YAML specifications
+recursive-include datacompose/transformers *.yaml
+
+# Include all Jinja2 templates
+recursive-include datacompose/transformers *.j2
+recursive-include datacompose/generators *.j2
+
+# Include type hints
+recursive-include datacompose py.typed
+
+# Include test data (optional, remove if you don't want tests in distribution)
+recursive-include tests *.py
+recursive-include tests *.csv
+recursive-include tests *.yaml
+
+# Exclude unnecessary files
+global-exclude *.pyc
+global-exclude *.pyo
+global-exclude __pycache__
+global-exclude .DS_Store
+global-exclude .git*
+global-exclude *.swp
+global-exclude *~
+
+# Exclude development files
+exclude .pre-commit-config.yaml
+exclude .gitignore
+exclude docker-compose.yml
+exclude Dockerfile
+exclude Makefile
+prune docs/build
+prune notebooks
+prune scripts
+++ datacompose-0.2.8/PKG-INFO
@@ -0,0 +1,176 @@
+Metadata-Version: 2.4
+Name: datacompose
+Version: 0.2.8
+Summary: Copy-pasteable data transformation primitives for PySpark. Inspired by shadcn-svelte.
+Author: Datacompose Contributors
+Maintainer: Datacompose Contributors
+License: MIT
+Project-URL: Homepage, https://github.com/tc-cole/datacompose
+Project-URL: Documentation, https://github.com/tc-cole/datacompose/tree/main/docs
+Project-URL: Repository, https://github.com/tc-cole/datacompose.git
+Project-URL: Issues, https://github.com/tc-cole/datacompose/issues
+Project-URL: Changelog, https://github.com/tc-cole/datacompose/blob/main/CHANGELOG.md
+Keywords: data-cleaning,data-quality,udf,spark,postgres,code-generation,data-pipeline,etl
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Code Generators
+Classifier: Topic :: Database
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: jinja2>=3.0.0
+Requires-Dist: pyyaml>=6.0
+Requires-Dist: click>=8.0.0
+Provides-Extra: dev
+Requires-Dist: pytest>=7.0.0; extra == "dev"
+Requires-Dist: black>=23.0.0; extra == "dev"
+Requires-Dist: mypy>=1.0.0; extra == "dev"
+Requires-Dist: ruff>=0.1.0; extra == "dev"
+Provides-Extra: docs
+Requires-Dist: mkdocs>=1.5.3; extra == "docs"
+Requires-Dist: mkdocs-material>=9.5.0; extra == "docs"
+Requires-Dist: mkdocs-material-extensions>=1.3; extra == "docs"
+Requires-Dist: mkdocs-minify-plugin>=0.7.1; extra == "docs"
+Requires-Dist: mkdocs-redirects>=1.2.1; extra == "docs"
+Requires-Dist: mike>=2.0.0; extra == "docs"
+Requires-Dist: pymdown-extensions>=10.5; extra == "docs"
+Requires-Dist: pygments>=2.17.0; extra == "docs"
+Requires-Dist: mkdocs-git-revision-date-localized-plugin>=1.2.2; extra == "docs"
+Requires-Dist: mkdocs-glightbox>=0.3.5; extra == "docs"
+Dynamic: license-file
+
+# DataCompose
+
+PySpark transformations you can actually own and modify. No black boxes.
+
+## Before vs After
+
+```python
+# Before: Regex nightmare for addresses
+df = df.withColumn("state_clean",
+    F.when(F.col("address").rlike(".*\\b(NY|N\\.Y\\.|New York|NewYork|Newyork)\\b.*"), "NY")
+     .when(F.col("address").rlike(".*\\b(CA|Cal\\.|Calif\\.|California)\\b.*"), "CA")
+     .when(F.col("address").rlike(".*\\b(IL|Ill\\.|Illinois|Illinios)\\b.*"), "IL")
+     .when(F.upper(F.col("address")).contains("NEW YORK"), "NY")
+     .when(F.regexp_extract(F.col("address"), ",\\s*([A-Z]{2})\\s+\\d{5}", 1) == "NY", "NY")
+     .when(F.regexp_extract(F.col("address"), "\\s+([A-Z]{2})\\s*$", 1) == "NY", "NY")
+    # ... handle "N.Y 10001" vs "NY, 10001" vs "New York 10001"
+    # ... handle misspellings like "Californai" or "Illnois"
+    # ... 50 more states × 10 variations each
+)
+
+# After: One line
+from builders.transformers.addresses import addresses
+df = df.withColumn("state", addresses.standardize_state(F.col("address")))
+```
+
+## Installation
+
+```bash
+pip install datacompose
+```
+
+## How It Works
+
+```bash
+# Copy transformers into YOUR repo
+datacompose add phones
+datacompose add addresses
+datacompose add emails
+```
+
+```python
+# Use them like any Python module - this is your code now
+from transformers.pyspark.addresses import addresses
+
+df = (df
+    .withColumn("street_number", addresses.extract_street_number(F.col("address")))
+    .withColumn("street_name", addresses.extract_street_name(F.col("address")))
+    .withColumn("city", addresses.extract_city(F.col("address")))
+    .withColumn("state", addresses.standardize_state(F.col("address")))
+    .withColumn("zip", addresses.extract_zip_code(F.col("address")))
+)
+
+# Result:
++-----------------------------------------+-------------+-----------+-----------+-----+-----+
+|address                                  |street_number|street_name|city       |state|zip  |
++-----------------------------------------+-------------+-----------+-----------+-----+-----+
+|123 Main St, New York, NY 10001          |123          |Main       |New York   |NY   |10001|
+|456 Oak Ave Apt 5B, Los Angeles, CA 90001|456          |Oak        |Los Angeles|CA   |90001|
+|789 Pine Blvd, Chicago, IL 60601         |789          |Pine       |Chicago    |IL   |60601|
++-----------------------------------------+-------------+-----------+-----------+-----+-----+
+```
+
+The code lives in your repo. Modify it. Delete what you don't need. No external dependencies.
+
+## Why Copy-to-Own?
+
+- **Your data is weird** - Phone numbers with "ask for Bob"? We can't predict that. You can fix it.
+- **No breaking changes** - Library updates can't break your pipeline at 2 AM
+- **Actually debuggable** - Stack traces point to YOUR code, not site-packages
+- **No dependency hell** - It's just PySpark. If Spark runs, this runs.
+
+## Available Transformers
+
+**Phones** - Standardize formats, extract from text, validate, handle extensions
+**Addresses** - Parse components, standardize states, validate zips, detect PO boxes
+**Emails** - Validate, extract domains, fix typos (gmial→gmail), standardize
+
+More coming based on what you need.
+
+## Real Example
+
+```python
+# Messy customer data
+df = spark.createDataFrame([
+    ("(555) 123-4567 ext 89", "john.doe@gmial.com", "123 Main St Apt 4B"),
+    ("555.987.6543", "JANE@COMPANY.COM", "456 Oak Ave, NY, NY 10001")
+])
+
+# Clean it
+clean_df = (df
+    .withColumn("phone", phones.standardize_phone(F.col("phone")))
+    .withColumn("email", emails.fix_common_typos(F.col("email")))
+    .withColumn("street", addresses.extract_street_address(F.col("address")))
+)
+```
+
+## The Philosophy
+
+```
+█████████████ 60% - Already clean
+████████      30% - Common patterns (formatting, typos)
+██             8% - Edge cases (weird but fixable)
+▌              2% - Complete chaos (that's what interns are for)
+```
+
+We handle the 38% with patterns. You handle the 2% chaos.
+
+## Documentation
+
+Full docs at [datacompose.io](https://datacompose.io)
+
+## Key Features
+
+- **Zero dependencies** - Just PySpark code that runs anywhere Spark runs
+- **Fully modifiable** - It's in your repo. Change whatever you need
+- **Battle-tested patterns** - Built from real production data cleaning challenges
+- **Composable functions** - Chain simple operations into complex pipelines
+- **No breaking changes** - You control when and how to update
+
+## License
+
+MIT - It's your code now.
+
+---
+
+*Inspired by [shadcn/ui](https://ui.shadcn.com/) and [Svelte](https://svelte.dev/)'s approach to components - copy, don't install.*
+++ datacompose-0.2.8/README.md
@@ -0,0 +1,126 @@
+# DataCompose
+
+PySpark transformations you can actually own and modify. No black boxes.
+
+## Before vs After
+
+```python
+# Before: Regex nightmare for addresses
+df = df.withColumn("state_clean",
+    F.when(F.col("address").rlike(".*\\b(NY|N\\.Y\\.|New York|NewYork|Newyork)\\b.*"), "NY")
+     .when(F.col("address").rlike(".*\\b(CA|Cal\\.|Calif\\.|California)\\b.*"), "CA")
+     .when(F.col("address").rlike(".*\\b(IL|Ill\\.|Illinois|Illinios)\\b.*"), "IL")
+     .when(F.upper(F.col("address")).contains("NEW YORK"), "NY")
+     .when(F.regexp_extract(F.col("address"), ",\\s*([A-Z]{2})\\s+\\d{5}", 1) == "NY", "NY")
+     .when(F.regexp_extract(F.col("address"), "\\s+([A-Z]{2})\\s*$", 1) == "NY", "NY")
+    # ... handle "N.Y 10001" vs "NY, 10001" vs "New York 10001"
+    # ... handle misspellings like "Californai" or "Illnois"
+    # ... 50 more states × 10 variations each
+)
+
+# After: One line
+from builders.transformers.addresses import addresses
+df = df.withColumn("state", addresses.standardize_state(F.col("address")))
+```
+
+## Installation
+
+```bash
+pip install datacompose
+```
+
+## How It Works
+
+```bash
+# Copy transformers into YOUR repo
+datacompose add phones
+datacompose add addresses
+datacompose add emails
+```
+
+```python
+# Use them like any Python module - this is your code now
+from transformers.pyspark.addresses import addresses
+
+df = (df
+    .withColumn("street_number", addresses.extract_street_number(F.col("address")))
+    .withColumn("street_name", addresses.extract_street_name(F.col("address")))
+    .withColumn("city", addresses.extract_city(F.col("address")))
+    .withColumn("state", addresses.standardize_state(F.col("address")))
+    .withColumn("zip", addresses.extract_zip_code(F.col("address")))
+)
+
+# Result:
++-----------------------------------------+-------------+-----------+-----------+-----+-----+
+|address                                  |street_number|street_name|city       |state|zip  |
++-----------------------------------------+-------------+-----------+-----------+-----+-----+
+|123 Main St, New York, NY 10001          |123          |Main       |New York   |NY   |10001|
+|456 Oak Ave Apt 5B, Los Angeles, CA 90001|456          |Oak        |Los Angeles|CA   |90001|
+|789 Pine Blvd, Chicago, IL 60601         |789          |Pine       |Chicago    |IL   |60601|
++-----------------------------------------+-------------+-----------+-----------+-----+-----+
+```
+
+The code lives in your repo. Modify it. Delete what you don't need. No external dependencies.
+
+## Why Copy-to-Own?
+
+- **Your data is weird** - Phone numbers with "ask for Bob"? We can't predict that. You can fix it.
+- **No breaking changes** - Library updates can't break your pipeline at 2 AM
+- **Actually debuggable** - Stack traces point to YOUR code, not site-packages
+- **No dependency hell** - It's just PySpark. If Spark runs, this runs.
+
+## Available Transformers
+
+**Phones** - Standardize formats, extract from text, validate, handle extensions
+**Addresses** - Parse components, standardize states, validate zips, detect PO boxes
+**Emails** - Validate, extract domains, fix typos (gmial→gmail), standardize
+
+More coming based on what you need.
+
+## Real Example
+
+```python
+# Messy customer data
+df = spark.createDataFrame([
+    ("(555) 123-4567 ext 89", "john.doe@gmial.com", "123 Main St Apt 4B"),
+    ("555.987.6543", "JANE@COMPANY.COM", "456 Oak Ave, NY, NY 10001")
+])
+
+# Clean it
+clean_df = (df
+    .withColumn("phone", phones.standardize_phone(F.col("phone")))
+    .withColumn("email", emails.fix_common_typos(F.col("email")))
+    .withColumn("street", addresses.extract_street_address(F.col("address")))
+)
+```
+
+## The Philosophy
+
+```
+█████████████ 60% - Already clean
+████████      30% - Common patterns (formatting, typos)
+██             8% - Edge cases (weird but fixable)
+▌              2% - Complete chaos (that's what interns are for)
+```
+
+We handle the 38% with patterns. You handle the 2% chaos.
+
+## Documentation
+
+Full docs at [datacompose.io](https://datacompose.io)
+
+## Key Features
+
+- **Zero dependencies** - Just PySpark code that runs anywhere Spark runs
+- **Fully modifiable** - It's in your repo. Change whatever you need
+- **Battle-tested patterns** - Built from real production data cleaning challenges
+- **Composable functions** - Chain simple operations into complex pipelines
+- **No breaking changes** - You control when and how to update
+
+## License
+
+MIT - It's your code now.
+
+---
+
+*Inspired by [shadcn/ui](https://ui.shadcn.com/) and [Svelte](https://svelte.dev/)'s approach to components - copy, don't install.*
+++ datacompose-0.2.8/datacompose/__init__.py
@@ -0,0 +1 @@
+"""Datacompose source package."""
+++ datacompose-0.2.8/datacompose/cli/colors.py
@@ -0,0 +1,80 @@
+"""
+Simple color utilities for CLI output.
+"""
+
+import os
+import sys
+
+
+class Colors:
+    """ANSI color codes for terminal output."""
+
+    # Text colors
+    RED = "\033[91m"
+    GREEN = "\033[92m"
+    YELLOW = "\033[93m"
+    BLUE = "\033[94m"
+    MAGENTA = "\033[95m"
+    CYAN = "\033[96m"
+    WHITE = "\033[97m"
+    GRAY = "\033[90m"
+
+    # Styles
+    BOLD = "\033[1m"
+    DIM = "\033[2m"
+    UNDERLINE = "\033[4m"
+
+    # Reset
+    RESET = "\033[0m"
+
+    @classmethod
+    def is_enabled(cls) -> bool:
+        """Check if colors should be enabled."""
+        # Disable colors if NO_COLOR env var is set
+        if os.getenv("NO_COLOR"):
+            return False
+
+        # Disable colors if not in a TTY
+        if not sys.stdout.isatty():
+            return False
+
+        return True
+
+
+def colorize(text: str, color: str = "", style: str = "") -> str:
+    """Colorize text if colors are enabled."""
+    if not Colors.is_enabled():
+        return text
+
+    prefix = style + color
+    return f"{prefix}{text}{Colors.RESET}"
+
+
+def success(text: str) -> str:
+    """Green text for success messages."""
+    return colorize(text, Colors.GREEN, Colors.BOLD)
+
+
+def error(text: str) -> str:
+    """Red text for error messages."""
+    return colorize(text, Colors.RED, Colors.BOLD)
+
+
+def warning(text: str) -> str:
+    """Yellow text for warning messages."""
+    return colorize(text, Colors.YELLOW, Colors.BOLD)
+
+
+def info(text: str) -> str:
+    """Blue text for info messages."""
+    return colorize(text, Colors.BLUE)
+
+
+def highlight(text: str) -> str:
+    """Cyan text for highlighted text."""
+    return colorize(text, Colors.CYAN, Colors.BOLD)
+
+
+def dim(text: str) -> str:
+    """Dimmed text for less important info."""
+    return colorize(text, Colors.GRAY)