sibi-flux 2026.1.5.tar.gz → 2026.1.6.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/PKG-INFO +5 -25
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/README.md +4 -24
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/pyproject.toml +16 -1
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/cli.py +36 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/datacube/cli.py +74 -6
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/datacube/generator.py +13 -1
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/datacube/orchestrator.py +17 -2
- sibi_flux-2026.1.6/src/sibi_flux/init/app.py +111 -0
- sibi_flux-2026.1.6/src/sibi_flux/init/cube_extender.py +149 -0
- sibi_flux-2026.1.6/src/sibi_flux/init/cube_proposer.py +134 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_dst/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/artifacts/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/artifacts/base.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/artifacts/parquet.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/artifacts/parquet_engine/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/artifacts/parquet_engine/executor.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/artifacts/parquet_engine/manifest.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/artifacts/parquet_engine/planner.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/config/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/config/manager.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/config/settings.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/core/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/core/managed_resource/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/core/managed_resource/_managed_resource.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/core/type_maps/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/dask_cluster/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/dask_cluster/async_core.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/dask_cluster/client_manager.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/dask_cluster/core.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/dask_cluster/exceptions.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/dask_cluster/utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/datacube/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/datacube/_data_cube.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/datacube/config_engine.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/datacube/field_factory.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/datacube/field_mapper.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/datacube/field_registry.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/datacube/router.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/dataset/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/dataset/_dataset.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/dataset/hybrid_loader.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_enricher/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_enricher/async_enricher.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_enricher/attacher.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_enricher/merger.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_enricher/specs.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_enricher/types.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/_df_helper.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/_params.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/_strategies.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/http/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/http/_http_config.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/parquet/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/parquet/_parquet_options.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/sqlalchemy/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/sqlalchemy/_db_connection.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/sqlalchemy/_db_gatekeeper.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/sqlalchemy/_io_dask.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/sqlalchemy/_load_from_db.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/sqlalchemy/_model_registry.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/sqlalchemy/_sql_model_builder.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/backends/utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/core/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/core/_defaults.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/core/_filter_handler.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/core/_params_config.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_helper/core/_query_config.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_validator/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/df_validator/_df_validator.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/core.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/discovery_updater.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/env.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/env_engine.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/env_generator.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/rule_generator.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/templates/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/templates/discovery_params.yaml +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/templates/gen_dc.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/init/templates/property_template.yaml +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/logger/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/logger/_logger.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/mcp/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/mcp/client.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/mcp/router.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/orchestration/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/orchestration/_artifact_orchestrator.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/orchestration/_pipeline_executor.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/osmnx_helper/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/osmnx_helper/_pbf_handler.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/osmnx_helper/graph_loader.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/osmnx_helper/utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/parquet/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/parquet/readers/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/parquet/readers/base.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/parquet/readers/parquet.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/parquet/saver/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/parquet/saver/_parquet_saver.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/parquet/saver/_write_gatekeeper.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/pipelines/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/pipelines/base.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/pipelines/template.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/py.typed +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/readers/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/readers/base.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/storage/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/storage/_fs_registry.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/storage/_storage_manager.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/storage/factory.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/clickhouse_writer/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/clickhouse_writer/_clickhouse_writer.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/common.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/dask_utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/data_utils/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/data_utils/_data_utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/dataframe_utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/date_utils/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/date_utils/_business_days.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/date_utils/_date_utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/date_utils/_file_age_checker.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/file_utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/filepath_generator/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/filepath_generator/_filepath_generator.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.6}/src/sibi_flux/utils/retry.py +0 -0
```diff
--- sibi_flux-2026.1.5/PKG-INFO
+++ sibi_flux-2026.1.6/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: sibi-flux
-Version: 2026.1.5
+Version: 2026.1.6
 Summary: Sibi Toolkit: A collection of tools for Data Analysis/Engineering.
 Author: Luis Valverde
 Author-email: Luis Valverde <lvalverdeb@gmail.com>
@@ -52,30 +52,10 @@ Description-Content-Type: text/markdown
 
 **SibiFlux** is a production-grade resilient data engineering ecosystem designed to bridge the gap between local development, distributed computing, and agentic AI workflows. It provides a unified engine for hybrid data loading (batch + streaming), self-healing distributed operations, and native interfaces for AI agents via the Model Context Protocol (MCP).
 
-
-
-
-
-        Router <--> Resources["SibiFlux Resources"]
-    end
-
-    subgraph G2["Solutions Layer (Business Logic)"]
-        Logistics["Logistics Solutions"]
-        Enrichment["Enrichment Pipelines"]
-        Cubes["DataCubes"]
-    end
-
-    subgraph G3["SibiFlux Core Engine"]
-        DfHelper["DfHelper (Unified Loader)"]
-        Cluster["Resilient Dask Cluster"]
-        Managed["ManagedResource Lifecycle"]
-    end
-
-    Resources --> Cubes
-    Logistics --> DfHelper
-    Cubes --> DfHelper
-    DfHelper --> Cluster
-```
+
+
+## Documentation
+Full documentation is available in [src/docs/index.md](src/docs/index.md).
 
 ## Core Architecture
 
```
```diff
--- sibi_flux-2026.1.5/README.md
+++ sibi_flux-2026.1.6/README.md
@@ -2,30 +2,10 @@
 
 **SibiFlux** is a production-grade resilient data engineering ecosystem designed to bridge the gap between local development, distributed computing, and agentic AI workflows. It provides a unified engine for hybrid data loading (batch + streaming), self-healing distributed operations, and native interfaces for AI agents via the Model Context Protocol (MCP).
 
-
-
-
-
-        Router <--> Resources["SibiFlux Resources"]
-    end
-
-    subgraph G2["Solutions Layer (Business Logic)"]
-        Logistics["Logistics Solutions"]
-        Enrichment["Enrichment Pipelines"]
-        Cubes["DataCubes"]
-    end
-
-    subgraph G3["SibiFlux Core Engine"]
-        DfHelper["DfHelper (Unified Loader)"]
-        Cluster["Resilient Dask Cluster"]
-        Managed["ManagedResource Lifecycle"]
-    end
-
-    Resources --> Cubes
-    Logistics --> DfHelper
-    Cubes --> DfHelper
-    DfHelper --> Cluster
-```
+
+
+## Documentation
+Full documentation is available in [src/docs/index.md](src/docs/index.md).
 
 ## Core Architecture
 
```
```diff
--- sibi_flux-2026.1.5/pyproject.toml
+++ sibi_flux-2026.1.6/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "sibi-flux"
-version = "2026.1.5"
+version = "2026.1.6"
 description = "Sibi Toolkit: A collection of tools for Data Analysis/Engineering."
 readme = "README.md"
 authors = [
@@ -127,6 +127,21 @@ dc-scan = "python solutions/generators/datacubes/gen_dc.py scan"
 dc-match = "python solutions/generators/datacubes/gen_dc.py match"
 dc-map = "python solutions/generators/datacubes/gen_dc.py map"
 
+# --- Internal Snapshot Tasks ---
+_test_snapshots_core = "python tests/solutions/gendatacubes/test_snapshots.py"
+_test_snapshots_cli = "python tests/solutions/gendatacubes/test_cli_snapshot.py"
+_test_snapshots_env = "python tests/solutions/init/test_env_generator_snapshots.py"
+_test_snapshots_rules = "python tests/solutions/init/test_rule_engine_snapshots.py"
+
+[tool.poe.tasks.test-snapshots]
+help = "Run all regression snapshot tests"
+sequence = ["_test_snapshots_core", "_test_snapshots_cli", "_test_snapshots_env", "_test_snapshots_rules"]
+
+[tool.poe.tasks.update-snapshots]
+help = "Update all regression snapshots (Golden Masters)"
+sequence = ["_test_snapshots_core", "_test_snapshots_cli", "_test_snapshots_env", "_test_snapshots_rules"]
+env = { UPDATE_SNAPSHOTS = "1" }
+
 [tool.poe.tasks.release]
 sequence = ["build","publish"]
 envfile = ".env" # Loads the token for the whole sequence
```
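Both task groups run the identical script sequence; only the `UPDATE_SNAPSHOTS = "1"` environment variable separates verifying golden masters from rewriting them. A minimal sketch of that convention, assuming the test scripts follow the usual golden-master pattern (the scripts themselves are not part of this diff; `check_snapshot` and the `snapshots/` directory are illustrative):

```python
# Hypothetical helper illustrating the UPDATE_SNAPSHOTS convention implied by
# the tasks above; names and layout are assumptions, not the package's code.
import os
from pathlib import Path

def check_snapshot(name: str, actual: str, snap_dir: Path = Path("snapshots")) -> None:
    snap = snap_dir / f"{name}.txt"
    if os.environ.get("UPDATE_SNAPSHOTS") == "1":
        # `poe update-snapshots` mode: rewrite the golden master.
        snap.parent.mkdir(parents=True, exist_ok=True)
        snap.write_text(actual)
        return
    # `poe test-snapshots` mode: fail if output drifted from the golden master.
    assert snap.read_text() == actual, f"snapshot mismatch: {snap}"
```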
```diff
--- sibi_flux-2026.1.5/src/sibi_flux/cli.py
+++ sibi_flux-2026.1.6/src/sibi_flux/cli.py
@@ -34,6 +34,42 @@ def init(
     initialize_project(project_name, lib, app)
 
 
+@app.command()
+def create_app(
+    name: str = typer.Argument(..., help="Name of the application to create"),
+):
+    """
+    Create a new application within an existing Sibi Flux project.
+
+    Generates standard directory structure in `<project_root>/<name>`.
+    """
+    from sibi_flux.init.app import init_app
+    init_app(name)
+
+
+@app.command()
+def create_cubes(
+    app_name: str = typer.Argument(..., help="Name of the application"),
+):
+    """
+    Generate app-specific Datacube extensions from `<app_name>/datacubes/datacubes.yaml`.
+    """
+    from sibi_flux.init.cube_extender import create_cubes
+    create_cubes(app_name)
+
+
+@app.command()
+def propose_cubes(
+    db_domain: str = typer.Argument(..., help="Database domain to filter by (e.g., 'ibis_dev')"),
+    app_name: str = typer.Argument(..., help="Name of the target application"),
+):
+    """
+    Scan global registry for datacubes in <db_domain> and add them to <app_name>/datacubes/datacubes.yaml.
+    """
+    from sibi_flux.init.cube_proposer import propose_cubes
+    propose_cubes(db_domain, app_name)
+
+
 @app.command()
 def env(
     project_path: Path = typer.Argument(Path("."), help="Project root directory"),
```
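All three commands register on the package's existing Typer app, so they surface on the CLI as `create-app`, `create-cubes`, and `propose-cubes` (Typer converts underscores in function names to hyphens). A hedged smoke test, assuming `app` is the Typer instance importable from `sibi_flux.cli`; the argument values are illustrative, and note that these commands create files in the working directory:

```python
# Exercising the new commands with Typer's test runner (typer.testing wraps
# click.testing.CliRunner). 'logistics' and 'ibis_dev' are made-up names.
from typer.testing import CliRunner
from sibi_flux.cli import app

runner = CliRunner()
print(runner.invoke(app, ["create-app", "logistics"]).output)
print(runner.invoke(app, ["propose-cubes", "ibis_dev", "logistics"]).output)
print(runner.invoke(app, ["create-cubes", "logistics"]).output)
```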
```diff
--- sibi_flux-2026.1.5/src/sibi_flux/datacube/cli.py
+++ sibi_flux-2026.1.6/src/sibi_flux/datacube/cli.py
@@ -258,6 +258,42 @@ def sync(
 
     # Start with empty/default registry
     config_data = _load_and_resolve_config(config_path)
+
+    # Load existing Global Registry to preserve manual edits (e.g. custom_name)
+    # We must flatten the grouped structure (by config object) into a single tables dict
+    params = context.params or {}
+    reg_rel_path = params.get("paths", {}).get("repositories", {}).get(
+        "global_datacube_registry_file"
+    ) or params.get("global_datacube_registry_file")
+
+    if reg_rel_path:
+        reg_file = Path(reg_rel_path)
+        if not reg_file.is_absolute():
+            try:
+                # Heuristic: config_path is in generators/datacubes/, project root is 3 levels up
+                prj_root = config_path.parent.parent.parent
+                reg_file = prj_root / reg_rel_path
+            except Exception:
+                reg_file = Path.cwd() / reg_rel_path
+
+        if reg_file.exists():
+            try:
+                with open(reg_file, "r") as rf:
+                    existing_reg_data = yaml.safe_load(rf) or {}
+
+                flat_tables = {}
+                for grp, tbls in existing_reg_data.items():
+                    if isinstance(tbls, dict):
+                        for t, t_meta in tbls.items():
+                            # Inject the config object (group key) so DatacubeRegistry knows the connection
+                            t_meta["connection_obj"] = grp
+                            flat_tables[t] = t_meta
+
+                config_data["tables"] = flat_tables
+                console.print(f"[dim]Loaded {len(flat_tables)} existing registry entries for merge preservation.[/dim]")
+            except Exception as e:
+                console.print(f"[yellow]Warning: Could not load existing registry: {e}[/yellow]")
+
     registry = DatacubeRegistry(config_data, params=context.params)
 
     # --- Aggregation Phase ---
```
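The new block flattens the on-disk registry, which groups tables under their config object, into the single `tables` dict that `DatacubeRegistry` expects, tagging each entry with its group via `connection_obj`. A worked example with made-up registry contents:

```python
# Shape of the flattening above: grouped-by-config on disk, flat in memory.
existing_reg_data = {
    "ibis_dev": {"products": {"class_name": "ProductsDc", "path": "a/b.py"}},
    "istmo360n": {"biometric": {"class_name": "BiometricDc", "path": "c/d.py"}},
}
flat_tables = {}
for grp, tbls in existing_reg_data.items():
    if isinstance(tbls, dict):
        for t, t_meta in tbls.items():
            t_meta["connection_obj"] = grp  # remember which connection owns the table
            flat_tables[t] = t_meta

print(flat_tables["products"]["connection_obj"])   # ibis_dev
print(flat_tables["biometric"]["connection_obj"])  # istmo360n
```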
```diff
--- sibi_flux-2026.1.5/src/sibi_flux/datacube/cli.py
+++ sibi_flux-2026.1.6/src/sibi_flux/datacube/cli.py
@@ -378,9 +414,16 @@ def sync(
         imp = [db_imp] if db_imp else registry.global_imports
         return resolve_db_url(conf_name, imp)
 
-    _run_field_map_generation(
+    generated_maps = _run_field_map_generation(
         context, config_path, databases, get_url_safe, force=force
     )
+
+    if generated_maps:
+        for table_name, mod_path in generated_maps.items():
+            if table_name in registry.tables:
+                # console.print(f"[green]DEBUG: Updating {table_name} with field_map {mod_path}[/green]")
+                registry.tables[table_name]["field_map"] = mod_path
+
     # Ensure new modules are picked up
     importlib.invalidate_caches()
 
@@ -533,11 +576,18 @@ def sync(
         if conf_obj not in generated_registry:
             generated_registry[conf_obj] = {}
 
-        generated_registry[conf_obj][t_name] = {
+        # Preserve custom_name from existing registry if present
+        existing_meta = registry.get_table_details(t_name)
+        custom_name = existing_meta.get("custom_name")
+
+        entry_data = {
             "class_name": cls_n,
             "path": str(rel_path),
+            "custom_name": custom_name,
         }
 
+        generated_registry[conf_obj][t_name] = entry_data
+
     console.print(summary_table)
 
     # --- Write Datacube Registry ---
@@ -1904,6 +1954,9 @@ def whitelist(
         # Merge: update rule defaults only if not set in existing
         merged = rule_meta.copy()
         merged.update(existing_meta)  # Existing overwrites rule
+
+        # Legacy Cleanup: We moved custom_name to Registry
+        merged.pop("custom_name", None)
 
         # Restore calculated paths (Enforce Relative)
         if "datacube_path" in rule_meta:
@@ -2099,6 +2152,8 @@ def _run_field_map_generation(
             "[yellow]Warning: Could not determine global field_maps_dir for clean build.[/yellow]"
         )
 
+    generated_maps = {}
+
     for db in target_dbs:
         # console.print(f"DEBUG: Processing DB entry: {db} (Type: {type(db)})")
         if isinstance(db, str):
@@ -2223,10 +2278,6 @@ def _run_field_map_generation(
         if not found:
             rules = []
 
-        # console.print(f"DEBUG: Loaded {len(rules)} rules from {rules_path} for {conn_obj}")
-
-        # console.print(f"DEBUG: Loaded {len(rules)} rules from {rules_path}")
-
         # Support List or Dict Format
         scoped_data = registry_data.get(conn_obj, {})
         if isinstance(scoped_data, list):
@@ -2448,6 +2499,21 @@ def _run_field_map_generation(
             with open(target_file, "w") as f:
                 f.write("\n".join(lines))
 
+            # Calculate Import Path
+            try:
+                # Ensure we get relative path to project root (which should be sys.path root)
+                if "project_root" not in locals():
+                    project_root = Path.cwd()
+
+                rel_py_path = target_file.relative_to(project_root)
+                module_path = str(rel_py_path.with_suffix("")).replace("/", ".")
+                generated_maps[table_name] = f"{module_path}.field_map"
+            except ValueError:
+                # Fallback if outside project root?
+                pass
+            except Exception as e:
+                pass
+
         except Exception as e:
             console.print(f"[red]Error processing {table_name}: {e}[/red]")
             continue
@@ -2464,6 +2530,8 @@ def _run_field_map_generation(
     except Exception as e:
         console.print(f"[red]Failed to save Global Field Repository: {e}[/red]")
 
+    return generated_maps
+
 
 if __name__ == "__main__":
     app()
```
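The added `return generated_maps` plus the import-path block above feed the `field_map` update loop in `sync`: each generated file's location is converted into a dotted module path ending in `.field_map`. A worked example with hypothetical paths:

```python
# Worked example of the import-path calculation; both paths are made up,
# only the mechanics mirror the diff above.
from pathlib import Path

project_root = Path("/work/myproj")
target_file = Path("/work/myproj/dataobjects/fields/ibis_dev/products.py")

rel_py_path = target_file.relative_to(project_root)  # dataobjects/fields/ibis_dev/products.py
module_path = str(rel_py_path.with_suffix("")).replace("/", ".")
print(f"{module_path}.field_map")  # dataobjects.fields.ibis_dev.products.field_map
```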
```diff
--- sibi_flux-2026.1.5/src/sibi_flux/datacube/generator.py
+++ sibi_flux-2026.1.6/src/sibi_flux/datacube/generator.py
@@ -531,6 +531,15 @@ class DatacubeRegistry:
             or self.config.get("class_suffix")
             or self.params.get("class_suffix", "Dc")
         )
+        self._enforce_custom_names()
+
+    def _enforce_custom_names(self) -> None:
+        """
+        Ensures that if custom_name is set, it overrides class_name explicitly.
+        """
+        for table, meta in self.tables.items():
+            if meta.get("custom_name"):
+                meta["class_name"] = meta["custom_name"]
 
     def get_table_details(self, table_name: str) -> dict[str, Any]:
         return self.tables.get(table_name, {})
```
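The new `_enforce_custom_names` pass makes `custom_name` authoritative wherever it is set, overwriting the generated `class_name`. The rule in isolation, with illustrative table metadata:

```python
# Tracing _enforce_custom_names on made-up registry entries.
tables = {
    "products": {"class_name": "ProductsDc", "custom_name": "CatalogDc"},
    "orders": {"class_name": "OrdersDc", "custom_name": None},
}
for table, meta in tables.items():
    if meta.get("custom_name"):
        meta["class_name"] = meta["custom_name"]  # custom_name wins

print(tables["products"]["class_name"])  # CatalogDc (overridden)
print(tables["orders"]["class_name"])    # OrdersDc (unchanged)
```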
```diff
--- sibi_flux-2026.1.5/src/sibi_flux/datacube/generator.py
+++ sibi_flux-2026.1.6/src/sibi_flux/datacube/generator.py
@@ -573,7 +582,10 @@ class DatacubeRegistry:
             elif k not in existing:
                 existing[k] = v
 
-        if "class_name" not in existing:
+        # Override class_name if custom_name is set
+        if existing.get("custom_name"):
+            existing["class_name"] = existing["custom_name"]
+        elif "class_name" not in existing:
             existing["class_name"] = new_details.get("class_name")
 
         self.tables[table] = existing
```
```diff
--- sibi_flux-2026.1.5/src/sibi_flux/datacube/orchestrator.py
+++ sibi_flux-2026.1.6/src/sibi_flux/datacube/orchestrator.py
@@ -554,8 +554,23 @@ class DiscoveryOrchestrator:
                 "[yellow]Prune active: Registry replaced with discovery results.[/]"
             )
         else:
-            # Merge:
-            current_data["tables"].update(new_entries)
+            # Smart Merge: Preserve 'custom_name' and 'class_name' from existing entries
+            # if they are not explicitly overridden by the new entry.
+            for table, new_meta in new_entries.items():
+                if table in current_data["tables"]:
+                    existing = current_data["tables"][table]
+
+                    # 1. Preserve custom_name if new entry doesn't specify one
+                    if not new_meta.get("custom_name") and existing.get("custom_name"):
+                        new_meta["custom_name"] = existing["custom_name"]
+
+                    # 2. Also preserve class_name (since it's driven by custom_name)
+                    # We only preserve class_name if custom_name was preserved
+                    # AND new entry didn't explicitly change class_name logic (unlikely unless configured)
+                    if existing.get("class_name"):
+                        new_meta["class_name"] = existing["class_name"]
+
+                current_data["tables"][table] = new_meta
 
         # Sort tables for readability
         current_data["tables"] = dict(sorted(current_data["tables"].items()))
```
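The smart merge is what lets a manually assigned `custom_name` survive re-discovery. Tracing the loop above with illustrative data:

```python
# Before: a manually renamed table; after: re-discovery proposes a new path
# but must not clobber the manual name. All values are made up.
current_data = {"tables": {"products": {
    "class_name": "CatalogDc", "custom_name": "CatalogDc", "path": "old/path.py",
}}}
new_entries = {"products": {"class_name": "ProductsDc", "path": "new/path.py"}}

for table, new_meta in new_entries.items():
    if table in current_data["tables"]:
        existing = current_data["tables"][table]
        if not new_meta.get("custom_name") and existing.get("custom_name"):
            new_meta["custom_name"] = existing["custom_name"]
        if existing.get("class_name"):
            new_meta["class_name"] = existing["class_name"]
    current_data["tables"][table] = new_meta

print(current_data["tables"]["products"])
# {'class_name': 'CatalogDc', 'path': 'new/path.py', 'custom_name': 'CatalogDc'}
```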
```diff
--- /dev/null
+++ sibi_flux-2026.1.6/src/sibi_flux/init/app.py
@@ -0,0 +1,111 @@
+
+from pathlib import Path
+import os
+from rich.console import Console
+import typer
+import yaml
+
+console = Console()
+
+def init_app(name: str) -> None:
+    """
+    Initialize a new application within the current project root.
+
+    Args:
+        name: The name of the application to create (e.g., 'inventory', 'pricing').
+    """
+
+    # 1. Validation: Ensure we are in a Sibi Flux project root (check for pyproject.toml as heuristic)
+    cwd = Path(os.getcwd())
+    if not (cwd / "pyproject.toml").exists():
+        console.print("[yellow]Warning: pyproject.toml not found. Are you in a project root?[/yellow]")
+
+    app_dir = cwd / name
+
+    if app_dir.exists():
+        console.print(f"[red]Error: Application directory '{name}' already exists.[/red]")
+        raise typer.Exit(code=1)
+
+    console.print(f"[bold blue]Initializing new application: {name}[/bold blue]")
+
+    # 2. Create Directory Structure
+    structure = [
+        "api",
+        "datacubes",
+        "readers",
+        "aggregators",
+    ]
+
+    app_dir.mkdir()
+    (app_dir / "__init__.py").touch()
+
+    for folder in structure:
+        path = app_dir / folder
+        path.mkdir()
+        (path / "__init__.py").touch()
+
+    # 2.1 Create datacubes extension registry template
+    datacubes_yaml = app_dir / "datacubes" / "datacubes.yaml"
+    template_yaml = """
+cubes:
+  # Define your app-specific datacube extensions here
+  # - source: dataobjects.gencubes.ibis_dev.products.products_cubes.ProductsDc
+  #   name: LogisticsProductsDc
+  #   module: products # -> logistics/datacubes/products.py
+"""
+    datacubes_yaml.write_text(template_yaml.strip())
+
+    # 3. Create Basic Router Template
+    router_template = f"""
+from fastapi import APIRouter
+
+router = APIRouter()
+
+@router.get("/")
+async def root():
+    return {{"message": "Hello from {name}!"}}
+"""
+    (app_dir / "api" / "main.py").write_text(router_template.strip())
+
+    # 4. Register in conf/apps.yaml
+    conf_dir = cwd / "conf"
+    if not conf_dir.exists():
+        conf_dir.mkdir()
+
+    apps_yaml_path = conf_dir / "apps.yaml"
+
+    apps_data = {"apps": []}
+    if apps_yaml_path.exists():
+        try:
+            with open(apps_yaml_path, "r") as f:
+                loaded = yaml.safe_load(f)
+            if loaded and isinstance(loaded, dict) and "apps" in loaded:
+                apps_data = loaded
+            elif loaded is None:
+                pass  # empty file
+            else:
+                console.print(f"[yellow]Warning: conf/apps.yaml has unexpected structure. Initializing with empty list.[/yellow]")
+        except Exception as e:
+            console.print(f"[yellow]Warning: Could not read existing apps.yaml: {e}[/yellow]")
+
+    if name not in apps_data["apps"]:
+        apps_data["apps"].append(name)
+        with open(apps_yaml_path, "w") as f:
+            yaml.dump(apps_data, f, default_flow_style=False)
+        console.print(f"[green]Registered '{name}' in conf/apps.yaml[/green]")
+    else:
+        console.print(f"[yellow]App '{name}' already registered in conf/apps.yaml[/yellow]")
+
+    # 5. Success Message & Instructions
+    console.print(f"[bold green]Successfully created application '{name}'![/bold green]")
+    console.print(f"Location: {name}/")
+    console.print("\n[yellow]Next Steps:[/yellow]")
+    console.print(f"1. Register your new router in [bold]main.py[/bold] (or wherever your app is defined):")
+
+    code_snippet = f"""
+from {name}.api.main import router as {name}_router
+
+app.include_router({name}_router, prefix="/{name}", tags=["{name}"])
+"""
+    console.print(code_snippet)
+
```
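Derived from the code above, `init_app("logistics")` (the name is illustrative) would produce roughly this layout and register the app in `conf/apps.yaml`:

```
logistics/
├── __init__.py
├── api/
│   ├── __init__.py
│   └── main.py          # FastAPI router stub
├── datacubes/
│   ├── __init__.py
│   └── datacubes.yaml   # extension registry template
├── readers/
│   └── __init__.py
└── aggregators/
    └── __init__.py
conf/
└── apps.yaml            # "logistics" appended to the apps list
```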
```diff
--- /dev/null
+++ sibi_flux-2026.1.6/src/sibi_flux/init/cube_extender.py
@@ -0,0 +1,149 @@
+
+import yaml
+from pathlib import Path
+import os
+from typing import List, Dict, Any
+from rich.console import Console
+import typer
+import importlib.util
+import sys
+
+console = Console()
+
+def create_cubes(app_name: str) -> None:
+    """
+    Generate app-specific Datacube extensions based on local configuration.
+
+    Args:
+        app_name: The name of the application (must exist in project root).
+    """
+    cwd = Path(os.getcwd())
+
+    # 1. Validate App Directory
+    app_dir = cwd / app_name
+    if not app_dir.exists() or not app_dir.is_dir():
+        console.print(f"[red]Error: Application directory '{app_name}' not found.[/red]")
+        raise typer.Exit(code=1)
+
+    datacubes_dir = app_dir / "datacubes"
+    if not datacubes_dir.exists():
+        console.print(f"[yellow]Creating 'datacubes' directory for {app_name}...[/yellow]")
+        datacubes_dir.mkdir(parents=True, exist_ok=True)
+        (datacubes_dir / "__init__.py").touch()
+
+    # 2. Load Registry (datacubes.yaml)
+    registry_path = datacubes_dir / "datacubes.yaml"
+    if not registry_path.exists():
+        console.print(f"[red]Error: Registry file not found at {registry_path}[/red]")
+        console.print("Please define your extensions in this file first.")
+        # Create a template if missing?
+        template = """
+cubes:
+  # - source: dataobjects.gencubes.ibis_dev.products.products_cubes.ProductsDc
+  #   name: LogisticsProductsDc
+  #   module: products
+"""
+        registry_path.write_text(template.strip())
+        console.print(f"[green]Created template at {registry_path}. Edit it and run again.[/green]")
+        raise typer.Exit(code=1)
+
+    try:
+        with open(registry_path, "r") as f:
+            config = yaml.safe_load(f)
+    except Exception as e:
+        console.print(f"[red]Error parsing {registry_path}: {e}[/red]")
+        raise typer.Exit(code=1)
+
+    if not config or "cubes" not in config or not isinstance(config["cubes"], list):
+        console.print(f"[yellow]Warning: No 'cubes' list found in {registry_path}.[/yellow]")
+        return
+
+    # 3. Group by Module
+    from collections import defaultdict
+    cubes_by_module = defaultdict(list)
+    for entry in config["cubes"]:
+        mod = entry.get("module")
+        if mod:
+            cubes_by_module[mod].append(entry)
+
+    console.print(f"[bold blue]Processing {len(config['cubes'])} datacube extensions across {len(cubes_by_module)} modules...[/bold blue]")
+
+    for module_name, entries in cubes_by_module.items():
+        process_module_group(module_name, entries, datacubes_dir, app_name)
+
+
+def process_module_group(module_name: str, entries: List[Dict[str, Any]], datacubes_dir: Path, app_name: str) -> None:
+    target_file = datacubes_dir / f"{module_name}.py"
+
+    # Read existing content if file exists
+    existing_content = ""
+    if target_file.exists():
+        existing_content = target_file.read_text()
+
+    new_imports = set()
+    new_classes = []
+
+    for entry in entries:
+        source_path = entry.get("source")
+        class_name = entry.get("name")
+
+        if not all([source_path, class_name]):
+            console.print(f"[red]Skipping invalid entry in {module_name}: {entry}[/red]")
+            continue
+
+        # Check for idempotency: does class definition already exist?
+        if f"class {class_name}" in existing_content:
+            console.print(f"[dim]Skipping {class_name} (already exists in {module_name}.py)[/dim]")
+            continue
+
+        # Parse Source
+        try:
+            source_module_str, source_class = source_path.rsplit(".", 1)
+        except ValueError:
+            console.print(f"[red]Invalid source format '{source_path}' for {class_name}[/red]")
+            continue
+
+        # Prepare Import
+        import_stmt = f"from {source_module_str} import {source_class}"
+
+        # Check if import exists in file or is already queued
+        if import_stmt not in existing_content:
+            new_imports.add(import_stmt)
+
+        # Prepare Class
+        class_code = f"""
+class {class_name}({source_class}):
+    \"\"\"
+    App-specific extension of {source_class} for '{app_name}'.
+
+    Source: {source_path}
+    \"\"\"
+    pass
+"""
+        new_classes.append(class_code.strip())
+        console.print(f"[green]Queued generation for {class_name}[/green]")
+
+    if new_classes:
+        mode = "a" if target_file.exists() else "w"
+
+        content_parts = []
+
+        # Add imports first
+        if new_imports:
+            sorted_imports = sorted(list(new_imports))
+            content_parts.extend(sorted_imports)
+            content_parts.append("")  # Spacer
+
+        # Add classes
+        content_parts.extend(new_classes)
+
+        with open(target_file, mode) as f:
+            if mode == "a" and existing_content.strip():
+                f.write("\n\n")  # Ensure separation from previous content
+
+            f.write("\n\n".join(content_parts))
+            f.write("\n")
+
+        console.print(f"[green]Updated {target_file.name} with {len(new_classes)} new classes.[/green]")
+    else:
+        console.print(f"[dim]No changes needed for {target_file.name}[/dim]")
```
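Following the `class_code` template above, the commented example entry in `datacubes.yaml` would cause `create_cubes("logistics")` to emit a `logistics/datacubes/products.py` along these lines (formatting approximate; the source class and names come from the template's own example):

```python
# Expected content of logistics/datacubes/products.py for the entry:
#   - source: dataobjects.gencubes.ibis_dev.products.products_cubes.ProductsDc
#     name: LogisticsProductsDc
#     module: products
from dataobjects.gencubes.ibis_dev.products.products_cubes import ProductsDc


class LogisticsProductsDc(ProductsDc):
    """
    App-specific extension of ProductsDc for 'logistics'.

    Source: dataobjects.gencubes.ibis_dev.products.products_cubes.ProductsDc
    """
    pass
```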
```diff
--- /dev/null
+++ sibi_flux-2026.1.6/src/sibi_flux/init/cube_proposer.py
@@ -0,0 +1,134 @@
+
+import yaml
+from pathlib import Path
+import os
+from typing import List, Dict, Any
+from rich.console import Console
+import typer
+import sys
+
+console = Console()
+
+def propose_cubes(db_domain: str, app_name: str) -> None:
+    """
+    Scans the global datacube registry for cubes matching 'db_domain'
+    and adds them to the app's datacubes.yaml.
+
+    Args:
+        db_domain: The database domain / folder name to filter by (e.g., 'ibis_dev', 'istmo360n').
+        app_name: The name of the target application.
+    """
+    cwd = Path(os.getcwd())
+
+    # 1. Validate App Directory
+    app_dir = cwd / app_name
+    if not app_dir.exists() or not app_dir.is_dir():
+        console.print(f"[red]Error: Application directory '{app_name}' not found.[/red]")
+        raise typer.Exit(code=1)
+
+    datacubes_dir = app_dir / "datacubes"
+    datacubes_dir.mkdir(parents=True, exist_ok=True)  # Ensure exists
+
+    registry_path = datacubes_dir / "datacubes.yaml"
+
+    # 2. Locate Global Registry
+    # Heuristic: dataobjects/globals/datacube_registry.yaml
+    global_reg_path = cwd / "dataobjects/globals/datacube_registry.yaml"
+
+    if not global_reg_path.exists():
+        console.print(f"[red]Error: Global registry not found at {global_reg_path}[/red]")
+        console.print("Run 'uv run sibi-flux dc sync' first to generate the registry.")
+        raise typer.Exit(code=1)
+
+    try:
+        with open(global_reg_path, "r") as f:
+            global_data = yaml.safe_load(f)
+    except Exception as e:
+        console.print(f"[red]Error parsing global registry: {e}[/red]")
+        raise typer.Exit(code=1)
+
+    if not global_data:
+        console.print("[yellow]Global registry is empty.[/yellow]")
+        return
+
+    # 3. Find Matches
+    matches = []
+
+    # Registry Structure: {conf_obj: {table_name: {class_name: ..., path: ...}}}
+    for conf_obj, tables in global_data.items():
+        for table_name, meta in tables.items():
+            path_str = meta.get("path", "")
+            class_name = meta.get("class_name")
+
+            # Check if domain matches path logic
+            # e.g. path="dataobjects/gencubes/istmo360n/..." and db_domain="istmo360n"
+            if db_domain in path_str.split("/"):
+                # Clean source path for python import
+                # "dataobjects/gencubes/..." -> "dataobjects.gencubes...."
+                # remove extension .py
+                module_path = path_str.replace("/", ".").rstrip(".py")
+                source_path = f"{module_path}.{class_name}"
+
+                # Determine target module name (the last part of the path directory?)
+                # e.g. .../biometric/biometric_cubes.py -> "biometric"
+                try:
+                    # heuristic: parent dir name or filename base?
+                    # path: dataobjects/gencubes/istmo360n/biometric/biometric_cubes.py
+                    # logical group: biometric
+
+                    p = Path(path_str)
+                    module_group = p.parent.name  # biometric
+                    if module_group == db_domain:
+                        # fallback if nested directly under domain
+                        module_group = p.stem.replace("_cubes", "")
+
+                except Exception:
+                    module_group = "common"
+
+                matches.append({
+                    "source": source_path,
+                    "name": f"{app_name.capitalize()}{class_name}",  # Default naming: App + Class
+                    "module": module_group,
+                    "original_class": class_name
+                })
+
+    if not matches:
+        console.print(f"[yellow]No datacubes found for domain '{db_domain}' in registry.[/yellow]")
+        return
+
+    console.print(f"[green]Found {len(matches)} matching datacubes.[/green]")
+
+    # 4. Update App Registry
+
+    # Load existing
+    current_config = {"cubes": []}
+    if registry_path.exists():
+        try:
+            with open(registry_path, "r") as f:
+                loaded = yaml.safe_load(f)
+            if loaded and "cubes" in loaded:
+                current_config = loaded
+        except Exception:
+            pass
+
+    # Deduplicate
+    existing_sources = {c.get("source") for c in current_config["cubes"]}
+    added_count = 0
+
+    for m in matches:
+        if m["source"] not in existing_sources:
+            entry = {
+                "source": m["source"],
+                "name": m["name"],
+                "module": m["module"]
+            }
+            current_config["cubes"].append(entry)
+            existing_sources.add(m["source"])
+            added_count += 1
+
+    # Save
+    with open(registry_path, "w") as f:
+        yaml.dump(current_config, f, sort_keys=False)
+
+    console.print(f"[bold green]Added {added_count} new entries to {registry_path}[/bold green]")
+    console.print(f"Next: Run [blue]uv run sibi-flux create-cubes {app_name}[/blue] to generate code.")
```
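Tracing the matching and name derivation above with an illustrative registry entry. One caveat worth noting: `str.rstrip(".py")` strips a trailing character set rather than the literal suffix, so it behaves correctly here but would over-trim a module stem ending in 'p' or 'y' (`Path.with_suffix("")`, as used in datacube/cli.py, strips only the extension):

```python
# Illustrative registry entry; values are made up.
from pathlib import Path

path_str = "dataobjects/gencubes/istmo360n/biometric/biometric_cubes.py"
class_name = "BiometricDc"
db_domain = "istmo360n"

assert db_domain in path_str.split("/")  # path component match, not substring

module_path = path_str.replace("/", ".").rstrip(".py")  # trims trailing '.', 'p', 'y'
source = f"{module_path}.{class_name}"

p = Path(path_str)
module_group = p.parent.name              # "biometric"
if module_group == db_domain:             # cube sits directly under the domain folder
    module_group = p.stem.replace("_cubes", "")

print(source)        # dataobjects.gencubes.istmo360n.biometric.biometric_cubes.BiometricDc
print(module_group)  # biometric
```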