sibi-flux 2026.1.5.tar.gz → 2026.1.7.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/PKG-INFO +5 -25
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/README.md +4 -24
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/pyproject.toml +16 -1
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/cli.py +36 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/datacube/cli.py +117 -36
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/datacube/generator.py +13 -1
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/datacube/orchestrator.py +17 -2
- sibi_flux-2026.1.7/src/sibi_flux/init/app.py +111 -0
- sibi_flux-2026.1.7/src/sibi_flux/init/cube_extender.py +149 -0
- sibi_flux-2026.1.7/src/sibi_flux/init/cube_proposer.py +134 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_dst/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/artifacts/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/artifacts/base.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/artifacts/parquet.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/artifacts/parquet_engine/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/artifacts/parquet_engine/executor.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/artifacts/parquet_engine/manifest.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/artifacts/parquet_engine/planner.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/config/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/config/manager.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/config/settings.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/core/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/core/managed_resource/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/core/managed_resource/_managed_resource.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/core/type_maps/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/dask_cluster/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/dask_cluster/async_core.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/dask_cluster/client_manager.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/dask_cluster/core.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/dask_cluster/exceptions.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/dask_cluster/utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/datacube/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/datacube/_data_cube.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/datacube/config_engine.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/datacube/field_factory.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/datacube/field_mapper.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/datacube/field_registry.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/datacube/router.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/dataset/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/dataset/_dataset.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/dataset/hybrid_loader.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_enricher/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_enricher/async_enricher.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_enricher/attacher.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_enricher/merger.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_enricher/specs.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_enricher/types.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/_df_helper.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/_params.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/_strategies.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/http/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/http/_http_config.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/parquet/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/parquet/_parquet_options.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/sqlalchemy/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/sqlalchemy/_db_connection.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/sqlalchemy/_db_gatekeeper.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/sqlalchemy/_io_dask.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/sqlalchemy/_load_from_db.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/sqlalchemy/_model_registry.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/sqlalchemy/_sql_model_builder.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/backends/utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/core/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/core/_defaults.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/core/_filter_handler.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/core/_params_config.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_helper/core/_query_config.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_validator/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/df_validator/_df_validator.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/core.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/discovery_updater.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/env.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/env_engine.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/env_generator.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/rule_generator.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/templates/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/templates/discovery_params.yaml +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/templates/gen_dc.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/init/templates/property_template.yaml +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/logger/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/logger/_logger.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/mcp/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/mcp/client.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/mcp/router.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/orchestration/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/orchestration/_artifact_orchestrator.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/orchestration/_pipeline_executor.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/osmnx_helper/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/osmnx_helper/_pbf_handler.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/osmnx_helper/graph_loader.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/osmnx_helper/utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/parquet/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/parquet/readers/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/parquet/readers/base.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/parquet/readers/parquet.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/parquet/saver/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/parquet/saver/_parquet_saver.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/parquet/saver/_write_gatekeeper.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/pipelines/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/pipelines/base.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/pipelines/template.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/py.typed +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/readers/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/readers/base.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/storage/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/storage/_fs_registry.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/storage/_storage_manager.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/storage/factory.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/clickhouse_writer/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/clickhouse_writer/_clickhouse_writer.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/common.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/dask_utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/data_utils/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/data_utils/_data_utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/dataframe_utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/date_utils/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/date_utils/_business_days.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/date_utils/_date_utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/date_utils/_file_age_checker.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/file_utils.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/filepath_generator/__init__.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/filepath_generator/_filepath_generator.py +0 -0
- {sibi_flux-2026.1.5 → sibi_flux-2026.1.7}/src/sibi_flux/utils/retry.py +0 -0
**PKG-INFO**

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: sibi-flux
-Version: 2026.1.5
+Version: 2026.1.7
 Summary: Sibi Toolkit: A collection of tools for Data Analysis/Engineering.
 Author: Luis Valverde
 Author-email: Luis Valverde <lvalverdeb@gmail.com>
@@ -52,30 +52,10 @@ Description-Content-Type: text/markdown
 
 **SibiFlux** is a production-grade resilient data engineering ecosystem designed to bridge the gap between local development, distributed computing, and agentic AI workflows. It provides a unified engine for hybrid data loading (batch + streaming), self-healing distributed operations, and native interfaces for AI agents via the Model Context Protocol (MCP).
 
-
-
-
-
-    Router <--> Resources["SibiFlux Resources"]
-    end
-
-    subgraph G2["Solutions Layer (Business Logic)"]
-        Logistics["Logistics Solutions"]
-        Enrichment["Enrichment Pipelines"]
-        Cubes["DataCubes"]
-    end
-
-    subgraph G3["SibiFlux Core Engine"]
-        DfHelper["DfHelper (Unified Loader)"]
-        Cluster["Resilient Dask Cluster"]
-        Managed["ManagedResource Lifecycle"]
-    end
-
-    Resources --> Cubes
-    Logistics --> DfHelper
-    Cubes --> DfHelper
-    DfHelper --> Cluster
-```
+
+
+## Documentation
+Full documentation is available in [src/docs/index.md](src/docs/index.md).
 
 ## Core Architecture
 
````
**README.md**

````diff
@@ -2,30 +2,10 @@
 
 **SibiFlux** is a production-grade resilient data engineering ecosystem designed to bridge the gap between local development, distributed computing, and agentic AI workflows. It provides a unified engine for hybrid data loading (batch + streaming), self-healing distributed operations, and native interfaces for AI agents via the Model Context Protocol (MCP).
 
-
-
-
-
-    Router <--> Resources["SibiFlux Resources"]
-    end
-
-    subgraph G2["Solutions Layer (Business Logic)"]
-        Logistics["Logistics Solutions"]
-        Enrichment["Enrichment Pipelines"]
-        Cubes["DataCubes"]
-    end
-
-    subgraph G3["SibiFlux Core Engine"]
-        DfHelper["DfHelper (Unified Loader)"]
-        Cluster["Resilient Dask Cluster"]
-        Managed["ManagedResource Lifecycle"]
-    end
-
-    Resources --> Cubes
-    Logistics --> DfHelper
-    Cubes --> DfHelper
-    DfHelper --> Cluster
-```
+
+
+## Documentation
+Full documentation is available in [src/docs/index.md](src/docs/index.md).
 
 ## Core Architecture
 
````
**pyproject.toml**

```diff
@@ -1,6 +1,6 @@
 [project]
 name = "sibi-flux"
-version = "2026.1.5"
+version = "2026.1.7"
 description = "Sibi Toolkit: A collection of tools for Data Analysis/Engineering."
 readme = "README.md"
 authors = [
@@ -127,6 +127,21 @@ dc-scan = "python solutions/generators/datacubes/gen_dc.py scan"
 dc-match = "python solutions/generators/datacubes/gen_dc.py match"
 dc-map = "python solutions/generators/datacubes/gen_dc.py map"
 
+# --- Internal Snapshot Tasks ---
+_test_snapshots_core = "python tests/solutions/gendatacubes/test_snapshots.py"
+_test_snapshots_cli = "python tests/solutions/gendatacubes/test_cli_snapshot.py"
+_test_snapshots_env = "python tests/solutions/init/test_env_generator_snapshots.py"
+_test_snapshots_rules = "python tests/solutions/init/test_rule_engine_snapshots.py"
+
+[tool.poe.tasks.test-snapshots]
+help = "Run all regression snapshot tests"
+sequence = ["_test_snapshots_core", "_test_snapshots_cli", "_test_snapshots_env", "_test_snapshots_rules"]
+
+[tool.poe.tasks.update-snapshots]
+help = "Update all regression snapshots (Golden Masters)"
+sequence = ["_test_snapshots_core", "_test_snapshots_cli", "_test_snapshots_env", "_test_snapshots_rules"]
+env = { UPDATE_SNAPSHOTS = "1" }
+
 [tool.poe.tasks.release]
 sequence = ["build","publish"]
 envfile = ".env" # Loads the token for the whole sequence
```
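The two new poe tasks run the same four scripts; the only difference is that `update-snapshots` sets `UPDATE_SNAPSHOTS=1`, so the scripts rewrite their golden masters instead of asserting against them. The test scripts themselves are not part of this diff; the sketch below shows one way such a script might honor the flag (`check_snapshot` and the snapshot location are illustrative, not from the package):

```python
import os
from pathlib import Path

def check_snapshot(name: str, actual: str, snapshot_dir: Path = Path("tests/snapshots")) -> None:
    """Compare `actual` against a stored golden master, or (re)record it."""
    snapshot = snapshot_dir / f"{name}.txt"
    if os.environ.get("UPDATE_SNAPSHOTS") == "1" or not snapshot.exists():
        snapshot.parent.mkdir(parents=True, exist_ok=True)
        snapshot.write_text(actual)  # record the new golden master
        return
    expected = snapshot.read_text()
    assert actual == expected, f"Snapshot mismatch for {name!r}"
```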
**src/sibi_flux/cli.py**

```diff
@@ -34,6 +34,42 @@ def init(
     initialize_project(project_name, lib, app)
 
 
+@app.command()
+def create_app(
+    name: str = typer.Argument(..., help="Name of the application to create"),
+):
+    """
+    Create a new application within an existing Sibi Flux project.
+
+    Generates standard directory structure in `<project_root>/<name>`.
+    """
+    from sibi_flux.init.app import init_app
+    init_app(name)
+
+
+@app.command()
+def create_cubes(
+    app_name: str = typer.Argument(..., help="Name of the application"),
+):
+    """
+    Generate app-specific Datacube extensions from `<app_name>/datacubes/datacubes.yaml`.
+    """
+    from sibi_flux.init.cube_extender import create_cubes
+    create_cubes(app_name)
+
+
+@app.command()
+def propose_cubes(
+    db_domain: str = typer.Argument(..., help="Database domain to filter by (e.g., 'ibis_dev')"),
+    app_name: str = typer.Argument(..., help="Name of the target application"),
+):
+    """
+    Scan global registry for datacubes in <db_domain> and add them to <app_name>/datacubes/datacubes.yaml.
+    """
+    from sibi_flux.init.cube_proposer import propose_cubes
+    propose_cubes(db_domain, app_name)
+
+
 @app.command()
 def env(
     project_path: Path = typer.Argument(Path("."), help="Project root directory"),
```
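Typer derives command names from function names (underscores become hyphens by default), so the three additions should be callable as `create-app`, `create-cubes`, and `propose-cubes`. A quick way to exercise them, sketched with Typer's test runner and example arguments:

```python
from typer.testing import CliRunner
from sibi_flux.cli import app  # the Typer app the new commands are registered on

runner = CliRunner()
runner.invoke(app, ["create-app", "logistics"])                 # scaffold an app
runner.invoke(app, ["propose-cubes", "ibis_dev", "logistics"])  # pull registry entries into it
runner.invoke(app, ["create-cubes", "logistics"])               # generate the extensions
```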
**src/sibi_flux/datacube/cli.py**

```diff
@@ -258,6 +258,42 @@ def sync(
 
     # Start with empty/default registry
     config_data = _load_and_resolve_config(config_path)
+
+    # Load existing Global Registry to preserve manual edits (e.g. custom_name)
+    # We must flatten the grouped structure (by config object) into a single tables dict
+    params = context.params or {}
+    reg_rel_path = params.get("paths", {}).get("repositories", {}).get(
+        "global_datacube_registry_file"
+    ) or params.get("global_datacube_registry_file")
+
+    if reg_rel_path:
+        reg_file = Path(reg_rel_path)
+        if not reg_file.is_absolute():
+            try:
+                # Heuristic: config_path is in generators/datacubes/, project root is 3 levels up
+                prj_root = config_path.parent.parent.parent
+                reg_file = prj_root / reg_rel_path
+            except Exception:
+                reg_file = Path.cwd() / reg_rel_path
+
+        if reg_file.exists():
+            try:
+                with open(reg_file, "r") as rf:
+                    existing_reg_data = yaml.safe_load(rf) or {}
+
+                flat_tables = {}
+                for grp, tbls in existing_reg_data.items():
+                    if isinstance(tbls, dict):
+                        for t, t_meta in tbls.items():
+                            # Inject the config object (group key) so DatacubeRegistry knows the connection
+                            t_meta["connection_obj"] = grp
+                            flat_tables[t] = t_meta
+
+                config_data["tables"] = flat_tables
+                console.print(f"[dim]Loaded {len(flat_tables)} existing registry entries for merge preservation.[/dim]")
+            except Exception as e:
+                console.print(f"[yellow]Warning: Could not load existing registry: {e}[/yellow]")
+
     registry = DatacubeRegistry(config_data, params=context.params)
 
     # --- Aggregation Phase ---
```
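On disk the registry groups tables under their config object, while `DatacubeRegistry` works off a single flat `tables` dict; a minimal illustration of the flattening step with made-up registry data:

```python
# Made-up registry data in the grouped on-disk shape.
grouped = {
    "ibis_dev": {"products": {"class_name": "ProductsDc", "custom_name": "CatalogDc"}},
    "ibis_ops": {"orders": {"class_name": "OrdersDc", "custom_name": None}},
}

flat_tables = {}
for grp, tbls in grouped.items():
    for t, t_meta in tbls.items():
        t_meta["connection_obj"] = grp  # remember which config object owns the table
        flat_tables[t] = t_meta

# flat_tables["products"]["connection_obj"] == "ibis_dev"
# flat_tables["orders"]["connection_obj"] == "ibis_ops"
```

Note that this flattening assumes table names are unique across groups; a duplicate name in two groups would silently keep only the last one seen.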
**src/sibi_flux/datacube/cli.py** (continued)

```diff
@@ -378,9 +414,16 @@ def sync(
         imp = [db_imp] if db_imp else registry.global_imports
         return resolve_db_url(conf_name, imp)
 
-    _run_field_map_generation(
+    generated_maps = _run_field_map_generation(
         context, config_path, databases, get_url_safe, force=force
     )
+
+    if generated_maps:
+        for table_name, mod_path in generated_maps.items():
+            if table_name in registry.tables:
+                # console.print(f"[green]DEBUG: Updating {table_name} with field_map {mod_path}[/green]")
+                registry.tables[table_name]["field_map"] = mod_path
+
     # Ensure new modules are picked up
     importlib.invalidate_caches()
 
```
```diff
@@ -435,6 +478,35 @@ def sync(
         is_append = False
         existing_content = ""
 
+        # --- Registry Collection (Always run, even if skipped) ---
+        # Structure: {conf_obj: {table_name: {class_name: ..., path: ...}}}
+        for item in items:
+            t_name = item[0]
+            conf_obj = item[1]
+            cls_n = item[4]
+            # Calculate path relative to project root
+            try:
+                if "project_root" not in locals():
+                    project_root = config_path.parent.parent.parent
+                rel_path = file_path.relative_to(project_root)
+            except Exception:
+                rel_path = file_path
+
+            if conf_obj not in generated_registry:
+                generated_registry[conf_obj] = {}
+
+            # Preserve custom_name from existing registry if present
+            existing_meta = registry.get_table_details(t_name)
+            custom_name = existing_meta.get("custom_name")
+
+            entry_data = {
+                "class_name": cls_n,
+                "path": str(rel_path),
+                "custom_name": custom_name,
+            }
+
+            generated_registry[conf_obj][t_name] = entry_data
+
         if file_path.exists() and not force:
             with open(file_path, "r") as f:
                 existing_content = f.read()
```
```diff
@@ -481,12 +553,8 @@ def sync(
         )
 
         if not classes_code:
-            if not is_append:
-                summary_table.add_row(
-                    file_path_str, "0", "[red]Failed (No Classes Generated)[/red]"
-                )
-            else:
-                summary_table.add_row(file_path_str, "0", "[red]Failed to Append[/red]")
+            status_msg = "[red]Failed (No Classes Generated)[/red]" if not is_append else "[red]Failed to Append[/red]"
+            summary_table.add_row(file_path_str, "0", status_msg)
             continue
 
         if not is_append:
```
```diff
@@ -515,29 +583,6 @@ def sync(
         )
         summary_table.add_row(file_path_str, str(len(items)), status_msg)
 
-        # --- Registry Collection ---
-        # Collect metadata for generated datacubes
-        # Structure: {conf_obj: {table_name: {class_name: ..., path: ...}}}
-        for item in items:
-            t_name = item[0]
-            conf_obj = item[1]
-            cls_n = item[4]
-            # Calculate path relative to project root
-            try:
-                if "project_root" not in locals():
-                    project_root = config_path.parent.parent.parent
-                rel_path = file_path.relative_to(project_root)
-            except Exception:
-                rel_path = file_path
-
-            if conf_obj not in generated_registry:
-                generated_registry[conf_obj] = {}
-
-            generated_registry[conf_obj][t_name] = {
-                "class_name": cls_n,
-                "path": str(rel_path),
-            }
-
     console.print(summary_table)
 
     # --- Write Datacube Registry ---
```
```diff
@@ -566,8 +611,9 @@ def sync(
             with open(reg_file, "w") as f:
                 yaml.dump(reg_data, f, sort_keys=False)
 
+            total_tables = sum(len(tables) for tables in generated_registry.values())
             console.print(
-                f"[green]Updated Datacube Registry at {reg_rel_path} ({len(generated_registry)}
+                f"[green]Updated Datacube Registry at {reg_rel_path} ({total_tables} tables across {len(generated_registry)} groups)[/green]"
             )
         except Exception as e:
             console.print(f"[red]Failed to write Datacube Registry: {e}[/red]")
```
```diff
@@ -765,12 +811,29 @@ def discover(
             )
             continue
 
+        # Resolve Registry Path (Target)
+        reg_rel_path = params.get("paths", {}).get("repositories", {}).get(
+            "global_datacube_registry_file"
+        ) or params.get("global_datacube_registry_file")
+
+        real_registry_path = str(config_path)  # Fallback to config if not defined (legacy behavior)
+        if reg_rel_path:
+            if Path(reg_rel_path).is_absolute():
+                real_registry_path = reg_rel_path
+            else:
+                try:
+                    # Anchor to project root
+                    prj_root = config_path.parent.parent.parent
+                    real_registry_path = str(prj_root / reg_rel_path)
+                except Exception:
+                    real_registry_path = str(config_path.parent / reg_rel_path)
+
         orchestrator = DiscoveryOrchestrator(
             field_registry=field_registry,
             params=context.params,
             rules_path=str(rules_path),
             whitelist_path=str(whitelist_path),
-            registry_path=str(config_path),
+            registry_path=real_registry_path,
             db_connection_str=db_conn_str,
             db_config=db_config,
         )
```
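Both the `sync` preload and this `discover` change anchor a relative registry path with the same "three levels up" heuristic. A worked example with hypothetical paths (the file names are illustrative):

```python
from pathlib import Path

# Per the comment in the diff, the config lives in <root>/generators/datacubes/.
config_path = Path("/work/proj/generators/datacubes/params.yaml")
prj_root = config_path.parent.parent.parent            # Path("/work/proj")

reg_rel_path = "repositories/datacubes_registry.yaml"  # illustrative relative path
real_registry_path = str(prj_root / reg_rel_path)
# -> "/work/proj/repositories/datacubes_registry.yaml"
```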
**src/sibi_flux/datacube/cli.py** (continued)

```diff
@@ -1904,6 +1967,9 @@ def whitelist(
             # Merge: update rule defaults only if not set in existing
             merged = rule_meta.copy()
             merged.update(existing_meta)  # Existing overwrites rule
+
+            # Legacy Cleanup: We moved custom_name to Registry
+            merged.pop("custom_name", None)
 
             # Restore calculated paths (Enforce Relative)
             if "datacube_path" in rule_meta:
```
```diff
@@ -2099,6 +2165,8 @@ def _run_field_map_generation(
                 "[yellow]Warning: Could not determine global field_maps_dir for clean build.[/yellow]"
             )
 
+    generated_maps = {}
+
     for db in target_dbs:
         # console.print(f"DEBUG: Processing DB entry: {db} (Type: {type(db)})")
         if isinstance(db, str):
```
```diff
@@ -2223,10 +2291,6 @@ def _run_field_map_generation(
         if not found:
             rules = []
 
-        # console.print(f"DEBUG: Loaded {len(rules)} rules from {rules_path} for {conn_obj}")
-
-        # console.print(f"DEBUG: Loaded {len(rules)} rules from {rules_path}")
-
         # Support List or Dict Format
         scoped_data = registry_data.get(conn_obj, {})
         if isinstance(scoped_data, list):
```
```diff
@@ -2448,6 +2512,21 @@ def _run_field_map_generation(
             with open(target_file, "w") as f:
                 f.write("\n".join(lines))
 
+            # Calculate Import Path
+            try:
+                # Ensure we get relative path to project root (which should be sys.path root)
+                if "project_root" not in locals():
+                    project_root = Path.cwd()
+
+                rel_py_path = target_file.relative_to(project_root)
+                module_path = str(rel_py_path.with_suffix("")).replace("/", ".")
+                generated_maps[table_name] = f"{module_path}.field_map"
+            except ValueError:
+                # Fallback if outside project root?
+                pass
+            except Exception as e:
+                pass
+
         except Exception as e:
             console.print(f"[red]Error processing {table_name}: {e}[/red]")
             continue
```
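A worked example of the dotted import-path calculation above, with illustrative paths. Note that `str(...).replace("/", ".")` assumes POSIX separators; joining `rel_py_path.with_suffix("").parts` with "." would be platform-neutral:

```python
from pathlib import Path

project_root = Path("/work/proj")
target_file = Path("/work/proj/dataobjects/field_maps/ibis_dev/products.py")

rel_py_path = target_file.relative_to(project_root)  # dataobjects/field_maps/ibis_dev/products.py
module_path = str(rel_py_path.with_suffix("")).replace("/", ".")
# -> "dataobjects.field_maps.ibis_dev.products"
entry = f"{module_path}.field_map"                   # value stored in generated_maps
```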
**src/sibi_flux/datacube/cli.py** (continued)

```diff
@@ -2464,6 +2543,8 @@ def _run_field_map_generation(
         except Exception as e:
             console.print(f"[red]Failed to save Global Field Repository: {e}[/red]")
 
+    return generated_maps
+
 
 if __name__ == "__main__":
     app()
```
**src/sibi_flux/datacube/generator.py**

```diff
@@ -531,6 +531,15 @@ class DatacubeRegistry:
             or self.config.get("class_suffix")
             or self.params.get("class_suffix", "Dc")
         )
+        self._enforce_custom_names()
+
+    def _enforce_custom_names(self) -> None:
+        """
+        Ensures that if custom_name is set, it overrides class_name explicitly.
+        """
+        for table, meta in self.tables.items():
+            if meta.get("custom_name"):
+                meta["class_name"] = meta["custom_name"]
 
     def get_table_details(self, table_name: str) -> dict[str, Any]:
         return self.tables.get(table_name, {})
@@ -573,7 +582,10 @@ class DatacubeRegistry:
             elif k not in existing:
                 existing[k] = v
 
-        if "class_name" not in existing:
+        # Override class_name if custom_name is set
+        if existing.get("custom_name"):
+            existing["class_name"] = existing["custom_name"]
+        elif "class_name" not in existing:
             existing["class_name"] = new_details.get("class_name")
 
         self.tables[table] = existing
```
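`_enforce_custom_names` is a small normalization pass over the table metadata; with made-up entries it behaves like this:

```python
# Made-up table metadata mirroring the registry shape.
tables = {
    "products": {"class_name": "ProductsDc", "custom_name": "CatalogDc"},
    "orders": {"class_name": "OrdersDc"},
}

for table, meta in tables.items():
    if meta.get("custom_name"):
        meta["class_name"] = meta["custom_name"]

assert tables["products"]["class_name"] == "CatalogDc"  # custom name wins
assert tables["orders"]["class_name"] == "OrdersDc"     # untouched
```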
**src/sibi_flux/datacube/orchestrator.py**

```diff
@@ -554,8 +554,23 @@ class DiscoveryOrchestrator:
                 "[yellow]Prune active: Registry replaced with discovery results.[/]"
             )
         else:
-            # Merge:
-
+            # Smart Merge: Preserve 'custom_name' and 'class_name' from existing entries
+            # if they are not explicitly overridden by the new entry.
+            for table, new_meta in new_entries.items():
+                if table in current_data["tables"]:
+                    existing = current_data["tables"][table]
+
+                    # 1. Preserve custom_name if new entry doesn't specify one
+                    if not new_meta.get("custom_name") and existing.get("custom_name"):
+                        new_meta["custom_name"] = existing["custom_name"]
+
+                    # 2. Also preserve class_name (since it's driven by custom_name)
+                    # We only preserve class_name if custom_name was preserved
+                    # AND new entry didn't explicitly change class_name logic (unlikely unless configured)
+                    if existing.get("class_name"):
+                        new_meta["class_name"] = existing["class_name"]
+
+                current_data["tables"][table] = new_meta
 
         # Sort tables for readability
         current_data["tables"] = dict(sorted(current_data["tables"].items()))
```
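The net effect of the smart merge is that a manual rename recorded in the registry survives a re-discovery run; a self-contained sketch with made-up entries:

```python
current_tables = {"products": {"class_name": "CatalogDc", "custom_name": "CatalogDc"}}
new_entries = {"products": {"class_name": "ProductsDc"}}  # fresh discovery result

for table, new_meta in new_entries.items():
    if table in current_tables:
        existing = current_tables[table]
        if not new_meta.get("custom_name") and existing.get("custom_name"):
            new_meta["custom_name"] = existing["custom_name"]
        if existing.get("class_name"):
            new_meta["class_name"] = existing["class_name"]
    current_tables[table] = new_meta

assert current_tables["products"]["class_name"] == "CatalogDc"  # rename preserved
```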
**src/sibi_flux/init/app.py** (new file)

```diff
@@ -0,0 +1,111 @@
+
+from pathlib import Path
+import os
+from rich.console import Console
+import typer
+import yaml
+
+console = Console()
+
+def init_app(name: str) -> None:
+    """
+    Initialize a new application within the current project root.
+
+    Args:
+        name: The name of the application to create (e.g., 'inventory', 'pricing').
+    """
+
+    # 1. Validation: Ensure we are in a Sibi Flux project root (check for pyproject.toml as heuristic)
+    cwd = Path(os.getcwd())
+    if not (cwd / "pyproject.toml").exists():
+        console.print("[yellow]Warning: pyproject.toml not found. Are you in a project root?[/yellow]")
+
+    app_dir = cwd / name
+
+    if app_dir.exists():
+        console.print(f"[red]Error: Application directory '{name}' already exists.[/red]")
+        raise typer.Exit(code=1)
+
+    console.print(f"[bold blue]Initializing new application: {name}[/bold blue]")
+
+    # 2. Create Directory Structure
+    structure = [
+        "api",
+        "datacubes",
+        "readers",
+        "aggregators",
+    ]
+
+    app_dir.mkdir()
+    (app_dir / "__init__.py").touch()
+
+    for folder in structure:
+        path = app_dir / folder
+        path.mkdir()
+        (path / "__init__.py").touch()
+
+    # 2.1 Create datacubes extension registry template
+    datacubes_yaml = app_dir / "datacubes" / "datacubes.yaml"
+    template_yaml = """
+cubes:
+  # Define your app-specific datacube extensions here
+  # - source: dataobjects.gencubes.ibis_dev.products.products_cubes.ProductsDc
+  #   name: LogisticsProductsDc
+  #   module: products  # -> logistics/datacubes/products.py
+"""
+    datacubes_yaml.write_text(template_yaml.strip())
+
+    # 3. Create Basic Router Template
+    router_template = f"""
+from fastapi import APIRouter
+
+router = APIRouter()
+
+@router.get("/")
+async def root():
+    return {{"message": "Hello from {name}!"}}
+"""
+    (app_dir / "api" / "main.py").write_text(router_template.strip())
+
+    # 4. Register in conf/apps.yaml
+    conf_dir = cwd / "conf"
+    if not conf_dir.exists():
+        conf_dir.mkdir()
+
+    apps_yaml_path = conf_dir / "apps.yaml"
+
+    apps_data = {"apps": []}
+    if apps_yaml_path.exists():
+        try:
+            with open(apps_yaml_path, "r") as f:
+                loaded = yaml.safe_load(f)
+            if loaded and isinstance(loaded, dict) and "apps" in loaded:
+                apps_data = loaded
+            elif loaded is None:
+                pass  # empty file
+            else:
+                console.print(f"[yellow]Warning: conf/apps.yaml has unexpected structure. Initializing with empty list.[/yellow]")
+        except Exception as e:
+            console.print(f"[yellow]Warning: Could not read existing apps.yaml: {e}[/yellow]")
+
+    if name not in apps_data["apps"]:
+        apps_data["apps"].append(name)
+        with open(apps_yaml_path, "w") as f:
+            yaml.dump(apps_data, f, default_flow_style=False)
+        console.print(f"[green]Registered '{name}' in conf/apps.yaml[/green]")
+    else:
+        console.print(f"[yellow]App '{name}' already registered in conf/apps.yaml[/yellow]")
+
+    # 5. Success Message & Instructions
+    console.print(f"[bold green]Successfully created application '{name}'![/bold green]")
+    console.print(f"Location: {name}/")
+    console.print("\n[yellow]Next Steps:[/yellow]")
+    console.print(f"1. Register your new router in [bold]main.py[/bold] (or wherever your app is defined):")
+
+    code_snippet = f"""
+from {name}.api.main import router as {name}_router
+
+app.include_router({name}_router, prefix="/{name}", tags=["{name}"])
+"""
+    console.print(code_snippet)
+
```
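`init_app` can also be invoked directly rather than through the CLI; an example with a hypothetical app name:

```python
from sibi_flux.init.app import init_app

init_app("logistics")
# Creates logistics/{api,datacubes,readers,aggregators}/ with __init__.py files,
# writes logistics/datacubes/datacubes.yaml and logistics/api/main.py,
# and appends "logistics" to conf/apps.yaml.
```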