sibi-flux 2026.1.1__py3-none-any.whl → 2026.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sibi_flux/cli.py +45 -0
- sibi_flux/config/settings.py +7 -7
- sibi_flux/init/__init__.py +0 -0
- sibi_flux/init/core.py +159 -0
- sibi_flux/init/discovery_updater.py +99 -0
- sibi_flux/init/env.py +86 -0
- sibi_flux/init/env_engine.py +151 -0
- sibi_flux/init/env_generator.py +554 -0
- sibi_flux/init/templates/__init__.py +0 -0
- sibi_flux/init/templates/discovery_params.yaml +45 -0
- sibi_flux/init/templates/gen_dc.py +137 -0
- sibi_flux/init/templates/property_template.yaml +10 -0
- {sibi_flux-2026.1.1.dist-info → sibi_flux-2026.1.2.dist-info}/METADATA +42 -1
- {sibi_flux-2026.1.1.dist-info → sibi_flux-2026.1.2.dist-info}/RECORD +16 -4
- sibi_flux-2026.1.2.dist-info/entry_points.txt +3 -0
- {sibi_flux-2026.1.1.dist-info → sibi_flux-2026.1.2.dist-info}/WHEEL +0 -0
sibi_flux/cli.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import typer
from typing import Optional
from pathlib import Path
from rich.console import Console
from sibi_flux.init.core import initialize_project

app = typer.Typer(help="Sibi Flux CLI")
console = Console()


@app.callback()
def callback():
    """
    Sibi Flux CLI
    """


@app.command()
def init(
    project_name: str = typer.Argument(..., help="Name of the project to create"),
    lib: bool = typer.Option(False, "--lib", help="Initialize as a library project (passed to uv init)"),
    as_app: bool = typer.Option(False, "--app", help="Initialize as an application project (passed to uv init)"),
):
    """
    Initialize a new Sibi Flux project.

    Creates a new directory <project_name>, initializes it with 'uv',
    and adds 'sibi-flux' as a dependency.
    """
    # The option is still exposed as --app on the command line; the Python
    # parameter is named `as_app` so it does not shadow the module-level
    # Typer instance `app`.
    initialize_project(project_name, lib, as_app)


@app.command()
def env(
    project_path: Path = typer.Argument(Path("."), help="Project root directory"),
    env_file: Optional[Path] = typer.Option(None, "--env-file", "-e", help="Path to environment file (defaults to .env)"),
    cleanup: bool = typer.Option(False, "--cleanup", help="Remove existing configuration files"),
    production: bool = typer.Option(False, "--production", "-p", help="Generate production skeleton (no hardcoded values)"),
):
    """
    Initialize configuration files (settings.py, credentials) based on .env
    """
    # Imported lazily so the import cost (and any optional dependencies of
    # env generation) are only paid when this command actually runs.
    from sibi_flux.init.env import init_env

    init_env(project_path, env_file, cleanup=cleanup, production_mode=production)


if __name__ == "__main__":
    app()
sibi_flux/config/settings.py
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
from typing import Optional, Any
|
|
1
|
+
from typing import Optional, Any, ClassVar
|
|
2
2
|
from pydantic import SecretStr
|
|
3
3
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
|
4
4
|
|
|
@@ -9,6 +9,8 @@ class SibiBaseSettings(BaseSettings):
|
|
|
9
9
|
model_config = SettingsConfigDict(
|
|
10
10
|
env_file=".env", env_file_encoding="utf-8", extra="ignore"
|
|
11
11
|
)
|
|
12
|
+
|
|
13
|
+
conf_name: ClassVar[str] = ""
|
|
12
14
|
|
|
13
15
|
|
|
14
16
|
class FsSettings(SibiBaseSettings):
|
|
@@ -84,13 +86,8 @@ class DatabaseSettings(SibiBaseSettings):
|
|
|
84
86
|
"""Generic SQL Database settings."""
|
|
85
87
|
|
|
86
88
|
db_url: str = "sqlite:///:memory:"
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
class ClickhouseBaseSettings(SibiBaseSettings):
|
|
90
|
-
"""Base settings for ClickHouse connection."""
|
|
91
|
-
|
|
92
89
|
host: str = "localhost"
|
|
93
|
-
port: int =
|
|
90
|
+
port: int = 5432
|
|
94
91
|
database: str = "default"
|
|
95
92
|
user: str = "default"
|
|
96
93
|
password: SecretStr = SecretStr("secret")
|
|
@@ -102,9 +99,12 @@ class ClickhouseBaseSettings(SibiBaseSettings):
|
|
|
102
99
|
"dbname": self.database,
|
|
103
100
|
"user": self.user,
|
|
104
101
|
"password": self.password.get_secret_value() if self.password else None,
|
|
102
|
+
"db_url": self.db_url,
|
|
105
103
|
}
|
|
106
104
|
|
|
107
105
|
|
|
106
|
+
|
|
107
|
+
|
|
108
108
|
class RedisBaseSettings(SibiBaseSettings):
|
|
109
109
|
"""Base settings for Redis connection."""
|
|
110
110
|
|
|
File without changes
|
sibi_flux/init/core.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
import subprocess
import os
import sys
from pathlib import Path
from rich.console import Console
import typer

console = Console()

# Development dependencies installed into every new project.
DEV_DEPS = ["black", "notebook", "pytest", "ruff", "httpx", "poethepoet", "PyYAML"]

# Default .gitignore written when the project does not already have one.
GITIGNORE_TEMPLATE = "# Sibi Flux\n.env\n.env.local\n.env.*\n__pycache__/\n*.pyc\n.DS_Store\n"


def _read_template(name: str) -> str:
    """Read a packaged template file from sibi_flux.init.templates as text."""
    try:
        from importlib import resources as importlib_resources
    except ImportError:
        import importlib_resources  # type: ignore
    try:
        # Modern API (Python >= 3.9).
        return importlib_resources.files("sibi_flux.init.templates").joinpath(name).read_text()
    except AttributeError:
        # Fallback for Python < 3.9.
        return importlib_resources.read_text("sibi_flux.init.templates", name)


def _run_uv_init(project_name: str, lib: bool) -> None:
    """Run `uv init <project_name>` with --lib, or --app by default."""
    cmd = ["uv", "init", project_name]
    # Default behavior: an application project unless --lib was requested.
    cmd.append("--lib" if lib else "--app")
    console.print(f"Running: {' '.join(cmd)}")
    subprocess.check_call(cmd)


def _add_dependencies(project_path: Path) -> None:
    """Add sibi-flux (pinned to the installed version when known) plus dev deps."""
    console.print("Adding sibi-flux dependency...")
    try:
        from importlib.metadata import version

        pkg_spec = f"sibi-flux>={version('sibi-flux')}"
    except Exception:
        # sibi-flux is not installed as a distribution; ask uv for the latest.
        pkg_spec = "sibi-flux@latest"

    subprocess.check_call(["uv", "add", pkg_spec], cwd=project_path)

    console.print(f"Adding dev dependencies: {', '.join(DEV_DEPS)}...")
    subprocess.check_call(["uv", "add", "--dev"] + DEV_DEPS, cwd=project_path)


def _create_scaffolding(project_path: Path) -> None:
    """Create conf/, dataobjects/ and generators/datacubes/ with .gitkeep files."""
    (project_path / "conf").mkdir(exist_ok=True)
    (project_path / "dataobjects").mkdir(exist_ok=True)
    (project_path / "generators" / "datacubes").mkdir(parents=True, exist_ok=True)

    (project_path / "conf" / ".gitkeep").touch()
    (project_path / "dataobjects" / ".gitkeep").touch()
    (project_path / "generators" / ".gitkeep").touch()
    # generators/datacubes/ needs no .gitkeep: template files are written there.


def _write_templates(project_path: Path) -> None:
    """Copy generator templates into generators/datacubes/, adapting legacy paths."""
    datacubes = project_path / "generators" / "datacubes"

    discovery = _read_template("discovery_params.yaml")
    # Rewrite paths from the legacy "solutions" layout to the new project layout.
    discovery = discovery.replace("solutions/dataobjects/", "dataobjects/")
    discovery = discovery.replace("solutions.conf", "conf")
    discovery = discovery.replace(".env.linux", ".env.local")
    (datacubes / "discovery_params.yaml").write_text(discovery)

    (datacubes / "property_template.yaml").write_text(_read_template("property_template.yaml"))
    (datacubes / "gen_dc.py").write_text(_read_template("gen_dc.py"))


def _ensure_gitignore(project_path: Path) -> None:
    """Create a default .gitignore, or append env entries to an existing one."""
    gitignore_path = project_path / ".gitignore"
    if not gitignore_path.exists():
        # Write the full default template for new projects. (Previously the
        # append branch created the file first, so this template — with
        # __pycache__/, *.pyc, .DS_Store — was never actually written.)
        gitignore_path.write_text(GITIGNORE_TEMPLATE)
        return

    content = gitignore_path.read_text()
    if ".env" not in content:
        with open(gitignore_path, "a") as f:
            if content and not content.endswith("\n"):
                f.write("\n")
            f.write("\n# Environment variables\n.env\n.env.local\n.env.*\n")


def _append_poe_tasks(project_path: Path) -> None:
    """Append the standard [tool.poe.tasks] section to pyproject.toml."""
    pyproject_path = project_path / "pyproject.toml"
    if not pyproject_path.exists():
        return
    with open(pyproject_path, "a") as f:
        f.write("\n\n[tool.poe.tasks]\n")
        f.write('dc-sync = "python generators/datacubes/gen_dc.py sync"\n')
        f.write('dc-init = "python generators/datacubes/gen_dc.py init"\n')
        f.write('dc-discover = "python generators/datacubes/gen_dc.py discover"\n')
        f.write('dc-scan = "python generators/datacubes/gen_dc.py scan"\n')
        f.write('dc-match = "python generators/datacubes/gen_dc.py match"\n')
        f.write('dc-map = "python generators/datacubes/gen_dc.py map"\n')


def initialize_project(
    project_name: str,
    lib: bool = False,
    app: bool = False,
) -> None:
    """
    Initialize a new Sibi Flux project with the standard directory structure
    and configuration.

    Args:
        project_name: The name of the project directory to create.
        lib: If True, initialize as a library project via `uv init --lib`.
        app: If True, initialize as an application project via `uv init --app`.
            Note: when `lib` is False the project defaults to `--app` anyway
            (aligned with CLI defaults), so this flag is currently informational.

    Raises:
        typer.Exit: if the target directory already exists, a subprocess
            fails, or the `uv` executable is not on PATH.
    """
    project_path = Path(os.getcwd()) / project_name

    if project_path.exists():
        console.print(f"[red]Error: Directory '{project_name}' already exists.[/red]")
        raise typer.Exit(code=1)

    console.print(f"[bold blue]Initializing Sibi Flux project: {project_name}[/bold blue]")

    try:
        _run_uv_init(project_name, lib)
        _add_dependencies(project_path)
        _create_scaffolding(project_path)
        _write_templates(project_path)

        # Empty local env file for developers to fill in.
        (project_path / ".env.local").touch()

        _ensure_gitignore(project_path)
        _append_poe_tasks(project_path)

        console.print(f"[bold green]Successfully initialized {project_name}![/bold green]")
        console.print("Created directories: conf/, dataobjects/, generators/datacubes/")
        console.print("Created files: .env.local, .gitignore, generators/datacubes/[discovery_params.yaml, property_template.yaml, gen_dc.py]")
        console.print(f"cd {project_name}")

    except subprocess.CalledProcessError as e:
        console.print(f"[red]Command failed with exit code {e.returncode}[/red]")
        raise typer.Exit(code=e.returncode)
    except FileNotFoundError:
        console.print("[red]Error: 'uv' command not found. Please ensure uv is installed and in your PATH.[/red]")
        raise typer.Exit(code=1)
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import yaml
from pathlib import Path
from typing import List, Dict, Any
from sibi_flux.init.env_engine import EnvFile, EnvSection
from rich.console import Console

console = Console()


class DiscoveryParamsUpdater:
    """
    Updates generators/datacubes/discovery_params.yaml to include
    database connections found in the generated env configuration.
    """

    # Env-section types that represent SQL database connections.
    _DB_TYPES = {"POSTGRES", "MYSQL", "CLICKHOUSE", "SQLALCHEMY_DATABASE"}

    @staticmethod
    def _is_database(section: EnvSection) -> bool:
        """Return True when `section` describes a database connection."""
        return section.type.upper() in DiscoveryParamsUpdater._DB_TYPES

    @staticmethod
    def _build_entry(section: EnvSection) -> Dict[str, Any]:
        """Build the discovery_params database entry for one env section."""
        db_id = section.name.lower()

        conf_name = f"{db_id}_conf"
        if section.type.upper() == "CLICKHOUSE":
            conf_name = "clickhouse_config"

        # Use the first *_DATABASE (or bare DATABASE) var value as db_domain.
        # Keys in EnvSection are fully qualified (e.g. REPLICA_DATABASE), so
        # we match on the suffix; fall back to the section id when absent.
        db_domain = db_id
        for var in section.vars:
            if var.key.endswith("_DATABASE") or var.key == "DATABASE":
                db_domain = var.value
                break

        return {
            "id": db_id,
            "connection_ref": conf_name,
            "db_domain": db_domain,
            "import_spec": {
                "module": "conf.credentials",
                "symbol": conf_name,
            },
        }

    @staticmethod
    def update(project_path: Path, env_data: EnvFile) -> None:
        """
        Upsert database entries from `env_data` into discovery_params.yaml.

        Best-effort: any failure is reported on the console, never raised.
        """
        params_path = project_path / "generators" / "datacubes" / "discovery_params.yaml"
        if not params_path.exists():
            console.print(f"[yellow]Warning: {params_path} not found. Skipping discovery params update.[/yellow]")
            return

        try:
            with open(params_path, "r") as f:
                data = yaml.safe_load(f) or {}

            db_sections = [s for s in env_data.sections if DiscoveryParamsUpdater._is_database(s)]

            if not db_sections:
                # Only rewrite the file when there is actually a section to drop.
                if "databases" in data:
                    del data["databases"]
                    with open(params_path, "w") as f:
                        yaml.dump(data, f, sort_keys=False, default_flow_style=False)
                    console.print("[yellow]Removed databases section from discovery_params.yaml (no databases found in env).[/yellow]")
                return

            data.setdefault("databases", [])

            # Index existing entries by id for upsert behavior (no duplicates).
            existing_dbs = {db.get("id"): db for db in data["databases"]}

            for section in db_sections:
                entry = DiscoveryParamsUpdater._build_entry(section)
                if entry["id"] in existing_dbs:
                    # Overwrite the managed keys, preserving any extra keys
                    # a user may have added by hand.
                    existing_dbs[entry["id"]].update(entry)
                else:
                    data["databases"].append(entry)

            # sort_keys=False roughly preserves the file's original ordering.
            with open(params_path, "w") as f:
                yaml.dump(data, f, sort_keys=False, default_flow_style=False)

            console.print(f"[green]Updated discovery_params.yaml with {len(db_sections)} databases.[/green]")

        except Exception as e:
            # Deliberate best-effort: discovery params are a convenience file,
            # so a failure here must not abort env generation.
            console.print(f"[red]Failed to update discovery_params.yaml: {e}[/red]")
|
sibi_flux/init/env.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
from pathlib import Path
from rich.console import Console
from typing import Optional

from sibi_flux.init.env_engine import EnvParser
from sibi_flux.init.env_generator import EnvGenerator

console = Console()


def _cleanup_conf(conf_dir: Path) -> None:
    """Remove generated configuration files, reporting what was deleted."""
    settings_path = conf_dir / "settings.py"
    creds_path = conf_dir / "credentials" / "__init__.py"

    removed = []
    if settings_path.exists():
        settings_path.unlink()
        removed.append("conf/settings.py")
    if creds_path.exists():
        creds_path.unlink()
        removed.append("conf/credentials/__init__.py")

    if removed:
        console.print(f"[green]Removed: {', '.join(removed)}[/green]")
    else:
        console.print("[yellow]No configuration files found to remove.[/yellow]")


def _load_env_content(env_path: Path, label: str) -> Optional[str]:
    """
    Read an env file and return its stripped content, or None (after printing
    a message using `label`) when it is empty or contains only comments.
    """
    content = env_path.read_text().strip()
    if not content or all(line.strip().startswith('#') for line in content.splitlines() if line.strip()):
        console.print(f"[yellow]Environment file {label} is empty or contains only comments. No action taken.[/yellow]")
        return None
    return content


def init_env(project_path: Path, env_file: Optional[Path] = None, cleanup: bool = False, production_mode: bool = False):
    """
    Initializes configuration files for the project.

    Parses .env file (or defaults) and dynamically generates settings.py and
    credentials.

    Args:
        project_path: Project root directory containing (or receiving) conf/.
        env_file: Explicit environment file; defaults to <project_path>/.env.
        cleanup: If True, remove generated configuration files and return.
        production_mode: If True, generate a 'skeleton' settings.py without
            hardcoded values.
    """
    conf_dir = project_path / "conf"

    if cleanup:
        _cleanup_conf(conf_dir)
        return

    # Resolve and validate the environment file to parse.
    if env_file:
        if not env_file.exists():
            console.print(f"[red]Error: Environment file {env_file} does not exist.[/red]")
            return
        content = _load_env_content(env_file, str(env_file))
        env_filename = env_file.name
    else:
        # No file passed: assume .env in the project root; skip quietly if absent.
        default_env = project_path / ".env"
        if not default_env.exists():
            console.print(f"[yellow]No .env file found at {default_env}. Skipping generation.[/yellow]")
            return
        content = _load_env_content(default_env, ".env")
        env_filename = ".env"

    if content is None:
        return

    env_data = EnvParser.parse(content)

    conf_dir.mkdir(exist_ok=True)

    # 1. settings.py
    settings_code = EnvGenerator.generate_pydantic_code(env_data, env_file_name=env_filename, production_mode=production_mode)
    (conf_dir / "settings.py").write_text(settings_code)
    console.print(f"[green]Created conf/settings.py (parsed {len(env_data.sections)} sections)[/green]")

    # 2. credentials/__init__.py
    creds_dir = conf_dir / "credentials"
    creds_dir.mkdir(exist_ok=True)

    creds_code = EnvGenerator.generate_credentials_code(env_data)
    (creds_dir / "__init__.py").write_text(creds_code)
    console.print("[green]Created conf/credentials/__init__.py[/green]")

    # 3. Update discovery_params.yaml (imported lazily to avoid a hard
    # dependency when only cleanup/parse paths are exercised).
    from sibi_flux.init.discovery_updater import DiscoveryParamsUpdater
    DiscoveryParamsUpdater.update(project_path, env_data)
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
from pydantic import BaseModel, Field
|
|
2
|
+
from typing import List, Dict, Optional, Any
|
|
3
|
+
import re
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from collections import defaultdict
|
|
6
|
+
|
|
7
|
+
class EnvVar(BaseModel):
    """A single KEY=value entry parsed from an env file."""

    key: str
    value: str
    comment: Optional[str] = None  # inline "# ..." comment text, if any


class EnvSection(BaseModel):
    """A named group of env vars introduced by a '# [NAME](TYPE)' header comment."""

    name: str  # e.g. "MAIN_DB"
    type: str = "GENERIC"  # e.g. "POSTGRES", "S3"
    vars: List[EnvVar]
    # NOTE(review): resource_id is never set in this module — presumably an
    # external resource identifier filled in elsewhere; confirm before relying on it.
    resource_id: Optional[str] = None


class EnvFile(BaseModel):
    """Parsed representation of an entire env file."""

    sections: List[EnvSection]
    orphan_vars: List[EnvVar] = Field(default_factory=list)  # vars outside any section
|
|
21
|
+
|
|
22
|
+
class EnvParser:
    """Parses env-file text into an EnvFile model and renders it back to text."""

    # Matches a section header comment: "# [NAME]" or "# [NAME](TYPE)".
    SECTION_REGEX = re.compile(r"^#\s*\[(.*)\]\s*(?:\((.*)\))?")

    # Prefixes whose name alone determines the inferred section type.
    _KNOWN_TYPES = {"POSTGRES", "MYSQL", "CLICKHOUSE", "REDIS", "S3", "AIRFLOW",
                    "OSMNX", "SENTINEL", "OPENOBSERVE", "MCP", "DASK"}

    @staticmethod
    def parse(content: str) -> EnvFile:
        """
        Parse raw env-file text into an EnvFile.

        Lines are grouped into the most recent "# [NAME](TYPE)" section;
        variables seen before any section header become orphans and are then
        grouped heuristically by key prefix.
        """
        sections: List[EnvSection] = []
        orphans: List[EnvVar] = []
        current_section: Optional[EnvSection] = None

        for raw_line in content.splitlines():
            line = raw_line.strip()
            if not line:
                continue

            # Section header detection.
            sec_match = EnvParser.SECTION_REGEX.match(line)
            if sec_match:
                if current_section:
                    sections.append(current_section)
                # group(2) is None when the optional "(TYPE)" part is absent,
                # so `or "GENERIC"` covers both the missing and empty cases.
                current_section = EnvSection(
                    name=sec_match.group(1),
                    type=sec_match.group(2) or "GENERIC",
                    vars=[],
                )
                continue

            # Variable detection.
            if "=" in line and not line.startswith("#"):
                key, val = line.split("=", 1)
                comment = None
                # NOTE(review): a '#' anywhere in the value (even quoted) is
                # treated as an inline comment start — confirm values never
                # legitimately contain '#'.
                if "#" in val:
                    val, comment = val.split("#", 1)
                    comment = comment.strip()

                var = EnvVar(key=key.strip(), value=val.strip(), comment=comment)
                if current_section:
                    current_section.vars.append(var)
                else:
                    orphans.append(var)

        if current_section:
            sections.append(current_section)

        return EnvParser.heuristic_grouping(EnvFile(sections=sections, orphan_vars=orphans))

    @staticmethod
    def _infer_type(prefix: str, vars_list: List[EnvVar]) -> str:
        """Infer a section type for a prefix-grouped set of orphan vars."""
        upper_prefix = prefix.upper()

        # 1. Name-based inference: the prefix itself is a known service name.
        if upper_prefix in EnvParser._KNOWN_TYPES:
            return upper_prefix

        # 2. Key-based inference from the variables' key suffixes.
        upper_keys = [v.key.upper() for v in vars_list]
        suffixes = {
            k[len(upper_prefix) + 1:] if k.startswith(upper_prefix + "_") else k
            for k in upper_keys
        }

        if any("AWS_ACCESS_KEY" in k or "S3_" in k or "FS_KEY" in k for k in upper_keys):
            return "S3"
        if "HOST" in suffixes and "PORT" in suffixes:
            if "DB_INDEX" in suffixes or "REDIS_DB" in suffixes:
                return "REDIS"
            if "DATABASE" in suffixes or "DBNAME" in suffixes or "USER" in suffixes:
                return "POSTGRES"
            return "GENERIC"
        if "API_URL" in suffixes and ("USERNAME" in suffixes or "PASSWORD" in suffixes):
            return "AIRFLOW"
        return "GENERIC"

    @staticmethod
    def heuristic_grouping(env_file: EnvFile) -> EnvFile:
        """
        Groups orphan variables into sections based on common prefixes.

        Vars whose keys share the text before the first '_' form one section;
        keys with no '_' remain orphans. Existing sections with a matching
        name absorb the grouped vars (note: this mutates those shared
        EnvSection objects in place).
        """
        if not env_file.orphan_vars:
            return env_file

        groups = defaultdict(list)
        others: List[EnvVar] = []

        for var in env_file.orphan_vars:
            if "_" in var.key:
                groups[var.key.split("_", 1)[0]].append(var)
            else:
                others.append(var)

        new_sections = list(env_file.sections)

        for prefix, vars_list in groups.items():
            existing = next((s for s in new_sections if s.name == prefix), None)
            if existing:
                existing.vars.extend(vars_list)
            else:
                new_sections.append(EnvSection(
                    name=prefix,
                    type=EnvParser._infer_type(prefix, vars_list),
                    vars=vars_list,
                ))

        return EnvFile(sections=new_sections, orphan_vars=others)

    @staticmethod
    def render(env_file: EnvFile) -> str:
        """Render an EnvFile back to env-file text (orphans first, then sections)."""
        lines: List[str] = []

        for var in env_file.orphan_vars:
            line = f"{var.key}={var.value}"
            if var.comment:
                line += f" # {var.comment}"
            lines.append(line)

        if lines:
            lines.append("")

        for section in env_file.sections:
            lines.append(f"# [{section.name}]({section.type})")
            for var in section.vars:
                line = f"{var.key}={var.value}"
                if var.comment:
                    line += f" # {var.comment}"
                lines.append(line)
            lines.append("")

        return "\n".join(lines)
|