nao-core 0.0.29__py3-none-any.whl → 0.0.31__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. nao_core/__init__.py +1 -1
  2. nao_core/bin/fastapi/main.py +21 -2
  3. nao_core/bin/fastapi/test_main.py +156 -0
  4. nao_core/bin/migrations-postgres/0004_input_and_output_tokens.sql +8 -0
  5. nao_core/bin/migrations-postgres/0005_add_project_tables.sql +39 -0
  6. nao_core/bin/migrations-postgres/meta/0004_snapshot.json +847 -0
  7. nao_core/bin/migrations-postgres/meta/0005_snapshot.json +1129 -0
  8. nao_core/bin/migrations-postgres/meta/_journal.json +14 -0
  9. nao_core/bin/migrations-sqlite/0004_input_and_output_tokens.sql +8 -0
  10. nao_core/bin/migrations-sqlite/0005_add_project_tables.sql +38 -0
  11. nao_core/bin/migrations-sqlite/meta/0004_snapshot.json +819 -0
  12. nao_core/bin/migrations-sqlite/meta/0005_snapshot.json +1086 -0
  13. nao_core/bin/migrations-sqlite/meta/_journal.json +14 -0
  14. nao_core/bin/nao-chat-server +0 -0
  15. nao_core/bin/public/assets/{code-block-F6WJLWQG-EQr6mTlQ.js → code-block-F6WJLWQG-TAi8koem.js} +3 -3
  16. nao_core/bin/public/assets/index-BfHcd9Xz.css +1 -0
  17. nao_core/bin/public/assets/index-Mzo9bkag.js +557 -0
  18. nao_core/bin/public/index.html +2 -2
  19. nao_core/commands/chat.py +11 -10
  20. nao_core/commands/debug.py +10 -6
  21. nao_core/commands/init.py +66 -27
  22. nao_core/commands/sync/__init__.py +40 -21
  23. nao_core/commands/sync/accessors.py +219 -141
  24. nao_core/commands/sync/cleanup.py +133 -0
  25. nao_core/commands/sync/providers/__init__.py +30 -0
  26. nao_core/commands/sync/providers/base.py +87 -0
  27. nao_core/commands/sync/providers/databases/__init__.py +17 -0
  28. nao_core/commands/sync/providers/databases/bigquery.py +78 -0
  29. nao_core/commands/sync/providers/databases/databricks.py +79 -0
  30. nao_core/commands/sync/providers/databases/duckdb.py +83 -0
  31. nao_core/commands/sync/providers/databases/postgres.py +78 -0
  32. nao_core/commands/sync/providers/databases/provider.py +123 -0
  33. nao_core/commands/sync/providers/databases/snowflake.py +78 -0
  34. nao_core/commands/sync/providers/repositories/__init__.py +5 -0
  35. nao_core/commands/sync/{repositories.py → providers/repositories/provider.py} +43 -20
  36. nao_core/config/__init__.py +16 -1
  37. nao_core/config/base.py +23 -4
  38. nao_core/config/databases/__init__.py +37 -11
  39. nao_core/config/databases/base.py +7 -0
  40. nao_core/config/databases/bigquery.py +29 -1
  41. nao_core/config/databases/databricks.py +69 -0
  42. nao_core/config/databases/duckdb.py +33 -0
  43. nao_core/config/databases/postgres.py +78 -0
  44. nao_core/config/databases/snowflake.py +115 -0
  45. nao_core/config/exceptions.py +7 -0
  46. nao_core/templates/__init__.py +12 -0
  47. nao_core/templates/defaults/databases/columns.md.j2 +23 -0
  48. nao_core/templates/defaults/databases/description.md.j2 +32 -0
  49. nao_core/templates/defaults/databases/preview.md.j2 +22 -0
  50. nao_core/templates/defaults/databases/profiling.md.j2 +34 -0
  51. nao_core/templates/engine.py +133 -0
  52. {nao_core-0.0.29.dist-info → nao_core-0.0.31.dist-info}/METADATA +9 -4
  53. nao_core-0.0.31.dist-info/RECORD +86 -0
  54. nao_core/bin/public/assets/_chat-layout-BTlqRUE5.js +0 -1
  55. nao_core/bin/public/assets/_chat-layout.index-DOARokp1.js +0 -1
  56. nao_core/bin/public/assets/agentProvider-C6dGIy-H.js +0 -1
  57. nao_core/bin/public/assets/button-By_1dzVx.js +0 -1
  58. nao_core/bin/public/assets/folder-DnRS5rg3.js +0 -1
  59. nao_core/bin/public/assets/index-CElAN2SH.css +0 -1
  60. nao_core/bin/public/assets/index-ZTHASguQ.js +0 -59
  61. nao_core/bin/public/assets/input-CUQA5tsi.js +0 -1
  62. nao_core/bin/public/assets/login-BUQDum3t.js +0 -1
  63. nao_core/bin/public/assets/mermaid-FSSLJTFX-Dc6ZvCPw.js +0 -427
  64. nao_core/bin/public/assets/sidebar-bgEk7Xg8.js +0 -1
  65. nao_core/bin/public/assets/signinForm-CGAhnAkv.js +0 -1
  66. nao_core/bin/public/assets/signup-D2n11La3.js +0 -1
  67. nao_core/bin/public/assets/user-CYl8Tly2.js +0 -1
  68. nao_core/bin/public/assets/utils-DzJYey0s.js +0 -1
  69. nao_core/commands/sync/databases.py +0 -132
  70. nao_core-0.0.29.dist-info/RECORD +0 -69
  71. {nao_core-0.0.29.dist-info → nao_core-0.0.31.dist-info}/WHEEL +0 -0
  72. {nao_core-0.0.29.dist-info → nao_core-0.0.31.dist-info}/entry_points.txt +0 -0
  73. {nao_core-0.0.29.dist-info → nao_core-0.0.31.dist-info}/licenses/LICENSE +0 -0
nao_core/config/base.py CHANGED
@@ -1,5 +1,8 @@
+ import os
+ import re
  from pathlib import Path

+ import dotenv
  import yaml
  from ibis import BaseBackend
  from pydantic import BaseModel, Field, model_validator
@@ -9,6 +12,8 @@ from .llm import LLMConfig
  from .repos import RepoConfig
  from .slack import SlackConfig

+ dotenv.load_dotenv()
+

  class NaoConfig(BaseModel):
      """nao project configuration."""
@@ -43,8 +48,9 @@ class NaoConfig(BaseModel):
      def load(cls, path: Path) -> "NaoConfig":
          """Load the configuration from a YAML file."""
          config_file = path / "nao_config.yaml"
-         with config_file.open() as f:
-             data = yaml.safe_load(f)
+         content = config_file.read_text()
+         content = cls._process_env_vars(content)
+         data = yaml.safe_load(content)
          return cls.model_validate(data)

      def get_connection(self, name: str) -> BaseBackend:
@@ -63,11 +69,14 @@ class NaoConfig(BaseModel):
          """Try to load config from path, returns None if not found or invalid.

          Args:
-             path: Directory containing nao_config.yaml. Defaults to current directory.
+             path: Directory containing nao_config.yaml. Defaults to NAO_DEFAULT_PROJECT_PATH
+                 environment variable if set, otherwise current directory.
          """
          if path is None:
-             path = Path.cwd()
+             default_path = os.environ.get("NAO_DEFAULT_PROJECT_PATH")
+             path = Path(default_path) if default_path else Path.cwd()
          try:
+             os.chdir(path)
              return cls.load(path)
          except (FileNotFoundError, ValueError, yaml.YAMLError):
              return None
@@ -76,3 +85,13 @@ class NaoConfig(BaseModel):
      def json_schema(cls) -> dict:
          """Generate JSON schema for the configuration."""
          return cls.model_json_schema()
+
+     @staticmethod
+     def _process_env_vars(content: str) -> str:
+         regex = re.compile(r"\$\{\{\s*env\(['\"]([^'\"]+)['\"]\)\s*\}\}")
+
+         def replacer(match: re.Match[str]) -> str:
+             env_var = match.group(1)
+             return os.environ.get(env_var, "")
+
+         return regex.sub(replacer, content)
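The new `_process_env_vars` hook means values in `nao_config.yaml` can reference environment variables through a `${{ env('NAME') }}` placeholder, which is substituted before the YAML is parsed (unset variables become empty strings). A minimal sketch of that behaviour; the YAML keys shown are illustrative, not the real nao schema:

```python
import os

from nao_core.config.base import NaoConfig

os.environ["SNOWFLAKE_PASSWORD"] = "s3cret"

# Hypothetical config fragment; actual nao_config.yaml keys may differ.
raw = "databases:\n  - name: warehouse\n    password: ${{ env('SNOWFLAKE_PASSWORD') }}\n"

# The placeholder is replaced with the variable's value before yaml.safe_load runs.
print(NaoConfig._process_env_vars(raw))
# databases:
#   - name: warehouse
#     password: s3cret
```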
nao_core/config/databases/__init__.py CHANGED
@@ -1,20 +1,28 @@
+ from typing import Annotated, Union
+
+ from pydantic import Discriminator, Tag
+
  from .base import AccessorType, DatabaseConfig, DatabaseType
  from .bigquery import BigQueryConfig
+ from .databricks import DatabricksConfig
+ from .duckdb import DuckDBConfig
+ from .postgres import PostgresConfig
+ from .snowflake import SnowflakeConfig

  # =============================================================================
  # Database Config Registry
  # =============================================================================

- # When adding more backends, convert this to a discriminated union:
- # AnyDatabaseConfig = Annotated[
- #     Union[
- #         Annotated[BigQueryConfig, Tag("bigquery")],
- #         Annotated[PostgresConfig, Tag("postgres")],
- #     ],
- #     Discriminator(lambda x: x.get("type", "bigquery")),
- # ]
-
- AnyDatabaseConfig = BigQueryConfig
+ AnyDatabaseConfig = Annotated[
+     Union[
+         Annotated[BigQueryConfig, Tag("bigquery")],
+         Annotated[DatabricksConfig, Tag("databricks")],
+         Annotated[SnowflakeConfig, Tag("snowflake")],
+         Annotated[DuckDBConfig, Tag("duckdb")],
+         Annotated[PostgresConfig, Tag("postgres")],
+     ],
+     Discriminator("type"),
+ ]


  def parse_database_config(data: dict) -> DatabaseConfig:
@@ -22,8 +30,26 @@ def parse_database_config(data: dict) -> DatabaseConfig:
      db_type = data.get("type")
      if db_type == "bigquery":
          return BigQueryConfig.model_validate(data)
+     elif db_type == "duckdb":
+         return DuckDBConfig.model_validate(data)
+     elif db_type == "databricks":
+         return DatabricksConfig.model_validate(data)
+     elif db_type == "snowflake":
+         return SnowflakeConfig.model_validate(data)
+     elif db_type == "postgres":
+         return PostgresConfig.model_validate(data)
      else:
          raise ValueError(f"Unknown database type: {db_type}")


- __all__ = ["AccessorType", "DatabaseConfig", "DatabaseType", "BigQueryConfig", "AnyDatabaseConfig"]
+ __all__ = [
+     "AccessorType",
+     "AnyDatabaseConfig",
+     "BigQueryConfig",
+     "DuckDBConfig",
+     "DatabaseConfig",
+     "DatabaseType",
+     "DatabricksConfig",
+     "SnowflakeConfig",
+     "PostgresConfig",
+ ]
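With the registry now a tagged union instead of a single-backend alias, a raw mapping can be validated straight into the matching config class from its `type` key. A minimal sketch using Pydantic's `TypeAdapter`; the field values are placeholders, and the base `DatabaseConfig` may require fields not visible in this diff:

```python
from pydantic import TypeAdapter

from nao_core.config.databases import AnyDatabaseConfig, DuckDBConfig

adapter = TypeAdapter(AnyDatabaseConfig)

# The "type" discriminator routes the payload to the DuckDB config class.
cfg = adapter.validate_python({"name": "local", "type": "duckdb", "path": ":memory:"})
assert isinstance(cfg, DuckDBConfig)
```

`parse_database_config` in the same module performs the equivalent dispatch with an explicit if/elif chain.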
nao_core/config/databases/base.py CHANGED
@@ -4,12 +4,19 @@ from enum import Enum

  from ibis import BaseBackend
  from pydantic import BaseModel, Field
+ from rich.console import Console
+
+ console = Console()


  class DatabaseType(str, Enum):
      """Supported database types."""

      BIGQUERY = "bigquery"
+     DUCKDB = "duckdb"
+     DATABRICKS = "databricks"
+     SNOWFLAKE = "snowflake"
+     POSTGRES = "postgres"


  class AccessorType(str, Enum):
nao_core/config/databases/bigquery.py CHANGED
@@ -3,8 +3,11 @@ from typing import Literal
  import ibis
  from ibis import BaseBackend
  from pydantic import Field
+ from rich.prompt import Prompt

- from .base import DatabaseConfig
+ from nao_core.config.exceptions import InitError
+
+ from .base import DatabaseConfig, console


  class BigQueryConfig(DatabaseConfig):
@@ -20,6 +23,31 @@ class BigQueryConfig(DatabaseConfig):
      sso: bool = Field(default=False, description="Use Single Sign-On (SSO) for authentication")
      location: str | None = Field(default=None, description="BigQuery location")

+     @classmethod
+     def promptConfig(cls) -> "BigQueryConfig":
+         """Interactively prompt the user for BigQuery configuration."""
+         console.print("\n[bold cyan]BigQuery Configuration[/bold cyan]\n")
+
+         name = Prompt.ask("[bold]Connection name[/bold]", default="bigquery-prod")
+
+         project_id = Prompt.ask("[bold]GCP Project ID[/bold]")
+         if not project_id:
+             raise InitError("GCP Project ID cannot be empty.")
+
+         dataset_id = Prompt.ask("[bold]Default dataset[/bold] [dim](optional, press Enter to skip)[/dim]", default="")
+
+         credentials_path = Prompt.ask(
+             "[bold]Service account JSON path[/bold] [dim](optional, uses ADC if empty)[/dim]",
+             default="",
+         )
+
+         return BigQueryConfig(
+             name=name,
+             project_id=project_id,
+             dataset_id=dataset_id or None,
+             credentials_path=credentials_path or None,
+         )
+
      def connect(self) -> BaseBackend:
          """Create an Ibis BigQuery connection."""
          kwargs: dict = {"project_id": self.project_id}
nao_core/config/databases/databricks.py ADDED
@@ -0,0 +1,69 @@
+ from typing import Literal
+
+ import ibis
+ from ibis import BaseBackend
+ from pydantic import Field
+ from rich.prompt import Prompt
+
+ from nao_core.config.exceptions import InitError
+
+ from .base import DatabaseConfig, console
+
+
+ class DatabricksConfig(DatabaseConfig):
+     """Databricks-specific configuration."""
+
+     type: Literal["databricks"] = "databricks"
+     server_hostname: str = Field(description="Databricks server hostname (e.g., 'adb-xxxx.azuredatabricks.net')")
+     http_path: str = Field(description="HTTP path to the SQL warehouse or cluster")
+     access_token: str = Field(description="Databricks personal access token")
+     catalog: str | None = Field(default=None, description="Unity Catalog name (optional)")
+     schema: str | None = Field(default=None, description="Default schema (optional)")
+
+     @classmethod
+     def promptConfig(cls) -> "DatabricksConfig":
+         """Interactively prompt the user for Databricks configuration."""
+         console.print("\n[bold cyan]Databricks Configuration[/bold cyan]\n")
+
+         name = Prompt.ask("[bold]Connection name[/bold]", default="databricks-prod")
+
+         server_hostname = Prompt.ask("[bold]Server hostname[/bold] [dim](e.g., adb-xxxx.azuredatabricks.net)[/dim]")
+         if not server_hostname:
+             raise InitError("Server hostname cannot be empty.")
+
+         http_path = Prompt.ask("[bold]HTTP path[/bold] [dim](e.g., /sql/1.0/warehouses/xxxx)[/dim]")
+         if not http_path:
+             raise InitError("HTTP path cannot be empty.")
+
+         access_token = Prompt.ask("[bold]Access token[/bold]", password=True)
+         if not access_token:
+             raise InitError("Access token cannot be empty.")
+
+         catalog = Prompt.ask("[bold]Catalog[/bold] [dim](optional, press Enter to skip)[/dim]", default=None)
+
+         schema = Prompt.ask("[bold]Default schema[/bold] [dim](optional, press Enter to skip)[/dim]", default=None)
+
+         return DatabricksConfig(
+             name=name,
+             server_hostname=server_hostname,
+             http_path=http_path,
+             access_token=access_token,
+             catalog=catalog,
+             schema=schema,
+         )
+
+     def connect(self) -> BaseBackend:
+         """Create an Ibis Databricks connection."""
+         kwargs: dict = {
+             "server_hostname": self.server_hostname,
+             "http_path": self.http_path,
+             "access_token": self.access_token,
+         }
+
+         if self.catalog:
+             kwargs["catalog"] = self.catalog
+
+         if self.schema:
+             kwargs["schema"] = self.schema
+
+         return ibis.databricks.connect(**kwargs)
nao_core/config/databases/duckdb.py ADDED
@@ -0,0 +1,33 @@
+ from typing import Literal
+
+ import ibis
+ from ibis import BaseBackend
+ from pydantic import Field
+ from rich.prompt import Prompt
+
+ from .base import DatabaseConfig, console
+
+
+ class DuckDBConfig(DatabaseConfig):
+     """DuckDB-specific configuration."""
+
+     type: Literal["duckdb"] = "duckdb"
+     path: str = Field(description="Path to the DuckDB database file", default=":memory:")
+
+     @classmethod
+     def promptConfig(cls) -> "DuckDBConfig":
+         """Interactively prompt the user for DuckDB configuration."""
+         console.print("\n[bold cyan]DuckDB Configuration[/bold cyan]\n")
+
+         name = Prompt.ask("[bold]Connection name[/bold]", default="duckdb-memory")
+
+         path = Prompt.ask("[bold]Path to the DuckDB database file[/bold]", default=":memory:")
+
+         return DuckDBConfig(name=name, path=path)
+
+     def connect(self) -> BaseBackend:
+         """Create an Ibis DuckDB connection."""
+         return ibis.duckdb.connect(
+             database=self.path,
+             read_only=False if self.path == ":memory:" else True,
+         )
nao_core/config/databases/postgres.py ADDED
@@ -0,0 +1,78 @@
+ from typing import Literal
+
+ import ibis
+ from ibis import BaseBackend
+ from pydantic import Field
+ from rich.prompt import Prompt
+
+ from nao_core.config.exceptions import InitError
+
+ from .base import DatabaseConfig, console
+
+
+ class PostgresConfig(DatabaseConfig):
+     """PostgreSQL-specific configuration."""
+
+     type: Literal["postgres"] = "postgres"
+     host: str = Field(description="PostgreSQL host")
+     port: int = Field(default=5432, description="PostgreSQL port")
+     database: str = Field(description="Database name")
+     user: str = Field(description="Username")
+     password: str = Field(description="Password")
+     schema_name: str | None = Field(default=None, description="Default schema (optional, uses 'public' if not set)")
+
+     @classmethod
+     def promptConfig(cls) -> "PostgresConfig":
+         """Interactively prompt the user for PostgreSQL configuration."""
+         console.print("\n[bold cyan]PostgreSQL Configuration[/bold cyan]\n")
+
+         name = Prompt.ask("[bold]Connection name[/bold]", default="postgres-prod")
+
+         host = Prompt.ask("[bold]Host[/bold]", default="localhost")
+
+         port = Prompt.ask("[bold]Port[/bold]", default="5432")
+         if not port.isdigit():
+             raise InitError("Port must be a valid integer.")
+
+         database = Prompt.ask("[bold]Database name[/bold]")
+         if not database:
+             raise InitError("Database name cannot be empty.")
+
+         user = Prompt.ask("[bold]Username[/bold]")
+         if not user:
+             raise InitError("Username cannot be empty.")
+
+         password = Prompt.ask("[bold]Password[/bold]", password=True)
+
+         schema_name = Prompt.ask(
+             "[bold]Default schema[/bold] [dim](optional, uses 'public' if empty)[/dim]",
+             default="",
+         )
+
+         return PostgresConfig(
+             name=name,
+             host=host,
+             port=int(port),
+             database=database,
+             user=user,
+             password=password,
+             schema_name=schema_name or None,
+         )
+
+     def connect(self) -> BaseBackend:
+         """Create an Ibis PostgreSQL connection."""
+
+         kwargs: dict = {
+             "host": self.host,
+             "port": self.port,
+             "database": self.database,
+             "user": self.user,
+             "password": self.password,
+         }
+
+         if self.schema_name:
+             kwargs["schema"] = self.schema_name
+
+         return ibis.postgres.connect(
+             **kwargs,
+         )
nao_core/config/databases/snowflake.py ADDED
@@ -0,0 +1,115 @@
+ from typing import Literal
+
+ import ibis
+ from cryptography.hazmat.backends import default_backend
+ from cryptography.hazmat.primitives import serialization
+ from ibis import BaseBackend
+ from pydantic import Field
+ from rich.prompt import Confirm, Prompt
+
+ from nao_core.config.exceptions import InitError
+
+ from .base import DatabaseConfig, console
+
+
+ class SnowflakeConfig(DatabaseConfig):
+     """Snowflake-specific configuration."""
+
+     type: Literal["snowflake"] = "snowflake"
+     username: str = Field(description="Snowflake username")
+     account_id: str = Field(description="Snowflake account identifier (e.g., 'xy12345.us-east-1')")
+     password: str | None = Field(default=None, description="Snowflake password")
+     database: str = Field(description="Snowflake database")
+     schema: str | None = Field(default=None, description="Snowflake schema (optional)")
+     warehouse: str | None = Field(default=None, description="Snowflake warehouse to use (optional)")
+     private_key_path: str | None = Field(
+         default=None,
+         description="Path to private key file for key-pair authentication",
+     )
+     passphrase: str | None = Field(
+         default=None,
+         description="Passphrase for the private key if it is encrypted",
+     )
+
+     @classmethod
+     def promptConfig(cls) -> "SnowflakeConfig":
+         """Interactively prompt the user for Snowflake configuration."""
+         console.print("\n[bold cyan]Snowflake Configuration[/bold cyan]\n")
+
+         name = Prompt.ask("[bold]Connection name[/bold]", default="snowflake-prod")
+
+         username = Prompt.ask("[bold]Snowflake username[/bold]")
+         if not username:
+             raise InitError("Snowflake username cannot be empty.")
+
+         account_id = Prompt.ask("[bold]Snowflake account identifier[/bold]")
+         if not account_id:
+             raise InitError("Snowflake account identifier cannot be empty.")
+
+         database = Prompt.ask("[bold]Snowflake database[/bold]")
+         if not database:
+             raise InitError("Snowflake database cannot be empty.")
+
+         warehouse = Prompt.ask(
+             "[bold]Snowflake warehouse[/bold] [dim](optional, press Enter to skip)[/dim]", default=None
+         )
+
+         schema = Prompt.ask("[bold]Default schema[/bold] [dim](optional, press Enter to skip)[/dim]", default=None)
+
+         key_pair_auth = Confirm.ask("[bold]Use key-pair authentication for authentication?[/bold]", default=False)
+
+         if key_pair_auth:
+             private_key_path = Prompt.ask("[bold]Path to private key file[/bold]")
+             if not private_key_path:
+                 raise InitError("Path to private key file cannot be empty.")
+             passphrase = Prompt.ask(
+                 "[bold]Passphrase for the private key[/bold] [dim](optional, press Enter to skip)[/dim]",
+                 default=None,
+                 password=True,
+             )
+         else:
+             password = Prompt.ask("[bold]Snowflake password[/bold]", password=True)
+             if not password:
+                 raise InitError("Snowflake password cannot be empty.")
+
+         return SnowflakeConfig(
+             name=name,
+             username=username,
+             password=password if not key_pair_auth else None,
+             account_id=account_id,
+             database=database,
+             warehouse=warehouse,
+             schema=schema,
+             private_key_path=private_key_path if key_pair_auth else None,
+             passphrase=passphrase if key_pair_auth else None,
+         )
+
+     def connect(self) -> BaseBackend:
+         """Create an Ibis Snowflake connection."""
+         kwargs: dict = {"user": self.username}
+         kwargs["account"] = self.account_id
+
+         if self.database and self.schema:
+             kwargs["database"] = f"{self.database}/{self.schema}"
+         elif self.database:
+             kwargs["database"] = self.database
+
+         if self.warehouse:
+             kwargs["warehouse"] = self.warehouse
+
+         if self.private_key_path:
+             with open(self.private_key_path, "rb") as key_file:
+                 private_key = serialization.load_pem_private_key(
+                     key_file.read(),
+                     password=self.passphrase.encode() if self.passphrase else None,
+                     backend=default_backend(),
+                 )
+             # Convert to DER format which Snowflake expects
+             kwargs["private_key"] = private_key.private_bytes(
+                 encoding=serialization.Encoding.DER,
+                 format=serialization.PrivateFormat.PKCS8,
+                 encryption_algorithm=serialization.NoEncryption(),
+             )
+         kwargs["password"] = self.password
+
+         return ibis.snowflake.connect(**kwargs)
nao_core/config/exceptions.py ADDED
@@ -0,0 +1,7 @@
+ """Shared exceptions for nao_core."""
+
+
+ class InitError(Exception):
+     """Base exception for init command errors."""
+
+     pass
nao_core/templates/__init__.py ADDED
@@ -0,0 +1,12 @@
+ """Template engine module for nao providers.
+
+ This module provides a Jinja2-based templating system that allows users
+ to customize the output of sync providers (databases, repos, etc.).
+
+ Default templates are stored in this package and can be overridden by
+ placing templates with the same name in the project's `templates/` directory.
+ """
+
+ from .engine import TemplateEngine, get_template_engine
+
+ __all__ = ["TemplateEngine", "get_template_engine"]
nao_core/templates/defaults/databases/columns.md.j2 ADDED
@@ -0,0 +1,23 @@
+ {#
+   Template: columns.md.j2
+   Description: Generates column documentation for a database table
+
+   Available variables:
+     - table_name (str): Name of the table
+     - dataset (str): Schema/dataset name
+     - columns (list): List of column dictionaries with:
+         - name (str): Column name
+         - type (str): Data type
+         - nullable (bool): Whether the column allows nulls
+         - description (str|None): Column description if available
+     - column_count (int): Total number of columns
+ #}
+ # {{ table_name }}
+
+ **Dataset:** `{{ dataset }}`
+
+ ## Columns ({{ column_count }})
+
+ {% for col in columns %}
+ - {{ col.name }} ({{ col.type }}{% if col.description %}, "{{ col.description | truncate_middle(256) }}"{% endif %})
+ {% endfor %}
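The columns and preview templates use custom filters (`truncate_middle`, `to_json`) that the template engine presumably registers; their definitions live in `engine.py`, which this diff view does not show. A hypothetical sketch of what such filters could look like, for orientation only:

```python
import json
from typing import Any


def truncate_middle(value: str, max_length: int = 256) -> str:
    # Keep the head and tail of an overlong description, eliding the middle.
    if len(value) <= max_length:
        return value
    half = (max_length - 3) // 2
    return f"{value[:half]}...{value[-half:]}"


def to_json(value: Any) -> str:
    # Serialize a row dict to one JSONL-style line; default=str covers
    # dates, Decimals, and other non-JSON-native values.
    return json.dumps(value, default=str)


# Registered on the Jinja2 environment, e.g.:
# env.filters["truncate_middle"] = truncate_middle
# env.filters["to_json"] = to_json
```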
nao_core/templates/defaults/databases/description.md.j2 ADDED
@@ -0,0 +1,32 @@
+ {#
+   Template: description.md.j2
+   Description: Generates table metadata and description documentation
+
+   Available variables:
+     - table_name (str): Name of the table
+     - dataset (str): Schema/dataset name
+     - row_count (int): Total number of rows in the table
+     - column_count (int): Number of columns in the table
+     - description (str|None): Table description if available
+     - columns (list): List of column dictionaries with:
+         - name (str): Column name
+         - type (str): Data type
+ #}
+ # {{ table_name }}
+
+ **Dataset:** `{{ dataset }}`
+
+ ## Table Metadata
+
+ | Property | Value |
+ |----------|-------|
+ | **Row Count** | {{ "{:,}".format(row_count) }} |
+ | **Column Count** | {{ column_count }} |
+
+ ## Description
+
+ {% if description %}
+ {{ description }}
+ {% else %}
+ _No description available._
+ {% endif %}
nao_core/templates/defaults/databases/preview.md.j2 ADDED
@@ -0,0 +1,22 @@
+ {#
+   Template: preview.md.j2
+   Description: Generates a preview of table rows in JSONL format
+
+   Available variables:
+     - table_name (str): Name of the table
+     - dataset (str): Schema/dataset name
+     - rows (list): List of row dictionaries (first N rows of the table)
+     - row_count (int): Number of preview rows shown
+     - columns (list): List of column dictionaries with:
+         - name (str): Column name
+         - type (str): Data type
+ #}
+ # {{ table_name }} - Preview
+
+ **Dataset:** `{{ dataset }}`
+
+ ## Rows ({{ row_count }})
+
+ {% for row in rows %}
+ - {{ row | to_json }}
+ {% endfor %}
nao_core/templates/defaults/databases/profiling.md.j2 ADDED
@@ -0,0 +1,34 @@
+ {#
+   Template: profiling.md.j2
+   Description: Generates column-level statistics and profiling data
+
+   Available variables:
+     - table_name (str): Name of the table
+     - dataset (str): Schema/dataset name
+     - column_stats (list): List of column statistics dictionaries with:
+         - name (str): Column name
+         - type (str): Data type
+         - null_count (int): Number of null values
+         - unique_count (int): Number of unique values
+         - min_value (str|None): Minimum value (for numeric/temporal columns)
+         - max_value (str|None): Maximum value (for numeric/temporal columns)
+         - error (str|None): Error message if stats couldn't be computed
+     - columns (list): List of column dictionaries with:
+         - name (str): Column name
+         - type (str): Data type
+ #}
+ # {{ table_name }} - Profiling
+
+ **Dataset:** `{{ dataset }}`
+
+ ## Column Statistics
+
+ | Column | Type | Nulls | Unique | Min | Max |
+ |--------|------|-------|--------|-----|-----|
+ {% for stat in column_stats %}
+ {% if stat.error %}
+ | `{{ stat.name }}` | `{{ stat.type }}` | Error: {{ stat.error }} | | | |
+ {% else %}
+ | `{{ stat.name }}` | `{{ stat.type }}` | {{ "{:,}".format(stat.null_count) }} | {{ "{:,}".format(stat.unique_count) }} | {{ stat.min_value or "" }} | {{ stat.max_value or "" }} |
+ {% endif %}
+ {% endfor %}