nao-core 0.0.30__py3-none-any.whl → 0.0.31__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nao_core/__init__.py +1 -1
- nao_core/bin/fastapi/main.py +6 -0
- nao_core/bin/migrations-postgres/0005_add_project_tables.sql +39 -0
- nao_core/bin/migrations-postgres/meta/0005_snapshot.json +1129 -0
- nao_core/bin/migrations-postgres/meta/_journal.json +7 -0
- nao_core/bin/migrations-sqlite/0005_add_project_tables.sql +38 -0
- nao_core/bin/migrations-sqlite/meta/0005_snapshot.json +1086 -0
- nao_core/bin/migrations-sqlite/meta/_journal.json +7 -0
- nao_core/bin/nao-chat-server +0 -0
- nao_core/bin/public/assets/{code-block-F6WJLWQG-z4zcca7w.js → code-block-F6WJLWQG-TAi8koem.js} +1 -1
- nao_core/bin/public/assets/index-BfHcd9Xz.css +1 -0
- nao_core/bin/public/assets/{index-DhhS7iVA.js → index-Mzo9bkag.js} +256 -172
- nao_core/bin/public/index.html +2 -2
- nao_core/commands/chat.py +11 -10
- nao_core/commands/init.py +27 -4
- nao_core/commands/sync/__init__.py +40 -21
- nao_core/commands/sync/accessors.py +218 -139
- nao_core/commands/sync/cleanup.py +133 -0
- nao_core/commands/sync/providers/__init__.py +30 -0
- nao_core/commands/sync/providers/base.py +87 -0
- nao_core/commands/sync/providers/databases/__init__.py +17 -0
- nao_core/commands/sync/providers/databases/bigquery.py +78 -0
- nao_core/commands/sync/providers/databases/databricks.py +79 -0
- nao_core/commands/sync/providers/databases/duckdb.py +83 -0
- nao_core/commands/sync/providers/databases/postgres.py +78 -0
- nao_core/commands/sync/providers/databases/provider.py +123 -0
- nao_core/commands/sync/providers/databases/snowflake.py +78 -0
- nao_core/commands/sync/providers/repositories/__init__.py +5 -0
- nao_core/commands/sync/{repositories.py → providers/repositories/provider.py} +43 -20
- nao_core/config/__init__.py +2 -0
- nao_core/config/base.py +23 -4
- nao_core/config/databases/__init__.py +5 -0
- nao_core/config/databases/base.py +1 -0
- nao_core/config/databases/postgres.py +78 -0
- nao_core/templates/__init__.py +12 -0
- nao_core/templates/defaults/databases/columns.md.j2 +23 -0
- nao_core/templates/defaults/databases/description.md.j2 +32 -0
- nao_core/templates/defaults/databases/preview.md.j2 +22 -0
- nao_core/templates/defaults/databases/profiling.md.j2 +34 -0
- nao_core/templates/engine.py +133 -0
- {nao_core-0.0.30.dist-info → nao_core-0.0.31.dist-info}/METADATA +6 -2
- nao_core-0.0.31.dist-info/RECORD +86 -0
- nao_core/bin/public/assets/index-ClduEZSo.css +0 -1
- nao_core/commands/sync/databases.py +0 -374
- nao_core-0.0.30.dist-info/RECORD +0 -65
- {nao_core-0.0.30.dist-info → nao_core-0.0.31.dist-info}/WHEEL +0 -0
- {nao_core-0.0.30.dist-info → nao_core-0.0.31.dist-info}/entry_points.txt +0 -0
- {nao_core-0.0.30.dist-info → nao_core-0.0.31.dist-info}/licenses/LICENSE +0 -0
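
Reading the listing as a whole: 0.0.31 removes the monolithic `nao_core/commands/sync/databases.py` (its full contents are reproduced in the diff below) in favour of a `commands/sync/providers/` package with one module per backend (BigQuery, Databricks, DuckDB, Postgres, Snowflake), and adds a template layer — `nao_core/templates/engine.py` plus the `defaults/databases/*.md.j2` files, which judging by the `.j2` extension are Jinja2 templates — for the generated markdown. As a rough, hypothetical sketch only (the function name, parameters, and context keys below are illustrative, not nao-core's actual `templates/engine.py` API), rendering one of those per-table documents typically looks like this:

```python
# Hypothetical sketch of a Jinja2-backed markdown renderer; nao-core's actual
# templates/engine.py is not shown in this diff and may differ.
from pathlib import Path

from jinja2 import Environment, FileSystemLoader


def render_table_doc(template_dir: Path, template_name: str, context: dict) -> str:
    """Render one markdown document (e.g. columns.md) for a single table."""
    env = Environment(
        loader=FileSystemLoader(str(template_dir)),
        trim_blocks=True,      # drop the newline after block tags
        lstrip_blocks=True,    # strip leading whitespace before block tags
    )
    return env.get_template(template_name).render(**context)


# Usage against the default templates shipped in 0.0.31; the context keys
# ("table", "columns") are assumptions, not nao-core's documented contract.
doc = render_table_doc(
    Path("nao_core/templates/defaults/databases"),
    "columns.md.j2",
    {"table": "orders", "columns": [{"name": "id", "type": "INTEGER"}]},
)
```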

nao_core/commands/sync/databases.py
DELETED

@@ -1,374 +0,0 @@
-"""Database syncing functionality for generating markdown documentation from database schemas."""
-
-from pathlib import Path
-
-from rich.console import Console
-from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
-
-from .accessors import DataAccessor
-from .registry import get_accessors
-
-console = Console()
-
-
-def sync_bigquery(
-    db_config,
-    base_path: Path,
-    progress: Progress,
-    accessors: list[DataAccessor],
-) -> tuple[int, int]:
-    """Sync BigQuery database schema to markdown files.
-
-    Args:
-        db_config: The database configuration
-        base_path: Base output path
-        progress: Rich progress instance
-        accessors: List of data accessors to run
-
-    Returns:
-        Tuple of (datasets_synced, tables_synced)
-    """
-    conn = db_config.connect()
-    db_path = base_path / "type=bigquery" / f"database={db_config.project_id}"
-
-    datasets_synced = 0
-    tables_synced = 0
-
-    if db_config.dataset_id:
-        datasets = [db_config.dataset_id]
-    else:
-        datasets = conn.list_databases()
-
-    dataset_task = progress.add_task(
-        f"[dim]{db_config.name}[/dim]",
-        total=len(datasets),
-    )
-
-    for dataset in datasets:
-        try:
-            all_tables = conn.list_tables(database=dataset)
-        except Exception:
-            progress.update(dataset_task, advance=1)
-            continue
-
-        # Filter tables based on include/exclude patterns
-        tables = [t for t in all_tables if db_config.matches_pattern(dataset, t)]
-
-        # Skip dataset if no tables match
-        if not tables:
-            progress.update(dataset_task, advance=1)
-            continue
-
-        dataset_path = db_path / f"schema={dataset}"
-        dataset_path.mkdir(parents=True, exist_ok=True)
-        datasets_synced += 1
-
-        table_task = progress.add_task(
-            f"  [cyan]{dataset}[/cyan]",
-            total=len(tables),
-        )
-
-        for table in tables:
-            table_path = dataset_path / f"table={table}"
-            table_path.mkdir(parents=True, exist_ok=True)
-
-            for accessor in accessors:
-                content = accessor.generate(conn, dataset, table)
-                output_file = table_path / accessor.filename
-                output_file.write_text(content)
-
-            tables_synced += 1
-            progress.update(table_task, advance=1)
-
-        progress.update(dataset_task, advance=1)
-
-    return datasets_synced, tables_synced
-
-
-def sync_duckdb(
-    db_config,
-    base_path: Path,
-    progress: Progress,
-    accessors: list[DataAccessor],
-) -> tuple[int, int]:
-    """Sync DuckDB database schema to markdown files.
-
-    Args:
-        db_config: The database configuration
-        base_path: Base output path
-        progress: Rich progress instance
-        accessors: List of data accessors to run
-
-    Returns:
-        Tuple of (schemas_synced, tables_synced)
-    """
-    conn = db_config.connect()
-
-    # Derive database name from path
-    if db_config.path == ":memory:":
-        db_name = "memory"
-    else:
-        db_name = Path(db_config.path).stem
-
-    db_path = base_path / "type=duckdb" / f"database={db_name}"
-
-    schemas_synced = 0
-    tables_synced = 0
-
-    # List all schemas in DuckDB
-    schemas = conn.list_databases()
-
-    schema_task = progress.add_task(
-        f"[dim]{db_config.name}[/dim]",
-        total=len(schemas),
-    )
-
-    for schema in schemas:
-        try:
-            all_tables = conn.list_tables(database=schema)
-        except Exception:
-            progress.update(schema_task, advance=1)
-            continue
-
-        # Filter tables based on include/exclude patterns
-        tables = [t for t in all_tables if db_config.matches_pattern(schema, t)]
-
-        # Skip schema if no tables match
-        if not tables:
-            progress.update(schema_task, advance=1)
-            continue
-
-        schema_path = db_path / f"schema={schema}"
-        schema_path.mkdir(parents=True, exist_ok=True)
-        schemas_synced += 1
-
-        table_task = progress.add_task(
-            f"  [cyan]{schema}[/cyan]",
-            total=len(tables),
-        )
-
-        for table in tables:
-            table_path = schema_path / f"table={table}"
-            table_path.mkdir(parents=True, exist_ok=True)
-
-            for accessor in accessors:
-                content = accessor.generate(conn, schema, table)
-                output_file = table_path / accessor.filename
-                output_file.write_text(content)
-
-            tables_synced += 1
-            progress.update(table_task, advance=1)
-
-        progress.update(schema_task, advance=1)
-
-    return schemas_synced, tables_synced
-
-
-def sync_databricks(
-    db_config,
-    base_path: Path,
-    progress: Progress,
-    accessors: list[DataAccessor],
-) -> tuple[int, int]:
-    """Sync Databricks database schema to markdown files.
-
-    Args:
-        db_config: The database configuration
-        base_path: Base output path
-        progress: Rich progress instance
-        accessors: List of data accessors to run
-
-    Returns:
-        Tuple of (schemas_synced, tables_synced)
-    """
-    conn = db_config.connect()
-    catalog = db_config.catalog or "main"
-    db_path = base_path / "type=databricks" / f"database={catalog}"
-
-    schemas_synced = 0
-    tables_synced = 0
-
-    if db_config.schema:
-        schemas = [db_config.schema]
-    else:
-        schemas = conn.list_databases()
-
-    schema_task = progress.add_task(
-        f"[dim]{db_config.name}[/dim]",
-        total=len(schemas),
-    )
-
-    for schema in schemas:
-        try:
-            all_tables = conn.list_tables(database=schema)
-        except Exception:
-            progress.update(schema_task, advance=1)
-            continue
-
-        # Filter tables based on include/exclude patterns
-        tables = [t for t in all_tables if db_config.matches_pattern(schema, t)]
-
-        # Skip schema if no tables match
-        if not tables:
-            progress.update(schema_task, advance=1)
-            continue
-
-        schema_path = db_path / f"schema={schema}"
-        schema_path.mkdir(parents=True, exist_ok=True)
-        schemas_synced += 1
-
-        table_task = progress.add_task(
-            f"  [cyan]{schema}[/cyan]",
-            total=len(tables),
-        )
-
-        for table in tables:
-            table_path = schema_path / f"table={table}"
-            table_path.mkdir(parents=True, exist_ok=True)
-
-            for accessor in accessors:
-                content = accessor.generate(conn, schema, table)
-                output_file = table_path / accessor.filename
-                output_file.write_text(content)
-
-            tables_synced += 1
-            progress.update(table_task, advance=1)
-
-        progress.update(schema_task, advance=1)
-
-    return schemas_synced, tables_synced
-
-
-def sync_snowflake(
-    db_config,
-    base_path: Path,
-    progress: Progress,
-    accessors: list[DataAccessor],
-) -> tuple[int, int]:
-    """Sync Snowflake database schema to markdown files.
-
-    Args:
-        db_config: The database configuration
-        base_path: Base output path
-        progress: Rich progress instance
-        accessors: List of data accessors to run
-
-    Returns:
-        Tuple of (schemas_synced, tables_synced)
-    """
-    conn = db_config.connect()
-    db_path = base_path / "type=snowflake" / f"database={db_config.database}"
-
-    schemas_synced = 0
-    tables_synced = 0
-
-    if db_config.schema:
-        schemas = [db_config.schema]
-    else:
-        schemas = conn.list_databases()
-
-    schema_task = progress.add_task(
-        f"[dim]{db_config.name}[/dim]",
-        total=len(schemas),
-    )
-
-    for schema in schemas:
-        try:
-            all_tables = conn.list_tables(database=schema)
-        except Exception:
-            progress.update(schema_task, advance=1)
-            continue
-
-        # Filter tables based on include/exclude patterns
-        tables = [t for t in all_tables if db_config.matches_pattern(schema, t)]
-
-        # Skip schema if no tables match
-        if not tables:
-            progress.update(schema_task, advance=1)
-            continue
-
-        schema_path = db_path / f"schema={schema}"
-        schema_path.mkdir(parents=True, exist_ok=True)
-        schemas_synced += 1
-
-        table_task = progress.add_task(
-            f"  [cyan]{schema}[/cyan]",
-            total=len(tables),
-        )
-
-        for table in tables:
-            table_path = schema_path / f"table={table}"
-            table_path.mkdir(parents=True, exist_ok=True)
-
-            for accessor in accessors:
-                content = accessor.generate(conn, schema, table)
-                output_file = table_path / accessor.filename
-                output_file.write_text(content)
-
-            tables_synced += 1
-            progress.update(table_task, advance=1)
-
-        progress.update(schema_task, advance=1)
-
-    return schemas_synced, tables_synced
-
-
-def sync_databases(databases: list, base_path: Path) -> tuple[int, int]:
-    """Sync all configured databases.
-
-    Args:
-        databases: List of database configurations
-        base_path: Base path where database schemas are stored
-
-    Returns:
-        Tuple of (total_datasets, total_tables) synced
-    """
-    if not databases:
-        console.print("\n[dim]No databases configured[/dim]")
-        return 0, 0
-
-    total_datasets = 0
-    total_tables = 0
-
-    console.print("\n[bold cyan]🗄️ Syncing Databases[/bold cyan]")
-    console.print(f"[dim]Location:[/dim] {base_path.absolute()}\n")
-
-    with Progress(
-        SpinnerColumn(style="dim"),
-        TextColumn("[progress.description]{task.description}"),
-        BarColumn(bar_width=30, style="dim", complete_style="cyan", finished_style="green"),
-        TaskProgressColumn(),
-        console=console,
-        transient=False,
-    ) as progress:
-        for db in databases:
-            # Get accessors from database config
-            db_accessors = get_accessors(db.accessors)
-            accessor_names = [a.filename.replace(".md", "") for a in db_accessors]
-
-            try:
-                console.print(f"[dim]{db.name} accessors:[/dim] {', '.join(accessor_names)}")
-                if db.type == "bigquery":
-                    datasets, tables = sync_bigquery(db, base_path, progress, db_accessors)
-                    total_datasets += datasets
-                    total_tables += tables
-                elif db.type == "duckdb":
-                    schemas, tables = sync_duckdb(db, base_path, progress, db_accessors)
-                    total_datasets += schemas
-                    total_tables += tables
-                elif db.type == "databricks":
-                    console.print(f"[dim]{db.name} accessors:[/dim] {', '.join(accessor_names)}")
-                    schemas, tables = sync_databricks(db, base_path, progress, db_accessors)
-                    total_datasets += schemas
-                    total_tables += tables
-                elif db.type == "snowflake":
-                    console.print(f"[dim]{db.name} accessors:[/dim] {', '.join(accessor_names)}")
-                    schemas, tables = sync_snowflake(db, base_path, progress, db_accessors)
-                    total_datasets += schemas
-                    total_tables += tables
-                else:
-                    console.print(f"[yellow]⚠ Unsupported database type: {db.type}[/yellow]")
-            except Exception as e:
-                console.print(f"[bold red]✗[/bold red] Failed to sync {db.name}: {e}")
-
-    return total_datasets, total_tables
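
The four removed sync functions above differ only in how they name the database directory and enumerate schemas (BigQuery keys on `project_id`/`dataset_id`, DuckDB derives the name from the file path, Databricks on `catalog`/`schema`, Snowflake on `database`/`schema`); the table loop, pattern filtering, and accessor output are otherwise copied verbatim. The new `commands/sync/providers/` package presumably factors that duplication out, but its actual interface is not visible in this diff. Below is a hypothetical sketch of such a refactor (class and method names are illustrative, progress reporting and error handling omitted), not nao-core's real `providers/base.py`:

```python
# Hypothetical sketch only; nao-core's providers/base.py and
# providers/databases/*.py are not shown in this diff and may differ.
from pathlib import Path


class DatabaseProvider:
    """Shared sync loop; subclasses override naming and schema discovery."""

    type_name = "generic"

    def __init__(self, db_config):
        self.db_config = db_config
        self.conn = db_config.connect()

    def database_name(self) -> str:
        return self.db_config.name

    def list_schemas(self) -> list[str]:
        return self.conn.list_databases()

    def sync(self, base_path: Path, accessors) -> tuple[int, int]:
        db_path = base_path / f"type={self.type_name}" / f"database={self.database_name()}"
        schemas_synced = tables_synced = 0
        for schema in self.list_schemas():
            tables = [
                t
                for t in self.conn.list_tables(database=schema)
                if self.db_config.matches_pattern(schema, t)
            ]
            if not tables:
                continue
            schema_path = db_path / f"schema={schema}"
            schema_path.mkdir(parents=True, exist_ok=True)
            schemas_synced += 1
            for table in tables:
                table_path = schema_path / f"table={table}"
                table_path.mkdir(parents=True, exist_ok=True)
                for accessor in accessors:
                    # Same accessor contract as the deleted code: generate() returns
                    # markdown, filename decides which .md file it lands in.
                    (table_path / accessor.filename).write_text(
                        accessor.generate(self.conn, schema, table)
                    )
                tables_synced += 1
        return schemas_synced, tables_synced


class BigQueryProvider(DatabaseProvider):
    type_name = "bigquery"

    def database_name(self) -> str:
        return self.db_config.project_id

    def list_schemas(self) -> list[str]:
        if self.db_config.dataset_id:
            return [self.db_config.dataset_id]
        return super().list_schemas()
```

Each backend then only overrides the parts that genuinely differ, which is the shape suggested by the per-backend modules (`bigquery.py`, `databricks.py`, `duckdb.py`, `postgres.py`, `snowflake.py`) added in 0.0.31.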
nao_core-0.0.30.dist-info/RECORD
DELETED

@@ -1,65 +0,0 @@
-nao_core/__init__.py,sha256=D-TUM48DRXHFcjyDLd4uJhkBBBrvO9k_3p_dICCoMTU,46
-nao_core/main.py,sha256=f00vLL4s2B2kCMa8y3lI56LX3TnUppWzYmqM6eOGomA,205
-nao_core/bin/.nao-secret,sha256=BeHkxTsBb9ehjUkjZrY2SMU6SB0cfbcjBwpeGxsUJJk,43
-nao_core/bin/nao-chat-server,sha256=1Mr68-z4YBFiScO9XUt7teax3689-fFwTg8MDgmXNy0,66259056
-nao_core/bin/rg,sha256=bvQDRr8x_M552WFMd0XBmFQpJaDH1JEeH_55TFM5KsE,4528512
-nao_core/bin/fastapi/main.py,sha256=C47ZtIpiWZ3EmtB2oVD5vP7s9x0z56K6w1HoZgPAgNQ,4206
-nao_core/bin/fastapi/test_main.py,sha256=ewqfLNhtNC25F5YmtXNGXg3MOMBtbd-5aoHn2CVS9Kk,4286
-nao_core/bin/migrations-postgres/0000_user_auth_and_chat_tables.sql,sha256=UEOxvNaQEKPeZimW-HgHmeCg_py2ToDZkxFXk2NoNSo,4601
-nao_core/bin/migrations-postgres/0001_message_feedback.sql,sha256=OWSyDCABwM6xlnEc5sC34wXmml1RyhvCDRG4s_7KCU4,443
-nao_core/bin/migrations-postgres/0002_chat_message_stop_reason_and_error_message.sql,sha256=QOp5LBcBQGRu-ocFoaaTZnH9qb-M9oU2OdK9MUOQx7Y,141
-nao_core/bin/migrations-postgres/0003_handle_slack_with_thread.sql,sha256=4zGpAV9g58moMkMbLXHOd3Wc7PZN6K75ArVn85ys6FA,157
-nao_core/bin/migrations-postgres/0004_input_and_output_tokens.sql,sha256=8Is8hwmNYL-dpY9rvCwJGKTxWOQL7T8s9UG4gGz6E0U,725
-nao_core/bin/migrations-postgres/meta/0000_snapshot.json,sha256=8bV8DQjaks4Id2aPPWbVcSb5xMhPKNf_V0loe01dnl8,17997
-nao_core/bin/migrations-postgres/meta/0001_snapshot.json,sha256=oKRco0GEWSe6PXr1ghB5BfOseNpEA3nA_X_UMYdSpjY,19531
-nao_core/bin/migrations-postgres/meta/0002_snapshot.json,sha256=P4RDY5FMgqMmrU-XHD6DkoOYE9uSyUkuXuo2YKqooMU,19841
-nao_core/bin/migrations-postgres/meta/0003_snapshot.json,sha256=t2VF3EJ-B_c5nknL_vifUZ_MrtBbaC3vsxVtBZm7sZQ,20408
-nao_core/bin/migrations-postgres/meta/0004_snapshot.json,sha256=AsFWH3Qt1Ws-nFdwc99ZUtZ8WEnvhpNkpY_4M9X4WY4,21796
-nao_core/bin/migrations-postgres/meta/_journal.json,sha256=Cf5bSLVo2LtrPw4LdUHA1jpXOnnNtRP7whfCPNA7f0w,836
-nao_core/bin/migrations-sqlite/0000_user_auth_and_chat_tables.sql,sha256=8H7o4sTEfnqYc_Ks-0MUu85C8zHe5Ot94g3mlW33eCs,4458
-nao_core/bin/migrations-sqlite/0001_message_feedback.sql,sha256=4XGfvNu0UUSDtmyL3RzaI1h8hssX9rk3NlTfontI1EM,398
-nao_core/bin/migrations-sqlite/0002_chat_message_stop_reason_and_error_message.sql,sha256=Jj0uz2OhzoTriT8sxhI-RPf2AAVMcNvKsFEjshrqavQ,127
-nao_core/bin/migrations-sqlite/0003_handle_slack_with_thread.sql,sha256=-9bL-zvkesrKQapm2T1Ykmeyl5riwBQMqlLJzNKSa9w,138
-nao_core/bin/migrations-sqlite/0004_input_and_output_tokens.sql,sha256=DzvwdOiDydmbm9z2dvuHgJ6juiyoKHs7I-3Ep_V2Iyk,669
-nao_core/bin/migrations-sqlite/meta/0000_snapshot.json,sha256=tuziD4cmsnGH5Eq1p683ac1Q8R8DLuw_QLPZL_MG5X8,17872
-nao_core/bin/migrations-sqlite/meta/0001_snapshot.json,sha256=M35-2dFnDVe4ZcNOXdrU9UAUpsVprzY-YOe09Qkv5Yw,19580
-nao_core/bin/migrations-sqlite/meta/0002_snapshot.json,sha256=T_SJc8-7A2ocO7PXG_hWVO0EdQ8UjkJ3a5Q5yyIQVTY,19958
-nao_core/bin/migrations-sqlite/meta/0003_snapshot.json,sha256=lpG5dT7MI5UETMXiHJpXNCb7n84gYSXnxdXo6JTmr8c,20336
-nao_core/bin/migrations-sqlite/meta/0004_snapshot.json,sha256=GyI-SeZIvS42n4NRIjLoF2U2ryUkjp36JR8qOGsjoe8,21996
-nao_core/bin/migrations-sqlite/meta/_journal.json,sha256=ZlhImp911iftDA1VYakdI8ncwgqkh2gMERbZzgPTwFs,832
-nao_core/bin/public/favicon.ico,sha256=s-a2BQ71NeB_Y2uOC_EPF9isG5DavpF9GKwYgxNSOWk,15406
-nao_core/bin/public/github-icon.svg,sha256=drFeRxLjsnngoGOh4heU3E5mX_xUsmGgFOc_mnjXKwU,1926
-nao_core/bin/public/google-icon.svg,sha256=bTmcoH4gkdoV2Atl2kp-pLFpFZNL85FR_8GAKv9LL5I,1140
-nao_core/bin/public/index.html,sha256=g8z_drF4TbpxtYGpjUEqqKj_ZK-_rQYDExynNiRNyPM,670
-nao_core/bin/public/nao-logo-greyscale.svg,sha256=hi_aq_RD83zpGUQB06znDEFC7MNdScdaZtGTyZUl1IY,1570
-nao_core/bin/public/nao-square-logo.png,sha256=OWCZOgcfWu-OifAe6i7ZiY8kJjcx8fooElaiE0mkB3o,423403
-nao_core/bin/public/assets/code-block-F6WJLWQG-z4zcca7w.js,sha256=sCbSj9426igR02eI467qzDL9it6g29GSSvzn0mOrJpw,173667
-nao_core/bin/public/assets/index-ClduEZSo.css,sha256=BbZHnFXzFV8WFmQp57-d6mslZO9Ouiec4DF-ccI5rfU,57575
-nao_core/bin/public/assets/index-DhhS7iVA.js,sha256=qBZFSoxlfLNzJ4tirdIV53mOLOu5jqLNzTfBp7fKuW8,2443185
-nao_core/commands/__init__.py,sha256=rFCuUyA9wM4hS6CxCt2M_5W4VysyQXh6HkA_hyRCxPc,207
-nao_core/commands/chat.py,sha256=XGpJepj3iciCmNzQ6abVVDNKv2ovB2lPwQTboNlPbWU,7982
-nao_core/commands/debug.py,sha256=stt-IAsYA5hqywyIUOG9J_XXZs6QP5gtqMujPiuSmCc,5137
-nao_core/commands/init.py,sha256=33-AOXjsP5oqct2KZfUJokfOa70KrR4lwwOrhunOM20,10391
-nao_core/commands/sync/__init__.py,sha256=Opg4j0xvNdSBtHXZ6FzCihW05c6UDONxKgkIACJ1pAE,1853
-nao_core/commands/sync/accessors.py,sha256=iPddVCjhiGqNuUD6dV5ts90H0u6R5AZYyRitiQbLS-8,6913
-nao_core/commands/sync/databases.py,sha256=01zzAjCjDwM_XDkcVAUch_TH6-luA7I4BzZo9qnLcEI,11601
-nao_core/commands/sync/registry.py,sha256=0yExoePzNZ_T0AswEr49ztOyHWaruz63ifVSGj1baO8,728
-nao_core/commands/sync/repositories.py,sha256=GNzM5CouCK9zlQPW5B94c4Tye-bpmSWtAdZRXd0No30,3043
-nao_core/config/__init__.py,sha256=sHrQDkr3GcXDjbmFwv4ZuiSi6TzQKr7U-L0mjfdCnBU,558
-nao_core/config/base.py,sha256=7gPNfiwRbF-5ra4qDTttPSzcV3krXr-_scFMKXFTBgY,2964
-nao_core/config/exceptions.py,sha256=Y23b_64sUUwabHr2GGBv5MkwK4OqxhZ791-pV49Gw7Q,128
-nao_core/config/databases/__init__.py,sha256=0VP-V-8Zq0Sp0lwiE6BQbQBca7PQ3XDBaXfwKDDW-zA,1518
-nao_core/config/databases/base.py,sha256=DS2xADfCHfi1TVdN1DIZ2FIz2xwrjqeNxRx7u3fwXpw,2353
-nao_core/config/databases/bigquery.py,sha256=mKP-BGaEUcLvjKdSt3X8hkqZLozfKdAJQdL1Nojy65E,2480
-nao_core/config/databases/databricks.py,sha256=q6QxFRIjHxsqxAw0ip9AQmxPxzbKDYgs4wb2deAlwig,2605
-nao_core/config/databases/duckdb.py,sha256=U_fZbrgK7iKfwayGMG3Iw43sOnuX5Vj7s93azYd5u38,1047
-nao_core/config/databases/snowflake.py,sha256=51YqVArEDgCt1lQU8p6MAyO_z4kIknqTIwGX9KCHXjo,4710
-nao_core/config/llm/__init__.py,sha256=TBCL0ZJ83NIu1VoPof0uYkM_sBIboCDR36HnwwtJmiY,371
-nao_core/config/repos/__init__.py,sha256=R3LyHr9JtQG0DQ5Ae7pD5vO-ZrChAth-U-dFPJ0fRQ4,55
-nao_core/config/repos/base.py,sha256=kutfkSzOW2F2zFnD2vy69hjhsazV86-z-Mr2Nn8KBqU,353
-nao_core/config/slack/__init__.py,sha256=VLovE8Dp8Sgv_1N-I3XEmlxPOVWk8QxATHp17cMdR-o,415
-nao_core-0.0.30.dist-info/METADATA,sha256=eYMmOQhrHaeS4YyPuHTbRnBfnEkpODU7rkCuU0E4GdE,5289
-nao_core-0.0.30.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-nao_core-0.0.30.dist-info/entry_points.txt,sha256=SZakIiNybgS3pl_OEZVLyLSweadeBFoEMBECMoj9czY,42
-nao_core-0.0.30.dist-info/licenses/LICENSE,sha256=rn5YtWB6E5hPQI49tCTNSyqlArWGsB6HzA5FfSbRHRs,1066
-nao_core-0.0.30.dist-info/RECORD,,

{nao_core-0.0.30.dist-info → nao_core-0.0.31.dist-info}/WHEEL
File without changes

{nao_core-0.0.30.dist-info → nao_core-0.0.31.dist-info}/entry_points.txt
File without changes

{nao_core-0.0.30.dist-info → nao_core-0.0.31.dist-info}/licenses/LICENSE
File without changes