decompressed-cli 0.1.0__tar.gz → 0.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (20) hide show
  1. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/PKG-INFO +2 -3
  2. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/pyproject.toml +2 -3
  3. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli/__init__.py +1 -1
  4. decompressed_cli-0.1.2/src/decompressed_cli/commands/imports_cmd.py +220 -0
  5. decompressed_cli-0.1.2/src/decompressed_cli/commands/sync_cmd.py +283 -0
  6. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli/main.py +3 -1
  7. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli.egg-info/PKG-INFO +2 -3
  8. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli.egg-info/SOURCES.txt +2 -0
  9. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/README.md +0 -0
  10. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/setup.cfg +0 -0
  11. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli/commands/__init__.py +0 -0
  12. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli/commands/config_cmd.py +0 -0
  13. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli/commands/data_cmd.py +0 -0
  14. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli/commands/datasets_cmd.py +0 -0
  15. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli/commands/versions_cmd.py +0 -0
  16. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli/config.py +0 -0
  17. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli.egg-info/dependency_links.txt +0 -0
  18. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli.egg-info/entry_points.txt +0 -0
  19. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli.egg-info/requires.txt +0 -0
  20. {decompressed_cli-0.1.0 → decompressed_cli-0.1.2}/src/decompressed_cli.egg-info/top_level.txt +0 -0
@@ -1,12 +1,11 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: decompressed-cli
3
- Version: 0.1.0
3
+ Version: 0.1.2
4
4
  Summary: CLI for Decompressed - Git-like version control for vector datasets
5
5
  Author-email: Decompressed <support@decompressed.io>
6
- License: MIT
6
+ License-Expression: MIT
7
7
  Classifier: Development Status :: 3 - Alpha
8
8
  Classifier: Intended Audience :: Developers
9
- Classifier: License :: OSI Approved :: MIT License
10
9
  Classifier: Programming Language :: Python :: 3
11
10
  Classifier: Programming Language :: Python :: 3.9
12
11
  Classifier: Programming Language :: Python :: 3.10
@@ -4,18 +4,17 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "decompressed-cli"
7
- version = "0.1.0"
7
+ version = "0.1.2"
8
8
  description = "CLI for Decompressed - Git-like version control for vector datasets"
9
9
  readme = "README.md"
10
10
  requires-python = ">=3.9"
11
- license = {text = "MIT"}
11
+ license = "MIT"
12
12
  authors = [
13
13
  {name = "Decompressed", email = "support@decompressed.io"}
14
14
  ]
15
15
  classifiers = [
16
16
  "Development Status :: 3 - Alpha",
17
17
  "Intended Audience :: Developers",
18
- "License :: OSI Approved :: MIT License",
19
18
  "Programming Language :: Python :: 3",
20
19
  "Programming Language :: Python :: 3.9",
21
20
  "Programming Language :: Python :: 3.10",
@@ -1,3 +1,3 @@
1
1
  """Decompressed CLI - Git-like version control for vector datasets."""
2
2
 
3
- __version__ = "0.1.0"
3
+ __version__ = "0.1.2"
@@ -0,0 +1,220 @@
1
+ """Import commands for pulling vectors from external databases."""
2
+
3
+ import time
4
+ import typer
5
+ from rich.console import Console
6
+ from rich.table import Table
7
+ from rich.panel import Panel
8
+ from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn
9
+
10
+ from ..config import get_client
11
+
12
+ app = typer.Typer(no_args_is_help=True)
13
+ console = Console()
14
+
15
+
16
@app.command("pull")
def pull(
    connector: str = typer.Argument(..., help="Connector name or ID to import from"),
    dataset_name: str = typer.Argument(..., help="Name for the new dataset"),
    project: str = typer.Option(None, "--project", "-p", help="Project name or ID"),
    no_metadata: bool = typer.Option(False, "--no-metadata", help="Skip importing metadata"),
    batch_size: int = typer.Option(100, "--batch-size", "-b", help="Vectors per batch"),
    no_wait: bool = typer.Option(False, "--no-wait", help="Don't wait for completion"),
):
    """
    Pull vectors from an external database into a new dataset.

    Example:
        dcp imports pull pinecone-prod my-backup
        dcp imports pull qdrant-index imported-vectors --project ml-team
    """
    try:
        client = get_client()

        console.print(f"[cyan]Connecting to connector:[/cyan] {connector}")

        # Initialize the import session: validates the connector and
        # creates the destination dataset server-side.
        session = client.imports.init(
            connector_id=connector,
            dataset_name=dataset_name,
            project=project,
            include_metadata=not no_metadata,
            batch_size=batch_size,
        )

        console.print(Panel(
            f"[green]✓[/green] Connected to [cyan]{session.connector_type}[/cyan]\n"
            f"[dim]Estimated vectors:[/dim] {session.estimated_vectors:,}\n"
            f"[dim]Dimension:[/dim] {session.dimension}\n"
            f"[dim]Dataset ID:[/dim] {session.dataset_id}",
            title="Import Initialized",
            border_style="green"
        ))

        # Kick off the asynchronous server-side import job.
        result = client.imports.start(session.import_session_id)
        console.print(f"[cyan]Import job started:[/cyan] {result.job_id[:8]}...")

        if no_wait:
            console.print("\n[yellow]Import running in background.[/yellow]")
            console.print(f"Check status: [cyan]dcp imports status {session.import_session_id}[/cyan]")
            return

        # Poll the job every 2s until it completes or fails,
        # rendering a progress bar from the reported percentage.
        with Progress(
            SpinnerColumn(),
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
            console=console,
        ) as progress:
            task = progress.add_task("Importing vectors...", total=100)

            while True:
                job = client.imports.status(session.import_session_id)

                if job.progress:
                    progress.update(task, completed=job.progress)

                if job.status == "completed":
                    progress.update(task, completed=100)
                    break
                elif job.status == "failed":
                    console.print(f"\n[red]✗ Import failed:[/red] {job.error_message}")
                    raise typer.Exit(1)

                time.sleep(2)

        console.print(Panel(
            f"[green]✓[/green] Import complete!\n"
            f"[dim]Dataset:[/dim] {dataset_name}\n"
            f"[dim]Dataset ID:[/dim] {session.dataset_id}",
            title="Success",
            border_style="green"
        ))

    except typer.Exit:
        # FIX: typer.Exit subclasses RuntimeError, so the blanket handler
        # below used to catch the deliberate Exit raised in the "failed"
        # branch and print a spurious "Error:" line. Re-raise untouched.
        raise
    except Exception as e:
        # The original ValueError/Exception handlers were byte-identical,
        # so a single handler covers both cases.
        console.print(f"[red]Error:[/red] {e}")
        raise typer.Exit(1)
103
+
104
+
105
@app.command("append")
def append(
    connector: str = typer.Argument(..., help="Connector name or ID to import from"),
    dataset: str = typer.Argument(..., help="Existing dataset name or ID to append to"),
    no_metadata: bool = typer.Option(False, "--no-metadata", help="Skip importing metadata"),
    batch_size: int = typer.Option(100, "--batch-size", "-b", help="Vectors per batch"),
    no_wait: bool = typer.Option(False, "--no-wait", help="Don't wait for completion"),
):
    """
    Append vectors from an external database to an existing dataset.

    Example:
        dcp imports append pinecone-prod my-dataset
    """
    try:
        client = get_client()

        # Resolve the dataset name/ID to a concrete dataset record first,
        # so a typo fails before we touch the connector.
        ds = client.datasets.get(dataset)

        console.print(f"[cyan]Connecting to connector:[/cyan] {connector}")
        console.print(f"[cyan]Appending to dataset:[/cyan] {ds.name} (v{ds.current_version})")

        # Initialize an append-mode import session against the existing dataset.
        session = client.imports.init_append(
            connector_id=connector,
            dataset_id=ds.id,
            include_metadata=not no_metadata,
            batch_size=batch_size,
        )

        console.print(Panel(
            f"[green]✓[/green] Connected to [cyan]{session.connector_type}[/cyan]\n"
            f"[dim]Estimated vectors:[/dim] {session.estimated_vectors:,}\n"
            f"[dim]Dimension:[/dim] {session.dimension}",
            title="Append Import Initialized",
            border_style="green"
        ))

        # Kick off the asynchronous server-side import job.
        result = client.imports.start(session.import_session_id)
        console.print(f"[cyan]Import job started:[/cyan] {result.job_id[:8]}...")

        if no_wait:
            console.print("\n[yellow]Import running in background.[/yellow]")
            console.print(f"Check status: [cyan]dcp imports status {session.import_session_id}[/cyan]")
            return

        # Poll the job every 2s until it completes or fails.
        with Progress(
            SpinnerColumn(),
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
            console=console,
        ) as progress:
            task = progress.add_task("Importing vectors...", total=100)

            while True:
                job = client.imports.status(session.import_session_id)

                if job.progress:
                    progress.update(task, completed=job.progress)

                if job.status == "completed":
                    progress.update(task, completed=100)
                    break
                elif job.status == "failed":
                    console.print(f"\n[red]✗ Import failed:[/red] {job.error_message}")
                    raise typer.Exit(1)

                time.sleep(2)

        console.print(f"[green]✓[/green] Appended vectors to [cyan]{ds.name}[/cyan]")

    except typer.Exit:
        # FIX: typer.Exit subclasses RuntimeError, so the blanket handler
        # below used to catch the deliberate Exit raised in the "failed"
        # branch and print a spurious "Error:" line. Re-raise untouched.
        raise
    except Exception as e:
        # The original ValueError/Exception handlers were byte-identical,
        # so a single handler covers both cases.
        console.print(f"[red]Error:[/red] {e}")
        raise typer.Exit(1)
186
+
187
+
188
@app.command("status")
def status(
    session_id: str = typer.Argument(..., help="Import session ID"),
):
    """Check status of an import job."""
    try:
        job = get_client().imports.status(session_id)

        # Map each known job state to a display colour; anything
        # unexpected falls back to plain white.
        colors = {
            "initialized": "yellow",
            "in_progress": "cyan",
            "completed": "green",
            "failed": "red",
        }
        status_color = colors.get(job.status, "white")

        lines = [
            f"[cyan]Session ID:[/cyan] {job.import_session_id}",
            f"[cyan]Dataset ID:[/cyan] {job.dataset_id or 'N/A'}",
            f"[cyan]Status:[/cyan] [{status_color}]{job.status}[/{status_color}]",
            f"[cyan]Progress:[/cyan] {job.progress or 0}%",
            f"[cyan]Job ID:[/cyan] {job.job_id or 'N/A'}",
        ]
        if job.error_message:
            lines.append(f"[red]Error:[/red] {job.error_message}")

        console.print(Panel("\n".join(lines), title="Import Status", border_style=status_color))

    except ValueError as e:
        console.print(f"[red]Error:[/red] {e}")
        raise typer.Exit(1)
    except Exception as e:
        console.print(f"[red]Error:[/red] {e}")
        raise typer.Exit(1)
@@ -0,0 +1,283 @@
1
+ """CLI commands for syncing datasets to external vector databases."""
2
+
3
+ import typer
4
+ import time
5
+ from rich.console import Console
6
+ from rich.table import Table
7
+ from rich.panel import Panel
8
+ from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn
9
+ from typing import Optional
10
+
11
+ from ..config import get_config
12
+
13
+ app = typer.Typer(no_args_is_help=True)
14
+ console = Console()
15
+
16
+
17
def _api_headers():
    """Build the auth + JSON content-type headers for every API request."""
    api_key = get_config()["api_key"]
    return {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
    }
23
+
24
+
25
def _api_url(path: str) -> str:
    """Join *path* onto the configured base URL under the /api/v1 prefix."""
    base = get_config()["base_url"]
    return f"{base}/api/v1{path}"
28
+
29
+
30
@app.command("push")
def push(
    dataset: str = typer.Argument(..., help="Dataset name or ID"),
    connector: str = typer.Argument(..., help="Connector name or ID"),
    version: Optional[int] = typer.Option(None, "--version", "-v", help="Source version (default: current)"),
    batch_size: int = typer.Option(100, "--batch-size", "-b", help="Vectors per batch"),
    mode: str = typer.Option("auto", "--mode", "-m", help="Sync mode: auto or full"),
    force: bool = typer.Option(False, "--force", "-f", help="Force sync, overwrite destination drift"),
):
    """
    Push dataset to a vector database connector.

    Decompressed is the source of truth. This command deploys your dataset
    version to the destination. If the destination was modified externally,
    you will be warned (use --force to override).

    Examples:
        dcp sync push my-dataset my-pinecone-connector
        dcp sync push my-dataset my-pinecone --version 3
        dcp sync push my-dataset my-pinecone --mode full --force
    """
    import httpx

    headers = _api_headers()

    # Resolve the dataset name/ID to a concrete dataset record.
    console.print(f"[dim]Resolving dataset '{dataset}'...[/dim]")
    try:
        r = httpx.get(_api_url(f"/datasets/{dataset}"), headers=headers, timeout=15)
        r.raise_for_status()
        ds = r.json()
        dataset_id = ds["id"]
        ds_name = ds.get("name", dataset_id)
        current_ver = ds.get("current_version", 1)
        num_vectors = ds.get("num_vectors", 0)
    except httpx.HTTPStatusError as e:
        console.print(f"[red]Dataset not found: {e.response.text}[/red]")
        raise typer.Exit(1)

    source_version = version or current_ver

    # Show the sync plan before doing anything destructive.
    console.print()
    console.print(Panel(
        f"[bold]Dataset:[/bold] {ds_name}\n"
        f"[bold]Version:[/bold] v{source_version} ({num_vectors:,} vectors)\n"
        f"[bold]Connector:[/bold] {connector}\n"
        f"[bold]Mode:[/bold] {mode}\n"
        f"[bold]Force:[/bold] {'yes' if force else 'no'}",
        title="[bold green]Sync Plan[/bold green]",
        border_style="green",
    ))

    if mode == "auto":
        console.print(
            "[dim]Auto mode: will use incremental sync if a previous sync exists, "
            "otherwise full upload.[/dim]"
        )
    elif mode == "full":
        console.print("[dim]Full mode: all vectors will be re-uploaded.[/dim]")

    console.print()

    # Build the job config; the API treats absent keys as defaults,
    # so drop None values rather than sending them.
    config = {
        "batch_size": batch_size,
        "sync_mode": mode if mode == "full" else None,
        "force_sync": force or None,
    }
    config = {k: v for k, v in config.items() if v is not None}

    # Create the sync job.
    try:
        r = httpx.post(
            _api_url("/syncs"),
            headers=headers,
            json={
                "dataset_id": dataset_id,
                "connector_id": connector,
                "source_version": source_version,
                "config": config,
            },
            timeout=30,
        )
        r.raise_for_status()
        result = r.json()
        job_id = result.get("job_id")
        sync_job_id = result.get("sync_job_id")
    except httpx.HTTPStatusError as e:
        # FIX: the error body is not guaranteed to be JSON (proxies and
        # gateways return plain text), and a JSON body is not guaranteed
        # to be a dict — either case used to raise and mask the real error.
        try:
            payload = e.response.json()
        except ValueError:
            payload = None
        if isinstance(payload, dict):
            detail = payload.get("detail", str(e))
        else:
            detail = e.response.text or str(e)
        if isinstance(detail, dict):
            console.print(f"[red]{detail.get('message', str(detail))}[/red]")
            if detail.get("suggestions"):
                for s in detail["suggestions"]:
                    console.print(f"  [yellow]→ {s}[/yellow]")
        else:
            console.print(f"[red]{detail}[/red]")
        raise typer.Exit(1)

    # FIX: guard against a missing job_id — slicing None crashed here.
    job_ref = job_id[:8] if job_id else "unknown"
    console.print(f"[green]✓ Sync job created[/green] (job: {job_ref}...)")
    console.print()

    # Poll the generic jobs endpoint every 2s until terminal state.
    with Progress(
        SpinnerColumn(),
        TextColumn("[progress.description]{task.description}"),
        BarColumn(),
        TextColumn("{task.percentage:>3.0f}%"),
        console=console,
    ) as progress:
        task = progress.add_task("Syncing...", total=100)

        while True:
            time.sleep(2)
            try:
                r = httpx.get(
                    _api_url(f"/jobs/{job_id}"),
                    headers=headers,
                    timeout=15,
                )
                r.raise_for_status()
                job = r.json()
            except Exception:
                # Transient poll errors are retried indefinitely.
                # NOTE(review): there is no retry cap — a permanently
                # unreachable API loops forever; confirm this is intended.
                continue

            status = job.get("status", "unknown")
            pct = job.get("progress", 0) or 0
            stage = (job.get("progress_details") or {}).get("stage", "")

            progress.update(task, completed=pct, description=stage or f"Status: {status}")

            if status in ("completed", "failed"):
                progress.update(task, completed=100)
                break

    # Fetch the sync job record for the result summary; best-effort only.
    console.print()
    try:
        r = httpx.get(_api_url(f"/syncs/{sync_job_id}"), headers=headers, timeout=15)
        r.raise_for_status()
        sync_result = r.json()
    except Exception:
        sync_result = {}

    # The API packs sync metadata (mode, drift, diff, warnings) into
    # error_details even on success — presumably a reused field; verify.
    error_details = sync_result.get("error_details") or {}
    sync_mode_used = error_details.get("sync_mode", "full")
    drift = error_details.get("drift_report")
    diff = error_details.get("diff_summary")
    warnings = error_details.get("warnings", [])

    if sync_result.get("status") == "completed":
        console.print("[bold green]✓ Sync completed successfully[/bold green]")
    else:
        console.print(f"[bold red]✗ Sync failed[/bold red]: {sync_result.get('error_message', 'Unknown error')}")

    # Summary table
    table = Table(show_header=False, box=None, padding=(0, 2))
    table.add_column(style="dim")
    table.add_column()
    table.add_row("Mode", f"[cyan]{sync_mode_used}[/cyan]")
    table.add_row("Vectors synced", f"[green]{sync_result.get('vectors_synced', 0):,}[/green]")

    if error_details.get("vectors_deleted"):
        table.add_row("Vectors deleted", f"[red]{error_details['vectors_deleted']:,}[/red]")

    if diff:
        table.add_row("Added", f"[green]+{diff.get('added', 0)}[/green]")
        table.add_row("Deleted", f"[red]-{diff.get('deleted', 0)}[/red]")
        table.add_row("Updated", f"[yellow]~{diff.get('updated', 0)}[/yellow]")
        table.add_row("Unchanged", f"[dim]{diff.get('unchanged', 0)}[/dim]")

    table.add_row("Batches", f"{sync_result.get('batches_completed', 0)} ok / {sync_result.get('batches_failed', 0)} failed")
    console.print(table)

    if drift and drift.get("has_drift"):
        console.print()
        console.print("[yellow]⚠ Drift detected in destination:[/yellow]")
        for d in drift.get("details", []):
            console.print(f"  [yellow]• {d}[/yellow]")

    if warnings:
        console.print()
        for w in warnings[:5]:
            console.print(f"[yellow]⚠ {w}[/yellow]")

    if sync_result.get("status") != "completed":
        raise typer.Exit(1)
217
+
218
+
219
@app.command("status")
def status(
    dataset: str = typer.Argument(..., help="Dataset name or ID"),
):
    """Show sync state for all connectors linked to a dataset."""
    import httpx

    try:
        resp = httpx.get(
            _api_url(f"/syncs/state/{dataset}"),
            headers=_api_headers(),
            timeout=15,
        )
        resp.raise_for_status()
        payload = resp.json()
    except httpx.HTTPStatusError as e:
        console.print(f"[red]{e.response.text}[/red]")
        raise typer.Exit(1)

    sync_states = payload.get("sync_states", [])
    dataset_version = payload.get("current_version", 1)

    # Nothing linked yet — point the user at the other sync entry points.
    if not sync_states:
        console.print("[dim]No connectors linked to this dataset.[/dim]")
        console.print("[dim]Use the dashboard or API to sync to a vector database.[/dim]")
        return

    console.print(f"[bold]Dataset version:[/bold] v{dataset_version}")
    console.print()

    table = Table(title="Connected Destinations")
    for heading, kwargs in [
        ("Connector", {"style": "bold"}),
        ("Type", {}),
        ("Status", {}),
        ("Synced Version", {}),
        ("Vectors", {}),
        ("Auto-sync", {}),
        ("Last Synced", {}),
    ]:
        table.add_column(heading, **kwargs)

    for entry in sync_states:
        # Render the sync status with a colour cue per known state.
        state = entry.get("sync_status", "unknown")
        if state == "in_sync":
            rendered = "[green]✓ In sync[/green]"
        elif state == "behind":
            rendered = f"[yellow]⚠ {entry.get('versions_behind', 0)} behind[/yellow]"
        elif state == "never_synced":
            rendered = "[dim]Never synced[/dim]"
        else:
            rendered = f"[dim]{state}[/dim]"

        auto_flag = "[green]on[/green]" if entry.get("auto_sync_enabled") else "[dim]off[/dim]"

        # Trim ISO timestamps to "YYYY-MM-DD HH:MM" for display.
        synced_at = entry.get("last_synced_at", "Never")
        if synced_at and synced_at != "Never":
            synced_at = synced_at[:16].replace("T", " ")

        table.add_row(
            entry.get("connector_name", "?"),
            entry.get("connector_type", "?"),
            rendered,
            f"v{entry.get('last_synced_version', 0)}",
            f"{(entry.get('vectors_in_destination') or 0):,}",
            auto_flag,
            synced_at,
        )

    console.print(table)
@@ -4,7 +4,7 @@ import typer
4
4
  from rich.console import Console
5
5
 
6
6
  from . import __version__
7
- from .commands import config_cmd, datasets_cmd, versions_cmd, data_cmd
7
+ from .commands import config_cmd, datasets_cmd, versions_cmd, data_cmd, imports_cmd, sync_cmd
8
8
 
9
9
  app = typer.Typer(
10
10
  name="dcp",
@@ -17,6 +17,8 @@ console = Console()
17
17
  app.add_typer(config_cmd.app, name="config", help="Manage CLI configuration")
18
18
  app.add_typer(datasets_cmd.app, name="datasets", help="Manage datasets")
19
19
  app.add_typer(data_cmd.app, name="data", help="Pull/push data (alias)")
20
+ app.add_typer(imports_cmd.app, name="imports", help="Import vectors from external databases")
21
+ app.add_typer(sync_cmd.app, name="sync", help="Sync datasets to vector databases")
20
22
 
21
23
  # Register top-level versioning commands
22
24
  app.command("log")(versions_cmd.log)
@@ -1,12 +1,11 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: decompressed-cli
3
- Version: 0.1.0
3
+ Version: 0.1.2
4
4
  Summary: CLI for Decompressed - Git-like version control for vector datasets
5
5
  Author-email: Decompressed <support@decompressed.io>
6
- License: MIT
6
+ License-Expression: MIT
7
7
  Classifier: Development Status :: 3 - Alpha
8
8
  Classifier: Intended Audience :: Developers
9
- Classifier: License :: OSI Approved :: MIT License
10
9
  Classifier: Programming Language :: Python :: 3
11
10
  Classifier: Programming Language :: Python :: 3.9
12
11
  Classifier: Programming Language :: Python :: 3.10
@@ -13,4 +13,6 @@ src/decompressed_cli/commands/__init__.py
13
13
  src/decompressed_cli/commands/config_cmd.py
14
14
  src/decompressed_cli/commands/data_cmd.py
15
15
  src/decompressed_cli/commands/datasets_cmd.py
16
+ src/decompressed_cli/commands/imports_cmd.py
17
+ src/decompressed_cli/commands/sync_cmd.py
16
18
  src/decompressed_cli/commands/versions_cmd.py