foodforthought-cli 0.2.7__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ate/__init__.py +6 -0
- ate/__main__.py +16 -0
- ate/auth/__init__.py +1 -0
- ate/auth/device_flow.py +141 -0
- ate/auth/token_store.py +96 -0
- ate/behaviors/__init__.py +100 -0
- ate/behaviors/approach.py +399 -0
- ate/behaviors/common.py +686 -0
- ate/behaviors/tree.py +454 -0
- ate/cli.py +855 -3995
- ate/client.py +90 -0
- ate/commands/__init__.py +168 -0
- ate/commands/auth.py +389 -0
- ate/commands/bridge.py +448 -0
- ate/commands/data.py +185 -0
- ate/commands/deps.py +111 -0
- ate/commands/generate.py +384 -0
- ate/commands/memory.py +907 -0
- ate/commands/parts.py +166 -0
- ate/commands/primitive.py +399 -0
- ate/commands/protocol.py +288 -0
- ate/commands/recording.py +524 -0
- ate/commands/repo.py +154 -0
- ate/commands/simulation.py +291 -0
- ate/commands/skill.py +303 -0
- ate/commands/skills.py +487 -0
- ate/commands/team.py +147 -0
- ate/commands/workflow.py +271 -0
- ate/detection/__init__.py +38 -0
- ate/detection/base.py +142 -0
- ate/detection/color_detector.py +399 -0
- ate/detection/trash_detector.py +322 -0
- ate/drivers/__init__.py +39 -0
- ate/drivers/ble_transport.py +405 -0
- ate/drivers/mechdog.py +942 -0
- ate/drivers/wifi_camera.py +477 -0
- ate/interfaces/__init__.py +187 -0
- ate/interfaces/base.py +273 -0
- ate/interfaces/body.py +267 -0
- ate/interfaces/detection.py +282 -0
- ate/interfaces/locomotion.py +422 -0
- ate/interfaces/manipulation.py +408 -0
- ate/interfaces/navigation.py +389 -0
- ate/interfaces/perception.py +362 -0
- ate/interfaces/sensors.py +247 -0
- ate/interfaces/types.py +371 -0
- ate/llm_proxy.py +239 -0
- ate/mcp_server.py +387 -0
- ate/memory/__init__.py +35 -0
- ate/memory/cloud.py +244 -0
- ate/memory/context.py +269 -0
- ate/memory/embeddings.py +184 -0
- ate/memory/export.py +26 -0
- ate/memory/merge.py +146 -0
- ate/memory/migrate/__init__.py +34 -0
- ate/memory/migrate/base.py +89 -0
- ate/memory/migrate/pipeline.py +189 -0
- ate/memory/migrate/sources/__init__.py +13 -0
- ate/memory/migrate/sources/chroma.py +170 -0
- ate/memory/migrate/sources/pinecone.py +120 -0
- ate/memory/migrate/sources/qdrant.py +110 -0
- ate/memory/migrate/sources/weaviate.py +160 -0
- ate/memory/reranker.py +353 -0
- ate/memory/search.py +26 -0
- ate/memory/store.py +548 -0
- ate/recording/__init__.py +83 -0
- ate/recording/demonstration.py +378 -0
- ate/recording/session.py +415 -0
- ate/recording/upload.py +304 -0
- ate/recording/visual.py +416 -0
- ate/recording/wrapper.py +95 -0
- ate/robot/__init__.py +221 -0
- ate/robot/agentic_servo.py +856 -0
- ate/robot/behaviors.py +493 -0
- ate/robot/ble_capture.py +1000 -0
- ate/robot/ble_enumerate.py +506 -0
- ate/robot/calibration.py +668 -0
- ate/robot/calibration_state.py +388 -0
- ate/robot/commands.py +3735 -0
- ate/robot/direction_calibration.py +554 -0
- ate/robot/discovery.py +441 -0
- ate/robot/introspection.py +330 -0
- ate/robot/llm_system_id.py +654 -0
- ate/robot/locomotion_calibration.py +508 -0
- ate/robot/manager.py +270 -0
- ate/robot/marker_generator.py +611 -0
- ate/robot/perception.py +502 -0
- ate/robot/primitives.py +614 -0
- ate/robot/profiles.py +281 -0
- ate/robot/registry.py +322 -0
- ate/robot/servo_mapper.py +1153 -0
- ate/robot/skill_upload.py +675 -0
- ate/robot/target_calibration.py +500 -0
- ate/robot/teach.py +515 -0
- ate/robot/types.py +242 -0
- ate/robot/visual_labeler.py +1048 -0
- ate/robot/visual_servo_loop.py +494 -0
- ate/robot/visual_servoing.py +570 -0
- ate/robot/visual_system_id.py +906 -0
- ate/transports/__init__.py +121 -0
- ate/transports/base.py +394 -0
- ate/transports/ble.py +405 -0
- ate/transports/hybrid.py +444 -0
- ate/transports/serial.py +345 -0
- ate/urdf/__init__.py +30 -0
- ate/urdf/capture.py +582 -0
- ate/urdf/cloud.py +491 -0
- ate/urdf/collision.py +271 -0
- ate/urdf/commands.py +708 -0
- ate/urdf/depth.py +360 -0
- ate/urdf/inertial.py +312 -0
- ate/urdf/kinematics.py +330 -0
- ate/urdf/lifting.py +415 -0
- ate/urdf/meshing.py +300 -0
- ate/urdf/models/__init__.py +110 -0
- ate/urdf/models/depth_anything.py +253 -0
- ate/urdf/models/sam2.py +324 -0
- ate/urdf/motion_analysis.py +396 -0
- ate/urdf/pipeline.py +468 -0
- ate/urdf/scale.py +256 -0
- ate/urdf/scan_session.py +411 -0
- ate/urdf/segmentation.py +299 -0
- ate/urdf/synthesis.py +319 -0
- ate/urdf/topology.py +336 -0
- ate/urdf/validation.py +371 -0
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/METADATA +9 -1
- foodforthought_cli-0.3.0.dist-info/RECORD +166 -0
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/WHEEL +1 -1
- foodforthought_cli-0.2.7.dist-info/RECORD +0 -44
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/entry_points.txt +0 -0
- {foodforthought_cli-0.2.7.dist-info → foodforthought_cli-0.3.0.dist-info}/top_level.txt +0 -0
ate/commands/memory.py
ADDED
|
@@ -0,0 +1,907 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Memory management commands for FoodforThought CLI.
|
|
3
|
+
|
|
4
|
+
Commands:
|
|
5
|
+
- ate memory init - Create a new .mv2 memory file
|
|
6
|
+
- ate memory add - Add content (single text or bulk JSONL)
|
|
7
|
+
- ate memory search - Search memory by query
|
|
8
|
+
- ate memory info - Show memory store statistics
|
|
9
|
+
- ate memory export - Export all content to JSONL
|
|
10
|
+
- ate memory merge - Merge multiple .mv2 files
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import json
|
|
14
|
+
import os
|
|
15
|
+
import sys
|
|
16
|
+
from dataclasses import asdict
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
|
|
19
|
+
from ate.memory import MemoryStore, merge_memories, EmbeddingConfig, EmbeddingManager
|
|
20
|
+
from ate.memory.cloud import CloudClient, CloudAuthError, CloudError, CloudNotFoundError
|
|
21
|
+
from ate.auth.token_store import TokenStore
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
# ---------------------------------------------------------------------------
|
|
25
|
+
# Arg-parser registration
|
|
26
|
+
# ---------------------------------------------------------------------------
|
|
27
|
+
|
|
28
|
+
def register_parser(subparsers):
    """Register the `memory` command tree (.mv2 management) with argparse."""
    mem_parser = subparsers.add_parser("memory", help="Memory file management (.mv2)")
    mem_subs = mem_parser.add_subparsers(dest="memory_action", help="Memory action")

    def _with_format(parser, choices=("json",), help_text="Output format"):
        # Every subcommand exposes a trailing --format flag; most accept only "json".
        parser.add_argument("--format", dest="format", default=None,
                            choices=list(choices), help=help_text)

    # init: create a fresh store, optionally with an embedding backend.
    p_init = mem_subs.add_parser("init", help="Create a new .mv2 memory file")
    p_init.add_argument("path", help="Path for the new .mv2 file")
    p_init.add_argument("--embedding-provider", dest="embedding_provider", default=None,
                        choices=["openai", "cohere", "voyage", "ollama", "none"],
                        help="Embedding provider to use")
    p_init.add_argument("--embedding-model", dest="embedding_model", default=None,
                        help="Embedding model override")
    _with_format(p_init, help_text="Output format (default: human-readable)")

    # add: single --text entry or bulk --file JSONL.
    p_add = mem_subs.add_parser("add", help="Add content to a memory file")
    p_add.add_argument("path", help="Path to the .mv2 file")
    p_add.add_argument("--text", default=None, help="Text content to add")
    p_add.add_argument("--file", default=None, help="JSONL file for bulk add")
    p_add.add_argument("--tags", default=None, help="Comma-separated tags")
    p_add.add_argument("--title", default=None, help="Title for the entry")
    p_add.add_argument("--metadata", default=None, help="JSON metadata string")
    p_add.add_argument("--embed", action="store_true", default=False,
                       help="Force embedding even if not default")
    _with_format(p_add)

    # search: query an explicit .mv2 path.
    p_search = mem_subs.add_parser("search", help="Search memory")
    p_search.add_argument("path", help="Path to the .mv2 file")
    p_search.add_argument("query", help="Search query")
    p_search.add_argument("--top-k", type=int, default=5, help="Max results (default 5)")
    p_search.add_argument("--engine", dest="engine", default=None,
                          choices=["auto", "vec", "lex", "hybrid", "rerank"],
                          help="Search engine to use (default: auto)")
    p_search.add_argument("--rerank-provider", dest="rerank_provider", default=None,
                          choices=["anthropic", "openai", "google", "ollama"],
                          help="LLM provider for reranking")
    p_search.add_argument("--rerank-model", dest="rerank_model", default=None,
                          help="LLM model for reranking")
    _with_format(p_search)

    # info: store statistics.
    p_info = mem_subs.add_parser("info", help="Show memory store statistics")
    p_info.add_argument("path", help="Path to the .mv2 file")
    _with_format(p_info)

    # export: dump to JSONL (the one subcommand that also accepts --format jsonl).
    p_export = mem_subs.add_parser("export", help="Export memory content")
    p_export.add_argument("path", help="Path to the .mv2 file")
    p_export.add_argument("--output", required=True, help="Output JSONL file path")
    _with_format(p_export, choices=("json", "jsonl"))

    # merge: combine several stores.
    p_merge = mem_subs.add_parser("merge", help="Merge multiple .mv2 files")
    p_merge.add_argument("paths", nargs="+", help="Source .mv2 file paths")
    p_merge.add_argument("--output", required=True, help="Output .mv2 file path")
    p_merge.add_argument("--no-dedup", action="store_true", default=False,
                         help="Disable deduplication")
    _with_format(p_merge)

    # think: context-aware add against the active memory.
    p_think = mem_subs.add_parser("think", help="Add a thought to active memory")
    p_think.add_argument("text", help="Text content to add")
    p_think.add_argument("--memory", default=None, help="Override active memory")
    p_think.add_argument("--tags", default=None, help="Comma-separated tags")
    p_think.add_argument("--title", default=None, help="Title for the entry")
    p_think.add_argument("--metadata", default=None, help="JSON metadata string")
    p_think.add_argument("--embed", action="store_true", default=False,
                         help="Force embedding even if not default")
    _with_format(p_think)

    # recall: context-aware search against the active memory.
    p_recall = mem_subs.add_parser("recall", help="Search active memory")
    p_recall.add_argument("query", help="Search query")
    p_recall.add_argument("--memory", default=None, help="Override active memory")
    p_recall.add_argument("--top-k", type=int, default=5, help="Max results (default 5)")
    p_recall.add_argument("--engine", dest="engine", default=None,
                          choices=["auto", "vec", "lex", "hybrid", "rerank"],
                          help="Search engine to use (default: auto)")
    p_recall.add_argument("--all-trains", action="store_true", default=False,
                          help="Search across all trains in current memory")
    _with_format(p_recall)

    # train: list/switch trains of thought within the active memory.
    p_train = mem_subs.add_parser("train", help="Manage trains of thought")
    p_train.add_argument("name", nargs="?", help="Train name to switch to")
    p_train.add_argument("--delete", default=None, help="Delete a train")
    p_train.add_argument("--rename", nargs=2, metavar=("OLD", "NEW"),
                         help="Rename a train")
    _with_format(p_train)

    # status: show the current context.
    p_status = mem_subs.add_parser("status", help="Show current memory context")
    _with_format(p_status)

    # new: create a named memory.
    p_new = mem_subs.add_parser("new", help="Create a new memory")
    p_new.add_argument("name", help="Memory name")
    p_new.add_argument("--description", default="", help="Memory description")
    _with_format(p_new)

    # use: switch the active memory.
    p_use = mem_subs.add_parser("use", help="Switch to a different memory")
    p_use.add_argument("name", help="Memory name")
    _with_format(p_use)

    # list-memories: enumerate local memories.
    p_list_memories = mem_subs.add_parser("list-memories", help="List all local memories")
    _with_format(p_list_memories)

    # push (cloud): upload a store.
    p_push = mem_subs.add_parser("push", help="Upload .mv2 file to cloud")
    p_push.add_argument("path", help="Path to the .mv2 file to upload")
    p_push.add_argument("--project", required=True, help="Project identifier (e.g. kindly/memories)")
    p_push.add_argument("--name", default=None, help="Name override (defaults to filename)")
    _with_format(p_push)

    # pull (cloud): download a store.
    p_pull = mem_subs.add_parser("pull", help="Download .mv2 file from cloud")
    p_pull.add_argument("ref", help="Project/name reference (e.g. kindly/memories/test.mv2)")
    p_pull.add_argument("--output", required=True, help="Output file path")
    _with_format(p_pull)

    # list (cloud): enumerate remote memory files.
    p_list = mem_subs.add_parser("list", help="List memory files in a cloud project")
    p_list.add_argument("--project", required=True, help="Project identifier")
    _with_format(p_list)

    # delete (cloud): remove a remote memory file.
    p_delete = mem_subs.add_parser("delete", help="Delete a memory file from cloud")
    p_delete.add_argument("ref", help="Project/name reference (e.g. kindly/memories/old.mv2)")
    _with_format(p_delete)

    # providers: report detected embedding providers.
    p_providers = mem_subs.add_parser("providers", help="List detected embedding providers")
    _with_format(p_providers)
|
|
183
|
+
|
|
184
|
+
|
|
185
|
+
# ---------------------------------------------------------------------------
|
|
186
|
+
# Dispatcher
|
|
187
|
+
# ---------------------------------------------------------------------------
|
|
188
|
+
|
|
189
|
+
def handle(client, args):
    """Dispatch a parsed `ate memory ...` invocation to its sub-handler.

    `client` is accepted for interface parity with other command modules;
    the memory sub-handlers visible here take only `args`.
    """
    action = getattr(args, "memory_action", None)

    # Lazy thunks: only the selected handler name is ever resolved, matching
    # the original if/elif chain's evaluation behavior.
    dispatch = {
        "init": lambda: _handle_init(args),
        "add": lambda: _handle_add(args),
        "search": lambda: _handle_search(args),
        "info": lambda: _handle_info(args),
        "export": lambda: _handle_export(args),
        "merge": lambda: _handle_merge(args),
        "think": lambda: _handle_think(args),
        "recall": lambda: _handle_recall(args),
        "train": lambda: _handle_train(args),
        "status": lambda: _handle_status(args),
        "new": lambda: _handle_new(args),
        "use": lambda: _handle_use(args),
        "list-memories": lambda: _handle_list_memories(args),
        "push": lambda: _handle_push(args),
        "pull": lambda: _handle_pull(args),
        "list": lambda: _handle_list(args),
        "delete": lambda: _handle_delete(args),
        "providers": lambda: _handle_providers(args),
    }

    runner = dispatch.get(action)
    if runner is None:
        # Unknown or missing action: print usage and fail.
        print("Usage: ate memory {init|add|search|info|export|merge|think|recall|train|status|new|use|list-memories|push|pull|list|delete|providers}", file=sys.stderr)
        sys.exit(1)
    runner()
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
# ---------------------------------------------------------------------------
|
|
234
|
+
# Individual handlers
|
|
235
|
+
# ---------------------------------------------------------------------------
|
|
236
|
+
|
|
237
|
+
def _handle_init(args):
    """Create a new .mv2 memory file at args.path.

    Exits 2 if creation fails; prints a confirmation (or JSON) on success.
    """
    target = args.path

    # Build an embedding configuration only when a provider was requested.
    # Kept outside the try so EmbeddingConfig errors propagate unchanged.
    cfg = (
        EmbeddingConfig(provider=args.embedding_provider, model=args.embedding_model)
        if args.embedding_provider
        else None
    )

    try:
        MemoryStore.create(target, embedding_config=cfg).close()
    except Exception as exc:
        print(f"Error: Failed to create memory file: {exc}", file=sys.stderr)
        sys.exit(2)

    if args.format == "json":
        print(json.dumps({"path": target, "created": True}))
    else:
        print(f"✅ Created memory file: {target}")
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
def _handle_add(args):
    """Add a single --text entry or a bulk --file JSONL batch to a .mv2 file.

    Exits 1 on bad input or a missing file, 2 on any other failure.
    """
    # Input validation: need some content, and --text must be non-blank.
    if not args.text and not args.file:
        print("Error: Provide --text or --file", file=sys.stderr)
        sys.exit(1)
    if args.text is not None and args.text.strip() == "":
        print("Error: Text cannot be empty", file=sys.stderr)
        sys.exit(1)

    try:
        with MemoryStore.open(args.path) as store:
            if args.file:
                # Bulk mode: every non-blank line of the file is one JSON record.
                with open(args.file, "r", encoding="utf-8") as fh:
                    records = [json.loads(stripped)
                               for stripped in (raw.strip() for raw in fh)
                               if stripped]
                ids = store.add_batch(records)

                if args.format == "json":
                    print(json.dumps({"count": len(ids), "frame_ids": ids}))
                else:
                    print(f"✅ Added {len(ids)} entries from {args.file}")
            else:
                # Single-entry mode.
                new_id = store.add(
                    text=args.text,
                    tags=[t.strip() for t in args.tags.split(",")] if args.tags else None,
                    metadata=json.loads(args.metadata) if args.metadata else None,
                    title=args.title,
                    enable_embedding=args.embed or None,
                )

                if args.format == "json":
                    print(json.dumps({"frame_id": new_id}))
                else:
                    print(f"✅ Added entry {new_id}")
    except FileNotFoundError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
|
|
312
|
+
|
|
313
|
+
|
|
314
|
+
def _handle_search(args):
    """Query a .mv2 file and print matches in human or JSON form.

    Exits 1 on an empty query or missing file, 2 on any other failure.
    """
    query = args.query
    if not query.strip():
        print("Error: Query cannot be empty", file=sys.stderr)
        sys.exit(1)

    try:
        with MemoryStore.open(args.path) as store:
            hits = store.search(query, top_k=args.top_k, engine=args.engine)

            if args.format == "json":
                rows = [
                    {
                        "frame_id": h.frame_id,
                        "text": h.text,
                        "title": h.title,
                        "score": h.score,
                        "tags": h.tags,
                        "metadata": h.metadata,
                        "engine": h.engine,
                    }
                    for h in hits
                ]
                print(json.dumps({"query": query, "results": rows}))
            elif not hits:
                print("No results found.")
            else:
                print(f"🔍 {len(hits)} result(s) for \"{query}\":\n")
                for idx, h in enumerate(hits, 1):
                    suffix = f" — {h.title}" if h.title else ""
                    print(f" {idx}. [{h.score:.2f}] {h.text}{suffix}")
                    if h.tags:
                        print(f" Tags: {', '.join(h.tags)}")
    except FileNotFoundError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
def _handle_info(args):
    """Print statistics about a .mv2 store (frame count, size, indexes).

    Exits 1 if the file is missing, 2 on any other failure.
    """
    try:
        with MemoryStore.open(args.path) as store:
            stats = store.info()

            if args.format == "json":
                print(json.dumps({
                    "path": stats.path,
                    "frame_count": stats.frame_count,
                    "size_bytes": stats.size_bytes,
                    "has_lex_index": stats.has_lex_index,
                    "has_vec_index": stats.has_vec_index,
                    "has_time_index": stats.has_time_index,
                    # created_at may be absent on older store info objects.
                    "created_at": getattr(stats, "created_at", None),
                }))
            else:
                def flag(enabled):
                    return '✅' if enabled else '❌'

                print(f"📦 Memory: {stats.path}")
                print(f" Frames: {stats.frame_count}")
                print(f" Size: {stats.size_bytes / 1024:.1f} KB")
                print(f" Lex index: {flag(stats.has_lex_index)}")
                print(f" Vec index: {flag(stats.has_vec_index)}")
                print(f" Time index: {flag(stats.has_time_index)}")
    except FileNotFoundError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
def _handle_export(args):
    """Dump every entry of a .mv2 store to a JSONL file at --output.

    Exits 1 if the store file is missing, 2 on any other failure.
    """
    destination = args.output
    try:
        with MemoryStore.open(args.path) as store:
            exported = store.export_jsonl(destination)

            if args.format == "json":
                print(json.dumps({"output": destination, "count": exported}))
            else:
                print(f"✅ Exported {exported} entries to {destination}")
    except FileNotFoundError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
def _handle_merge(args):
    """Merge several .mv2 files into one output store.

    Deduplication is on unless --no-dedup was given. Exits 1 if a source
    file is missing, 2 on any other failure.
    """
    dedup_enabled = not args.no_dedup

    try:
        merged = merge_memories(args.paths, args.output, dedup=dedup_enabled)

        if args.format == "json":
            print(json.dumps({
                "output": merged.path,
                "frame_count": merged.frame_count,
                "size_bytes": merged.size_bytes,
                "dedup": dedup_enabled,
            }))
        else:
            print(f"✅ Merged {len(args.paths)} files → {merged.path}")
            print(f" Frames: {merged.frame_count}")
            print(f" Size: {merged.size_bytes / 1024:.1f} KB")
            if dedup_enabled:
                print(" Dedup: enabled")
    except FileNotFoundError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
|
|
445
|
+
|
|
446
|
+
|
|
447
|
+
# ---------------------------------------------------------------------------
|
|
448
|
+
# Context-aware handlers (think / recall / train / status / new / use / list-memories)
|
|
449
|
+
# ---------------------------------------------------------------------------
|
|
450
|
+
|
|
451
|
+
def _handle_think(args):
    """Append a thought to the active (context-resolved) memory.

    Uses MemoryStore.from_context_or_path, so --memory overrides the
    active context. Exits 1 on empty text, 2 on any failure.
    """
    if not args.text.strip():
        print("Error: Text cannot be empty", file=sys.stderr)
        sys.exit(1)

    override = getattr(args, 'memory', None)

    try:
        with MemoryStore.from_context_or_path(override) as store:
            frame_id = store.add(
                text=args.text,
                tags=[t.strip() for t in args.tags.split(",")] if args.tags else None,
                metadata=json.loads(args.metadata) if args.metadata else None,
                title=args.title,
                enable_embedding=args.embed or None,
            )

            if args.format == "json":
                # Include the active context so callers know where it landed.
                from ate.memory.context import ContextManager
                ctx = ContextManager.get_context()
                print(json.dumps({
                    "frame_id": frame_id,
                    "memory": ctx.active_memory,
                    "train": ctx.active_train
                }))
            else:
                print(f"💭 Stored thought (frame {frame_id})")
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
|
|
484
|
+
|
|
485
|
+
|
|
486
|
+
def _handle_recall(args):
    """Search the active (context-resolved) memory.

    Exits 1 on an empty query or the unimplemented --all-trains flag,
    2 on any other failure.
    """
    query = args.query
    if not query.strip():
        print("Error: Query cannot be empty", file=sys.stderr)
        sys.exit(1)

    # TODO: Implement --all-trains functionality
    if getattr(args, 'all_trains', False):
        print("Error: --all-trains not yet implemented", file=sys.stderr)
        sys.exit(1)

    override = getattr(args, 'memory', None)

    try:
        with MemoryStore.from_context_or_path(override) as store:
            hits = store.search(query, top_k=args.top_k, engine=args.engine)

            if args.format == "json":
                # Include the active context so callers know what was searched.
                from ate.memory.context import ContextManager
                ctx = ContextManager.get_context()
                rows = [
                    {
                        "frame_id": h.frame_id,
                        "text": h.text,
                        "title": h.title,
                        "score": h.score,
                        "tags": h.tags,
                        "metadata": h.metadata,
                        "engine": h.engine,
                    }
                    for h in hits
                ]
                print(json.dumps({
                    "query": query,
                    "memory": ctx.active_memory,
                    "train": ctx.active_train,
                    "results": rows,
                }))
            elif not hits:
                print("🔍 No results found.")
            else:
                print(f"🔍 {len(hits)} result(s) for \"{query}\":\n")
                for idx, h in enumerate(hits, 1):
                    suffix = f" — {h.title}" if h.title else ""
                    print(f" {idx}. [{h.score:.2f}] {h.text}{suffix}")
                    if h.tags:
                        print(f" Tags: {', '.join(h.tags)}")
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
|
|
541
|
+
|
|
542
|
+
|
|
543
|
+
def _handle_train(args):
    """List, switch, or (eventually) manage trains of thought.

    With no positional name, lists the trains of the active memory.
    With a name, switches the active train. --delete and --rename are
    recognized but not yet implemented and always exit 1.

    Fix: the --delete/--rename checks previously ran only AFTER the
    "no name given" early-return, so `ate memory train --delete X`
    silently listed trains instead of reporting the flag. Management
    flags are now validated first.
    """
    # Management flags take precedence over listing/switching so they are
    # never silently ignored.
    if args.delete:
        print("Error: --delete not yet implemented", file=sys.stderr)
        sys.exit(1)
    if args.rename:
        print("Error: --rename not yet implemented", file=sys.stderr)
        sys.exit(1)

    from ate.memory.context import ContextManager

    try:
        if not args.name:
            # No name: list trains of the active memory.
            ctx = ContextManager.get_context()
            trains = ContextManager.list_trains(ctx.active_memory)

            if args.format == "json":
                print(json.dumps({
                    "memory": ctx.active_memory,
                    "trains": trains,
                    "active": ctx.active_train
                }))
            else:
                print(f"🚂 Trains of thought in \"{ctx.active_memory}\":")
                for train in trains:
                    is_active = train == ctx.active_train
                    marker = " ← active" if is_active else ""
                    print(f" {'→' if is_active else ' '} {train}{marker}")
            return

        # Switch the active train within the current memory.
        ctx = ContextManager.get_context()
        new_ctx = ContextManager.set_context(ctx.active_memory, args.name)

        if args.format == "json":
            print(json.dumps({
                "memory": new_ctx.active_memory,
                "train": new_ctx.active_train,
                "path": new_ctx.path
            }))
        else:
            print(f"🚂 Switched to train \"{args.name}\"")

    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
|
|
592
|
+
|
|
593
|
+
|
|
594
|
+
def _handle_status(args):
    """Display the current memory context plus store statistics.

    Exits 2 on any failure. Visibility is hard-coded to "private" for now.
    """
    from ate.memory.context import ContextManager

    try:
        ctx = ContextManager.get_context()

        # Pull statistics from the store backing the active context.
        with MemoryStore.open(ctx.path) as store:
            stats = store.info()

            if args.format == "json":
                print(json.dumps({
                    "memory": ctx.active_memory,
                    "train": ctx.active_train,
                    "frames": stats.frame_count,
                    "size_bytes": stats.size_bytes,
                    "has_vec_index": stats.has_vec_index,
                    "has_lex_index": stats.has_lex_index,
                    "has_time_index": stats.has_time_index,
                    "path": ctx.path,
                    "visibility": "private"  # Default for now
                }))
            else:
                kb = stats.size_bytes / 1024
                engines = ('lex' if stats.has_lex_index else '') + \
                          (' + vec' if stats.has_vec_index else '')
                print(f"🧠 Memory: {ctx.active_memory} (private)")
                print(f"🚂 Train: {ctx.active_train}")
                print(f"📦 Frames: {stats.frame_count} ({kb:.1f} KB)")
                print(f"🔍 Search: {engines}")
                print(f"📁 Path: {ctx.path}")

    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
|
|
629
|
+
|
|
630
|
+
|
|
631
|
+
def _handle_new(args):
    """Create a new memory.

    Creates the memory via ContextManager.ensure_memory and, when a
    description was supplied, records it in the memory's memory.json.

    Exits with code 2 on any error.
    """
    from ate.memory.context import ContextManager

    try:
        path = ContextManager.ensure_memory(args.name)

        # Persist the optional description into the memory's metadata file.
        if args.description:
            meta_path = os.path.join(os.path.dirname(path), "memory.json")
            with open(meta_path, 'r') as fh:
                meta = json.load(fh)
            meta["description"] = args.description
            with open(meta_path, 'w') as fh:
                json.dump(meta, fh, indent=2)

        if args.format == "json":
            payload = {
                "name": args.name,
                "path": path,
                "description": args.description,
                "created": True
            }
            print(json.dumps(payload))
        else:
            print(f"✅ Created memory \"{args.name}\"")
            if args.description:
                print(f" Description: {args.description}")

    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
def _handle_use(args):
    """Switch to a different memory.

    Verifies the named memory exists locally, then activates it together
    with its default train. Exits 1 for an unknown memory, 2 for any
    other error.
    """
    from ate.memory.context import ContextManager

    try:
        # Verify memory exists — index metadata by name for the lookup.
        memories = ContextManager.list_memories()
        by_name = {m.name: m for m in memories}

        target = by_name.get(args.name)
        if target is None:
            print(f"Error: Memory '{args.name}' does not exist. Use 'ate memory list-memories' to see available memories.", file=sys.stderr)
            sys.exit(1)

        # Activate the memory with its default train.
        context = ContextManager.set_context(args.name, target.default_train)

        if args.format == "json":
            print(json.dumps({
                "memory": context.active_memory,
                "train": context.active_train,
                "path": context.path
            }))
        else:
            print(f"🧠 Switched to memory \"{args.name}\"")

    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
def _handle_list_memories(args):
    """List all local memories.

    Emits a JSON document when ``args.format == "json"``, otherwise a
    human-readable bullet list (with a hint when no memories exist).

    Exits with code 2 on any error.
    """
    from ate.memory.context import ContextManager

    try:
        memories = ContextManager.list_memories()

        if args.format == "json":
            entries = [
                {
                    "name": m.name,
                    "visibility": m.visibility,
                    "trains": m.trains,
                    "default_train": m.default_train,
                    "description": m.description,
                    "created_at": m.created_at,
                    "remote": m.remote
                }
                for m in memories
            ]
            print(json.dumps({"memories": entries}))
        elif not memories:
            print("No local memories found.")
            print("Create one with: ate memory new <name>")
        else:
            total = len(memories)
            print(f"🧠 {total} local memor{'y' if total == 1 else 'ies'}:")
            for memory in memories:
                n_trains = len(memory.trains)
                trains_str = f"{n_trains} train{'s' if n_trains != 1 else ''}"
                desc_str = f" — {memory.description}" if memory.description else ""
                print(f" • {memory.name} ({trains_str}){desc_str}")

    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
# ---------------------------------------------------------------------------
|
|
738
|
+
# Cloud handlers (push / pull / list / delete)
|
|
739
|
+
# ---------------------------------------------------------------------------
|
|
740
|
+
|
|
741
|
+
def _get_cloud_client(args=None):
    """Create a CloudClient with the token held in the local TokenStore.

    Exits with code 1 if no valid token is found.
    """
    credentials = TokenStore().load()
    if credentials is None:
        print("Error: Not authenticated. Run: ate device-login", file=sys.stderr)
        sys.exit(1)
    return CloudClient(token=credentials.access_token)
def _parse_ref(ref: str):
|
|
755
|
+
"""Parse a 'project/name' reference like 'kindly/memories/test.mv2'.
|
|
756
|
+
|
|
757
|
+
The last path component is the name; everything before is the project.
|
|
758
|
+
Returns (project, name).
|
|
759
|
+
"""
|
|
760
|
+
parts = ref.rsplit("/", 1)
|
|
761
|
+
if len(parts) != 2 or not parts[0] or not parts[1]:
|
|
762
|
+
print(f"Error: Invalid reference '{ref}'. Expected format: project/name", file=sys.stderr)
|
|
763
|
+
sys.exit(1)
|
|
764
|
+
return parts[0], parts[1]
|
|
765
|
+
|
|
766
|
+
|
|
767
|
+
def _handle_push(args):
    """Upload .mv2 file to cloud.

    Exits 1 on authentication failure, 2 on any other error.
    """
    client = _get_cloud_client(args)
    try:
        result = client.push(args.path, args.project, name=args.name)

        if args.format == "json":
            print(json.dumps(asdict(result)))
        else:
            print(f"✅ Pushed {result.name} to {result.project}")
            print(f" URL: {result.url}")
            print(f" Size: {result.size_bytes} bytes")
    except CloudAuthError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        # CloudError and unexpected failures were handled identically
        # (same message, same exit code), so a single arm suffices.
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
def _handle_pull(args):
    """Download .mv2 file from cloud.

    Exits 1 when the file is missing or authentication fails, 2 on any
    other error.
    """
    client = _get_cloud_client(args)
    project, name = _parse_ref(args.ref)
    try:
        result = client.pull(project, name, args.output)

        if args.format == "json":
            print(json.dumps(asdict(result)))
        else:
            print(f"✅ Pulled {result.name} from {result.project}")
            print(f" Saved: {result.path}")
            print(f" Size: {result.size_bytes} bytes")
    except (CloudNotFoundError, CloudAuthError) as e:
        # Both user-facing failures exit 1 with the same message shape.
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        # CloudError and unexpected failures share exit code 2.
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
def _handle_list(args):
    """List memory files in a cloud project.

    Exits 1 on authentication failure, 2 on any other error.
    """
    client = _get_cloud_client(args)
    try:
        items = client.list(args.project)

        if args.format == "json":
            print(json.dumps({
                "project": args.project,
                "items": [asdict(item) for item in items],
            }))
        elif not items:
            print(f"No memory files in project '{args.project}'.")
        else:
            print(f"📦 {len(items)} file(s) in {args.project}:\n")
            for item in items:
                size_kb = item.size_bytes / 1024
                print(f" • {item.name} ({size_kb:.1f} KB) {item.updated_at}")
    except CloudAuthError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        # CloudError and unexpected failures share exit code 2.
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
def _handle_delete(args):
    """Delete a memory file from cloud.

    Exits 1 when the file is missing or authentication fails, 2 on any
    other error.
    """
    client = _get_cloud_client(args)
    project, name = _parse_ref(args.ref)
    try:
        client.delete(project, name)

        if args.format == "json":
            print(json.dumps({"project": project, "name": name, "deleted": True}))
        else:
            print(f"✅ Deleted {name} from {project}")
    except (CloudNotFoundError, CloudAuthError) as e:
        # Both user-facing failures exit 1 with the same message shape.
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        # CloudError and unexpected failures share exit code 2.
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)
def _handle_providers(args):
    """List detected embedding providers.

    Shows each provider's availability — with model and source when
    available, or the reason when not — plus which provider is currently
    active. Exits with code 2 on any error.
    """
    try:
        providers = EmbeddingManager.available_providers()

        # Determine active provider; "none" means nothing is active.
        active_config = EmbeddingManager.detect()
        active = active_config.provider if active_config.provider != "none" else None

        if args.format == "json":
            print(json.dumps({
                "providers": providers,
                "active": active
            }))
        else:
            print("🤖 Embedding Providers:\n")
            for provider in providers:
                status = "✅" if provider["available"] else "❌"
                name = provider["name"]

                if provider["available"]:
                    model = provider.get("model", "default")
                    source = provider.get("source", "unknown")
                    print(f" {status} {name} ({model}) - {source}")
                else:
                    reason = provider.get("reason", "unknown")
                    print(f" {status} {name} - {reason}")

            if active:
                print(f"\n🎯 Active provider: {active}")
            else:
                # Fix: was an f-string with no placeholders (ruff F541).
                print("\n⚠️ No embedding providers available - using BM25 lexical search only")

    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(2)