tarang-4.4.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tarang/__init__.py +23 -0
- tarang/cli.py +1168 -0
- tarang/client/__init__.py +19 -0
- tarang/client/api_client.py +701 -0
- tarang/client/auth.py +178 -0
- tarang/context/__init__.py +41 -0
- tarang/context/bm25.py +218 -0
- tarang/context/chunker.py +984 -0
- tarang/context/graph.py +464 -0
- tarang/context/indexer.py +514 -0
- tarang/context/retriever.py +270 -0
- tarang/context/skeleton.py +282 -0
- tarang/context_collector.py +449 -0
- tarang/executor/__init__.py +6 -0
- tarang/executor/diff_apply.py +246 -0
- tarang/executor/linter.py +184 -0
- tarang/stream.py +1346 -0
- tarang/ui/__init__.py +7 -0
- tarang/ui/console.py +407 -0
- tarang/ui/diff_viewer.py +146 -0
- tarang/ui/formatter.py +1151 -0
- tarang/ui/keyboard.py +197 -0
- tarang/ws/__init__.py +14 -0
- tarang/ws/client.py +464 -0
- tarang/ws/executor.py +638 -0
- tarang/ws/handlers.py +590 -0
- tarang-4.4.0.dist-info/METADATA +102 -0
- tarang-4.4.0.dist-info/RECORD +31 -0
- tarang-4.4.0.dist-info/WHEEL +5 -0
- tarang-4.4.0.dist-info/entry_points.txt +2 -0
- tarang-4.4.0.dist-info/top_level.txt +1 -0
tarang/client/api_client.py

@@ -0,0 +1,701 @@
"""
Tarang API Client - HTTP client for the Orchestrator backend.

Handles communication with the hosted Tarang backend service.
Implements the thin-client architecture where:
- CLI: Sends context, executes returned instructions locally
- Backend: Reasoning/planning, returns instructions
"""
from __future__ import annotations

import json
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, AsyncIterator, Dict, List, Optional

import httpx
from pydantic import BaseModel

class SearchReplace(BaseModel):
    """Search and replace instruction."""
    search: str
    replace: str


class EditInstruction(BaseModel):
    """Edit instruction from backend.

    Supports three modes:
    - content: Full file write (create/overwrite)
    - search_replace: Find and replace text
    - diff: Apply unified diff patch
    """
    file: str
    diff: Optional[str] = None
    content: Optional[str] = None
    search_replace: Optional[SearchReplace] = None
    # Legacy fields for backwards compatibility
    search: Optional[str] = None
    replace: Optional[str] = None
    description: str = ""

    def get_search(self) -> Optional[str]:
        """Get search text from either format."""
        if self.search_replace:
            return self.search_replace.search
        return self.search

    def get_replace(self) -> Optional[str]:
        """Get replace text from either format."""
        if self.search_replace:
            return self.search_replace.replace
        return self.replace
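For reference, a minimal sketch (not part of the package) of how a local executor might dispatch on the three edit modes, using only the fields defined above; the package's real diff handling lives in tarang/executor/diff_apply.py, whose API is not shown in this file:

    from pathlib import Path

    def apply_edit_sketch(edit: EditInstruction, project_root: Path) -> None:
        """Illustrative only: dispatch on the three edit modes defined above."""
        target = project_root / edit.file
        if edit.content is not None:
            # Mode 1: full file write (create/overwrite)
            target.parent.mkdir(parents=True, exist_ok=True)
            target.write_text(edit.content, encoding="utf-8")
        elif edit.get_search() is not None:
            # Mode 2: search/replace, accepting either the nested or the legacy flat fields
            original = target.read_text(encoding="utf-8")
            target.write_text(
                original.replace(edit.get_search(), edit.get_replace() or "", 1),
                encoding="utf-8",
            )
        elif edit.diff is not None:
            # Mode 3: unified diff; routed through the executor module in the real package
            raise NotImplementedError("diff mode is handled by tarang.executor.diff_apply")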

class CommandInstruction(BaseModel):
    """Shell command instruction from backend."""
    command: str
    working_dir: Optional[str] = None
    description: str = ""
    require_confirmation: bool = False
    timeout: int = 60


# Alias for backwards compatibility
ShellCommand = CommandInstruction


class TarangResponse(BaseModel):
    """Response from Tarang backend.

    Response types:
    - message: Text response, no execution needed
    - edits: File edit instructions for CLI to execute
    - command: Shell command instructions for CLI to execute
    - error: Error occurred during processing
    """
    session_id: str
    type: str = "message"  # message, edits, command, error
    message: str = ""
    edits: List[EditInstruction] = []
    commands: List[CommandInstruction] = []
    command: Optional[str] = None  # Legacy single command field
    thought_process: Optional[str] = None
    error: Optional[str] = None
    recoverable: bool = True

    # Metadata
    model_used: Optional[str] = None
    tokens_used: int = 0
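As a rough illustration (not part of the package), a thin client would typically branch on the `type` field:

    def describe_response_sketch(resp: TarangResponse) -> str:
        """Illustrative only: what the CLI would do next for each response type."""
        if resp.type == "error":
            return f"error (recoverable={resp.recoverable}): {resp.error}"
        if resp.type == "edits":
            return f"apply {len(resp.edits)} edit(s) locally"
        if resp.type == "command":
            # Newer responses carry a list in `commands`; `command` is the legacy single field
            count = len(resp.commands) if resp.commands else (1 if resp.command else 0)
            return f"run {count} command(s) locally"
        return resp.message  # plain "message": nothing to execute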

@dataclass
class LocalContext:
    """Local context to send to backend.

    Contains project information for the backend to reason about.
    """
    project_root: str
    skeleton: Dict[str, Any] = field(default_factory=dict)
    file_contents: Dict[str, str] = field(default_factory=dict)
    active_files: List[Dict[str, str]] = field(default_factory=list)
    git_status: Optional[str] = None
    history: List[Dict[str, str]] = field(default_factory=list)

    @property
    def cwd(self) -> str:
        """Alias for project_root (for backend compatibility)."""
        return self.project_root

    def to_dict(self) -> Dict[str, Any]:
        return {
            "cwd": self.project_root,
            "skeleton": self.skeleton,
            "file_contents": self.file_contents,
            "history": self.history,
        }

    def add_file(self, file_path: str, content: str) -> None:
        """Add a file's content to the context."""
        self.file_contents[file_path] = content
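A small usage sketch (paths and messages are placeholders). Note that to_dict() only serializes cwd, skeleton, file_contents, and history, so active_files and git_status stay on the client side:

    ctx = LocalContext(project_root="/path/to/project")   # placeholder path
    ctx.add_file("src/main.py", "print('hello')\n")
    ctx.history.append({"role": "user", "content": "add a --verbose flag"})
    payload_context = ctx.to_dict()
    # -> {"cwd": "/path/to/project", "skeleton": {}, "file_contents": {...}, "history": [...]}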

class TarangAPIClient:
    """
    Thin client for Tarang Orchestrator API.

    Handles authentication, requests, and streaming responses.
    """

    DEFAULT_BASE_URL = "https://tarang-backend-intl-web-app-production.up.railway.app"

    def __init__(self, base_url: Optional[str] = None):
        self.base_url = base_url or self.DEFAULT_BASE_URL
        self.token: Optional[str] = None
        self.openrouter_key: Optional[str] = None

    def _build_headers(self) -> Dict[str, str]:
        """Build request headers."""
        headers = {
            "Content-Type": "application/json",
            "X-Tarang-Protocol-Version": "3.0",  # Updated protocol version
        }
        if self.token:
            headers["Authorization"] = f"Bearer {self.token}"
        if self.openrouter_key:
            headers["X-OpenRouter-Key"] = self.openrouter_key
        return headers

    async def execute(
        self,
        instruction: str,
        context: LocalContext,
        session_id: Optional[str] = None,
        file_content: Optional[str] = None,
        file_path: Optional[str] = None,
    ) -> TarangResponse:
        """
        Send instruction to Orchestrator and get response.

        Args:
            instruction: User's instruction/request
            context: Local project context (skeleton, file_contents)
            session_id: Optional session ID for continuity
            file_content: Optional focused file content
            file_path: Optional file path being edited

        Returns:
            TarangResponse with edits, commands, or messages
        """
        payload = {
            "message": instruction,
            "context": context.to_dict(),
            "session_id": session_id,
            "file_content": file_content,
            "file_path": file_path,
        }

        async with httpx.AsyncClient(timeout=300) as client:
            try:
                response = await client.post(
                    f"{self.base_url}/v2/execute",
                    json=payload,
                    headers=self._build_headers(),
                )
                response.raise_for_status()
                return TarangResponse.model_validate(response.json())

            except httpx.ConnectError:
                return TarangResponse(
                    session_id=session_id or "",
                    type="error",
                    error="Cannot reach Tarang server. Check your internet connection.",
                    recoverable=False,
                )
            except httpx.HTTPStatusError as e:
                error_detail = ""
                try:
                    error_data = e.response.json()
                    error_detail = error_data.get("detail", "")
                except Exception:
                    pass
                return TarangResponse(
                    session_id=session_id or "",
                    type="error",
                    error=f"Server error: {e.response.status_code}. {error_detail}",
                    recoverable=True,
                )
            except Exception as e:
                return TarangResponse(
                    session_id=session_id or "",
                    type="error",
                    error=str(e),
                    recoverable=True,
                )
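A hedged end-to-end sketch of the request path (instruction and path are placeholders; how the bearer token is obtained is outside this file, presumably via tarang/client/auth.py):

    import asyncio

    async def run_once() -> None:
        client = TarangAPIClient()          # or TarangAPIClient("https://your-backend")
        client.token = "..."                # placeholder bearer token
        ctx = LocalContext(project_root="/path/to/project")
        resp = await client.execute("rename the config loader", ctx)
        if resp.type == "error":
            # Network and HTTP failures are returned as type="error", not raised
            print("failed:", resp.error, "recoverable:", resp.recoverable)
        else:
            print(resp.type, resp.message)

    asyncio.run(run_once())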

    async def execute_stream(
        self,
        instruction: str,
        context: LocalContext,
        session_id: Optional[str] = None,
    ) -> AsyncIterator[TarangResponse]:
        """
        Stream responses from Orchestrator (SSE).

        For long-running tasks, the backend streams intermediate results.
        """
        payload = {
            "message": instruction,
            "context": context.to_dict(),
            "session_id": session_id,
        }

        async with httpx.AsyncClient(timeout=300) as client:
            async with client.stream(
                "POST",
                f"{self.base_url}/v2/execute/stream",
                json=payload,
                headers=self._build_headers(),
            ) as response:
                response.raise_for_status()
                async for line in response.aiter_lines():
                    if line.startswith("data: "):
                        data = json.loads(line[6:])
                        yield TarangResponse.model_validate(data)
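Consuming the SSE stream might look like this (illustrative only, placeholder instruction):

    async def stream_once(client: TarangAPIClient, ctx: LocalContext) -> None:
        # Each SSE "data: ..." line is parsed into a full TarangResponse
        async for partial in client.execute_stream("add unit tests", ctx):
            print(partial.type, partial.message)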

    async def report_feedback(
        self,
        session_id: str,
        success: bool,
        applied_edits: Optional[List[str]] = None,
        error_message: Optional[str] = None,
        lint_output: Optional[str] = None,
    ) -> TarangResponse:
        """
        Report execution results back to Orchestrator.

        Args:
            session_id: Current session ID
            success: Whether changes applied successfully
            applied_edits: List of files that were edited
            error_message: Error message if failed
            lint_output: Lint output if there were errors
        """
        payload = {
            "session_id": session_id,
            "success": success,
            "error_message": error_message,
            "lint_output": lint_output,
        }

        async with httpx.AsyncClient(timeout=60) as client:
            response = await client.post(
                f"{self.base_url}/v2/feedback",
                json=payload,
                headers=self._build_headers(),
            )
            response.raise_for_status()
            return TarangResponse.model_validate(response.json())
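The loop the signatures suggest: apply the returned edits locally, then report the outcome so the backend can continue or repair. A sketch under that assumption (note that applied_edits is accepted by the method but is not included in the payload it actually sends):

    async def apply_and_report_sketch(client: TarangAPIClient, resp: TarangResponse) -> None:
        try:
            for edit in resp.edits:
                ...  # apply each edit locally (see the EditInstruction sketch above)
            await client.report_feedback(
                resp.session_id,
                success=True,
                applied_edits=[e.file for e in resp.edits],
            )
        except Exception as exc:
            await client.report_feedback(resp.session_id, success=False, error_message=str(exc))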

    async def quick_ask(self, query: str) -> str:
        """
        Quick question without code generation.

        Args:
            query: Simple question

        Returns:
            Answer string
        """
        payload = {"query": query}

        async with httpx.AsyncClient(timeout=60) as client:
            response = await client.post(
                f"{self.base_url}/v2/quick",
                json=payload,
                headers=self._build_headers(),
            )
            response.raise_for_status()
            data = response.json()
            return data.get("answer", "")

    # ==========================================
    # SESSION TRACKING
    # ==========================================

    async def create_session(
        self,
        instruction: str,
        project_name: Optional[str] = None,
        project_path: Optional[str] = None,
    ) -> Optional[str]:
        """
        Create a new session in the backend.

        Args:
            instruction: User's instruction
            project_name: Name of the project
            project_path: Path to the project

        Returns:
            Session ID if successful, None otherwise
        """
        payload = {
            "instruction": instruction,
            "project_name": project_name,
            "project_path": project_path,
        }

        try:
            async with httpx.AsyncClient(timeout=30) as client:
                response = await client.post(
                    f"{self.base_url}/v2/sessions",
                    json=payload,
                    headers=self._build_headers(),
                )
                response.raise_for_status()
                data = response.json()
                return data.get("id")
        except Exception:
            # Session tracking is optional, don't fail the request
            return None

    async def update_session(
        self,
        session_id: str,
        status: Optional[str] = None,
        current_thought: Optional[str] = None,
        error_message: Optional[str] = None,
        applied_files: Optional[List[str]] = None,
    ) -> bool:
        """
        Update session status.

        Args:
            session_id: Session ID to update
            status: New status (thinking, executing, done, failed, etc.)
            current_thought: Current thought/action
            error_message: Error message if failed
            applied_files: List of files that were modified

        Returns:
            True if successful
        """
        payload = {}
        if status:
            payload["status"] = status
        if current_thought:
            payload["current_thought"] = current_thought
        if error_message:
            payload["error_message"] = error_message
        if applied_files:
            payload["applied_files"] = applied_files

        try:
            async with httpx.AsyncClient(timeout=30) as client:
                response = await client.patch(
                    f"{self.base_url}/v2/sessions/{session_id}",
                    json=payload,
                    headers=self._build_headers(),
                )
                response.raise_for_status()
                return True
        except Exception:
            return False

    async def add_session_event(
        self,
        session_id: str,
        event_type: str,
        content: str,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> bool:
        """
        Add an event to a session.

        Args:
            session_id: Session ID
            event_type: Type of event (thought, action, result, error)
            content: Event content
            metadata: Optional metadata

        Returns:
            True if successful
        """
        payload = {
            "type": event_type,
            "content": content,
            "metadata": metadata or {},
        }

        try:
            async with httpx.AsyncClient(timeout=30) as client:
                response = await client.post(
                    f"{self.base_url}/v2/sessions/{session_id}/events",
                    json=payload,
                    headers=self._build_headers(),
                )
                response.raise_for_status()
                return True
        except Exception:
            return False

    async def update_session_usage(
        self,
        session_id: str,
        input_tokens: int,
        output_tokens: int,
        cached_tokens: int = 0,
    ) -> bool:
        """
        Update token usage for a session.

        Args:
            session_id: Session ID
            input_tokens: Number of input tokens
            output_tokens: Number of output tokens
            cached_tokens: Number of cached tokens

        Returns:
            True if successful
        """
        payload = {
            "input_tokens": input_tokens,
            "output_tokens": output_tokens,
            "cached_tokens": cached_tokens,
        }

        try:
            async with httpx.AsyncClient(timeout=30) as client:
                response = await client.post(
                    f"{self.base_url}/v2/sessions/{session_id}/usage",
                    json=payload,
                    headers=self._build_headers(),
                )
                response.raise_for_status()
                return True
        except Exception:
            return False
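These session endpoints are best-effort: each helper swallows exceptions and returns None/False so tracking never blocks the main request. One possible lifecycle, purely illustrative (the ordering, instruction, and token counts are placeholders, not prescribed by this file):

    async def tracked_run_sketch(client: TarangAPIClient, ctx: LocalContext) -> None:
        instruction = "fix the login bug"   # placeholder instruction
        sid = await client.create_session(instruction, project_path=ctx.project_root)
        if sid:
            await client.update_session(sid, status="thinking")
            await client.add_session_event(sid, "thought", "inspecting the auth module")
        resp = await client.execute(instruction, ctx, session_id=sid)
        if sid:
            await client.update_session_usage(sid, input_tokens=1200, output_tokens=300)  # placeholders
            await client.update_session(sid, status="failed" if resp.type == "error" else "done")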

def collect_relevant_files(
    project_path: Path,
    instruction: str,
    skeleton: Dict[str, Any],
    max_files: int = 10,
    max_size: int = 50000,
) -> Dict[str, str]:
    """
    Collect relevant file contents based on instruction.

    This helps the backend have context for reasoning.

    Args:
        project_path: Project root path
        instruction: User instruction (used to find relevant files)
        skeleton: Project skeleton
        max_files: Maximum number of files to include
        max_size: Maximum total size in characters

    Returns:
        Dict of file_path -> content
    """
    file_contents = {}
    total_size = 0

    # Extract file paths mentioned in instruction
    mentioned_files = []
    instruction_lower = instruction.lower()

    def find_files_in_skeleton(node: Dict[str, Any], prefix: str = "") -> List[str]:
        """Recursively find all files in skeleton."""
        files = []
        for name, value in node.items():
            path = f"{prefix}/{name}".lstrip("/") if prefix else name
            if isinstance(value, dict):
                files.extend(find_files_in_skeleton(value, path))
            else:
                files.append(path)
        return files

    def parse_file_tree(file_tree: str) -> List[str]:
        """Parse ASCII file tree to extract file paths."""
        files = []
        path_stack = []

        for line in file_tree.split("\n"):
            if not line.strip():
                continue

            # Remove tree characters and get the name
            # Handles: "├── ", "└── ", "│   ", "    "
            clean_line = line
            for char in ["├", "└", "│", "─", " "]:
                clean_line = clean_line.replace(char, "")

            name = clean_line.strip()
            if not name:
                continue

            # Calculate depth based on indentation
            # Each level is typically 4 chars ("│   " or "    ")
            stripped = line.lstrip("│ ")
            indent = len(line) - len(stripped)
            depth = indent // 4

            # Adjust path stack
            while len(path_stack) > depth:
                path_stack.pop()

            if name.endswith("/"):
                # It's a directory
                path_stack.append(name.rstrip("/"))
            else:
                # It's a file
                full_path = "/".join(path_stack + [name]) if path_stack else name
                files.append(full_path)

        return files

    # Check if skeleton uses new format (file_tree string) or old format (nested dict)
    if "file_tree" in skeleton:
        all_files = parse_file_tree(skeleton.get("file_tree", ""))
    else:
        all_files = find_files_in_skeleton(skeleton)

    # Find files mentioned in instruction
    for file_path in all_files:
        file_name = Path(file_path).name.lower()
        if file_name in instruction_lower or file_path.lower() in instruction_lower:
            mentioned_files.append(file_path)

    # Also include key files
    key_files = [
        "package.json", "pyproject.toml", "requirements.txt",
        "tsconfig.json", "vite.config.ts", "next.config.js",
        "README.md", "src/App.tsx", "src/main.tsx", "src/index.ts",
        "app.py", "main.py", "__init__.py",
    ]

    for key_file in key_files:
        for file_path in all_files:
            if file_path.endswith(key_file):
                if file_path not in mentioned_files:
                    mentioned_files.append(file_path)

    # Read file contents
    for file_path in mentioned_files[:max_files]:
        full_path = project_path / file_path
        if full_path.exists() and full_path.is_file():
            try:
                content = full_path.read_text(encoding="utf-8", errors="replace")
                if len(content) + total_size <= max_size:
                    file_contents[file_path] = content
                    total_size += len(content)
            except Exception:
                pass

    return file_contents
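The heuristic is: files whose names appear in the instruction, plus a fixed list of conventional key files, capped by max_files and max_size. A usage sketch (the path and skeleton shape are placeholders; the real skeleton builder lives in tarang/context/skeleton.py, not shown here):

    project = Path("/path/to/project")                        # placeholder path
    skeleton = {"file_tree": "src/\n    main.py\nREADME.md"}  # or the older nested-dict format
    ctx = LocalContext(
        project_root=str(project),
        skeleton=skeleton,
        file_contents=collect_relevant_files(project, "update logging in main.py", skeleton),
    )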

class StreamingEvent:
    """Event received from WebSocket stream."""

    def __init__(self, event_type: str, data: Dict[str, Any]):
        self.type = event_type
        self.data = data

    def __repr__(self) -> str:
        return f"StreamingEvent(type={self.type}, data={self.data})"


class TarangStreamingClient:
    """WebSocket client for real-time streaming from Tarang backend."""

    def __init__(self, base_url: Optional[str] = None):
        """Initialize the streaming client.

        Args:
            base_url: Backend base URL (will convert http to ws)
        """
        http_url = base_url or TarangAPIClient.DEFAULT_BASE_URL
        # Convert http(s) to ws(s)
        if http_url.startswith("https://"):
            self.ws_url = http_url.replace("https://", "wss://")
        elif http_url.startswith("http://"):
            self.ws_url = http_url.replace("http://", "ws://")
        else:
            self.ws_url = f"wss://{http_url}"

        self.token: Optional[str] = None
        self.openrouter_key: Optional[str] = None

    async def stream_execute(
        self,
        instruction: str,
        context: LocalContext,
        session_id: Optional[str] = None,
    ) -> AsyncIterator[StreamingEvent]:
        """
        Execute instruction with streaming events.

        Yields events as they arrive from the backend.

        Args:
            instruction: User instruction
            context: Project context
            session_id: Optional session ID

        Yields:
            StreamingEvent for each event from backend
        """
        import websockets

        # Build WebSocket URL with auth
        ws_endpoint = f"{self.ws_url}/v2/ws/execute"
        params = []
        if self.token:
            params.append(f"token={self.token}")
        if self.openrouter_key:
            params.append(f"openrouter_key={self.openrouter_key}")
        if params:
            ws_endpoint = f"{ws_endpoint}?{'&'.join(params)}"

        try:
            async with websockets.connect(
                ws_endpoint,
                ping_interval=30,
                ping_timeout=10,
                close_timeout=5,
            ) as websocket:
                # Wait for connected event
                response = await websocket.recv()
                event = json.loads(response)
                yield StreamingEvent(event.get("type", "unknown"), event.get("data", {}))

                # Send execute request
                execute_request = {
                    "type": "execute",
                    "message": instruction,
                    "context": {
                        "skeleton": context.skeleton,
                        "cwd": context.cwd,
                        "history": context.history,
                        "file_contents": context.file_contents,
                    },
                }
                await websocket.send(json.dumps(execute_request))

                # Stream events
                while True:
                    try:
                        response = await websocket.recv()
                        event = json.loads(response)
                        event_type = event.get("type", "unknown")
                        event_data = event.get("data", {})

                        yield StreamingEvent(event_type, event_data)

                        # Check for terminal events
                        if event_type in ("complete", "error"):
                            break

                    except websockets.exceptions.ConnectionClosed:
                        break

        except Exception as e:
            yield StreamingEvent("error", {"message": str(e)})

    async def send_approval(
        self,
        websocket,
        approved: bool,
    ) -> None:
        """Send approval response for an edit."""
        import websockets

        await websocket.send(json.dumps({
            "type": "approve",
            "approved": approved,
        }))
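A consumption sketch for the WebSocket path (illustrative only; it assumes the `websockets` package is installed, and event type names other than the terminal "complete"/"error" pair are whatever the backend emits and are not enumerated in this file):

    async def watch_stream_sketch(ctx: LocalContext) -> None:
        streamer = TarangStreamingClient()
        async for ev in streamer.stream_execute("refactor the parser", ctx):
            print(ev)  # StreamingEvent(type=..., data=...)
            if ev.type in ("complete", "error"):
                break  # stream_execute also exits its own loop on these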