claude-code-workflow 6.3.2 → 6.3.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/CLAUDE.md +9 -1
- package/.claude/commands/workflow/lite-plan.md +1 -1
- package/.claude/workflows/cli-tools-usage.md +515 -516
- package/ccw/dist/cli.d.ts.map +1 -1
- package/ccw/dist/cli.js +6 -1
- package/ccw/dist/cli.js.map +1 -1
- package/ccw/dist/commands/cli.d.ts +1 -1
- package/ccw/dist/commands/cli.d.ts.map +1 -1
- package/ccw/dist/commands/cli.js +71 -7
- package/ccw/dist/commands/cli.js.map +1 -1
- package/ccw/dist/tools/cli-executor.d.ts.map +1 -1
- package/ccw/dist/tools/cli-executor.js +19 -7
- package/ccw/dist/tools/cli-executor.js.map +1 -1
- package/ccw/dist/tools/cli-history-store.d.ts +33 -0
- package/ccw/dist/tools/cli-history-store.d.ts.map +1 -1
- package/ccw/dist/tools/cli-history-store.js +89 -5
- package/ccw/dist/tools/cli-history-store.js.map +1 -1
- package/ccw/src/cli.ts +263 -258
- package/ccw/src/commands/cli.ts +967 -884
- package/ccw/src/tools/cli-executor.ts +20 -7
- package/ccw/src/tools/cli-history-store.ts +125 -5
- package/codex-lens/src/codexlens/__pycache__/config.cpython-313.pyc +0 -0
- package/codex-lens/src/codexlens/config.py +3 -0
- package/codex-lens/src/codexlens/search/__pycache__/chain_search.cpython-313.pyc +0 -0
- package/codex-lens/src/codexlens/search/__pycache__/hybrid_search.cpython-313.pyc +0 -0
- package/codex-lens/src/codexlens/search/__pycache__/ranking.cpython-313.pyc +0 -0
- package/codex-lens/src/codexlens/search/chain_search.py +71 -1
- package/codex-lens/src/codexlens/search/ranking.py +274 -274
- package/codex-lens/src/codexlens/semantic/__pycache__/chunker.cpython-313.pyc +0 -0
- package/codex-lens/src/codexlens/storage/__pycache__/dir_index.cpython-313.pyc +0 -0
- package/codex-lens/src/codexlens/storage/__pycache__/global_index.cpython-313.pyc +0 -0
- package/codex-lens/src/codexlens/storage/__pycache__/index_tree.cpython-313.pyc +0 -0
- package/codex-lens/src/codexlens/storage/dir_index.py +1888 -1850
- package/codex-lens/src/codexlens/storage/global_index.py +365 -0
- package/codex-lens/src/codexlens/storage/index_tree.py +83 -10
- package/package.json +1 -1
|
@@ -0,0 +1,365 @@
|
|
|
1
|
+
"""Global cross-directory symbol index for fast lookups.
|
|
2
|
+
|
|
3
|
+
Stores symbols for an entire project in a single SQLite database so symbol search
|
|
4
|
+
does not require traversing every directory _index.db.
|
|
5
|
+
|
|
6
|
+
This index is updated incrementally during file indexing (delete+insert per file)
|
|
7
|
+
to avoid expensive batch rebuilds.
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from __future__ import annotations
|
|
11
|
+
|
|
12
|
+
import logging
|
|
13
|
+
import sqlite3
|
|
14
|
+
import threading
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from typing import List, Optional, Tuple
|
|
17
|
+
|
|
18
|
+
from codexlens.entities import Symbol
|
|
19
|
+
from codexlens.errors import StorageError
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class GlobalSymbolIndex:
    """Project-wide symbol index with incremental (per-file) updates.

    Stores symbols for an entire project in a single SQLite database so symbol
    search does not require traversing every per-directory ``_index.db``.  The
    index is updated incrementally during file indexing (delete + insert per
    file) to avoid expensive batch rebuilds.

    All public methods are serialized through an ``RLock`` and the connection
    is created with ``check_same_thread=False``, so one instance may be shared
    across threads of a single process.
    """

    SCHEMA_VERSION = 1
    DEFAULT_DB_NAME = "_global_symbols.db"

    # Single source of truth for the symbol upsert, shared by add_symbol()
    # and update_file_symbols().  NOTE: SQLite treats NULLs as distinct in
    # UNIQUE constraints, so rows with NULL start/end lines are not
    # deduplicated by the ON CONFLICT clause.
    _UPSERT_SQL = """
        INSERT INTO global_symbols(
            project_id, symbol_name, symbol_kind,
            file_path, start_line, end_line, index_path
        )
        VALUES(?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(
            project_id, symbol_name, symbol_kind,
            file_path, start_line, end_line
        )
        DO UPDATE SET
            index_path=excluded.index_path
    """

    def __init__(self, db_path: str | Path, project_id: int) -> None:
        """Bind the index to a database file and a registry project id.

        Args:
            db_path: Location of the SQLite file (created lazily).
            project_id: Registry id used to scope all rows and queries.
        """
        self.db_path = Path(db_path).resolve()
        self.project_id = int(project_id)
        self._lock = threading.RLock()
        self._conn: Optional[sqlite3.Connection] = None
        self.logger = logging.getLogger(__name__)

    def initialize(self) -> None:
        """Create database and schema if not exists.

        Raises:
            StorageError: If the on-disk schema version is newer than this
                code supports (forward-incompatible database).
        """
        with self._lock:
            self.db_path.parent.mkdir(parents=True, exist_ok=True)
            conn = self._get_connection()

            current_version = self._get_schema_version(conn)
            if current_version > self.SCHEMA_VERSION:
                raise StorageError(
                    f"Database schema version {current_version} is newer than "
                    f"supported version {self.SCHEMA_VERSION}. "
                    f"Please update the application or use a compatible database.",
                    db_path=str(self.db_path),
                    operation="initialize",
                    details={
                        "current_version": current_version,
                        "supported_version": self.SCHEMA_VERSION,
                    },
                )

            if current_version == 0:
                # Fresh database: create tables and stamp the version.
                self._create_schema(conn)
                self._set_schema_version(conn, self.SCHEMA_VERSION)
            elif current_version < self.SCHEMA_VERSION:
                self._apply_migrations(conn, current_version)
                self._set_schema_version(conn, self.SCHEMA_VERSION)

            conn.commit()

    def close(self) -> None:
        """Close database connection (best-effort; errors are swallowed)."""
        with self._lock:
            if self._conn is not None:
                try:
                    self._conn.close()
                except Exception:
                    # Deliberate: closing is cleanup, never a failure path.
                    pass
                finally:
                    self._conn = None

    def __enter__(self) -> "GlobalSymbolIndex":
        self.initialize()
        return self

    def __exit__(self, exc_type: object, exc: object, tb: object) -> None:
        self.close()

    def add_symbol(self, symbol: Symbol, file_path: str | Path, index_path: str | Path) -> None:
        """Insert a single symbol (idempotent) for incremental updates.

        Args:
            symbol: Symbol to record; only ``name``, ``kind`` and ``range``
                (a (start_line, end_line) pair) are read.
            file_path: Source file the symbol belongs to.
            index_path: Per-directory ``_index.db`` holding the full entry.

        Raises:
            StorageError: If the underlying SQLite write fails.
        """
        file_path_str = str(Path(file_path).resolve())
        index_path_str = str(Path(index_path).resolve())

        with self._lock:
            conn = self._get_connection()
            try:
                conn.execute(
                    self._UPSERT_SQL,
                    (
                        self.project_id,
                        symbol.name,
                        symbol.kind,
                        file_path_str,
                        symbol.range[0],
                        symbol.range[1],
                        index_path_str,
                    ),
                )
                conn.commit()
            except sqlite3.DatabaseError as exc:
                conn.rollback()
                raise StorageError(
                    f"Failed to add symbol {symbol.name}: {exc}",
                    db_path=str(self.db_path),
                    operation="add_symbol",
                ) from exc

    def update_file_symbols(
        self,
        file_path: str | Path,
        symbols: List[Symbol],
        index_path: str | Path | None = None,
    ) -> None:
        """Replace all symbols for a file atomically (delete + insert).

        When ``index_path`` is omitted, the previously stored index path for
        the file is reused; inserting symbols for a file with no prior rows
        then fails, since there is nothing to reuse.

        Raises:
            StorageError: If ``index_path`` cannot be determined while
                ``symbols`` is non-empty, or if the SQLite write fails.
        """
        file_path_str = str(Path(file_path).resolve())

        index_path_str: Optional[str]
        if index_path is not None:
            index_path_str = str(Path(index_path).resolve())
        else:
            index_path_str = self._get_existing_index_path(file_path_str)

        # BUGFIX: validate *before* opening the transaction.  Previously this
        # check ran after BEGIN + DELETE, and the resulting StorageError (not
        # a sqlite3.DatabaseError) escaped without a rollback, leaving an
        # open transaction in which the file's symbols were already deleted.
        if symbols and not index_path_str:
            raise StorageError(
                "index_path is required when inserting symbols for a new file",
                db_path=str(self.db_path),
                operation="update_file_symbols",
                details={"file_path": file_path_str},
            )

        with self._lock:
            conn = self._get_connection()
            try:
                # Explicit transaction so delete + insert are atomic.
                conn.execute("BEGIN")
                conn.execute(
                    "DELETE FROM global_symbols WHERE project_id=? AND file_path=?",
                    (self.project_id, file_path_str),
                )

                if symbols:
                    rows = [
                        (
                            self.project_id,
                            s.name,
                            s.kind,
                            file_path_str,
                            s.range[0],
                            s.range[1],
                            index_path_str,
                        )
                        for s in symbols
                    ]
                    conn.executemany(self._UPSERT_SQL, rows)

                conn.commit()
            except sqlite3.DatabaseError as exc:
                conn.rollback()
                raise StorageError(
                    f"Failed to update symbols for {file_path_str}: {exc}",
                    db_path=str(self.db_path),
                    operation="update_file_symbols",
                ) from exc

    def delete_file_symbols(self, file_path: str | Path) -> int:
        """Remove all symbols for a file. Returns number of rows deleted.

        Raises:
            StorageError: If the SQLite delete fails.
        """
        file_path_str = str(Path(file_path).resolve())
        with self._lock:
            conn = self._get_connection()
            try:
                cur = conn.execute(
                    "DELETE FROM global_symbols WHERE project_id=? AND file_path=?",
                    (self.project_id, file_path_str),
                )
                conn.commit()
                # rowcount can be -1 for some statements; clamp to 0.
                return int(cur.rowcount or 0)
            except sqlite3.DatabaseError as exc:
                conn.rollback()
                raise StorageError(
                    f"Failed to delete symbols for {file_path_str}: {exc}",
                    db_path=str(self.db_path),
                    operation="delete_file_symbols",
                ) from exc

    def search(
        self,
        name: str,
        kind: Optional[str] = None,
        limit: int = 50,
        prefix_mode: bool = True,
    ) -> List[Symbol]:
        """Search symbols and return full Symbol objects.

        Args:
            name: Name fragment; matched with LIKE, so any ``%``/``_`` in it
                act as wildcards.
            kind: Optional exact symbol kind filter.
            limit: Maximum number of rows returned.
            prefix_mode: True for ``name%`` matching, False for ``%name%``.
        """
        pattern = f"{name}%" if prefix_mode else f"%{name}%"

        # Build the query once instead of duplicating it per kind-filter branch.
        sql = (
            "SELECT symbol_name, symbol_kind, file_path, start_line, end_line"
            " FROM global_symbols"
            " WHERE project_id=? AND symbol_name LIKE ?"
        )
        params: list = [self.project_id, pattern]
        if kind:
            sql += " AND symbol_kind=?"
            params.append(kind)
        sql += " ORDER BY symbol_name LIMIT ?"
        params.append(limit)

        with self._lock:
            conn = self._get_connection()
            rows = conn.execute(sql, params).fetchall()

        return [
            Symbol(
                name=row["symbol_name"],
                kind=row["symbol_kind"],
                range=(row["start_line"], row["end_line"]),
                file=row["file_path"],
            )
            for row in rows
        ]

    def search_symbols(
        self,
        name: str,
        kind: Optional[str] = None,
        limit: int = 50,
        prefix_mode: bool = True,
    ) -> List[Tuple[str, Tuple[int, int]]]:
        """Search symbols and return only (file_path, (start_line, end_line))."""
        symbols = self.search(name=name, kind=kind, limit=limit, prefix_mode=prefix_mode)
        return [(s.file or "", s.range) for s in symbols]

    def _get_existing_index_path(self, file_path_str: str) -> Optional[str]:
        """Return the stored index_path for a file, or None if unknown."""
        with self._lock:
            conn = self._get_connection()
            row = conn.execute(
                """
                SELECT index_path
                FROM global_symbols
                WHERE project_id=? AND file_path=?
                LIMIT 1
                """,
                (self.project_id, file_path_str),
            ).fetchone()
            return str(row["index_path"]) if row else None

    def _get_schema_version(self, conn: sqlite3.Connection) -> int:
        """Read PRAGMA user_version; 0 means an uninitialized database."""
        try:
            row = conn.execute("PRAGMA user_version").fetchone()
            return int(row[0]) if row else 0
        except Exception:
            return 0

    def _set_schema_version(self, conn: sqlite3.Connection, version: int) -> None:
        # PRAGMA does not accept bound parameters; int() keeps this safe.
        conn.execute(f"PRAGMA user_version = {int(version)}")

    def _apply_migrations(self, conn: sqlite3.Connection, from_version: int) -> None:
        """Placeholder for future schema migrations (none exist for v1)."""
        _ = (conn, from_version)
        return

    def _get_connection(self) -> sqlite3.Connection:
        """Lazily create the shared connection with WAL + tuning pragmas."""
        if self._conn is None:
            # check_same_thread=False: access is serialized by self._lock.
            self._conn = sqlite3.connect(str(self.db_path), check_same_thread=False)
            self._conn.row_factory = sqlite3.Row
            self._conn.execute("PRAGMA journal_mode=WAL")
            self._conn.execute("PRAGMA synchronous=NORMAL")
            self._conn.execute("PRAGMA foreign_keys=ON")
            # Large mmap window; SQLite clamps it to what the OS allows.
            self._conn.execute("PRAGMA mmap_size=30000000000")
        return self._conn

    def _create_schema(self, conn: sqlite3.Connection) -> None:
        """Create the global_symbols table and its query indexes.

        Raises:
            StorageError: If any DDL statement fails.
        """
        try:
            conn.execute(
                """
                CREATE TABLE IF NOT EXISTS global_symbols (
                    id INTEGER PRIMARY KEY,
                    project_id INTEGER NOT NULL,
                    symbol_name TEXT NOT NULL,
                    symbol_kind TEXT NOT NULL,
                    file_path TEXT NOT NULL,
                    start_line INTEGER,
                    end_line INTEGER,
                    index_path TEXT NOT NULL,
                    UNIQUE(
                        project_id, symbol_name, symbol_kind,
                        file_path, start_line, end_line
                    )
                )
                """
            )

            # Required by optimization spec.
            conn.execute(
                """
                CREATE INDEX IF NOT EXISTS idx_global_symbols_name_kind
                ON global_symbols(symbol_name, symbol_kind)
                """
            )
            # Used by common queries (project-scoped name lookups).
            conn.execute(
                """
                CREATE INDEX IF NOT EXISTS idx_global_symbols_project_name_kind
                ON global_symbols(project_id, symbol_name, symbol_kind)
                """
            )
            conn.execute(
                """
                CREATE INDEX IF NOT EXISTS idx_global_symbols_project_file
                ON global_symbols(project_id, file_path)
                """
            )
            conn.execute(
                """
                CREATE INDEX IF NOT EXISTS idx_global_symbols_project_index_path
                ON global_symbols(project_id, index_path)
                """
            )
        except sqlite3.DatabaseError as exc:
            raise StorageError(
                f"Failed to initialize global symbol schema: {exc}",
                db_path=str(self.db_path),
                operation="_create_schema",
            ) from exc
|
|
@@ -17,6 +17,7 @@ from typing import Dict, List, Optional, Set
|
|
|
17
17
|
from codexlens.config import Config
|
|
18
18
|
from codexlens.parsers.factory import ParserFactory
|
|
19
19
|
from codexlens.storage.dir_index import DirIndexStore
|
|
20
|
+
from codexlens.storage.global_index import GlobalSymbolIndex
|
|
20
21
|
from codexlens.storage.path_mapper import PathMapper
|
|
21
22
|
from codexlens.storage.registry import ProjectInfo, RegistryStore
|
|
22
23
|
|
|
@@ -141,6 +142,12 @@ class IndexTreeBuilder:
|
|
|
141
142
|
# Register project
|
|
142
143
|
index_root = self.mapper.source_to_index_dir(source_root)
|
|
143
144
|
project_info = self.registry.register_project(source_root, index_root)
|
|
145
|
+
global_index_db_path = index_root / GlobalSymbolIndex.DEFAULT_DB_NAME
|
|
146
|
+
|
|
147
|
+
global_index: GlobalSymbolIndex | None = None
|
|
148
|
+
if self.config.global_symbol_index_enabled:
|
|
149
|
+
global_index = GlobalSymbolIndex(global_index_db_path, project_id=project_info.id)
|
|
150
|
+
global_index.initialize()
|
|
144
151
|
|
|
145
152
|
# Report progress: discovering files (5%)
|
|
146
153
|
print("Discovering files...", flush=True)
|
|
@@ -150,6 +157,8 @@ class IndexTreeBuilder:
|
|
|
150
157
|
|
|
151
158
|
if not dirs_by_depth:
|
|
152
159
|
self.logger.warning("No indexable directories found in %s", source_root)
|
|
160
|
+
if global_index is not None:
|
|
161
|
+
global_index.close()
|
|
153
162
|
return BuildResult(
|
|
154
163
|
project_id=project_info.id,
|
|
155
164
|
source_root=source_root,
|
|
@@ -181,7 +190,13 @@ class IndexTreeBuilder:
|
|
|
181
190
|
self.logger.info("Building %d directories at depth %d", len(dirs), depth)
|
|
182
191
|
|
|
183
192
|
# Build directories at this level in parallel
|
|
184
|
-
results = self._build_level_parallel(
|
|
193
|
+
results = self._build_level_parallel(
|
|
194
|
+
dirs,
|
|
195
|
+
languages,
|
|
196
|
+
workers,
|
|
197
|
+
project_id=project_info.id,
|
|
198
|
+
global_index_db_path=global_index_db_path,
|
|
199
|
+
)
|
|
185
200
|
all_results.extend(results)
|
|
186
201
|
|
|
187
202
|
# Process results
|
|
@@ -230,7 +245,7 @@ class IndexTreeBuilder:
|
|
|
230
245
|
if result.error:
|
|
231
246
|
continue
|
|
232
247
|
try:
|
|
233
|
-
with DirIndexStore(result.index_path) as store:
|
|
248
|
+
with DirIndexStore(result.index_path, config=self.config, global_index=global_index) as store:
|
|
234
249
|
deleted_count = store.cleanup_deleted_files(result.source_path)
|
|
235
250
|
total_deleted += deleted_count
|
|
236
251
|
if deleted_count > 0:
|
|
@@ -257,6 +272,9 @@ class IndexTreeBuilder:
|
|
|
257
272
|
len(all_errors),
|
|
258
273
|
)
|
|
259
274
|
|
|
275
|
+
if global_index is not None:
|
|
276
|
+
global_index.close()
|
|
277
|
+
|
|
260
278
|
return BuildResult(
|
|
261
279
|
project_id=project_info.id,
|
|
262
280
|
source_root=source_root,
|
|
@@ -315,7 +333,18 @@ class IndexTreeBuilder:
|
|
|
315
333
|
"""
|
|
316
334
|
source_path = source_path.resolve()
|
|
317
335
|
self.logger.info("Rebuilding directory %s", source_path)
|
|
318
|
-
|
|
336
|
+
project_root = self.mapper.get_project_root(source_path)
|
|
337
|
+
project_info = self.registry.get_project(project_root)
|
|
338
|
+
if not project_info:
|
|
339
|
+
raise ValueError(f"Directory not indexed: {source_path}")
|
|
340
|
+
|
|
341
|
+
global_index_db_path = project_info.index_root / GlobalSymbolIndex.DEFAULT_DB_NAME
|
|
342
|
+
return self._build_single_dir(
|
|
343
|
+
source_path,
|
|
344
|
+
languages=None,
|
|
345
|
+
project_id=project_info.id,
|
|
346
|
+
global_index_db_path=global_index_db_path,
|
|
347
|
+
)
|
|
319
348
|
|
|
320
349
|
# === Internal Methods ===
|
|
321
350
|
|
|
@@ -396,7 +425,13 @@ class IndexTreeBuilder:
|
|
|
396
425
|
return len(source_files) > 0
|
|
397
426
|
|
|
398
427
|
def _build_level_parallel(
|
|
399
|
-
self,
|
|
428
|
+
self,
|
|
429
|
+
dirs: List[Path],
|
|
430
|
+
languages: List[str],
|
|
431
|
+
workers: int,
|
|
432
|
+
*,
|
|
433
|
+
project_id: int,
|
|
434
|
+
global_index_db_path: Path,
|
|
400
435
|
) -> List[DirBuildResult]:
|
|
401
436
|
"""Build multiple directories in parallel.
|
|
402
437
|
|
|
@@ -419,7 +454,12 @@ class IndexTreeBuilder:
|
|
|
419
454
|
|
|
420
455
|
# For single directory, avoid overhead of process pool
|
|
421
456
|
if len(dirs) == 1:
|
|
422
|
-
result = self._build_single_dir(
|
|
457
|
+
result = self._build_single_dir(
|
|
458
|
+
dirs[0],
|
|
459
|
+
languages,
|
|
460
|
+
project_id=project_id,
|
|
461
|
+
global_index_db_path=global_index_db_path,
|
|
462
|
+
)
|
|
423
463
|
return [result]
|
|
424
464
|
|
|
425
465
|
# Prepare arguments for worker processes
|
|
@@ -427,6 +467,7 @@ class IndexTreeBuilder:
|
|
|
427
467
|
"data_dir": str(self.config.data_dir),
|
|
428
468
|
"supported_languages": self.config.supported_languages,
|
|
429
469
|
"parsing_rules": self.config.parsing_rules,
|
|
470
|
+
"global_symbol_index_enabled": self.config.global_symbol_index_enabled,
|
|
430
471
|
}
|
|
431
472
|
|
|
432
473
|
worker_args = [
|
|
@@ -435,6 +476,8 @@ class IndexTreeBuilder:
|
|
|
435
476
|
self.mapper.source_to_index_db(dir_path),
|
|
436
477
|
languages,
|
|
437
478
|
config_dict,
|
|
479
|
+
int(project_id),
|
|
480
|
+
str(global_index_db_path),
|
|
438
481
|
)
|
|
439
482
|
for dir_path in dirs
|
|
440
483
|
]
|
|
@@ -467,7 +510,12 @@ class IndexTreeBuilder:
|
|
|
467
510
|
return results
|
|
468
511
|
|
|
469
512
|
def _build_single_dir(
|
|
470
|
-
self,
|
|
513
|
+
self,
|
|
514
|
+
dir_path: Path,
|
|
515
|
+
languages: List[str] = None,
|
|
516
|
+
*,
|
|
517
|
+
project_id: int,
|
|
518
|
+
global_index_db_path: Path,
|
|
471
519
|
) -> DirBuildResult:
|
|
472
520
|
"""Build index for a single directory.
|
|
473
521
|
|
|
@@ -484,12 +532,17 @@ class IndexTreeBuilder:
|
|
|
484
532
|
dir_path = dir_path.resolve()
|
|
485
533
|
index_db_path = self.mapper.source_to_index_db(dir_path)
|
|
486
534
|
|
|
535
|
+
global_index: GlobalSymbolIndex | None = None
|
|
487
536
|
try:
|
|
488
537
|
# Ensure index directory exists
|
|
489
538
|
index_db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
490
539
|
|
|
491
540
|
# Create directory index
|
|
492
|
-
|
|
541
|
+
if self.config.global_symbol_index_enabled:
|
|
542
|
+
global_index = GlobalSymbolIndex(global_index_db_path, project_id=project_id)
|
|
543
|
+
global_index.initialize()
|
|
544
|
+
|
|
545
|
+
store = DirIndexStore(index_db_path, config=self.config, global_index=global_index)
|
|
493
546
|
store.initialize()
|
|
494
547
|
|
|
495
548
|
# Get source files in this directory only
|
|
@@ -541,6 +594,8 @@ class IndexTreeBuilder:
|
|
|
541
594
|
]
|
|
542
595
|
|
|
543
596
|
store.close()
|
|
597
|
+
if global_index is not None:
|
|
598
|
+
global_index.close()
|
|
544
599
|
|
|
545
600
|
if skipped_count > 0:
|
|
546
601
|
self.logger.debug(
|
|
@@ -570,6 +625,11 @@ class IndexTreeBuilder:
|
|
|
570
625
|
|
|
571
626
|
except Exception as exc:
|
|
572
627
|
self.logger.error("Failed to build directory %s: %s", dir_path, exc)
|
|
628
|
+
if global_index is not None:
|
|
629
|
+
try:
|
|
630
|
+
global_index.close()
|
|
631
|
+
except Exception:
|
|
632
|
+
pass
|
|
573
633
|
return DirBuildResult(
|
|
574
634
|
source_path=dir_path,
|
|
575
635
|
index_path=index_db_path,
|
|
@@ -676,28 +736,34 @@ def _build_dir_worker(args: tuple) -> DirBuildResult:
|
|
|
676
736
|
Reconstructs necessary objects from serializable arguments.
|
|
677
737
|
|
|
678
738
|
Args:
|
|
679
|
-
args: Tuple of (dir_path, index_db_path, languages, config_dict)
|
|
739
|
+
args: Tuple of (dir_path, index_db_path, languages, config_dict, project_id, global_index_db_path)
|
|
680
740
|
|
|
681
741
|
Returns:
|
|
682
742
|
DirBuildResult for the directory
|
|
683
743
|
"""
|
|
684
|
-
dir_path, index_db_path, languages, config_dict = args
|
|
744
|
+
dir_path, index_db_path, languages, config_dict, project_id, global_index_db_path = args
|
|
685
745
|
|
|
686
746
|
# Reconstruct config
|
|
687
747
|
config = Config(
|
|
688
748
|
data_dir=Path(config_dict["data_dir"]),
|
|
689
749
|
supported_languages=config_dict["supported_languages"],
|
|
690
750
|
parsing_rules=config_dict["parsing_rules"],
|
|
751
|
+
global_symbol_index_enabled=bool(config_dict.get("global_symbol_index_enabled", True)),
|
|
691
752
|
)
|
|
692
753
|
|
|
693
754
|
parser_factory = ParserFactory(config)
|
|
694
755
|
|
|
756
|
+
global_index: GlobalSymbolIndex | None = None
|
|
695
757
|
try:
|
|
696
758
|
# Ensure index directory exists
|
|
697
759
|
index_db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
698
760
|
|
|
699
761
|
# Create directory index
|
|
700
|
-
|
|
762
|
+
if config.global_symbol_index_enabled and global_index_db_path:
|
|
763
|
+
global_index = GlobalSymbolIndex(Path(global_index_db_path), project_id=int(project_id))
|
|
764
|
+
global_index.initialize()
|
|
765
|
+
|
|
766
|
+
store = DirIndexStore(index_db_path, config=config, global_index=global_index)
|
|
701
767
|
store.initialize()
|
|
702
768
|
|
|
703
769
|
files_count = 0
|
|
@@ -756,6 +822,8 @@ def _build_dir_worker(args: tuple) -> DirBuildResult:
|
|
|
756
822
|
]
|
|
757
823
|
|
|
758
824
|
store.close()
|
|
825
|
+
if global_index is not None:
|
|
826
|
+
global_index.close()
|
|
759
827
|
|
|
760
828
|
return DirBuildResult(
|
|
761
829
|
source_path=dir_path,
|
|
@@ -766,6 +834,11 @@ def _build_dir_worker(args: tuple) -> DirBuildResult:
|
|
|
766
834
|
)
|
|
767
835
|
|
|
768
836
|
except Exception as exc:
|
|
837
|
+
if global_index is not None:
|
|
838
|
+
try:
|
|
839
|
+
global_index.close()
|
|
840
|
+
except Exception:
|
|
841
|
+
pass
|
|
769
842
|
return DirBuildResult(
|
|
770
843
|
source_path=dir_path,
|
|
771
844
|
index_path=index_db_path,
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "claude-code-workflow",
|
|
3
|
-
"version": "6.3.
|
|
3
|
+
"version": "6.3.4",
|
|
4
4
|
"description": "JSON-driven multi-agent development framework with intelligent CLI orchestration (Gemini/Qwen/Codex), context-first architecture, and automated workflow execution",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "ccw/src/index.js",
|