claude-code-workflow 6.3.19 → 6.3.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. package/.claude/agents/issue-plan-agent.md +31 -2
  2. package/.claude/commands/issue/new.md +92 -2
  3. package/.claude/commands/issue/plan.md +3 -2
  4. package/.codex/prompts/issue-execute.md +5 -0
  5. package/ccw/dist/core/routes/litellm-api-routes.d.ts.map +1 -1
  6. package/ccw/dist/core/routes/litellm-api-routes.js +8 -0
  7. package/ccw/dist/core/routes/litellm-api-routes.js.map +1 -1
  8. package/ccw/dist/core/server.d.ts.map +1 -1
  9. package/ccw/dist/core/server.js +5 -0
  10. package/ccw/dist/core/server.js.map +1 -1
  11. package/ccw/dist/core/services/api-key-tester.d.ts +11 -0
  12. package/ccw/dist/core/services/api-key-tester.d.ts.map +1 -1
  13. package/ccw/dist/core/services/api-key-tester.js +30 -10
  14. package/ccw/dist/core/services/api-key-tester.js.map +1 -1
  15. package/ccw/dist/core/services/health-check-service.d.ts +6 -0
  16. package/ccw/dist/core/services/health-check-service.d.ts.map +1 -1
  17. package/ccw/dist/core/services/health-check-service.js +22 -0
  18. package/ccw/dist/core/services/health-check-service.js.map +1 -1
  19. package/ccw/src/core/routes/litellm-api-routes.ts +8 -0
  20. package/ccw/src/core/server.ts +6 -0
  21. package/ccw/src/core/services/api-key-tester.ts +33 -10
  22. package/ccw/src/core/services/health-check-service.ts +26 -0
  23. package/ccw/src/templates/dashboard-js/api.js +1 -1
  24. package/ccw/src/templates/dashboard-js/components/cli-status.js +7 -7
  25. package/ccw/src/templates/dashboard-js/components/hook-manager.js +2 -2
  26. package/ccw/src/templates/dashboard-js/components/index-manager.js +2 -2
  27. package/ccw/src/templates/dashboard-js/components/mcp-manager.js +16 -16
  28. package/ccw/src/templates/dashboard-js/components/storage-manager.js +2 -2
  29. package/ccw/src/templates/dashboard-js/components/task-queue-sidebar.js +1 -1
  30. package/ccw/src/templates/dashboard-js/i18n.js +10 -0
  31. package/ccw/src/templates/dashboard-js/views/cli-manager.js +1 -1
  32. package/ccw/src/templates/dashboard-js/views/codexlens-manager.js +12 -2
  33. package/ccw/src/templates/dashboard-js/views/memory.js +1 -1
  34. package/ccw/src/templates/dashboard-js/views/prompt-history.js +1 -1
  35. package/codex-lens/src/codexlens/__pycache__/config.cpython-312.pyc +0 -0
  36. package/codex-lens/src/codexlens/__pycache__/config.cpython-313.pyc +0 -0
  37. package/codex-lens/src/codexlens/__pycache__/env_config.cpython-312.pyc +0 -0
  38. package/codex-lens/src/codexlens/__pycache__/env_config.cpython-313.pyc +0 -0
  39. package/codex-lens/src/codexlens/cli/__pycache__/embedding_manager.cpython-312.pyc +0 -0
  40. package/codex-lens/src/codexlens/cli/__pycache__/embedding_manager.cpython-313.pyc +0 -0
  41. package/codex-lens/src/codexlens/cli/embedding_manager.py +13 -4
  42. package/codex-lens/src/codexlens/config.py +35 -0
  43. package/codex-lens/src/codexlens/env_config.py +6 -0
  44. package/codex-lens/src/codexlens/search/__pycache__/chain_search.cpython-312.pyc +0 -0
  45. package/codex-lens/src/codexlens/search/__pycache__/chain_search.cpython-313.pyc +0 -0
  46. package/codex-lens/src/codexlens/search/__pycache__/ranking.cpython-312.pyc +0 -0
  47. package/codex-lens/src/codexlens/search/__pycache__/ranking.cpython-313.pyc +0 -0
  48. package/codex-lens/src/codexlens/search/chain_search.py +10 -0
  49. package/codex-lens/src/codexlens/search/ranking.py +50 -0
  50. package/codex-lens/src/codexlens/semantic/__pycache__/chunker.cpython-313.pyc +0 -0
  51. package/codex-lens/src/codexlens/semantic/chunker.py +328 -23
  52. package/codex-lens/src/codexlens/semantic/reranker/__pycache__/__init__.cpython-312.pyc +0 -0
  53. package/codex-lens/src/codexlens/semantic/reranker/__pycache__/api_reranker.cpython-312.pyc +0 -0
  54. package/codex-lens/src/codexlens/semantic/reranker/__pycache__/base.cpython-312.pyc +0 -0
  55. package/codex-lens/src/codexlens/semantic/reranker/__pycache__/factory.cpython-312.pyc +0 -0
  56. package/codex-lens/src/codexlens/semantic/reranker/__pycache__/fastembed_reranker.cpython-312.pyc +0 -0
  57. package/codex-lens/src/codexlens/semantic/reranker/__pycache__/legacy.cpython-312.pyc +0 -0
  58. package/codex-lens/src/codexlens/semantic/reranker/__pycache__/onnx_reranker.cpython-312.pyc +0 -0
  59. package/codex-lens/src/codexlens/storage/__pycache__/index_tree.cpython-312.pyc +0 -0
  60. package/codex-lens/src/codexlens/storage/__pycache__/index_tree.cpython-313.pyc +0 -0
  61. package/codex-lens/src/codexlens/storage/index_tree.py +46 -2
  62. package/package.json +1 -1
@@ -1109,6 +1109,16 @@ var ENV_VAR_GROUPS = {
1109
1109
  'CODEXLENS_CASCADE_COARSE_K': { labelKey: 'codexlens.envField.coarseK', type: 'number', placeholder: '100', default: '100', settingsPath: 'cascade.coarse_k', min: 10, max: 500 },
1110
1110
  'CODEXLENS_CASCADE_FINE_K': { labelKey: 'codexlens.envField.fineK', type: 'number', placeholder: '10', default: '10', settingsPath: 'cascade.fine_k', min: 1, max: 100 }
1111
1111
  }
1112
+ },
1113
+ chunking: {
1114
+ labelKey: 'codexlens.envGroup.chunking',
1115
+ icon: 'scissors',
1116
+ vars: {
1117
+ 'CHUNK_STRIP_COMMENTS': { labelKey: 'codexlens.envField.stripComments', type: 'select', options: ['true', 'false'], default: 'true', settingsPath: 'chunking.strip_comments' },
1118
+ 'CHUNK_STRIP_DOCSTRINGS': { labelKey: 'codexlens.envField.stripDocstrings', type: 'select', options: ['true', 'false'], default: 'true', settingsPath: 'chunking.strip_docstrings' },
1119
+ 'RERANKER_TEST_FILE_PENALTY': { labelKey: 'codexlens.envField.testFilePenalty', type: 'number', placeholder: '0.0', default: '0.0', settingsPath: 'reranker.test_file_penalty', min: 0, max: 1, step: 0.1 },
1120
+ 'RERANKER_DOCSTRING_WEIGHT': { labelKey: 'codexlens.envField.docstringWeight', type: 'number', placeholder: '1.0', default: '1.0', settingsPath: 'reranker.docstring_weight', min: 0, max: 1, step: 0.1 }
1121
+ }
1112
1122
  }
1113
1123
  };
1114
1124
 
@@ -3603,7 +3613,7 @@ async function initCodexLensIndex(indexType, embeddingModel, embeddingBackend, m
3603
3613
  // Install semantic dependencies first
3604
3614
  showRefreshToast(t('codexlens.installingDeps') || 'Installing semantic dependencies...', 'info');
3605
3615
  try {
3606
- var installResponse = await fetch('/api/codexlens/semantic/install', { method: 'POST' });
3616
+ var installResponse = await csrfFetch('/api/codexlens/semantic/install', { method: 'POST' });
3607
3617
  var installResult = await installResponse.json();
3608
3618
 
3609
3619
  if (!installResult.success) {
@@ -5373,7 +5383,7 @@ function initCodexLensManagerPageEvents(currentConfig) {
5373
5383
  saveBtn.disabled = true;
5374
5384
  saveBtn.innerHTML = '<span class="animate-pulse">' + t('common.saving') + '</span>';
5375
5385
  try {
5376
- var response = await fetch('/api/codexlens/config', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ index_dir: newIndexDir }) });
5386
+ var response = await csrfFetch('/api/codexlens/config', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ index_dir: newIndexDir }) });
5377
5387
  var result = await response.json();
5378
5388
  if (result.success) { showRefreshToast(t('codexlens.configSaved'), 'success'); renderCodexLensManager(); }
5379
5389
  else { showRefreshToast(t('common.saveFailed') + ': ' + result.error, 'error'); }
@@ -1114,7 +1114,7 @@ async function deleteInsight(insightId) {
1114
1114
  if (!confirm(t('memory.confirmDeleteInsight'))) return;
1115
1115
 
1116
1116
  try {
1117
- var response = await fetch('/api/memory/insights/' + insightId, { method: 'DELETE' });
1117
+ var response = await csrfFetch('/api/memory/insights/' + insightId, { method: 'DELETE' });
1118
1118
  if (!response.ok) throw new Error('Failed to delete insight');
1119
1119
 
1120
1120
  selectedInsight = null;
@@ -431,7 +431,7 @@ async function deletePromptInsight(insightId) {
431
431
  if (!confirm(isZh() ? '确定要删除这条洞察记录吗?' : 'Are you sure you want to delete this insight?')) return;
432
432
 
433
433
  try {
434
- var response = await fetch('/api/memory/insights/' + insightId, { method: 'DELETE' });
434
+ var response = await csrfFetch('/api/memory/insights/' + insightId, { method: 'DELETE' });
435
435
  if (!response.ok) throw new Error('Failed to delete insight');
436
436
 
437
437
  selectedPromptInsight = null;
@@ -535,10 +535,15 @@ def generate_embeddings(
535
535
 
536
536
  # skip_token_count=True: Use fast estimation (len/4) instead of expensive tiktoken
537
537
  # This significantly reduces CPU usage with minimal impact on metadata accuracy
538
+ # Load chunk stripping config from settings
539
+ from codexlens.config import Config
540
+ chunk_cfg = Config.load()
538
541
  chunker = Chunker(config=ChunkConfig(
539
542
  max_chunk_size=chunk_size,
540
543
  overlap=overlap,
541
- skip_token_count=True
544
+ skip_token_count=True,
545
+ strip_comments=getattr(chunk_cfg, 'chunk_strip_comments', True),
546
+ strip_docstrings=getattr(chunk_cfg, 'chunk_strip_docstrings', True),
542
547
  ))
543
548
 
544
549
  # Log embedder info with endpoint count for multi-endpoint mode
@@ -1307,10 +1312,15 @@ def generate_dense_embeddings_centralized(
1307
1312
  "error": f"Invalid embedding backend: {embedding_backend}",
1308
1313
  }
1309
1314
 
1315
+ # Load chunk stripping config from settings
1316
+ from codexlens.config import Config
1317
+ chunk_cfg = Config.load()
1310
1318
  chunker = Chunker(config=ChunkConfig(
1311
1319
  max_chunk_size=chunk_size,
1312
1320
  overlap=overlap,
1313
- skip_token_count=True
1321
+ skip_token_count=True,
1322
+ strip_comments=getattr(chunk_cfg, 'chunk_strip_comments', True),
1323
+ strip_docstrings=getattr(chunk_cfg, 'chunk_strip_docstrings', True),
1314
1324
  ))
1315
1325
 
1316
1326
  if progress_callback:
@@ -1319,8 +1329,7 @@ def generate_dense_embeddings_centralized(
1319
1329
  progress_callback(f"Using model: {embedder.model_name} ({embedder.embedding_dim} dimensions)")
1320
1330
 
1321
1331
  # Calculate dynamic batch size based on model capacity
1322
- from codexlens.config import Config
1323
- batch_config = Config.load()
1332
+ batch_config = chunk_cfg # Reuse already loaded config
1324
1333
  effective_batch_size = calculate_dynamic_batch_size(batch_config, embedder)
1325
1334
 
1326
1335
  if progress_callback and batch_config.api_batch_size_dynamic:
@@ -141,6 +141,12 @@ class Config:
141
141
  reranker_model: str = "cross-encoder/ms-marco-MiniLM-L-6-v2"
142
142
  reranker_top_k: int = 50
143
143
  reranker_max_input_tokens: int = 8192 # Maximum tokens for reranker API batching
144
+ reranker_chunk_type_weights: Optional[Dict[str, float]] = None # Weights for chunk types: {"code": 1.0, "docstring": 0.7}
145
+ reranker_test_file_penalty: float = 0.0 # Penalty for test files (0.0-1.0, e.g., 0.2 = 20% reduction)
146
+
147
+ # Chunk stripping configuration (for semantic embedding)
148
+ chunk_strip_comments: bool = True # Strip comments from code chunks
149
+ chunk_strip_docstrings: bool = True # Strip docstrings from code chunks
144
150
 
145
151
  # Cascade search configuration (two-stage retrieval)
146
152
  enable_cascade_search: bool = False # Enable cascade search (coarse + fine ranking)
@@ -545,6 +551,35 @@ class Config:
545
551
  except ValueError:
546
552
  log.warning("Invalid RERANKER_MAX_INPUT_TOKENS in .env: %r", reranker_max_tokens)
547
553
 
554
+ # Reranker tuning from environment
555
+ test_penalty = get_env("RERANKER_TEST_FILE_PENALTY")
556
+ if test_penalty:
557
+ try:
558
+ self.reranker_test_file_penalty = float(test_penalty)
559
+ log.debug("Overriding reranker_test_file_penalty from .env: %s", self.reranker_test_file_penalty)
560
+ except ValueError:
561
+ log.warning("Invalid RERANKER_TEST_FILE_PENALTY in .env: %r", test_penalty)
562
+
563
+ docstring_weight = get_env("RERANKER_DOCSTRING_WEIGHT")
564
+ if docstring_weight:
565
+ try:
566
+ weight = float(docstring_weight)
567
+ self.reranker_chunk_type_weights = {"code": 1.0, "docstring": weight}
568
+ log.debug("Overriding reranker docstring weight from .env: %s", weight)
569
+ except ValueError:
570
+ log.warning("Invalid RERANKER_DOCSTRING_WEIGHT in .env: %r", docstring_weight)
571
+
572
+ # Chunk stripping from environment
573
+ strip_comments = get_env("CHUNK_STRIP_COMMENTS")
574
+ if strip_comments:
575
+ self.chunk_strip_comments = strip_comments.lower() in ("true", "1", "yes")
576
+ log.debug("Overriding chunk_strip_comments from .env: %s", self.chunk_strip_comments)
577
+
578
+ strip_docstrings = get_env("CHUNK_STRIP_DOCSTRINGS")
579
+ if strip_docstrings:
580
+ self.chunk_strip_docstrings = strip_docstrings.lower() in ("true", "1", "yes")
581
+ log.debug("Overriding chunk_strip_docstrings from .env: %s", self.chunk_strip_docstrings)
582
+
548
583
  @classmethod
549
584
  def load(cls) -> "Config":
550
585
  """Load config with settings from file."""
@@ -45,6 +45,12 @@ ENV_VARS = {
45
45
  # General configuration
46
46
  "CODEXLENS_DATA_DIR": "Custom data directory path",
47
47
  "CODEXLENS_DEBUG": "Enable debug mode (true/false)",
48
+ # Chunking configuration
49
+ "CHUNK_STRIP_COMMENTS": "Strip comments from code chunks for embedding: true/false (default: true)",
50
+ "CHUNK_STRIP_DOCSTRINGS": "Strip docstrings from code chunks for embedding: true/false (default: true)",
51
+ # Reranker tuning
52
+ "RERANKER_TEST_FILE_PENALTY": "Penalty for test files in reranking: 0.0-1.0 (default: 0.0)",
53
+ "RERANKER_DOCSTRING_WEIGHT": "Weight for docstring chunks in reranking: 0.0-1.0 (default: 1.0)",
48
54
  }
49
55
 
50
56
 
@@ -1816,12 +1816,22 @@ class ChainSearchEngine:
1816
1816
  # Use cross_encoder_rerank from ranking module
1817
1817
  from codexlens.search.ranking import cross_encoder_rerank
1818
1818
 
1819
+ # Get chunk_type weights and test_file_penalty from config
1820
+ chunk_type_weights = None
1821
+ test_file_penalty = 0.0
1822
+
1823
+ if self._config is not None:
1824
+ chunk_type_weights = getattr(self._config, "reranker_chunk_type_weights", None)
1825
+ test_file_penalty = getattr(self._config, "reranker_test_file_penalty", 0.0)
1826
+
1819
1827
  return cross_encoder_rerank(
1820
1828
  query=query,
1821
1829
  results=results,
1822
1830
  reranker=reranker,
1823
1831
  top_k=top_k,
1824
1832
  batch_size=32,
1833
+ chunk_type_weights=chunk_type_weights,
1834
+ test_file_penalty=test_file_penalty,
1825
1835
  )
1826
1836
 
1827
1837
  def search_files_only(self, query: str,
@@ -613,11 +613,24 @@ def cross_encoder_rerank(
613
613
  reranker: Any,
614
614
  top_k: int = 50,
615
615
  batch_size: int = 32,
616
+ chunk_type_weights: Optional[Dict[str, float]] = None,
617
+ test_file_penalty: float = 0.0,
616
618
  ) -> List[SearchResult]:
617
619
  """Second-stage reranking using a cross-encoder model.
618
620
 
619
621
  This function is dependency-agnostic: callers can pass any object that exposes
620
622
  a compatible `score_pairs(pairs, batch_size=...)` method.
623
+
624
+ Args:
625
+ query: Search query string
626
+ results: List of search results to rerank
627
+ reranker: Cross-encoder model with score_pairs or predict method
628
+ top_k: Number of top results to rerank
629
+ batch_size: Batch size for reranking
630
+ chunk_type_weights: Optional weights for different chunk types.
631
+ Example: {"code": 1.0, "docstring": 0.7} - reduce docstring influence
632
+ test_file_penalty: Penalty applied to test files (0.0-1.0).
633
+ Example: 0.2 means test files get 20% score reduction
621
634
  """
622
635
  if not results:
623
636
  return []
@@ -667,13 +680,50 @@ def cross_encoder_rerank(
667
680
 
668
681
  reranked_results: List[SearchResult] = []
669
682
 
683
+ # Helper to detect test files
684
+ def is_test_file(path: str) -> bool:
685
+ if not path:
686
+ return False
687
+ basename = path.split("/")[-1].split("\\")[-1]
688
+ return (
689
+ basename.startswith("test_") or
690
+ basename.endswith("_test.py") or
691
+ basename.endswith(".test.ts") or
692
+ basename.endswith(".test.js") or
693
+ basename.endswith(".spec.ts") or
694
+ basename.endswith(".spec.js") or
695
+ "/tests/" in path or
696
+ "\\tests\\" in path or
697
+ "/test/" in path or
698
+ "\\test\\" in path
699
+ )
700
+
670
701
  for idx, result in enumerate(results):
671
702
  if idx < rerank_count:
672
703
  prev_score = float(result.score)
673
704
  ce_score = scores[idx]
674
705
  ce_prob = probs[idx]
706
+
707
+ # Base combined score
675
708
  combined_score = 0.5 * prev_score + 0.5 * ce_prob
676
709
 
710
+ # Apply chunk_type weight adjustment
711
+ if chunk_type_weights:
712
+ chunk_type = None
713
+ if result.chunk and hasattr(result.chunk, "metadata"):
714
+ chunk_type = result.chunk.metadata.get("chunk_type")
715
+ elif result.metadata:
716
+ chunk_type = result.metadata.get("chunk_type")
717
+
718
+ if chunk_type and chunk_type in chunk_type_weights:
719
+ weight = chunk_type_weights[chunk_type]
720
+ # Apply weight to CE contribution only
721
+ combined_score = 0.5 * prev_score + 0.5 * ce_prob * weight
722
+
723
+ # Apply test file penalty
724
+ if test_file_penalty > 0 and is_test_file(result.path):
725
+ combined_score = combined_score * (1.0 - test_file_penalty)
726
+
677
727
  reranked_results.append(
678
728
  SearchResult(
679
729
  path=result.path,