@optiqcode/cli 2.0.0 → 2.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,7 +6,15 @@ import path from 'path';
6
6
  import fs from 'fs/promises';
7
7
  import { getConfig } from '../utils/config.js';
8
8
  import { isValidDirectory, getGitIgnorePatterns, shouldIgnoreFile } from '../utils/files.js';
9
- const API_URL = process.env.OPTIQ_BACKEND_URL ? `${process.env.OPTIQ_BACKEND_URL}/api` : 'https://api.optiqcode.com/api';
9
+ const ENGINE_URL = process.env.OPTIQ_ENGINE_URL || 'https://api.optiqcode.com';
10
// Derive a stable repository identifier from a filesystem path:
// "<basename>-<hex hash of the full path>".
function generateRepoId(targetPath) {
    const name = path.basename(targetPath);
    // ×31 rolling hash ((h << 5) - h === h * 31) over the path's UTF-16
    // code units, truncated to a signed 32-bit int on every step via `| 0`.
    let hash = 0;
    for (let i = 0; i < targetPath.length; i += 1) {
        hash = ((hash << 5) - hash + targetPath.charCodeAt(i)) | 0;
    }
    return `${name}-${Math.abs(hash).toString(16)}`;
}
10
18
  export async function index(options) {
11
19
  const config = await getConfig();
12
20
  if (!config) {
@@ -63,43 +71,41 @@ export async function index(options) {
63
71
  // Skip files that can't be read
64
72
  }
65
73
  }
66
- spinner.text = 'Uploading to Optiq...';
67
- // Batch upload files (50 at a time to avoid 413 errors)
74
+ spinner.text = 'Indexing with Optiq Engine...';
75
+ // Generate repository ID from path
76
+ const repoId = generateRepoId(targetPath);
77
+ // Batch upload files (50 at a time to avoid gateway timeouts)
68
78
  const BATCH_SIZE = 50;
69
79
  let uploadedCount = 0;
70
- let repoId = '';
71
- let totalFilesProcessed = 0;
72
- let totalEntitiesIndexed = 0;
80
+ let totalChunksCreated = 0;
73
81
  for (let i = 0; i < fileContents.length; i += BATCH_SIZE) {
74
82
  const batch = fileContents.slice(i, i + BATCH_SIZE);
75
83
  const batchNum = Math.floor(i / BATCH_SIZE) + 1;
76
84
  const totalBatches = Math.ceil(fileContents.length / BATCH_SIZE);
77
- spinner.text = `Uploading batch ${batchNum}/${totalBatches} (${uploadedCount}/${fileContents.length} files)...`;
78
- const response = await axios.post(`${API_URL}/nexus/index/content`, {
79
- repository_path: targetPath,
85
+ spinner.text = `Indexing batch ${batchNum}/${totalBatches} (${uploadedCount}/${fileContents.length} files)...`;
86
+ const response = await axios.post(`${ENGINE_URL}/api/v1/index`, {
87
+ repository_id: repoId,
88
+ branch: 'main',
80
89
  files: batch,
81
90
  }, {
82
91
  headers: {
83
- Authorization: `Bearer ${config.apiKey}`,
84
92
  'Content-Type': 'application/json',
85
93
  },
86
94
  timeout: 0, // No timeout for large codebases
87
95
  });
88
96
  if (!response.data.success) {
89
97
  spinner.fail(chalk.red('✗ Indexing failed'));
90
- console.log(chalk.red(response.data.error || 'Unknown error'));
98
+ console.log(chalk.red(response.data.result?.errors?.join('\n') || 'Unknown error'));
91
99
  return;
92
100
  }
93
- repoId = response.data.repo_id;
94
- totalFilesProcessed += response.data.files_processed || 0;
95
- totalEntitiesIndexed += response.data.entities_indexed || 0;
101
+ totalChunksCreated += response.data.result?.chunks_created || 0;
96
102
  uploadedCount += batch.length;
97
103
  }
98
104
  spinner.succeed(chalk.green('✓ Indexing complete'));
99
105
  console.log(chalk.blue('📊 Repository ID:'), chalk.bold(repoId));
100
- console.log(chalk.blue('📁 Files indexed:'), chalk.bold(totalFilesProcessed));
101
- console.log(chalk.blue('📝 Entities indexed:'), chalk.bold(totalEntitiesIndexed));
102
- console.log(chalk.dim('\nUse this repo_id with the MCP server or API'));
106
+ console.log(chalk.blue('📁 Files indexed:'), chalk.bold(uploadedCount));
107
+ console.log(chalk.blue('📝 Chunks created:'), chalk.bold(totalChunksCreated));
108
+ console.log(chalk.dim('\nUse this repo_id for searches'));
103
109
  }
104
110
  catch (error) {
105
111
  spinner.fail(chalk.red('✗ Indexing failed'));
package/dist/index.js CHANGED
@@ -9,6 +9,16 @@ import fs from 'fs/promises';
9
9
  import { getConfig, saveConfig } from './utils/config.js';
10
10
  import { isValidDirectory, getGitIgnorePatterns, shouldIgnoreFile } from './utils/files.js';
11
11
  const BACKEND_URL = process.env.OPTIQ_BACKEND_URL || 'https://api.optiqcode.com';
12
+ const ENGINE_URL = process.env.OPTIQ_ENGINE_URL || 'https://api.optiqcode.com';
13
// Build a deterministic repository ID for a target path. The ID combines the
// path's basename (human-readable) with a hex digest of the whole path so two
// checkouts with the same folder name still get distinct IDs.
function generateRepoId(targetPath) {
    const basename = path.basename(targetPath);
    // Multiply-by-31 accumulator ((h << 5) - h) over each UTF-16 code unit,
    // forced into signed 32-bit range with `| 0` at every iteration.
    let h = 0;
    for (const _ of targetPath) void 0; // no-op guard removed below; see loop
    h = 0;
    for (let idx = 0; idx < targetPath.length; idx += 1) {
        h = ((h << 5) - h + targetPath.charCodeAt(idx)) | 0;
    }
    return `${basename}-${Math.abs(h).toString(16)}`;
}
12
22
  async function showBanner() {
13
23
  console.clear();
14
24
  console.log(chalk.white.bold(`
@@ -277,41 +287,40 @@ async function indexOnce(targetPath, config) {
277
287
  }
278
288
  spinner.text = `Reading... ${Math.min(i + PARALLEL_READS, files.length)}/${files.length}`;
279
289
  }
280
- // Upload in larger batches (150 files per batch)
281
- const BATCH_SIZE = 150;
282
- let totalFiles = 0;
283
- let totalEntities = 0;
284
- let repoId = '';
285
- spinner.text = 'Uploading...';
290
+ // Generate repository ID from path
291
+ const repoId = generateRepoId(targetPath);
292
+ // Upload in smaller batches to avoid gateway timeouts
293
+ const BATCH_SIZE = 50;
294
+ let totalChunks = 0;
295
+ spinner.text = 'Indexing...';
286
296
  for (let i = 0; i < filesArray.length; i += BATCH_SIZE) {
287
297
  const batch = filesArray.slice(i, i + BATCH_SIZE);
288
298
  const batchNum = Math.floor(i / BATCH_SIZE) + 1;
289
299
  const totalBatches = Math.ceil(filesArray.length / BATCH_SIZE);
290
- spinner.text = `Uploading... ${batchNum}/${totalBatches}`;
291
- const response = await axios.post(`${BACKEND_URL}/api/nexus/index/content`, {
292
- repository_path: targetPath,
300
+ spinner.text = `Indexing... ${batchNum}/${totalBatches} (parallel processing)`;
301
+ // Call the Rust engine's /api/v1/index endpoint
302
+ const response = await axios.post(`${ENGINE_URL}/api/v1/index`, {
303
+ repository_id: repoId,
304
+ branch: 'main',
293
305
  files: batch,
294
306
  }, {
295
307
  headers: {
296
- 'X-API-Key': config.apiKey,
297
308
  'Content-Type': 'application/json',
298
309
  },
299
- timeout: 0,
310
+ timeout: 0, // No timeout - parallel processing handles large batches
300
311
  });
301
312
  if (!response.data.success) {
302
313
  spinner.fail(chalk.red('Indexing failed'));
303
- console.log(chalk.gray(response.data.error || 'Unknown error'));
314
+ console.log(chalk.gray(response.data.result?.errors?.join('\n') || 'Unknown error'));
304
315
  return;
305
316
  }
306
- repoId = response.data.repo_id;
307
- totalFiles += response.data.stats?.files_indexed || 0;
308
- totalEntities += response.data.stats?.entities_indexed || 0;
317
+ totalChunks += response.data.result?.chunks_created || 0;
309
318
  }
310
319
  spinner.succeed(chalk.cyan('Indexed'));
311
- console.log(chalk.gray(` ${totalFiles} files • ${totalEntities} entities`));
320
+ console.log(chalk.gray(` ${filesArray.length} files • ${totalChunks} chunks`));
312
321
  console.log(chalk.cyan('\n📊 Repository ID:'));
313
322
  console.log(chalk.white(` ${repoId}`));
314
- console.log(chalk.gray('\n Use this ID with the MCP server\n'));
323
+ console.log(chalk.gray('\n Use this ID for searches\n'));
315
324
  }
316
325
  catch (error) {
317
326
  spinner.fail(chalk.red('Failed'));
@@ -324,89 +333,65 @@ async function indexOnce(targetPath, config) {
324
333
  }
325
334
  }
326
335
  async function watchDirectory(targetPath, config) {
327
- // Check if repo is already indexed
328
- let repoId = null;
336
+ // Generate repository ID from path
337
+ let repoId = generateRepoId(targetPath);
338
+ // Do initial full index
339
+ const spinner = ora({ text: 'Collecting files...', color: 'cyan' }).start();
329
340
  try {
330
- const repoListResponse = await axios.get(`${BACKEND_URL}/api/repositories`, {
331
- headers: {
332
- 'X-API-Key': config.apiKey,
333
- },
334
- timeout: 10000,
335
- });
336
- if (repoListResponse.data.success && repoListResponse.data.repositories) {
337
- const existingRepo = repoListResponse.data.repositories.find((r) => r.path === targetPath);
338
- if (existingRepo) {
339
- repoId = existingRepo.id;
340
- console.log(chalk.cyan('✓ Already indexed'));
341
- console.log(chalk.gray(` Repo ID: ${repoId}\n`));
341
+ const files = await collectFiles(targetPath);
342
+ spinner.text = `Reading ${files.length} files...`;
343
+ // Read files in parallel (100 at a time)
344
+ const PARALLEL_READS = 100;
345
+ const filesArray = [];
346
+ for (let i = 0; i < files.length; i += PARALLEL_READS) {
347
+ const chunk = files.slice(i, i + PARALLEL_READS);
348
+ const results = await Promise.allSettled(chunk.map(async (file) => {
349
+ const content = await fs.readFile(file, 'utf-8');
350
+ const relativePath = path.relative(targetPath, file).replace(/\\/g, '/');
351
+ return { path: relativePath, content };
352
+ }));
353
+ for (const result of results) {
354
+ if (result.status === 'fulfilled') {
355
+ filesArray.push(result.value);
356
+ }
357
+ }
358
+ spinner.text = `Reading... ${Math.min(i + PARALLEL_READS, files.length)}/${files.length}`;
359
+ }
360
+ // Upload in smaller batches to avoid gateway timeouts
361
+ const BATCH_SIZE = 50;
362
+ for (let i = 0; i < filesArray.length; i += BATCH_SIZE) {
363
+ const batch = filesArray.slice(i, i + BATCH_SIZE);
364
+ const batchNum = Math.floor(i / BATCH_SIZE) + 1;
365
+ const totalBatches = Math.ceil(filesArray.length / BATCH_SIZE);
366
+ spinner.text = `Indexing... ${batchNum}/${totalBatches} (parallel processing)`;
367
+ const response = await axios.post(`${ENGINE_URL}/api/v1/index`, {
368
+ repository_id: repoId,
369
+ branch: 'main',
370
+ files: batch,
371
+ }, {
372
+ headers: {
373
+ 'Content-Type': 'application/json',
374
+ },
375
+ timeout: 0,
376
+ });
377
+ if (!response.data.success) {
378
+ spinner.fail(chalk.red('Failed'));
379
+ console.log(chalk.gray(response.data.result?.errors?.join('\n') || 'Unknown error'));
380
+ return;
342
381
  }
343
382
  }
383
+ spinner.succeed(chalk.cyan(`Indexed ${files.length} files`));
384
+ console.log(chalk.gray(` Repo ID: ${repoId}\n`));
344
385
  }
345
386
  catch (error) {
346
- // Ignore errors, will do full index
347
- }
348
- // Only do full index if repo doesn't exist
349
- if (!repoId) {
350
- const spinner = ora({ text: 'Collecting files...', color: 'cyan' }).start();
351
- try {
352
- const files = await collectFiles(targetPath);
353
- spinner.text = `Reading ${files.length} files...`;
354
- // Read files in parallel (100 at a time)
355
- const PARALLEL_READS = 100;
356
- const filesArray = [];
357
- for (let i = 0; i < files.length; i += PARALLEL_READS) {
358
- const chunk = files.slice(i, i + PARALLEL_READS);
359
- const results = await Promise.allSettled(chunk.map(async (file) => {
360
- const content = await fs.readFile(file, 'utf-8');
361
- const relativePath = path.relative(targetPath, file).replace(/\\/g, '/');
362
- return { path: relativePath, content };
363
- }));
364
- for (const result of results) {
365
- if (result.status === 'fulfilled') {
366
- filesArray.push(result.value);
367
- }
368
- }
369
- spinner.text = `Reading... ${Math.min(i + PARALLEL_READS, files.length)}/${files.length}`;
370
- }
371
- // Upload in larger batches
372
- const BATCH_SIZE = 150;
373
- let uploadedCount = 0;
374
- for (let i = 0; i < filesArray.length; i += BATCH_SIZE) {
375
- const batch = filesArray.slice(i, i + BATCH_SIZE);
376
- const batchNum = Math.floor(i / BATCH_SIZE) + 1;
377
- const totalBatches = Math.ceil(filesArray.length / BATCH_SIZE);
378
- spinner.text = `Uploading... ${batchNum}/${totalBatches}`;
379
- const response = await axios.post(`${BACKEND_URL}/api/nexus/index/content`, {
380
- repository_path: targetPath,
381
- files: batch,
382
- }, {
383
- headers: {
384
- 'X-API-Key': config.apiKey,
385
- 'Content-Type': 'application/json',
386
- },
387
- timeout: 0,
388
- });
389
- if (!response.data.success) {
390
- spinner.fail(chalk.red('Failed'));
391
- console.log(chalk.gray(response.data.error || 'Unknown error'));
392
- return;
393
- }
394
- repoId = response.data.repo_id;
395
- uploadedCount += batch.length;
396
- }
397
- spinner.succeed(chalk.cyan(`Indexed ${files.length} files`));
398
- console.log(chalk.gray(` Repo ID: ${repoId}\n`));
387
+ spinner.fail(chalk.red('Failed'));
388
+ if (error.response?.data?.error) {
389
+ console.log(chalk.gray(' ' + error.response.data.error));
399
390
  }
400
- catch (error) {
401
- spinner.fail(chalk.red('Failed'));
402
- if (error.response?.data?.error) {
403
- console.log(chalk.gray(' ' + error.response.data.error));
404
- }
405
- else {
406
- console.log(chalk.gray(' ' + error.message));
407
- }
408
- return;
391
+ else {
392
+ console.log(chalk.gray(' ' + error.message));
409
393
  }
394
+ return;
410
395
  }
411
396
  const ignorePatterns = await getGitIgnorePatterns(targetPath);
412
397
  const watcher = chokidar.watch(targetPath, {
@@ -506,42 +491,52 @@ async function watchDirectory(targetPath, config) {
506
491
  isProcessing = false;
507
492
  return;
508
493
  }
509
- const response = await axios.post(`${BACKEND_URL}/api/nexus/index/incremental`, {
510
- repository_id: repoId,
511
- repository_path: targetPath,
512
- files: filesArray,
513
- }, {
514
- headers: {
515
- 'X-API-Key': config.apiKey,
516
- 'Content-Type': 'application/json',
517
- },
518
- timeout: 60000,
519
- });
520
- if (response.data.success) {
521
- // Add successfully indexed files to the set
522
- for (const file of filesArray) {
523
- if (file.content !== null) {
494
+ // Filter out deleted files (content: null) - send only files with content
495
+ const filesToIndex = filesArray.filter(f => f.content !== null);
496
+ // For incremental updates, use the same /api/v1/index endpoint
497
+ // The Rust engine handles upserts automatically
498
+ if (filesToIndex.length > 0) {
499
+ const response = await axios.post(`${ENGINE_URL}/api/v1/index`, {
500
+ repository_id: repoId,
501
+ branch: 'main',
502
+ files: filesToIndex,
503
+ }, {
504
+ headers: {
505
+ 'Content-Type': 'application/json',
506
+ },
507
+ timeout: 60000,
508
+ });
509
+ if (response.data.success) {
510
+ // Add successfully indexed files to the set
511
+ for (const file of filesToIndex) {
524
512
  allIndexedFiles.add(file.path);
525
513
  }
526
- }
527
- totalIndexed += filesArray.length;
528
- lastIndexedTime = Date.now();
529
- // Update last indexed file
530
- if (filesArray.length === 1) {
531
- lastIndexedFile = filesArray[0].path;
514
+ totalIndexed += filesToIndex.length;
515
+ lastIndexedTime = Date.now();
516
+ // Update last indexed file
517
+ if (filesToIndex.length === 1) {
518
+ lastIndexedFile = filesToIndex[0].path;
519
+ }
520
+ else {
521
+ lastIndexedFile = `${filesToIndex.length} files`;
522
+ }
523
+ // Update dashboard
524
+ showDashboard();
532
525
  }
533
526
  else {
534
- lastIndexedFile = `${filesArray.length} files`;
527
+ // Clear dashboard, show error, redraw dashboard
528
+ process.stdout.write('\x1b[5A');
529
+ process.stdout.write('\x1b[0J');
530
+ dashboardShown = false;
531
+ console.log(chalk.red(`✗ Failed to index: ${response.data.result?.errors?.join(', ') || 'Unknown error'}\n`));
532
+ showDashboard();
535
533
  }
536
- // Update dashboard
537
- showDashboard();
538
534
  }
539
535
  else {
540
- // Clear dashboard, show error, redraw dashboard
541
- process.stdout.write('\x1b[5A');
542
- process.stdout.write('\x1b[0J');
543
- dashboardShown = false;
544
- console.log(chalk.red(`✗ Failed to index: ${response.data.error}\n`));
536
+ // Only deletions - just update the dashboard
537
+ totalIndexed += filesArray.length;
538
+ lastIndexedTime = Date.now();
539
+ lastIndexedFile = `${filesArray.length} deleted`;
545
540
  showDashboard();
546
541
  }
547
542
  }
@@ -304,22 +304,50 @@ export async function getGitIgnorePatterns(dir) {
304
304
  return patterns;
305
305
  }
306
306
  export function shouldIgnoreFile(relativePath, patterns) {
307
- const parts = relativePath.split(path.sep);
307
+ // Normalize path separators to forward slashes for consistent matching
308
+ const normalizedPath = relativePath.replace(/\\/g, '/');
309
+ const parts = normalizedPath.split('/');
310
+ const fileName = parts[parts.length - 1];
308
311
  for (const pattern of patterns) {
309
- // Exact match
312
+ // Skip empty patterns
313
+ if (!pattern)
314
+ continue;
315
+ // Exact directory/file name match (e.g., 'node_modules', '.git')
310
316
  if (parts.includes(pattern)) {
311
317
  return true;
312
318
  }
313
- // Wildcard match
314
- if (pattern.includes('*')) {
315
- const regex = new RegExp('^' + pattern.replace(/\*/g, '.*') + '$');
316
- if (regex.test(relativePath)) {
319
+ // Extension match (e.g., '*.log', '*.pyc')
320
+ if (pattern.startsWith('*.')) {
321
+ const ext = pattern.slice(1); // Get '.log' from '*.log'
322
+ if (fileName.endsWith(ext)) {
317
323
  return true;
318
324
  }
319
325
  }
320
- // Extension match
321
- if (pattern.startsWith('*.') && relativePath.endsWith(pattern.slice(1))) {
322
- return true;
326
+ // Glob pattern match (e.g., '**/*.rs.bk', 'cmake-build-*')
327
+ if (pattern.includes('*') && !pattern.startsWith('*.')) {
328
+ try {
329
+ // Convert glob to regex
330
+ let regexPattern = pattern
331
+ .replace(/[.+^${}()|[\]\\]/g, '\\$&') // Escape special regex chars first
332
+ .replace(/\*\*/g, '{{GLOBSTAR}}') // Temporarily replace **
333
+ .replace(/\*/g, '[^/]*') // * matches anything except /
334
+ .replace(/{{GLOBSTAR}}/g, '.*') // ** matches anything including /
335
+ .replace(/\?/g, '[^/]'); // ? matches single char except /
336
+ const regex = new RegExp(`(^|/)${regexPattern}($|/|$)`);
337
+ if (regex.test(normalizedPath)) {
338
+ return true;
339
+ }
340
+ }
341
+ catch {
342
+ // Invalid regex pattern, skip
343
+ }
344
+ }
345
+ // Path prefix match (e.g., 'docs/_build' should match 'docs/_build/something')
346
+ if (pattern.includes('/') && !pattern.includes('*')) {
347
+ const normalizedPattern = pattern.replace(/\\/g, '/');
348
+ if (normalizedPath === normalizedPattern || normalizedPath.startsWith(normalizedPattern + '/')) {
349
+ return true;
350
+ }
323
351
  }
324
352
  }
325
353
  return false;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@optiqcode/cli",
3
- "version": "2.0.0",
3
+ "version": "2.1.1",
4
4
  "description": "CLI tool for Optiq - automatic code indexing and context engine",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",