@optiqcode/cli 1.8.0 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,7 +6,15 @@ import path from 'path';
6
6
  import fs from 'fs/promises';
7
7
  import { getConfig } from '../utils/config.js';
8
8
  import { isValidDirectory, getGitIgnorePatterns, shouldIgnoreFile } from '../utils/files.js';
9
- const API_URL = process.env.OPTIQ_BACKEND_URL ? `${process.env.OPTIQ_BACKEND_URL}/api` : 'https://api.optiqcode.com/api';
9
+ const ENGINE_URL = process.env.OPTIQ_ENGINE_URL || 'https://api.optiqcode.com';
10
+ // Helper to generate repository ID from path
11
+ function generateRepoId(targetPath) {
12
+ const basename = path.basename(targetPath);
13
+ const hash = targetPath.split('').reduce((acc, char) => {
14
+ return ((acc << 5) - acc) + char.charCodeAt(0) | 0;
15
+ }, 0);
16
+ return `${basename}-${Math.abs(hash).toString(16)}`;
17
+ }
10
18
  export async function index(options) {
11
19
  const config = await getConfig();
12
20
  if (!config) {
@@ -63,43 +71,41 @@ export async function index(options) {
63
71
  // Skip files that can't be read
64
72
  }
65
73
  }
66
- spinner.text = 'Uploading to Optiq...';
67
- // Batch upload files (50 at a time to avoid 413 errors)
68
- const BATCH_SIZE = 50;
74
+ spinner.text = 'Indexing with Optiq Engine...';
75
+ // Generate repository ID from path
76
+ const repoId = generateRepoId(targetPath);
77
+ // Batch upload files (150 at a time for parallel processing)
78
+ const BATCH_SIZE = 150;
69
79
  let uploadedCount = 0;
70
- let repoId = '';
71
- let totalFilesProcessed = 0;
72
- let totalEntitiesIndexed = 0;
80
+ let totalChunksCreated = 0;
73
81
  for (let i = 0; i < fileContents.length; i += BATCH_SIZE) {
74
82
  const batch = fileContents.slice(i, i + BATCH_SIZE);
75
83
  const batchNum = Math.floor(i / BATCH_SIZE) + 1;
76
84
  const totalBatches = Math.ceil(fileContents.length / BATCH_SIZE);
77
- spinner.text = `Uploading batch ${batchNum}/${totalBatches} (${uploadedCount}/${fileContents.length} files)...`;
78
- const response = await axios.post(`${API_URL}/nexus/index/content`, {
79
- repository_path: targetPath,
85
+ spinner.text = `Indexing batch ${batchNum}/${totalBatches} (${uploadedCount}/${fileContents.length} files)...`;
86
+ const response = await axios.post(`${ENGINE_URL}/api/v1/index`, {
87
+ repository_id: repoId,
88
+ branch: 'main',
80
89
  files: batch,
81
90
  }, {
82
91
  headers: {
83
- Authorization: `Bearer ${config.apiKey}`,
84
92
  'Content-Type': 'application/json',
85
93
  },
86
94
  timeout: 0, // No timeout for large codebases
87
95
  });
88
96
  if (!response.data.success) {
89
97
  spinner.fail(chalk.red('✗ Indexing failed'));
90
- console.log(chalk.red(response.data.error || 'Unknown error'));
98
+ console.log(chalk.red(response.data.result?.errors?.join('\n') || 'Unknown error'));
91
99
  return;
92
100
  }
93
- repoId = response.data.repo_id;
94
- totalFilesProcessed += response.data.files_processed || 0;
95
- totalEntitiesIndexed += response.data.entities_indexed || 0;
101
+ totalChunksCreated += response.data.result?.chunks_created || 0;
96
102
  uploadedCount += batch.length;
97
103
  }
98
104
  spinner.succeed(chalk.green('✓ Indexing complete'));
99
105
  console.log(chalk.blue('📊 Repository ID:'), chalk.bold(repoId));
100
- console.log(chalk.blue('📁 Files indexed:'), chalk.bold(totalFilesProcessed));
101
- console.log(chalk.blue('📝 Entities indexed:'), chalk.bold(totalEntitiesIndexed));
102
- console.log(chalk.dim('\nUse this repo_id with the MCP server or API'));
106
+ console.log(chalk.blue('📁 Files indexed:'), chalk.bold(uploadedCount));
107
+ console.log(chalk.blue('📝 Chunks created:'), chalk.bold(totalChunksCreated));
108
+ console.log(chalk.dim('\nUse this repo_id for searches'));
103
109
  }
104
110
  catch (error) {
105
111
  spinner.fail(chalk.red('✗ Indexing failed'));
package/dist/index.js CHANGED
@@ -6,13 +6,22 @@ import ora from 'ora';
6
6
  import chokidar from 'chokidar';
7
7
  import path from 'path';
8
8
  import fs from 'fs/promises';
9
- import logUpdate from 'log-update';
10
9
  import { getConfig, saveConfig } from './utils/config.js';
11
10
  import { isValidDirectory, getGitIgnorePatterns, shouldIgnoreFile } from './utils/files.js';
12
11
  const BACKEND_URL = process.env.OPTIQ_BACKEND_URL || 'https://api.optiqcode.com';
12
+ const ENGINE_URL = process.env.OPTIQ_ENGINE_URL || 'https://api.optiqcode.com';
13
+ // Helper to generate repository ID from path
14
+ function generateRepoId(targetPath) {
15
+ // Use path basename + hash of full path for uniqueness
16
+ const basename = path.basename(targetPath);
17
+ const hash = targetPath.split('').reduce((acc, char) => {
18
+ return ((acc << 5) - acc) + char.charCodeAt(0) | 0;
19
+ }, 0);
20
+ return `${basename}-${Math.abs(hash).toString(16)}`;
21
+ }
13
22
  async function showBanner() {
14
23
  console.clear();
15
- console.log(chalk.cyan.bold(`
24
+ console.log(chalk.white.bold(`
16
25
  ___ _ _
17
26
  / _ \\ _ __ | |_(_) __ _
18
27
  | | | | '_ \\| __| |/ _\` |
@@ -21,7 +30,7 @@ async function showBanner() {
21
30
  |_| |_|
22
31
  `));
23
32
  console.log(chalk.gray(' AI-powered code indexing & search'));
24
- console.log(chalk.gray(' v1.7.0\n'));
33
+ console.log(chalk.gray(' v2.0.0\n'));
25
34
  }
26
35
  function showHelp() {
27
36
  console.log(chalk.cyan.bold('Optiq CLI') + chalk.gray(' - AI-powered code indexing\n'));
@@ -260,63 +269,61 @@ async function indexOnce(targetPath, config) {
260
269
  try {
261
270
  const files = await collectFiles(targetPath);
262
271
  spinner.text = `Reading ${files.length} files...`;
263
- const fileContents = {};
264
- let processed = 0;
265
- for (const file of files) {
266
- try {
272
+ // Read files in parallel (100 at a time)
273
+ const PARALLEL_READS = 100;
274
+ const filesArray = [];
275
+ for (let i = 0; i < files.length; i += PARALLEL_READS) {
276
+ const chunk = files.slice(i, i + PARALLEL_READS);
277
+ const results = await Promise.allSettled(chunk.map(async (file) => {
267
278
  const content = await fs.readFile(file, 'utf-8');
268
279
  const relativePath = path.relative(targetPath, file).replace(/\\/g, '/');
269
- fileContents[relativePath] = content;
270
- processed++;
271
- if (processed % 50 === 0) {
272
- spinner.text = `Reading files... ${processed}/${files.length}`;
280
+ return { path: relativePath, content };
281
+ }));
282
+ // Filter successful reads
283
+ for (const result of results) {
284
+ if (result.status === 'fulfilled') {
285
+ filesArray.push(result.value);
273
286
  }
274
287
  }
275
- catch (error) {
276
- // Skip unreadable files
277
- }
288
+ spinner.text = `Reading... ${Math.min(i + PARALLEL_READS, files.length)}/${files.length}`;
278
289
  }
279
- const filesArray = Object.entries(fileContents).map(([path, content]) => ({
280
- path,
281
- content,
282
- }));
283
- // Batch upload
284
- const BATCH_SIZE = 50;
285
- let totalFiles = 0;
286
- let totalEntities = 0;
287
- let repoId = '';
290
+ // Generate repository ID from path
291
+ const repoId = generateRepoId(targetPath);
292
+ // Upload in larger batches (150 files per batch)
293
+ const BATCH_SIZE = 150;
294
+ let totalChunks = 0;
295
+ spinner.text = 'Indexing...';
288
296
  for (let i = 0; i < filesArray.length; i += BATCH_SIZE) {
289
297
  const batch = filesArray.slice(i, i + BATCH_SIZE);
290
298
  const batchNum = Math.floor(i / BATCH_SIZE) + 1;
291
299
  const totalBatches = Math.ceil(filesArray.length / BATCH_SIZE);
292
- spinner.text = `Uploading... batch ${batchNum}/${totalBatches}`;
293
- const response = await axios.post(`${BACKEND_URL}/api/nexus/index/content`, {
294
- repository_path: targetPath,
300
+ spinner.text = `Indexing... ${batchNum}/${totalBatches} (parallel processing)`;
301
+ // Call the Rust engine's /api/v1/index endpoint
302
+ const response = await axios.post(`${ENGINE_URL}/api/v1/index`, {
303
+ repository_id: repoId,
304
+ branch: 'main',
295
305
  files: batch,
296
306
  }, {
297
307
  headers: {
298
- 'X-API-Key': config.apiKey,
299
308
  'Content-Type': 'application/json',
300
309
  },
301
- timeout: 0,
310
+ timeout: 0, // No timeout - parallel processing handles large batches
302
311
  });
303
312
  if (!response.data.success) {
304
313
  spinner.fail(chalk.red('Indexing failed'));
305
- console.log(chalk.gray(response.data.error || 'Unknown error'));
314
+ console.log(chalk.gray(response.data.result?.errors?.join('\n') || 'Unknown error'));
306
315
  return;
307
316
  }
308
- repoId = response.data.repo_id;
309
- totalFiles += response.data.stats?.files_indexed || 0;
310
- totalEntities += response.data.stats?.entities_indexed || 0;
317
+ totalChunks += response.data.result?.chunks_created || 0;
311
318
  }
312
319
  spinner.succeed(chalk.cyan('Indexed'));
313
- console.log(chalk.gray(` ${totalFiles} files • ${totalEntities} entities`));
320
+ console.log(chalk.gray(` ${filesArray.length} files • ${totalChunks} chunks`));
314
321
  console.log(chalk.cyan('\n📊 Repository ID:'));
315
322
  console.log(chalk.white(` ${repoId}`));
316
- console.log(chalk.gray('\n Use this ID with the MCP server\n'));
323
+ console.log(chalk.gray('\n Use this ID for searches\n'));
317
324
  }
318
325
  catch (error) {
319
- spinner.fail(chalk.red('Failed'));
326
+ spinner.fail(chalk.red('Failed'));
320
327
  if (error.response?.data?.error) {
321
328
  console.log(chalk.gray(' ' + error.response.data.error));
322
329
  }
@@ -326,93 +333,65 @@ async function indexOnce(targetPath, config) {
326
333
  }
327
334
  }
328
335
  async function watchDirectory(targetPath, config) {
329
- // Check if repo is already indexed
330
- let repoId = null;
336
+ // Generate repository ID from path
337
+ let repoId = generateRepoId(targetPath);
338
+ // Do initial full index
339
+ const spinner = ora({ text: 'Collecting files...', color: 'cyan' }).start();
331
340
  try {
332
- const repoListResponse = await axios.get(`${BACKEND_URL}/api/repositories`, {
333
- headers: {
334
- 'X-API-Key': config.apiKey,
335
- },
336
- timeout: 10000,
337
- });
338
- if (repoListResponse.data.success && repoListResponse.data.repositories) {
339
- const existingRepo = repoListResponse.data.repositories.find((r) => r.path === targetPath);
340
- if (existingRepo) {
341
- repoId = existingRepo.id;
342
- console.log(chalk.cyan('✓ Already indexed'));
343
- console.log(chalk.gray(` Repo ID: ${repoId}\n`));
344
- }
345
- }
346
- }
347
- catch (error) {
348
- // Ignore errors, will do full index
349
- }
350
- // Only do full index if repo doesn't exist
351
- if (!repoId) {
352
- const spinner = ora({ text: 'Collecting files...', color: 'white' }).start();
353
- try {
354
- const files = await collectFiles(targetPath);
355
- spinner.text = `Indexing ${files.length} files...`;
356
- const fileContents = {};
357
- let processed = 0;
358
- for (const file of files) {
359
- try {
360
- const content = await fs.readFile(file, 'utf-8');
361
- const relativePath = path.relative(targetPath, file);
362
- fileContents[relativePath] = content;
363
- processed++;
364
- // Update progress
365
- const percent = Math.round((processed / files.length) * 100);
366
- spinner.text = `Indexing ${processed}/${files.length} files (${percent}%)`;
367
- }
368
- catch (error) {
369
- // Skip files that can't be read
370
- }
371
- }
372
- spinner.text = 'Uploading to Optiq...';
373
- const filesArray = Object.entries(fileContents).map(([path, content]) => ({
374
- path,
375
- content,
341
+ const files = await collectFiles(targetPath);
342
+ spinner.text = `Reading ${files.length} files...`;
343
+ // Read files in parallel (100 at a time)
344
+ const PARALLEL_READS = 100;
345
+ const filesArray = [];
346
+ for (let i = 0; i < files.length; i += PARALLEL_READS) {
347
+ const chunk = files.slice(i, i + PARALLEL_READS);
348
+ const results = await Promise.allSettled(chunk.map(async (file) => {
349
+ const content = await fs.readFile(file, 'utf-8');
350
+ const relativePath = path.relative(targetPath, file).replace(/\\/g, '/');
351
+ return { path: relativePath, content };
376
352
  }));
377
- // Batch upload files (50 at a time to avoid 413 errors)
378
- const BATCH_SIZE = 50;
379
- let uploadedCount = 0;
380
- for (let i = 0; i < filesArray.length; i += BATCH_SIZE) {
381
- const batch = filesArray.slice(i, i + BATCH_SIZE);
382
- const batchNum = Math.floor(i / BATCH_SIZE) + 1;
383
- const totalBatches = Math.ceil(filesArray.length / BATCH_SIZE);
384
- spinner.text = `Uploading batch ${batchNum}/${totalBatches} (${uploadedCount}/${filesArray.length} files)...`;
385
- const response = await axios.post(`${BACKEND_URL}/api/nexus/index/content`, {
386
- repository_path: targetPath,
387
- files: batch,
388
- }, {
389
- headers: {
390
- 'X-API-Key': config.apiKey,
391
- 'Content-Type': 'application/json',
392
- },
393
- timeout: 0, // No timeout for large codebases
394
- });
395
- if (!response.data.success) {
396
- spinner.fail(chalk.gray('Initial indexing failed'));
397
- console.log(chalk.gray(response.data.error || 'Unknown error'));
398
- return;
353
+ for (const result of results) {
354
+ if (result.status === 'fulfilled') {
355
+ filesArray.push(result.value);
399
356
  }
400
- repoId = response.data.repo_id;
401
- uploadedCount += batch.length;
402
357
  }
403
- spinner.succeed(chalk.cyan(`Indexed ${files.length} files`));
404
- console.log(chalk.gray(` Repo ID: ${repoId}\n`));
358
+ spinner.text = `Reading... ${Math.min(i + PARALLEL_READS, files.length)}/${files.length}`;
405
359
  }
406
- catch (error) {
407
- spinner.fail(chalk.red('Failed'));
408
- if (error.response?.data?.error) {
409
- console.log(chalk.gray(' ' + error.response.data.error));
410
- }
411
- else {
412
- console.log(chalk.gray(' ' + error.message));
360
+ // Upload in larger batches using Rust engine
361
+ const BATCH_SIZE = 150;
362
+ for (let i = 0; i < filesArray.length; i += BATCH_SIZE) {
363
+ const batch = filesArray.slice(i, i + BATCH_SIZE);
364
+ const batchNum = Math.floor(i / BATCH_SIZE) + 1;
365
+ const totalBatches = Math.ceil(filesArray.length / BATCH_SIZE);
366
+ spinner.text = `Indexing... ${batchNum}/${totalBatches} (parallel processing)`;
367
+ const response = await axios.post(`${ENGINE_URL}/api/v1/index`, {
368
+ repository_id: repoId,
369
+ branch: 'main',
370
+ files: batch,
371
+ }, {
372
+ headers: {
373
+ 'Content-Type': 'application/json',
374
+ },
375
+ timeout: 0,
376
+ });
377
+ if (!response.data.success) {
378
+ spinner.fail(chalk.red('Failed'));
379
+ console.log(chalk.gray(response.data.result?.errors?.join('\n') || 'Unknown error'));
380
+ return;
413
381
  }
414
- return;
415
382
  }
383
+ spinner.succeed(chalk.cyan(`Indexed ${files.length} files`));
384
+ console.log(chalk.gray(` Repo ID: ${repoId}\n`));
385
+ }
386
+ catch (error) {
387
+ spinner.fail(chalk.red('Failed'));
388
+ if (error.response?.data?.error) {
389
+ console.log(chalk.gray(' ' + error.response.data.error));
390
+ }
391
+ else {
392
+ console.log(chalk.gray(' ' + error.message));
393
+ }
394
+ return;
416
395
  }
417
396
  const ignorePatterns = await getGitIgnorePatterns(targetPath);
418
397
  const watcher = chokidar.watch(targetPath, {
@@ -446,43 +425,49 @@ async function watchDirectory(targetPath, config) {
446
425
  return `${minutes}m ${seconds % 60}s`;
447
426
  return `${seconds}s`;
448
427
  };
449
- const updateDashboard = () => {
428
+ let dashboardShown = false;
429
+ const showDashboard = () => {
450
430
  const uptime = formatUptime(Date.now() - sessionStartTime);
451
- const timeSinceLastIndex = Math.floor((Date.now() - lastIndexedTime) / 1000);
452
- const lines = [];
453
- lines.push(chalk.gray('─'.repeat(50)));
454
- lines.push(`${chalk.cyan('👀 Watching')} ${chalk.gray('•')} ${chalk.white(uptime)} ${chalk.gray('uptime')}`);
455
- lines.push(`${chalk.gray(' Indexed:')} ${chalk.white(totalIndexed)} ${chalk.gray('changes')} ${chalk.gray('•')} ${chalk.white(allIndexedFiles.size)} ${chalk.gray('files')}`);
456
- if (lastIndexedFile) {
431
+ // Move cursor up 5 lines to overwrite previous dashboard (but not on first display)
432
+ if (dashboardShown) {
433
+ process.stdout.write('\x1b[5A'); // Move up 5 lines
434
+ process.stdout.write('\x1b[0J'); // Clear from cursor to end of screen
435
+ }
436
+ dashboardShown = true;
437
+ console.log(chalk.gray('─'.repeat(50)));
438
+ console.log(chalk.cyan('👀 Watching') + chalk.gray(` • ${uptime} uptime • Press Ctrl+C to stop`));
439
+ const details = [];
440
+ if (totalIndexed > 0 && lastIndexedFile) {
441
+ const timeSinceLastIndex = Math.floor((Date.now() - lastIndexedTime) / 1000);
457
442
  const timeStr = timeSinceLastIndex < 60 ? `${timeSinceLastIndex}s` : `${Math.floor(timeSinceLastIndex / 60)}m`;
458
- lines.push(`${chalk.gray(' Last:')} ${chalk.white(lastIndexedFile)} ${chalk.gray(`(${timeStr} ago)`)}`);
443
+ details.push(chalk.gray(` Indexed: ${totalIndexed} changes • ${allIndexedFiles.size} files • Last: ${lastIndexedFile} (${timeStr} ago)`));
444
+ }
445
+ else {
446
+ details.push(chalk.gray(` Ready • 0 changes • Waiting for file modifications...`));
459
447
  }
460
- lines.push(chalk.gray('─'.repeat(50)));
461
- lines.push('');
462
- logUpdate(lines.join('\n'));
448
+ console.log(details.join(''));
449
+ console.log(chalk.gray('─'.repeat(50)));
450
+ console.log('');
463
451
  };
464
- // Show dashboard immediately and update every 5 seconds
465
- updateDashboard();
466
- setInterval(updateDashboard, 5000);
452
+ // Initial dashboard display
453
+ showDashboard();
454
+ // Update dashboard every 10 seconds to refresh uptime and "time ago"
455
+ setInterval(showDashboard, 10000);
467
456
  const processChanges = async () => {
468
457
  if (pendingChanges.size === 0 || isProcessing)
469
458
  return;
470
459
  isProcessing = true;
471
460
  const changes = Array.from(pendingChanges.entries());
472
461
  pendingChanges.clear();
473
- // Collect unique files first
474
- for (const [filePath] of changes) {
475
- const relativePath = path.relative(targetPath, filePath);
476
- allIndexedFiles.add(relativePath);
477
- }
478
462
  try {
479
463
  const filesArray = [];
480
464
  let hasChanges = false;
481
465
  for (const [filePath, changeType] of changes) {
482
- const relativePath = path.relative(targetPath, filePath);
466
+ const relativePath = path.relative(targetPath, filePath).replace(/\\/g, '/');
483
467
  if (changeType === 'unlink') {
484
468
  filesArray.push({ path: relativePath, content: null });
485
469
  fileContentCache.delete(relativePath);
470
+ allIndexedFiles.delete(relativePath);
486
471
  hasChanges = true;
487
472
  }
488
473
  else {
@@ -506,42 +491,62 @@ async function watchDirectory(targetPath, config) {
506
491
  isProcessing = false;
507
492
  return;
508
493
  }
509
- const response = await axios.post(`${BACKEND_URL}/api/nexus/index/incremental`, {
510
- repository_id: repoId,
511
- repository_path: targetPath,
512
- files: filesArray,
513
- }, {
514
- headers: {
515
- 'X-API-Key': config.apiKey,
516
- 'Content-Type': 'application/json',
517
- },
518
- timeout: 60000,
519
- });
520
- if (response.data.success) {
521
- totalIndexed += filesArray.length;
522
- lastIndexedTime = Date.now();
523
- // Update last indexed file
524
- if (filesArray.length === 1) {
525
- lastIndexedFile = filesArray[0].path;
494
+ // Filter out deleted files (content: null) - send only files with content
495
+ const filesToIndex = filesArray.filter(f => f.content !== null);
496
+ // For incremental updates, use the same /api/v1/index endpoint
497
+ // The Rust engine handles upserts automatically
498
+ if (filesToIndex.length > 0) {
499
+ const response = await axios.post(`${ENGINE_URL}/api/v1/index`, {
500
+ repository_id: repoId,
501
+ branch: 'main',
502
+ files: filesToIndex,
503
+ }, {
504
+ headers: {
505
+ 'Content-Type': 'application/json',
506
+ },
507
+ timeout: 60000,
508
+ });
509
+ if (response.data.success) {
510
+ // Add successfully indexed files to the set
511
+ for (const file of filesToIndex) {
512
+ allIndexedFiles.add(file.path);
513
+ }
514
+ totalIndexed += filesToIndex.length;
515
+ lastIndexedTime = Date.now();
516
+ // Update last indexed file
517
+ if (filesToIndex.length === 1) {
518
+ lastIndexedFile = filesToIndex[0].path;
519
+ }
520
+ else {
521
+ lastIndexedFile = `${filesToIndex.length} files`;
522
+ }
523
+ // Update dashboard
524
+ showDashboard();
526
525
  }
527
526
  else {
528
- lastIndexedFile = `${filesArray.length} files`;
527
+ // Clear dashboard, show error, redraw dashboard
528
+ process.stdout.write('\x1b[5A');
529
+ process.stdout.write('\x1b[0J');
530
+ dashboardShown = false;
531
+ console.log(chalk.red(`✗ Failed to index: ${response.data.result?.errors?.join(', ') || 'Unknown error'}\n`));
532
+ showDashboard();
529
533
  }
530
- // Update dashboard
531
- updateDashboard();
532
534
  }
533
535
  else {
534
- logUpdate.clear();
535
- console.log(chalk.red(`✗ Failed to index`));
536
- console.log(chalk.gray(` ${response.data.error}\n`));
537
- updateDashboard();
536
+ // Only deletions - just update the dashboard
537
+ totalIndexed += filesArray.length;
538
+ lastIndexedTime = Date.now();
539
+ lastIndexedFile = `${filesArray.length} deleted`;
540
+ showDashboard();
538
541
  }
539
542
  }
540
543
  catch (error) {
541
- logUpdate.clear();
542
- console.log(chalk.red(`✗ Failed to index`));
543
- console.log(chalk.gray(` ${error.response?.data?.error || error.message}\n`));
544
- updateDashboard();
544
+ // Clear dashboard, show error, redraw dashboard
545
+ process.stdout.write('\x1b[5A');
546
+ process.stdout.write('\x1b[0J');
547
+ dashboardShown = false;
548
+ console.log(chalk.red(`✗ Failed to index: ${error.response?.data?.error || error.message}\n`));
549
+ showDashboard();
545
550
  }
546
551
  isProcessing = false;
547
552
  // Check if there are more pending changes and process them
@@ -569,13 +574,18 @@ async function watchDirectory(targetPath, config) {
569
574
  scheduleProcess();
570
575
  })
571
576
  .on('error', (error) => {
572
- logUpdate.clear();
573
- console.log(chalk.red('✗ Watcher error:'), chalk.gray(error.message + '\n'));
574
- updateDashboard();
577
+ // Clear dashboard, show error, redraw dashboard
578
+ process.stdout.write('\x1b[5A');
579
+ process.stdout.write('\x1b[0J');
580
+ dashboardShown = false;
581
+ console.log(chalk.red(`✗ Watcher error: ${error.message}\n`));
582
+ showDashboard();
575
583
  });
576
584
  process.on('SIGINT', () => {
577
- logUpdate.clear();
578
- console.log(chalk.cyan('\n✓ Stopped watching'));
585
+ // Clear dashboard and show exit message
586
+ process.stdout.write('\x1b[5A');
587
+ process.stdout.write('\x1b[0J');
588
+ console.log(chalk.cyan('✓ Stopped watching\n'));
579
589
  watcher.close();
580
590
  process.exit(0);
581
591
  });
@@ -304,22 +304,50 @@ export async function getGitIgnorePatterns(dir) {
304
304
  return patterns;
305
305
  }
306
306
  export function shouldIgnoreFile(relativePath, patterns) {
307
- const parts = relativePath.split(path.sep);
307
+ // Normalize path separators to forward slashes for consistent matching
308
+ const normalizedPath = relativePath.replace(/\\/g, '/');
309
+ const parts = normalizedPath.split('/');
310
+ const fileName = parts[parts.length - 1];
308
311
  for (const pattern of patterns) {
309
- // Exact match
312
+ // Skip empty patterns
313
+ if (!pattern)
314
+ continue;
315
+ // Exact directory/file name match (e.g., 'node_modules', '.git')
310
316
  if (parts.includes(pattern)) {
311
317
  return true;
312
318
  }
313
- // Wildcard match
314
- if (pattern.includes('*')) {
315
- const regex = new RegExp('^' + pattern.replace(/\*/g, '.*') + '$');
316
- if (regex.test(relativePath)) {
319
+ // Extension match (e.g., '*.log', '*.pyc')
320
+ if (pattern.startsWith('*.')) {
321
+ const ext = pattern.slice(1); // Get '.log' from '*.log'
322
+ if (fileName.endsWith(ext)) {
317
323
  return true;
318
324
  }
319
325
  }
320
- // Extension match
321
- if (pattern.startsWith('*.') && relativePath.endsWith(pattern.slice(1))) {
322
- return true;
326
+ // Glob pattern match (e.g., '**/*.rs.bk', 'cmake-build-*')
327
+ if (pattern.includes('*') && !pattern.startsWith('*.')) {
328
+ try {
329
+ // Convert glob to regex
330
+ let regexPattern = pattern
331
+ .replace(/[.+^${}()|[\]\\]/g, '\\$&') // Escape special regex chars first
332
+ .replace(/\*\*/g, '{{GLOBSTAR}}') // Temporarily replace **
333
+ .replace(/\*/g, '[^/]*') // * matches anything except /
334
+ .replace(/{{GLOBSTAR}}/g, '.*') // ** matches anything including /
335
+ .replace(/\?/g, '[^/]'); // ? matches single char except /
336
+ const regex = new RegExp(`(^|/)${regexPattern}($|/|$)`);
337
+ if (regex.test(normalizedPath)) {
338
+ return true;
339
+ }
340
+ }
341
+ catch {
342
+ // Invalid regex pattern, skip
343
+ }
344
+ }
345
+ // Path prefix match (e.g., 'docs/_build' should match 'docs/_build/something')
346
+ if (pattern.includes('/') && !pattern.includes('*')) {
347
+ const normalizedPattern = pattern.replace(/\\/g, '/');
348
+ if (normalizedPath === normalizedPattern || normalizedPath.startsWith(normalizedPattern + '/')) {
349
+ return true;
350
+ }
323
351
  }
324
352
  }
325
353
  return false;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@optiqcode/cli",
3
- "version": "1.8.0",
3
+ "version": "2.1.0",
4
4
  "description": "CLI tool for Optiq - automatic code indexing and context engine",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -42,7 +42,6 @@
42
42
  "axios": "^1.6.0",
43
43
  "chalk": "^5.3.0",
44
44
  "chokidar": "^3.5.3",
45
- "log-update": "^7.0.1",
46
45
  "ora": "^8.0.1",
47
46
  "prompts": "^2.4.2"
48
47
  },