@optiqcode/cli 1.7.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/dist/index.js +96 -77
  2. package/package.json +1 -2
package/dist/index.js CHANGED
@@ -6,13 +6,12 @@ import ora from 'ora';
6
6
  import chokidar from 'chokidar';
7
7
  import path from 'path';
8
8
  import fs from 'fs/promises';
9
- import logUpdate from 'log-update';
10
9
  import { getConfig, saveConfig } from './utils/config.js';
11
10
  import { isValidDirectory, getGitIgnorePatterns, shouldIgnoreFile } from './utils/files.js';
12
11
  const BACKEND_URL = process.env.OPTIQ_BACKEND_URL || 'https://api.optiqcode.com';
13
12
  async function showBanner() {
14
13
  console.clear();
15
- console.log(chalk.cyan.bold(`
14
+ console.log(chalk.white.bold(`
16
15
  ___ _ _
17
16
  / _ \\ _ __ | |_(_) __ _
18
17
  | | | | '_ \\| __| |/ _\` |
@@ -21,7 +20,7 @@ async function showBanner() {
21
20
  |_| |_|
22
21
  `));
23
22
  console.log(chalk.gray(' AI-powered code indexing & search'));
24
- console.log(chalk.gray(' v1.6.0\n'));
23
+ console.log(chalk.gray(' v2.0.0\n'));
25
24
  }
26
25
  function showHelp() {
27
26
  console.log(chalk.cyan.bold('Optiq CLI') + chalk.gray(' - AI-powered code indexing\n'));
@@ -260,36 +259,35 @@ async function indexOnce(targetPath, config) {
260
259
  try {
261
260
  const files = await collectFiles(targetPath);
262
261
  spinner.text = `Reading ${files.length} files...`;
263
- const fileContents = {};
264
- let processed = 0;
265
- for (const file of files) {
266
- try {
262
+ // Read files in parallel (100 at a time)
263
+ const PARALLEL_READS = 100;
264
+ const filesArray = [];
265
+ for (let i = 0; i < files.length; i += PARALLEL_READS) {
266
+ const chunk = files.slice(i, i + PARALLEL_READS);
267
+ const results = await Promise.allSettled(chunk.map(async (file) => {
267
268
  const content = await fs.readFile(file, 'utf-8');
268
269
  const relativePath = path.relative(targetPath, file).replace(/\\/g, '/');
269
- fileContents[relativePath] = content;
270
- processed++;
271
- if (processed % 50 === 0) {
272
- spinner.text = `Reading files... ${processed}/${files.length}`;
270
+ return { path: relativePath, content };
271
+ }));
272
+ // Filter successful reads
273
+ for (const result of results) {
274
+ if (result.status === 'fulfilled') {
275
+ filesArray.push(result.value);
273
276
  }
274
277
  }
275
- catch (error) {
276
- // Skip unreadable files
277
- }
278
+ spinner.text = `Reading... ${Math.min(i + PARALLEL_READS, files.length)}/${files.length}`;
278
279
  }
279
- const filesArray = Object.entries(fileContents).map(([path, content]) => ({
280
- path,
281
- content,
282
- }));
283
- // Batch upload
284
- const BATCH_SIZE = 50;
280
+ // Upload in larger batches (150 files per batch)
281
+ const BATCH_SIZE = 150;
285
282
  let totalFiles = 0;
286
283
  let totalEntities = 0;
287
284
  let repoId = '';
285
+ spinner.text = 'Uploading...';
288
286
  for (let i = 0; i < filesArray.length; i += BATCH_SIZE) {
289
287
  const batch = filesArray.slice(i, i + BATCH_SIZE);
290
288
  const batchNum = Math.floor(i / BATCH_SIZE) + 1;
291
289
  const totalBatches = Math.ceil(filesArray.length / BATCH_SIZE);
292
- spinner.text = `Uploading... batch ${batchNum}/${totalBatches}`;
290
+ spinner.text = `Uploading... ${batchNum}/${totalBatches}`;
293
291
  const response = await axios.post(`${BACKEND_URL}/api/nexus/index/content`, {
294
292
  repository_path: targetPath,
295
293
  files: batch,
@@ -316,7 +314,7 @@ async function indexOnce(targetPath, config) {
316
314
  console.log(chalk.gray('\n Use this ID with the MCP server\n'));
317
315
  }
318
316
  catch (error) {
319
- spinner.fail(chalk.red('Failed'));
317
+ spinner.fail(chalk.red('Failed'));
320
318
  if (error.response?.data?.error) {
321
319
  console.log(chalk.gray(' ' + error.response.data.error));
322
320
  }
@@ -349,39 +347,35 @@ async function watchDirectory(targetPath, config) {
349
347
  }
350
348
  // Only do full index if repo doesn't exist
351
349
  if (!repoId) {
352
- const spinner = ora({ text: 'Collecting files...', color: 'white' }).start();
350
+ const spinner = ora({ text: 'Collecting files...', color: 'cyan' }).start();
353
351
  try {
354
352
  const files = await collectFiles(targetPath);
355
- spinner.text = `Indexing ${files.length} files...`;
356
- const fileContents = {};
357
- let processed = 0;
358
- for (const file of files) {
359
- try {
353
+ spinner.text = `Reading ${files.length} files...`;
354
+ // Read files in parallel (100 at a time)
355
+ const PARALLEL_READS = 100;
356
+ const filesArray = [];
357
+ for (let i = 0; i < files.length; i += PARALLEL_READS) {
358
+ const chunk = files.slice(i, i + PARALLEL_READS);
359
+ const results = await Promise.allSettled(chunk.map(async (file) => {
360
360
  const content = await fs.readFile(file, 'utf-8');
361
- const relativePath = path.relative(targetPath, file);
362
- fileContents[relativePath] = content;
363
- processed++;
364
- // Update progress
365
- const percent = Math.round((processed / files.length) * 100);
366
- spinner.text = `Indexing ${processed}/${files.length} files (${percent}%)`;
367
- }
368
- catch (error) {
369
- // Skip files that can't be read
361
+ const relativePath = path.relative(targetPath, file).replace(/\\/g, '/');
362
+ return { path: relativePath, content };
363
+ }));
364
+ for (const result of results) {
365
+ if (result.status === 'fulfilled') {
366
+ filesArray.push(result.value);
367
+ }
370
368
  }
369
+ spinner.text = `Reading... ${Math.min(i + PARALLEL_READS, files.length)}/${files.length}`;
371
370
  }
372
- spinner.text = 'Uploading to Optiq...';
373
- const filesArray = Object.entries(fileContents).map(([path, content]) => ({
374
- path,
375
- content,
376
- }));
377
- // Batch upload files (50 at a time to avoid 413 errors)
378
- const BATCH_SIZE = 50;
371
+ // Upload in larger batches
372
+ const BATCH_SIZE = 150;
379
373
  let uploadedCount = 0;
380
374
  for (let i = 0; i < filesArray.length; i += BATCH_SIZE) {
381
375
  const batch = filesArray.slice(i, i + BATCH_SIZE);
382
376
  const batchNum = Math.floor(i / BATCH_SIZE) + 1;
383
377
  const totalBatches = Math.ceil(filesArray.length / BATCH_SIZE);
384
- spinner.text = `Uploading batch ${batchNum}/${totalBatches} (${uploadedCount}/${filesArray.length} files)...`;
378
+ spinner.text = `Uploading... ${batchNum}/${totalBatches}`;
385
379
  const response = await axios.post(`${BACKEND_URL}/api/nexus/index/content`, {
386
380
  repository_path: targetPath,
387
381
  files: batch,
@@ -390,10 +384,10 @@ async function watchDirectory(targetPath, config) {
390
384
  'X-API-Key': config.apiKey,
391
385
  'Content-Type': 'application/json',
392
386
  },
393
- timeout: 0, // No timeout for large codebases
387
+ timeout: 0,
394
388
  });
395
389
  if (!response.data.success) {
396
- spinner.fail(chalk.gray('Initial indexing failed'));
390
+ spinner.fail(chalk.red('Failed'));
397
391
  console.log(chalk.gray(response.data.error || 'Unknown error'));
398
392
  return;
399
393
  }
@@ -446,42 +440,49 @@ async function watchDirectory(targetPath, config) {
446
440
  return `${minutes}m ${seconds % 60}s`;
447
441
  return `${seconds}s`;
448
442
  };
449
- const updateDashboard = () => {
443
+ let dashboardShown = false;
444
+ const showDashboard = () => {
450
445
  const uptime = formatUptime(Date.now() - sessionStartTime);
451
- const timeSinceLastIndex = Math.floor((Date.now() - lastIndexedTime) / 1000);
452
- const lines = [];
453
- lines.push(chalk.gray(''.repeat(50)));
454
- lines.push(`${chalk.cyan('👀 Watching')} ${chalk.gray('•')} ${chalk.white(uptime)} ${chalk.gray('uptime')}`);
455
- lines.push(`${chalk.gray(' Indexed:')} ${chalk.white(totalIndexed)} ${chalk.gray('changes')} ${chalk.gray('•')} ${chalk.white(allIndexedFiles.size)} ${chalk.gray('files')}`);
456
- if (lastIndexedFile) {
446
+ // Move cursor up 5 lines to overwrite previous dashboard (but not on first display)
447
+ if (dashboardShown) {
448
+ process.stdout.write('\x1b[5A'); // Move up 5 lines
449
+ process.stdout.write('\x1b[0J'); // Clear from cursor to end of screen
450
+ }
451
+ dashboardShown = true;
452
+ console.log(chalk.gray('─'.repeat(50)));
453
+ console.log(chalk.cyan('👀 Watching') + chalk.gray(` • ${uptime} uptime • Press Ctrl+C to stop`));
454
+ const details = [];
455
+ if (totalIndexed > 0 && lastIndexedFile) {
456
+ const timeSinceLastIndex = Math.floor((Date.now() - lastIndexedTime) / 1000);
457
457
  const timeStr = timeSinceLastIndex < 60 ? `${timeSinceLastIndex}s` : `${Math.floor(timeSinceLastIndex / 60)}m`;
458
- lines.push(`${chalk.gray(' Last:')} ${chalk.white(lastIndexedFile)} ${chalk.gray(`(${timeStr} ago)`)}`);
458
+ details.push(chalk.gray(` Indexed: ${totalIndexed} changes • ${allIndexedFiles.size} files • Last: ${lastIndexedFile} (${timeStr} ago)`));
459
459
  }
460
- lines.push(chalk.gray('─'.repeat(50)));
461
- lines.push('');
462
- logUpdate(lines.join('\n'));
460
+ else {
461
+ details.push(chalk.gray(` Ready • 0 changes • Waiting for file modifications...`));
462
+ }
463
+ console.log(details.join(''));
464
+ console.log(chalk.gray('─'.repeat(50)));
465
+ console.log('');
463
466
  };
464
- // Update dashboard every 5 seconds to show uptime
465
- setInterval(updateDashboard, 5000);
467
+ // Initial dashboard display
468
+ showDashboard();
469
+ // Update dashboard every 10 seconds to refresh uptime and "time ago"
470
+ setInterval(showDashboard, 10000);
466
471
  const processChanges = async () => {
467
472
  if (pendingChanges.size === 0 || isProcessing)
468
473
  return;
469
474
  isProcessing = true;
470
475
  const changes = Array.from(pendingChanges.entries());
471
476
  pendingChanges.clear();
472
- // Collect unique files first
473
- for (const [filePath] of changes) {
474
- const relativePath = path.relative(targetPath, filePath);
475
- allIndexedFiles.add(relativePath);
476
- }
477
477
  try {
478
478
  const filesArray = [];
479
479
  let hasChanges = false;
480
480
  for (const [filePath, changeType] of changes) {
481
- const relativePath = path.relative(targetPath, filePath);
481
+ const relativePath = path.relative(targetPath, filePath).replace(/\\/g, '/');
482
482
  if (changeType === 'unlink') {
483
483
  filesArray.push({ path: relativePath, content: null });
484
484
  fileContentCache.delete(relativePath);
485
+ allIndexedFiles.delete(relativePath);
485
486
  hasChanges = true;
486
487
  }
487
488
  else {
@@ -517,6 +518,12 @@ async function watchDirectory(targetPath, config) {
517
518
  timeout: 60000,
518
519
  });
519
520
  if (response.data.success) {
521
+ // Add successfully indexed files to the set
522
+ for (const file of filesArray) {
523
+ if (file.content !== null) {
524
+ allIndexedFiles.add(file.path);
525
+ }
526
+ }
520
527
  totalIndexed += filesArray.length;
521
528
  lastIndexedTime = Date.now();
522
529
  // Update last indexed file
@@ -527,18 +534,24 @@ async function watchDirectory(targetPath, config) {
527
534
  lastIndexedFile = `${filesArray.length} files`;
528
535
  }
529
536
  // Update dashboard
530
- updateDashboard();
537
+ showDashboard();
531
538
  }
532
539
  else {
533
- logUpdate.clear();
534
- console.log(chalk.red(`✗ Failed to index`));
535
- console.log(chalk.gray(` ${response.data.error}`));
540
+ // Clear dashboard, show error, redraw dashboard
541
+ process.stdout.write('\x1b[5A');
542
+ process.stdout.write('\x1b[0J');
543
+ dashboardShown = false;
544
+ console.log(chalk.red(`✗ Failed to index: ${response.data.error}\n`));
545
+ showDashboard();
536
546
  }
537
547
  }
538
548
  catch (error) {
539
- logUpdate.clear();
540
- console.log(chalk.red(`✗ Failed to index`));
541
- console.log(chalk.gray(` ${error.response?.data?.error || error.message}`));
549
+ // Clear dashboard, show error, redraw dashboard
550
+ process.stdout.write('\x1b[5A');
551
+ process.stdout.write('\x1b[0J');
552
+ dashboardShown = false;
553
+ console.log(chalk.red(`✗ Failed to index: ${error.response?.data?.error || error.message}\n`));
554
+ showDashboard();
542
555
  }
543
556
  isProcessing = false;
544
557
  // Check if there are more pending changes and process them
@@ -566,12 +579,18 @@ async function watchDirectory(targetPath, config) {
566
579
  scheduleProcess();
567
580
  })
568
581
  .on('error', (error) => {
569
- logUpdate.clear();
570
- console.log(chalk.red('✗ Watcher error:'), chalk.gray(error.message));
582
+ // Clear dashboard, show error, redraw dashboard
583
+ process.stdout.write('\x1b[5A');
584
+ process.stdout.write('\x1b[0J');
585
+ dashboardShown = false;
586
+ console.log(chalk.red(`✗ Watcher error: ${error.message}\n`));
587
+ showDashboard();
571
588
  });
572
589
  process.on('SIGINT', () => {
573
- logUpdate.clear();
574
- console.log(chalk.cyan('\n✓ Stopped watching'));
590
+ // Clear dashboard and show exit message
591
+ process.stdout.write('\x1b[5A');
592
+ process.stdout.write('\x1b[0J');
593
+ console.log(chalk.cyan('✓ Stopped watching\n'));
575
594
  watcher.close();
576
595
  process.exit(0);
577
596
  });
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@optiqcode/cli",
3
- "version": "1.7.0",
3
+ "version": "2.0.0",
4
4
  "description": "CLI tool for Optiq - automatic code indexing and context engine",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",
@@ -42,7 +42,6 @@
42
42
  "axios": "^1.6.0",
43
43
  "chalk": "^5.3.0",
44
44
  "chokidar": "^3.5.3",
45
- "log-update": "^7.0.1",
46
45
  "ora": "^8.0.1",
47
46
  "prompts": "^2.4.2"
48
47
  },