@optiqcode/cli 2.1.2 → 2.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +31 -5
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -10,6 +10,8 @@ import { getConfig, saveConfig } from './utils/config.js';
10
10
  import { isValidDirectory, getGitIgnorePatterns, shouldIgnoreFile } from './utils/files.js';
11
11
  const BACKEND_URL = process.env.OPTIQ_BACKEND_URL || 'https://api.optiqcode.com';
12
12
  const ENGINE_URL = process.env.OPTIQ_ENGINE_URL || 'https://api.optiqcode.com';
13
+ // Debug mode - set OPTIQ_DEBUG=1 to see request details
14
+ const DEBUG = process.env.OPTIQ_DEBUG === '1';
13
15
  // Helper to generate repository ID from path
14
16
  function generateRepoId(targetPath) {
15
17
  // Use path basename + hash of full path for uniqueness
@@ -271,17 +273,24 @@ async function indexOnce(targetPath, config) {
271
273
  spinner.text = `Reading ${files.length} files...`;
272
274
  // Read files in parallel (100 at a time)
273
275
  const PARALLEL_READS = 100;
276
+ const MAX_FILE_SIZE = 100_000; // 100KB max per file
274
277
  const filesArray = [];
275
278
  for (let i = 0; i < files.length; i += PARALLEL_READS) {
276
279
  const chunk = files.slice(i, i + PARALLEL_READS);
277
280
  const results = await Promise.allSettled(chunk.map(async (file) => {
278
281
  const content = await fs.readFile(file, 'utf-8');
279
282
  const relativePath = path.relative(targetPath, file).replace(/\\/g, '/');
283
+ // Skip files that are too large
284
+ if (content.length > MAX_FILE_SIZE) {
285
+ if (DEBUG)
286
+ console.log(chalk.yellow(`[DEBUG] Skipping large file: ${relativePath} (${Math.round(content.length / 1024)}KB)`));
287
+ return null;
288
+ }
280
289
  return { path: relativePath, content };
281
290
  }));
282
- // Filter successful reads
291
+ // Filter successful reads (and non-null)
283
292
  for (const result of results) {
284
- if (result.status === 'fulfilled') {
293
+ if (result.status === 'fulfilled' && result.value) {
285
294
  filesArray.push(result.value);
286
295
  }
287
296
  }
@@ -290,14 +299,22 @@ async function indexOnce(targetPath, config) {
290
299
  // Generate repository ID from path
291
300
  const repoId = generateRepoId(targetPath);
292
301
  // Upload in small batches to avoid gateway timeouts
293
- const BATCH_SIZE = 20;
302
+ const BATCH_SIZE = 5;
294
303
  let totalChunks = 0;
304
+ if (DEBUG) {
305
+ console.log(chalk.gray(`\n[DEBUG] Sending to: ${ENGINE_URL}/api/v1/index`));
306
+ console.log(chalk.gray(`[DEBUG] Repo ID: ${repoId}`));
307
+ console.log(chalk.gray(`[DEBUG] Total files: ${filesArray.length}`));
308
+ }
295
309
  spinner.text = 'Indexing...';
296
310
  for (let i = 0; i < filesArray.length; i += BATCH_SIZE) {
297
311
  const batch = filesArray.slice(i, i + BATCH_SIZE);
298
312
  const batchNum = Math.floor(i / BATCH_SIZE) + 1;
299
313
  const totalBatches = Math.ceil(filesArray.length / BATCH_SIZE);
300
314
  spinner.text = `Indexing... ${batchNum}/${totalBatches}`;
315
+ if (DEBUG) {
316
+ console.log(chalk.gray(`\n[DEBUG] Batch ${batchNum}: ${batch.map(f => f.path).join(', ')}`));
317
+ }
301
318
  // Retry logic for transient failures
302
319
  let retries = 3;
303
320
  let lastError = null;
@@ -324,6 +341,12 @@ async function indexOnce(targetPath, config) {
324
341
  catch (err) {
325
342
  lastError = err;
326
343
  retries--;
344
+ if (DEBUG) {
345
+ console.log(chalk.red(`\n[DEBUG] Error: ${err.code || err.message}`));
346
+ if (err.response) {
347
+ console.log(chalk.red(`[DEBUG] Status: ${err.response.status}`));
348
+ }
349
+ }
327
350
  if (retries > 0) {
328
351
  spinner.text = `Retrying batch ${batchNum}... (${retries} attempts left)`;
329
352
  await new Promise(r => setTimeout(r, 2000)); // Wait 2s before retry
@@ -360,23 +383,26 @@ async function watchDirectory(targetPath, config) {
360
383
  spinner.text = `Reading ${files.length} files...`;
361
384
  // Read files in parallel (100 at a time)
362
385
  const PARALLEL_READS = 100;
386
+ const MAX_FILE_SIZE = 100_000; // 100KB max per file
363
387
  const filesArray = [];
364
388
  for (let i = 0; i < files.length; i += PARALLEL_READS) {
365
389
  const chunk = files.slice(i, i + PARALLEL_READS);
366
390
  const results = await Promise.allSettled(chunk.map(async (file) => {
367
391
  const content = await fs.readFile(file, 'utf-8');
368
392
  const relativePath = path.relative(targetPath, file).replace(/\\/g, '/');
393
+ if (content.length > MAX_FILE_SIZE)
394
+ return null;
369
395
  return { path: relativePath, content };
370
396
  }));
371
397
  for (const result of results) {
372
- if (result.status === 'fulfilled') {
398
+ if (result.status === 'fulfilled' && result.value) {
373
399
  filesArray.push(result.value);
374
400
  }
375
401
  }
376
402
  spinner.text = `Reading... ${Math.min(i + PARALLEL_READS, files.length)}/${files.length}`;
377
403
  }
378
404
  // Upload in small batches to avoid gateway timeouts
379
- const BATCH_SIZE = 20;
405
+ const BATCH_SIZE = 5;
380
406
  for (let i = 0; i < filesArray.length; i += BATCH_SIZE) {
381
407
  const batch = filesArray.slice(i, i + BATCH_SIZE);
382
408
  const batchNum = Math.floor(i / BATCH_SIZE) + 1;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@optiqcode/cli",
3
- "version": "2.1.2",
3
+ "version": "2.1.4",
4
4
  "description": "CLI tool for Optiq - automatic code indexing and context engine",
5
5
  "type": "module",
6
6
  "main": "dist/index.js",