claude-evolve 1.8.10 → 1.8.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -226,39 +226,42 @@ class AutoStatus:
  try:
  data = self.get_status_data()
  except Exception as e:
- self.display.clear_screen()
  self.display.move_cursor(1, 1)
+ self.display.clear_line()
  print(f"Error reading status: {e}")
  return
-
- # Clear screen and start rendering
- self.display.clear_screen()
+
+ # Move to top and start rendering (no full clear to avoid flicker)
+ self.display.move_cursor(1, 1)
  row = 1
 
  # Header
  self.display.move_cursor(row, 1)
+ self.display.clear_line()
  header = "Claude Evolution Auto-Status"
- print(f"\033[1;36m{header.center(self.display.cols)}\033[0m")
+ print(f"\033[1;36m{header.center(self.display.cols)}\033[0m", end="")
  row += 1
-
+
  # Timestamp and working dir
  self.display.move_cursor(row, 1)
+ self.display.clear_line()
  timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
  working_dir = os.path.basename(data["working_dir"])
- print(f"Last updated: {timestamp} | Working dir: {working_dir} | Press 'q' to quit")
+ print(f"Last updated: {timestamp} | Working dir: {working_dir} | Press 'q' to quit", end="")
  row += 2
-
+
  # Leader
  self.display.move_cursor(row, 1)
+ self.display.clear_line()
  if data["leader"]:
  leader_id, leader_desc, leader_score = data["leader"]
  # Truncate description for leader
  max_desc_len = self.display.cols - 30
  if len(leader_desc) > max_desc_len:
  leader_desc = leader_desc[:max_desc_len-3] + "..."
- print(f"\033[1;32mLeader:\033[0m {leader_id} | {leader_score:.4f} | {leader_desc}")
+ print(f"\033[1;32mLeader:\033[0m {leader_id} | {leader_score:.4f} | {leader_desc}", end="")
  else:
- print("\033[1;32mLeader:\033[0m None (no completed candidates)")
+ print("\033[1;32mLeader:\033[0m None (no completed candidates)", end="")
  row += 2
 
  # Generation table
@@ -266,14 +269,16 @@ class AutoStatus:
  if generations:
  # Table header
  self.display.move_cursor(row, 1)
+ self.display.clear_line()
  header_fmt = "{:<10} | {:^25} | {:>10} | {:>8} | {}".format(
  "Generation", "Stats (p/c/f/r/s)", "Top ID", "Score", "Description"
  )
- print("\033[1m" + header_fmt[:self.display.cols] + "\033[0m")
+ print("\033[1m" + header_fmt[:self.display.cols] + "\033[0m", end="")
  row += 1
-
+
  self.display.move_cursor(row, 1)
- print("-" * min(self.display.cols, len(header_fmt)))
+ self.display.clear_line()
+ print("-" * min(self.display.cols, len(header_fmt)), end="")
  row += 1
  # Sort generations numerically by extracting the number after "gen"
  # Put baseline first, then sort numerically
@@ -299,21 +304,22 @@ class AutoStatus:
  for gen in sorted_gens[start_idx:]:
  if row >= self.display.rows - 1:
  break
-
+
  gen_data = generations[gen]
  stats_str = f"{gen_data['pending']}/{gen_data['complete']}/{gen_data['failed']}/{gen_data['running']}"
  if gen_data['skipped'] > 0:
  stats_str += f"/{gen_data['skipped']}"
-
+
  self.display.move_cursor(row, 1)
-
+ self.display.clear_line()
+
  if gen_data["best"]:
  best_id, best_desc, best_score = gen_data["best"]
  # Truncate description
  max_desc_len = self.display.cols - 55
  if len(best_desc) > max_desc_len:
  best_desc = best_desc[:max_desc_len-3] + "..."
-
+
  # Highlight if this is the overall leader
  if data["leader"] and best_id == data["leader"][0]:
  line = "{:<10} | {:^25} | \033[32m{:>10}\033[0m | {:>8.4f} | {}".format(
@@ -327,10 +333,16 @@ class AutoStatus:
  line = "{:<10} | {:^25} | {:>10} | {:>8} | {}".format(
  gen, stats_str, "-", "-", "No completed candidates"
  )
-
- print(line[:self.display.cols])
+
+ print(line[:self.display.cols], end="")
  row += 1
-
+
+ # Clear any remaining lines from previous render
+ while row < self.display.rows:
+ self.display.move_cursor(row, 1)
+ self.display.clear_line()
+ row += 1
+
  # Ensure cursor is at bottom
  self.display.move_cursor(self.display.rows, 1)
  sys.stdout.flush()
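The rendering hunks above drop the full-screen clear in favor of positioning the cursor on each row, clearing just that line, and printing with end="" so nothing pushes subsequent rows around; any rows left over from a previous, longer render are cleared at the end. A minimal standalone sketch of the same pattern follows; it writes raw ANSI escapes instead of going through the package's display helper, and the render loop and row contents are illustrative only, not the package's code.

    import sys
    import time

    # ANSI escapes: position the cursor and clear a single line, rather than
    # clearing the whole screen on every refresh (which causes visible flicker).
    def move_cursor(row, col):
        sys.stdout.write(f"\033[{row};{col}H")

    def clear_line():
        sys.stdout.write("\033[2K")

    def render(lines, total_rows=24):
        row = 1
        for text in lines:
            move_cursor(row, 1)
            clear_line()
            sys.stdout.write(text)  # no trailing newline, mirroring end=""
            row += 1
        # Clear any rows left over from a previous, longer render
        while row <= total_rows:
            move_cursor(row, 1)
            clear_line()
            row += 1
        sys.stdout.flush()

    if __name__ == "__main__":
        for i in range(3):
            render([f"Claude Evolution Auto-Status (refresh {i})", "Leader: none yet"])
            time.sleep(1)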
@@ -366,8 +378,8 @@ class AutoStatus:
  sys.stdout.flush()
  time.sleep(2) # Give time to read error
 
- # Check for input and wait
- for _ in range(10): # Check 10 times per second
+ # Check for input and wait (update every 5 seconds)
+ for _ in range(50): # Check 50 times over 5 seconds
  if self.check_input():
  break
  time.sleep(0.1)
@@ -362,6 +362,12 @@ print(max_id + 1)
  "
  }
 
+ # AIDEV-NOTE: This function had a critical race condition bug that caused wrong rows to be updated
+ # The bug occurred when parallel processes modified the main CSV between temp CSV creation and append.
+ # FIX: Now requires original_main_csv_lines parameter (6th arg) to track the exact line count at copy time.
+ # This ensures we always append the correct new rows from temp CSV, regardless of concurrent modifications.
+ # Without this fix, the system would update wrong IDs (e.g., claim to add gen81 but update gen80 instead).
+ #
  # Validate that AI directly modified the CSV file
  validate_direct_csv_modification() {
  local temp_csv="$1"
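The AIDEV-NOTE added above pins down the root cause: the old validation derived the number of "new" rows from the main CSV's length at validation time rather than at copy time, so a concurrent append to the main CSV shifted which temp-CSV rows were treated as new. A toy Python illustration of that off-by-N failure (row data entirely made up) follows.

    # Toy illustration of the race described in the AIDEV-NOTE (made-up data).
    main_csv = ["id,description", "gen79,foo", "gen80,bar"]

    # Worker copies the main CSV to a temp CSV and records its length at copy time.
    temp_csv = list(main_csv)
    lines_at_copy_time = len(temp_csv)          # 3 (header + 2 data rows)

    # The AI appends one new idea to the temp copy.
    temp_csv.append("gen81,new idea")

    # Meanwhile another process appends a row to the *main* CSV.
    main_csv.append("gen80-alt,concurrent row")

    # Old behaviour: use the main CSV's current length to find the "new" rows.
    wrong_new_rows = temp_csv[len(main_csv):]          # [] -- gen81 is silently lost

    # Fixed behaviour: use the length captured when the copy was made.
    correct_new_rows = temp_csv[lines_at_copy_time:]   # ["gen81,new idea"]

    print(wrong_new_rows, correct_new_rows)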
@@ -369,6 +375,7 @@ validate_direct_csv_modification() {
  local idea_type="$3"
  local ai_model="${4:-}" # AI model that generated the ideas
  local expected_ids="${5:-}" # Optional: comma or space separated list of expected IDs
+ local original_main_csv_lines="${6:-}" # CRITICAL: Line count of main CSV when temp CSV was created
 
  # Check if the file was actually modified
  if [[ ! -f "$temp_csv" ]]; then
@@ -376,32 +383,37 @@ validate_direct_csv_modification() {
  return 1
  fi
 
- # Get the count before modification from the temp CSV (which was copied from original before AI ran)
- # We need to track this before the AI runs by reading from the beginning state
- # First, get a fresh count from the current main CSV (which reflects any previous operations in this session)
- local current_original_count
- current_original_count=$(grep -v '^[[:space:]]*$' "$FULL_CSV_PATH" | tail -n +2 | wc -l | tr -d '[:space:]')
-
  # Count data rows in the modified temp CSV
  local new_count
  new_count=$(grep -v '^[[:space:]]*$' "$temp_csv" | tail -n +2 | wc -l | tr -d '[:space:]')
 
+ # If original line count wasn't provided, fall back to current main CSV count (old behavior)
+ # This preserves backward compatibility but may have race conditions
+ if [[ -z "$original_main_csv_lines" ]]; then
+ echo "[WARN] No original line count provided - using current main CSV count (may cause race conditions)" >&2
+ original_main_csv_lines=$(wc -l < "$FULL_CSV_PATH" | tr -d '[:space:]')
+ fi
+
+ # Calculate how many data rows the temp CSV started with (before stubs were added)
+ # This should match the original main CSV line count (including header)
+ local original_data_rows=$((original_main_csv_lines - 1)) # Subtract header
+
+ # Calculate how many rows were actually added to temp CSV
+ local added_count=$((new_count - original_data_rows))
 
  # Check if AI overwrote the file instead of appending
- if [[ $new_count -lt $current_original_count ]]; then
- echo "[ERROR] AI overwrote the CSV file instead of appending ($new_count < $current_original_count)" >&2
+ if [[ $new_count -lt $original_data_rows ]]; then
+ echo "[ERROR] AI overwrote the CSV file instead of appending ($new_count < $original_data_rows)" >&2
  head -10 "$temp_csv" >&2
  return 1
  fi
 
  # Check if no changes were made
- if [[ $new_count -eq $current_original_count ]]; then
- echo "[ERROR] CSV file wasn't modified - same number of data rows ($new_count = $current_original_count)" >&2
+ if [[ $new_count -eq $original_data_rows ]]; then
+ echo "[ERROR] CSV file wasn't modified - same number of data rows ($new_count = $original_data_rows)" >&2
  head -10 "$temp_csv" >&2
  return 1
  fi
-
- local added_count=$((new_count - current_original_count))
  if [[ $added_count -ne $expected_count ]]; then
  echo "[ERROR] Expected to add $expected_count ideas but only added $added_count" >&2
  echo "[ERROR] Ideation failed - rejecting partial results to prevent endless loops" >&2
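With the sixth argument in place, the validation arithmetic above reduces to two subtractions: drop the header from the copy-time line count to get the pre-existing data rows, then compare against the data rows now present in the temp CSV. A worked example with made-up numbers, mirroring the three error branches:

    # Worked example of the validation arithmetic above (numbers are made up).
    original_main_csv_lines = 42          # lines in main CSV at copy time, incl. header
    original_data_rows = original_main_csv_lines - 1   # 41 data rows before the AI ran

    new_count = 44                        # data rows counted in the temp CSV afterwards
    added_count = new_count - original_data_rows       # 3 rows actually added

    expected_count = 3
    if new_count < original_data_rows:
        print("AI overwrote the CSV instead of appending")
    elif new_count == original_data_rows:
        print("CSV wasn't modified")
    elif added_count != expected_count:
        print(f"Expected {expected_count} new ideas but found {added_count}")
    else:
        print(f"OK: {added_count} rows added")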
@@ -434,43 +446,47 @@ validate_direct_csv_modification() {
 
  # Use proper locking to safely update the CSV
  echo "[INFO] Acquiring CSV lock to apply changes..."
-
+
  # Set the lockfile path
  CSV_LOCKFILE="$FULL_EVOLUTION_DIR/.evolution.csv.lock"
-
+
  if ! acquire_csv_lock; then
  echo "[ERROR] Failed to acquire CSV lock for update" >&2
  rm -f "$temp_csv"
  return 1
  fi
-
- # Get just the new entries (skip header and existing entries)
- local original_line_count=$(wc -l < "$FULL_CSV_PATH" | tr -d '[:space:]')
-
- # Append only the new lines from temp CSV to the main CSV
- tail -n +$((original_line_count + 1)) "$temp_csv" >> "$FULL_CSV_PATH"
-
+
+ # CRITICAL FIX: Use the original line count (when temp CSV was created) to determine which lines to append
+ # This prevents race conditions where other processes modify the main CSV between temp CSV creation and append
+ # Append only the NEW lines from temp CSV (those added after the original content)
+ echo "[DEBUG] Appending last $added_count rows from temp CSV (from line $((original_main_csv_lines + 1)) onwards)" >&2
+ tail -n +$((original_main_csv_lines + 1)) "$temp_csv" >> "$FULL_CSV_PATH"
+
+ # Get the IDs that were actually added by reading them from temp CSV (not main CSV)
+ # This avoids race conditions where other processes add rows to main CSV
+ local new_ids
+ new_ids=$(tail -n $added_count "$temp_csv" | grep -v "^id," | cut -d',' -f1 | tr -d '"')
+ echo "[DEBUG] IDs being added: $new_ids" >&2
+
  # Clean up temp file
  rm -f "$temp_csv"
-
+
  # Update idea-LLM field for newly added rows if model is known
  if [[ -n "$ai_model" ]]; then
  echo "[INFO] Recording that $ai_model generated the ideas" >&2
- # Get the IDs of the newly added rows (skip header line and strip quotes)
- local new_ids
- new_ids=$(tail -n $added_count "$FULL_CSV_PATH" | grep -v "^id," | cut -d',' -f1 | tr -d '"')
-
+
  # Update each new row with the model that generated it
  for id in $new_ids; do
  if [[ -n "$id" && "$id" != "id" ]]; then
+ echo "[DEBUG] Updating field for $id" >&2
  "$PYTHON_CMD" "$SCRIPT_DIR/../lib/evolution_csv.py" "$FULL_CSV_PATH" field "$id" "idea-LLM" "$ai_model" || echo "[WARN] Failed to update $id" >&2
  fi
  done
  fi
-
+
  # Release the lock
  release_csv_lock
-
+
  echo "[INFO] Successfully added $added_count $idea_type ideas to CSV"
  return 0
  }
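The append hunk above is the heart of the fix: under the CSV lock, only the temp-CSV lines past the copy-time line count are appended, and the newly added IDs are read back from the temp copy rather than from the main CSV, which may have grown in the meantime. A rough Python equivalent of that step (the paths and helper name are hypothetical; the real implementation is the bash shown above, using tail/cut under the lock):

    import csv

    def append_new_rows(main_csv_path, temp_csv_path, lines_at_copy_time):
        with open(temp_csv_path, newline="") as f:
            temp_lines = f.readlines()

        # Rows after the copy-time line count are the ones the AI added,
        # regardless of what other processes appended to the main CSV since.
        new_lines = temp_lines[lines_at_copy_time:]

        with open(main_csv_path, "a", newline="") as f:
            f.writelines(new_lines)

        # Read the new IDs from the temp copy, not the (possibly changed) main CSV.
        new_ids = [row[0] for row in csv.reader(new_lines) if row and row[0] != "id"]
        return new_ids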
@@ -1001,6 +1017,12 @@ generate_novel_ideas_direct() {
  local temp_csv="$FULL_EVOLUTION_DIR/temp-csv-$$.csv"
  cp "$FULL_CSV_PATH" "$temp_csv"
 
+ # CRITICAL: Capture the original line count immediately after copying
+ # This is needed to correctly append rows later, preventing race conditions
+ local original_csv_lines
+ original_csv_lines=$(wc -l < "$temp_csv" | tr -d '[:space:]')
+ echo "[DEBUG] Original CSV has $original_csv_lines lines (including header)" >&2
+
  # Pre-populate the CSV with stub rows containing the correct IDs
  # This ensures the AI can't possibly use wrong IDs - it just fills in descriptions
  echo "[INFO] Pre-populating CSV with stub rows: $required_ids_str"
@@ -1098,10 +1120,10 @@ CRITICAL: Do NOT use any git commands (git add, git commit, git reset, etc.). On
 
  # Restore working directory
  cd "$original_pwd"
-
 
  # Validate that the CSV file was actually modified with correct IDs
- if ! validate_direct_csv_modification "$temp_csv" "$count" "novel" "$ai_response" "$required_ids_str"; then
+ # Pass original_csv_lines to prevent race conditions
+ if ! validate_direct_csv_modification "$temp_csv" "$count" "novel" "$ai_response" "$required_ids_str" "$original_csv_lines"; then
  rm -f "$temp_csv"
  return 1
  fi
@@ -1135,6 +1157,11 @@ generate_hill_climbing_direct() {
  local temp_csv="$FULL_EVOLUTION_DIR/temp-csv-$$.csv"
  cp "$FULL_CSV_PATH" "$temp_csv"
 
+ # CRITICAL: Capture the original line count immediately after copying
+ local original_csv_lines
+ original_csv_lines=$(wc -l < "$temp_csv" | tr -d '[:space:]')
+ echo "[DEBUG] Original CSV has $original_csv_lines lines (including header)" >&2
+
  # Extract just the IDs from top performers for clarity (needed before pre-populating)
  local valid_parent_ids
  valid_parent_ids=$(echo "$top_performers" | cut -d',' -f1 | paste -sd ',' -)
@@ -1230,10 +1257,10 @@ CRITICAL INSTRUCTIONS:
 
  # Restore working directory
  cd "$original_pwd"
-
 
  # Validate that the CSV file was actually modified with correct IDs
- if ! validate_direct_csv_modification "$temp_csv" "$count" "hill-climbing" "$ai_response" "$required_ids_str"; then
+ # Pass original_csv_lines to prevent race conditions
+ if ! validate_direct_csv_modification "$temp_csv" "$count" "hill-climbing" "$ai_response" "$required_ids_str" "$original_csv_lines"; then
  rm -f "$temp_csv"
  return 1
  fi
@@ -1267,6 +1294,11 @@ generate_structural_mutation_direct() {
  local temp_csv="$FULL_EVOLUTION_DIR/temp-csv-$$.csv"
  cp "$FULL_CSV_PATH" "$temp_csv"
 
+ # CRITICAL: Capture the original line count immediately after copying
+ local original_csv_lines
+ original_csv_lines=$(wc -l < "$temp_csv" | tr -d '[:space:]')
+ echo "[DEBUG] Original CSV has $original_csv_lines lines (including header)" >&2
+
  # Extract just the IDs from top performers for clarity (needed before pre-populating)
  local valid_parent_ids
  valid_parent_ids=$(echo "$top_performers" | cut -d',' -f1 | paste -sd ',' -)
@@ -1352,10 +1384,10 @@ CRITICAL INSTRUCTIONS:
 
  # Restore working directory
  cd "$original_pwd"
-
 
  # Validate that the CSV file was actually modified with correct IDs
- if ! validate_direct_csv_modification "$temp_csv" "$count" "structural" "$ai_response" "$required_ids_str"; then
+ # Pass original_csv_lines to prevent race conditions
+ if ! validate_direct_csv_modification "$temp_csv" "$count" "structural" "$ai_response" "$required_ids_str" "$original_csv_lines"; then
  rm -f "$temp_csv"
  return 1
  fi
@@ -1389,6 +1421,11 @@ generate_crossover_direct() {
  local temp_csv="$FULL_EVOLUTION_DIR/temp-csv-$$.csv"
  cp "$FULL_CSV_PATH" "$temp_csv"
 
+ # CRITICAL: Capture the original line count immediately after copying
+ local original_csv_lines
+ original_csv_lines=$(wc -l < "$temp_csv" | tr -d '[:space:]')
+ echo "[DEBUG] Original CSV has $original_csv_lines lines (including header)" >&2
+
  # Extract just the IDs from top performers for clarity (needed before pre-populating)
  local valid_parent_ids
  valid_parent_ids=$(echo "$top_performers" | cut -d',' -f1 | paste -sd ',' -)
@@ -1474,10 +1511,10 @@ CRITICAL INSTRUCTIONS:
 
  # Restore working directory
  cd "$original_pwd"
-
 
  # Validate that the CSV file was actually modified with correct IDs
- if ! validate_direct_csv_modification "$temp_csv" "$count" "crossover" "$ai_response" "$required_ids_str"; then
+ # Pass original_csv_lines to prevent race conditions
+ if ! validate_direct_csv_modification "$temp_csv" "$count" "crossover" "$ai_response" "$required_ids_str" "$original_csv_lines"; then
  rm -f "$temp_csv"
  return 1
  fi
@@ -1496,7 +1533,12 @@ ideate_ai_legacy() {
  # Create temporary CSV copy in evolution directory (so AI can access it)
  local temp_csv="$FULL_EVOLUTION_DIR/temp-csv-$$.csv"
  cp "$FULL_CSV_PATH" "$temp_csv"
-
+
+ # CRITICAL: Capture the original line count immediately after copying
+ local original_csv_lines
+ original_csv_lines=$(wc -l < "$temp_csv" | tr -d '[:space:]')
+ echo "[DEBUG] Original CSV has $original_csv_lines lines (including header)" >&2
+
  echo "[INFO] Generating $TOTAL_IDEAS ideas (legacy mode)..."
 
  # Get top performers for context
@@ -1580,14 +1622,14 @@ CRITICAL: Do NOT use any git commands (git add, git commit, git reset, etc.). On
 
  # Restore working directory
  cd "$original_pwd"
-
-
+
  # Validate that the CSV file was actually modified
- if ! validate_direct_csv_modification "$temp_csv" "$TOTAL_IDEAS" "mixed" "$ai_response"; then
+ # Pass original_csv_lines to prevent race conditions
+ if ! validate_direct_csv_modification "$temp_csv" "$TOTAL_IDEAS" "mixed" "$ai_response" "" "$original_csv_lines"; then
  rm -f "$temp_csv"
  return 1
  fi
-
+
  echo "[INFO] Legacy ideas generated"
  return 0
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "claude-evolve",
- "version": "1.8.10",
+ "version": "1.8.12",
  "bin": {
  "claude-evolve": "./bin/claude-evolve",
  "claude-evolve-main": "./bin/claude-evolve-main",