@codemoot/core 0.2.3 → 0.2.4

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/index.d.ts CHANGED
@@ -2669,6 +2669,6 @@ declare const DEFAULT_DLP_CONFIG: DlpConfig;
  */
  declare function sanitize(input: string, config?: Partial<DlpConfig>): DlpResult;

- declare const VERSION = "0.2.3";
+ declare const VERSION = "0.2.4";

  export { type ArtifactRecord, ArtifactStore, type ArtifactType, type AssembledContext, type AttemptResult, BINARY_SNIFF_BYTES, type BridgeCapabilities, type BridgeOptions, type BridgeResumeOptions, type BudgetConfig, type BuildPhase, type BuildRun, BuildStore, type BuildSummary, type BuiltInRole, CLEANUP_TIMEOUT_SEC, CONTEXT_ACTIVE, CONTEXT_BUFFER, CONTEXT_RETRIEVED, CURRENT_VERSION, type CacheEntry, CacheStore, type CallModelOptions, CancellationError, CancellationToken, type ChatMessage, type CleanupConfidence, type CleanupFinding, type CleanupReport, type CleanupScope, type CleanupSource, CliAdapter, type CliAdapterConfig, type CliBridge, type CliCallOptions, type CliDetectionResult, ConfigError, type ContextBudget, ContextBuilder, type ContextBuilderOptions, type CostInput, type CostLogEntry, CostStore, type CostSummary, CostTracker, type CostUpdateEvent, DAYS_PER_YEAR, DEFAULT_CONFIG, DEFAULT_DLP_CONFIG, DEFAULT_MAX_TOKENS, DEFAULT_RULES, DEFAULT_TIMEOUT_SEC, DLP_MAX_CONTENT, DLP_MAX_PROCESSING_MS, DatabaseError, type DebateConfig, type DebateEngineState, type DebateInput, type DebateMessageRow, type DebateOptions, type DebatePattern, type DebateResponse, type DebateResult, DebateStore, type DebateTurnRow, type DebateTurnStatus, type DlpAuditEntry, type DlpConfig, type DlpMode, DlpReasonCode, type DlpRedaction, type DlpResult, type EngineEvent, type EnqueueOptions, ErrorCode, EventBus, type ExecutionMode, type FallbackConfig, HTTP_TOO_MANY_REQUESTS, type HandoffCommand, type HandoffEnvelopeOptions, type HostFindingInput, IMPLEMENTER_MAX_TOKENS, type JobLogRecord, type JobRecord, type JobStatus, JobStore, type JobType, type LogLevel, type Logger, type LoopConfig, LoopController, type LoopIterationEvent, type LoopResult, MCP_CONTENT_MAX_LENGTH, MCP_TASK_MAX_LENGTH, MCP_TIMEOUT_MAX, type MemoryCategory, type MemoryConfig, type MemoryInput, type MemoryRecord, MemoryStore, type MessageStatus, MessageStore, type MeteringSource, type ModelAdapter, type ModelCallResult, type ModelConfig, ModelError, type ModelPricing, type ModelProvider, ModelRegistry, Orchestrator, type OrchestratorOptions, type OutputConfig, type ParsedVerdict, type PlanInput, type PlanOptions, type PolicyContext, type PolicyDecision, type PolicyMode, type PolicyResult, type PolicyRule, type PresetName, type ProgressCallbacks, type ProjectConfig, type ProjectConfigInput, type PromptType, type PromptVariables, REVIEW_DIFF_MAX_CHARS, REVIEW_PRESETS, REVIEW_TEXT_MAX_CHARS, type ResolvedStep, type ResolvedWorkflow, type ResultStatus, type RetryConfig, type RetryOptions, type ReviewInput, type ReviewOptions, type ReviewPreset, type ReviewResult, type Role, type RoleConfig, RoleManager, type RunOptions, type Session, type SessionCompletedEvent, type SessionEvent, type SessionFailedEvent, SessionManager, type SessionOverflowStatus, type SessionResult, type SessionStartedEvent, type SessionStatus, SessionStore, type StepCompletedEvent, type StepDefinition, type StepFailedEvent, type StepResult, StepRunner, type StepStartedEvent, type StepType, TerminalReason, type TextDeltaEmitter, type TextDeltaEvent, type TextDoneEvent, type TokenUsage, type TranscriptEntry, type UnifiedSession, VERSION, type VerdictResult, type WorkflowDefinition, WorkflowEngine, WorkflowError, buildHandoffEnvelope, buildReconstructionPrompt, calculateCost, calculateDebateTokens, callModel, clearDetectionCache, computeThreeWayStats, computeTwoWayStats, costInputSchema, createIgnoreFilter, createLogger, debateInputSchema, debateOutputSchema, detectCli, 
estimateTokens, evaluatePolicy, generateId, generateSessionId, getModelPricing, getReviewPreset, getSchemaVersion, getTokenBudgetStatus, hashConfig, hashContent, hostFindingsSchema, isRateLimit, isRetryable, listPresetNames, listPresets, loadConfig, loadIgnorePatterns, loadPreset, memoryInputSchema, mergeThreeWay, mergeTwoWay, migrateConfig, openDatabase, parseDebateVerdict, parseVerdict, planInputSchema, preflightTokenCheck, projectConfigSchema, recalculateConfidenceStats, renderPrompt, reviewInputSchema, reviewOutputSchema, runAllScanners, runMigrations, sanitize, scanAntiPatterns, scanDeadCode, scanDuplicates, scanHardcoded, scanNearDuplicates, scanSecurity, scanUnusedDeps, scanUnusedExports, shouldIgnore, sleep, streamModel, validateConfig, withCanonicalRetry, withFallback, withRetry, writeConfig };
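
The only change to the type declarations is the VERSION constant. A minimal usage sketch of the two declarations shown in this hunk, assuming a standard ESM import of the published package (the input string is purely illustrative):

    import { sanitize, VERSION } from "@codemoot/core";

    // sanitize(input, config?) per the declaration above; the optional
    // Partial<DlpConfig> override is omitted so the default DLP config applies.
    const result = sanitize("contact me at alice@example.com");

    console.log(VERSION); // "0.2.4" as of this release
    console.log(result);  // DlpResult describing any redactions
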
package/dist/index.js CHANGED
@@ -431,36 +431,54 @@ function validateConfig(config) {
  }

  // src/config/presets.ts
- import { readFileSync } from "fs";
- import { dirname, join } from "path";
- import { fileURLToPath } from "url";
- import { parse as parseYaml } from "yaml";
- var PRESETS_DIR = join(dirname(fileURLToPath(import.meta.url)), "..", "..", "..", "presets");
+ var PRESET_CONFIGS = {
+   "cli-first": {
+     models: {
+       "codex-architect": {
+         provider: "openai",
+         model: "gpt-5.3-codex",
+         maxTokens: 4096,
+         temperature: 0.7,
+         timeout: 120
+       },
+       "codex-reviewer": {
+         provider: "openai",
+         model: "gpt-5.3-codex",
+         maxTokens: 4096,
+         temperature: 0.3,
+         timeout: 120
+       }
+     },
+     roles: {
+       architect: {
+         model: "codex-architect",
+         temperature: 0.7,
+         maxTokens: 4096
+       },
+       reviewer: {
+         model: "codex-reviewer",
+         temperature: 0.3,
+         maxTokens: 4096
+       }
+     }
+   }
+ };
  var VALID_PRESETS = ["cli-first"];
  function loadPreset(name) {
    if (!VALID_PRESETS.includes(name)) {
      console.error(`Warning: Unknown preset "${name}", falling back to "cli-first".`);
      name = "cli-first";
    }
-   const filePath = join(PRESETS_DIR, `${name}.yml`);
-   try {
-     const content = readFileSync(filePath, "utf-8");
-     return parseYaml(content);
-   } catch (err) {
-     throw new ConfigError(
-       `Failed to load preset "${name}": ${err instanceof Error ? err.message : String(err)}`,
-       "preset"
-     );
-   }
+   return structuredClone(PRESET_CONFIGS[name]);
  }
  function listPresets() {
    return [...VALID_PRESETS];
  }

  // src/config/loader.ts
- import { appendFileSync, existsSync, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "fs";
- import { join as join2 } from "path";
- import { parse as parseYaml2, stringify as stringifyYaml } from "yaml";
+ import { appendFileSync, existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
+ import { join } from "path";
+ import { parse as parseYaml, stringify as stringifyYaml } from "yaml";
  var CONFIG_FILENAME = ".cowork.yml";
  function deepMerge(target, source) {
    const result = { ...target };
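
The substantive change in this hunk: the cli-first preset is no longer parsed from a bundled presets/*.yml file at runtime; it is embedded as PRESET_CONFIGS, and loadPreset returns a structuredClone of the matching entry. A minimal standalone sketch of that pattern, with a simplified PresetConfig type standing in for the package's real config shape:

    // Simplified stand-in type; the real preset also carries roles, temperatures, etc.
    interface PresetConfig {
      models: Record<string, { provider: string; model: string; maxTokens: number }>;
    }

    const PRESET_CONFIGS: Record<string, PresetConfig> = {
      "cli-first": {
        models: {
          "codex-architect": { provider: "openai", model: "gpt-5.3-codex", maxTokens: 4096 },
        },
      },
    };

    function loadPreset(name: string): PresetConfig {
      // structuredClone hands each caller an independent copy, so mutating the
      // returned object cannot corrupt the shared embedded defaults.
      return structuredClone(PRESET_CONFIGS[name] ?? PRESET_CONFIGS["cli-first"]);
    }

    const a = loadPreset("cli-first");
    a.models["codex-architect"].maxTokens = 1;
    console.log(loadPreset("cli-first").models["codex-architect"].maxTokens); // still 4096

One practical effect is that loadPreset no longer touches the filesystem or the yaml parser, which matches the import removals above and the alias renumbering in the hunks that follow.
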
@@ -482,11 +500,11 @@ function loadConfig(options) {
      const presetConfig = loadPreset(options.preset);
      merged = deepMerge(merged, presetConfig);
    }
-   const configPath = join2(projectDir, CONFIG_FILENAME);
+   const configPath = join(projectDir, CONFIG_FILENAME);
    if (!options?.skipFile && existsSync(configPath)) {
      try {
-       const content = readFileSync2(configPath, "utf-8");
-       const fileConfig = parseYaml2(content);
+       const content = readFileSync(configPath, "utf-8");
+       const fileConfig = parseYaml(content);
        if (fileConfig && typeof fileConfig === "object") {
          merged = deepMerge(merged, fileConfig);
        }
@@ -502,14 +520,14 @@ function loadConfig(options) {
    return validateConfig(merged);
  }
  function writeConfig(config, dir) {
-   const configPath = join2(dir, CONFIG_FILENAME);
+   const configPath = join(dir, CONFIG_FILENAME);
    const yamlContent = stringifyYaml(config, { lineWidth: 100 });
    writeFileSync(configPath, yamlContent, "utf-8");
-   mkdirSync(join2(dir, ".cowork", "db"), { recursive: true });
-   mkdirSync(join2(dir, ".cowork", "transcripts"), { recursive: true });
-   const gitignorePath = join2(dir, ".gitignore");
+   mkdirSync(join(dir, ".cowork", "db"), { recursive: true });
+   mkdirSync(join(dir, ".cowork", "transcripts"), { recursive: true });
+   const gitignorePath = join(dir, ".gitignore");
    if (existsSync(gitignorePath)) {
-     const content = readFileSync2(gitignorePath, "utf-8");
+     const content = readFileSync(gitignorePath, "utf-8");
      if (!content.includes(".cowork/")) {
        appendFileSync(gitignorePath, "\n.cowork/\n");
      }
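
Taken together, the loadConfig and writeConfig hunks show the configuration layering: the accumulated defaults, then the selected preset, then the project's .cowork.yml, each folded in with deepMerge. A small sketch of that precedence, assuming the usual deep-merge semantics in which the second argument wins on conflicting keys (this deepMerge is a simplified stand-in, not the package's implementation):

    type Cfg = Record<string, unknown>;

    // Recursive merge: nested objects are merged, everything else is overwritten.
    function deepMerge(target: Cfg, source: Cfg): Cfg {
      const result: Cfg = { ...target };
      for (const [key, value] of Object.entries(source)) {
        const existing = result[key];
        if (value && typeof value === "object" && !Array.isArray(value) &&
            existing && typeof existing === "object" && !Array.isArray(existing)) {
          result[key] = deepMerge(existing as Cfg, value as Cfg);
        } else {
          result[key] = value;
        }
      }
      return result;
    }

    // Illustrative layers only; the real keys come from the defaults, the preset,
    // and the project's .cowork.yml.
    let merged: Cfg = { output: { format: "text" } };
    merged = deepMerge(merged, { output: { format: "json" } });   // preset layer
    merged = deepMerge(merged, { output: { color: true } });      // .cowork.yml layer
    console.log(merged); // { output: { format: "json", color: true } }
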
@@ -636,8 +654,8 @@ function listPresetNames() {
  }

  // src/config/ignore.ts
- import { existsSync as existsSync2, readFileSync as readFileSync3 } from "fs";
- import { join as join3 } from "path";
+ import { existsSync as existsSync2, readFileSync as readFileSync2 } from "fs";
+ import { join as join2 } from "path";
  import ignore from "ignore";
  var BUILTIN_IGNORES = [
    "node_modules",
@@ -665,24 +683,24 @@ function createIgnoreFilter(projectDir, options) {
    const ig = ignore();
    ig.add(BUILTIN_IGNORES);
    if (!options?.skipGitignore) {
-     const gitignorePath = join3(projectDir, ".gitignore");
+     const gitignorePath = join2(projectDir, ".gitignore");
      if (existsSync2(gitignorePath)) {
-       const content = readFileSync3(gitignorePath, "utf-8");
+       const content = readFileSync2(gitignorePath, "utf-8");
        ig.add(content);
      }
    }
-   const codemootIgnorePath = join3(projectDir, ".codemootignore");
+   const codemootIgnorePath = join2(projectDir, ".codemootignore");
    if (existsSync2(codemootIgnorePath)) {
-     const content = readFileSync3(codemootIgnorePath, "utf-8");
+     const content = readFileSync2(codemootIgnorePath, "utf-8");
      ig.add(content);
    }
    return ig;
  }
  function loadIgnorePatterns(projectDir) {
    const patterns = [...BUILTIN_IGNORES];
-   const ignorePath = join3(projectDir, ".codemootignore");
+   const ignorePath = join2(projectDir, ".codemootignore");
    if (existsSync2(ignorePath)) {
-     const content = readFileSync3(ignorePath, "utf-8");
+     const content = readFileSync2(ignorePath, "utf-8");
      for (const line of content.split("\n")) {
        const trimmed = line.trim();
        if (trimmed && !trimmed.startsWith("#")) {
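
The ignore.ts hunks show how file filtering is layered: built-in patterns, then the project's .gitignore (unless skipped), then .codemootignore, all fed to the "ignore" npm package. A hedged sketch of that layering using the same add()/ignores() API (BUILTIN_IGNORES is abbreviated here):

    import { existsSync, readFileSync } from "fs";
    import { join } from "path";
    import ignore from "ignore";

    function buildFilter(projectDir: string, skipGitignore = false) {
      const ig = ignore();
      ig.add(["node_modules", ".git"]); // abbreviated stand-in for BUILTIN_IGNORES
      const files = skipGitignore ? [".codemootignore"] : [".gitignore", ".codemootignore"];
      for (const file of files) {
        const p = join(projectDir, file);
        if (existsSync(p)) ig.add(readFileSync(p, "utf-8"));
      }
      return ig;
    }

    // ignores() expects project-relative paths, as used in the walkFiles hunk further down.
    console.log(buildFilter(process.cwd()).ignores("node_modules/foo.js")); // true
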
@@ -2659,7 +2677,7 @@ import { spawn } from "child_process";
  import { randomUUID } from "crypto";
  import { readFile, unlink } from "fs/promises";
  import { tmpdir } from "os";
- import { join as join4 } from "path";
+ import { join as join3 } from "path";
  var MAX_OUTPUT_BYTES = 512 * 1024;
  var TRUNCATION_MARKER = "\n[TRUNCATED: output exceeded 512KB]";
  var BASE_ENV_ALLOWLIST = [
@@ -2736,7 +2754,7 @@ var CliAdapter = class {
        ...options?.envAllowlist ?? []
      ];
      const env = buildFilteredEnv(allowlist);
-     const tmpFile = join4(tmpdir(), `codemoot-cli-${randomUUID()}.txt`);
+     const tmpFile = join3(tmpdir(), `codemoot-cli-${randomUUID()}.txt`);
      const args = this.buildArgs(tmpFile);
      const start = Date.now();
      try {
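
This hunk is only an alias rename (join4 → join3), but the surrounding code shows CliAdapter building a filtered environment from an allowlist (BASE_ENV_ALLOWLIST plus any caller-supplied envAllowlist) and passing a per-invocation temp file path into buildArgs. buildFilteredEnv itself is not part of this diff; the sketch below assumes it simply copies allowlisted variables that are present in process.env:

    // Hypothetical sketch of an allowlist-based env filter; the package's actual
    // buildFilteredEnv may differ.
    function buildFilteredEnvSketch(allowlist: string[]): NodeJS.ProcessEnv {
      const env: NodeJS.ProcessEnv = {};
      for (const name of allowlist) {
        const value = process.env[name];
        if (value !== undefined) env[name] = value;
      }
      return env;
    }

    console.log(Object.keys(buildFilteredEnvSketch(["PATH", "HOME"])));
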
@@ -3724,13 +3742,13 @@ var LoopController = class {
  };

  // src/engine/orchestrator.ts
- import { dirname as dirname2, resolve } from "path";
- import { fileURLToPath as fileURLToPath2 } from "url";
+ import { dirname, resolve } from "path";
+ import { fileURLToPath } from "url";
  import { EventEmitter as EventEmitter2 } from "eventemitter3";

  // src/context/context-builder.ts
- import { readFileSync as readFileSync4, readdirSync, statSync } from "fs";
- import { join as join5 } from "path";
+ import { readFileSync as readFileSync3, readdirSync, statSync } from "fs";
+ import { join as join4 } from "path";
  var DEFAULT_BUDGET = {
    tier1: CONTEXT_BUFFER,
    tier2: CONTEXT_RETRIEVED,
@@ -3961,7 +3979,7 @@ ${lines.join("\n")}`;
        if (entry.startsWith(".") && entry !== ".cowork.yml") continue;
        if (IGNORE_DIRS.has(entry)) continue;
        try {
-         const fullPath = join5(dir, entry);
+         const fullPath = join4(dir, entry);
          const stat = statSync(fullPath);
          if (stat.isDirectory()) {
            dirs.push(entry);
@@ -3974,7 +3992,7 @@ ${lines.join("\n")}`;
      for (const d of dirs) {
        if (counter.value >= this.maxFiles) return;
        lines.push(`${prefix}${d}/`);
-       this.walkDir(join5(dir, d), `${prefix}  `, depth + 1, lines, counter);
+       this.walkDir(join4(dir, d), `${prefix}  `, depth + 1, lines, counter);
      }
      for (const f of files) {
        if (counter.value >= this.maxFiles) return;
@@ -3994,8 +4012,8 @@ ${lines.join("\n")}`;
    readFileContent(filePath, maxChars = 8e3) {
      if (!this.projectDir) return null;
      try {
-       const fullPath = join5(this.projectDir, filePath);
-       const content = readFileSync4(fullPath, "utf-8");
+       const fullPath = join4(this.projectDir, filePath);
+       const content = readFileSync3(fullPath, "utf-8");
        if (content.length > maxChars) {
          return `${content.slice(0, maxChars)}
  [TRUNCATED: file exceeds ${maxChars} chars]`;
@@ -4509,9 +4527,9 @@ var StepRunner = class {
  };

  // src/engine/workflow-engine.ts
- import { readFileSync as readFileSync5 } from "fs";
- import { join as join6 } from "path";
- import { parse as parseYaml3 } from "yaml";
+ import { readFileSync as readFileSync4 } from "fs";
+ import { join as join5 } from "path";
+ import { parse as parseYaml2 } from "yaml";
  var VALID_STEP_TYPES = /* @__PURE__ */ new Set([
    "generate",
    "review",
@@ -4529,16 +4547,16 @@ var WorkflowEngine = class {
     * Load a workflow YAML file by name, validate it, and resolve into a DAG.
     */
    load(workflowName) {
-     const filePath = join6(this.workflowDir, `${workflowName}.yml`);
+     const filePath = join5(this.workflowDir, `${workflowName}.yml`);
      let raw;
      try {
-       raw = readFileSync5(filePath, "utf-8");
+       raw = readFileSync4(filePath, "utf-8");
      } catch {
        throw new WorkflowError(`Workflow file not found: ${filePath}`);
      }
      let parsed;
      try {
-       parsed = parseYaml3(raw);
+       parsed = parseYaml2(raw);
      } catch {
        throw new WorkflowError(`Invalid YAML in workflow: ${workflowName}`);
      }
@@ -4670,7 +4688,7 @@ var WorkflowEngine = class {
  };

  // src/engine/orchestrator.ts
- var currentDir = dirname2(fileURLToPath2(import.meta.url));
+ var currentDir = dirname(fileURLToPath(import.meta.url));
  var DEFAULT_MAX_CLI_CONCURRENCY = 3;
  var DEFAULT_MAX_API_CONCURRENCY = 5;
  var AsyncSemaphore = class {
@@ -5280,8 +5298,8 @@ function evaluatePolicy(event, context, rules, mode = "enforce") {
  }

  // src/cleanup/scanners.ts
- import { readFileSync as readFileSync6, readdirSync as readdirSync2, statSync as statSync2, existsSync as existsSync3 } from "fs";
- import { join as join7, relative, sep } from "path";
+ import { readFileSync as readFileSync5, readdirSync as readdirSync2, statSync as statSync2, existsSync as existsSync3 } from "fs";
+ import { join as join6, relative, sep } from "path";
  import { createHash as createHash2 } from "crypto";
  var SOURCE_EXTS = /* @__PURE__ */ new Set([".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"]);
  var ALL_SCAN_EXTS = /* @__PURE__ */ new Set([...SOURCE_EXTS, ".json"]);
@@ -5297,7 +5315,7 @@ function walkFiles(dir, exts, result = [], rootDir, ig) {
      return result;
    }
    for (const entry of entries) {
-     const full = join7(dir, entry);
+     const full = join6(dir, entry);
      const rel = normalizePath(relative(root, full));
      if (ig && ig.ignores(rel)) continue;
      let stat;
@@ -5317,7 +5335,7 @@ function walkFiles(dir, exts, result = [], rootDir, ig) {
  }
  function readFileSafe(filePath) {
    try {
-     return readFileSync6(filePath, "utf-8");
+     return readFileSync5(filePath, "utf-8");
    } catch {
      return "";
    }
@@ -5327,22 +5345,22 @@ function makeKey(scope, file, symbol) {
  }
  function findPackageJsons(projectDir) {
    const results = [];
-   const rootPkg = join7(projectDir, "package.json");
+   const rootPkg = join6(projectDir, "package.json");
    if (existsSync3(rootPkg)) {
      try {
        results.push({ dir: projectDir, pkg: JSON.parse(readFileSafe(rootPkg)) });
      } catch {
      }
    }
-   const packagesDir = join7(projectDir, "packages");
+   const packagesDir = join6(projectDir, "packages");
    if (existsSync3(packagesDir)) {
      try {
        for (const entry of readdirSync2(packagesDir)) {
-         const pkgJson = join7(packagesDir, entry, "package.json");
+         const pkgJson = join6(packagesDir, entry, "package.json");
          if (existsSync3(pkgJson)) {
            try {
              results.push({
-               dir: join7(packagesDir, entry),
+               dir: join6(packagesDir, entry),
                pkg: JSON.parse(readFileSafe(pkgJson))
              });
            } catch {
@@ -5384,7 +5402,7 @@ function scanUnusedDeps(projectDir, ig) {
        const usedInPkgJson = binStr.includes(depName) || exportsStr.includes(depName) || scriptsStr.includes(depName);
        const usedInSource = importPatterns.some((p) => allContent.includes(p));
        if (!usedInSource && !usedInPkgJson) {
-         const relFile = normalizePath(relative(projectDir, join7(dir, "package.json")));
+         const relFile = normalizePath(relative(projectDir, join6(dir, "package.json")));
          findings.push({
            key: makeKey("deps", relFile, depName),
            scope: "deps",
@@ -6062,7 +6080,7 @@ var hostFindingSchema = z3.object({
  var hostFindingsSchema = z3.array(hostFindingSchema);

  // src/index.ts
- var VERSION = "0.2.3";
+ var VERSION = "0.2.4";
  export {
    ArtifactStore,
    BINARY_SNIFF_BYTES,