@codemoot/core 0.2.2 → 0.2.4

This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
package/dist/index.d.ts CHANGED
@@ -2669,6 +2669,6 @@ declare const DEFAULT_DLP_CONFIG: DlpConfig;
  */
  declare function sanitize(input: string, config?: Partial<DlpConfig>): DlpResult;

- declare const VERSION = "0.2.2";
+ declare const VERSION = "0.2.4";

  export { type ArtifactRecord, ArtifactStore, type ArtifactType, type AssembledContext, type AttemptResult, BINARY_SNIFF_BYTES, type BridgeCapabilities, type BridgeOptions, type BridgeResumeOptions, type BudgetConfig, type BuildPhase, type BuildRun, BuildStore, type BuildSummary, type BuiltInRole, CLEANUP_TIMEOUT_SEC, CONTEXT_ACTIVE, CONTEXT_BUFFER, CONTEXT_RETRIEVED, CURRENT_VERSION, type CacheEntry, CacheStore, type CallModelOptions, CancellationError, CancellationToken, type ChatMessage, type CleanupConfidence, type CleanupFinding, type CleanupReport, type CleanupScope, type CleanupSource, CliAdapter, type CliAdapterConfig, type CliBridge, type CliCallOptions, type CliDetectionResult, ConfigError, type ContextBudget, ContextBuilder, type ContextBuilderOptions, type CostInput, type CostLogEntry, CostStore, type CostSummary, CostTracker, type CostUpdateEvent, DAYS_PER_YEAR, DEFAULT_CONFIG, DEFAULT_DLP_CONFIG, DEFAULT_MAX_TOKENS, DEFAULT_RULES, DEFAULT_TIMEOUT_SEC, DLP_MAX_CONTENT, DLP_MAX_PROCESSING_MS, DatabaseError, type DebateConfig, type DebateEngineState, type DebateInput, type DebateMessageRow, type DebateOptions, type DebatePattern, type DebateResponse, type DebateResult, DebateStore, type DebateTurnRow, type DebateTurnStatus, type DlpAuditEntry, type DlpConfig, type DlpMode, DlpReasonCode, type DlpRedaction, type DlpResult, type EngineEvent, type EnqueueOptions, ErrorCode, EventBus, type ExecutionMode, type FallbackConfig, HTTP_TOO_MANY_REQUESTS, type HandoffCommand, type HandoffEnvelopeOptions, type HostFindingInput, IMPLEMENTER_MAX_TOKENS, type JobLogRecord, type JobRecord, type JobStatus, JobStore, type JobType, type LogLevel, type Logger, type LoopConfig, LoopController, type LoopIterationEvent, type LoopResult, MCP_CONTENT_MAX_LENGTH, MCP_TASK_MAX_LENGTH, MCP_TIMEOUT_MAX, type MemoryCategory, type MemoryConfig, type MemoryInput, type MemoryRecord, MemoryStore, type MessageStatus, MessageStore, type MeteringSource, type ModelAdapter, type ModelCallResult, type ModelConfig, ModelError, type ModelPricing, type ModelProvider, ModelRegistry, Orchestrator, type OrchestratorOptions, type OutputConfig, type ParsedVerdict, type PlanInput, type PlanOptions, type PolicyContext, type PolicyDecision, type PolicyMode, type PolicyResult, type PolicyRule, type PresetName, type ProgressCallbacks, type ProjectConfig, type ProjectConfigInput, type PromptType, type PromptVariables, REVIEW_DIFF_MAX_CHARS, REVIEW_PRESETS, REVIEW_TEXT_MAX_CHARS, type ResolvedStep, type ResolvedWorkflow, type ResultStatus, type RetryConfig, type RetryOptions, type ReviewInput, type ReviewOptions, type ReviewPreset, type ReviewResult, type Role, type RoleConfig, RoleManager, type RunOptions, type Session, type SessionCompletedEvent, type SessionEvent, type SessionFailedEvent, SessionManager, type SessionOverflowStatus, type SessionResult, type SessionStartedEvent, type SessionStatus, SessionStore, type StepCompletedEvent, type StepDefinition, type StepFailedEvent, type StepResult, StepRunner, type StepStartedEvent, type StepType, TerminalReason, type TextDeltaEmitter, type TextDeltaEvent, type TextDoneEvent, type TokenUsage, type TranscriptEntry, type UnifiedSession, VERSION, type VerdictResult, type WorkflowDefinition, WorkflowEngine, WorkflowError, buildHandoffEnvelope, buildReconstructionPrompt, calculateCost, calculateDebateTokens, callModel, clearDetectionCache, computeThreeWayStats, computeTwoWayStats, costInputSchema, createIgnoreFilter, createLogger, debateInputSchema, debateOutputSchema, detectCli, 
estimateTokens, evaluatePolicy, generateId, generateSessionId, getModelPricing, getReviewPreset, getSchemaVersion, getTokenBudgetStatus, hashConfig, hashContent, hostFindingsSchema, isRateLimit, isRetryable, listPresetNames, listPresets, loadConfig, loadIgnorePatterns, loadPreset, memoryInputSchema, mergeThreeWay, mergeTwoWay, migrateConfig, openDatabase, parseDebateVerdict, parseVerdict, planInputSchema, preflightTokenCheck, projectConfigSchema, recalculateConfidenceStats, renderPrompt, reviewInputSchema, reviewOutputSchema, runAllScanners, runMigrations, sanitize, scanAntiPatterns, scanDeadCode, scanDuplicates, scanHardcoded, scanNearDuplicates, scanSecurity, scanUnusedDeps, scanUnusedExports, shouldIgnore, sleep, streamModel, validateConfig, withCanonicalRetry, withFallback, withRetry, writeConfig };
package/dist/index.js CHANGED
@@ -431,38 +431,54 @@ function validateConfig(config) {
  }

  // src/config/presets.ts
- import { readFileSync } from "fs";
- import { dirname, join } from "path";
- import { fileURLToPath } from "url";
- import { parse as parseYaml } from "yaml";
- var PRESETS_DIR = join(dirname(fileURLToPath(import.meta.url)), "..", "..", "..", "presets");
+ var PRESET_CONFIGS = {
+ "cli-first": {
+ models: {
+ "codex-architect": {
+ provider: "openai",
+ model: "gpt-5.3-codex",
+ maxTokens: 4096,
+ temperature: 0.7,
+ timeout: 120
+ },
+ "codex-reviewer": {
+ provider: "openai",
+ model: "gpt-5.3-codex",
+ maxTokens: 4096,
+ temperature: 0.3,
+ timeout: 120
+ }
+ },
+ roles: {
+ architect: {
+ model: "codex-architect",
+ temperature: 0.7,
+ maxTokens: 4096
+ },
+ reviewer: {
+ model: "codex-reviewer",
+ temperature: 0.3,
+ maxTokens: 4096
+ }
+ }
+ }
+ };
  var VALID_PRESETS = ["cli-first"];
  function loadPreset(name) {
  if (!VALID_PRESETS.includes(name)) {
- throw new ConfigError(
- `Unknown preset: "${name}". Valid presets: ${VALID_PRESETS.join(", ")}`,
- "preset"
- );
- }
- const filePath = join(PRESETS_DIR, `${name}.yml`);
- try {
- const content = readFileSync(filePath, "utf-8");
- return parseYaml(content);
- } catch (err) {
- throw new ConfigError(
- `Failed to load preset "${name}": ${err instanceof Error ? err.message : String(err)}`,
- "preset"
- );
+ console.error(`Warning: Unknown preset "${name}", falling back to "cli-first".`);
+ name = "cli-first";
  }
+ return structuredClone(PRESET_CONFIGS[name]);
  }
  function listPresets() {
  return [...VALID_PRESETS];
  }

  // src/config/loader.ts
- import { appendFileSync, existsSync, mkdirSync, readFileSync as readFileSync2, writeFileSync } from "fs";
- import { join as join2 } from "path";
- import { parse as parseYaml2, stringify as stringifyYaml } from "yaml";
+ import { appendFileSync, existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
+ import { join } from "path";
+ import { parse as parseYaml, stringify as stringifyYaml } from "yaml";
  var CONFIG_FILENAME = ".cowork.yml";
  function deepMerge(target, source) {
  const result = { ...target };
@@ -484,11 +500,11 @@ function loadConfig(options) {
  const presetConfig = loadPreset(options.preset);
  merged = deepMerge(merged, presetConfig);
  }
- const configPath = join2(projectDir, CONFIG_FILENAME);
+ const configPath = join(projectDir, CONFIG_FILENAME);
  if (!options?.skipFile && existsSync(configPath)) {
  try {
- const content = readFileSync2(configPath, "utf-8");
- const fileConfig = parseYaml2(content);
+ const content = readFileSync(configPath, "utf-8");
+ const fileConfig = parseYaml(content);
  if (fileConfig && typeof fileConfig === "object") {
  merged = deepMerge(merged, fileConfig);
  }
@@ -504,14 +520,14 @@ function loadConfig(options) {
  return validateConfig(merged);
  }
  function writeConfig(config, dir) {
- const configPath = join2(dir, CONFIG_FILENAME);
+ const configPath = join(dir, CONFIG_FILENAME);
  const yamlContent = stringifyYaml(config, { lineWidth: 100 });
  writeFileSync(configPath, yamlContent, "utf-8");
- mkdirSync(join2(dir, ".cowork", "db"), { recursive: true });
- mkdirSync(join2(dir, ".cowork", "transcripts"), { recursive: true });
- const gitignorePath = join2(dir, ".gitignore");
+ mkdirSync(join(dir, ".cowork", "db"), { recursive: true });
+ mkdirSync(join(dir, ".cowork", "transcripts"), { recursive: true });
+ const gitignorePath = join(dir, ".gitignore");
  if (existsSync(gitignorePath)) {
- const content = readFileSync2(gitignorePath, "utf-8");
+ const content = readFileSync(gitignorePath, "utf-8");
  if (!content.includes(".cowork/")) {
  appendFileSync(gitignorePath, "\n.cowork/\n");
  }
@@ -638,8 +654,8 @@ function listPresetNames() {
  }

  // src/config/ignore.ts
- import { existsSync as existsSync2, readFileSync as readFileSync3 } from "fs";
- import { join as join3 } from "path";
+ import { existsSync as existsSync2, readFileSync as readFileSync2 } from "fs";
+ import { join as join2 } from "path";
  import ignore from "ignore";
  var BUILTIN_IGNORES = [
  "node_modules",
@@ -667,24 +683,24 @@ function createIgnoreFilter(projectDir, options) {
  const ig = ignore();
  ig.add(BUILTIN_IGNORES);
  if (!options?.skipGitignore) {
- const gitignorePath = join3(projectDir, ".gitignore");
+ const gitignorePath = join2(projectDir, ".gitignore");
  if (existsSync2(gitignorePath)) {
- const content = readFileSync3(gitignorePath, "utf-8");
+ const content = readFileSync2(gitignorePath, "utf-8");
  ig.add(content);
  }
  }
- const codemootIgnorePath = join3(projectDir, ".codemootignore");
+ const codemootIgnorePath = join2(projectDir, ".codemootignore");
  if (existsSync2(codemootIgnorePath)) {
- const content = readFileSync3(codemootIgnorePath, "utf-8");
+ const content = readFileSync2(codemootIgnorePath, "utf-8");
  ig.add(content);
  }
  return ig;
  }
  function loadIgnorePatterns(projectDir) {
  const patterns = [...BUILTIN_IGNORES];
- const ignorePath = join3(projectDir, ".codemootignore");
+ const ignorePath = join2(projectDir, ".codemootignore");
  if (existsSync2(ignorePath)) {
- const content = readFileSync3(ignorePath, "utf-8");
+ const content = readFileSync2(ignorePath, "utf-8");
  for (const line of content.split("\n")) {
  const trimmed = line.trim();
  if (trimmed && !trimmed.startsWith("#")) {
@@ -2661,7 +2677,7 @@ import { spawn } from "child_process";
  import { randomUUID } from "crypto";
  import { readFile, unlink } from "fs/promises";
  import { tmpdir } from "os";
- import { join as join4 } from "path";
+ import { join as join3 } from "path";
  var MAX_OUTPUT_BYTES = 512 * 1024;
  var TRUNCATION_MARKER = "\n[TRUNCATED: output exceeded 512KB]";
  var BASE_ENV_ALLOWLIST = [
@@ -2738,7 +2754,7 @@ var CliAdapter = class {
  ...options?.envAllowlist ?? []
  ];
  const env = buildFilteredEnv(allowlist);
- const tmpFile = join4(tmpdir(), `codemoot-cli-${randomUUID()}.txt`);
+ const tmpFile = join3(tmpdir(), `codemoot-cli-${randomUUID()}.txt`);
  const args = this.buildArgs(tmpFile);
  const start = Date.now();
  try {
@@ -3726,13 +3742,13 @@ var LoopController = class {
  };

  // src/engine/orchestrator.ts
- import { dirname as dirname2, resolve } from "path";
- import { fileURLToPath as fileURLToPath2 } from "url";
+ import { dirname, resolve } from "path";
+ import { fileURLToPath } from "url";
  import { EventEmitter as EventEmitter2 } from "eventemitter3";

  // src/context/context-builder.ts
- import { readFileSync as readFileSync4, readdirSync, statSync } from "fs";
- import { join as join5 } from "path";
+ import { readFileSync as readFileSync3, readdirSync, statSync } from "fs";
+ import { join as join4 } from "path";
  var DEFAULT_BUDGET = {
  tier1: CONTEXT_BUFFER,
  tier2: CONTEXT_RETRIEVED,
@@ -3963,7 +3979,7 @@ ${lines.join("\n")}`;
  if (entry.startsWith(".") && entry !== ".cowork.yml") continue;
  if (IGNORE_DIRS.has(entry)) continue;
  try {
- const fullPath = join5(dir, entry);
+ const fullPath = join4(dir, entry);
  const stat = statSync(fullPath);
  if (stat.isDirectory()) {
  dirs.push(entry);
@@ -3976,7 +3992,7 @@ ${lines.join("\n")}`;
  for (const d of dirs) {
  if (counter.value >= this.maxFiles) return;
  lines.push(`${prefix}${d}/`);
- this.walkDir(join5(dir, d), `${prefix} `, depth + 1, lines, counter);
+ this.walkDir(join4(dir, d), `${prefix} `, depth + 1, lines, counter);
  }
  for (const f of files) {
  if (counter.value >= this.maxFiles) return;
@@ -3996,8 +4012,8 @@ ${lines.join("\n")}`;
  readFileContent(filePath, maxChars = 8e3) {
  if (!this.projectDir) return null;
  try {
- const fullPath = join5(this.projectDir, filePath);
- const content = readFileSync4(fullPath, "utf-8");
+ const fullPath = join4(this.projectDir, filePath);
+ const content = readFileSync3(fullPath, "utf-8");
  if (content.length > maxChars) {
  return `${content.slice(0, maxChars)}
  [TRUNCATED: file exceeds ${maxChars} chars]`;
@@ -4511,9 +4527,9 @@ var StepRunner = class {
  };

  // src/engine/workflow-engine.ts
- import { readFileSync as readFileSync5 } from "fs";
- import { join as join6 } from "path";
- import { parse as parseYaml3 } from "yaml";
+ import { readFileSync as readFileSync4 } from "fs";
+ import { join as join5 } from "path";
+ import { parse as parseYaml2 } from "yaml";
  var VALID_STEP_TYPES = /* @__PURE__ */ new Set([
  "generate",
  "review",
@@ -4531,16 +4547,16 @@ var WorkflowEngine = class {
  * Load a workflow YAML file by name, validate it, and resolve into a DAG.
  */
  load(workflowName) {
- const filePath = join6(this.workflowDir, `${workflowName}.yml`);
+ const filePath = join5(this.workflowDir, `${workflowName}.yml`);
  let raw;
  try {
- raw = readFileSync5(filePath, "utf-8");
+ raw = readFileSync4(filePath, "utf-8");
  } catch {
  throw new WorkflowError(`Workflow file not found: ${filePath}`);
  }
  let parsed;
  try {
- parsed = parseYaml3(raw);
+ parsed = parseYaml2(raw);
  } catch {
  throw new WorkflowError(`Invalid YAML in workflow: ${workflowName}`);
  }
@@ -4672,7 +4688,7 @@ var WorkflowEngine = class {
  };

  // src/engine/orchestrator.ts
- var currentDir = dirname2(fileURLToPath2(import.meta.url));
+ var currentDir = dirname(fileURLToPath(import.meta.url));
  var DEFAULT_MAX_CLI_CONCURRENCY = 3;
  var DEFAULT_MAX_API_CONCURRENCY = 5;
  var AsyncSemaphore = class {
@@ -5282,8 +5298,8 @@ function evaluatePolicy(event, context, rules, mode = "enforce") {
  }

  // src/cleanup/scanners.ts
- import { readFileSync as readFileSync6, readdirSync as readdirSync2, statSync as statSync2, existsSync as existsSync3 } from "fs";
- import { join as join7, relative, sep } from "path";
+ import { readFileSync as readFileSync5, readdirSync as readdirSync2, statSync as statSync2, existsSync as existsSync3 } from "fs";
+ import { join as join6, relative, sep } from "path";
  import { createHash as createHash2 } from "crypto";
  var SOURCE_EXTS = /* @__PURE__ */ new Set([".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"]);
  var ALL_SCAN_EXTS = /* @__PURE__ */ new Set([...SOURCE_EXTS, ".json"]);
@@ -5299,7 +5315,7 @@ function walkFiles(dir, exts, result = [], rootDir, ig) {
  return result;
  }
  for (const entry of entries) {
- const full = join7(dir, entry);
+ const full = join6(dir, entry);
  const rel = normalizePath(relative(root, full));
  if (ig && ig.ignores(rel)) continue;
  let stat;
@@ -5319,7 +5335,7 @@ function walkFiles(dir, exts, result = [], rootDir, ig) {
  }
  function readFileSafe(filePath) {
  try {
- return readFileSync6(filePath, "utf-8");
+ return readFileSync5(filePath, "utf-8");
  } catch {
  return "";
  }
@@ -5329,22 +5345,22 @@ function makeKey(scope, file, symbol) {
  }
  function findPackageJsons(projectDir) {
  const results = [];
- const rootPkg = join7(projectDir, "package.json");
+ const rootPkg = join6(projectDir, "package.json");
  if (existsSync3(rootPkg)) {
  try {
  results.push({ dir: projectDir, pkg: JSON.parse(readFileSafe(rootPkg)) });
  } catch {
  }
  }
- const packagesDir = join7(projectDir, "packages");
+ const packagesDir = join6(projectDir, "packages");
  if (existsSync3(packagesDir)) {
  try {
  for (const entry of readdirSync2(packagesDir)) {
- const pkgJson = join7(packagesDir, entry, "package.json");
+ const pkgJson = join6(packagesDir, entry, "package.json");
  if (existsSync3(pkgJson)) {
  try {
  results.push({
- dir: join7(packagesDir, entry),
+ dir: join6(packagesDir, entry),
  pkg: JSON.parse(readFileSafe(pkgJson))
  });
  } catch {
@@ -5386,7 +5402,7 @@ function scanUnusedDeps(projectDir, ig) {
  const usedInPkgJson = binStr.includes(depName) || exportsStr.includes(depName) || scriptsStr.includes(depName);
  const usedInSource = importPatterns.some((p) => allContent.includes(p));
  if (!usedInSource && !usedInPkgJson) {
- const relFile = normalizePath(relative(projectDir, join7(dir, "package.json")));
+ const relFile = normalizePath(relative(projectDir, join6(dir, "package.json")));
  findings.push({
  key: makeKey("deps", relFile, depName),
  scope: "deps",
@@ -6064,7 +6080,7 @@ var hostFindingSchema = z3.object({
  var hostFindingsSchema = z3.array(hostFindingSchema);

  // src/index.ts
- var VERSION = "0.2.2";
+ var VERSION = "0.2.4";
  export {
  ArtifactStore,
  BINARY_SNIFF_BYTES,