@yasserkhanorg/e2e-agents 1.9.5 → 1.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dist/cli/commands/gate.d.ts.map +1 -1
  2. package/dist/cli/commands/gate.js +4 -1
  3. package/dist/cli/commands/impact.d.ts.map +1 -1
  4. package/dist/cli/commands/impact.js +7 -0
  5. package/dist/cli/commands/llm_health.d.ts.map +1 -1
  6. package/dist/cli/commands/llm_health.js +57 -17
  7. package/dist/cli/usage.d.ts.map +1 -1
  8. package/dist/cli/usage.js +4 -0
  9. package/dist/cli.js +1 -1
  10. package/dist/esm/cli/commands/gate.js +4 -1
  11. package/dist/esm/cli/commands/impact.js +7 -0
  12. package/dist/esm/cli/commands/llm_health.js +57 -17
  13. package/dist/esm/cli/usage.js +4 -0
  14. package/dist/esm/cli.js +1 -1
  15. package/dist/esm/knowledge/api_surface.js +265 -34
  16. package/dist/esm/knowledge/failure_history.js +121 -0
  17. package/dist/esm/knowledge/route_families.js +31 -1
  18. package/dist/esm/pipeline/stage1_impact.js +19 -3
  19. package/dist/esm/pipeline/stage2_coverage.js +28 -7
  20. package/dist/esm/pipeline/stage3_generation.js +20 -1
  21. package/dist/esm/prompts/coverage.js +10 -0
  22. package/dist/esm/prompts/generation.js +41 -7
  23. package/dist/esm/validation/guardrails.js +5 -0
  24. package/dist/index.d.ts +1 -1
  25. package/dist/knowledge/api_surface.d.ts +12 -0
  26. package/dist/knowledge/api_surface.d.ts.map +1 -1
  27. package/dist/knowledge/api_surface.js +268 -34
  28. package/dist/knowledge/failure_history.d.ts +39 -0
  29. package/dist/knowledge/failure_history.d.ts.map +1 -0
  30. package/dist/knowledge/failure_history.js +128 -0
  31. package/dist/knowledge/route_families.d.ts +11 -0
  32. package/dist/knowledge/route_families.d.ts.map +1 -1
  33. package/dist/knowledge/route_families.js +32 -1
  34. package/dist/pipeline/stage1_impact.d.ts +1 -1
  35. package/dist/pipeline/stage1_impact.d.ts.map +1 -1
  36. package/dist/pipeline/stage1_impact.js +18 -2
  37. package/dist/pipeline/stage2_coverage.d.ts.map +1 -1
  38. package/dist/pipeline/stage2_coverage.js +28 -7
  39. package/dist/pipeline/stage3_generation.d.ts.map +1 -1
  40. package/dist/pipeline/stage3_generation.js +20 -1
  41. package/dist/prompts/coverage.d.ts.map +1 -1
  42. package/dist/prompts/coverage.js +10 -0
  43. package/dist/prompts/generation.d.ts +1 -1
  44. package/dist/prompts/generation.d.ts.map +1 -1
  45. package/dist/prompts/generation.js +41 -7
  46. package/dist/validation/guardrails.d.ts +2 -0
  47. package/dist/validation/guardrails.d.ts.map +1 -1
  48. package/dist/validation/guardrails.js +5 -0
  49. package/dist/validation/output_schema.d.ts +3 -0
  50. package/dist/validation/output_schema.d.ts.map +1 -1
  51. package/package.json +1 -1
@@ -1 +1 @@
1
- {"version":3,"file":"gate.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/gate.ts"],"names":[],"mappings":"AAgBA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,aAAa,CAAC;AAE5C,wBAAsB,cAAc,CAAC,IAAI,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,GAAG,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC,CA6EpG"}
1
+ {"version":3,"file":"gate.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/gate.ts"],"names":[],"mappings":"AAgBA,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,aAAa,CAAC;AAE5C,wBAAsB,cAAc,CAAC,IAAI,EAAE,UAAU,EAAE,UAAU,EAAE,MAAM,GAAG,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC,CAgFpG"}
@@ -20,7 +20,10 @@ async function runGateCommand(args, autoConfig) {
20
20
  console.error('Error: --path is required for gate command');
21
21
  process.exit(1);
22
22
  }
23
- const threshold = args.gateThreshold ?? 80;
23
+ let threshold = args.gateThreshold ?? 80;
24
+ if (threshold > 0 && threshold < 1) {
25
+ threshold = threshold * 100;
26
+ }
24
27
  const { config } = (0, config_js_1.resolveConfig)(process.cwd(), autoConfig, {
25
28
  path: args.path,
26
29
  profile: args.profile,
@@ -1 +1 @@
1
- {"version":3,"file":"impact.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/impact.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,uBAAuB,CAAC;AAIzD,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,aAAa,CAAC;AAE5C,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,OAAO,aAAa,CAAC,CAAC,QAAQ,CAAC,GAAG,IAAI,CAkB3G"}
1
+ {"version":3,"file":"impact.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/impact.ts"],"names":[],"mappings":"AAMA,OAAO,KAAK,EAAC,aAAa,EAAC,MAAM,uBAAuB,CAAC;AAIzD,OAAO,KAAK,EAAC,UAAU,EAAC,MAAM,aAAa,CAAC;AAE5C,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,OAAO,aAAa,CAAC,CAAC,QAAQ,CAAC,GAAG,IAAI,CAwB3G"}
@@ -3,6 +3,8 @@
3
3
  // See LICENSE.txt for license information.
4
4
  Object.defineProperty(exports, "__esModule", { value: true });
5
5
  exports.runImpactCommand = runImpactCommand;
6
+ const fs_1 = require("fs");
7
+ const path_1 = require("path");
6
8
  const git_js_1 = require("../../agent/git.js");
7
9
  const impact_engine_js_1 = require("../../engine/impact_engine.js");
8
10
  function runImpactCommand(args, config) {
@@ -23,4 +25,9 @@ function runImpactCommand(args, config) {
23
25
  console.warn(` Warning: ${w}`);
24
26
  }
25
27
  }
28
+ const outputDir = (0, path_1.join)(reportRoot, '.e2e-ai-agents');
29
+ (0, fs_1.mkdirSync)(outputDir, { recursive: true });
30
+ const planPath = (0, path_1.join)(outputDir, 'plan.json');
31
+ (0, fs_1.writeFileSync)(planPath, JSON.stringify(impactResult, null, 2));
32
+ console.log(`Wrote ${planPath}`);
26
33
  }
@@ -1 +1 @@
1
- {"version":3,"file":"llm_health.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/llm_health.ts"],"names":[],"mappings":"AAMA,wBAAsB,YAAY,IAAI,OAAO,CAAC,IAAI,CAAC,CA6BlD"}
1
+ {"version":3,"file":"llm_health.d.ts","sourceRoot":"","sources":["../../../src/cli/commands/llm_health.ts"],"names":[],"mappings":"AA2DA,wBAAsB,YAAY,IAAI,OAAO,CAAC,IAAI,CAAC,CA4BlD"}
@@ -4,35 +4,75 @@
4
4
  Object.defineProperty(exports, "__esModule", { value: true });
5
5
  exports.runLlmHealth = runLlmHealth;
6
6
  const anthropic_provider_js_1 = require("../../anthropic_provider.js");
7
+ const openai_provider_js_1 = require("../../openai_provider.js");
8
+ const ollama_provider_js_1 = require("../../ollama_provider.js");
7
9
  const provider_interface_js_1 = require("../../provider_interface.js");
8
- async function runLlmHealth() {
10
+ async function checkAnthropic() {
9
11
  if (!process.env.ANTHROPIC_API_KEY) {
10
- console.error('ANTHROPIC_API_KEY is required for llm-health.');
11
- process.exit(1);
12
+ return null;
12
13
  }
13
14
  const model = process.env.ANTHROPIC_MODEL || 'claude-sonnet-4-5-20250929';
14
- const provider = new anthropic_provider_js_1.AnthropicProvider({
15
- apiKey: process.env.ANTHROPIC_API_KEY,
16
- model,
17
- });
18
15
  try {
16
+ const provider = new anthropic_provider_js_1.AnthropicProvider({ apiKey: process.env.ANTHROPIC_API_KEY, model });
17
+ const response = await provider.generateText('Reply with OK.', { maxTokens: 8, timeout: 15000 });
18
+ return { provider: 'Anthropic', model, ok: true, response: response.text.trim() };
19
+ }
20
+ catch (error) {
21
+ const message = error instanceof provider_interface_js_1.LLMProviderError || error instanceof Error ? error.message : String(error);
22
+ return { provider: 'Anthropic', model, ok: false, error: message };
23
+ }
24
+ }
25
+ async function checkOpenAI() {
26
+ if (!process.env.OPENAI_API_KEY) {
27
+ return null;
28
+ }
29
+ const model = process.env.OPENAI_MODEL || 'gpt-4o';
30
+ try {
31
+ const provider = new openai_provider_js_1.OpenAIProvider({ apiKey: process.env.OPENAI_API_KEY, model });
32
+ const response = await provider.generateText('Reply with OK.', { maxTokens: 8, timeout: 15000 });
33
+ return { provider: 'OpenAI', model, ok: true, response: response.text.trim() };
34
+ }
35
+ catch (error) {
36
+ const message = error instanceof provider_interface_js_1.LLMProviderError || error instanceof Error ? error.message : String(error);
37
+ return { provider: 'OpenAI', model, ok: false, error: message };
38
+ }
39
+ }
40
+ async function checkOllama() {
41
+ const baseUrl = process.env.OLLAMA_HOST || 'http://localhost:11434';
42
+ const model = process.env.OLLAMA_MODEL || 'llama3';
43
+ try {
44
+ const provider = new ollama_provider_js_1.OllamaProvider({ baseUrl, model });
19
45
  const response = await provider.generateText('Reply with OK.', { maxTokens: 8, timeout: 15000 });
20
- const text = response.text.trim();
21
- console.log(`Anthropic OK (${model}) -> ${text}`);
46
+ return { provider: 'Ollama', model, ok: true, response: response.text.trim() };
22
47
  }
23
48
  catch (error) {
24
- if (error instanceof provider_interface_js_1.LLMProviderError) {
25
- console.error(`Anthropic failed: ${error.message}`);
26
- if (error.cause instanceof Error) {
27
- console.error(`Cause: ${error.cause.message}`);
28
- }
49
+ const message = error instanceof provider_interface_js_1.LLMProviderError || error instanceof Error ? error.message : String(error);
50
+ return { provider: 'Ollama', model, ok: false, error: message };
51
+ }
52
+ }
53
+ async function runLlmHealth() {
54
+ const checks = await Promise.allSettled([checkAnthropic(), checkOpenAI(), checkOllama()]);
55
+ const results = [];
56
+ for (const check of checks) {
57
+ if (check.status === 'fulfilled' && check.value) {
58
+ results.push(check.value);
29
59
  }
30
- else if (error instanceof Error) {
31
- console.error(`Anthropic failed: ${error.message}`);
60
+ }
61
+ if (results.length === 0) {
62
+ console.error('No LLM providers configured. Set ANTHROPIC_API_KEY, OPENAI_API_KEY, or OLLAMA_HOST.');
63
+ process.exit(1);
64
+ }
65
+ let anyFailed = false;
66
+ for (const r of results) {
67
+ if (r.ok) {
68
+ console.log(`${r.provider} OK (${r.model}) -> ${r.response}`);
32
69
  }
33
70
  else {
34
- console.error(`Anthropic failed: ${String(error)}`);
71
+ console.error(`${r.provider} failed (${r.model}): ${r.error}`);
72
+ anyFailed = true;
35
73
  }
74
+ }
75
+ if (anyFailed) {
36
76
  process.exit(1);
37
77
  }
38
78
  }
@@ -1 +1 @@
1
- {"version":3,"file":"usage.d.ts","sourceRoot":"","sources":["../../src/cli/usage.ts"],"names":[],"mappings":"AAGA,wBAAgB,UAAU,IAAI,IAAI,CA2FjC"}
1
+ {"version":3,"file":"usage.d.ts","sourceRoot":"","sources":["../../src/cli/usage.ts"],"names":[],"mappings":"AAGA,wBAAgB,UAAU,IAAI,IAAI,CA+FjC"}
package/dist/cli/usage.js CHANGED
@@ -18,6 +18,10 @@ function printUsage() {
18
18
  ' e2e-ai-agents generate [--scenarios <path|json>] [--max-attempts <n>] [--dry-run]',
19
19
  ' e2e-ai-agents analyze --path <app-root> [--tests-root <path>] [--since <ref>] [--generate] [--generate-output <dir>] [--heal] [--heal-report <json>]',
20
20
  ' e2e-ai-agents train --path <project-root> [--no-enrich] [--validate] [--since <ref>] [--pr <num>]',
21
+ ' e2e-ai-agents crew --path <app-root> [--crew-workflow <name>]',
22
+ ' e2e-ai-agents gate --path <app-root> --threshold <n> [--since <ref>]',
23
+ ' e2e-ai-agents cost-report [--path <app-root>]',
24
+ ' e2e-ai-agents bootstrap --path <app-root> [options]',
21
25
  ' e2e-ai-agents llm-health',
22
26
  '',
23
27
  'Options:',
package/dist/cli.js CHANGED
@@ -61,7 +61,7 @@ async function main() {
61
61
  }
62
62
  if (args.help || !args.command) {
63
63
  (0, usage_js_1.printUsage)();
64
- process.exit(args.command ? 0 : 1);
64
+ process.exit(0);
65
65
  }
66
66
  if (args.command === 'llm-health') {
67
67
  await (0, llm_health_js_1.runLlmHealth)();
@@ -17,7 +17,10 @@ export async function runGateCommand(args, autoConfig) {
17
17
  console.error('Error: --path is required for gate command');
18
18
  process.exit(1);
19
19
  }
20
- const threshold = args.gateThreshold ?? 80;
20
+ let threshold = args.gateThreshold ?? 80;
21
+ if (threshold > 0 && threshold < 1) {
22
+ threshold = threshold * 100;
23
+ }
21
24
  const { config } = resolveConfig(process.cwd(), autoConfig, {
22
25
  path: args.path,
23
26
  profile: args.profile,
@@ -1,5 +1,7 @@
1
1
  // Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
2
2
  // See LICENSE.txt for license information.
3
+ import { mkdirSync, writeFileSync } from 'fs';
4
+ import { join } from 'path';
3
5
  import { getChangedFiles } from '../../agent/git.js';
4
6
  import { analyzeImpact as analyzeImpactV2 } from '../../engine/impact_engine.js';
5
7
  export function runImpactCommand(args, config) {
@@ -20,4 +22,9 @@ export function runImpactCommand(args, config) {
20
22
  console.warn(` Warning: ${w}`);
21
23
  }
22
24
  }
25
+ const outputDir = join(reportRoot, '.e2e-ai-agents');
26
+ mkdirSync(outputDir, { recursive: true });
27
+ const planPath = join(outputDir, 'plan.json');
28
+ writeFileSync(planPath, JSON.stringify(impactResult, null, 2));
29
+ console.log(`Wrote ${planPath}`);
23
30
  }
@@ -1,35 +1,75 @@
1
1
  // Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
2
2
  // See LICENSE.txt for license information.
3
3
  import { AnthropicProvider } from '../../anthropic_provider.js';
4
+ import { OpenAIProvider } from '../../openai_provider.js';
5
+ import { OllamaProvider } from '../../ollama_provider.js';
4
6
  import { LLMProviderError } from '../../provider_interface.js';
5
- export async function runLlmHealth() {
7
+ async function checkAnthropic() {
6
8
  if (!process.env.ANTHROPIC_API_KEY) {
7
- console.error('ANTHROPIC_API_KEY is required for llm-health.');
8
- process.exit(1);
9
+ return null;
9
10
  }
10
11
  const model = process.env.ANTHROPIC_MODEL || 'claude-sonnet-4-5-20250929';
11
- const provider = new AnthropicProvider({
12
- apiKey: process.env.ANTHROPIC_API_KEY,
13
- model,
14
- });
15
12
  try {
13
+ const provider = new AnthropicProvider({ apiKey: process.env.ANTHROPIC_API_KEY, model });
14
+ const response = await provider.generateText('Reply with OK.', { maxTokens: 8, timeout: 15000 });
15
+ return { provider: 'Anthropic', model, ok: true, response: response.text.trim() };
16
+ }
17
+ catch (error) {
18
+ const message = error instanceof LLMProviderError || error instanceof Error ? error.message : String(error);
19
+ return { provider: 'Anthropic', model, ok: false, error: message };
20
+ }
21
+ }
22
+ async function checkOpenAI() {
23
+ if (!process.env.OPENAI_API_KEY) {
24
+ return null;
25
+ }
26
+ const model = process.env.OPENAI_MODEL || 'gpt-4o';
27
+ try {
28
+ const provider = new OpenAIProvider({ apiKey: process.env.OPENAI_API_KEY, model });
29
+ const response = await provider.generateText('Reply with OK.', { maxTokens: 8, timeout: 15000 });
30
+ return { provider: 'OpenAI', model, ok: true, response: response.text.trim() };
31
+ }
32
+ catch (error) {
33
+ const message = error instanceof LLMProviderError || error instanceof Error ? error.message : String(error);
34
+ return { provider: 'OpenAI', model, ok: false, error: message };
35
+ }
36
+ }
37
+ async function checkOllama() {
38
+ const baseUrl = process.env.OLLAMA_HOST || 'http://localhost:11434';
39
+ const model = process.env.OLLAMA_MODEL || 'llama3';
40
+ try {
41
+ const provider = new OllamaProvider({ baseUrl, model });
16
42
  const response = await provider.generateText('Reply with OK.', { maxTokens: 8, timeout: 15000 });
17
- const text = response.text.trim();
18
- console.log(`Anthropic OK (${model}) -> ${text}`);
43
+ return { provider: 'Ollama', model, ok: true, response: response.text.trim() };
19
44
  }
20
45
  catch (error) {
21
- if (error instanceof LLMProviderError) {
22
- console.error(`Anthropic failed: ${error.message}`);
23
- if (error.cause instanceof Error) {
24
- console.error(`Cause: ${error.cause.message}`);
25
- }
46
+ const message = error instanceof LLMProviderError || error instanceof Error ? error.message : String(error);
47
+ return { provider: 'Ollama', model, ok: false, error: message };
48
+ }
49
+ }
50
+ export async function runLlmHealth() {
51
+ const checks = await Promise.allSettled([checkAnthropic(), checkOpenAI(), checkOllama()]);
52
+ const results = [];
53
+ for (const check of checks) {
54
+ if (check.status === 'fulfilled' && check.value) {
55
+ results.push(check.value);
26
56
  }
27
- else if (error instanceof Error) {
28
- console.error(`Anthropic failed: ${error.message}`);
57
+ }
58
+ if (results.length === 0) {
59
+ console.error('No LLM providers configured. Set ANTHROPIC_API_KEY, OPENAI_API_KEY, or OLLAMA_HOST.');
60
+ process.exit(1);
61
+ }
62
+ let anyFailed = false;
63
+ for (const r of results) {
64
+ if (r.ok) {
65
+ console.log(`${r.provider} OK (${r.model}) -> ${r.response}`);
29
66
  }
30
67
  else {
31
- console.error(`Anthropic failed: ${String(error)}`);
68
+ console.error(`${r.provider} failed (${r.model}): ${r.error}`);
69
+ anyFailed = true;
32
70
  }
71
+ }
72
+ if (anyFailed) {
33
73
  process.exit(1);
34
74
  }
35
75
  }
@@ -15,6 +15,10 @@ export function printUsage() {
15
15
  ' e2e-ai-agents generate [--scenarios <path|json>] [--max-attempts <n>] [--dry-run]',
16
16
  ' e2e-ai-agents analyze --path <app-root> [--tests-root <path>] [--since <ref>] [--generate] [--generate-output <dir>] [--heal] [--heal-report <json>]',
17
17
  ' e2e-ai-agents train --path <project-root> [--no-enrich] [--validate] [--since <ref>] [--pr <num>]',
18
+ ' e2e-ai-agents crew --path <app-root> [--crew-workflow <name>]',
19
+ ' e2e-ai-agents gate --path <app-root> --threshold <n> [--since <ref>]',
20
+ ' e2e-ai-agents cost-report [--path <app-root>]',
21
+ ' e2e-ai-agents bootstrap --path <app-root> [options]',
18
22
  ' e2e-ai-agents llm-health',
19
23
  '',
20
24
  'Options:',
package/dist/esm/cli.js CHANGED
@@ -59,7 +59,7 @@ async function main() {
59
59
  }
60
60
  if (args.help || !args.command) {
61
61
  printUsage();
62
- process.exit(args.command ? 0 : 1);
62
+ process.exit(0);
63
63
  }
64
64
  if (args.command === 'llm-health') {
65
65
  await runLlmHealth();