@amitdeshmukh/ax-crew 3.11.1 → 4.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -2,6 +2,8 @@ import { AxCrew } from './agents/index.js';
  import { AxCrewFunctions } from './functions/index.js';
  import type { CrewConfigInput, AgentConfig } from './types.js';
  import type { UsageCost, AggregatedMetrics, AggregatedCosts, StateInstance, FunctionRegistryType } from './types.js';
+ export * from './metrics/index.js';
+ export { MetricsRegistry } from './metrics/index.js';
  /**
  * The configuration for an AxCrew.
  *
package/dist/index.js CHANGED
@@ -1,3 +1,5 @@
  import { AxCrew } from './agents/index.js';
  import { AxCrewFunctions } from './functions/index.js';
+ export * from './metrics/index.js';
+ export { MetricsRegistry } from './metrics/index.js';
  export { AxCrew, AxCrewFunctions, };
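The two new re-exports make the metrics module available from the package root alongside AxCrew. A minimal sketch of the resulting import surface (the package-root specifier is an assumption based on the package.json entry points shown further below):

    // Sketch only: assumes the published entry point re-exports MetricsRegistry as above.
    import { AxCrew, MetricsRegistry } from "@amitdeshmukh/ax-crew";
    // The star re-export also surfaces the metrics types at the root.
    import type { MetricsSnapshot, LabelKeys } from "@amitdeshmukh/ax-crew";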
@@ -0,0 +1,2 @@
+ export * from './types.js';
+ export * as MetricsRegistry from './registry.js';
@@ -0,0 +1,2 @@
+ export * from './types.js';
+ export * as MetricsRegistry from './registry.js';
@@ -0,0 +1,9 @@
+ import type { LabelKeys, MetricsSnapshot, TokenUsage } from './types.js';
+ export declare function recordRequest(labels: LabelKeys, streaming: boolean, durationMs: number): void;
+ export declare function recordError(labels: LabelKeys): void;
+ export declare function recordTokens(labels: LabelKeys, usage: TokenUsage): void;
+ export declare function recordEstimatedCost(labels: LabelKeys, usd: number): void;
+ export declare function recordFunctionCall(labels: LabelKeys, latencyMs: number): void;
+ export declare function snapshot(labels: LabelKeys): MetricsSnapshot;
+ export declare function reset(labels?: LabelKeys): void;
+ export declare function snapshotCrew(crewId: string): MetricsSnapshot;
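The registry is a module-level store keyed by crewId, agent, provider, and model, so recording and reading metrics are plain function calls. A usage sketch against these declarations (import path assumes the root re-export shown earlier; label values are illustrative):

    import { MetricsRegistry } from "@amitdeshmukh/ax-crew";

    // Illustrative labels; agent, provider, and model are optional.
    const labels = { crewId: "crew-1", agent: "writer", provider: "openai", model: "gpt-4o-mini" };

    MetricsRegistry.recordRequest(labels, false, 1240);   // non-streaming call that took 1240 ms
    MetricsRegistry.recordTokens(labels, { promptTokens: 350, completionTokens: 120 });
    MetricsRegistry.recordEstimatedCost(labels, 0.00045);

    console.log(MetricsRegistry.snapshot(labels).tokens.totalTokens);      // 470
    console.log(MetricsRegistry.snapshotCrew("crew-1").estimatedCostUSD);  // 0.00045
    MetricsRegistry.reset();                                               // clears every counter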
@@ -0,0 +1,138 @@
+ import Big from 'big.js';
+ const store = new Map();
+ function keyOf(labels) {
+     const { crewId, agent = '', provider = '', model = '' } = labels;
+     return [crewId, agent, provider, model].join('|');
+ }
+ function getOrInit(labels) {
+     const k = keyOf(labels);
+     let c = store.get(k);
+     if (!c) {
+         c = {
+             requests: 0,
+             errors: 0,
+             streaming: 0,
+             durationMsSum: 0,
+             durationCount: 0,
+             inputTokens: 0,
+             outputTokens: 0,
+             estimatedCostUSD: 0,
+             functionCalls: 0,
+             functionLatencyMs: 0,
+         };
+         store.set(k, c);
+     }
+     return c;
+ }
+ export function recordRequest(labels, streaming, durationMs) {
+     const c = getOrInit(labels);
+     c.requests += 1;
+     if (streaming)
+         c.streaming += 1;
+     c.durationMsSum += durationMs;
+     c.durationCount += 1;
+ }
+ export function recordError(labels) {
+     const c = getOrInit(labels);
+     c.errors += 1;
+ }
+ export function recordTokens(labels, usage) {
+     const c = getOrInit(labels);
+     c.inputTokens += usage.promptTokens || 0;
+     c.outputTokens += usage.completionTokens || 0;
+ }
+ export function recordEstimatedCost(labels, usd) {
+     const c = getOrInit(labels);
+     const current = new Big(c.estimatedCostUSD || 0);
+     const addition = new Big(usd || 0);
+     c.estimatedCostUSD = Number(current.plus(addition));
+ }
+ export function recordFunctionCall(labels, latencyMs) {
+     const c = getOrInit(labels);
+     c.functionCalls += 1;
+     c.functionLatencyMs += latencyMs || 0;
+ }
+ export function snapshot(labels) {
+     const c = getOrInit(labels);
+     const totalTokens = c.inputTokens + c.outputTokens;
+     return {
+         provider: labels.provider,
+         model: labels.model,
+         requests: {
+             totalRequests: c.requests,
+             totalErrors: c.errors,
+             errorRate: c.requests > 0 ? c.errors / c.requests : 0,
+             totalStreamingRequests: c.streaming,
+             durationMsSum: c.durationMsSum,
+             durationCount: c.durationCount,
+         },
+         tokens: {
+             promptTokens: c.inputTokens,
+             completionTokens: c.outputTokens,
+             totalTokens,
+         },
+         estimatedCostUSD: Number(new Big(c.estimatedCostUSD || 0).round(5)),
+         functions: {
+             totalFunctionCalls: c.functionCalls,
+             totalFunctionLatencyMs: c.functionLatencyMs,
+         },
+     };
+ }
+ export function reset(labels) {
+     if (!labels) {
+         store.clear();
+         return;
+     }
+     const k = keyOf(labels);
+     store.delete(k);
+ }
+ export function snapshotCrew(crewId) {
+     const empty = {
+         requests: 0,
+         errors: 0,
+         streaming: 0,
+         durationMsSum: 0,
+         durationCount: 0,
+         inputTokens: 0,
+         outputTokens: 0,
+         estimatedCostUSD: 0,
+         functionCalls: 0,
+         functionLatencyMs: 0,
+     };
+     const agg = Array.from(store.entries()).reduce((acc, [k, v]) => {
+         if (k.startsWith(crewId + '|')) {
+             acc.requests += v.requests;
+             acc.errors += v.errors;
+             acc.streaming += v.streaming;
+             acc.durationMsSum += v.durationMsSum;
+             acc.durationCount += v.durationCount;
+             acc.inputTokens += v.inputTokens;
+             acc.outputTokens += v.outputTokens;
+             acc.estimatedCostUSD = Number(new Big(acc.estimatedCostUSD || 0).plus(v.estimatedCostUSD || 0));
+             acc.functionCalls += v.functionCalls;
+             acc.functionLatencyMs += v.functionLatencyMs;
+         }
+         return acc;
+     }, { ...empty });
+     const totalTokens = agg.inputTokens + agg.outputTokens;
+     return {
+         requests: {
+             totalRequests: agg.requests,
+             totalErrors: agg.errors,
+             errorRate: agg.requests > 0 ? agg.errors / agg.requests : 0,
+             totalStreamingRequests: agg.streaming,
+             durationMsSum: agg.durationMsSum,
+             durationCount: agg.durationCount,
+         },
+         tokens: {
+             promptTokens: agg.inputTokens,
+             completionTokens: agg.outputTokens,
+             totalTokens,
+         },
+         estimatedCostUSD: Number(new Big(agg.estimatedCostUSD || 0).round(5)),
+         functions: {
+             totalFunctionCalls: agg.functionCalls,
+             totalFunctionLatencyMs: agg.functionLatencyMs,
+         },
+     };
+ }
@@ -0,0 +1,40 @@
+ export interface TokenUsage {
+     promptTokens: number;
+     completionTokens: number;
+     totalTokens?: number;
+ }
+ export interface CostSnapshot {
+     usdTotal: number;
+     tokenUsage: TokenUsage;
+ }
+ export interface RequestStats {
+     totalRequests: number;
+     totalErrors: number;
+     errorRate: number;
+     totalStreamingRequests: number;
+     durationMsSum: number;
+     durationCount: number;
+ }
+ export interface FunctionStats {
+     totalFunctionCalls: number;
+     totalFunctionLatencyMs: number;
+ }
+ export interface MetricsSnapshot {
+     provider?: string;
+     model?: string;
+     requests: RequestStats;
+     tokens: TokenUsage;
+     estimatedCostUSD: number;
+     functions: FunctionStats;
+ }
+ export interface LabelKeys {
+     crewId: string;
+     agent?: string;
+     provider?: string;
+     model?: string;
+ }
+ export interface BudgetConfig {
+     maxTokens?: number;
+     maxCost?: number;
+     costPerModel?: Record<string, number>;
+ }
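BudgetConfig is declared here but not referenced elsewhere in this diff. As an illustration of how these types compose, a hypothetical guard could compare a MetricsSnapshot against a budget (withinBudget is not part of the package):

    import type { BudgetConfig, MetricsSnapshot } from "@amitdeshmukh/ax-crew";

    // Hypothetical helper: true while the snapshot stays inside the configured limits.
    function withinBudget(snap: MetricsSnapshot, budget: BudgetConfig): boolean {
      const totalTokens = snap.tokens.totalTokens ?? snap.tokens.promptTokens + snap.tokens.completionTokens;
      const tokensOk = budget.maxTokens === undefined || totalTokens <= budget.maxTokens;
      const costOk = budget.maxCost === undefined || snap.estimatedCostUSD <= budget.maxCost;
      return tokensOk && costOk;
    }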
@@ -0,0 +1 @@
+ export {};
package/dist/types.d.ts CHANGED
@@ -1,5 +1,5 @@
- import type { AxFunction, AxSignature, AxModelConfig, AxMCPStreamableHTTPTransportOptions } from '@ax-llm/ax';
- import type { Provider } from './agents/agentConfig.js';
+ import type { AxFunction, AxSignature, AxModelConfig, AxMCPStreamableHTTPTransportOptions, AxProgramForwardOptions } from '@ax-llm/ax';
+ export type Provider = 'openai' | 'anthropic' | 'google-gemini' | 'mistral' | 'groq' | 'cohere' | 'together' | 'deepseek' | 'ollama' | 'huggingface' | 'openrouter' | 'azure-openai' | 'reka' | 'x-grok';
  /**
  * A state instance that is shared between agents.
  * This can be used to store data that becomes available to all agents and functions in an out-of-band manner.
@@ -117,7 +117,7 @@ interface MCPHTTPSSETransportConfig {
  * @property {string} mcpEndpoint - The HTTP endpoint URL for the MCP server.
  * @property {AxMCPStreamableHTTPTransportOptions} options - Optional transport options.
  */
- interface MCPStreambleHTTPTransportConfig {
+ interface MCPStreamableHTTPTransportConfig {
      mcpEndpoint: string;
      options?: AxMCPStreamableHTTPTransportOptions;
  }
@@ -126,7 +126,7 @@ interface MCPStreambleHTTPTransportConfig {
  *
  * @property {MCPStdioTransportConfig | MCPHTTPSSETransportConfig | MCPStreambleHTTPTransportConfig} config - The config for the MCP server. Config can be either stdio, http-sse, or streamable http transport.
  */
- type MCPTransportConfig = MCPStdioTransportConfig | MCPHTTPSSETransportConfig | MCPStreambleHTTPTransportConfig;
+ type MCPTransportConfig = MCPStdioTransportConfig | MCPHTTPSSETransportConfig | MCPStreamableHTTPTransportConfig;
  /**
  * The configuration for an agent.
  *
@@ -138,7 +138,7 @@ type MCPTransportConfig = MCPStdioTransportConfig | MCPHTTPSSETransportConfig |
  * @property {AxModelConfig & { model: string }} ai - The AI model configuration to be passed to the agent.
  * @property {boolean} debug - Whether to enable debug mode.
  * @property {string} apiURL - Set this if you are using a custom API URL e.g. ollama on localhost.
- * @property {Record<string, any>} options - Agent options. Refer to the Ax documentation for more details.
+ * @property {Partial<AxProgramForwardOptions<any>> & Record<string, any>} options - Agent options including thinkingTokenBudget, showThoughts, etc. Also allows arbitrary provider-specific keys.
  * @property {string[]} functions - Function names to be used by the agent.
  * @property {string[]} agents - Sub-agent available to the agent.
  * @property {Record<string, any>[]} examples - DSPy examples for the agent to learn from.
@@ -155,7 +155,7 @@ interface AgentConfig {
      };
      debug?: boolean;
      apiURL?: string;
-     options?: Record<string, any>;
+     options?: Partial<AxProgramForwardOptions<any>> & Record<string, any>;
      functions?: string[];
      agents?: string[];
      examples?: Array<Record<string, any>>;
@@ -167,4 +167,4 @@ interface AgentConfig {
  type CrewConfigInput = string | {
      crew: AgentConfig[];
  };
- export { type AgentConfig, type CrewConfigInput, type AggregatedMetrics, type StateInstance, type FunctionRegistryType, type MCPStdioTransportConfig, type MCPHTTPSSETransportConfig, type MCPStreambleHTTPTransportConfig, type MCPTransportConfig, type ModelUsage, type ModelInfo, type UsageCost, type AggregatedCosts };
+ export { type AgentConfig, type CrewConfigInput, type AggregatedMetrics, type StateInstance, type FunctionRegistryType, type MCPStdioTransportConfig, type MCPHTTPSSETransportConfig, type MCPStreamableHTTPTransportConfig, type MCPTransportConfig, type ModelUsage, type ModelInfo, type UsageCost, type AggregatedCosts };
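With options now typed as Partial<AxProgramForwardOptions<any>> & Record<string, any>, forward options such as thinkingTokenBudget and showThoughts get editor completion while provider-specific keys still pass through. A configuration sketch under that assumption (the model, key name, extra flag, and the package-root import path for AgentConfig are all placeholders, not taken from this diff):

    import type { AgentConfig } from "@amitdeshmukh/ax-crew";

    const planner: AgentConfig = {
      name: "planner",
      description: "Plans multi-step tasks",
      signature: "task:string -> plan:string",
      provider: "google-gemini",
      providerKeyName: "GEMINI_API_KEY",
      ai: { model: "gemini-2.5-flash-lite" },
      options: {
        thinkingTokenBudget: "medium",   // typed via AxProgramForwardOptions; value is illustrative
        showThoughts: true,              // typed via AxProgramForwardOptions
        vendorSpecificFlag: true,        // arbitrary provider-specific keys still accepted
      },
    };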
@@ -1,17 +1,21 @@
  import { AxCrew } from "../dist/index.js";
- import { AxCrewFunctions } from "../src/functions/index.js";
+ import { AxCrewFunctions } from "../dist/functions/index.js";
+ import type { AxCrewConfig } from "../dist/index.js";
+ import type { Provider } from "../dist/types.js";

  // Example agent configuration
- const agentConfig = {
+ const agentConfig: AxCrewConfig = {
    crew: [
      {
        name: "researcher",
        description: "A research agent that finds information",
        signature: "query:string -> research:string",
-       provider: "anthropic",
-       providerKeyName: "ANTHROPIC_API_KEY",
+       provider: "google-gemini" as Provider,
+       providerKeyName: "GEMINI_API_KEY",
        ai: {
-         model: "claude-3-haiku-20240307"
+         model: "gemini-2.5-flash-lite",
+         maxTokens: 4000,
+         stream: true
        },
        options: {
          debug: true,
@@ -22,10 +26,12 @@ const agentConfig = {
        name: "writer",
        description: "A writing agent that creates content",
        signature: "topic:string -> article:string",
-       provider: "anthropic",
+       provider: "anthropic" as Provider,
        providerKeyName: "ANTHROPIC_API_KEY",
        ai: {
-         model: "claude-3-haiku-20240307"
+         model: "claude-3-haiku-20240307",
+         maxTokens: 4000,
+         stream: true
        },
        options: {
          debug: true,
@@ -61,12 +67,11 @@ async function main() {
    // Print the article
    console.log("Article:", article);

-   // Print usage costs
-   console.log("\nUsage:\n+++++++++++++++++++++++++++++++++");
-   console.log("Writer Agent:", JSON.stringify(writer.getAccumulatedCosts(), null, 2));
-   console.log("Researcher Agent Last Usage:", JSON.stringify(researcher.getLastUsageCost(), null, 2));
-   console.log("Researcher Agent Accumulated:", JSON.stringify(researcher.getAccumulatedCosts(), null, 2));
-   console.log("Total Cost:", JSON.stringify(crew.getAggregatedCosts(), null, 2));
+   // Print metrics snapshots (new mechanism)
+   console.log("\nMetrics:\n+++++++++++++++++++++++++++++++++");
+   console.log("Writer Metrics:", JSON.stringify((writer as any).getMetrics?.(), null, 2));
+   console.log("Researcher Metrics:", JSON.stringify((researcher as any).getMetrics?.(), null, 2));
+   console.log("Crew Metrics:", JSON.stringify((crew as any).getCrewMetrics?.(), null, 2));

    // If you want to start fresh with cost tracking
    crew.resetCosts();
@@ -1,10 +1,11 @@
  import { AxCrew } from "../dist/index.js";
+ import type { AxCrewConfig } from "../dist/index.js";

  import dotenv from "dotenv";
  dotenv.config();

  // Define the crew configuration
- const config = {
+ const config: AxCrewConfig = {
    crew: [
      {
        name: "MapsAgent",
@@ -15,9 +16,11 @@ const config = {
        ai: {
          model: "claude-3-5-sonnet-latest",
          temperature: 0,
+         maxTokens: 1000,
+         stream: true
        },
        options: {
-         debug: true,
+         debug: true
        },
        "mcpServers": {
          "google-maps": {
@@ -43,6 +46,7 @@ const config = {
          model: "gpt-4o-mini",
          maxTokens: 1000,
          temperature: 0,
+         stream: true
        },
        options: {
          debug: true,
@@ -59,10 +63,10 @@ const config = {
        ai: {
          model: "gemini-1.5-pro",
          temperature: 0,
+         stream: true
        },
        options: {
          debug: false,
-         codeExecution: true,
        },
      },
    ],
@@ -89,11 +93,11 @@ const main = async (): Promise<void> => {

    console.log(`\nAnswer: ${JSON.stringify(managerResponse?.answer, null, 2)}`);

-   // Print usage costs
-   console.log("\nUsage:\n+++++++++++++++++++++++++++++++++");
-   console.log("Manager Agent Cost in $:", JSON.stringify(managerAgent?.getAccumulatedCosts()?.totalCost, null, 2));
-   console.log("Maps Agent Cost in $:", JSON.stringify(mapsAgent?.getAccumulatedCosts()?.totalCost, null, 2));
-   console.log("Total Cost in $:", JSON.stringify(crew.getAggregatedCosts()?.totalCost, null, 2));
+   // Print metrics
+   console.log("\nMetrics:\n+++++++++++++++++++++++++++++++++");
+   console.log("Manager Agent Metrics:", JSON.stringify((managerAgent as any)?.getMetrics?.(), null, 2));
+   console.log("Maps Agent Metrics:", JSON.stringify((mapsAgent as any)?.getMetrics?.(), null, 2));
+   console.log("Crew Metrics:", JSON.stringify((crew as any)?.getCrewMetrics?.(), null, 2));
  };

  main()
@@ -0,0 +1,89 @@
+ import { AxCrew } from "../dist/index.js";
+
+ import dotenv from "dotenv";
+ dotenv.config();
+
+ // Define the crew configuration
+ const config = {
+   crew: [
+     {
+       name: "DeepResearchAgent",
+       description: "A specialized agent that performs deep research using perplexity",
+       signature: 'researchTopic:string "a topic of interest" -> result:string "The result of the research"',
+       provider: "openai",
+       providerKeyName: "OPENAI_API_KEY",
+       ai: {
+         model: "gpt-4.1",
+         temperature: 0.1,
+       },
+       options: {
+         stream: true,
+         debug: true,
+       },
+       mcpServers: {
+         "perplexity-mcp": {
+           "env": {
+             "PERPLEXITY_API_KEY": process.env.PERPLEXITY_API_KEY,
+             "PERPLEXITY_MODEL": "sonar-deep-research"
+           },
+           "command": "uvx",
+           "args": [
+             "perplexity-mcp"
+           ]
+         }
+       }
+     }
+   ]
+ };
+
+ // Create a new instance of AxCrew with the config
+ const crew = new AxCrew(config);
+
+ // Add the agents to the crew
+ await crew.addAllAgents();
+
+ // Get agent instances
+ const researchAgent = crew.agents?.get("DeepResearchAgent");
+
+ const userQuery: string = "You are a Research assistant. Your task is to analyse the company SpaceX, its origins, current team members, customer profile and any news worthy happenings. Prepare a detailed report.";
+
+ console.log(`\n\nUser Query: ${userQuery}`);
+
+ const main = async (): Promise<void> => {
+   // Start timing
+   const startTime = Date.now();
+   console.log(`\n🕐 Starting research task at: ${new Date(startTime).toLocaleTimeString()}`);
+
+   const response = await researchAgent?.streamingForward({
+     researchTopic: userQuery,
+   });
+
+   if (response) {
+     try {
+       for await (const chunk of response) {
+         if (chunk.delta && typeof chunk.delta === 'object' && 'results' in chunk.delta) {
+           process.stdout.write(chunk.delta.results);
+         }
+       }
+       console.log('\n');
+     } catch (error) {
+       console.error('Error processing stream:', error);
+     }
+   }
+
+   // End timing and calculate duration
+   const endTime = Date.now();
+   const duration = endTime - startTime;
+   const durationInSeconds = (duration / 1000).toFixed(2);
+   const durationInMinutes = (duration / 60000).toFixed(2);
+
+   console.log(`\n⏱️ Task completed at: ${new Date(endTime).toLocaleTimeString()}`);
+   console.log(`⏱️ Total time taken: ${duration}ms (${durationInSeconds}s / ${durationInMinutes}min)`);
+ };
+
+ main()
+   .then(() => {
+     console.log("Done");
+     process.exit(0);
+   })
+   .catch(console.error);
@@ -0,0 +1,74 @@
+ import { AxCrew } from "../dist/index.js";
+
+ import dotenv from "dotenv";
+ dotenv.config();
+
+ // Define the crew configuration
+ const config = {
+   crew: [
+     {
+       name: "XSearchAgent",
+       description: "A specialized agent that can search X (Twitter) posts for the latest news and updates about specific topics, people, or events. It can find trending posts, recent tweets, and real-time information from X platform.",
+       signature: 'searchQuery:string "a search query" -> result:string "the response to the user query citing relevant sources including X posts and other web sources"',
+       provider: "grok",
+       providerKeyName: "GROK_API_KEY",
+       ai: {
+         model: "grok-3-latest",
+         temperature: 0.1,
+       },
+       options: {
+         stream: true,
+         debug: true,
+         searchParameters: {
+           mode: 'on',
+           returnCitations: true,
+           maxSearchResults: 10,
+           sources: [
+             { type: 'x' },
+             { type: 'web' },
+             { type: 'news' }
+           ]
+         }
+       }
+     }
+   ]
+ };
+
+ // Create a new instance of AxCrew with the config
+ const crew = new AxCrew(config);
+
+ // Add the agents to the crew
+ await crew.addAllAgents();
+
+ // Get agent instances
+ const xSearchAgent = crew.agents?.get("XSearchAgent");
+
+ const userQuery: string = "when is the next ISRO launch date and what is the launch vehicle and payload";
+
+ console.log(`\n\nUser Query: ${userQuery}`);
+
+ const main = async (): Promise<void> => {
+   const response = await xSearchAgent?.streamingForward({
+     searchQuery: userQuery,
+   });
+
+   if (response) {
+     try {
+       for await (const chunk of response) {
+         if (chunk.delta && typeof chunk.delta === 'object' && 'results' in chunk.delta) {
+           process.stdout.write(chunk.delta.results);
+         }
+       }
+       console.log('\n');
+     } catch (error) {
+       console.error('Error processing stream:', error);
+     }
+   }
+ };
+
+ main()
+   .then(() => {
+     console.log("Done");
+     process.exit(0);
+   })
+   .catch(console.error);
@@ -1,7 +1,8 @@
  import { AxCrew } from "../dist/index.js";
+ import type { AxCrewConfig } from "../src/index.js";

  // Define the crew configuration
- const config = {
+ const config: AxCrewConfig = {
    crew: [
      {
        name: "MathAgent",
@@ -62,10 +63,11 @@ const main = async (): Promise<void> => {

      console.log(`\nAnswer: ${JSON.stringify(managerResponse.answer, null, 2)}`);

-     // Print usage costs
-     console.log("\nUsage:\n+++++++++++++++++++++++++++++++++");
-     console.log("Manager Agent:", JSON.stringify(managerAgent.getAccumulatedCosts(), null, 2));
-     console.log("Total Cost:", JSON.stringify(crew.getAggregatedCosts(), null, 2));
+     // Print metrics
+     console.log("\nMetrics:\n+++++++++++++++++++++++++++++++++");
+     console.log("Manager Agent Metrics:", JSON.stringify((managerAgent as any).getMetrics?.(), null, 2));
+     console.log("Math Agent Metrics:", JSON.stringify((mathAgent as any).getMetrics?.(), null, 2));
+     console.log("Crew Metrics:", JSON.stringify((crew as any).getCrewMetrics?.(), null, 2));
    }
  };

@@ -1,10 +1,11 @@
  import { AxCrew } from "../dist/index.js";
+ import type { AxCrewConfig } from "../src/index.js";

  import dotenv from "dotenv";
  dotenv.config();

  // Define the crew configuration
- const config = {
+ const config: AxCrewConfig = {
    crew: [
      {
        name: "ManagerAgent",
@@ -75,11 +76,11 @@ const main = async (): Promise<void> => {
      }
    }

-   // Print usage costs
-   console.log("\nUsage:\n+++++++++++++++++++++++++++++++++");
-   console.log("Manager Agent Cost in $:", JSON.stringify(managerAgent?.getAccumulatedCosts()?.totalCost, null, 2));
-   console.log("Math Agent Cost in $:", JSON.stringify(mathAgent?.getAccumulatedCosts()?.totalCost, null, 2));
-   console.log("Total Cost in $:", JSON.stringify(crew.getAggregatedCosts()?.totalCost, null, 2));
+   // Print metrics
+   console.log("\nMetrics:\n+++++++++++++++++++++++++++++++++");
+   console.log("Manager Agent Metrics:", JSON.stringify((managerAgent as any)?.getMetrics?.(), null, 2));
+   console.log("Math Agent Metrics:", JSON.stringify((mathAgent as any)?.getMetrics?.(), null, 2));
+   console.log("Crew Metrics:", JSON.stringify((crew as any)?.getCrewMetrics?.(), null, 2));
  };

  main()
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
    "type": "module",
    "name": "@amitdeshmukh/ax-crew",
-   "version": "3.11.1",
+   "version": "4.0.1",
    "description": "Build and launch a crew of AI agents with shared state. Built with axllm.dev",
    "main": "dist/index.js",
    "types": "dist/index.d.ts",
@@ -17,14 +17,19 @@
      "test:ui": "vitest --ui"
    },
    "dependencies": {
-     "@ax-llm/ax": "^11.0.50",
+     "big.js": "^7.0.1",
      "decimal.js": "^10.5.0",
      "dotenv": "^16.4.5",
      "upgrade": "^1.1.0",
      "uuid": "^10.0.0"
    },
+   "peerDependencies": {
+     "@ax-llm/ax": "14.0.16",
+     "@ax-llm/ax-tools": "14.0.16"
+   },
    "devDependencies": {
      "@testing-library/jest-dom": "^6.6.3",
+     "@types/big.js": "^6.2.2",
      "@types/node": "^20.14.9",
      "@types/uuid": "^10.0.0",
      "@vitest/coverage-v8": "^3.0.9",