@traccia2/sdk 0.0.1 → 0.0.2

package/README.md CHANGED
@@ -1,11 +1,12 @@
1
1
  # Traccia SDK for TypeScript
2
2
 
3
- Production-grade distributed tracing SDK for agent applications. Built with TypeScript and designed for high performance, reliability, and ease of integration.
3
+ Distributed tracing SDK for agent applications. Built with TypeScript and designed for high performance, reliability, and ease of integration.
4
4
 
5
5
  ## Features
6
6
 
7
7
  - **Distributed Tracing**: Create, propagate, and manage spans across service boundaries
8
8
  - **Automatic Instrumentation**: Optional auto-instrumentation for popular LLM libraries (OpenAI, Anthropic)
9
+ - **Framework Integrations**: Built-in support for LangChain (callbacks) and LangGraph (instrumentation)
9
10
  - **Span Processors**: Built-in processors for sampling, batching, token counting, and cost analysis
10
11
  - **Token & Cost Tracking**: Automatic token counting and cost calculation for LLM calls
11
12
  - **Flexible Exporters**: Export spans via HTTP, console, or custom exporters
@@ -327,6 +328,239 @@ await stopTracing();
327
328
 
328
329
  ## Advanced Usage
329
330
 
331
+ ### Framework Integrations
332
+
333
+ #### LangChain with Callback Handler
334
+
335
+ The SDK provides a `TraciaCallbackHandler` for seamless integration with LangChain:
336
+
337
+ ```typescript
338
+ import { startTracing } from '@traccia/sdk';
339
+ import { TraciaCallbackHandler } from '@traccia/sdk/integrations/langchain-callback';
340
+ import { ChatOpenAI } from '@langchain/openai';
341
+ import { AgentExecutor, createOpenAIToolsAgent } from 'langchain/agents';
342
+
343
+ // Initialize tracing
344
+ await startTracing({
345
+ apiKey: 'your-api-key',
346
+ enableConsoleExporter: true,
347
+ });
348
+
349
+ // Create callback handler for tracing
350
+ const traciaHandler = new TraciaCallbackHandler();
351
+
352
+ // Use with LangChain models and agents
353
+ const model = new ChatOpenAI({
354
+ modelName: 'gpt-4',
355
+ callbacks: [traciaHandler],
356
+ });
357
+
358
+ const agent = await createOpenAIToolsAgent({
359
+ llmWithTools: model,
360
+ tools: [yourTools],
361
+ callbacks: [traciaHandler],
362
+ });
363
+
364
+ const executor = new AgentExecutor({
365
+ agent,
366
+ tools: [yourTools],
367
+ callbacks: [traciaHandler],
368
+ });
369
+
370
+ // Run with automatic tracing
371
+ const result = await executor.invoke({
372
+ input: 'Your prompt here',
373
+ });
374
+ ```
375
+
376
+ The `TraciaCallbackHandler` automatically traces:
377
+
378
+ - **LLM Calls**: Model name, token counts, latency
379
+ - **Agent Actions**: Tool selection and execution
380
+ - **Chain Steps**: Multi-step reasoning and intermediate results
381
+ - **Tool Usage**: Tool names, inputs, and outputs
382
+ - **Errors & Exceptions**: Automatic error recording
383
+
384
+ **Traced Metrics:**
385
+
386
+ ```typescript
387
+ // Automatically captured attributes:
388
+ {
389
+ 'llm.model': 'gpt-4',
390
+ 'llm.tokens.prompt': 150,
391
+ 'llm.tokens.completion': 75,
392
+ 'llm.tokens.total': 225,
393
+ 'agent.action': 'tool_use',
394
+ 'agent.tool': 'calculator',
395
+ 'agent.tool.input': '{"expression": "2 + 2"}',
396
+ 'agent.tool.output': '4',
397
+ 'chain.steps': 3,
398
+ 'error.type': 'ValidationError', // if applicable
399
+ 'error.message': 'Invalid input', // if applicable
400
+ }
401
+ ```
402
+
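+ This version also ships an `auto-langchain` helper module with convenience wrappers (`getTraciaHandler`, `withTracing`, `createTracedOpenAI`, `setupLangChainWithTracing`) that attach the handler for you. A minimal sketch, assuming the module is exposed under the same `@traccia/sdk/integrations` subpath as the callback handler:
+
+ ```typescript
+ import {
+   getTraciaHandler,
+   withTracing,
+   setupLangChainWithTracing,
+ } from '@traccia/sdk/integrations/auto-langchain'; // subpath is an assumption
+ import { ChatOpenAI } from '@langchain/openai';
+
+ // Reuse a single global handler instead of constructing one per component
+ const model = new ChatOpenAI({ modelName: 'gpt-4', callbacks: [getTraciaHandler()] });
+
+ // Or attach the handler to an already-constructed component
+ const tracedModel = withTracing(new ChatOpenAI({ modelName: 'gpt-4' }));
+
+ // Or create a traced model (plus an agent executor when tools are passed) in one call
+ const { model: setupModel, executor, handler } = await setupLangChainWithTracing({
+   modelName: 'gpt-4',
+   systemPrompt: 'You are a helpful assistant.',
+ });
+ ```
+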
403
+ #### LangGraph with Instrumentation
404
+
405
+ For LangGraph, use the `instrumentLangGraph` utility:
406
+
407
+ ```typescript
408
+ import { startTracing } from '@traccia/sdk';
409
+ import { instrumentLangGraph, createTracedNode, createTracedConditional } from '@traccia/sdk/integrations/langgraph-instrumentation';
410
+ import { StateGraph } from '@langchain/langgraph';
411
+
412
+ // Initialize tracing
413
+ await startTracing({
414
+ apiKey: 'your-api-key',
415
+ enableConsoleExporter: true,
416
+ });
417
+
418
+ // Wrap your graph with instrumentation
419
+ const graph = new StateGraph(/* ... */);
420
+
421
+ // Option 1: Instrument entire graph
422
+ const instrumentedGraph = instrumentLangGraph(graph, {
423
+ traceGraphExecution: true,
424
+ traceNodeExecution: true,
425
+ captureGraphState: true,
426
+ });
427
+
428
+ // Option 2: Wrap individual nodes
429
+ const processNode = createTracedNode(
430
+ 'process-step',
431
+ async (state) => {
432
+ // Node logic
433
+ return { result: 'done' };
434
+ }
435
+ );
436
+
437
+ graph.addNode('process', processNode);
438
+
439
+ // Option 3: Trace conditional logic
440
+ const router = createTracedConditional(
441
+ 'route-decision',
442
+ (state) => {
443
+ return state.requiresApproval ? 'approval' : 'processing';
444
+ }
445
+ );
446
+
447
+ graph.addConditionalEdges('decide', router);
448
+ ```
449
+
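+ The instrumented graph is compiled and run like any other LangGraph graph; the sketch below assumes `instrumentLangGraph` returns a graph you can compile as usual:
+
+ ```typescript
+ // Compile and invoke as usual; graph, node, and edge spans are emitted during execution
+ const app = instrumentedGraph.compile();
+
+ // The initial state shape depends on your graph definition
+ const finalState = await app.invoke({ input: 'Start the workflow' });
+ ```
+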
450
+ **Captured Graph Metrics:**
451
+
452
+ ```typescript
453
+ // Automatically traced:
454
+ {
455
+ 'graph.name': 'my-agent-graph',
456
+ 'graph.step': 2,
457
+ 'node.name': 'process-step',
458
+ 'node.type': 'regular|start|end',
459
+ 'node.duration_ms': 250,
460
+ 'edge.from': 'start',
461
+ 'edge.to': 'process',
462
+ 'conditional.decision': 'approval',
463
+ 'graph.state': { /* full state if enabled */ },
464
+ 'error.type': 'NodeExecutionError', // if applicable
465
+ }
466
+ ```
467
+
468
+ #### Ollama Integration
469
+
470
+ Run local LLMs through Ollama with automatic tracing:
471
+
472
+ ```typescript
473
+ import { startTracing } from '@traccia/sdk';
474
+ import { createOllamaChatbot } from '@traccia/sdk/integrations';
475
+
476
+ // Initialize tracing
477
+ await startTracing({
478
+ enableConsoleExporter: true,
479
+ });
480
+
481
+ // Create a chatbot using local Ollama model
482
+ const chatbot = await createOllamaChatbot({
483
+ model: 'mistral', // Download with: ollama pull mistral
484
+ baseUrl: 'http://localhost:11434', // Default Ollama port
485
+ systemPrompt: 'You are a helpful assistant.',
486
+ temperature: 0.7,
487
+ });
488
+
489
+ // Use the chatbot - fully traced!
490
+ const response = await chatbot('What is machine learning?');
491
+ console.log(response);
492
+ ```
493
+
494
+ **Setup Instructions:**
495
+
496
+ ```typescript
497
+ import { getOllamaSetupInstructions } from '@traccia/sdk/integrations';
498
+
499
+ console.log(getOllamaSetupInstructions());
500
+ // Prints installation and setup steps
501
+ ```
502
+
503
+ **Quick Start:**
504
+
505
+ 1. Install Ollama: https://ollama.ai/download
506
+ 2. Start Ollama: `ollama serve`
507
+ 3. Pull a model: `ollama pull mistral`
508
+ 4. Use in code:
509
+
510
+ ```typescript
511
+ import { createOllamaChatbot } from '@traccia/sdk/integrations';
512
+
513
+ const chatbot = await createOllamaChatbot({
514
+ model: 'mistral',
515
+ });
516
+
517
+ const response = await chatbot('Hello!');
518
+ // Automatically traced!
519
+ ```
520
+
521
+ **Popular Ollama Models:**
522
+
523
+ ```typescript
524
+ import { POPULAR_OLLAMA_MODELS } from '@traccia/sdk/integrations';
525
+
526
+ // mistral - Fast 7B model (recommended for speed)
527
+ // neural-chat - Optimized for conversations
528
+ // llama2 - Versatile 7B model
529
+ // orca-mini - Small 3B model (fastest)
530
+ // dolphin-mixtral - High quality but larger
531
+ // opencodeup - Specialized for code
532
+
533
+ for (const model of POPULAR_OLLAMA_MODELS) {
534
+ console.log(`${model.name}: ${model.description}`);
535
+ console.log(`Size: ${model.size}`);
536
+ console.log(`Install: ${model.command}`);
537
+ }
538
+ ```
539
+
540
+ **Streaming Responses:**
541
+
542
+ For models that support streaming, stream responses as they're generated:
543
+
544
+ ```typescript
545
+ import { createOllamaStreamingChatbot } from '@traccia/sdk/integrations';
546
+
547
+ const chatbot = await createOllamaStreamingChatbot({
548
+ model: 'mistral',
549
+ onChunk: (chunk) => process.stdout.write(chunk), // Print as it arrives
550
+ });
551
+
552
+ await chatbot('Write a story about a robot');
553
+ // Output streams in real-time!
554
+ ```
555
+
556
+ **Automatic Tracing Features:**
557
+
558
+ - Model name and configuration tracking
559
+ - Input/output length tracking
560
+ - Latency measurement
561
+ - Error recording
562
+ - Streaming span management
563
+
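+ A minimal sketch of how these features surface in practice, using the `startTracing`/`stopTracing` and `createOllamaChatbot` APIs shown earlier in this README:
+
+ ```typescript
+ import { startTracing, stopTracing } from '@traccia/sdk'; // assumes stopTracing is exported alongside startTracing
+ import { createOllamaChatbot } from '@traccia/sdk/integrations';
+
+ await startTracing({ enableConsoleExporter: true });
+
+ const chatbot = await createOllamaChatbot({ model: 'mistral' });
+
+ try {
+   // Model name/configuration, input/output lengths, and latency are recorded on the span
+   const answer = await chatbot('Summarize the benefits of tracing in one sentence.');
+   console.log(answer);
+ } catch (error) {
+   // Failures (e.g. Ollama not running) are captured by the error-recording feature above
+   console.error(error);
+ } finally {
+   // Shut down tracing when done
+   await stopTracing();
+ }
+ ```
+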
330
564
  ### Custom Exporters
331
565
 
332
566
  Implement the `ISpanExporter` interface:
@@ -0,0 +1,108 @@
1
+ /**
2
+ * Automatic LangChain instrumentation - simple one-line setup.
3
+ *
4
+ * Instead of manually passing callbacks to every component, use these
5
+ * convenience functions for automatic instrumentation with zero boilerplate.
6
+ */
7
+ import { TraciaCallbackHandler } from './langchain-callback';
8
+ /**
9
+ * Get or create the global Traccia callback handler.
10
+ *
11
+ * @example
12
+ * // Instead of:
13
+ * const handler = new TraciaCallbackHandler();
14
+ * const model = new ChatOpenAI({ callbacks: [handler] });
15
+ *
16
+ * // Just do:
17
+ * const model = new ChatOpenAI({ callbacks: [getTraciaHandler()] });
18
+ */
19
+ export declare function getTraciaHandler(): TraciaCallbackHandler;
20
+ /**
21
+ * Wrap any LangChain model/chain/agent with automatic tracing.
22
+ *
23
+ * @example
24
+ * const model = new ChatOpenAI({ modelName: 'gpt-4' });
25
+ * const tracedModel = withTracing(model);
26
+ *
27
+ * const response = await tracedModel.invoke({ input: 'Hello' });
28
+ * // Automatically traced!
29
+ */
30
+ export declare function withTracing<T extends any>(component: T): T;
31
+ /**
32
+ * Create a traced ChatOpenAI model with one line.
33
+ *
34
+ * @example
35
+ * const model = createTracedOpenAI({ modelName: 'gpt-4' });
36
+ * const response = await model.invoke({ input: 'Hello' });
37
+ */
38
+ export declare function createTracedOpenAI(config: any): Promise<any>;
39
+ /**
40
+ * Create a traced agent executor with one line.
41
+ *
42
+ * @example
43
+ * const executor = createTracedAgentExecutor({
44
+ * agent,
45
+ * tools,
46
+ * agentExecutorOptions: { maxIterations: 10 }
47
+ * });
48
+ *
49
+ * const result = await executor.invoke({ input: 'What time is it?' });
50
+ */
51
+ export declare function createTracedAgentExecutor(options: {
52
+ agent: any;
53
+ tools: any[];
54
+ agentExecutorOptions?: Record<string, any>;
55
+ }): Promise<any>;
56
+ /**
57
+ * Create a traced LLMChain with one line.
58
+ *
59
+ * @example
60
+ * const chain = createTracedLLMChain({
61
+ * llm: new ChatOpenAI(),
62
+ * prompt: chatPrompt
63
+ * });
64
+ *
65
+ * const result = await chain.invoke({ question: 'Hello?' });
66
+ */
67
+ export declare function createTracedLLMChain(options: {
68
+ llm: any;
69
+ prompt: any;
70
+ }): Promise<any>;
71
+ /**
72
+ * Decorator for methods that should be traced.
73
+ *
74
+ * @example
75
+ * class MyAgent {
76
+ * @traced('agent-process')
77
+ * async process(input: string) {
78
+ * return await this.llm.invoke({ input });
79
+ * }
80
+ * }
81
+ *
82
+ * const agent = new MyAgent();
83
+ * const result = await agent.process('Hello'); // Automatically traced!
84
+ */
85
+ export declare function traced(spanName: string): (_target: any, propertyKey: string, descriptor: PropertyDescriptor) => PropertyDescriptor;
86
+ /**
87
+ * Simple configuration helper for common LangChain setup patterns.
88
+ *
89
+ * @example
90
+ * const { model, executor } = await setupLangChainWithTracing({
91
+ * modelName: 'gpt-4',
92
+ * tools: [weatherTool, calculatorTool],
93
+ * systemPrompt: 'You are a helpful assistant.'
94
+ * });
95
+ *
96
+ * const result = await executor.invoke({ input: 'What is the weather?' });
97
+ */
98
+ export declare function setupLangChainWithTracing(options: {
99
+ modelName?: string;
100
+ modelConfig?: Record<string, any>;
101
+ tools?: any[];
102
+ systemPrompt?: string;
103
+ }): Promise<{
104
+ model: any;
105
+ executor: any;
106
+ handler: TraciaCallbackHandler;
107
+ }>;
108
+ //# sourceMappingURL=auto-langchain.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"auto-langchain.d.ts","sourceRoot":"","sources":["../../src/integrations/auto-langchain.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,EAAE,qBAAqB,EAAE,MAAM,sBAAsB,CAAC;AAO7D;;;;;;;;;;GAUG;AACH,wBAAgB,gBAAgB,IAAI,qBAAqB,CAKxD;AAED;;;;;;;;;GASG;AACH,wBAAgB,WAAW,CAAC,CAAC,SAAS,GAAG,EAAE,SAAS,EAAE,CAAC,GAAG,CAAC,CAkB1D;AAED;;;;;;GAMG;AACH,wBAAsB,kBAAkB,CAAC,MAAM,EAAE,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,CAWlE;AAED;;;;;;;;;;;GAWG;AACH,wBAAsB,yBAAyB,CAAC,OAAO,EAAE;IACvD,KAAK,EAAE,GAAG,CAAC;IACX,KAAK,EAAE,GAAG,EAAE,CAAC;IACb,oBAAoB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;CAC5C,GAAG,OAAO,CAAC,GAAG,CAAC,CAmBf;AAED;;;;;;;;;;GAUG;AACH,wBAAsB,oBAAoB,CAAC,OAAO,EAAE;IAClD,GAAG,EAAE,GAAG,CAAC;IACT,MAAM,EAAE,GAAG,CAAC;CACb,GAAG,OAAO,CAAC,GAAG,CAAC,CAkBf;AAED;;;;;;;;;;;;;GAaG;AACH,wBAAgB,MAAM,CAAC,QAAQ,EAAE,MAAM,IAEnC,SAAS,GAAG,EACZ,aAAa,MAAM,EACnB,YAAY,kBAAkB,wBA0BjC;AAED;;;;;;;;;;;GAWG;AACH,wBAAsB,yBAAyB,CAAC,OAAO,EAAE;IACvD,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,WAAW,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAClC,KAAK,CAAC,EAAE,GAAG,EAAE,CAAC;IACd,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB,GAAG,OAAO,CAAC;IACV,KAAK,EAAE,GAAG,CAAC;IACX,QAAQ,EAAE,GAAG,CAAC;IACd,OAAO,EAAE,qBAAqB,CAAC;CAChC,CAAC,CA8DD"}
@@ -0,0 +1,276 @@
1
+ "use strict";
2
+ /**
3
+ * Automatic LangChain instrumentation - simple one-line setup.
4
+ *
5
+ * Instead of manually passing callbacks to every component, use these
6
+ * convenience functions for automatic instrumentation with zero boilerplate.
7
+ */
8
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
9
+ if (k2 === undefined) k2 = k;
10
+ var desc = Object.getOwnPropertyDescriptor(m, k);
11
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
12
+ desc = { enumerable: true, get: function() { return m[k]; } };
13
+ }
14
+ Object.defineProperty(o, k2, desc);
15
+ }) : (function(o, m, k, k2) {
16
+ if (k2 === undefined) k2 = k;
17
+ o[k2] = m[k];
18
+ }));
19
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
20
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
21
+ }) : function(o, v) {
22
+ o["default"] = v;
23
+ });
24
+ var __importStar = (this && this.__importStar) || (function () {
25
+ var ownKeys = function(o) {
26
+ ownKeys = Object.getOwnPropertyNames || function (o) {
27
+ var ar = [];
28
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
29
+ return ar;
30
+ };
31
+ return ownKeys(o);
32
+ };
33
+ return function (mod) {
34
+ if (mod && mod.__esModule) return mod;
35
+ var result = {};
36
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
37
+ __setModuleDefault(result, mod);
38
+ return result;
39
+ };
40
+ })();
41
+ Object.defineProperty(exports, "__esModule", { value: true });
42
+ exports.getTraciaHandler = getTraciaHandler;
43
+ exports.withTracing = withTracing;
44
+ exports.createTracedOpenAI = createTracedOpenAI;
45
+ exports.createTracedAgentExecutor = createTracedAgentExecutor;
46
+ exports.createTracedLLMChain = createTracedLLMChain;
47
+ exports.traced = traced;
48
+ exports.setupLangChainWithTracing = setupLangChainWithTracing;
49
+ const langchain_callback_1 = require("./langchain-callback");
50
+ /**
51
+ * Global handler instance to avoid creating multiple handlers
52
+ */
53
+ let globalTraciaHandler = null;
54
+ /**
55
+ * Get or create the global Traccia callback handler.
56
+ *
57
+ * @example
58
+ * // Instead of:
59
+ * const handler = new TraciaCallbackHandler();
60
+ * const model = new ChatOpenAI({ callbacks: [handler] });
61
+ *
62
+ * // Just do:
63
+ * const model = new ChatOpenAI({ callbacks: [getTraciaHandler()] });
64
+ */
65
+ function getTraciaHandler() {
66
+ if (!globalTraciaHandler) {
67
+ globalTraciaHandler = new langchain_callback_1.TraciaCallbackHandler();
68
+ }
69
+ return globalTraciaHandler;
70
+ }
71
+ /**
72
+ * Wrap any LangChain model/chain/agent with automatic tracing.
73
+ *
74
+ * @example
75
+ * const model = new ChatOpenAI({ modelName: 'gpt-4' });
76
+ * const tracedModel = withTracing(model);
77
+ *
78
+ * const response = await tracedModel.invoke({ input: 'Hello' });
79
+ * // Automatically traced!
80
+ */
81
+ function withTracing(component) {
82
+ const handler = getTraciaHandler();
83
+ // For models and chains, add the handler to callbacks
84
+ if (component && typeof component === 'object') {
85
+ if ('callbacks' in component) {
86
+ // If callbacks exist, add our handler
87
+ const existing = component.callbacks || [];
88
+ component.callbacks = Array.isArray(existing)
89
+ ? [...existing, handler]
90
+ : [existing, handler];
91
+ }
92
+ else {
93
+ // Otherwise create callbacks array
94
+ component.callbacks = [handler];
95
+ }
96
+ }
97
+ return component;
98
+ }
99
+ /**
100
+ * Create a traced ChatOpenAI model with one line.
101
+ *
102
+ * @example
103
+ * const model = createTracedOpenAI({ modelName: 'gpt-4' });
104
+ * const response = await model.invoke({ input: 'Hello' });
105
+ */
106
+ async function createTracedOpenAI(config) {
107
+ try {
108
+ // eslint-disable-next-line @typescript-eslint/no-var-requires
109
+ const langchainOpenai = require('@langchain/openai');
110
+ const model = new langchainOpenai.ChatOpenAI(config);
111
+ return withTracing(model);
112
+ }
113
+ catch (error) {
114
+ throw new Error('Failed to create traced ChatOpenAI. Make sure @langchain/openai is installed.');
115
+ }
116
+ }
117
+ /**
118
+ * Create a traced agent executor with one line.
119
+ *
120
+ * @example
121
+ * const executor = createTracedAgentExecutor({
122
+ * agent,
123
+ * tools,
124
+ * agentExecutorOptions: { maxIterations: 10 }
125
+ * });
126
+ *
127
+ * const result = await executor.invoke({ input: 'What time is it?' });
128
+ */
129
+ async function createTracedAgentExecutor(options) {
130
+ try {
131
+ // eslint-disable-next-line @typescript-eslint/no-var-requires
132
+ const langchainAgents = require('langchain/agents');
133
+ const { agent, tools, agentExecutorOptions = {} } = options;
134
+ const executor = langchainAgents.AgentExecutor.fromAgentAndTools({
135
+ agent,
136
+ tools,
137
+ ...agentExecutorOptions,
138
+ callbacks: [getTraciaHandler(), ...(agentExecutorOptions.callbacks || [])],
139
+ });
140
+ return executor;
141
+ }
142
+ catch (error) {
143
+ throw new Error('Failed to create traced AgentExecutor. Make sure langchain/agents is installed.');
144
+ }
145
+ }
146
+ /**
147
+ * Create a traced LLMChain with one line.
148
+ *
149
+ * @example
150
+ * const chain = createTracedLLMChain({
151
+ * llm: new ChatOpenAI(),
152
+ * prompt: chatPrompt
153
+ * });
154
+ *
155
+ * const result = await chain.invoke({ question: 'Hello?' });
156
+ */
157
+ async function createTracedLLMChain(options) {
158
+ try {
159
+ // eslint-disable-next-line @typescript-eslint/no-var-requires
160
+ const langchainChains = require('langchain/chains');
161
+ const { llm, prompt } = options;
162
+ const chain = new langchainChains.LLMChain({
163
+ llm: withTracing(llm),
164
+ prompt,
165
+ callbacks: [getTraciaHandler()],
166
+ });
167
+ return chain;
168
+ }
169
+ catch (error) {
170
+ throw new Error('Failed to create traced LLMChain. Make sure langchain/chains is installed.');
171
+ }
172
+ }
173
+ /**
174
+ * Decorator for methods that should be traced.
175
+ *
176
+ * @example
177
+ * class MyAgent {
178
+ * @traced('agent-process')
179
+ * async process(input: string) {
180
+ * return await this.llm.invoke({ input });
181
+ * }
182
+ * }
183
+ *
184
+ * const agent = new MyAgent();
185
+ * const result = await agent.process('Hello'); // Automatically traced!
186
+ */
187
+ function traced(spanName) {
188
+ return function (_target, propertyKey, descriptor) {
189
+ const originalMethod = descriptor.value;
190
+ descriptor.value = async function (...args) {
191
+ const { getTracer } = await Promise.resolve().then(() => __importStar(require('../auto')));
192
+ const tracer = getTracer('decorated-method');
193
+ return tracer.startActiveSpan(spanName, async (span) => {
194
+ try {
195
+ span.setAttribute('method', propertyKey);
196
+ span.setAttribute('args_count', args.length);
197
+ const result = await originalMethod.apply(this, args);
198
+ span.setAttribute('success', true);
199
+ return result;
200
+ }
201
+ catch (error) {
202
+ if (error instanceof Error) {
203
+ span.recordException(error);
204
+ }
205
+ throw error;
206
+ }
207
+ });
208
+ };
209
+ return descriptor;
210
+ };
211
+ }
212
+ /**
213
+ * Simple configuration helper for common LangChain setup patterns.
214
+ *
215
+ * @example
216
+ * const { model, executor } = await setupLangChainWithTracing({
217
+ * modelName: 'gpt-4',
218
+ * tools: [weatherTool, calculatorTool],
219
+ * systemPrompt: 'You are a helpful assistant.'
220
+ * });
221
+ *
222
+ * const result = await executor.invoke({ input: 'What is the weather?' });
223
+ */
224
+ async function setupLangChainWithTracing(options) {
225
+ try {
226
+ const { modelName = 'gpt-4', modelConfig = {}, tools = [], systemPrompt, } = options;
227
+ // Create traced model
228
+ const model = await createTracedOpenAI({
229
+ modelName,
230
+ ...modelConfig,
231
+ });
232
+ // Create agent if tools provided
233
+ let executor = null;
234
+ if (tools.length > 0) {
235
+ try {
236
+ // Try to create agent with tools
237
+ const langchainAgents = require('langchain/agents');
238
+ const langchainCore = require('@langchain/core/prompts');
239
+ // Create prompt
240
+ const prompt = langchainCore.ChatPromptTemplate.fromMessages([
241
+ ...(systemPrompt
242
+ ? [['system', systemPrompt]]
243
+ : [['system', 'You are a helpful assistant.']]),
244
+ ['human', '{input}'],
245
+ new langchainCore.MessagesPlaceholder('agent_scratchpad'),
246
+ ]);
247
+ // Create agent
248
+ const agent = await langchainAgents.createOpenAIToolsAgent({
249
+ llmWithTools: model,
250
+ tools,
251
+ prompt,
252
+ callbacks: [getTraciaHandler()],
253
+ });
254
+ // Create executor
255
+ executor = await createTracedAgentExecutor({
256
+ agent,
257
+ tools,
258
+ });
259
+ }
260
+ catch (error) {
261
+ // If agent creation fails, just return model without executor
262
+ console.warn('Could not create agent executor:', error.message);
263
+ }
264
+ }
265
+ return {
266
+ model,
267
+ executor,
268
+ handler: getTraciaHandler(),
269
+ };
270
+ }
271
+ catch (error) {
272
+ const err = error;
273
+ throw new Error(`Failed to setup LangChain with tracing: ${err.message}`);
274
+ }
275
+ }
276
+ //# sourceMappingURL=auto-langchain.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"auto-langchain.js","sourceRoot":"","sources":["../../src/integrations/auto-langchain.ts"],"names":[],"mappings":";AAAA;;;;;GAKG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAoBH,4CAKC;AAYD,kCAkBC;AASD,gDAWC;AAcD,8DAuBC;AAaD,oDAqBC;AAgBD,wBA8BC;AAcD,8DAuEC;AAnRD,6DAA6D;AAE7D;;GAEG;AACH,IAAI,mBAAmB,GAAiC,IAAI,CAAC;AAE7D;;;;;;;;;;GAUG;AACH,SAAgB,gBAAgB;IAC9B,IAAI,CAAC,mBAAmB,EAAE,CAAC;QACzB,mBAAmB,GAAG,IAAI,0CAAqB,EAAE,CAAC;IACpD,CAAC;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AAED;;;;;;;;;GASG;AACH,SAAgB,WAAW,CAAgB,SAAY;IACrD,MAAM,OAAO,GAAG,gBAAgB,EAAE,CAAC;IAEnC,sDAAsD;IACtD,IAAI,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE,CAAC;QAC/C,IAAI,WAAW,IAAI,SAAS,EAAE,CAAC;YAC7B,sCAAsC;YACtC,MAAM,QAAQ,GAAI,SAAiB,CAAC,SAAS,IAAI,EAAE,CAAC;YACnD,SAAiB,CAAC,SAAS,GAAG,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC;gBACpD,CAAC,CAAC,CAAC,GAAG,QAAQ,EAAE,OAAO,CAAC;gBACxB,CAAC,CAAC,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;QAC1B,CAAC;aAAM,CAAC;YACN,mCAAmC;YAClC,SAAiB,CAAC,SAAS,GAAG,CAAC,OAAO,CAAC,CAAC;QAC3C,CAAC;IACH,CAAC;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;;;GAMG;AACI,KAAK,UAAU,kBAAkB,CAAC,MAAW;IAClD,IAAI,CAAC;QACH,8DAA8D;QAC9D,MAAM,eAAe,GAAG,OAAO,CAAC,mBAAmB,CAAC,CAAC;QACrD,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QACrD,OAAO,WAAW,CAAC,KAAK,CAAC,CAAC;IAC5B,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,IAAI,KAAK,CACb,+EAA+E,CAChF,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;;;;;;;;;GAWG;AACI,KAAK,UAAU,yBAAyB,CAAC,OAI/C;IACC,IAAI,CAAC;QACH,8DAA8D;QAC9D,MAAM,eAAe,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QACpD,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,oBAAoB,GAAG,EAAE,EAAE,GAAG,OAAO,CAAC;QAE5D,MAAM,QAAQ,GAAG,eAAe,CAAC,aAAa,CAAC,iBAAiB,CAAC;YAC/D,KAAK;YACL,KAAK;YACL,GAAG,oBAAoB;YACvB,SAAS,EAAE,CAAC,gBAAgB,EAAE,EAAE,GAAG,CAAC,oBAAoB,CAAC,SAAS,IAAI,EAAE,CAAC,CAAC;SAC3E,CAAC,CAAC;QAEH,OAAO,QAAQ,CAAC;IAClB,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,IAAI,KAAK,CACb,iFAAiF,CAClF,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;;;;;;;;GAUG;AACI,KAAK,UAAU,oBAAoB,CAAC,OAG1C;IACC,IAAI,CAAC;QACH,8DAA8D;QAC9D,MAAM,eAAe,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC;QACpD,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC;QAEhC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC;YACzC,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC;YACrB,MAAM;YACN,SAAS,EAAE,CAAC,gBAAgB,EAAE,CAAC;SAChC,CAAC,CAAC;QAEH,OAAO,KAAK,CAAC;IACf,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,IAAI,KAAK,CACb,4EAA4E,CAC7E,CAAC;IACJ,CAAC;AACH,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,SAAgB,MAAM,CAAC,QAAgB;IACrC,OAAO,UACL,OAAY,EACZ,WAAmB,EACnB,UAA8B;QAE9B,MAAM,cAAc,GAAG,UAAU,CAAC,KAAK,CAAC;QAExC,UAAU,CAAC,KAAK,GAAG,KAAK,WAAW,GAAG,IAAW;YAC/C,MAAM,EAAE,SAAS,EAAE,GAAG,wDAAa,SAAS,GAAC,CAAC;YAC9C,MAAM,MAAM,GAAG,SAAS,CAAC,kBAAkB,CAAC,CAAC;YAE7C,OAAO,MAAM,CAAC,eAAe,CAAC,QAAQ,EAAE,KAAK,EAAE,IAAI,EAAE,EAAE;gBACrD,IAAI,CAAC;oBACH,IAAI,CAAC,YAAY,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;oBACzC,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;oBAC7C,MAAM,MAAM,GAAG,MAAM,cAAc,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;oBACtD,IAAI,CAAC,YAAY,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;oBACnC,OAAO,MAAM,CAAC;gBAChB,CAAC;gBAAC,OAAO,KAAK,EAAE,CAAC;oBACf,IAAI,KAAK,YAAY,KAAK,EAAE,CAAC;wBAC3B,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC;oBAC9B,CAAC;oBACD,MAAM,KAAK,CAAC;gBACd,CAAC;YACH,CAAC,CAAC,CAAC;QACL,CAAC,CAAC;QAEF,OAAO,UAAU,CAAC;IACpB,CAAC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;GAWG;AACI,KAAK,UAAU,yBAAyB,CAAC,OAK/C;IAKC,IAAI,CAAC;QACH,MAAM,EACJ,SAAS,GAAG,OAAO,EACnB,WAAW,GAAG,EAAE,EAChB,KAAK,GAAG,EAAE,EACV,YAAY,GACb,GAAG,OAAO,CAAC;QAEZ,sBAAsB;QACtB,MAAM,KAAK,GAAG,MAAM,kBAAkB,CAAC;YACrC,SAAS;YACT,GAAG,WAAW;SACf,CAAC,CAAC;QAEH,iCAAiC;QACjC,IAAI,QAAQ,GAAG,IAAI,CAAC;QACpB,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACrB,IAAI,CAAC;gBACH,iCAAiC;gBACjC,MAAM,eAAe,GAAG,OAAO,
CAAC,kBAAkB,CAAC,CAAC;gBACpD,MAAM,aAAa,GAAG,OAAO,CAAC,yBAAyB,CAAC,CAAC;gBAEzD,gBAAgB;gBAChB,MAAM,MAAM,GAAG,aAAa,CAAC,kBAAkB,CAAC,YAAY,CAAC;oBAC3D,GAAG,CAAC,YAAY;wBACd,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;wBAC5B,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,8BAA8B,CAAC,CAAC,CAAC;oBACjD,CAAC,OAAO,EAAE,SAAS,CAAC;oBACpB,IAAI,aAAa,CAAC,mBAAmB,CAAC,kBAAkB,CAAC;iBAC1D,CAAC,CAAC;gBAEH,eAAe;gBACf,MAAM,KAAK,GAAG,MAAM,eAAe,CAAC,sBAAsB,CAAC;oBACzD,YAAY,EAAE,KAAK;oBACnB,KAAK;oBACL,MAAM;oBACN,SAAS,EAAE,CAAC,gBAAgB,EAAE,CAAC;iBAChC,CAAC,CAAC;gBAEH,kBAAkB;gBAClB,QAAQ,GAAG,MAAM,yBAAyB,CAAC;oBACzC,KAAK;oBACL,KAAK;iBACN,CAAC,CAAC;YACL,CAAC;YAAC,OAAO,KAAK,EAAE,CAAC;gBACf,8DAA8D;gBAC9D,OAAO,CAAC,IAAI,CAAC,kCAAkC,EAAG,KAAe,CAAC,OAAO,CAAC,CAAC;YAC7E,CAAC;QACH,CAAC;QAED,OAAO;YACL,KAAK;YACL,QAAQ;YACR,OAAO,EAAE,gBAAgB,EAAE;SAC5B,CAAC;IACJ,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,MAAM,GAAG,GAAG,KAAc,CAAC;QAC3B,MAAM,IAAI,KAAK,CACb,2CAA2C,GAAG,CAAC,OAAO,EAAE,CACzD,CAAC;IACJ,CAAC;AACH,CAAC"}