@riotprompt/riotprompt 0.0.12 → 0.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +121 -21
- package/dist/builder.js +3 -0
- package/dist/builder.js.map +1 -1
- package/dist/cli.cjs +1519 -0
- package/dist/cli.d.ts +8 -0
- package/dist/config.d.ts +7 -0
- package/dist/conversation-logger.js +6 -1
- package/dist/conversation-logger.js.map +1 -1
- package/dist/execution/anthropic.d.ts +5 -0
- package/dist/execution/anthropic.js +46 -0
- package/dist/execution/anthropic.js.map +1 -0
- package/dist/execution/gemini.d.ts +5 -0
- package/dist/execution/gemini.js +78 -0
- package/dist/execution/gemini.js.map +1 -0
- package/dist/execution/index.d.ts +10 -0
- package/dist/execution/index.js +53 -0
- package/dist/execution/index.js.map +1 -0
- package/dist/execution/openai.d.ts +5 -0
- package/dist/execution/openai.js +44 -0
- package/dist/execution/openai.js.map +1 -0
- package/dist/execution/provider.d.ts +18 -0
- package/dist/loader.js +3 -0
- package/dist/loader.js.map +1 -1
- package/dist/model-config.d.ts +1 -0
- package/dist/model-config.js +19 -18
- package/dist/model-config.js.map +1 -1
- package/dist/override.js +3 -0
- package/dist/override.js.map +1 -1
- package/dist/recipes.js +3 -0
- package/dist/recipes.js.map +1 -1
- package/dist/riotprompt.cjs +612 -77
- package/dist/riotprompt.cjs.map +1 -1
- package/dist/riotprompt.d.ts +3 -0
- package/dist/riotprompt.js +6 -0
- package/dist/riotprompt.js.map +1 -1
- package/dist/serializer.d.ts +5 -0
- package/dist/serializer.js +220 -0
- package/dist/serializer.js.map +1 -0
- package/dist/writer.d.ts +2 -0
- package/dist/writer.js +91 -0
- package/dist/writer.js.map +1 -0
- package/guide/architecture.md +51 -0
- package/guide/configuration.md +51 -0
- package/guide/development.md +62 -0
- package/guide/index.md +55 -0
- package/guide/usage.md +99 -0
- package/package.json +13 -2
- package/vite.config.cli.ts +49 -0
- package/BUG-ANALYSIS.md +0 -523
- package/CODE-REVIEW-SUMMARY.md +0 -330
- package/FIXES-APPLIED.md +0 -437
package/dist/cli.d.ts
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import * as RiotPrompt from './riotprompt';
|
|
2
|
+
export declare function isDirectory(path: string): Promise<boolean>;
|
|
3
|
+
export declare function fileExists(path: string): Promise<boolean>;
|
|
4
|
+
export declare function loadPromptFromDirectory(absolutePromptPath: string): Promise<RiotPrompt.Prompt>;
|
|
5
|
+
export declare function createAction(promptName: string, options: any): Promise<void>;
|
|
6
|
+
export declare function processAction(promptPath: string, options: any): Promise<void>;
|
|
7
|
+
export declare function executeAction(promptPath: string, options: any): Promise<void>;
|
|
8
|
+
export declare function main(): Promise<void>;
|
package/dist/config.d.ts
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
export declare const ConfigSchema: z.ZodObject<{
|
|
3
|
+
defaultModel: z.ZodDefault<z.ZodString>;
|
|
4
|
+
promptsDir: z.ZodDefault<z.ZodString>;
|
|
5
|
+
outputDir: z.ZodOptional<z.ZodString>;
|
|
6
|
+
}, z.core.$strip>;
|
|
7
|
+
export type Config = z.infer<typeof ConfigSchema>;
|
|
@@ -175,7 +175,12 @@ function _define_property(obj, key, value) {
|
|
|
175
175
|
}
|
|
176
176
|
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
177
177
|
const filename = this.config.filenameTemplate.replace('{timestamp}', timestamp).replace('{id}', this.conversationId).replace('{template}', this.metadata.template || 'default');
|
|
178
|
-
|
|
178
|
+
let ext = '.json';
|
|
179
|
+
if (this.config.format === 'markdown') {
|
|
180
|
+
ext = '.md';
|
|
181
|
+
} else if (this.config.format === 'jsonl') {
|
|
182
|
+
ext = '.jsonl';
|
|
183
|
+
}
|
|
179
184
|
const fullPath = path__default.join(this.config.outputPath, filename + ext);
|
|
180
185
|
// Ensure directory exists
|
|
181
186
|
await fs__default.mkdir(path__default.dirname(fullPath), {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"conversation-logger.js","sources":["../src/conversation-logger.ts"],"sourcesContent":["import fs from 'fs/promises';\nimport path from 'path';\nimport { DEFAULT_LOGGER, wrapLogger } from \"./logger\";\nimport type { ConversationMessage, ToolCall } from \"./conversation\";\n\n// ===== TYPE DEFINITIONS =====\n\n/**\n * Log format\n */\nexport type LogFormat = 'json' | 'markdown' | 'jsonl';\n\n/**\n * Log configuration\n */\nexport interface LogConfig {\n enabled: boolean;\n outputPath?: string;\n format?: LogFormat;\n filenameTemplate?: string;\n includeMetadata?: boolean;\n includePrompt?: boolean;\n redactSensitive?: boolean;\n redactPatterns?: RegExp[];\n onSaved?: (path: string) => void;\n onError?: (error: Error) => void;\n}\n\n/**\n * Logged conversation structure\n */\nexport interface LoggedConversation {\n id: string;\n metadata: ConversationLogMetadata;\n prompt?: PromptSnapshot;\n messages: LoggedMessage[];\n summary: ConversationSummary;\n}\n\n/**\n * Conversation metadata for logging\n */\nexport interface ConversationLogMetadata {\n startTime: Date;\n endTime?: Date;\n duration?: number;\n model: string;\n template?: string;\n userContext?: Record<string, any>;\n}\n\n/**\n * Snapshot of prompt configuration\n */\nexport interface PromptSnapshot {\n persona?: string;\n instructions?: string;\n content?: string[];\n context?: string[];\n}\n\n/**\n * Logged message with metadata\n */\nexport interface LoggedMessage {\n index: number;\n timestamp: string;\n role: string;\n content: string | null;\n tool_calls?: ToolCall[];\n tool_call_id?: string;\n metadata?: MessageLogMetadata;\n}\n\n/**\n * Message metadata for logging\n */\nexport interface MessageLogMetadata {\n tokens?: number;\n source?: string;\n latency?: number;\n tool?: string;\n duration?: number;\n success?: boolean;\n [key: string]: any;\n}\n\n/**\n * Conversation summary\n */\nexport interface ConversationSummary {\n totalMessages: number;\n totalTokens?: number;\n 
toolCallsExecuted: number;\n iterations: number;\n finalOutput?: string;\n success: boolean;\n}\n\n/**\n * Tool call log entry\n */\nexport interface ToolCallLog {\n callId: string;\n toolName: string;\n timestamp: string;\n iteration: number;\n arguments: any;\n result: any;\n duration: number;\n success: boolean;\n error?: string;\n}\n\n// ===== CONVERSATION LOGGER =====\n\n/**\n * ConversationLogger logs conversations to various formats.\n *\n * Features:\n * - Multiple formats (JSON, Markdown, JSONL)\n * - Automatic timestamping\n * - Metadata tracking\n * - Sensitive data redaction\n * - Streaming support (JSONL)\n *\n * @example\n * ```typescript\n * const logger = new ConversationLogger({\n * enabled: true,\n * outputPath: 'logs/conversations',\n * format: 'json',\n * includeMetadata: true\n * });\n *\n * logger.onConversationStart({ model: 'gpt-4o', startTime: new Date() });\n * logger.onMessageAdded(message);\n * const path = await logger.save();\n * ```\n */\nexport class ConversationLogger {\n private config: Required<LogConfig>;\n private conversationId: string;\n private metadata: ConversationLogMetadata;\n private messages: LoggedMessage[];\n private toolCalls: ToolCallLog[];\n private startTime: Date;\n private logger: any;\n private messageIndex: number;\n private cachedOutputPath?: string;\n private writeQueue: Promise<void> = Promise.resolve();\n\n constructor(config: LogConfig, logger?: any) {\n this.config = {\n outputPath: 'logs/conversations',\n format: 'json',\n filenameTemplate: 'conversation-{timestamp}',\n includeMetadata: true,\n includePrompt: false,\n redactSensitive: false,\n redactPatterns: [],\n onSaved: () => {},\n onError: () => {},\n ...config,\n } as Required<LogConfig>;\n\n this.conversationId = this.generateId();\n this.messages = [];\n this.toolCalls = [];\n this.startTime = new Date();\n this.messageIndex = 0;\n this.logger = wrapLogger(logger || DEFAULT_LOGGER, 'ConversationLogger');\n\n this.metadata = {\n startTime: 
this.startTime,\n model: 'unknown',\n };\n }\n\n /**\n * Start conversation logging\n */\n onConversationStart(metadata: Partial<ConversationLogMetadata>): void {\n this.metadata = {\n ...this.metadata,\n ...metadata,\n startTime: this.startTime,\n };\n\n // Reset cached output path to prevent file collision if logger is reused\n this.cachedOutputPath = undefined;\n\n this.logger.debug('Conversation logging started', { id: this.conversationId });\n }\n\n /**\n * Log a message\n */\n onMessageAdded(message: ConversationMessage, metadata?: MessageLogMetadata): void {\n let content = message.content;\n\n // Redact sensitive data if enabled\n if (this.config.redactSensitive && content && typeof content === 'string') {\n content = this.redactContent(content);\n }\n\n const loggedMessage: LoggedMessage = {\n index: this.messageIndex++,\n timestamp: new Date().toISOString(),\n role: message.role,\n content,\n tool_calls: message.tool_calls,\n tool_call_id: message.tool_call_id,\n metadata,\n };\n\n this.messages.push(loggedMessage);\n\n // For JSONL format, append immediately with write queue\n if (this.config.format === 'jsonl') {\n this.writeQueue = this.writeQueue\n .then(() => this.appendToJSONL(loggedMessage))\n .catch((error) => {\n this.logger.error('Failed to write JSONL message', { error });\n try {\n this.config.onError?.(error);\n } catch (callbackError) {\n this.logger.error('onError callback failed', { callbackError });\n }\n });\n }\n }\n\n /**\n * Log a tool call\n */\n onToolCall(\n callId: string,\n toolName: string,\n iteration: number,\n args: any,\n result: any,\n duration: number,\n success: boolean,\n error?: string\n ): void {\n this.toolCalls.push({\n callId,\n toolName,\n timestamp: new Date().toISOString(),\n iteration,\n arguments: args,\n result,\n duration,\n success,\n error,\n });\n }\n\n /**\n * End conversation logging\n */\n onConversationEnd(_summary: ConversationSummary): void {\n this.metadata.endTime = new Date();\n 
this.metadata.duration = this.metadata.endTime.getTime() - this.startTime.getTime();\n\n this.logger.debug('Conversation logging ended', {\n messages: this.messages.length,\n duration: this.metadata.duration\n });\n }\n\n /**\n * Save conversation to disk\n */\n async save(): Promise<string> {\n if (!this.config.enabled) {\n return '';\n }\n\n try {\n const outputPath = await this.getOutputPath();\n\n switch (this.config.format) {\n case 'json':\n await this.saveAsJSON(outputPath);\n break;\n case 'markdown':\n await this.saveAsMarkdown(outputPath);\n break;\n case 'jsonl':\n // Already saved during execution\n break;\n }\n\n this.config.onSaved(outputPath);\n this.logger.info('Conversation saved', { path: outputPath });\n\n return outputPath;\n } catch (error) {\n this.config.onError(error as Error);\n this.logger.error('Failed to save conversation', { error });\n throw error;\n }\n }\n\n /**\n * Get logged conversation object\n */\n getConversation(): LoggedConversation {\n return {\n id: this.conversationId,\n metadata: this.metadata,\n messages: this.messages,\n summary: {\n totalMessages: this.messages.length,\n toolCallsExecuted: this.toolCalls.length,\n iterations: 0, // Would need to be tracked externally\n success: true,\n },\n };\n }\n\n /**\n * Generate unique conversation ID\n */\n private generateId(): string {\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n const random = Math.random().toString(36).substring(2, 8);\n return `conv-${timestamp}-${random}`;\n }\n\n /**\n * Get output file path (cached for JSONL to avoid recalculation)\n */\n private async getOutputPath(): Promise<string> {\n if (this.cachedOutputPath) {\n return this.cachedOutputPath;\n }\n\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n const filename = this.config.filenameTemplate\n .replace('{timestamp}', timestamp)\n .replace('{id}', this.conversationId)\n .replace('{template}', this.metadata.template || 'default');\n\n const ext = 
this.config.format === 'markdown' ? '.md' : '.json';\n const fullPath = path.join(this.config.outputPath, filename + ext);\n\n // Ensure directory exists\n await fs.mkdir(path.dirname(fullPath), { recursive: true });\n\n // Cache path for JSONL format to ensure consistent file writes\n if (this.config.format === 'jsonl') {\n this.cachedOutputPath = fullPath;\n }\n\n return fullPath;\n }\n\n /**\n * Save as JSON\n */\n private async saveAsJSON(outputPath: string): Promise<void> {\n const data: LoggedConversation = {\n id: this.conversationId,\n metadata: this.metadata,\n messages: this.messages,\n summary: {\n totalMessages: this.messages.length,\n toolCallsExecuted: this.toolCalls.length,\n iterations: 0,\n success: true,\n },\n };\n\n await fs.writeFile(outputPath, JSON.stringify(data, null, 2), 'utf-8');\n }\n\n /**\n * Save as Markdown\n */\n private async saveAsMarkdown(outputPath: string): Promise<void> {\n let markdown = `# Conversation Log\\n\\n`;\n markdown += `**ID**: ${this.conversationId}\\n`;\n markdown += `**Started**: ${this.metadata.startTime.toISOString()}\\n`;\n if (this.metadata.duration) {\n markdown += `**Duration**: ${(this.metadata.duration / 1000).toFixed(1)}s\\n`;\n }\n markdown += `**Model**: ${this.metadata.model}\\n`;\n if (this.metadata.template) {\n markdown += `**Template**: ${this.metadata.template}\\n`;\n }\n markdown += `\\n## Conversation\\n\\n`;\n\n for (const msg of this.messages) {\n const time = new Date(msg.timestamp).toLocaleTimeString();\n markdown += `### Message ${msg.index + 1} (${time}) - ${msg.role}\\n\\n`;\n\n if (msg.content) {\n markdown += `\\`\\`\\`\\n${msg.content}\\n\\`\\`\\`\\n\\n`;\n }\n\n if (msg.tool_calls) {\n markdown += `**Tool Calls:**\\n`;\n for (const call of msg.tool_calls) {\n markdown += `- ${call.function.name}: \\`${call.function.arguments}\\`\\n`;\n }\n markdown += `\\n`;\n }\n\n if (msg.metadata) {\n markdown += `*Metadata: ${JSON.stringify(msg.metadata)}*\\n\\n`;\n }\n }\n\n markdown += `## 
Summary\\n\\n`;\n markdown += `- **Total Messages**: ${this.messages.length}\\n`;\n markdown += `- **Tool Calls**: ${this.toolCalls.length}\\n`;\n\n await fs.writeFile(outputPath, markdown, 'utf-8');\n }\n\n /**\n * Append to JSONL file (streaming)\n */\n private async appendToJSONL(message: LoggedMessage): Promise<void> {\n const outputPath = await this.getOutputPath();\n const line = JSON.stringify(message) + '\\n';\n await fs.appendFile(outputPath, line, 'utf-8');\n }\n\n /**\n * Redact sensitive content\n */\n private redactContent(content: string): string {\n let redacted = content;\n\n // Apply custom patterns\n for (const pattern of this.config.redactPatterns) {\n redacted = redacted.replace(pattern, '[REDACTED]');\n }\n\n // Default patterns\n const defaultPatterns = [\n /api[_-]?key[\\s:=\"']+[\\w-]+/gi,\n /password[\\s:=\"']+[\\w-]+/gi,\n /Bearer\\s+[\\w-]+/gi,\n /sk-[a-zA-Z0-9]{48}/g,\n ];\n\n for (const pattern of defaultPatterns) {\n redacted = redacted.replace(pattern, '[REDACTED]');\n }\n\n return redacted;\n }\n}\n\n// ===== CONVERSATION REPLAYER =====\n\n/**\n * Replay options\n */\nexport interface ReplayOptions {\n model?: string;\n maxIterations?: number;\n retryFailedTools?: boolean;\n toolTimeout?: number;\n expectSimilarOutput?: boolean;\n}\n\n/**\n * Replay result\n */\nexport interface ReplayResult {\n success: boolean;\n conversation: LoggedConversation;\n errors?: Error[];\n}\n\n/**\n * Comparison result\n */\nexport interface ComparisonResult {\n messageDiff: number;\n toolCallDiff: number;\n tokenDiff?: number;\n outputSimilarity: number;\n costSavings?: number;\n}\n\n/**\n * ConversationReplayer loads and replays logged conversations.\n *\n * Features:\n * - Load from various formats\n * - Replay conversations\n * - Compare replays with originals\n * - Export to different formats\n *\n * @example\n * ```typescript\n * const replayer = await ConversationReplayer.load('logs/conv.json');\n *\n * console.log('Messages:', 
replayer.messages.length);\n * console.log('Tool calls:', replayer.getToolCalls().length);\n *\n * const timeline = replayer.getTimeline();\n * console.log('Events:', timeline.length);\n * ```\n */\nexport class ConversationReplayer {\n private conversation: LoggedConversation;\n private logger: any;\n\n private constructor(conversation: LoggedConversation, logger?: any) {\n this.conversation = conversation;\n this.logger = wrapLogger(logger || DEFAULT_LOGGER, 'ConversationReplayer');\n }\n\n /**\n * Load conversation from file\n */\n static async load(filePath: string, logger?: any): Promise<ConversationReplayer> {\n const wlogger = wrapLogger(logger || DEFAULT_LOGGER, 'ConversationReplayer');\n wlogger.debug('Loading conversation', { path: filePath });\n\n try {\n const content = await fs.readFile(filePath, 'utf-8');\n\n // Determine format by extension\n if (filePath.endsWith('.json')) {\n const data: LoggedConversation = JSON.parse(content);\n return new ConversationReplayer(data, logger);\n } else if (filePath.endsWith('.jsonl')) {\n const lines = content.trim().split('\\n');\n const messages = lines.map(line => JSON.parse(line));\n\n const conversation: LoggedConversation = {\n id: `replayer-${Date.now()}`,\n metadata: {\n startTime: new Date(),\n model: 'unknown'\n },\n messages,\n summary: {\n totalMessages: messages.length,\n toolCallsExecuted: 0,\n iterations: 0,\n success: true\n }\n };\n\n return new ConversationReplayer(conversation, logger);\n } else {\n throw new Error(`Unsupported format: ${filePath}`);\n }\n } catch (error) {\n wlogger.error('Failed to load conversation', { path: filePath, error });\n throw error;\n }\n }\n\n /**\n * Load latest conversation from directory\n */\n static async loadLatest(directory: string, logger?: any): Promise<ConversationReplayer> {\n const files = await fs.readdir(directory);\n const jsonFiles = files.filter(f => f.endsWith('.json')).sort().reverse();\n\n if (jsonFiles.length === 0) {\n throw new Error(`No 
conversation logs found in ${directory}`);\n }\n\n const latestPath = path.join(directory, jsonFiles[0]);\n return ConversationReplayer.load(latestPath, logger);\n }\n\n /**\n * Get all messages\n */\n get messages(): LoggedMessage[] {\n return this.conversation.messages;\n }\n\n /**\n * Get conversation metadata\n */\n getMetadata(): ConversationLogMetadata {\n return { ...this.conversation.metadata };\n }\n\n /**\n * Get tool calls\n */\n getToolCalls(): ToolCallLog[] {\n const toolCalls: ToolCallLog[] = [];\n\n for (const msg of this.conversation.messages) {\n if (msg.tool_calls) {\n for (const call of msg.tool_calls) {\n // Parse arguments with error handling\n let parsedArgs: any;\n try {\n parsedArgs = JSON.parse(call.function.arguments);\n } catch (error) {\n this.logger.warn('Failed to parse tool call arguments', {\n callId: call.id,\n error: error instanceof Error ? error.message : String(error)\n });\n parsedArgs = { __parse_error: true, raw: call.function.arguments };\n }\n\n toolCalls.push({\n callId: call.id,\n toolName: call.function.name,\n timestamp: msg.timestamp,\n iteration: 0, // Would need to be calculated\n arguments: parsedArgs,\n result: null, // Would need to find corresponding tool message\n duration: 0,\n success: true,\n });\n }\n }\n }\n\n return toolCalls;\n }\n\n /**\n * Get message at index\n */\n getMessageAt(index: number): LoggedMessage | undefined {\n return this.conversation.messages[index];\n }\n\n /**\n * Get timeline of events\n */\n getTimeline(): TimelineEvent[] {\n const events: TimelineEvent[] = [];\n\n for (const msg of this.conversation.messages) {\n events.push({\n timestamp: msg.timestamp,\n iteration: 0, // Would need iteration tracking\n type: 'message',\n description: `${msg.role} message`,\n });\n }\n\n return events;\n }\n\n /**\n * Export to format\n */\n async exportToFormat(format: LogFormat, outputPath: string): Promise<string> {\n this.logger.debug('Exporting to format', { format, path: outputPath });\n\n 
switch (format) {\n case 'json':\n await fs.writeFile(outputPath, JSON.stringify(this.conversation, null, 2), 'utf-8');\n break;\n case 'markdown':\n await this.exportMarkdown(outputPath);\n break;\n case 'jsonl': {\n const lines = this.messages.map(m => JSON.stringify(m)).join('\\n');\n await fs.writeFile(outputPath, lines, 'utf-8');\n break;\n }\n }\n\n return outputPath;\n }\n\n /**\n * Export as markdown\n */\n private async exportMarkdown(outputPath: string): Promise<void> {\n let markdown = `# Conversation Log\\n\\n`;\n markdown += `**ID**: ${this.conversation.id}\\n`;\n\n const startTime = typeof this.conversation.metadata.startTime === 'string'\n ? this.conversation.metadata.startTime\n : this.conversation.metadata.startTime.toISOString();\n\n markdown += `**Started**: ${startTime}\\n\\n`;\n\n for (const msg of this.conversation.messages) {\n markdown += `## ${msg.role.toUpperCase()} (${msg.index})\\n\\n`;\n if (msg.content) {\n markdown += `${msg.content}\\n\\n`;\n }\n }\n\n await fs.writeFile(outputPath, markdown, 'utf-8');\n }\n}\n\n/**\n * Timeline event interface\n */\ninterface TimelineEvent {\n timestamp: string;\n iteration: number;\n type: string;\n description: string;\n duration?: number;\n success?: boolean;\n}\n\nexport default 
ConversationLogger;\n\n"],"names":["ConversationLogger","onConversationStart","metadata","startTime","cachedOutputPath","undefined","logger","debug","id","conversationId","onMessageAdded","message","content","config","redactSensitive","redactContent","loggedMessage","index","messageIndex","timestamp","Date","toISOString","role","tool_calls","tool_call_id","messages","push","format","writeQueue","then","appendToJSONL","catch","error","onError","callbackError","onToolCall","callId","toolName","iteration","args","result","duration","success","toolCalls","arguments","onConversationEnd","_summary","endTime","getTime","length","save","enabled","outputPath","getOutputPath","saveAsJSON","saveAsMarkdown","onSaved","info","path","getConversation","summary","totalMessages","toolCallsExecuted","iterations","generateId","replace","random","Math","toString","substring","filename","filenameTemplate","template","ext","fullPath","join","fs","mkdir","dirname","recursive","data","writeFile","JSON","stringify","markdown","toFixed","model","msg","time","toLocaleTimeString","call","function","name","line","appendFile","redacted","pattern","redactPatterns","defaultPatterns","Promise","resolve","includeMetadata","includePrompt","wrapLogger","DEFAULT_LOGGER","ConversationReplayer","load","filePath","wlogger","readFile","endsWith","parse","lines","trim","split","map","conversation","now","Error","loadLatest","directory","files","readdir","jsonFiles","filter","f","sort","reverse","latestPath","getMetadata","getToolCalls","parsedArgs","warn","String","__parse_error","raw","getMessageAt","getTimeline","events","type","description","exportToFormat","exportMarkdown","m","toUpperCase"],"mappings":";;;;;;;;;;;;;;;;;AAkHA;AAEA;;;;;;;;;;;;;;;;;;;;;;;AAuBC,IACM,MAAMA,kBAAAA,CAAAA;AAuCT;;QAGAC,mBAAAA,CAAoBC,QAA0C,EAAQ;QAClE,IAAI,CAACA,QAAQ,GAAG;YACZ,GAAG,IAAI,CAACA,QAAQ;AAChB,YAAA,GAAGA,QAAQ;YACXC,SAAAA,EAAW,IAAI,CAACA;AACpB,SAAA;;QAGA,IAAI,CAACC,gBAAgB,GAAGC,SAAAA;AAExB,QAAA,IAAI,CAACC,MAAM,CAACC,KAAK
,CAAC,8BAAA,EAAgC;YAAEC,EAAAA,EAAI,IAAI,CAACC;AAAe,SAAA,CAAA;AAChF,IAAA;AAEA;;AAEC,QACDC,cAAAA,CAAeC,OAA4B,EAAET,QAA6B,EAAQ;QAC9E,IAAIU,OAAAA,GAAUD,QAAQC,OAAO;;QAG7B,IAAI,IAAI,CAACC,MAAM,CAACC,eAAe,IAAIF,OAAAA,IAAW,OAAOA,OAAAA,KAAY,QAAA,EAAU;YACvEA,OAAAA,GAAU,IAAI,CAACG,aAAa,CAACH,OAAAA,CAAAA;AACjC,QAAA;AAEA,QAAA,MAAMI,aAAAA,GAA+B;YACjCC,KAAAA,EAAO,IAAI,CAACC,YAAY,EAAA;YACxBC,SAAAA,EAAW,IAAIC,OAAOC,WAAW,EAAA;AACjCC,YAAAA,IAAAA,EAAMX,QAAQW,IAAI;AAClBV,YAAAA,OAAAA;AACAW,YAAAA,UAAAA,EAAYZ,QAAQY,UAAU;AAC9BC,YAAAA,YAAAA,EAAcb,QAAQa,YAAY;AAClCtB,YAAAA;AACJ,SAAA;AAEA,QAAA,IAAI,CAACuB,QAAQ,CAACC,IAAI,CAACV,aAAAA,CAAAA;;AAGnB,QAAA,IAAI,IAAI,CAACH,MAAM,CAACc,MAAM,KAAK,OAAA,EAAS;AAChC,YAAA,IAAI,CAACC,UAAU,GAAG,IAAI,CAACA,UAAU,CAC5BC,IAAI,CAAC,IAAM,IAAI,CAACC,aAAa,CAACd,aAAAA,CAAAA,CAAAA,CAC9Be,KAAK,CAAC,CAACC,KAAAA,GAAAA;AACJ,gBAAA,IAAI,CAAC1B,MAAM,CAAC0B,KAAK,CAAC,+BAAA,EAAiC;AAAEA,oBAAAA;AAAM,iBAAA,CAAA;gBAC3D,IAAI;wBACA,oBAAA,EAAA,YAAA;qBAAA,oBAAA,GAAA,CAAA,YAAA,GAAA,IAAI,CAACnB,MAAM,EAACoB,OAAO,MAAA,IAAA,IAAnB,oBAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,oBAAA,CAAA,IAAA,CAAA,YAAA,EAAsBD,KAAAA,CAAAA;AAC1B,gBAAA,CAAA,CAAE,OAAOE,aAAAA,EAAe;AACpB,oBAAA,IAAI,CAAC5B,MAAM,CAAC0B,KAAK,CAAC,yBAAA,EAA2B;AAAEE,wBAAAA;AAAc,qBAAA,CAAA;AACjE,gBAAA;AACJ,YAAA,CAAA,CAAA;AACR,QAAA;AACJ,IAAA;AAEA;;AAEC,QACDC,WACIC,MAAc,EACdC,QAAgB,EAChBC,SAAiB,EACjBC,IAAS,EACTC,MAAW,EACXC,QAAgB,EAChBC,OAAgB,EAChBV,KAAc,EACV;AACJ,QAAA,IAAI,CAACW,SAAS,CAACjB,IAAI,CAAC;AAChBU,YAAAA,MAAAA;AACAC,YAAAA,QAAAA;YACAlB,SAAAA,EAAW,IAAIC,OAAOC,WAAW,EAAA;AACjCiB,YAAAA,SAAAA;YACAM,SAAAA,EAAWL,IAAAA;AACXC,YAAAA,MAAAA;AACAC,YAAAA,QAAAA;AACAC,YAAAA,OAAAA;AACAV,YAAAA;AACJ,SAAA,CAAA;AACJ,IAAA;AAEA;;QAGAa,iBAAAA,CAAkBC,QAA6B,EAAQ;AACnD,QAAA,IAAI,CAAC5C,QAAQ,CAAC6C,OAAO,GAAG,IAAI3B,IAAAA,EAAAA;AAC5B,QAAA,IAAI,CAAClB,QAAQ,CAACuC,QAAQ,GAAG,IAAI,CAACvC,QAAQ,CAAC6C,OAAO,CAACC,OAAO,EAAA,GAAK,IAAI,CAAC7C,SAAS,CAAC6C,OAAO,EAAA;AAEjF,QAAA,IAAI,CAAC1C,MAAM,CAACC,KAAK,CAAC,4BAAA,EAA8B;AAC5CkB,YAAAA,QAAAA,EAAU,IAAI,CAACA,QAAQ,CAACwB,MAAM;AAC9B
R,YAAAA,QAAAA,EAAU,IAAI,CAACvC,QAAQ,CAACuC;AAC5B,SAAA,CAAA;AACJ,IAAA;AAEA;;AAEC,QACD,MAAMS,IAAAA,GAAwB;AAC1B,QAAA,IAAI,CAAC,IAAI,CAACrC,MAAM,CAACsC,OAAO,EAAE;YACtB,OAAO,EAAA;AACX,QAAA;QAEA,IAAI;AACA,YAAA,MAAMC,UAAAA,GAAa,MAAM,IAAI,CAACC,aAAa,EAAA;AAE3C,YAAA,OAAQ,IAAI,CAACxC,MAAM,CAACc,MAAM;gBACtB,KAAK,MAAA;oBACD,MAAM,IAAI,CAAC2B,UAAU,CAACF,UAAAA,CAAAA;AACtB,oBAAA;gBACJ,KAAK,UAAA;oBACD,MAAM,IAAI,CAACG,cAAc,CAACH,UAAAA,CAAAA;AAC1B,oBAAA;gBACJ,KAAK,OAAA;AAED,oBAAA;AACR;AAEA,YAAA,IAAI,CAACvC,MAAM,CAAC2C,OAAO,CAACJ,UAAAA,CAAAA;AACpB,YAAA,IAAI,CAAC9C,MAAM,CAACmD,IAAI,CAAC,oBAAA,EAAsB;gBAAEC,IAAAA,EAAMN;AAAW,aAAA,CAAA;YAE1D,OAAOA,UAAAA;AACX,QAAA,CAAA,CAAE,OAAOpB,KAAAA,EAAO;AACZ,YAAA,IAAI,CAACnB,MAAM,CAACoB,OAAO,CAACD,KAAAA,CAAAA;AACpB,YAAA,IAAI,CAAC1B,MAAM,CAAC0B,KAAK,CAAC,6BAAA,EAA+B;AAAEA,gBAAAA;AAAM,aAAA,CAAA;YACzD,MAAMA,KAAAA;AACV,QAAA;AACJ,IAAA;AAEA;;AAEC,QACD2B,eAAAA,GAAsC;QAClC,OAAO;YACHnD,EAAAA,EAAI,IAAI,CAACC,cAAc;YACvBP,QAAAA,EAAU,IAAI,CAACA,QAAQ;YACvBuB,QAAAA,EAAU,IAAI,CAACA,QAAQ;YACvBmC,OAAAA,EAAS;AACLC,gBAAAA,aAAAA,EAAe,IAAI,CAACpC,QAAQ,CAACwB,MAAM;AACnCa,gBAAAA,iBAAAA,EAAmB,IAAI,CAACnB,SAAS,CAACM,MAAM;gBACxCc,UAAAA,EAAY,CAAA;gBACZrB,OAAAA,EAAS;AACb;AACJ,SAAA;AACJ,IAAA;AAEA;;AAEC,QACD,UAAQsB,GAAqB;AACzB,QAAA,MAAM7C,YAAY,IAAIC,IAAAA,EAAAA,CAAOC,WAAW,EAAA,CAAG4C,OAAO,CAAC,OAAA,EAAS,GAAA,CAAA;QAC5D,MAAMC,MAAAA,GAASC,KAAKD,MAAM,EAAA,CAAGE,QAAQ,CAAC,EAAA,CAAA,CAAIC,SAAS,CAAC,CAAA,EAAG,CAAA,CAAA;AACvD,QAAA,OAAO,CAAC,KAAK,EAAElD,SAAAA,CAAU,CAAC,EAAE+C,MAAAA,CAAAA,CAAQ;AACxC,IAAA;AAEA;;AAEC,QACD,MAAcb,aAAAA,GAAiC;QAC3C,IAAI,IAAI,CAACjD,gBAAgB,EAAE;YACvB,OAAO,IAAI,CAACA,gBAAgB;AAChC,QAAA;AAEA,QAAA,MAAMe,YAAY,IAAIC,IAAAA,EAAAA,CAAOC,WAAW,EAAA,CAAG4C,OAAO,CAAC,OAAA,EAAS,GAAA,CAAA;QAC5D,MAAMK,QAAAA,GAAW,IAAI,CAACzD,MAAM,CAAC0D,gBAAgB,CACxCN,OAAO,CAAC,aAAA,EAAe9C,SAAAA,CAAAA,CACvB8C,OAAO,CAAC,QAAQ,IAAI,CAACxD,cAAc,CAAA,CACnCwD,OAAO,CAAC,YAAA,EAAc,IAAI,CAAC/D,QAAQ,CAACsE,QAAQ,IAAI,SAAA,CAAA;QAErD,MAAMC,GAAAA,GAAM,IAAI,CAAC5D,MAAM,CAACc,MAAM,KAAK,aAAa,KAAA,GAAQ,OAAA;QACxD,MA
AM+C,QAAAA,GAAWhB,aAAAA,CAAKiB,IAAI,CAAC,IAAI,CAAC9D,MAAM,CAACuC,UAAU,EAAEkB,QAAAA,GAAWG,GAAAA,CAAAA;;AAG9D,QAAA,MAAMG,YAAGC,KAAK,CAACnB,aAAAA,CAAKoB,OAAO,CAACJ,QAAAA,CAAAA,EAAW;YAAEK,SAAAA,EAAW;AAAK,SAAA,CAAA;;AAGzD,QAAA,IAAI,IAAI,CAAClE,MAAM,CAACc,MAAM,KAAK,OAAA,EAAS;YAChC,IAAI,CAACvB,gBAAgB,GAAGsE,QAAAA;AAC5B,QAAA;QAEA,OAAOA,QAAAA;AACX,IAAA;AAEA;;QAGA,MAAcpB,UAAAA,CAAWF,UAAkB,EAAiB;AACxD,QAAA,MAAM4B,IAAAA,GAA2B;YAC7BxE,EAAAA,EAAI,IAAI,CAACC,cAAc;YACvBP,QAAAA,EAAU,IAAI,CAACA,QAAQ;YACvBuB,QAAAA,EAAU,IAAI,CAACA,QAAQ;YACvBmC,OAAAA,EAAS;AACLC,gBAAAA,aAAAA,EAAe,IAAI,CAACpC,QAAQ,CAACwB,MAAM;AACnCa,gBAAAA,iBAAAA,EAAmB,IAAI,CAACnB,SAAS,CAACM,MAAM;gBACxCc,UAAAA,EAAY,CAAA;gBACZrB,OAAAA,EAAS;AACb;AACJ,SAAA;QAEA,MAAMkC,WAAAA,CAAGK,SAAS,CAAC7B,UAAAA,EAAY8B,KAAKC,SAAS,CAACH,IAAAA,EAAM,IAAA,EAAM,CAAA,CAAA,EAAI,OAAA,CAAA;AAClE,IAAA;AAEA;;QAGA,MAAczB,cAAAA,CAAeH,UAAkB,EAAiB;QAC5D,IAAIgC,QAAAA,GAAW,CAAC,sBAAsB,CAAC;QACvCA,QAAAA,IAAY,CAAC,QAAQ,EAAE,IAAI,CAAC3E,cAAc,CAAC,EAAE,CAAC;AAC9C2E,QAAAA,QAAAA,IAAY,CAAC,aAAa,EAAE,IAAI,CAAClF,QAAQ,CAACC,SAAS,CAACkB,WAAW,EAAA,CAAG,EAAE,CAAC;AACrE,QAAA,IAAI,IAAI,CAACnB,QAAQ,CAACuC,QAAQ,EAAE;AACxB2C,YAAAA,QAAAA,IAAY,CAAC,cAAc,EAAE,CAAC,IAAI,CAAClF,QAAQ,CAACuC,QAAQ,GAAG,IAAG,EAAG4C,OAAO,CAAC,CAAA,CAAA,CAAG,GAAG,CAAC;AAChF,QAAA;QACAD,QAAAA,IAAY,CAAC,WAAW,EAAE,IAAI,CAAClF,QAAQ,CAACoF,KAAK,CAAC,EAAE,CAAC;AACjD,QAAA,IAAI,IAAI,CAACpF,QAAQ,CAACsE,QAAQ,EAAE;YACxBY,QAAAA,IAAY,CAAC,cAAc,EAAE,IAAI,CAAClF,QAAQ,CAACsE,QAAQ,CAAC,EAAE,CAAC;AAC3D,QAAA;QACAY,QAAAA,IAAY,CAAC,qBAAqB,CAAC;AAEnC,QAAA,KAAK,MAAMG,GAAAA,IAAO,IAAI,CAAC9D,QAAQ,CAAE;AAC7B,YAAA,MAAM+D,OAAO,IAAIpE,IAAAA,CAAKmE,GAAAA,CAAIpE,SAAS,EAAEsE,kBAAkB,EAAA;AACvDL,YAAAA,QAAAA,IAAY,CAAC,YAAY,EAAEG,GAAAA,CAAItE,KAAK,GAAG,CAAA,CAAE,EAAE,EAAEuE,IAAAA,CAAK,IAAI,EAAED,GAAAA,CAAIjE,IAAI,CAAC,IAAI,CAAC;YAEtE,IAAIiE,GAAAA,CAAI3E,OAAO,EAAE;AACbwE,gBAAAA,QAAAA,IAAY,CAAC,QAAQ,EAAEG,IAAI3E,OAAO,CAAC,YAAY,CAAC;AACpD,YAAA;YAEA,IAAI2E,GAAAA,CAAIhE,UAAU,EAAE;gBAChB6D,QAAAA,IAAY,CAAC,iBAAiB,CAAC;AAC/B,gBAAA,KAAK,MAAMM,IA
AAA,IAAQH,GAAAA,CAAIhE,UAAU,CAAE;AAC/B6D,oBAAAA,QAAAA,IAAY,CAAC,EAAE,EAAEM,IAAAA,CAAKC,QAAQ,CAACC,IAAI,CAAC,IAAI,EAAEF,KAAKC,QAAQ,CAAC/C,SAAS,CAAC,IAAI,CAAC;AAC3E,gBAAA;gBACAwC,QAAAA,IAAY,CAAC,EAAE,CAAC;AACpB,YAAA;YAEA,IAAIG,GAAAA,CAAIrF,QAAQ,EAAE;gBACdkF,QAAAA,IAAY,CAAC,WAAW,EAAEF,IAAAA,CAAKC,SAAS,CAACI,GAAAA,CAAIrF,QAAQ,CAAA,CAAE,KAAK,CAAC;AACjE,YAAA;AACJ,QAAA;QAEAkF,QAAAA,IAAY,CAAC,cAAc,CAAC;QAC5BA,QAAAA,IAAY,CAAC,sBAAsB,EAAE,IAAI,CAAC3D,QAAQ,CAACwB,MAAM,CAAC,EAAE,CAAC;QAC7DmC,QAAAA,IAAY,CAAC,kBAAkB,EAAE,IAAI,CAACzC,SAAS,CAACM,MAAM,CAAC,EAAE,CAAC;AAE1D,QAAA,MAAM2B,WAAAA,CAAGK,SAAS,CAAC7B,UAAAA,EAAYgC,QAAAA,EAAU,OAAA,CAAA;AAC7C,IAAA;AAEA;;QAGA,MAActD,aAAAA,CAAcnB,OAAsB,EAAiB;AAC/D,QAAA,MAAMyC,UAAAA,GAAa,MAAM,IAAI,CAACC,aAAa,EAAA;AAC3C,QAAA,MAAMwC,IAAAA,GAAOX,IAAAA,CAAKC,SAAS,CAACxE,OAAAA,CAAAA,GAAW,IAAA;AACvC,QAAA,MAAMiE,WAAAA,CAAGkB,UAAU,CAAC1C,UAAAA,EAAYyC,IAAAA,EAAM,OAAA,CAAA;AAC1C,IAAA;AAEA;;QAGQ9E,aAAAA,CAAcH,OAAe,EAAU;AAC3C,QAAA,IAAImF,QAAAA,GAAWnF,OAAAA;;AAGf,QAAA,KAAK,MAAMoF,OAAAA,IAAW,IAAI,CAACnF,MAAM,CAACoF,cAAc,CAAE;YAC9CF,QAAAA,GAAWA,QAAAA,CAAS9B,OAAO,CAAC+B,OAAAA,EAAS,YAAA,CAAA;AACzC,QAAA;;AAGA,QAAA,MAAME,eAAAA,GAAkB;AACpB,YAAA,8BAAA;AACA,YAAA,2BAAA;AACA,YAAA,mBAAA;AACA,YAAA;AACH,SAAA;QAED,KAAK,MAAMF,WAAWE,eAAAA,CAAiB;YACnCH,QAAAA,GAAWA,QAAAA,CAAS9B,OAAO,CAAC+B,OAAAA,EAAS,YAAA,CAAA;AACzC,QAAA;QAEA,OAAOD,QAAAA;AACX,IAAA;IAhTA,WAAA,CAAYlF,MAAiB,EAAEP,MAAY,CAAE;AAX7C,QAAA,gBAAA,CAAA,IAAA,EAAQO,UAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQJ,kBAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQP,YAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQuB,YAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQkB,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQxC,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQG,UAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQY,gBAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQd,oBAAR,MAAA,CAAA;QACA,gBAAA,CAAA,IAAA,EAAQwB,YAAAA,EAA4BuE,QAAQC,OAAO,EAAA,CAAA;QAG/C,IAAI,CAACvF,MAAM,GAAG;YACVuC,UAAAA,EAAY,oBAAA;YACZzB,MAAAA,EAAQ,MAAA;YACR4C,gBAAAA,EAAkB,0BAAA;YAClB8B,eA
AAA,EAAiB,IAAA;YACjBC,aAAAA,EAAe,KAAA;YACfxF,eAAAA,EAAiB,KAAA;AACjBmF,YAAAA,cAAAA,EAAgB,EAAE;AAClBzC,YAAAA,OAAAA,EAAS,IAAA,CAAO,CAAA;AAChBvB,YAAAA,OAAAA,EAAS,IAAA,CAAO,CAAA;AAChB,YAAA,GAAGpB;AACP,SAAA;AAEA,QAAA,IAAI,CAACJ,cAAc,GAAG,IAAI,CAACuD,UAAU,EAAA;QACrC,IAAI,CAACvC,QAAQ,GAAG,EAAE;QAClB,IAAI,CAACkB,SAAS,GAAG,EAAE;QACnB,IAAI,CAACxC,SAAS,GAAG,IAAIiB,IAAAA,EAAAA;QACrB,IAAI,CAACF,YAAY,GAAG,CAAA;AACpB,QAAA,IAAI,CAACZ,MAAM,GAAGiG,UAAAA,CAAWjG,UAAUkG,cAAAA,EAAgB,oBAAA,CAAA;QAEnD,IAAI,CAACtG,QAAQ,GAAG;YACZC,SAAAA,EAAW,IAAI,CAACA,SAAS;YACzBmF,KAAAA,EAAO;AACX,SAAA;AACJ,IAAA;AAwRJ;AAmCA;;;;;;;;;;;;;;;;;;;AAmBC,IACM,MAAMmB,oBAAAA,CAAAA;AAST;;AAEC,QACD,aAAaC,IAAAA,CAAKC,QAAgB,EAAErG,MAAY,EAAiC;QAC7E,MAAMsG,OAAAA,GAAUL,UAAAA,CAAWjG,MAAAA,IAAUkG,cAAAA,EAAgB,sBAAA,CAAA;QACrDI,OAAAA,CAAQrG,KAAK,CAAC,sBAAA,EAAwB;YAAEmD,IAAAA,EAAMiD;AAAS,SAAA,CAAA;QAEvD,IAAI;AACA,YAAA,MAAM/F,OAAAA,GAAU,MAAMgE,WAAAA,CAAGiC,QAAQ,CAACF,QAAAA,EAAU,OAAA,CAAA;;YAG5C,IAAIA,QAAAA,CAASG,QAAQ,CAAC,OAAA,CAAA,EAAU;gBAC5B,MAAM9B,IAAAA,GAA2BE,IAAAA,CAAK6B,KAAK,CAACnG,OAAAA,CAAAA;gBAC5C,OAAO,IAAI6F,qBAAqBzB,IAAAA,EAAM1E,MAAAA,CAAAA;AAC1C,YAAA,CAAA,MAAO,IAAIqG,QAAAA,CAASG,QAAQ,CAAC,QAAA,CAAA,EAAW;AACpC,gBAAA,MAAME,KAAAA,GAAQpG,OAAAA,CAAQqG,IAAI,EAAA,CAAGC,KAAK,CAAC,IAAA,CAAA;gBACnC,MAAMzF,QAAAA,GAAWuF,MAAMG,GAAG,CAACtB,CAAAA,IAAAA,GAAQX,IAAAA,CAAK6B,KAAK,CAAClB,IAAAA,CAAAA,CAAAA;AAE9C,gBAAA,MAAMuB,YAAAA,GAAmC;AACrC5G,oBAAAA,EAAAA,EAAI,CAAC,SAAS,EAAEY,IAAAA,CAAKiG,GAAG,EAAA,CAAA,CAAI;oBAC5BnH,QAAAA,EAAU;AACNC,wBAAAA,SAAAA,EAAW,IAAIiB,IAAAA,EAAAA;wBACfkE,KAAAA,EAAO;AACX,qBAAA;AACA7D,oBAAAA,QAAAA;oBACAmC,OAAAA,EAAS;AACLC,wBAAAA,aAAAA,EAAepC,SAASwB,MAAM;wBAC9Ba,iBAAAA,EAAmB,CAAA;wBACnBC,UAAAA,EAAY,CAAA;wBACZrB,OAAAA,EAAS;AACb;AACJ,iBAAA;gBAEA,OAAO,IAAI+D,qBAAqBW,YAAAA,EAAc9G,MAAAA,CAAAA;YAClD,CAAA,MAAO;AACH,gBAAA,MAAM,IAAIgH,KAAAA,CAAM,CAAC,oBAAoB,EAAEX,QAAAA,CAAAA,CAAU,CAAA;AACrD,YAAA;AACJ,QAAA,CAAA,CAAE,OAAO3E,KAAAA,EAAO;YACZ4E,OAAAA,CAAQ5E,KAAK,CAAC,6BAAA,EAA+B;gBAAE0B,IAAAA,EAAMiD,QAAAA;AAAU3E,gBAAAA;AAAM,aAAA,C
AAA;YACrE,MAAMA,KAAAA;AACV,QAAA;AACJ,IAAA;AAEA;;AAEC,QACD,aAAauF,UAAAA,CAAWC,SAAiB,EAAElH,MAAY,EAAiC;AACpF,QAAA,MAAMmH,KAAAA,GAAQ,MAAM7C,WAAAA,CAAG8C,OAAO,CAACF,SAAAA,CAAAA;AAC/B,QAAA,MAAMG,SAAAA,GAAYF,KAAAA,CAAMG,MAAM,CAACC,CAAAA,CAAAA,GAAKA,CAAAA,CAAEf,QAAQ,CAAC,OAAA,CAAA,CAAA,CAAUgB,IAAI,EAAA,CAAGC,OAAO,EAAA;QAEvE,IAAIJ,SAAAA,CAAU1E,MAAM,KAAK,CAAA,EAAG;AACxB,YAAA,MAAM,IAAIqE,KAAAA,CAAM,CAAC,8BAA8B,EAAEE,SAAAA,CAAAA,CAAW,CAAA;AAChE,QAAA;AAEA,QAAA,MAAMQ,aAAatE,aAAAA,CAAKiB,IAAI,CAAC6C,SAAAA,EAAWG,SAAS,CAAC,CAAA,CAAE,CAAA;QACpD,OAAOlB,oBAAAA,CAAqBC,IAAI,CAACsB,UAAAA,EAAY1H,MAAAA,CAAAA;AACjD,IAAA;AAEA;;AAEC,QACD,IAAImB,QAAAA,GAA4B;AAC5B,QAAA,OAAO,IAAI,CAAC2F,YAAY,CAAC3F,QAAQ;AACrC,IAAA;AAEA;;AAEC,QACDwG,WAAAA,GAAuC;QACnC,OAAO;AAAE,YAAA,GAAG,IAAI,CAACb,YAAY,CAAClH;AAAS,SAAA;AAC3C,IAAA;AAEA;;AAEC,QACDgI,YAAAA,GAA8B;AAC1B,QAAA,MAAMvF,YAA2B,EAAE;AAEnC,QAAA,KAAK,MAAM4C,GAAAA,IAAO,IAAI,CAAC6B,YAAY,CAAC3F,QAAQ,CAAE;YAC1C,IAAI8D,GAAAA,CAAIhE,UAAU,EAAE;AAChB,gBAAA,KAAK,MAAMmE,IAAAA,IAAQH,GAAAA,CAAIhE,UAAU,CAAE;;oBAE/B,IAAI4G,UAAAA;oBACJ,IAAI;AACAA,wBAAAA,UAAAA,GAAajD,KAAK6B,KAAK,CAACrB,IAAAA,CAAKC,QAAQ,CAAC/C,SAAS,CAAA;AACnD,oBAAA,CAAA,CAAE,OAAOZ,KAAAA,EAAO;AACZ,wBAAA,IAAI,CAAC1B,MAAM,CAAC8H,IAAI,CAAC,qCAAA,EAAuC;AACpDhG,4BAAAA,MAAAA,EAAQsD,KAAKlF,EAAE;AACfwB,4BAAAA,KAAAA,EAAOA,KAAAA,YAAiBsF,KAAAA,GAAQtF,KAAAA,CAAMrB,OAAO,GAAG0H,MAAAA,CAAOrG,KAAAA;AAC3D,yBAAA,CAAA;wBACAmG,UAAAA,GAAa;4BAAEG,aAAAA,EAAe,IAAA;4BAAMC,GAAAA,EAAK7C,IAAAA,CAAKC,QAAQ,CAAC/C;AAAU,yBAAA;AACrE,oBAAA;AAEAD,oBAAAA,SAAAA,CAAUjB,IAAI,CAAC;AACXU,wBAAAA,MAAAA,EAAQsD,KAAKlF,EAAE;wBACf6B,QAAAA,EAAUqD,IAAAA,CAAKC,QAAQ,CAACC,IAAI;AAC5BzE,wBAAAA,SAAAA,EAAWoE,IAAIpE,SAAS;wBACxBmB,SAAAA,EAAW,CAAA;wBACXM,SAAAA,EAAWuF,UAAAA;wBACX3F,MAAAA,EAAQ,IAAA;wBACRC,QAAAA,EAAU,CAAA;wBACVC,OAAAA,EAAS;AACb,qBAAA,CAAA;AACJ,gBAAA;AACJ,YAAA;AACJ,QAAA;QAEA,OAAOC,SAAAA;AACX,IAAA;AAEA;;QAGA6F,YAAAA,CAAavH,KAAa,EAA6B;AACnD,QAAA,OAAO,IAAI,CAACmG,YAAY,CAAC3F,QAAQ,CAACR,KAAAA,CAAM;AAC5C,IAAA;AAEA;;AAEC,QACDwH,WAAAA,GAA+B;AAC3B,QAAA,MAAMC,SAA0
B,EAAE;AAElC,QAAA,KAAK,MAAMnD,GAAAA,IAAO,IAAI,CAAC6B,YAAY,CAAC3F,QAAQ,CAAE;AAC1CiH,YAAAA,MAAAA,CAAOhH,IAAI,CAAC;AACRP,gBAAAA,SAAAA,EAAWoE,IAAIpE,SAAS;gBACxBmB,SAAAA,EAAW,CAAA;gBACXqG,IAAAA,EAAM,SAAA;AACNC,gBAAAA,WAAAA,EAAa,CAAA,EAAGrD,GAAAA,CAAIjE,IAAI,CAAC,QAAQ;AACrC,aAAA,CAAA;AACJ,QAAA;QAEA,OAAOoH,MAAAA;AACX,IAAA;AAEA;;AAEC,QACD,MAAMG,cAAAA,CAAelH,MAAiB,EAAEyB,UAAkB,EAAmB;AACzE,QAAA,IAAI,CAAC9C,MAAM,CAACC,KAAK,CAAC,qBAAA,EAAuB;AAAEoB,YAAAA,MAAAA;YAAQ+B,IAAAA,EAAMN;AAAW,SAAA,CAAA;QAEpE,OAAQzB,MAAAA;YACJ,KAAK,MAAA;AACD,gBAAA,MAAMiD,WAAAA,CAAGK,SAAS,CAAC7B,UAAAA,EAAY8B,IAAAA,CAAKC,SAAS,CAAC,IAAI,CAACiC,YAAY,EAAE,IAAA,EAAM,CAAA,CAAA,EAAI,OAAA,CAAA;AAC3E,gBAAA;YACJ,KAAK,UAAA;gBACD,MAAM,IAAI,CAAC0B,cAAc,CAAC1F,UAAAA,CAAAA;AAC1B,gBAAA;YACJ,KAAK,OAAA;AAAS,gBAAA;AACV,oBAAA,MAAM4D,KAAAA,GAAQ,IAAI,CAACvF,QAAQ,CAAC0F,GAAG,CAAC4B,CAAAA,CAAAA,GAAK7D,IAAAA,CAAKC,SAAS,CAAC4D,CAAAA,CAAAA,CAAAA,CAAIpE,IAAI,CAAC,IAAA,CAAA;AAC7D,oBAAA,MAAMC,WAAAA,CAAGK,SAAS,CAAC7B,UAAAA,EAAY4D,KAAAA,EAAO,OAAA,CAAA;AACtC,oBAAA;AACJ,gBAAA;AACJ;QAEA,OAAO5D,UAAAA;AACX,IAAA;AAEA;;QAGA,MAAc0F,cAAAA,CAAe1F,UAAkB,EAAiB;QAC5D,IAAIgC,QAAAA,GAAW,CAAC,sBAAsB,CAAC;QACvCA,QAAAA,IAAY,CAAC,QAAQ,EAAE,IAAI,CAACgC,YAAY,CAAC5G,EAAE,CAAC,EAAE,CAAC;QAE/C,MAAML,SAAAA,GAAY,OAAO,IAAI,CAACiH,YAAY,CAAClH,QAAQ,CAACC,SAAS,KAAK,QAAA,GAC5D,IAAI,CAACiH,YAAY,CAAClH,QAAQ,CAACC,SAAS,GACpC,IAAI,CAACiH,YAAY,CAAClH,QAAQ,CAACC,SAAS,CAACkB,WAAW,EAAA;AAEtD+D,QAAAA,QAAAA,IAAY,CAAC,aAAa,EAAEjF,SAAAA,CAAU,IAAI,CAAC;AAE3C,QAAA,KAAK,MAAMoF,GAAAA,IAAO,IAAI,CAAC6B,YAAY,CAAC3F,QAAQ,CAAE;AAC1C2D,YAAAA,QAAAA,IAAY,CAAC,GAAG,EAAEG,GAAAA,CAAIjE,IAAI,CAAC0H,WAAW,EAAA,CAAG,EAAE,EAAEzD,GAAAA,CAAItE,KAAK,CAAC,KAAK,CAAC;YAC7D,IAAIsE,GAAAA,CAAI3E,OAAO,EAAE;AACbwE,gBAAAA,QAAAA,IAAY,CAAA,EAAGG,GAAAA,CAAI3E,OAAO,CAAC,IAAI,CAAC;AACpC,YAAA;AACJ,QAAA;AAEA,QAAA,MAAMgE,WAAAA,CAAGK,SAAS,CAAC7B,UAAAA,EAAYgC,QAAAA,EAAU,OAAA,CAAA;AAC7C,IAAA;IAxLA,WAAA,CAAoBgC,YAAgC,EAAE9G,MAAY,CAAE;AAHpE,QAAA,gBAAA,CAAA,IAAA,EAAQ8G,gBAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQ9G,UAAR,MA
AA,CAAA;QAGI,IAAI,CAAC8G,YAAY,GAAGA,YAAAA;AACpB,QAAA,IAAI,CAAC9G,MAAM,GAAGiG,UAAAA,CAAWjG,UAAUkG,cAAAA,EAAgB,sBAAA,CAAA;AACvD,IAAA;AAsLJ;;;;"}
|
|
1
|
+
{"version":3,"file":"conversation-logger.js","sources":["../src/conversation-logger.ts"],"sourcesContent":["import fs from 'fs/promises';\nimport path from 'path';\nimport { DEFAULT_LOGGER, wrapLogger } from \"./logger\";\nimport type { ConversationMessage, ToolCall } from \"./conversation\";\n\n// ===== TYPE DEFINITIONS =====\n\n/**\n * Log format\n */\nexport type LogFormat = 'json' | 'markdown' | 'jsonl';\n\n/**\n * Log configuration\n */\nexport interface LogConfig {\n enabled: boolean;\n outputPath?: string;\n format?: LogFormat;\n filenameTemplate?: string;\n includeMetadata?: boolean;\n includePrompt?: boolean;\n redactSensitive?: boolean;\n redactPatterns?: RegExp[];\n onSaved?: (path: string) => void;\n onError?: (error: Error) => void;\n}\n\n/**\n * Logged conversation structure\n */\nexport interface LoggedConversation {\n id: string;\n metadata: ConversationLogMetadata;\n prompt?: PromptSnapshot;\n messages: LoggedMessage[];\n summary: ConversationSummary;\n}\n\n/**\n * Conversation metadata for logging\n */\nexport interface ConversationLogMetadata {\n startTime: Date;\n endTime?: Date;\n duration?: number;\n model: string;\n template?: string;\n userContext?: Record<string, any>;\n}\n\n/**\n * Snapshot of prompt configuration\n */\nexport interface PromptSnapshot {\n persona?: string;\n instructions?: string;\n content?: string[];\n context?: string[];\n}\n\n/**\n * Logged message with metadata\n */\nexport interface LoggedMessage {\n index: number;\n timestamp: string;\n role: string;\n content: string | null;\n tool_calls?: ToolCall[];\n tool_call_id?: string;\n metadata?: MessageLogMetadata;\n}\n\n/**\n * Message metadata for logging\n */\nexport interface MessageLogMetadata {\n tokens?: number;\n source?: string;\n latency?: number;\n tool?: string;\n duration?: number;\n success?: boolean;\n [key: string]: any;\n}\n\n/**\n * Conversation summary\n */\nexport interface ConversationSummary {\n totalMessages: number;\n totalTokens?: number;\n 
toolCallsExecuted: number;\n iterations: number;\n finalOutput?: string;\n success: boolean;\n}\n\n/**\n * Tool call log entry\n */\nexport interface ToolCallLog {\n callId: string;\n toolName: string;\n timestamp: string;\n iteration: number;\n arguments: any;\n result: any;\n duration: number;\n success: boolean;\n error?: string;\n}\n\n// ===== CONVERSATION LOGGER =====\n\n/**\n * ConversationLogger logs conversations to various formats.\n *\n * Features:\n * - Multiple formats (JSON, Markdown, JSONL)\n * - Automatic timestamping\n * - Metadata tracking\n * - Sensitive data redaction\n * - Streaming support (JSONL)\n *\n * @example\n * ```typescript\n * const logger = new ConversationLogger({\n * enabled: true,\n * outputPath: 'logs/conversations',\n * format: 'json',\n * includeMetadata: true\n * });\n *\n * logger.onConversationStart({ model: 'gpt-4o', startTime: new Date() });\n * logger.onMessageAdded(message);\n * const path = await logger.save();\n * ```\n */\nexport class ConversationLogger {\n private config: Required<LogConfig>;\n private conversationId: string;\n private metadata: ConversationLogMetadata;\n private messages: LoggedMessage[];\n private toolCalls: ToolCallLog[];\n private startTime: Date;\n private logger: any;\n private messageIndex: number;\n private cachedOutputPath?: string;\n private writeQueue: Promise<void> = Promise.resolve();\n\n constructor(config: LogConfig, logger?: any) {\n this.config = {\n outputPath: 'logs/conversations',\n format: 'json',\n filenameTemplate: 'conversation-{timestamp}',\n includeMetadata: true,\n includePrompt: false,\n redactSensitive: false,\n redactPatterns: [],\n onSaved: () => {},\n onError: () => {},\n ...config,\n } as Required<LogConfig>;\n\n this.conversationId = this.generateId();\n this.messages = [];\n this.toolCalls = [];\n this.startTime = new Date();\n this.messageIndex = 0;\n this.logger = wrapLogger(logger || DEFAULT_LOGGER, 'ConversationLogger');\n\n this.metadata = {\n startTime: 
this.startTime,\n model: 'unknown',\n };\n }\n\n /**\n * Start conversation logging\n */\n onConversationStart(metadata: Partial<ConversationLogMetadata>): void {\n this.metadata = {\n ...this.metadata,\n ...metadata,\n startTime: this.startTime,\n };\n\n // Reset cached output path to prevent file collision if logger is reused\n this.cachedOutputPath = undefined;\n\n this.logger.debug('Conversation logging started', { id: this.conversationId });\n }\n\n /**\n * Log a message\n */\n onMessageAdded(message: ConversationMessage, metadata?: MessageLogMetadata): void {\n let content = message.content;\n\n // Redact sensitive data if enabled\n if (this.config.redactSensitive && content && typeof content === 'string') {\n content = this.redactContent(content);\n }\n\n const loggedMessage: LoggedMessage = {\n index: this.messageIndex++,\n timestamp: new Date().toISOString(),\n role: message.role,\n content,\n tool_calls: message.tool_calls,\n tool_call_id: message.tool_call_id,\n metadata,\n };\n\n this.messages.push(loggedMessage);\n\n // For JSONL format, append immediately with write queue\n if (this.config.format === 'jsonl') {\n this.writeQueue = this.writeQueue\n .then(() => this.appendToJSONL(loggedMessage))\n .catch((error) => {\n this.logger.error('Failed to write JSONL message', { error });\n try {\n this.config.onError?.(error);\n } catch (callbackError) {\n this.logger.error('onError callback failed', { callbackError });\n }\n });\n }\n }\n\n /**\n * Log a tool call\n */\n onToolCall(\n callId: string,\n toolName: string,\n iteration: number,\n args: any,\n result: any,\n duration: number,\n success: boolean,\n error?: string\n ): void {\n this.toolCalls.push({\n callId,\n toolName,\n timestamp: new Date().toISOString(),\n iteration,\n arguments: args,\n result,\n duration,\n success,\n error,\n });\n }\n\n /**\n * End conversation logging\n */\n onConversationEnd(_summary: ConversationSummary): void {\n this.metadata.endTime = new Date();\n 
this.metadata.duration = this.metadata.endTime.getTime() - this.startTime.getTime();\n\n this.logger.debug('Conversation logging ended', {\n messages: this.messages.length,\n duration: this.metadata.duration\n });\n }\n\n /**\n * Save conversation to disk\n */\n async save(): Promise<string> {\n if (!this.config.enabled) {\n return '';\n }\n\n try {\n const outputPath = await this.getOutputPath();\n\n switch (this.config.format) {\n case 'json':\n await this.saveAsJSON(outputPath);\n break;\n case 'markdown':\n await this.saveAsMarkdown(outputPath);\n break;\n case 'jsonl':\n // Already saved during execution\n break;\n }\n\n this.config.onSaved(outputPath);\n this.logger.info('Conversation saved', { path: outputPath });\n\n return outputPath;\n } catch (error) {\n this.config.onError(error as Error);\n this.logger.error('Failed to save conversation', { error });\n throw error;\n }\n }\n\n /**\n * Get logged conversation object\n */\n getConversation(): LoggedConversation {\n return {\n id: this.conversationId,\n metadata: this.metadata,\n messages: this.messages,\n summary: {\n totalMessages: this.messages.length,\n toolCallsExecuted: this.toolCalls.length,\n iterations: 0, // Would need to be tracked externally\n success: true,\n },\n };\n }\n\n /**\n * Generate unique conversation ID\n */\n private generateId(): string {\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n const random = Math.random().toString(36).substring(2, 8);\n return `conv-${timestamp}-${random}`;\n }\n\n /**\n * Get output file path (cached for JSONL to avoid recalculation)\n */\n private async getOutputPath(): Promise<string> {\n if (this.cachedOutputPath) {\n return this.cachedOutputPath;\n }\n\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n const filename = this.config.filenameTemplate\n .replace('{timestamp}', timestamp)\n .replace('{id}', this.conversationId)\n .replace('{template}', this.metadata.template || 'default');\n\n let ext = '.json';\n 
if (this.config.format === 'markdown') {\n ext = '.md';\n } else if (this.config.format === 'jsonl') {\n ext = '.jsonl';\n }\n \n const fullPath = path.join(this.config.outputPath, filename + ext);\n\n // Ensure directory exists\n await fs.mkdir(path.dirname(fullPath), { recursive: true });\n\n // Cache path for JSONL format to ensure consistent file writes\n if (this.config.format === 'jsonl') {\n this.cachedOutputPath = fullPath;\n }\n\n return fullPath;\n }\n\n /**\n * Save as JSON\n */\n private async saveAsJSON(outputPath: string): Promise<void> {\n const data: LoggedConversation = {\n id: this.conversationId,\n metadata: this.metadata,\n messages: this.messages,\n summary: {\n totalMessages: this.messages.length,\n toolCallsExecuted: this.toolCalls.length,\n iterations: 0,\n success: true,\n },\n };\n\n await fs.writeFile(outputPath, JSON.stringify(data, null, 2), 'utf-8');\n }\n\n /**\n * Save as Markdown\n */\n private async saveAsMarkdown(outputPath: string): Promise<void> {\n let markdown = `# Conversation Log\\n\\n`;\n markdown += `**ID**: ${this.conversationId}\\n`;\n markdown += `**Started**: ${this.metadata.startTime.toISOString()}\\n`;\n if (this.metadata.duration) {\n markdown += `**Duration**: ${(this.metadata.duration / 1000).toFixed(1)}s\\n`;\n }\n markdown += `**Model**: ${this.metadata.model}\\n`;\n if (this.metadata.template) {\n markdown += `**Template**: ${this.metadata.template}\\n`;\n }\n markdown += `\\n## Conversation\\n\\n`;\n\n for (const msg of this.messages) {\n const time = new Date(msg.timestamp).toLocaleTimeString();\n markdown += `### Message ${msg.index + 1} (${time}) - ${msg.role}\\n\\n`;\n\n if (msg.content) {\n markdown += `\\`\\`\\`\\n${msg.content}\\n\\`\\`\\`\\n\\n`;\n }\n\n if (msg.tool_calls) {\n markdown += `**Tool Calls:**\\n`;\n for (const call of msg.tool_calls) {\n markdown += `- ${call.function.name}: \\`${call.function.arguments}\\`\\n`;\n }\n markdown += `\\n`;\n }\n\n if (msg.metadata) {\n markdown += 
`*Metadata: ${JSON.stringify(msg.metadata)}*\\n\\n`;\n }\n }\n\n markdown += `## Summary\\n\\n`;\n markdown += `- **Total Messages**: ${this.messages.length}\\n`;\n markdown += `- **Tool Calls**: ${this.toolCalls.length}\\n`;\n\n await fs.writeFile(outputPath, markdown, 'utf-8');\n }\n\n /**\n * Append to JSONL file (streaming)\n */\n private async appendToJSONL(message: LoggedMessage): Promise<void> {\n const outputPath = await this.getOutputPath();\n const line = JSON.stringify(message) + '\\n';\n await fs.appendFile(outputPath, line, 'utf-8');\n }\n\n /**\n * Redact sensitive content\n */\n private redactContent(content: string): string {\n let redacted = content;\n\n // Apply custom patterns\n for (const pattern of this.config.redactPatterns) {\n redacted = redacted.replace(pattern, '[REDACTED]');\n }\n\n // Default patterns\n const defaultPatterns = [\n /api[_-]?key[\\s:=\"']+[\\w-]+/gi,\n /password[\\s:=\"']+[\\w-]+/gi,\n /Bearer\\s+[\\w-]+/gi,\n /sk-[a-zA-Z0-9]{48}/g,\n ];\n\n for (const pattern of defaultPatterns) {\n redacted = redacted.replace(pattern, '[REDACTED]');\n }\n\n return redacted;\n }\n}\n\n// ===== CONVERSATION REPLAYER =====\n\n/**\n * Replay options\n */\nexport interface ReplayOptions {\n model?: string;\n maxIterations?: number;\n retryFailedTools?: boolean;\n toolTimeout?: number;\n expectSimilarOutput?: boolean;\n}\n\n/**\n * Replay result\n */\nexport interface ReplayResult {\n success: boolean;\n conversation: LoggedConversation;\n errors?: Error[];\n}\n\n/**\n * Comparison result\n */\nexport interface ComparisonResult {\n messageDiff: number;\n toolCallDiff: number;\n tokenDiff?: number;\n outputSimilarity: number;\n costSavings?: number;\n}\n\n/**\n * ConversationReplayer loads and replays logged conversations.\n *\n * Features:\n * - Load from various formats\n * - Replay conversations\n * - Compare replays with originals\n * - Export to different formats\n *\n * @example\n * ```typescript\n * const replayer = await 
ConversationReplayer.load('logs/conv.json');\n *\n * console.log('Messages:', replayer.messages.length);\n * console.log('Tool calls:', replayer.getToolCalls().length);\n *\n * const timeline = replayer.getTimeline();\n * console.log('Events:', timeline.length);\n * ```\n */\nexport class ConversationReplayer {\n private conversation: LoggedConversation;\n private logger: any;\n\n private constructor(conversation: LoggedConversation, logger?: any) {\n this.conversation = conversation;\n this.logger = wrapLogger(logger || DEFAULT_LOGGER, 'ConversationReplayer');\n }\n\n /**\n * Load conversation from file\n */\n static async load(filePath: string, logger?: any): Promise<ConversationReplayer> {\n const wlogger = wrapLogger(logger || DEFAULT_LOGGER, 'ConversationReplayer');\n wlogger.debug('Loading conversation', { path: filePath });\n\n try {\n const content = await fs.readFile(filePath, 'utf-8');\n\n // Determine format by extension\n if (filePath.endsWith('.json')) {\n const data: LoggedConversation = JSON.parse(content);\n return new ConversationReplayer(data, logger);\n } else if (filePath.endsWith('.jsonl')) {\n const lines = content.trim().split('\\n');\n const messages = lines.map(line => JSON.parse(line));\n\n const conversation: LoggedConversation = {\n id: `replayer-${Date.now()}`,\n metadata: {\n startTime: new Date(),\n model: 'unknown'\n },\n messages,\n summary: {\n totalMessages: messages.length,\n toolCallsExecuted: 0,\n iterations: 0,\n success: true\n }\n };\n\n return new ConversationReplayer(conversation, logger);\n } else {\n throw new Error(`Unsupported format: ${filePath}`);\n }\n } catch (error) {\n wlogger.error('Failed to load conversation', { path: filePath, error });\n throw error;\n }\n }\n\n /**\n * Load latest conversation from directory\n */\n static async loadLatest(directory: string, logger?: any): Promise<ConversationReplayer> {\n const files = await fs.readdir(directory);\n const jsonFiles = files.filter(f => 
f.endsWith('.json')).sort().reverse();\n\n if (jsonFiles.length === 0) {\n throw new Error(`No conversation logs found in ${directory}`);\n }\n\n const latestPath = path.join(directory, jsonFiles[0]);\n return ConversationReplayer.load(latestPath, logger);\n }\n\n /**\n * Get all messages\n */\n get messages(): LoggedMessage[] {\n return this.conversation.messages;\n }\n\n /**\n * Get conversation metadata\n */\n getMetadata(): ConversationLogMetadata {\n return { ...this.conversation.metadata };\n }\n\n /**\n * Get tool calls\n */\n getToolCalls(): ToolCallLog[] {\n const toolCalls: ToolCallLog[] = [];\n\n for (const msg of this.conversation.messages) {\n if (msg.tool_calls) {\n for (const call of msg.tool_calls) {\n // Parse arguments with error handling\n let parsedArgs: any;\n try {\n parsedArgs = JSON.parse(call.function.arguments);\n } catch (error) {\n this.logger.warn('Failed to parse tool call arguments', {\n callId: call.id,\n error: error instanceof Error ? error.message : String(error)\n });\n parsedArgs = { __parse_error: true, raw: call.function.arguments };\n }\n\n toolCalls.push({\n callId: call.id,\n toolName: call.function.name,\n timestamp: msg.timestamp,\n iteration: 0, // Would need to be calculated\n arguments: parsedArgs,\n result: null, // Would need to find corresponding tool message\n duration: 0,\n success: true,\n });\n }\n }\n }\n\n return toolCalls;\n }\n\n /**\n * Get message at index\n */\n getMessageAt(index: number): LoggedMessage | undefined {\n return this.conversation.messages[index];\n }\n\n /**\n * Get timeline of events\n */\n getTimeline(): TimelineEvent[] {\n const events: TimelineEvent[] = [];\n\n for (const msg of this.conversation.messages) {\n events.push({\n timestamp: msg.timestamp,\n iteration: 0, // Would need iteration tracking\n type: 'message',\n description: `${msg.role} message`,\n });\n }\n\n return events;\n }\n\n /**\n * Export to format\n */\n async exportToFormat(format: LogFormat, outputPath: string): 
Promise<string> {\n this.logger.debug('Exporting to format', { format, path: outputPath });\n\n switch (format) {\n case 'json':\n await fs.writeFile(outputPath, JSON.stringify(this.conversation, null, 2), 'utf-8');\n break;\n case 'markdown':\n await this.exportMarkdown(outputPath);\n break;\n case 'jsonl': {\n const lines = this.messages.map(m => JSON.stringify(m)).join('\\n');\n await fs.writeFile(outputPath, lines, 'utf-8');\n break;\n }\n }\n\n return outputPath;\n }\n\n /**\n * Export as markdown\n */\n private async exportMarkdown(outputPath: string): Promise<void> {\n let markdown = `# Conversation Log\\n\\n`;\n markdown += `**ID**: ${this.conversation.id}\\n`;\n\n const startTime = typeof this.conversation.metadata.startTime === 'string'\n ? this.conversation.metadata.startTime\n : this.conversation.metadata.startTime.toISOString();\n\n markdown += `**Started**: ${startTime}\\n\\n`;\n\n for (const msg of this.conversation.messages) {\n markdown += `## ${msg.role.toUpperCase()} (${msg.index})\\n\\n`;\n if (msg.content) {\n markdown += `${msg.content}\\n\\n`;\n }\n }\n\n await fs.writeFile(outputPath, markdown, 'utf-8');\n }\n}\n\n/**\n * Timeline event interface\n */\ninterface TimelineEvent {\n timestamp: string;\n iteration: number;\n type: string;\n description: string;\n duration?: number;\n success?: boolean;\n}\n\nexport default 
ConversationLogger;\n\n"],"names":["ConversationLogger","onConversationStart","metadata","startTime","cachedOutputPath","undefined","logger","debug","id","conversationId","onMessageAdded","message","content","config","redactSensitive","redactContent","loggedMessage","index","messageIndex","timestamp","Date","toISOString","role","tool_calls","tool_call_id","messages","push","format","writeQueue","then","appendToJSONL","catch","error","onError","callbackError","onToolCall","callId","toolName","iteration","args","result","duration","success","toolCalls","arguments","onConversationEnd","_summary","endTime","getTime","length","save","enabled","outputPath","getOutputPath","saveAsJSON","saveAsMarkdown","onSaved","info","path","getConversation","summary","totalMessages","toolCallsExecuted","iterations","generateId","replace","random","Math","toString","substring","filename","filenameTemplate","template","ext","fullPath","join","fs","mkdir","dirname","recursive","data","writeFile","JSON","stringify","markdown","toFixed","model","msg","time","toLocaleTimeString","call","function","name","line","appendFile","redacted","pattern","redactPatterns","defaultPatterns","Promise","resolve","includeMetadata","includePrompt","wrapLogger","DEFAULT_LOGGER","ConversationReplayer","load","filePath","wlogger","readFile","endsWith","parse","lines","trim","split","map","conversation","now","Error","loadLatest","directory","files","readdir","jsonFiles","filter","f","sort","reverse","latestPath","getMetadata","getToolCalls","parsedArgs","warn","String","__parse_error","raw","getMessageAt","getTimeline","events","type","description","exportToFormat","exportMarkdown","m","toUpperCase"],"mappings":";;;;;;;;;;;;;;;;;AAkHA;AAEA;;;;;;;;;;;;;;;;;;;;;;;AAuBC,IACM,MAAMA,kBAAAA,CAAAA;AAuCT;;QAGAC,mBAAAA,CAAoBC,QAA0C,EAAQ;QAClE,IAAI,CAACA,QAAQ,GAAG;YACZ,GAAG,IAAI,CAACA,QAAQ;AAChB,YAAA,GAAGA,QAAQ;YACXC,SAAAA,EAAW,IAAI,CAACA;AACpB,SAAA;;QAGA,IAAI,CAACC,gBAAgB,GAAGC,SAAAA;AAExB,QAAA,IAAI,CAACC,MAAM,CAACC,KAAK
,CAAC,8BAAA,EAAgC;YAAEC,EAAAA,EAAI,IAAI,CAACC;AAAe,SAAA,CAAA;AAChF,IAAA;AAEA;;AAEC,QACDC,cAAAA,CAAeC,OAA4B,EAAET,QAA6B,EAAQ;QAC9E,IAAIU,OAAAA,GAAUD,QAAQC,OAAO;;QAG7B,IAAI,IAAI,CAACC,MAAM,CAACC,eAAe,IAAIF,OAAAA,IAAW,OAAOA,OAAAA,KAAY,QAAA,EAAU;YACvEA,OAAAA,GAAU,IAAI,CAACG,aAAa,CAACH,OAAAA,CAAAA;AACjC,QAAA;AAEA,QAAA,MAAMI,aAAAA,GAA+B;YACjCC,KAAAA,EAAO,IAAI,CAACC,YAAY,EAAA;YACxBC,SAAAA,EAAW,IAAIC,OAAOC,WAAW,EAAA;AACjCC,YAAAA,IAAAA,EAAMX,QAAQW,IAAI;AAClBV,YAAAA,OAAAA;AACAW,YAAAA,UAAAA,EAAYZ,QAAQY,UAAU;AAC9BC,YAAAA,YAAAA,EAAcb,QAAQa,YAAY;AAClCtB,YAAAA;AACJ,SAAA;AAEA,QAAA,IAAI,CAACuB,QAAQ,CAACC,IAAI,CAACV,aAAAA,CAAAA;;AAGnB,QAAA,IAAI,IAAI,CAACH,MAAM,CAACc,MAAM,KAAK,OAAA,EAAS;AAChC,YAAA,IAAI,CAACC,UAAU,GAAG,IAAI,CAACA,UAAU,CAC5BC,IAAI,CAAC,IAAM,IAAI,CAACC,aAAa,CAACd,aAAAA,CAAAA,CAAAA,CAC9Be,KAAK,CAAC,CAACC,KAAAA,GAAAA;AACJ,gBAAA,IAAI,CAAC1B,MAAM,CAAC0B,KAAK,CAAC,+BAAA,EAAiC;AAAEA,oBAAAA;AAAM,iBAAA,CAAA;gBAC3D,IAAI;wBACA,oBAAA,EAAA,YAAA;qBAAA,oBAAA,GAAA,CAAA,YAAA,GAAA,IAAI,CAACnB,MAAM,EAACoB,OAAO,MAAA,IAAA,IAAnB,oBAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,oBAAA,CAAA,IAAA,CAAA,YAAA,EAAsBD,KAAAA,CAAAA;AAC1B,gBAAA,CAAA,CAAE,OAAOE,aAAAA,EAAe;AACpB,oBAAA,IAAI,CAAC5B,MAAM,CAAC0B,KAAK,CAAC,yBAAA,EAA2B;AAAEE,wBAAAA;AAAc,qBAAA,CAAA;AACjE,gBAAA;AACJ,YAAA,CAAA,CAAA;AACR,QAAA;AACJ,IAAA;AAEA;;AAEC,QACDC,WACIC,MAAc,EACdC,QAAgB,EAChBC,SAAiB,EACjBC,IAAS,EACTC,MAAW,EACXC,QAAgB,EAChBC,OAAgB,EAChBV,KAAc,EACV;AACJ,QAAA,IAAI,CAACW,SAAS,CAACjB,IAAI,CAAC;AAChBU,YAAAA,MAAAA;AACAC,YAAAA,QAAAA;YACAlB,SAAAA,EAAW,IAAIC,OAAOC,WAAW,EAAA;AACjCiB,YAAAA,SAAAA;YACAM,SAAAA,EAAWL,IAAAA;AACXC,YAAAA,MAAAA;AACAC,YAAAA,QAAAA;AACAC,YAAAA,OAAAA;AACAV,YAAAA;AACJ,SAAA,CAAA;AACJ,IAAA;AAEA;;QAGAa,iBAAAA,CAAkBC,QAA6B,EAAQ;AACnD,QAAA,IAAI,CAAC5C,QAAQ,CAAC6C,OAAO,GAAG,IAAI3B,IAAAA,EAAAA;AAC5B,QAAA,IAAI,CAAClB,QAAQ,CAACuC,QAAQ,GAAG,IAAI,CAACvC,QAAQ,CAAC6C,OAAO,CAACC,OAAO,EAAA,GAAK,IAAI,CAAC7C,SAAS,CAAC6C,OAAO,EAAA;AAEjF,QAAA,IAAI,CAAC1C,MAAM,CAACC,KAAK,CAAC,4BAAA,EAA8B;AAC5CkB,YAAAA,QAAAA,EAAU,IAAI,CAACA,QAAQ,CAACwB,MAAM;AAC9B
R,YAAAA,QAAAA,EAAU,IAAI,CAACvC,QAAQ,CAACuC;AAC5B,SAAA,CAAA;AACJ,IAAA;AAEA;;AAEC,QACD,MAAMS,IAAAA,GAAwB;AAC1B,QAAA,IAAI,CAAC,IAAI,CAACrC,MAAM,CAACsC,OAAO,EAAE;YACtB,OAAO,EAAA;AACX,QAAA;QAEA,IAAI;AACA,YAAA,MAAMC,UAAAA,GAAa,MAAM,IAAI,CAACC,aAAa,EAAA;AAE3C,YAAA,OAAQ,IAAI,CAACxC,MAAM,CAACc,MAAM;gBACtB,KAAK,MAAA;oBACD,MAAM,IAAI,CAAC2B,UAAU,CAACF,UAAAA,CAAAA;AACtB,oBAAA;gBACJ,KAAK,UAAA;oBACD,MAAM,IAAI,CAACG,cAAc,CAACH,UAAAA,CAAAA;AAC1B,oBAAA;gBACJ,KAAK,OAAA;AAED,oBAAA;AACR;AAEA,YAAA,IAAI,CAACvC,MAAM,CAAC2C,OAAO,CAACJ,UAAAA,CAAAA;AACpB,YAAA,IAAI,CAAC9C,MAAM,CAACmD,IAAI,CAAC,oBAAA,EAAsB;gBAAEC,IAAAA,EAAMN;AAAW,aAAA,CAAA;YAE1D,OAAOA,UAAAA;AACX,QAAA,CAAA,CAAE,OAAOpB,KAAAA,EAAO;AACZ,YAAA,IAAI,CAACnB,MAAM,CAACoB,OAAO,CAACD,KAAAA,CAAAA;AACpB,YAAA,IAAI,CAAC1B,MAAM,CAAC0B,KAAK,CAAC,6BAAA,EAA+B;AAAEA,gBAAAA;AAAM,aAAA,CAAA;YACzD,MAAMA,KAAAA;AACV,QAAA;AACJ,IAAA;AAEA;;AAEC,QACD2B,eAAAA,GAAsC;QAClC,OAAO;YACHnD,EAAAA,EAAI,IAAI,CAACC,cAAc;YACvBP,QAAAA,EAAU,IAAI,CAACA,QAAQ;YACvBuB,QAAAA,EAAU,IAAI,CAACA,QAAQ;YACvBmC,OAAAA,EAAS;AACLC,gBAAAA,aAAAA,EAAe,IAAI,CAACpC,QAAQ,CAACwB,MAAM;AACnCa,gBAAAA,iBAAAA,EAAmB,IAAI,CAACnB,SAAS,CAACM,MAAM;gBACxCc,UAAAA,EAAY,CAAA;gBACZrB,OAAAA,EAAS;AACb;AACJ,SAAA;AACJ,IAAA;AAEA;;AAEC,QACD,UAAQsB,GAAqB;AACzB,QAAA,MAAM7C,YAAY,IAAIC,IAAAA,EAAAA,CAAOC,WAAW,EAAA,CAAG4C,OAAO,CAAC,OAAA,EAAS,GAAA,CAAA;QAC5D,MAAMC,MAAAA,GAASC,KAAKD,MAAM,EAAA,CAAGE,QAAQ,CAAC,EAAA,CAAA,CAAIC,SAAS,CAAC,CAAA,EAAG,CAAA,CAAA;AACvD,QAAA,OAAO,CAAC,KAAK,EAAElD,SAAAA,CAAU,CAAC,EAAE+C,MAAAA,CAAAA,CAAQ;AACxC,IAAA;AAEA;;AAEC,QACD,MAAcb,aAAAA,GAAiC;QAC3C,IAAI,IAAI,CAACjD,gBAAgB,EAAE;YACvB,OAAO,IAAI,CAACA,gBAAgB;AAChC,QAAA;AAEA,QAAA,MAAMe,YAAY,IAAIC,IAAAA,EAAAA,CAAOC,WAAW,EAAA,CAAG4C,OAAO,CAAC,OAAA,EAAS,GAAA,CAAA;QAC5D,MAAMK,QAAAA,GAAW,IAAI,CAACzD,MAAM,CAAC0D,gBAAgB,CACxCN,OAAO,CAAC,aAAA,EAAe9C,SAAAA,CAAAA,CACvB8C,OAAO,CAAC,QAAQ,IAAI,CAACxD,cAAc,CAAA,CACnCwD,OAAO,CAAC,YAAA,EAAc,IAAI,CAAC/D,QAAQ,CAACsE,QAAQ,IAAI,SAAA,CAAA;AAErD,QAAA,IAAIC,GAAAA,GAAM,OAAA;AACV,QAAA,IAAI,IAAI,CAAC5D,MAAM,CAACc,MAAM,KAAK,UAA
A,EAAY;YACnC8C,GAAAA,GAAM,KAAA;AACV,QAAA,CAAA,MAAO,IAAI,IAAI,CAAC5D,MAAM,CAACc,MAAM,KAAK,OAAA,EAAS;YACvC8C,GAAAA,GAAM,QAAA;AACV,QAAA;QAEA,MAAMC,QAAAA,GAAWhB,aAAAA,CAAKiB,IAAI,CAAC,IAAI,CAAC9D,MAAM,CAACuC,UAAU,EAAEkB,QAAAA,GAAWG,GAAAA,CAAAA;;AAG9D,QAAA,MAAMG,YAAGC,KAAK,CAACnB,aAAAA,CAAKoB,OAAO,CAACJ,QAAAA,CAAAA,EAAW;YAAEK,SAAAA,EAAW;AAAK,SAAA,CAAA;;AAGzD,QAAA,IAAI,IAAI,CAAClE,MAAM,CAACc,MAAM,KAAK,OAAA,EAAS;YAChC,IAAI,CAACvB,gBAAgB,GAAGsE,QAAAA;AAC5B,QAAA;QAEA,OAAOA,QAAAA;AACX,IAAA;AAEA;;QAGA,MAAcpB,UAAAA,CAAWF,UAAkB,EAAiB;AACxD,QAAA,MAAM4B,IAAAA,GAA2B;YAC7BxE,EAAAA,EAAI,IAAI,CAACC,cAAc;YACvBP,QAAAA,EAAU,IAAI,CAACA,QAAQ;YACvBuB,QAAAA,EAAU,IAAI,CAACA,QAAQ;YACvBmC,OAAAA,EAAS;AACLC,gBAAAA,aAAAA,EAAe,IAAI,CAACpC,QAAQ,CAACwB,MAAM;AACnCa,gBAAAA,iBAAAA,EAAmB,IAAI,CAACnB,SAAS,CAACM,MAAM;gBACxCc,UAAAA,EAAY,CAAA;gBACZrB,OAAAA,EAAS;AACb;AACJ,SAAA;QAEA,MAAMkC,WAAAA,CAAGK,SAAS,CAAC7B,UAAAA,EAAY8B,KAAKC,SAAS,CAACH,IAAAA,EAAM,IAAA,EAAM,CAAA,CAAA,EAAI,OAAA,CAAA;AAClE,IAAA;AAEA;;QAGA,MAAczB,cAAAA,CAAeH,UAAkB,EAAiB;QAC5D,IAAIgC,QAAAA,GAAW,CAAC,sBAAsB,CAAC;QACvCA,QAAAA,IAAY,CAAC,QAAQ,EAAE,IAAI,CAAC3E,cAAc,CAAC,EAAE,CAAC;AAC9C2E,QAAAA,QAAAA,IAAY,CAAC,aAAa,EAAE,IAAI,CAAClF,QAAQ,CAACC,SAAS,CAACkB,WAAW,EAAA,CAAG,EAAE,CAAC;AACrE,QAAA,IAAI,IAAI,CAACnB,QAAQ,CAACuC,QAAQ,EAAE;AACxB2C,YAAAA,QAAAA,IAAY,CAAC,cAAc,EAAE,CAAC,IAAI,CAAClF,QAAQ,CAACuC,QAAQ,GAAG,IAAG,EAAG4C,OAAO,CAAC,CAAA,CAAA,CAAG,GAAG,CAAC;AAChF,QAAA;QACAD,QAAAA,IAAY,CAAC,WAAW,EAAE,IAAI,CAAClF,QAAQ,CAACoF,KAAK,CAAC,EAAE,CAAC;AACjD,QAAA,IAAI,IAAI,CAACpF,QAAQ,CAACsE,QAAQ,EAAE;YACxBY,QAAAA,IAAY,CAAC,cAAc,EAAE,IAAI,CAAClF,QAAQ,CAACsE,QAAQ,CAAC,EAAE,CAAC;AAC3D,QAAA;QACAY,QAAAA,IAAY,CAAC,qBAAqB,CAAC;AAEnC,QAAA,KAAK,MAAMG,GAAAA,IAAO,IAAI,CAAC9D,QAAQ,CAAE;AAC7B,YAAA,MAAM+D,OAAO,IAAIpE,IAAAA,CAAKmE,GAAAA,CAAIpE,SAAS,EAAEsE,kBAAkB,EAAA;AACvDL,YAAAA,QAAAA,IAAY,CAAC,YAAY,EAAEG,GAAAA,CAAItE,KAAK,GAAG,CAAA,CAAE,EAAE,EAAEuE,IAAAA,CAAK,IAAI,EAAED,GAAAA,CAAIjE,IAAI,CAAC,IAAI,CAAC;YAEtE,IAAIiE,GAAAA,CAAI3E,OAAO,EAAE;AACbwE,gBAAAA,QAAAA,IAAY,CAAC,QAAQ,EA
AEG,IAAI3E,OAAO,CAAC,YAAY,CAAC;AACpD,YAAA;YAEA,IAAI2E,GAAAA,CAAIhE,UAAU,EAAE;gBAChB6D,QAAAA,IAAY,CAAC,iBAAiB,CAAC;AAC/B,gBAAA,KAAK,MAAMM,IAAAA,IAAQH,GAAAA,CAAIhE,UAAU,CAAE;AAC/B6D,oBAAAA,QAAAA,IAAY,CAAC,EAAE,EAAEM,IAAAA,CAAKC,QAAQ,CAACC,IAAI,CAAC,IAAI,EAAEF,KAAKC,QAAQ,CAAC/C,SAAS,CAAC,IAAI,CAAC;AAC3E,gBAAA;gBACAwC,QAAAA,IAAY,CAAC,EAAE,CAAC;AACpB,YAAA;YAEA,IAAIG,GAAAA,CAAIrF,QAAQ,EAAE;gBACdkF,QAAAA,IAAY,CAAC,WAAW,EAAEF,IAAAA,CAAKC,SAAS,CAACI,GAAAA,CAAIrF,QAAQ,CAAA,CAAE,KAAK,CAAC;AACjE,YAAA;AACJ,QAAA;QAEAkF,QAAAA,IAAY,CAAC,cAAc,CAAC;QAC5BA,QAAAA,IAAY,CAAC,sBAAsB,EAAE,IAAI,CAAC3D,QAAQ,CAACwB,MAAM,CAAC,EAAE,CAAC;QAC7DmC,QAAAA,IAAY,CAAC,kBAAkB,EAAE,IAAI,CAACzC,SAAS,CAACM,MAAM,CAAC,EAAE,CAAC;AAE1D,QAAA,MAAM2B,WAAAA,CAAGK,SAAS,CAAC7B,UAAAA,EAAYgC,QAAAA,EAAU,OAAA,CAAA;AAC7C,IAAA;AAEA;;QAGA,MAActD,aAAAA,CAAcnB,OAAsB,EAAiB;AAC/D,QAAA,MAAMyC,UAAAA,GAAa,MAAM,IAAI,CAACC,aAAa,EAAA;AAC3C,QAAA,MAAMwC,IAAAA,GAAOX,IAAAA,CAAKC,SAAS,CAACxE,OAAAA,CAAAA,GAAW,IAAA;AACvC,QAAA,MAAMiE,WAAAA,CAAGkB,UAAU,CAAC1C,UAAAA,EAAYyC,IAAAA,EAAM,OAAA,CAAA;AAC1C,IAAA;AAEA;;QAGQ9E,aAAAA,CAAcH,OAAe,EAAU;AAC3C,QAAA,IAAImF,QAAAA,GAAWnF,OAAAA;;AAGf,QAAA,KAAK,MAAMoF,OAAAA,IAAW,IAAI,CAACnF,MAAM,CAACoF,cAAc,CAAE;YAC9CF,QAAAA,GAAWA,QAAAA,CAAS9B,OAAO,CAAC+B,OAAAA,EAAS,YAAA,CAAA;AACzC,QAAA;;AAGA,QAAA,MAAME,eAAAA,GAAkB;AACpB,YAAA,8BAAA;AACA,YAAA,2BAAA;AACA,YAAA,mBAAA;AACA,YAAA;AACH,SAAA;QAED,KAAK,MAAMF,WAAWE,eAAAA,CAAiB;YACnCH,QAAAA,GAAWA,QAAAA,CAAS9B,OAAO,CAAC+B,OAAAA,EAAS,YAAA,CAAA;AACzC,QAAA;QAEA,OAAOD,QAAAA;AACX,IAAA;IAtTA,WAAA,CAAYlF,MAAiB,EAAEP,MAAY,CAAE;AAX7C,QAAA,gBAAA,CAAA,IAAA,EAAQO,UAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQJ,kBAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQP,YAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQuB,YAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQkB,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQxC,aAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQG,UAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQY,gBAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQd,oBAAR,MAAA,CAAA;QACA,gBAAA,CAAA,IAAA,EAAQwB,YAAAA,E
AA4BuE,QAAQC,OAAO,EAAA,CAAA;QAG/C,IAAI,CAACvF,MAAM,GAAG;YACVuC,UAAAA,EAAY,oBAAA;YACZzB,MAAAA,EAAQ,MAAA;YACR4C,gBAAAA,EAAkB,0BAAA;YAClB8B,eAAAA,EAAiB,IAAA;YACjBC,aAAAA,EAAe,KAAA;YACfxF,eAAAA,EAAiB,KAAA;AACjBmF,YAAAA,cAAAA,EAAgB,EAAE;AAClBzC,YAAAA,OAAAA,EAAS,IAAA,CAAO,CAAA;AAChBvB,YAAAA,OAAAA,EAAS,IAAA,CAAO,CAAA;AAChB,YAAA,GAAGpB;AACP,SAAA;AAEA,QAAA,IAAI,CAACJ,cAAc,GAAG,IAAI,CAACuD,UAAU,EAAA;QACrC,IAAI,CAACvC,QAAQ,GAAG,EAAE;QAClB,IAAI,CAACkB,SAAS,GAAG,EAAE;QACnB,IAAI,CAACxC,SAAS,GAAG,IAAIiB,IAAAA,EAAAA;QACrB,IAAI,CAACF,YAAY,GAAG,CAAA;AACpB,QAAA,IAAI,CAACZ,MAAM,GAAGiG,UAAAA,CAAWjG,UAAUkG,cAAAA,EAAgB,oBAAA,CAAA;QAEnD,IAAI,CAACtG,QAAQ,GAAG;YACZC,SAAAA,EAAW,IAAI,CAACA,SAAS;YACzBmF,KAAAA,EAAO;AACX,SAAA;AACJ,IAAA;AA8RJ;AAmCA;;;;;;;;;;;;;;;;;;;AAmBC,IACM,MAAMmB,oBAAAA,CAAAA;AAST;;AAEC,QACD,aAAaC,IAAAA,CAAKC,QAAgB,EAAErG,MAAY,EAAiC;QAC7E,MAAMsG,OAAAA,GAAUL,UAAAA,CAAWjG,MAAAA,IAAUkG,cAAAA,EAAgB,sBAAA,CAAA;QACrDI,OAAAA,CAAQrG,KAAK,CAAC,sBAAA,EAAwB;YAAEmD,IAAAA,EAAMiD;AAAS,SAAA,CAAA;QAEvD,IAAI;AACA,YAAA,MAAM/F,OAAAA,GAAU,MAAMgE,WAAAA,CAAGiC,QAAQ,CAACF,QAAAA,EAAU,OAAA,CAAA;;YAG5C,IAAIA,QAAAA,CAASG,QAAQ,CAAC,OAAA,CAAA,EAAU;gBAC5B,MAAM9B,IAAAA,GAA2BE,IAAAA,CAAK6B,KAAK,CAACnG,OAAAA,CAAAA;gBAC5C,OAAO,IAAI6F,qBAAqBzB,IAAAA,EAAM1E,MAAAA,CAAAA;AAC1C,YAAA,CAAA,MAAO,IAAIqG,QAAAA,CAASG,QAAQ,CAAC,QAAA,CAAA,EAAW;AACpC,gBAAA,MAAME,KAAAA,GAAQpG,OAAAA,CAAQqG,IAAI,EAAA,CAAGC,KAAK,CAAC,IAAA,CAAA;gBACnC,MAAMzF,QAAAA,GAAWuF,MAAMG,GAAG,CAACtB,CAAAA,IAAAA,GAAQX,IAAAA,CAAK6B,KAAK,CAAClB,IAAAA,CAAAA,CAAAA;AAE9C,gBAAA,MAAMuB,YAAAA,GAAmC;AACrC5G,oBAAAA,EAAAA,EAAI,CAAC,SAAS,EAAEY,IAAAA,CAAKiG,GAAG,EAAA,CAAA,CAAI;oBAC5BnH,QAAAA,EAAU;AACNC,wBAAAA,SAAAA,EAAW,IAAIiB,IAAAA,EAAAA;wBACfkE,KAAAA,EAAO;AACX,qBAAA;AACA7D,oBAAAA,QAAAA;oBACAmC,OAAAA,EAAS;AACLC,wBAAAA,aAAAA,EAAepC,SAASwB,MAAM;wBAC9Ba,iBAAAA,EAAmB,CAAA;wBACnBC,UAAAA,EAAY,CAAA;wBACZrB,OAAAA,EAAS;AACb;AACJ,iBAAA;gBAEA,OAAO,IAAI+D,qBAAqBW,YAAAA,EAAc9G,MAAAA,CAAAA;YAClD,CAAA,MAAO;AACH,gBAAA,MAAM,IAAIgH,KAAAA,CAAM,CAAC,oBAAoB,EAAEX,QAAAA,CAAAA,CAAU,CAAA;AACr
D,YAAA;AACJ,QAAA,CAAA,CAAE,OAAO3E,KAAAA,EAAO;YACZ4E,OAAAA,CAAQ5E,KAAK,CAAC,6BAAA,EAA+B;gBAAE0B,IAAAA,EAAMiD,QAAAA;AAAU3E,gBAAAA;AAAM,aAAA,CAAA;YACrE,MAAMA,KAAAA;AACV,QAAA;AACJ,IAAA;AAEA;;AAEC,QACD,aAAauF,UAAAA,CAAWC,SAAiB,EAAElH,MAAY,EAAiC;AACpF,QAAA,MAAMmH,KAAAA,GAAQ,MAAM7C,WAAAA,CAAG8C,OAAO,CAACF,SAAAA,CAAAA;AAC/B,QAAA,MAAMG,SAAAA,GAAYF,KAAAA,CAAMG,MAAM,CAACC,CAAAA,CAAAA,GAAKA,CAAAA,CAAEf,QAAQ,CAAC,OAAA,CAAA,CAAA,CAAUgB,IAAI,EAAA,CAAGC,OAAO,EAAA;QAEvE,IAAIJ,SAAAA,CAAU1E,MAAM,KAAK,CAAA,EAAG;AACxB,YAAA,MAAM,IAAIqE,KAAAA,CAAM,CAAC,8BAA8B,EAAEE,SAAAA,CAAAA,CAAW,CAAA;AAChE,QAAA;AAEA,QAAA,MAAMQ,aAAatE,aAAAA,CAAKiB,IAAI,CAAC6C,SAAAA,EAAWG,SAAS,CAAC,CAAA,CAAE,CAAA;QACpD,OAAOlB,oBAAAA,CAAqBC,IAAI,CAACsB,UAAAA,EAAY1H,MAAAA,CAAAA;AACjD,IAAA;AAEA;;AAEC,QACD,IAAImB,QAAAA,GAA4B;AAC5B,QAAA,OAAO,IAAI,CAAC2F,YAAY,CAAC3F,QAAQ;AACrC,IAAA;AAEA;;AAEC,QACDwG,WAAAA,GAAuC;QACnC,OAAO;AAAE,YAAA,GAAG,IAAI,CAACb,YAAY,CAAClH;AAAS,SAAA;AAC3C,IAAA;AAEA;;AAEC,QACDgI,YAAAA,GAA8B;AAC1B,QAAA,MAAMvF,YAA2B,EAAE;AAEnC,QAAA,KAAK,MAAM4C,GAAAA,IAAO,IAAI,CAAC6B,YAAY,CAAC3F,QAAQ,CAAE;YAC1C,IAAI8D,GAAAA,CAAIhE,UAAU,EAAE;AAChB,gBAAA,KAAK,MAAMmE,IAAAA,IAAQH,GAAAA,CAAIhE,UAAU,CAAE;;oBAE/B,IAAI4G,UAAAA;oBACJ,IAAI;AACAA,wBAAAA,UAAAA,GAAajD,KAAK6B,KAAK,CAACrB,IAAAA,CAAKC,QAAQ,CAAC/C,SAAS,CAAA;AACnD,oBAAA,CAAA,CAAE,OAAOZ,KAAAA,EAAO;AACZ,wBAAA,IAAI,CAAC1B,MAAM,CAAC8H,IAAI,CAAC,qCAAA,EAAuC;AACpDhG,4BAAAA,MAAAA,EAAQsD,KAAKlF,EAAE;AACfwB,4BAAAA,KAAAA,EAAOA,KAAAA,YAAiBsF,KAAAA,GAAQtF,KAAAA,CAAMrB,OAAO,GAAG0H,MAAAA,CAAOrG,KAAAA;AAC3D,yBAAA,CAAA;wBACAmG,UAAAA,GAAa;4BAAEG,aAAAA,EAAe,IAAA;4BAAMC,GAAAA,EAAK7C,IAAAA,CAAKC,QAAQ,CAAC/C;AAAU,yBAAA;AACrE,oBAAA;AAEAD,oBAAAA,SAAAA,CAAUjB,IAAI,CAAC;AACXU,wBAAAA,MAAAA,EAAQsD,KAAKlF,EAAE;wBACf6B,QAAAA,EAAUqD,IAAAA,CAAKC,QAAQ,CAACC,IAAI;AAC5BzE,wBAAAA,SAAAA,EAAWoE,IAAIpE,SAAS;wBACxBmB,SAAAA,EAAW,CAAA;wBACXM,SAAAA,EAAWuF,UAAAA;wBACX3F,MAAAA,EAAQ,IAAA;wBACRC,QAAAA,EAAU,CAAA;wBACVC,OAAAA,EAAS;AACb,qBAAA,CAAA;AACJ,gBAAA;AACJ,YAAA;AACJ,QAAA;QAEA,OAAOC,SAAAA;AACX,IAAA;AAEA;;QAGA6F,YAAAA,CAA
avH,KAAa,EAA6B;AACnD,QAAA,OAAO,IAAI,CAACmG,YAAY,CAAC3F,QAAQ,CAACR,KAAAA,CAAM;AAC5C,IAAA;AAEA;;AAEC,QACDwH,WAAAA,GAA+B;AAC3B,QAAA,MAAMC,SAA0B,EAAE;AAElC,QAAA,KAAK,MAAMnD,GAAAA,IAAO,IAAI,CAAC6B,YAAY,CAAC3F,QAAQ,CAAE;AAC1CiH,YAAAA,MAAAA,CAAOhH,IAAI,CAAC;AACRP,gBAAAA,SAAAA,EAAWoE,IAAIpE,SAAS;gBACxBmB,SAAAA,EAAW,CAAA;gBACXqG,IAAAA,EAAM,SAAA;AACNC,gBAAAA,WAAAA,EAAa,CAAA,EAAGrD,GAAAA,CAAIjE,IAAI,CAAC,QAAQ;AACrC,aAAA,CAAA;AACJ,QAAA;QAEA,OAAOoH,MAAAA;AACX,IAAA;AAEA;;AAEC,QACD,MAAMG,cAAAA,CAAelH,MAAiB,EAAEyB,UAAkB,EAAmB;AACzE,QAAA,IAAI,CAAC9C,MAAM,CAACC,KAAK,CAAC,qBAAA,EAAuB;AAAEoB,YAAAA,MAAAA;YAAQ+B,IAAAA,EAAMN;AAAW,SAAA,CAAA;QAEpE,OAAQzB,MAAAA;YACJ,KAAK,MAAA;AACD,gBAAA,MAAMiD,WAAAA,CAAGK,SAAS,CAAC7B,UAAAA,EAAY8B,IAAAA,CAAKC,SAAS,CAAC,IAAI,CAACiC,YAAY,EAAE,IAAA,EAAM,CAAA,CAAA,EAAI,OAAA,CAAA;AAC3E,gBAAA;YACJ,KAAK,UAAA;gBACD,MAAM,IAAI,CAAC0B,cAAc,CAAC1F,UAAAA,CAAAA;AAC1B,gBAAA;YACJ,KAAK,OAAA;AAAS,gBAAA;AACV,oBAAA,MAAM4D,KAAAA,GAAQ,IAAI,CAACvF,QAAQ,CAAC0F,GAAG,CAAC4B,CAAAA,CAAAA,GAAK7D,IAAAA,CAAKC,SAAS,CAAC4D,CAAAA,CAAAA,CAAAA,CAAIpE,IAAI,CAAC,IAAA,CAAA;AAC7D,oBAAA,MAAMC,WAAAA,CAAGK,SAAS,CAAC7B,UAAAA,EAAY4D,KAAAA,EAAO,OAAA,CAAA;AACtC,oBAAA;AACJ,gBAAA;AACJ;QAEA,OAAO5D,UAAAA;AACX,IAAA;AAEA;;QAGA,MAAc0F,cAAAA,CAAe1F,UAAkB,EAAiB;QAC5D,IAAIgC,QAAAA,GAAW,CAAC,sBAAsB,CAAC;QACvCA,QAAAA,IAAY,CAAC,QAAQ,EAAE,IAAI,CAACgC,YAAY,CAAC5G,EAAE,CAAC,EAAE,CAAC;QAE/C,MAAML,SAAAA,GAAY,OAAO,IAAI,CAACiH,YAAY,CAAClH,QAAQ,CAACC,SAAS,KAAK,QAAA,GAC5D,IAAI,CAACiH,YAAY,CAAClH,QAAQ,CAACC,SAAS,GACpC,IAAI,CAACiH,YAAY,CAAClH,QAAQ,CAACC,SAAS,CAACkB,WAAW,EAAA;AAEtD+D,QAAAA,QAAAA,IAAY,CAAC,aAAa,EAAEjF,SAAAA,CAAU,IAAI,CAAC;AAE3C,QAAA,KAAK,MAAMoF,GAAAA,IAAO,IAAI,CAAC6B,YAAY,CAAC3F,QAAQ,CAAE;AAC1C2D,YAAAA,QAAAA,IAAY,CAAC,GAAG,EAAEG,GAAAA,CAAIjE,IAAI,CAAC0H,WAAW,EAAA,CAAG,EAAE,EAAEzD,GAAAA,CAAItE,KAAK,CAAC,KAAK,CAAC;YAC7D,IAAIsE,GAAAA,CAAI3E,OAAO,EAAE;AACbwE,gBAAAA,QAAAA,IAAY,CAAA,EAAGG,GAAAA,CAAI3E,OAAO,CAAC,IAAI,CAAC;AACpC,YAAA;AACJ,QAAA;AAEA,QAAA,MAAMgE,WAAAA,CAAGK,SAAS,CAAC7B,UAAAA,EAAYgC,QAAAA,EAAU,OAAA,CAAA;AAC7
C,IAAA;IAxLA,WAAA,CAAoBgC,YAAgC,EAAE9G,MAAY,CAAE;AAHpE,QAAA,gBAAA,CAAA,IAAA,EAAQ8G,gBAAR,MAAA,CAAA;AACA,QAAA,gBAAA,CAAA,IAAA,EAAQ9G,UAAR,MAAA,CAAA;QAGI,IAAI,CAAC8G,YAAY,GAAGA,YAAAA;AACpB,QAAA,IAAI,CAAC9G,MAAM,GAAGiG,UAAAA,CAAWjG,UAAUkG,cAAAA,EAAgB,sBAAA,CAAA;AACvD,IAAA;AAsLJ;;;;"}
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
import Anthropic from '@anthropic-ai/sdk';

/**
 * Provider implementation that executes a riotprompt Request against the
 * Anthropic Messages API.
 */
class AnthropicProvider {
    /**
     * Execute a chat request.
     *
     * @param {object} request - riotprompt Request ({ model?, messages }).
     * @param {object} [options] - ExecutionOptions: apiKey, model, temperature, maxTokens.
     * @returns {Promise<{content: string, model: string, usage: {inputTokens: number, outputTokens: number}}>}
     * @throws {Error} When no API key is supplied via options or ANTHROPIC_API_KEY.
     */
    async execute(request, options = {}) {
        const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY;
        if (!apiKey) throw new Error('Anthropic API key is required');
        const client = new Anthropic({
            apiKey
        });
        const model = options.model || request.model || 'claude-3-opus-20240229';
        // Anthropic separates the system prompt from the user/assistant turns,
        // so fold any system/developer messages into a single system string.
        let systemPrompt = '';
        const messages = [];
        for (const msg of request.messages){
            const text = typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content);
            if (msg.role === 'system' || msg.role === 'developer') {
                systemPrompt += text + '\n\n';
            } else {
                messages.push({
                    role: msg.role,
                    content: text
                });
            }
        }
        const response = await client.messages.create({
            model: model,
            system: systemPrompt.trim() || undefined,
            messages: messages,
            max_tokens: options.maxTokens || 4096, // Anthropic requires max_tokens
            temperature: options.temperature
        });
        // response.content is a list of ContentBlocks; guard against an empty
        // list (or a non-text first block, e.g. tool_use) instead of crashing
        // with a TypeError on contentBlock.type.
        const contentBlock = response.content[0];
        const text = contentBlock && contentBlock.type === 'text' ? contentBlock.text : '';
        return {
            content: text,
            model: response.model,
            usage: {
                inputTokens: response.usage.input_tokens,
                outputTokens: response.usage.output_tokens
            }
        };
    }
}

export { AnthropicProvider };
//# sourceMappingURL=anthropic.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"anthropic.js","sources":["../../src/execution/anthropic.ts"],"sourcesContent":["import Anthropic from '@anthropic-ai/sdk';\nimport { Provider, ProviderResponse, ExecutionOptions } from './provider';\nimport { Request } from '../chat';\n\nexport class AnthropicProvider implements Provider {\n async execute(request: Request, options: ExecutionOptions = {}): Promise<ProviderResponse> {\n const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY;\n if (!apiKey) throw new Error('Anthropic API key is required');\n\n const client = new Anthropic({ apiKey });\n \n const model = options.model || request.model || 'claude-3-opus-20240229';\n\n // Anthropic separates system prompt from messages\n let systemPrompt = '';\n const messages: Anthropic.MessageParam[] = [];\n\n for (const msg of request.messages) {\n if (msg.role === 'system' || msg.role === 'developer') {\n systemPrompt += (typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content)) + '\\n\\n';\n } else {\n messages.push({\n role: msg.role as 'user' | 'assistant',\n content: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content)\n });\n }\n }\n\n const response = await client.messages.create({\n model: model,\n system: systemPrompt.trim() || undefined,\n messages: messages,\n max_tokens: options.maxTokens || 4096, // Anthropic requires max_tokens\n temperature: options.temperature,\n });\n\n // Handle ContentBlock\n const contentBlock = response.content[0];\n const text = contentBlock.type === 'text' ? 
contentBlock.text : '';\n\n return {\n content: text,\n model: response.model,\n usage: {\n inputTokens: response.usage.input_tokens,\n outputTokens: response.usage.output_tokens\n }\n };\n }\n}\n\n"],"names":["AnthropicProvider","execute","request","options","apiKey","process","env","ANTHROPIC_API_KEY","Error","client","Anthropic","model","systemPrompt","messages","msg","role","content","JSON","stringify","push","response","create","system","trim","undefined","max_tokens","maxTokens","temperature","contentBlock","text","type","usage","inputTokens","input_tokens","outputTokens","output_tokens"],"mappings":";;AAIO,MAAMA,iBAAAA,CAAAA;AACT,IAAA,MAAMC,QAAQC,OAAgB,EAAEC,OAAAA,GAA4B,EAAE,EAA6B;AACvF,QAAA,MAAMC,SAASD,OAAAA,CAAQC,MAAM,IAAIC,OAAAA,CAAQC,GAAG,CAACC,iBAAiB;AAC9D,QAAA,IAAI,CAACH,MAAAA,EAAQ,MAAM,IAAII,KAAAA,CAAM,+BAAA,CAAA;QAE7B,MAAMC,MAAAA,GAAS,IAAIC,SAAAA,CAAU;AAAEN,YAAAA;AAAO,SAAA,CAAA;AAEtC,QAAA,MAAMO,QAAQR,OAAAA,CAAQQ,KAAK,IAAIT,OAAAA,CAAQS,KAAK,IAAI,wBAAA;;AAGhD,QAAA,IAAIC,YAAAA,GAAe,EAAA;AACnB,QAAA,MAAMC,WAAqC,EAAE;AAE7C,QAAA,KAAK,MAAMC,GAAAA,IAAOZ,OAAAA,CAAQW,QAAQ,CAAE;AAChC,YAAA,IAAIC,IAAIC,IAAI,KAAK,YAAYD,GAAAA,CAAIC,IAAI,KAAK,WAAA,EAAa;AACnDH,gBAAAA,YAAAA,IAAgB,CAAC,OAAOE,GAAAA,CAAIE,OAAO,KAAK,QAAA,GAAWF,GAAAA,CAAIE,OAAO,GAAGC,KAAKC,SAAS,CAACJ,GAAAA,CAAIE,OAAO,CAAA,IAAK,MAAA;YACpG,CAAA,MAAO;AACHH,gBAAAA,QAAAA,CAASM,IAAI,CAAC;AACVJ,oBAAAA,IAAAA,EAAMD,IAAIC,IAAI;AACdC,oBAAAA,OAAAA,EAAS,OAAOF,GAAAA,CAAIE,OAAO,KAAK,QAAA,GAAWF,GAAAA,CAAIE,OAAO,GAAGC,IAAAA,CAAKC,SAAS,CAACJ,GAAAA,CAAIE,OAAO;AACvF,iBAAA,CAAA;AACJ,YAAA;AACJ,QAAA;AAEA,QAAA,MAAMI,WAAW,MAAMX,MAAAA,CAAOI,QAAQ,CAACQ,MAAM,CAAC;YAC1CV,KAAAA,EAAOA,KAAAA;YACPW,MAAAA,EAAQV,YAAAA,CAAaW,IAAI,EAAA,IAAMC,SAAAA;YAC/BX,QAAAA,EAAUA,QAAAA;YACVY,UAAAA,EAAYtB,OAAAA,CAAQuB,SAAS,IAAI,IAAA;AACjCC,YAAAA,WAAAA,EAAaxB,QAAQwB;AACzB,SAAA,CAAA;;AAGA,QAAA,MAAMC,YAAAA,GAAeR,QAAAA,CAASJ,OAAO,CAAC,CAAA,CAAE;AACxC,QAAA,MAAMa,OAAOD,YAAAA,CAAaE,IAAI,KAAK,MAAA,GAASF,YAAAA,CAAaC,IAAI,GAAG,EAAA;QAEhE,OAAO;YACHb,OAAAA,EAASa,IAAAA;AA
CTlB,YAAAA,KAAAA,EAAOS,SAAST,KAAK;YACrBoB,KAAAA,EAAO;gBACHC,WAAAA,EAAaZ,QAAAA,CAASW,KAAK,CAACE,YAAY;gBACxCC,YAAAA,EAAcd,QAAAA,CAASW,KAAK,CAACI;AACjC;AACJ,SAAA;AACJ,IAAA;AACJ;;;;"}
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import { GoogleGenerativeAI } from '@google/generative-ai';

/**
 * Provider implementation that executes a riotprompt Request against the
 * Google Gemini API.
 */
class GeminiProvider {
    /**
     * Execute a chat request.
     *
     * @param {object} request - riotprompt Request ({ model?, messages }).
     * @param {object} [options] - ExecutionOptions: apiKey, model, temperature, maxTokens.
     * @returns {Promise<{content, model, usage}>} usage is undefined when the
     *          API reports no usageMetadata.
     * @throws {Error} When no API key is supplied via options or GEMINI_API_KEY.
     */
    async execute(request, options = {}) {
        const apiKey = options.apiKey || process.env.GEMINI_API_KEY; // or GOOGLE_API_KEY
        if (!apiKey) throw new Error('Gemini API key is required');
        const genAI = new GoogleGenerativeAI(apiKey);
        const modelName = options.model || request.model || 'gemini-1.5-pro';
        const asText = (content)=>typeof content === 'string' ? content : JSON.stringify(content);
        const isSystem = (msg)=>msg.role === 'system' || msg.role === 'developer';
        // Gemini keeps the system instruction outside the chat turns: collect
        // every system/developer message into one instruction string.
        const systemInstruction = request.messages.filter(isSystem).map((msg)=>asText(msg.content) + '\n\n').join('');
        const configuredModel = genAI.getGenerativeModel({
            model: modelName,
            systemInstruction: systemInstruction ? systemInstruction.trim() : undefined
        });
        // Convert the remaining turns to Gemini chat format, remembering the
        // most recent user message for the single-turn path below.
        const chatHistory = [];
        let lastUserMessage = '';
        for (const msg of request.messages){
            if (isSystem(msg)) continue;
            const text = asText(msg.content);
            if (msg.role === 'user') {
                lastUserMessage = text;
            }
            chatHistory.push({
                role: msg.role === 'assistant' ? 'model' : 'user',
                parts: [
                    {
                        text: text
                    }
                ]
            });
        }
        let result;
        if (chatHistory.length > 1) {
            // Multi-turn: everything except the final turn becomes history,
            // and the final turn is sent through the chat session.
            const finalTurn = chatHistory.pop();
            const chat = configuredModel.startChat({
                history: chatHistory
            });
            result = await chat.sendMessage((finalTurn && finalTurn.parts[0].text) || '');
        } else {
            // Single turn (or none): send the last user message directly.
            result = await configuredModel.generateContent(lastUserMessage || ' ');
        }
        const response = await result.response;
        return {
            content: response.text(),
            model: modelName,
            usage: response.usageMetadata ? {
                inputTokens: response.usageMetadata.promptTokenCount,
                outputTokens: response.usageMetadata.candidatesTokenCount
            } : undefined
        };
    }
}

export { GeminiProvider };
//# sourceMappingURL=gemini.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"gemini.js","sources":["../../src/execution/gemini.ts"],"sourcesContent":["import { GoogleGenerativeAI } from '@google/generative-ai';\nimport { Provider, ProviderResponse, ExecutionOptions } from './provider';\nimport { Request } from '../chat';\n\nexport class GeminiProvider implements Provider {\n async execute(request: Request, options: ExecutionOptions = {}): Promise<ProviderResponse> {\n const apiKey = options.apiKey || process.env.GEMINI_API_KEY; // or GOOGLE_API_KEY\n if (!apiKey) throw new Error('Gemini API key is required');\n\n const genAI = new GoogleGenerativeAI(apiKey);\n \n const modelName = options.model || request.model || 'gemini-1.5-pro';\n \n // Gemini format: system instruction is separate, history is separate from last message\n // generateContent accepts a string or parts.\n \n // We need to construct the prompt.\n // Simple approach: Concat system instructions + chat history\n \n let systemInstruction = '';\n \n // Extract system prompt\n for (const msg of request.messages) {\n if (msg.role === 'system' || msg.role === 'developer') {\n systemInstruction += (typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content)) + '\\n\\n';\n }\n }\n\n // Configure model with system instruction if available (newer Gemini versions support this)\n // Or just prepend to first user message.\n // Let's try to prepend for compatibility if needed, but 'systemInstruction' param exists in getGenerativeModel config.\n \n const configuredModel = genAI.getGenerativeModel({ \n model: modelName,\n systemInstruction: systemInstruction ? systemInstruction.trim() : undefined\n });\n\n // Build history/messages\n // Gemini `generateContent` takes the *last* user message.\n // `startChat` takes history.\n \n const chatHistory = [];\n let lastUserMessage = '';\n\n for (const msg of request.messages) {\n if (msg.role === 'system' || msg.role === 'developer') continue;\n \n const content = typeof msg.content === 'string' ? 
msg.content : JSON.stringify(msg.content);\n \n if (msg.role === 'user') {\n lastUserMessage = content; // Assuming strictly alternating or we just want the prompt?\n // If there are multiple messages, we should build a chat.\n }\n \n chatHistory.push({\n role: msg.role === 'assistant' ? 'model' : 'user',\n parts: [{ text: content }]\n });\n }\n\n // If we are just running a prompt (single turn), we can use generateContent with the full text.\n // But let's support multi-turn by using startChat if history > 1.\n \n // If it's a typical \"Prompt\" execution, it's usually System + 1 User message.\n \n let result;\n \n if (chatHistory.length > 1) {\n // Remove last message from history to send it\n const lastMsg = chatHistory.pop();\n const chat = configuredModel.startChat({\n history: chatHistory\n });\n result = await chat.sendMessage(lastMsg?.parts[0].text || '');\n } else {\n // Just one message (or none?)\n result = await configuredModel.generateContent(lastUserMessage || ' ');\n }\n\n const response = await result.response;\n const text = response.text();\n\n return {\n content: text,\n model: modelName,\n // Gemini usage metadata usageMetadata\n usage: response.usageMetadata ? 
{\n inputTokens: response.usageMetadata.promptTokenCount,\n outputTokens: response.usageMetadata.candidatesTokenCount\n } : undefined\n };\n }\n}\n\n"],"names":["GeminiProvider","execute","request","options","apiKey","process","env","GEMINI_API_KEY","Error","genAI","GoogleGenerativeAI","modelName","model","systemInstruction","msg","messages","role","content","JSON","stringify","configuredModel","getGenerativeModel","trim","undefined","chatHistory","lastUserMessage","push","parts","text","result","length","lastMsg","pop","chat","startChat","history","sendMessage","generateContent","response","usage","usageMetadata","inputTokens","promptTokenCount","outputTokens","candidatesTokenCount"],"mappings":";;AAIO,MAAMA,cAAAA,CAAAA;AACT,IAAA,MAAMC,QAAQC,OAAgB,EAAEC,OAAAA,GAA4B,EAAE,EAA6B;QACvF,MAAMC,MAAAA,GAASD,QAAQC,MAAM,IAAIC,QAAQC,GAAG,CAACC,cAAc,CAAA;AAC3D,QAAA,IAAI,CAACH,MAAAA,EAAQ,MAAM,IAAII,KAAAA,CAAM,4BAAA,CAAA;QAE7B,MAAMC,KAAAA,GAAQ,IAAIC,kBAAAA,CAAmBN,MAAAA,CAAAA;AAErC,QAAA,MAAMO,YAAYR,OAAAA,CAAQS,KAAK,IAAIV,OAAAA,CAAQU,KAAK,IAAI,gBAAA;;;;;AAQpD,QAAA,IAAIC,iBAAAA,GAAoB,EAAA;;AAGxB,QAAA,KAAK,MAAMC,GAAAA,IAAOZ,OAAAA,CAAQa,QAAQ,CAAE;AAChC,YAAA,IAAID,IAAIE,IAAI,KAAK,YAAYF,GAAAA,CAAIE,IAAI,KAAK,WAAA,EAAa;AACnDH,gBAAAA,iBAAAA,IAAqB,CAAC,OAAOC,GAAAA,CAAIG,OAAO,KAAK,QAAA,GAAWH,GAAAA,CAAIG,OAAO,GAAGC,KAAKC,SAAS,CAACL,GAAAA,CAAIG,OAAO,CAAA,IAAK,MAAA;AACzG,YAAA;AACJ,QAAA;;;;QAMA,MAAMG,eAAAA,GAAkBX,KAAAA,CAAMY,kBAAkB,CAAC;YAC7CT,KAAAA,EAAOD,SAAAA;YACPE,iBAAAA,EAAmBA,iBAAAA,GAAoBA,iBAAAA,CAAkBS,IAAI,EAAA,GAAKC;AACtE,SAAA,CAAA;;;;AAMA,QAAA,MAAMC,cAAc,EAAE;AACtB,QAAA,IAAIC,eAAAA,GAAkB,EAAA;AAEtB,QAAA,KAAK,MAAMX,GAAAA,IAAOZ,OAAAA,CAAQa,QAAQ,CAAE;AAChC,YAAA,IAAID,IAAIE,IAAI,KAAK,YAAYF,GAAAA,CAAIE,IAAI,KAAK,WAAA,EAAa;AAEvD,YAAA,MAAMC,OAAAA,GAAU,OAAOH,GAAAA,CAAIG,OAAO,KAAK,QAAA,GAAWH,GAAAA,CAAIG,OAAO,GAAGC,IAAAA,CAAKC,SAAS,CAACL,IAAIG,OAAO,CAAA;YAE1F,IAAIH,GAAAA,CAAIE,IAAI,KAAK,MAAA,EAAQ;AACrBS,gBAAAA,eAAAA,GAAkBR;;AAEtB,YAAA;AAEAO,YAAAA,WAAAA,CAAYE,IAAI,CAAC;AACbV,gBAAAA,IAAAA,EAAMF,G
AAAA,CAAIE,IAAI,KAAK,WAAA,GAAc,OAAA,GAAU,MAAA;gBAC3CW,KAAAA,EAAO;AAAC,oBAAA;wBAAEC,IAAAA,EAAMX;AAAQ;AAAE;AAC9B,aAAA,CAAA;AACJ,QAAA;;;;QAOA,IAAIY,MAAAA;QAEJ,IAAIL,WAAAA,CAAYM,MAAM,GAAG,CAAA,EAAG;;YAExB,MAAMC,OAAAA,GAAUP,YAAYQ,GAAG,EAAA;YAC/B,MAAMC,IAAAA,GAAOb,eAAAA,CAAgBc,SAAS,CAAC;gBACnCC,OAAAA,EAASX;AACb,aAAA,CAAA;AACAK,YAAAA,MAAAA,GAAS,MAAMI,IAAAA,CAAKG,WAAW,CAACL,CAAAA,OAAAA,KAAAA,IAAAA,IAAAA,OAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,OAAAA,CAASJ,KAAK,CAAC,CAAA,CAAE,CAACC,IAAI,KAAI,EAAA,CAAA;QAC9D,CAAA,MAAO;;AAEHC,YAAAA,MAAAA,GAAS,MAAMT,eAAAA,CAAgBiB,eAAe,CAACZ,eAAAA,IAAmB,GAAA,CAAA;AACtE,QAAA;QAEA,MAAMa,QAAAA,GAAW,MAAMT,MAAAA,CAAOS,QAAQ;QACtC,MAAMV,IAAAA,GAAOU,SAASV,IAAI,EAAA;QAE1B,OAAO;YACHX,OAAAA,EAASW,IAAAA;YACThB,KAAAA,EAAOD,SAAAA;;YAEP4B,KAAAA,EAAOD,QAAAA,CAASE,aAAa,GAAG;gBAC5BC,WAAAA,EAAaH,QAAAA,CAASE,aAAa,CAACE,gBAAgB;gBACpDC,YAAAA,EAAcL,QAAAA,CAASE,aAAa,CAACI;aACzC,GAAIrB;AACR,SAAA;AACJ,IAAA;AACJ;;;;"}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { Provider, ProviderResponse, ExecutionOptions } from './provider';
import { Request } from '../chat';
export type { Provider, ProviderResponse, ExecutionOptions };
/**
 * Routes execution requests to a concrete LLM provider picked from the
 * model name.
 */
export declare class ExecutionManager {
    /** Registered providers keyed by name ('openai' | 'anthropic' | 'gemini'). */
    private providers;
    constructor();
    /**
     * Select a provider by model-name prefix ('claude' -> anthropic,
     * 'gemini' -> gemini); anything else, including a missing model,
     * falls back to the OpenAI provider.
     */
    getProvider(model: string): Provider;
    /** Execute a request with the provider matching options.model (or request.model). */
    execute(request: Request, options?: ExecutionOptions): Promise<ProviderResponse>;
}
/** Convenience wrapper: creates a fresh ExecutionManager and executes the request. */
export declare const execute: (request: Request, options?: ExecutionOptions) => Promise<ProviderResponse>;
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
import { OpenAIProvider } from './openai.js';
|
|
2
|
+
import { AnthropicProvider } from './anthropic.js';
|
|
3
|
+
import { GeminiProvider } from './gemini.js';
|
|
4
|
+
|
|
5
|
+
function _define_property(obj, key, value) {
|
|
6
|
+
if (key in obj) {
|
|
7
|
+
Object.defineProperty(obj, key, {
|
|
8
|
+
value: value,
|
|
9
|
+
enumerable: true,
|
|
10
|
+
configurable: true,
|
|
11
|
+
writable: true
|
|
12
|
+
});
|
|
13
|
+
} else {
|
|
14
|
+
obj[key] = value;
|
|
15
|
+
}
|
|
16
|
+
return obj;
|
|
17
|
+
}
|
|
18
|
+
/**
 * Routes a Request to the provider implementation that owns the model.
 * Providers are registered once in the constructor, keyed by name.
 */
class ExecutionManager {
    constructor(){
        _define_property(this, "providers", void 0);
        this.providers = new Map();
        this.providers.set('openai', new OpenAIProvider());
        this.providers.set('anthropic', new AnthropicProvider());
        this.providers.set('gemini', new GeminiProvider());
    }
    /**
     * Pick a provider from the model-name prefix. Unknown prefixes and a
     * missing model both fall back to the OpenAI provider.
     */
    getProvider(model) {
        let name = 'openai'; // default when model is missing or unrecognized
        if (model) {
            if (model.startsWith('claude')) {
                name = 'anthropic';
            } else if (model.startsWith('gemini')) {
                name = 'gemini';
            }
            // 'gpt'/'o1' prefixes (and everything else) resolve to 'openai'
        }
        return this.providers.get(name);
    }
    /** Execute a request with the provider matching options.model (or request.model). */
    async execute(request, options = {}) {
        const model = options.model || request.model;
        return this.getProvider(model).execute(request, options);
    }
}
|
|
47
|
+
/**
 * One-shot convenience wrapper: builds a throwaway ExecutionManager and
 * delegates the request to it.
 */
const execute = async (request, options = {})=>{
    return new ExecutionManager().execute(request, options);
};
|
|
51
|
+
|
|
52
|
+
export { ExecutionManager, execute };
|
|
53
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sources":["../../src/execution/index.ts"],"sourcesContent":["import { Provider, ProviderResponse, ExecutionOptions } from './provider';\nimport { OpenAIProvider } from './openai';\nimport { AnthropicProvider } from './anthropic';\nimport { GeminiProvider } from './gemini';\nimport { Request } from '../chat';\n\nexport type { Provider, ProviderResponse, ExecutionOptions };\n\nexport class ExecutionManager {\n private providers: Map<string, Provider>;\n\n constructor() {\n this.providers = new Map();\n this.providers.set('openai', new OpenAIProvider());\n this.providers.set('anthropic', new AnthropicProvider());\n this.providers.set('gemini', new GeminiProvider());\n }\n\n getProvider(model: string): Provider {\n if (!model) {\n // Default to OpenAI if model is undefined\n return this.providers.get('openai')!;\n }\n if (model.startsWith('gpt') || model.startsWith('o1')) {\n return this.providers.get('openai')!;\n } else if (model.startsWith('claude')) {\n return this.providers.get('anthropic')!;\n } else if (model.startsWith('gemini')) {\n return this.providers.get('gemini')!;\n }\n \n // Fallback or default?\n return this.providers.get('openai')!;\n }\n\n async execute(request: Request, options: ExecutionOptions = {}): Promise<ProviderResponse> {\n const model = options.model || request.model;\n const provider = this.getProvider(model);\n return provider.execute(request, options);\n }\n}\n\nexport const execute = async (request: Request, options: ExecutionOptions = {}): Promise<ProviderResponse> => {\n const manager = new ExecutionManager();\n return manager.execute(request, 
options);\n}\n"],"names":["ExecutionManager","getProvider","model","providers","get","startsWith","execute","request","options","provider","Map","set","OpenAIProvider","AnthropicProvider","GeminiProvider","manager"],"mappings":";;;;;;;;;;;;;;;;;AAQO,MAAMA,gBAAAA,CAAAA;AAUTC,IAAAA,WAAAA,CAAYC,KAAa,EAAY;AACjC,QAAA,IAAI,CAACA,KAAAA,EAAO;;AAER,YAAA,OAAO,IAAI,CAACC,SAAS,CAACC,GAAG,CAAC,QAAA,CAAA;AAC9B,QAAA;AACA,QAAA,IAAIF,MAAMG,UAAU,CAAC,UAAUH,KAAAA,CAAMG,UAAU,CAAC,IAAA,CAAA,EAAO;AACnD,YAAA,OAAO,IAAI,CAACF,SAAS,CAACC,GAAG,CAAC,QAAA,CAAA;AAC9B,QAAA,CAAA,MAAO,IAAIF,KAAAA,CAAMG,UAAU,CAAC,QAAA,CAAA,EAAW;AACnC,YAAA,OAAO,IAAI,CAACF,SAAS,CAACC,GAAG,CAAC,WAAA,CAAA;AAC9B,QAAA,CAAA,MAAO,IAAIF,KAAAA,CAAMG,UAAU,CAAC,QAAA,CAAA,EAAW;AACnC,YAAA,OAAO,IAAI,CAACF,SAAS,CAACC,GAAG,CAAC,QAAA,CAAA;AAC9B,QAAA;;AAGA,QAAA,OAAO,IAAI,CAACD,SAAS,CAACC,GAAG,CAAC,QAAA,CAAA;AAC9B,IAAA;AAEA,IAAA,MAAME,QAAQC,OAAgB,EAAEC,OAAAA,GAA4B,EAAE,EAA6B;AACvF,QAAA,MAAMN,KAAAA,GAAQM,OAAAA,CAAQN,KAAK,IAAIK,QAAQL,KAAK;AAC5C,QAAA,MAAMO,QAAAA,GAAW,IAAI,CAACR,WAAW,CAACC,KAAAA,CAAAA;QAClC,OAAOO,QAAAA,CAASH,OAAO,CAACC,OAAAA,EAASC,OAAAA,CAAAA;AACrC,IAAA;IA5BA,WAAA,EAAc;AAFd,QAAA,gBAAA,CAAA,IAAA,EAAQL,aAAR,MAAA,CAAA;QAGI,IAAI,CAACA,SAAS,GAAG,IAAIO,GAAAA,EAAAA;AACrB,QAAA,IAAI,CAACP,SAAS,CAACQ,GAAG,CAAC,UAAU,IAAIC,cAAAA,EAAAA,CAAAA;AACjC,QAAA,IAAI,CAACT,SAAS,CAACQ,GAAG,CAAC,aAAa,IAAIE,iBAAAA,EAAAA,CAAAA;AACpC,QAAA,IAAI,CAACV,SAAS,CAACQ,GAAG,CAAC,UAAU,IAAIG,cAAAA,EAAAA,CAAAA;AACrC,IAAA;AAwBJ;MAEaR,OAAAA,GAAU,OAAOC,OAAAA,EAAkBC,OAAAA,GAA4B,EAAE,GAAA;AAC1E,IAAA,MAAMO,UAAU,IAAIf,gBAAAA,EAAAA;IACpB,OAAOe,OAAAA,CAAQT,OAAO,CAACC,OAAAA,EAASC,OAAAA,CAAAA;AACpC;;;;"}
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
import OpenAI from 'openai';

/**
 * Provider implementation that executes a riotprompt Request against the
 * OpenAI Chat Completions API.
 */
class OpenAIProvider {
    /**
     * Execute a chat request.
     *
     * @param {object} request - riotprompt Request ({ model?, messages }).
     * @param {object} [options] - ExecutionOptions: apiKey, model, temperature, maxTokens.
     * @returns {Promise<{content, model, usage}>} usage is undefined when the
     *          API reports none.
     * @throws {Error} When no API key is available, or the API returns no choices.
     */
    async execute(request, options = {}) {
        const apiKey = options.apiKey || process.env.OPENAI_API_KEY;
        if (!apiKey) throw new Error('OpenAI API key is required');
        const client = new OpenAI({
            apiKey
        });
        const model = options.model || request.model || 'gpt-4';
        // Convert riotprompt messages to OpenAI chat messages. riotprompt roles
        // are "user" | "assistant" | "system" | "developer"; map 'developer' to
        // 'system' for broad SDK/model compatibility.
        const messages = request.messages.map((msg)=>{
            const role = msg.role === 'developer' ? 'system' : msg.role;
            return {
                role: role,
                content: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content),
                name: msg.name
            };
        });
        const response = await client.chat.completions.create({
            model: model,
            messages: messages,
            temperature: options.temperature,
            max_tokens: options.maxTokens
        });
        // Guard against an empty choices array instead of crashing with a
        // TypeError on choice.message below.
        const choice = response.choices[0];
        if (!choice) throw new Error('OpenAI response contained no choices');
        return {
            content: choice.message.content || '',
            model: response.model,
            usage: response.usage ? {
                inputTokens: response.usage.prompt_tokens,
                outputTokens: response.usage.completion_tokens
            } : undefined
        };
    }
}

export { OpenAIProvider };
//# sourceMappingURL=openai.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"openai.js","sources":["../../src/execution/openai.ts"],"sourcesContent":["import OpenAI from 'openai';\nimport { Provider, ProviderResponse, ExecutionOptions } from './provider';\nimport { Request } from '../chat';\n\nexport class OpenAIProvider implements Provider {\n async execute(request: Request, options: ExecutionOptions = {}): Promise<ProviderResponse> {\n const apiKey = options.apiKey || process.env.OPENAI_API_KEY;\n if (!apiKey) throw new Error('OpenAI API key is required');\n\n const client = new OpenAI({ apiKey });\n \n const model = options.model || request.model || 'gpt-4';\n\n // Convert RiotPrompt messages to OpenAI messages\n const messages = request.messages.map(msg => {\n const role = msg.role === 'developer' ? 'system' : msg.role; // OpenAI uses system, not developer usually (except o1)\n // But wait, o1 uses developer. Let's respect what formatter gave us if valid.\n // OpenAI Node SDK types expect specific roles.\n // RiotPrompt roles: \"user\" | \"assistant\" | \"system\" | \"developer\"\n // OpenAI roles: \"system\" | \"user\" | \"assistant\" | \"tool\" | \"function\" | \"developer\" (recent versions)\n \n // We'll cast to any to avoid strict type issues with older/newer SDK versions mismatch\n return {\n role: role,\n content: typeof msg.content === 'string' ? msg.content : JSON.stringify(msg.content),\n name: msg.name\n } as any;\n });\n\n const response = await client.chat.completions.create({\n model: model,\n messages: messages,\n temperature: options.temperature,\n max_tokens: options.maxTokens,\n });\n\n const choice = response.choices[0];\n \n return {\n content: choice.message.content || '',\n model: response.model,\n usage: response.usage ? 
{\n inputTokens: response.usage.prompt_tokens,\n outputTokens: response.usage.completion_tokens\n } : undefined\n };\n }\n}\n\n"],"names":["OpenAIProvider","execute","request","options","apiKey","process","env","OPENAI_API_KEY","Error","client","OpenAI","model","messages","map","msg","role","content","JSON","stringify","name","response","chat","completions","create","temperature","max_tokens","maxTokens","choice","choices","message","usage","inputTokens","prompt_tokens","outputTokens","completion_tokens","undefined"],"mappings":";;AAIO,MAAMA,cAAAA,CAAAA;AACT,IAAA,MAAMC,QAAQC,OAAgB,EAAEC,OAAAA,GAA4B,EAAE,EAA6B;AACvF,QAAA,MAAMC,SAASD,OAAAA,CAAQC,MAAM,IAAIC,OAAAA,CAAQC,GAAG,CAACC,cAAc;AAC3D,QAAA,IAAI,CAACH,MAAAA,EAAQ,MAAM,IAAII,KAAAA,CAAM,4BAAA,CAAA;QAE7B,MAAMC,MAAAA,GAAS,IAAIC,MAAAA,CAAO;AAAEN,YAAAA;AAAO,SAAA,CAAA;AAEnC,QAAA,MAAMO,QAAQR,OAAAA,CAAQQ,KAAK,IAAIT,OAAAA,CAAQS,KAAK,IAAI,OAAA;;AAGhD,QAAA,MAAMC,WAAWV,OAAAA,CAAQU,QAAQ,CAACC,GAAG,CAACC,CAAAA,GAAAA,GAAAA;YAClC,MAAMC,IAAAA,GAAOD,IAAIC,IAAI,KAAK,cAAc,QAAA,GAAWD,GAAAA,CAAIC,IAAI,CAAA;;;;;;YAO3D,OAAO;gBACHA,IAAAA,EAAMA,IAAAA;AACNC,gBAAAA,OAAAA,EAAS,OAAOF,GAAAA,CAAIE,OAAO,KAAK,QAAA,GAAWF,GAAAA,CAAIE,OAAO,GAAGC,IAAAA,CAAKC,SAAS,CAACJ,GAAAA,CAAIE,OAAO,CAAA;AACnFG,gBAAAA,IAAAA,EAAML,IAAIK;AACd,aAAA;AACJ,QAAA,CAAA,CAAA;QAEA,MAAMC,QAAAA,GAAW,MAAMX,MAAAA,CAAOY,IAAI,CAACC,WAAW,CAACC,MAAM,CAAC;YAClDZ,KAAAA,EAAOA,KAAAA;YACPC,QAAAA,EAAUA,QAAAA;AACVY,YAAAA,WAAAA,EAAarB,QAAQqB,WAAW;AAChCC,YAAAA,UAAAA,EAAYtB,QAAQuB;AACxB,SAAA,CAAA;AAEA,QAAA,MAAMC,MAAAA,GAASP,QAAAA,CAASQ,OAAO,CAAC,CAAA,CAAE;QAElC,OAAO;AACHZ,YAAAA,OAAAA,EAASW,MAAAA,CAAOE,OAAO,CAACb,OAAO,IAAI,EAAA;AACnCL,YAAAA,KAAAA,EAAOS,SAAST,KAAK;YACrBmB,KAAAA,EAAOV,QAAAA,CAASU,KAAK,GAAG;gBACpBC,WAAAA,EAAaX,QAAAA,CAASU,KAAK,CAACE,aAAa;gBACzCC,YAAAA,EAAcb,QAAAA,CAASU,KAAK,CAACI;aACjC,GAAIC;AACR,SAAA;AACJ,IAAA;AACJ;;;;"}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { Request } from '../chat';
/** Normalized result returned by every provider implementation. */
export interface ProviderResponse {
    /** Text content of the model's reply. */
    content: string;
    /** Model name used for (or reported by) the call. */
    model: string;
    /** Token accounting, present only when the provider reports it. */
    usage?: {
        inputTokens: number;
        outputTokens: number;
    };
}
/** Per-call overrides for provider execution. */
export interface ExecutionOptions {
    /** API key; providers fall back to their provider-specific environment variable. */
    apiKey?: string;
    /** Overrides the model set on the request. */
    model?: string;
    temperature?: number;
    maxTokens?: number;
}
/** Contract implemented by each LLM backend (OpenAI, Anthropic, Gemini). */
export interface Provider {
    execute(request: Request, options?: ExecutionOptions): Promise<ProviderResponse>;
}
|
package/dist/loader.js
CHANGED
|
@@ -13,6 +13,9 @@ import './recipes.js';
|
|
|
13
13
|
import './conversation.js';
|
|
14
14
|
import 'tiktoken';
|
|
15
15
|
import './tools.js';
|
|
16
|
+
import 'openai';
|
|
17
|
+
import '@anthropic-ai/sdk';
|
|
18
|
+
import '@google/generative-ai';
|
|
16
19
|
import { create as create$1 } from './util/storage.js';
|
|
17
20
|
|
|
18
21
|
const OptionsSchema = z.object({
|