@librechat/agents 2.4.30 → 2.4.311

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/dist/cjs/common/enum.cjs +1 -0
  2. package/dist/cjs/common/enum.cjs.map +1 -1
  3. package/dist/cjs/main.cjs +2 -0
  4. package/dist/cjs/main.cjs.map +1 -1
  5. package/dist/cjs/tools/search/firecrawl.cjs +149 -0
  6. package/dist/cjs/tools/search/firecrawl.cjs.map +1 -0
  7. package/dist/cjs/tools/search/format.cjs +116 -0
  8. package/dist/cjs/tools/search/format.cjs.map +1 -0
  9. package/dist/cjs/tools/search/highlights.cjs +194 -0
  10. package/dist/cjs/tools/search/highlights.cjs.map +1 -0
  11. package/dist/cjs/tools/search/rerankers.cjs +187 -0
  12. package/dist/cjs/tools/search/rerankers.cjs.map +1 -0
  13. package/dist/cjs/tools/search/search.cjs +410 -0
  14. package/dist/cjs/tools/search/search.cjs.map +1 -0
  15. package/dist/cjs/tools/search/tool.cjs +103 -0
  16. package/dist/cjs/tools/search/tool.cjs.map +1 -0
  17. package/dist/esm/common/enum.mjs +1 -0
  18. package/dist/esm/common/enum.mjs.map +1 -1
  19. package/dist/esm/main.mjs +1 -0
  20. package/dist/esm/main.mjs.map +1 -1
  21. package/dist/esm/tools/search/firecrawl.mjs +145 -0
  22. package/dist/esm/tools/search/firecrawl.mjs.map +1 -0
  23. package/dist/esm/tools/search/format.mjs +114 -0
  24. package/dist/esm/tools/search/format.mjs.map +1 -0
  25. package/dist/esm/tools/search/highlights.mjs +192 -0
  26. package/dist/esm/tools/search/highlights.mjs.map +1 -0
  27. package/dist/esm/tools/search/rerankers.mjs +181 -0
  28. package/dist/esm/tools/search/rerankers.mjs.map +1 -0
  29. package/dist/esm/tools/search/search.mjs +407 -0
  30. package/dist/esm/tools/search/search.mjs.map +1 -0
  31. package/dist/esm/tools/search/tool.mjs +101 -0
  32. package/dist/esm/tools/search/tool.mjs.map +1 -0
  33. package/dist/types/common/enum.d.ts +1 -0
  34. package/dist/types/index.d.ts +1 -0
  35. package/dist/types/scripts/search.d.ts +1 -0
  36. package/dist/types/tools/search/firecrawl.d.ts +117 -0
  37. package/dist/types/tools/search/format.d.ts +2 -0
  38. package/dist/types/tools/search/highlights.d.ts +13 -0
  39. package/dist/types/tools/search/index.d.ts +2 -0
  40. package/dist/types/tools/search/rerankers.d.ts +32 -0
  41. package/dist/types/tools/search/search.d.ts +9 -0
  42. package/dist/types/tools/search/tool.d.ts +12 -0
  43. package/dist/types/tools/search/types.d.ts +150 -0
  44. package/package.json +2 -1
  45. package/src/common/enum.ts +1 -0
  46. package/src/index.ts +1 -0
  47. package/src/scripts/search.ts +141 -0
  48. package/src/tools/search/firecrawl.ts +270 -0
  49. package/src/tools/search/format.ts +121 -0
  50. package/src/tools/search/highlights.ts +238 -0
  51. package/src/tools/search/index.ts +2 -0
  52. package/src/tools/search/rerankers.ts +248 -0
  53. package/src/tools/search/search.ts +567 -0
  54. package/src/tools/search/tool.ts +151 -0
  55. package/src/tools/search/types.ts +179 -0
package/dist/types/tools/search/firecrawl.d.ts ADDED
@@ -0,0 +1,117 @@
+ export interface FirecrawlScrapeOptions {
+     formats?: string[];
+     includeTags?: string[];
+     excludeTags?: string[];
+     headers?: Record<string, string>;
+     waitFor?: number;
+     timeout?: number;
+ }
+ interface ScrapeMetadata {
+     sourceURL?: string;
+     url?: string;
+     scrapeId?: string;
+     statusCode?: number;
+     title?: string;
+     description?: string;
+     language?: string;
+     favicon?: string;
+     viewport?: string;
+     robots?: string;
+     'theme-color'?: string;
+     'og:url'?: string;
+     'og:title'?: string;
+     'og:description'?: string;
+     'og:type'?: string;
+     'og:image'?: string;
+     'og:image:width'?: string;
+     'og:image:height'?: string;
+     'og:site_name'?: string;
+     ogUrl?: string;
+     ogTitle?: string;
+     ogDescription?: string;
+     ogImage?: string;
+     ogSiteName?: string;
+     'article:author'?: string;
+     'article:published_time'?: string;
+     'article:modified_time'?: string;
+     'article:section'?: string;
+     'article:tag'?: string;
+     'article:publisher'?: string;
+     publishedTime?: string;
+     modifiedTime?: string;
+     'twitter:site'?: string;
+     'twitter:creator'?: string;
+     'twitter:card'?: string;
+     'twitter:image'?: string;
+     'twitter:dnt'?: string;
+     'twitter:app:name:iphone'?: string;
+     'twitter:app:id:iphone'?: string;
+     'twitter:app:url:iphone'?: string;
+     'twitter:app:name:ipad'?: string;
+     'twitter:app:id:ipad'?: string;
+     'twitter:app:url:ipad'?: string;
+     'twitter:app:name:googleplay'?: string;
+     'twitter:app:id:googleplay'?: string;
+     'twitter:app:url:googleplay'?: string;
+     'fb:app_id'?: string;
+     'al:ios:url'?: string;
+     'al:ios:app_name'?: string;
+     'al:ios:app_store_id'?: string;
+     [key: string]: string | number | boolean | null | undefined;
+ }
+ export interface FirecrawlScrapeResponse {
+     success: boolean;
+     data?: {
+         markdown?: string;
+         html?: string;
+         rawHtml?: string;
+         screenshot?: string;
+         links?: string[];
+         metadata?: ScrapeMetadata;
+     };
+     error?: string;
+ }
+ export interface FirecrawlScraperConfig {
+     apiKey?: string;
+     apiUrl?: string;
+     formats?: string[];
+     timeout?: number;
+ }
+ export declare function getAttribution(link: string, metadata?: ScrapeMetadata): string | undefined;
+ /**
+  * Firecrawl scraper implementation
+  * Uses the Firecrawl API to scrape web pages
+  */
+ export declare class FirecrawlScraper {
+     private apiKey;
+     private apiUrl;
+     private defaultFormats;
+     private timeout;
+     constructor(config?: FirecrawlScraperConfig);
+     /**
+      * Scrape a single URL
+      * @param url URL to scrape
+      * @param options Scrape options
+      * @returns Scrape response
+      */
+     scrapeUrl(url: string, options?: FirecrawlScrapeOptions): Promise<[string, FirecrawlScrapeResponse]>;
+     /**
+      * Extract content from scrape response
+      * @param response Scrape response
+      * @returns Extracted content or empty string if not available
+      */
+     extractContent(response: FirecrawlScrapeResponse): string;
+     /**
+      * Extract metadata from scrape response
+      * @param response Scrape response
+      * @returns Metadata object
+      */
+     extractMetadata(response: FirecrawlScrapeResponse): ScrapeMetadata;
+ }
+ /**
+  * Create a Firecrawl scraper instance
+  * @param config Scraper configuration
+  * @returns Firecrawl scraper instance
+  */
+ export declare const createFirecrawlScraper: (config?: FirecrawlScraperConfig) => FirecrawlScraper;
+ export {};
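
The declarations above define the new Firecrawl scraping surface. A minimal usage sketch only; note that search/index.d.ts (further down) re-exports just the tool and the types, so the relative import path and the env var name here are assumptions:

```ts
// Hypothetical usage sketch; the import path and env var are assumptions, not part of the package.
import { createFirecrawlScraper } from './firecrawl';

const scraper = createFirecrawlScraper({
  apiKey: process.env.FIRECRAWL_API_KEY, // assumed env var
  formats: ['markdown'],
  timeout: 15000,
});

async function scrape(url: string): Promise<string> {
  // scrapeUrl resolves to a [url, response] tuple per the declaration above
  const [link, response] = await scraper.scrapeUrl(url, { waitFor: 1000 });
  if (!response.success) {
    throw new Error(response.error ?? `Failed to scrape ${link}`);
  }
  // extractContent returns the scraped text, or an empty string if unavailable
  return scraper.extractContent(response);
}
```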
package/dist/types/tools/search/format.d.ts ADDED
@@ -0,0 +1,2 @@
+ import type * as t from './types';
+ export declare function formatResultsForLLM(results: t.SearchResultData): string;
package/dist/types/tools/search/highlights.d.ts ADDED
@@ -0,0 +1,13 @@
+ import type * as t from './types';
+ /**
+  * Expand highlights in search results using smart boundary detection.
+  *
+  * This implementation finds natural text boundaries like paragraphs, sentences,
+  * and phrases to provide context while maintaining readability.
+  *
+  * @param searchResults - Search results object
+  * @param mainExpandBy - Primary expansion size on each side (default: 300)
+  * @param separatorExpandBy - Additional range to look for separators (default: 150)
+  * @returns Copy of search results with expanded highlights
+  */
+ export declare function expandHighlights(searchResults: t.SearchResultData, mainExpandBy?: number, separatorExpandBy?: number): t.SearchResultData;
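
Per the JSDoc above, expandHighlights takes a SearchResultData object and returns a copy whose highlight snippets are widened to nearby text boundaries. A small sketch of a call using the documented defaults; the input values are illustrative only:

```ts
import { expandHighlights } from './highlights';
import type { SearchResultData } from './types';

// Illustrative input: one organic source with a short reranked highlight
const results: SearchResultData = {
  organic: [
    {
      link: 'https://example.com/article',
      content: 'Full scraped page text from which the highlight was taken...',
      highlights: [{ score: 0.91, text: 'short reranked span' }],
    },
  ],
};

// Widen each highlight by ~300 chars per side, searching a further ~150 chars
// for a natural separator (paragraph, sentence, phrase) to cut on.
const expanded = expandHighlights(results, 300, 150);
```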
package/dist/types/tools/search/index.d.ts ADDED
@@ -0,0 +1,2 @@
+ export * from './tool';
+ export type * from './types';
package/dist/types/tools/search/rerankers.d.ts ADDED
@@ -0,0 +1,32 @@
+ import type * as t from './types';
+ export declare abstract class BaseReranker {
+     protected apiKey: string | undefined;
+     constructor();
+     abstract rerank(query: string, documents: string[], topK?: number): Promise<t.Highlight[]>;
+     protected getDefaultRanking(documents: string[], topK: number): t.Highlight[];
+     protected logDocumentSamples(documents: string[]): void;
+ }
+ export declare class JinaReranker extends BaseReranker {
+     constructor({ apiKey }: {
+         apiKey?: string;
+     });
+     rerank(query: string, documents: string[], topK?: number): Promise<t.Highlight[]>;
+ }
+ export declare class CohereReranker extends BaseReranker {
+     constructor({ apiKey }: {
+         apiKey?: string;
+     });
+     rerank(query: string, documents: string[], topK?: number): Promise<t.Highlight[]>;
+ }
+ export declare class InfinityReranker extends BaseReranker {
+     constructor();
+     rerank(query: string, documents: string[], topK?: number): Promise<t.Highlight[]>;
+ }
+ /**
+  * Creates the appropriate reranker based on type and configuration
+  */
+ export declare const createReranker: (config: {
+     rerankerType: t.RerankerType;
+     jinaApiKey?: string;
+     cohereApiKey?: string;
+ }) => BaseReranker | undefined;
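
createReranker picks a reranker implementation from the configured type and API keys. A hedged sketch of using the returned instance; the env var is an assumption, and the conditions under which undefined is returned are not visible from the declarations:

```ts
import { createReranker } from './rerankers';

const reranker = createReranker({
  rerankerType: 'jina', // 'infinity' | 'jina' | 'cohere' | 'none'
  jinaApiKey: process.env.JINA_API_KEY, // assumed env var
});

async function rankPassages(query: string, documents: string[]) {
  if (!reranker) {
    // createReranker is declared to possibly return undefined
    return [];
  }
  // rerank returns scored passages as Highlight[] ({ score, text })
  return reranker.rerank(query, documents, 5);
}
```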
package/dist/types/tools/search/search.d.ts ADDED
@@ -0,0 +1,9 @@
+ import type * as t from './types';
+ import { FirecrawlScraper } from './firecrawl';
+ export declare const createSearchAPI: (config: t.SearchConfig) => {
+     getSources: (query: string, numResults?: number, storedLocation?: string) => Promise<t.SearchResult>;
+ };
+ export declare const createSourceProcessor: (config?: t.ProcessSourcesConfig, scraperInstance?: FirecrawlScraper) => {
+     processSources: (result: t.SearchResult, numElements: number, query: string, proMode?: boolean) => Promise<t.SearchResultData>;
+     topResults: number;
+ };
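
These two factories appear to split the flow into fetching sources and processing them. A sketch of wiring them together, with placeholder keys and counts:

```ts
import { createSearchAPI, createSourceProcessor } from './search';
import type { SearchResultData } from './types';

const searchAPI = createSearchAPI({
  searchProvider: 'serper',
  serperApiKey: process.env.SERPER_API_KEY, // assumed env var
});

const processor = createSourceProcessor({ topResults: 5 });

async function searchAndProcess(query: string): Promise<SearchResultData> {
  const result = await searchAPI.getSources(query, 10);
  if (!result.success) {
    throw new Error(result.error ?? 'search failed');
  }
  // processSources presumably scrapes/filters the returned sources and
  // attaches content, attribution, and highlights to each ValidSource
  return processor.processSources(result, processor.topResults, query);
}
```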
package/dist/types/tools/search/tool.d.ts ADDED
@@ -0,0 +1,12 @@
+ import { z } from 'zod';
+ import { DynamicStructuredTool } from '@langchain/core/tools';
+ import type * as t from './types';
+ declare const SearchToolSchema: z.ZodObject<{
+     query: z.ZodString;
+ }, "strip", z.ZodTypeAny, {
+     query: string;
+ }, {
+     query: string;
+ }>;
+ export declare const createSearchTool: (config?: t.SearchToolConfig) => DynamicStructuredTool<typeof SearchToolSchema>;
+ export {};
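
createSearchTool is the piece that is re-exported from the package root (see the src/index.ts change below), so it can be imported from '@librechat/agents'. A sketch of constructing and invoking it; all config values are placeholders:

```ts
import { createSearchTool } from '@librechat/agents';

const searchTool = createSearchTool({
  searchProvider: 'serper',
  serperApiKey: process.env.SERPER_API_KEY,       // assumed env vars
  firecrawlApiKey: process.env.FIRECRAWL_API_KEY,
  rerankerType: 'none',
  onSearchResults: (results) => {
    console.log('search succeeded:', results.success);
  },
});

// A DynamicStructuredTool is a Runnable, so it can be invoked directly with
// the schema's shape, or passed to a graph via `tools: [searchTool]`
// (as the new search script below does with `tools: [createSearchTool()]`).
const output = await searchTool.invoke({ query: 'are massage guns good?' });
```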
package/dist/types/tools/search/types.d.ts ADDED
@@ -0,0 +1,150 @@
+ import type { RunnableConfig } from '@langchain/core/runnables';
+ import type { BaseReranker } from './rerankers';
+ export type SearchProvider = 'serper' | 'searxng';
+ export type RerankerType = 'infinity' | 'jina' | 'cohere' | 'none';
+ export interface OrganicResult {
+     position?: number;
+     title?: string;
+     link: string;
+     snippet?: string;
+     date?: string;
+ }
+ export interface TopStoryResult {
+     title?: string;
+     link: string;
+     source?: string;
+     date?: string;
+     imageUrl?: string;
+ }
+ export interface ImageResult {
+     title?: string;
+     imageUrl?: string;
+ }
+ export interface KnowledgeGraphResult {
+     title?: string;
+     type?: string;
+     description?: string;
+     attributes?: Record<string, string>;
+     imageUrl?: string;
+ }
+ export interface AnswerBoxResult {
+     title?: string;
+     answer?: string;
+     snippet?: string;
+     date?: string;
+ }
+ export interface PeopleAlsoAskResult {
+     question?: string;
+     answer?: string;
+ }
+ export interface Highlight {
+     score: number;
+     text: string;
+ }
+ export interface ValidSource {
+     link: string;
+     position?: number;
+     title?: string;
+     snippet?: string;
+     date?: string;
+     content?: string;
+     attribution?: string;
+     highlights?: Highlight[];
+ }
+ export interface SearchResultData {
+     organic?: ValidSource[];
+     topStories?: ValidSource[];
+     images?: ImageResult[];
+     knowledgeGraph?: KnowledgeGraphResult;
+     answerBox?: AnswerBoxResult;
+     peopleAlsoAsk?: PeopleAlsoAskResult[];
+     relatedSearches?: string[];
+     suggestions?: string[];
+     error?: string;
+ }
+ export interface SearchResult {
+     data?: SearchResultData;
+     error?: string;
+     success: boolean;
+ }
+ export interface Source {
+     link: string;
+     html?: string;
+     title?: string;
+     snippet?: string;
+     date?: string;
+ }
+ export interface SearchConfig {
+     searchProvider?: SearchProvider;
+     serperApiKey?: string;
+     searxngInstanceUrl?: string;
+     searxngApiKey?: string;
+ }
+ export interface ScrapeResult {
+     url: string;
+     error?: boolean;
+     content: string;
+     attribution?: string;
+     highlights?: Highlight[];
+ }
+ export interface ProcessSourcesConfig {
+     topResults?: number;
+     strategies?: string[];
+     filterContent?: boolean;
+     reranker?: BaseReranker;
+ }
+ export interface FirecrawlConfig {
+     firecrawlApiKey?: string;
+     firecrawlApiUrl?: string;
+     firecrawlFormats?: string[];
+ }
+ export interface ScraperContentResult {
+     content: string;
+ }
+ export interface ScraperExtractionResult {
+     no_extraction: ScraperContentResult;
+ }
+ export interface SearXNGResult {
+     title?: string;
+     url?: string;
+     content?: string;
+     publishedDate?: string;
+     img_src?: string;
+ }
+ export interface JinaRerankerResult {
+     index: number;
+     relevance_score: number;
+     document?: string | {
+         text: string;
+     };
+ }
+ export interface JinaRerankerResponse {
+     model: string;
+     usage: {
+         total_tokens: number;
+     };
+     results: JinaRerankerResult[];
+ }
+ export interface CohereRerankerResult {
+     index: number;
+     relevance_score: number;
+ }
+ export interface CohereRerankerResponse {
+     results: CohereRerankerResult[];
+     id: string;
+     meta: {
+         api_version: {
+             version: string;
+             is_experimental: boolean;
+         };
+         billed_units: {
+             search_units: number;
+         };
+     };
+ }
+ export interface SearchToolConfig extends SearchConfig, ProcessSourcesConfig, FirecrawlConfig {
+     jinaApiKey?: string;
+     cohereApiKey?: string;
+     rerankerType?: RerankerType;
+     onSearchResults?: (results: SearchResult, runnableConfig?: RunnableConfig) => void;
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@librechat/agents",
-   "version": "2.4.30",
+   "version": "2.4.311",
    "main": "./dist/cjs/main.cjs",
    "module": "./dist/esm/main.mjs",
    "types": "./dist/types/index.d.ts",
@@ -52,6 +52,7 @@
      "thinking": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/thinking.ts --name 'Jo' --location 'New York, NY'",
      "memory": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/memory.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
      "tool-test": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/tools.ts --provider 'alibaba' --name 'Jo' --location 'New York, NY'",
+     "search": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/search.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
      "abort": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/abort.ts --provider 'openAI' --name 'Jo' --location 'New York, NY'",
      "start:cli2": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/scripts/cli2.ts --provider 'anthropic' --name 'Jo' --location 'New York, NY'",
      "script2": "node -r dotenv/config --loader ./tsconfig-paths-bootstrap.mjs --experimental-specifier-resolution=node ./src/proto/example_test.ts",
package/src/common/enum.ts CHANGED
@@ -158,6 +158,7 @@ export enum Callback {
  export enum Constants {
    OFFICIAL_CODE_BASEURL = 'https://api.librechat.ai/v1',
    EXECUTE_CODE = 'execute_code',
+   WEB_SEARCH = 'web_search',
    CONTENT_AND_ARTIFACT = 'content_and_artifact',
  }
package/src/index.ts CHANGED
@@ -10,6 +10,7 @@ export * from './graphs';
  
  /* Tools */
  export * from './tools/CodeExecutor';
+ export * from './tools/search';
  
  /* Misc. */
  export * from './common';
package/src/scripts/search.ts ADDED
@@ -0,0 +1,141 @@
+ /* eslint-disable no-console */
+ // src/scripts/cli.ts
+ import { config } from 'dotenv';
+ config();
+ import { HumanMessage, BaseMessage } from '@langchain/core/messages';
+ import type * as t from '@/types';
+ import { ChatModelStreamHandler, createContentAggregator } from '@/stream';
+ import { ToolEndHandler, ModelEndHandler } from '@/events';
+ import { createSearchTool } from '@/tools/search';
+
+ import { getArgs } from '@/scripts/args';
+ import { Run } from '@/run';
+ import { GraphEvents, Callback } from '@/common';
+ import { getLLMConfig } from '@/utils/llmConfig';
+
+ const conversationHistory: BaseMessage[] = [];
+ async function testStandardStreaming(): Promise<void> {
+   const { userName, location, provider, currentDate } = await getArgs();
+   const { contentParts, aggregateContent } = createContentAggregator();
+   const customHandlers = {
+     [GraphEvents.TOOL_END]: new ToolEndHandler(),
+     [GraphEvents.CHAT_MODEL_END]: new ModelEndHandler(),
+     [GraphEvents.CHAT_MODEL_STREAM]: new ChatModelStreamHandler(),
+     [GraphEvents.ON_RUN_STEP_COMPLETED]: {
+       handle: (
+         event: GraphEvents.ON_RUN_STEP_COMPLETED,
+         data: t.StreamEventData
+       ): void => {
+         console.log('====== ON_RUN_STEP_COMPLETED ======');
+         // console.dir(data, { depth: null });
+         aggregateContent({
+           event,
+           data: data as unknown as { result: t.ToolEndEvent },
+         });
+       },
+     },
+     [GraphEvents.ON_RUN_STEP]: {
+       handle: (
+         event: GraphEvents.ON_RUN_STEP,
+         data: t.StreamEventData
+       ): void => {
+         console.log('====== ON_RUN_STEP ======');
+         console.dir(data, { depth: null });
+         aggregateContent({ event, data: data as t.RunStep });
+       },
+     },
+     [GraphEvents.ON_RUN_STEP_DELTA]: {
+       handle: (
+         event: GraphEvents.ON_RUN_STEP_DELTA,
+         data: t.StreamEventData
+       ): void => {
+         console.log('====== ON_RUN_STEP_DELTA ======');
+         console.dir(data, { depth: null });
+         aggregateContent({ event, data: data as t.RunStepDeltaEvent });
+       },
+     },
+     [GraphEvents.ON_MESSAGE_DELTA]: {
+       handle: (
+         event: GraphEvents.ON_MESSAGE_DELTA,
+         data: t.StreamEventData
+       ): void => {
+         console.log('====== ON_MESSAGE_DELTA ======');
+         console.dir(data, { depth: null });
+         aggregateContent({ event, data: data as t.MessageDeltaEvent });
+       },
+     },
+     [GraphEvents.TOOL_START]: {
+       handle: (
+         _event: string,
+         data: t.StreamEventData,
+         metadata?: Record<string, unknown>
+       ): void => {
+         console.log('====== TOOL_START ======');
+         // console.dir(data, { depth: null });
+       },
+     },
+   };
+
+   const llmConfig = getLLMConfig(provider);
+
+   const run = await Run.create<t.IState>({
+     runId: 'test-run-id',
+     graphConfig: {
+       type: 'standard',
+       llmConfig,
+       tools: [createSearchTool()],
+       instructions:
+         'You are a friendly AI assistant. Always address the user by their name.',
+       additional_instructions: `The user's name is ${userName} and they are located in ${location}.`,
+     },
+     returnContent: true,
+     customHandlers,
+   });
+
+   const config = {
+     configurable: {
+       provider,
+       thread_id: 'conversation-num-1',
+     },
+     streamMode: 'values',
+     version: 'v2' as const,
+   };
+
+   console.log('Test 1: Weather query (content parts test)');
+
+   // const userMessage = `
+   // Make a search for the weather in ${location} today, which is ${currentDate}.
+   // Before making the search, please let me know what you're about to do, then immediately start searching without hesitation.
+   // Make sure to always refer to me by name, which is ${userName}.
+   // After giving me a thorough summary, tell me a joke about the weather forecast we went over.
+   // `;
+   const userMessage = 'Are massage guns good?';
+
+   conversationHistory.push(new HumanMessage(userMessage));
+
+   const inputs = {
+     messages: conversationHistory,
+   };
+   const finalContentParts = await run.processStream(inputs, config);
+   const finalMessages = run.getRunMessages();
+   if (finalMessages) {
+     conversationHistory.push(...finalMessages);
+     console.dir(conversationHistory, { depth: null });
+   }
+   // console.dir(finalContentParts, { depth: null });
+   console.log('\n\n====================\n\n');
+   // console.dir(contentParts, { depth: null });
+ }
+
+ process.on('unhandledRejection', (reason, promise) => {
+   console.error('Unhandled Rejection at:', promise, 'reason:', reason);
+   console.log('Conversation history:');
+   process.exit(1);
+ });
+
+ testStandardStreaming().catch((err) => {
+   console.error(err);
+   console.log('Conversation history:');
+   console.dir(conversationHistory, { depth: null });
+   process.exit(1);
+ });