@langchain/core 0.2.2 → 0.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,10 +4,7 @@ import { test } from "@jest/globals";
  import { ChatPromptTemplate, HumanMessagePromptTemplate, PromptTemplate, SystemMessagePromptTemplate, } from "../../prompts/index.js";
  import { Document } from "../../documents/document.js";
  import { RunnableSequence, RunnableMap } from "../base.js";
- import { FakeLLM, FakeStreamingLLM, FakeChatModel, FakeRetriever, } from "../../utils/testing/index.js";
- import { SystemMessage, HumanMessage } from "../../messages/index.js";
- import { CommaSeparatedListOutputParser } from "../../output_parsers/list.js";
- import { ChatPromptValue } from "../../prompt_values.js";
+ import { FakeLLM, FakeChatModel, FakeRetriever, } from "../../utils/testing/index.js";
  test("Runnable streamLog method", async () => {
    const promptTemplate = PromptTemplate.fromTemplate("{input}");
    const llm = new FakeLLM({});
@@ -72,211 +69,3 @@ test("Runnable streamLog method with a more complicated sequence", async () => {
    expect(finalState.state.logs.CUSTOM_NAME).toBeDefined();
    expect(finalState.state.logs.CUSTOM_NAME.final_output.output).toEqual(JSON.stringify(retrieverOutputDocs));
  });
- test("Test stream log aggregation", async () => {
-   const prompt = ChatPromptTemplate.fromMessages([
-     ["system", "You are a nice assistant"],
-     ["human", "{question}"],
-   ]);
-   const llm = new FakeStreamingLLM({
-     responses: ["tomato, lettuce, onion"],
-   });
-   const parser = new CommaSeparatedListOutputParser({});
-   const chain = prompt.pipe(llm).pipe(parser);
-   const logStream = await chain.streamLog({
-     question: "what is up?",
-   });
-   const chunks = [];
-   for await (const chunk of logStream) {
-     chunks.push(chunk);
-   }
-   expect(chunks).toMatchObject([
-     {
-       ops: [
-         {
-           op: "replace",
-           path: "",
-           value: {
-             id: expect.any(String),
-             streamed_output: [],
-             logs: {},
-           },
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/logs/ChatPromptTemplate",
-           value: {
-             id: expect.any(String),
-             name: "ChatPromptTemplate",
-             type: "prompt",
-             tags: ["seq:step:1"],
-             metadata: {},
-             start_time: expect.any(String),
-             streamed_output: [],
-             streamed_output_str: [],
-           },
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/logs/ChatPromptTemplate/final_output",
-           value: new ChatPromptValue([
-             new SystemMessage("You are a nice assistant"),
-             new HumanMessage("what is up?"),
-           ]),
-         },
-         {
-           op: "add",
-           path: "/logs/ChatPromptTemplate/end_time",
-           value: expect.any(String),
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/logs/FakeStreamingLLM",
-           value: {
-             id: expect.any(String),
-             name: "FakeStreamingLLM",
-             type: "llm",
-             tags: ["seq:step:2"],
-             metadata: {},
-             start_time: expect.any(String),
-             streamed_output: [],
-             streamed_output_str: [],
-           },
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/logs/CommaSeparatedListOutputParser",
-           value: {
-             id: expect.any(String),
-             name: "CommaSeparatedListOutputParser",
-             type: "parser",
-             tags: ["seq:step:3"],
-             metadata: {},
-             start_time: expect.any(String),
-             streamed_output: [],
-             streamed_output_str: [],
-           },
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/logs/CommaSeparatedListOutputParser/streamed_output/-",
-           value: ["tomato"],
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/streamed_output/-",
-           value: ["tomato"],
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/logs/CommaSeparatedListOutputParser/streamed_output/-",
-           value: ["lettuce"],
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/streamed_output/-",
-           value: ["lettuce"],
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/logs/FakeStreamingLLM/final_output",
-           value: {
-             generations: [
-               [
-                 {
-                   text: "tomato, lettuce, onion",
-                   generationInfo: {},
-                 },
-               ],
-             ],
-           },
-         },
-         {
-           op: "add",
-           path: "/logs/FakeStreamingLLM/end_time",
-           value: expect.any(String),
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/logs/CommaSeparatedListOutputParser/streamed_output/-",
-           value: ["onion"],
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/streamed_output/-",
-           value: ["onion"],
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "add",
-           path: "/logs/CommaSeparatedListOutputParser/final_output",
-           value: {
-             output: ["tomato", "lettuce", "onion"],
-           },
-         },
-         {
-           op: "add",
-           path: "/logs/CommaSeparatedListOutputParser/end_time",
-           value: expect.any(String),
-         },
-       ],
-     },
-     {
-       ops: [
-         {
-           op: "replace",
-           path: "/final_output",
-           value: {
-             output: ["tomato", "lettuce", "onion"],
-           },
-         },
-       ],
-     },
-   ]);
- });
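The removed test above also documents the `streamLog` contract: each chunk yielded by `streamLog` carries an `ops` array of JSON Patch operations against the aggregated run log, using paths such as `/logs/<step>/streamed_output/-`, `/streamed_output/-`, and `/final_output`. A minimal consumer, sketched here with the `FakeLLM` test helper purely for illustration (not code from the package), could look like:

import { PromptTemplate } from "@langchain/core/prompts";
import { FakeLLM } from "@langchain/core/utils/testing";

// Sketch only: FakeLLM echoes its prompt; any runnable chain behaves the same way.
const chain = PromptTemplate.fromTemplate("{input}").pipe(new FakeLLM({}));

const logStream = await chain.streamLog({ input: "hello" });
for await (const chunk of logStream) {
  // chunk.ops is a list of JSON Patch operations, e.g.
  // { op: "add", path: "/streamed_output/-", value: ... }
  for (const op of chunk.ops) {
    console.log(op.op, op.path);
  }
}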
@@ -11,28 +11,15 @@ class MockAsyncLocalStorage {
      }
  }
  exports.MockAsyncLocalStorage = MockAsyncLocalStorage;
+ const mockAsyncLocalStorage = new MockAsyncLocalStorage();
  class AsyncLocalStorageProvider {
-     constructor() {
-         Object.defineProperty(this, "asyncLocalStorage", {
-             enumerable: true,
-             configurable: true,
-             writable: true,
-             value: new MockAsyncLocalStorage()
-         });
-         Object.defineProperty(this, "hasBeenInitialized", {
-             enumerable: true,
-             configurable: true,
-             writable: true,
-             value: false
-         });
-     }
      getInstance() {
-         return this.asyncLocalStorage;
+         return (globalThis.__lc_tracing_async_local_storage ??
+             mockAsyncLocalStorage);
      }
      initializeGlobalInstance(instance) {
-         if (!this.hasBeenInitialized) {
-             this.hasBeenInitialized = true;
-             this.asyncLocalStorage = instance;
+         if (globalThis.__lc_tracing_async_local_storage === undefined) {
+             globalThis.__lc_tracing_async_local_storage = instance;
          }
      }
  }
@@ -7,8 +7,6 @@ export declare class MockAsyncLocalStorage implements AsyncLocalStorageInterface
      run<T>(_store: any, callback: () => T): T;
  }
  declare class AsyncLocalStorageProvider {
-     private asyncLocalStorage;
-     private hasBeenInitialized;
      getInstance(): AsyncLocalStorageInterface;
      initializeGlobalInstance(instance: AsyncLocalStorageInterface): void;
  }
@@ -7,28 +7,15 @@ export class MockAsyncLocalStorage {
          return callback();
      }
  }
+ const mockAsyncLocalStorage = new MockAsyncLocalStorage();
  class AsyncLocalStorageProvider {
-     constructor() {
-         Object.defineProperty(this, "asyncLocalStorage", {
-             enumerable: true,
-             configurable: true,
-             writable: true,
-             value: new MockAsyncLocalStorage()
-         });
-         Object.defineProperty(this, "hasBeenInitialized", {
-             enumerable: true,
-             configurable: true,
-             writable: true,
-             value: false
-         });
-     }
      getInstance() {
-         return this.asyncLocalStorage;
+         return (globalThis.__lc_tracing_async_local_storage ??
+             mockAsyncLocalStorage);
      }
      initializeGlobalInstance(instance) {
-         if (!this.hasBeenInitialized) {
-             this.hasBeenInitialized = true;
-             this.asyncLocalStorage = instance;
+         if (globalThis.__lc_tracing_async_local_storage === undefined) {
+             globalThis.__lc_tracing_async_local_storage = instance;
          }
      }
  }
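These hunks (repeated for the CJS build, the type declarations, and the ESM build) move the AsyncLocalStorage singleton off the provider instance and onto `globalThis.__lc_tracing_async_local_storage`, so duplicate copies of `@langchain/core` loaded in one process resolve to the same storage. A standalone sketch of the pattern, not the package's literal source:

// Minimal sketch of the globalThis-backed singleton used above.
interface AsyncLocalStorageInterface {
  getStore: () => unknown | undefined;
  run: <T>(store: unknown, callback: () => T) => T;
}

declare global {
  // eslint-disable-next-line no-var
  var __lc_tracing_async_local_storage: AsyncLocalStorageInterface | undefined;
}

// No-op fallback used until a real AsyncLocalStorage is registered.
const mockAsyncLocalStorage: AsyncLocalStorageInterface = {
  getStore: () => undefined,
  run: (_store, callback) => callback(),
};

function getInstance(): AsyncLocalStorageInterface {
  // Every module copy reads the same global slot.
  return globalThis.__lc_tracing_async_local_storage ?? mockAsyncLocalStorage;
}

function initializeGlobalInstance(instance: AsyncLocalStorageInterface): void {
  // First initializer wins; later (duplicate) module copies are ignored.
  if (globalThis.__lc_tracing_async_local_storage === undefined) {
    globalThis.__lc_tracing_async_local_storage = instance;
  }
}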
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.EventStreamCallbackHandler = void 0;
+ exports.EventStreamCallbackHandler = exports.isStreamEventsHandler = void 0;
  const base_js_1 = require("./base.cjs");
  const stream_js_1 = require("../utils/stream.cjs");
  const ai_js_1 = require("../messages/ai.cjs");
@@ -17,6 +17,8 @@ function assignName({ name, serialized, }) {
      }
      return "Unnamed";
  }
+ const isStreamEventsHandler = (handler) => handler.name === "event_stream_tracer";
+ exports.isStreamEventsHandler = isStreamEventsHandler;
  /**
   * Class that extends the `BaseTracer` class from the
   * `langchain.callbacks.tracers.base` module. It represents a callback
@@ -1,5 +1,5 @@
  import { BaseTracer, type Run } from "./base.js";
- import { BaseCallbackHandlerInput } from "../callbacks/base.js";
+ import { BaseCallbackHandler, BaseCallbackHandlerInput } from "../callbacks/base.js";
  import { IterableReadableStream } from "../utils/stream.js";
  /**
   * Data associated with a StreamEvent.
@@ -94,6 +94,7 @@ export interface EventStreamCallbackHandlerInput extends BaseCallbackHandlerInpu
      excludeTypes?: string[];
      excludeTags?: string[];
  }
+ export declare const isStreamEventsHandler: (handler: BaseCallbackHandler) => handler is EventStreamCallbackHandler;
  /**
   * Class that extends the `BaseTracer` class from the
   * `langchain.callbacks.tracers.base` module. It represents a callback
@@ -14,6 +14,7 @@ function assignName({ name, serialized, }) {
      }
      return "Unnamed";
  }
+ export const isStreamEventsHandler = (handler) => handler.name === "event_stream_tracer";
  /**
   * Class that extends the `BaseTracer` class from the
   * `langchain.callbacks.tracers.base` module. It represents a callback
@@ -1,6 +1,6 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.LogStreamCallbackHandler = exports.RunLog = exports.RunLogPatch = void 0;
+ exports.LogStreamCallbackHandler = exports.isLogStreamHandler = exports.RunLog = exports.RunLogPatch = void 0;
  const index_js_1 = require("../utils/fast-json-patch/index.cjs");
  const base_js_1 = require("./base.cjs");
  const stream_js_1 = require("../utils/stream.cjs");
@@ -59,6 +59,8 @@ class RunLog extends RunLogPatch {
      }
  }
  exports.RunLog = RunLog;
+ const isLogStreamHandler = (handler) => handler.name === "log_stream_tracer";
+ exports.isLogStreamHandler = isLogStreamHandler;
  /**
   * Extract standardized inputs from a run.
   *
@@ -1,6 +1,6 @@
  import { type Operation as JSONPatchOperation } from "../utils/fast-json-patch/index.js";
  import { BaseTracer, type Run } from "./base.js";
- import { BaseCallbackHandlerInput, HandleLLMNewTokenCallbackFields } from "../callbacks/base.js";
+ import { BaseCallbackHandler, BaseCallbackHandlerInput, HandleLLMNewTokenCallbackFields } from "../callbacks/base.js";
  import { IterableReadableStream } from "../utils/stream.js";
  import type { StreamEvent, StreamEventData } from "./event_stream.js";
  export type { StreamEvent, StreamEventData };
@@ -83,6 +83,7 @@ export interface LogStreamCallbackHandlerInput extends BaseCallbackHandlerInput
      excludeTags?: string[];
      _schemaFormat?: SchemaFormat;
  }
+ export declare const isLogStreamHandler: (handler: BaseCallbackHandler) => handler is LogStreamCallbackHandler;
  /**
   * Class that extends the `BaseTracer` class from the
   * `langchain.callbacks.tracers.base` module. It represents a callback
@@ -54,6 +54,7 @@ export class RunLog extends RunLogPatch {
          });
      }
  }
+ export const isLogStreamHandler = (handler) => handler.name === "log_stream_tracer";
  /**
   * Extract standardized inputs from a run.
   *
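Both tracer modules now export a type guard keyed on the handler's `name` ("event_stream_tracer" / "log_stream_tracer"), narrowing a `BaseCallbackHandler` to the concrete handler class. A sketch of how such guards can be applied to a handler list; the `partitionHandlers` helper is hypothetical, not part of the package:

import { isStreamEventsHandler } from "@langchain/core/tracers/event_stream";
import { isLogStreamHandler } from "@langchain/core/tracers/log_stream";
import type { BaseCallbackHandler } from "@langchain/core/callbacks/base";

// Hypothetical helper: split handlers by kind using the new type guards.
function partitionHandlers(handlers: BaseCallbackHandler[]) {
  return {
    eventStream: handlers.filter(isStreamEventsHandler), // EventStreamCallbackHandler[]
    logStream: handlers.filter(isLogStreamHandler), // LogStreamCallbackHandler[]
    other: handlers.filter(
      (h) => !isStreamEventsHandler(h) && !isLogStreamHandler(h)
    ),
  };
}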
@@ -2,6 +2,7 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.LangChainTracer = void 0;
  const langsmith_1 = require("langsmith");
+ const traceable_1 = require("langsmith/singletons/traceable");
  const env_js_1 = require("../utils/env.cjs");
  const base_js_1 = require("./base.cjs");
  class LangChainTracer extends base_js_1.BaseTracer {
@@ -38,6 +39,38 @@ class LangChainTracer extends base_js_1.BaseTracer {
              (0, env_js_1.getEnvironmentVariable)("LANGCHAIN_SESSION");
          this.exampleId = exampleId;
          this.client = client ?? new langsmith_1.Client({});
+         // if we're inside traceable, we can obtain the traceable tree
+         // and populate the run map, which is used to correctly
+         // infer dotted order and execution order
+         const traceableTree = this.getTraceableRunTree();
+         if (traceableTree) {
+             let rootRun = traceableTree;
+             const visited = new Set();
+             while (rootRun.parent_run) {
+                 if (visited.has(rootRun.id))
+                     break;
+                 visited.add(rootRun.id);
+                 if (!rootRun.parent_run)
+                     break;
+                 rootRun = rootRun.parent_run;
+             }
+             visited.clear();
+             const queue = [rootRun];
+             while (queue.length > 0) {
+                 const current = queue.shift();
+                 if (!current || visited.has(current.id))
+                     continue;
+                 visited.add(current.id);
+                 // @ts-expect-error Types of property 'events' are incompatible.
+                 this.runMap.set(current.id, current);
+                 if (current.child_runs) {
+                     queue.push(...current.child_runs);
+                 }
+             }
+             this.client = traceableTree.client ?? this.client;
+             this.projectName = traceableTree.project_name ?? this.projectName;
+             this.exampleId = traceableTree.reference_example_id ?? this.exampleId;
+         }
      }
      async _convertToCreate(run, example_id = undefined) {
          return {
@@ -72,5 +105,13 @@ class LangChainTracer extends base_js_1.BaseTracer {
      getRun(id) {
          return this.runMap.get(id);
      }
+     getTraceableRunTree() {
+         try {
+             return (0, traceable_1.getCurrentRunTree)();
+         }
+         catch {
+             return undefined;
+         }
+     }
  }
  exports.LangChainTracer = LangChainTracer;
@@ -1,4 +1,5 @@
  import { Client } from "langsmith";
+ import { RunTree } from "langsmith/run_trees";
  import { BaseRun, RunCreate, RunUpdate as BaseRunUpdate, KVMap } from "langsmith/schemas";
  import { BaseTracer } from "./base.js";
  import { BaseCallbackHandlerInput } from "../callbacks/base.js";
@@ -35,4 +36,5 @@ export declare class LangChainTracer extends BaseTracer implements LangChainTrac
      onRunCreate(run: Run): Promise<void>;
      onRunUpdate(run: Run): Promise<void>;
      getRun(id: string): Run | undefined;
+     getTraceableRunTree(): RunTree | undefined;
  }
@@ -1,4 +1,5 @@
  import { Client } from "langsmith";
+ import { getCurrentRunTree } from "langsmith/singletons/traceable";
  import { getEnvironmentVariable, getRuntimeEnvironment } from "../utils/env.js";
  import { BaseTracer } from "./base.js";
  export class LangChainTracer extends BaseTracer {
@@ -35,6 +36,38 @@ export class LangChainTracer extends BaseTracer {
              getEnvironmentVariable("LANGCHAIN_SESSION");
          this.exampleId = exampleId;
          this.client = client ?? new Client({});
+         // if we're inside traceable, we can obtain the traceable tree
+         // and populate the run map, which is used to correctly
+         // infer dotted order and execution order
+         const traceableTree = this.getTraceableRunTree();
+         if (traceableTree) {
+             let rootRun = traceableTree;
+             const visited = new Set();
+             while (rootRun.parent_run) {
+                 if (visited.has(rootRun.id))
+                     break;
+                 visited.add(rootRun.id);
+                 if (!rootRun.parent_run)
+                     break;
+                 rootRun = rootRun.parent_run;
+             }
+             visited.clear();
+             const queue = [rootRun];
+             while (queue.length > 0) {
+                 const current = queue.shift();
+                 if (!current || visited.has(current.id))
+                     continue;
+                 visited.add(current.id);
+                 // @ts-expect-error Types of property 'events' are incompatible.
+                 this.runMap.set(current.id, current);
+                 if (current.child_runs) {
+                     queue.push(...current.child_runs);
+                 }
+             }
+             this.client = traceableTree.client ?? this.client;
+             this.projectName = traceableTree.project_name ?? this.projectName;
+             this.exampleId = traceableTree.reference_example_id ?? this.exampleId;
+         }
      }
      async _convertToCreate(run, example_id = undefined) {
          return {
@@ -69,4 +102,12 @@ export class LangChainTracer extends BaseTracer {
      getRun(id) {
          return this.runMap.get(id);
      }
+     getTraceableRunTree() {
+         try {
+             return getCurrentRunTree();
+         }
+         catch {
+             return undefined;
+         }
+     }
  }
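With the constructor change above, a LangChainTracer created inside a traceable()-wrapped function seeds its runMap, client, project name, and example id from getCurrentRunTree(), so LangChain runs nest under the surrounding LangSmith run instead of starting a separate trace. A usage sketch; the chain and the "answer_question" name are illustrative assumptions, not package code:

import { traceable } from "langsmith/traceable";
import { PromptTemplate } from "@langchain/core/prompts";
import { FakeLLM } from "@langchain/core/utils/testing";

// Sketch: FakeLLM stands in for a real model. The .invoke() below is traced
// as a child of the surrounding "answer_question" run because LangChainTracer
// now picks up the current run tree from getCurrentRunTree().
const chain = PromptTemplate.fromTemplate("{question}").pipe(new FakeLLM({}));

const answerQuestion = traceable(
  async (question: string) => chain.invoke({ question }),
  { name: "answer_question" }
);

await answerQuestion("what is up?");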
@@ -130,7 +130,7 @@ class FakeStreamingLLM extends llms_js_1.LLM {
          this.responses = this.responses?.slice(1);
          return response ?? prompt;
      }
-     async *_streamResponseChunks(input) {
+     async *_streamResponseChunks(input, _options, runManager) {
          if (this.thrownErrorString) {
              throw new Error(this.thrownErrorString);
          }
@@ -139,6 +139,7 @@ class FakeStreamingLLM extends llms_js_1.LLM {
          for (const c of response ?? input) {
              await new Promise((resolve) => setTimeout(resolve, this.sleep));
              yield { text: c, generationInfo: {} };
+             await runManager?.handleLLMNewToken(c);
          }
      }
  }
@@ -51,7 +51,7 @@ export declare class FakeStreamingLLM extends LLM {
      } & BaseLLMParams);
      _llmType(): string;
      _call(prompt: string): Promise<string>;
-     _streamResponseChunks(input: string): AsyncGenerator<GenerationChunk, void, unknown>;
+     _streamResponseChunks(input: string, _options?: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk, void, unknown>;
  }
  export declare class FakeChatModel extends BaseChatModel {
      _combineLLMOutput(): never[];
@@ -124,7 +124,7 @@ export class FakeStreamingLLM extends LLM {
          this.responses = this.responses?.slice(1);
          return response ?? prompt;
      }
-     async *_streamResponseChunks(input) {
+     async *_streamResponseChunks(input, _options, runManager) {
          if (this.thrownErrorString) {
              throw new Error(this.thrownErrorString);
          }
@@ -133,6 +133,7 @@ export class FakeStreamingLLM extends LLM {
          for (const c of response ?? input) {
              await new Promise((resolve) => setTimeout(resolve, this.sleep));
              yield { text: c, generationInfo: {} };
+             await runManager?.handleLLMNewToken(c);
          }
      }
  }
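FakeStreamingLLM._streamResponseChunks now accepts the run manager and reports each streamed character through handleLLMNewToken, so callback handlers (including the event-stream and log-stream tracers above) observe per-token events from the test helper. A sketch of observing those tokens; the inline handler object and logging are assumptions for illustration:

import { FakeStreamingLLM } from "@langchain/core/utils/testing";

const llm = new FakeStreamingLLM({ responses: ["Hi there"] });

// Each streamed character is now also delivered to handleLLMNewToken.
const stream = await llm.stream("Hi there", {
  callbacks: [
    {
      handleLLMNewToken: async (token: string) => {
        console.log("token:", token);
      },
    },
  ],
});

for await (const chunk of stream) {
  // chunk is the same character the callback just received
}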
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@langchain/core",
-   "version": "0.2.2",
+   "version": "0.2.4",
    "description": "Core LangChain.js abstractions and schemas",
    "type": "module",
    "engines": {
@@ -45,7 +45,7 @@
    "camelcase": "6",
    "decamelize": "1.2.0",
    "js-tiktoken": "^1.0.12",
-   "langsmith": "~0.1.7",
+   "langsmith": "~0.1.30",
    "ml-distance": "^4.0.0",
    "mustache": "^4.2.0",
    "p-queue": "^6.6.2",