langchain 0.0.134 → 0.0.136

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/dist/agents/chat_convo/outputParser.cjs +13 -10
  2. package/dist/agents/chat_convo/outputParser.js +13 -10
  3. package/dist/callbacks/base.d.ts +6 -3
  4. package/dist/callbacks/handlers/tracer.cjs +2 -2
  5. package/dist/callbacks/handlers/tracer.d.ts +2 -2
  6. package/dist/callbacks/handlers/tracer.js +2 -2
  7. package/dist/callbacks/manager.cjs +2 -2
  8. package/dist/callbacks/manager.d.ts +2 -2
  9. package/dist/callbacks/manager.js +2 -2
  10. package/dist/chains/question_answering/map_reduce_prompts.d.ts +2 -2
  11. package/dist/chains/retrieval_qa.cjs +1 -1
  12. package/dist/chains/retrieval_qa.js +1 -1
  13. package/dist/chat_models/openai.cjs +10 -5
  14. package/dist/chat_models/openai.js +10 -5
  15. package/dist/llms/writer.cjs +167 -0
  16. package/dist/llms/writer.d.ts +60 -0
  17. package/dist/llms/writer.js +163 -0
  18. package/dist/load/import_constants.cjs +2 -0
  19. package/dist/load/import_constants.js +2 -0
  20. package/dist/load/import_map.cjs +2 -1
  21. package/dist/load/import_map.d.ts +1 -0
  22. package/dist/load/import_map.js +1 -0
  23. package/dist/memory/summary_buffer.d.ts +1 -1
  24. package/dist/output_parsers/prompts.d.ts +1 -1
  25. package/dist/prompts/prompt.cjs +3 -1
  26. package/dist/prompts/prompt.d.ts +12 -1
  27. package/dist/prompts/prompt.js +3 -1
  28. package/dist/retrievers/parent_document.cjs +8 -1
  29. package/dist/retrievers/parent_document.d.ts +2 -0
  30. package/dist/retrievers/parent_document.js +8 -1
  31. package/dist/retrievers/score_threshold.cjs +45 -0
  32. package/dist/retrievers/score_threshold.d.ts +15 -0
  33. package/dist/retrievers/score_threshold.js +41 -0
  34. package/dist/sql_db.cjs +8 -1
  35. package/dist/sql_db.d.ts +1 -0
  36. package/dist/sql_db.js +8 -1
  37. package/dist/stores/message/mongodb.cjs +48 -0
  38. package/dist/stores/message/mongodb.d.ts +15 -0
  39. package/dist/stores/message/mongodb.js +44 -0
  40. package/dist/tools/index.cjs +3 -1
  41. package/dist/tools/index.d.ts +1 -0
  42. package/dist/tools/index.js +1 -0
  43. package/dist/tools/wolframalpha.cjs +40 -0
  44. package/dist/tools/wolframalpha.d.ts +12 -0
  45. package/dist/tools/wolframalpha.js +36 -0
  46. package/dist/util/sql_utils.cjs +8 -2
  47. package/dist/util/sql_utils.d.ts +2 -1
  48. package/dist/util/sql_utils.js +8 -2
  49. package/dist/vectorstores/chroma.cjs +8 -0
  50. package/dist/vectorstores/chroma.d.ts +4 -1
  51. package/dist/vectorstores/chroma.js +8 -0
  52. package/dist/vectorstores/redis.cjs +9 -1
  53. package/dist/vectorstores/redis.js +9 -1
  54. package/dist/vectorstores/vectara.cjs +1 -1
  55. package/dist/vectorstores/vectara.js +1 -1
  56. package/llms/writer.cjs +1 -0
  57. package/llms/writer.d.ts +1 -0
  58. package/llms/writer.js +1 -0
  59. package/package.json +30 -1
  60. package/retrievers/score_threshold.cjs +1 -0
  61. package/retrievers/score_threshold.d.ts +1 -0
  62. package/retrievers/score_threshold.js +1 -0
  63. package/stores/message/mongodb.cjs +1 -0
  64. package/stores/message/mongodb.d.ts +1 -0
  65. package/stores/message/mongodb.js +1 -0
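Besides the hunks reproduced below, this range ships three new public modules whose diffs are not shown here: a score-threshold retriever, a MongoDB-backed chat message history, and a WolframAlpha tool. The sketch below is inferred from the file paths above and this version's typings; treat the exact option names (`minSimilarityScore`, `maxK`, `sessionId`, `appid`) as assumptions, not confirmed by this diff.

```ts
import { ScoreThresholdRetriever } from "langchain/retrievers/score_threshold";
import { MongoDBChatMessageHistory } from "langchain/stores/message/mongodb";
import { WolframAlphaTool } from "langchain/tools";
import type { VectorStore } from "langchain/vectorstores/base";
import type { Collection } from "mongodb";

declare const vectorStore: VectorStore; // assumed to exist
declare const collection: Collection;   // assumed to exist

// Retriever that keeps widening k until results fall below a similarity score.
const retriever = ScoreThresholdRetriever.fromVectorStore(vectorStore, {
  minSimilarityScore: 0.9,
  maxK: 100,
});

// Chat message history persisted to a MongoDB collection.
const history = new MongoDBChatMessageHistory({
  collection,
  sessionId: "session-1",
});

// WolframAlpha tool; the App ID comes from the WolframAlpha developer portal.
const wolfram = new WolframAlphaTool({ appid: "YOUR_APP_ID" });
```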
package/dist/agents/chat_convo/outputParser.cjs
@@ -36,16 +36,19 @@ class ChatConversationalAgentOutputParser extends types_js_1.AgentActionOutputPa
      */
     async parse(text) {
         let jsonOutput = text.trim();
-        if (jsonOutput.includes("```json")) {
-            jsonOutput = jsonOutput.split("```json")[1].trimStart();
-        }
-        else if (jsonOutput.includes("```")) {
-            const firstIndex = jsonOutput.indexOf("```");
-            jsonOutput = jsonOutput.slice(firstIndex + 3).trimStart();
-        }
-        const lastIndex = jsonOutput.lastIndexOf("```");
-        if (lastIndex !== -1) {
-            jsonOutput = jsonOutput.slice(0, lastIndex).trimEnd();
+        if (jsonOutput.includes("```json") || jsonOutput.includes("```")) {
+            const testString = jsonOutput.includes("```json") ? "```json" : "```";
+            const firstIndex = jsonOutput.indexOf(testString);
+            const actionInputIndex = jsonOutput.indexOf("action_input");
+            if (actionInputIndex > firstIndex) {
+                jsonOutput = jsonOutput
+                    .slice(firstIndex + testString.length)
+                    .trimStart();
+                const lastIndex = jsonOutput.lastIndexOf("```");
+                if (lastIndex !== -1) {
+                    jsonOutput = jsonOutput.slice(0, lastIndex).trimEnd();
+                }
+            }
         }
         try {
             const response = JSON.parse(jsonOutput);
package/dist/agents/chat_convo/outputParser.js
@@ -33,16 +33,19 @@ export class ChatConversationalAgentOutputParser extends AgentActionOutputParser
      */
     async parse(text) {
         let jsonOutput = text.trim();
-        if (jsonOutput.includes("```json")) {
-            jsonOutput = jsonOutput.split("```json")[1].trimStart();
-        }
-        else if (jsonOutput.includes("```")) {
-            const firstIndex = jsonOutput.indexOf("```");
-            jsonOutput = jsonOutput.slice(firstIndex + 3).trimStart();
-        }
-        const lastIndex = jsonOutput.lastIndexOf("```");
-        if (lastIndex !== -1) {
-            jsonOutput = jsonOutput.slice(0, lastIndex).trimEnd();
+        if (jsonOutput.includes("```json") || jsonOutput.includes("```")) {
+            const testString = jsonOutput.includes("```json") ? "```json" : "```";
+            const firstIndex = jsonOutput.indexOf(testString);
+            const actionInputIndex = jsonOutput.indexOf("action_input");
+            if (actionInputIndex > firstIndex) {
+                jsonOutput = jsonOutput
+                    .slice(firstIndex + testString.length)
+                    .trimStart();
+                const lastIndex = jsonOutput.lastIndexOf("```");
+                if (lastIndex !== -1) {
+                    jsonOutput = jsonOutput.slice(0, lastIndex).trimEnd();
+                }
+            }
         }
         try {
             const response = JSON.parse(jsonOutput);
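The practical effect of this parser change: a fenced code block inside the agent's `action_input` is no longer sliced away. A minimal behavioral sketch (not from the diff; the fence string is built at runtime only so this sample can itself sit in a fenced block):

```ts
const fence = "`".repeat(3); // the three-backtick fence

// A) Whole payload wrapped in a fenced json block: "action_input" appears
//    after the opening fence, so both fences are stripped before JSON.parse,
//    exactly as in 0.0.134.
const wrapped = `${fence}json\n{"action": "Final Answer", "action_input": "42"}\n${fence}`;

// B) Bare JSON whose action_input itself contains a fenced snippet: the first
//    fence now appears after "action_input", so 0.0.136 leaves the text
//    intact, where the 0.0.134 logic would have cut away everything up to the
//    embedded fence and broken JSON.parse.
const bare = `{"action": "Final Answer", "action_input": "Try ${fence}ts\\nconst x = 1;${fence}"}`;

// In both cases the new parse() ends with a JSON.parse of the intended payload.
```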
package/dist/callbacks/base.d.ts
@@ -1,4 +1,4 @@
-import { AgentAction, AgentFinish, BaseMessage, ChainValues, LLMResult } from "../schema/index.js";
+import { AgentAction, AgentFinish, BaseMessage, ChainValues, ChatGenerationChunk, GenerationChunk, LLMResult } from "../schema/index.js";
 import { Serializable, Serialized, SerializedNotImplemented } from "../load/serializable.js";
 import { SerializedFields } from "../load/map_keys.js";
 import { Document } from "../document.js";
@@ -22,6 +22,9 @@ export interface NewTokenIndices {
     prompt: number;
     completion: number;
 }
+export type HandleLLMNewTokenCallbackFields = {
+    chunk?: GenerationChunk | ChatGenerationChunk;
+};
 /**
  * Abstract class that provides a set of optional methods that can be
  * overridden in derived classes to handle various events during the
@@ -43,7 +46,7 @@ declare abstract class BaseCallbackHandlerMethodsClass {
      * idx.completion is the index of the completion that produced the token
      * (if multiple completions per prompt are requested)
      */
-    idx: NewTokenIndices, runId: string, parentRunId?: string, tags?: string[]): Promise<void> | void;
+    idx: NewTokenIndices, runId: string, parentRunId?: string, tags?: string[], fields?: HandleLLMNewTokenCallbackFields): Promise<void> | void;
     /**
      * Called if an LLM/ChatModel run encounters an error
      */
@@ -182,7 +185,7 @@ export declare abstract class BaseCallbackHandler extends BaseCallbackHandlerMet
    /**
     * Called when an LLM/ChatModel in `streaming` mode produces a new token
     */
-    handleLLMNewToken?(token: string, idx: NewTokenIndices, runId: string, parentRunId?: string | undefined, tags?: string[] | undefined): void | Promise<void>;
+    handleLLMNewToken?(token: string, idx: NewTokenIndices, runId: string, parentRunId?: string | undefined, tags?: string[] | undefined, fields?: HandleLLMNewTokenCallbackFields | undefined): void | Promise<void>;
    /**
     * Called if an LLM/ChatModel run encounters an error
     */
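A handler-side sketch of the widened hook. The class below is hypothetical; the signature matches the declaration above, and `HandleLLMNewTokenCallbackFields` is assumed to be re-exported from `langchain/callbacks` alongside the other base types.

```ts
import { BaseCallbackHandler } from "langchain/callbacks";
import type { NewTokenIndices, HandleLLMNewTokenCallbackFields } from "langchain/callbacks";

class ChunkLogger extends BaseCallbackHandler {
  name = "chunk_logger";

  handleLLMNewToken(
    token: string,
    idx: NewTokenIndices,
    runId: string,
    parentRunId?: string,
    tags?: string[],
    fields?: HandleLLMNewTokenCallbackFields
  ) {
    // fields?.chunk is a GenerationChunk or ChatGenerationChunk whenever the
    // model integration forwards one (the ChatOpenAI streaming hunks below do).
    console.log(token, fields?.chunk?.text);
  }
}
```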
package/dist/callbacks/handlers/tracer.cjs
@@ -332,7 +332,7 @@ class BaseTracer extends base_js_1.BaseCallbackHandler {
         });
         await this.onText?.(run);
     }
-    async handleLLMNewToken(token, idx, runId) {
+    async handleLLMNewToken(token, idx, runId, _parentRunId, _tags, fields) {
         const run = this.runMap.get(runId);
         if (!run || run?.run_type !== "llm") {
             return;
@@ -340,7 +340,7 @@ class BaseTracer extends base_js_1.BaseCallbackHandler {
         run.events.push({
             name: "new_token",
             time: Date.now(),
-            kwargs: { token, idx },
+            kwargs: { token, idx, chunk: fields?.chunk },
         });
         await this.onLLMNewToken?.(run);
     }
package/dist/callbacks/handlers/tracer.d.ts
@@ -1,7 +1,7 @@
 import { KVMap, BaseRun } from "langsmith/schemas";
 import { AgentAction, AgentFinish, BaseMessage, ChainValues, LLMResult } from "../../schema/index.js";
 import { Serialized } from "../../load/serializable.js";
-import { BaseCallbackHandler, BaseCallbackHandlerInput, NewTokenIndices } from "../base.js";
+import { BaseCallbackHandler, BaseCallbackHandlerInput, HandleLLMNewTokenCallbackFields, NewTokenIndices } from "../base.js";
 import { Document } from "../../document.js";
 export type RunType = string;
 export interface Run extends BaseRun {
@@ -44,7 +44,7 @@ export declare abstract class BaseTracer extends BaseCallbackHandler {
     handleRetrieverEnd(documents: Document<Record<string, unknown>>[], runId: string): Promise<void>;
     handleRetrieverError(error: Error, runId: string): Promise<void>;
     handleText(text: string, runId: string): Promise<void>;
-    handleLLMNewToken(token: string, idx: NewTokenIndices, runId: string): Promise<void>;
+    handleLLMNewToken(token: string, idx: NewTokenIndices, runId: string, _parentRunId?: string, _tags?: string[], fields?: HandleLLMNewTokenCallbackFields): Promise<void>;
     onLLMStart?(run: Run): void | Promise<void>;
     onLLMEnd?(run: Run): void | Promise<void>;
     onLLMError?(run: Run): void | Promise<void>;
package/dist/callbacks/handlers/tracer.js
@@ -329,7 +329,7 @@ export class BaseTracer extends BaseCallbackHandler {
         });
         await this.onText?.(run);
     }
-    async handleLLMNewToken(token, idx, runId) {
+    async handleLLMNewToken(token, idx, runId, _parentRunId, _tags, fields) {
         const run = this.runMap.get(runId);
         if (!run || run?.run_type !== "llm") {
             return;
@@ -337,7 +337,7 @@ export class BaseTracer extends BaseCallbackHandler {
         run.events.push({
             name: "new_token",
             time: Date.now(),
-            kwargs: { token, idx },
+            kwargs: { token, idx, chunk: fields?.chunk },
         });
         await this.onLLMNewToken?.(run);
     }
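Tracer subclasses can now read the chunk off the recorded event. A hypothetical no-op tracer, assuming `BaseTracer` and `Run` are exported from `langchain/callbacks` as in this version:

```ts
import { BaseTracer, Run } from "langchain/callbacks";

class TokenEventTracer extends BaseTracer {
  name = "token_event_tracer";

  // BaseTracer requires persistRun; this sketch simply drops the run.
  protected persistRun(_run: Run): Promise<void> {
    return Promise.resolve();
  }

  onLLMNewToken(run: Run): void {
    // The most recent event is the "new_token" pushed above; its kwargs now
    // carry the chunk alongside token and idx.
    const event = run.events[run.events.length - 1];
    console.log(event.kwargs?.token, event.kwargs?.chunk);
  }
}
```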
package/dist/callbacks/manager.cjs
@@ -137,11 +137,11 @@ class CallbackManagerForRetrieverRun extends BaseRunManager {
 }
 exports.CallbackManagerForRetrieverRun = CallbackManagerForRetrieverRun;
 class CallbackManagerForLLMRun extends BaseRunManager {
-    async handleLLMNewToken(token, idx = { prompt: 0, completion: 0 }) {
+    async handleLLMNewToken(token, idx, _runId, _parentRunId, _tags, fields) {
         await Promise.all(this.handlers.map((handler) => (0, promises_js_1.consumeCallback)(async () => {
             if (!handler.ignoreLLM) {
                 try {
-                    await handler.handleLLMNewToken?.(token, idx, this.runId, this._parentRunId, this.tags);
+                    await handler.handleLLMNewToken?.(token, idx ?? { prompt: 0, completion: 0 }, this.runId, this._parentRunId, this.tags, fields);
                 }
                 catch (err) {
                     console.error(`Error in handler ${handler.constructor.name}, handleLLMNewToken: ${err}`);
package/dist/callbacks/manager.d.ts
@@ -1,5 +1,5 @@
 import { AgentAction, AgentFinish, BaseMessage, ChainValues, LLMResult } from "../schema/index.js";
-import { BaseCallbackHandler, CallbackHandlerMethods, NewTokenIndices } from "./base.js";
+import { BaseCallbackHandler, CallbackHandlerMethods, HandleLLMNewTokenCallbackFields, NewTokenIndices } from "./base.js";
 import { LangChainTracerFields } from "./handlers/tracer_langchain.js";
 import { Serialized } from "../load/serializable.js";
 import { Document } from "../document.js";
@@ -62,7 +62,7 @@ export declare class CallbackManagerForRetrieverRun extends BaseRunManager imple
     handleRetrieverError(err: Error | unknown): Promise<void>;
 }
 export declare class CallbackManagerForLLMRun extends BaseRunManager implements BaseCallbackManagerMethods {
-    handleLLMNewToken(token: string, idx?: NewTokenIndices): Promise<void>;
+    handleLLMNewToken(token: string, idx?: NewTokenIndices, _runId?: string, _parentRunId?: string, _tags?: string[], fields?: HandleLLMNewTokenCallbackFields): Promise<void>;
     handleLLMError(err: Error | unknown): Promise<void>;
     handleLLMEnd(output: LLMResult): Promise<void>;
 }
package/dist/callbacks/manager.js
@@ -131,11 +131,11 @@ export class CallbackManagerForRetrieverRun extends BaseRunManager {
     }
 }
 export class CallbackManagerForLLMRun extends BaseRunManager {
-    async handleLLMNewToken(token, idx = { prompt: 0, completion: 0 }) {
+    async handleLLMNewToken(token, idx, _runId, _parentRunId, _tags, fields) {
         await Promise.all(this.handlers.map((handler) => consumeCallback(async () => {
             if (!handler.ignoreLLM) {
                 try {
-                    await handler.handleLLMNewToken?.(token, idx, this.runId, this._parentRunId, this.tags);
+                    await handler.handleLLMNewToken?.(token, idx ?? { prompt: 0, completion: 0 }, this.runId, this._parentRunId, this.tags, fields);
                 }
                 catch (err) {
                     console.error(`Error in handler ${handler.constructor.name}, handleLLMNewToken: ${err}`);
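From the producer side, a custom integration can forward a structured chunk through the run manager. A hypothetical minimal streaming LLM; the positional `undefined`s mirror how ChatOpenAI calls it in the hunks below:

```ts
import { LLM } from "langchain/llms/base";
import { GenerationChunk } from "langchain/schema";
import type { CallbackManagerForLLMRun } from "langchain/callbacks";

class WordStreamLLM extends LLM {
  _llmType() {
    return "word_stream";
  }

  async _call(
    prompt: string,
    _options: this["ParsedCallOptions"],
    runManager?: CallbackManagerForLLMRun
  ): Promise<string> {
    for (const token of prompt.split(" ")) {
      // New sixth argument: a fields object carrying the chunk.
      await runManager?.handleLLMNewToken(
        token,
        { prompt: 0, completion: 0 },
        undefined,
        undefined,
        undefined,
        { chunk: new GenerationChunk({ text: token }) }
      );
    }
    return prompt;
  }
}
```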
package/dist/chains/question_answering/map_reduce_prompts.d.ts
@@ -1,6 +1,6 @@
 import { PromptTemplate } from "../../prompts/prompt.js";
 import { ConditionalPromptSelector } from "../../prompts/selectors/conditional.js";
-export declare const DEFAULT_COMBINE_QA_PROMPT: PromptTemplate<any, any>;
+export declare const DEFAULT_COMBINE_QA_PROMPT: PromptTemplate<import("../../prompts/prompt.js").ParamsFromFString<"Use the following portion of a long document to see if any of the text is relevant to answer the question. \nReturn any relevant text verbatim.\n{context}\nQuestion: {question}\nRelevant text, if any:">, any>;
 export declare const COMBINE_QA_PROMPT_SELECTOR: ConditionalPromptSelector;
-export declare const COMBINE_PROMPT: PromptTemplate<any, any>;
+export declare const COMBINE_PROMPT: PromptTemplate<import("../../prompts/prompt.js").ParamsFromFString<"Given the following extracted parts of a long document and a question, create a final answer. \nIf you don't know the answer, just say that you don't know. Don't try to make up an answer.\n\nQUESTION: Which state/country's law governs the interpretation of the contract?\n=========\nContent: This Agreement is governed by English law and the parties submit to the exclusive jurisdiction of the English courts in relation to any dispute (contractual or non-contractual) concerning this Agreement save that either party may apply to any court for an injunction or other relief to protect its Intellectual Property Rights.\n\nContent: No Waiver. Failure or delay in exercising any right or remedy under this Agreement shall not constitute a waiver of such (or any other) right or remedy.\n\n11.7 Severability. The invalidity, illegality or unenforceability of any term (or part of a term) of this Agreement shall not affect the continuation in force of the remainder of the term (if any) and this Agreement.\n\n11.8 No Agency. Except as expressly stated otherwise, nothing in this Agreement shall create an agency, partnership or joint venture of any kind between the parties.\n\n11.9 No Third-Party Beneficiaries.\n\nContent: (b) if Google believes, in good faith, that the Distributor has violated or caused Google to violate any Anti-Bribery Laws (as defined in Clause 8.5) or that such a violation is reasonably likely to occur,\n=========\nFINAL ANSWER: This Agreement is governed by English law.\n\nQUESTION: What did the president say about Michael Jackson?\n=========\nContent: Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. \n\nLast year COVID-19 kept us apart. This year we are finally together again. \n\nTonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. \n\nWith a duty to one another to the American people to the Constitution. \n\nAnd with an unwavering resolve that freedom will always triumph over tyranny. \n\nSix days ago, Russia’s Vladimir Putin sought to shake the foundations of the free world thinking he could make it bend to his menacing ways. But he badly miscalculated. \n\nHe thought he could roll into Ukraine and the world would roll over. Instead he met a wall of strength he never imagined. \n\nHe met the Ukrainian people. \n\nFrom President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. \n\nGroups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland.\n\nContent: And we won’t stop. \n\nWe have lost so much to COVID-19. Time with one another. And worst of all, so much loss of life. \n\nLet’s use this moment to reset. Let’s stop looking at COVID-19 as a partisan dividing line and see it for what it is: A God-awful disease. \n\nLet’s stop seeing each other as enemies, and start seeing each other for who we really are: Fellow Americans. \n\nWe can’t change how divided we’ve been. But we can change how we move forward—on COVID-19 and other issues we must face together. \n\nI recently visited the New York City Police Department days after the funerals of Officer Wilbert Mora and his partner, Officer Jason Rivera. \n\nThey were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n\nOfficer Mora was 27 years old. \n\nOfficer Rivera was 22. \n\nBoth Dominican Americans who’d grown up on the same streets they later chose to patrol as police officers. \n\nI spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves.\n\nContent: And a proud Ukrainian people, who have known 30 years of independence, have repeatedly shown that they will not tolerate anyone who tries to take their country backwards. \n\nTo all Americans, I will be honest with you, as I’ve always promised. A Russian dictator, invading a foreign country, has costs around the world. \n\nAnd I’m taking robust action to make sure the pain of our sanctions is targeted at Russia’s economy. And I will use every tool at our disposal to protect American businesses and consumers. \n\nTonight, I can announce that the United States has worked with 30 other countries to release 60 Million barrels of oil from reserves around the world. \n\nAmerica will lead that effort, releasing 30 Million barrels from our own Strategic Petroleum Reserve. And we stand ready to do more if necessary, unified with our allies. \n\nThese steps will help blunt gas prices here at home. And I know the news about what’s happening can seem alarming. \n\nBut I want you to know that we are going to be okay.\n\nContent: More support for patients and families. \n\nTo get there, I call on Congress to fund ARPA-H, the Advanced Research Projects Agency for Health. \n\nIt’s based on DARPA—the Defense Department project that led to the Internet, GPS, and so much more. \n\nARPA-H will have a singular purpose—to drive breakthroughs in cancer, Alzheimer’s, diabetes, and more. \n\nA unity agenda for the nation. \n\nWe can do this. \n\nMy fellow Americans—tonight , we have gathered in a sacred space—the citadel of our democracy. \n\nIn this Capitol, generation after generation, Americans have debated great questions amid great strife, and have done great things. \n\nWe have fought for freedom, expanded liberty, defeated totalitarianism and terror. \n\nAnd built the strongest, freest, and most prosperous nation the world has ever known. \n\nNow is the hour. \n\nOur moment of responsibility. \n\nOur test of resolve and conscience, of history itself. \n\nIt is in this moment that our character is formed. Our purpose is found. Our future is forged. \n\nWell I know this nation.\n=========\nFINAL ANSWER: The president did not mention Michael Jackson.\n\nQUESTION: {question}\n=========\n{summaries}\n=========\nFINAL ANSWER:">, any>;
 export declare const COMBINE_PROMPT_SELECTOR: ConditionalPromptSelector;
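These regenerated declarations reflect the prompts change in this range (see prompts/prompt.d.ts in the file list): templates built from string literals are now typed via `ParamsFromFString` instead of `PromptTemplate<any, any>`. A sketch of what that buys, assuming the inference applies to user calls to `fromTemplate` the same way it does to these constants:

```ts
import { PromptTemplate } from "langchain/prompts";

const prompt = PromptTemplate.fromTemplate(
  "Use the following context.\n{context}\nQuestion: {question}"
);

// format() now knows its input keys from the template string itself.
await prompt.format({ context: "...", question: "..." }); // OK
// @ts-expect-error "context" is required by the inferred type
await prompt.format({ question: "..." });
```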
package/dist/chains/retrieval_qa.cjs
@@ -56,7 +56,7 @@ class RetrievalQAChain extends base_js_1.BaseChain {
         }
         const question = values[this.inputKey];
         const docs = await this.retriever.getRelevantDocuments(question, runManager?.getChild("retriever"));
-        const inputs = { question, input_documents: docs };
+        const inputs = { question, input_documents: docs, ...values };
         const result = await this.combineDocumentsChain.call(inputs, runManager?.getChild("combine_documents"));
         if (this.returnSourceDocuments) {
             return {
package/dist/chains/retrieval_qa.js
@@ -53,7 +53,7 @@ export class RetrievalQAChain extends BaseChain {
         }
         const question = values[this.inputKey];
         const docs = await this.retriever.getRelevantDocuments(question, runManager?.getChild("retriever"));
-        const inputs = { question, input_documents: docs };
+        const inputs = { question, input_documents: docs, ...values };
         const result = await this.combineDocumentsChain.call(inputs, runManager?.getChild("combine_documents"));
         if (this.returnSourceDocuments) {
             return {
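Because `call()` values are now spread into the combine-documents chain's inputs, extra variables in a custom QA prompt can be supplied at call time. A sketch with hypothetical placeholders (`model`, `store`) and a hypothetical extra variable (`style`):

```ts
import { RetrievalQAChain, loadQAStuffChain } from "langchain/chains";
import { PromptTemplate } from "langchain/prompts";
import type { BaseLanguageModel } from "langchain/base_language";
import type { VectorStore } from "langchain/vectorstores/base";

declare const model: BaseLanguageModel; // assumed to exist
declare const store: VectorStore;       // assumed to exist

const prompt = PromptTemplate.fromTemplate(
  "Answer in a {style} style.\n{context}\nQuestion: {question}\nAnswer:"
);

const chain = new RetrievalQAChain({
  retriever: store.asRetriever(),
  combineDocumentsChain: loadQAStuffChain(model, { prompt }),
});

// "style" rides along into combineDocumentsChain via the new ...values spread.
const res = await chain.call({ query: "What changed?", style: "terse" });
```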
package/dist/chat_models/openai.cjs
@@ -402,7 +402,10 @@ class ChatOpenAI extends base_js_1.BaseChatModel {
             });
             yield generationChunk;
             // eslint-disable-next-line no-void
-            void runManager?.handleLLMNewToken(generationChunk.text ?? "");
+            void runManager?.handleLLMNewToken(generationChunk.text ?? "", {
+                prompt: 0,
+                completion: choice.index,
+            }, undefined, undefined, undefined, { chunk: generationChunk });
         }
     }
     startStream(request, options) {
@@ -548,13 +551,15 @@ class ChatOpenAI extends base_js_1.BaseChatModel {
                     choice.message.function_call.arguments +=
                         part.delta?.function_call?.arguments ?? "";
                 }
-                // eslint-disable-next-line no-void
+                const chunk = _convertDeltaToMessageChunk(part.delta, "assistant");
+                const generationChunk = new index_js_1.ChatGenerationChunk({
+                    message: chunk,
+                    text: chunk.content,
+                });
                 void runManager?.handleLLMNewToken(part.delta?.content ?? "", {
                     prompt: options.promptIndex ?? 0,
                     completion: part.index,
-                });
-                // TODO we don't currently have a callback method for
-                // sending the function call arguments
+                }, undefined, undefined, undefined, { chunk: generationChunk });
             }
         }
         // when all messages are finished, resolve
package/dist/chat_models/openai.js
@@ -396,7 +396,10 @@ export class ChatOpenAI extends BaseChatModel {
             });
             yield generationChunk;
             // eslint-disable-next-line no-void
-            void runManager?.handleLLMNewToken(generationChunk.text ?? "");
+            void runManager?.handleLLMNewToken(generationChunk.text ?? "", {
+                prompt: 0,
+                completion: choice.index,
+            }, undefined, undefined, undefined, { chunk: generationChunk });
         }
     }
     startStream(request, options) {
@@ -542,13 +545,15 @@ export class ChatOpenAI extends BaseChatModel {
                     choice.message.function_call.arguments +=
                         part.delta?.function_call?.arguments ?? "";
                 }
-                // eslint-disable-next-line no-void
+                const chunk = _convertDeltaToMessageChunk(part.delta, "assistant");
+                const generationChunk = new ChatGenerationChunk({
+                    message: chunk,
+                    text: chunk.content,
+                });
                 void runManager?.handleLLMNewToken(part.delta?.content ?? "", {
                     prompt: options.promptIndex ?? 0,
                     completion: part.index,
-                });
-                // TODO we don't currently have a callback method for
-                // sending the function call arguments
+                }, undefined, undefined, undefined, { chunk: generationChunk });
             }
         }
         // when all messages are finished, resolve
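Consumer-side sketch of the enriched streaming callback; the inline handler object is hypothetical, and per the removed TODO, the chunk now carries function-call deltas that previously had no callback path:

```ts
import { ChatOpenAI } from "langchain/chat_models/openai";
import { HumanMessage } from "langchain/schema";

const chat = new ChatOpenAI({
  streaming: true,
  callbacks: [
    {
      handleLLMNewToken(token, idx, _runId, _parentRunId, _tags, fields) {
        // idx.completion distinguishes parallel completions; fields?.chunk is
        // the ChatGenerationChunk itself, message delta included.
        console.log(idx.completion, JSON.stringify(token), fields?.chunk);
      },
    },
  ],
});

await chat.call([new HumanMessage("Say hi")]);
```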
package/dist/llms/writer.cjs
@@ -0,0 +1,167 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Writer = void 0;
+const writer_sdk_1 = require("@writerai/writer-sdk");
+const base_js_1 = require("./base.cjs");
+const env_js_1 = require("../util/env.cjs");
+/**
+ * Class representing a Writer Large Language Model (LLM). It interacts
+ * with the Writer API to generate text completions.
+ */
+class Writer extends base_js_1.LLM {
+    static lc_name() {
+        return "Writer";
+    }
+    get lc_secrets() {
+        return {
+            apiKey: "WRITER_API_KEY",
+            orgId: "WRITER_ORG_ID",
+        };
+    }
+    get lc_aliases() {
+        return {
+            apiKey: "writer_api_key",
+            orgId: "writer_org_id",
+        };
+    }
+    constructor(fields) {
+        super(fields ?? {});
+        Object.defineProperty(this, "lc_serializable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "orgId", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "palmyra-instruct"
+        });
+        Object.defineProperty(this, "temperature", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "minTokens", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "maxTokens", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "bestOf", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "frequencyPenalty", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "logprobs", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "n", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "presencePenalty", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "topP", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        const apiKey = fields?.apiKey ?? (0, env_js_1.getEnvironmentVariable)("WRITER_API_KEY");
+        const orgId = fields?.orgId ?? (0, env_js_1.getEnvironmentVariable)("WRITER_ORG_ID");
+        if (!apiKey) {
+            throw new Error("Please set the WRITER_API_KEY environment variable or pass it to the constructor as the apiKey field.");
+        }
+        if (!orgId) {
+            throw new Error("Please set the WRITER_ORG_ID environment variable or pass it to the constructor as the orgId field.");
+        }
+        this.apiKey = apiKey;
+        this.orgId = typeof orgId === "string" ? parseInt(orgId, 10) : orgId;
+        this.model = fields?.model ?? this.model;
+        this.temperature = fields?.temperature ?? this.temperature;
+        this.minTokens = fields?.minTokens ?? this.minTokens;
+        this.maxTokens = fields?.maxTokens ?? this.maxTokens;
+        this.bestOf = fields?.bestOf ?? this.bestOf;
+        this.frequencyPenalty = fields?.frequencyPenalty ?? this.frequencyPenalty;
+        this.logprobs = fields?.logprobs ?? this.logprobs;
+        this.n = fields?.n ?? this.n;
+        this.presencePenalty = fields?.presencePenalty ?? this.presencePenalty;
+        this.topP = fields?.topP ?? this.topP;
+    }
+    _llmType() {
+        return "writer";
+    }
+    /** @ignore */
+    async _call(prompt, options) {
+        const sdk = new writer_sdk_1.Writer({
+            security: {
+                apiKey: this.apiKey,
+            },
+            organizationId: this.orgId,
+        });
+        return this.caller.callWithOptions({ signal: options.signal }, async () => {
+            try {
+                const res = await sdk.completions.create({
+                    completionRequest: {
+                        prompt,
+                        stop: options.stop,
+                        temperature: this.temperature,
+                        minTokens: this.minTokens,
+                        maxTokens: this.maxTokens,
+                        bestOf: this.bestOf,
+                        n: this.n,
+                        frequencyPenalty: this.frequencyPenalty,
+                        logprobs: this.logprobs,
+                        presencePenalty: this.presencePenalty,
+                        topP: this.topP,
+                    },
+                    modelId: this.model,
+                });
+                return (res.completionResponse?.choices?.[0].text ?? "No completion found.");
+            }
+            catch (e) {
+                // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                e.response = e.rawResponse;
+                throw e;
+            }
+        });
+    }
+}
+exports.Writer = Writer;
package/dist/llms/writer.d.ts
@@ -0,0 +1,60 @@
+import { BaseLLMParams, LLM } from "./base.js";
+/**
+ * Interface for the input parameters specific to the Writer model.
+ */
+export interface WriterInput extends BaseLLMParams {
+    /** Writer API key */
+    apiKey?: string;
+    /** Writer organization ID */
+    orgId?: string | number;
+    /** Model to use */
+    model?: string;
+    /** Sampling temperature to use */
+    temperature?: number;
+    /** Minimum number of tokens to generate. */
+    minTokens?: number;
+    /** Maximum number of tokens to generate in the completion. */
+    maxTokens?: number;
+    /** Generates this many completions server-side and returns the "best"." */
+    bestOf?: number;
+    /** Penalizes repeated tokens according to frequency. */
+    frequencyPenalty?: number;
+    /** Whether to return log probabilities. */
+    logprobs?: number;
+    /** Number of completions to generate. */
+    n?: number;
+    /** Penalizes repeated tokens regardless of frequency. */
+    presencePenalty?: number;
+    /** Total probability mass of tokens to consider at each step. */
+    topP?: number;
+}
+/**
+ * Class representing a Writer Large Language Model (LLM). It interacts
+ * with the Writer API to generate text completions.
+ */
+export declare class Writer extends LLM implements WriterInput {
+    static lc_name(): string;
+    get lc_secrets(): {
+        [key: string]: string;
+    } | undefined;
+    get lc_aliases(): {
+        [key: string]: string;
+    } | undefined;
+    lc_serializable: boolean;
+    apiKey: string;
+    orgId: number;
+    model: string;
+    temperature?: number;
+    minTokens?: number;
+    maxTokens?: number;
+    bestOf?: number;
+    frequencyPenalty?: number;
+    logprobs?: number;
+    n?: number;
+    presencePenalty?: number;
+    topP?: number;
+    constructor(fields?: WriterInput);
+    _llmType(): string;
+    /** @ignore */
+    _call(prompt: string, options: this["ParsedCallOptions"]): Promise<string>;
+}
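A usage sketch for the new integration; the option values are illustrative. It needs the @writerai/writer-sdk peer dependency and either the environment variables or explicit fields, per the constructor above:

```ts
import { Writer } from "langchain/llms/writer";

const writer = new Writer({
  // Both fall back to WRITER_API_KEY / WRITER_ORG_ID when omitted.
  apiKey: process.env.WRITER_API_KEY,
  orgId: process.env.WRITER_ORG_ID,
  model: "palmyra-instruct", // the default, per the constructor above
  temperature: 0.7,
  maxTokens: 256,
});

const completion = await writer.call("Write a haiku about package diffs.");
console.log(completion);
```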