langchain 0.0.21 → 0.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/chat_models.d.ts +1 -0
- package/chat_models.js +1 -0
- package/dist/cache.d.ts +1 -1
- package/dist/chains/chat_vector_db_chain.d.ts +4 -2
- package/dist/chains/chat_vector_db_chain.js +15 -1
- package/dist/chains/chat_vector_db_chain.js.map +1 -1
- package/dist/chains/llm_chain.d.ts +5 -4
- package/dist/chains/llm_chain.js +4 -5
- package/dist/chains/llm_chain.js.map +1 -1
- package/dist/chains/tests/chat_vector_db_qa_chain.int.test.js +18 -0
- package/dist/chains/tests/chat_vector_db_qa_chain.int.test.js.map +1 -1
- package/dist/chains/tests/combine_docs_chain.test.js.map +1 -1
- package/dist/chains/tests/llm_chain.int.test.js +14 -1
- package/dist/chains/tests/llm_chain.int.test.js.map +1 -1
- package/dist/chains/vector_db_qa.d.ts +2 -0
- package/dist/chains/vector_db_qa.js +14 -0
- package/dist/chains/vector_db_qa.js.map +1 -1
- package/dist/chat_models/base.d.ts +32 -0
- package/dist/chat_models/base.js +110 -0
- package/dist/chat_models/base.js.map +1 -0
- package/dist/chat_models/index.d.ts +1 -0
- package/dist/chat_models/index.js +2 -0
- package/dist/chat_models/index.js.map +1 -0
- package/dist/chat_models/openai.d.ts +150 -0
- package/dist/chat_models/openai.js +295 -0
- package/dist/chat_models/openai.js.map +1 -0
- package/dist/chat_models/tests/chatopenai.int.test.d.ts +1 -0
- package/dist/chat_models/tests/chatopenai.int.test.js +122 -0
- package/dist/chat_models/tests/chatopenai.int.test.js.map +1 -0
- package/dist/llms/base.d.ts +6 -2
- package/dist/llms/base.js +28 -5
- package/dist/llms/base.js.map +1 -1
- package/dist/llms/cohere.d.ts +2 -1
- package/dist/llms/cohere.js.map +1 -1
- package/dist/llms/hf.d.ts +20 -3
- package/dist/llms/hf.js +48 -4
- package/dist/llms/hf.js.map +1 -1
- package/dist/llms/index.d.ts +0 -35
- package/dist/llms/openai-chat.d.ts +3 -3
- package/dist/llms/openai.d.ts +3 -3
- package/dist/llms/openai.js.map +1 -1
- package/dist/llms/tests/huggingface_hub.int.test.js +1 -1
- package/dist/llms/tests/huggingface_hub.int.test.js.map +1 -1
- package/dist/llms/tests/openai.int.test.js +15 -0
- package/dist/llms/tests/openai.int.test.js.map +1 -1
- package/dist/prompts/base.d.ts +20 -3
- package/dist/prompts/base.js +24 -0
- package/dist/prompts/base.js.map +1 -1
- package/dist/prompts/chat.d.ts +74 -0
- package/dist/prompts/chat.js +179 -0
- package/dist/prompts/chat.js.map +1 -0
- package/dist/prompts/few_shot.d.ts +2 -2
- package/dist/prompts/few_shot.js +3 -3
- package/dist/prompts/few_shot.js.map +1 -1
- package/dist/prompts/index.d.ts +2 -1
- package/dist/prompts/index.js +2 -1
- package/dist/prompts/index.js.map +1 -1
- package/dist/prompts/prompt.d.ts +3 -3
- package/dist/prompts/prompt.js +4 -4
- package/dist/prompts/prompt.js.map +1 -1
- package/dist/prompts/template.d.ts +3 -0
- package/dist/prompts/template.js +5 -0
- package/dist/prompts/template.js.map +1 -1
- package/dist/prompts/tests/chat.test.d.ts +1 -0
- package/dist/prompts/tests/chat.test.js +101 -0
- package/dist/prompts/tests/chat.test.js.map +1 -0
- package/dist/prompts/tests/few_shot.test.js +19 -0
- package/dist/prompts/tests/few_shot.test.js.map +1 -1
- package/dist/schema/index.d.ts +79 -0
- package/dist/schema/index.js +53 -0
- package/dist/schema/index.js.map +1 -0
- package/dist/vectorstores/chroma.d.ts +3 -0
- package/dist/vectorstores/chroma.js +10 -14
- package/dist/vectorstores/chroma.js.map +1 -1
- package/dist/vectorstores/tests/chroma.test.d.ts +1 -0
- package/dist/vectorstores/tests/chroma.test.js +9 -0
- package/dist/vectorstores/tests/chroma.test.js.map +1 -0
- package/package.json +22 -8
- package/schema.d.ts +1 -0
- package/schema.js +1 -0
package/chat_models.d.ts
ADDED
@@ -0,0 +1 @@
+export * from './dist/chat_models/index.js'
package/chat_models.js
ADDED
@@ -0,0 +1 @@
+export * from './dist/chat_models/index.js'
package/dist/cache.d.ts
CHANGED
@@ -1,6 +1,6 @@
-import { BaseChain, ChainValues, SerializedBaseChain, SerializedLLMChain
-import { VectorStore } from "../vectorstores/base.js";
+import { BaseChain, ChainValues, LLMChain, SerializedBaseChain, SerializedLLMChain } from "./index.js";
 import { BaseLLM } from "../llms/index.js";
+import { VectorStore } from "../vectorstores/base.js";
 export type LoadValues = Record<string, any>;
 export interface ChatVectorDBQAChainInput {
     vectorstore: VectorStore;
@@ -26,6 +26,7 @@ export declare class ChatVectorDBQAChain extends BaseChain implements ChatVector
     vectorstore: VectorStore;
     combineDocumentsChain: BaseChain;
     questionGeneratorChain: LLMChain;
+    returnSourceDocuments: boolean;
     constructor(fields: {
         vectorstore: VectorStore;
         combineDocumentsChain: BaseChain;
@@ -33,6 +34,7 @@ export declare class ChatVectorDBQAChain extends BaseChain implements ChatVector
         inputKey?: string;
         outputKey?: string;
         k?: number;
+        returnSourceDocuments?: boolean;
     });
     _call(values: ChainValues): Promise<ChainValues>;
     _chainType(): "chat-vector-db";
@@ -1,4 +1,4 @@
-import { BaseChain,
+import { BaseChain, LLMChain, loadQAChain, } from "./index.js";
 import { PromptTemplate } from "../prompts/index.js";
 import { resolveConfigFromFile } from "../util/index.js";
 const question_generator_template = `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.
@@ -63,12 +63,20 @@ export class ChatVectorDBQAChain extends BaseChain {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "returnSourceDocuments", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
         this.vectorstore = fields.vectorstore;
         this.combineDocumentsChain = fields.combineDocumentsChain;
         this.questionGeneratorChain = fields.questionGeneratorChain;
         this.inputKey = fields.inputKey ?? this.inputKey;
         this.outputKey = fields.outputKey ?? this.outputKey;
         this.k = fields.k ?? this.k;
+        this.returnSourceDocuments =
+            fields.returnSourceDocuments ?? this.returnSourceDocuments;
     }
     async _call(values) {
         if (!(this.inputKey in values)) {
@@ -100,6 +108,12 @@ export class ChatVectorDBQAChain extends BaseChain {
             chat_history: chatHistory,
         };
         const result = await this.combineDocumentsChain.call(inputs);
+        if (this.returnSourceDocuments) {
+            return {
+                ...result,
+                sourceDocuments: docs,
+            };
+        }
         return result;
     }
     _chainType() {
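The hunk above adds an opt-in `returnSourceDocuments` flag to `ChatVectorDBQAChain`: when it is set, `_call` spreads the combine-documents result and attaches the retrieved `docs` as `sourceDocuments`. A minimal usage sketch follows; the `langchain/...` entry points and the `text` output key are assumptions based on the package layout in this diff, not something shown in this section, and `fromLLM` itself does not take the flag as an option in this version, so it is switched on via the public field.

```ts
import { ChatVectorDBQAChain } from "langchain/chains";
import { OpenAI } from "langchain/llms";
import { OpenAIEmbeddings } from "langchain/embeddings";
import { HNSWLib } from "langchain/vectorstores";

const vectorStore = await HNSWLib.fromTexts(
  ["Hello world", "Bye bye"],
  [{ id: 1 }, { id: 2 }],
  new OpenAIEmbeddings()
);

// fromLLM(llm, vectorstore) wires up the question-generator and QA chains.
const chain = ChatVectorDBQAChain.fromLLM(new OpenAI({ temperature: 0 }), vectorStore);
chain.returnSourceDocuments = true;

const res = await chain.call({ question: "What does the text say?", chat_history: "" });
console.log(res.text);            // answer from the combine-documents chain
console.log(res.sourceDocuments); // documents retrieved by similaritySearch
```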
@@ -1 +1 @@
-
{"version":3,"file":"chat_vector_db_chain.js","sourceRoot":"","sources":["../../src/chains/chat_vector_db_chain.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,
+
{"version":3,"file":"chat_vector_db_chain.js","sourceRoot":"","sources":["../../src/chains/chat_vector_db_chain.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,EAET,QAAQ,EACR,WAAW,GAGZ,MAAM,YAAY,CAAC;AAEpB,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAKrD,OAAO,EAAE,qBAAqB,EAAE,MAAM,kBAAkB,CAAC;AAIzD,MAAM,2BAA2B,GAAG;;;;;qBAKf,CAAC;AACtB,MAAM,yBAAyB,GAAG,cAAc,CAAC,YAAY,CAC3D,2BAA2B,CAC5B,CAAC;AAEF,MAAM,WAAW,GAAG;;;;;gBAKJ,CAAC;AACjB,MAAM,SAAS,GAAG,cAAc,CAAC,YAAY,CAAC,WAAW,CAAC,CAAC;AAmB3D,MAAM,OAAO,mBACX,SAAQ,SAAS;IASjB,IAAI,SAAS;QACX,OAAO,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;IAC9C,CAAC;IAYD,YAAY,MAQX;QACC,KAAK,EAAE,CAAC;QA7BV;;;;mBAAI,CAAC;WAAC;QAEN;;;;mBAAW,UAAU;WAAC;QAEtB;;;;mBAAiB,cAAc;WAAC;QAMhC;;;;mBAAY,QAAQ;WAAC;QAErB;;;;;WAAyB;QAEzB;;;;;WAAiC;QAEjC;;;;;WAAiC;QAEjC;;;;mBAAwB,KAAK;WAAC;QAY5B,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;QACtC,IAAI,CAAC,qBAAqB,GAAG,MAAM,CAAC,qBAAqB,CAAC;QAC1D,IAAI,CAAC,sBAAsB,GAAG,MAAM,CAAC,sBAAsB,CAAC;QAC5D,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,CAAC;QACjD,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC;QACpD,IAAI,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC;QAC5B,IAAI,CAAC,qBAAqB;YACxB,MAAM,CAAC,qBAAqB,IAAI,IAAI,CAAC,qBAAqB,CAAC;IAC/D,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,MAAmB;QAC7B,IAAI,CAAC,CAAC,IAAI,CAAC,QAAQ,IAAI,MAAM,CAAC,EAAE;YAC9B,MAAM,IAAI,KAAK,CAAC,gBAAgB,IAAI,CAAC,QAAQ,aAAa,CAAC,CAAC;SAC7D;QACD,IAAI,CAAC,CAAC,IAAI,CAAC,cAAc,IAAI,MAAM,CAAC,EAAE;YACpC,MAAM,IAAI,KAAK,CAAC,oBAAoB,IAAI,CAAC,QAAQ,aAAa,CAAC,CAAC;SACjE;QACD,MAAM,QAAQ,GAAW,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC/C,MAAM,WAAW,GAAW,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QACxD,IAAI,WAAW,GAAG,QAAQ,CAAC;QAC3B,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE;YAC1B,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,sBAAsB,CAAC,IAAI,CAAC;gBACpD,QAAQ;gBACR,YAAY,EAAE,WAAW;aAC1B,CAAC,CAAC;YACH,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YACjC,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;gBACrB,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;aAC/B;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0EAA0E,CAC3E,CAAC;aACH;SACF;QACD,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,WAAW,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC;QAC1E,MAAM,MAAM,GAAG;YACb,QAAQ;YACR,eAAe,EAAE,IAAI;YACrB,YAAY,EAAE,WAAW;SAC1B,CAAC;QACF,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC7D,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,OAAO;gBACL,GAAG,MAAM;gBACT,eAAe,EAAE,IAAI;aACtB,CAAC;SACH;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,UAAU;QACR,OAAO,gBAAyB,CAAC;IACnC,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,WAAW,CACtB,IAAmC,EACnC,MAAkB;QAElB,IAAI,CAAC,CAAC,aAAa,IAAI,MAAM,CAAC,EAAE;YAC9B,MAAM,IAAI,KAAK,CACb,8DAA8D,CAC/D,CAAC;SACH;QACD,MAAM,EAAE,WAAW,EAAE,GAAG,MAAM,CAAC;QAC/B,MAAM,+BAA+B,GAAG,MAAM,qBAAqB,CAGjE,yBAAyB,EAAE,IAAI,CAAC,CAAC;QACnC,MAAM,gCAAgC,GAAG,MAAM,qBAAqB,CAGlE,oBAAoB,EAAE,IAAI,CAAC,CAAC;QAE9B,OAAO,IAAI,mBAAmB,CAAC;YAC7B,qBAAqB,EAAE,MAAM,SAAS,CAAC,WAAW,CAChD,+BAA+B,CAChC;YACD,sBAAsB,EAAE,MAAM,QAAQ,CAAC,WAAW,CAChD,gCAAgC,CACjC;YACD,CAAC,EAAE,IAAI,CAAC,CAAC;YACT,WAAW;SACZ,CAAC,CAAC;IACL,CAAC;IAED,SAAS;QACP,OAAO;YACL,KAAK,EAAE,IAAI,CAAC,UAAU,EAAE;YACxB,uBAAuB,EAAE,IAAI,CAAC,qBAAqB,CAAC,SAAS,EAAE;YAC/D,kBAAkB,EAAE,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;YAC3D,CAAC,EAAE,IAAI,CAAC,CAAC;SACV,CAAC;IACJ,CAAC;IAED,MAAM,CAAC,OAAO,CAAC,GAAY,EAAE,WAAwB;QACnD,MAAM,OAAO,GAAG,WAAW,CAAC,GAAG,EAAE,EAAE,MAAM,EAAE,SAAS,EAAE,CAAC,CAAC;QACxD,MAAM,sBAAsB,GAAG,IAAI,QAAQ,CAAC;YAC1C,MAAM,EAAE,yBAAyB;YACjC,GAAG;SACJ,CAAC,CAAC;QACH,MAAM,QAAQ,GAAG,IAAI,IAAI,CAAC;YACxB,WAAW;YACX,qBAAqB,EAAE,OAAO;YAC9B,sBAAsB;SACvB,CAAC,CAAC;QACH,OAAO,QAAQ,CAAC;IAClB,CAAC;CACF"}
@@ -1,12 +1,13 @@
-import { BaseChain,
+import { BaseChain, ChainInputs, ChainValues } from "./index.js";
 import { BaseLLM, SerializedLLM } from "../llms/index.js";
+import { BaseLanguageModel } from "../schema/index.js";
 import { BaseMemory } from "../memory/index.js";
 import { BasePromptTemplate, SerializedBasePromptTemplate } from "../prompts/index.js";
 export interface LLMChainInput extends ChainInputs {
     /** Prompt object to use */
     prompt: BasePromptTemplate;
     /** LLM Wrapper to use */
-    llm:
+    llm: BaseLanguageModel;
     /** @ignore */
     outputKey: string;
 }
@@ -31,12 +32,12 @@ export type SerializedLLMChain = {
  */
 export declare class LLMChain extends BaseChain implements LLMChainInput {
     prompt: BasePromptTemplate;
-    llm:
+    llm: BaseLanguageModel;
     outputKey: string;
     get inputKeys(): string[];
     constructor(fields: {
         prompt: BasePromptTemplate;
-        llm:
+        llm: BaseLanguageModel;
         outputKey?: string;
         memory?: BaseMemory;
     });
package/dist/chains/llm_chain.js
CHANGED
@@ -48,10 +48,9 @@ export class LLMChain extends BaseChain {
         if ("stop" in values && Array.isArray(values.stop)) {
             stop = values.stop;
         }
-        const
-        const
-
-        return result;
+        const promptValue = await this.prompt.formatPromptValue(values);
+        const { generations } = await this.llm.generatePrompt([promptValue], stop);
+        return { [this.outputKey]: generations[0][0].text };
     }
     /**
      * Format prompt with values and pass to LLM
@@ -82,7 +81,7 @@ export class LLMChain extends BaseChain {
     serialize() {
         return {
             _type: this._chainType(),
-            llm: this.llm.serialize(),
+            // llm: this.llm.serialize(), TODO fix this now that llm is BaseLanguageModel
             prompt: this.prompt.serialize(),
         };
     }
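The rewritten `_call` above is what decouples `LLMChain` from `BaseLLM`: the prompt is formatted into a prompt value and handed to the model's `generatePrompt`, which both plain LLMs and the new chat models implement. A rough sketch of the effect, assuming the `langchain/llms`, `langchain/chat_models`, `langchain/prompts` and `langchain/chains` entry points:

```ts
import { OpenAI } from "langchain/llms";
import { ChatOpenAI } from "langchain/chat_models";
import { PromptTemplate } from "langchain/prompts";
import { LLMChain } from "langchain/chains";

const prompt = PromptTemplate.fromTemplate(
  "What is a good name for a company that makes {product}?"
);

// The same chain class accepts either kind of model, because both are
// BaseLanguageModels and expose generatePrompt(promptValues, stop).
const completionChain = new LLMChain({ llm: new OpenAI({ temperature: 0 }), prompt });
const chatChain = new LLMChain({ llm: new ChatOpenAI({ temperature: 0 }), prompt });

console.log(await completionChain.call({ product: "colorful socks" }));
console.log(await chatChain.call({ product: "colorful socks" }));
```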
@@ -1 +1 @@
-
{"version":3,"file":"llm_chain.js","sourceRoot":"","sources":["../../src/chains/llm_chain.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAA4B,MAAM,YAAY,CAAC;AAEjE,OAAO,EAAE,OAAO,EAAiB,MAAM,kBAAkB,CAAC;
+
{"version":3,"file":"llm_chain.js","sourceRoot":"","sources":["../../src/chains/llm_chain.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAA4B,MAAM,YAAY,CAAC;AAEjE,OAAO,EAAE,OAAO,EAAiB,MAAM,kBAAkB,CAAC;AAG1D,OAAO,EAAc,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAC9D,OAAO,EACL,kBAAkB,EAClB,cAAc,GAEf,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EAAE,qBAAqB,EAAE,MAAM,kBAAkB,CAAC;AAoBzD;;;;;;;;;;;GAWG;AACH,MAAM,OAAO,QAAS,SAAQ,SAAS;IAOrC,IAAI,SAAS;QACX,OAAO,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC;IACpC,CAAC;IAED,YAAY,MAKX;QACC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QAhBvB;;;;;WAA2B;QAE3B;;;;;WAAuB;QAEvB;;;;mBAAY,MAAM;WAAC;QAajB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;QAC5B,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,GAAG,CAAC;QACtB,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC;IACtD,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,MAAmB;QAC7B,IAAI,IAAI,CAAC;QACT,IAAI,MAAM,IAAI,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;YAClD,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC;SACpB;QACD,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,iBAAiB,CAAC,MAAM,CAAC,CAAC;QAChE,MAAM,EAAE,WAAW,EAAE,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,WAAW,CAAC,EAAE,IAAI,CAAC,CAAC;QAC3E,OAAO,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;IACtD,CAAC;IAED;;;;;;;;;;OAUG;IACH,KAAK,CAAC,OAAO,CAAC,MAAmB;QAC/B,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACvC,OAAO,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;IAChC,CAAC;IAED,UAAU;QACR,OAAO,WAAoB,CAAC;IAC9B,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,WAAW,CAAC,IAAwB;QAC/C,MAAM,aAAa,GAAG,MAAM,qBAAqB,CAC/C,KAAK,EACL,IAAI,CACL,CAAC;QACF,MAAM,gBAAgB,GAAG,MAAM,qBAAqB,CAGlD,QAAQ,EAAE,IAAI,CAAC,CAAC;QAElB,OAAO,IAAI,QAAQ,CAAC;YAClB,GAAG,EAAE,MAAM,OAAO,CAAC,WAAW,CAAC,aAAa,CAAC;YAC7C,MAAM,EAAE,MAAM,kBAAkB,CAAC,WAAW,CAAC,gBAAgB,CAAC;SAC/D,CAAC,CAAC;IACL,CAAC;IAED,SAAS;QACP,OAAO;YACL,KAAK,EAAE,IAAI,CAAC,UAAU,EAAE;YACxB,6EAA6E;YAC7E,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE;SAChC,CAAC;IACJ,CAAC;CACF;AAED,mCAAmC;AACnC,MAAM,eAAe,GAAG;;;;;IAKpB,CAAC;AAEL,MAAM,aAAa,GAAG,IAAI,cAAc,CAAC;IACvC,QAAQ,EAAE,eAAe;IACzB,cAAc,EAAE,CAAC,SAAS,EAAE,OAAO,CAAC;CACrC,CAAC,CAAC;AAEH,MAAM,OAAO,iBAAkB,SAAQ,QAAQ;IAC7C,YAAY,MAKX;QACC,KAAK,CAAC;YACJ,MAAM,EAAE,MAAM,CAAC,MAAM,IAAI,aAAa;YACtC,GAAG,EAAE,MAAM,CAAC,GAAG;YACf,SAAS,EAAE,MAAM,CAAC,SAAS,IAAI,UAAU;SAC1C,CAAC,CAAC;QACH,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,IAAI,YAAY,EAAE,CAAC;IACpD,CAAC;CACF"}
@@ -23,6 +23,24 @@ test("Test ChatVectorDBQAChain", async () => {
     const res = await chain.call({ question: "foo", chat_history: "bar" });
     console.log({ res });
 });
+test("Test ChatVectorDBQAChain with returnSourceDocuments", async () => {
+    const model = new OpenAI({ modelName: "text-ada-001" });
+    const prompt = PromptTemplate.fromTemplate("Print {question}, and ignore {chat_history}");
+    const vectorStore = await HNSWLib.fromTexts(["Hello world", "Bye bye", "hello nice world", "bye", "hi"], [{ id: 2 }, { id: 1 }, { id: 3 }, { id: 4 }, { id: 5 }], new OpenAIEmbeddings());
+    const llmChain = new LLMChain({ prompt, llm: model });
+    const combineDocsChain = new StuffDocumentsChain({
+        llmChain,
+        documentVariableName: "foo",
+    });
+    const chain = new ChatVectorDBQAChain({
+        combineDocumentsChain: combineDocsChain,
+        vectorstore: vectorStore,
+        questionGeneratorChain: llmChain,
+        returnSourceDocuments: true,
+    });
+    const res = await chain.call({ question: "foo", chat_history: "bar" });
+    console.log({ res });
+});
 test("Test ChatVectorDBQAChain from LLM", async () => {
     const model = new OpenAI({ modelName: "text-ada-001" });
     const vectorStore = await HNSWLib.fromTexts(["Hello world", "Bye bye", "hello nice world", "bye", "hi"], [{ id: 2 }, { id: 1 }, { id: 3 }, { id: 4 }, { id: 5 }], new OpenAIEmbeddings());
@@ -1 +1 @@
-
{"version":3,"file":"chat_vector_db_qa_chain.int.test.js","sourceRoot":"","sources":["../../../src/chains/tests/chat_vector_db_qa_chain.int.test.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,eAAe,CAAC;AACrC,OAAO,EAAE,MAAM,EAAE,MAAM,sBAAsB,CAAC;AAC9C,OAAO,EAAE,cAAc,EAAE,MAAM,wBAAwB,CAAC;AACxD,OAAO,EAAE,QAAQ,EAAE,MAAM,iBAAiB,CAAC;AAC3C,OAAO,EAAE,mBAAmB,EAAE,MAAM,0BAA0B,CAAC;AAC/D,OAAO,EAAE,mBAAmB,EAAE,MAAM,4BAA4B,CAAC;AACjE,OAAO,EAAE,OAAO,EAAE,MAAM,+BAA+B,CAAC;AACxD,OAAO,EAAE,gBAAgB,EAAE,MAAM,2BAA2B,CAAC;AAE7D,IAAI,CAAC,0BAA0B,EAAE,KAAK,IAAI,EAAE;IAC1C,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,CAAC;IACxD,MAAM,MAAM,GAAG,cAAc,CAAC,YAAY,CACxC,6CAA6C,CAC9C,CAAC;IACF,MAAM,WAAW,GAAG,MAAM,OAAO,CAAC,SAAS,CACzC,CAAC,aAAa,EAAE,SAAS,EAAE,kBAAkB,EAAE,KAAK,EAAE,IAAI,CAAC,EAC3D,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EACvD,IAAI,gBAAgB,EAAE,CACvB,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,QAAQ,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC,CAAC;IACtD,MAAM,gBAAgB,GAAG,IAAI,mBAAmB,CAAC;QAC/C,QAAQ;QACR,oBAAoB,EAAE,KAAK;KAC5B,CAAC,CAAC;IACH,MAAM,KAAK,GAAG,IAAI,mBAAmB,CAAC;QACpC,qBAAqB,EAAE,gBAAgB;QACvC,WAAW,EAAE,WAAW;QACxB,sBAAsB,EAAE,QAAQ;KACjC,CAAC,CAAC;IACH,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,CAAC;IACvE,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,mCAAmC,EAAE,KAAK,IAAI,EAAE;IACnD,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,CAAC;IACxD,MAAM,WAAW,GAAG,MAAM,OAAO,CAAC,SAAS,CACzC,CAAC,aAAa,EAAE,SAAS,EAAE,kBAAkB,EAAE,KAAK,EAAE,IAAI,CAAC,EAC3D,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EACvD,IAAI,gBAAgB,EAAE,CACvB,CAAC;IACF,MAAM,KAAK,GAAG,mBAAmB,CAAC,OAAO,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;IAC9D,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,CAAC;IACvE,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}
+
{"version":3,"file":"chat_vector_db_qa_chain.int.test.js","sourceRoot":"","sources":["../../../src/chains/tests/chat_vector_db_qa_chain.int.test.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,eAAe,CAAC;AACrC,OAAO,EAAE,MAAM,EAAE,MAAM,sBAAsB,CAAC;AAC9C,OAAO,EAAE,cAAc,EAAE,MAAM,wBAAwB,CAAC;AACxD,OAAO,EAAE,QAAQ,EAAE,MAAM,iBAAiB,CAAC;AAC3C,OAAO,EAAE,mBAAmB,EAAE,MAAM,0BAA0B,CAAC;AAC/D,OAAO,EAAE,mBAAmB,EAAE,MAAM,4BAA4B,CAAC;AACjE,OAAO,EAAE,OAAO,EAAE,MAAM,+BAA+B,CAAC;AACxD,OAAO,EAAE,gBAAgB,EAAE,MAAM,2BAA2B,CAAC;AAE7D,IAAI,CAAC,0BAA0B,EAAE,KAAK,IAAI,EAAE;IAC1C,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,CAAC;IACxD,MAAM,MAAM,GAAG,cAAc,CAAC,YAAY,CACxC,6CAA6C,CAC9C,CAAC;IACF,MAAM,WAAW,GAAG,MAAM,OAAO,CAAC,SAAS,CACzC,CAAC,aAAa,EAAE,SAAS,EAAE,kBAAkB,EAAE,KAAK,EAAE,IAAI,CAAC,EAC3D,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EACvD,IAAI,gBAAgB,EAAE,CACvB,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,QAAQ,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC,CAAC;IACtD,MAAM,gBAAgB,GAAG,IAAI,mBAAmB,CAAC;QAC/C,QAAQ;QACR,oBAAoB,EAAE,KAAK;KAC5B,CAAC,CAAC;IACH,MAAM,KAAK,GAAG,IAAI,mBAAmB,CAAC;QACpC,qBAAqB,EAAE,gBAAgB;QACvC,WAAW,EAAE,WAAW;QACxB,sBAAsB,EAAE,QAAQ;KACjC,CAAC,CAAC;IACH,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,CAAC;IACvE,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,qDAAqD,EAAE,KAAK,IAAI,EAAE;IACrE,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,CAAC;IACxD,MAAM,MAAM,GAAG,cAAc,CAAC,YAAY,CACxC,6CAA6C,CAC9C,CAAC;IACF,MAAM,WAAW,GAAG,MAAM,OAAO,CAAC,SAAS,CACzC,CAAC,aAAa,EAAE,SAAS,EAAE,kBAAkB,EAAE,KAAK,EAAE,IAAI,CAAC,EAC3D,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EACvD,IAAI,gBAAgB,EAAE,CACvB,CAAC;IACF,MAAM,QAAQ,GAAG,IAAI,QAAQ,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC,CAAC;IACtD,MAAM,gBAAgB,GAAG,IAAI,mBAAmB,CAAC;QAC/C,QAAQ;QACR,oBAAoB,EAAE,KAAK;KAC5B,CAAC,CAAC;IACH,MAAM,KAAK,GAAG,IAAI,mBAAmB,CAAC;QACpC,qBAAqB,EAAE,gBAAgB;QACvC,WAAW,EAAE,WAAW;QACxB,sBAAsB,EAAE,QAAQ;QAChC,qBAAqB,EAAE,IAAI;KAC5B,CAAC,CAAC;IACH,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,CAAC;IACvE,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,mCAAmC,EAAE,KAAK,IAAI,EAAE;IACnD,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,CAAC;IACxD,MAAM,WAAW,GAAG,MAAM,OAAO,CAAC,SAAS,CACzC,CAAC,aAAa,EAAE,SAAS,EAAE,kBAAkB,EAAE,KAAK,EAAE,IAAI,CAAC,EAC3D,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EACvD,IAAI,gBAAgB,EAAE,CACvB,CAAC;IACF,MAAM,KAAK,GAAG,mBAAmB,CAAC,OAAO,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;IAC9D,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,CAAC;IACvE,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}
@@ -1 +1 @@
-
{"version":3,"file":"combine_docs_chain.test.js","sourceRoot":"","sources":["../../../src/chains/tests/combine_docs_chain.test.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAC7C,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,
+
{"version":3,"file":"combine_docs_chain.test.js","sourceRoot":"","sources":["../../../src/chains/tests/combine_docs_chain.test.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,eAAe,CAAC;AAC7C,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,OAAO,EAAE,MAAM,qBAAqB,CAAC;AAC9C,OAAO,EAAE,WAAW,EAAE,MAAM,+BAA+B,CAAC;AAG5D,IAAI,CAAC,8BAA8B,EAAE,KAAK,IAAI,EAAE;IAC9C,IAAI,UAAU,GAAG,CAAC,CAAC;IACnB,IAAI,aAAa,GAAG,CAAC,CAAC;IAEtB,MAAM,OAAQ,SAAQ,OAAO;QAC3B,QAAQ;YACN,OAAO,MAAM,CAAC;QAChB,CAAC;QAED,KAAK,CAAC,SAAS,CAAC,OAAiB,EAAE,CAAY;YAC7C,OAAO;gBACL,WAAW,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE;oBAClC,IAAI,UAAU,GAAG,EAAE,CAAC;oBACpB,IAAI,MAAM,CAAC,UAAU,CAAC,2BAA2B,CAAC,EAAE;wBAClD,UAAU,IAAI,CAAC,CAAC;wBAChB,UAAU,GAAG,sBAAsB,CAAC;qBACrC;yBAAM,IAAI,MAAM,CAAC,UAAU,CAAC,+BAA+B,CAAC,EAAE;wBAC7D,aAAa,IAAI,CAAC,CAAC;wBACnB,UAAU,GAAG,gBAAgB,CAAC;qBAC/B;oBACD,OAAO;wBACL;4BACE,IAAI,EAAE,UAAU;4BAChB,KAAK,EAAE,CAAC;yBACT;qBACF,CAAC;gBACJ,CAAC,CAAC;aACH,CAAC;QACJ,CAAC;KACF;IAED,MAAM,KAAK,GAAG,IAAI,OAAO,CAAC,EAAE,CAAC,CAAC;IAC9B,MAAM,KAAK,GAAG,WAAW,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,YAAY,EAAE,CAAC,CAAC;IACzD,MAAM,IAAI,GAAG;QACX,IAAI,QAAQ,CAAC,EAAE,WAAW,EAAE,0BAA0B,EAAE,CAAC;QACzD,IAAI,QAAQ,CAAC,EAAE,WAAW,EAAE,0BAA0B,EAAE,CAAC;KAC1D,CAAC;IACF,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC;QAC3B,eAAe,EAAE,IAAI;QACrB,QAAQ,EAAE,kCAAkC;KAC7C,CAAC,CAAC;IACH,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;IAErB,MAAM,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC;QAClB,IAAI,EAAE,gBAAgB;KACvB,CAAC,CAAC;IACH,MAAM,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,kBAAkB;IAC9C,MAAM,CAAC,aAAa,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAChC,CAAC,CAAC,CAAC"}
@@ -1,6 +1,7 @@
 import { test } from "@jest/globals";
 import { OpenAI } from "../../llms/openai.js";
-import {
+import { ChatOpenAI } from "../../chat_models/index.js";
+import { ChatPromptTemplate, HumanMessagePromptTemplate, PromptTemplate, } from "../../prompts/index.js";
 import { LLMChain, ConversationChain } from "../llm_chain.js";
 import { loadChain } from "../load.js";
 test("Test OpenAI", async () => {
@@ -44,4 +45,16 @@ test("Test ConversationChain", async () => {
     const res = await chain.call({ input: "my favorite color" });
     console.log({ res });
 });
+test("Test LLMChain with ChatOpenAI", async () => {
+    const model = new ChatOpenAI({ temperature: 0.9 });
+    const template = "What is a good name for a company that makes {product}?";
+    const prompt = new PromptTemplate({ template, inputVariables: ["product"] });
+    const humanMessagePrompt = new HumanMessagePromptTemplate(prompt);
+    const chatPromptTemplate = ChatPromptTemplate.fromPromptMessages([
+        humanMessagePrompt,
+    ]);
+    const chatChain = new LLMChain({ llm: model, prompt: chatPromptTemplate });
+    const res = await chatChain.call({ product: "colorful socks" });
+    console.log({ res });
+});
 //# sourceMappingURL=llm_chain.int.test.js.map
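The new test above exercises the chat prompt classes added in `dist/prompts/chat.*`. The piece that makes them compose with `LLMChain` is `formatPromptValue`, which returns a prompt value that can be rendered either as chat messages or as a plain string. The sketch below assumes the `langchain/prompts` entry point and the `toChatMessages`/`toString` methods implied by the chat-model code later in this diff.

```ts
import {
  ChatPromptTemplate,
  HumanMessagePromptTemplate,
  PromptTemplate,
} from "langchain/prompts";

const chatPrompt = ChatPromptTemplate.fromPromptMessages([
  new HumanMessagePromptTemplate(
    PromptTemplate.fromTemplate("What is a good name for a company that makes {product}?")
  ),
]);

// One formatted value, two renderings: messages for a chat model,
// a flat string for a completion-style LLM.
const value = await chatPrompt.formatPromptValue({ product: "colorful socks" });
const messages = value.toChatMessages();
const text = value.toString();
```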
@@ -1 +1 @@
-
{"version":3,"file":"llm_chain.int.test.js","sourceRoot":"","sources":["../../../src/chains/tests/llm_chain.int.test.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,eAAe,CAAC;AACrC,OAAO,EAAE,MAAM,EAAE,MAAM,sBAAsB,CAAC;AAC9C,OAAO,EAAE,
+
{"version":3,"file":"llm_chain.int.test.js","sourceRoot":"","sources":["../../../src/chains/tests/llm_chain.int.test.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,MAAM,eAAe,CAAC;AACrC,OAAO,EAAE,MAAM,EAAE,MAAM,sBAAsB,CAAC;AAC9C,OAAO,EAAE,UAAU,EAAE,MAAM,4BAA4B,CAAC;AACxD,OAAO,EACL,kBAAkB,EAClB,0BAA0B,EAC1B,cAAc,GACf,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,QAAQ,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,YAAY,CAAC;AAEvC,IAAI,CAAC,aAAa,EAAE,KAAK,IAAI,EAAE;IAC7B,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,CAAC;IACxD,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC;QAChC,QAAQ,EAAE,aAAa;QACvB,cAAc,EAAE,CAAC,KAAK,CAAC;KACxB,CAAC,CAAC;IACH,MAAM,KAAK,GAAG,IAAI,QAAQ,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC,CAAC;IACnD,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,CAAC,CAAC;IAC3D,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,iBAAiB,EAAE,KAAK,IAAI,EAAE;IACjC,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,CAAC;IACxD,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC;QAChC,QAAQ,EAAE,aAAa;QACvB,cAAc,EAAE,CAAC,KAAK,CAAC;KACxB,CAAC,CAAC;IACH,MAAM,KAAK,GAAG,IAAI,QAAQ,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC,CAAC;IACnD,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAC;IACjD,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,YAAY,EAAE,KAAK,IAAI,EAAE;IAC5B,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,CAAC;IACxD,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC;QAChC,QAAQ,EAAE,aAAa;QACvB,cAAc,EAAE,CAAC,KAAK,CAAC;KACxB,CAAC,CAAC;IACH,MAAM,KAAK,GAAG,IAAI,QAAQ,CAAC,EAAE,MAAM,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC,CAAC;IACnD,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,CAAC,CAAC,CAAC;IAC9D,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,qBAAqB,EAAE,KAAK,IAAI,EAAE;IACrC,MAAM,KAAK,GAAG,MAAM,SAAS,CAAC,oCAAoC,CAAC,CAAC;IACpE,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,mBAAmB,EAAE,CAAC,CAAC;IAC7D,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,wBAAwB,EAAE,KAAK,IAAI,EAAE;IACxC,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,CAAC;IACxD,MAAM,KAAK,GAAG,IAAI,iBAAiB,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC,CAAC;IACpD,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,mBAAmB,EAAE,CAAC,CAAC;IAC7D,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC;AAEH,IAAI,CAAC,+BAA+B,EAAE,KAAK,IAAI,EAAE;IAC/C,MAAM,KAAK,GAAG,IAAI,UAAU,CAAC,EAAE,WAAW,EAAE,GAAG,EAAE,CAAC,CAAC;IACnD,MAAM,QAAQ,GAAG,yDAAyD,CAAC;IAC3E,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC,EAAE,QAAQ,EAAE,cAAc,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,CAAC;IAC7E,MAAM,kBAAkB,GAAG,IAAI,0BAA0B,CAAC,MAAM,CAAC,CAAC;IAClE,MAAM,kBAAkB,GAAG,kBAAkB,CAAC,kBAAkB,CAAC;QAC/D,kBAAkB;KACnB,CAAC,CAAC;IACH,MAAM,SAAS,GAAG,IAAI,QAAQ,CAAC,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,EAAE,kBAAkB,EAAE,CAAC,CAAC;IAC3E,MAAM,GAAG,GAAG,MAAM,SAAS,CAAC,IAAI,CAAC,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC,CAAC;IAChE,OAAO,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,CAAC,CAAC;AACvB,CAAC,CAAC,CAAC"}
@@ -22,12 +22,14 @@ export declare class VectorDBQAChain extends BaseChain implements VectorDBQAChai
     outputKey: string;
     vectorstore: VectorStore;
     combineDocumentsChain: BaseChain;
+    returnSourceDocuments: boolean;
     constructor(fields: {
         vectorstore: VectorStore;
         combineDocumentsChain: BaseChain;
         inputKey?: string;
         outputKey?: string;
         k?: number;
+        returnSourceDocuments?: boolean;
     });
     _call(values: ChainValues): Promise<ChainValues>;
     _chainType(): "vector_db_qa";
@@ -36,11 +36,19 @@ export class VectorDBQAChain extends BaseChain {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "returnSourceDocuments", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
         this.vectorstore = fields.vectorstore;
         this.combineDocumentsChain = fields.combineDocumentsChain;
         this.inputKey = fields.inputKey ?? this.inputKey;
         this.outputKey = fields.outputKey ?? this.outputKey;
         this.k = fields.k ?? this.k;
+        this.returnSourceDocuments =
+            fields.returnSourceDocuments ?? this.returnSourceDocuments;
     }
     async _call(values) {
         if (!(this.inputKey in values)) {
@@ -50,6 +58,12 @@ export class VectorDBQAChain extends BaseChain {
         const docs = await this.vectorstore.similaritySearch(question, this.k);
         const inputs = { question, input_documents: docs };
         const result = await this.combineDocumentsChain.call(inputs);
+        if (this.returnSourceDocuments) {
+            return {
+                ...result,
+                sourceDocuments: docs,
+            };
+        }
         return result;
     }
     _chainType() {
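`VectorDBQAChain` picks up the same `returnSourceDocuments` behaviour as the chat variant: the retrieved `docs` are merged into the result as `sourceDocuments`. A short sketch with the same caveats about assumed entry points as above; the `query` input key is also an assumption (it is not shown in this section), and the flag is again set on the instance since `fromLLM` does not take it here.

```ts
import { VectorDBQAChain } from "langchain/chains";
import { OpenAI } from "langchain/llms";
import { OpenAIEmbeddings } from "langchain/embeddings";
import { HNSWLib } from "langchain/vectorstores";

const vectorStore = await HNSWLib.fromTexts(
  ["Hello world", "Bye bye"],
  [{ id: 1 }, { id: 2 }],
  new OpenAIEmbeddings()
);

const chain = VectorDBQAChain.fromLLM(new OpenAI({ temperature: 0 }), vectorStore);
chain.returnSourceDocuments = true;

// Assumed input key "query"; the answer comes back alongside sourceDocuments.
const res = await chain.call({ query: "What does the text say?" });
console.log(res.sourceDocuments);
```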
@@ -1 +1 @@
-
{"version":3,"file":"vector_db_qa.js","sourceRoot":"","sources":["../../src/chains/vector_db_qa.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,EAGT,WAAW,GACZ,MAAM,YAAY,CAAC;AAKpB,OAAO,EAAE,qBAAqB,EAAE,MAAM,kBAAkB,CAAC;AAmBzD,MAAM,OAAO,eAAgB,SAAQ,SAAS;IAK5C,IAAI,SAAS;QACX,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IACzB,CAAC;
+
{"version":3,"file":"vector_db_qa.js","sourceRoot":"","sources":["../../src/chains/vector_db_qa.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,SAAS,EAGT,WAAW,GACZ,MAAM,YAAY,CAAC;AAKpB,OAAO,EAAE,qBAAqB,EAAE,MAAM,kBAAkB,CAAC;AAmBzD,MAAM,OAAO,eAAgB,SAAQ,SAAS;IAK5C,IAAI,SAAS;QACX,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IACzB,CAAC;IAUD,YAAY,MAOX;QACC,KAAK,EAAE,CAAC;QAxBV;;;;mBAAI,CAAC;WAAC;QAEN;;;;mBAAW,OAAO;WAAC;QAMnB;;;;mBAAY,QAAQ;WAAC;QAErB;;;;;WAAyB;QAEzB;;;;;WAAiC;QAEjC;;;;mBAAwB,KAAK;WAAC;QAW5B,IAAI,CAAC,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC;QACtC,IAAI,CAAC,qBAAqB,GAAG,MAAM,CAAC,qBAAqB,CAAC;QAC1D,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,CAAC;QACjD,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC;QACpD,IAAI,CAAC,CAAC,GAAG,MAAM,CAAC,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC;QAC5B,IAAI,CAAC,qBAAqB;YACxB,MAAM,CAAC,qBAAqB,IAAI,IAAI,CAAC,qBAAqB,CAAC;IAC/D,CAAC;IAED,KAAK,CAAC,KAAK,CAAC,MAAmB;QAC7B,IAAI,CAAC,CAAC,IAAI,CAAC,QAAQ,IAAI,MAAM,CAAC,EAAE;YAC9B,MAAM,IAAI,KAAK,CAAC,gBAAgB,IAAI,CAAC,QAAQ,aAAa,CAAC,CAAC;SAC7D;QACD,MAAM,QAAQ,GAAW,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC/C,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC;QACvE,MAAM,MAAM,GAAG,EAAE,QAAQ,EAAE,eAAe,EAAE,IAAI,EAAE,CAAC;QACnD,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC7D,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,OAAO;gBACL,GAAG,MAAM;gBACT,eAAe,EAAE,IAAI;aACtB,CAAC;SACH;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,UAAU;QACR,OAAO,cAAuB,CAAC;IACjC,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,WAAW,CACtB,IAA+B,EAC/B,MAAkB;QAElB,IAAI,CAAC,CAAC,aAAa,IAAI,MAAM,CAAC,EAAE;YAC9B,MAAM,IAAI,KAAK,CACb,8DAA8D,CAC/D,CAAC;SACH;QACD,MAAM,EAAE,WAAW,EAAE,GAAG,MAAM,CAAC;QAC/B,MAAM,+BAA+B,GAAG,MAAM,qBAAqB,CAGjE,yBAAyB,EAAE,IAAI,CAAC,CAAC;QAEnC,OAAO,IAAI,eAAe,CAAC;YACzB,qBAAqB,EAAE,MAAM,SAAS,CAAC,WAAW,CAChD,+BAA+B,CAChC;YACD,CAAC,EAAE,IAAI,CAAC,CAAC;YACT,WAAW;SACZ,CAAC,CAAC;IACL,CAAC;IAED,SAAS;QACP,OAAO;YACL,KAAK,EAAE,IAAI,CAAC,UAAU,EAAE;YACxB,uBAAuB,EAAE,IAAI,CAAC,qBAAqB,CAAC,SAAS,EAAE;YAC/D,CAAC,EAAE,IAAI,CAAC,CAAC;SACV,CAAC;IACJ,CAAC;IAED,MAAM,CAAC,OAAO,CAAC,GAAY,EAAE,WAAwB;QACnD,MAAM,OAAO,GAAG,WAAW,CAAC,GAAG,CAAC,CAAC;QACjC,MAAM,QAAQ,GAAG,IAAI,IAAI,CAAC,EAAE,WAAW,EAAE,qBAAqB,EAAE,OAAO,EAAE,CAAC,CAAC;QAC3E,OAAO,QAAQ,CAAC;IAClB,CAAC;CACF"}
@@ -0,0 +1,32 @@
+import { BaseChatMessage, BaseLanguageModel, BasePromptValue, ChatResult, LLMCallbackManager, LLMResult } from "../schema/index.js";
+export type SerializedChatModel = {
+    _model: string;
+    _type: string;
+} & Record<string, any>;
+export declare abstract class BaseChatModel extends BaseLanguageModel {
+    callbackManager: LLMCallbackManager;
+    verbose: boolean;
+    protected constructor(callbackManager?: LLMCallbackManager, verbose?: boolean);
+    generate(messages: BaseChatMessage[][], stop?: string[]): Promise<LLMResult>;
+    /**
+     * Get the identifying parameters of the LLM.
+     */
+    _identifyingParams(): Record<string, any>;
+    _modelType(): string;
+    abstract _llmType(): string;
+    /**
+     * Return a json-like object representing this Chat model.
+     */
+    serialize(): SerializedChatModel;
+    private _tokenizer?;
+    getNumTokens(text: string): number;
+    generatePrompt(promptValues: BasePromptValue[], stop?: string[]): Promise<LLMResult>;
+    abstract _generate(messages: BaseChatMessage[], stop?: string[]): Promise<ChatResult>;
+    call(messages: BaseChatMessage[], stop?: string[]): Promise<BaseChatMessage>;
+    callPrompt(promptValue: BasePromptValue, stop?: string[]): Promise<BaseChatMessage>;
+}
+export declare abstract class SimpleChatModel extends BaseChatModel {
+    protected constructor(callbackManager?: LLMCallbackManager, verbose?: boolean);
+    abstract _call(messages: BaseChatMessage[], stop?: string[]): Promise<string>;
+    _generate(messages: BaseChatMessage[], stop?: string[]): Promise<ChatResult>;
+}
@@ -0,0 +1,110 @@
+import GPT3Tokenizer from "gpt3-tokenizer";
+import { AIChatMessage, BaseLanguageModel, } from "../schema/index.js";
+const getCallbackManager = () => ({
+    handleStart: (..._args) => {
+        // console.log(args);
+    },
+    handleEnd: (..._args) => {
+        // console.log(args);
+    },
+    handleError: (..._args) => {
+        // console.log(args);
+    },
+});
+const getVerbosity = () => true;
+export class BaseChatModel extends BaseLanguageModel {
+    constructor(callbackManager, verbose) {
+        super();
+        Object.defineProperty(this, "callbackManager", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "verbose", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        // TODO deserialize
+        Object.defineProperty(this, "_tokenizer", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.callbackManager = callbackManager ?? getCallbackManager();
+        this.verbose = verbose ?? getVerbosity();
+    }
+    async generate(messages, stop) {
+        const generations = [];
+        for (const message of messages) {
+            const result = await this._generate(message, stop);
+            generations.push(result.generations);
+        }
+        return {
+            generations,
+        };
+    }
+    /**
+     * Get the identifying parameters of the LLM.
+     */
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    _identifyingParams() {
+        return {};
+    }
+    _modelType() {
+        return "base_chat_model";
+    }
+    /**
+     * Return a json-like object representing this Chat model.
+     */
+    serialize() {
+        return {
+            ...this._identifyingParams(),
+            _type: this._llmType(),
+            _model: this._modelType(),
+        };
+    }
+    getNumTokens(text) {
+        // TODOs copied from py implementation
+        // TODO: this method may not be exact.
+        // TODO: this method may differ based on model (eg codex, gpt-3.5).
+        if (this._tokenizer === undefined) {
+            const Constructor = GPT3Tokenizer.default;
+            this._tokenizer = new Constructor({ type: "gpt3" });
+        }
+        return this._tokenizer.encode(text).bpe.length;
+    }
+    async generatePrompt(promptValues, stop) {
+        const promptMessages = promptValues.map((promptValue) => promptValue.toChatMessages());
+        return this.generate(promptMessages, stop);
+    }
+    async call(messages, stop) {
+        const { generations } = await this._generate(messages, stop);
+        return generations[0].message;
+    }
+    async callPrompt(promptValue, stop) {
+        const promptMessages = promptValue.toChatMessages();
+        return this.call(promptMessages, stop);
+    }
+}
+export class SimpleChatModel extends BaseChatModel {
+    constructor(callbackManager, verbose) {
+        super(callbackManager, verbose);
+    }
+    async _generate(messages, stop) {
+        const text = await this._call(messages, stop);
+        const message = new AIChatMessage(text);
+        return {
+            generations: [
+                {
+                    text: message.text,
+                    message,
+                },
+            ],
+        };
+    }
+}
+//# sourceMappingURL=base.js.map
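`BaseChatModel` above defines the generic surface (per-conversation `generate`, `generatePrompt`, token counting), while `SimpleChatModel` only asks subclasses for a `_call` that maps messages to a string and wraps the result in an `AIChatMessage`. Below is a toy subclass as a sketch. Note that the public `langchain/chat_models` index only re-exports `ChatOpenAI` in this version, so the deep import path is illustrative, and `HumanChatMessage`/`langchain/schema` are assumed from the file list and the schema imports shown above.

```ts
import { SimpleChatModel } from "langchain/dist/chat_models/base.js"; // illustrative deep import
import { BaseChatMessage, HumanChatMessage } from "langchain/schema";

// Echoes the last message back; just enough to satisfy the abstract API.
class EchoChatModel extends SimpleChatModel {
  _llmType(): string {
    return "echo";
  }

  async _call(messages: BaseChatMessage[], _stop?: string[]): Promise<string> {
    // SimpleChatModel._generate wraps this string in an AIChatMessage.
    return messages[messages.length - 1].text;
  }
}

const model = new EchoChatModel();
const reply = await model.call([new HumanChatMessage("hello there")]);
console.log(reply.text); // "hello there"
```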
@@ -0,0 +1 @@
+
{"version":3,"file":"base.js","sourceRoot":"","sources":["../../src/chat_models/base.ts"],"names":[],"mappings":"AAAA,OAAO,aAAa,MAAM,gBAAgB,CAAC;AAC3C,OAAO,EACL,aAAa,EAEb,iBAAiB,GAMlB,MAAM,oBAAoB,CAAC;AAE5B,MAAM,kBAAkB,GAAG,GAAuB,EAAE,CAAC,CAAC;IACpD,WAAW,EAAE,CAAC,GAAG,KAAK,EAAE,EAAE;QACxB,qBAAqB;IACvB,CAAC;IACD,SAAS,EAAE,CAAC,GAAG,KAAK,EAAE,EAAE;QACtB,qBAAqB;IACvB,CAAC;IACD,WAAW,EAAE,CAAC,GAAG,KAAK,EAAE,EAAE;QACxB,qBAAqB;IACvB,CAAC;CACF,CAAC,CAAC;AAEH,MAAM,YAAY,GAAG,GAAG,EAAE,CAAC,IAAI,CAAC;AAQhC,MAAM,OAAgB,aAAc,SAAQ,iBAAiB;IAK3D,YACE,eAAoC,EACpC,OAAiB;QAEjB,KAAK,EAAE,CAAC;QARV;;;;;WAAoC;QAEpC;;;;;WAAiB;QAkDjB,mBAAmB;QAEnB;;;;;WAA2C;QA7CzC,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,kBAAkB,EAAE,CAAC;QAC/D,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,YAAY,EAAE,CAAC;IAC3C,CAAC;IAED,KAAK,CAAC,QAAQ,CACZ,QAA6B,EAC7B,IAAe;QAEf,MAAM,WAAW,GAAuB,EAAE,CAAC;QAC3C,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;YAC9B,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;YACnD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC;SACtC;QACD,OAAO;YACL,WAAW;SACZ,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,8DAA8D;IAC9D,kBAAkB;QAChB,OAAO,EAAE,CAAC;IACZ,CAAC;IAED,UAAU;QACR,OAAO,iBAA0B,CAAC;IACpC,CAAC;IAID;;OAEG;IACH,SAAS;QACP,OAAO;YACL,GAAG,IAAI,CAAC,kBAAkB,EAAE;YAC5B,KAAK,EAAE,IAAI,CAAC,QAAQ,EAAE;YACtB,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE;SAC1B,CAAC;IACJ,CAAC;IAMD,YAAY,CAAC,IAAY;QACvB,sCAAsC;QACtC,sCAAsC;QACtC,mEAAmE;QACnE,IAAI,IAAI,CAAC,UAAU,KAAK,SAAS,EAAE;YACjC,MAAM,WAAW,GAAG,aAAa,CAAC,OAAO,CAAC;YAC1C,IAAI,CAAC,UAAU,GAAG,IAAI,WAAW,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC;SACrD;QACD,OAAO,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC;IACjD,CAAC;IAED,KAAK,CAAC,cAAc,CAClB,YAA+B,EAC/B,IAAe;QAEf,MAAM,cAAc,GAAwB,YAAY,CAAC,GAAG,CAC1D,CAAC,WAAW,EAAE,EAAE,CAAC,WAAW,CAAC,cAAc,EAAE,CAC9C,CAAC;QACF,OAAO,IAAI,CAAC,QAAQ,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC;IAC7C,CAAC;IAOD,KAAK,CAAC,IAAI,CACR,QAA2B,EAC3B,IAAe;QAEf,MAAM,EAAE,WAAW,EAAE,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QAC7D,OAAO,WAAW,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;IAChC,CAAC;IAED,KAAK,CAAC,UAAU,CACd,WAA4B,EAC5B,IAAe;QAEf,MAAM,cAAc,GAAsB,WAAW,CAAC,cAAc,EAAE,CAAC;QACvE,OAAO,IAAI,CAAC,IAAI,CAAC,cAAc,EAAE,IAAI,CAAC,CAAC;IACzC,CAAC;CACF;AAED,MAAM,OAAgB,eAAgB,SAAQ,aAAa;IACzD,YACE,eAAoC,EACpC,OAAiB;QAEjB,KAAK,CAAC,eAAe,EAAE,OAAO,CAAC,CAAC;IAClC,CAAC;IAID,KAAK,CAAC,SAAS,CACb,QAA2B,EAC3B,IAAe;QAEf,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QAC9C,MAAM,OAAO,GAAG,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC;QACxC,OAAO;YACL,WAAW,EAAE;gBACX;oBACE,IAAI,EAAE,OAAO,CAAC,IAAI;oBAClB,OAAO;iBACR;aACF;SACF,CAAC;IACJ,CAAC;CACF"}
@@ -0,0 +1 @@
+export { ChatOpenAI } from "./openai.js";
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/chat_models/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC"}
@@ -0,0 +1,150 @@
+import { CreateChatCompletionRequest, ConfigurationParameters } from "openai";
+import { BaseChatModel } from "./base.js";
+import { BaseChatMessage, ChatResult, LLMCallbackManager } from "../schema/index.js";
+interface ModelParams {
+    /** Sampling temperature to use, between 0 and 2, defaults to 1 */
+    temperature: number;
+    /** Total probability mass of tokens to consider at each step, between 0 and 1, defaults to 1 */
+    topP: number;
+    /** Penalizes repeated tokens according to frequency */
+    frequencyPenalty: number;
+    /** Penalizes repeated tokens */
+    presencePenalty: number;
+    /** Number of chat completions to generate for each prompt */
+    n: number;
+    /** Dictionary used to adjust the probability of specific tokens being generated */
+    logitBias?: Record<string, number>;
+    /** Whether to stream the results or not */
+    streaming: boolean;
+    /**
+     * Maximum number of tokens to generate in the completion. -1 returns as many
+     * tokens as possible given the prompt and the model's maximum context size.
+     */
+    maxTokens: number;
+}
+/**
+ * Input to OpenAI class.
+ * @augments ModelParams
+ */
+interface OpenAIInput extends ModelParams {
+    /** Model name to use */
+    modelName: string;
+    /** Holds any additional parameters that are valid to pass to {@link
+     * https://platform.openai.com/docs/api-reference/completions/create |
+     * `openai.create`} that are not explicitly specified on this class.
+     */
+    modelKwargs?: Kwargs;
+    /** Maximum number of retries to make when generating */
+    maxRetries: number;
+    /** List of stop words to use when generating */
+    stop?: string[];
+}
+type Kwargs = Record<string, any>;
+/**
+ * Wrapper around OpenAI large language models that use the Chat endpoint.
+ *
+ * To use you should have the `openai` package installed, with the
+ * `OPENAI_API_KEY` environment variable set.
+ *
+ * @remarks
+ * Any parameters that are valid to be passed to {@link
+ * https://platform.openai.com/docs/api-reference/chat/create |
+ * `openai.createCompletion`} can be passed through {@link modelKwargs}, even
+ * if not explicitly available on this class.
+ *
+ * @augments BaseLLM
+ * @augments OpenAIInput
+ */
+export declare class ChatOpenAI extends BaseChatModel implements OpenAIInput {
+    temperature: number;
+    topP: number;
+    frequencyPenalty: number;
+    presencePenalty: number;
+    n: number;
+    logitBias?: Record<string, number>;
+    modelName: string;
+    modelKwargs?: Kwargs;
+    maxRetries: number;
+    stop?: string[];
+    streaming: boolean;
+    maxTokens: number;
+    private batchClient;
+    private streamingClient;
+    private clientConfig;
+    constructor(fields?: Partial<OpenAIInput> & {
+        callbackManager?: LLMCallbackManager;
+        concurrency?: number;
+        cache?: boolean;
+        verbose?: boolean;
+        openAIApiKey?: string;
+    }, configuration?: ConfigurationParameters);
+    /**
+     * Get the parameters used to invoke the model
+     */
+    invocationParams(): Omit<CreateChatCompletionRequest, "messages"> & Kwargs;
+    _identifyingParams(): {
+        apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>) | undefined;
+        organization?: string | undefined;
+        username?: string | undefined;
+        password?: string | undefined;
+        accessToken?: string | Promise<string> | ((name?: string | undefined, scopes?: string[] | undefined) => string) | ((name?: string | undefined, scopes?: string[] | undefined) => Promise<string>) | undefined;
+        basePath?: string | undefined;
+        baseOptions?: any;
+        formDataCtor?: (new () => any) | undefined;
+        stop?: import("openai").CreateChatCompletionRequestStop | undefined;
+        model: string;
+        temperature?: number | null | undefined;
+        top_p?: number | null | undefined;
+        n?: number | null | undefined;
+        stream?: boolean | null | undefined;
+        presence_penalty?: number | null | undefined;
+        frequency_penalty?: number | null | undefined;
+        logit_bias?: object | null | undefined;
+        user?: string | undefined;
+        model_name: string;
+    };
+    /**
+     * Get the identifying parameters for the model
+     */
+    identifyingParams(): {
+        apiKey?: string | Promise<string> | ((name: string) => string) | ((name: string) => Promise<string>) | undefined;
+        organization?: string | undefined;
+        username?: string | undefined;
+        password?: string | undefined;
+        accessToken?: string | Promise<string> | ((name?: string | undefined, scopes?: string[] | undefined) => string) | ((name?: string | undefined, scopes?: string[] | undefined) => Promise<string>) | undefined;
+        basePath?: string | undefined;
+        baseOptions?: any;
+        formDataCtor?: (new () => any) | undefined;
+        stop?: import("openai").CreateChatCompletionRequestStop | undefined;
+        model: string;
+        temperature?: number | null | undefined;
+        top_p?: number | null | undefined;
+        n?: number | null | undefined;
+        stream?: boolean | null | undefined;
+        presence_penalty?: number | null | undefined;
+        frequency_penalty?: number | null | undefined;
+        logit_bias?: object | null | undefined;
+        user?: string | undefined;
+        model_name: string;
+    };
+    /**
+     * Call out to OpenAI's endpoint with k unique prompts
+     *
+     * @param messages - The messages to pass into the model.
+     * @param [stop] - Optional list of stop words to use when generating.
+     *
+     * @returns The full LLM output.
+     *
+     * @example
+     * ```ts
+     * import { OpenAI } from "langchain/llms";
+     * const openai = new OpenAI();
+     * const response = await openai.generate(["Tell me a joke."]);
+     * ```
+     */
+    _generate(messages: BaseChatMessage[], stop?: string[]): Promise<ChatResult>;
+    /** @ignore */
+    completionWithRetry(request: CreateChatCompletionRequest): Promise<import("axios").AxiosResponse<import("openai").CreateChatCompletionResponse, any>>;
+    _llmType(): string;
+}
+export {};
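The declarations above are the public surface of the new chat wrapper. A minimal sketch of calling it directly (rather than through `LLMChain`), assuming the `langchain/chat_models` and `langchain/schema` entry points added in this release, message class names implied by the schema imports above, and an `OPENAI_API_KEY` in the environment:

```ts
import { ChatOpenAI } from "langchain/chat_models";
import { HumanChatMessage, SystemChatMessage } from "langchain/schema";

const chat = new ChatOpenAI({ temperature: 0 });

// call() takes an array of chat messages and resolves to the model's reply message.
const response = await chat.call([
  new SystemChatMessage("You are a translator from English to French."),
  new HumanChatMessage("I love programming."),
]);
console.log(response.text);

// generate() accepts several message lists at once and returns an LLMResult.
const result = await chat.generate([
  [new HumanChatMessage("Tell me a joke.")],
  [new HumanChatMessage("Tell me another one.")],
]);
console.log(result.generations.length); // 2
```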