langchain 0.0.148 → 0.0.150

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/dist/chains/question_answering/load.cjs +12 -4
  2. package/dist/chains/question_answering/load.d.ts +2 -0
  3. package/dist/chains/question_answering/load.js +12 -4
  4. package/dist/chains/summarization/load.cjs +8 -4
  5. package/dist/chains/summarization/load.d.ts +2 -0
  6. package/dist/chains/summarization/load.js +8 -4
  7. package/dist/llms/bedrock.cjs +9 -1
  8. package/dist/llms/bedrock.d.ts +3 -0
  9. package/dist/llms/bedrock.js +9 -1
  10. package/dist/llms/replicate.cjs +28 -2
  11. package/dist/llms/replicate.d.ts +3 -0
  12. package/dist/llms/replicate.js +28 -2
  13. package/dist/prompts/prompt.cjs +2 -0
  14. package/dist/prompts/prompt.d.ts +1 -1
  15. package/dist/prompts/prompt.js +2 -0
  16. package/dist/retrievers/self_query/base.cjs +1 -1
  17. package/dist/retrievers/self_query/base.js +2 -2
  18. package/dist/retrievers/self_query/functional.cjs +1 -1
  19. package/dist/retrievers/self_query/functional.js +2 -2
  20. package/dist/retrievers/self_query/utils.cjs +46 -6
  21. package/dist/retrievers/self_query/utils.d.ts +7 -0
  22. package/dist/retrievers/self_query/utils.js +44 -5
  23. package/dist/schema/runnable/base.cjs +911 -0
  24. package/dist/schema/runnable/base.d.ts +300 -0
  25. package/dist/schema/runnable/base.js +897 -0
  26. package/dist/schema/runnable/index.cjs +19 -926
  27. package/dist/schema/runnable/index.d.ts +4 -298
  28. package/dist/schema/runnable/index.js +3 -914
  29. package/dist/schema/runnable/passthrough.cjs +31 -0
  30. package/dist/schema/runnable/passthrough.d.ts +11 -0
  31. package/dist/schema/runnable/passthrough.js +27 -0
  32. package/dist/schema/runnable/router.cjs +74 -0
  33. package/dist/schema/runnable/router.d.ts +29 -0
  34. package/dist/schema/runnable/router.js +70 -0
  35. package/dist/vectorstores/opensearch.cjs +4 -2
  36. package/dist/vectorstores/opensearch.d.ts +4 -1
  37. package/dist/vectorstores/opensearch.js +4 -2
  38. package/package.json +3 -3
@@ -42,9 +42,13 @@ exports.loadQAStuffChain = loadQAStuffChain;
42
42
  * @returns A MapReduceQAChain instance.
43
43
  */
44
44
  function loadQAMapReduceChain(llm, params = {}) {
45
- const { combineMapPrompt = map_reduce_prompts_js_1.COMBINE_QA_PROMPT_SELECTOR.getPrompt(llm), combinePrompt = map_reduce_prompts_js_1.COMBINE_PROMPT_SELECTOR.getPrompt(llm), verbose, returnIntermediateSteps, } = params;
45
+ const { combineMapPrompt = map_reduce_prompts_js_1.COMBINE_QA_PROMPT_SELECTOR.getPrompt(llm), combinePrompt = map_reduce_prompts_js_1.COMBINE_PROMPT_SELECTOR.getPrompt(llm), verbose, combineLLM, returnIntermediateSteps, } = params;
46
46
  const llmChain = new llm_chain_js_1.LLMChain({ prompt: combineMapPrompt, llm, verbose });
47
- const combineLLMChain = new llm_chain_js_1.LLMChain({ prompt: combinePrompt, llm, verbose });
47
+ const combineLLMChain = new llm_chain_js_1.LLMChain({
48
+ prompt: combinePrompt,
49
+ llm: combineLLM ?? llm,
50
+ verbose,
51
+ });
48
52
  const combineDocumentChain = new combine_docs_chain_js_1.StuffDocumentsChain({
49
53
  llmChain: combineLLMChain,
50
54
  documentVariableName: "summaries",
@@ -67,9 +71,13 @@ exports.loadQAMapReduceChain = loadQAMapReduceChain;
67
71
  * @returns A RefineQAChain instance.
68
72
  */
69
73
  function loadQARefineChain(llm, params = {}) {
70
- const { questionPrompt = refine_prompts_js_1.QUESTION_PROMPT_SELECTOR.getPrompt(llm), refinePrompt = refine_prompts_js_1.REFINE_PROMPT_SELECTOR.getPrompt(llm), verbose, } = params;
74
+ const { questionPrompt = refine_prompts_js_1.QUESTION_PROMPT_SELECTOR.getPrompt(llm), refinePrompt = refine_prompts_js_1.REFINE_PROMPT_SELECTOR.getPrompt(llm), refineLLM, verbose, } = params;
71
75
  const llmChain = new llm_chain_js_1.LLMChain({ prompt: questionPrompt, llm, verbose });
72
- const refineLLMChain = new llm_chain_js_1.LLMChain({ prompt: refinePrompt, llm, verbose });
76
+ const refineLLMChain = new llm_chain_js_1.LLMChain({
77
+ prompt: refinePrompt,
78
+ llm: refineLLM ?? llm,
79
+ verbose,
80
+ });
73
81
  const chain = new combine_docs_chain_js_1.RefineDocumentsChain({
74
82
  llmChain,
75
83
  refineLLMChain,
@@ -35,6 +35,7 @@ export interface MapReduceQAChainParams {
35
35
  returnIntermediateSteps?: MapReduceDocumentsChainInput["returnIntermediateSteps"];
36
36
  combineMapPrompt?: BasePromptTemplate;
37
37
  combinePrompt?: BasePromptTemplate;
38
+ combineLLM?: BaseLanguageModel;
38
39
  verbose?: boolean;
39
40
  }
40
41
  /**
@@ -51,6 +52,7 @@ export declare function loadQAMapReduceChain(llm: BaseLanguageModel, params?: Ma
51
52
  export interface RefineQAChainParams {
52
53
  questionPrompt?: BasePromptTemplate;
53
54
  refinePrompt?: BasePromptTemplate;
55
+ refineLLM?: BaseLanguageModel;
54
56
  verbose?: boolean;
55
57
  }
56
58
  /**
@@ -37,9 +37,13 @@ export function loadQAStuffChain(llm, params = {}) {
37
37
  * @returns A MapReduceQAChain instance.
38
38
  */
39
39
  export function loadQAMapReduceChain(llm, params = {}) {
40
- const { combineMapPrompt = COMBINE_QA_PROMPT_SELECTOR.getPrompt(llm), combinePrompt = COMBINE_PROMPT_SELECTOR.getPrompt(llm), verbose, returnIntermediateSteps, } = params;
40
+ const { combineMapPrompt = COMBINE_QA_PROMPT_SELECTOR.getPrompt(llm), combinePrompt = COMBINE_PROMPT_SELECTOR.getPrompt(llm), verbose, combineLLM, returnIntermediateSteps, } = params;
41
41
  const llmChain = new LLMChain({ prompt: combineMapPrompt, llm, verbose });
42
- const combineLLMChain = new LLMChain({ prompt: combinePrompt, llm, verbose });
42
+ const combineLLMChain = new LLMChain({
43
+ prompt: combinePrompt,
44
+ llm: combineLLM ?? llm,
45
+ verbose,
46
+ });
43
47
  const combineDocumentChain = new StuffDocumentsChain({
44
48
  llmChain: combineLLMChain,
45
49
  documentVariableName: "summaries",
@@ -61,9 +65,13 @@ export function loadQAMapReduceChain(llm, params = {}) {
61
65
  * @returns A RefineQAChain instance.
62
66
  */
63
67
  export function loadQARefineChain(llm, params = {}) {
64
- const { questionPrompt = QUESTION_PROMPT_SELECTOR.getPrompt(llm), refinePrompt = REFINE_PROMPT_SELECTOR.getPrompt(llm), verbose, } = params;
68
+ const { questionPrompt = QUESTION_PROMPT_SELECTOR.getPrompt(llm), refinePrompt = REFINE_PROMPT_SELECTOR.getPrompt(llm), refineLLM, verbose, } = params;
65
69
  const llmChain = new LLMChain({ prompt: questionPrompt, llm, verbose });
66
- const refineLLMChain = new LLMChain({ prompt: refinePrompt, llm, verbose });
70
+ const refineLLMChain = new LLMChain({
71
+ prompt: refinePrompt,
72
+ llm: refineLLM ?? llm,
73
+ verbose,
74
+ });
67
75
  const chain = new RefineDocumentsChain({
68
76
  llmChain,
69
77
  refineLLMChain,
@@ -18,11 +18,11 @@ const loadSummarizationChain = (llm, params = { type: "map_reduce" }) => {
18
18
  return chain;
19
19
  }
20
20
  if (params.type === "map_reduce") {
21
- const { combineMapPrompt = stuff_prompts_js_1.DEFAULT_PROMPT, combinePrompt = stuff_prompts_js_1.DEFAULT_PROMPT, returnIntermediateSteps, } = params;
21
+ const { combineMapPrompt = stuff_prompts_js_1.DEFAULT_PROMPT, combinePrompt = stuff_prompts_js_1.DEFAULT_PROMPT, combineLLM, returnIntermediateSteps, } = params;
22
22
  const llmChain = new llm_chain_js_1.LLMChain({ prompt: combineMapPrompt, llm, verbose });
23
23
  const combineLLMChain = new llm_chain_js_1.LLMChain({
24
24
  prompt: combinePrompt,
25
- llm,
25
+ llm: combineLLM ?? llm,
26
26
  verbose,
27
27
  });
28
28
  const combineDocumentChain = new combine_docs_chain_js_1.StuffDocumentsChain({
@@ -40,9 +40,13 @@ const loadSummarizationChain = (llm, params = { type: "map_reduce" }) => {
40
40
  return chain;
41
41
  }
42
42
  if (params.type === "refine") {
43
- const { refinePrompt = refine_prompts_js_1.REFINE_PROMPT, questionPrompt = stuff_prompts_js_1.DEFAULT_PROMPT } = params;
43
+ const { refinePrompt = refine_prompts_js_1.REFINE_PROMPT, refineLLM, questionPrompt = stuff_prompts_js_1.DEFAULT_PROMPT, } = params;
44
44
  const llmChain = new llm_chain_js_1.LLMChain({ prompt: questionPrompt, llm, verbose });
45
- const refineLLMChain = new llm_chain_js_1.LLMChain({ prompt: refinePrompt, llm, verbose });
45
+ const refineLLMChain = new llm_chain_js_1.LLMChain({
46
+ prompt: refinePrompt,
47
+ llm: refineLLM ?? llm,
48
+ verbose,
49
+ });
46
50
  const chain = new combine_docs_chain_js_1.RefineDocumentsChain({
47
51
  llmChain,
48
52
  refineLLMChain,
@@ -16,9 +16,11 @@ export type SummarizationChainParams = BaseParams & ({
16
16
  type?: "map_reduce";
17
17
  combineMapPrompt?: BasePromptTemplate;
18
18
  combinePrompt?: BasePromptTemplate;
19
+ combineLLM?: BaseLanguageModel;
19
20
  } & Pick<MapReduceDocumentsChainInput, "returnIntermediateSteps">) | {
20
21
  type?: "refine";
21
22
  refinePrompt?: BasePromptTemplate;
23
+ refineLLM?: BaseLanguageModel;
22
24
  questionPrompt?: BasePromptTemplate;
23
25
  });
24
26
  export declare const loadSummarizationChain: (llm: BaseLanguageModel, params?: SummarizationChainParams) => StuffDocumentsChain | MapReduceDocumentsChain | RefineDocumentsChain;
@@ -15,11 +15,11 @@ export const loadSummarizationChain = (llm, params = { type: "map_reduce" }) =>
15
15
  return chain;
16
16
  }
17
17
  if (params.type === "map_reduce") {
18
- const { combineMapPrompt = DEFAULT_PROMPT, combinePrompt = DEFAULT_PROMPT, returnIntermediateSteps, } = params;
18
+ const { combineMapPrompt = DEFAULT_PROMPT, combinePrompt = DEFAULT_PROMPT, combineLLM, returnIntermediateSteps, } = params;
19
19
  const llmChain = new LLMChain({ prompt: combineMapPrompt, llm, verbose });
20
20
  const combineLLMChain = new LLMChain({
21
21
  prompt: combinePrompt,
22
- llm,
22
+ llm: combineLLM ?? llm,
23
23
  verbose,
24
24
  });
25
25
  const combineDocumentChain = new StuffDocumentsChain({
@@ -37,9 +37,13 @@ export const loadSummarizationChain = (llm, params = { type: "map_reduce" }) =>
37
37
  return chain;
38
38
  }
39
39
  if (params.type === "refine") {
40
- const { refinePrompt = REFINE_PROMPT, questionPrompt = DEFAULT_PROMPT } = params;
40
+ const { refinePrompt = REFINE_PROMPT, refineLLM, questionPrompt = DEFAULT_PROMPT, } = params;
41
41
  const llmChain = new LLMChain({ prompt: questionPrompt, llm, verbose });
42
- const refineLLMChain = new LLMChain({ prompt: refinePrompt, llm, verbose });
42
+ const refineLLMChain = new LLMChain({
43
+ prompt: refinePrompt,
44
+ llm: refineLLM ?? llm,
45
+ verbose,
46
+ });
43
47
  const chain = new RefineDocumentsChain({
44
48
  llmChain,
45
49
  refineLLMChain,
@@ -112,6 +112,12 @@ class Bedrock extends base_js_1.LLM {
112
112
  writable: true,
113
113
  value: void 0
114
114
  });
115
+ Object.defineProperty(this, "endpointUrl", {
116
+ enumerable: true,
117
+ configurable: true,
118
+ writable: true,
119
+ value: void 0
120
+ });
115
121
  Object.defineProperty(this, "codec", {
116
122
  enumerable: true,
117
123
  configurable: true,
@@ -132,6 +138,7 @@ class Bedrock extends base_js_1.LLM {
132
138
  this.temperature = fields?.temperature ?? this.temperature;
133
139
  this.maxTokens = fields?.maxTokens ?? this.maxTokens;
134
140
  this.fetchFn = fields?.fetchFn ?? fetch;
141
+ this.endpointUrl = fields?.endpointUrl;
135
142
  }
136
143
  /** Call out to Bedrock service model.
137
144
  Arguments:
@@ -154,7 +161,8 @@ class Bedrock extends base_js_1.LLM {
154
161
  const provider = this.model.split(".")[0];
155
162
  const service = "bedrock";
156
163
  const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature);
157
- const url = new URL(`https://${service}.${this.region}.amazonaws.com/model/${this.model}/invoke-with-response-stream`);
164
+ const endpointUrl = this.endpointUrl ?? `${service}.${this.region}.amazonaws.com`;
165
+ const url = new URL(`https://${endpointUrl}/model/${this.model}/invoke-with-response-stream`);
158
166
  const request = new protocol_http_1.HttpRequest({
159
167
  hostname: url.hostname,
160
168
  path: url.pathname,
@@ -29,6 +29,8 @@ export interface BedrockInput {
29
29
  maxTokens?: number;
30
30
  /** A custom fetch function for low-level access to AWS API. Defaults to fetch() */
31
31
  fetchFn?: typeof fetch;
32
+ /** Override the default endpoint url */
33
+ endpointUrl?: string;
32
34
  }
33
35
  /**
34
36
  * A type of Large Language Model (LLM) that interacts with the Bedrock
@@ -46,6 +48,7 @@ export declare class Bedrock extends LLM implements BedrockInput {
46
48
  temperature?: number | undefined;
47
49
  maxTokens?: number | undefined;
48
50
  fetchFn: typeof fetch;
51
+ endpointUrl?: string;
49
52
  codec: EventStreamCodec;
50
53
  get lc_secrets(): {
51
54
  [key: string]: string;
@@ -109,6 +109,12 @@ export class Bedrock extends LLM {
109
109
  writable: true,
110
110
  value: void 0
111
111
  });
112
+ Object.defineProperty(this, "endpointUrl", {
113
+ enumerable: true,
114
+ configurable: true,
115
+ writable: true,
116
+ value: void 0
117
+ });
112
118
  Object.defineProperty(this, "codec", {
113
119
  enumerable: true,
114
120
  configurable: true,
@@ -129,6 +135,7 @@ export class Bedrock extends LLM {
129
135
  this.temperature = fields?.temperature ?? this.temperature;
130
136
  this.maxTokens = fields?.maxTokens ?? this.maxTokens;
131
137
  this.fetchFn = fields?.fetchFn ?? fetch;
138
+ this.endpointUrl = fields?.endpointUrl;
132
139
  }
133
140
  /** Call out to Bedrock service model.
134
141
  Arguments:
@@ -151,7 +158,8 @@ export class Bedrock extends LLM {
151
158
  const provider = this.model.split(".")[0];
152
159
  const service = "bedrock";
153
160
  const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature);
154
- const url = new URL(`https://${service}.${this.region}.amazonaws.com/model/${this.model}/invoke-with-response-stream`);
161
+ const endpointUrl = this.endpointUrl ?? `${service}.${this.region}.amazonaws.com`;
162
+ const url = new URL(`https://${endpointUrl}/model/${this.model}/invoke-with-response-stream`);
155
163
  const request = new HttpRequest({
156
164
  hostname: url.hostname,
157
165
  path: url.pathname,
@@ -44,6 +44,12 @@ class Replicate extends base_js_1.LLM {
44
44
  writable: true,
45
45
  value: void 0
46
46
  });
47
+ Object.defineProperty(this, "promptKey", {
48
+ enumerable: true,
49
+ configurable: true,
50
+ writable: true,
51
+ value: void 0
52
+ });
47
53
  const apiKey = fields?.apiKey ??
48
54
  (0, env_js_1.getEnvironmentVariable)("REPLICATE_API_KEY") ?? // previous environment variable for backwards compatibility
49
55
  (0, env_js_1.getEnvironmentVariable)("REPLICATE_API_TOKEN"); // current environment variable, matching the Python library
@@ -53,6 +59,7 @@ class Replicate extends base_js_1.LLM {
53
59
  this.apiKey = apiKey;
54
60
  this.model = fields.model;
55
61
  this.input = fields.input ?? {};
62
+ this.promptKey = fields.promptKey;
56
63
  }
57
64
  _llmType() {
58
65
  return "replicate";
@@ -64,11 +71,30 @@ class Replicate extends base_js_1.LLM {
64
71
  userAgent: "langchain",
65
72
  auth: this.apiKey,
66
73
  });
74
+ if (this.promptKey === undefined) {
75
+ const [modelString, versionString] = this.model.split(":");
76
+ const version = await replicate.models.versions.get(modelString.split("/")[0], modelString.split("/")[1], versionString);
77
+ const openapiSchema = version.openapi_schema;
78
+ const inputProperties =
79
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
80
+ openapiSchema?.components?.schemas?.Input?.properties;
81
+ if (inputProperties === undefined) {
82
+ this.promptKey = "prompt";
83
+ }
84
+ else {
85
+ const sortedInputProperties = Object.entries(inputProperties).sort(([_keyA, valueA], [_keyB, valueB]) => {
86
+ const orderA = valueA["x-order"] || 0;
87
+ const orderB = valueB["x-order"] || 0;
88
+ return orderA - orderB;
89
+ });
90
+ this.promptKey = sortedInputProperties[0][0] ?? "prompt";
91
+ }
92
+ }
67
93
  const output = await this.caller.callWithOptions({ signal: options.signal }, () => replicate.run(this.model, {
68
- wait: true,
69
94
  input: {
95
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
96
+ [this.promptKey]: prompt,
70
97
  ...this.input,
71
- prompt,
72
98
  },
73
99
  }));
74
100
  if (typeof output === "string") {
@@ -10,6 +10,8 @@ export interface ReplicateInput {
10
10
  [key: string]: string | number | boolean;
11
11
  };
12
12
  apiKey?: string;
13
+ /** The key used to pass prompts to the model. */
14
+ promptKey?: string;
13
15
  }
14
16
  /**
15
17
  * Class responsible for managing the interaction with the Replicate API.
@@ -26,6 +28,7 @@ export declare class Replicate extends LLM implements ReplicateInput {
26
28
  model: ReplicateInput["model"];
27
29
  input: ReplicateInput["input"];
28
30
  apiKey: string;
31
+ promptKey?: string;
29
32
  constructor(fields: ReplicateInput & BaseLLMParams);
30
33
  _llmType(): string;
31
34
  /** @ignore */
@@ -41,6 +41,12 @@ export class Replicate extends LLM {
41
41
  writable: true,
42
42
  value: void 0
43
43
  });
44
+ Object.defineProperty(this, "promptKey", {
45
+ enumerable: true,
46
+ configurable: true,
47
+ writable: true,
48
+ value: void 0
49
+ });
44
50
  const apiKey = fields?.apiKey ??
45
51
  getEnvironmentVariable("REPLICATE_API_KEY") ?? // previous environment variable for backwards compatibility
46
52
  getEnvironmentVariable("REPLICATE_API_TOKEN"); // current environment variable, matching the Python library
@@ -50,6 +56,7 @@ export class Replicate extends LLM {
50
56
  this.apiKey = apiKey;
51
57
  this.model = fields.model;
52
58
  this.input = fields.input ?? {};
59
+ this.promptKey = fields.promptKey;
53
60
  }
54
61
  _llmType() {
55
62
  return "replicate";
@@ -61,11 +68,30 @@ export class Replicate extends LLM {
61
68
  userAgent: "langchain",
62
69
  auth: this.apiKey,
63
70
  });
71
+ if (this.promptKey === undefined) {
72
+ const [modelString, versionString] = this.model.split(":");
73
+ const version = await replicate.models.versions.get(modelString.split("/")[0], modelString.split("/")[1], versionString);
74
+ const openapiSchema = version.openapi_schema;
75
+ const inputProperties =
76
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
77
+ openapiSchema?.components?.schemas?.Input?.properties;
78
+ if (inputProperties === undefined) {
79
+ this.promptKey = "prompt";
80
+ }
81
+ else {
82
+ const sortedInputProperties = Object.entries(inputProperties).sort(([_keyA, valueA], [_keyB, valueB]) => {
83
+ const orderA = valueA["x-order"] || 0;
84
+ const orderB = valueB["x-order"] || 0;
85
+ return orderA - orderB;
86
+ });
87
+ this.promptKey = sortedInputProperties[0][0] ?? "prompt";
88
+ }
89
+ }
64
90
  const output = await this.caller.callWithOptions({ signal: options.signal }, () => replicate.run(this.model, {
65
- wait: true,
66
91
  input: {
92
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
93
+ [this.promptKey]: prompt,
67
94
  ...this.input,
68
- prompt,
69
95
  },
70
96
  }));
71
97
  if (typeof output === "string") {
@@ -99,6 +99,8 @@ class PromptTemplate extends base_js_1.BaseStringPromptTemplate {
99
99
  }
100
100
  });
101
101
  return new PromptTemplate({
102
+ // Rely on extracted types
103
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
102
104
  inputVariables: [...names],
103
105
  templateFormat,
104
106
  template,
@@ -32,7 +32,7 @@ type NonAlphanumeric = " " | "\t" | "\n" | "\r" | '"' | "'" | "{" | "[" | "(" |
32
32
  */
33
33
  type ExtractTemplateParamsRecursive<T extends string, Result extends string[] = []> = T extends `${string}{${infer Param}}${infer Rest}` ? Param extends `${NonAlphanumeric}${string}` ? ExtractTemplateParamsRecursive<Rest, Result> : ExtractTemplateParamsRecursive<Rest, [...Result, Param]> : Result;
34
34
  export type ParamsFromFString<T extends string> = {
35
- [Key in ExtractTemplateParamsRecursive<T>[number]]: string;
35
+ [Key in ExtractTemplateParamsRecursive<T>[number] | (string & Record<never, never>)]: string;
36
36
  };
37
37
  /**
38
38
  * Schema to represent a basic prompt for an LLM.
@@ -96,6 +96,8 @@ export class PromptTemplate extends BaseStringPromptTemplate {
96
96
  }
97
97
  });
98
98
  return new PromptTemplate({
99
+ // Rely on extracted types
100
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
99
101
  inputVariables: [...names],
100
102
  templateFormat,
101
103
  template,
@@ -84,7 +84,7 @@ class BasicTranslator extends BaseTranslator {
84
84
  visitComparison(comparison) {
85
85
  return {
86
86
  [comparison.attribute]: {
87
- [this.formatFunction(comparison.comparator)]: comparison.value,
87
+ [this.formatFunction(comparison.comparator)]: (0, utils_js_1.castValue)(comparison.value),
88
88
  },
89
89
  };
90
90
  }
@@ -1,5 +1,5 @@
1
1
  import { Comparators, Operators, Visitor, } from "../../chains/query_constructor/ir.js";
2
- import { isFilterEmpty } from "./utils.js";
2
+ import { isFilterEmpty, castValue } from "./utils.js";
3
3
  /**
4
4
  * Abstract class that provides a blueprint for creating specific
5
5
  * translator classes. Defines two abstract methods: formatFunction and
@@ -80,7 +80,7 @@ export class BasicTranslator extends BaseTranslator {
80
80
  visitComparison(comparison) {
81
81
  return {
82
82
  [comparison.attribute]: {
83
- [this.formatFunction(comparison.comparator)]: comparison.value,
83
+ [this.formatFunction(comparison.comparator)]: castValue(comparison.value),
84
84
  },
85
85
  };
86
86
  }
@@ -132,7 +132,7 @@ class FunctionalTranslator extends base_js_1.BaseTranslator {
132
132
  }
133
133
  return false;
134
134
  }
135
- return comparatorFunction(documentValue, value);
135
+ return comparatorFunction(documentValue, (0, utils_js_1.castValue)(value));
136
136
  };
137
137
  }
138
138
  else {
@@ -1,6 +1,6 @@
1
1
  import { Comparators, Operators, } from "../../chains/query_constructor/ir.js";
2
2
  import { BaseTranslator } from "./base.js";
3
- import { isFilterEmpty } from "./utils.js";
3
+ import { castValue, isFilterEmpty } from "./utils.js";
4
4
  /**
5
5
  * A class that extends `BaseTranslator` to translate structured queries
6
6
  * into functional filters.
@@ -129,7 +129,7 @@ export class FunctionalTranslator extends BaseTranslator {
129
129
  }
130
130
  return false;
131
131
  }
132
- return comparatorFunction(documentValue, value);
132
+ return comparatorFunction(documentValue, castValue(value));
133
133
  };
134
134
  }
135
135
  else {
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.isString = exports.isFloat = exports.isInt = exports.isFilterEmpty = exports.isObject = void 0;
3
+ exports.castValue = exports.isString = exports.isFloat = exports.isInt = exports.isFilterEmpty = exports.isObject = void 0;
4
4
  /**
5
5
  * Checks if the provided argument is an object and not an array.
6
6
  */
@@ -32,16 +32,32 @@ exports.isFilterEmpty = isFilterEmpty;
32
32
  * Checks if the provided value is an integer.
33
33
  */
34
34
  function isInt(value) {
35
- const numberValue = parseFloat(value);
36
- return !Number.isNaN(numberValue) && numberValue % 1 === 0;
35
+ if (typeof value === "number") {
36
+ return value % 1 === 0;
37
+ }
38
+ else if (typeof value === "string") {
39
+ const numberValue = parseInt(value, 10);
40
+ return (!Number.isNaN(numberValue) &&
41
+ numberValue % 1 === 0 &&
42
+ numberValue.toString() === value);
43
+ }
44
+ return false;
37
45
  }
38
46
  exports.isInt = isInt;
39
47
  /**
40
48
  * Checks if the provided value is a floating-point number.
41
49
  */
42
50
  function isFloat(value) {
43
- const numberValue = parseFloat(value);
44
- return !Number.isNaN(numberValue) && numberValue % 1 !== 0;
51
+ if (typeof value === "number") {
52
+ return value % 1 !== 0;
53
+ }
54
+ else if (typeof value === "string") {
55
+ const numberValue = parseFloat(value);
56
+ return (!Number.isNaN(numberValue) &&
57
+ numberValue % 1 !== 0 &&
58
+ numberValue.toString() === value);
59
+ }
60
+ return false;
45
61
  }
46
62
  exports.isFloat = isFloat;
47
63
  /**
@@ -49,6 +65,30 @@ exports.isFloat = isFloat;
49
65
  * number.
50
66
  */
51
67
  function isString(value) {
52
- return typeof value === "string" && Number.isNaN(parseFloat(value));
68
+ return (typeof value === "string" &&
69
+ (Number.isNaN(parseFloat(value)) || parseFloat(value).toString() !== value));
53
70
  }
54
71
  exports.isString = isString;
72
+ /**
73
+ * Casts a value that might be string or number to actual string or number.
74
+ * Since LLM might return back an integer/float as a string, we need to cast
75
+ * it back to a number, as many vector databases can't handle number as string
76
+ * values as a comparator.
77
+ */
78
+ function castValue(input) {
79
+ let value;
80
+ if (isString(input)) {
81
+ value = input;
82
+ }
83
+ else if (isInt(input)) {
84
+ value = parseInt(input, 10);
85
+ }
86
+ else if (isFloat(input)) {
87
+ value = parseFloat(input);
88
+ }
89
+ else {
90
+ throw new Error("Unsupported value type");
91
+ }
92
+ return value;
93
+ }
94
+ exports.castValue = castValue;
@@ -20,3 +20,10 @@ export declare function isFloat(value: unknown): boolean;
20
20
  * number.
21
21
  */
22
22
  export declare function isString(value: unknown): boolean;
23
+ /**
24
+ * Casts a value that might be string or number to actual string or number.
25
+ * Since LLM might return back an integer/float as a string, we need to cast
26
+ * it back to a number, as many vector databases can't handle number as string
27
+ * values as a comparator.
28
+ */
29
+ export declare function castValue(input: unknown): string | number;
@@ -27,20 +27,59 @@ filter) {
27
27
  * Checks if the provided value is an integer.
28
28
  */
29
29
  export function isInt(value) {
30
- const numberValue = parseFloat(value);
31
- return !Number.isNaN(numberValue) && numberValue % 1 === 0;
30
+ if (typeof value === "number") {
31
+ return value % 1 === 0;
32
+ }
33
+ else if (typeof value === "string") {
34
+ const numberValue = parseInt(value, 10);
35
+ return (!Number.isNaN(numberValue) &&
36
+ numberValue % 1 === 0 &&
37
+ numberValue.toString() === value);
38
+ }
39
+ return false;
32
40
  }
33
41
  /**
34
42
  * Checks if the provided value is a floating-point number.
35
43
  */
36
44
  export function isFloat(value) {
37
- const numberValue = parseFloat(value);
38
- return !Number.isNaN(numberValue) && numberValue % 1 !== 0;
45
+ if (typeof value === "number") {
46
+ return value % 1 !== 0;
47
+ }
48
+ else if (typeof value === "string") {
49
+ const numberValue = parseFloat(value);
50
+ return (!Number.isNaN(numberValue) &&
51
+ numberValue % 1 !== 0 &&
52
+ numberValue.toString() === value);
53
+ }
54
+ return false;
39
55
  }
40
56
  /**
41
57
  * Checks if the provided value is a string that cannot be parsed into a
42
58
  * number.
43
59
  */
44
60
  export function isString(value) {
45
- return typeof value === "string" && Number.isNaN(parseFloat(value));
61
+ return (typeof value === "string" &&
62
+ (Number.isNaN(parseFloat(value)) || parseFloat(value).toString() !== value));
63
+ }
64
+ /**
65
+ * Casts a value that might be string or number to actual string or number.
66
+ * Since LLM might return back an integer/float as a string, we need to cast
67
+ * it back to a number, as many vector databases can't handle number as string
68
+ * values as a comparator.
69
+ */
70
+ export function castValue(input) {
71
+ let value;
72
+ if (isString(input)) {
73
+ value = input;
74
+ }
75
+ else if (isInt(input)) {
76
+ value = parseInt(input, 10);
77
+ }
78
+ else if (isFloat(input)) {
79
+ value = parseFloat(input);
80
+ }
81
+ else {
82
+ throw new Error("Unsupported value type");
83
+ }
84
+ return value;
46
85
  }