@digipair/skill-llm 0.114.1 → 0.114.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs.js +225 -8
- package/dist/index.esm2.js +5 -5
- package/package.json +6 -2
- package/dist/api_chain.cjs.js +0 -481
- package/dist/few_shot.cjs.js +0 -883
- package/dist/index.cjs2.js +0 -91576
- package/dist/sequential_chain.cjs.js +0 -1092
- package/dist/vector_db_qa.cjs.js +0 -860
package/dist/index.cjs.js
CHANGED
@@ -1,12 +1,229 @@
  'use strict';

- var
- require('@
+ var engine = require('@digipair/engine');
+ var messages = require('@langchain/core/messages');
+ var prompts = require('@langchain/core/prompts');
+ var runnables = require('@langchain/core/runnables');
+ var chains = require('langchain/chains');
+ var output_parsers = require('langchain/output_parsers');
+ var text_splitter = require('langchain/text_splitter');
+ var zod = require('zod');

+ let LLMService = class LLMService {
+     objectToInput(obj) {
+         const result = {};
+         for(const key in obj){
+             if (Object.prototype.hasOwnProperty.call(obj, key)) {
+                 result[key] = ()=>obj[key];
+             }
+         }
+         return result;
+     }
+     jsonSchemaToZod(schema) {
+         const zodProps = {};
+         switch(schema.type){
+             case 'string':
+                 return zod.z.string().optional();
+             case 'number':
+                 return zod.z.number().optional();
+             case 'boolean':
+                 return zod.z.boolean().optional();
+             case 'object':
+                 for(const prop in schema.properties){
+                     zodProps[prop] = this.jsonSchemaToZod(schema.properties[prop]);
+                     if (schema.properties[prop].description) {
+                         zodProps[prop] = zodProps[prop].describe(schema.properties[prop].description);
+                     }
+                 }
+                 return zod.z.object(zodProps).required((schema.required ?? []).reduce((acc, reqProp)=>({
+                         ...acc,
+                         [reqProp]: true
+                     }), {})).optional();
+             case 'array':
+                 if (schema.items) {
+                     return zod.z.array(this.jsonSchemaToZod(schema.items)).optional();
+                 }
+                 return zod.z.array(zod.z.unknown()).optional();
+             default:
+                 throw new Error(`Unsupported JSON Schema type: ${schema.type}`);
+         }
+     }
+     async invoke(params, _pinsSettingsList, context) {
+         const { execute, input = {} } = params;
+         const chain = runnables.RunnableSequence.from([
+             this.objectToInput(input),
+             ...await Promise.all(execute.map((pinsSettings, i)=>engine.executePinsList([
+                     pinsSettings
+                 ], context, `${context.__PATH__}.execute[${i}]`)))
+         ]);
+         let model;
+         let service;
+         const result = await chain.invoke({}, {
+             callbacks: [
+                 {
+                     handleChatModelStart: async ({ id }, _1, _2, _3, extrasParams)=>{
+                         model = (extrasParams?.['invocation_params']).model;
+                         service = id[2];
+                     },
+                     handleLLMStart: async ({ id }, _1, _2, _3, extrasParams)=>{
+                         model = (extrasParams?.['invocation_params']).model;
+                         service = id[2];
+                     },
+                     handleLLMEnd: async (infos)=>{
+                         const { completionTokens, promptTokens } = infos.llmOutput?.['tokenUsage'] || {
+                             completionTokens: 0,
+                             promptTokens: 0
+                         };
+                         const skillLogger = require('@digipair/skill-logger');
+                         await skillLogger.addConsumption(context, service, model, promptTokens, completionTokens);
+                     }
+                 }
+             ]
+         });
+         return result;
+     }
+     async reasoningStep(params, _pinsSettingsList, context) {
+         const { attributes } = params;
+         const data = {};
+         let i = 0;
+         for (const attribute of attributes){
+             data[attribute.name] = async (previous)=>await engine.executePinsList(attribute.value, {
+                     ...context,
+                     previous,
+                     parent: {
+                         previous: context.previous,
+                         parent: context.parent
+                     }
+                 }, `${context.__PATH__}.attributes[${i}]`);
+             i++;
+         }
+         return data;
+     }
+     async basic(params, _pins, context) {
+         const { model, prompt, schema } = params;
+         let chain;
+         if (!schema) {
+             const modelInstance = await engine.executePinsList(model ?? context.privates.MODEL_LLM, context, `${context.__PATH__}.model`);
+             chain = runnables.RunnableSequence.from([
+                 prompts.PromptTemplate.fromTemplate(prompt ?? '{prompt}'),
+                 modelInstance
+             ]);
+         } else {
+             const modelInstance = await engine.executePinsList(model ?? context.privates.MODEL_LLM_JSON ?? context.privates.MODEL_LLM, context, `${context.__PATH__}.model`);
+             const parser = new output_parsers.StructuredOutputParser(this.jsonSchemaToZod(schema));
+             chain = runnables.RunnableSequence.from([
+                 prompts.PromptTemplate.fromTemplate(`${prompt ?? '{prompt}'}
+
+ Answer the users question as best as possible.
+ {format_instructions}
+
+ JSON:`, {
+                     partialVariables: {
+                         format_instructions: parser.getFormatInstructions()
+                     }
+                 }),
+                 modelInstance,
+                 parser
+             ]);
+         }
+         return chain;
+     }
+     async vision(params, _pins, context) {
+         const { model, prompt, schema, image } = params;
+         let chain;
+         if (!schema) {
+             const modelInstance = await engine.executePinsList(model ?? context.privates.MODEL_VISION ?? context.privates.MODEL_LLM, context, `${context.__PATH__}.model`);
+             chain = runnables.RunnableSequence.from([
+                 prompts.PromptTemplate.fromTemplate(prompt ?? '{prompt}'),
+                 (text)=>[
+                         new messages.HumanMessage({
+                             content: [
+                                 {
+                                     type: 'text',
+                                     text: text.value
+                                 },
+                                 {
+                                     type: 'image_url',
+                                     image_url: {
+                                         url: image
+                                     }
+                                 }
+                             ]
+                         })
+                     ],
+                 modelInstance
+             ]);
+         } else {
+             const modelInstance = await engine.executePinsList(model ?? context.privates.MODEL_VISION_JSON ?? context.privates.MODEL_VISION ?? context.privates.MODEL_LLM_JSON ?? context.privates.MODEL_LLM, context, `${context.__PATH__}.model`);
+             const parser = new output_parsers.StructuredOutputParser(this.jsonSchemaToZod(schema));
+             chain = runnables.RunnableSequence.from([
+                 prompts.PromptTemplate.fromTemplate(`${prompt ?? '{prompt}'}
+
+ Answer the users question as best as possible.
+ {format_instructions}
+
+ JSON:`, {
+                     partialVariables: {
+                         format_instructions: parser.getFormatInstructions()
+                     }
+                 }),
+                 (text)=>[
+                         new messages.HumanMessage({
+                             content: [
+                                 {
+                                     type: 'text',
+                                     text: text.value
+                                 },
+                                 {
+                                     type: 'image_url',
+                                     image_url: {
+                                         url: image
+                                     }
+                                 }
+                             ]
+                         })
+                     ],
+                 modelInstance,
+                 parser
+             ]);
+         }
+         return chain;
+     }
+     async summarization(params, _pins, context) {
+         const { model = context.privates.MODEL_LLM, chunkSize = 1024, type = 'map_reduce', verbose = false, prompt, combineMapPrompt, combinePrompt, returnIntermediateSteps, refinePrompt, questionPrompt } = params;
+         const modelInstance = await engine.executePinsList(model, context, `${context.__PATH__}.model`);
+         const textSplitter = new text_splitter.RecursiveCharacterTextSplitter({
+             chunkSize
+         });
+         const summarizationChain = chains.loadSummarizationChain(modelInstance, {
+             type,
+             verbose,
+             prompt: !prompt ? undefined : prompts.PromptTemplate.fromTemplate(prompt),
+             combineMapPrompt: !combineMapPrompt ? undefined : prompts.PromptTemplate.fromTemplate(combineMapPrompt),
+             combinePrompt: !combinePrompt ? undefined : prompts.PromptTemplate.fromTemplate(combinePrompt),
+             returnIntermediateSteps,
+             refinePrompt: !refinePrompt ? undefined : prompts.PromptTemplate.fromTemplate(refinePrompt),
+             questionPrompt: !questionPrompt ? undefined : prompts.PromptTemplate.fromTemplate(questionPrompt)
+         });
+         const chain = runnables.RunnableSequence.from([
+             {
+                 input_documents: async ({ document })=>await textSplitter.createDocuments([
+                         document
+                     ])
+             },
+             summarizationChain
+         ]);
+         return chain;
+     }
+ };
+ const invoke = (params, pinsSettingsList, context)=>new LLMService().invoke(params, pinsSettingsList, context);
+ const reasoningStep = (params, pinsSettingsList, context)=>new LLMService().reasoningStep(params, pinsSettingsList, context);
+ const basic = (params, pinsSettingsList, context)=>new LLMService().basic(params, pinsSettingsList, context);
+ const vision = (params, pinsSettingsList, context)=>new LLMService().vision(params, pinsSettingsList, context);
+ const summarization = (params, pinsSettingsList, context)=>new LLMService().summarization(params, pinsSettingsList, context);

-
- exports.
- exports.
- exports.
- exports.
- exports.vision = index.vision;
+ exports.basic = basic;
+ exports.invoke = invoke;
+ exports.reasoningStep = reasoningStep;
+ exports.summarization = summarization;
+ exports.vision = vision;
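The functional change in this file is the inlined LLMService: basic() and vision() now build structured output by converting a JSON Schema to a zod schema (jsonSchemaToZod), wrapping it in a StructuredOutputParser, and injecting the parser's format instructions into the prompt. The standalone sketch below is illustrative only and is not part of the package; it assumes the langchain, @langchain/core and zod versions declared in the new dependencies block, and uses a hand-written zod schema in place of jsonSchemaToZod's output, since the Digipair engine and model pins are not available here.

// Illustrative sketch only -- not shipped with @digipair/skill-llm.
const { z } = require('zod');
const { StructuredOutputParser } = require('langchain/output_parsers');
const { PromptTemplate } = require('@langchain/core/prompts');

async function demo() {
    // Stand-in for jsonSchemaToZod({ type: 'object', properties: { answer: { type: 'string' } } })
    const zodSchema = z.object({ answer: z.string().describe('short answer to the question') });
    const parser = new StructuredOutputParser(zodSchema);

    // Same template shape as the schema branch of basic()/vision():
    // the parser's format instructions are supplied as a partial variable.
    const prompt = PromptTemplate.fromTemplate(`{prompt}

Answer the users question as best as possible.
{format_instructions}

JSON:`, {
        partialVariables: { format_instructions: parser.getFormatInstructions() }
    });

    console.log(await prompt.format({ prompt: 'What color is the sky?' }));
    // The parser validates the raw model output against the zod schema.
    console.log(await parser.parse('{"answer": "blue"}'));
}

demo().catch(console.error);

Running it prints the fully formatted prompt (including the JSON format instructions) followed by the parsed object, which is what the new chain returns when a schema is supplied.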
package/dist/index.esm2.js
CHANGED
@@ -22838,14 +22838,14 @@ function indent(str, spaces) {
  var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
  // match is required
  if (!match) {
- return
+ return nextMatch = nextMatch1, tokens = tokens1, i = i1, {
  v: nextMatch1
  };
  }
  var token = match.token, offset = match.offset;
  i1 += offset;
  if (token === ' ') {
- return
+ return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
  }
  tokens1 = _to_consumable_array$4$1(tokens1).concat([
  token
@@ -22864,7 +22864,7 @@ function indent(str, spaces) {
  if (contextKeys.some(function(el) {
  return el.startsWith(name);
  })) {
- return
+ return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
  }
  if (dateTimeIdentifiers.some(function(el) {
  return el === name;
@@ -22883,9 +22883,9 @@ function indent(str, spaces) {
  if (dateTimeIdentifiers.some(function(el) {
  return el.startsWith(name);
  })) {
- return
+ return nextMatch = nextMatch1, tokens = tokens1, i = i1, "continue";
  }
- return
+ return nextMatch = nextMatch1, tokens = tokens1, i = i1, {
  v: nextMatch1
  };
  };
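The only change in this bundled file is to a set of return statements inside a helper extracted from a loop: each now writes the helper's working copies (nextMatch1, tokens1, i1) back to the outer nextMatch, tokens and i via comma expressions before returning either the "continue" sentinel or a { v: ... } result object. The sketch below is a generic illustration of that extracted-loop-body convention with made-up names; it is not the actual bundled code.

// Generic illustration of the pattern (assumed names): the helper returns
// "continue" to continue the outer loop, or { v: value } to make the
// enclosing function return value; either way it copies its local working
// state back to the outer variables first.
function firstLongToken(tokens) {
    let index = 0;
    const _loop = function() {
        let i1 = index; // local copy the helper advances
        const token = tokens[i1];
        i1 += 1;
        if (token.length < 3) {
            // compact comma form: return index = i1, "continue";
            index = i1;
            return "continue";
        }
        index = i1;
        return { v: token };
    };
    while (index < tokens.length) {
        const ret = _loop();
        if (typeof ret === "object") return ret.v;
    }
    return undefined;
}

console.log(firstLongToken(['a', 'bb', 'ccc'])); // -> 'ccc'

The comma expressions in the diff are simply the compact form of those state writes plus the return in a single statement.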
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@digipair/skill-llm",
-   "version": "0.114.
+   "version": "0.114.3",
    "main": "./dist/index.cjs.js",
    "module": "./dist/index.esm.js",
    "types": "./dist/index.d.ts",
@@ -31,5 +31,9 @@
    "nx": {
      "name": "skill-llm"
    },
-   "dependencies": {
+   "dependencies": {
+     "langchain": "^0.3.5",
+     "@langchain/core": "^0.3.15",
+     "zod": "^3.23.8"
+   }
  }
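The dependency additions line up with the changes above: dist/index.cjs.js now requires LangChain and zod at the top level, and the large generated bundles listed in the file summary (index.cjs2.js, api_chain.cjs.js, few_shot.cjs.js, sequential_chain.cjs.js, vector_db_qa.cjs.js) were removed, so these packages have to resolve from node_modules at runtime. A purely illustrative check that the specifiers used by the new build resolve once 0.114.3 is installed:

// Illustrative only: confirm the module specifiers required by the new
// dist/index.cjs.js can be resolved next to @digipair/skill-llm 0.114.3.
const specifiers = [
    '@langchain/core/messages',
    '@langchain/core/prompts',
    '@langchain/core/runnables',
    'langchain/chains',
    'langchain/output_parsers',
    'langchain/text_splitter',
    'zod'
];
for (const specifier of specifiers) {
    console.log(specifier, '->', require.resolve(specifier));
}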