@digipair/skill-llm 0.25.6 → 0.26.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.cjs2.js CHANGED
@@ -23509,14 +23509,14 @@ function indent(str, spaces) {
  var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
  // match is required
  if (!match) {
- return i = i1, nextMatch = nextMatch1, tokens = tokens1, {
+ return i = i1, tokens = tokens1, nextMatch = nextMatch1, {
  v: nextMatch1
  };
  }
  var token = match.token, offset = match.offset;
  i1 += offset;
  if (token === " ") {
- return i = i1, nextMatch = nextMatch1, tokens = tokens1, "continue";
+ return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
  }
  tokens1 = _to_consumable_array$y(tokens1).concat([
  token
@@ -23535,7 +23535,7 @@ function indent(str, spaces) {
  if (contextKeys.some(function(el) {
  return el.startsWith(name);
  })) {
- return i = i1, nextMatch = nextMatch1, tokens = tokens1, "continue";
+ return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
  }
  if (dateTimeIdentifiers.some(function(el) {
  return el === name;
@@ -23554,9 +23554,9 @@ function indent(str, spaces) {
  if (dateTimeIdentifiers.some(function(el) {
  return el.startsWith(name);
  })) {
- return i = i1, nextMatch = nextMatch1, tokens = tokens1, "continue";
+ return i = i1, tokens = tokens1, nextMatch = nextMatch1, "continue";
  }
- return i = i1, nextMatch = nextMatch1, tokens = tokens1, {
+ return i = i1, tokens = tokens1, nextMatch = nextMatch1, {
  v: nextMatch1
  };
  };
package/index.esm2.js CHANGED
@@ -23489,14 +23489,14 @@ function indent(str, spaces) {
  var match = parseIdentifier(input, i1, namePart) || namePart && parseAdditionalSymbol(input, i1) || maybeSpace && parseSpaces(input, i1);
  // match is required
  if (!match) {
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, {
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, {
  v: nextMatch1
  };
  }
  var token = match.token, offset = match.offset;
  i1 += offset;
  if (token === " ") {
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
  }
  tokens1 = _to_consumable_array$y(tokens1).concat([
  token
@@ -23515,7 +23515,7 @@ function indent(str, spaces) {
  if (contextKeys.some(function(el) {
  return el.startsWith(name);
  })) {
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
  }
  if (dateTimeIdentifiers.some(function(el) {
  return el === name;
@@ -23534,9 +23534,9 @@ function indent(str, spaces) {
  if (dateTimeIdentifiers.some(function(el) {
  return el.startsWith(name);
  })) {
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, "continue";
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, "continue";
  }
- return tokens = tokens1, i = i1, nextMatch = nextMatch1, {
+ return tokens = tokens1, nextMatch = nextMatch1, i = i1, {
  v: nextMatch1
  };
  };
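
Note: the only change to index.cjs2.js and index.esm2.js in this release is the ordering of the assignments inside these comma-expression returns emitted by the bundler. Because i1, tokens1 and nextMatch1 are each written to a distinct outer variable, swapping the assignment order changes neither the side effects nor the returned value. A minimal sketch of the equivalence (variable names are illustrative, not taken from the bundle):

    // A comma-expression return assigns several outer variables and yields
    // its last operand; reordering independent assignments is a no-op.
    let i, tokens, nextMatch;
    function beforeReorder(i1, tokens1, nextMatch1) {
      return i = i1, nextMatch = nextMatch1, tokens = tokens1, { v: nextMatch1 };
    }
    function afterReorder(i1, tokens1, nextMatch1) {
      return i = i1, tokens = tokens1, nextMatch = nextMatch1, { v: nextMatch1 };
    }
    // Both calls leave i, tokens and nextMatch holding the same values and
    // return an object whose v property is nextMatch1.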
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@digipair/skill-llm",
- "version": "0.25.6",
+ "version": "0.26.0",
  "dependencies": {},
  "main": "./index.cjs.js",
  "module": "./index.esm.js"
package/schema.fr.json ADDED
@@ -0,0 +1,277 @@
+ {
+ "openapi": "3.0.0",
+ "info": {
+ "title": "@digipair/skill-llm",
+ "summary": "Gestion de modèles LLM",
+ "description": "La compétence @digipair/skill-llm est une compétence qui permet d'utiliser un modèle de langage à grande échelle.",
+ "version": "0.1.0",
+ "x-icon": "🚀"
+ },
+ "paths": {
+ "/invoke": {
+ "post": {
+ "tags": ["service"],
+ "summary": "Raisonnement LLM",
+ "parameters": [
+ {
+ "name": "execute",
+ "required": true,
+ "description": "Exécuter",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "https://schemas.digipair.ai/pinsSettings"
+ }
+ }
+ },
+ {
+ "name": "input",
+ "summary": "Données d'entrée",
+ "required": false,
+ "description": "input",
+ "schema": {
+ "type": "object"
+ }
+ }
+ ],
+ "x-events": []
+ }
+ },
+ "/reasoningStep": {
+ "post": {
+ "tags": ["service"],
+ "summary": "Etape de raisonnement",
+ "parameters": [
+ {
+ "name": "attributes",
+ "summary": "Données",
+ "required": true,
+ "description": "attributes",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/components/schemas/reasonningStepAttribute"
+ }
+ }
+ }
+ ],
+ "x-events": []
+ }
+ },
+ "/basic": {
+ "post": {
+ "tags": ["service"],
+ "summary": "Générer",
+ "description": "Génération de texte via un model LLM",
+ "parameters": [
+ {
+ "name": "prompt",
+ "summary": "Prompt",
+ "required": true,
+ "description": "Prompt à exécuter via le model LLM",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "model",
+ "summary": "Modèle LLM",
+ "required": false,
+ "description": "Modèle LLM à charger",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "https://schemas.digipair.ai/pinsSettings"
+ }
+ }
+ },
+ {
+ "name": "schema",
+ "summary": "Schema",
+ "required": false,
+ "description": "Schema JSON des données à extraire",
+ "schema": {
+ "type": "object"
+ }
+ }
+ ],
+ "x-events": []
+ }
+ },
+ "/vision": {
+ "post": {
+ "tags": ["service"],
+ "summary": "Voir",
+ "description": "Utiliser un model LLM qui sait lire des images et du texte en entrée",
+ "parameters": [
+ {
+ "name": "model",
+ "summary": "Modèle Vision",
+ "required": false,
+ "description": "Modèle Vision à charger",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "https://schemas.digipair.ai/pinsSettings"
+ }
+ }
+ },
+ {
+ "name": "prompt",
+ "summary": "Prompt",
+ "required": true,
+ "description": "Prompt à exécuter sur le modèle LLM",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "image",
+ "summary": "Image",
+ "required": true,
+ "description": "Image en base 64",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "schema",
+ "summary": "Schema JSON",
+ "required": false,
+ "description": "Schema JSON des données à extraire",
+ "schema": {
+ "type": "object"
+ }
+ }
+ ],
+ "x-events": []
+ }
+ },
+ "/summarization": {
+ "post": {
+ "tags": ["service"],
+ "summary": "Résumer un texte",
+ "description": "Résumer un texte via un model LLM",
+ "parameters": [
+ {
+ "name": "model",
+ "summary": "Modèle LLM",
+ "required": false,
+ "description": "Modèle LLM à charger",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "https://schemas.digipair.ai/pinsSettings"
+ }
+ }
+ },
+ {
+ "name": "chunkSize",
+ "summary": "Taille des morceaux de texte",
+ "required": false,
+ "description": "Taille des morceaux de texte",
+ "schema": {
+ "type": "number"
+ }
+ },
+ {
+ "name": "type",
+ "summary": "Algorithme",
+ "required": false,
+ "description": "Algorithme à utiliser pour résumer le texte",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "prompt",
+ "summary": "Texte",
+ "required": true,
+ "description": "Texte à résumer",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "combineMapPrompt",
+ "summary": "",
+ "required": false,
+ "description": "combineMapPrompt",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "combinePrompt",
+ "summary": "",
+ "required": false,
+ "description": "combinePrompt",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "returnIntermediateSteps",
+ "summary": "",
+ "required": false,
+ "description": "returnIntermediateSteps",
+ "schema": {
+ "type": "boolean"
+ }
+ },
+ {
+ "name": "refinePrompt",
+ "summary": "",
+ "required": false,
+ "description": "refinePrompt",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "questionPrompt",
+ "summary": "",
+ "required": false,
+ "description": "questionPrompt",
+ "schema": {
+ "type": "string"
+ }
+ },
+ {
+ "name": "verbose",
+ "summary": "Debug",
+ "required": false,
+ "description": "Ajoute des logs de debug",
+ "schema": {
+ "type": "boolean"
+ }
+ }
+ ],
+ "x-events": []
+ }
+ }
+ },
+ "components": {
+ "schemas": {
+ "reasonningStepAttribute": {
+ "tags": ["service"],
+ "summary": "Donnée",
+ "type": "object",
+ "properties": {
+ "name": {
+ "type": "string",
+ "summary": "Nom"
+ },
+ "value": {
+ "summary": "Valeur",
+ "type": "array",
+ "items": {
+ "$ref": "https://schemas.digipair.ai/pinsSettings"
+ }
+ }
+ },
+ "required": ["name", "value"]
+ }
+ }
+ }
+ }
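
The new schema.fr.json keeps the French summaries and descriptions that schema.json carried before this release, so the package now ships one schema per locale (see the schema.json diff below for the English text). Purely as an illustration, an argument object matching the parameters declared for POST /basic could look like the following; the values and the way Digipair actually binds these parameters are assumptions, not something this diff shows:

    // Hypothetical example: keys follow the /basic parameters declared in
    // schema.fr.json / schema.json; how @digipair/skill-llm consumes them is
    // not part of this diff.
    const basicParams = {
      prompt: 'Summarize the following text in one sentence: ...', // required string
      model: [],    // optional array of pinsSettings (https://schemas.digipair.ai/pinsSettings)
      schema: {     // optional JSON schema of the data to extract
        type: 'object',
        properties: { answer: { type: 'string' } }
      }
    };
    console.log(JSON.stringify(basicParams, null, 2));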
package/schema.json CHANGED
@@ -2,8 +2,8 @@
  "openapi": "3.0.0",
  "info": {
  "title": "@digipair/skill-llm",
- "summary": "Gestion de modèles LLM",
- "description": "La compétence @digipair/skill-llm est une compétence qui permet d'utiliser un modèle de langage à grande échelle.",
+ "summary": "LLM Model Management",
+ "description": "The @digipair/skill-llm skill allows the use of a large-scale language model.",
  "version": "0.1.0",
  "x-icon": "🚀"
  },
@@ -11,12 +11,12 @@
  "/invoke": {
  "post": {
  "tags": ["service"],
- "summary": "Raisonnement LLM",
+ "summary": "LLM Reasoning",
  "parameters": [
  {
  "name": "execute",
  "required": true,
- "description": "Exécuter",
+ "description": "Execute",
  "schema": {
  "type": "array",
  "items": {
@@ -26,7 +26,7 @@
  },
  {
  "name": "input",
- "summary": "Données d'entrée",
+ "summary": "Input Data",
  "required": false,
  "description": "input",
  "schema": {
@@ -40,11 +40,11 @@
  "/reasoningStep": {
  "post": {
  "tags": ["service"],
- "summary": "Etape de raisonnement",
+ "summary": "Reasoning Step",
  "parameters": [
  {
  "name": "attributes",
- "summary": "Données",
+ "summary": "Data",
  "required": true,
  "description": "attributes",
  "schema": {
@@ -61,23 +61,23 @@
  "/basic": {
  "post": {
  "tags": ["service"],
- "summary": "Générer",
- "description": "Génération de texte via un model LLM",
+ "summary": "Generate",
+ "description": "Text generation via an LLM model",
  "parameters": [
  {
  "name": "prompt",
  "summary": "Prompt",
  "required": true,
- "description": "Prompt à exécuter via le model LLM",
+ "description": "Prompt to execute via the LLM model",
  "schema": {
  "type": "string"
  }
  },
  {
  "name": "model",
- "summary": "Modèle LLM",
+ "summary": "LLM Model",
  "required": false,
- "description": "Modèle LLM à charger",
+ "description": "LLM model to load",
  "schema": {
  "type": "array",
  "items": {
@@ -89,7 +89,7 @@
  "name": "schema",
  "summary": "Schema",
  "required": false,
- "description": "Schema JSON des données à extraire",
+ "description": "JSON schema of the data to extract",
  "schema": {
  "type": "object"
  }
@@ -101,14 +101,14 @@
  "/vision": {
  "post": {
  "tags": ["service"],
- "summary": "Voir",
- "description": "Utiliser un model LLM qui sait lire des images et du texte en entrée",
+ "summary": "See",
+ "description": "Use an LLM model that can read images and text as input",
  "parameters": [
  {
  "name": "model",
- "summary": "Modèle Vision",
+ "summary": "Vision Model",
  "required": false,
- "description": "Modèle Vision à charger",
+ "description": "Vision model to load",
  "schema": {
  "type": "array",
  "items": {
@@ -120,7 +120,7 @@
  "name": "prompt",
  "summary": "Prompt",
  "required": true,
- "description": "Prompt à exécuter sur le modèle LLM",
+ "description": "Prompt to execute on the LLM model",
  "schema": {
  "type": "string"
  }
@@ -129,16 +129,16 @@
  "name": "image",
  "summary": "Image",
  "required": true,
- "description": "Image en base 64",
+ "description": "Base64 image",
  "schema": {
  "type": "string"
  }
  },
  {
  "name": "schema",
- "summary": "Schema JSON",
+ "summary": "JSON Schema",
  "required": false,
- "description": "Schema JSON des données à extraire",
+ "description": "JSON schema of the data to extract",
  "schema": {
  "type": "object"
  }
@@ -150,14 +150,14 @@
  "/summarization": {
  "post": {
  "tags": ["service"],
- "summary": "Résumer un texte",
- "description": "Résumer un texte via un model LLM",
+ "summary": "Summarize Text",
+ "description": "Summarize text via an LLM model",
  "parameters": [
  {
  "name": "model",
- "summary": "Modèle LLM",
+ "summary": "LLM Model",
  "required": false,
- "description": "Modèle LLM à charger",
+ "description": "LLM model to load",
  "schema": {
  "type": "array",
  "items": {
@@ -167,27 +167,27 @@
  },
  {
  "name": "chunkSize",
- "summary": "Taille des morceaux de texte",
+ "summary": "Text Chunk Size",
  "required": false,
- "description": "Taille des morceaux de texte",
+ "description": "Size of text chunks",
  "schema": {
  "type": "number"
  }
  },
  {
  "name": "type",
- "summary": "Algorithme",
+ "summary": "Algorithm",
  "required": false,
- "description": "Algorithme à utiliser pour résumer le texte",
+ "description": "Algorithm to use for summarizing the text",
  "schema": {
  "type": "string"
  }
  },
  {
  "name": "prompt",
- "summary": "Texte",
+ "summary": "Text",
  "required": true,
- "description": "Texte à résumer",
+ "description": "Text to summarize",
  "schema": {
  "type": "string"
  }
@@ -241,7 +241,7 @@
  "name": "verbose",
  "summary": "Debug",
  "required": false,
- "description": "Ajoute des logs de debug",
+ "description": "Adds debug logs",
  "schema": {
  "type": "boolean"
  }
@@ -255,15 +255,15 @@
  "schemas": {
  "reasonningStepAttribute": {
  "tags": ["service"],
- "summary": "Donnée",
+ "summary": "Data",
  "type": "object",
  "properties": {
  "name": {
  "type": "string",
- "summary": "Nom"
+ "summary": "Name"
  },
  "value": {
- "summary": "Valeur",
+ "summary": "Value",
  "type": "array",
  "items": {
  "$ref": "https://schemas.digipair.ai/pinsSettings"