@tiledesk/tiledesk-tybot-connector 2.0.41 → 2.0.42

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/tybotRoute/config/kb/embedding.js ADDED
@@ -0,0 +1,7 @@
+module.exports = {
+    provider: process.env.EMBEDDINGS_PROVIDER || "openai",
+    name: process.env.EMBEDDINGS_NAME || "text-embedding-ada-002",
+    api_key: "",
+    dimension: Number(process.env.EMBEDDINGS_DIMENSION) || 1536,
+    url: process.env.EMBEDDINGS_URL
+}
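
Note: the EMBEDDINGS_* variables are read once, when the module is first required, and EMBEDDINGS_DIMENSION is parsed with Number(), so an unset or non-numeric value falls back to 1536. A minimal sketch of how the defaults resolve (values are illustrative; the require path assumes the file sits under the package's tybotRoute folder):

    process.env.EMBEDDINGS_NAME = "text-embedding-3-large";
    process.env.EMBEDDINGS_DIMENSION = "3072";

    const embedding = require("./config/kb/embedding");
    console.log(embedding);
    // { provider: 'openai', name: 'text-embedding-3-large',
    //   api_key: '', dimension: 3072, url: undefined }
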
package/tybotRoute/config/kb/engine.hybrid.js ADDED
@@ -0,0 +1,10 @@
+module.exports = {
+    name: process.env.VECTOR_STORE_NAME || 'pinecone',
+    type: process.env.INDEX_TYPE_HYBRID || process.env.PINECONE_TYPE_HYBRID || 'serverless',
+    apikey: process.env.VECTOR_STORE_APIKEY || '',
+    vector_size: Number(process.env.VECTOR_SIZE_HYBRID) || 1536,
+    index_name: process.env.INDEX_NAME_HYBRID || process.env.PINECONE_INDEX_HYBRID || 'llm-sample-hybrid-index',
+    host: process.env.VECTOR_STORE_HOST,
+    port: process.env.VECTOR_STORE_PORT ? Number(process.env.VECTOR_STORE_PORT) : undefined,
+    deployment: process.env.VECTOR_STORE_DEPLOYMENT
+}
package/tybotRoute/config/kb/engine.js ADDED
@@ -0,0 +1,10 @@
+module.exports = {
+    name: process.env.VECTOR_STORE_NAME || 'pinecone',
+    type: process.env.INDEX_TYPE || process.env.PINECONE_TYPE || 'serverless',
+    apikey: process.env.VECTOR_STORE_APIKEY || '',
+    vector_size: Number(process.env.VECTOR_SIZE) || 1536,
+    index_name: process.env.INDEX_NAME || process.env.PINECONE_INDEX || 'llm-sample-index',
+    host: process.env.VECTOR_STORE_HOST,
+    port: process.env.VECTOR_STORE_PORT ? Number(process.env.VECTOR_STORE_PORT) : undefined,
+    deployment: process.env.VECTOR_STORE_DEPLOYMENT
+}
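
Note: the two engine configs are identical in shape; they differ only in which env vars they read (HYBRID-suffixed vs. plain) and in the default index name. The setDefaultEngine() change further below selects between them, roughly like this sketch (require paths again assume the tybotRoute folder):

    const default_engine = require('./config/kb/engine');
    const default_engine_hybrid = require('./config/kb/engine.hybrid');

    // hybrid namespaces get the HYBRID-configured index, everything else the plain one
    function pickEngine(hybrid = false) {
        return hybrid ? default_engine_hybrid : default_engine;
    }
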
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@tiledesk/tiledesk-tybot-connector",
-  "version": "2.0.41",
+  "version": "2.0.42",
   "description": "Tiledesk Tybot connector",
   "main": "index.js",
   "scripts": {
package/tybotRoute/services/AIController.js ADDED
@@ -0,0 +1,44 @@
+const integrationService = require("./IntegrationService");
+
+class AIController {
+
+    constructor() {}
+
+    async resolveLLMConfig(id_project, provider = 'openai', model, token) {
+
+        if (provider === 'ollama' || provider === 'vllm') {
+            try {
+                const integration = await integrationService.getIntegration(id_project, provider, token);
+                if (!integration?.value?.url) {
+                    throw { code: 422, error: `Server url for ${provider} is empty or invalid`}
+                }
+
+                return {
+                    provider,
+                    name: model,
+                    url: integration.value.url,
+                    api_key: integration.value.api_key || ""
+                }
+
+            } catch (err) {
+                throw { code: err.code, error: err.error }
+            }
+        }
+
+        try {
+            let key = await integrationService.getKeyFromIntegrations(id_project, provider, token);
+
+            return {
+                provider,
+                name: model,
+                api_key: key
+            }
+
+        } catch (err) {
+            throw { code: err.code, error: err.error }
+        }
+    }
+}
+
+const aiController = new AIController();
+module.exports = aiController;
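
Note: resolveLLMConfig() centralizes the per-provider branching that DirAskGPTV2 used to do inline (see the hunks below). A minimal consumption sketch; projectId and token are placeholder values:

    const aiController = require("./tybotRoute/services/AIController");

    async function buildModel(projectId, token) {
        try {
            // 'ollama'/'vllm' resolve url + api_key from the project's integration
            // record; any other provider resolves just the integration API key.
            return await aiController.resolveLLMConfig(projectId, "ollama", "llama3", token);
        } catch (err) {
            // rethrown as { code, error }, e.g. code 422 when the server url is empty
            console.error(err.code, err.error);
            throw err;
        }
    }
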
package/tybotRoute/directives/DirAddKbContent.js CHANGED
@@ -85,7 +85,7 @@ class DirAddKbContent {
 
         let key = await integrationService.getKeyFromIntegrations(this.projectId, 'openai', this.token);
         if (!key) {
-            this.logger.native("[Add to KnwoledgeBase] OpenAI key not found in Integration. Using shared OpenAI key");
+            this.logger.native("[Add to KnwoledgeBase] Using shared OpenAI key");
             winston.verbose("[DirAddKbContent] - Key not found in Integrations. Searching in kb settings...");
             key = await this.getKeyFromKbSettings();
         }
package/tybotRoute/directives/DirAiCondition.js CHANGED
@@ -110,7 +110,7 @@ class DirAiCondition {
 
         // evaluate
 
-        let AI_endpoint = process.env.AI_ENDPOINT;
+        let AI_endpoint = process.env.KB_ENDPOINT_QA;
         winston.verbose("DirAiPrompt AI_endpoint " + AI_endpoint);
 
         let headers = {
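
Note: this is a plain env-var rename (the same change appears in DirAiPrompt below), so deployments that configured AI_ENDPOINT must now set KB_ENDPOINT_QA instead; the URL here is illustrative:

    // before 2.0.42:  AI_ENDPOINT=https://kb.example.com/qa
    // from 2.0.42:    KB_ENDPOINT_QA=https://kb.example.com/qa
    let AI_endpoint = process.env.KB_ENDPOINT_QA; // no fallback to the old name
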
@@ -140,7 +140,7 @@ class DirAiCondition {
         key = await integrationService.getKeyFromIntegrations(this.projectId, action.llm, this.token);
 
         if (!key && action.llm === "openai") {
-            this.logger.native("[AI Condition] OpenAI key not found in Integration. Retrieve shared OpenAI key.")
+            this.logger.native("[AI Condition] Using shared OpenAI key.")
             key = process.env.GPTKEY;
             publicKey = true;
         }
package/tybotRoute/directives/DirAiPrompt.js CHANGED
@@ -112,7 +112,7 @@ class DirAiPrompt {
             }
         }
 
-        let AI_endpoint = process.env.AI_ENDPOINT;
+        let AI_endpoint = process.env.KB_ENDPOINT_QA;
         winston.verbose("DirAiPrompt AI_endpoint " + AI_endpoint);
 
         let headers = {
@@ -141,7 +141,7 @@ class DirAiPrompt {
         key = await integrationService.getKeyFromIntegrations(this.projectId, action.llm, this.token);
 
         if (!key && action.llm === "openai") {
-            this.logger.native("[AI Prompt] OpenAI key not found in Integration. Retrieve shared OpenAI key.")
+            this.logger.native("[AI Prompt] Using shared OpenAI key.")
             key = process.env.GPTKEY;
             publicKey = true;
         }
package/tybotRoute/directives/DirAskGPTV2.js CHANGED
@@ -13,6 +13,10 @@ const integrationService = require("../../services/IntegrationService");
 const { Logger } = require("../../Logger");
 const kbService = require("../../services/KbService");
 const quotasService = require("../../services/QuotasService");
+const aiController = require("../../services/AIController");
+const default_engine = require('../../config/kb/engine');
+const default_engine_hybrid = require('../../config/kb/engine.hybrid');
+const default_embedding = require("../../config/kb/embedding");
 
 class DirAskGPTV2 {
 
@@ -81,6 +85,7 @@ class DirAskGPTV2 {
         let citations = false;
         let chunks_only = false;
         let engine;
+        let embedding;
         let reranking;
         let skip_unanswered = false;
 
@@ -181,57 +186,36 @@
 
         let key;
         let publicKey = false;
-        let ollama_integration;
-        let vllm_integration;
-
-        if (action.llm === 'ollama') {
-            key = process.env.GPTKEY;
-            ollama_integration = await integrationService.getIntegration(this.projectId, action.llm, this.token).catch( async (err) => {
-                this.logger.error("[Ask Knowledge Base] Error getting ollama integration.");
-                await this.chatbot.addParameter("flowError", "Ollama integration not found");
-                if (falseIntent) {
-                    await this.#executeCondition(false, trueIntent, trueIntentAttributes, falseIntent, falseIntentAttributes);
-                    callback(true);
-                    return;
-                }
-                callback();
-                return;
-            })
-        }
-        else if (action.llm === 'vllm') {
-            key = process.env.GPTKEY;
-            vllm_integration = await integrationService.getIntegration(this.projectId, action.llm, this.token).catch( async (err) => {
-                this.logger.error("[Ask Knowledge Base] Error getting vllm integration.");
-                await this.chatbot.addParameter("flowError", "vLLM integration not found");
-                if (falseIntent) {
-                    await this.#executeCondition(false, trueIntent, trueIntentAttributes, falseIntent, falseIntentAttributes);
-                    callback(true);
-                    return;
-                }
-                callback();
+
+        try {
+            model = await aiController.resolveLLMConfig(this.projectId, llm, model, this.token);
+        } catch (err) {
+            this.logger.error(`[Ask Knowledge Base] Error getting ${llm} integration.`);
+            await this.chatbot.addParameter("flowError", `${llm} integration not found`);
+            if (falseIntent) {
+                await this.#executeCondition(false, trueIntent, trueIntentAttributes, falseIntent, falseIntentAttributes);
+                callback(true);
                 return;
-            })
+            }
+            callback();
+            return;
         }
-        else {
-            key = await integrationService.getKeyFromIntegrations(this.projectId, action.llm, this.token);
 
-            if (!key && action.llm === 'openai') {
-                this.logger.native("[Ask Knowledge Base] OpenAI key not found in Integration. Retrieve shared OpenAI key.");
-                key = process.env.GPTKEY;
-                publicKey = true;
-            }
+        if (!model.api_key && model.provider === 'openai') {
+            model.api_key = process.env.GPTKEY;
+            publicKey = true;
+        }
 
-            if (!key) {
-                this.logger.error(`[Ask Knowledge Base] llm key for ${action.llm} not found in integrations`);
-                await this.chatbot.addParameter("flowError", `AskKnowledgeBase Error: missing key for llm ${action.llm}`);
-                if (falseIntent) {
-                    await this.#executeCondition(false, trueIntent, trueIntentAttributes, falseIntent, falseIntentAttributes);
-                    callback(true);
-                    return;
-                }
-                callback();
+        if (!model.api_key) {
+            this.logger.error(`[Ask Knowledge Base] llm key for ${llm} not found in integrations`);
+            await this.chatbot.addParameter("flowError", `AskKnowledgeBase Error: missing key for llm ${llm}`);
+            if (falseIntent) {
+                await this.#executeCondition(false, trueIntent, trueIntentAttributes, falseIntent, falseIntentAttributes);
+                callback(true);
                 return;
             }
+            callback();
+            return;
         }
 
         if (publicKey === true && !chunks_only) {
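
Note: beyond deduplication, this hunk changes error handling. The old code attached .catch() handlers that invoked callback() and returned, but a return inside a .catch() handler only exits the handler, so execution could continue with an undefined integration. A stripped-down illustration of the difference (lookup is a placeholder):

    // old pattern: the error is swallowed and the caller keeps going
    async function oldStyle(lookup) {
        const integration = await lookup().catch(() => { /* return exits only this handler */ });
        console.log("still running, integration =", integration); // reached on failure
    }

    // new pattern: the catch block's return actually stops the flow
    async function newStyle(lookup) {
        try {
            return await lookup();
        } catch (err) {
            return; // execution stops here
        }
    }
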
@@ -305,15 +289,18 @@
             engine = await this.setDefaultEngine(ns.hybrid);
         }
 
+        embedding = ns.embedding || default_embedding;
+        embedding.api_key = process.env.EMBEDDING_API_KEY || process.env.GPTKEY;
+
         let json = {
             question: filled_question,
-            gptkey: key,
             namespace: namespace,
-            llm: llm,
             model: model,
+            embedding: embedding,
             citations: citations,
             engine: engine,
-            debug: true
+            debug: true,
+            stream: false
         };
         if (top_k) {
             json.top_k = top_k;
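
Note: the QA request body now carries structured model and embedding objects instead of a flat gptkey/llm pair. A rough sketch of the payload shape after this change (all values illustrative):

    const json = {
        question: "How do I reset my password?",
        namespace: "my-kb-namespace",
        model: {                      // resolved by aiController.resolveLLMConfig(...)
            provider: "openai",
            name: "gpt-4o",
            api_key: "sk-..."
        },
        embedding: {                  // ns.embedding or the config/kb/embedding defaults,
            provider: "openai",       // api_key forced to EMBEDDING_API_KEY || GPTKEY
            name: "text-embedding-ada-002",
            api_key: "sk-...",
            dimension: 1536
        },
        citations: false,
        engine: { /* config/kb/engine or config/kb/engine.hybrid */ },
        debug: true,
        stream: false
    };
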
@@ -328,17 +315,6 @@
             json.chunks_only = chunks_only;
         }
 
-        if (llm === 'ollama') {
-            json.gptkey = "";
-            json.model = {
-                name: action.model,
-                url: ollama_integration.value.url,
-                token: ollama_integration.value.token
-            }
-            json.stream = false;
-        }
-
-
         if (ns.hybrid === true) {
             json.search_type = 'hybrid';
             json.alpha = alpha;
@@ -351,7 +327,7 @@
         }
 
         if (!action.advancedPrompt) {
-            const contextTemplate = contexts[model] || contexts["general"];
+            const contextTemplate = contexts[model.name] || contexts["general"];
             if (filled_context) {
                 json.system_context = filled_context + "\n" + contextTemplate;
             } else {
@@ -386,7 +362,7 @@
 
         httpUtils.request(
             HTTPREQUEST, async (err, resbody) => {
-
+                console.log("resbody: ", JSON.stringify(resbody));
                 if (err) {
                     winston.error("DirAskGPTV2 error: ", {
                         status: err.response?.status,
@@ -407,6 +383,7 @@
                 }
                 else if (resbody.success === true) {
                     winston.debug("DirAskGPTV2 resbody: ", resbody);
+                    console.log("Answer: ", resbody.answer);
                     if (chunks_only) {
                         await this.#assignAttributes(action, resbody.answer, resbody.source, resbody.chunks);
                         if (trueIntent) {
@@ -418,6 +395,7 @@
                         return;
 
                     } else {
+                        console.log("assign answer to ", action.assignReplyTo)
                         await this.#assignAttributes(action, resbody.answer, resbody.source, resbody.content_chunks);
                         if (publicKey === true && !chunks_only) {
                             let tokens_usage = {
@@ -430,6 +408,7 @@
                         }
 
                         if (trueIntent) {
+                            console.log("execute true intent");
                             await this.#executeCondition(true, trueIntent, trueIntentAttributes, falseIntent, falseIntentAttributes);
                             callback(true);
                             return;
@@ -438,6 +417,7 @@
                         return;
                     }
                 } else {
+                    console.log("else case. Assign answer: ", answer);
                     await this.#assignAttributes(action, answer, source);
                     if (!skip_unanswered) {
                         kbService.addUnansweredQuestion(this.projectId, json.namespace, json.question, this.token).catch((err) => {
@@ -450,6 +430,7 @@
                         })
                     }
                     if (falseIntent) {
+                        console.log("execute false intent");
                        await this.#executeCondition(false, trueIntent, trueIntentAttributes, falseIntent, falseIntentAttributes);
                        callback(true);
                        return;
@@ -612,20 +593,10 @@
     }
 
     async setDefaultEngine(hybrid = false) {
-        let isHybrid = hybrid === true;
-        return new Promise((resolve) => {
-            let engine = {
-                name: process.env.VECTOR_STORE_NAME || "pinecone",
-                type: isHybrid ? process.env.INDEX_TYPE_HYBRID || "serverless" : process.env.INDEX_TYPE || process.env.PINECONE_TYPE || 'serverless',
-                apikey: "",
-                vector_size: process.env.VECTOR_SIZE || 1536,
-                index_name: isHybrid ? process.env.INDEX_NAME_HYBRID || process.env.PINECONE_INDEX_HYBRID || "llm-sample-index-hybrid" : process.env.INDEX_NAME || process.env.PINECONE_INDEX || "llm-sample-index",
-                ...(process.env.VECTOR_STORE_HOST && { host: process.env.VECTOR_STORE_HOST }),
-                ...(process.env.VECTOR_STORE_PORT && { port: process.env.VECTOR_STORE_PORT }),
-                ...(process.env.VECTOR_STORE_DEPLOYMENT && { deployment: process.env.VECTOR_STORE_DEPLOYMENT })
-            }
-            resolve(engine);
-        })
+        if (hybrid) {
+            return default_engine_hybrid
+        }
+        return default_engine;
     }
 
 }
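
Note: setDefaultEngine() now returns the preloaded config objects directly instead of wrapping them in a Promise, and callers are unaffected because awaiting a non-Promise value simply yields the value; a tiny sketch:

    async function setDefaultEngine(hybrid = false) {
        return hybrid ? { index_name: "hybrid" } : { index_name: "plain" }; // plain objects
    }
    // `await` behaves the same whether or not a Promise is returned
    setDefaultEngine(true).then(e => console.log(e.index_name)); // "hybrid"

One caveat: a require()d config is a shared module instance, so mutating it (as the embedding.api_key assignment above does) affects every subsequent caller.
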
package/tybotRoute/directives/DirReply.js CHANGED
@@ -160,6 +160,7 @@ class DirReply {
 
         cleanMessage.senderFullname = this.context.chatbot.bot.name;
         winston.debug("DirReply reply with clean message: ", cleanMessage);
+        this.logger.native("[Reply] Reply with 2: " + cleanMessage.text);
 
         await TiledeskChatbotUtil.updateConversationTranscript(this.context.chatbot, cleanMessage);
         this.tdClient.sendSupportMessage(