@smythos/sre 1.7.41 → 1.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/CHANGELOG +136 -64
  2. package/dist/index.js +65 -50
  3. package/dist/index.js.map +1 -1
  4. package/dist/types/Components/Async.class.d.ts +11 -5
  5. package/dist/types/index.d.ts +2 -0
  6. package/dist/types/subsystems/AgentManager/AgentData.service/connectors/SQLiteAgentDataConnector.class.d.ts +45 -0
  7. package/dist/types/subsystems/LLMManager/LLM.helper.d.ts +32 -1
  8. package/dist/types/subsystems/LLMManager/LLM.inference.d.ts +25 -2
  9. package/dist/types/subsystems/LLMManager/LLM.service/connectors/Anthropic.class.d.ts +22 -2
  10. package/dist/types/subsystems/LLMManager/LLM.service/connectors/Bedrock.class.d.ts +2 -2
  11. package/dist/types/subsystems/LLMManager/LLM.service/connectors/GoogleAI.class.d.ts +27 -2
  12. package/dist/types/subsystems/LLMManager/LLM.service/connectors/Groq.class.d.ts +22 -2
  13. package/dist/types/subsystems/LLMManager/LLM.service/connectors/Ollama.class.d.ts +22 -2
  14. package/dist/types/subsystems/LLMManager/LLM.service/connectors/Perplexity.class.d.ts +3 -3
  15. package/dist/types/subsystems/LLMManager/LLM.service/connectors/openai/OpenAIConnector.class.d.ts +23 -3
  16. package/dist/types/subsystems/LLMManager/LLM.service/connectors/openai/apiInterfaces/ChatCompletionsApiInterface.d.ts +2 -2
  17. package/dist/types/subsystems/LLMManager/LLM.service/connectors/openai/apiInterfaces/OpenAIApiInterface.d.ts +2 -2
  18. package/dist/types/subsystems/LLMManager/LLM.service/connectors/openai/apiInterfaces/ResponsesApiInterface.d.ts +2 -2
  19. package/dist/types/subsystems/LLMManager/LLM.service/connectors/xAI.class.d.ts +3 -3
  20. package/dist/types/subsystems/MemoryManager/LLMContext.d.ts +10 -3
  21. package/dist/types/subsystems/ObservabilityManager/Telemetry.service/connectors/OTel/OTel.class.d.ts +24 -0
  22. package/dist/types/subsystems/ObservabilityManager/Telemetry.service/connectors/OTel/OTel.redaction.helper.d.ts +49 -0
  23. package/dist/types/types/LLM.types.d.ts +30 -1
  24. package/package.json +4 -3
  25. package/src/Components/APICall/OAuth.helper.ts +16 -1
  26. package/src/Components/APIEndpoint.class.ts +11 -4
  27. package/src/Components/Async.class.ts +38 -5
  28. package/src/Components/GenAILLM.class.ts +13 -7
  29. package/src/Components/LLMAssistant.class.ts +3 -1
  30. package/src/Components/LogicAND.class.ts +13 -0
  31. package/src/Components/LogicAtLeast.class.ts +18 -0
  32. package/src/Components/LogicAtMost.class.ts +19 -0
  33. package/src/Components/LogicOR.class.ts +12 -2
  34. package/src/Components/LogicXOR.class.ts +11 -0
  35. package/src/constants.ts +1 -1
  36. package/src/helpers/Conversation.helper.ts +10 -8
  37. package/src/index.ts +2 -0
  38. package/src/index.ts.bak +2 -0
  39. package/src/subsystems/AgentManager/AgentData.service/connectors/SQLiteAgentDataConnector.class.ts +190 -0
  40. package/src/subsystems/AgentManager/AgentData.service/index.ts +2 -0
  41. package/src/subsystems/LLMManager/LLM.helper.ts +117 -1
  42. package/src/subsystems/LLMManager/LLM.inference.ts +136 -67
  43. package/src/subsystems/LLMManager/LLM.service/LLMConnector.ts +13 -6
  44. package/src/subsystems/LLMManager/LLM.service/connectors/Anthropic.class.ts +157 -33
  45. package/src/subsystems/LLMManager/LLM.service/connectors/Bedrock.class.ts +9 -8
  46. package/src/subsystems/LLMManager/LLM.service/connectors/GoogleAI.class.ts +121 -83
  47. package/src/subsystems/LLMManager/LLM.service/connectors/Groq.class.ts +125 -62
  48. package/src/subsystems/LLMManager/LLM.service/connectors/Ollama.class.ts +168 -76
  49. package/src/subsystems/LLMManager/LLM.service/connectors/Perplexity.class.ts +18 -8
  50. package/src/subsystems/LLMManager/LLM.service/connectors/VertexAI.class.ts +8 -4
  51. package/src/subsystems/LLMManager/LLM.service/connectors/openai/OpenAIConnector.class.ts +50 -8
  52. package/src/subsystems/LLMManager/LLM.service/connectors/openai/apiInterfaces/ChatCompletionsApiInterface.ts +30 -16
  53. package/src/subsystems/LLMManager/LLM.service/connectors/openai/apiInterfaces/OpenAIApiInterface.ts +2 -2
  54. package/src/subsystems/LLMManager/LLM.service/connectors/openai/apiInterfaces/ResponsesApiInterface.ts +29 -15
  55. package/src/subsystems/LLMManager/LLM.service/connectors/xAI.class.ts +10 -8
  56. package/src/subsystems/MemoryManager/LLMContext.ts +27 -8
  57. package/src/subsystems/ObservabilityManager/Telemetry.service/connectors/OTel/OTel.class.ts +467 -120
  58. package/src/subsystems/ObservabilityManager/Telemetry.service/connectors/OTel/OTel.redaction.helper.ts +203 -0
  59. package/src/types/LLM.types.ts +31 -1
  60. package/src/types/node-sqlite.d.ts +45 -0
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@smythos/sre",
3
- "version": "1.7.41",
3
+ "version": "1.8.0",
4
4
  "description": "Smyth Runtime Environment",
5
5
  "author": "Alaa-eddine KADDOURI",
6
6
  "license": "MIT",
@@ -46,7 +46,7 @@
46
46
  "typescript": "^5.4.5"
47
47
  },
48
48
  "dependencies": {
49
- "@anthropic-ai/sdk": "^0.56.0",
49
+ "@anthropic-ai/sdk": "^0.73.0",
50
50
  "@aws-sdk/client-bedrock-runtime": "^3.826.0",
51
51
  "@aws-sdk/client-iam": "^3.835.0",
52
52
  "@aws-sdk/client-lambda": "^3.835.0",
@@ -103,7 +103,8 @@
103
103
  "winston-transport": "^4.7.0",
104
104
  "ws": "^8.18.3",
105
105
  "xxhashjs": "^0.2.2",
106
- "zip-lib": "^1.1.2"
106
+ "zip-lib": "^1.1.2",
107
+ "zod": "^4.3.6"
107
108
  },
108
109
  "repository": {
109
110
  "type": "git",
@@ -247,10 +247,15 @@ export const retrieveOAuthTokens = async (agent, config) => {
247
247
  responseData.tokenURL = tokensData.auth_settings?.tokenURL;
248
248
  responseData.clientID = tokensData.auth_settings?.clientID;
249
249
  responseData.clientSecret = tokensData.auth_settings?.clientSecret;
250
+ responseData.scope = tokensData.auth_settings?.scope;
251
+ responseData.audience = tokensData.auth_settings?.audience;
250
252
  } else {
251
253
  responseData.tokenURL = tokensData.tokenURL || tokensData.oauth_info?.tokenURL;
252
254
  responseData.clientID = tokensData.clientID || tokensData.oauth_info?.clientID;
253
255
  responseData.clientSecret = tokensData.clientSecret || tokensData.oauth_info?.clientSecret;
256
+ // Extract scope and audience from oauth_info (old structure)
257
+ responseData.scope = tokensData.scope || tokensData.oauth_info?.scope;
258
+ responseData.audience = tokensData.audience || tokensData.oauth_info?.audience;
254
259
  }
255
260
  responseData.expiresIn = expiresIn ?? 0; // Optional property, default to 0 if not present
256
261
  responseData.team = tokensData.team || agent.teamId;
@@ -355,7 +360,7 @@ async function getClientCredentialToken(tokensData, logger, keyId, oauthTokens,
355
360
  };
356
361
 
357
362
  try {
358
- const { clientID, clientSecret, tokenURL } = oauthTokens;
363
+ const { clientID, clientSecret, tokenURL, scope, audience } = oauthTokens;
359
364
  const currentTime = new Date().getTime();
360
365
  // Check for token expiration
361
366
  if (!oauthTokens.expiresIn || currentTime >= Number(oauthTokens.expiresIn)) {
@@ -370,6 +375,16 @@ async function getClientCredentialToken(tokensData, logger, keyId, oauthTokens,
370
375
  client_secret: clientSecret,
371
376
  });
372
377
 
378
+ // Add audience if provided (required by some providers like Auth0)
379
+ if (audience && typeof audience === 'string' && audience.trim()) {
380
+ params.append('audience', audience.trim());
381
+ }
382
+
383
+ // Add scope if provided (OAuth2 Client Credentials supports scopes)
384
+ if (scope && typeof scope === 'string' && scope.trim()) {
385
+ params.append('scope', scope.trim());
386
+ }
387
+
373
388
  const response = await axios.post(tokenURL, params.toString(), {
374
389
  headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
375
390
  });
@@ -80,7 +80,7 @@ export class APIEndpoint extends Component {
80
80
 
81
81
  // set default value and agent variables
82
82
  const inputsWithDefaultValue = config.inputs.filter(
83
- (input) => input.defaultVal !== undefined && input.defaultVal !== '' && input.defaultVal !== null
83
+ (input) => input.defaultVal !== undefined && input.defaultVal !== '' && input.defaultVal !== null,
84
84
  );
85
85
 
86
86
  const bodyInputNames: string[] = [];
@@ -152,7 +152,14 @@ export class APIEndpoint extends Component {
152
152
  // #region log inputs
153
153
  logger.debug('Parsing inputs');
154
154
  logger.debug(' Headers', headers);
155
- logger.debug(' Body', body);
155
+ const dbgBody = {};
156
+ for (let key in body) {
157
+ const entry = body[key];
158
+ if (entry instanceof BinaryInput) dbgBody[key] = `BinaryInput<...>`;
159
+ else dbgBody[key] = entry;
160
+ }
161
+
162
+ logger.debug(' Body', dbgBody);
156
163
  logger.debug(' Params', params);
157
164
  logger.debug(' Query', query);
158
165
  // #endregion log inputs
@@ -172,7 +179,7 @@ export class APIEndpoint extends Component {
172
179
  }
173
180
  }
174
181
  }
175
- logger.debug('Parsed body json input', body);
182
+ logger.debug('Parsed body json input');
176
183
 
177
184
  logger.debug('Parsing query json input');
178
185
  for (let key in query) {
@@ -219,7 +226,7 @@ export class APIEndpoint extends Component {
219
226
  return await binaryInput.getJsonData(AccessCandidate.agent(agent.id));
220
227
  }
221
228
  return null;
222
- })
229
+ }),
223
230
  );
224
231
 
225
232
  // Filter out null values and handle single/multiple results
@@ -4,6 +4,17 @@ import { Component } from './Component.class';
4
4
  import Joi from 'joi';
5
5
  import { delay } from '../utils';
6
6
 
7
+ interface AsyncResult {
8
+ JobID: string;
9
+ _debug?: string;
10
+ [key: string]: any; // Allow additional properties from input in forked mode
11
+ }
12
+
13
+ interface AsyncErrorResult {
14
+ _error: string;
15
+ _debug: string;
16
+ }
17
+
7
18
  export class Async extends Component {
8
19
  static JOBS = {};
9
20
  protected configSchema = null;
@@ -18,6 +29,9 @@ export class Async extends Component {
18
29
  async process(input, config, agent: Agent) {
19
30
  await super.process(input, config, agent);
20
31
  const logger = this.createComponentLogger(agent, config);
32
+
33
+ logger.debug(`=== Async Log ===`);
34
+
21
35
  //we set data.forked to true in the forked component in order to refork it again.
22
36
  const forked = config.data.forked;
23
37
  let _error = null;
@@ -29,6 +43,8 @@ export class Async extends Component {
29
43
  const forkedAgent: ForkedAgent = new ForkedAgent(agent, config.id);
30
44
  const JobID = forkedAgent.jobID;
31
45
 
46
+ logger.debug(` JobID: ${JobID}`);
47
+
32
48
  forkedAgent.agent.async = true;
33
49
  forkedAgent.agent.jobID = JobID;
34
50
  //clean JobID branch
@@ -71,21 +87,38 @@ export class Async extends Component {
71
87
  }
72
88
  });
73
89
 
74
- return { JobID };
90
+ logger.debug(''); // empty line
91
+ const result: AsyncResult = {
92
+ JobID,
93
+ _debug: logger.output,
94
+ };
95
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
96
+ return result;
75
97
  } else {
76
98
  //const Input = input.Input;
77
- let result = { JobID: agent.jobID };
99
+ const result: AsyncResult = {
100
+ JobID: agent.jobID,
101
+ };
78
102
  for (let key in input) {
79
103
  result[key] = input[key];
80
104
  }
81
105
 
106
+ logger.debug(` JobID: ${agent.jobID}`);
107
+
108
+ logger.debug(''); // empty line
109
+ result._debug = logger.output;
110
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
82
111
  return result;
83
112
  }
84
113
  } catch (error: any) {
85
- _error = error;
114
+ _error = error?.message || error?.toString() || error;
115
+ logger.error(` Error: ${_error}`);
116
+ const errorResult: AsyncErrorResult = {
117
+ _error,
118
+ _debug: logger.output,
119
+ };
120
+ return errorResult;
86
121
  }
87
-
88
- return {};
89
122
  }
90
123
 
91
124
  // private recursiveTagAsyncComponents(component, agent: Agent) {
@@ -247,7 +247,7 @@ export class GenAILLM extends Component {
247
247
  },
248
248
  reasoningEffort: {
249
249
  type: 'string',
250
- valid: ['none', 'default', 'low', 'medium', 'high', 'xhigh'],
250
+ valid: ['none', 'default', 'low', 'medium', 'high', 'xhigh', 'max'],
251
251
  description: 'Controls the level of effort the model will put into reasoning',
252
252
  label: 'Reasoning Effort',
253
253
  },
@@ -278,10 +278,10 @@ export class GenAILLM extends Component {
278
278
  protected configSchema = Joi.object({
279
279
  model: Joi.string().max(200).required(),
280
280
  prompt: Joi.string().required().max(8_000_000).label('Prompt'), // 2M tokens is around 8M characters
281
- temperature: Joi.number().min(0).max(5).label('Temperature'), // max temperature is 2 for OpenAI and togetherAI but 5 for cohere
281
+ temperature: Joi.number().min(-0.01).max(5).label('Temperature'), // min is -0.01 to represent "not set" for Anthropic (only one of Temperature/Top P can be used). Max is 2 for OpenAI/TogetherAI, 5 for Cohere
282
282
  maxTokens: Joi.number().min(1).label('Maximum Tokens'),
283
283
  stopSequences: Joi.string().allow('').max(400).label('Stop Sequences'),
284
- topP: Joi.number().min(0).max(1).label('Top P'),
284
+ topP: Joi.number().min(-0.01).max(1).label('Top P'), // min is -0.01 to represent "not set" for Anthropic (only one of Temperature/Top P can be used)
285
285
  topK: Joi.number().min(0).max(500).label('Top K'), // max top_k is 100 for togetherAI but 500 for cohere
286
286
  frequencyPenalty: Joi.number().min(0).max(2).label('Frequency Penalty'),
287
287
  presencePenalty: Joi.number().min(0).max(2).label('Presence Penalty'),
@@ -306,7 +306,12 @@ export class GenAILLM extends Component {
306
306
  searchMode: Joi.string().valid('auto', 'on', 'off').optional().allow('').label('Search Mode'),
307
307
  returnCitations: Joi.boolean().optional().allow('').label('Return Citations'),
308
308
  maxSearchResults: Joi.number().min(1).max(100).optional().allow('').label('Max Search Results'),
309
- searchDataSources: Joi.array().items(Joi.string().valid('web', 'x', 'news', 'rss')).max(4).optional().allow('').label('Search Data Sources'),
309
+ searchDataSources: Joi.array()
310
+ .items(Joi.string().valid('web', 'x', 'news', 'rss'))
311
+ .max(4)
312
+ .optional()
313
+ .allow('')
314
+ .label('Search Data Sources'),
310
315
  searchCountry: Joi.string().max(255).optional().allow('').label('Search Country'),
311
316
  excludedWebsites: Joi.string().max(10000).optional().allow('').label('Excluded Websites'),
312
317
  allowedWebsites: Joi.string().max(10000).optional().allow('').label('Allowed Websites'),
@@ -331,7 +336,7 @@ export class GenAILLM extends Component {
331
336
  // #region Reasoning
332
337
  useReasoning: Joi.boolean().optional().label('Use Reasoning'),
333
338
  reasoningEffort: Joi.string()
334
- .valid('none', 'default', 'minimal', 'low', 'medium', 'high', 'xhigh')
339
+ .valid('none', 'default', 'minimal', 'low', 'medium', 'high', 'xhigh', 'max')
335
340
  .optional()
336
341
  .allow('')
337
342
  .label('Reasoning Effort'),
@@ -355,6 +360,7 @@ export class GenAILLM extends Component {
355
360
  // Resolve template variables in config.data without mutating original config
356
361
  const resolvedConfigData = {
357
362
  ...config.data,
363
+ outputs: config.outputs,
358
364
  prompt: config.data.prompt && TemplateString(config.data.prompt).parse(input).result,
359
365
  webSearchCity: config.data.webSearchCity && TemplateString(config.data.webSearchCity).parse(input).result,
360
366
  webSearchCountry: config.data.webSearchCountry && TemplateString(config.data.webSearchCountry).parse(input).result,
@@ -412,7 +418,7 @@ export class GenAILLM extends Component {
412
418
  }
413
419
 
414
420
  return features?.includes(requestFeature) ? file : null;
415
- })
421
+ }),
416
422
  );
417
423
 
418
424
  files = validFiles.filter(Boolean);
@@ -420,7 +426,6 @@ export class GenAILLM extends Component {
420
426
  if (files.length === 0) {
421
427
  // No valid files after filtering - determine the cause
422
428
  const hasDetectedMimeTypes = fileTypes.size > 0;
423
-
424
429
  if (!hasDetectedMimeTypes) {
425
430
  // Case 1: No mime types detected - files are corrupted/invalid
426
431
  return {
@@ -450,6 +455,7 @@ export class GenAILLM extends Component {
450
455
  }
451
456
 
452
457
  // default to json response format
458
+ // Having 'responseFormat' will be deprecated after structured output is implemented for all LLMs
453
459
  const hasCustomOutputs = config?.outputs?.some((output) => !output.default);
454
460
  resolvedConfigData.responseFormat = resolvedConfigData?.responseFormat || (hasCustomOutputs ? 'json' : '');
455
461
 
@@ -137,7 +137,9 @@ export class LLMAssistant extends Component {
137
137
 
138
138
  const messages: any[] = await readMessagesFromSession(agent.id, userId, conversationId, Math.round(maxTokens / 2));
139
139
 
140
- messages.push({ role: TLLMMessageRole.User, content: userInput });
140
+ const prompt = llmInference.connector.enhancePrompt(userInput, config);
141
+
142
+ messages.push({ role: TLLMMessageRole.User, content: prompt });
141
143
 
142
144
  if (messages[0]?.role != TLLMMessageRole.System) {
143
145
  messages.unshift({ role: TLLMMessageRole.System, content: behavior });
@@ -11,7 +11,14 @@ export class LogicAND extends Component {
11
11
  const logger = this.createComponentLogger(agent, config);
12
12
  const result: any = { Output: true };
13
13
 
14
+ logger.debug(`=== LogicAND Log ===`);
15
+
16
+ logger.debug(' Input:');
17
+
14
18
  for (let cfgInput of config.inputs) {
19
+
20
+ logger.debug(`${cfgInput.name}: ${input?.[cfgInput.name]}`);
21
+
15
22
  // check if all inputs are set (expected inputs are in "config.inputs" actual inputs are in "input")
16
23
  if (!input[cfgInput.name]) {
17
24
  result.Output = undefined;
@@ -23,6 +30,12 @@ export class LogicAND extends Component {
23
30
  result.Unverified = !result.Verified;
24
31
  if (!result.Verified) delete result.Verified;
25
32
  if (!result.Unverified) delete result.Unverified;
33
+
34
+ logger.debug('') // empty line
35
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
36
+
37
+ result._debug = logger.output;
38
+
26
39
  return result;
27
40
  }
28
41
  }
@@ -21,12 +21,25 @@ export class LogicAtLeast extends Component {
21
21
  const logger = this.createComponentLogger(agent, config);
22
22
  const result: any = { Output: undefined };
23
23
 
24
+ logger.debug(`=== LogicAtLeast Log ===`);
25
+ logger.debug(' Input:');
26
+
27
+ for (let cfgInput of config.inputs) {
28
+ logger.debug(`${cfgInput.name}: ${input?.[cfgInput.name]}`);
29
+ }
30
+
24
31
  if (config.data.minSetInputs === '' || isNaN(Number(config.data.minSetInputs))) {
32
+ logger.debug(''); // empty line
33
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
34
+ result._debug = logger.output;
25
35
  return result;
26
36
  }
27
37
 
28
38
  const minSetInputs = Number(config.data.minSetInputs);
29
39
  if (config.inputs.length < minSetInputs) {
40
+ logger.debug(''); // empty line
41
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
42
+ result._debug = logger.output;
30
43
  return result;
31
44
  }
32
45
 
@@ -46,6 +59,11 @@ export class LogicAtLeast extends Component {
46
59
  if (!result.Verified) delete result.Verified;
47
60
  if (!result.Unverified) delete result.Unverified;
48
61
 
62
+ logger.debug(''); // empty line
63
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
64
+
65
+ result._debug = logger.output;
66
+
49
67
  return result;
50
68
  }
51
69
  }
@@ -18,14 +18,28 @@ export class LogicAtMost extends Component {
18
18
 
19
19
  async process(input, config, agent: Agent) {
20
20
  await super.process(input, config, agent);
21
+ const logger = this.createComponentLogger(agent, config);
21
22
  const result: any = { Output: undefined };
22
23
 
24
+ logger.debug(`=== LogicAtMost Log ===`);
25
+ logger.debug(' Input:');
26
+
27
+ for (let cfgInput of config.inputs) {
28
+ logger.debug(`${cfgInput.name}: ${input?.[cfgInput.name]}`);
29
+ }
30
+
23
31
  if (config.data.maxSetInputs === '' || isNaN(Number(config.data.maxSetInputs))) {
32
+ logger.debug(''); // empty line
33
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
34
+ result._debug = logger.output;
24
35
  return result;
25
36
  }
26
37
 
27
38
  const maxSetInputs = Number(config.data.maxSetInputs);
28
39
  if (config.inputs.length < maxSetInputs) {
40
+ logger.debug(''); // empty line
41
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
42
+ result._debug = logger.output;
29
43
  return result;
30
44
  }
31
45
 
@@ -48,6 +62,11 @@ export class LogicAtMost extends Component {
48
62
  if (!result.Verified) delete result.Verified;
49
63
  if (!result.Unverified) delete result.Unverified;
50
64
 
65
+ logger.debug(''); // empty line
66
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
67
+
68
+ result._debug = logger.output;
69
+
51
70
  return result;
52
71
  }
53
72
  }
@@ -8,10 +8,15 @@ export class LogicOR extends Component {
8
8
  init() {}
9
9
  async process(input, config, agent: Agent) {
10
10
  await super.process(input, config, agent);
11
+ const logger = this.createComponentLogger(agent, config);
11
12
  const result: any = { Output: undefined };
12
- console.log(input);
13
- console.log(config);
13
+
14
+ logger.debug(`=== LogicOR Log ===`);
15
+ logger.debug(' Input:');
16
+
14
17
  for (let cfgInput of config.inputs) {
18
+ logger.debug(`${cfgInput.name}: ${input?.[cfgInput.name]}`);
19
+
15
20
  // check if one of the inputs are set (expected inputs are in "config.inputs" actual inputs are in "input")
16
21
  if (input[cfgInput.name]) {
17
22
  result.Output = true;
@@ -24,6 +29,11 @@ export class LogicOR extends Component {
24
29
  if (!result.Verified) delete result.Verified;
25
30
  if (!result.Unverified) delete result.Unverified;
26
31
 
32
+ logger.debug(''); // empty line
33
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
34
+
35
+ result._debug = logger.output;
36
+
27
37
  return result;
28
38
  }
29
39
  }
@@ -10,10 +10,16 @@ export class LogicXOR extends Component {
10
10
 
11
11
  async process(input, config, agent: Agent) {
12
12
  await super.process(input, config, agent);
13
+ const logger = this.createComponentLogger(agent, config);
13
14
  const result: any = { Output: undefined };
14
15
  let trueCount = 0;
15
16
 
17
+ logger.debug(`=== LogicXOR Log ===`);
18
+ logger.debug(' Input:');
19
+
16
20
  for (let cfgInput of config.inputs) {
21
+ logger.debug(`${cfgInput.name}: ${input?.[cfgInput.name]}`);
22
+
17
23
  // counts the number of set inputs
18
24
  if (input[cfgInput.name]) {
19
25
  trueCount++;
@@ -29,6 +35,11 @@ export class LogicXOR extends Component {
29
35
  if (!result.Verified) delete result.Verified;
30
36
  if (!result.Unverified) delete result.Unverified;
31
37
 
38
+ logger.debug(''); // empty line
39
+ logger.debug(` Result: \n${JSON.stringify(result, null, 2)}`);
40
+
41
+ result._debug = logger.output;
42
+
32
43
  return result;
33
44
  }
34
45
  }
package/src/constants.ts CHANGED
@@ -66,7 +66,7 @@ export const SUPPORTED_MIME_TYPES_MAP = {
66
66
  OpenAI: {
67
67
  image: ['image/png', 'image/jpeg', 'image/jpg', 'image/webp', 'image/gif'],
68
68
  imageGen: ['image/png', 'image/jpeg', 'image/jpg', 'image/webp'],
69
- document: ['application/pdf'],
69
+ document: ['application/pdf', 'text/plain'],
70
70
  },
71
71
  TogetherAI: {
72
72
  image: ['image/png', 'image/jpeg', 'image/jpg', 'image/webp', 'image/gif'], // Same as OpenAI
@@ -320,7 +320,7 @@ export class Conversation extends EventEmitter {
320
320
  // });
321
321
  /* ==================== STEP ENTRY ==================== */
322
322
 
323
- if (message) this._context.addUserMessage(message, message_id);
323
+ if (message) await this._context.addUserMessage(message, message_id);
324
324
 
325
325
  const contextWindow = await this._context.getContextWindow(this._maxContextSize, this._maxOutputTokens);
326
326
 
@@ -448,7 +448,7 @@ export class Conversation extends EventEmitter {
448
448
  // Already at limit, don't execute any tools from this batch - all will be pending
449
449
  const pendingToolNames = toolsData.map((t: ToolData) => t.name).join(', ');
450
450
  const systemInstruction = `You have reached the maximum number of tool calls (${this._maxToolCallsPerSession}). The following tools were requested but marked as "pending": ${pendingToolNames}. Please provide a helpful response based on the information you've gathered so far. You may acknowledge these pending tools and suggest the user can continue in a follow-up request.`;
451
- this._context.addUserMessage(systemInstruction, message_id, { internal: true });
451
+ await this._context.addUserMessage(systemInstruction, message_id, { internal: true });
452
452
  this.emit(TLLMEvent.Interrupted, 'max_tool_calls', { requestId: llmReqUid });
453
453
  this._disableToolsForNextCall = true;
454
454
 
@@ -609,13 +609,13 @@ export class Conversation extends EventEmitter {
609
609
  //if (!passThroughContent) {
610
610
 
611
611
  if (!passThroughContent) {
612
- this._context.addToolMessage(llmMessage, allToolsData, message_id);
612
+ await this._context.addToolMessage(llmMessage, allToolsData, message_id);
613
613
  //delete toolHeaders['x-passthrough'];
614
614
  } else {
615
615
  //this._context.addAssistantMessage(passThroughContent, message_id);
616
616
 
617
617
  //llmMessage.content += '\n' + passThroughContent;
618
- this._context.addToolMessage(llmMessage, allToolsData, message_id, { passThrough: true });
618
+ await this._context.addToolMessage(llmMessage, allToolsData, message_id, { passThrough: true });
619
619
 
620
620
  //this._context.addAssistantMessage(passThroughContent, message_id, { passthrough: true });
621
621
  //this should not be stored in the persistent conversation store
@@ -637,7 +637,7 @@ export class Conversation extends EventEmitter {
637
637
  // If no pending tools, LLM completed naturally - don't confuse it with limit messages
638
638
  const systemInstruction = `You have reached the maximum number of tool calls (${this._maxToolCallsPerSession}) for this request. Some tools are marked as "pending" and were not executed. Please provide a helpful response based on the information you've gathered so far. You may acknowledge these pending tools and suggest the user can continue in a follow-up request.`;
639
639
 
640
- this._context.addUserMessage(systemInstruction, message_id, { internal: true });
640
+ await this._context.addUserMessage(systemInstruction, message_id, { internal: true });
641
641
  this.emit(TLLMEvent.Interrupted, 'max_tool_calls', { requestId: llmReqUid });
642
642
  }
643
643
  }
@@ -670,7 +670,7 @@ export class Conversation extends EventEmitter {
670
670
  if (lastMessage?.content?.includes(passThroughtContinueMessage) && lastMessage?.__smyth_data__?.internal) {
671
671
  metadata = { internal: true };
672
672
  }
673
- this._context.addAssistantMessage(_content, message_id, metadata);
673
+ await this._context.addAssistantMessage(_content, message_id, metadata);
674
674
  resolve(''); //the content were already emitted through 'content' event
675
675
  }
676
676
  });
@@ -678,7 +678,6 @@ export class Conversation extends EventEmitter {
678
678
 
679
679
  const toolsContent = await toolsPromise.catch((error) => {
680
680
  console.error('Error in toolsPromise: ', error);
681
- //this.emit('error', error);
682
681
  this.emit(TLLMEvent.Error, error, { requestId: llmReqUid });
683
682
  return '';
684
683
  });
@@ -1033,7 +1032,10 @@ export class Conversation extends EventEmitter {
1033
1032
  });
1034
1033
 
1035
1034
  let messages = [];
1036
- if (this._context) messages = this._context.messages; // preserve messages
1035
+ if (this._context) {
1036
+ await this._context.ready();
1037
+ messages = this._context.messages; // preserve messages
1038
+ }
1037
1039
 
1038
1040
  this._context = new LLMContext(this.llmInference, this.systemPrompt, this._llmContextStore);
1039
1041
  } else {
package/src/index.ts CHANGED
@@ -167,6 +167,7 @@ export * from './subsystems/Security/Vault.service/VaultConnector';
167
167
  export * from './subsystems/AgentManager/AgentData.service/connectors/CLIAgentDataConnector.class';
168
168
  export * from './subsystems/AgentManager/AgentData.service/connectors/LocalAgentDataConnector.class';
169
169
  export * from './subsystems/AgentManager/AgentData.service/connectors/NullAgentData.class';
170
+ export * from './subsystems/AgentManager/AgentData.service/connectors/SQLiteAgentDataConnector.class';
170
171
  export * from './subsystems/AgentManager/Component.service/connectors/LocalComponentConnector.class';
171
172
  export * from './subsystems/AgentManager/Scheduler.service/connectors/LocalScheduler.class';
172
173
  export * from './subsystems/ComputeManager/Code.service/connectors/AWSLambdaCode.class';
@@ -211,6 +212,7 @@ export * from './subsystems/Security/Vault.service/connectors/SecretsManager.cla
211
212
  export * from './subsystems/LLMManager/LLM.service/connectors/openai/OpenAIConnector.class';
212
213
  export * from './subsystems/LLMManager/LLM.service/connectors/openai/types';
213
214
  export * from './subsystems/ObservabilityManager/Telemetry.service/connectors/OTel/OTel.class';
215
+ export * from './subsystems/ObservabilityManager/Telemetry.service/connectors/OTel/OTel.redaction.helper';
214
216
  export * from './subsystems/ObservabilityManager/Telemetry.service/connectors/OTel/OTelContextRegistry';
215
217
  export * from './subsystems/LLMManager/LLM.service/connectors/openai/apiInterfaces/constants';
216
218
  export * from './subsystems/LLMManager/LLM.service/connectors/openai/apiInterfaces/utils';
package/src/index.ts.bak CHANGED
@@ -167,6 +167,7 @@ export * from './subsystems/Security/Vault.service/VaultConnector';
167
167
  export * from './subsystems/AgentManager/AgentData.service/connectors/CLIAgentDataConnector.class';
168
168
  export * from './subsystems/AgentManager/AgentData.service/connectors/LocalAgentDataConnector.class';
169
169
  export * from './subsystems/AgentManager/AgentData.service/connectors/NullAgentData.class';
170
+ export * from './subsystems/AgentManager/AgentData.service/connectors/SQLiteAgentDataConnector.class';
170
171
  export * from './subsystems/AgentManager/Component.service/connectors/LocalComponentConnector.class';
171
172
  export * from './subsystems/AgentManager/Scheduler.service/connectors/LocalScheduler.class';
172
173
  export * from './subsystems/ComputeManager/Code.service/connectors/AWSLambdaCode.class';
@@ -211,6 +212,7 @@ export * from './subsystems/Security/Vault.service/connectors/SecretsManager.cla
211
212
  export * from './subsystems/LLMManager/LLM.service/connectors/openai/OpenAIConnector.class';
212
213
  export * from './subsystems/LLMManager/LLM.service/connectors/openai/types';
213
214
  export * from './subsystems/ObservabilityManager/Telemetry.service/connectors/OTel/OTel.class';
215
+ export * from './subsystems/ObservabilityManager/Telemetry.service/connectors/OTel/OTel.redaction.helper';
214
216
  export * from './subsystems/ObservabilityManager/Telemetry.service/connectors/OTel/OTelContextRegistry';
215
217
  export * from './subsystems/LLMManager/LLM.service/connectors/openai/apiInterfaces/constants';
216
218
  export * from './subsystems/LLMManager/LLM.service/connectors/openai/apiInterfaces/utils';