@cognigy/rest-api-client 0.18.0 → 0.20.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (216)
  1. package/CHANGELOG.md +6 -0
  2. package/build/RestAPIClient.js +7 -0
  3. package/build/apigroups/AdministrationAPIGroup_2_0.js +8 -1
  4. package/build/apigroups/JWTAuthAPIGroup_2_0.js +12 -0
  5. package/build/apigroups/MetricsAPIGroup_2_0.js +5 -0
  6. package/build/apigroups/ResourcesAPIGroup_2_0.js +28 -14
  7. package/build/apigroups/index.js +3 -1
  8. package/build/shared/charts/descriptors/agentAssist/helpers/determineMetadata.js +15 -0
  9. package/build/shared/charts/descriptors/agentAssist/helpers/knowledgeSearch/answerExtraction.helper.js +1 -1
  10. package/build/shared/charts/descriptors/agentAssist/helpers/knowledgeSearch/followUpDetection.helper.js +2 -2
  11. package/build/shared/charts/descriptors/agentAssist/identityAssist.js +1 -1
  12. package/build/shared/charts/descriptors/agentAssist/index.js +3 -1
  13. package/build/shared/charts/descriptors/agentAssist/knowledgeAssist.js +1 -1
  14. package/build/shared/charts/descriptors/agentAssist/nextActionAssist.js +4 -5
  15. package/build/shared/charts/descriptors/agentAssist/sendData.js +74 -0
  16. package/build/shared/charts/descriptors/agentAssist/sentimentAssist.js +1 -1
  17. package/build/shared/charts/descriptors/agentAssist/setAdaptiveCardTile.js +2 -0
  18. package/build/shared/charts/descriptors/agentAssist/setAgentAssistGrid.js +2 -1
  19. package/build/shared/charts/descriptors/agentAssist/setHtmlTile.js +5 -3
  20. package/build/shared/charts/descriptors/agentAssist/setIframeTile.js +5 -3
  21. package/build/shared/charts/descriptors/agentAssist/setSecureFormsTile.js +2 -2
  22. package/build/shared/charts/descriptors/agentAssist/transcriptAssist.js +2 -1
  23. package/build/shared/charts/descriptors/analytics/activateProfile.js +1 -0
  24. package/build/shared/charts/descriptors/analytics/addMemory.js +51 -0
  25. package/build/shared/charts/descriptors/analytics/blindMode.js +2 -0
  26. package/build/shared/charts/descriptors/analytics/completeGoal.js +4 -2
  27. package/build/shared/charts/descriptors/analytics/deactivateProfile.js +1 -0
  28. package/build/shared/charts/descriptors/analytics/deleteProfile.js +1 -0
  29. package/build/shared/charts/descriptors/analytics/helper.js +20 -0
  30. package/build/shared/charts/descriptors/analytics/index.js +5 -1
  31. package/build/shared/charts/descriptors/analytics/mergeProfile.js +1 -0
  32. package/build/shared/charts/descriptors/analytics/overwriteAnalytics.js +9 -0
  33. package/build/shared/charts/descriptors/analytics/setRating.js +4 -2
  34. package/build/shared/charts/descriptors/analytics/trackGoal.js +102 -0
  35. package/build/shared/charts/descriptors/analytics/updateProfile.js +1 -0
  36. package/build/shared/charts/descriptors/apps/initAppSession.js +1 -0
  37. package/build/shared/charts/descriptors/apps/setAdaptiveCardAppState.js +35 -10
  38. package/build/shared/charts/descriptors/apps/setHtmlAppState.js +25 -2
  39. package/build/shared/charts/descriptors/apps/utils/getXAppsOverlaySettings.js +54 -0
  40. package/build/shared/charts/descriptors/connectionNodes/documentParserProviders/azureAIDocumentIntelligenceConnection.js +12 -0
  41. package/build/shared/charts/descriptors/connectionNodes/documentParserProviders/index.js +13 -0
  42. package/build/shared/charts/descriptors/connectionNodes/generativeAIProviders/awsBedrockProviderConnection.js +12 -0
  43. package/build/shared/charts/descriptors/connectionNodes/generativeAIProviders/azureOpenAIProviderConnection.js +4 -3
  44. package/build/shared/charts/descriptors/connectionNodes/generativeAIProviders/azureOpenAIProviderConnectionV2.js +3 -3
  45. package/build/shared/charts/descriptors/connectionNodes/generativeAIProviders/azureOpenAIProviderOauth2Connection.js +14 -0
  46. package/build/shared/charts/descriptors/connectionNodes/generativeAIProviders/index.js +16 -8
  47. package/build/shared/charts/descriptors/connectionNodes/generativeAIProviders/openAIProviderConnection.js +3 -3
  48. package/build/shared/charts/descriptors/data/addToContext.js +7 -0
  49. package/build/shared/charts/descriptors/data/copyDataToContext.js +3 -0
  50. package/build/shared/charts/descriptors/data/copySlotsToContext.js +4 -1
  51. package/build/shared/charts/descriptors/data/debugMessage.js +73 -0
  52. package/build/shared/charts/descriptors/data/index.js +3 -1
  53. package/build/shared/charts/descriptors/data/removeFromContext.js +9 -1
  54. package/build/shared/charts/descriptors/data/resetContext.js +1 -0
  55. package/build/shared/charts/descriptors/index.js +19 -1
  56. package/build/shared/charts/descriptors/knowledgeSearch/knowledgeSearchV2.js +1 -1
  57. package/build/shared/charts/descriptors/knowledgeSearch/searchExtractOutput.js +55 -20
  58. package/build/shared/charts/descriptors/logic/disableSlotFillers.js +1 -1
  59. package/build/shared/charts/descriptors/logic/enableSlotFillers.js +1 -1
  60. package/build/shared/charts/descriptors/logic/resetState.js +1 -0
  61. package/build/shared/charts/descriptors/logic/setState.js +2 -1
  62. package/build/shared/charts/descriptors/logic/setTranslation.js +3 -1
  63. package/build/shared/charts/descriptors/logic/switchLocale.js +1 -0
  64. package/build/shared/charts/descriptors/logic/think.js +3 -1
  65. package/build/shared/charts/descriptors/logic/thinkV2.js +113 -4
  66. package/build/shared/charts/descriptors/message/question/question.js +50 -5
  67. package/build/shared/charts/descriptors/message/question/utils/validateQuestionAnswer.js +4 -2
  68. package/build/shared/charts/descriptors/nlu/cleanText.js +2 -1
  69. package/build/shared/charts/descriptors/nlu/executeCognigyNLU.js +1 -1
  70. package/build/shared/charts/descriptors/nlu/fuzzySearch.js +24 -2
  71. package/build/shared/charts/descriptors/nlu/generativeSlotFiller/generativeSlotFiller.js +1 -1
  72. package/build/shared/charts/descriptors/nlu/generativeSlotFiller/generativeSlotFillerFallback.js +1 -1
  73. package/build/shared/charts/descriptors/nlu/generativeSlotFiller/generativeSlotFillerSuccess.js +1 -1
  74. package/build/shared/charts/descriptors/nlu/generativeSlotFiller/prompt.js +18 -9
  75. package/build/shared/charts/descriptors/nlu/matchPattern.js +1 -1
  76. package/build/shared/charts/descriptors/nlu/regexSlotFiller.js +1 -1
  77. package/build/shared/charts/descriptors/service/GPTConversation.js +1 -1
  78. package/build/shared/charts/descriptors/service/GPTPrompt.js +70 -41
  79. package/build/shared/charts/descriptors/service/LLMEntityExtract.js +12 -3
  80. package/build/shared/charts/descriptors/service/aiAgent/aiAgentHandover.js +92 -0
  81. package/build/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +1146 -0
  82. package/build/shared/charts/descriptors/service/aiAgent/aiAgentJobDefault.js +31 -0
  83. package/build/shared/charts/descriptors/service/aiAgent/aiAgentJobTool.js +139 -0
  84. package/build/shared/charts/descriptors/service/aiAgent/aiAgentToolAnswer.js +120 -0
  85. package/build/shared/charts/descriptors/service/aiAgent/helper.js +222 -0
  86. package/build/shared/charts/descriptors/service/handoverV2.js +140 -2
  87. package/build/shared/charts/descriptors/service/httpRequest.js +35 -2
  88. package/build/shared/charts/descriptors/service/index.js +11 -1
  89. package/build/shared/charts/descriptors/transcripts/addTranscriptStep.js +413 -0
  90. package/build/shared/charts/descriptors/transcripts/getTranscript.js +104 -0
  91. package/build/shared/charts/descriptors/transcripts/index.js +8 -0
  92. package/build/shared/charts/descriptors/voice/mappers/base.mapper.js +20 -0
  93. package/build/shared/charts/descriptors/voice/mappers/setSessionConfig.mapper.js +227 -72
  94. package/build/shared/charts/descriptors/voice/mappers/transfer.mapper.js +6 -9
  95. package/build/shared/charts/descriptors/voice/nodes/bargeIn.js +2 -0
  96. package/build/shared/charts/descriptors/voice/nodes/continuousAsr.js +5 -4
  97. package/build/shared/charts/descriptors/voice/nodes/dtmf.js +2 -0
  98. package/build/shared/charts/descriptors/voice/nodes/muteSpeechInput.js +1 -0
  99. package/build/shared/charts/descriptors/voice/nodes/noUserInput.js +2 -0
  100. package/build/shared/charts/descriptors/voice/nodes/sessionSpeechParameters.js +2 -0
  101. package/build/shared/charts/descriptors/voice/nodes/transfer.js +2 -0
  102. package/build/shared/charts/descriptors/voicegateway/nodes/callRecording.js +8 -1
  103. package/build/shared/charts/descriptors/voicegateway/nodes/handover.js +4 -2
  104. package/build/shared/charts/descriptors/voicegateway/nodes/hangup.js +4 -2
  105. package/build/shared/charts/descriptors/voicegateway/nodes/helper/utils.js +14 -0
  106. package/build/shared/charts/descriptors/voicegateway/nodes/playURL.js +4 -5
  107. package/build/shared/charts/descriptors/voicegateway/nodes/sendMessage.js +8 -1
  108. package/build/shared/charts/descriptors/voicegateway/nodes/sendMetaData.js +7 -3
  109. package/build/shared/charts/descriptors/voicegateway/nodes/setSessionParams.js +8 -1
  110. package/build/shared/charts/descriptors/voicegateway2/nodes/dtmf.js +2 -0
  111. package/build/shared/charts/descriptors/voicegateway2/nodes/hangup.js +2 -0
  112. package/build/shared/charts/descriptors/voicegateway2/nodes/muteSpeechInput.js +3 -0
  113. package/build/shared/charts/descriptors/voicegateway2/nodes/play.js +1 -0
  114. package/build/shared/charts/descriptors/voicegateway2/nodes/record.js +1 -0
  115. package/build/shared/charts/descriptors/voicegateway2/nodes/refer.js +1 -0
  116. package/build/shared/charts/descriptors/voicegateway2/nodes/sendMetadata.js +1 -0
  117. package/build/shared/charts/descriptors/voicegateway2/nodes/setSessionConfig.js +311 -10
  118. package/build/shared/charts/descriptors/voicegateway2/nodes/transfer.js +25 -9
  119. package/build/shared/charts/descriptors/voicegateway2/utils/helper.js +2 -2
  120. package/build/shared/charts/helpers/generativeAI/rephraseSentenceWithAi.js +4 -2
  121. package/build/shared/constants.js +16 -1
  122. package/build/shared/handoverClients/interfaces/THandoverEventType.js +2 -0
  123. package/build/shared/helper/logFullConfigToDebugMode.js +30 -0
  124. package/build/shared/helper/nlu/textCleaner.js +4 -2
  125. package/build/shared/interfaces/IEndpointSettings.js +3 -0
  126. package/build/shared/interfaces/IOrganisation.js +1 -0
  127. package/build/shared/interfaces/IProfile.js +2 -0
  128. package/build/shared/interfaces/IProfileSchema.js +4 -0
  129. package/build/shared/interfaces/analytics/IAnalyticsSourceData.js +22 -20
  130. package/build/shared/interfaces/{restAPI/resources/milestone/v2.0/IMilestone_2_0.js → analytics/IGoalAnalytics.js} +1 -1
  131. package/build/shared/interfaces/appsession/ISetAppStateOptions.js +3 -0
  132. package/build/shared/interfaces/appsession/ISetAppStateOverlaySettings.js +3 -0
  133. package/build/shared/interfaces/appsession/ISetAppStateOverlaySettingsMetaData.js +3 -0
  134. package/build/shared/interfaces/debugEvents/TDebugEventMessagePayload.js +3 -0
  135. package/build/shared/interfaces/debugEvents/TDebugEventType.js +2 -0
  136. package/build/shared/interfaces/generativeAI/IGenerativeAIModels.js +51 -70
  137. package/build/shared/interfaces/{restAPI/resources/journey/v2.0/IJourney_2_0.js → generativeAI/tools.js} +1 -1
  138. package/build/shared/interfaces/handover.js +45 -2
  139. package/build/shared/interfaces/license.js +3 -2
  140. package/build/shared/interfaces/messageAPI/endpoints.js +8 -1
  141. package/build/shared/interfaces/messageAPI/handover.js +22 -1
  142. package/build/shared/interfaces/resources/IAiAgent.js +52 -0
  143. package/build/shared/interfaces/resources/IAuditEvent.js +2 -1
  144. package/build/shared/interfaces/resources/IConnection.js +1 -0
  145. package/build/shared/interfaces/resources/IEndpoint.js +2 -2
  146. package/build/shared/interfaces/resources/{IMilestone.js → IGoal.js} +15 -15
  147. package/build/shared/interfaces/resources/ILargeLanguageModel.js +52 -4
  148. package/build/shared/interfaces/resources/INodeDescriptorSet.js +15 -0
  149. package/build/shared/interfaces/resources/TResourceType.js +16 -8
  150. package/build/shared/interfaces/resources/knowledgeStore/IKnowledgeSource.js +1 -1
  151. package/build/shared/interfaces/resources/settings/IAgentSettings.js +12 -7
  152. package/build/shared/interfaces/resources/settings/IGenerativeAISettings.js +9 -1
  153. package/build/shared/interfaces/resources/settings/IKnowledgeAISettings.js +18 -0
  154. package/build/shared/interfaces/resources/settings/index.js +4 -1
  155. package/build/shared/interfaces/restAPI/administration/organisations/v2.0/IReadCollectionsToBeDeletedRest_2_0.js +0 -1
  156. package/build/shared/interfaces/restAPI/administration/organisations/v2.0/IReadOrganisationKnowledgeChunksCountRest_2_0.js +3 -0
  157. package/build/shared/interfaces/restAPI/administration/user/v2.0/IGetPinnedResources_2_0.js +3 -0
  158. package/build/shared/interfaces/restAPI/{resources/journey/v2.0/IReadJourneyRest_2_0.js → administration/user/v2.0/IPinResourceRest_2_0.js} +1 -1
  159. package/build/shared/interfaces/restAPI/operations/nlu/v2.0/IGenerateNluScoresRest_2_0.js +57 -0
  160. package/build/shared/interfaces/restAPI/resources/aiAgent/v2.0/IAiAgentHiringTemplate_2_0.js +32 -0
  161. package/build/shared/interfaces/restAPI/resources/aiAgent/v2.0/IAiAgent_2_0.js +3 -0
  162. package/build/shared/interfaces/restAPI/resources/{journey/v2.0/IIndexJourneysRest_2_0.js → aiAgent/v2.0/ICreateAiAgentRest_2_0.js} +1 -1
  163. package/build/shared/interfaces/restAPI/resources/aiAgent/v2.0/IDeleteAiAgentRest_2_0.js +3 -0
  164. package/build/shared/interfaces/restAPI/resources/aiAgent/v2.0/IGetAiAgentHiringTemplates_2_0.js +3 -0
  165. package/build/shared/interfaces/restAPI/resources/{journey/v2.0/IJourneyStep_2_0.js → aiAgent/v2.0/IHireAiAgent_2_0.js} +1 -1
  166. package/build/shared/interfaces/restAPI/resources/{journey/v2.0/IJourneyIndexItem_2_0.js → aiAgent/v2.0/IIndexAiAgentRest_2_0.js} +1 -1
  167. package/build/shared/interfaces/restAPI/resources/{journey/v2.0/IJourneyProgress_2_0.js → aiAgent/v2.0/IReadAiAgentRest_2_0.js} +1 -1
  168. package/build/shared/interfaces/restAPI/resources/aiAgent/v2.0/IUpdateAiAgentRest_2_0.js +3 -0
  169. package/build/shared/interfaces/restAPI/resources/aiAgent/v2.0/IValidateAiAgentNameRest_2_0.js +3 -0
  170. package/build/shared/interfaces/restAPI/resources/goal/v2.0/ICloneGoalRest_2_0.js +3 -0
  171. package/build/shared/interfaces/restAPI/resources/goal/v2.0/ICreateGoalRest_2_0.js +3 -0
  172. package/build/shared/interfaces/restAPI/resources/goal/v2.0/IDeleteGoalRest_2_0.js +3 -0
  173. package/build/shared/interfaces/restAPI/resources/goal/v2.0/IGoalIndexItem_2_0.js +3 -0
  174. package/build/shared/interfaces/restAPI/resources/goal/v2.0/IGoalStepMetric_2_0.js +3 -0
  175. package/build/shared/interfaces/restAPI/resources/goal/v2.0/IGoalStep_2_0.js +3 -0
  176. package/build/shared/interfaces/restAPI/resources/goal/v2.0/IGoal_2_0.js +3 -0
  177. package/build/shared/interfaces/restAPI/resources/goal/v2.0/IIndexGoalsRest_2_0.js +3 -0
  178. package/build/shared/interfaces/restAPI/resources/goal/v2.0/IReadGoalRest_2_0.js +3 -0
  179. package/build/shared/interfaces/restAPI/resources/goal/v2.0/IUpdateGoalRest_2_0.js +3 -0
  180. package/build/shared/interfaces/restAPI/resources/goal/v2.0/index.js +3 -0
  181. package/build/shared/interfaces/restAPI/resources/largeLanguageModel/v2.0/IAvailableModelsForLLMProvider_2_0 .js +18 -0
  182. package/build/shared/interfaces/restAPI/resources/largeLanguageModel/v2.0/IGetAvailableModelsForLLMRest_2_0 .js +3 -0
  183. package/build/shared/interfaces/security/IPermission.js +6 -2
  184. package/build/shared/interfaces/security/IPinnedResource.js +3 -0
  185. package/build/shared/interfaces/security/IRole.js +2 -0
  186. package/build/shared/interfaces/security/ISystemCapabilities.js +3 -0
  187. package/build/shared/interfaces/security/index.js +1 -1
  188. package/build/shared/interfaces/trainer/ITrainerRecord.js +2 -2
  189. package/build/shared/interfaces/transcripts/transcripts.js +33 -0
  190. package/build/shared/interfaces/user.js +1 -1
  191. package/package.json +2 -2
  192. package/types/index.d.ts +2332 -1041
  193. package/build/shared/interfaces/journeys/IJourney.js +0 -83
  194. package/build/shared/interfaces/journeys/IJourneyProgress.js +0 -40
  195. package/build/shared/interfaces/journeys/IJourneyTrackEvent.js +0 -35
  196. package/build/shared/interfaces/journeys/index.js +0 -14
  197. package/build/shared/interfaces/license.js.map +0 -1
  198. package/build/shared/interfaces/restAPI/resources/journey/v2.0/IJourneyTrackEvents_2_0.js +0 -3
  199. package/build/shared/interfaces/restAPI/resources/journey/v2.0/IReadJourneyProgressRest_2_0.js +0 -3
  200. package/build/shared/interfaces/restAPI/resources/journey/v2.0/ITrackJourneyEventRest_2_0.js +0 -3
  201. package/build/shared/interfaces/restAPI/resources/journey/v2.0/IUpdateSelectedJourneyRest_2_0.js +0 -3
  202. package/build/shared/interfaces/restAPI/resources/knowledgeSearchIndex/v2.0/ICreateKnowledgeSearchIndexRest_2_0.js +0 -3
  203. package/build/shared/interfaces/restAPI/resources/knowledgeSearchIndex/v2.0/IDeleteKnowledgeSearchIndexRest_2_0.js +0 -3
  204. package/build/shared/interfaces/restAPI/resources/milestone/v2.0/ICloneMilestoneRest_2_0.js +0 -3
  205. package/build/shared/interfaces/restAPI/resources/milestone/v2.0/ICreateMilestoneRest_2_0.js +0 -3
  206. package/build/shared/interfaces/restAPI/resources/milestone/v2.0/IDeleteMilestoneRest_2_0.js +0 -3
  207. package/build/shared/interfaces/restAPI/resources/milestone/v2.0/IIndexMilestonesRest_2_0.js +0 -3
  208. package/build/shared/interfaces/restAPI/resources/milestone/v2.0/IMilestoneIndexItem_2_0.js +0 -3
  209. package/build/shared/interfaces/restAPI/resources/milestone/v2.0/IMilestoneStepMetric_2_0.js +0 -3
  210. package/build/shared/interfaces/restAPI/resources/milestone/v2.0/IMilestoneStep_2_0.js +0 -3
  211. package/build/shared/interfaces/restAPI/resources/milestone/v2.0/IReadMilestoneRest_2_0.js +0 -3
  212. package/build/shared/interfaces/restAPI/resources/milestone/v2.0/IUpdateMilestoneRest_2_0.js +0 -3
  213. package/build/test.js +0 -27
  214. /package/build/shared/interfaces/restAPI/{resources/journey/v2.0 → operations}/index.js +0 -0
  215. /package/build/shared/interfaces/restAPI/{resources/knowledgeSearchIndex → operations/nlu}/v2.0/index.js +0 -0
  216. /package/build/shared/interfaces/restAPI/resources/{milestone → aiAgent}/v2.0/index.js +0 -0
package/build/shared/charts/descriptors/service/GPTPrompt.js

@@ -28,9 +28,9 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
             params: {
                 multiline: true,
                 rows: 5,
-                required: true
+                required: false
             },
-            defaultValue: ""
+            defaultValue: undefined
         },
         {
             key: "chatTranscriptSteps",
@@ -112,7 +112,7 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
             label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__MAX_TOKENS__LABEL",
             type: "slider",
             description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__MAX_TOKENS__DESCRIPTION",
-            defaultValue: 100,
+            defaultValue: 1000,
             params: {
                 min: 1,
                 max: 4000,
@@ -195,7 +195,7 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
             key: "immediateOutput",
             label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__IMMEDIATEOUTPUT__LABEL",
             type: "toggle",
-            defaultValue: false,
+            defaultValue: true,
             condition: {
                 or: [
                     {
@@ -250,7 +250,18 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
             type: "textArray",
             label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__STREAM_STOP_TOKENS__LABEL",
             description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__STREAM_STOP_TOKENS__DESCRIPTION",
-            defaultValue: [".", "!", "?"],
+            defaultValue: [".", "!", "?", "\\n"],
+            condition: {
+                key: "storeLocation",
+                value: "stream",
+            }
+        },
+        {
+            key: "streamStopTokenOverrides",
+            type: "textArray",
+            label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__STREAM_STOP_TOKEN_OVERRIDES__LABEL",
+            description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__STREAM_STOP_TOKEN_OVERRIDES__DESCRIPTION",
+            defaultValue: ["\d+\."],
             condition: {
                 key: "storeLocation",
                 value: "stream",
@@ -396,6 +407,13 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
             type: "json",
             defaultValue: {}
         },
+        {
+            key: "logErrorToSystem",
+            label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__LOG_ERROR_TO_SYSTEM__LABEL",
+            description: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__FIELDS__LOG_ERROR_TO_SYSTEM__DESCRIPTION",
+            type: "toggle",
+            defaultValue: false,
+        },
         {
             key: "errorHandling",
             type: "select",
@@ -473,6 +491,7 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
                 "contextKey",
                 "immediateOutput",
                 "streamStopTokens",
+                "streamStopTokenOverrides",
                 "streamStoreCopyInInput",
                 "detailedResults"
             ]
@@ -482,28 +501,29 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
             label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__SECTIONS__ERROR_HANDLING__LABEL",
             defaultCollapsed: true,
             fields: [
+                "logErrorToSystem",
                 "errorHandling",
                 "errorMessage",
                 "errorHandlingGotoTarget",
             ]
         },
         {
-            key: "debugging",
-            label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__SECTIONS__DEBUGGING__LABEL",
+            key: "customOptions",
+            label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__SECTIONS__CUSTOM_OPTIONS__LABEL",
             defaultCollapsed: true,
             fields: [
-                "debugDescription",
-                "debugLogTokenCount",
-                "debugLogRequestAndCompletion"
+                "customModelOptions",
+                "customRequestOptions"
             ]
         },
         {
-            key: "customOptions",
-            label: "UI__NODE_EDITOR__SERVICE__GPT_PROMPT__SECTIONS__CUSTOM_OPTIONS__LABEL",
+            key: "debugging",
+            label: "UI__NODE_EDITOR__SECTIONS__DEBUG_SETTINGS__LABEL",
             defaultCollapsed: true,
             fields: [
-                "customModelOptions",
-                "customRequestOptions"
+                "debugDescription",
+                "debugLogTokenCount",
+                "debugLogRequestAndCompletion"
             ]
         }
     ],
@@ -515,16 +535,16 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
         { type: "section", key: "advanced" },
         { type: "section", key: "storage" },
         { type: "section", key: "errorHandling" },
+        { type: "section", key: "customOptions" },
         { type: "section", key: "debugging" },
-        { type: "section", key: "customOptions" }
     ],
     appearance: {},
-    tags: ["service", "llm", "gpt", "generative ai", "openai", "azure", "prompt"],
+    tags: ["ai", "llm", "gpt", "generative ai", "openai", "azure", "prompt"],
     function: async ({ cognigy, config, nodeId }) => {
-        var _a;
+        var _a, _b, _c;
         const { api, input } = cognigy;
-        const { temperature, maxTokens, topP, presencePenalty, frequencyPenalty, useStop, stop, storeLocation, contextKey, inputKey, timeout, streamStopTokens, debugLogTokenCount, debugLogRequestAndCompletion, llmProviderReferenceId, useChatMode, chatTranscriptSteps, responseFormat, streamStoreCopyInInput, detailedResults, seed, immediateOutput, customModelOptions, customRequestOptions, errorHandling = "continue", // default behavior for LLM Prompt node was, continue its execution even though an error occurred (deviating it from the SEO node) & do not output an error message on UI explicitly. However, error is always stored in the input or context object. We can use an extra "say" node to output it.
-        errorHandlingGotoTarget, errorMessage, } = config;
+        const { temperature, maxTokens, topP, presencePenalty, frequencyPenalty, useStop, stop, storeLocation, contextKey, inputKey, timeout, streamStopTokens, streamStopTokenOverrides, debugLogTokenCount, debugLogRequestAndCompletion, llmProviderReferenceId, useChatMode, chatTranscriptSteps, responseFormat, streamStoreCopyInInput, detailedResults, seed, immediateOutput, customModelOptions, customRequestOptions, errorHandling = "continue", // default behavior for LLM Prompt node was, continue its execution even though an error occurred (deviating it from the SEO node) & do not output an error message on UI explicitly. However, error is always stored in the input or context object. We can use an extra "say" node to output it.
+        errorHandlingGotoTarget, errorMessage, logErrorToSystem, } = config;
         let prompt = config.prompt;
         const { traceId } = input;
         // check if custom variables are used and if they have a length modifier
@@ -549,13 +569,17 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
         }
         // handle errors from external services, depending on the settings
         const handleServiceError = async (error) => {
-            var _a;
+            var _a, _b, _c;
             const compactError = {
                 name: error === null || error === void 0 ? void 0 : error.name,
                 code: error === null || error === void 0 ? void 0 : error.code,
                 message: (error === null || error === void 0 ? void 0 : error.message) || error
             };
-            if ((_a = error === null || error === void 0 ? void 0 : error.originalErrorDetails) === null || _a === void 0 ? void 0 : _a.code) {
+            // return the requestId if it exist in the error obj.
+            if ((_a = error === null || error === void 0 ? void 0 : error.meta) === null || _a === void 0 ? void 0 : _a.requestId) {
+                compactError["requestId"] = (_b = error === null || error === void 0 ? void 0 : error.meta) === null || _b === void 0 ? void 0 : _b.requestId;
+            }
+            if ((_c = error === null || error === void 0 ? void 0 : error.originalErrorDetails) === null || _c === void 0 ? void 0 : _c.code) {
                 compactError.code = error.originalErrorDetails.code;
             }
             const errorResponse = {
@@ -619,7 +643,9 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
                    }
                },
                streamStopTokens,
-                detailedResults,
+                streamStopTokenOverrides,
+                // set to true in order to get token usage
+                detailedResults: true,
                seed: Number(seed) ? Number(seed) : undefined,
                customModelOptions,
                customRequestOptions
@@ -641,25 +667,18 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
                debugPrompt = JSON.stringify(data["chat"]);
            }
            const response = await api.runGenerativeAIPrompt(data, "gptPromptNode");
-            // if we're in adminconsole, process debugging options
-            input.channel === "adminconsole" && (0, prompt_1.writeLLMDebugLogs)("LLM Prompt", debugPrompt, response, debugLogTokenCount, debugLogRequestAndCompletion, cognigy);
+            const isFollowSessionActive = api.getMetadata().isFollowSessionActive;
+            // if we're in adminconsole or following a session, process debugging options
+            (input.endpointType === "adminconsole" || isFollowSessionActive) && (0, prompt_1.writeLLMDebugLogs)("LLM Prompt", debugPrompt, response, debugLogTokenCount, debugLogRequestAndCompletion, cognigy);
+            let responseToStore;
            if (detailedResults) {
-                // if the api didn't return native usage results, compute them
-                if (!response.usage) {
-                    const promptContent = (response.messages) ? JSON.stringify(response.messages) : prompt;
-                    delete response.messages;
-                    const prompt_tokens = await api.countGPTTokens(promptContent);
-                    const completion_tokens = await api.countGPTTokens(response.result);
-                    response.usage = {
-                        prompt_tokens,
-                        completion_tokens,
-                        total_tokens: prompt_tokens + completion_tokens,
-                        calculation_method: "estimate"
-                    };
-                }
+                responseToStore = response;
+            }
+            else {
+                responseToStore = response.result;
            }
            if (storeLocation === "context") {
-                api.addToContext(contextKey, response, "simple");
+                api.addToContext(contextKey, responseToStore, "simple");
                // output result immediately if toggle is set
                if (immediateOutput) {
                    const resultToOutput = typeof ((response === null || response === void 0 ? void 0 : response.result) || response) === "object" ? JSON.stringify((response === null || response === void 0 ? void 0 : response.result) || response, undefined, 2) : (response === null || response === void 0 ? void 0 : response.result) || response;
@@ -668,7 +687,7 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
            }
            else if (storeLocation === "input" || (storeLocation === "stream" && streamStoreCopyInInput)) {
                // @ts-ignore
-                api.addToInput(inputKey, response);
+                api.addToInput(inputKey, responseToStore);
                // output result immediately if toggle is set and we're storing into input
                // this means we don't output the result again if we streamed
                if (storeLocation === "input" && immediateOutput) {
@@ -678,12 +697,22 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
            }
        }
        catch (error) {
-            const errorDetails = {
+            const errorDetailsBase = {
                name: error === null || error === void 0 ? void 0 : error.name,
                code: (error === null || error === void 0 ? void 0 : error.code) || (error === null || error === void 0 ? void 0 : error.httpStatusCode),
                message: (error === null || error === void 0 ? void 0 : error.message) || ((_a = error.originalErrorDetails) === null || _a === void 0 ? void 0 : _a.message),
-                originalErrorDetails: error === null || error === void 0 ? void 0 : error.originalErrorDetails
            };
+            const errorDetails = Object.assign(Object.assign({}, errorDetailsBase), { originalErrorDetails: error === null || error === void 0 ? void 0 : error.originalErrorDetails });
+            // return the requestId if it exist in the error obj.
+            if ((_b = error.meta) === null || _b === void 0 ? void 0 : _b.requestId) {
+                errorDetails["meta"] = {
+                    requestId: (_c = error.meta) === null || _c === void 0 ? void 0 : _c.requestId
+                };
+            }
+            if (logErrorToSystem) {
+                api.log("error", JSON.stringify(errorDetailsBase));
+            }
+            api.logDebugError(errorDetailsBase, "UI__DEBUG_MODE__LLM_PROMPT__ERROR");
            await handleServiceError(errorDetails);
            return;
        }
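
With this change the LLM Prompt node always sends `detailedResults: true` to the provider so token usage is returned, and the node's own `detailedResults` toggle now only decides whether the full response object or just `response.result` gets persisted; the 0.18.0 fallback that estimated usage via `api.countGPTTokens` is gone. A minimal sketch of the new storage decision, where `api`, `config`, and `response` are illustrative stand-ins rather than the real Cognigy runtime objects:

// Illustrative stand-ins (not the real Cognigy runtime objects).
const api = { addToContext: (key, value, mode) => console.log(`context.${key} =`, value) };
const config = { detailedResults: false, storeLocation: "context", contextKey: "llmResult" };
const response = { result: "Hello!", usage: { prompt_tokens: 12, completion_tokens: 3, total_tokens: 15 } };

// Mirrors the new logic: store the whole response (including usage) only when
// the detailedResults toggle is on; otherwise store just the text result.
const responseToStore = config.detailedResults ? response : response.result;
if (config.storeLocation === "context") {
    api.addToContext(config.contextKey, responseToStore, "simple");
}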
package/build/shared/charts/descriptors/service/LLMEntityExtract.js

@@ -201,7 +201,7 @@ exports.LLM_ENTITY_EXTRACT = (0, createNodeDescriptor_1.createNodeDescriptor)({
         { type: "section", key: "debugging" }
     ],
     appearance: {},
-    tags: ["service", "llm", "gpt", "generative ai", "openai", "azure", "prompt", "entity", "extract"],
+    tags: ["ai", "llm", "gpt", "generative ai", "openai", "azure", "prompt", "entity", "extract"],
     function: async ({ cognigy, config }) => {
         var _a, _b;
         const { api, input } = cognigy;
@@ -228,8 +228,7 @@ exports.LLM_ENTITY_EXTRACT = (0, createNodeDescriptor_1.createNodeDescriptor)({
            }
            options["chat"] = (0, prompt_1.createLastConversationChatObject)(cognigy.lastConversationEntries, (0, generativeAIPrompts_1.getLLMEntityExtractSystemMessage)(entityName, entityDescription, examples), 3, true);
            const response = await api.runGenerativeAIPrompt(options, "gptPromptNode");
-            // if we're in adminconsole, process debugging options
-            input.channel === "adminconsole" && (0, prompt_1.writeLLMDebugLogs)("LLM Entity Extract", prompt, response, debugLogTokenCount, debugLogRequestAndCompletion, cognigy);
+            (0, prompt_1.writeLLMDebugLogs)("LLM Entity Extract", prompt, response, debugLogTokenCount, debugLogRequestAndCompletion, cognigy);
            // find the entity in the response
            let result = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a[entityName];
            if (!result) {
@@ -248,10 +247,19 @@ exports.LLM_ENTITY_EXTRACT = (0, createNodeDescriptor_1.createNodeDescriptor)({
            }
            if (storeLocation === "context") {
                api.addToContext(contextKey, result, "simple");
+                if (result) {
+                    api.logDebugMessage(`context.${contextKey} = '${result}'`);
+                }
            }
            else if (storeLocation === "input") {
                // @ts-ignore
                api.addToInput(inputKey, result);
+                if (result) {
+                    api.logDebugMessage(`input.${inputKey} = '${result}'`);
+                }
+            }
+            if (!result) {
+                api.logDebugMessage(`UI__DEBUG_MODE__LLM_ENTITY_EXTRACT__MESSAGE`);
            }
        }
        catch (error) {
@@ -260,6 +268,7 @@ exports.LLM_ENTITY_EXTRACT = (0, createNodeDescriptor_1.createNodeDescriptor)({
                code: error.code,
                message: error.message || ((_b = error.originalErrorDetails) === null || _b === void 0 ? void 0 : _b.message),
            };
+            api.logDebugError(errorDetails);
            if (storeLocation === "context") {
                api.addToContext(contextKey, { error: errorDetails }, "simple");
            }
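
The Entity Extract node now surfaces its outcome in debug mode: it logs the stored key and value when an entity was found, and a localized notice when nothing could be extracted. A rough sketch of that branching, where `api`, `contextKey`, and `result` are placeholders chosen for illustration:

// Placeholder stub for the debug-logging call the node uses.
const api = { logDebugMessage: (msg) => console.log("[debug]", msg) };

const contextKey = "extractedEntity"; // hypothetical storage key
const result = undefined;             // pretend the LLM returned no entity

if (result) {
    api.logDebugMessage(`context.${contextKey} = '${result}'`);
} else {
    api.logDebugMessage("UI__DEBUG_MODE__LLM_ENTITY_EXTRACT__MESSAGE");
}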
package/build/shared/charts/descriptors/service/aiAgent/aiAgentHandover.js (new file)

@@ -0,0 +1,92 @@
+"use strict";
+/* Custom modules */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AI_AGENT_HANDOVER = void 0;
+/* Interfaces */
+const createNodeDescriptor_1 = require("../../../createNodeDescriptor");
+exports.AI_AGENT_HANDOVER = (0, createNodeDescriptor_1.createNodeDescriptor)({
+    type: "aiAgentHandover",
+    defaultLabel: "AI Agent Handover",
+    summary: "UI__NODE_EDITOR__SERVICE__AI_AGENT_HANDOVER__SUMMARY",
+    appearance: {
+        showIcon: false,
+        color: "#7F199B",
+    },
+    behavior: {
+        stopping: true
+    },
+    fields: [
+        {
+            key: "flowNode",
+            type: "flowNode",
+            label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_HANDOVER__FIELDS__FLOW_NODE__LABEL",
+            params: {
+                required: true
+            }
+        },
+        {
+            key: "parseIntents",
+            type: "toggle",
+            label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_HANDOVER__FIELDS__PARSE_INTENTS__LABEL",
+            description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_HANDOVER__FIELDS__PARSE_INTENTS__DESCRIPTION",
+            defaultValue: false,
+        },
+        {
+            key: "parseKeyphrases",
+            type: "toggle",
+            label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_HANDOVER__FIELDS__PARSE_KEYPHRASES__LABEL",
+            description: "UI__NODE_EDITOR__SERVICE__AI_AGENT_HANDOVER__FIELDS__PARSE_KEYPHRASES__DESCRIPTION",
+            defaultValue: false,
+        },
+    ],
+    sections: [
+        {
+            key: "advanced",
+            label: "UI__NODE_EDITOR__SERVICE__AI_AGENT_HANDOVER__SECTIONS__ADVANCED__LABEL",
+            defaultCollapsed: true,
+            fields: [
+                "parseIntents",
+                "parseKeyphrases",
+            ]
+        }
+    ],
+    form: [
+        { type: "field", key: "flowNode" },
+        { type: "section", key: "advanced" }
+    ],
+    preview: {
+        key: "flowNode",
+        type: "resource",
+    },
+    tags: ["ai", "aiAgent"],
+    function: async ({ cognigy, config, nodeId: thisNodeId }) => {
+        var _a, _b;
+        const { flowNode: { flow: flowId, node: nodeId }, parseIntents, parseKeyphrases } = config;
+        const { api } = cognigy;
+        if (!flowId) {
+            throw new Error("flowId is required");
+        }
+        if (!nodeId) {
+            throw new Error("nodeId is required");
+        }
+        if (!((_a = api.checkThink) === null || _a === void 0 ? void 0 : _a.call(api, thisNodeId))) {
+            api.resetNextNodes();
+            (_b = api.setThinkMarker) === null || _b === void 0 ? void 0 : _b.call(api, config.flowNode.flow);
+            // If Execution is to continue, execute Flow
+            await api.executeFlow({
+                flowNode: {
+                    flow: flowId,
+                    node: nodeId,
+                    isGoto: true,
+                },
+                absorbContext: true,
+                parseIntents,
+                parseKeyphrases
+            });
+        }
+        else {
+            throw new Error("Infinite Loop Detected");
+        }
+    },
+});
+//# sourceMappingURL=aiAgentHandover.js.map
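
The new AI Agent Handover node validates its target flow and node, guards against handover loops through `api.checkThink` and `api.setThinkMarker`, and then delegates to `api.executeFlow`. The sketch below imitates only that guard-and-delegate shape with local stand-ins; the real guard lives inside the Cognigy runtime, and the loop threshold used here is an arbitrary assumption, not something defined in this package:

// Local stand-in for the loop guard: reports a loop once the same node has
// been entered more than `limit` times in one execution (threshold assumed).
const visits = new Map();
function checkThink(nodeId, limit = 10) {
    const count = (visits.get(nodeId) || 0) + 1;
    visits.set(nodeId, count);
    return count > limit;
}

// Same branching as aiAgentHandover.js: hand over while the guard is quiet,
// fail loudly once a loop is detected. `api` is the node API object the
// runtime injects; here it only needs an executeFlow method.
async function handover(api, flowId, nodeId, thisNodeId) {
    if (!checkThink(thisNodeId)) {
        await api.executeFlow({
            flowNode: { flow: flowId, node: nodeId, isGoto: true },
            absorbContext: true,
        });
    } else {
        throw new Error("Infinite Loop Detected");
    }
}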