@cognigy/rest-api-client 2025.18.0 → 2025.19.0

This diff reflects the changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (37)
  1. package/CHANGELOG.md +12 -0
  2. package/build/authentication/AuthenticationAPI.js +1 -1
  3. package/build/authentication/OAuth2/OAuth2Authentication.js +12 -9
  4. package/build/connector/AxiosAdapter.js +6 -2
  5. package/build/shared/charts/descriptors/service/GPTPrompt.js +2 -2
  6. package/build/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +2 -2
  7. package/build/shared/charts/descriptors/service/llmPrompt/LLMPromptV2.js +2 -2
  8. package/build/shared/charts/descriptors/voice/mappers/transfer.mapper.js +20 -6
  9. package/build/shared/charts/descriptors/voicegateway2/nodes/transfer.js +39 -3
  10. package/build/shared/generativeAI/utils/generativeAIPrompts.js +17 -446
  11. package/build/shared/generativeAI/utils/prompts/flowGeneration.js +168 -0
  12. package/build/shared/generativeAI/utils/prompts/generateNodeOutput.js +39 -0
  13. package/build/shared/generativeAI/utils/prompts/intentSentenceGeneration.js +15 -0
  14. package/build/shared/generativeAI/utils/prompts/lexiconGeneration.js +22 -0
  15. package/build/shared/interfaces/generativeAI/IGenerativeAIModels.js +4 -0
  16. package/build/shared/interfaces/handover.js +3 -1
  17. package/build/shared/interfaces/messageAPI/endpoints.js +5 -2
  18. package/build/shared/interfaces/resources/INodeDescriptorSet.js +87 -77
  19. package/dist/esm/authentication/AuthenticationAPI.js +1 -1
  20. package/dist/esm/authentication/OAuth2/OAuth2Authentication.js +12 -9
  21. package/dist/esm/connector/AxiosAdapter.js +6 -2
  22. package/dist/esm/shared/charts/descriptors/service/GPTPrompt.js +1 -1
  23. package/dist/esm/shared/charts/descriptors/service/aiAgent/aiAgentJob.js +1 -1
  24. package/dist/esm/shared/charts/descriptors/service/llmPrompt/LLMPromptV2.js +1 -1
  25. package/dist/esm/shared/charts/descriptors/voice/mappers/transfer.mapper.js +20 -6
  26. package/dist/esm/shared/charts/descriptors/voicegateway2/nodes/transfer.js +39 -3
  27. package/dist/esm/shared/generativeAI/utils/generativeAIPrompts.js +16 -445
  28. package/dist/esm/shared/generativeAI/utils/prompts/flowGeneration.js +165 -0
  29. package/dist/esm/shared/generativeAI/utils/prompts/generateNodeOutput.js +36 -0
  30. package/dist/esm/shared/generativeAI/utils/prompts/intentSentenceGeneration.js +12 -0
  31. package/dist/esm/shared/generativeAI/utils/prompts/lexiconGeneration.js +19 -0
  32. package/dist/esm/shared/interfaces/generativeAI/IGenerativeAIModels.js +4 -0
  33. package/dist/esm/shared/interfaces/handover.js +3 -1
  34. package/dist/esm/shared/interfaces/messageAPI/endpoints.js +5 -2
  35. package/dist/esm/shared/interfaces/resources/INodeDescriptorSet.js +88 -78
  36. package/package.json +1 -1
  37. package/types/index.d.ts +1045 -1016
package/CHANGELOG.md CHANGED
@@ -1,3 +1,15 @@
+ # 2025.19.0
+ Released: September 16th, 2025
+
+ Released state of package up to date with Cognigy.AI v2025.19.0
+
+ # 2025.18.1
+ Released: September 04th, 2025
+
+ Released state of package up to date with Cognigy.AI v2025.18.0
+
+ - Changes for allowing cookies to be sent with authentication requests
+
  # 2025.18.0
  Released: September 02nd, 2025
 
package/build/authentication/AuthenticationAPI.js CHANGED
@@ -66,7 +66,7 @@ function AuthenticationAPI(instance) {
  },
  exchangeOneTimeTokenForRefreshToken: (_a, options) => {
  var { loginToken } = _a, args = __rest(_a, ["loginToken"]);
- return (0, GenericAPIFn_1.GenericAPIFn)(`/auth/exchangetoken?${(0, rest_1.stringifyQuery)({ loginToken })}`, "GET", self)(args, Object.assign({ withAuthentication: false }, options));
+ return (0, GenericAPIFn_1.GenericAPIFn)(`/auth/exchangetoken?${(0, rest_1.stringifyQuery)({ loginToken })}`, "GET", self)(args, Object.assign({ withAuthentication: false, withCredentials: true }, options));
  },
  generateManagementUIAuthToken: (args, options) => (0, GenericAPIFn_1.GenericAPIFn)("/new/management/auth/token", "POST", self)(args, options)
  };
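The one-time-token exchange now opts into sending credentials, which matches the changelog note about cookies on authentication requests. A minimal sketch of the underlying behaviour, assuming the request is ultimately issued through axios (as in the AxiosAdapter change further down); the function name and call shape are illustrative, not part of the package's public API:

```ts
// Illustrative only: axios attaches and accepts cookies on a request when
// `withCredentials` is true, which the exchange-token call above now sets.
import axios from "axios";

async function exchangeOneTimeTokenSketch(baseUrl: string, loginToken: string) {
    const response = await axios.request({
        method: "GET",
        url: `${baseUrl}/auth/exchangetoken?loginToken=${encodeURIComponent(loginToken)}`,
        withCredentials: true, // allow a session/refresh cookie to travel with the request
    });
    return response.data;
}
```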
package/build/authentication/OAuth2/OAuth2Authentication.js CHANGED
@@ -4,7 +4,6 @@ exports.OAuth2Authentication = void 0;
  /* Custom Modules */
  const OAuth2Error_1 = require("./OAuth2Error");
  const HttpStatusCode_1 = require("../../shared/helper/HttpStatusCode");
- const errors_1 = require("../../shared/errors");
  const IOAuth2ErrorResponse_1 = require("./IOAuth2ErrorResponse");
  const expiryBuffer = 8;
  exports.OAuth2Authentication = function (credentials, self) {
@@ -29,7 +28,8 @@ exports.OAuth2Authentication = function (credentials, self) {
  };
  return await request(formFields, {
  maxRetries: Infinity,
- retryDelay: () => 3000
+ retryDelay: () => 3000,
+ withCredentials: true
  });
  };
  const request = async (formFields, options) => {
@@ -84,6 +84,8 @@ exports.OAuth2Authentication = function (credentials, self) {
  OAuth2Authentication.prototype.login = async (parameters) => {
  const { clientId, clientSecret } = self.credentials;
  switch (parameters.type) {
+ // Password Grant is deprecated in OAuth2, but still supported
+ // for api and e2e tests
  case "password":
  {
  const { username, password, rememberMe, organisationId } = parameters;
@@ -103,6 +105,8 @@ exports.OAuth2Authentication = function (credentials, self) {
  break;
  case "refreshToken":
  {
+ // The refresh token from the parameters is to support older token granted via Password Grant flow
+ // TODO - remove this in future once PKCE fully takes over
  const { refreshToken } = parameters;
  const credentials = self.credentials;
  const result = await refreshTokenGrant({
@@ -115,17 +119,19 @@ exports.OAuth2Authentication = function (credentials, self) {
  break;
  case "authorizationCode":
  {
- const { code, redirectUri, codeVerifier } = parameters;
+ const { code, redirectUri, codeVerifier, rememberMe } = parameters;
  const formFields = {
  grant_type: "authorization_code",
  client_id: clientId,
  client_secret: clientSecret,
  code,
  redirect_uri: redirectUri,
+ rememberMe,
  code_verifier: codeVerifier
  };
  self.tokenData = await request(formFields, {
- maxRetries: 0
+ maxRetries: 0,
+ withCredentials: true
  });
  }
  break;
@@ -177,9 +183,6 @@ exports.OAuth2Authentication = function (credentials, self) {
  */
  if (isAccessTokenExpired(self.tokenData)) {
  const credentials = self.credentials;
- if (!self.tokenData.refresh_token) {
- throw new errors_1.UnauthorizedError("No RefreshToken provided.");
- }
  if (self.refreshTokenSingleton === null) {
  self.refreshTokenSingleton = new Promise((resolve, reject) => {
  refreshTokenGrant({
@@ -219,8 +222,7 @@ exports.OAuth2Authentication = function (credentials, self) {
  };
  OAuth2Authentication.prototype.logout = async () => {
  if ((self === null || self === void 0 ? void 0 : self.credentials.type) ===
- "OAuth2" &&
- self.tokenData.refresh_token) {
+ "OAuth2") {
  const httpAdapter = self.getHttpAdapter();
  const requestData = {
  method: "POST",
@@ -229,6 +231,7 @@ exports.OAuth2Authentication = function (credentials, self) {
  token: self.tokenData.refresh_token
  },
  maxRetries: 3,
+ withCredentials: true,
  retryDelay: () => 0
  };
  // reset tokenData to avoid multiple revoke calls
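Taken together, the OAuth2 changes (token, refresh and revoke requests sent with `withCredentials: true`, and the hard requirement on `tokenData.refresh_token` removed before refresh and logout) are consistent with the refresh token being carried in an HTTP-only cookie rather than only in the token response body. A sketch of what such a cookie-based refresh request could look like; the endpoint, field names and server behaviour here are assumptions, not taken from the package:

```ts
// Hypothetical cookie-based refresh-token grant: assumes the authorization
// server reads the refresh token from an HTTP-only cookie when it is absent
// from the form body.
import axios from "axios";

async function refreshViaCookieSketch(tokenUrl: string, clientId: string) {
    const form = new URLSearchParams({
        grant_type: "refresh_token",
        client_id: clientId,
        // no refresh_token field - it is expected to arrive as a cookie
    });
    const { data } = await axios.post(tokenUrl, form, {
        withCredentials: true, // forward the refresh-token cookie
        headers: { "Content-Type": "application/x-www-form-urlencoded" },
    });
    return data; // new access_token (and possibly a rotated cookie)
}
```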
package/build/connector/AxiosAdapter.js CHANGED
@@ -76,13 +76,14 @@ class AxiosAdapter {
  return this.request(Object.assign(Object.assign({}, request), { method: "HEAD" }), client);
  }
  async convertRequest(request, client) {
- var _a;
+ var _a, _b;
  const baseUrl = (_a = request.baseUrl) !== null && _a !== void 0 ? _a : this.config.baseUrl;
  const axiosRequest = {
  data: request.data,
  headers: request.headers,
  method: request.method || "GET",
  url: `${baseUrl}${request.url}`,
+ withCredentials: (_b = request.withCredentials) !== null && _b !== void 0 ? _b : false,
  validateStatus: (status) => !(0, isRetryableStatus_1.isRetryableStatus)(status)
  };
  if (typeof request.onProgress === "function") {
@@ -116,7 +117,7 @@
  }
  }
  if (isAxiosResponse(axiosResponse)) {
- const errorClass = errors_1.ErrorCollection[(_b = axiosResponse.data) === null || _b === void 0 ? void 0 : _b.code] ||
+ let errorClass = errors_1.ErrorCollection[(_b = axiosResponse.data) === null || _b === void 0 ? void 0 : _b.code] ||
  errors_1.ErrorCollection[axiosResponse.status] ||
  errors_1.ErrorCollection[errors_1.ErrorCode.INTERNAL_SERVER_ERROR];
  if ((axiosResponse.status === HttpStatusCode_1.HttpStatusCode.UNAUTHORIZED ||
@@ -125,6 +126,9 @@
  typeof this.config.onUnauthorized === "function") {
  this.config.onUnauthorized();
  }
+ if (axiosResponse.status === HttpStatusCode_1.HttpStatusCode.CONFLICT) {
+ errorClass = errors_1.ErrorCollection[errors_1.ErrorCode.CONFLICT_ERROR];
+ }
  if (IOAuth2ErrorResponse_1.OAuth2Errors.includes((_d = axiosResponse.data) === null || _d === void 0 ? void 0 : _d.error)) {
  throw new errorClass(axiosResponse.data.detail, { traceId: axiosResponse.data.traceId }, undefined, axiosResponse.data);
  }
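Two behavioural changes in the adapter: requests default to `withCredentials: false` unless the caller opts in, and HTTP 409 responses are now always mapped to the conflict error class regardless of the error code in the response body. A condensed sketch of the resulting error-class selection order, using placeholder types in place of the package's internal `ErrorCollection`/`ErrorCode` structures:

```ts
// Sketch only: mirrors the selection order above (body code, then HTTP status,
// then a fallback), with a 409 status overriding all of them.
type ErrorCtor = new (message?: string) => Error;

function pickErrorClass(
    errorCollection: Record<string | number, ErrorCtor>,
    response: { status: number; data?: { code?: string } },
    conflictCode: string,   // stand-in for ErrorCode.CONFLICT_ERROR
    fallbackCode: string,   // stand-in for ErrorCode.INTERNAL_SERVER_ERROR
): ErrorCtor {
    let errorClass =
        errorCollection[response.data?.code ?? ""] ||
        errorCollection[response.status] ||
        errorCollection[fallbackCode];
    if (response.status === 409) {
        // HTTP 409 now always resolves to the conflict error class.
        errorClass = errorCollection[conflictCode];
    }
    return errorClass;
}
```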
package/build/shared/charts/descriptors/service/GPTPrompt.js CHANGED
@@ -4,7 +4,7 @@ exports.GPT_PROMPT = void 0;
  /* Custom modules */
  const createNodeDescriptor_1 = require("../../createNodeDescriptor");
  const logic_1 = require("../logic");
- const crypto_1 = require("crypto");
+ const uuid_1 = require("uuid");
  const prompt_1 = require("../nlu/generativeSlotFiller/prompt");
  const errors_1 = require("../../../errors");
  const transcripts_1 = require("../../../interfaces/transcripts/transcripts");
@@ -660,7 +660,7 @@ exports.GPT_PROMPT = (0, createNodeDescriptor_1.createNodeDescriptor)({
  };
  try {
  const isStreamingChannel = input.channel === "webchat3" || input.channel === "adminconsole";
- const _messageId = (0, crypto_1.randomUUID)();
+ const _messageId = (0, uuid_1.v4)();
  const data = {
  prompt,
  temperature,
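GPTPrompt, and the aiAgentJob and LLMPromptV2 nodes below, swap Node's built-in `crypto.randomUUID()` for the `uuid` package when minting message IDs. Both produce RFC 4122 version-4 UUIDs; the `uuid` package also works in runtimes where `crypto.randomUUID` is unavailable. A small sketch of the equivalence (not taken from the package):

```ts
// Both calls yield an RFC 4122 v4 UUID string such as
// "9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d"; only the dependency differs.
import { randomUUID } from "crypto"; // Node built-in (>= 14.17)
import { v4 as uuidv4 } from "uuid"; // portable npm package

const idFromNodeCrypto = randomUUID();
const idFromUuidPackage = uuidv4();
```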
package/build/shared/charts/descriptors/service/aiAgent/aiAgentJob.js CHANGED
@@ -14,7 +14,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.AI_AGENT_JOB = exports.AI_AGENT_TOOLS_WHITELIST = void 0;
  /* Custom modules */
  const createNodeDescriptor_1 = require("../../../createNodeDescriptor");
- const crypto_1 = require("crypto");
  const setSessionConfig_mapper_1 = require("../../voice/mappers/setSessionConfig.mapper");
  const setSessionConfig_mapper_2 = require("../../voice/mappers/setSessionConfig.mapper");
  const logFullConfigToDebugMode_1 = require("../../../../helper/logFullConfigToDebugMode");
@@ -22,6 +21,7 @@ const createSystemMessage_1 = require("./helpers/createSystemMessage");
  const generateSearchPrompt_1 = require("./helpers/generateSearchPrompt");
  const getUserMemory_1 = require("./helpers/getUserMemory");
  const createToolDefinitions_1 = require("./helpers/createToolDefinitions");
+ const uuid_1 = require("uuid");
  const transcripts_1 = require("../../../../interfaces/transcripts/transcripts");
  exports.AI_AGENT_TOOLS_WHITELIST = ["aiAgentJobDefault", "aiAgentJobTool", "aiAgentJobMCPTool"];
  exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
@@ -1163,7 +1163,7 @@ exports.AI_AGENT_JOB = (0, createNodeDescriptor_1.createNodeDescriptor)({
  transcript[transcript.length - 1].payload.text = enhancedInput;
  }
  const isStreamingChannel = input.channel === "webchat3" || input.channel === "adminconsole";
- const _messageId = (0, crypto_1.randomUUID)();
+ const _messageId = (0, uuid_1.v4)();
  const llmPromptOptions = Object.assign(Object.assign({ prompt: "", chat: systemMessage,
  // Temp fix to override the transcript if needed
  transcript: ((_0 = context === null || context === void 0 ? void 0 : context._cognigy) === null || _0 === void 0 ? void 0 : _0.transcript) ? [...context._cognigy.transcript] : transcript, detailedResults: true, timeoutInMs: timeoutInMs !== null && timeoutInMs !== void 0 ? timeoutInMs : 8000, maxTokens: maxTokens !== null && maxTokens !== void 0 ? maxTokens : 4000, temperature: temperature !== null && temperature !== void 0 ? temperature : 0.7, topP: 1, frequencyPenalty: 0, presencePenalty: 0, responseFormat: "text", stream: storeLocation === "stream", streamOnDataHandler: (text) => {
package/build/shared/charts/descriptors/service/llmPrompt/LLMPromptV2.js CHANGED
@@ -15,8 +15,8 @@ exports.LLM_PROMPT_V2 = void 0;
  /* Custom modules */
  const createNodeDescriptor_1 = require("../../../createNodeDescriptor");
  const logic_1 = require("../../logic");
- const crypto_1 = require("crypto");
  const createToolDefinitions_1 = require("../aiAgent/helpers/createToolDefinitions");
+ const uuid_1 = require("uuid");
  const prompt_1 = require("../../nlu/generativeSlotFiller/prompt");
  const errors_1 = require("../../../../errors");
  const transcripts_1 = require("../../../../interfaces/transcripts/transcripts");
@@ -736,7 +736,7 @@ exports.LLM_PROMPT_V2 = (0, createNodeDescriptor_1.createNodeDescriptor)({
  };
  try {
  const isStreamingChannel = input.channel === "webchat3" || input.channel === "adminconsole";
- const _messageId = (0, crypto_1.randomUUID)();
+ const _messageId = (0, uuid_1.v4)();
  // Start measuring LLM latency and time to first output if debug flag is enabled
  let firstOutputTime = null;
  /**
package/build/shared/charts/descriptors/voice/mappers/transfer.mapper.js CHANGED
@@ -5,7 +5,7 @@ exports.prepareTransferParams = exports.transfer = void 0;
  const helper_1 = require("../../../descriptors/voicegateway2/utils/helper");
  const helper_2 = require("../utils/helper");
  exports.transfer = {
- handleInput(endpointType, params, isGenericNode = false, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, anchorMedia) {
+ handleInput(endpointType, params, isGenericNode = false, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, mediaPath, anchorMedia) {
  try {
  switch (endpointType) {
  case "bandwidth":
@@ -24,14 +24,14 @@ exports.transfer = {
  return this.handleAudioCodesInput((0, exports.prepareTransferParams)(params), endpointType);
  case "voiceGateway2":
  default:
- return this.handleVGInput((0, exports.prepareTransferParams)(params), recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, anchorMedia);
+ return this.handleVGInput((0, exports.prepareTransferParams)(params), recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, mediaPath, anchorMedia);
  }
  }
  catch (error) {
  throw Error(error.message);
  }
  },
- handleVGInput(transferParam, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, anchorMedia) {
+ handleVGInput(transferParam, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, mediaPath, anchorMedia) {
  const { transferType, transferTarget, transferReason, referredBy, useTransferSipHeaders, transferSipHeaders, dialMusic, dialTranscriptionWebhook, dialCallerId, amdEnabled, amdRedirectOnMachineDetected, amdRedirectText, dialTimeout, timeLimit, sttLabel } = transferParam;
  const payload = {
  _voiceGateway2: {
@@ -55,6 +55,19 @@ exports.transfer = {
  if (timeLimit && timeLimit > 0) {
  dialVerb.timeLimit = timeLimit;
  }
+ if (process.env.FEATURE_DISABLE_VG_MEDIA_PATH === "true") {
+ if (typeof anchorMedia === "boolean") {
+ dialVerb.anchorMedia = anchorMedia;
+ }
+ }
+ else {
+ if (mediaPath) {
+ dialVerb.mediaPath = mediaPath;
+ }
+ else if (typeof anchorMedia === "boolean") {
+ dialVerb.mediaPath = anchorMedia ? "fullMedia" : "partialMedia";
+ }
+ }
  if (amdEnabled) {
  dialVerb.amd = {
  actionHook: "amd"
@@ -75,6 +88,9 @@ exports.transfer = {
  };
  /* By default we set the target to phone */
  dialVerb.target = [phoneTarget];
+ if (process.env.FEATURE_DISABLE_VG_MEDIA_PATH === "true") {
+ delete dialVerb.mediaPath;
+ }
  /* If targets includes an @ we set the target to sip */
  if (transferTarget === null || transferTarget === void 0 ? void 0 : transferTarget.includes("@")) {
  dialVerb.target = [sipTarget];
@@ -137,12 +153,10 @@ exports.transfer = {
  }
  dialVerb.callerId = dialCallerId;
  }
- if (anchorMedia) {
- dialVerb.anchorMedia = anchorMedia;
- }
  payload._voiceGateway2.json["dial"] = dialVerb;
  break;
  case "refer":
+ mediaPath = undefined;
  default:
  const referVerb = {
  "referTo": (0, helper_1.cleanTarget)(transferTarget, false),
package/build/shared/charts/descriptors/voicegateway2/nodes/transfer.js CHANGED
@@ -81,6 +81,33 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
  value: "dial"
  }
  },
+ {
+ key: "mediaPath",
+ label: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__MEDIA_PATH__LABEL",
+ type: "select",
+ description: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__MEDIA_PATH__DESCRIPTION",
+ defaultValue: "partialMedia",
+ params: {
+ options: [
+ {
+ label: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__MEDIA_PATH__OPTIONS__FULL_MEDIA__LABEL",
+ value: "fullMedia"
+ },
+ {
+ label: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__MEDIA_PATH__OPTIONS__PARTIAL_MEDIA__LABEL",
+ value: "partialMedia"
+ },
+ {
+ label: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__MEDIA_PATH__OPTIONS__NO_MEDIA__LABEL",
+ value: "noMedia"
+ }
+ ]
+ },
+ condition: {
+ key: "transferType",
+ value: "dial"
+ }
+ },
  {
  key: "useTransferSipHeaders",
  label: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__FIELDS__USE_TRANSFER_SIP_HEADERS__LABEL",
@@ -462,7 +489,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
  defaultCollapsed: true,
  fields: [
  "referredBy",
- "anchorMedia",
+ process.env.FEATURE_DISABLE_VG_MEDIA_PATH === "true" ? "anchorMedia" : "mediaPath",
  "useTransferSipHeaders",
  "transferSipHeaders",
  "enableAnsweringMachineDetection"
@@ -526,7 +553,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
  summary: "UI__NODE_EDITOR__VOICEGATEWAY2__TRANSFER__SUMMARY",
  function: async ({ cognigy, config, }) => {
  const { api, input } = cognigy;
- const { transferType, transferTarget, referredBy, anchorMedia, useTransferSipHeaders, transferSipHeaders = {}, transferReason, dialMusic, dialTranscriptionWebhook, dialCallerId, recognitionChannel, sttVendor, sttLanguage, sttDisablePunctuation, dialTimeout, enableTimeLimit, timeLimit, amdEnabled, amdRedirectOnMachineDetected, amdRedirectText, sttLabel, googleModel, sttDeepgramModel, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, agentAssistEnabled, agentAssistHeadersKey = customHeaderDefaultValue } = config;
+ const { transferType, transferTarget, referredBy, mediaPath, useTransferSipHeaders, transferSipHeaders = {}, transferReason, dialMusic, dialTranscriptionWebhook, dialCallerId, recognitionChannel, sttVendor, sttLanguage, sttDisablePunctuation, dialTimeout, enableTimeLimit, timeLimit, amdEnabled, amdRedirectOnMachineDetected, amdRedirectText, sttLabel, googleModel, sttDeepgramModel, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, agentAssistEnabled, agentAssistHeadersKey = customHeaderDefaultValue, anchorMedia } = config;
  const transferParams = {
  transferType,
  transferReason,
@@ -544,6 +571,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
  timeLimit,
  sttLabel,
  };
+ let media = mediaPath;
  try {
  if (input.channel === "adminconsole") {
  let textWarningAdminChannel = "Transferring a call is not supported in the Interaction Panel, please use the VoiceGateway endpoint.";
@@ -556,6 +584,14 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
  if (!enableTimeLimit) {
  delete transferParams.timeLimit;
  }
+ if (transferType === "dial" && typeof anchorMedia === "boolean" && (mediaPath === null || mediaPath === undefined) && process.env.FEATURE_DISABLE_VG_MEDIA_PATH === "false") {
+ if (anchorMedia) {
+ media = "fullMedia";
+ }
+ else {
+ media = "partialMedia";
+ }
+ }
  if (agentAssistEnabled && dialTranscriptionWebhook) {
  try {
  const agentAssistConfigId = api.getAgentAssistConfigId();
@@ -591,7 +627,7 @@ exports.transferNode = (0, createNodeDescriptor_1.createNodeDescriptor)({
  transferParams.useTransferSipHeaders = false;
  api.log("error", "Invalid JSON in Transfer SIP Headers");
  }
- const payload = transfer_mapper_1.transfer.handleInput("voiceGateway2", transferParams, false, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, anchorMedia);
+ const payload = transfer_mapper_1.transfer.handleInput("voiceGateway2", transferParams, false, recognitionChannel, sttVendor, sttLanguage, googleModel, sttDeepgramModel, sttDisablePunctuation, deepgramEndpointing, deepgramEndpointingValue, deepgramSmartFormatting, media, anchorMedia);
  await api.say(null, {
  _cognigy: payload,
  });