rasa-pro 3.11.8__py3-none-any.whl → 3.11.10__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
Potentially problematic release: this version of rasa-pro has been flagged as potentially problematic.
- rasa/cli/inspect.py +8 -4
- rasa/core/channels/inspector/dist/assets/{arc-62ea6ecb.js → arc-f09fea11.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{blockDiagram-38ab4fdb-133584f2.js → blockDiagram-38ab4fdb-95518007.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{c4Diagram-3d4e48cf-3fdd847f.js → c4Diagram-3d4e48cf-c91a4a08.js} +1 -1
- rasa/core/channels/inspector/dist/assets/channel-cc7720dc.js +1 -0
- rasa/core/channels/inspector/dist/assets/{classDiagram-70f12bd4-fbbe018c.js → classDiagram-70f12bd4-27f7869b.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{classDiagram-v2-f2320105-a4eb680a.js → classDiagram-v2-f2320105-1ab94cdb.js} +1 -1
- rasa/core/channels/inspector/dist/assets/clone-3688e1f7.js +1 -0
- rasa/core/channels/inspector/dist/assets/{createText-2e5e7dd3-a0a4811e.js → createText-2e5e7dd3-a7900089.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{edges-e0da2a9e-d6c66181.js → edges-e0da2a9e-3d5b2697.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{erDiagram-9861fffd-f2062a78.js → erDiagram-9861fffd-443cc11b.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{flowDb-956e92f1-1a6bd8c6.js → flowDb-956e92f1-8a6f8c52.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{flowDiagram-66a62f08-8c64ef56.js → flowDiagram-66a62f08-06a0b4f3.js} +1 -1
- rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-5055ec2d.js +1 -0
- rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-4a651766-b16259fa.js → flowchart-elk-definition-4a651766-7a01e0b5.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{ganttDiagram-c361ad54-3bef87d8.js → ganttDiagram-c361ad54-5f1289f2.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-72cf32ee-c0776679.js → gitGraphDiagram-72cf32ee-44409666.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{graph-af24022c.js → graph-3c393c89.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{index-3862675e-1f1f2ddf.js → index-3862675e-4d0c4142.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{index-e799a83e.js → index-b208b2c3.js} +30 -30
- rasa/core/channels/inspector/dist/assets/{infoDiagram-f8f76790-c5d562c0.js → infoDiagram-f8f76790-ae0fa7ff.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{journeyDiagram-49397b02-8b3f9070.js → journeyDiagram-49397b02-5c3b08cc.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{layout-cc1e3a25.js → layout-b24c95cb.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{line-7f6d1f25.js → line-999a77c5.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{linear-4bacd66e.js → linear-81a792fd.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{mindmap-definition-fc14e90a-2926a2f0.js → mindmap-definition-fc14e90a-c574f712.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{pieDiagram-8a3498a8-05bf892e.js → pieDiagram-8a3498a8-1919891d.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{quadrantDiagram-120e2f19-f700d7d2.js → quadrantDiagram-120e2f19-26e43d09.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{requirementDiagram-deff3bca-6eb3541f.js → requirementDiagram-deff3bca-f4b22985.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{sankeyDiagram-04a897e0-a47a81ed.js → sankeyDiagram-04a897e0-b957b472.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{sequenceDiagram-704730f1-cf1ccf9f.js → sequenceDiagram-704730f1-1d8ca073.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{stateDiagram-587899a1-405950fc.js → stateDiagram-587899a1-c67b1b71.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-d93cdb3a-23d8e35b.js → stateDiagram-v2-d93cdb3a-ee820f55.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{styles-6aaf32cf-08c526c0.js → styles-6aaf32cf-b162bdf3.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{styles-9a916d00-8062abc7.js → styles-9a916d00-67a7b254.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{styles-c10674c1-0c776ed5.js → styles-c10674c1-81a8ac73.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{svgDrawCommon-08f97a94-bb5daba8.js → svgDrawCommon-08f97a94-ede42905.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{timeline-definition-85554ec2-83421f60.js → timeline-definition-85554ec2-b0f41635.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{xychartDiagram-e933f94c-dad4ea79.js → xychartDiagram-e933f94c-d715dfb0.js} +1 -1
- rasa/core/channels/inspector/dist/index.html +1 -1
- rasa/core/channels/inspector/src/App.tsx +3 -2
- rasa/core/channels/voice_ready/audiocodes.py +34 -17
- rasa/shared/constants.py +1 -0
- rasa/shared/core/flows/flow.py +122 -126
- rasa/shared/providers/_utils.py +84 -0
- rasa/shared/providers/llm/_base_litellm_client.py +5 -3
- rasa/shared/providers/llm/azure_openai_llm_client.py +6 -65
- rasa/shared/providers/router/_base_litellm_router_client.py +55 -1
- rasa/validator.py +1 -2
- rasa/version.py +1 -1
- {rasa_pro-3.11.8.dist-info → rasa_pro-3.11.10.dist-info}/METADATA +3 -3
- {rasa_pro-3.11.8.dist-info → rasa_pro-3.11.10.dist-info}/RECORD +55 -55
- rasa/core/channels/inspector/dist/assets/channel-6a3b6c3b.js +0 -1
- rasa/core/channels/inspector/dist/assets/clone-243bdc4d.js +0 -1
- rasa/core/channels/inspector/dist/assets/flowDiagram-v2-96b9c2cf-2fc14195.js +0 -1
- {rasa_pro-3.11.8.dist-info → rasa_pro-3.11.10.dist-info}/NOTICE +0 -0
- {rasa_pro-3.11.8.dist-info → rasa_pro-3.11.10.dist-info}/WHEEL +0 -0
- {rasa_pro-3.11.8.dist-info → rasa_pro-3.11.10.dist-info}/entry_points.txt +0 -0
rasa/core/channels/voice_ready/audiocodes.py
CHANGED

@@ -139,6 +139,12 @@ class Conversation:
             structlogger.warning(
                 "audiocodes.handle.activities.duplicate_activity",
                 activity_id=activity[ACTIVITY_ID_KEY],
+                event_info=(
+                    "Audiocodes might send duplicate activities if the bot has not "
+                    "responded to the previous one or responded too late. Please "
+                    "consider enabling the `use_websocket` option to use"
+                    " Audiocodes Asynchronous API."
+                ),
             )
             continue
         self.activity_ids.append(activity[ACTIVITY_ID_KEY])
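The guard that triggers this warning is a plain seen-ids check. As a minimal standalone illustration of that pattern (the function name, dictionary keys and the print call are illustrative, not the channel's actual API):

    from typing import Dict, List

    def filter_new_activities(activities: List[Dict], seen_ids: List[str]) -> List[Dict]:
        # Skip activities whose id was already processed; remember the rest.
        fresh = []
        for activity in activities:
            if activity["id"] in seen_ids:
                # Audiocodes may resend an activity when the bot answers too late.
                print(f"duplicate activity skipped: {activity['id']}")
                continue
            seen_ids.append(activity["id"])
            fresh.append(activity)
        return fresh

    print(filter_new_activities([{"id": "a1"}, {"id": "a1"}], seen_ids=[]))
    # [{'id': 'a1'}]  -- the resent copy is dropped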
@@ -390,30 +396,41 @@ class AudiocodesInput(InputChannel):
                     "audiocodes.on_activities.no_conversation", request=request.json
                 )
                 return response.json({})
-
+
+            if self.use_websocket:
+                # send an empty response for this request
+                # activities are processed in the background
+                # chat response is sent via the websocket
                 ac_output: Union[WebsocketOutput, AudiocodesOutput] = WebsocketOutput(
                     conversation.ws, conversation_id
                 )
-
-
-
-
-
+                self._create_task(
+                    conversation_id,
+                    conversation.handle_activities(
+                        request.json,
+                        input_channel_name=self.name(),
+                        output_channel=ac_output,
+                        on_new_message=on_new_message,
+                    ),
+                )
+                return response.json({})
+
+            # without websockets, this becomes a blocking call
+            # and the response is sent back to the Audiocodes server
+            # after the activities are processed
+            ac_output = AudiocodesOutput()
+            await conversation.handle_activities(
+                request.json,
+                input_channel_name=self.name(),
+                output_channel=ac_output,
+                on_new_message=on_new_message,
+            )
+            return response.json(
+                {
                     "conversation": conversation_id,
                     "activities": ac_output.messages,
                 }
-
-            # start a background task to handle activities
-            self._create_task(
-                conversation_id,
-                conversation.handle_activities(
-                    request.json,
-                    input_channel_name=self.name(),
-                    output_channel=ac_output,
-                    on_new_message=on_new_message,
-                ),
             )
-            return response.json(response_json)

         @ac_webhook.route(
             "/conversation/<conversation_id>/disconnect", methods=["POST"]
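The rework above gives the webhook two response modes. Below is a simplified, self-contained sketch of the control flow in plain asyncio (the function names and payload shape are illustrative, not the channel's real API): in websocket mode the activities are handed to a background task and the HTTP response is an immediate empty body; otherwise the handler awaits processing and returns the collected replies.

    import asyncio
    from typing import Dict, List

    async def handle_activities(payload: Dict, output: List[str]) -> None:
        # Stand-in for the channel's real processing of incoming activities.
        await asyncio.sleep(0.1)
        output.append(f"reply to {payload['id']}")

    async def on_activities(payload: Dict, use_websocket: bool) -> Dict:
        output: List[str] = []
        if use_websocket:
            # Fire and forget: the HTTP response is an empty body and the
            # replies are pushed later (over the websocket in the real channel).
            asyncio.create_task(handle_activities(payload, output))
            return {}
        # Blocking mode: processing finishes before the HTTP response is built.
        await handle_activities(payload, output)
        return {"activities": output}

    print(asyncio.run(on_activities({"id": "a1"}, use_websocket=False)))
    # {'activities': ['reply to a1']}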
rasa/shared/constants.py
CHANGED

@@ -220,6 +220,7 @@ EXTRA_PARAMETERS_KEY = "extra_parameters"
 MODEL_GROUP_ID_KEY = "model_group_id"
 MODEL_LIST_KEY = "model_list"
 LITELLM_PARAMS_KEY = "litellm_params"
+_VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY = "missing_keys"
 
 LLM_API_HEALTH_CHECK_ENV_VAR = "LLM_API_HEALTH_CHECK"
 LLM_API_HEALTH_CHECK_DEFAULT_VALUE = "false"
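The new `_VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY` constant names the `missing_keys` entry in the dictionary returned by litellm's `validate_environment`, which the provider clients further down use to fail fast when credentials are missing. A minimal sketch of that lookup, assuming litellm is installed (the model name is illustrative):

    from litellm import validate_environment

    _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY = "missing_keys"

    # validate_environment reports which environment variables the given model
    # would need for an API call, e.g. OPENAI_API_KEY for OpenAI models.
    validation_info = validate_environment(model="gpt-4o")
    missing = validation_info.get(_VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY, [])
    if missing:
        raise RuntimeError(f"Environment variables {missing} not set. Required for API calls.")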
rasa/shared/core/flows/flow.py
CHANGED

@@ -22,10 +22,11 @@ from rasa.shared.core.flows.flow_step_links import (
 from rasa.shared.core.flows.flow_step_sequence import FlowStepSequence
 from rasa.shared.core.flows.nlu_trigger import NLUTriggers
 from rasa.shared.core.flows.steps import (
+    ActionFlowStep,
+    CallFlowStep,
     CollectInformationFlowStep,
     EndFlowStep,
     StartFlowStep,
-    ActionFlowStep,
 )
 from rasa.shared.core.flows.steps.constants import (
     CONTINUE_STEP_PREFIX,
@@ -402,161 +403,156 @@ class Flow:
         and a set of visited step IDs to prevent revisiting steps.
         It calls `go_over_steps` to recursively explore and fill the paths list.
         """
-
-
+        all_paths = FlowPathsList(self.id, paths=[])
+        start_step: FlowStep = self.first_step_in_flow()
         current_path: FlowPath = FlowPath(flow=self.id, nodes=[])
-
-
-        self._go_over_steps(steps, current_path, flow_paths_list, step_ids_visited)
+        visited_step_ids: Set[str] = set()
 
-
-        flow_paths_list.paths.append(copy.deepcopy(current_path))
+        self._go_over_steps(start_step, current_path, all_paths, visited_step_ids)
 
         structlogger.debug(
             "shared.core.flows.flow.extract_all_paths",
             comment="Extraction complete",
-            number_of_paths=len(
+            number_of_paths=len(all_paths.paths),
             flow_name=self.name,
         )
-        return
+        return all_paths
 
     def _go_over_steps(
         self,
-
+        current_step: FlowStep,
         current_path: FlowPath,
-
-
+        all_paths: FlowPathsList,
+        visited_step_ids: Set[str],
     ) -> None:
         """Processes the flow steps recursively.
 
-        Either following direct step IDs or handling conditions, and adds complete
-        paths to the collected_paths.
-
         Args:
-
+            current_step: The current step being processed.
             current_path: The current path being constructed.
-
-
+            all_paths: The list where completed paths are added.
+            visited_step_ids: A set of steps that have been visited to avoid cycles.
 
         Returns:
-            None: This function modifies
+            None: This function modifies all_paths in place by appending new paths
                 as they are found.
         """
-        #
-        #
-        #
-
-
-
-
-
-
-
-
-
-
-
-
-
-        # We only create new path nodes for ActionFlowStep and
-        # CollectInformationFlowStep because these are externally visible
-        # changes in the assistant's behaviour (trackable in the e2e tests).
-        # For other flow steps, we only follow their links.
-        # We decided to ignore calls to other flows in our coverage analysis.
-        if not isinstance(step, (CollectInformationFlowStep, ActionFlowStep)):
-            self._handle_links(
-                step.next.links,
-                current_path,
-                completed_paths,
-                step_ids_visited,
-            )
-            continue
-
-        # 2. Check if already visited this custom step id
-        # in order to keep track of loops
-        if step.custom_id is not None and step.custom_id in step_ids_visited:
-            if not completed_paths.is_path_part_of_list(current_path):
-                completed_paths.paths.append(copy.deepcopy(current_path))
-            return  # Stop traversing this path if we've revisited a step
-        elif step.custom_id is not None:
-            step_ids_visited.add(step.custom_id)
-
-        # 3. Append step info to the path
-        current_path.nodes.append(
-            PathNode(
-                flow=current_path.flow,
-                step_id=step.id,
-                lines=step.metadata["line_numbers"],
-            )
+        # Check if the step is relevant for testable_paths extraction.
+        # We only create new path nodes for ActionFlowStep, CallFlowStep and
+        # CollectInformationFlowStep because these are externally visible
+        # changes in the assistant's behaviour (trackable in the e2e tests).
+        # For other flow steps, we only follow their links.
+        # We decided to ignore calls to other flows in our coverage analysis.
+        should_add_node = isinstance(
+            current_step, (CollectInformationFlowStep, ActionFlowStep, CallFlowStep)
+        )
+        if should_add_node:
+            # Add current step to the current path that is being constructed.
+            current_path.nodes.append(
+                PathNode(
+                    flow=current_path.flow,
+                    step_id=current_step.id,
+                    lines=current_step.metadata["line_numbers"],
                 )
+            )
 
-
-
-
-
-
-        )
-
-
-
-
-
-
-
-
-
-
+        if current_step.id in visited_step_ids or self.is_end_of_path(current_step):
+            # Found a cycle, or reached an end step, do not proceed further.
+            all_paths.paths.append(copy.deepcopy(current_path))
+            # Remove the last node from the path if it was added.
+            if should_add_node:
+                current_path.nodes.pop()
+            return
+
+        # Mark current step as visited in this path.
+        visited_step_ids.add(current_step.id)
+
+        # Iterate over all links of the current step.
+        for link in current_step.next.links:
+            self._handle_link(
+                current_path,
+                all_paths,
+                visited_step_ids,
+                link,
+            )
 
-
+        # Backtrack the current step and remove it from the path.
+        visited_step_ids.remove(current_step.id)
+
+        # Remove the last node from the path if it was added.
+        if should_add_node:
+            current_path.nodes.pop()
+
+    def _handle_link(
         self,
-
-
-
-
+        current_path: FlowPath,
+        all_paths: FlowPathsList,
+        visited_step_ids: Set[str],
+        link: FlowStepLink,
     ) -> None:
-        """
-
-        Potentially recursively calling itself to handle conditional paths and
-        branching.
+        """Handles the next step in a flow.
 
         Args:
-
-
-
-
-                to avoid loops.
+            current_path: The current path being constructed.
+            all_paths: The list where completed paths are added.
+            visited_step_ids: A set of steps that have been visited to avoid cycles.
+            link: The link to be followed.
 
         Returns:
-            None:
-                as they are
+            None: This function modifies all_paths in place by appending new paths
+                as they are found.
         """
-
-
-
-
-        if isinstance(link, StaticFlowStepLink):
-            # Find this id in the flow steps and restart from there
-            for i, step in enumerate(steps):
-                if step.id == link.target_step_id:
-                    self._go_over_steps(
-                        steps[i:],
-                        copy.deepcopy(path),
-                        collected_paths,
-                        copy.deepcopy(step_ids_visited),
-                    )
-
-        # If conditions
-        elif isinstance(link, (IfFlowStepLink, ElseFlowStepLink)):
-            # Handling conditional paths
-            target_steps: Union[str, List[FlowStep]]
-            if isinstance(link.target_reference, FlowStepSequence):
-                target_steps = link.target_reference.child_steps
-            else:
-                target_steps = link.target_reference
-
+        # StaticFlowStepLink is a direct link to the next step.
+        if isinstance(link, StaticFlowStepLink):
+            # Find the step by its id and continue the path.
+            if step := self._get_step_by_step_id(link.target_step_id):
                 self._go_over_steps(
-
-
-
-
+                    step,
+                    current_path,
+                    all_paths,
+                    visited_step_ids,
                 )
+            return
+        # IfFlowStepLink and ElseFlowStepLink are conditional links.
+        elif isinstance(link, (IfFlowStepLink, ElseFlowStepLink)):
+            if isinstance(link.target_reference, FlowStepSequence):
+                # If the target is a FlowStepSequence, we need to go over all
+                # child steps of the sequence.
+                for child_step in link.target_reference.child_steps:
+                    self._go_over_steps(
+                        child_step,
+                        current_path,
+                        all_paths,
+                        visited_step_ids,
+                    )
+                return
+            else:
+                # Find the step by its id and continue the path.
+                if step := self._get_step_by_step_id(link.target_reference):
+                    self._go_over_steps(
+                        step,
+                        current_path,
+                        all_paths,
+                        visited_step_ids,
+                    )
+                return
+
+    def is_end_of_path(self, step: FlowStep) -> bool:
+        """Check if there is no path available from the current step."""
+        if (
+            len(step.next.links) == 1
+            and isinstance(step.next.links[0], StaticFlowStepLink)
+            and step.next.links[0].target == END_STEP
+        ):
+            return True
+        return False
+
+    def _get_step_by_step_id(
+        self,
+        step_id: Optional[str],
+    ) -> Optional[FlowStep]:
+        """Get a step by its id from a list of steps."""
+        for step in self.steps:
+            if step.id == step_id:
+                return step
+        return None
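The rewritten `_go_over_steps`/`_handle_link` pair is a depth-first traversal with backtracking: the current path grows one node at a time, a per-path visited set stops cycles, a snapshot of the path is recorded when a cycle or an end step is reached, and the node is removed again on the way back so sibling branches start from a clean state. A minimal standalone sketch of that pattern on a plain adjacency list (illustrative names, not Rasa's flow API):

    import copy
    from typing import Dict, List, Set

    def extract_paths(graph: Dict[str, List[str]], start: str) -> List[List[str]]:
        all_paths: List[List[str]] = []

        def go(node: str, path: List[str], visited: Set[str]) -> None:
            path.append(node)
            # A revisited node (cycle) or a node without links ends this path.
            if node in visited or not graph.get(node):
                all_paths.append(copy.deepcopy(path))
                path.pop()
                return
            visited.add(node)
            for nxt in graph[node]:
                go(nxt, path, visited)
            # Backtrack so sibling branches see a clean state.
            visited.remove(node)
            path.pop()

        go(start, [], set())
        return all_paths

    # Example: two branches from "collect", one of which loops back.
    flow = {"start": ["collect"], "collect": ["action_a", "action_b"],
            "action_a": ["end"], "action_b": ["collect"], "end": []}
    print(extract_paths(flow, "start"))
    # [['start', 'collect', 'action_a', 'end'], ['start', 'collect', 'action_b', 'collect']]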
rasa/shared/providers/_utils.py
CHANGED

@@ -1,3 +1,5 @@
+from typing import Optional, Dict, Any
+
 import structlog
 
 from rasa.shared.constants import (
@@ -9,6 +11,11 @@ from rasa.shared.constants import (
     AWS_REGION_NAME_CONFIG_KEY,
     AWS_SESSION_TOKEN_CONFIG_KEY,
     AWS_SESSION_TOKEN_ENV_VAR,
+    AZURE_API_BASE_ENV_VAR,
+    API_BASE_CONFIG_KEY,
+    AZURE_API_VERSION_ENV_VAR,
+    API_VERSION_CONFIG_KEY,
+    DEPLOYMENT_CONFIG_KEY,
 )
 from rasa.shared.exceptions import ProviderClientValidationError
 from litellm import validate_environment
@@ -77,3 +84,80 @@ def validate_aws_setup_for_litellm_clients(
             missing_environment_variables=missing_environment_variables,
         )
         raise ProviderClientValidationError(event_info)
+
+
+def validate_azure_client_setup(
+    api_base: Optional[str],
+    api_version: Optional[str],
+    deployment: Optional[str],
+) -> None:
+    """Validates the Azure setup for LiteLLM Router clients to ensure
+    that all required configuration parameters are set.
+
+    Raises:
+        ProviderClientValidationError: If any required Azure configurations
+            is missing.
+    """
+
+    def generate_event_info_for_missing_setting(
+        setting: str,
+        setting_env_var: Optional[str] = None,
+        setting_config_key: Optional[str] = None,
+    ) -> str:
+        """Generate a part of the message with instructions on what to set
+        for the missing client setting.
+        """
+        info = "Set {setting} with {options}. "
+        options = ""
+        if setting_env_var is not None:
+            options += f"environment variable '{setting_env_var}'"
+        if setting_config_key is not None and setting_env_var is not None:
+            options += " or "
+        if setting_config_key is not None:
+            options += f"config key '{setting_config_key}'"
+
+        return info.format(setting=setting, options=options)
+
+    # All required settings for Azure OpenAI client
+    settings: Dict[str, Dict[str, Any]] = {
+        "API Base": {
+            "current_value": api_base,
+            "env_var": AZURE_API_BASE_ENV_VAR,
+            "config_key": API_BASE_CONFIG_KEY,
+        },
+        "API Version": {
+            "current_value": api_version,
+            "env_var": AZURE_API_VERSION_ENV_VAR,
+            "config_key": API_VERSION_CONFIG_KEY,
+        },
+        "Deployment Name": {
+            "current_value": deployment,
+            "env_var": None,
+            "config_key": DEPLOYMENT_CONFIG_KEY,
+        },
+    }
+
+    missing_settings = [
+        setting_name
+        for setting_name, setting_info in settings.items()
+        if setting_info["current_value"] is None
+    ]
+
+    if missing_settings:
+        event_info = f"Client settings not set: " f"{', '.join(missing_settings)}. "
+
+        for missing_setting in missing_settings:
+            if settings[missing_setting]["current_value"] is not None:
+                continue
+            event_info += generate_event_info_for_missing_setting(
+                missing_setting,
+                settings[missing_setting]["env_var"],
+                settings[missing_setting]["config_key"],
+            )
+
+        structlogger.error(
+            "azure_openai_llm_client.not_configured",
+            event_info=event_info,
+            missing_settings=missing_settings,
+        )
+        raise ProviderClientValidationError(event_info)

rasa/shared/providers/llm/_base_litellm_client.py
CHANGED

@@ -9,7 +9,11 @@ from litellm import (
     validate_environment,
 )
 
-from rasa.shared.constants import
+from rasa.shared.constants import (
+    API_BASE_CONFIG_KEY,
+    API_KEY,
+    _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY,
+)
 from rasa.shared.exceptions import (
     ProviderClientAPIException,
     ProviderClientValidationError,
@@ -23,8 +27,6 @@ from rasa.shared.utils.io import suppress_logs, resolve_environment_variables
 
 structlogger = structlog.get_logger()
 
-_VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY = "missing_keys"
-
 # Suppress LiteLLM info and debug logs - Global level.
 logging.getLogger("LiteLLM").setLevel(logging.WARNING)
 

rasa/shared/providers/llm/azure_openai_llm_client.py
CHANGED

@@ -9,9 +9,6 @@ from rasa.shared.constants import (
     OPENAI_API_VERSION_ENV_VAR,
     AZURE_API_BASE_ENV_VAR,
     AZURE_API_VERSION_ENV_VAR,
-    API_BASE_CONFIG_KEY,
-    API_VERSION_CONFIG_KEY,
-    DEPLOYMENT_CONFIG_KEY,
     AZURE_API_KEY_ENV_VAR,
     OPENAI_API_TYPE_ENV_VAR,
     OPENAI_API_KEY_ENV_VAR,
@@ -23,6 +20,7 @@ from rasa.shared.exceptions import ProviderClientValidationError
 from rasa.shared.providers._configs.azure_openai_client_config import (
     AzureOpenAIClientConfig,
 )
+from rasa.shared.providers._utils import validate_azure_client_setup
 from rasa.shared.providers.llm._base_litellm_client import _BaseLiteLLMClient
 from rasa.shared.utils.io import raise_deprecation_warning
 
@@ -295,65 +293,8 @@ class AzureOpenAILLMClient(_BaseLiteLLMClient):
     def validate_client_setup(self) -> None:
         """Validates that all required configuration parameters are set."""
 
-
-
-
-
-        )
-        """Generate a part of the message with instructions on what to set
-        for the missing client setting.
-        """
-        info = "Set {setting} with {options}. "
-        options = ""
-        if setting_env_var is not None:
-            options += f"environment variable '{setting_env_var}'"
-        if setting_config_key is not None and setting_env_var is not None:
-            options += " or "
-        if setting_config_key is not None:
-            options += f"config key '{setting_config_key}'"
-
-        return info.format(setting=setting, options=options)
-
-        # All required settings for Azure OpenAI client
-        settings: Dict[str, Dict[str, Any]] = {
-            "API Base": {
-                "current_value": self.api_base,
-                "env_var": AZURE_API_BASE_ENV_VAR,
-                "config_key": API_BASE_CONFIG_KEY,
-            },
-            "API Version": {
-                "current_value": self.api_version,
-                "env_var": AZURE_API_VERSION_ENV_VAR,
-                "config_key": API_VERSION_CONFIG_KEY,
-            },
-            "Deployment Name": {
-                "current_value": self.deployment,
-                "env_var": None,
-                "config_key": DEPLOYMENT_CONFIG_KEY,
-            },
-        }
-
-        missing_settings = [
-            setting_name
-            for setting_name, setting_info in settings.items()
-            if setting_info["current_value"] is None
-        ]
-
-        if missing_settings:
-            event_info = f"Client settings not set: " f"{', '.join(missing_settings)}. "
-
-            for missing_setting in missing_settings:
-                if settings[missing_setting]["current_value"] is not None:
-                    continue
-                event_info += generate_event_info_for_missing_setting(
-                    missing_setting,
-                    settings[missing_setting]["env_var"],
-                    settings[missing_setting]["config_key"],
-                )
-
-            structlogger.error(
-                "azure_openai_llm_client.not_configured",
-                event_info=event_info,
-                missing_settings=missing_settings,
-            )
-            raise ProviderClientValidationError(event_info)
+        return validate_azure_client_setup(
+            api_base=self.api_base,
+            api_version=self.api_version,
+            deployment=self.deployment,
+        )

rasa/shared/providers/router/_base_litellm_router_client.py
CHANGED

@@ -2,7 +2,7 @@ from typing import Any, Dict, List
 import os
 import structlog
 
-from litellm import Router
+from litellm import Router, validate_environment
 
 from rasa.shared.constants import (
     MODEL_LIST_KEY,
@@ -14,11 +14,18 @@ from rasa.shared.constants import (
     API_KEY,
     MODEL_CONFIG_KEY,
     USE_CHAT_COMPLETIONS_ENDPOINT_CONFIG_KEY,
+    API_BASE_CONFIG_KEY,
+    _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY,
+    AZURE_OPENAI_PROVIDER,
+    API_VERSION_CONFIG_KEY,
+    AZURE_API_BASE_ENV_VAR,
+    AZURE_API_VERSION_ENV_VAR,
 )
 from rasa.shared.exceptions import ProviderClientValidationError
 from rasa.shared.providers._configs.litellm_router_client_config import (
     LiteLLMRouterClientConfig,
 )
+from rasa.shared.providers._utils import validate_azure_client_setup
 from rasa.shared.utils.io import resolve_environment_variables
 
 structlogger = structlog.get_logger()
@@ -62,6 +69,7 @@ class _BaseLiteLLMRouterClient:
         resolved_model_configurations = (
             self._resolve_env_vars_in_model_configurations()
         )
+        self._validate_model_configurations(resolved_model_configurations)
         self._router_client = Router(
             model_list=resolved_model_configurations, **router_settings
         )
@@ -181,3 +189,49 @@ class _BaseLiteLLMRouterClient:
         )
         model_configuration_with_resolved_keys.append(resolved_model_configuration)
         return model_configuration_with_resolved_keys
+
+    def _validate_model_configurations(
+        self, resolved_model_configurations: List[Dict[str, Any]]
+    ) -> None:
+        """Validates the model configurations.
+
+        Args:
+            resolved_model_configurations: (List[Dict[str, Any]]) The list of model
+                configurations with resolved environment variables.
+
+        Raises:
+            ProviderClientValidationError: If the model configurations are invalid.
+        """
+        for model_configuration in resolved_model_configurations:
+            litellm_params = model_configuration.get(LITELLM_PARAMS_KEY, {})
+
+            model = litellm_params.get(MODEL_CONFIG_KEY)
+            provider, deployment = model.split("/", 1)
+            api_base = litellm_params.get(API_BASE_CONFIG_KEY)
+
+            if provider.lower() == AZURE_OPENAI_PROVIDER:
+                validate_azure_client_setup(
+                    api_base=api_base or os.getenv(AZURE_API_BASE_ENV_VAR),
+                    api_version=litellm_params.get(API_VERSION_CONFIG_KEY)
+                    or os.getenv(AZURE_API_VERSION_ENV_VAR),
+                    deployment=deployment,
+                )
+            else:
+                validation_info = validate_environment(
+                    model=model,
+                    api_key=litellm_params.get(API_KEY),
+                    api_base=api_base,
+                )
+                if missing_environment_variables := validation_info.get(
+                    _VALIDATE_ENVIRONMENT_MISSING_KEYS_KEY
+                ):
+                    event_info = (
+                        f"Environment variables: {missing_environment_variables} "
+                        f"not set. Required for API calls."
+                    )
+                    structlogger.error(
+                        "base_litellm_router_client.validate_environment_variables",
+                        event_info=event_info,
+                        missing_environment_variables=missing_environment_variables,
+                    )
+                    raise ProviderClientValidationError(event_info)
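Taken together, these changes move the Azure-specific checks into the shared `validate_azure_client_setup` helper and make the router client validate every `model_list` entry before the LiteLLM `Router` is constructed. A rough standalone sketch of that per-entry check, assuming litellm is installed (the configuration values, environment-variable names and the "azure" provider string are illustrative assumptions, not the exact Rasa constants):

    import os
    from litellm import validate_environment

    model_list = [
        {
            "model_name": "azure-gpt",
            "litellm_params": {
                "model": "azure/my-deployment",  # "<provider>/<deployment>"
                "api_base": "https://example.openai.azure.com",
                "api_version": "2024-02-15-preview",
            },
        },
    ]

    for entry in model_list:
        params = entry["litellm_params"]
        provider, deployment = params["model"].split("/", 1)
        if provider.lower() == "azure":
            # Mirrors validate_azure_client_setup: each Azure setting must come
            # from the config or from an environment variable.
            required = {
                "api_base": params.get("api_base") or os.getenv("AZURE_API_BASE"),
                "api_version": params.get("api_version") or os.getenv("AZURE_API_VERSION"),
                "deployment": deployment,
            }
            missing = [name for name, value in required.items() if not value]
            if missing:
                raise ValueError(f"Azure client settings not set: {missing}")
        else:
            info = validate_environment(model=params["model"], api_key=params.get("api_key"))
            if info.get("missing_keys"):
                raise ValueError(f"Environment variables {info['missing_keys']} not set.")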