rasa-pro 3.13.0a1.dev4-py3-none-any.whl → 3.13.0a1.dev5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rasa-pro might be problematic.

@@ -0,0 +1,81 @@
+ import json
+ from typing import Optional
+
+ from rasa.shared.core.constants import DEFAULT_SLOT_NAMES
+ from rasa.shared.core.events import (
+     ActionExecuted,
+     BotUttered,
+     FlowCompleted,
+     FlowStarted,
+     SlotSet,
+     UserUttered,
+ )
+ from rasa.shared.core.trackers import DialogueStateTracker
+
+
+ def tracker_as_llm_context(tracker: Optional[DialogueStateTracker]) -> str:
+     """Convert a tracker to a string that can be used as context for the LLM."""
+     if not tracker or not tracker.events:
+         return "No conversation history available."
+
+     context_parts = []
+     current_turn = []
+
+     for event in tracker.events:
+         if isinstance(event, UserUttered):
+             if current_turn:
+                 context_parts.append(" | ".join(current_turn))
+                 current_turn = []
+             current_turn.append(f"User: {event.text}")
+             if event.intent:
+                 current_turn.append(f"Intent: {event.intent.get('name')}")
+             if event.entities:
+                 current_turn.append(
+                     f"Entities: {[e.get('entity') for e in event.entities]}"
+                 )
+             if event.commands:
+                 current_turn.append(
+                     f"Commands: {[cmd.get('name') for cmd in event.commands]}"
+                 )
+
+         elif isinstance(event, BotUttered):
+             if event.text:
+                 current_turn.append(f"Bot: {event.text}")
+
+         elif isinstance(event, ActionExecuted):
+             current_turn.append(f"Action: {event.action_name}")
+             if event.confidence:
+                 current_turn.append(f"Confidence: {event.confidence:.2f}")
+
+         elif isinstance(event, SlotSet) and event.key not in DEFAULT_SLOT_NAMES:
+             current_turn.append(f"Slot Set: {event.key}={event.value}")
+
+         elif isinstance(event, FlowStarted):
+             current_turn.append(f"# Flow Started: {event.flow_id}")
+
+         elif isinstance(event, FlowCompleted):
+             current_turn.append(f"# Flow Completed: {event.flow_id}")
+
+     if current_turn:
+         context_parts.append(" | ".join(current_turn))
+
+     # Add final state information
+     context_parts.append("\nCurrent State:")
+     context_parts.append(f"Latest Message: {tracker.latest_message.text or '-'}")
+
+     # Add active flows from stack
+     if tracker.active_flow:
+         context_parts.append(f"Active Flow: {tracker.active_flow}")
+     if tracker.stack:
+         context_parts.append(f"Flow Stack: {json.dumps(tracker.stack.as_dict())}")
+
+     # Add slot values that are not None
+     non_empty_slots = {
+         k: str(v.value)
+         for k, v in tracker.slots.items()
+         if v is not None and k not in DEFAULT_SLOT_NAMES
+     }
+     if non_empty_slots:
+         context_parts.append(f"Slots: {json.dumps(non_empty_slots)}")
+
+     return "\n".join(context_parts)
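
This new helper walks the tracker's event stream, groups events into turns that are flushed whenever a new UserUttered event arrives, and then appends a summary of the current state (latest message, active flow, flow stack, and non-default slots). A minimal sketch of how it could be exercised against an in-memory tracker, assuming the event constructors and DialogueStateTracker.from_events behave as in rasa.shared.core:

    from rasa.shared.core.events import ActionExecuted, BotUttered, UserUttered
    from rasa.shared.core.trackers import DialogueStateTracker

    from rasa.builder.llm_context import tracker_as_llm_context

    # Build a small in-memory tracker from a handful of events.
    events = [
        UserUttered("I want to book a table"),
        ActionExecuted("utter_ask_party_size"),
        BotUttered("For how many people?"),
        UserUttered("Four, please"),
    ]
    tracker = DialogueStateTracker.from_events("demo-user", events)

    # Each user message starts a new turn in the rendered context string.
    print(tracker_as_llm_context(tracker))

    # No tracker (or an empty one) yields the fallback string.
    print(tracker_as_llm_context(None))  # "No conversation history available."
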
@@ -1,4 +1,5 @@
  import asyncio
+ import collections
  import importlib
  import json
  import logging
@@ -18,6 +19,7 @@ from sanic import Sanic, response
  from structlog.testing import capture_logs
 
  import rasa.core.utils
+ from rasa.builder.llm_context import tracker_as_llm_context
  from rasa.cli.utils import validate_files
  from rasa.constants import PACKAGE_NAME
  from rasa.core import agent, channels
@@ -53,11 +55,6 @@ DEFAULT_SKILL_GENERATION_SYSTEM_PROMPT = importlib.resources.read_text(
  VECTOR_STORE_ID = "vs_685123376e288191a005b6b144d3026f"
 
 
- class PromptRequest(BaseModel):
-     prompt: str
-     client_id: Optional[str] = None
-
-
  default_credentials_yaml = """
  studio_chat:
    user_message_evt: "user_message"
@@ -65,6 +62,23 @@ studio_chat:
    session_persistence: true
  """
 
+ # create a deque where we collect the most recent logs. only keep the last 30 log lines
+ # use a builtin type for this
+ recent_logs = collections.deque(maxlen=30)
+
+
+ def collecting_logs_processor(logger, log_level, event_dict):
+     if log_level != logging.getLevelName(logging.DEBUG).lower():
+         event_message = event_dict.get("event_info") or event_dict.get("event", "")
+         recent_logs.append(f"[{log_level}] {event_message}")
+
+     return event_dict
+
+
+ class PromptRequest(BaseModel):
+     prompt: str
+     client_id: Optional[str] = None
+
 
  def default_credentials() -> Dict[str, Any]:
      return read_yaml(default_credentials_yaml)
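
collecting_logs_processor follows structlog's processor contract: a callable that receives (logger, method_name, event_dict), may inspect or mutate the event, and must return the event dict so the chain continues. Here it appends every non-debug message to the bounded recent_logs deque so that llm_builder can later include the most recent framework logs in its prompt. A standalone sketch of the same pattern wired directly through structlog.configure (the names below are illustrative, not the rasa wiring):

    import collections

    import structlog

    recent = collections.deque(maxlen=30)  # bounded buffer: oldest entries drop off


    def collect_recent(logger, method_name, event_dict):
        # Skip debug noise; keep everything else for later inspection.
        if method_name != "debug":
            recent.append(f"[{method_name}] {event_dict.get('event', '')}")
        return event_dict  # processors must pass the event dict along


    structlog.configure(
        processors=[
            collect_recent,
            structlog.processors.KeyValueRenderer(),
        ]
    )

    log = structlog.get_logger()
    log.info("server_started", port=5005)
    log.debug("not_collected")
    print(list(recent))  # ["[info] server_started"]
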
@@ -255,7 +269,9 @@ class PromptToBotService:
              )
          except Exception as e:
              structlogger.error(
-                 "prompt_to_bot.update_bot_data.validation_error", error=str(e)
+                 "prompt_to_bot.update_bot_data.validation_error",
+                 error=str(e),
+                 event_info="Failed to validate the Rasa project. Error: " + str(e),
              )
              await response.send(
                  sse_event(
@@ -273,7 +289,9 @@ class PromptToBotService:
              await response.send(sse_event("train_success", {"status": "train_success"}))
          except Exception as e:
              structlogger.error(
-                 "prompt_to_bot.update_bot_data.train_error", error=str(e)
+                 "prompt_to_bot.update_bot_data.train_error",
+                 error=str(e),
+                 event_info="Failed to train the agent. Error: " + str(e),
              )
              await response.send(
                  sse_event("train_error", {"status": "train_error", "error": str(e)})
@@ -450,27 +468,40 @@ class PromptToBotService:
              ] = await self.app.ctx.agent.tracker_store.retrieve(
                  self.input_channel.latest_tracker_session_id
              )
-             if tracker is None:
-                 test_conversation = []
-             else:
-                 test_conversation = tracker.current_state()
+             return tracker_as_llm_context(tracker)
          else:
-             test_conversation = []
+             return tracker_as_llm_context(None)
 
-         chat_bot_files = self.bot_files
-
-         return {
-             "test_conversation": test_conversation,
-             "chat_bot_files": chat_bot_files,
-         }
+     def format_chat_dump(self, user_chat_history: List[Dict[str, Any]]) -> str:
+         """Format the chat dump for the LLM."""
+         result = ""
+         for message in user_chat_history:
+             if message.get("type") == "user":
+                 result += f"User: {message.get('content')}\n"
+             else:
+                 for part in message.get("content", []):
+                     if part.get("type") == "text":
+                         result += f"Assistant: {part.get('text')}\n"
+         return result
 
      async def llm_builder(self, request):
          chat_bot_context = await self.generate_chat_bot_context()
+         recent_logs_context = "\n".join(recent_logs)
+         chat_bot_files_context = json.dumps(self.bot_files)
          user_chat_history = request.json.get("messages", [])
-         last_user_message = (
-             user_chat_history[-1].get("content") if user_chat_history else ""
+         chat_dump = self.format_chat_dump(user_chat_history)
+
+         client = openai.AsyncOpenAI()
+
+         results = await client.vector_stores.search(
+             vector_store_id=VECTOR_STORE_ID,
+             query=chat_dump,
+             max_num_results=10,
+             rewrite_query=True,
          )
 
+         formatted_results = self.format_results(results.data)
+
          prompt = dedent(f"""
          You are a helpful assistant that helps users building chatbots using Rasa.
          Do not mention Rasa Studio - only ever refer to Rasa Pro.
@@ -516,8 +547,25 @@ class PromptToBotService:
 
          Create links to https://rasa.com/docs for the user to take a look at.
 
-         Chat bot context:
+         ----
+         Logs from chatbot framework:
+         {recent_logs_context}
+
+         ----
+         Most recent conversation of the user with the chatbot:
          {json.dumps(chat_bot_context)}
+
+         ----
+         Chat bot files for this project (name and content):
+         {json.dumps(chat_bot_files_context)}
+
+         ----
+         Documentation Context:
+         {formatted_results}
+
+         ----
+         Respond with a json array of content blocks. Keep the response
+         short and concise.
          """)
 
          past_messages = []
@@ -529,26 +577,9 @@ class PromptToBotService:
              }
          )
 
-         client = openai.AsyncOpenAI()
-
-         results = await client.vector_stores.search(
-             vector_store_id=VECTOR_STORE_ID,
-             query=last_user_message,
-         )
-
-         formatted_results = self.format_results(results.data)
-
          messages = [
              {"role": "system", "content": prompt},
              *past_messages,
-             {
-                 "role": "user",
-                 "content": (
-                     f"Documentation Context: {formatted_results}\n\n"
-                     "Respond with a json array of content blocks. Keep the "
-                     "response short and concise."
-                 ),
-             },
          ]
 
          llm_helper_schema = read_json_file(
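
The documentation lookup now runs before the prompt is assembled: the whole formatted chat dump becomes the search query (with rewrite_query=True so the API can rewrite it into a tighter query), results are capped at ten, and the formatted hits are embedded in the system prompt instead of being appended as a trailing user message. A minimal standalone sketch of that retrieval step, reusing the same vector_stores.search parameters as above; render_hits is a hypothetical stand-in for the service's format_results helper, and the result shape is an assumption:

    import asyncio

    import openai


    async def fetch_docs_context(chat_dump: str, vector_store_id: str) -> str:
        client = openai.AsyncOpenAI()  # reads OPENAI_API_KEY from the environment
        results = await client.vector_stores.search(
            vector_store_id=vector_store_id,
            query=chat_dump,
            max_num_results=10,
            rewrite_query=True,  # let the API rephrase the dump into a search query
        )
        return render_hits(results.data)


    def render_hits(hits) -> str:
        # Hypothetical formatter: join the text parts of each returned chunk
        # (the exact shape of the search results is assumed here).
        parts = []
        for hit in hits:
            for content in hit.content:
                if getattr(content, "type", None) == "text":
                    parts.append(content.text)
        return "\n\n".join(parts)


    # asyncio.run(fetch_docs_context("User: how do I define a flow?", "vs_..."))
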
@@ -622,7 +653,10 @@ class PromptToBotService:
          )
 
          if not agent_instance.is_ready():
-             raise Exception("Failed to load model.")
+             raise Exception(
+                 "Generation of the chatbot failed with an error (model failed "
+                 "to load). Please try again."
+             )
 
          structlogger.info(
              "prompt_to_bot.train_and_load_agent.agent_ready",
@@ -642,7 +676,13 @@ def main():
          warn_only_once=True,
          filter_repeated_logs=True,
      )
-     configure_structlog(log_level, include_time=True)
+     configure_structlog(
+         log_level,
+         include_time=True,
+         additional_processors=[
+             collecting_logs_processor,
+         ],
+     )
 
      service = PromptToBotService()
      register_custom_sanic_error_handler(service.app)
@@ -206,9 +206,6 @@ class StudioChatInput(SocketIOInput, VoiceInputChannel):
 
      async def on_tracker_updated(self, tracker: "DialogueStateTracker") -> None:
          """Triggers a tracker update notification after a change to the tracker."""
-         # we need the latest session id to use it for the llm helper to get the
-         # most recent conversation the user had with the bot.
-         self.latest_tracker_session_id = tracker.sender_id
          await self.publish_tracker_update(tracker.sender_id, tracker_as_dump(tracker))
 
      async def publish_tracker_update(self, sender_id: str, tracker_dump: Dict) -> None:
@@ -216,6 +213,11 @@ class StudioChatInput(SocketIOInput, VoiceInputChannel):
          if not self.sio:
              structlogger.error("studio_chat.on_tracker_updated.sio_not_initialized")
              return
+
+         # we need the latest session id to use it for the llm helper to get the
+         # most recent conversation the user had with the bot.
+         self.latest_tracker_session_id = sender_id
+
          await self.sio.emit("tracker", tracker_dump, room=sender_id)
 
      async def on_message_proxy(
@@ -734,7 +734,14 @@ def _run_action_step(
          # do not log about non-existing validation actions of collect steps
          utter_action_name = render_template_variables("{{context.utter}}", context)
          if utter_action_name not in available_actions:
-             structlogger.warning("flow.step.run.action.unknown", action=action_name)
+             structlogger.warning(
+                 "flow.step.run.action.unknown",
+                 action=action_name,
+                 event_info=(
+                     f"The action '{action_name}' is not defined in the domain but "
+                     f"getting triggered by the flow '{step.flow_id}'."
+                 ),
+             )
          return ContinueFlowWithNextStep(events=initial_events)
 
 
rasa/utils/log_utils.py CHANGED
@@ -3,7 +3,7 @@ from __future__ import annotations
  import logging
  import os
  import sys
- from typing import Any, Optional
+ from typing import Any, List, Optional
 
  import structlog
  from structlog.dev import ConsoleRenderer
@@ -37,6 +37,7 @@ class HumanConsoleRenderer(ConsoleRenderer):
  def configure_structlog(
      log_level: Optional[int] = None,
      include_time: bool = False,
+     additional_processors: Optional[List[structlog.processors.Processor]] = None,
  ) -> None:
      """Configure logging of the server."""
      if log_level is None:  # Log level NOTSET is 0 so we use `is None` here
@@ -75,6 +76,9 @@
      if include_time:
          shared_processors.append(structlog.processors.TimeStamper(fmt="iso"))
 
+     if additional_processors:
+         shared_processors.extend(additional_processors)
+
 
      if not FORCE_JSON_LOGGING and sys.stderr.isatty():
          # Pretty printing when we run in a terminal session.
          # Automatically prints pretty tracebacks when "rich" is installed
rasa/version.py CHANGED
@@ -1,3 +1,3 @@
  # this file will automatically be changed,
  # do not add anything but the version number here!
- __version__ = "3.13.0a1.dev4"
+ __version__ = "3.13.0a1.dev5"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: rasa-pro
- Version: 3.13.0a1.dev4
+ Version: 3.13.0a1.dev5
  Summary: State-of-the-art open-core Conversational AI framework for Enterprises that natively leverages generative AI for effortless assistant development.
  Keywords: nlp,machine-learning,machine-learning-library,bot,bots,botkit,rasa conversational-agents,conversational-ai,chatbot,chatbot-framework,bot-framework
  Author: Rasa Technologies GmbH
@@ -4,7 +4,8 @@ rasa/api.py,sha256=RY3SqtlOcdq4YZGgr6DOm-nUBpiA8l8uguUZOctL_7o,6320
  rasa/builder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/builder/create_openai_vector_store.py,sha256=GeOV5Wa5snNr6PNOnRcLjltKrG2NyydeRgGvs81n37g,1929
  rasa/builder/llm-helper-schema.json,sha256=z5IJc_-2mZ9oQ-z-9WzTivOoqYsLXCAm8MIOTWy5rGs,1609
- rasa/builder/prompt_to_bot.py,sha256=ntATcck6MMHTNRQpzXQOB2tseMB21NgKi9QSzZddyiQ,22436
+ rasa/builder/llm_context.py,sha256=zy7htrXgS_QWJWeEj4TfseQgTI65whFJR_4GKm_iOvE,2826
+ rasa/builder/prompt_to_bot.py,sha256=s6zRBsL3jtdsorIbk01woXdvyYRXZAd0t5Ma9DhqfEc,23889
  rasa/builder/scrape_rasa_docs.py,sha256=HukkTCIh1rMCE8D_EtXGHy0aHtFBVrVTT_6Wpex3XQM,2428
  rasa/builder/skill_to_bot_prompt.jinja,sha256=h_HyrqPYMjLht8LJyw6vRChNkBTPM0uq2wopo9VjGc0,5345
  rasa/cli/__init__.py,sha256=eO5vp9rFCANtbTVU-pxN3iMBKw4p9WRcgzytt9MzinY,115
@@ -259,7 +260,7 @@ rasa/core/channels/rest.py,sha256=LWBYBdVzOz5Vv5tZCkB1QA7LxXJFTeC87CQLAi_ZGeI,73
  rasa/core/channels/rocketchat.py,sha256=hajaH6549CjEYFM5jSapw1DQKBPKTXbn7cVSuZzknmI,5999
  rasa/core/channels/slack.py,sha256=jVsTTUu9wUjukPoIsAhbee9o0QFUMCNlQHbR8LTcMBc,24406
  rasa/core/channels/socketio.py,sha256=ZEavmx2on9AH73cuIFSGMKn1LHJhzcQVaqrFz7SH-CE,11348
- rasa/core/channels/studio_chat.py,sha256=U4aDQ17asMv1m0gHYRb6PHJPkCNzG2MgvQZNIsbrDa8,19547
+ rasa/core/channels/studio_chat.py,sha256=34KdZ9AtHtL9X-0E9gJpif1xeO4HrZ0pyOYfP--Xh64,19541
  rasa/core/channels/telegram.py,sha256=TKVknsk3U9tYeY1a8bzlhqkltWmZfGSOvrcmwa9qozc,12499
  rasa/core/channels/twilio.py,sha256=2BTQpyx0b0yPpc0A2BHYfxLPgodrLGLs8nq6i3lVGAM,5906
  rasa/core/channels/vier_cvg.py,sha256=5O4yx0TDQIMppvlCxTOzmPB60CA-vqQXqWQ7upfrTO0,13496
@@ -332,7 +333,7 @@ rasa/core/policies/enterprise_search_prompt_with_relevancy_check_and_citation_te
  rasa/core/policies/flow_policy.py,sha256=Rvx5MIGDHi9sVxGazf-dXs6F-hFHSi3UoVjjSP8ATik,7470
  rasa/core/policies/flows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  rasa/core/policies/flows/flow_exceptions.py,sha256=_FQuN-cerQDM1pivce9bz4zylh5UYkljvYS1gjDukHI,1527
- rasa/core/policies/flows/flow_executor.py,sha256=rdODUFtDRlpgFV7Wzd6UuSYpXbiqUIwaaVZkl1HueBg,28314
+ rasa/core/policies/flows/flow_executor.py,sha256=MDYZ_XKl8LfzmBwcyEUL3Tnap0lcqpzknSgwTyJ_kpc,28592
  rasa/core/policies/flows/flow_step_result.py,sha256=agjPrD6lahGSe2ViO5peBeoMdI9ngVGRSgtytgxmJmg,1360
  rasa/core/policies/intentless_policy.py,sha256=1A7FSkI4PQdN3t1p3GQhSImmO-m6UVCUzzEsjxz4nKc,38040
  rasa/core/policies/intentless_prompt_template.jinja2,sha256=KhIL3cruMmkxhrs5oVbqgSvK6ZiN_6TQ_jXrgtEB-ZY,677
@@ -820,7 +821,7 @@ rasa/utils/endpoints.py,sha256=jX9xSI_3KJ-NpzymyfaO-Zj-ISaWbA4ql2Kx3NulBvE,10905
  rasa/utils/io.py,sha256=LIAdQQqUPA-V_mdpgeQzPDzA4rmsdZLyVKc8j_0Z70Y,7161
  rasa/utils/json_utils.py,sha256=SKtJzzsIRCAgNEQiBvWDDm9euMRBgJ-TyvCi2tXHH1w,1689
  rasa/utils/licensing.py,sha256=_YGELCUjBF9rzVwlKoP1YfnqskszHUqyd-_qVpXr5Kw,20508
- rasa/utils/log_utils.py,sha256=QR0R5Ezs9xOaESluelqdikViIypXSWVxCPJmJM4Ir3E,5440
+ rasa/utils/log_utils.py,sha256=ihkghF8HQ-9nAH04OE433aaYXamSWuw8zBMBIRFewlY,5615
  rasa/utils/mapper.py,sha256=CZiD3fu7-W-OJgoB1R8JaOg-Hq13TK20D-zGVNgbF18,7726
  rasa/utils/ml_utils.py,sha256=y4Czr9GdRBj-a2npXU8ED2qC9bzw5olRyqQEmu5BB8k,4185
  rasa/utils/plotting.py,sha256=Dv7O58T4eu3XwwIFgptx4ay1ocjHLmeA7uReN2FIne4,12260
@@ -847,9 +848,9 @@ rasa/utils/train_utils.py,sha256=ClJx-6x3-h3Vt6mskacgkcCUJTMXjFPe3zAcy_DfmaU,212
  rasa/utils/url_tools.py,sha256=dZ1HGkVdWTJB7zYEdwoDIrEuyX9HE5WsxKKFVsXBLE0,1218
  rasa/utils/yaml.py,sha256=KjbZq5C94ZP7Jdsw8bYYF7HASI6K4-C_kdHfrnPLpSI,2000
  rasa/validator.py,sha256=IRhLfcgCpps0wSpokOvUGNaY8t8GsmeSmPOUVRKeOeE,83087
- rasa/version.py,sha256=jE1pYjm2152_i0qt5oD2cxd0kGGE-S9GshzD3iFuYfQ,124
- rasa_pro-3.13.0a1.dev4.dist-info/METADATA,sha256=df5eOzp5yQ8PeEp2nGD8xza-F0mW2ROkVGpjBS5-2Lc,10562
- rasa_pro-3.13.0a1.dev4.dist-info/NOTICE,sha256=7HlBoMHJY9CL2GlYSfTQ-PZsVmLmVkYmMiPlTjhuCqA,218
- rasa_pro-3.13.0a1.dev4.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
- rasa_pro-3.13.0a1.dev4.dist-info/entry_points.txt,sha256=ckJ2SfEyTPgBqj_I6vm_tqY9dZF_LAPJZA335Xp0Q9U,43
- rasa_pro-3.13.0a1.dev4.dist-info/RECORD,,
+ rasa/version.py,sha256=FYrU5nK7ROc0P0pY4HA3QPd1MQFZ-Pkf-jy2jy20NV4,124
+ rasa_pro-3.13.0a1.dev5.dist-info/METADATA,sha256=6dEcJ6rsZMNZ8hZ1C7Excc7o5dbcEeDGB7M_J61oXtU,10562
+ rasa_pro-3.13.0a1.dev5.dist-info/NOTICE,sha256=7HlBoMHJY9CL2GlYSfTQ-PZsVmLmVkYmMiPlTjhuCqA,218
+ rasa_pro-3.13.0a1.dev5.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+ rasa_pro-3.13.0a1.dev5.dist-info/entry_points.txt,sha256=ckJ2SfEyTPgBqj_I6vm_tqY9dZF_LAPJZA335Xp0Q9U,43
+ rasa_pro-3.13.0a1.dev5.dist-info/RECORD,,