rasa-pro 3.11.0a4.dev3__py3-none-any.whl → 3.11.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of rasa-pro might be problematic.
- rasa/__main__.py +22 -12
- rasa/api.py +1 -1
- rasa/cli/arguments/default_arguments.py +1 -2
- rasa/cli/arguments/shell.py +5 -1
- rasa/cli/e2e_test.py +1 -1
- rasa/cli/evaluate.py +8 -8
- rasa/cli/inspect.py +6 -4
- rasa/cli/llm_fine_tuning.py +1 -1
- rasa/cli/project_templates/calm/config.yml +5 -7
- rasa/cli/project_templates/calm/endpoints.yml +8 -0
- rasa/cli/project_templates/tutorial/config.yml +8 -5
- rasa/cli/project_templates/tutorial/data/flows.yml +1 -1
- rasa/cli/project_templates/tutorial/data/patterns.yml +5 -0
- rasa/cli/project_templates/tutorial/domain.yml +14 -0
- rasa/cli/project_templates/tutorial/endpoints.yml +7 -7
- rasa/cli/run.py +1 -1
- rasa/cli/scaffold.py +4 -2
- rasa/cli/studio/studio.py +18 -8
- rasa/cli/utils.py +5 -0
- rasa/cli/x.py +8 -8
- rasa/constants.py +1 -1
- rasa/core/actions/action_repeat_bot_messages.py +17 -0
- rasa/core/channels/channel.py +20 -0
- rasa/core/channels/inspector/dist/assets/{arc-6852c607.js → arc-bc141fb2.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{c4Diagram-d0fbc5ce-acc952b2.js → c4Diagram-d0fbc5ce-be2db283.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{classDiagram-936ed81e-848a7597.js → classDiagram-936ed81e-55366915.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{classDiagram-v2-c3cb15f1-a73d3e68.js → classDiagram-v2-c3cb15f1-bb529518.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{createText-62fc7601-e5ee049d.js → createText-62fc7601-b0ec81d6.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{edges-f2ad444c-771e517e.js → edges-f2ad444c-6166330c.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{erDiagram-9d236eb7-aa347178.js → erDiagram-9d236eb7-5ccc6a8e.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{flowDb-1972c806-651fc57d.js → flowDb-1972c806-fca3bfe4.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{flowDiagram-7ea5b25a-ca67804f.js → flowDiagram-7ea5b25a-4739080f.js} +1 -1
- rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-736177bf.js +1 -0
- rasa/core/channels/inspector/dist/assets/{flowchart-elk-definition-abe16c3d-2dbc568d.js → flowchart-elk-definition-abe16c3d-7c1b0e0f.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{ganttDiagram-9b5ea136-25a65bd8.js → ganttDiagram-9b5ea136-772fd050.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{gitGraphDiagram-99d0ae7c-fdc7378d.js → gitGraphDiagram-99d0ae7c-8eae1dc9.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{index-2c4b9a3b-6f1fd606.js → index-2c4b9a3b-f55afcdf.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{index-efdd30c1.js → index-e7cef9de.js} +68 -68
- rasa/core/channels/inspector/dist/assets/{infoDiagram-736b4530-cb1a041a.js → infoDiagram-736b4530-124d4a14.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{journeyDiagram-df861f2b-14609879.js → journeyDiagram-df861f2b-7c4fae44.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{layout-2490f52b.js → layout-b9885fb6.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{line-40186f1f.js → line-7c59abb6.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{linear-08814e93.js → linear-4776f780.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{mindmap-definition-beec6740-1a534584.js → mindmap-definition-beec6740-2332c46c.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{pieDiagram-dbbf0591-72397b61.js → pieDiagram-dbbf0591-8fb39303.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{quadrantDiagram-4d7f4fd6-3bb0b6a3.js → quadrantDiagram-4d7f4fd6-3c7180a2.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{requirementDiagram-6fc4c22a-57334f61.js → requirementDiagram-6fc4c22a-e910bcb8.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{sankeyDiagram-8f13d901-111e1297.js → sankeyDiagram-8f13d901-ead16c89.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{sequenceDiagram-b655622a-10bcfe62.js → sequenceDiagram-b655622a-29a02a19.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{stateDiagram-59f0c015-acaf7513.js → stateDiagram-59f0c015-042b3137.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{stateDiagram-v2-2b26beab-3ec2a235.js → stateDiagram-v2-2b26beab-2178c0f3.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{styles-080da4f6-62730289.js → styles-080da4f6-23ffa4fc.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{styles-3dcbcfbf-5284ee76.js → styles-3dcbcfbf-94f59763.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{styles-9c745c82-642435e3.js → styles-9c745c82-78a6bebc.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{svgDrawCommon-4835440b-b250a350.js → svgDrawCommon-4835440b-eae2a6f6.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{timeline-definition-5b62e21b-c2b147ed.js → timeline-definition-5b62e21b-5c968d92.js} +1 -1
- rasa/core/channels/inspector/dist/assets/{xychartDiagram-2b33534f-f92cfea9.js → xychartDiagram-2b33534f-fd3db0d5.js} +1 -1
- rasa/core/channels/inspector/dist/index.html +1 -1
- rasa/core/channels/inspector/src/App.tsx +1 -1
- rasa/core/channels/inspector/src/helpers/audiostream.ts +77 -16
- rasa/core/channels/socketio.py +2 -1
- rasa/core/channels/telegram.py +1 -1
- rasa/core/channels/twilio.py +1 -1
- rasa/core/channels/voice_ready/audiocodes.py +12 -0
- rasa/core/channels/voice_ready/jambonz.py +15 -4
- rasa/core/channels/voice_ready/twilio_voice.py +6 -21
- rasa/core/channels/voice_stream/asr/asr_event.py +5 -0
- rasa/core/channels/voice_stream/asr/azure.py +122 -0
- rasa/core/channels/voice_stream/asr/deepgram.py +16 -6
- rasa/core/channels/voice_stream/audio_bytes.py +1 -0
- rasa/core/channels/voice_stream/browser_audio.py +31 -8
- rasa/core/channels/voice_stream/call_state.py +23 -0
- rasa/core/channels/voice_stream/tts/azure.py +6 -2
- rasa/core/channels/voice_stream/tts/cartesia.py +10 -6
- rasa/core/channels/voice_stream/tts/tts_engine.py +1 -0
- rasa/core/channels/voice_stream/twilio_media_streams.py +27 -18
- rasa/core/channels/voice_stream/util.py +4 -4
- rasa/core/channels/voice_stream/voice_channel.py +189 -39
- rasa/core/featurizers/single_state_featurizer.py +22 -1
- rasa/core/featurizers/tracker_featurizers.py +115 -18
- rasa/core/nlg/contextual_response_rephraser.py +32 -30
- rasa/core/persistor.py +86 -39
- rasa/core/policies/enterprise_search_policy.py +119 -60
- rasa/core/policies/flows/flow_executor.py +7 -4
- rasa/core/policies/intentless_policy.py +78 -22
- rasa/core/policies/ted_policy.py +58 -33
- rasa/core/policies/unexpected_intent_policy.py +15 -7
- rasa/core/processor.py +25 -0
- rasa/core/training/interactive.py +34 -35
- rasa/core/utils.py +8 -3
- rasa/dialogue_understanding/coexistence/llm_based_router.py +39 -12
- rasa/dialogue_understanding/commands/change_flow_command.py +6 -0
- rasa/dialogue_understanding/commands/user_silence_command.py +59 -0
- rasa/dialogue_understanding/commands/utils.py +5 -0
- rasa/dialogue_understanding/generator/constants.py +2 -0
- rasa/dialogue_understanding/generator/flow_retrieval.py +49 -4
- rasa/dialogue_understanding/generator/llm_based_command_generator.py +37 -23
- rasa/dialogue_understanding/generator/multi_step/multi_step_llm_command_generator.py +57 -10
- rasa/dialogue_understanding/generator/nlu_command_adapter.py +19 -1
- rasa/dialogue_understanding/generator/single_step/single_step_llm_command_generator.py +71 -11
- rasa/dialogue_understanding/patterns/default_flows_for_patterns.yml +39 -0
- rasa/dialogue_understanding/patterns/user_silence.py +37 -0
- rasa/dialogue_understanding/processor/command_processor.py +21 -1
- rasa/e2e_test/e2e_test_case.py +85 -6
- rasa/e2e_test/e2e_test_runner.py +4 -2
- rasa/e2e_test/utils/io.py +1 -1
- rasa/engine/validation.py +316 -10
- rasa/model_manager/config.py +15 -3
- rasa/model_manager/model_api.py +15 -7
- rasa/model_manager/runner_service.py +8 -6
- rasa/model_manager/socket_bridge.py +6 -3
- rasa/model_manager/trainer_service.py +7 -5
- rasa/model_manager/utils.py +28 -7
- rasa/model_service.py +9 -2
- rasa/model_training.py +2 -0
- rasa/nlu/classifiers/diet_classifier.py +38 -25
- rasa/nlu/classifiers/logistic_regression_classifier.py +22 -9
- rasa/nlu/classifiers/sklearn_intent_classifier.py +37 -16
- rasa/nlu/extractors/crf_entity_extractor.py +93 -50
- rasa/nlu/featurizers/sparse_featurizer/count_vectors_featurizer.py +45 -16
- rasa/nlu/featurizers/sparse_featurizer/lexical_syntactic_featurizer.py +52 -17
- rasa/nlu/featurizers/sparse_featurizer/regex_featurizer.py +5 -3
- rasa/nlu/tokenizers/whitespace_tokenizer.py +3 -14
- rasa/server.py +3 -1
- rasa/shared/constants.py +36 -3
- rasa/shared/core/constants.py +7 -0
- rasa/shared/core/domain.py +26 -0
- rasa/shared/core/flows/flow.py +5 -0
- rasa/shared/core/flows/flows_list.py +5 -1
- rasa/shared/core/flows/flows_yaml_schema.json +10 -0
- rasa/shared/core/flows/utils.py +39 -0
- rasa/shared/core/flows/validation.py +96 -0
- rasa/shared/core/slots.py +5 -0
- rasa/shared/nlu/training_data/features.py +120 -2
- rasa/shared/providers/_configs/azure_openai_client_config.py +5 -3
- rasa/shared/providers/_configs/litellm_router_client_config.py +200 -0
- rasa/shared/providers/_configs/model_group_config.py +167 -0
- rasa/shared/providers/_configs/openai_client_config.py +1 -1
- rasa/shared/providers/_configs/rasa_llm_client_config.py +73 -0
- rasa/shared/providers/_configs/self_hosted_llm_client_config.py +1 -0
- rasa/shared/providers/_configs/utils.py +16 -0
- rasa/shared/providers/embedding/_base_litellm_embedding_client.py +18 -29
- rasa/shared/providers/embedding/azure_openai_embedding_client.py +54 -21
- rasa/shared/providers/embedding/litellm_router_embedding_client.py +135 -0
- rasa/shared/providers/llm/_base_litellm_client.py +37 -31
- rasa/shared/providers/llm/azure_openai_llm_client.py +50 -29
- rasa/shared/providers/llm/litellm_router_llm_client.py +127 -0
- rasa/shared/providers/llm/rasa_llm_client.py +112 -0
- rasa/shared/providers/llm/self_hosted_llm_client.py +1 -1
- rasa/shared/providers/mappings.py +19 -0
- rasa/shared/providers/router/__init__.py +0 -0
- rasa/shared/providers/router/_base_litellm_router_client.py +149 -0
- rasa/shared/providers/router/router_client.py +73 -0
- rasa/shared/utils/common.py +8 -0
- rasa/shared/utils/health_check/__init__.py +0 -0
- rasa/shared/utils/health_check/embeddings_health_check_mixin.py +31 -0
- rasa/shared/utils/health_check/health_check.py +256 -0
- rasa/shared/utils/health_check/llm_health_check_mixin.py +31 -0
- rasa/shared/utils/io.py +28 -6
- rasa/shared/utils/llm.py +353 -46
- rasa/shared/utils/yaml.py +111 -73
- rasa/studio/auth.py +3 -5
- rasa/studio/config.py +13 -4
- rasa/studio/constants.py +1 -0
- rasa/studio/data_handler.py +10 -3
- rasa/studio/upload.py +81 -26
- rasa/telemetry.py +92 -17
- rasa/tracing/config.py +2 -0
- rasa/tracing/instrumentation/attribute_extractors.py +94 -17
- rasa/tracing/instrumentation/instrumentation.py +121 -0
- rasa/utils/common.py +5 -0
- rasa/utils/io.py +7 -81
- rasa/utils/log_utils.py +9 -2
- rasa/utils/sanic_error_handler.py +32 -0
- rasa/utils/tensorflow/feature_array.py +366 -0
- rasa/utils/tensorflow/model_data.py +2 -193
- rasa/validator.py +70 -0
- rasa/version.py +1 -1
- {rasa_pro-3.11.0a4.dev3.dist-info → rasa_pro-3.11.0rc2.dist-info}/METADATA +11 -10
- {rasa_pro-3.11.0a4.dev3.dist-info → rasa_pro-3.11.0rc2.dist-info}/RECORD +183 -163
- rasa/core/channels/inspector/dist/assets/flowDiagram-v2-855bc5b3-587d82d8.js +0 -1
- {rasa_pro-3.11.0a4.dev3.dist-info → rasa_pro-3.11.0rc2.dist-info}/NOTICE +0 -0
- {rasa_pro-3.11.0a4.dev3.dist-info → rasa_pro-3.11.0rc2.dist-info}/WHEEL +0 -0
- {rasa_pro-3.11.0a4.dev3.dist-info → rasa_pro-3.11.0rc2.dist-info}/entry_points.txt +0 -0
rasa/core/channels/inspector/dist/assets/ (one of the renamed minified mermaid chunks)
CHANGED
@@ -1,4 +1,4 @@
-import{Z as zt,$ as ot,X as wt,W as Ft,s as Nt,g as Xt,B as Yt,D as St,a as Ht,b as $t,E as Ut,l as Ct,U as qt,i as jt,d as Gt}from"./index-…
+import{Z as zt,$ as ot,X as wt,W as Ft,s as Nt,g as Xt,B as Yt,D as St,a as Ht,b as $t,E as Ut,l as Ct,U as qt,i as jt,d as Gt}from"./index-e7cef9de.js";import{c as Qt}from"./createText-62fc7601-b0ec81d6.js";import{i as Kt}from"./init-77b53fdd.js";import{o as Zt}from"./ordinal-ba9b4969.js";import{l as ft}from"./linear-4776f780.js";import{l as pt}from"./line-7c59abb6.js"; … (rest of this single-line minified bundle, an xychart parser/renderer, not reproduced here)
rasa/core/channels/inspector/dist/index.html
CHANGED
@@ -6,7 +6,7 @@
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <link rel="icon" href="data:image/x-icon;base64,AAABAAEAICAQAAEABADoAgAAFgAAACgAAAAgAAAAQAAAAAEABAAAAAAAAAIAAOwAAADsAAAAEAAAABAAAABxaUwAwoyZAMOJmADBi5kAwoyZAMKMmQDCjJkAwoyZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADQAAAAAAAAAAAAAAAAAACV3AAAAAAAAAAAAAAAAAAYXdwAAAAAAAAAAAAAAAANHd3cAAAAAAAEREREREREXd3d3ERAAAAAHd3d3d3d3d3FXd3dwAAAAB3d3d3d3d3dDB3d3cAAAAAd3IiIiIiIiAAIid3AAAAAHdwAAAAAAAAAAAHdwAAAAB3cAAAAAAAAAAAB3cAAAAAd3AAd3AABnd0AAd3AAAAAHdwAHdwADF3QgAHdwAAAAB3cAB3dTMXcSAAB3cAAAAAd3AAd3dxdxMAAAd3AAAAAHdwAHd3d3dGAAAHdwAAAAB3cAB3dHd3dxUAB3cAAAAAd3AAd3A1F3d3AAd3AAAAAHdwAHdwAANXdwAHdwAAAAB3cAB3cAAAAXcAB3cAAAAAd3AAd3d3d3d3AAd3AAAAAHdwAHd3d3d3dwAHdwAAAAB3cAB3d3d3d3cAB3cAAAAAd3AAAAAAAAAAAAd3AAAAAHdwAAAAAAAAAAAHdwAAAAB3cAAAAAAAAAAAB3cAAAAAd3d3d3d3d3d3d3d3AAAAAHd3d3d3d3d3d3d3dwAAAAB3d3d3d3d3d3d3d3cAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///5////4f///8H///8B/8AAAD/AAAA/wAAQP8AAcD/H//4/x//+P8ceDj/HHA4/xwAeP8cAPj/HAD4/xwAOP8cQDj/HHg4/xx+OP8cADj/HAA4/xwAOP8f//j/H//4/x//+P8AAAD/AAAA/wAAAP/////////////////////w==" sizes="32x32">
     <title>Rasa inspector</title>
-    <script type="module" crossorigin src="./assets/index-…
+    <script type="module" crossorigin src="./assets/index-e7cef9de.js"></script>
     <link rel="stylesheet" href="./assets/index-3ee28881.css">
   </head>
 
rasa/core/channels/inspector/src/helpers/audiostream.ts
CHANGED
@@ -8,6 +8,25 @@ const audioOptions = {
   }
 }
 
+const arrayBufferToBase64 = ( buffer: ArrayBuffer ): string => {
+  let binary = '';
+  const bytes = new Uint8Array( buffer );
+  const len = bytes.byteLength;
+  for (let i = 0; i < len; i++) {
+    binary += String.fromCharCode( bytes[ i ] );
+  }
+  return window.btoa( binary );
+}
+
+const base64ToArrayBuffer = ( s: string ): ArrayBuffer => {
+  const binary_string = window.atob(s);
+  const len = binary_string.length;
+  const bytes = new Uint8Array( len );
+  for (let i = 0; i < len; i++) {
+    bytes[i] = binary_string.charCodeAt(i);
+  }
+  return bytes.buffer;
+}
 
 const floatToIntArray = (arr: Float32Array): Int32Array => {
   // Convert Float Array [-1, 1] to full range int array
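Note (not part of the diff): the two helpers added above base64-encode and decode the raw Int32 sample buffers that travel over the inspector websocket. Below is a minimal Python sketch of the same round trip, handy when exercising this client from a test server; the function names are hypothetical and little-endian sample order is assumed.

import base64
import struct

def encode_audio_frame(samples: list) -> str:
    # Mirrors floatToIntArray(...).buffer + arrayBufferToBase64 in the client above:
    # pack signed 32-bit samples and base64-encode the raw bytes.
    raw = struct.pack(f"<{len(samples)}i", *samples)
    return base64.b64encode(raw).decode("ascii")

def decode_audio_frame(payload: str) -> list:
    # Mirrors base64ToArrayBuffer + new Int32Array(...) in the client above.
    raw = base64.b64decode(payload)
    return list(struct.unpack(f"<{len(raw) // 4}i", raw))

frame = encode_audio_frame([0, 2**30, -(2**30)])
assert decode_audio_frame(frame) == [0, 2**30, -(2**30)]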
@@ -18,17 +37,29 @@ const intToFloatArray = (arr: Int32Array): Float32Array => {
   return Float32Array.from(arr, x => (x / 0x7fffffff))
 }
 
+interface Mark {
+  id: string
+  bytesToGo: number
+}
+
 interface AudioQueue {
   buffer: Float32Array;
+  marks: Array<Mark>
+  socket: WebSocket,
   write: (newAudio: Float32Array) => void;
   read: (nSamples: number) => Float32Array;
   length: () => number;
+  addMarker: (id: string) => void;
+  reduceMarkers: (bytesRead: number) => void;
+  popMarkers: () => void;
 }
 
 
-const createAudioQueue = () : AudioQueue => {
+const createAudioQueue = (socket: WebSocket) : AudioQueue => {
   return {
     buffer: new Float32Array(0),
+    marks: new Array<Mark>(),
+    socket,
 
     write: function(newAudio: Float32Array) {
       const currentQLength = this.buffer.length;
@@ -41,12 +72,42 @@ const createAudioQueue = () : AudioQueue => {
     read: function(nSamples: number) {
       const samplesToPlay = this.buffer.subarray(0, nSamples);
       this.buffer = this.buffer.subarray(nSamples, this.buffer.length);
+      this.reduceMarkers(samplesToPlay.length)
+      this.popMarkers()
       return samplesToPlay;
     },
 
     length: function() {
       return this.buffer.length;
+    },
+
+    addMarker: function(id: string) {
+      this.marks.push({id, bytesToGo: this.length()})
+    },
+
+    reduceMarkers: function(bytesRead: number) {
+      this.marks = this.marks.map((m) => {
+        return {id: m.id, bytesToGo: m.bytesToGo - bytesRead}
+      })
+    },
+
+    popMarkers: function() {
+      // marks are ordered
+      let popUpTo = 0;
+      while (popUpTo < this.marks.length) {
+        if (this.marks[popUpTo].bytesToGo <= 0) {
+          popUpTo += 1
+        } else {
+          break
+        }
+      }
+      const marksToPop = this.marks.slice(0, popUpTo)
+      this.marks = this.marks.slice(popUpTo, this.marks.length)
+      marksToPop.forEach((m) => {
+        this.socket.send(JSON.stringify({marker: m.id}))
+      })
     }
+
   };
 }
 
@@ -59,7 +120,10 @@ const streamMicrophoneToServer = async (socket: WebSocket) => {
   const audioInput = audioContext.createMediaStreamSource(audioStream)
   const sender = audioContext.createScriptProcessor(bufferSize, 1, 1)
   sender.onaudioprocess = function(event) {
-    …
+    const message = JSON.stringify({
+      "audio": arrayBufferToBase64(floatToIntArray(event.inputBuffer.getChannelData(0)).buffer)
+    })
+    socket.send(message)
   }
   audioInput.connect(sender)
   sender.connect(audioContext.destination)
@@ -68,8 +132,8 @@ const streamMicrophoneToServer = async (socket: WebSocket) => {
   }
 }
 
-const setupAudioPlayback = (): AudioQueue => {
-  const audioQueue = createAudioQueue()
+const setupAudioPlayback = (socket: WebSocket): AudioQueue => {
+  const audioQueue = createAudioQueue(socket)
   const silence = new Float32Array(bufferSize)
   const audioOutputContext = new AudioContext({sampleRate})
   const scriptNode = audioOutputContext.createScriptProcessor(bufferSize, 1, 1);
@@ -82,23 +146,20 @@ const setupAudioPlayback = (): AudioQueue => {
 }
 
 const addDataToAudioQueue = (audioQueue: AudioQueue) => (message: MessageEvent<any>) => {
-  …
-  …
-  …
-  const …
-  …
-  …
-  audioQueue.…
-  …
-  };
-  reader.readAsArrayBuffer(message.data);
-}
+  const data = JSON.parse(message.data.toString())
+  if (data["audio"]) {
+    const audioBytes = base64ToArrayBuffer(data["audio"])
+    const audioData = intToFloatArray(new Int32Array(audioBytes))
+    audioQueue.write(audioData);
+  } else if (data["marker"]) {
+    audioQueue.addMarker(data["marker"])
+  }
 }
 
 export async function createAudioConnection() {
   const websocketURL = "ws://localhost:5005/webhooks/browser_audio/websocket"
   const socket = new WebSocket(websocketURL)
   socket.onopen = async () => { await streamMicrophoneToServer(socket)}
-  const audioQueue = setupAudioPlayback()
+  const audioQueue = setupAudioPlayback(socket)
   socket.onmessage = addDataToAudioQueue(audioQueue)
 }
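Reading the client changes above together (this is an interpretation of the diff, not code shipped in the wheel): the browser_audio inspector client now exchanges JSON frames. The browser sends {"audio": <base64 int32 samples>} from its microphone callback; the server can send {"audio": ...} for playback and {"marker": <id>}, which the client registers at the current queue depth and echoes back as {"marker": <id>} once that much audio has actually been played. A small illustrative Python sketch of the frame shapes follows; the function names are hypothetical.

import base64
import json

def audio_frame(int32_pcm: bytes) -> str:
    # Shape consumed by addDataToAudioQueue above.
    return json.dumps({"audio": base64.b64encode(int32_pcm).decode("ascii")})

def marker_frame(marker_id: str) -> str:
    # Makes the client call audioQueue.addMarker(marker_id); the client sends the
    # same shape back once playback reaches that point.
    return json.dumps({"marker": marker_id})

def handle_client_frame(raw: str) -> None:
    data = json.loads(raw)
    if "audio" in data:
        pcm = base64.b64decode(data["audio"])  # microphone audio from the browser
    elif "marker" in data:
        print(f"client finished playing audio up to marker {data['marker']}")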
rasa/core/channels/socketio.py
CHANGED
@@ -51,6 +51,7 @@ class SocketIOOutput(OutputChannel):
         return "socketio"
 
     def __init__(self, sio: AsyncServer, bot_message_evt: Text) -> None:
+        super().__init__()
         self.sio = sio
         self.bot_message_evt = bot_message_evt
         self.last_event_timestamp = (
@@ -59,7 +60,7 @@ class SocketIOOutput(OutputChannel):
 
     def _get_new_events(self) -> List[Dict[Text, Any]]:
         """Get events that are newer than the last sent event."""
-        events = self.tracker_state.get("events", [])
+        events = self.tracker_state.get("events", []) if self.tracker_state else []
         new_events = [
             event for event in events if event["timestamp"] > self.last_event_timestamp
         ]
rasa/core/channels/telegram.py
CHANGED
@@ -34,7 +34,7 @@ class TelegramOutput(Bot, OutputChannel):
         return "telegram"
 
     def __init__(self, access_token: Optional[Text]) -> None:
-        …
+        Bot.__init__(self, access_token)
 
     async def send_text_message(
         self, recipient_id: Text, text: Text, **kwargs: Any
rasa/core/channels/twilio.py
CHANGED
@@ -28,7 +28,7 @@ class TwilioOutput(Client, OutputChannel):
         auth_token: Optional[Text],
         twilio_number: Optional[Text],
     ) -> None:
-        …
+        Client.__init__(self, account_sid, auth_token)
         self.twilio_number = twilio_number
         self.send_retry = 0
         self.max_retry = 5
rasa/core/channels/voice_ready/audiocodes.py
CHANGED
@@ -21,6 +21,7 @@ from sanic.exceptions import NotFound, SanicException, ServerError
 from sanic.request import Request
 from sanic.response import HTTPResponse
 
+from rasa.utils.io import remove_emojis
 
 structlogger = structlog.get_logger()
 
@@ -449,6 +450,7 @@ class AudiocodesOutput(OutputChannel):
         self, recipient_id: Text, text: Text, **kwargs: Any
     ) -> None:
         """Send a text message."""
+        text = remove_emojis(text)
         await self.add_message({"type": "message", "text": text})
 
     async def send_image_url(
@@ -471,6 +473,16 @@ class AudiocodesOutput(OutputChannel):
         """Indicate that the conversation should be ended."""
         await self.add_message({"type": "event", "name": "hangup"})
 
+    async def send_text_with_buttons(
+        self,
+        recipient_id: str,
+        text: str,
+        buttons: List[Dict[str, Any]],
+        **kwargs: Any,
+    ) -> None:
+        """Uses the concise button output format for voice channels."""
+        await self.send_text_with_buttons_concise(recipient_id, text, buttons, **kwargs)
+
 
 class WebsocketOutput(AudiocodesOutput):
     def __init__(self, ws: Any, conversation_id: Text) -> None:
rasa/core/channels/voice_ready/jambonz.py
CHANGED
@@ -1,4 +1,4 @@
-from typing import Any, Awaitable, Callable, Dict, Optional, Text
+from typing import Any, Awaitable, Callable, Dict, List, Optional, Text
 
 import structlog
 from rasa.core.channels.channel import InputChannel, OutputChannel, UserMessage
@@ -13,8 +13,8 @@ from sanic import Blueprint, response, Websocket # type: ignore[attr-defined]
 from sanic.request import Request
 from sanic.response import HTTPResponse
 
-from rasa.shared.utils.common import …
-…
+from rasa.shared.utils.common import mark_as_beta_feature
+from rasa.utils.io import remove_emojis
 
 structlogger = structlog.get_logger()
 
@@ -36,7 +36,7 @@ class JambonzVoiceReadyInput(InputChannel):
 
     def __init__(self) -> None:
        """Initializes the JambonzVoiceReadyInput channel."""
-        …
+        mark_as_beta_feature("Jambonz Channel")
         validate_voice_license_scope()
 
     def blueprint(
@@ -87,6 +87,7 @@ class JambonzWebsocketOutput(OutputChannel):
         self, recipient_id: Text, text: Text, **kwargs: Any
     ) -> None:
         """Send a text message."""
+        text = remove_emojis(text)
         await self.add_message({"type": "message", "text": text})
 
     async def send_image_url(
@@ -108,3 +109,13 @@
     async def hangup(self, recipient_id: Text, **kwargs: Any) -> None:
         """Indicate that the conversation should be ended."""
         await send_ws_hangup_message(DEFAULT_HANGUP_DELAY_SECONDS, self.ws)
+
+    async def send_text_with_buttons(
+        self,
+        recipient_id: str,
+        text: str,
+        buttons: List[Dict[str, Any]],
+        **kwargs: Any,
+    ) -> None:
+        """Uses the concise button output format for voice channels."""
+        await self.send_text_with_buttons_concise(recipient_id, text, buttons, **kwargs)
rasa/core/channels/voice_ready/twilio_voice.py
CHANGED
@@ -358,38 +358,23 @@ class TwilioVoiceCollectingOutputChannel(CollectingOutputChannel):
         """Name of the output channel."""
         return "twilio_voice"
 
-    @staticmethod
-    def _emoji_warning(text: Text) -> None:
-        """Raises a warning if text contains an emoji."""
-        emoji_regex = rasa.utils.io.get_emoji_regex()
-        if emoji_regex.findall(text):
-            rasa.shared.utils.io.raise_warning(
-                "Text contains an emoji in a voice response. "
-                "Review responses to provide a voice-friendly alternative."
-            )
-
     async def send_text_message(
         self, recipient_id: Text, text: Text, **kwargs: Any
     ) -> None:
         """Sends the text message after removing emojis."""
-        …
+        text = rasa.utils.io.remove_emojis(text)
         for message_part in text.strip().split("\n\n"):
             await self._persist_message(self._message(recipient_id, text=message_part))
 
     async def send_text_with_buttons(
         self,
-        recipient_id: …
-        text: …
-        buttons: List[Dict[…
+        recipient_id: str,
+        text: str,
+        buttons: List[Dict[str, Any]],
         **kwargs: Any,
     ) -> None:
-        """…
-        self.…
-        await self._persist_message(self._message(recipient_id, text=text))
-
-        for b in buttons:
-            self._emoji_warning(b["title"])
-            await self._persist_message(self._message(recipient_id, text=b["title"]))
+        """Uses the concise button output format for voice channels."""
+        await self.send_text_with_buttons_concise(recipient_id, text, buttons, **kwargs)
 
     async def send_image_url(
         self, recipient_id: Text, image: Text, **kwargs: Any
rasa/core/channels/voice_stream/asr/azure.py
ADDED
@@ -0,0 +1,122 @@
+import os
+from dataclasses import dataclass
+from typing import Any, Dict, Optional, AsyncIterator
+import asyncio
+
+from rasa.core.channels.voice_stream.asr.asr_engine import ASREngine, ASREngineConfig
+from rasa.core.channels.voice_stream.asr.asr_event import (
+    ASREvent,
+    NewTranscript,
+    UserStartedSpeaking,
+)
+from rasa.core.channels.voice_stream.audio_bytes import HERTZ, RasaAudioBytes
+from rasa.shared.exceptions import ConnectionException
+
+
+@dataclass
+class AzureASRConfig(ASREngineConfig):
+    language: Optional[str] = None
+    speech_region: Optional[str] = None
+
+
+class AzureASR(ASREngine[AzureASRConfig]):
+    def __init__(self, config: Optional[AzureASRConfig] = None):
+        import azure.cognitiveservices.speech as speechsdk
+
+        super().__init__(config)
+        self.speech_recognizer: Optional[speechsdk.SpeechRecognizer] = None
+        self.stream: Optional[speechsdk.audio.PushAudioInputStream] = None
+        self.is_recognizing = False
+        self.queue: asyncio.Queue[speechsdk.SpeechRecognitionEventArgs] = (
+            asyncio.Queue()
+        )
+
+    def signal_user_started_speaking(self, event: Any) -> None:
+        """Replace the unspecific azure event with a specific start event."""
+        self.fill_queue(UserStartedSpeaking())
+
+    def fill_queue(self, event: Any) -> None:
+        """Either puts the event or a dedicated ASR Event into the queue."""
+        self.queue.put_nowait(event)
+
+    async def connect(self) -> None:
+        import azure.cognitiveservices.speech as speechsdk
+
+        speech_config = speechsdk.SpeechConfig(
+            subscription=os.environ["AZURE_SPEECH_API_KEY"],
+            region=self.config.speech_region,
+        )
+        audio_format = speechsdk.audio.AudioStreamFormat(
+            samples_per_second=HERTZ,
+            bits_per_sample=8,
+            channels=1,
+            wave_stream_format=speechsdk.AudioStreamWaveFormat.MULAW,
+        )
+        self.stream = speechsdk.audio.PushAudioInputStream(stream_format=audio_format)
+        audio_config = speechsdk.audio.AudioConfig(stream=self.stream)
+        self.speech_recognizer = speechsdk.SpeechRecognizer(
+            speech_config=speech_config,
+            language=self.config.language,
+            audio_config=audio_config,
+        )
+        self.speech_recognizer.recognized.connect(self.fill_queue)
+        self.speech_recognizer.speech_start_detected.connect(
+            self.signal_user_started_speaking
+        )
+        self.speech_recognizer.start_continuous_recognition_async()
+        self.is_recognizing = True
+
+    async def close_connection(self) -> None:
+        if self.speech_recognizer is None:
+            raise ConnectionException("Websocket not connected.")
+        self.speech_recognizer.stop_continuous_recognition_async()
+
+    async def signal_audio_done(self) -> None:
+        """Signal to the ASR Api that you are done sending data."""
+        self.is_recognizing = False
+
+    def rasa_audio_bytes_to_engine_bytes(self, chunk: RasaAudioBytes) -> bytes:
+        """Convert RasaAudioBytes to bytes usable by this engine."""
+        return chunk
+
+    async def send_audio_chunks(self, chunk: RasaAudioBytes) -> None:
+        """Send audio chunks to the ASR system via the websocket."""
+        if self.speech_recognizer is None or self.stream is None:
+            raise ConnectionException("ASR not connected.")
+        engine_bytes = self.rasa_audio_bytes_to_engine_bytes(chunk)
+        self.stream.write(engine_bytes)
+
+    async def stream_asr_events(self) -> AsyncIterator[ASREvent]:
+        """Stream the events returned by the ASR system as it is fed audio bytes."""
+        if self.speech_recognizer is None:
+            raise ConnectionException("Websocket not connected.")
+        while self.is_recognizing or not self.queue.empty():
+            try:
+                message = await asyncio.wait_for(self.queue.get(), timeout=2)
+                asr_event = self.engine_event_to_asr_event(message)
+                if asr_event:
+                    yield asr_event
+            except asyncio.TimeoutError:
+                pass
+
+    def engine_event_to_asr_event(self, e: Any) -> Optional[ASREvent]:
+        """Translate an engine event to a common ASREvent."""
+        import azure.cognitiveservices.speech as speechsdk
+
+        if isinstance(e, speechsdk.SpeechRecognitionEventArgs) and isinstance(
+            e.result, speechsdk.SpeechRecognitionResult
+        ):
+            return NewTranscript(e.result.text)
+        if isinstance(e, ASREvent):
+            # transformation happened before
+            return e
+
+        return None
+
+    @staticmethod
+    def get_default_config() -> AzureASRConfig:
+        return AzureASRConfig("en-US", "germanywestcentral")
+
+    @classmethod
+    def from_config_dict(cls, config: Dict) -> "AzureASR":
+        return AzureASR(AzureASRConfig.from_dict(config))
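For orientation, a minimal usage sketch of the new AzureASR engine above, based only on the methods visible in this diff; the config values are examples, and AZURE_SPEECH_API_KEY must be set in the environment.

import asyncio

from rasa.core.channels.voice_stream.asr.azure import AzureASR

async def transcribe_sketch() -> None:
    # Field names (language, speech_region) come from AzureASRConfig above.
    asr = AzureASR.from_config_dict(
        {"language": "en-US", "speech_region": "germanywestcentral"}
    )
    await asr.connect()
    # ... feed mu-law chunks with `await asr.send_audio_chunks(chunk)` ...
    await asr.signal_audio_done()
    async for event in asr.stream_asr_events():
        print(event)  # UserStartedSpeaking / NewTranscript events
    await asr.close_connection()

asyncio.run(transcribe_sketch())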
rasa/core/channels/voice_stream/asr/deepgram.py
CHANGED
@@ -7,8 +7,12 @@ import websockets
 from websockets.legacy.client import WebSocketClientProtocol
 
 from rasa.core.channels.voice_stream.asr.asr_engine import ASREngine, ASREngineConfig
-from rasa.core.channels.voice_stream.asr.asr_event import …
-…
+from rasa.core.channels.voice_stream.asr.asr_event import (
+    ASREvent,
+    NewTranscript,
+    UserStartedSpeaking,
+)
+from rasa.core.channels.voice_stream.audio_bytes import HERTZ, RasaAudioBytes
 
 DEEPGRAM_API_KEY = "DEEPGRAM_API_KEY"
 
@@ -18,6 +22,9 @@ class DeepgramASRConfig(ASREngineConfig):
     endpoint: Optional[str] = None
     # number of miliseconds of silence to determine end of speech
     endpointing: Optional[int] = None
+    language: Optional[str] = None
+    model: Optional[str] = None
+    smart_format: Optional[bool] = None
 
 
 class DeepgramASR(ASREngine[DeepgramASRConfig]):
@@ -27,7 +34,7 @@ class DeepgramASR(ASREngine[DeepgramASRConfig]):
 
     async def open_websocket_connection(self) -> WebSocketClientProtocol:
         """Connect to the ASR system."""
-        deepgram_api_key = os.environ…
+        deepgram_api_key = os.environ[DEEPGRAM_API_KEY]
         extra_headers = {"Authorization": f"Token {deepgram_api_key}"}
         api_url = self._get_api_url()
         query_params = self._get_query_params()
@@ -41,8 +48,9 @@ class DeepgramASR(ASREngine[DeepgramASRConfig]):
 
     def _get_query_params(self) -> str:
         return (
-            f"encoding=mulaw&sample_rate=…
-            f"&vad_events=true"
+            f"encoding=mulaw&sample_rate={HERTZ}&endpointing={self.config.endpointing}"
+            f"&vad_events=true&language={self.config.language}"
+            f"&model={self.config.model}&smart_format={str(self.config.smart_format).lower()}"
         )
 
     async def signal_audio_done(self) -> None:
@@ -66,11 +74,13 @@ class DeepgramASR(ASREngine[DeepgramASRConfig]):
                 return NewTranscript(full_transcript)
             else:
                 self.accumulated_transcript += transcript
+        elif data.get("type") == "SpeechStarted":
+            return UserStartedSpeaking()
         return None
 
     @staticmethod
     def get_default_config() -> DeepgramASRConfig:
-        return DeepgramASRConfig("api.deepgram.com", 400)
+        return DeepgramASRConfig("api.deepgram.com", 400, "en", "nova-2-general", True)
 
     @classmethod
     def from_config_dict(cls, config: Dict) -> "DeepgramASR":