botrun-flow-lang 5.12.264__py3-none-any.whl → 6.2.61__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -34,7 +34,6 @@ from botrun_flow_lang.utils.botrun_logger import get_default_botrun_logger
 
  # Import for generate_image
  from langchain_community.utilities.dalle_image_generator import DallEAPIWrapper
- from langchain_community.callbacks import get_openai_callback
 
  # Initialize MCP server
  mcp = FastMCP(name="BotrunFlowLangDefaultMCP", stateless_http=True)
@@ -74,7 +73,7 @@ async def scrape(url: str) -> dict:
  @mcp.tool()
  async def chat_with_pdf(
  pdf_url: str, user_input: str, botrun_flow_lang_url: str, user_id: str
- ) -> str:
+ ) -> dict:
  """
  Analyze a PDF file and answer questions about its content.
 
@@ -89,7 +88,16 @@ async def chat_with_pdf(
  user_id: REQUIRED - User ID for file upload (LLM can get this from system prompt)
 
  Returns:
- str: Analysis result or Plotly-compatible data structure if visualization is needed
+ dict: {
+ "result": str, # Analysis result
+ "usage_metadata": List[Dict] # Token usage for each LLM call, with format:
+ - prompt_tokens: int
+ - completion_tokens: int
+ - total_tokens: int
+ - cache_creation_input_tokens: int
+ - cache_read_input_tokens: int
+ - model: str
+ }
  """
  logger.info(f"chat_with_pdf pdf_url: {pdf_url} user_input: {user_input}")
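
Both chat_with_pdf and chat_with_imgs now return this dict shape instead of a bare string, so an MCP client has to unpack "result" and can aggregate "usage_metadata" itself. A minimal consumer-side sketch (the helper name and example values are illustrative, not part of the package):

```python
from typing import Any, Dict, List

def total_token_usage(response: Dict[str, Any]) -> Dict[str, int]:
    """Sum the token counters across the usage_metadata entries of one tool call.

    Assumes the documented shape: {"result": ..., "usage_metadata": [{...}, ...]}.
    """
    totals = {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}
    entries: List[Dict[str, Any]] = response.get("usage_metadata", []) or []
    for entry in entries:
        for key in totals:
            totals[key] += entry.get(key, 0) or 0
    return totals

# Hypothetical response shaped like the docstring above.
response = {
    "result": "The PDF concludes that ...",
    "usage_metadata": [
        {"prompt_tokens": 1200, "completion_tokens": 350, "total_tokens": 1550,
         "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0,
         "model": "example-model"},
    ],
}
print(total_token_usage(response))  # {'prompt_tokens': 1200, 'completion_tokens': 350, 'total_tokens': 1550}
```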
 
@@ -106,7 +114,7 @@ async def chat_with_imgs(
  user_input: str,
  botrun_flow_lang_url: str,
  user_id: str,
- ) -> str:
+ ) -> dict:
  """
  Analyze multiple images and answer questions about their content.
 
@@ -117,7 +125,16 @@ async def chat_with_imgs(
  user_id: REQUIRED - User ID for file upload (LLM can get this from system prompt)
 
  Returns:
- str: Analysis result or Plotly-compatible data structure if visualization is needed
+ dict: {
+ "result": str, # Analysis result
+ "usage_metadata": List[Dict] # Token usage for each LLM call, with format:
+ - prompt_tokens: int
+ - completion_tokens: int
+ - total_tokens: int
+ - cache_creation_input_tokens: int
+ - cache_read_input_tokens: int
+ - model: str
+ }
  """
  logger.info(f"chat_with_imgs img_urls: {img_urls} user_input: {user_input}")
 
@@ -136,7 +153,7 @@ async def chat_with_imgs(
  @mcp.tool()
  async def generate_image(
  user_input: str, user_id: str = "", botrun_flow_lang_url: str = ""
- ) -> str:
+ ) -> dict:
  """
  Generate high-quality images using DALL-E 3 and store permanently in GCS.
 
@@ -173,18 +190,35 @@ async def generate_image(
  botrun_flow_lang_url: REQUIRED - URL for the botrun flow lang API (LLM can get this from system prompt)
 
  Returns:
- str: Permanent URL to the generated image stored in GCS, or error message if generation fails
+ dict: {
+ "result": str, # Permanent URL to the generated image stored in GCS, or error message
+ "usage_metadata": List[Dict] # Token usage for each LLM call, with format:
+ - prompt_tokens: int
+ - completion_tokens: int
+ - total_tokens: int
+ - cache_creation_input_tokens: int
+ - cache_read_input_tokens: int
+ - model: str
+ }
  """
+ usage_list = []
+
  try:
  logger.info(f"generate_image user_input: {user_input}")
 
  # Validate required parameters
  if not user_id:
  logger.error("User ID not available")
- return "User ID not available"
+ return {
+ "result": "User ID not available",
+ "usage_metadata": usage_list,
+ }
  if not botrun_flow_lang_url:
  logger.error("botrun_flow_lang_url not available")
- return "botrun_flow_lang_url not available"
+ return {
+ "result": "botrun_flow_lang_url not available",
+ "usage_metadata": usage_list,
+ }
 
  # Check rate limit before generating image
  rate_limit_client = RateLimitClient()
@@ -201,26 +235,38 @@ async def generate_image(
  f"User {user_id} has reached daily limit of {daily_limit} image generations. "
  f"Current usage: {current_usage}. Please try again tomorrow."
  )
- return f"[Please tell user error] You have reached your daily limit of {daily_limit} image generations. " \
- f"Current usage: {current_usage}. Please try again tomorrow."
- # raise BotrunRateLimitException(
- # f"You have reached your daily limit of {daily_limit} image generations. "
- # f"Current usage: {current_usage}. Please try again tomorrow."
- # )
+ return {
+ "result": f"[Please tell user error] You have reached your daily limit of {daily_limit} image generations. "
+ f"Current usage: {current_usage}. Please try again tomorrow.",
+ "usage_metadata": usage_list,
+ }
 
  # 2. Generate the image with DALL-E
+ dalle_size = "1024x1024" # options: 1024x1024, 1024x1792, 1792x1024
+ dalle_quality = "standard" # options: standard, hd
  dalle_wrapper = DallEAPIWrapper(
- api_key=os.getenv("OPENAI_API_KEY"), model="dall-e-3"
+ api_key=os.getenv("OPENAI_API_KEY"),
+ model="dall-e-3",
+ size=dalle_size,
+ quality=dalle_quality,
  )
 
- # Generate image with token usage tracking
- with get_openai_callback() as cb:
- temp_image_url = dalle_wrapper.run(user_input)
- logger.info(
- f"DALL-E generated temporary URL: {temp_image_url}, "
- f"prompt tokens: {cb.prompt_tokens}, "
- f"completion tokens: {cb.completion_tokens}"
- )
+ # Generate image (DALL-E charges per image, not per token)
+ temp_image_url = dalle_wrapper.run(user_input)
+ logger.info(f"DALL-E generated temporary URL: {temp_image_url}")
+
+ # DALL-E is not billed per token; record the image count and specs instead
+ usage_list.append({
+ "prompt_tokens": 0,
+ "completion_tokens": 0,
+ "total_tokens": 0,
+ "cache_creation_input_tokens": 0,
+ "cache_read_input_tokens": 0,
+ "model": "dall-e-3",
+ "image_count": 1,
+ "image_size": dalle_size,
+ "image_quality": dalle_quality,
+ })
 
  # 3. Download the image and upload it to GCS to get a permanent URL
  from botrun_flow_lang.langgraph_agents.agents.util.local_files import (
@@ -236,22 +282,34 @@ async def generate_image(
  # 4. Update the usage count
  await rate_limit_client.update_drawing_usage(user_id)
 
- return permanent_url
+ return {
+ "result": permanent_url,
+ "usage_metadata": usage_list,
+ }
  except Exception as upload_error:
  logger.error(
  f"Failed to upload to GCS, returning temporary URL: {upload_error}"
  )
  # Fallback: return the temporary URL
  await rate_limit_client.update_drawing_usage(user_id)
- return temp_image_url
+ return {
+ "result": temp_image_url,
+ "usage_metadata": usage_list,
+ }
 
  except Exception as e:
  logger.error(f"generate_image error: {e}", error=str(e), exc_info=True)
 
  # Check if this is a user-visible exception
  if str(e).startswith("[Please tell user error]"):
- return str(e) # Return the error message as is
- return f"Error: {e}"
+ return {
+ "result": str(e),
+ "usage_metadata": usage_list,
+ }
+ return {
+ "result": f"Error: {e}",
+ "usage_metadata": usage_list,
+ }
 
 
  @mcp.tool()
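
generate_image now appends an image-style entry to usage_metadata (zeroed token counters plus image_count, image_size and image_quality), so code that totals usage has to treat those entries differently from ordinary token entries. A sketch of one way to separate them, assuming only the fields shown in the diff (the helper itself is illustrative, not part of the package):

```python
from typing import Any, Dict, List, Tuple

def split_usage_entries(
    usage_metadata: List[Dict[str, Any]],
) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
    """Split usage_metadata into (token-billed entries, image-billed entries).

    Entries written by generate_image carry "image_count"; LLM entries do not.
    """
    token_entries: List[Dict[str, Any]] = []
    image_entries: List[Dict[str, Any]] = []
    for entry in usage_metadata:
        (image_entries if entry.get("image_count") else token_entries).append(entry)
    return token_entries, image_entries

# The entry recorded for one standard 1024x1024 DALL-E 3 generation, per the diff above.
dalle_entry = {
    "prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0,
    "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0,
    "model": "dall-e-3", "image_count": 1,
    "image_size": "1024x1024", "image_quality": "standard",
}
tokens, images = split_usage_entries([dalle_entry])
assert tokens == [] and images[0]["image_count"] == 1
```
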
@@ -267,7 +325,7 @@ async def generate_tmp_public_url(
  user_id: REQUIRED - User ID for file upload (LLM can get this from system prompt)
 
  Returns:
- str: A public URL that can be used to access the file for 7 days
+ str: A temporary public URL that may be deleted periodically
 
  Raises:
  FileNotFoundError: If the specified file does not exist
@@ -289,7 +347,9 @@ async def create_html_page(
  user_id: str,
  ) -> str:
  """
- Create a custom HTML page and return its URL.
+ Create a custom HTML page and return a PERMANENT URL that never expires.
+
+ The URL created by this tool will remain accessible indefinitely.
 
  This tool supports complete HTML documents, including JavaScript and CSS, which can be used to create
  complex interactive pages.
@@ -314,8 +374,9 @@ async def create_html_page(
  user_id: REQUIRED - User ID for file upload (LLM can get this from system prompt)
 
  Returns:
- str: URL for the HTML page. This URL should be provided to the user,
- as they will need to access it to view the content in their web browser.
+ str: A permanent URL for the HTML page that never expires.
+ This URL should be provided to the user, as they will need to
+ access it to view the content in their web browser.
  """
  try:
  logger.info(f"create_html_page html_content: {html_content} title: {title}")
@@ -692,11 +753,23 @@ async def web_search(
  user_id: Optional user ID (not used for this tool)
 
  Returns:
- dict: A dictionary containing:
- - content (str): The detailed answer based on web search results
- - citations (list): A list of URLs, citations are important to provide to the user
- - images (list): A list of image URLs (only when return_images is True)
+ dict: {
+ "result": {
+ "content": str, # The detailed answer based on web search results
+ "citations": list, # A list of URLs
+ "images": list # A list of image URLs (only when return_images is True)
+ },
+ "usage_metadata": List[Dict] # Token usage for each LLM call, with format:
+ - prompt_tokens: int
+ - completion_tokens: int
+ - total_tokens: int
+ - cache_creation_input_tokens: int
+ - cache_read_input_tokens: int
+ - model: str
+ }
  """
+ usage_list = []
+
  try:
  logger.info(f"web_search user_input: {user_input}")
 
@@ -717,6 +790,7 @@ async def web_search(
  "content": "",
  "citations": [],
  }
+ raw_response = None
 
  async for event in respond_with_perplexity_search(
  final_input,
@@ -729,16 +803,52 @@ async def web_search(
  ):
  if event and isinstance(event.chunk, str):
  search_result = json.loads(event.chunk)
+ if event and event.raw_json:
+ raw_response = event.raw_json
+
+ # Extract usage from raw response
+ if raw_response and "usage" in raw_response:
+ usage = raw_response["usage"]
+ prompt_tokens = usage.get("prompt_tokens", 0) or 0
+ citation_tokens = usage.get("citation_tokens", 0) or 0
+ completion_tokens = usage.get("completion_tokens", 0) or 0
+ model = raw_response.get("model", "sonar-reasoning-pro")
+
+ # Determine whether OpenRouter is used (same logic as perplexity_search.py)
+ is_use_openrouter = os.getenv("OPENROUTER_API_KEY") and os.getenv("OPENROUTER_BASE_URL")
+ if return_images:
+ is_use_openrouter = False
+
+ # Prefix the model name with its API source
+ if is_use_openrouter:
+ model = f"openrouter/{model}"
+ else:
+ # Perplexity API is used directly, so add the perplexity/ prefix
+ if not model.startswith("perplexity/"):
+ model = f"perplexity/{model}"
+
+ usage_list.append({
+ "prompt_tokens": prompt_tokens + citation_tokens,
+ "completion_tokens": completion_tokens,
+ "total_tokens": prompt_tokens + citation_tokens + completion_tokens,
+ "cache_creation_input_tokens": 0,
+ "cache_read_input_tokens": 0,
+ "model": model,
+ })
 
  logger.info(
  f"web_search completed============> {len(search_result.get('content', ''))}"
  )
- return (
- search_result
- if search_result
- else {"content": "No results found.", "citations": []}
- )
+
+ result = search_result if search_result else {"content": "No results found.", "citations": []}
+ return {
+ "result": result,
+ "usage_metadata": usage_list,
+ }
 
  except Exception as e:
  logger.error(f"web_search error: {e}", error=str(e), exc_info=True)
- return {"content": f"Error during web search: {str(e)}", "citations": []}
+ return {
+ "result": {"content": f"Error during web search: {str(e)}", "citations": []},
+ "usage_metadata": usage_list,
+ }
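
The usage handling added to web_search folds Perplexity's citation_tokens into prompt_tokens and prefixes the model name with its API source (openrouter/ or perplexity/). The same bookkeeping, pulled out as a standalone sketch for clarity (this helper is not part of the package; it assumes a raw response dict shaped like the one read above):

```python
import os
from typing import Any, Dict, Optional

def usage_entry_from_raw_response(
    raw_response: Dict[str, Any], return_images: bool = False
) -> Optional[Dict[str, Any]]:
    """Build one usage_metadata entry from a Perplexity-style raw response."""
    usage = raw_response.get("usage")
    if not usage:
        return None

    prompt_tokens = usage.get("prompt_tokens", 0) or 0
    citation_tokens = usage.get("citation_tokens", 0) or 0
    completion_tokens = usage.get("completion_tokens", 0) or 0
    model = raw_response.get("model", "sonar-reasoning-pro")

    # OpenRouter is only used when both env vars are set and no images were requested.
    is_use_openrouter = bool(
        os.getenv("OPENROUTER_API_KEY") and os.getenv("OPENROUTER_BASE_URL")
    ) and not return_images

    if is_use_openrouter:
        model = f"openrouter/{model}"
    elif not model.startswith("perplexity/"):
        model = f"perplexity/{model}"

    return {
        "prompt_tokens": prompt_tokens + citation_tokens,
        "completion_tokens": completion_tokens,
        "total_tokens": prompt_tokens + citation_tokens + completion_tokens,
        "cache_creation_input_tokens": 0,
        "cache_read_input_tokens": 0,
        "model": model,
    }
```
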
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: botrun-flow-lang
- Version: 5.12.264
+ Version: 6.2.61
  Summary: A flow language for botrun
  Author-email: sebastian-hsu <sebastian.hsu@gmail.com>
  License: MIT
@@ -27,12 +27,11 @@ Requires-Dist: google-cloud-storage<3,>=2.18
  Requires-Dist: google-genai>=1.28.0
  Requires-Dist: jinja2>=3.1.6
  Requires-Dist: langchain-anthropic>=0.3.10
- Requires-Dist: langchain-aws>=0.2.17
+ Requires-Dist: langchain-aws>=1.0.0
  Requires-Dist: langchain-community>=0.3.27
- Requires-Dist: langchain-core>=0.3.72
+ Requires-Dist: langchain-core>=1.1.2
  Requires-Dist: langchain-google-community>=2.0.3
- Requires-Dist: langchain-google-genai>=2.0.9
- Requires-Dist: langchain-google-vertexai<3.0.0,>=2.1.2
+ Requires-Dist: langchain-google-genai>=4.0.0
  Requires-Dist: langchain-mcp-adapters>=0.1.7
  Requires-Dist: langchain-openai>=0.3.28
  Requires-Dist: langchain>=0.3.27
@@ -41,7 +40,7 @@ Requires-Dist: langgraph-supervisor>=0.0.20
  Requires-Dist: langgraph>=0.6.3
  Requires-Dist: line-bot-sdk>=3.17.1
  Requires-Dist: mcp<1.11.0,>=1.10.1
- Requires-Dist: numpy<2,>=1
+ Requires-Dist: numpy>=1.24.0
  Requires-Dist: openai>=1.99.1
  Requires-Dist: pandas>=2.2.3
  Requires-Dist: pdfminer-six==20250506
@@ -9,7 +9,8 @@ botrun_flow_lang/api/auth_utils.py,sha256=KoVTZUMOBaATWvdyjjYKdBjDu8MaQGGvmhE8gn
  botrun_flow_lang/api/botrun_back_api.py,sha256=qNIQqMFZ969XaLE4qsbM659bcoK11o9sy85gfmeO8Kw,2462
  botrun_flow_lang/api/flow_api.py,sha256=I6ZMohJOpuVcs8q2euUjdydz0xYvavRei7f3LQFmjbQ,111
  botrun_flow_lang/api/hatch_api.py,sha256=S-_bNt4Y8oKtlzXW7JA3TuMRFO-Pb4-5OobhnwfiqFE,17492
- botrun_flow_lang/api/langgraph_api.py,sha256=PP0K_H5-BRJsjFGIvZEZubLtQ97FXp35Ts8SGSKxKe8,30102
+ botrun_flow_lang/api/langgraph_api.py,sha256=E1FDme6CUnMRXRaQFhp_S-uI4m7vtQo5CZOd0o_X1nA,30228
+ botrun_flow_lang/api/langgraph_constants.py,sha256=oxh3Rj940mZ7ekKIiQodvpQs_pek_R0atqgda9yxSV0,411
  botrun_flow_lang/api/line_bot_api.py,sha256=JluAbySIU42zWc0NaMwL1fhfRCEGMjAVWTfwKXp2F0A,56984
  botrun_flow_lang/api/model_api.py,sha256=vkzVvzxxsAhqbiMcVAeqiQoheJVbPLAXBqwoU5PgWMw,9783
  botrun_flow_lang/api/rate_limit_api.py,sha256=zrQ9wFILNqYMiLDM8NqdfcDg87BdyzbBC2Kns89WIGo,980
@@ -24,7 +25,7 @@ botrun_flow_lang/api/youtube_api.py,sha256=9eGr--gR2OoM9JZ6Nf9KqPiE-FeXEx8R-QeJv
  botrun_flow_lang/langgraph_agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  botrun_flow_lang/langgraph_agents/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  botrun_flow_lang/langgraph_agents/agents/agent_runner.py,sha256=tiuPIqAcM8rIWBTjo8NS4owTepCsX3QkIHaUEDakOTc,6673
- botrun_flow_lang/langgraph_agents/agents/langgraph_react_agent.py,sha256=ANXWs6WKRULX9eECipCj_ivY-mnyt-gcu9_-xmLGKSg,30672
+ botrun_flow_lang/langgraph_agents/agents/langgraph_react_agent.py,sha256=-4ejdM9SiocZyV92wCg1DOKNMBUTO6-wanj-D73RG7k,31156
  botrun_flow_lang/langgraph_agents/agents/search_agent_graph.py,sha256=hWDPt0U09Gj-3-NNWhsn9xaakYbOcHExIXqcL8TeZxw,32046
  botrun_flow_lang/langgraph_agents/agents/agent_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  botrun_flow_lang/langgraph_agents/agents/agent_tools/step_planner.py,sha256=S4TYt0ZhgdAZ-2ndH8hJoEaIyDKdNJdWHjEZ49Lg_NQ,2427
@@ -38,18 +39,20 @@ botrun_flow_lang/langgraph_agents/agents/gov_researcher/gov_researcher_graph.py,
  botrun_flow_lang/langgraph_agents/agents/tools/__init__.py,sha256=-z1uuC3IET02q8kPhPlr-L9eTGJqgHjEJlC__cG16H0,105
  botrun_flow_lang/langgraph_agents/agents/tools/gemini_code_execution.py,sha256=EEp8xhVU-Kj1Nk5qV8ObqdVZ8gT6GITrE4VyjIc2InA,14238
  botrun_flow_lang/langgraph_agents/agents/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ botrun_flow_lang/langgraph_agents/agents/util/custom_vertex_claude.py,sha256=RDj1-PnoR36xdOUu7r6yMQrxAe-CXyGcSZEKiOlPxJY,14951
  botrun_flow_lang/langgraph_agents/agents/util/gemini_grounding.py,sha256=JTfH9WJNDlpvMvfzXyZy3bHeCN58MTnEOiamQGMsqh0,2884
  botrun_flow_lang/langgraph_agents/agents/util/html_util.py,sha256=g5yJO0qTqRq_kb-xhSnWX3WAbHDIjNQYl7ErRBPQwHs,13230
- botrun_flow_lang/langgraph_agents/agents/util/img_util.py,sha256=px_ymI5U8WkFujqF-nbxZZNFOKPxJyj5HYV2IQdpYl4,10405
+ botrun_flow_lang/langgraph_agents/agents/util/img_util.py,sha256=6OERtpGGimlev4Pb_O1UbMNaT_DMBHSmAgo9gB-R8xk,12385
  botrun_flow_lang/langgraph_agents/agents/util/local_files.py,sha256=b7N4B3P9zPPDj7_C9y8JaU5oROQostCXBt7wfxi_L64,13529
  botrun_flow_lang/langgraph_agents/agents/util/mermaid_util.py,sha256=o65I979wA7jzzB_Zp-t2CxBjNdXyFtdTkdofi4bJlb0,2642
  botrun_flow_lang/langgraph_agents/agents/util/model_utils.py,sha256=oeYEwiEtlrNGomKZ98M3F_OvXYjAIoCV9IJCY9eMuug,4954
- botrun_flow_lang/langgraph_agents/agents/util/pdf_analyzer.py,sha256=Q8cuTYvYG5ZSCdjPFiH96dWjXrSXA9DSZpLicKMx_u0,16177
+ botrun_flow_lang/langgraph_agents/agents/util/pdf_analyzer.py,sha256=Pu5hZGquvgm1Iy6qWD5SDG2--9tumTHdJ8EGLcTF8LU,20305
  botrun_flow_lang/langgraph_agents/agents/util/pdf_cache.py,sha256=nmHA-qf2px3ywUtC_5kXIQHg-Gl2W8DOSnL8gOFR3xY,7375
  botrun_flow_lang/langgraph_agents/agents/util/pdf_processor.py,sha256=1YbB4zpabQB-8HwRvd4LRyye4oSusLNUW_iJfmUNANw,6244
  botrun_flow_lang/langgraph_agents/agents/util/perplexity_search.py,sha256=dZmb4tSECEXWOSZkqdMhFrmnGwMhdHSUJvLT7IAVu_s,19537
  botrun_flow_lang/langgraph_agents/agents/util/plotly_util.py,sha256=WwAPcmMDnQnrsxH_92377G0yRWf-dF-g8uOG9KnkcCk,1972
  botrun_flow_lang/langgraph_agents/agents/util/tavily_search.py,sha256=jjuA8dko9YRSs_LcvMduAsSGDaix3UEzw4cIllVVFh0,6822
+ botrun_flow_lang/langgraph_agents/agents/util/usage_metadata.py,sha256=ahjulhlNUdyLG-KNBL-0pQPkxbzpiYKjGR2YqQCF1fA,1207
  botrun_flow_lang/langgraph_agents/agents/util/youtube_util.py,sha256=PHGDpJqRZNHLoLMAFpfpQiz_vlZWG3u53GZQajjjEpI,3007
  botrun_flow_lang/langgraph_agents/cache/__init__.py,sha256=SnKEKUXeTReKzUeNVXfvP3BEZypgKBQ4TKs_-T8ZdtI,36
  botrun_flow_lang/langgraph_agents/cache/langgraph_botrun_cache.py,sha256=HgwP7HKIglm24LFy7sIddLNi-nAfmIkxqvkwV6FnxVk,6364
@@ -58,7 +61,7 @@ botrun_flow_lang/llm_agent/llm_agent.py,sha256=1yPws2a5MVouOBnsNudh-c1POviYaz9hn
  botrun_flow_lang/llm_agent/llm_agent_util.py,sha256=cvnkHYH1D1V1_chgIByCb1Cn7iytNxtlJpfrFlYa_a4,3131
  botrun_flow_lang/log/.gitignore,sha256=ZeCRrK8PsUdGyHBMDfCkk1Jl9XrN9VkgJmyeCIUCxGU,18
  botrun_flow_lang/mcp_server/__init__.py,sha256=lbhwcb-QsYmdXA8bS3pSD-CLVbcbCKfl1XeOaUm380Y,218
- botrun_flow_lang/mcp_server/default_mcp.py,sha256=wNzK0WRS62ABFb5f0QuJQMDpNs4yZSreGTzmD8fwXqs,29234
+ botrun_flow_lang/mcp_server/default_mcp.py,sha256=azFgYEo0yXwBm-n-SqFwSw2YhcrvgAloS_YNR8eXsO0,33194
  botrun_flow_lang/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  botrun_flow_lang/models/token_usage.py,sha256=PHLPwzLaGwFhWzxaHBYcahztlyTZEpRso5XI6pscWVM,876
  botrun_flow_lang/models/nodes/utils.py,sha256=uKCdPYQfzjGf8Bzoy-FSZxtcwVifwnCviaJM4qnEyrI,6904
@@ -97,6 +100,6 @@ botrun_flow_lang/utils/yaml_utils.py,sha256=dPlabIol-Clhnwc7N5nuffCaLSq8dyvmvjRw
  botrun_flow_lang/utils/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  botrun_flow_lang/utils/clients/rate_limit_client.py,sha256=96NNCHB9I5C5bpVFF6sfPhmh4oAx3UdOLb-Z4PAXLdg,8558
  botrun_flow_lang/utils/clients/token_verify_client.py,sha256=-AnYApJ9CvxVn-RhCCZZ2LCrf065fgskhwLKAm-aiN0,5893
- botrun_flow_lang-5.12.264.dist-info/METADATA,sha256=u_ZfQqG71uK5H-aV7BNSjYAHTB5MF6kjW0sh4VOioD0,6221
- botrun_flow_lang-5.12.264.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- botrun_flow_lang-5.12.264.dist-info/RECORD,,
+ botrun_flow_lang-6.2.61.dist-info/METADATA,sha256=7mfsw0_BTSzWq5fSeKqCg0RmuOEyMfdKwItnzlRA0p8,6164
+ botrun_flow_lang-6.2.61.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ botrun_flow_lang-6.2.61.dist-info/RECORD,,