flowstate-sdk 0.1.11__py3-none-any.whl → 0.1.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowstate_sdk/langchain/callback_handler.py +48 -18
- {flowstate_sdk-0.1.11.dist-info → flowstate_sdk-0.1.12.dist-info}/METADATA +1 -1
- {flowstate_sdk-0.1.11.dist-info → flowstate_sdk-0.1.12.dist-info}/RECORD +6 -6
- {flowstate_sdk-0.1.11.dist-info → flowstate_sdk-0.1.12.dist-info}/WHEEL +0 -0
- {flowstate_sdk-0.1.11.dist-info → flowstate_sdk-0.1.12.dist-info}/licenses/LICENSE +0 -0
- {flowstate_sdk-0.1.11.dist-info → flowstate_sdk-0.1.12.dist-info}/top_level.txt +0 -0
flowstate_sdk/langchain/callback_handler.py

@@ -252,6 +252,31 @@ def _is_tool_call_message(message: Any, gen_info: Optional[Dict[str, Any]]) -> bool:
     return False


+def _estimate_token_count(text: Optional[str], model: Optional[str]) -> Optional[int]:
+    if not text:
+        return 0
+    try:
+        import tiktoken  # type: ignore
+    except Exception:
+        tiktoken = None
+
+    if tiktoken is not None:
+        try:
+            try:
+                enc = tiktoken.encoding_for_model(model or "")
+            except Exception:
+                enc = tiktoken.get_encoding("cl100k_base")
+            return len(enc.encode(text))
+        except Exception:
+            return None
+
+    if os.getenv("FLOWSTATE_APPROX_TOKENS"):
+        approx = (len(text) + 3) // 4
+        return approx if approx > 0 else 0
+
+    return None
+
+
 class FlowstateCallbackHandler(BaseCallbackHandler):
     def __init__(self, provider: str, model: str) -> None:
         self.provider = provider
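Not part of the diff, but for illustration: a minimal sketch of how the new _estimate_token_count fallback behaves, calling the private helper directly. It assumes tiktoken may or may not be installed; FLOWSTATE_APPROX_TOKENS is the opt-in environment variable introduced in the hunk above.

    import os
    from flowstate_sdk.langchain.callback_handler import _estimate_token_count

    # With tiktoken installed, the count comes from the model's encoding,
    # falling back to the cl100k_base encoding for unknown model names.
    print(_estimate_token_count("Hello, world!", "gpt-4o-mini"))

    # Without tiktoken, approximation is opt-in via FLOWSTATE_APPROX_TOKENS:
    # roughly one token per four characters, rounded up.
    os.environ["FLOWSTATE_APPROX_TOKENS"] = "1"
    print(_estimate_token_count("Hello, world!", None))  # (13 + 3) // 4 == 4 when tiktoken is absent

    # Empty or missing text always counts as zero tokens.
    print(_estimate_token_count("", "gpt-4o-mini"))  # 0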
@@ -341,6 +366,25 @@ class FlowstateCallbackHandler(BaseCallbackHandler):
         if finish_reason is None and _is_tool_call_message(ai_message_chunk, gen_info):
             return

+        llm_output = getattr(response, "llm_output", None)
+        resolved_model = self.model
+        if isinstance(llm_output, dict):
+            for key in ("model_name", "model", "model_id"):
+                val = llm_output.get(key)
+                if isinstance(val, str) and val.strip():
+                    resolved_model = val
+                    break
+        resolved_model = _normalize_model_name(resolved_model)
+
+        # Extract output text robustly (chat message content or plain .text)
+        output_text = ""
+        if response.generations and response.generations[0]:
+            generation = response.generations[0][0]
+            if getattr(generation, "message", None) is not None:
+                output_text = _content_to_text(generation.message.content)
+            else:
+                output_text = generation.text or ""
+
         usage_candidates: List[Dict[str, Any]] = []
         gen_response_metadata = getattr(generation_chunk, "response_metadata", None)
         if isinstance(gen_response_metadata, dict):
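For reference, a standalone sketch of the resolution order the relocated block implements. The llm_output dict below is an assumed example of what LangChain chat models place on LLMResult.llm_output, not data taken from the package:

    # Assumed example payload from a LangChain OpenAI chat model.
    llm_output = {
        "token_usage": {"prompt_tokens": 12, "completion_tokens": 34},
        "model_name": "gpt-4o-2024-08-06",
    }

    resolved_model = "gpt-4o"  # the model the handler was constructed with
    for key in ("model_name", "model", "model_id"):
        val = llm_output.get(key)
        if isinstance(val, str) and val.strip():
            resolved_model = val  # first non-empty string wins
            break

    print(resolved_model)  # "gpt-4o-2024-08-06", later passed through _normalize_model_name

Hoisting this block above the usage extraction is what lets the new estimation fallback in the next hunk receive the resolved model name and the extracted output text.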
@@ -360,20 +404,15 @@ class FlowstateCallbackHandler(BaseCallbackHandler):
             usage_candidates.append(additional_kwargs)
         if isinstance(gen_info, dict) and gen_info:
             usage_candidates.append(gen_info)
-        llm_output = getattr(response, "llm_output", None)
         if isinstance(llm_output, dict):
             usage_candidates.append(llm_output)

         input_tokens, output_tokens = _extract_token_usage(usage_candidates)
+        if input_tokens is None:
+            input_tokens = _estimate_token_count(self._input_str, resolved_model)
+        if output_tokens is None:
+            output_tokens = _estimate_token_count(output_text, resolved_model)

-        resolved_model = self.model
-        if isinstance(llm_output, dict):
-            for key in ("model_name", "model", "model_id"):
-                val = llm_output.get(key)
-                if isinstance(val, str) and val.strip():
-                    resolved_model = val
-                    break
-        resolved_model = _normalize_model_name(resolved_model)
         if os.getenv("FLOWSTATE_DEBUG_LLM_USAGE"):
             try:
                 import json
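A hedged wiring sketch showing when the fallback matters. Only the FlowstateCallbackHandler constructor signature is taken from this diff; the rest assumes langchain_openai is installed and that the handler runs inside an instrumented Flowstate workflow:

    from langchain_openai import ChatOpenAI
    from flowstate_sdk.langchain.callback_handler import FlowstateCallbackHandler

    handler = FlowstateCallbackHandler(provider="openai", model="gpt-4o-mini")
    llm = ChatOpenAI(model="gpt-4o-mini", streaming=True, callbacks=[handler])

    # If the provider response carries no token usage (common with streamed
    # responses), _extract_token_usage yields None values and the handler now
    # falls back to _estimate_token_count for both the prompt and the completion.
    llm.invoke("Summarize the plot of Hamlet in one sentence.")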
@@ -410,15 +449,6 @@ class FlowstateCallbackHandler(BaseCallbackHandler):
             if cost_value != 0.0:
                 cost_usd = cost_value

-        # Extract output text robustly (chat message content or plain .text)
-        output_text = ""
-        if response.generations and response.generations[0]:
-            generation = response.generations[0][0]
-            if getattr(generation, "message", None) is not None:
-                output_text = _content_to_text(generation.message.content)
-            else:
-                output_text = generation.text or ""
-
         provider_metrics = ProviderMetrics(
             run_id=context.current_run.get(),
             provider=self.provider,
{flowstate_sdk-0.1.11.dist-info → flowstate_sdk-0.1.12.dist-info}/RECORD

@@ -9,9 +9,9 @@ flowstate_sdk/shared_dataclasses.py,sha256=-hq4NUdBfK02NI1m8cEScPYBtURUcC7ll7q_q
 flowstate_sdk/task.py,sha256=3quyOV30H2YY9j7mNNLNJo3IKBvglfqyCfOvLLcksas,702
 flowstate_sdk/task_context.py,sha256=DtM3o4dsJzMNHwFygRq2474wSwJMPiZOMJDrZDInAS4,5944
 flowstate_sdk/workflow.py,sha256=CqKLNJ_1hY93NKXIvanNSYX-fALYbzZs5m0VnRsaNzI,1988
-flowstate_sdk/langchain/callback_handler.py,sha256=
-flowstate_sdk-0.1.
-flowstate_sdk-0.1.
-flowstate_sdk-0.1.
-flowstate_sdk-0.1.
-flowstate_sdk-0.1.
+flowstate_sdk/langchain/callback_handler.py,sha256=5f_zOAWquP8XBknPmFxM9Vpat_LrhTW1GGhnKpQzpiQ,16892
+flowstate_sdk-0.1.12.dist-info/licenses/LICENSE,sha256=gBfhEg0GV1iXXcGHVELsXUX-y_ewaTaFvE2MWfJoNUI,19
+flowstate_sdk-0.1.12.dist-info/METADATA,sha256=A1giTx1fkBdZLPM2KhGhMRof_xww7Wvg10P8Ejs2cPU,376
+flowstate_sdk-0.1.12.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+flowstate_sdk-0.1.12.dist-info/top_level.txt,sha256=dZ-q9vpo98TaUsZA3d8NNe1XxK2RaPmoIbDXuidZ1uk,14
+flowstate_sdk-0.1.12.dist-info/RECORD,,
{flowstate_sdk-0.1.11.dist-info → flowstate_sdk-0.1.12.dist-info}/WHEEL: file without changes
{flowstate_sdk-0.1.11.dist-info → flowstate_sdk-0.1.12.dist-info}/licenses/LICENSE: file without changes
{flowstate_sdk-0.1.11.dist-info → flowstate_sdk-0.1.12.dist-info}/top_level.txt: file without changes