flowforge-sdk 0.2.2__tar.gz → 0.2.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/PKG-INFO +1 -1
  2. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/pyproject.toml +1 -1
  3. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/integrations/fastapi.py +16 -1
  4. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/steps.py +36 -5
  5. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/.gitignore +0 -0
  6. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/README.md +0 -0
  7. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/__init__.py +0 -0
  8. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/agent.py +0 -0
  9. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/agent_def.py +0 -0
  10. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/ai/__init__.py +0 -0
  11. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/ai/providers.py +0 -0
  12. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/client.py +0 -0
  13. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/config.py +0 -0
  14. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/context.py +0 -0
  15. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/decorators.py +0 -0
  16. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/dev/__init__.py +0 -0
  17. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/dev/server.py +0 -0
  18. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/exceptions.py +0 -0
  19. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/execution.py +0 -0
  20. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/integrations/__init__.py +0 -0
  21. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/network.py +0 -0
  22. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/router.py +0 -0
  23. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/tools.py +0 -0
  24. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/triggers.py +0 -0
  25. {flowforge_sdk-0.2.2 → flowforge_sdk-0.2.4}/src/flowforge/worker.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: flowforge-sdk
3
- Version: 0.2.2
3
+ Version: 0.2.4
4
4
  Summary: Python SDK for FlowForge - AI workflow orchestration
5
5
  Project-URL: Homepage, https://github.com/flowforge/flowforge
6
6
  Project-URL: Documentation, https://flowforge.dev/docs
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "flowforge-sdk"
3
- version = "0.2.2"
3
+ version = "0.2.4"
4
4
  description = "Python SDK for FlowForge - AI workflow orchestration"
5
5
  readme = "README.md"
6
6
  requires-python = ">=3.11"
@@ -13,6 +13,21 @@ if TYPE_CHECKING:
13
13
  from flowforge.decorators import FlowForgeFunction
14
14
 
15
15
 
16
+ def _make_serializable(obj: Any) -> Any:
17
+ """Recursively convert non-serializable objects to JSON-safe types."""
18
+ if isinstance(obj, dict):
19
+ return {k: _make_serializable(v) for k, v in obj.items()}
20
+ if isinstance(obj, list):
21
+ return [_make_serializable(item) for item in obj]
22
+ # Convert Tool objects to their OpenAI schema
23
+ if hasattr(obj, "to_openai_schema"):
24
+ return obj.to_openai_schema()
25
+ # Convert any dataclass-like objects with to_dict
26
+ if hasattr(obj, "to_dict") and callable(obj.to_dict):
27
+ return obj.to_dict()
28
+ return obj
29
+
30
+
16
31
  async def _handle_invoke(
17
32
  request: Request,
18
33
  flowforge: "FlowForge",
@@ -90,7 +105,7 @@ async def _handle_invoke(
90
105
  attempt=attempt,
91
106
  )
92
107
 
93
- return JSONResponse(content=result.to_dict())
108
+ return JSONResponse(content=_make_serializable(result.to_dict()))
94
109
 
95
110
 
96
111
  async def _handle_register(
@@ -269,6 +269,14 @@ class StepManager:
269
269
  if messages is None:
270
270
  raise ValueError("Either 'prompt' or 'messages' must be provided")
271
271
 
272
+ # Convert Tool objects to JSON-serializable OpenAI schema dicts
273
+ tools_schema = None
274
+ if tools:
275
+ tools_schema = [
276
+ t.to_openai_schema() if isinstance(t, Tool) else t
277
+ for t in tools
278
+ ]
279
+
272
280
  # This will be executed by the server/executor with LLM client
273
281
  ai_request = {
274
282
  "type": "ai",
@@ -278,7 +286,7 @@ class StepManager:
278
286
  "temperature": temperature,
279
287
  "provider": provider,
280
288
  "use_cache": use_cache,
281
- "tools": tools,
289
+ "tools": tools_schema,
282
290
  "tool_choice": tool_choice,
283
291
  "max_tool_calls": max_tool_calls,
284
292
  **kwargs,
@@ -556,9 +564,32 @@ class StepManager:
556
564
  "content": ai_response.get("content", ""),
557
565
  }
558
566
 
567
+ # Normalize tool calls to OpenAI format for litellm compatibility.
568
+ # Server stores flat format: {"id", "name", "arguments"}
569
+ # LiteLLM expects OpenAI format: {"id", "type", "function": {"name", "arguments"}}
570
+ raw_tool_calls = ai_response.get("tool_calls", [])
571
+ normalized_tool_calls = []
572
+ for tc in raw_tool_calls:
573
+ if "function" in tc:
574
+ # Already in OpenAI format
575
+ normalized_tool_calls.append(tc)
576
+ else:
577
+ # Convert flat format to OpenAI format
578
+ args = tc.get("arguments", {})
579
+ if not isinstance(args, str):
580
+ args = json.dumps(args)
581
+ normalized_tool_calls.append({
582
+ "id": tc.get("id", ""),
583
+ "type": "function",
584
+ "function": {
585
+ "name": tc.get("name", ""),
586
+ "arguments": args,
587
+ },
588
+ })
589
+
559
590
  # Include tool calls in assistant message if present
560
- if ai_response.get("tool_calls"):
561
- assistant_message["tool_calls"] = ai_response["tool_calls"]
591
+ if normalized_tool_calls:
592
+ assistant_message["tool_calls"] = normalized_tool_calls
562
593
 
563
594
  state.messages.append(assistant_message)
564
595
 
@@ -566,12 +597,12 @@ class StepManager:
566
597
  finish_reason = ai_response.get("finish_reason", "stop")
567
598
 
568
599
  # If no tool calls, we're done
569
- if not ai_response.get("tool_calls") or finish_reason == "stop":
600
+ if not normalized_tool_calls or finish_reason == "stop":
570
601
  state.status = "completed"
571
602
  break
572
603
 
573
604
  # Execute each tool call
574
- for tool_call in ai_response.get("tool_calls", []):
605
+ for tool_call in normalized_tool_calls:
575
606
  tool_call_id = tool_call.get("id", f"tool-{state.tool_calls_count}")
576
607
  tool_name = tool_call.get("function", {}).get("name", "")
577
608
  tool_args = tool_call.get("function", {}).get("arguments", {})
File without changes
File without changes