llmir 0.0.9__tar.gz → 0.0.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: llmir
-Version: 0.0.9
+Version: 0.0.10
 Summary: Core message and tool IR for LLM pipelines
 Author: Mathis Siebert
 Requires-Python: >=3.11

@@ -1,4 +1,4 @@
-from typing import TypedDict, Literal
+from typing import TypedDict, Literal, Required
 
 from ..messages import AIMessages, AIMessageToolResponse
 from ..chunks import AIChunkText, AIChunkImageURL, AIChunkFile, AIChunkToolCall
@@ -32,13 +32,13 @@ class OpenAIToolCall(TypedDict):
 OpenAIContents = OpenAIText | OpenAIImageURL
 
 
-class OpenAIMessage(TypedDict):
-    role: str
+class OpenAIMessage(TypedDict, total=False):
+    role: Required[str]
     content: list[OpenAIContents]
     tool_calls: list[OpenAIToolCall]
 
-class OpenAIMessageToolResponse(TypedDict):
-    role: Literal["tool"]
+class OpenAIMessageToolResponse(TypedDict, total=False):
+    role: Required[Literal["tool"]]
     tool_call_id: str
     content: str
 
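A quick note on the typing change above: with total=False, every key of the TypedDict becomes optional unless it is wrapped in Required[...] (available from typing on Python 3.11+, which matches the package's Requires-Python). A minimal standalone sketch of the effect, with plain built-in value types standing in for OpenAIContents and OpenAIToolCall:

    from typing import TypedDict, Required

    class OpenAIMessage(TypedDict, total=False):
        role: Required[str]       # must always be present
        content: list[str]        # simplified stand-in; key may be omitted
        tool_calls: list[dict]    # simplified stand-in; key may be omitted

    ok: OpenAIMessage = {"role": "assistant"}                 # accepted: optional keys left out
    partial: OpenAIMessage = {"role": "user", "content": []}  # accepted
    # missing: OpenAIMessage = {"content": []}                # flagged by a type checker: role is Required
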
@@ -78,19 +78,25 @@ def to_openai(messages: list[AIMessages]) -> list[OpenAIMessages]:
                     content=[
                         content_chunk_to_openai(chunk) for chunk in media_chunks
                     ],
-                    tool_calls=[]
                 )
             )
         else:
-            result.append(OpenAIMessage(
-                role= role,
-                content=[
-                    content_chunk_to_openai(chunk) for chunk in message.chunks if not isinstance(chunk, AIChunkToolCall)
-                ],
-                tool_calls=[
-                    tool_call_chunk_to_openai(chunk) for chunk in message.chunks if isinstance(chunk, AIChunkToolCall)
-                ]
-            ))
+            formatted = OpenAIMessage(
+                role=role
+            )
+            content: list[OpenAIContents] = [
+                content_chunk_to_openai(chunk) for chunk in message.chunks if not isinstance(chunk, AIChunkToolCall)
+            ]
+            tool_calls: list[OpenAIToolCall] = [
+                tool_call_chunk_to_openai(chunk) for chunk in message.chunks if isinstance(chunk, AIChunkToolCall)
+            ]
+            if content:
+                formatted["content"] = content
+            if tool_calls:
+                formatted["tool_calls"] = tool_calls
+
+            result.append(formatted)
+
     return result
 
 
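The rewritten else branch above pairs with that typing change: the message dict starts with only the required role key, and content / tool_calls are attached only when the corresponding lists are non-empty, so an empty tool_calls=[] no longer appears in the serialized message. A small self-contained sketch of the same build-then-attach pattern, using hypothetical placeholder data rather than the package's real chunk objects:

    from typing import TypedDict, Required

    class Message(TypedDict, total=False):
        role: Required[str]
        content: list[str]
        tool_calls: list[dict]

    def build_message(role: str, text_chunks: list[str], calls: list[dict]) -> Message:
        # Start from the only required key, then add optional keys
        # only when there is something to put in them.
        formatted = Message(role=role)
        if text_chunks:
            formatted["content"] = text_chunks
        if calls:
            formatted["tool_calls"] = calls
        return formatted

    print(build_message("assistant", [], []))   # {'role': 'assistant'}
    print(build_message("user", ["hi"], []))    # {'role': 'user', 'content': ['hi']}
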
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: llmir
-Version: 0.0.9
+Version: 0.0.10
 Summary: Core message and tool IR for LLM pipelines
 Author: Mathis Siebert
 Requires-Python: >=3.11

@@ -1,6 +1,6 @@
 [project]
 name = "llmir"
-version = "0.0.9"
+version = "0.0.10"
 description = "Core message and tool IR for LLM pipelines"
 authors = [{ name="Mathis Siebert" }]
 requires-python = ">=3.11"