tasks-prompts-chain 0.1.1-py3-none-any.whl → 0.1.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

tasks_prompts_chain/client_llm_sdk.py
@@ -1,3 +1,5 @@
+from typing import Optional
+
 class ClientLLMSDK:
     """
     A class to handle LLM SDKs for various providers.
@@ -18,6 +20,25 @@ class ClientLLMSDK:
         # Instantiate the LLM
         self.client = AsyncLLmAi(**client_kwargs)
 
+    def _extract_content(self, chunk) -> Optional[str]:
+        """Extract content from different response formats"""
+        if self.llm_class_name == "AsyncAnthropic":
+            # Handle different Anthropic event types
+            if hasattr(chunk, 'type'):
+                if chunk.type == 'content_block_delta':
+                    return chunk.delta.text
+                elif chunk.type == 'message_stop':
+                    return None
+            return None
+        else:
+            # Handle OpenAI and other formats
+            if hasattr(chunk, 'choices') and chunk.choices:
+                if hasattr(chunk.choices[0], 'delta'):
+                    return chunk.choices[0].delta.content
+                elif hasattr(chunk.choices[0], 'message'):
+                    return chunk.choices[0].message.content
+            return None
+
     async def generat_response(self, **kwargs):
         """
         Generate a response from the LLM.
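
The new `_extract_content` helper centralizes chunk normalization across providers. Below is a minimal standalone sketch of that logic, using `SimpleNamespace` stand-ins for the SDK chunk objects; it is not the package API, just an illustration of the two chunk shapes being distinguished:

```python
from types import SimpleNamespace
from typing import Optional

def extract_content(llm_class_name: str, chunk) -> Optional[str]:
    # Standalone sketch of the normalization added above; not the package API.
    if llm_class_name == "AsyncAnthropic":
        # Anthropic streams typed events; only content_block_delta carries text
        if getattr(chunk, "type", None) == "content_block_delta":
            return chunk.delta.text
        return None
    # OpenAI-style chunks carry choices[0].delta (streaming) or .message (non-streaming)
    choices = getattr(chunk, "choices", None)
    if choices:
        first = choices[0]
        if hasattr(first, "delta"):
            return first.delta.content
        if hasattr(first, "message"):
            return first.message.content
    return None

# Simulated chunks in the two shapes the helper distinguishes
anthropic_chunk = SimpleNamespace(type="content_block_delta",
                                  delta=SimpleNamespace(text="Hello"))
openai_chunk = SimpleNamespace(
    choices=[SimpleNamespace(delta=SimpleNamespace(content=" world"))])

print(extract_content("AsyncAnthropic", anthropic_chunk))  # Hello
print(extract_content("AsyncOpenAI", openai_chunk))        #  world
```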
@@ -42,8 +63,14 @@ class ClientLLMSDK:
             )
 
             async for chunk in response:
-                if chunk.choices[0].delta.content is not None:
-                    yield chunk.choices[0].delta.content
+                if chunk is not None:
+                    if isinstance(chunk, str):
+                        delta = chunk
+                    else:
+                        # Handle different response formats
+                        delta = self._extract_content(chunk)
+                    if delta is not None:
+                        yield delta
 
 
         elif self.llm_class_name == "AsyncAnthropic": # Anthropic SDK
@@ -69,11 +96,14 @@ class ClientLLMSDK:
             )
 
             async for chunk in response:
-                # Based on the observed output format: RawContentBlockDeltaEvent with TextDelta
-                if chunk.type == "content_block_delta" and hasattr(chunk.delta, "text"):
-                    yield chunk.delta.text
-                elif chunk.type == "content_block_stop":
-                    pass
+                if chunk is not None:
+                    if isinstance(chunk, str):
+                        delta = chunk
+                    else:
+                        # Handle different response formats
+                        delta = self._extract_content(chunk)
+                    if delta is not None:
+                        yield delta
 
         elif self.llm_class_name == "AsyncCerebras": # AsyncCerebras SDK
             response = await self.client.chat.completions.create(
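
Both provider branches now share the same consumption pattern: skip `None` chunks, pass strings through unchanged, and route SDK objects through `_extract_content`. A self-contained sketch of that loop, with a hypothetical `collect` helper and a fake stream standing in for the SDK response:

```python
import asyncio
from typing import AsyncIterator, Callable, Optional

async def collect(stream: AsyncIterator,
                  extract: Callable[[object], Optional[str]]) -> str:
    # Sketch of the loop both branches share: skip None chunks, pass
    # str chunks through, and route SDK objects through the extractor.
    text = ""
    async for chunk in stream:
        if chunk is None:
            continue
        delta = chunk if isinstance(chunk, str) else extract(chunk)
        if delta is not None:
            text += delta
    return text

async def fake_stream():
    # Mixed chunk types, as the new guards tolerate
    yield "Hel"
    yield None
    yield "lo"

print(asyncio.run(collect(fake_stream(), lambda c: None)))  # Hello
```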

tasks_prompts_chain/tasks_prompts_chain.py
@@ -234,6 +234,8 @@ class TasksPromptsChain:
         async for chunk in streamResponse:
             if chunk is not None:
                 delta = chunk
+                if delta:
+                    delta = chunk
                 response_content += delta
                 self._current_stream_buffer = response_content
                 self._format_current_stream()
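
On the consumer side, `TasksPromptsChain` accumulates each yielded delta into a running buffer; since `generat_response` now yields only non-None string deltas, the added `if delta: delta = chunk` re-assignment is a harmless no-op guard. A sketch of the accumulation, with buffer names taken from the diff and the surrounding class omitted:

```python
import asyncio

async def consume(stream):
    # Sketch of the consumer-side accumulation in TasksPromptsChain;
    # buffer name follows the diff, the surrounding class is omitted.
    response_content = ""
    async for chunk in stream:
        if chunk is not None:
            delta = chunk  # generat_response already yields plain strings
            response_content += delta
    return response_content

async def chunks():
    for piece in ("Task ", "one ", "done"):
        yield piece

print(asyncio.run(consume(chunks())))  # Task one done
```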

tasks_prompts_chain-0.1.2.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tasks_prompts_chain
-Version: 0.1.1
+Version: 0.1.2
 Summary: A Python library for creating and executing chains of prompts using multiple LLM providers with streaming support and template formatting.
 Project-URL: Homepage, https://github.com/smirfolio/tasks_prompts_chain
 Project-URL: Issues, https://github.com/smirfolio/tasks_prompts_chain/issues

tasks_prompts_chain-0.1.2.dist-info/RECORD
@@ -0,0 +1,7 @@
+tasks_prompts_chain/__init__.py,sha256=HVhC_vMTYCyZW6vnoErHh-TkAnNRqJ2JJqClJQSfU8Y,148
+tasks_prompts_chain/client_llm_sdk.py,sha256=T2x7kyEliZdVm-hl5Ci88s8kOvWcy3lxoOLko1bk8Sk,4830
+tasks_prompts_chain/tasks_prompts_chain.py,sha256=V5t68vSMzUnY6TmansQmHcrjAE3NtRPD0KvzsdnC_A4,13477
+tasks_prompts_chain-0.1.2.dist-info/METADATA,sha256=sXTgsZW_t0lbwVFEIFALkibzkowhizo7zYS0QcffsB4,13331
+tasks_prompts_chain-0.1.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+tasks_prompts_chain-0.1.2.dist-info/licenses/LICENSE,sha256=WYmcYJG1QFgu1hfo7qrEkZ3Jhcz8NUWe6XUraZvlIFs,10172
+tasks_prompts_chain-0.1.2.dist-info/RECORD,,

tasks_prompts_chain-0.1.1.dist-info/RECORD
@@ -1,7 +0,0 @@
-tasks_prompts_chain/__init__.py,sha256=HVhC_vMTYCyZW6vnoErHh-TkAnNRqJ2JJqClJQSfU8Y,148
-tasks_prompts_chain/client_llm_sdk.py,sha256=ifwsecvykP5RalXMqolAClPHdpmv-5hF4eT61dxo1f8,3708
-tasks_prompts_chain/tasks_prompts_chain.py,sha256=T8WaqAdgDBqHjNa48dPgny_mDnEN1OvMTFTX6YndGgo,13409
-tasks_prompts_chain-0.1.1.dist-info/METADATA,sha256=1zVlU2deVaP-Fv_Zj6ph6LcDJrJOD4qSffehnp1umRM,13331
-tasks_prompts_chain-0.1.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-tasks_prompts_chain-0.1.1.dist-info/licenses/LICENSE,sha256=WYmcYJG1QFgu1hfo7qrEkZ3Jhcz8NUWe6XUraZvlIFs,10172
-tasks_prompts_chain-0.1.1.dist-info/RECORD,,