hammad-python 0.0.30__py3-none-any.whl → 0.0.32__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. ham/__init__.py +200 -0
  2. {hammad_python-0.0.30.dist-info → hammad_python-0.0.32.dist-info}/METADATA +6 -32
  3. hammad_python-0.0.32.dist-info/RECORD +6 -0
  4. hammad/__init__.py +0 -84
  5. hammad/_internal.py +0 -256
  6. hammad/_main.py +0 -226
  7. hammad/cache/__init__.py +0 -40
  8. hammad/cache/base_cache.py +0 -181
  9. hammad/cache/cache.py +0 -169
  10. hammad/cache/decorators.py +0 -261
  11. hammad/cache/file_cache.py +0 -80
  12. hammad/cache/ttl_cache.py +0 -74
  13. hammad/cli/__init__.py +0 -33
  14. hammad/cli/animations.py +0 -573
  15. hammad/cli/plugins.py +0 -867
  16. hammad/cli/styles/__init__.py +0 -55
  17. hammad/cli/styles/settings.py +0 -139
  18. hammad/cli/styles/types.py +0 -358
  19. hammad/cli/styles/utils.py +0 -634
  20. hammad/data/__init__.py +0 -90
  21. hammad/data/collections/__init__.py +0 -49
  22. hammad/data/collections/collection.py +0 -326
  23. hammad/data/collections/indexes/__init__.py +0 -37
  24. hammad/data/collections/indexes/qdrant/__init__.py +0 -1
  25. hammad/data/collections/indexes/qdrant/index.py +0 -723
  26. hammad/data/collections/indexes/qdrant/settings.py +0 -94
  27. hammad/data/collections/indexes/qdrant/utils.py +0 -210
  28. hammad/data/collections/indexes/tantivy/__init__.py +0 -1
  29. hammad/data/collections/indexes/tantivy/index.py +0 -426
  30. hammad/data/collections/indexes/tantivy/settings.py +0 -40
  31. hammad/data/collections/indexes/tantivy/utils.py +0 -176
  32. hammad/data/configurations/__init__.py +0 -35
  33. hammad/data/configurations/configuration.py +0 -564
  34. hammad/data/models/__init__.py +0 -50
  35. hammad/data/models/extensions/__init__.py +0 -4
  36. hammad/data/models/extensions/pydantic/__init__.py +0 -42
  37. hammad/data/models/extensions/pydantic/converters.py +0 -759
  38. hammad/data/models/fields.py +0 -546
  39. hammad/data/models/model.py +0 -1078
  40. hammad/data/models/utils.py +0 -280
  41. hammad/data/sql/__init__.py +0 -24
  42. hammad/data/sql/database.py +0 -576
  43. hammad/data/sql/types.py +0 -127
  44. hammad/data/types/__init__.py +0 -75
  45. hammad/data/types/file.py +0 -431
  46. hammad/data/types/multimodal/__init__.py +0 -36
  47. hammad/data/types/multimodal/audio.py +0 -200
  48. hammad/data/types/multimodal/image.py +0 -182
  49. hammad/data/types/text.py +0 -1308
  50. hammad/formatting/__init__.py +0 -33
  51. hammad/formatting/json/__init__.py +0 -27
  52. hammad/formatting/json/converters.py +0 -158
  53. hammad/formatting/text/__init__.py +0 -63
  54. hammad/formatting/text/converters.py +0 -723
  55. hammad/formatting/text/markdown.py +0 -131
  56. hammad/formatting/yaml/__init__.py +0 -26
  57. hammad/formatting/yaml/converters.py +0 -5
  58. hammad/genai/__init__.py +0 -217
  59. hammad/genai/a2a/__init__.py +0 -32
  60. hammad/genai/a2a/workers.py +0 -552
  61. hammad/genai/agents/__init__.py +0 -59
  62. hammad/genai/agents/agent.py +0 -1973
  63. hammad/genai/agents/run.py +0 -1024
  64. hammad/genai/agents/types/__init__.py +0 -42
  65. hammad/genai/agents/types/agent_context.py +0 -13
  66. hammad/genai/agents/types/agent_event.py +0 -128
  67. hammad/genai/agents/types/agent_hooks.py +0 -220
  68. hammad/genai/agents/types/agent_messages.py +0 -31
  69. hammad/genai/agents/types/agent_response.py +0 -125
  70. hammad/genai/agents/types/agent_stream.py +0 -327
  71. hammad/genai/graphs/__init__.py +0 -125
  72. hammad/genai/graphs/_utils.py +0 -190
  73. hammad/genai/graphs/base.py +0 -1828
  74. hammad/genai/graphs/plugins.py +0 -316
  75. hammad/genai/graphs/types.py +0 -638
  76. hammad/genai/models/__init__.py +0 -1
  77. hammad/genai/models/embeddings/__init__.py +0 -43
  78. hammad/genai/models/embeddings/model.py +0 -226
  79. hammad/genai/models/embeddings/run.py +0 -163
  80. hammad/genai/models/embeddings/types/__init__.py +0 -37
  81. hammad/genai/models/embeddings/types/embedding_model_name.py +0 -75
  82. hammad/genai/models/embeddings/types/embedding_model_response.py +0 -76
  83. hammad/genai/models/embeddings/types/embedding_model_run_params.py +0 -66
  84. hammad/genai/models/embeddings/types/embedding_model_settings.py +0 -47
  85. hammad/genai/models/language/__init__.py +0 -57
  86. hammad/genai/models/language/model.py +0 -1098
  87. hammad/genai/models/language/run.py +0 -878
  88. hammad/genai/models/language/types/__init__.py +0 -40
  89. hammad/genai/models/language/types/language_model_instructor_mode.py +0 -47
  90. hammad/genai/models/language/types/language_model_messages.py +0 -28
  91. hammad/genai/models/language/types/language_model_name.py +0 -239
  92. hammad/genai/models/language/types/language_model_request.py +0 -127
  93. hammad/genai/models/language/types/language_model_response.py +0 -217
  94. hammad/genai/models/language/types/language_model_response_chunk.py +0 -56
  95. hammad/genai/models/language/types/language_model_settings.py +0 -89
  96. hammad/genai/models/language/types/language_model_stream.py +0 -600
  97. hammad/genai/models/language/utils/__init__.py +0 -28
  98. hammad/genai/models/language/utils/requests.py +0 -421
  99. hammad/genai/models/language/utils/structured_outputs.py +0 -135
  100. hammad/genai/models/model_provider.py +0 -4
  101. hammad/genai/models/multimodal.py +0 -47
  102. hammad/genai/models/reranking.py +0 -26
  103. hammad/genai/types/__init__.py +0 -1
  104. hammad/genai/types/base.py +0 -215
  105. hammad/genai/types/history.py +0 -290
  106. hammad/genai/types/tools.py +0 -507
  107. hammad/logging/__init__.py +0 -35
  108. hammad/logging/decorators.py +0 -834
  109. hammad/logging/logger.py +0 -1018
  110. hammad/mcp/__init__.py +0 -53
  111. hammad/mcp/client/__init__.py +0 -35
  112. hammad/mcp/client/client.py +0 -624
  113. hammad/mcp/client/client_service.py +0 -400
  114. hammad/mcp/client/settings.py +0 -178
  115. hammad/mcp/servers/__init__.py +0 -26
  116. hammad/mcp/servers/launcher.py +0 -1161
  117. hammad/runtime/__init__.py +0 -32
  118. hammad/runtime/decorators.py +0 -142
  119. hammad/runtime/run.py +0 -299
  120. hammad/service/__init__.py +0 -49
  121. hammad/service/create.py +0 -527
  122. hammad/service/decorators.py +0 -283
  123. hammad/types.py +0 -288
  124. hammad/typing/__init__.py +0 -435
  125. hammad/web/__init__.py +0 -43
  126. hammad/web/http/__init__.py +0 -1
  127. hammad/web/http/client.py +0 -944
  128. hammad/web/models.py +0 -275
  129. hammad/web/openapi/__init__.py +0 -1
  130. hammad/web/openapi/client.py +0 -740
  131. hammad/web/search/__init__.py +0 -1
  132. hammad/web/search/client.py +0 -1023
  133. hammad/web/utils.py +0 -472
  134. hammad_python-0.0.30.dist-info/RECORD +0 -135
  135. {hammad → ham}/py.typed +0 -0
  136. {hammad_python-0.0.30.dist-info → hammad_python-0.0.32.dist-info}/WHEEL +0 -0
  137. {hammad_python-0.0.30.dist-info → hammad_python-0.0.32.dist-info}/licenses/LICENSE +0 -0
@@ -1,638 +0,0 @@
1
- """hammad.genai.graphs.types - Types for the graph framework built on pydantic-graph"""
2
-
3
- from typing import (
4
- Any,
5
- Dict,
6
- List,
7
- Optional,
8
- TypeVar,
9
- Generic,
10
- Union,
11
- Callable,
12
- Iterator,
13
- AsyncIterator,
14
- TYPE_CHECKING,
15
- )
16
- from typing_extensions import Literal
17
- from dataclasses import dataclass, field
18
-
19
- # Import from pydantic-graph
20
- from pydantic_graph import BaseNode, End, GraphRunContext, Graph as PydanticGraph
21
- from pydantic import BaseModel, Field
22
-
23
- from ...cache import cached
24
- from ...typing import get_type_description
25
- from ..agents.types.agent_response import AgentResponse
26
- from ..models.language.types.language_model_response import LanguageModelResponse
27
- from ..models.language.types.language_model_name import LanguageModelName
28
- from ..types.history import History
29
- from ..types.base import BaseGenAIModelStream
30
-
31
- if TYPE_CHECKING:
32
- from .base import BaseGraph, SelectionStrategy
33
-
34
# Public API of this module.
__all__ = [
    "GraphState",
    "GraphContext",
    "GraphResponse",
    "GraphStream",
    "GraphResponseChunk",
    "ActionSettings",
    "ActionInfo",
    "GraphEvent",
    "BasePlugin",
    "GraphHistoryEntry",
    "GraphNode",
    "GraphEnd",
    "PydanticGraphContext",
]

# Type variables
GraphState = TypeVar("GraphState")  # state object threaded through a graph run
T = TypeVar("T")  # output type produced by a graph / chunk

# Re-export from pydantic-graph for convenience, so callers can import these
# names from this module instead of depending on pydantic_graph directly.
GraphNode = BaseNode
GraphEnd = End
PydanticGraphContext = GraphRunContext
58
-
59
-
60
@dataclass
class ActionSettings:
    """Settings for an action in a graph.

    Plain configuration record; all fields are optional with defaults so an
    action can be declared with only the settings it needs.
    """

    # Language model configuration for this action.
    model: Optional[LanguageModelName | str] = None
    temperature: Optional[float] = None
    max_tokens: Optional[int] = None
    # Tools the action may invoke.
    tools: List[Callable] = field(default_factory=list)
    # Graph wiring: whether this action starts/terminates the graph, and
    # which action(s) (or selection strategy) run after it.
    start: bool = False
    terminates: bool = False
    xml: Optional[str] = None
    next: Optional[Union[str, List[str], "SelectionStrategy"]] = None
    # History handling across actions in the same run.
    read_history: bool = False
    persist_history: bool = False
    condition: Optional[str] = None
    name: Optional[str] = None
    instructions: Optional[str] = None
    # Diagnostics flags.
    verbose: bool = False
    debug: bool = False
    # Agent end strategy parameters
    max_steps: Optional[int] = None
    end_strategy: Optional[Literal["tool"]] = None
    end_tool: Optional[Callable] = None
    # Catch-all for additional keyword arguments — presumably forwarded to
    # the agent executing this action; TODO confirm against the consumer.
    kwargs: Dict[str, Any] = field(default_factory=dict)
84
-
85
-
86
@dataclass
class GraphHistoryEntry:
    """Entry in the graph execution history."""

    index: int
    """The index of the history entry."""

    field: str
    """The name of the field/action that triggered this history entry."""

    steps: List[LanguageModelResponse] = field(default_factory=list)
    """The steps that were executed to produce this history entry."""

    metadata: Dict[str, Any] = field(default_factory=dict)
    """Additional metadata about this history entry."""
101
-
102
-
103
@dataclass
class ActionInfo:
    """Information about an action in a graph."""

    name: str
    """The name of the action."""

    func: Callable
    """The function that implements the action."""

    settings: "ActionSettings"
    """Settings for the action."""

    is_start: bool = False
    """Whether this is the start action."""

    parameters: Dict[str, Any] = field(default_factory=dict)
    """Parameter information for the action."""
121
-
122
-
123
@dataclass
class GraphEvent:
    """Event emitted during graph execution that can be modified by plugins."""

    field: str
    """The name of the field that triggered the event."""

    output: Any
    """The output value of the field that triggered the event."""

    type: str
    """The type of this event (e.g., 'tool_call', 'final_response', 'action_start')."""

    metadata: Dict[str, Any] = field(default_factory=dict)
    """Additional metadata about the event."""

    _hooks: List[Callable] = field(default_factory=list)
    """Internal list of hooks registered for this event."""

    def on(self, func: Callable) -> Callable:
        """Register *func* as a hook on this event and hand it back unchanged,
        so the method can be used as a decorator."""
        self._hooks.append(func)
        return func

    def trigger_hooks(self) -> "GraphEvent":
        """Run every registered hook in registration order.

        Each hook receives the current event; a non-None return value
        replaces the event passed to the next hook and, ultimately, the
        value returned here.
        """
        current = self
        for hook in self._hooks:
            replacement = hook(current)
            current = current if replacement is None else replacement
        return current
155
-
156
-
157
# Enhanced GraphContext that wraps pydantic-graph's GraphRunContext
@dataclass
class GraphContext(Generic[GraphState]):
    """Context object for graph execution, providing access to state and plugin system."""

    pydantic_context: GraphRunContext[GraphState]
    """The underlying pydantic-graph context."""

    plugins: List["BasePlugin"] = field(default_factory=list)
    """Active plugins for this graph execution."""

    history: List[GraphHistoryEntry] = field(default_factory=list)
    """The history of the graph's execution."""

    metadata: Dict[str, Any] = field(default_factory=dict)
    """Additional metadata about the graph's execution."""

    @property
    def state(self) -> GraphState:
        """The current state held by the wrapped pydantic-graph context."""
        return self.pydantic_context.state

    @property
    def deps(self) -> Any:
        """The dependencies held by the wrapped pydantic-graph context."""
        return self.pydantic_context.deps

    def get_field(self, field_name: str) -> Any:
        """Read a field from the state: attribute access first, then indexing."""
        target = self.state
        if hasattr(target, field_name):
            return getattr(target, field_name)
        if hasattr(target, "__getitem__"):
            return target[field_name]
        raise AttributeError(f"Field '{field_name}' not found in state")

    def set_field(self, field_name: str, value: Any) -> None:
        """Write a field on the state: attribute access first, then indexing."""
        target = self.state
        if hasattr(target, field_name):
            setattr(target, field_name, value)
        elif hasattr(target, "__setitem__"):
            target[field_name] = value
        else:
            raise AttributeError(f"Cannot set field '{field_name}' in state")

    def __getitem__(self, key: str) -> Any:
        """Dict-style read access to state fields."""
        return self.get_field(key)

    def __setitem__(self, key: str, value: Any) -> None:
        """Dict-style write access to state fields."""
        self.set_field(key, value)

    def get(self, key: str, default: Any = None) -> Any:
        """Read a field, returning *default* when it cannot be found."""
        try:
            return self.get_field(key)
        except AttributeError:
            return default

    def emit_event(self, event: GraphEvent) -> GraphEvent:
        """Pass *event* through each plugin's on_event, then its own hooks.

        Plugins run in order and may replace the event; hooks registered
        directly on the event run last.
        """
        for plugin in self.plugins:
            if hasattr(plugin, "on_event"):
                event = plugin.on_event(event)
        return event.trigger_hooks()
228
-
229
-
230
class BasePlugin:
    """Base class for graph plugins that can modify execution.

    Subclasses override any of the ``on_*`` callbacks; every default is a
    no-op (``on_event`` returns the event unchanged), so plugins only need
    to implement the hooks they care about.
    """

    def __init__(self, **kwargs: Any):
        """Initialize the plugin with configuration options.

        All keyword arguments are stored verbatim on ``self.config``.
        """
        self.config = kwargs

    def on_event(self, event: GraphEvent) -> GraphEvent:
        """Handle and potentially modify an event.

        Return the (possibly replaced) event; the default passes it through.
        """
        return event

    def on_graph_start(self, context: GraphContext[Any]) -> None:
        """Called when graph execution starts."""
        pass

    def on_graph_end(self, context: GraphContext[Any]) -> None:
        """Called when graph execution ends."""
        pass

    def on_action_start(self, context: GraphContext[Any], action_name: str) -> None:
        """Called when an action starts executing."""
        pass

    def on_action_end(
        self, context: GraphContext[Any], action_name: str, result: Any
    ) -> None:
        """Called when an action finishes executing."""
        pass
258
-
259
-
260
class GraphResponse(AgentResponse[T, GraphState], Generic[T, GraphState]):
    """A response generated by the execution of a graph."""

    type: str = "graph"
    """The type of this response. Always 'graph'."""

    output: T
    """The final output of the graph's execution."""

    state: GraphState | None = None
    """The final state object after graph execution."""

    history: List[GraphHistoryEntry] = Field(default_factory=list)
    """The total history of the graph's execution."""

    start_node: Optional[str] = None
    """The name of the start node that was executed."""

    metadata: Dict[str, Any] = Field(default_factory=dict)
    """Metadata about the graph's execution."""

    nodes_executed: List[str] = Field(default_factory=list)
    """List of node names that were executed."""

    steps: List[LanguageModelResponse] = Field(default_factory=list)
    """The steps that were executed to produce this response."""

    @property
    def fields(self) -> Dict[str, Any]:
        """Snapshot of the graph field values as a plain dict.

        Tries, in order: pydantic ``model_dump``, ``__dict__``, and a
        mapping's ``items()``; falls back to an empty dict.
        """
        state = self.state
        if state is None:
            return {}
        if hasattr(state, "model_dump"):
            return state.model_dump()
        if hasattr(state, "__dict__"):
            return state.__dict__
        if hasattr(state, "items"):
            return dict(state.items())
        return {}

    def __getattr__(self, name: str) -> Any:
        """Allow accessing graph field values via dot notation."""
        values = self.fields
        if name in values:
            return values[name]
        raise AttributeError(
            f"'{type(self).__name__}' object has no attribute '{name}'"
        )

    @cached
    def __str__(self) -> str:
        """Pretty prints the response object."""
        parts = [">>> GraphResponse:"]

        if self.output or self.content:
            parts.append(f"\n{self.output if self.output else self.content}")
        else:
            parts.append(f"\n{self.completion}")

        parts.append(f"\n\n>>> Model: {self.model}")
        parts.append(f"\n>>> Start Node: {self.start_node or 'Unknown'}")
        parts.append(f"\n>>> Nodes Executed: {len(self.nodes_executed)}")

        if self.nodes_executed:
            parts.append(f" ({', '.join(self.nodes_executed)})")

        parts.append(f"\n>>> Output Type: {get_type_description(type(self.output))}")

        # Total tool calls across all steps that expose has_tool_calls().
        total_tool_calls = sum(
            len(step.tool_calls)
            for step in self.steps
            if hasattr(step, "has_tool_calls") and step.has_tool_calls()
        )
        parts.append(f"\n>>> Total Tool Calls: {total_tool_calls}")

        # Show state values if available
        if self.state:
            parts.append(f"\n>>> State: {self._format_state_display()}")

        return "".join(parts)

    def _format_state_display(self) -> str:
        """Format state values for display in string representation."""
        try:
            rendered = []
            for key, value in self.fields.items():
                if isinstance(value, str):
                    # Truncate long strings
                    shown = value if len(value) <= 50 else value[:50] + "..."
                    rendered.append(f"{key}='{shown}'")
                else:
                    rendered.append(f"{key}={value}")
            return "{" + ", ".join(rendered) + "}"
        except Exception:
            return str(self.state)
356
-
357
-
358
class GraphResponseChunk(BaseModel, Generic[T]):
    """A chunk from a graph response stream representing a single execution step."""

    step_number: int
    """The step number of this chunk."""

    node_name: str
    """The name of the node that was executed."""

    output: T | None = None
    """The output value from this step."""

    content: str | None = None
    """The content string from this step."""

    model: str | None = None
    """The model name used for this step."""

    is_final: bool = False
    """Whether this is the final chunk."""

    state: Any = None
    """The state after this step."""

    metadata: Dict[str, Any] = Field(default_factory=dict)
    """Additional metadata about this step."""

    def __bool__(self) -> bool:
        """A chunk is truthy when it carries an output or content value."""
        return bool(self.output or self.content)

    def __str__(self) -> str:
        """String representation of the chunk."""
        summary = (
            f"GraphResponseChunk(step={self.step_number}, "
            f"node={self.node_name}, final={self.is_final})"
        )

        # Append a truncated content preview when there is anything to show.
        if not (self.output or self.content):
            return summary
        preview = str(self.output if self.output else self.content)
        if len(preview) > 100:
            preview = preview[:100] + "..."
        return summary + f"\nContent: {preview}"
401
-
402
-
403
class GraphStream(BaseGenAIModelStream[GraphResponseChunk[T]], Generic[T, GraphState]):
    """Stream of graph execution steps that can be used in sync and async contexts.

    Wraps a pydantic-graph iterator and yields one GraphResponseChunk per
    executed node, for both ``for`` and ``async for`` loops, plus
    ``collect``/``async_collect`` to fold the stream into a GraphResponse.
    """

    def __init__(
        self,
        graph: "BaseGraph[GraphState, T]",
        start_node: Any,
        state: Optional[GraphState] = None,
        **kwargs: Any,
    ):
        """Initialize the graph stream.

        Args:
            graph: The BaseGraph instance
            start_node: The starting node for execution
            state: Optional state object
            **kwargs: Additional parameters, stored on every chunk's metadata
        """
        super().__init__(
            type="graph",
            model=graph._global_model or "openai/gpt-4o-mini",
            stream=None,
        )
        self.graph = graph
        self.start_node = start_node
        self.state = state
        self.kwargs = kwargs
        self.current_step = 0
        self.is_done = False
        self._pydantic_iterator = None
        self._async_pydantic_iterator = None

    @staticmethod
    def _describe_node(node_result: Any) -> "tuple[str, Any, str, bool]":
        """Extract (node_name, output, content, is_final) from a node result.

        Bug fix: the previous implementation did
        ``getattr(node_result, "__class__", {}).get("__name__", "unknown")``,
        which calls ``.get`` on the class object itself — classes have no
        ``.get``, so every step raised AttributeError inside the iteration
        ``try`` and the stream degraded to a single error chunk.
        """
        node_name = type(node_result).__name__
        if hasattr(node_result, "action_name"):
            node_name = node_result.action_name

        # Prefer explicit payload attributes; fall back to the repr.
        if hasattr(node_result, "data"):
            output = node_result.data
        elif hasattr(node_result, "output"):
            output = node_result.output
        else:
            output = str(node_result)
        content = str(output)

        # End nodes terminate the stream.
        return node_name, output, content, isinstance(node_result, End)

    def _chunk_for_node(self, node_result: Any) -> GraphResponseChunk[T]:
        """Build the chunk describing a successfully executed node."""
        node_name, output, content, is_final = self._describe_node(node_result)
        return GraphResponseChunk(
            step_number=self.current_step,
            node_name=node_name,
            output=output,
            content=content,
            model=self.model,
            is_final=is_final,
            state=self.state,
            metadata=self.kwargs,
        )

    def _error_chunk(self, error: Exception) -> GraphResponseChunk[T]:
        """Build a terminal chunk describing a failure during iteration."""
        return GraphResponseChunk(
            step_number=self.current_step + 1,
            node_name="error",
            output=None,
            content=f"Error during graph execution: {str(error)}",
            model=self.model,
            is_final=True,
            state=self.state,
            metadata={"error": str(error)},
        )

    def __iter__(self) -> Iterator[GraphResponseChunk[T]]:
        """Iterate over graph execution steps, yielding one chunk per node."""
        if not self.graph._pydantic_graph:
            raise ValueError("Graph not initialized")

        # Create the pydantic-graph iterator for this run.
        self._pydantic_iterator = self.graph._pydantic_graph.iter(
            self.start_node, state=self.state
        )

        try:
            for node_result in self._pydantic_iterator:
                self.current_step += 1
                chunk = self._chunk_for_node(node_result)
                yield chunk
                if chunk.is_final:
                    self.is_done = True
                    break
        except Exception as e:
            # Surface any execution failure as a final error chunk.
            yield self._error_chunk(e)
            self.is_done = True

    def __aiter__(self) -> AsyncIterator[GraphResponseChunk[T]]:
        """Async iterator for graph execution."""
        return self

    async def __anext__(self) -> GraphResponseChunk[T]:
        """Get the next chunk in async iteration."""
        if not self.graph._pydantic_graph:
            raise ValueError("Graph not initialized")

        if self.is_done:
            raise StopAsyncIteration

        # Lazily create the async iterator on first use.
        if self._async_pydantic_iterator is None:
            self._async_pydantic_iterator = self.graph._pydantic_graph.iter(
                self.start_node, state=self.state
            ).__aiter__()

        try:
            node_result = await self._async_pydantic_iterator.__anext__()
            self.current_step += 1
            chunk = self._chunk_for_node(node_result)
        except StopAsyncIteration:
            self.is_done = True
            raise
        except Exception as e:
            # Surface any execution failure as a final error chunk.
            self.is_done = True
            return self._error_chunk(e)

        if chunk.is_final:
            self.is_done = True
        return chunk

    def _finalize(
        self,
        chunks: List[GraphResponseChunk[T]],
        final_chunk: Optional[GraphResponseChunk[T]],
    ) -> GraphResponse[T, GraphState]:
        """Build the GraphResponse from collected chunks (shared by sync/async)."""
        if final_chunk is None:
            raise RuntimeError("No final chunk generated by graph execution")

        return GraphResponse(
            type="graph",
            model=self.model,
            output=final_chunk.output,
            content=final_chunk.content,
            completion=None,
            state=final_chunk.state,
            history=[],
            start_node=getattr(self.start_node, "action_name", "unknown"),
            nodes_executed=[chunk.node_name for chunk in chunks],
            metadata=final_chunk.metadata,
        )

    def collect(self) -> GraphResponse[T, GraphState]:
        """Collect all steps and return final graph response."""
        chunks: List[GraphResponseChunk[T]] = []
        final_chunk: Optional[GraphResponseChunk[T]] = None

        for chunk in self:
            chunks.append(chunk)
            if chunk.is_final:
                final_chunk = chunk
                break

        return self._finalize(chunks, final_chunk)

    async def async_collect(self) -> GraphResponse[T, GraphState]:
        """Collect all steps and return final graph response."""
        chunks: List[GraphResponseChunk[T]] = []
        final_chunk: Optional[GraphResponseChunk[T]] = None

        async for chunk in self:
            chunks.append(chunk)
            if chunk.is_final:
                final_chunk = chunk
                break

        return self._finalize(chunks, final_chunk)
- )
@@ -1 +0,0 @@
1
- """hammad.genai.models"""
@@ -1,43 +0,0 @@
1
- """hammad.genai.models.embeddings"""
2
-
3
- from typing import TYPE_CHECKING
4
- from ...._internal import create_getattr_importer
5
-
6
-
7
- if TYPE_CHECKING:
8
- from .model import (
9
- EmbeddingModel,
10
- create_embedding_model,
11
- )
12
- from .run import (
13
- run_embedding_model,
14
- async_run_embedding_model,
15
- )
16
- from .types import (
17
- Embedding,
18
- EmbeddingModelResponse,
19
- EmbeddingModelSettings,
20
- )
21
-
22
-
23
# Public API of the embeddings package.
__all__ = [
    "EmbeddingModel",
    "create_embedding_model",
    # hammad.genai.models.embeddings.run
    "run_embedding_model",
    "async_run_embedding_model",
    # hammad.genai.models.embeddings.types.embedding
    "Embedding",
    # hammad.genai.models.embeddings.types.embedding_model_response
    "EmbeddingModelResponse",
    # hammad.genai.models.embeddings.types.embedding_model_settings
    "EmbeddingModelSettings",
]


# Lazily resolve the names in __all__ on first attribute access, so importing
# this package does not eagerly load the submodules listed above.
__getattr__ = create_getattr_importer(__all__)


def __dir__() -> list[str]:
    """Return the list of attributes to be shown in the REPL."""
    return __all__