amazon-bedrock-haystack 3.7.0__py3-none-any.whl → 3.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- amazon_bedrock_haystack-3.7.0.dist-info/METADATA
+++ amazon_bedrock_haystack-3.8.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: amazon-bedrock-haystack
-Version: 3.7.0
+Version: 3.8.0
 Summary: An integration of Amazon Bedrock as an AmazonBedrockGenerator component.
 Project-URL: Documentation, https://github.com/deepset-ai/haystack-core-integrations/tree/main/integrations/amazon_bedrock#readme
 Project-URL: Issues, https://github.com/deepset-ai/haystack-core-integrations/issues
@@ -21,7 +21,7 @@ Classifier: Programming Language :: Python :: Implementation :: PyPy
 Requires-Python: >=3.9
 Requires-Dist: aioboto3>=14.0.0
 Requires-Dist: boto3>=1.28.57
-Requires-Dist: haystack-ai>=2.13.1
+Requires-Dist: haystack-ai>=2.15.1
 Description-Content-Type: text/markdown
 
 # amazon-bedrock-haystack
--- amazon_bedrock_haystack-3.7.0.dist-info/RECORD
+++ amazon_bedrock_haystack-3.8.0.dist-info/RECORD
@@ -11,12 +11,12 @@ haystack_integrations/components/generators/amazon_bedrock/__init__.py,sha256=lv
 haystack_integrations/components/generators/amazon_bedrock/adapters.py,sha256=yBC-3YwV6qAwSXMtdZiLSYh2lUpPQIDy7Efl7w-Cu-k,19640
 haystack_integrations/components/generators/amazon_bedrock/generator.py,sha256=c_saV5zxFYQVJT0Hzo80lKty46itL0Dp31VuDueYa3M,14716
 haystack_integrations/components/generators/amazon_bedrock/chat/__init__.py,sha256=6GZ8Y3Lw0rLOsOAqi6Tu5mZC977UzQvgDxKpOWr8IQw,110
-haystack_integrations/components/generators/amazon_bedrock/chat/chat_generator.py,sha256=nhen_XcdypQT2K8312gjYdPYrNK_NtBKcR3gPfYzVwU,23461
-haystack_integrations/components/generators/amazon_bedrock/chat/utils.py,sha256=EGDcJZFzXKjIJnD8sw1M9YHIPPm05drw1KbY_Lx1kUI,21170
+haystack_integrations/components/generators/amazon_bedrock/chat/chat_generator.py,sha256=6sCjyIzZBpfQRoXuu3PzziLVasb82xWG17sMy_zBf2Y,24130
+haystack_integrations/components/generators/amazon_bedrock/chat/utils.py,sha256=RY7NbwdAx1uvCazasqBeJP3RjXgTXdEwgU3EFfESvkg,21555
 haystack_integrations/components/rankers/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 haystack_integrations/components/rankers/amazon_bedrock/__init__.py,sha256=Zrc3BSVkEaXYpliEi6hKG9bqW4J7DNk93p50SuoyT1Q,107
 haystack_integrations/components/rankers/amazon_bedrock/ranker.py,sha256=enAjf2QyDwfpidKkFCdLz954cx-Tjh9emrOS3vINJDg,12344
-amazon_bedrock_haystack-3.7.0.dist-info/METADATA,sha256=2n5u93lnpV3M8NL_7GaR7dHqRnoq8vzV6v73sTjS008,2287
-amazon_bedrock_haystack-3.7.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-amazon_bedrock_haystack-3.7.0.dist-info/licenses/LICENSE.txt,sha256=B05uMshqTA74s-0ltyHKI6yoPfJ3zYgQbvcXfDVGFf8,10280
-amazon_bedrock_haystack-3.7.0.dist-info/RECORD,,
+amazon_bedrock_haystack-3.8.0.dist-info/METADATA,sha256=5XRkUVgYK3BZxIYmlJ2I9xIjhXXKCSIO7bTnI8rP3Xw,2287
+amazon_bedrock_haystack-3.8.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+amazon_bedrock_haystack-3.8.0.dist-info/licenses/LICENSE.txt,sha256=B05uMshqTA74s-0ltyHKI6yoPfJ3zYgQbvcXfDVGFf8,10280
+amazon_bedrock_haystack-3.8.0.dist-info/RECORD,,
--- haystack_integrations/components/generators/amazon_bedrock/chat/chat_generator.py
+++ haystack_integrations/components/generators/amazon_bedrock/chat/chat_generator.py
@@ -5,7 +5,7 @@ from botocore.config import Config
 from botocore.eventstream import EventStream
 from botocore.exceptions import ClientError
 from haystack import component, default_from_dict, default_to_dict, logging
-from haystack.dataclasses import ChatMessage, StreamingCallbackT, select_streaming_callback
+from haystack.dataclasses import ChatMessage, ComponentInfo, StreamingCallbackT, select_streaming_callback
 from haystack.tools import (
     Tool,
     Toolset,
@@ -371,7 +371,9 @@ class AmazonBedrockChatGenerator:
         if additional_fields:
             params["additionalModelRequestFields"] = additional_fields
 
-        callback = select_streaming_callback(
+        # overloads that exhaust finite Literals(bool) not treated as exhaustive
+        # see https://github.com/python/mypy/issues/14764
+        callback = select_streaming_callback(  # type: ignore[call-overload]
             init_callback=self.streaming_callback,
             runtime_callback=streaming_callback,
             requires_async=requires_async,
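The new type: ignore works around a known mypy limitation: overloads that cover Literal[True] and Literal[False] are not accepted as exhaustive when the call site passes a plain bool (python/mypy#14764). A minimal, self-contained sketch of that behavior, using hypothetical names not taken from this package:

    from typing import Literal, Union, overload

    @overload
    def pick_callback(requires_async: Literal[True]) -> str: ...
    @overload
    def pick_callback(requires_async: Literal[False]) -> int: ...
    def pick_callback(requires_async: bool) -> Union[str, int]:
        # Illustrative body only.
        return "async" if requires_async else 0

    flag: bool = True
    # mypy: no overload variant of "pick_callback" matches argument type "bool"
    value = pick_callback(flag)  # type: ignore[call-overload]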
@@ -406,6 +408,8 @@ class AmazonBedrockChatGenerator:
         :raises AmazonBedrockInferenceError:
             If the Bedrock inference API call fails.
         """
+        component_info = ComponentInfo.from_component(self)
+
         params, callback = self._prepare_request_params(
             messages=messages,
             streaming_callback=streaming_callback,
@@ -422,7 +426,12 @@ class AmazonBedrockChatGenerator:
                 msg = "No stream found in the response."
                 raise AmazonBedrockInferenceError(msg)
             # the type of streaming callback is checked in _prepare_request_params, but mypy doesn't know
-            replies = _parse_streaming_response(response_stream, callback, self.model)  # type: ignore[arg-type]
+            replies = _parse_streaming_response(
+                response_stream=response_stream,
+                streaming_callback=callback,  # type: ignore[arg-type]
+                model=self.model,
+                component_info=component_info,
+            )
         else:
             response = self.client.converse(**params)
             replies = _parse_completion_response(response, self.model)
@@ -459,6 +468,8 @@ class AmazonBedrockChatGenerator:
         :raises AmazonBedrockInferenceError:
             If the Bedrock inference API call fails.
         """
+        component_info = ComponentInfo.from_component(self)
+
         params, callback = self._prepare_request_params(
             messages=messages,
             streaming_callback=streaming_callback,
@@ -479,7 +490,12 @@ class AmazonBedrockChatGenerator:
                 msg = "No stream found in the response."
                 raise AmazonBedrockInferenceError(msg)
             # the type of streaming callback is checked in _prepare_request_params, but mypy doesn't know
-            replies = await _parse_streaming_response_async(response_stream, callback, self.model)  # type: ignore[arg-type]
+            replies = await _parse_streaming_response_async(
+                response_stream=response_stream,
+                streaming_callback=callback,  # type: ignore[arg-type]
+                model=self.model,
+                component_info=component_info,
+            )
         else:
             response = await async_client.converse(**params)
             replies = _parse_completion_response(response, self.model)
--- haystack_integrations/components/generators/amazon_bedrock/chat/utils.py
+++ haystack_integrations/components/generators/amazon_bedrock/chat/utils.py
@@ -8,6 +8,7 @@ from haystack.dataclasses import (
     AsyncStreamingCallbackT,
     ChatMessage,
     ChatRole,
+    ComponentInfo,
     StreamingChunk,
     SyncStreamingCallbackT,
     ToolCall,
@@ -235,7 +236,9 @@ def _parse_completion_response(response_body: Dict[str, Any], model: str) -> Lis
 
 
 # Bedrock streaming to Haystack util methods
-def _convert_event_to_streaming_chunk(event: Dict[str, Any], model: str) -> StreamingChunk:
+def _convert_event_to_streaming_chunk(
+    event: Dict[str, Any], model: str, component_info: ComponentInfo
+) -> StreamingChunk:
     """
     Convert a Bedrock streaming event to a Haystack StreamingChunk.
 
@@ -244,6 +247,7 @@ def _convert_event_to_streaming_chunk(event: Dict[str, Any], model: str) -> Stre
 
     :param event: Dictionary containing a Bedrock streaming event.
     :param model: The model ID used for generation, included in chunk metadata.
+    :param component_info: ComponentInfo object
     :returns: StreamingChunk object containing the content and metadata extracted from the event.
     """
     # Initialize an empty StreamingChunk to return if no relevant event is found
@@ -358,6 +362,8 @@ def _convert_event_to_streaming_chunk(event: Dict[str, Any], model: str) -> Stre
            },
        )
 
+    streaming_chunk.component_info = component_info
+
     return streaming_chunk
 
 
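With component_info attached to every chunk here, a user-supplied streaming callback can tell which pipeline component produced the output it receives. A minimal sketch, assuming StreamingChunk.component_info is Optional and that ComponentInfo exposes a name field as in haystack-ai 2.15 (those attribute names are an assumption, not shown in this diff):

    from typing import Optional
    from haystack.dataclasses import StreamingChunk

    def print_chunk(chunk: StreamingChunk) -> None:
        # component_info is set by _convert_event_to_streaming_chunk above.
        origin: Optional[str] = chunk.component_info.name if chunk.component_info else None
        print(f"[{origin or 'unknown'}] {chunk.content}", end="", flush=True)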
@@ -438,6 +444,7 @@ def _parse_streaming_response(
     response_stream: EventStream,
     streaming_callback: SyncStreamingCallbackT,
     model: str,
+    component_info: ComponentInfo,
 ) -> List[ChatMessage]:
     """
     Parse a streaming response from Bedrock.
@@ -445,11 +452,12 @@
     :param response_stream: EventStream from Bedrock API
     :param streaming_callback: Callback for streaming chunks
     :param model: The model ID used for generation
+    :param component_info: ComponentInfo object
     :return: List of ChatMessage objects
     """
     chunks: List[StreamingChunk] = []
     for event in response_stream:
-        streaming_chunk = _convert_event_to_streaming_chunk(event=event, model=model)
+        streaming_chunk = _convert_event_to_streaming_chunk(event=event, model=model, component_info=component_info)
         streaming_callback(streaming_chunk)
         chunks.append(streaming_chunk)
     replies = [_convert_streaming_chunks_to_chat_message(chunks=chunks)]
@@ -460,6 +468,7 @@ async def _parse_streaming_response_async(
     response_stream: EventStream,
     streaming_callback: AsyncStreamingCallbackT,
     model: str,
+    component_info: ComponentInfo,
 ) -> List[ChatMessage]:
     """
     Parse a streaming response from Bedrock.
@@ -467,11 +476,12 @@
     :param response_stream: EventStream from Bedrock API
     :param streaming_callback: Callback for streaming chunks
     :param model: The model ID used for generation
+    :param component_info: ComponentInfo object
     :return: List of ChatMessage objects
     """
     chunks: List[StreamingChunk] = []
     async for event in response_stream:
-        streaming_chunk = _convert_event_to_streaming_chunk(event=event, model=model)
+        streaming_chunk = _convert_event_to_streaming_chunk(event=event, model=model, component_info=component_info)
         await streaming_callback(streaming_chunk)
         chunks.append(streaming_chunk)
     replies = [_convert_streaming_chunks_to_chat_message(chunks=chunks)]
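Taken together, the 3.8.0 changes mean that every chunk handed to a streaming callback now identifies the component that produced it. A hedged end-to-end usage sketch (the model ID is a placeholder; AWS credentials and region are resolved by boto3 in the usual ways):

    from haystack.dataclasses import ChatMessage, StreamingChunk
    from haystack_integrations.components.generators.amazon_bedrock import AmazonBedrockChatGenerator

    def on_chunk(chunk: StreamingChunk) -> None:
        # chunk.component_info is populated per the utils.py changes above.
        print(chunk.content, end="", flush=True)

    generator = AmazonBedrockChatGenerator(
        model="anthropic.claude-3-5-sonnet-20240620-v1:0",  # placeholder model ID
        streaming_callback=on_chunk,
    )
    result = generator.run(messages=[ChatMessage.from_user("What is Amazon Bedrock?")])
    print(result["replies"][0].text)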