apache-airflow-providers-amazon 8.22.0__py3-none-any.whl → 8.23.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/executors/batch/batch_executor.py +47 -3
  3. airflow/providers/amazon/aws/executors/ecs/ecs_executor.py +1 -0
  4. airflow/providers/amazon/aws/hooks/bedrock.py +20 -0
  5. airflow/providers/amazon/aws/hooks/comprehend.py +37 -0
  6. airflow/providers/amazon/aws/hooks/neptune.py +36 -1
  7. airflow/providers/amazon/aws/operators/athena.py +1 -1
  8. airflow/providers/amazon/aws/operators/batch.py +1 -3
  9. airflow/providers/amazon/aws/operators/bedrock.py +218 -2
  10. airflow/providers/amazon/aws/operators/comprehend.py +192 -0
  11. airflow/providers/amazon/aws/operators/emr.py +21 -11
  12. airflow/providers/amazon/aws/operators/neptune.py +128 -21
  13. airflow/providers/amazon/aws/operators/sagemaker.py +10 -14
  14. airflow/providers/amazon/aws/sensors/comprehend.py +147 -0
  15. airflow/providers/amazon/aws/sensors/emr.py +8 -0
  16. airflow/providers/amazon/aws/triggers/comprehend.py +61 -0
  17. airflow/providers/amazon/aws/triggers/neptune.py +45 -0
  18. airflow/providers/amazon/aws/triggers/sagemaker.py +1 -1
  19. airflow/providers/amazon/aws/utils/__init__.py +7 -0
  20. airflow/providers/amazon/aws/waiters/comprehend.json +49 -0
  21. airflow/providers/amazon/get_provider_info.py +25 -1
  22. {apache_airflow_providers_amazon-8.22.0.dist-info → apache_airflow_providers_amazon-8.23.0.dist-info}/METADATA +6 -6
  23. {apache_airflow_providers_amazon-8.22.0.dist-info → apache_airflow_providers_amazon-8.23.0.dist-info}/RECORD +25 -20
  24. {apache_airflow_providers_amazon-8.22.0.dist-info → apache_airflow_providers_amazon-8.23.0.dist-info}/WHEEL +0 -0
  25. {apache_airflow_providers_amazon-8.22.0.dist-info → apache_airflow_providers_amazon-8.23.0.dist-info}/entry_points.txt +0 -0

airflow/providers/amazon/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "8.22.0"
+__version__ = "8.23.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.7.0"

airflow/providers/amazon/aws/executors/batch/batch_executor.py
@@ -19,10 +19,11 @@
 
 from __future__ import annotations
 
+import contextlib
 import time
 from collections import defaultdict, deque
 from copy import deepcopy
-from typing import TYPE_CHECKING, Any, Dict, List
+from typing import TYPE_CHECKING, Any, Dict, List, Sequence
 
 from botocore.exceptions import ClientError, NoCredentialsError
 
@@ -34,11 +35,12 @@ from airflow.providers.amazon.aws.executors.utils.exponential_backoff_retry import
     exponential_backoff_retry,
 )
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
+from airflow.stats import Stats
 from airflow.utils import timezone
 from airflow.utils.helpers import merge_dicts
 
 if TYPE_CHECKING:
-    from airflow.models.taskinstance import TaskInstanceKey
+    from airflow.models.taskinstance import TaskInstance, TaskInstanceKey
     from airflow.providers.amazon.aws.executors.batch.boto_schema import (
         BatchDescribeJobsResponseSchema,
         BatchSubmitJobResponseSchema,
@@ -306,14 +308,20 @@ class AwsBatchExecutor(BaseExecutor):
                 self.pending_jobs.append(batch_job)
             else:
                 # Success case
+                job_id = submit_job_response["job_id"]
                 self.active_workers.add_job(
-                    job_id=submit_job_response["job_id"],
+                    job_id=job_id,
                     airflow_task_key=key,
                     airflow_cmd=cmd,
                     queue=queue,
                     exec_config=exec_config,
                     attempt_number=attempt_number,
                 )
+                with contextlib.suppress(AttributeError):
+                    # TODO: Remove this when min_airflow_version is 2.10.0 or higher in Amazon provider.
+                    # running_state is added in Airflow 2.10 and only needed to support task adoption
+                    # (an optional executor feature).
+                    self.running_state(key, job_id)
         if failure_reasons:
             self.log.error(
                 "Pending Batch jobs failed to launch for the following reasons: %s. Retrying later.",
@@ -418,3 +426,39 @@ class AwsBatchExecutor(BaseExecutor):
             " and value should be NULL or empty."
         )
         return submit_kwargs
+
+    def try_adopt_task_instances(self, tis: Sequence[TaskInstance]) -> Sequence[TaskInstance]:
+        """
+        Adopt task instances which have an external_executor_id (the Batch job ID).
+
+        Anything that is not adopted will be cleared by the scheduler and becomes eligible for re-scheduling.
+        """
+        with Stats.timer("batch_executor.adopt_task_instances.duration"):
+            adopted_tis: list[TaskInstance] = []
+
+            if job_ids := [ti.external_executor_id for ti in tis if ti.external_executor_id]:
+                batch_jobs = self._describe_jobs(job_ids)
+
+                for batch_job in batch_jobs:
+                    ti = next(ti for ti in tis if ti.external_executor_id == batch_job.job_id)
+                    self.active_workers.add_job(
+                        job_id=batch_job.job_id,
+                        airflow_task_key=ti.key,
+                        airflow_cmd=ti.command_as_list(),
+                        queue=ti.queue,
+                        exec_config=ti.executor_config,
+                        attempt_number=ti.prev_attempted_tries,
+                    )
+                    adopted_tis.append(ti)
+
+            if adopted_tis:
+                tasks = [f"{task} in state {task.state}" for task in adopted_tis]
+                task_instance_str = "\n\t".join(tasks)
+                self.log.info(
+                    "Adopted the following %d tasks from a dead executor:\n\t%s",
+                    len(adopted_tis),
+                    task_instance_str,
+                )
+
+            not_adopted_tis = [ti for ti in tis if ti not in adopted_tis]
+            return not_adopted_tis
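
The new `running_state` call (Airflow 2.10+) records the Batch job ID as the task instance's `external_executor_id`; `try_adopt_task_instances` later matches those IDs against live Batch jobs. A minimal sketch of that adopt-or-return partition, using plain-Python stand-ins rather than Airflow's real TaskInstance and executor classes:

from __future__ import annotations

from dataclasses import dataclass


@dataclass
class FakeTI:
    # Stand-in for airflow.models.taskinstance.TaskInstance.
    task_id: str
    external_executor_id: str | None  # the Batch job ID recorded via running_state()


def adopt(tis: list[FakeTI], live_job_ids: set[str]) -> tuple[list[FakeTI], list[FakeTI]]:
    # TIs whose recorded job ID still exists in AWS Batch are adopted; the rest
    # are handed back so the scheduler can clear and re-schedule them.
    adopted = [ti for ti in tis if ti.external_executor_id in live_job_ids]
    return adopted, [ti for ti in tis if ti not in adopted]


tis = [FakeTI("a", "job-1"), FakeTI("b", None), FakeTI("c", "job-gone")]
adopted, not_adopted = adopt(tis, live_job_ids={"job-1"})
print([ti.task_id for ti in adopted])      # ['a']
print([ti.task_id for ti in not_adopted])  # ['b', 'c']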

airflow/providers/amazon/aws/executors/ecs/ecs_executor.py
@@ -405,6 +405,7 @@ class AwsEcsExecutor(BaseExecutor):
                 except AttributeError:
                     # running_state is newly added, and only needed to support task adoption (an optional
                     # executor feature).
+                    # TODO: remove when min airflow version >= 2.9.2
                     pass
         if failure_reasons:
             self.log.error(

airflow/providers/amazon/aws/hooks/bedrock.py
@@ -77,3 +77,23 @@ class BedrockAgentHook(AwsBaseHook):
     def __init__(self, *args, **kwargs) -> None:
         kwargs["client_type"] = self.client_type
         super().__init__(*args, **kwargs)
+
+
+class BedrockAgentRuntimeHook(AwsBaseHook):
+    """
+    Interact with the Amazon Agents for Bedrock API.
+
+    Provide thin wrapper around :external+boto3:py:class:`boto3.client("bedrock-agent-runtime") <AgentsforBedrockRuntime.Client>`.
+
+    Additional arguments (such as ``aws_conn_id``) may be specified and
+    are passed down to the underlying AwsBaseHook.
+
+    .. seealso::
+        - :class:`airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
+    """
+
+    client_type = "bedrock-agent-runtime"
+
+    def __init__(self, *args, **kwargs) -> None:
+        kwargs["client_type"] = self.client_type
+        super().__init__(*args, **kwargs)
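
For illustration, the new hook hands back the raw `bedrock-agent-runtime` client, so a knowledge base can be queried directly; the connection ID and knowledge base ID below are placeholders:

from airflow.providers.amazon.aws.hooks.bedrock import BedrockAgentRuntimeHook

# Placeholder IDs; requires AWS credentials and an existing Bedrock knowledge base.
hook = BedrockAgentRuntimeHook(aws_conn_id="aws_default")
response = hook.conn.retrieve(
    retrievalQuery={"text": "What is our refund policy?"},
    knowledgeBaseId="EXAMPLEKB01",
)
for item in response["retrievalResults"]:
    print(item["content"]["text"])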

airflow/providers/amazon/aws/hooks/comprehend.py (new file)
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
+
+
+class ComprehendHook(AwsBaseHook):
+    """
+    Interact with AWS Comprehend.
+
+    Provide thin wrapper around :external+boto3:py:class:`boto3.client("comprehend") <Comprehend.Client>`.
+
+    Additional arguments (such as ``aws_conn_id``) may be specified and
+    are passed down to the underlying AwsBaseHook.
+
+    .. seealso::
+        - :class:`airflow.providers.amazon.aws.hooks.base_aws.AwsBaseHook`
+    """
+
+    def __init__(self, *args, **kwargs) -> None:
+        kwargs["client_type"] = "comprehend"
+        super().__init__(*args, **kwargs)

airflow/providers/amazon/aws/hooks/neptune.py
@@ -34,6 +34,12 @@ class NeptuneHook(AwsBaseHook):
 
     AVAILABLE_STATES = ["available"]
     STOPPED_STATES = ["stopped"]
+    ERROR_STATES = [
+        "cloning-failed",
+        "inaccessible-encryption-credentials",
+        "inaccessible-encryption-credentials-recoverable",
+        "migration-failed",
+    ]
 
     def __init__(self, *args, **kwargs):
         kwargs["client_type"] = "neptune"
@@ -82,4 +88,33 @@ class NeptuneHook(AwsBaseHook):
         :param cluster_id: The ID of the cluster to get the status of.
         :return: The status of the cluster.
         """
-        return self.get_conn().describe_db_clusters(DBClusterIdentifier=cluster_id)["DBClusters"][0]["Status"]
+        return self.conn.describe_db_clusters(DBClusterIdentifier=cluster_id)["DBClusters"][0]["Status"]
+
+    def get_db_instance_status(self, instance_id: str) -> str:
+        """
+        Get the status of a Neptune instance.
+
+        :param instance_id: The ID of the instance to get the status of.
+        :return: The status of the instance.
+        """
+        return self.conn.describe_db_instances(DBInstanceIdentifier=instance_id)["DBInstances"][0][
+            "DBInstanceStatus"
+        ]
+
+    def wait_for_cluster_instance_availability(
+        self, cluster_id: str, delay: int = 30, max_attempts: int = 60
+    ) -> None:
+        """
+        Wait for Neptune instances in a cluster to be available.
+
+        :param cluster_id: The cluster ID of the instances to wait for.
+        :param delay: Time in seconds to delay between polls.
+        :param max_attempts: Maximum number of attempts to poll for completion.
+        """
+        filters = [{"Name": "db-cluster-id", "Values": [cluster_id]}]
+        self.log.info("Waiting for instances in cluster %s.", cluster_id)
+        self.get_waiter("db_instance_available").wait(
+            Filters=filters, WaiterConfig={"Delay": delay, "MaxAttempts": max_attempts}
+        )
+        self.log.info("Finished waiting for instances in cluster %s.", cluster_id)
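
A usage sketch for the new instance-level helpers; the cluster and instance identifiers are placeholders and require AWS credentials plus an existing Neptune cluster:

from airflow.providers.amazon.aws.hooks.neptune import NeptuneHook

# Placeholder identifiers.
hook = NeptuneHook(aws_conn_id="aws_default")
print(hook.get_db_instance_status(instance_id="my-neptune-instance-1"))

# Block until every instance in the cluster reports "available",
# polling every 30 seconds for up to 60 attempts.
hook.wait_for_cluster_instance_availability(
    cluster_id="my-neptune-cluster", delay=30, max_attempts=60
)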

airflow/providers/amazon/aws/operators/athena.py
@@ -266,7 +266,7 @@ class AthenaOperator(AwsBaseOperator[AthenaHook]):
 
         if self.output_location:
             parsed = urlparse(self.output_location)
-            outputs.append(Dataset(namespace=f"{parsed.scheme}://{parsed.netloc}", name=parsed.path))
+            outputs.append(Dataset(namespace=f"{parsed.scheme}://{parsed.netloc}", name=parsed.path or "/"))
 
         return OperatorLineage(job_facets=job_facets, run_facets=run_facets, inputs=inputs, outputs=outputs)
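
The `or "/"` fallback matters because `urlparse` yields an empty path when the S3 output location has no key prefix, which previously produced an OpenLineage dataset with an empty name. A runnable stdlib demonstration:

from urllib.parse import urlparse

parsed = urlparse("s3://my-bucket")  # output location with no key prefix
print(repr(parsed.path))         # '' -> previously became an empty dataset name
print(repr(parsed.path or "/"))  # '/' -> now falls back to the bucket root

parsed = urlparse("s3://my-bucket/results/")
print(repr(parsed.path or "/"))  # '/results/' -> unchanged when a prefix exists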

airflow/providers/amazon/aws/operators/batch.py
@@ -206,9 +206,7 @@ class BatchOperator(BaseOperator):
         self.scheduling_priority_override = scheduling_priority_override
         self.array_properties = array_properties
         self.parameters = parameters or {}
-        self.retry_strategy = retry_strategy or {}
-        if not self.retry_strategy.get("attempts", None):
-            self.retry_strategy["attempts"] = 1
+        self.retry_strategy = retry_strategy
         self.waiters = waiters
         self.tags = tags or {}
         self.wait_for_completion = wait_for_completion
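
With this change, `retry_strategy` is forwarded exactly as given: a job submitted without one now inherits the retry settings of its Batch job definition instead of having `attempts` forced to 1. A hedged usage sketch (job, queue, and definition names are placeholders):

from airflow.providers.amazon.aws.operators.batch import BatchOperator

# Placeholder job/queue/definition names.
submit_batch_job = BatchOperator(
    task_id="submit_batch_job",
    job_name="example-job",
    job_queue="example-queue",
    job_definition="example-job-definition",
    # Pass retry_strategy explicitly to override the job definition;
    # omit it to inherit whatever the job definition configures.
    retry_strategy={"attempts": 3},
)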

airflow/providers/amazon/aws/operators/bedrock.py
@@ -20,11 +20,17 @@ import json
 from time import sleep
 from typing import TYPE_CHECKING, Any, Sequence
 
+import botocore
 from botocore.exceptions import ClientError
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
-from airflow.providers.amazon.aws.hooks.bedrock import BedrockAgentHook, BedrockHook, BedrockRuntimeHook
+from airflow.providers.amazon.aws.hooks.bedrock import (
+    BedrockAgentHook,
+    BedrockAgentRuntimeHook,
+    BedrockHook,
+    BedrockRuntimeHook,
+)
 from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
 from airflow.providers.amazon.aws.triggers.bedrock import (
     BedrockCustomizeModelCompletedTrigger,
@@ -32,7 +38,7 @@ from airflow.providers.amazon.aws.triggers.bedrock import (
     BedrockKnowledgeBaseActiveTrigger,
     BedrockProvisionModelThroughputCompletedTrigger,
 )
-from airflow.providers.amazon.aws.utils import validate_execute_complete_event
+from airflow.providers.amazon.aws.utils import get_botocore_version, validate_execute_complete_event
 from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
 from airflow.utils.helpers import prune_dict
 from airflow.utils.timezone import utcnow
@@ -664,3 +670,213 @@ class BedrockIngestDataOperator(AwsBaseOperator[BedrockAgentHook]):
         )
 
         return ingestion_job_id
+
+
+class BedrockRaGOperator(AwsBaseOperator[BedrockAgentRuntimeHook]):
+    """
+    Query a knowledge base and generate responses based on the retrieved results, with source citations.
+
+    NOTE: Support for EXTERNAL SOURCES was added in botocore 1.34.90
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:BedrockRaGOperator`
+
+    :param input: The query to be made to the knowledge base. (templated)
+    :param source_type: The type of resource that is queried by the request. (templated)
+        Must be one of 'KNOWLEDGE_BASE' or 'EXTERNAL_SOURCES', and the appropriate config values must also be provided.
+        If set to 'KNOWLEDGE_BASE' then `knowledge_base_id` must be provided, and `vector_search_config` may be.
+        If set to `EXTERNAL_SOURCES` then `sources` must also be provided.
+        NOTE: Support for EXTERNAL SOURCES was added in botocore 1.34.90
+    :param model_arn: The ARN of the foundation model used to generate a response. (templated)
+    :param prompt_template: The template for the prompt that's sent to the model for response generation.
+        You can include prompt placeholders, which are replaced before the prompt is sent to the model
+        to provide instructions and context to the model. In addition, you can include XML tags to delineate
+        meaningful sections of the prompt template. (templated)
+    :param knowledge_base_id: The unique identifier of the knowledge base that is queried. (templated)
+        Can only be specified if source_type='KNOWLEDGE_BASE'.
+    :param vector_search_config: How the results from the vector search should be returned. (templated)
+        Can only be specified if source_type='KNOWLEDGE_BASE'.
+        For more information, see https://docs.aws.amazon.com/bedrock/latest/userguide/kb-test-config.html.
+    :param sources: The documents used as reference for the response. (templated)
+        Can only be specified if source_type='EXTERNAL_SOURCES'.
+        NOTE: Support for EXTERNAL SOURCES was added in botocore 1.34.90
+    :param rag_kwargs: Additional keyword arguments to pass to the API call. (templated)
+    """
+
+    aws_hook_class = BedrockAgentRuntimeHook
+    template_fields: Sequence[str] = aws_template_fields(
+        "input",
+        "source_type",
+        "model_arn",
+        "prompt_template",
+        "knowledge_base_id",
+        "vector_search_config",
+        "sources",
+        "rag_kwargs",
+    )
+
+    def __init__(
+        self,
+        input: str,
+        source_type: str,
+        model_arn: str,
+        prompt_template: str | None = None,
+        knowledge_base_id: str | None = None,
+        vector_search_config: dict[str, Any] | None = None,
+        sources: list[dict[str, Any]] | None = None,
+        rag_kwargs: dict[str, Any] | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.input = input
+        self.prompt_template = prompt_template
+        self.source_type = source_type.upper()
+        self.knowledge_base_id = knowledge_base_id
+        self.model_arn = model_arn
+        self.vector_search_config = vector_search_config
+        self.sources = sources
+        self.rag_kwargs = rag_kwargs or {}
+
+    def validate_inputs(self):
+        if self.source_type == "KNOWLEDGE_BASE":
+            if self.knowledge_base_id is None:
+                raise AttributeError(
+                    "If `source_type` is set to 'KNOWLEDGE_BASE' then `knowledge_base_id` must be provided."
+                )
+            if self.sources is not None:
+                raise AttributeError(
+                    "`sources` can not be used when `source_type` is set to 'KNOWLEDGE_BASE'."
+                )
+        elif self.source_type == "EXTERNAL_SOURCES":
+            if self.sources is None:
+                raise AttributeError(
+                    "If `source_type` is set to `EXTERNAL_SOURCES` then `sources` must also be provided."
+                )
+            if self.vector_search_config or self.knowledge_base_id:
+                raise AttributeError(
+                    "`vector_search_config` and `knowledge_base_id` can not be used "
+                    "when `source_type` is set to `EXTERNAL_SOURCES`"
+                )
+        else:
+            raise AttributeError(
+                "`source_type` must be one of 'KNOWLEDGE_BASE' or 'EXTERNAL_SOURCES', "
+                "and the appropriate config values must also be provided."
+            )
+
+    def build_rag_config(self) -> dict[str, Any]:
+        result: dict[str, Any] = {}
+        base_config: dict[str, Any] = {
+            "modelArn": self.model_arn,
+        }
+
+        if self.prompt_template:
+            base_config["generationConfiguration"] = {
+                "promptTemplate": {"textPromptTemplate": self.prompt_template}
+            }
+
+        if self.source_type == "KNOWLEDGE_BASE":
+            if self.vector_search_config:
+                base_config["retrievalConfiguration"] = {
+                    "vectorSearchConfiguration": self.vector_search_config
+                }
+
+            result = {
+                "type": self.source_type,
+                "knowledgeBaseConfiguration": {
+                    **base_config,
+                    "knowledgeBaseId": self.knowledge_base_id,
+                },
+            }
+
+        if self.source_type == "EXTERNAL_SOURCES":
+            result = {
+                "type": self.source_type,
+                "externalSourcesConfiguration": {**base_config, "sources": self.sources},
+            }
+        return result
+
+    def execute(self, context: Context) -> Any:
+        self.validate_inputs()
+
+        try:
+            result = self.hook.conn.retrieve_and_generate(
+                input={"text": self.input},
+                retrieveAndGenerateConfiguration=self.build_rag_config(),
+                **self.rag_kwargs,
+            )
+        except botocore.exceptions.ParamValidationError as error:
+            if (
+                'Unknown parameter in retrieveAndGenerateConfiguration: "externalSourcesConfiguration"'
+                in str(error)
+            ) and (self.source_type == "EXTERNAL_SOURCES"):
+                self.log.error(
+                    "You are attempting to use External Sources and the BOTO API returned an "
+                    "error message which may indicate the need to update botocore to do this. \n"
+                    "Support for External Sources was added in botocore 1.34.90 and you are using botocore %s",
+                    ".".join(map(str, get_botocore_version())),
+                )
+            raise
+
+        self.log.info(
+            "\nPrompt: %s\nResponse: %s\nCitations: %s",
+            self.input,
+            result["output"]["text"],
+            result["citations"],
+        )
+        return result
+
+
+class BedrockRetrieveOperator(AwsBaseOperator[BedrockAgentRuntimeHook]):
+    """
+    Query a knowledge base and retrieve results with source citations.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:BedrockRetrieveOperator`
+
+    :param retrieval_query: The query to be made to the knowledge base. (templated)
+    :param knowledge_base_id: The unique identifier of the knowledge base that is queried. (templated)
+    :param vector_search_config: How the results from the vector search should be returned. (templated)
+        For more information, see https://docs.aws.amazon.com/bedrock/latest/userguide/kb-test-config.html.
+    :param retrieve_kwargs: Additional keyword arguments to pass to the API call. (templated)
+    """
+
+    aws_hook_class = BedrockAgentRuntimeHook
+    template_fields: Sequence[str] = aws_template_fields(
+        "retrieval_query",
+        "knowledge_base_id",
+        "vector_search_config",
+        "retrieve_kwargs",
+    )
+
+    def __init__(
+        self,
+        retrieval_query: str,
+        knowledge_base_id: str,
+        vector_search_config: dict[str, Any] | None = None,
+        retrieve_kwargs: dict[str, Any] | None = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.retrieval_query = retrieval_query
+        self.knowledge_base_id = knowledge_base_id
+        self.vector_search_config = vector_search_config
+        self.retrieve_kwargs = retrieve_kwargs or {}
+
+    def execute(self, context: Context) -> Any:
+        retrieval_configuration = (
+            {"retrievalConfiguration": {"vectorSearchConfiguration": self.vector_search_config}}
+            if self.vector_search_config
+            else {}
+        )
+
+        result = self.hook.conn.retrieve(
+            retrievalQuery={"text": self.retrieval_query},
+            knowledgeBaseId=self.knowledge_base_id,
+            **retrieval_configuration,
+            **self.retrieve_kwargs,
+        )
+
+        self.log.info("\nQuery: %s\nRetrieved: %s", self.retrieval_query, result["retrievalResults"])
+        return result
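
A usage sketch for the two new operators; the knowledge base ID and model ARN are placeholders, and the `vector_search_config` key follows the Bedrock `vectorSearchConfiguration` API:

from airflow.providers.amazon.aws.operators.bedrock import (
    BedrockRaGOperator,
    BedrockRetrieveOperator,
)

# Placeholder knowledge base ID and model ARN.
retrieve = BedrockRetrieveOperator(
    task_id="retrieve",
    retrieval_query="What is Amazon Bedrock?",
    knowledge_base_id="EXAMPLEKB01",
    vector_search_config={"numberOfResults": 4},
)

rag = BedrockRaGOperator(
    task_id="knowledge_base_rag",
    input="What is Amazon Bedrock?",
    source_type="KNOWLEDGE_BASE",
    knowledge_base_id="EXAMPLEKB01",
    model_arn="arn:aws:bedrock:us-east-1::foundation-model/anthropic.claude-v2",
)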

airflow/providers/amazon/aws/operators/comprehend.py (new file)
@@ -0,0 +1,192 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from functools import cached_property
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.configuration import conf
+from airflow.exceptions import AirflowException
+from airflow.providers.amazon.aws.hooks.comprehend import ComprehendHook
+from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
+from airflow.providers.amazon.aws.triggers.comprehend import ComprehendPiiEntitiesDetectionJobCompletedTrigger
+from airflow.providers.amazon.aws.utils import validate_execute_complete_event
+from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
+from airflow.utils.timezone import utcnow
+
+if TYPE_CHECKING:
+    import boto3
+
+    from airflow.utils.context import Context
+
+
+class ComprehendBaseOperator(AwsBaseOperator[ComprehendHook]):
+    """
+    Base operator for Comprehend service operators (not supposed to be used directly in DAGs).
+
+    :param input_data_config: The input properties for a PII entities detection job. (templated)
+    :param output_data_config: Provides configuration parameters for the output of PII entity detection
+        jobs. (templated)
+    :param data_access_role_arn: The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+        read access to your input data. (templated)
+    :param language_code: The language of the input documents. (templated)
+    """
+
+    aws_hook_class = ComprehendHook
+
+    template_fields: Sequence[str] = aws_template_fields(
+        "input_data_config", "output_data_config", "data_access_role_arn", "language_code"
+    )
+
+    template_fields_renderers: dict = {"input_data_config": "json", "output_data_config": "json"}
+
+    def __init__(
+        self,
+        input_data_config: dict,
+        output_data_config: dict,
+        data_access_role_arn: str,
+        language_code: str,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+        self.input_data_config = input_data_config
+        self.output_data_config = output_data_config
+        self.data_access_role_arn = data_access_role_arn
+        self.language_code = language_code
+
+    @cached_property
+    def client(self) -> boto3.client:
+        """Create and return the Comprehend client."""
+        return self.hook.conn
+
+    def execute(self, context: Context):
+        """Must be overridden in child classes."""
+        raise NotImplementedError("Please implement execute() in subclass")
+
+
+class ComprehendStartPiiEntitiesDetectionJobOperator(ComprehendBaseOperator):
+    """
+    Create a Comprehend PII entities detection job for a collection of documents.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:ComprehendStartPiiEntitiesDetectionJobOperator`
+
+    :param input_data_config: The input properties for a PII entities detection job. (templated)
+    :param output_data_config: Provides configuration parameters for the output of PII entity detection
+        jobs. (templated)
+    :param mode: Specifies whether the output provides the locations (offsets) of PII entities or a file in
+        which PII entities are redacted. If you set the mode parameter to ONLY_REDACTION, you must provide a
+        RedactionConfig in start_pii_entities_kwargs.
+    :param data_access_role_arn: The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+        read access to your input data. (templated)
+    :param language_code: The language of the input documents. (templated)
+    :param start_pii_entities_kwargs: Any optional parameters to pass to the job. If JobName is not provided
+        in start_pii_entities_kwargs, the operator will generate one.
+
+    :param wait_for_completion: Whether to wait for the job to complete. (default: True)
+    :param waiter_delay: Time in seconds to wait between status checks. (default: 60)
+    :param waiter_max_attempts: Maximum number of attempts to check for job completion. (default: 20)
+    :param deferrable: If True, the operator will wait asynchronously for the job to complete.
+        This implies waiting for completion. This mode requires the aiobotocore module to be installed.
+        (default: False)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default boto3 behaviour is used.
+    :param verify: Whether to verify SSL certificates. See:
+        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore client. See:
+        https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
+    """
+
+    def __init__(
+        self,
+        input_data_config: dict,
+        output_data_config: dict,
+        mode: str,
+        data_access_role_arn: str,
+        language_code: str,
+        start_pii_entities_kwargs: dict[str, Any] | None = None,
+        wait_for_completion: bool = True,
+        waiter_delay: int = 60,
+        waiter_max_attempts: int = 20,
+        deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        **kwargs,
+    ):
+        super().__init__(
+            input_data_config=input_data_config,
+            output_data_config=output_data_config,
+            data_access_role_arn=data_access_role_arn,
+            language_code=language_code,
+            **kwargs,
+        )
+        self.mode = mode
+        self.start_pii_entities_kwargs = start_pii_entities_kwargs or {}
+        self.wait_for_completion = wait_for_completion
+        self.waiter_delay = waiter_delay
+        self.waiter_max_attempts = waiter_max_attempts
+        self.deferrable = deferrable
+
+    def execute(self, context: Context) -> str:
+        if self.start_pii_entities_kwargs.get("JobName", None) is None:
+            self.start_pii_entities_kwargs["JobName"] = (
+                f"start_pii_entities_detection_job-{int(utcnow().timestamp())}"
+            )
+
+        self.log.info(
+            "Submitting start pii entities detection job '%s'.", self.start_pii_entities_kwargs["JobName"]
+        )
+        job_id = self.client.start_pii_entities_detection_job(
+            InputDataConfig=self.input_data_config,
+            OutputDataConfig=self.output_data_config,
+            Mode=self.mode,
+            DataAccessRoleArn=self.data_access_role_arn,
+            LanguageCode=self.language_code,
+            **self.start_pii_entities_kwargs,
+        )["JobId"]
+
+        message_description = f"start pii entities detection job {job_id} to complete."
+        if self.deferrable:
+            self.log.info("Deferring %s", message_description)
+            self.defer(
+                trigger=ComprehendPiiEntitiesDetectionJobCompletedTrigger(
+                    job_id=job_id,
+                    waiter_delay=self.waiter_delay,
+                    waiter_max_attempts=self.waiter_max_attempts,
+                    aws_conn_id=self.aws_conn_id,
+                ),
+                method_name="execute_complete",
+            )
+        elif self.wait_for_completion:
+            self.log.info("Waiting for %s", message_description)
+            self.hook.get_waiter("pii_entities_detection_job_complete").wait(
+                JobId=job_id,
+                WaiterConfig={"Delay": self.waiter_delay, "MaxAttempts": self.waiter_max_attempts},
+            )
+
+        return job_id
+
+    def execute_complete(self, context: Context, event: dict[str, Any] | None = None) -> str:
+        event = validate_execute_complete_event(event)
+        if event["status"] != "success":
+            raise AirflowException(f"Error while running job: {event}")
+
+        self.log.info("Comprehend pii entities detection job `%s` complete.", event["job_id"])
+        return event["job_id"]
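
A usage sketch for the new operator; the bucket URIs and IAM role ARN are placeholders, while `InputFormat` and `Mode` values are standard Comprehend API enums:

from airflow.providers.amazon.aws.operators.comprehend import (
    ComprehendStartPiiEntitiesDetectionJobOperator,
)

# Placeholder bucket paths and IAM role ARN.
start_pii_job = ComprehendStartPiiEntitiesDetectionJobOperator(
    task_id="start_pii_entities_detection_job",
    input_data_config={
        "S3Uri": "s3://my-bucket/input/",
        "InputFormat": "ONE_DOC_PER_LINE",
    },
    output_data_config={"S3Uri": "s3://my-bucket/output/"},
    mode="ONLY_OFFSETS",
    data_access_role_arn="arn:aws:iam::123456789012:role/ComprehendDataAccessRole",
    language_code="en",
)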