apache-airflow-providers-amazon 9.18.1rc1__py3-none-any.whl → 9.19.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. airflow/providers/amazon/__init__.py +1 -1
  2. airflow/providers/amazon/aws/hooks/sagemaker_unified_studio.py +1 -1
  3. airflow/providers/amazon/aws/links/base_aws.py +1 -1
  4. airflow/providers/amazon/aws/notifications/chime.py +1 -1
  5. airflow/providers/amazon/aws/operators/appflow.py +1 -1
  6. airflow/providers/amazon/aws/operators/athena.py +1 -1
  7. airflow/providers/amazon/aws/operators/batch.py +1 -1
  8. airflow/providers/amazon/aws/operators/bedrock.py +3 -4
  9. airflow/providers/amazon/aws/operators/cloud_formation.py +1 -1
  10. airflow/providers/amazon/aws/operators/comprehend.py +3 -4
  11. airflow/providers/amazon/aws/operators/datasync.py +1 -1
  12. airflow/providers/amazon/aws/operators/dms.py +2 -6
  13. airflow/providers/amazon/aws/operators/ec2.py +1 -1
  14. airflow/providers/amazon/aws/operators/ecs.py +1 -1
  15. airflow/providers/amazon/aws/operators/eks.py +1 -1
  16. airflow/providers/amazon/aws/operators/emr.py +1 -1
  17. airflow/providers/amazon/aws/operators/eventbridge.py +1 -1
  18. airflow/providers/amazon/aws/operators/glacier.py +1 -1
  19. airflow/providers/amazon/aws/operators/glue.py +1 -1
  20. airflow/providers/amazon/aws/operators/glue_crawler.py +2 -3
  21. airflow/providers/amazon/aws/operators/glue_databrew.py +1 -1
  22. airflow/providers/amazon/aws/operators/kinesis_analytics.py +1 -1
  23. airflow/providers/amazon/aws/operators/lambda_function.py +1 -1
  24. airflow/providers/amazon/aws/operators/mwaa.py +1 -1
  25. airflow/providers/amazon/aws/operators/neptune.py +1 -1
  26. airflow/providers/amazon/aws/operators/quicksight.py +1 -1
  27. airflow/providers/amazon/aws/operators/rds.py +1 -1
  28. airflow/providers/amazon/aws/operators/redshift_cluster.py +1 -1
  29. airflow/providers/amazon/aws/operators/redshift_data.py +1 -1
  30. airflow/providers/amazon/aws/operators/s3.py +1 -1
  31. airflow/providers/amazon/aws/operators/sagemaker.py +1 -1
  32. airflow/providers/amazon/aws/operators/sagemaker_unified_studio.py +1 -1
  33. airflow/providers/amazon/aws/operators/sns.py +1 -1
  34. airflow/providers/amazon/aws/operators/sqs.py +1 -1
  35. airflow/providers/amazon/aws/operators/ssm.py +1 -1
  36. airflow/providers/amazon/aws/operators/step_function.py +1 -1
  37. airflow/providers/amazon/aws/secrets/secrets_manager.py +2 -1
  38. airflow/providers/amazon/aws/secrets/systems_manager.py +2 -1
  39. airflow/providers/amazon/aws/sensors/athena.py +3 -4
  40. airflow/providers/amazon/aws/sensors/batch.py +1 -1
  41. airflow/providers/amazon/aws/sensors/bedrock.py +1 -1
  42. airflow/providers/amazon/aws/sensors/cloud_formation.py +2 -3
  43. airflow/providers/amazon/aws/sensors/comprehend.py +1 -1
  44. airflow/providers/amazon/aws/sensors/dms.py +1 -1
  45. airflow/providers/amazon/aws/sensors/dynamodb.py +1 -1
  46. airflow/providers/amazon/aws/sensors/ec2.py +1 -1
  47. airflow/providers/amazon/aws/sensors/ecs.py +1 -1
  48. airflow/providers/amazon/aws/sensors/eks.py +1 -1
  49. airflow/providers/amazon/aws/sensors/emr.py +1 -1
  50. airflow/providers/amazon/aws/sensors/glacier.py +1 -1
  51. airflow/providers/amazon/aws/sensors/glue.py +1 -1
  52. airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +1 -1
  53. airflow/providers/amazon/aws/sensors/glue_crawler.py +1 -1
  54. airflow/providers/amazon/aws/sensors/kinesis_analytics.py +1 -1
  55. airflow/providers/amazon/aws/sensors/lambda_function.py +1 -1
  56. airflow/providers/amazon/aws/sensors/mwaa.py +1 -1
  57. airflow/providers/amazon/aws/sensors/opensearch_serverless.py +1 -1
  58. airflow/providers/amazon/aws/sensors/quicksight.py +1 -1
  59. airflow/providers/amazon/aws/sensors/rds.py +1 -1
  60. airflow/providers/amazon/aws/sensors/redshift_cluster.py +1 -1
  61. airflow/providers/amazon/aws/sensors/s3.py +4 -5
  62. airflow/providers/amazon/aws/sensors/sagemaker.py +1 -1
  63. airflow/providers/amazon/aws/sensors/sagemaker_unified_studio.py +1 -1
  64. airflow/providers/amazon/aws/sensors/sqs.py +1 -1
  65. airflow/providers/amazon/aws/sensors/ssm.py +1 -1
  66. airflow/providers/amazon/aws/sensors/step_function.py +1 -1
  67. airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +1 -1
  68. airflow/providers/amazon/aws/transfers/exasol_to_s3.py +1 -1
  69. airflow/providers/amazon/aws/transfers/ftp_to_s3.py +1 -1
  70. airflow/providers/amazon/aws/transfers/gcs_to_s3.py +1 -1
  71. airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +1 -1
  72. airflow/providers/amazon/aws/transfers/google_api_to_s3.py +1 -1
  73. airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +1 -1
  74. airflow/providers/amazon/aws/transfers/http_to_s3.py +1 -1
  75. airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +1 -1
  76. airflow/providers/amazon/aws/transfers/local_to_s3.py +1 -1
  77. airflow/providers/amazon/aws/transfers/mongo_to_s3.py +1 -1
  78. airflow/providers/amazon/aws/transfers/redshift_to_s3.py +1 -1
  79. airflow/providers/amazon/aws/transfers/s3_to_dynamodb.py +1 -1
  80. airflow/providers/amazon/aws/transfers/s3_to_ftp.py +1 -1
  81. airflow/providers/amazon/aws/transfers/s3_to_redshift.py +1 -1
  82. airflow/providers/amazon/aws/transfers/s3_to_sftp.py +1 -1
  83. airflow/providers/amazon/aws/transfers/s3_to_sql.py +1 -1
  84. airflow/providers/amazon/aws/transfers/salesforce_to_s3.py +1 -1
  85. airflow/providers/amazon/aws/transfers/sftp_to_s3.py +1 -1
  86. airflow/providers/amazon/aws/transfers/sql_to_s3.py +1 -1
  87. {apache_airflow_providers_amazon-9.18.1rc1.dist-info → apache_airflow_providers_amazon-9.19.0rc1.dist-info}/METADATA +6 -6
  88. {apache_airflow_providers_amazon-9.18.1rc1.dist-info → apache_airflow_providers_amazon-9.19.0rc1.dist-info}/RECORD +92 -92
  89. {apache_airflow_providers_amazon-9.18.1rc1.dist-info → apache_airflow_providers_amazon-9.19.0rc1.dist-info}/WHEEL +0 -0
  90. {apache_airflow_providers_amazon-9.18.1rc1.dist-info → apache_airflow_providers_amazon-9.19.0rc1.dist-info}/entry_points.txt +0 -0
  91. {apache_airflow_providers_amazon-9.18.1rc1.dist-info → apache_airflow_providers_amazon-9.19.0rc1.dist-info}/licenses/LICENSE +0 -0
  92. {apache_airflow_providers_amazon-9.18.1rc1.dist-info → apache_airflow_providers_amazon-9.19.0rc1.dist-info}/licenses/NOTICE +0 -0
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "9.18.1"
32
+ __version__ = "9.19.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
35
  "2.11.0"
@@ -123,7 +123,7 @@ class SageMakerNotebookHook(BaseHook):
123
123
  if self.compute:
124
124
  start_execution_params["compute"] = self.compute
125
125
  else:
126
- start_execution_params["compute"] = {"instance_type": "ml.m4.xlarge"}
126
+ start_execution_params["compute"] = {"instance_type": "ml.m6i.xlarge"}
127
127
 
128
128
  print(start_execution_params)
129
129
  return self._sagemaker_studio.execution_client.start_execution(**start_execution_params)
@@ -25,7 +25,7 @@ from airflow.providers.common.compat.sdk import BaseOperatorLink, XCom
25
25
  if TYPE_CHECKING:
26
26
  from airflow.models import BaseOperator
27
27
  from airflow.models.taskinstancekey import TaskInstanceKey
28
- from airflow.utils.context import Context
28
+ from airflow.sdk import Context
29
29
 
30
30
 
31
31
  BASE_AWS_CONSOLE_LINK = "https://console.{aws_domain}"
@@ -24,7 +24,7 @@ from airflow.providers.amazon.aws.hooks.chime import ChimeWebhookHook
24
24
  from airflow.providers.common.compat.notifier import BaseNotifier
25
25
 
26
26
  if TYPE_CHECKING:
27
- from airflow.utils.context import Context
27
+ from airflow.sdk import Context
28
28
 
29
29
 
30
30
  class ChimeNotifier(BaseNotifier):
@@ -34,7 +34,7 @@ if TYPE_CHECKING:
34
34
  TaskTypeDef,
35
35
  )
36
36
 
37
- from airflow.utils.context import Context
37
+ from airflow.sdk import Context
38
38
 
39
39
  SUPPORTED_SOURCES = {"salesforce", "zendesk"}
40
40
  MANDATORY_FILTER_DATE_MSG = "The filter_date argument is mandatory for {entity}!"
@@ -33,7 +33,7 @@ from airflow.providers.common.compat.sdk import AirflowException
33
33
  if TYPE_CHECKING:
34
34
  from airflow.providers.common.compat.openlineage.facet import BaseFacet, Dataset, DatasetFacet
35
35
  from airflow.providers.openlineage.extractors.base import OperatorLineage
36
- from airflow.utils.context import Context
36
+ from airflow.sdk import Context
37
37
 
38
38
 
39
39
  class AthenaOperator(AwsBaseOperator[AthenaHook]):
@@ -49,7 +49,7 @@ from airflow.providers.amazon.aws.utils.task_log_fetcher import AwsTaskLogFetche
49
49
  from airflow.providers.common.compat.sdk import AirflowException
50
50
 
51
51
  if TYPE_CHECKING:
52
- from airflow.utils.context import Context
52
+ from airflow.sdk import Context
53
53
 
54
54
 
55
55
  class BatchOperator(AwsBaseOperator[BatchClientHook]):
@@ -40,12 +40,11 @@ from airflow.providers.amazon.aws.triggers.bedrock import (
40
40
  )
41
41
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
42
42
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
43
- from airflow.providers.common.compat.sdk import AirflowException
43
+ from airflow.providers.common.compat.sdk import AirflowException, timezone
44
44
  from airflow.utils.helpers import prune_dict
45
- from airflow.utils.timezone import utcnow
46
45
 
47
46
  if TYPE_CHECKING:
48
- from airflow.utils.context import Context
47
+ from airflow.sdk import Context
49
48
 
50
49
 
51
50
  class BedrockInvokeModelOperator(AwsBaseOperator[BedrockRuntimeHook]):
@@ -237,7 +236,7 @@ class BedrockCustomizeModelOperator(AwsBaseOperator[BedrockHook]):
237
236
  if not self.ensure_unique_job_name:
238
237
  raise error
239
238
  retry = True
240
- self.job_name = f"{self.job_name}-{int(utcnow().timestamp())}"
239
+ self.job_name = f"{self.job_name}-{int(timezone.utcnow().timestamp())}"
241
240
  self.log.info("Changed job name to '%s' to avoid collision.", self.job_name)
242
241
 
243
242
  if response["ResponseMetadata"]["HTTPStatusCode"] != 201:
@@ -27,7 +27,7 @@ from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
27
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
28
28
 
29
29
  if TYPE_CHECKING:
30
- from airflow.utils.context import Context
30
+ from airflow.sdk import Context
31
31
 
32
32
 
33
33
  class CloudFormationCreateStackOperator(AwsBaseOperator[CloudFormationHook]):
@@ -33,13 +33,12 @@ from airflow.providers.amazon.aws.triggers.comprehend import (
33
33
  )
34
34
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
35
35
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
36
- from airflow.providers.common.compat.sdk import AirflowException
37
- from airflow.utils.timezone import utcnow
36
+ from airflow.providers.common.compat.sdk import AirflowException, timezone
38
37
 
39
38
  if TYPE_CHECKING:
40
39
  import boto3
41
40
 
42
- from airflow.utils.context import Context
41
+ from airflow.sdk import Context
43
42
 
44
43
 
45
44
  class ComprehendBaseOperator(AwsBaseOperator[ComprehendHook]):
@@ -157,7 +156,7 @@ class ComprehendStartPiiEntitiesDetectionJobOperator(ComprehendBaseOperator):
157
156
  def execute(self, context: Context) -> str:
158
157
  if self.start_pii_entities_kwargs.get("JobName", None) is None:
159
158
  self.start_pii_entities_kwargs["JobName"] = (
160
- f"start_pii_entities_detection_job-{int(utcnow().timestamp())}"
159
+ f"start_pii_entities_detection_job-{int(timezone.utcnow().timestamp())}"
161
160
  )
162
161
 
163
162
  self.log.info(
@@ -30,7 +30,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
30
30
  from airflow.providers.common.compat.sdk import AirflowException, AirflowTaskTimeout
31
31
 
32
32
  if TYPE_CHECKING:
33
- from airflow.utils.context import Context
33
+ from airflow.sdk import Context
34
34
 
35
35
 
36
36
  class DataSyncOperator(AwsBaseOperator[DataSyncHook]):
@@ -19,7 +19,7 @@ from __future__ import annotations
19
19
 
20
20
  from collections.abc import Sequence
21
21
  from datetime import datetime
22
- from typing import TYPE_CHECKING, Any, ClassVar
22
+ from typing import Any, ClassVar
23
23
 
24
24
  from airflow.configuration import conf
25
25
  from airflow.providers.amazon.aws.hooks.dms import DmsHook
@@ -32,11 +32,7 @@ from airflow.providers.amazon.aws.triggers.dms import (
32
32
  DmsReplicationTerminalStatusTrigger,
33
33
  )
34
34
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
35
- from airflow.providers.common.compat.sdk import AirflowException
36
- from airflow.utils.context import Context
37
-
38
- if TYPE_CHECKING:
39
- from airflow.utils.context import Context
35
+ from airflow.providers.common.compat.sdk import AirflowException, Context
40
36
 
41
37
 
42
38
  class DmsCreateTaskOperator(AwsBaseOperator[DmsHook]):
@@ -30,7 +30,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
30
30
  from airflow.providers.common.compat.sdk import AirflowException
31
31
 
32
32
  if TYPE_CHECKING:
33
- from airflow.utils.context import Context
33
+ from airflow.sdk import Context
34
34
 
35
35
 
36
36
  class EC2StartInstanceOperator(AwsBaseOperator[EC2Hook]):
@@ -45,7 +45,7 @@ if TYPE_CHECKING:
45
45
  import boto3
46
46
 
47
47
  from airflow.models import TaskInstance
48
- from airflow.utils.context import Context
48
+ from airflow.sdk import Context
49
49
 
50
50
 
51
51
  class EcsBaseOperator(AwsBaseOperator[EcsHook]):
@@ -54,7 +54,7 @@ except ImportError:
54
54
  )
55
55
 
56
56
  if TYPE_CHECKING:
57
- from airflow.utils.context import Context
57
+ from airflow.sdk import Context
58
58
 
59
59
 
60
60
  CHECK_INTERVAL_SECONDS = 15
@@ -62,7 +62,7 @@ from airflow.providers.common.compat.sdk import AirflowException
62
62
  from airflow.utils.helpers import exactly_one, prune_dict
63
63
 
64
64
  if TYPE_CHECKING:
65
- from airflow.utils.context import Context
65
+ from airflow.sdk import Context
66
66
 
67
67
 
68
68
  class EmrAddStepsOperator(AwsBaseOperator[EmrHook]):
@@ -26,7 +26,7 @@ from airflow.providers.common.compat.sdk import AirflowException
26
26
  from airflow.utils.helpers import prune_dict
27
27
 
28
28
  if TYPE_CHECKING:
29
- from airflow.utils.context import Context
29
+ from airflow.sdk import Context
30
30
 
31
31
 
32
32
  class EventBridgePutEventsOperator(AwsBaseOperator[EventBridgeHook]):
@@ -25,7 +25,7 @@ from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
25
25
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
26
26
 
27
27
  if TYPE_CHECKING:
28
- from airflow.utils.context import Context
28
+ from airflow.sdk import Context
29
29
 
30
30
 
31
31
  class GlacierCreateJobOperator(AwsBaseOperator[GlacierHook]):
@@ -39,7 +39,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
39
39
  from airflow.providers.common.compat.sdk import AirflowException
40
40
 
41
41
  if TYPE_CHECKING:
42
- from airflow.utils.context import Context
42
+ from airflow.sdk import Context
43
43
 
44
44
 
45
45
  class GlueJobOperator(AwsBaseOperator[GlueJobHook]):
@@ -21,6 +21,7 @@ from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
23
  from airflow.configuration import conf
24
+ from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook
24
25
  from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
25
26
  from airflow.providers.amazon.aws.triggers.glue_crawler import GlueCrawlerCompleteTrigger
26
27
  from airflow.providers.amazon.aws.utils import validate_execute_complete_event
@@ -28,9 +29,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
28
29
  from airflow.providers.common.compat.sdk import AirflowException
29
30
 
30
31
  if TYPE_CHECKING:
31
- from airflow.utils.context import Context
32
-
33
- from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook
32
+ from airflow.sdk import Context
34
33
 
35
34
 
36
35
  class GlueCrawlerOperator(AwsBaseOperator[GlueCrawlerHook]):
@@ -29,7 +29,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
29
  from airflow.providers.common.compat.sdk import AirflowException
30
30
 
31
31
  if TYPE_CHECKING:
32
- from airflow.utils.context import Context
32
+ from airflow.sdk import Context
33
33
 
34
34
 
35
35
  class GlueDataBrewStartJobOperator(AwsBaseOperator[GlueDataBrewHook]):
@@ -32,7 +32,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
32
32
  from airflow.providers.common.compat.sdk import AirflowException
33
33
 
34
34
  if TYPE_CHECKING:
35
- from airflow.utils.context import Context
35
+ from airflow.sdk import Context
36
36
 
37
37
 
38
38
  class KinesisAnalyticsV2CreateApplicationOperator(AwsBaseOperator[KinesisAnalyticsV2Hook]):
@@ -31,7 +31,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
31
31
  from airflow.providers.common.compat.sdk import AirflowException
32
32
 
33
33
  if TYPE_CHECKING:
34
- from airflow.utils.context import Context
34
+ from airflow.sdk import Context
35
35
 
36
36
 
37
37
  class LambdaCreateFunctionOperator(AwsBaseOperator[LambdaHook]):
@@ -30,7 +30,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
30
30
  from airflow.providers.common.compat.sdk import AirflowException
31
31
 
32
32
  if TYPE_CHECKING:
33
- from airflow.utils.context import Context
33
+ from airflow.sdk import Context
34
34
 
35
35
 
36
36
  class MwaaTriggerDagRunOperator(AwsBaseOperator[MwaaHook]):
@@ -34,7 +34,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
34
34
  from airflow.providers.common.compat.sdk import AirflowException
35
35
 
36
36
  if TYPE_CHECKING:
37
- from airflow.utils.context import Context
37
+ from airflow.sdk import Context
38
38
 
39
39
 
40
40
  def handle_waitable_exception(
@@ -24,7 +24,7 @@ from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
24
24
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
25
25
 
26
26
  if TYPE_CHECKING:
27
- from airflow.utils.context import Context
27
+ from airflow.sdk import Context
28
28
 
29
29
 
30
30
  class QuickSightCreateIngestionOperator(AwsBaseOperator[QuickSightHook]):
@@ -41,7 +41,7 @@ from airflow.utils.helpers import prune_dict
41
41
  if TYPE_CHECKING:
42
42
  from mypy_boto3_rds.type_defs import TagTypeDef
43
43
 
44
- from airflow.utils.context import Context
44
+ from airflow.sdk import Context
45
45
 
46
46
 
47
47
  class RdsBaseOperator(AwsBaseOperator[RdsHook]):
@@ -37,7 +37,7 @@ from airflow.providers.common.compat.sdk import AirflowException
37
37
  from airflow.utils.helpers import prune_dict
38
38
 
39
39
  if TYPE_CHECKING:
40
- from airflow.utils.context import Context
40
+ from airflow.sdk import Context
41
41
 
42
42
 
43
43
  class RedshiftCreateClusterOperator(AwsBaseOperator[RedshiftHook]):
@@ -33,7 +33,7 @@ if TYPE_CHECKING:
33
33
  GetStatementResultResponseTypeDef,
34
34
  )
35
35
 
36
- from airflow.utils.context import Context
36
+ from airflow.sdk import Context
37
37
 
38
38
 
39
39
  class RedshiftDataOperator(AwsBaseOperator[RedshiftDataHook]):
@@ -37,7 +37,7 @@ from airflow.utils.helpers import exactly_one
37
37
  if TYPE_CHECKING:
38
38
  from datetime import datetime
39
39
 
40
- from airflow.utils.context import Context
40
+ from airflow.sdk import Context
41
41
 
42
42
  BUCKET_DOES_NOT_EXIST_MSG = "Bucket with name: %s doesn't exist"
43
43
 
@@ -48,7 +48,7 @@ from airflow.utils.helpers import prune_dict
48
48
  if TYPE_CHECKING:
49
49
  from airflow.providers.common.compat.openlineage.facet import Dataset
50
50
  from airflow.providers.openlineage.extractors.base import OperatorLineage
51
- from airflow.utils.context import Context
51
+ from airflow.sdk import Context
52
52
 
53
53
  # DEFAULT_CONN_ID: str = "aws_default"
54
54
  CHECK_INTERVAL_SECOND: int = 30
@@ -35,7 +35,7 @@ from airflow.providers.amazon.aws.triggers.sagemaker_unified_studio import (
35
35
  from airflow.providers.common.compat.sdk import AirflowException, BaseOperator
36
36
 
37
37
  if TYPE_CHECKING:
38
- from airflow.utils.context import Context
38
+ from airflow.sdk import Context
39
39
 
40
40
 
41
41
  class SageMakerNotebookOperator(BaseOperator):
@@ -27,7 +27,7 @@ from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
27
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
28
28
 
29
29
  if TYPE_CHECKING:
30
- from airflow.utils.context import Context
30
+ from airflow.sdk import Context
31
31
 
32
32
 
33
33
  class SnsPublishOperator(AwsBaseOperator[SnsHook]):
@@ -26,7 +26,7 @@ from airflow.providers.amazon.aws.operators.base_aws import AwsBaseOperator
26
26
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
27
27
 
28
28
  if TYPE_CHECKING:
29
- from airflow.utils.context import Context
29
+ from airflow.sdk import Context
30
30
 
31
31
 
32
32
  class SqsPublishOperator(AwsBaseOperator[SqsHook]):
@@ -27,7 +27,7 @@ from airflow.providers.amazon.aws.utils import validate_execute_complete_event
27
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
28
28
 
29
29
  if TYPE_CHECKING:
30
- from airflow.utils.context import Context
30
+ from airflow.sdk import Context
31
31
 
32
32
 
33
33
  class SsmRunCommandOperator(AwsBaseOperator[SsmHook]):
@@ -34,7 +34,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
34
34
  from airflow.providers.common.compat.sdk import AirflowException
35
35
 
36
36
  if TYPE_CHECKING:
37
- from airflow.utils.context import Context
37
+ from airflow.sdk import Context
38
38
 
39
39
 
40
40
  class StepFunctionStartExecutionOperator(AwsBaseOperator[StepFunctionHook]):
@@ -225,11 +225,12 @@ class SecretsManagerBackend(BaseSecretsBackend, LoggingMixin):
225
225
  return standardized_secret
226
226
  return secret
227
227
 
228
- def get_variable(self, key: str) -> str | None:
228
+ def get_variable(self, key: str, team_name: str | None = None) -> str | None:
229
229
  """
230
230
  Get Airflow Variable.
231
231
 
232
232
  :param key: Variable Key
233
+ :param team_name: Team name associated to the task trying to access the variable (if any)
233
234
  :return: Variable Value
234
235
  """
235
236
  if self.variables_prefix is None:
@@ -143,11 +143,12 @@ class SystemsManagerParameterStoreBackend(BaseSecretsBackend, LoggingMixin):
143
143
 
144
144
  return self._get_secret(self.connections_prefix, conn_id, self.connections_lookup_pattern)
145
145
 
146
- def get_variable(self, key: str) -> str | None:
146
+ def get_variable(self, key: str, team_name: str | None = None) -> str | None:
147
147
  """
148
148
  Get Airflow Variable.
149
149
 
150
150
  :param key: Variable Key
151
+ :param team_name: Team name associated to the task trying to access the variable (if any)
151
152
  :return: Variable Value
152
153
  """
153
154
  if self.variables_prefix is None:
@@ -20,14 +20,13 @@ from __future__ import annotations
20
20
  from collections.abc import Sequence
21
21
  from typing import TYPE_CHECKING, Any
22
22
 
23
+ from airflow.providers.amazon.aws.hooks.athena import AthenaHook
23
24
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
24
25
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
26
+ from airflow.providers.common.compat.sdk import AirflowException
25
27
 
26
28
  if TYPE_CHECKING:
27
- from airflow.utils.context import Context
28
-
29
- from airflow.providers.amazon.aws.hooks.athena import AthenaHook
30
- from airflow.providers.common.compat.sdk import AirflowException
29
+ from airflow.sdk import Context
31
30
 
32
31
 
33
32
  class AthenaSensor(AwsBaseSensor[AthenaHook]):
@@ -28,7 +28,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
28
28
  from airflow.providers.common.compat.sdk import AirflowException
29
29
 
30
30
  if TYPE_CHECKING:
31
- from airflow.utils.context import Context
31
+ from airflow.sdk import Context
32
32
 
33
33
 
34
34
  class BatchSensor(AwsBaseSensor[BatchClientHook]):
@@ -37,7 +37,7 @@ from airflow.providers.common.compat.sdk import AirflowException
37
37
 
38
38
  if TYPE_CHECKING:
39
39
  from airflow.providers.amazon.aws.triggers.bedrock import BedrockBaseBatchInferenceTrigger
40
- from airflow.utils.context import Context
40
+ from airflow.sdk import Context
41
41
 
42
42
 
43
43
  _GenericBedrockHook = TypeVar("_GenericBedrockHook", BedrockAgentHook, BedrockHook)
@@ -22,13 +22,12 @@ from __future__ import annotations
22
22
  from collections.abc import Sequence
23
23
  from typing import TYPE_CHECKING
24
24
 
25
+ from airflow.providers.amazon.aws.hooks.cloud_formation import CloudFormationHook
25
26
  from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
27
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
27
28
 
28
29
  if TYPE_CHECKING:
29
- from airflow.utils.context import Context
30
-
31
- from airflow.providers.amazon.aws.hooks.cloud_formation import CloudFormationHook
30
+ from airflow.sdk import Context
32
31
 
33
32
 
34
33
  class CloudFormationCreateStackSensor(AwsBaseSensor[CloudFormationHook]):
@@ -31,7 +31,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
31
31
  from airflow.providers.common.compat.sdk import AirflowException
32
32
 
33
33
  if TYPE_CHECKING:
34
- from airflow.utils.context import Context
34
+ from airflow.sdk import Context
35
35
 
36
36
 
37
37
  class ComprehendBaseSensor(AwsBaseSensor[ComprehendHook]):
@@ -26,7 +26,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
26
26
  from airflow.providers.common.compat.sdk import AirflowException
27
27
 
28
28
  if TYPE_CHECKING:
29
- from airflow.utils.context import Context
29
+ from airflow.sdk import Context
30
30
 
31
31
 
32
32
  class DmsTaskBaseSensor(AwsBaseSensor[DmsHook]):
@@ -26,7 +26,7 @@ from airflow.providers.amazon.aws.sensors.base_aws import AwsBaseSensor
26
26
  from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
27
27
 
28
28
  if TYPE_CHECKING:
29
- from airflow.utils.context import Context
29
+ from airflow.sdk import Context
30
30
 
31
31
 
32
32
  class DynamoDBValueSensor(AwsBaseSensor[DynamoDBHook]):
@@ -29,7 +29,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
29
  from airflow.providers.common.compat.sdk import AirflowException
30
30
 
31
31
  if TYPE_CHECKING:
32
- from airflow.utils.context import Context
32
+ from airflow.sdk import Context
33
33
 
34
34
 
35
35
  class EC2InstanceStateSensor(AwsBaseSensor[EC2Hook]):
@@ -33,7 +33,7 @@ from airflow.providers.common.compat.sdk import AirflowException
33
33
  if TYPE_CHECKING:
34
34
  import boto3
35
35
 
36
- from airflow.utils.context import Context
36
+ from airflow.sdk import Context
37
37
 
38
38
 
39
39
  def _check_failed(current_state, target_state, failure_states) -> None:
@@ -35,7 +35,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
35
35
  from airflow.providers.common.compat.sdk import AirflowException
36
36
 
37
37
  if TYPE_CHECKING:
38
- from airflow.utils.context import Context
38
+ from airflow.sdk import Context
39
39
 
40
40
 
41
41
  DEFAULT_CONN_ID = "aws_default"
@@ -35,7 +35,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
35
35
  from airflow.providers.common.compat.sdk import AirflowException
36
36
 
37
37
  if TYPE_CHECKING:
38
- from airflow.utils.context import Context
38
+ from airflow.sdk import Context
39
39
 
40
40
 
41
41
  class EmrBaseSensor(AwsBaseSensor[EmrHook]):
@@ -27,7 +27,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
27
27
  from airflow.providers.common.compat.sdk import AirflowException
28
28
 
29
29
  if TYPE_CHECKING:
30
- from airflow.utils.context import Context
30
+ from airflow.sdk import Context
31
31
 
32
32
 
33
33
  class JobStatus(Enum):
@@ -33,7 +33,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
33
33
  from airflow.providers.common.compat.sdk import AirflowException
34
34
 
35
35
  if TYPE_CHECKING:
36
- from airflow.utils.context import Context
36
+ from airflow.sdk import Context
37
37
 
38
38
 
39
39
  class GlueJobSensor(AwsBaseSensor[GlueJobHook]):
@@ -30,7 +30,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
30
30
  from airflow.providers.common.compat.sdk import AirflowException
31
31
 
32
32
  if TYPE_CHECKING:
33
- from airflow.utils.context import Context
33
+ from airflow.sdk import Context
34
34
 
35
35
 
36
36
  class GlueCatalogPartitionSensor(AwsBaseSensor[GlueCatalogHook]):
@@ -26,7 +26,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
26
26
  from airflow.providers.common.compat.sdk import AirflowException
27
27
 
28
28
  if TYPE_CHECKING:
29
- from airflow.utils.context import Context
29
+ from airflow.sdk import Context
30
30
 
31
31
 
32
32
  class GlueCrawlerSensor(AwsBaseSensor[GlueCrawlerHook]):
@@ -29,7 +29,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
29
29
  from airflow.providers.common.compat.sdk import AirflowException
30
30
 
31
31
  if TYPE_CHECKING:
32
- from airflow.utils.context import Context
32
+ from airflow.sdk import Context
33
33
 
34
34
 
35
35
  class KinesisAnalyticsV2BaseSensor(AwsBaseSensor[KinesisAnalyticsV2Hook]):
@@ -27,7 +27,7 @@ from airflow.providers.amazon.aws.utils.mixins import aws_template_fields
27
27
  from airflow.providers.common.compat.sdk import AirflowException
28
28
 
29
29
  if TYPE_CHECKING:
30
- from airflow.utils.context import Context
30
+ from airflow.sdk import Context
31
31
 
32
32
 
33
33
  class LambdaFunctionStateSensor(AwsBaseSensor[LambdaHook]):