apache-airflow-providers-amazon 8.4.0__tar.gz → 8.5.0rc1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {apache-airflow-providers-amazon-8.4.0/apache_airflow_providers_amazon.egg-info → apache-airflow-providers-amazon-8.5.0rc1}/PKG-INFO +8 -6
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/README.rst +4 -3
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/__init__.py +1 -1
- apache-airflow-providers-amazon-8.5.0rc1/airflow/providers/amazon/aws/hooks/eventbridge.py +90 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/s3.py +2 -2
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/batch.py +2 -2
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/eventbridge.py +85 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/rds.py +8 -1
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/sagemaker.py +141 -15
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/sqs.py +44 -50
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/gcs_to_s3.py +48 -21
- apache-airflow-providers-amazon-8.5.0rc1/airflow/providers/amazon/aws/triggers/sqs.py +168 -0
- apache-airflow-providers-amazon-8.5.0rc1/airflow/providers/amazon/aws/utils/sqs.py +90 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/get_provider_info.py +5 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1/apache_airflow_providers_amazon.egg-info}/PKG-INFO +8 -6
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/apache_airflow_providers_amazon.egg-info/SOURCES.txt +2 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/apache_airflow_providers_amazon.egg-info/requires.txt +5 -2
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/pyproject.toml +1 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/setup.cfg +5 -5
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/setup.py +2 -1
- apache-airflow-providers-amazon-8.4.0/airflow/providers/amazon/aws/hooks/eventbridge.py +0 -27
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/LICENSE +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/MANIFEST.in +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/NOTICE +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/exceptions.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/appflow.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/athena.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/base_aws.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/batch_client.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/batch_waiters.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/batch_waiters.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/chime.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/cloud_formation.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/datasync.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/dms.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/dynamodb.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/ec2.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/ecr.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/ecs.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/eks.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/emr.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/glacier.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/glue.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/glue_catalog.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/glue_crawler.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/kinesis.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/lambda_function.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/logs.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/quicksight.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/rds.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/redshift_cluster.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/redshift_data.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/redshift_sql.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/sagemaker.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/secrets_manager.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/ses.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/sns.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/sqs.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/ssm.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/step_function.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/hooks/sts.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/links/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/links/base_aws.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/links/batch.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/links/emr.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/links/glue.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/links/logs.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/log/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/log/s3_task_handler.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/notifications/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/notifications/chime.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/appflow.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/athena.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/cloud_formation.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/datasync.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/dms.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/ec2.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/ecs.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/eks.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/emr.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/glacier.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/glue.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/glue_crawler.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/lambda_function.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/quicksight.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/redshift_cluster.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/redshift_data.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/sns.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/sqs.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/operators/step_function.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/secrets/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/secrets/secrets_manager.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/secrets/systems_manager.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/athena.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/batch.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/cloud_formation.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/dms.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/dynamodb.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/ec2.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/ecs.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/eks.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/emr.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/glacier.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/glue.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/glue_crawler.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/lambda_function.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/quicksight.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/rds.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/redshift_cluster.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/sagemaker.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/sensors/step_function.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/azure_blob_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/base.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/exasol_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/ftp_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/google_api_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/local_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/mongo_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/redshift_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/s3_to_ftp.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/s3_to_redshift.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/s3_to_sftp.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/s3_to_sql.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/salesforce_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/sftp_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/transfers/sql_to_s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/athena.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/base.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/batch.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/ec2.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/ecs.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/eks.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/emr.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/glue.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/glue_crawler.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/rds.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/redshift_cluster.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/s3.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/sagemaker.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/triggers/step_function.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/connection_wrapper.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/eks_get_token.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/emailer.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/rds.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/redshift.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/sagemaker.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/tags.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/task_log_fetcher.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/waiter.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/utils/waiter_with_logging.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/__init__.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/appflow.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/athena.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/base_waiter.py +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/batch.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/dynamodb.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/ecs.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/eks.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/emr-containers.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/emr-serverless.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/emr.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/glue.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/redshift.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/sagemaker.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/airflow/providers/amazon/aws/waiters/stepfunctions.json +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/apache_airflow_providers_amazon.egg-info/dependency_links.txt +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/apache_airflow_providers_amazon.egg-info/entry_points.txt +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/apache_airflow_providers_amazon.egg-info/not-zip-safe +0 -0
- {apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/apache_airflow_providers_amazon.egg-info/top_level.txt +0 -0
@@ -1,14 +1,14 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: apache-airflow-providers-amazon
|
3
|
-
Version: 8.
|
3
|
+
Version: 8.5.0rc1
|
4
4
|
Summary: Provider for Apache Airflow. Implements apache-airflow-providers-amazon package
|
5
5
|
Home-page: https://airflow.apache.org/
|
6
6
|
Download-URL: https://archive.apache.org/dist/airflow/providers
|
7
7
|
Author: Apache Software Foundation
|
8
8
|
Author-email: dev@airflow.apache.org
|
9
9
|
License: Apache License 2.0
|
10
|
-
Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.
|
11
|
-
Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.
|
10
|
+
Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.5.0/
|
11
|
+
Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.5.0/changelog.html
|
12
12
|
Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
|
13
13
|
Project-URL: Source Code, https://github.com/apache/airflow
|
14
14
|
Project-URL: Slack Chat, https://s.apache.org/airflow-slack
|
@@ -39,6 +39,7 @@ Provides-Extra: http
|
|
39
39
|
Provides-Extra: imap
|
40
40
|
Provides-Extra: microsoft.azure
|
41
41
|
Provides-Extra: mongo
|
42
|
+
Provides-Extra: openlineage
|
42
43
|
Provides-Extra: salesforce
|
43
44
|
Provides-Extra: ssh
|
44
45
|
Provides-Extra: pandas
|
@@ -84,7 +85,7 @@ License-File: NOTICE
|
|
84
85
|
|
85
86
|
Package ``apache-airflow-providers-amazon``
|
86
87
|
|
87
|
-
Release: ``8.
|
88
|
+
Release: ``8.5.0rc1``
|
88
89
|
|
89
90
|
|
90
91
|
Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
|
@@ -97,7 +98,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
|
|
97
98
|
are in ``airflow.providers.amazon`` python package.
|
98
99
|
|
99
100
|
You can find package information and changelog for the provider
|
100
|
-
in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.
|
101
|
+
in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.5.0/>`_.
|
101
102
|
|
102
103
|
|
103
104
|
Installation
|
@@ -157,9 +158,10 @@ Dependent package
|
|
157
158
|
`apache-airflow-providers-imap <https://airflow.apache.org/docs/apache-airflow-providers-imap>`_ ``imap``
|
158
159
|
`apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
|
159
160
|
`apache-airflow-providers-mongo <https://airflow.apache.org/docs/apache-airflow-providers-mongo>`_ ``mongo``
|
161
|
+
`apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
|
160
162
|
`apache-airflow-providers-salesforce <https://airflow.apache.org/docs/apache-airflow-providers-salesforce>`_ ``salesforce``
|
161
163
|
`apache-airflow-providers-ssh <https://airflow.apache.org/docs/apache-airflow-providers-ssh>`_ ``ssh``
|
162
164
|
====================================================================================================================== ===================
|
163
165
|
|
164
166
|
The changelog for the provider package can be found in the
|
165
|
-
`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.
|
167
|
+
`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.5.0/changelog.html>`_.
|
{apache-airflow-providers-amazon-8.4.0 → apache-airflow-providers-amazon-8.5.0rc1}/README.rst
RENAMED
@@ -36,7 +36,7 @@
|
|
36
36
|
|
37
37
|
Package ``apache-airflow-providers-amazon``
|
38
38
|
|
39
|
-
Release: ``8.
|
39
|
+
Release: ``8.5.0rc1``
|
40
40
|
|
41
41
|
|
42
42
|
Amazon integration (including `Amazon Web Services (AWS) <https://aws.amazon.com/>`__).
|
@@ -49,7 +49,7 @@ This is a provider package for ``amazon`` provider. All classes for this provide
|
|
49
49
|
are in ``airflow.providers.amazon`` python package.
|
50
50
|
|
51
51
|
You can find package information and changelog for the provider
|
52
|
-
in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.
|
52
|
+
in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.5.0/>`_.
|
53
53
|
|
54
54
|
|
55
55
|
Installation
|
@@ -109,9 +109,10 @@ Dependent package
|
|
109
109
|
`apache-airflow-providers-imap <https://airflow.apache.org/docs/apache-airflow-providers-imap>`_ ``imap``
|
110
110
|
`apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
|
111
111
|
`apache-airflow-providers-mongo <https://airflow.apache.org/docs/apache-airflow-providers-mongo>`_ ``mongo``
|
112
|
+
`apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
|
112
113
|
`apache-airflow-providers-salesforce <https://airflow.apache.org/docs/apache-airflow-providers-salesforce>`_ ``salesforce``
|
113
114
|
`apache-airflow-providers-ssh <https://airflow.apache.org/docs/apache-airflow-providers-ssh>`_ ``ssh``
|
114
115
|
====================================================================================================================== ===================
|
115
116
|
|
116
117
|
The changelog for the provider package can be found in the
|
117
|
-
`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.
|
118
|
+
`changelog <https://airflow.apache.org/docs/apache-airflow-providers-amazon/8.5.0/changelog.html>`_.
|
@@ -0,0 +1,90 @@
|
|
1
|
+
# Licensed to the Apache Software Foundation (ASF) under one
|
2
|
+
# or more contributor license agreements. See the NOTICE file
|
3
|
+
# distributed with this work for additional information
|
4
|
+
# regarding copyright ownership. The ASF licenses this file
|
5
|
+
# to you under the Apache License, Version 2.0 (the
|
6
|
+
# "License"); you may not use this file except in compliance
|
7
|
+
# with the License. You may obtain a copy of the License at
|
8
|
+
#
|
9
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
10
|
+
#
|
11
|
+
# Unless required by applicable law or agreed to in writing,
|
12
|
+
# software distributed under the License is distributed on an
|
13
|
+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
14
|
+
# KIND, either express or implied. See the License for the
|
15
|
+
# specific language governing permissions and limitations
|
16
|
+
# under the License.
|
17
|
+
from __future__ import annotations
|
18
|
+
|
19
|
+
import json
|
20
|
+
|
21
|
+
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
|
22
|
+
from airflow.providers.amazon.aws.utils import trim_none_values
|
23
|
+
|
24
|
+
|
25
|
+
def _validate_json(pattern: str) -> None:
|
26
|
+
try:
|
27
|
+
json.loads(pattern)
|
28
|
+
except ValueError:
|
29
|
+
raise ValueError("`event_pattern` must be a valid JSON string.")
|
30
|
+
|
31
|
+
|
32
|
+
class EventBridgeHook(AwsBaseHook):
    """Thin wrapper around the boto3 ``events`` client (Amazon EventBridge)."""

    def __init__(self, *args, **kwargs):
        # Pin the client type; all other connection arguments pass through to AwsBaseHook.
        super().__init__(client_type="events", *args, **kwargs)

    def put_rule(
        self,
        name: str,
        description: str | None = None,
        event_bus_name: str | None = None,
        event_pattern: str | None = None,
        role_arn: str | None = None,
        schedule_expression: str | None = None,
        state: str | None = None,
        tags: list[dict] | None = None,
        **kwargs,
    ):
        """
        Create or update an EventBridge rule.

        :param name: name of the rule to create or update (required)
        :param description: description of the rule
        :param event_bus_name: name or ARN of the event bus to associate with this rule
        :param event_pattern: pattern of events to be matched to this rule (JSON string)
        :param role_arn: the Amazon Resource Name of the IAM role associated with the rule
        :param schedule_expression: the scheduling expression (for example, a cron or rate expression)
        :param state: indicates whether rule is set to be "ENABLED" or "DISABLED"
        :param tags: list of key-value pairs to associate with the rule
        :raises ValueError: if neither pattern nor schedule is given, the state is invalid,
            or the event pattern is not valid JSON
        :return: the boto3 ``put_rule`` response
        """
        if not (event_pattern or schedule_expression):
            raise ValueError(
                "One of `event_pattern` or `schedule_expression` are required in order to "
                "put or update your rule."
            )

        if state and state not in ("ENABLED", "DISABLED"):
            raise ValueError("`state` must be specified as ENABLED or DISABLED.")

        if event_pattern:
            _validate_json(event_pattern)

        candidate_params: dict[str, str | list | None] = {
            "Name": name,
            "Description": description,
            "EventBusName": event_bus_name,
            "EventPattern": event_pattern,
            "RoleArn": role_arn,
            "ScheduleExpression": schedule_expression,
            "State": state,
            "Tags": tags,
        }
        # Only forward parameters the caller actually set; boto3 rejects explicit None values.
        put_rule_kwargs: dict[str, str | list] = {
            key: value for key, value in candidate_params.items() if value is not None
        }

        return self.conn.put_rule(**put_rule_kwargs)
|
@@ -57,7 +57,7 @@ from airflow.providers.amazon.aws.utils.tags import format_tags
|
|
57
57
|
from airflow.utils.helpers import chunks
|
58
58
|
|
59
59
|
if TYPE_CHECKING:
|
60
|
-
from mypy_boto3_s3.service_resource import Object as S3ResourceObject
|
60
|
+
from mypy_boto3_s3.service_resource import Bucket as S3Bucket, Object as S3ResourceObject
|
61
61
|
|
62
62
|
T = TypeVar("T", bound=Callable)
|
63
63
|
|
@@ -298,7 +298,7 @@ class S3Hook(AwsBaseHook):
|
|
298
298
|
return False
|
299
299
|
|
300
300
|
@provide_bucket_name
|
301
|
-
def get_bucket(self, bucket_name: str | None = None) ->
|
301
|
+
def get_bucket(self, bucket_name: str | None = None) -> S3Bucket:
|
302
302
|
"""
|
303
303
|
Returns a :py:class:`S3.Bucket` object.
|
304
304
|
|
@@ -149,7 +149,7 @@ class BatchOperator(BaseOperator):
|
|
149
149
|
parameters: dict | None = None,
|
150
150
|
job_id: str | None = None,
|
151
151
|
waiters: Any | None = None,
|
152
|
-
max_retries: int
|
152
|
+
max_retries: int = 4200,
|
153
153
|
status_retries: int | None = None,
|
154
154
|
aws_conn_id: str | None = None,
|
155
155
|
region_name: str | None = None,
|
@@ -223,7 +223,7 @@ class BatchOperator(BaseOperator):
|
|
223
223
|
timeout=self.execution_timeout,
|
224
224
|
trigger=BatchJobTrigger(
|
225
225
|
job_id=self.job_id,
|
226
|
-
waiter_max_attempts=self.max_retries
|
226
|
+
waiter_max_attempts=self.max_retries,
|
227
227
|
aws_conn_id=self.aws_conn_id,
|
228
228
|
region_name=self.region_name,
|
229
229
|
waiter_delay=self.poll_interval,
|
@@ -32,6 +32,10 @@ class EventBridgePutEventsOperator(BaseOperator):
|
|
32
32
|
"""
|
33
33
|
Put Events onto Amazon EventBridge.
|
34
34
|
|
35
|
+
.. seealso::
|
36
|
+
For more information on how to use this operator, take a look at the guide:
|
37
|
+
:ref:`howto/operator:EventBridgePutEventsOperator`
|
38
|
+
|
35
39
|
:param entries: the list of events to be put onto EventBridge, each event is a dict (required)
|
36
40
|
:param endpoint_id: the URL subdomain of the endpoint
|
37
41
|
:param aws_conn_id: the AWS connection to use
|
@@ -85,3 +89,84 @@ class EventBridgePutEventsOperator(BaseOperator):
|
|
85
89
|
|
86
90
|
if self.do_xcom_push:
|
87
91
|
return [e["EventId"] for e in response["Entries"]]
|
92
|
+
|
93
|
+
|
94
|
+
class EventBridgePutRuleOperator(BaseOperator):
    """
    Create or update a specified EventBridge rule.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:EventBridgePutRuleOperator`

    :param name: name of the rule to create or update (required)
    :param description: description of the rule
    :param event_bus_name: name or ARN of the event bus to associate with this rule
    :param event_pattern: pattern of events to be matched to this rule
    :param role_arn: the Amazon Resource Name of the IAM role associated with the rule
    :param schedule_expression: the scheduling expression (for example, a cron or rate expression)
    :param state: indicates whether rule is set to be "ENABLED" or "DISABLED"
    :param tags: list of key-value pairs to associate with the rule
    :param region_name: the region where rule is to be created or updated
    :param aws_conn_id: the AWS connection to use

    """

    template_fields: Sequence[str] = (
        "aws_conn_id",
        "name",
        "description",
        "event_bus_name",
        "event_pattern",
        "role_arn",
        "schedule_expression",
        "state",
        "tags",
        "region_name",
    )

    def __init__(
        self,
        *,
        name: str,
        description: str | None = None,
        event_bus_name: str | None = None,
        event_pattern: str | None = None,
        role_arn: str | None = None,
        schedule_expression: str | None = None,
        state: str | None = None,
        tags: list | None = None,
        region_name: str | None = None,
        aws_conn_id: str = "aws_default",
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.name = name
        self.description = description
        self.event_bus_name = event_bus_name
        self.event_pattern = event_pattern
        self.role_arn = role_arn
        self.region_name = region_name
        self.schedule_expression = schedule_expression
        self.state = state
        self.tags = tags
        self.aws_conn_id = aws_conn_id

    @cached_property
    def hook(self) -> EventBridgeHook:
        """Create and return an EventBridgeHook."""
        return EventBridgeHook(aws_conn_id=self.aws_conn_id, region_name=self.region_name)

    def execute(self, context: Context):
        """Forward the rule definition to EventBridge and return the ``put_rule`` response."""
        self.log.info('Sending rule "%s" to EventBridge.', self.name)

        # Validation of state/pattern/schedule is delegated to the hook.
        return self.hook.put_rule(
            name=self.name,
            description=self.description,
            event_bus_name=self.event_bus_name,
            event_pattern=self.event_pattern,
            role_arn=self.role_arn,
            schedule_expression=self.schedule_expression,
            state=self.state,
            tags=self.tags,
        )
|
@@ -392,6 +392,8 @@ class RdsCancelExportTaskOperator(RdsBaseOperator):
|
|
392
392
|
|
393
393
|
:param export_task_identifier: The identifier of the snapshot export task to cancel
|
394
394
|
:param wait_for_completion: If True, waits for DB snapshot export to cancel. (default: True)
|
395
|
+
:param check_interval: The amount of time in seconds to wait between attempts
|
396
|
+
:param max_attempts: The maximum number of attempts to be made
|
395
397
|
"""
|
396
398
|
|
397
399
|
template_fields = ("export_task_identifier",)
|
@@ -402,6 +404,7 @@ class RdsCancelExportTaskOperator(RdsBaseOperator):
|
|
402
404
|
export_task_identifier: str,
|
403
405
|
wait_for_completion: bool = True,
|
404
406
|
check_interval: int = 30,
|
407
|
+
max_attempts: int = 40,
|
405
408
|
**kwargs,
|
406
409
|
):
|
407
410
|
super().__init__(**kwargs)
|
@@ -409,6 +412,7 @@ class RdsCancelExportTaskOperator(RdsBaseOperator):
|
|
409
412
|
self.export_task_identifier = export_task_identifier
|
410
413
|
self.wait_for_completion = wait_for_completion
|
411
414
|
self.check_interval = check_interval
|
415
|
+
self.max_attempts = max_attempts
|
412
416
|
|
413
417
|
def execute(self, context: Context) -> str:
|
414
418
|
self.log.info("Canceling export task %s", self.export_task_identifier)
|
@@ -419,7 +423,10 @@ class RdsCancelExportTaskOperator(RdsBaseOperator):
|
|
419
423
|
|
420
424
|
if self.wait_for_completion:
|
421
425
|
self.hook.wait_for_export_task_state(
|
422
|
-
self.export_task_identifier,
|
426
|
+
self.export_task_identifier,
|
427
|
+
target_state="canceled",
|
428
|
+
check_interval=self.check_interval,
|
429
|
+
max_attempts=self.max_attempts,
|
423
430
|
)
|
424
431
|
return json.dumps(cancel_export, default=str)
|
425
432
|
|
@@ -40,13 +40,16 @@ from airflow.providers.amazon.aws.utils.tags import format_tags
|
|
40
40
|
from airflow.utils.json import AirflowJsonEncoder
|
41
41
|
|
42
42
|
if TYPE_CHECKING:
|
43
|
+
from openlineage.client.run import Dataset
|
44
|
+
|
45
|
+
from airflow.providers.openlineage.extractors.base import OperatorLineage
|
43
46
|
from airflow.utils.context import Context
|
44
47
|
|
45
48
|
DEFAULT_CONN_ID: str = "aws_default"
|
46
49
|
CHECK_INTERVAL_SECOND: int = 30
|
47
50
|
|
48
51
|
|
49
|
-
def serialize(result: dict) ->
|
52
|
+
def serialize(result: dict) -> dict:
    """Return *result* made JSON-safe by round-tripping it through ``AirflowJsonEncoder``."""
    encoded = json.dumps(result, cls=AirflowJsonEncoder)
    return json.loads(encoded)
|
51
54
|
|
52
55
|
|
@@ -158,6 +161,14 @@ class SageMakerBaseOperator(BaseOperator):
|
|
158
161
|
"""Return SageMakerHook."""
|
159
162
|
return SageMakerHook(aws_conn_id=self.aws_conn_id)
|
160
163
|
|
164
|
+
@staticmethod
def path_to_s3_dataset(path) -> Dataset:
    """Convert an ``s3://bucket/key`` URI into an OpenLineage ``Dataset``."""
    from openlineage.client.run import Dataset

    # First path component is the bucket (namespace); the remainder is the dataset name.
    bucket, _, key = path.replace("s3://", "").partition("/")
    return Dataset(namespace=f"s3://{bucket}", name=key, facets={})
|
171
|
+
|
161
172
|
|
162
173
|
class SageMakerProcessingOperator(SageMakerBaseOperator):
|
163
174
|
"""
|
@@ -225,6 +236,7 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
|
|
225
236
|
self.max_attempts = max_attempts or 60
|
226
237
|
self.max_ingestion_time = max_ingestion_time
|
227
238
|
self.deferrable = deferrable
|
239
|
+
self.serialized_job: dict
|
228
240
|
|
229
241
|
def _create_integer_fields(self) -> None:
|
230
242
|
"""Set fields which should be cast to integers."""
|
@@ -282,14 +294,48 @@ class SageMakerProcessingOperator(SageMakerBaseOperator):
|
|
282
294
|
method_name="execute_complete",
|
283
295
|
)
|
284
296
|
|
285
|
-
|
297
|
+
self.serialized_job = serialize(self.hook.describe_processing_job(self.config["ProcessingJobName"]))
|
298
|
+
return {"Processing": self.serialized_job}
|
286
299
|
|
287
300
|
def execute_complete(self, context, event=None):
    """Callback for the deferred (trigger) path of the processing job.

    :param context: Airflow task context (unused here).
    :param event: payload returned by the trigger; expected to carry ``"status"``
        and ``"message"`` keys.
        NOTE(review): a ``None`` event would raise ``TypeError`` on subscript —
        presumably the trigger always supplies a dict; confirm upstream.
    :raises AirflowException: when the trigger reports a non-"success" status.
    :return: dict with the serialized job description, mirroring the
        non-deferrable path's return value.
    """
    if event["status"] != "success":
        raise AirflowException(f"Error while running job: {event}")
    else:
        self.log.info(event["message"])
    # Re-describe the finished job so the XCom return value matches execute().
    self.serialized_job = serialize(self.hook.describe_processing_job(self.config["ProcessingJobName"]))
    return {"Processing": self.serialized_job}
|
307
|
+
|
308
|
+
def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage:
    """Returns OpenLineage data gathered from SageMaker's API response saved by processing job."""
    from airflow.providers.openlineage.extractors.base import OperatorLineage

    input_datasets: list = []
    output_datasets: list = []
    try:
        job_inputs = self.serialized_job["ProcessingInputs"]
        job_outputs = self.serialized_job["ProcessingOutputConfig"]["Outputs"]
    except KeyError:
        # Best effort: missing keys leave the lineage empty rather than failing the task.
        self.log.exception("Could not find input/output information in Xcom.")
    else:
        input_datasets, output_datasets = self._extract_s3_dataset_identifiers(
            processing_inputs=job_inputs,
            processing_outputs=job_outputs,
        )

    return OperatorLineage(inputs=input_datasets, outputs=output_datasets)
|
323
|
+
|
324
|
+
def _extract_s3_dataset_identifiers(self, processing_inputs, processing_outputs):
    """Map the processing job's S3 input/output URIs to OpenLineage datasets.

    :param processing_inputs: ``ProcessingInputs`` entries from the job description
    :param processing_outputs: ``ProcessingOutputConfig.Outputs`` entries from the job description
    :return: tuple ``(inputs, outputs)`` of dataset lists; extraction is best effort,
        so a missing key yields a partial (possibly empty) list rather than an error
    """
    inputs = []
    outputs = []
    try:
        for processing_input in processing_inputs:
            inputs.append(self.path_to_s3_dataset(processing_input["S3Input"]["S3Uri"]))
    except KeyError:
        # log.exception already records the active traceback, so the redundant
        # exc_info=True argument was dropped.
        self.log.exception("Cannot find S3 input details")

    try:
        for processing_output in processing_outputs:
            outputs.append(self.path_to_s3_dataset(processing_output["S3Output"]["S3Uri"]))
    except KeyError:
        self.log.exception("Cannot find S3 output details.")
    return inputs, outputs
|
293
339
|
|
294
340
|
|
295
341
|
class SageMakerEndpointConfigOperator(SageMakerBaseOperator):
|
@@ -579,6 +625,8 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
|
|
579
625
|
Provided value: '{action_if_job_exists}'."
|
580
626
|
)
|
581
627
|
self.deferrable = deferrable
|
628
|
+
self.serialized_model: dict
|
629
|
+
self.serialized_tranform: dict
|
582
630
|
|
583
631
|
def _create_integer_fields(self) -> None:
|
584
632
|
"""Set fields which should be cast to integers."""
|
@@ -650,10 +698,11 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
|
|
650
698
|
method_name="execute_complete",
|
651
699
|
)
|
652
700
|
|
653
|
-
|
654
|
-
|
655
|
-
|
656
|
-
|
701
|
+
self.serialized_model = serialize(self.hook.describe_model(transform_config["ModelName"]))
|
702
|
+
self.serialized_tranform = serialize(
|
703
|
+
self.hook.describe_transform_job(transform_config["TransformJobName"])
|
704
|
+
)
|
705
|
+
return {"Model": self.serialized_model, "Transform": self.serialized_tranform}
|
657
706
|
|
658
707
|
def execute_complete(self, context, event=None):
|
659
708
|
if event["status"] != "success":
|
@@ -661,10 +710,62 @@ class SageMakerTransformOperator(SageMakerBaseOperator):
|
|
661
710
|
else:
|
662
711
|
self.log.info(event["message"])
|
663
712
|
transform_config = self.config.get("Transform", self.config)
|
664
|
-
|
665
|
-
|
666
|
-
|
667
|
-
|
713
|
+
self.serialized_model = serialize(self.hook.describe_model(transform_config["ModelName"]))
|
714
|
+
self.serialized_tranform = serialize(
|
715
|
+
self.hook.describe_transform_job(transform_config["TransformJobName"])
|
716
|
+
)
|
717
|
+
return {"Model": self.serialized_model, "Transform": self.serialized_tranform}
|
718
|
+
|
719
|
+
def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage:
    """Returns OpenLineage data gathered from SageMaker's API response saved by transform job."""
    from airflow.providers.openlineage.extractors import OperatorLineage

    package_arn = None
    input_uri = None
    output_uri = None

    try:
        package_arn = self.serialized_model["PrimaryContainer"]["ModelPackageName"]
    except KeyError:
        self.log.error("Cannot find Model Package Name.", exc_info=True)

    try:
        data_source = self.serialized_tranform["TransformInput"]["DataSource"]
        input_uri = data_source["S3DataSource"]["S3Uri"]
        output_uri = self.serialized_tranform["TransformOutput"]["S3OutputPath"]
    except KeyError:
        self.log.error("Cannot find some required input/output details.", exc_info=True)

    # Inputs: the transform data first, then every model artifact referenced by the package.
    input_datasets = []
    if input_uri is not None:
        input_datasets.append(self.path_to_s3_dataset(input_uri))
    if package_arn is not None:
        for model_data_url in self._get_model_data_urls(package_arn):
            input_datasets.append(self.path_to_s3_dataset(model_data_url))

    output_datasets = []
    if output_uri is not None:
        output_datasets.append(self.path_to_s3_dataset(output_uri))

    return OperatorLineage(inputs=input_datasets, outputs=output_datasets)
|
755
|
+
|
756
|
+
def _get_model_data_urls(self, model_package_arn) -> list:
    """Return the ``ModelDataUrl`` of every container in the given model package.

    :param model_package_arn: ARN of the SageMaker model package to describe
    :return: list of S3 model-artifact URLs; best effort, so missing keys yield
        a partial (possibly empty) list rather than an error
    """
    model_data_urls = []
    try:
        model_containers = self.hook.get_conn().describe_model_package(
            ModelPackageName=model_package_arn
        )["InferenceSpecification"]["Containers"]

        for container in model_containers:
            model_data_urls.append(container["ModelDataUrl"])
    except KeyError:
        # log.exception already records the active traceback, so the redundant
        # exc_info=True argument was dropped.
        self.log.exception("Cannot retrieve model details.")

    return model_data_urls
|
668
769
|
|
669
770
|
|
670
771
|
class SageMakerTuningOperator(SageMakerBaseOperator):
|
@@ -891,6 +992,7 @@ class SageMakerTrainingOperator(SageMakerBaseOperator):
|
|
891
992
|
Provided value: '{action_if_job_exists}'."
|
892
993
|
)
|
893
994
|
self.deferrable = deferrable
|
995
|
+
self.serialized_training_data: dict
|
894
996
|
|
895
997
|
def expand_role(self) -> None:
|
896
998
|
"""Expands an IAM role name into an ARN."""
|
@@ -951,16 +1053,40 @@ class SageMakerTrainingOperator(SageMakerBaseOperator):
|
|
951
1053
|
method_name="execute_complete",
|
952
1054
|
)
|
953
1055
|
|
954
|
-
|
955
|
-
|
1056
|
+
self.serialized_training_data = serialize(
|
1057
|
+
self.hook.describe_training_job(self.config["TrainingJobName"])
|
1058
|
+
)
|
1059
|
+
return {"Training": self.serialized_training_data}
|
956
1060
|
|
957
1061
|
def execute_complete(self, context, event=None):
    """Callback for the deferred (trigger) path of the training job.

    :param context: Airflow task context (unused here).
    :param event: payload returned by the trigger; expected to carry ``"status"``
        and ``"message"`` keys.
        NOTE(review): a ``None`` event would raise ``TypeError`` on subscript —
        presumably the trigger always supplies a dict; confirm upstream.
    :raises AirflowException: when the trigger reports a non-"success" status.
    :return: dict with the serialized training-job description, mirroring the
        non-deferrable path's return value.
    """
    if event["status"] != "success":
        raise AirflowException(f"Error while running job: {event}")
    else:
        self.log.info(event["message"])
    # Re-describe the finished job so the XCom return value matches execute().
    self.serialized_training_data = serialize(
        self.hook.describe_training_job(self.config["TrainingJobName"])
    )
    return {"Training": self.serialized_training_data}
|
1070
|
+
|
1071
|
+
def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage:
    """Returns OpenLineage data gathered from SageMaker's API response saved by training job.

    Extraction is best effort: missing keys are logged and leave the corresponding
    lineage list empty instead of failing the task.
    """
    from airflow.providers.openlineage.extractors import OperatorLineage

    inputs = []
    outputs = []
    try:
        for input_data in self.serialized_training_data["InputDataConfig"]:
            inputs.append(self.path_to_s3_dataset(input_data["DataSource"]["S3DataSource"]["S3Uri"]))
    except KeyError:
        self.log.exception("Issues extracting inputs.")

    try:
        outputs.append(
            self.path_to_s3_dataset(self.serialized_training_data["ModelArtifacts"]["S3ModelArtifacts"])
        )
    except KeyError:
        # BUG FIX: this branch handles the *outputs* extraction; the original message
        # said "Issues extracting inputs." (copy-paste error).
        self.log.exception("Issues extracting outputs.")
    return OperatorLineage(inputs=inputs, outputs=outputs)
|
964
1090
|
|
965
1091
|
|
966
1092
|
class SageMakerDeleteModelOperator(SageMakerBaseOperator):
|