apache-airflow-providers-teradata 2.1.1rc1__tar.gz → 2.2.0rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (14)
  1. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/PKG-INFO +18 -12
  2. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/README.rst +11 -9
  3. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/airflow/providers/teradata/__init__.py +1 -1
  4. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/airflow/providers/teradata/get_provider_info.py +19 -3
  5. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/airflow/providers/teradata/hooks/teradata.py +66 -0
  6. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/airflow/providers/teradata/operators/teradata.py +44 -5
  7. apache_airflow_providers_teradata-2.2.0rc1/airflow/providers/teradata/transfers/azure_blob_to_teradata.py +103 -0
  8. apache_airflow_providers_teradata-2.2.0rc1/airflow/providers/teradata/transfers/s3_to_teradata.py +109 -0
  9. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/pyproject.toml +9 -3
  10. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/airflow/providers/teradata/LICENSE +0 -0
  11. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/airflow/providers/teradata/hooks/__init__.py +0 -0
  12. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/airflow/providers/teradata/operators/__init__.py +0 -0
  13. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/airflow/providers/teradata/transfers/__init__.py +0 -0
  14. {apache_airflow_providers_teradata-2.1.1rc1 → apache_airflow_providers_teradata-2.2.0rc1}/airflow/providers/teradata/transfers/teradata_to_teradata.py +0 -0
--- apache_airflow_providers_teradata-2.1.1rc1/PKG-INFO
+++ apache_airflow_providers_teradata-2.2.0rc1/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-teradata
-Version: 2.1.1rc1
+Version: 2.2.0rc1
 Summary: Provider package apache-airflow-providers-teradata for Apache Airflow
 Keywords: airflow-provider,teradata,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -25,15 +25,19 @@ Requires-Dist: apache-airflow-providers-common-sql>=1.3.1rc0
 Requires-Dist: apache-airflow>=2.7.0rc0
 Requires-Dist: teradatasql>=17.20.0.28
 Requires-Dist: teradatasqlalchemy>=17.20.0.0
+Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
 Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
+Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft.azure"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.1.1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.1.1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.2.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.2.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
+Provides-Extra: amazon
 Provides-Extra: common.sql
+Provides-Extra: microsoft.azure
 
 
 .. Licensed to the Apache Software Foundation (ASF) under one
@@ -79,7 +83,7 @@ Provides-Extra: common.sql
 
 Package ``apache-airflow-providers-teradata``
 
-Release: ``2.1.1.rc1``
+Release: ``2.2.0.rc1``
 
 
 `Teradata <https://www.teradata.com/>`__
@@ -92,7 +96,7 @@ This is a provider package for ``teradata`` provider. All classes for this provi
 are in ``airflow.providers.teradata`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.1.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.2.0/>`_.
 
 Installation
 ------------
@@ -125,14 +129,16 @@ You can install such cross-provider dependencies when installing from PyPI. For
 
 .. code-block:: bash
 
-    pip install apache-airflow-providers-teradata[common.sql]
+    pip install apache-airflow-providers-teradata[amazon]
 
 
-============================================================================================================ ==============
-Dependent package                                                                                            Extra
-============================================================================================================ ==============
-`apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
-============================================================================================================ ==============
+====================================================================================================================== ===================
+Dependent package                                                                                                       Extra
+====================================================================================================================== ===================
+`apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_                    ``amazon``
+`apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_            ``common.sql``
+`apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_  ``microsoft.azure``
+====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.1.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.2.0/changelog.html>`_.
--- apache_airflow_providers_teradata-2.1.1rc1/README.rst
+++ apache_airflow_providers_teradata-2.2.0rc1/README.rst
@@ -42,7 +42,7 @@
 
 Package ``apache-airflow-providers-teradata``
 
-Release: ``2.1.1.rc1``
+Release: ``2.2.0.rc1``
 
 
 `Teradata <https://www.teradata.com/>`__
@@ -55,7 +55,7 @@ This is a provider package for ``teradata`` provider. All classes for this provi
 are in ``airflow.providers.teradata`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.1.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.2.0/>`_.
 
 Installation
 ------------
@@ -88,14 +88,16 @@ You can install such cross-provider dependencies when installing from PyPI. For
 
 .. code-block:: bash
 
-    pip install apache-airflow-providers-teradata[common.sql]
+    pip install apache-airflow-providers-teradata[amazon]
 
 
-============================================================================================================ ==============
-Dependent package                                                                                            Extra
-============================================================================================================ ==============
-`apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
-============================================================================================================ ==============
+====================================================================================================================== ===================
+Dependent package                                                                                                       Extra
+====================================================================================================================== ===================
+`apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_                    ``amazon``
+`apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_            ``common.sql``
+`apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_  ``microsoft.azure``
+====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.1.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.2.0/changelog.html>`_.
--- apache_airflow_providers_teradata-2.1.1rc1/airflow/providers/teradata/__init__.py
+++ apache_airflow_providers_teradata-2.2.0rc1/airflow/providers/teradata/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "2.1.1"
+__version__ = "2.2.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.7.0"
--- apache_airflow_providers_teradata-2.1.1rc1/airflow/providers/teradata/get_provider_info.py
+++ apache_airflow_providers_teradata-2.2.0rc1/airflow/providers/teradata/get_provider_info.py
@@ -28,14 +28,18 @@ def get_provider_info():
         "name": "Teradata",
         "description": "`Teradata <https://www.teradata.com/>`__\n",
         "state": "ready",
-        "source-date-epoch": 1716289161,
-        "versions": ["2.1.1", "2.1.0", "2.0.0"],
+        "source-date-epoch": 1717051506,
+        "versions": ["2.2.0", "2.1.1", "2.1.0", "2.0.0"],
         "dependencies": [
             "apache-airflow>=2.7.0",
             "apache-airflow-providers-common-sql>=1.3.1",
             "teradatasqlalchemy>=17.20.0.0",
             "teradatasql>=17.20.0.28",
         ],
+        "additional-extras": [
+            {"name": "microsoft.azure", "dependencies": ["apache-airflow-providers-microsoft-azure"]},
+            {"name": "amazon", "dependencies": ["apache-airflow-providers-amazon"]},
+        ],
         "integrations": [
             {
                 "integration-name": "Teradata",
@@ -60,7 +64,19 @@ def get_provider_info():
                 "target-integration-name": "Teradata",
                 "python-module": "airflow.providers.teradata.transfers.teradata_to_teradata",
                 "how-to-guide": "/docs/apache-airflow-providers-teradata/operators/teradata_to_teradata.rst",
-            }
+            },
+            {
+                "source-integration-name": "Microsoft Azure Blob Storage",
+                "target-integration-name": "Teradata",
+                "python-module": "airflow.providers.teradata.transfers.azure_blob_to_teradata",
+                "how-to-guide": "/docs/apache-airflow-providers-teradata/operators/azure_blob_to_teradata.rst",
+            },
+            {
+                "source-integration-name": "Amazon Simple Storage Service (S3)",
+                "target-integration-name": "Teradata",
+                "python-module": "airflow.providers.teradata.transfers.s3_to_teradata",
+                "how-to-guide": "/docs/apache-airflow-providers-teradata/operators/s3_to_teradata.rst",
+            },
         ],
         "connection-types": [
             {
--- apache_airflow_providers_teradata-2.1.1rc1/airflow/providers/teradata/hooks/teradata.py
+++ apache_airflow_providers_teradata-2.2.0rc1/airflow/providers/teradata/hooks/teradata.py
@@ -32,6 +32,17 @@ from airflow.providers.common.sql.hooks.sql import DbApiHook
 if TYPE_CHECKING:
     from airflow.models.connection import Connection
 
+PARAM_TYPES = {bool, float, int, str}
+
+
+def _map_param(value):
+    if value in PARAM_TYPES:
+        # In this branch, value is a Python type; calling it produces
+        # an instance of the type which is understood by the Teradata driver
+        # in the out parameter mapping mechanism.
+        value = value()
+    return value
+
 
 class TeradataHook(DbApiHook):
     """General hook for interacting with Teradata SQL Database.
@@ -187,3 +198,58 @@ class TeradataHook(DbApiHook):
                 "password": "dbc",
             },
         }
+
+    def callproc(
+        self,
+        identifier: str,
+        autocommit: bool = False,
+        parameters: list | dict | None = None,
+    ) -> list | dict | tuple | None:
+        """
+        Call the stored procedure identified by the provided string.
+
+        Any OUT parameters must be provided with a value of either the
+        expected Python type (e.g., `int`) or an instance of that type.
+
+        :param identifier: stored procedure name
+        :param autocommit: What to set the connection's autocommit setting to
+            before executing the query.
+        :param parameters: The `IN`, `OUT` and `INOUT` parameters for Teradata
+            stored procedure
+
+        The return value is a list or mapping that includes parameters in
+        both directions; the actual return type depends on the type of the
+        provided `parameters` argument.
+
+        """
+        if parameters is None:
+            parameters = []
+
+        args = ",".join("?" for name in parameters)
+
+        sql = f"{{CALL {identifier}({(args)})}}"
+
+        def handler(cursor):
+            records = cursor.fetchall()
+
+            if records is None:
+                return
+            if isinstance(records, list):
+                return [row for row in records]
+
+            if isinstance(records, dict):
+                return {n: v for (n, v) in records.items()}
+            raise TypeError(f"Unexpected results: {records}")
+
+        result = self.run(
+            sql,
+            autocommit=autocommit,
+            parameters=(
+                [_map_param(value) for (name, value) in parameters.items()]
+                if isinstance(parameters, dict)
+                else [_map_param(value) for value in parameters]
+            ),
+            handler=handler,
+        )
+
+        return result
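
To make the calling convention concrete, here is a hedged usage sketch of the new ``callproc`` method against a hypothetical stored procedure ``TEST_PROC`` with one IN and one OUT parameter; the procedure name and connection id are illustrative only:

.. code-block:: python

    from airflow.providers.teradata.hooks.teradata import TeradataHook

    hook = TeradataHook(teradata_conn_id="teradata_default")
    # Mark the OUT slot with a bare type (int), per the docstring above;
    # the returned sequence carries parameters in both directions.
    result = hook.callproc("TEST_PROC", autocommit=True, parameters=[3, int])
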
--- apache_airflow_providers_teradata-2.1.1rc1/airflow/providers/teradata/operators/teradata.py
+++ apache_airflow_providers_teradata-2.2.0rc1/airflow/providers/teradata/operators/teradata.py
@@ -17,11 +17,15 @@
 # under the License.
 from __future__ import annotations
 
-from typing import Sequence
+from typing import TYPE_CHECKING, Sequence
 
+from airflow.models import BaseOperator
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
 from airflow.providers.teradata.hooks.teradata import TeradataHook
 
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
 
 class TeradataOperator(SQLExecuteQueryOperator):
     """
@@ -34,15 +38,15 @@ class TeradataOperator(SQLExecuteQueryOperator):
         :ref:`howto/operator:TeradataOperator`
 
     :param sql: the SQL query to be executed as a single string, or a list of str (sql statements)
-    :param conn_id: reference to a predefined database
+    :param teradata_conn_id: reference to a predefined database
     :param autocommit: if True, each command is automatically committed.(default value: False)
     :param parameters: (optional) the parameters to render the SQL query with.
     :param schema: The Teradata database to connect to.
     """
 
     template_fields: Sequence[str] = (
-        "parameters",
         "sql",
+        "parameters",
     )
     template_ext: Sequence[str] = (".sql",)
     template_fields_renderers = {"sql": "sql"}
@@ -50,7 +54,7 @@ class TeradataOperator(SQLExecuteQueryOperator):
 
     def __init__(
         self,
-        conn_id: str = TeradataHook.default_conn_name,
+        teradata_conn_id: str = TeradataHook.default_conn_name,
         schema: str | None = None,
         **kwargs,
     ) -> None:
@@ -61,4 +65,39 @@ class TeradataOperator(SQLExecuteQueryOperator):
             **hook_params,
         }
         super().__init__(**kwargs)
-        self.conn_id = conn_id
+        self.conn_id = teradata_conn_id
+
+
+class TeradataStoredProcedureOperator(BaseOperator):
+    """
+    Executes stored procedure in a specific Teradata database.
+
+    :param procedure: name of stored procedure to call (templated)
+    :param teradata_conn_id: The :ref:`Teradata connection id <howto/connection:teradata>`
+        reference to a specific Teradata database.
+    :param parameters: (optional, templated) the parameters provided in the call
+
+    """
+
+    template_fields: Sequence[str] = (
+        "procedure",
+        "parameters",
+    )
+    ui_color = "#ededed"
+
+    def __init__(
+        self,
+        *,
+        procedure: str,
+        teradata_conn_id: str = TeradataHook.default_conn_name,
+        parameters: dict | list | None = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.teradata_conn_id = teradata_conn_id
+        self.procedure = procedure
+        self.parameters = parameters
+
+    def execute(self, context: Context):
+        hook = TeradataHook(teradata_conn_id=self.teradata_conn_id)
+        return hook.callproc(self.procedure, autocommit=True, parameters=self.parameters)
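
The same call is available declaratively through the new operator. A hedged DAG-task sketch, reusing the hypothetical ``TEST_PROC`` from above:

.. code-block:: python

    from airflow.providers.teradata.operators.teradata import TeradataStoredProcedureOperator

    call_proc = TeradataStoredProcedureOperator(
        task_id="call_test_proc",
        teradata_conn_id="teradata_default",
        procedure="TEST_PROC",
        parameters=[3, int],  # bare int marks the OUT parameter slot
    )
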
--- /dev/null
+++ apache_airflow_providers_teradata-2.2.0rc1/airflow/providers/teradata/transfers/azure_blob_to_teradata.py
@@ -0,0 +1,103 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from textwrap import dedent
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+
+try:
+    from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+except ModuleNotFoundError as e:
+    from airflow.exceptions import AirflowOptionalProviderFeatureException
+
+    raise AirflowOptionalProviderFeatureException(e)
+
+from airflow.providers.teradata.hooks.teradata import TeradataHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class AzureBlobStorageToTeradataOperator(BaseOperator):
+    """
+
+    Loads CSV, JSON and Parquet format data from Azure Blob Storage to Teradata.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:AzureBlobStorageToTeradataOperator`
+
+    :param blob_source_key: The URI format specifying the location of the Azure blob object store.(templated)
+        The URI format is `/az/YOUR-STORAGE-ACCOUNT.blob.core.windows.net/YOUR-CONTAINER/YOUR-BLOB-LOCATION`.
+        Refer to
+        https://docs.teradata.com/search/documents?query=native+object+store&sort=last_update&virtual-field=title_only&content-lang=en-US
+    :param azure_conn_id: The Airflow WASB connection used for azure blob credentials.
+    :param teradata_table: The name of the teradata table to which the data is transferred.(templated)
+    :param teradata_conn_id: The connection ID used to connect to Teradata
+        :ref:`Teradata connection <howto/connection:Teradata>`
+
+    Note that ``blob_source_key`` and ``teradata_table`` are
+    templated, so you can use variables in them if you wish.
+    """
+
+    template_fields: Sequence[str] = ("blob_source_key", "teradata_table")
+    ui_color = "#e07c24"
+
+    def __init__(
+        self,
+        *,
+        blob_source_key: str,
+        azure_conn_id: str = "azure_default",
+        teradata_table: str,
+        teradata_conn_id: str = "teradata_default",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.blob_source_key = blob_source_key
+        self.azure_conn_id = azure_conn_id
+        self.teradata_table = teradata_table
+        self.teradata_conn_id = teradata_conn_id
+
+    def execute(self, context: Context) -> None:
+        self.log.info(
+            "transferring data from %s to teradata table %s...", self.blob_source_key, self.teradata_table
+        )
+        azure_hook = WasbHook(wasb_conn_id=self.azure_conn_id)
+        conn = azure_hook.get_connection(self.azure_conn_id)
+        # Obtaining the Azure client ID and Azure secret in order to access a specified Blob container
+        access_id = conn.login if conn.login is not None else ""
+        access_secret = conn.password if conn.password is not None else ""
+        teradata_hook = TeradataHook(teradata_conn_id=self.teradata_conn_id)
+        sql = dedent(f"""
+                    CREATE MULTISET TABLE {self.teradata_table} AS
+                    (
+                        SELECT * FROM (
+                            LOCATION = '{self.blob_source_key}'
+                            ACCESS_ID= '{access_id}'
+                            ACCESS_KEY= '{access_secret}'
+                        ) AS d
+                    ) WITH DATA
+                    """).rstrip()
+        try:
+            teradata_hook.run(sql, True)
+        except Exception as ex:
+            self.log.error(str(ex))
+            raise
+        self.log.info("The transfer of data from Azure Blob to Teradata was successful")
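
A hedged usage sketch of the new transfer operator; the storage account, container, blob path, table name, and connection ids below are placeholders, not values from the package:

.. code-block:: python

    from airflow.providers.teradata.transfers.azure_blob_to_teradata import (
        AzureBlobStorageToTeradataOperator,
    )

    transfer = AzureBlobStorageToTeradataOperator(
        task_id="azure_blob_to_teradata",
        blob_source_key="/az/myaccount.blob.core.windows.net/mycontainer/people.csv",
        azure_conn_id="azure_default",
        teradata_table="people",
        teradata_conn_id="teradata_default",
    )
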
--- /dev/null
+++ apache_airflow_providers_teradata-2.2.0rc1/airflow/providers/teradata/transfers/s3_to_teradata.py
@@ -0,0 +1,109 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from textwrap import dedent
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+
+try:
+    from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+except ModuleNotFoundError as e:
+    from airflow.exceptions import AirflowOptionalProviderFeatureException
+
+    raise AirflowOptionalProviderFeatureException(e)
+from airflow.providers.teradata.hooks.teradata import TeradataHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class S3ToTeradataOperator(BaseOperator):
+    """
+    Loads CSV, JSON and Parquet format data from Amazon S3 to Teradata.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:S3ToTeradataOperator`
+
+    :param s3_source_key: The URI format specifying the location of the S3 bucket.(templated)
+        The URI format is /s3/YOUR-BUCKET.s3.amazonaws.com/YOUR-BUCKET-NAME.
+        Refer to
+        https://docs.teradata.com/search/documents?query=native+object+store&sort=last_update&virtual-field=title_only&content-lang=en-US
+    :param public_bucket: Specifies whether the provided S3 bucket is public. If the bucket is public,
+        it means that anyone can access the objects within it via a URL without requiring authentication.
+        If the bucket is private and authentication is not provided, the operator will throw an exception.
+    :param teradata_table: The name of the teradata table to which the data is transferred.(templated)
+    :param aws_conn_id: The Airflow AWS connection used for AWS credentials.
+    :param teradata_conn_id: The connection ID used to connect to Teradata
+        :ref:`Teradata connection <howto/connection:Teradata>`.
+
+    Note that ``s3_source_key`` and ``teradata_table`` are
+    templated, so you can use variables in them if you wish.
+    """
+
+    template_fields: Sequence[str] = ("s3_source_key", "teradata_table")
+    ui_color = "#e07c24"
+
+    def __init__(
+        self,
+        *,
+        s3_source_key: str,
+        public_bucket: bool = False,
+        teradata_table: str,
+        aws_conn_id: str = "aws_default",
+        teradata_conn_id: str = "teradata_default",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.s3_source_key = s3_source_key
+        self.public_bucket = public_bucket
+        self.teradata_table = teradata_table
+        self.aws_conn_id = aws_conn_id
+        self.teradata_conn_id = teradata_conn_id
+
+    def execute(self, context: Context) -> None:
+        self.log.info(
+            "transferring data from %s to teradata table %s...", self.s3_source_key, self.teradata_table
+        )
+
+        s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
+        access_key = ""
+        access_secret = ""
+        if not self.public_bucket:
+            credentials = s3_hook.get_credentials()
+            access_key = credentials.access_key
+            access_secret = credentials.secret_key
+        teradata_hook = TeradataHook(teradata_conn_id=self.teradata_conn_id)
+        sql = dedent(f"""
+                    CREATE MULTISET TABLE {self.teradata_table} AS
+                    (
+                        SELECT * FROM (
+                            LOCATION = '{self.s3_source_key}'
+                            ACCESS_ID= '{access_key}'
+                            ACCESS_KEY= '{access_secret}'
+                        ) AS d
+                    ) WITH DATA
+                    """).rstrip()
+        try:
+            teradata_hook.run(sql, True)
+        except Exception as ex:
+            self.log.error(str(ex))
+            raise
+        self.log.info("The transfer of data from S3 to Teradata was successful")
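
And a matching hedged sketch for the S3 variant; the bucket URI and table name are placeholders. Setting ``public_bucket=True`` skips the AWS credential lookup, per the ``execute`` logic above:

.. code-block:: python

    from airflow.providers.teradata.transfers.s3_to_teradata import S3ToTeradataOperator

    transfer = S3ToTeradataOperator(
        task_id="s3_to_teradata",
        s3_source_key="/s3/my-bucket.s3.amazonaws.com/people.csv",
        public_bucket=True,  # public bucket: no AWS credentials fetched
        teradata_table="people",
        teradata_conn_id="teradata_default",
    )
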
--- apache_airflow_providers_teradata-2.1.1rc1/pyproject.toml
+++ apache_airflow_providers_teradata-2.2.0rc1/pyproject.toml
@@ -28,7 +28,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-teradata"
-version = "2.1.1.rc1"
+version = "2.2.0.rc1"
 description = "Provider package apache-airflow-providers-teradata for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -63,8 +63,8 @@ dependencies = [
 ]
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.1.1"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.1.1/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.2.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.2.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -74,9 +74,15 @@ dependencies = [
 [project.entry-points."apache_airflow_provider"]
 provider_info = "airflow.providers.teradata.get_provider_info:get_provider_info"
 [project.optional-dependencies]
+"amazon" = [
+    "apache-airflow-providers-amazon",
+]
 "common.sql" = [
     "apache-airflow-providers-common-sql",
 ]
+"microsoft.azure" = [
+    "apache-airflow-providers-microsoft-azure",
+]
 
 [tool.flit.module]
 name = "airflow.providers.teradata"