apache-airflow-providers-teradata 1.0.11__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of apache-airflow-providers-teradata might be problematic.
- airflow/providers/teradata/LICENSE +253 -0
- airflow/providers/teradata/__init__.py +26 -1
- airflow/providers/teradata/get_provider_info.py +43 -24
- airflow/providers/teradata/hooks/teradata.py +202 -0
- airflow/providers/teradata/operators/teradata.py +64 -0
- airflow/providers/teradata/transfers/teradata_to_teradata.py +101 -0
- apache_airflow_providers_teradata-2.0.0.dist-info/METADATA +137 -0
- apache_airflow_providers_teradata-2.0.0.dist-info/RECORD +13 -0
- {apache_airflow_providers_teradata-1.0.11.dist-info → apache_airflow_providers_teradata-2.0.0.dist-info}/WHEEL +1 -2
- apache_airflow_providers_teradata-2.0.0.dist-info/entry_points.txt +3 -0
- airflow/providers/teradata/example_dags/example_execute_bteq.py +0 -51
- airflow/providers/teradata/example_dags/example_export_csv.py +0 -48
- airflow/providers/teradata/example_dags/example_load_csv.py +0 -69
- airflow/providers/teradata/hooks/ttu.py +0 -394
- airflow/providers/teradata/operators/bteq.py +0 -70
- airflow/providers/teradata/operators/fastexport.py +0 -77
- airflow/providers/teradata/operators/fastload.py +0 -105
- apache_airflow_providers_teradata-1.0.11.dist-info/AUTHORS.rst +0 -13
- apache_airflow_providers_teradata-1.0.11.dist-info/LICENSE +0 -22
- apache_airflow_providers_teradata-1.0.11.dist-info/METADATA +0 -53
- apache_airflow_providers_teradata-1.0.11.dist-info/RECORD +0 -19
- apache_airflow_providers_teradata-1.0.11.dist-info/entry_points.txt +0 -3
- apache_airflow_providers_teradata-1.0.11.dist-info/top_level.txt +0 -1
- /airflow/providers/teradata/{example_dags → transfers}/__init__.py +0 -0
airflow/providers/teradata/transfers/teradata_to_teradata.py
@@ -0,0 +1,101 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from functools import cached_property
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.teradata.hooks.teradata import TeradataHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+class TeradataToTeradataOperator(BaseOperator):
+    """
+    Moves data from Teradata source database to Teradata destination database.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:TeradataToTeradataOperator`
+
+    :param dest_teradata_conn_id: destination Teradata connection.
+    :param destination_table: destination table to insert rows.
+    :param source_teradata_conn_id: :ref:`Source Teradata connection <howto/connection:Teradata>`.
+    :param sql: SQL query to execute against the source Teradata database
+    :param sql_params: Parameters to use in sql query.
+    :param rows_chunk: number of rows per chunk to commit.
+    """
+
+    template_fields: Sequence[str] = (
+        "sql",
+        "sql_params",
+    )
+    template_ext: Sequence[str] = (".sql",)
+    template_fields_renderers = {"sql": "sql", "sql_params": "py"}
+    ui_color = "#e07c24"
+
+    def __init__(
+        self,
+        *,
+        dest_teradata_conn_id: str,
+        destination_table: str,
+        source_teradata_conn_id: str,
+        sql: str,
+        sql_params: dict | None = None,
+        rows_chunk: int = 5000,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        if sql_params is None:
+            sql_params = {}
+        self.dest_teradata_conn_id = dest_teradata_conn_id
+        self.destination_table = destination_table
+        self.source_teradata_conn_id = source_teradata_conn_id
+        self.sql = sql
+        self.sql_params = sql_params
+        self.rows_chunk = rows_chunk
+
+    @cached_property
+    def src_hook(self) -> TeradataHook:
+        return TeradataHook(teradata_conn_id=self.source_teradata_conn_id)
+
+    @cached_property
+    def dest_hook(self) -> TeradataHook:
+        return TeradataHook(teradata_conn_id=self.dest_teradata_conn_id)
+
+    def execute(self, context: Context) -> None:
+        src_hook = self.src_hook
+        dest_hook = self.dest_hook
+        with src_hook.get_conn() as src_conn:
+            cursor = src_conn.cursor()
+            cursor.execute(self.sql, self.sql_params)
+            target_fields = [field[0] for field in cursor.description]
+            rows_total = 0
+            if len(target_fields) != 0:
+                for rows in iter(lambda: cursor.fetchmany(self.rows_chunk), []):
+                    dest_hook.bulk_insert_rows(
+                        self.destination_table,
+                        rows,
+                        target_fields=target_fields,
+                        commit_every=self.rows_chunk,
+                    )
+                    rows_total += len(rows)
+            self.log.info("Finished data transfer. Total number of rows transferred - %s", rows_total)
+            cursor.close()
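Below is a minimal sketch of how the new TeradataToTeradataOperator could be wired into a DAG. The DAG id, connection ids ("teradata_source", "teradata_dest") and destination table are placeholders rather than values shipped with the package; the query against dbc.Users is borrowed from the example DAGs removed further down.

from datetime import datetime

from airflow import DAG
from airflow.providers.teradata.transfers.teradata_to_teradata import TeradataToTeradataOperator

with DAG(
    dag_id="example_teradata_to_teradata",  # placeholder DAG id
    start_date=datetime(2024, 1, 1),
    schedule=None,
    catchup=False,
    tags=["example"],
) as dag:
    transfer_users = TeradataToTeradataOperator(
        task_id="transfer_users",
        source_teradata_conn_id="teradata_source",  # placeholder source connection
        dest_teradata_conn_id="teradata_dest",      # placeholder destination connection
        destination_table="my_db.users_copy",       # placeholder target table
        sql="SELECT UserName, CreateTimeStamp FROM dbc.Users",
        rows_chunk=5000,  # commit every 5000 rows, matching the operator's default
    )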
apache_airflow_providers_teradata-2.0.0.dist-info/METADATA
@@ -0,0 +1,137 @@
+Metadata-Version: 2.1
+Name: apache-airflow-providers-teradata
+Version: 2.0.0
+Summary: Provider package apache-airflow-providers-teradata for Apache Airflow
+Keywords: airflow-provider,teradata,airflow,integration
+Author-email: Apache Software Foundation <dev@airflow.apache.org>
+Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
+Requires-Python: ~=3.8
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: Framework :: Apache Airflow
+Classifier: Framework :: Apache Airflow :: Provider
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Topic :: System :: Monitoring
+Requires-Dist: apache-airflow-providers-common-sql>=1.3.1
+Requires-Dist: apache-airflow>=2.6.0
+Requires-Dist: teradatasql>=17.20.0.28
+Requires-Dist: teradatasqlalchemy>=17.20.0.0
+Requires-Dist: apache-airflow-providers-common-sql ; extra == "common.sql"
+Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.0.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.0.0
+Project-URL: Slack Chat, https://s.apache.org/airflow-slack
+Project-URL: Source Code, https://github.com/apache/airflow
+Project-URL: Twitter, https://twitter.com/ApacheAirflow
+Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
+Provides-Extra: common.sql
+
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
+    OVERWRITTEN WHEN PREPARING PACKAGES.
+
+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+    `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+
+
+Package ``apache-airflow-providers-teradata``
+
+Release: ``2.0.0``
+
+
+`Teradata <https://www.teradata.com/>`__
+
+
+Provider package
+----------------
+
+This is a provider package for ``teradata`` provider. All classes for this provider package
+are in ``airflow.providers.teradata`` python package.
+
+You can find package information and changelog for the provider
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.0.0/>`_.
+
+Installation
+------------
+
+You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+for the minimum Airflow version supported) via
+``pip install apache-airflow-providers-teradata``
+
+The package supports the following python versions: 3.8,3.9,3.10,3.11
+
+Requirements
+------------
+
+========================================  ==================
+PIP package                               Version required
+========================================  ==================
+``apache-airflow``                        ``>=2.6.0``
+``apache-airflow-providers-common-sql``   ``>=1.3.1``
+``teradatasqlalchemy``                    ``>=17.20.0.0``
+``teradatasql``                           ``>=17.20.0.28``
+========================================  ==================
+
+Cross provider package dependencies
+-----------------------------------
+
+Those are dependencies that might be needed in order to use all the features of the package.
+You need to install the specified provider packages in order to use them.
+
+You can install such cross-provider dependencies when installing from PyPI. For example:
+
+.. code-block:: bash
+
+    pip install apache-airflow-providers-teradata[common.sql]
+
+
+============================================================================================================  ==============
+Dependent package                                                                                             Extra
+============================================================================================================  ==============
+`apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_  ``common.sql``
+============================================================================================================  ==============
+
+The changelog for the provider package can be found in the
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/2.0.0/changelog.html>`_.
apache_airflow_providers_teradata-2.0.0.dist-info/RECORD
@@ -0,0 +1,13 @@
+airflow/providers/teradata/LICENSE,sha256=ywUBpKZc7Jb96rVt5I3IDbg7dIJAbUSHkuoDcF3jbH4,13569
+airflow/providers/teradata/__init__.py,sha256=RV5q0cL-SrYdknuU-pF-iEKaPMaVq_IeH3_tcjc6o60,1583
+airflow/providers/teradata/get_provider_info.py,sha256=qOUPkhJIwdFfTsm926Ub8yacOBDIJ-lWWZ6NG2H6vBM,2872
+airflow/providers/teradata/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+airflow/providers/teradata/hooks/teradata.py,sha256=LTV18WhtmXbc9pgKlhuLdICbSArVrTBVaO3O8uW3GlQ,8094
+airflow/providers/teradata/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+airflow/providers/teradata/operators/teradata.py,sha256=UqEH6bn0e9NJgjBVVXgM-lQj-50MigBeJyWlbJ-1WG8,2373
+airflow/providers/teradata/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/teradata/transfers/teradata_to_teradata.py,sha256=WketoakWYtYdMn7W3hR55otQJqQSKB_dKHTtYLc4mco,3834
+apache_airflow_providers_teradata-2.0.0.dist-info/entry_points.txt,sha256=JbigXoUoKVSNWG-_-029FCCuehMOmAvuSnNGZ9Bz1Kc,104
+apache_airflow_providers_teradata-2.0.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
+apache_airflow_providers_teradata-2.0.0.dist-info/METADATA,sha256=5c6TrSER-wMo6RYKdjEtmRPHssCcFACf8Qq0O0sInCU,6191
+apache_airflow_providers_teradata-2.0.0.dist-info/RECORD,,
airflow/providers/teradata/example_dags/example_execute_bteq.py
@@ -1,51 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""
-This is an example dag that runs a BTEQ sentence to teradata
-"""
-from datetime import timedelta
-
-from airflow import DAG
-from airflow.providers.teradata.operators.bteq import BteqOperator
-from airflow.utils.dates import days_ago
-
-DEFAULT_ARGS = {
-    'owner': 'airflow',
-    'depends_on_past': False,
-    'email': ['airflow@example.com'],
-    'email_on_failure': False,
-    'email_on_retry': False,
-}
-
-with DAG(
-    dag_id='example_execute_bteq',
-    default_args=DEFAULT_ARGS,
-    dagrun_timeout=timedelta(hours=2),
-    schedule_interval=None,
-    start_date=days_ago(2),
-    tags=['example'],
-) as dag:
-    task_execute_bteq = BteqOperator(
-        task_id='task_execute_bteq',
-        bteq='''
-            SELECT CURRENT_DATE;
-            .IF ERRORCODE <> 0 THEN .QUIT 0301;
-            .QUIT 0;
-        ''',
-
-    )
airflow/providers/teradata/example_dags/example_export_csv.py
@@ -1,48 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""
-This is an example dag that export select queries from Teradata Database to CSV file (without header)
-"""
-from datetime import timedelta
-
-from airflow import DAG
-from airflow.providers.teradata.operators.fastexport import FastExportOperator
-from airflow.utils.dates import days_ago
-
-DEFAULT_ARGS = {
-    'owner': 'airflow',
-    'depends_on_past': False,
-    'email': ['airflow@example.com'],
-    'email_on_failure': False,
-    'email_on_retry': False,
-}
-
-with DAG(
-    dag_id='example_export_csv_to_teradata',
-    default_args=DEFAULT_ARGS,
-    dagrun_timeout=timedelta(hours=2),
-    schedule_interval=None,
-    start_date=days_ago(2),
-    tags=['example'],
-) as dag:
-    export_csv_to_teradata = FastExportOperator(
-        task_id='export_csv_to_teradata',
-        ttu_conn_id='ttu_default',
-        sql_select_stmt='''SEL UserName, CreateTimeStamp, SpoolSpace FROM dbc.Users''',
-        output_file='dummy_import.csv',
-    )
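The FastExportOperator example removed above has no direct replacement in 2.0.0. A rough, hand-rolled equivalent could use the new TeradataHook to run the same query and stream the result to CSV in chunks; the connection id, chunk size and output path below are placeholders, and the DB-API calls (get_conn, cursor, fetchmany) are the same ones the new transfer operator uses.

import csv

from airflow.providers.teradata.hooks.teradata import TeradataHook


def export_users_to_csv(output_file: str = "dummy_import.csv") -> None:
    # "teradata_default" is a placeholder connection id.
    hook = TeradataHook(teradata_conn_id="teradata_default")
    with hook.get_conn() as conn:
        cursor = conn.cursor()
        cursor.execute("SELECT UserName, CreateTimeStamp, SpoolSpace FROM dbc.Users")
        with open(output_file, "w", newline="") as csv_file:
            writer = csv.writer(csv_file)
            # Fetch in chunks instead of loading the whole result set into memory.
            for rows in iter(lambda: cursor.fetchmany(5000), []):
                writer.writerows(rows)
        cursor.close()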
airflow/providers/teradata/example_dags/example_load_csv.py
@@ -1,69 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-"""
-This is an example dag that loads a CSV file without header to Teradata Database
-"""
-from datetime import timedelta
-
-from airflow import DAG
-from airflow.providers.teradata.operators.fastload import FastLoadOperator
-from airflow.utils.dates import days_ago
-
-DEFAULT_ARGS = {
-    'owner': 'airflow',
-    'depends_on_past': False,
-    'email': ['airflow@example.com'],
-    'email_on_failure': False,
-    'email_on_retry': False,
-}
-
-with DAG(
-    dag_id='example_load_csv_to_teradata',
-    default_args=DEFAULT_ARGS,
-    dagrun_timeout=timedelta(hours=2),
-    schedule_interval=None,
-    start_date=days_ago(2),
-    tags=['example'],
-) as dag:
-    load_csv_to_teradata = FastLoadOperator(
-        task_id='load_csv_to_teradata',
-        ttu_conn_id='ttu_default',
-        input_file='/files/dags/dummy_load.csv',
-        ## Pre-operator BTEQ for creating the table before data load ocurrs
-        ## You can use macros too!
-        preoperator_bteq="""
-            DROP TABLE SQLJ.dummy_load_UV;
-            DROP TABLE SQLJ.dummy_load_Log;
-            DROP TABLE SQLJ.dummy_load_ET;
-            DROP TABLE SQLJ.dummy_load;
-            CREATE TABLE SQLJ.dummy_load (
-                id int,
-                first_name varchar(100),
-                mail varchar(320),
-                phone_number varchar(20)
-            )
-            PRIMARY INDEX (id);
-            .IF ERRORCODE <> 0 THEN .QUIT 0301;
-            .QUIT 0;
-        """,
-        target_table='SQLJ.dummy_load',
-        ## Working db is for saving internal tables of fastload process (error tables)
-        working_database='SQLJ',
-        delimiter='|',
-        encoding='UTF8'
-    )
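Similarly, the removed FastLoadOperator example has no one-to-one counterpart in 2.0.0. One possible substitute is TeradataHook.bulk_insert_rows, the call the new TeradataToTeradataOperator itself relies on. In the sketch below the connection id is a placeholder, the table name, delimiter and column names are taken from the removed example, and the target table is assumed to already exist.

import csv

from airflow.providers.teradata.hooks.teradata import TeradataHook


def load_csv_to_teradata(input_file: str = "/files/dags/dummy_load.csv") -> None:
    # "teradata_default" is a placeholder connection id; SQLJ.dummy_load must already exist.
    hook = TeradataHook(teradata_conn_id="teradata_default")
    with open(input_file, newline="") as csv_file:
        rows = list(csv.reader(csv_file, delimiter="|"))
    # bulk_insert_rows commits in chunks, mirroring the new transfer operator's behaviour.
    hook.bulk_insert_rows(
        "SQLJ.dummy_load",
        rows,
        target_fields=["id", "first_name", "mail", "phone_number"],
        commit_every=5000,
    )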