apache-airflow-providers-teradata 3.2.1__tar.gz → 3.2.2__tar.gz

This diff shows the changes between publicly released versions of this package, as published to the public registries. It is provided for informational purposes only.
Files changed (84)
  1. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/PKG-INFO +29 -16
  2. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/README.rst +25 -13
  3. apache_airflow_providers_teradata-3.2.2/docs/.latest-doc-only-change.txt +1 -0
  4. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/changelog.rst +20 -0
  5. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/index.rst +14 -14
  6. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/operators/azure_blob_to_teradata.rst +2 -2
  7. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/operators/bteq.rst +14 -4
  8. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/operators/s3_to_teradata.rst +2 -2
  9. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/operators/teradata.rst +4 -4
  10. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/operators/teradata_to_teradata.rst +2 -2
  11. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/provider.yaml +2 -1
  12. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/pyproject.toml +6 -4
  13. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/__init__.py +1 -1
  14. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/hooks/bteq.py +14 -16
  15. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/hooks/teradata.py +25 -8
  16. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/hooks/ttu.py +1 -1
  17. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/operators/bteq.py +10 -23
  18. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/operators/teradata.py +2 -6
  19. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/operators/teradata_compute_cluster.py +46 -44
  20. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/transfers/azure_blob_to_teradata.py +2 -2
  21. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/transfers/s3_to_teradata.py +2 -6
  22. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/transfers/teradata_to_teradata.py +2 -6
  23. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/triggers/teradata_compute_cluster.py +6 -7
  24. apache_airflow_providers_teradata-3.2.2/src/airflow/providers/teradata/utils/constants.py +49 -0
  25. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/version_compat.py +1 -11
  26. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/hooks/test_bteq.py +10 -3
  27. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/hooks/test_teradata.py +26 -20
  28. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/operators/test_bteq.py +10 -4
  29. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/triggers/test_teradata_compute_cluster.py +6 -2
  30. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/utils/test_constants.py +3 -33
  31. apache_airflow_providers_teradata-3.2.1/docs/.latest-doc-only-change.txt +0 -1
  32. apache_airflow_providers_teradata-3.2.1/src/airflow/providers/teradata/utils/constants.py +0 -46
  33. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/commits.rst +0 -0
  34. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/conf.py +0 -0
  35. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/connections/teradata.rst +0 -0
  36. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/installing-providers-from-sources.rst +0 -0
  37. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/integration-logos/Teradata.png +0 -0
  38. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/operators/compute_cluster.rst +0 -0
  39. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/operators/index.rst +0 -0
  40. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/docs/security.rst +0 -0
  41. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/__init__.py +0 -0
  42. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/__init__.py +0 -0
  43. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/LICENSE +0 -0
  44. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/get_provider_info.py +0 -0
  45. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/hooks/__init__.py +0 -0
  46. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/operators/__init__.py +0 -0
  47. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/transfers/__init__.py +0 -0
  48. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/triggers/__init__.py +0 -0
  49. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/utils/__init__.py +0 -0
  50. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/utils/bteq_util.py +0 -0
  51. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/src/airflow/providers/teradata/utils/encryption_utils.py +0 -0
  52. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/conftest.py +0 -0
  53. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/__init__.py +0 -0
  54. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/__init__.py +0 -0
  55. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/create_ssl_table.sql +0 -0
  56. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/create_table.sql +0 -0
  57. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/example_azure_blob_to_teradata_transfer.py +0 -0
  58. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/example_bteq.py +0 -0
  59. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/example_remote_bteq.py +0 -0
  60. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/example_s3_to_teradata_transfer.py +0 -0
  61. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/example_ssl_teradata.py +0 -0
  62. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/example_teradata.py +0 -0
  63. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/example_teradata_call_sp.py +0 -0
  64. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/example_teradata_compute_cluster.py +0 -0
  65. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/example_teradata_to_teradata_transfer.py +0 -0
  66. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/script +0 -0
  67. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/script.bteq +0 -0
  68. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/script.sql +0 -0
  69. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/system/teradata/script_utf16.bteq +0 -0
  70. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/__init__.py +0 -0
  71. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/__init__.py +0 -0
  72. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/hooks/__init__.py +0 -0
  73. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/hooks/test_ttu.py +0 -0
  74. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/operators/__init__.py +0 -0
  75. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/operators/test_teradata.py +0 -0
  76. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/operators/test_teradata_compute_cluster.py +0 -0
  77. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/transfers/__init__.py +0 -0
  78. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/transfers/test_azure_blob_to_teradata.py +0 -0
  79. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/transfers/test_s3_to_teradata.py +0 -0
  80. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/transfers/test_teradata_to_teradata.py +0 -0
  81. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/triggers/__init__.py +0 -0
  82. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/utils/__init__.py +0 -0
  83. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/utils/test_bteq_util.py +0 -0
  84. {apache_airflow_providers_teradata-3.2.1 → apache_airflow_providers_teradata-3.2.2}/tests/unit/teradata/utils/test_encryption_utils.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-teradata
- Version: 3.2.1
+ Version: 3.2.2
  Summary: Provider package apache-airflow-providers-teradata for Apache Airflow
  Keywords: airflow-provider,teradata,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -21,6 +21,7 @@ Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Classifier: Topic :: System :: Monitoring
  Requires-Dist: apache-airflow>=2.10.0
+ Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
  Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
  Requires-Dist: teradatasqlalchemy>=17.20.0.0
  Requires-Dist: teradatasql>=17.20.0.28
@@ -28,8 +29,8 @@ Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
  Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
  Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.1/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.1
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.2/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.2
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -63,9 +64,8 @@ Provides-Extra: ssh

  Package ``apache-airflow-providers-teradata``

- Release: ``3.2.1``
+ Release: ``3.2.2``

- Release Date: ``|PypiReleaseDate|``

  `Teradata <https://www.teradata.com/>`__

@@ -77,12 +77,12 @@ This is a provider package for ``teradata`` provider. All classes for this provi
  are in ``airflow.providers.teradata`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.2/>`_.

  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-teradata``

@@ -91,14 +91,15 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
  Requirements
  ------------

- ======================================= ==================
- PIP package                             Version required
- ======================================= ==================
- ``apache-airflow``                      ``>=2.10.0``
- ``apache-airflow-providers-common-sql`` ``>=1.20.0``
- ``teradatasqlalchemy``                  ``>=17.20.0.0``
- ``teradatasql``                         ``>=17.20.0.28``
- ======================================= ==================
+ ========================================== ==================
+ PIP package                                Version required
+ ========================================== ==================
+ ``apache-airflow``                         ``>=2.10.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow-providers-common-sql``    ``>=1.20.0``
+ ``teradatasqlalchemy``                     ``>=17.20.0.0``
+ ``teradatasql``                            ``>=17.20.0.28``
+ ========================================== ==================

  Cross provider package dependencies
  -----------------------------------
@@ -117,11 +118,23 @@ You can install such cross-provider dependencies when installing from PyPI. For
  Dependent package                                                                                                      Extra
  ====================================================================================================================== ===================
  `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_                   ``amazon``
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_     ``common.compat``
  `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_           ``common.sql``
  `apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
  `apache-airflow-providers-ssh <https://airflow.apache.org/docs/apache-airflow-providers-ssh>`_                         ``ssh``
  ====================================================================================================================== ===================

+ Optional dependencies
+ ----------------------
+
+ =================== ============================================
+ Extra               Dependencies
+ =================== ============================================
+ ``microsoft.azure`` ``apache-airflow-providers-microsoft-azure``
+ ``amazon``          ``apache-airflow-providers-amazon``
+ ``ssh``             ``apache-airflow-providers-ssh``
+ =================== ============================================
+
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.2/changelog.html>`_.

@@ -23,9 +23,8 @@

  Package ``apache-airflow-providers-teradata``

- Release: ``3.2.1``
+ Release: ``3.2.2``

- Release Date: ``|PypiReleaseDate|``

  `Teradata <https://www.teradata.com/>`__

@@ -37,12 +36,12 @@ This is a provider package for ``teradata`` provider. All classes for this provi
  are in ``airflow.providers.teradata`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.1/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.2/>`_.

  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-teradata``

@@ -51,14 +50,15 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
  Requirements
  ------------

- ======================================= ==================
- PIP package                             Version required
- ======================================= ==================
- ``apache-airflow``                      ``>=2.10.0``
- ``apache-airflow-providers-common-sql`` ``>=1.20.0``
- ``teradatasqlalchemy``                  ``>=17.20.0.0``
- ``teradatasql``                         ``>=17.20.0.28``
- ======================================= ==================
+ ========================================== ==================
+ PIP package                                Version required
+ ========================================== ==================
+ ``apache-airflow``                         ``>=2.10.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow-providers-common-sql``    ``>=1.20.0``
+ ``teradatasqlalchemy``                     ``>=17.20.0.0``
+ ``teradatasql``                            ``>=17.20.0.28``
+ ========================================== ==================

  Cross provider package dependencies
  -----------------------------------
@@ -77,10 +77,22 @@ You can install such cross-provider dependencies when installing from PyPI. For
  Dependent package                                                                                                      Extra
  ====================================================================================================================== ===================
  `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_                   ``amazon``
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_     ``common.compat``
  `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_           ``common.sql``
  `apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
  `apache-airflow-providers-ssh <https://airflow.apache.org/docs/apache-airflow-providers-ssh>`_                         ``ssh``
  ====================================================================================================================== ===================

+ Optional dependencies
+ ----------------------
+
+ =================== ============================================
+ Extra               Dependencies
+ =================== ============================================
+ ``microsoft.azure`` ``apache-airflow-providers-microsoft-azure``
+ ``amazon``          ``apache-airflow-providers-amazon``
+ ``ssh``             ``apache-airflow-providers-ssh``
+ =================== ============================================
+
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.1/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.2/changelog.html>`_.
@@ -0,0 +1 @@
+ 05960ac2ebb1fd9a74f3135e5e8fe5e28160d4b2
@@ -25,6 +25,26 @@
  Changelog
  ---------

+ 3.2.2
+ .....
+
+ Misc
+ ~~~~
+
+ * ``Fix mypy type errors in Teradata provider for SQLAlchemy 2 upgrade (#56861)``
+ * ``Improve error messages in Teradata provider (#56367)``
+ * ``Build correct SQLAlchemy URI in TeradataHook (#56305)``
+ * ``Migrate tableau, telegram, trino, teradata providers to ''common.compat'' (#57013)``
+
+ .. Below changes are excluded from the changelog. Move them to
+    appropriate section above if needed. Do not delete the lines(!):
+    * ``Prepare release for Sep 2025 1st wave of providers (#55203)``
+    * ``Fix Airflow 2 reference in README/index of providers (#55240)``
+    * ``Make term Dag consistent in providers docs (#55101)``
+    * ``Switch pre-commit to prek (#54258)``
+    * ``Remove placeholder Release Date in changelog and index files (#56056)``
+    * ``Prepare release for Sep 2025 2nd wave of providers (#55688)``
+
  3.2.1
  .....

@@ -77,9 +77,7 @@ apache-airflow-providers-teradata package
  `Teradata <https://www.teradata.com/>`__


- Release: 3.2.1
-
- Release Date: ``|PypiReleaseDate|``
+ Release: 3.2.2

  Provider package
  ----------------
@@ -90,7 +88,7 @@ All classes for this package are included in the ``airflow.providers.teradata``
  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation via
+ You can install this package on top of an existing Airflow installation via
  ``pip install apache-airflow-providers-teradata``.
  For the minimum Airflow version supported, see ``Requirements`` below.

@@ -99,14 +97,15 @@ Requirements

  The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.

- ======================================= ==================
- PIP package                             Version required
- ======================================= ==================
- ``apache-airflow``                      ``>=2.10.0``
- ``apache-airflow-providers-common-sql`` ``>=1.20.0``
- ``teradatasqlalchemy``                  ``>=17.20.0.0``
- ``teradatasql``                         ``>=17.20.0.28``
- ======================================= ==================
+ ========================================== ==================
+ PIP package                                Version required
+ ========================================== ==================
+ ``apache-airflow``                         ``>=2.10.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
+ ``apache-airflow-providers-common-sql``    ``>=1.20.0``
+ ``teradatasqlalchemy``                     ``>=17.20.0.0``
+ ``teradatasql``                            ``>=17.20.0.28``
+ ========================================== ==================

  Cross provider package dependencies
  -----------------------------------
@@ -125,6 +124,7 @@ You can install such cross-provider dependencies when installing from PyPI. For
  Dependent package                                                                                                      Extra
  ====================================================================================================================== ===================
  `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_                   ``amazon``
+ `apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_     ``common.compat``
  `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_           ``common.sql``
  `apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
  `apache-airflow-providers-ssh <https://airflow.apache.org/docs/apache-airflow-providers-ssh>`_                         ``ssh``
@@ -136,5 +136,5 @@ Downloading official packages
  You can download officially released packages and verify their checksums and signatures from the
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_

- * `The apache-airflow-providers-teradata 3.2.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.1.tar.gz.sha512>`__)
- * `The apache-airflow-providers-teradata 3.2.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.1-py3-none-any.whl.sha512>`__)
+ * `The apache-airflow-providers-teradata 3.2.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.2.tar.gz.sha512>`__)
+ * `The apache-airflow-providers-teradata 3.2.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_teradata-3.2.2-py3-none-any.whl.sha512>`__)
@@ -123,10 +123,10 @@ to teradata table is as follows:
  :start-after: [START azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_parquet]
  :end-before: [END azure_blob_to_teradata_transfer_operator_howto_guide_transfer_data_blob_to_teradata_parquet]

- The complete ``AzureBlobStorageToTeradataOperator`` Operator DAG
+ The complete ``AzureBlobStorageToTeradataOperator`` Operator Dag
  ----------------------------------------------------------------

- When we put everything together, our DAG should look like this:
+ When we put everything together, our Dag should look like this:

  .. exampleinclude:: /../../teradata/tests/system/teradata/example_azure_blob_to_teradata_transfer.py
  :language: python
@@ -22,7 +22,7 @@ BteqOperator

  The :class:`~airflow.providers.teradata.operators.bteq.BteqOperator` enables execution of SQL statements or BTEQ (Basic Teradata Query) scripts using the Teradata BTEQ utility, which can be installed either locally or accessed remotely via SSH.

- This is useful for executing administrative operations, batch queries, or ETL tasks in Teradata environments using the Teradata BTEQ utility.
+ This is useful for executing administrative operations, batch queries, or ELT tasks in Teradata environments using the Teradata BTEQ utility.

  .. note::

@@ -47,6 +47,15 @@ Make sure your Teradata Airflow connection is defined with the required fields:

  You can define a remote host with a separate SSH connection using the ``ssh_conn_id``.

+
+ Ensure that the Teradata BTEQ utility is installed on the machine where the SQL statements or scripts will be executed. This could be:
+
+ - The **local machine** where Airflow runs the task, for local execution.
+ - The **remote host** accessed via SSH, for remote execution.
+
+ If executing remotely, also ensure that an SSH server (e.g., ``sshd``) is running and accessible on the remote machine.
+
+
  .. note::

  For improved security, it is **highly recommended** to use
@@ -59,6 +68,7 @@ You can define a remote host with a separate SSH connection using the ``ssh_conn
  https://airflow.apache.org/docs/apache-airflow/stable/howto/connection/ssh.html


+
  To execute arbitrary SQL or BTEQ commands in a Teradata database, use the
  :class:`~airflow.providers.teradata.operators.bteq.BteqOperator`.

@@ -224,7 +234,7 @@ The BteqOperator supports executing conditional logic within your BTEQ scripts.
  :start-after: [START bteq_operator_howto_guide_conditional_logic]
  :end-before: [END bteq_operator_howto_guide_conditional_logic]

- Conditional execution enables more intelligent data pipelines that can adapt to different scenarios without requiring separate DAG branches.
+ Conditional execution enables more intelligent data pipelines that can adapt to different scenarios without requiring separate Dag branches.


  Error Handling in BTEQ Scripts
@@ -253,10 +263,10 @@ When your workflow completes or requires cleanup, you can use the BteqOperator t
  :end-before: [END bteq_operator_howto_guide_drop_table]


- The complete Teradata Operator DAG
+ The complete Teradata Operator Dag
  ----------------------------------

- When we put everything together, our DAG should look like this:
+ When we put everything together, our Dag should look like this:

  .. exampleinclude:: /../../teradata/tests/system/teradata/example_bteq.py
  :language: python
@@ -70,10 +70,10 @@ An example usage of the S3ToTeradataOperator to transfer PARQUET data format fro
  :start-after: [START s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_parquet]
  :end-before: [END s3_to_teradata_transfer_operator_howto_guide_transfer_data_s3_to_teradata_parquet]

- The complete ``S3ToTeradataOperator`` Operator DAG
+ The complete ``S3ToTeradataOperator`` Operator Dag
  --------------------------------------------------

- When we put everything together, our DAG should look like this:
+ When we put everything together, our Dag should look like this:

  .. exampleinclude:: /../../teradata/tests/system/teradata/example_s3_to_teradata_transfer.py
  :language: python
@@ -104,10 +104,10 @@ We can then create a TeradataOperator task that drops the ``Users`` table.
  :start-after: [START teradata_operator_howto_guide_drop_users_table]
  :end-before: [END teradata_operator_howto_guide_drop_users_table]

- The complete Teradata Operator DAG
+ The complete Teradata Operator Dag
  ----------------------------------

- When we put everything together, our DAG should look like this:
+ When we put everything together, our Dag should look like this:

  .. exampleinclude:: /../../teradata/tests/system/teradata/example_teradata.py
  :language: python
@@ -218,10 +218,10 @@ with parameters passed positionally as a list:
  :start-after: [START howto_teradata_stored_procedure_operator_with_in_out_dynamic_result]
  :end-before: [END howto_teradata_stored_procedure_operator_with_in_out_dynamic_result]

- The complete TeradataStoredProcedureOperator DAG
+ The complete TeradataStoredProcedureOperator Dag
  ------------------------------------------------

- When we put everything together, our DAG should look like this:
+ When we put everything together, our Dag should look like this:

  .. exampleinclude:: /../../teradata/tests/system/teradata/example_teradata_call_sp.py
  :language: python
@@ -37,10 +37,10 @@ An example usage of the TeradataToTeradataOperator is as follows:
  :start-after: [START teradata_to_teradata_transfer_operator_howto_guide_transfer_data]
  :end-before: [END teradata_to_teradata_transfer_operator_howto_guide_transfer_data]

- The complete TeradataToTeradata Transfer Operator DAG
+ The complete TeradataToTeradata Transfer Operator Dag
  -----------------------------------------------------

- When we put everything together, our DAG should look like this:
+ When we put everything together, our Dag should look like this:

  .. exampleinclude:: /../../teradata/tests/system/teradata/example_teradata.py
  :language: python
@@ -22,12 +22,13 @@ description: |
  `Teradata <https://www.teradata.com/>`__

  state: ready
- source-date-epoch: 1753692055
+ source-date-epoch: 1761117680
  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+ - 3.2.2
  - 3.2.1
  - 3.2.0
  - 3.1.0
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-teradata"
- version = "3.2.1"
+ version = "3.2.2"
  description = "Provider package apache-airflow-providers-teradata for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -54,10 +54,11 @@ requires-python = ">=3.10"

  # The dependencies should be modified in place in the generated file.
  # Any change in the dependencies is preserved when the file is regenerated
- # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
+ # Make sure to run ``prek update-providers-dependencies --all-files``
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
  "apache-airflow>=2.10.0",
+ "apache-airflow-providers-common-compat>=1.8.0",
  "apache-airflow-providers-common-sql>=1.20.0",
  "teradatasqlalchemy>=17.20.0.0",
  "teradatasql>=17.20.0.28",
@@ -82,6 +83,7 @@ dev = [
  "apache-airflow-task-sdk",
  "apache-airflow-devel-common",
  "apache-airflow-providers-amazon",
+ "apache-airflow-providers-common-compat",
  "apache-airflow-providers-common-sql",
  "apache-airflow-providers-microsoft-azure",
  "apache-airflow-providers-ssh",
@@ -114,8 +116,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.1"
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.1/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.2"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.2/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "3.2.1"
+ __version__ = "3.2.2"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
  "2.10.0"
@@ -37,6 +37,7 @@ from airflow.providers.teradata.utils.bteq_util import (
  verify_bteq_installed,
  verify_bteq_installed_remote,
  )
+ from airflow.providers.teradata.utils.constants import Constants
  from airflow.providers.teradata.utils.encryption_utils import (
  decrypt_remote_file_to_string,
  generate_encrypted_file_with_openssl,
@@ -158,7 +159,7 @@ class BteqHook(TtuHook):
  if self.ssh_hook and self.ssh_hook.get_conn():
  with self.ssh_hook.get_conn() as ssh_client:
  if ssh_client is None:
- raise AirflowException("Failed to establish SSH connection. `ssh_client` is None.")
+ raise AirflowException(Constants.BTEQ_REMOTE_ERROR_MSG)
  verify_bteq_installed_remote(ssh_client)
  password = generate_random_password() # Encryption/Decryption password
  encrypted_file_path = os.path.join(tmp_dir, "bteq_script.enc")
@@ -170,7 +171,6 @@ class BteqHook(TtuHook):
  )
  remote_encrypted_path = os.path.join(remote_working_dir or "", "bteq_script.enc")
  remote_encrypted_path = remote_encrypted_path.replace("/", "\\")
-
  transfer_file_sftp(ssh_client, encrypted_file_path, remote_encrypted_path)

  bteq_command_str = prepare_bteq_command_for_remote_execution(
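The ``transfer_file_sftp`` call above ships the encrypted script to the remote host before execution. A hedged sketch of what such a helper does, using paramiko's standard SFTP API (the real helper lives in the provider's ``bteq_util``; this standalone version and its signature are assumptions):

import paramiko

def transfer_file_sftp(ssh_client: paramiko.SSHClient, local_path: str, remote_path: str) -> None:
    # Open an SFTP session on the already-established SSH connection
    # and upload the encrypted BTEQ script to the remote working dir.
    sftp = ssh_client.open_sftp()
    try:
        sftp.put(local_path, remote_path)
    finally:
        sftp.close()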
@@ -204,24 +204,20 @@ class BteqHook(TtuHook):
  else [bteq_quit_rc if bteq_quit_rc is not None else 0]
  )
  ):
- raise AirflowException(f"BTEQ task failed with error: {failure_message}")
+ raise AirflowException(f"Failed to execute BTEQ script : {failure_message}")
  if failure_message:
  self.log.warning(failure_message)
  return exit_status
  else:
- raise AirflowException("SSH connection is not established. `ssh_hook` is None or invalid.")
+ raise AirflowException(Constants.BTEQ_REMOTE_ERROR_MSG)
  except (OSError, socket.gaierror):
- raise AirflowException(
- "SSH connection timed out. Please check the network or server availability."
- )
+ raise AirflowException(Constants.BTEQ_REMOTE_ERROR_MSG)
  except SSHException as e:
- raise AirflowException(f"An unexpected error occurred during SSH connection: {str(e)}")
+ raise AirflowException(f"{Constants.BTEQ_REMOTE_ERROR_MSG}: {str(e)}")
  except AirflowException as e:
  raise e
  except Exception as e:
- raise AirflowException(
- f"An unexpected error occurred while executing BTEQ script on remote machine: {str(e)}"
- )
+ raise AirflowException(f"{Constants.BTEQ_REMOTE_ERROR_MSG}: {str(e)}")
  finally:
  # Remove the local script file
  if encrypted_file_path and os.path.exists(encrypted_file_path):
@@ -276,12 +272,12 @@ class BteqHook(TtuHook):
  process.wait(timeout=timeout + 60) # Adding 1 minute extra for BTEQ script timeout
  except subprocess.TimeoutExpired:
  self.on_kill()
- raise AirflowException(f"BTEQ command timed out after {timeout} seconds.")
+ raise AirflowException(Constants.BTEQ_TIMEOUT_ERROR_MSG, timeout)
  conn = self.get_conn()
  conn["sp"] = process # For `on_kill` support
  failure_message = None
  if stdout_data is None:
- raise AirflowException("Process stdout is None. Unable to read BTEQ output.")
+ raise AirflowException(Constants.BTEQ_UNEXPECTED_ERROR_MSG)
  decoded_line = ""
  for line in stdout_data.splitlines():
  try:
@@ -302,7 +298,7 @@ class BteqHook(TtuHook):
  else [bteq_quit_rc if bteq_quit_rc is not None else 0]
  )
  ):
- raise AirflowException(f"BTEQ task failed with error: {failure_message}")
+ raise AirflowException(f"{Constants.BTEQ_UNEXPECTED_ERROR_MSG}: {failure_message}")
  if failure_message:
  self.log.warning(failure_message)

@@ -320,7 +316,7 @@ class BteqHook(TtuHook):
  self.log.warning("Subprocess did not terminate in time. Forcing kill...")
  process.kill()
  except Exception as e:
- self.log.error("Failed to terminate subprocess: %s", str(e))
+ self.log.error("%s : %s", Constants.BTEQ_UNEXPECTED_ERROR_MSG, str(e))

  def get_airflow_home_dir(self) -> str:
  """Get the AIRFLOW_HOME directory."""
@@ -331,7 +327,9 @@ class BteqHook(TtuHook):
  try:
  temp_dir = tempfile.gettempdir()
  if not os.path.isdir(temp_dir) or not os.access(temp_dir, os.W_OK):
- raise OSError("OS temp dir not usable")
+ raise OSError(
+ f"Failed to execute the BTEQ script due to Temporary directory {temp_dir} is not writable."
+ )
  except Exception:
  temp_dir = self.get_airflow_home_dir()

@@ -22,8 +22,8 @@ from __future__ import annotations
  import re
  from typing import TYPE_CHECKING, Any

- import sqlalchemy
  import teradatasql
+ from sqlalchemy.engine import URL
  from teradatasql import TeradataConnection

  from airflow.providers.common.sql.hooks.sql import DbApiHook
@@ -34,6 +34,7 @@ if TYPE_CHECKING:
  except ImportError:
  from airflow.models.connection import Connection # type: ignore[assignment]

+ DEFAULT_DB_PORT = 1025
  PARAM_TYPES = {bool, float, int, str}

@@ -166,7 +167,7 @@ class TeradataHook(DbApiHook):
  conn: Connection = self.get_connection(self.get_conn_id())
  conn_config = {
  "host": conn.host or "localhost",
- "dbs_port": conn.port or "1025",
+ "dbs_port": conn.port or DEFAULT_DB_PORT,
  "database": conn.schema or "",
  "user": conn.login or "dbc",
  "password": conn.password or "dbc",
@@ -195,12 +196,28 @@ class TeradataHook(DbApiHook):

  return conn_config

- def get_sqlalchemy_engine(self, engine_kwargs=None):
- """Return a connection object using sqlalchemy."""
- conn: Connection = self.get_connection(self.get_conn_id())
- link = f"teradatasql://{conn.login}:{conn.password}@{conn.host}"
- connection = sqlalchemy.create_engine(link)
- return connection
+ @property
+ def sqlalchemy_url(self) -> URL:
+ """
+ Override to return a Sqlalchemy.engine.URL object from the Teradata connection.
+
+ :return: the extracted sqlalchemy.engine.URL object.
+ """
+ connection = self.get_connection(self.get_conn_id())
+ # Adding only teradatasqlalchemy supported connection parameters.
+ # https://pypi.org/project/teradatasqlalchemy/#ConnectionParameters
+ return URL.create(
+ drivername="teradatasql",
+ username=connection.login,
+ password=connection.password,
+ host=connection.host,
+ port=connection.port,
+ database=connection.schema if connection.schema else None,
+ )
+
+ def get_uri(self) -> str:
+ """Override DbApiHook get_uri method for get_sqlalchemy_engine()."""
+ return self.sqlalchemy_url.render_as_string()

  @staticmethod
  def get_ui_field_behaviour() -> dict:
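The replacement builds the URI with SQLAlchemy's ``URL.create`` instead of f-string concatenation, so credentials containing URL-special characters no longer produce a malformed DSN (the ``#56305`` fix noted in the changelog). A standalone sketch with placeholder credentials:

from sqlalchemy.engine import URL

url = URL.create(
    drivername="teradatasql",
    username="dbc",
    password="p@ss/word",  # "@" and "/" are percent-encoded instead of breaking the URI
    host="teradata.example.com",
    port=1025,
    database="my_db",
)
print(url.render_as_string(hide_password=False))
# teradatasql://dbc:p%40ss%2Fword@teradata.example.com:1025/my_db

Note that ``render_as_string()`` without arguments, as used by the new ``get_uri()``, masks the password in the rendered string.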
@@ -22,7 +22,7 @@ from abc import ABC
  from typing import Any

  from airflow.exceptions import AirflowException
- from airflow.providers.teradata.version_compat import BaseHook
+ from airflow.providers.common.compat.sdk import BaseHook


  class TtuHook(BaseHook, ABC):
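This import swap routes ``BaseHook`` through the ``common.compat`` provider instead of the provider-local ``version_compat`` shim (the ``#57013`` migration in the changelog). Conceptually the shim resolves to a fallback like the following (a sketch of the compatibility pattern, not the provider's actual code):

try:
    # Airflow 3: hooks are exposed via the Task SDK
    from airflow.sdk import BaseHook
except ImportError:
    # Airflow 2 fallback location
    from airflow.hooks.base import BaseHook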