apache-airflow-providers-teradata 3.0.2rc1__py3-none-any.whl → 3.0.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of apache-airflow-providers-teradata might be problematic.

airflow/providers/teradata/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "3.0.2"
+__version__ = "3.0.3"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"

airflow/providers/teradata/get_provider_info.py
@@ -26,22 +26,6 @@ def get_provider_info():
         "package-name": "apache-airflow-providers-teradata",
         "name": "Teradata",
         "description": "`Teradata <https://www.teradata.com/>`__\n",
-        "state": "ready",
-        "source-date-epoch": 1741509940,
-        "versions": [
-            "3.0.2",
-            "3.0.1",
-            "3.0.0",
-            "2.6.1",
-            "2.6.0",
-            "2.5.0",
-            "2.4.0",
-            "2.3.0",
-            "2.2.0",
-            "2.1.1",
-            "2.1.0",
-            "2.0.0",
-        ],
         "integrations": [
             {
                 "integration-name": "Teradata",
@@ -98,15 +82,4 @@ def get_provider_info():
                 "python-modules": ["airflow.providers.teradata.triggers.teradata_compute_cluster"],
             }
         ],
-        "dependencies": [
-            "apache-airflow>=2.9.0",
-            "apache-airflow-providers-common-sql>=1.20.0",
-            "teradatasqlalchemy>=17.20.0.0",
-            "teradatasql>=17.20.0.28",
-        ],
-        "optional-dependencies": {
-            "microsoft.azure": ["apache-airflow-providers-microsoft-azure"],
-            "amazon": ["apache-airflow-providers-amazon"],
-        },
-        "devel-dependencies": [],
     }
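
For orientation, below is a hedged sketch of what the trimmed get_provider_info() returns after these two hunks, assembled only from the context lines above; the integrations entry is abbreviated and the remaining sections are elided.

    # Hedged sketch based on the hunk context above; not the full function.
    def get_provider_info():
        return {
            "package-name": "apache-airflow-providers-teradata",
            "name": "Teradata",
            "description": "`Teradata <https://www.teradata.com/>`__\n",
            "integrations": [
                {"integration-name": "Teradata"},  # remaining fields abbreviated
            ],
            # operators, hooks, transfers and triggers sections elided here
        }

The dependency pins removed here are not lost: they still appear as Requires-Dist entries in the wheel's METADATA further down; the provider-info dict simply stops duplicating them, along with the release state, source-date-epoch and version history.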

airflow/providers/teradata/operators/teradata_compute_cluster.py
@@ -157,7 +157,7 @@ class _TeradataComputeClusterOperator(BaseOperator):
     def _compute_cluster_execute_complete(self, event: dict[str, Any]) -> None:
         if event["status"] == "success":
             return event["message"]
-        elif event["status"] == "error":
+        if event["status"] == "error":
             raise AirflowException(event["message"])
 
     def _handle_cc_status(self, operation_type, sql):
@@ -170,7 +170,7 @@ class _TeradataComputeClusterOperator(BaseOperator):
         self.defer(
             timeout=timedelta(minutes=self.timeout),
             trigger=TeradataComputeClusterSyncTrigger(
-                teradata_conn_id=cast(str, self.teradata_conn_id),
+                teradata_conn_id=cast("str", self.teradata_conn_id),
                 compute_profile_name=self.compute_profile_name,
                 compute_group_name=self.compute_group_name,
                 operation_type=operation_type,
@@ -185,8 +185,7 @@ class _TeradataComputeClusterOperator(BaseOperator):
         try:
             if handler is not None:
                 return self.hook.run(query, handler=handler)
-            else:
-                return self.hook.run(query)
+            return self.hook.run(query)
         except Exception as ex:
             self.log.error(str(ex))
             raise
@@ -305,13 +304,12 @@ class TeradataComputeClusterProvisionOperator(_TeradataComputeClusterOperator):
             msg = f"Compute Profile {self.compute_profile_name} is already exists under Compute Group {self.compute_group_name}. Status is {cp_status_result}"
             self.log.info(msg)
             return cp_status_result
-        else:
-            create_cp_query = self._build_ccp_setup_query()
-            operation = Constants.CC_CREATE_OPR
-            initially_suspended = self._get_initially_suspended(create_cp_query)
-            if initially_suspended == "TRUE":
-                operation = Constants.CC_CREATE_SUSPEND_OPR
-            return self._handle_cc_status(operation, create_cp_query)
+        create_cp_query = self._build_ccp_setup_query()
+        operation = Constants.CC_CREATE_OPR
+        initially_suspended = self._get_initially_suspended(create_cp_query)
+        if initially_suspended == "TRUE":
+            operation = Constants.CC_CREATE_SUSPEND_OPR
+        return self._handle_cc_status(operation, create_cp_query)
 
 
 class TeradataComputeClusterDecommissionOperator(_TeradataComputeClusterOperator):
@@ -444,10 +442,9 @@ class TeradataComputeClusterResumeOperator(_TeradataComputeClusterOperator):
             if self.compute_group_name:
                 cp_resume_query = f"{cp_resume_query} IN COMPUTE GROUP {self.compute_group_name}"
             return self._handle_cc_status(Constants.CC_RESUME_OPR, cp_resume_query)
-        else:
-            self.log.info(
-                "Compute Cluster %s already %s", self.compute_profile_name, Constants.CC_RESUME_DB_STATUS
-            )
+        self.log.info(
+            "Compute Cluster %s already %s", self.compute_profile_name, Constants.CC_RESUME_DB_STATUS
+        )
 
 
 class TeradataComputeClusterSuspendOperator(_TeradataComputeClusterOperator):
@@ -516,7 +513,6 @@ class TeradataComputeClusterSuspendOperator(_TeradataComputeClusterOperator):
             if self.compute_group_name:
                 sql = f"{sql} IN COMPUTE GROUP {self.compute_group_name}"
             return self._handle_cc_status(Constants.CC_SUSPEND_OPR, sql)
-        else:
-            self.log.info(
-                "Compute Cluster %s already %s", self.compute_profile_name, Constants.CC_SUSPEND_DB_STATUS
-            )
+        self.log.info(
+            "Compute Cluster %s already %s", self.compute_profile_name, Constants.CC_SUSPEND_DB_STATUS
+        )
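
The six hunks above are a single style cleanup: branches that follow a return lose their redundant else/elif nesting, and the typing cast target is quoted (cast("str", ...)). As a hedged, self-contained sketch of the resulting control flow, here is the _compute_cluster_execute_complete logic from the first hunk rewritten as a free function; the self parameter, class context and trigger details are dropped so the snippet runs on its own.

    from typing import Any

    from airflow.exceptions import AirflowException  # requires apache-airflow to be installed


    def compute_cluster_execute_complete(event: dict[str, Any]) -> str | None:
        # Early-return style used in 3.0.3: no elif/else after a branch that returned.
        if event["status"] == "success":
            return event["message"]
        if event["status"] == "error":
            raise AirflowException(event["message"])
        return None

These edits should not change runtime behaviour; they only flatten control flow, which is consistent with the file shrinking from 22046 to 21934 bytes in the RECORD listing below.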

apache_airflow_providers_teradata-3.0.2rc1.dist-info/METADATA → apache_airflow_providers_teradata-3.0.3.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-teradata
-Version: 3.0.2rc1
+Version: 3.0.3
 Summary: Provider package apache-airflow-providers-teradata for Apache Airflow
 Keywords: airflow-provider,teradata,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,18 +20,18 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.9.0rc0
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
+Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
 Requires-Dist: teradatasqlalchemy>=17.20.0.0
 Requires-Dist: teradatasql>=17.20.0.28
 Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
 Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.0.2/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.0.2
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.0.3/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.0.3
+Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
-Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: amazon
 Provides-Extra: microsoft-azure
@@ -61,7 +61,7 @@ Provides-Extra: microsoft-azure
 
 Package ``apache-airflow-providers-teradata``
 
-Release: ``3.0.2``
+Release: ``3.0.3``
 
 
 `Teradata <https://www.teradata.com/>`__
@@ -74,7 +74,7 @@ This is a provider package for ``teradata`` provider. All classes for this provi
 are in ``airflow.providers.teradata`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.0.2/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.0.3/>`_.
 
 Installation
 ------------
@@ -101,7 +101,7 @@ Cross provider package dependencies
 -----------------------------------
 
 Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
+You need to install the specified providers in order to use them.
 
 You can install such cross-provider dependencies when installing from PyPI. For example:
 
@@ -119,5 +119,5 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.0.2/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.0.3/changelog.html>`_.
 

apache_airflow_providers_teradata-3.0.2rc1.dist-info/RECORD → apache_airflow_providers_teradata-3.0.3.dist-info/RECORD
@@ -1,11 +1,11 @@
 airflow/providers/teradata/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/teradata/__init__.py,sha256=gzMTyxJQu-dl76_-5q_Gjyri1OIjLQK26NoYVPVSkrI,1495
-airflow/providers/teradata/get_provider_info.py,sha256=k_tqlH-LC1qQAEtktLTV7NyE3_2W7r8qWDTYTx9Tuhg,4503
+airflow/providers/teradata/__init__.py,sha256=cLyRqDHkmu9WVeYe2GMCU6ieI_m_GQZgPFWREQ7J0lI,1495
+airflow/providers/teradata/get_provider_info.py,sha256=lkm3HMP6Yq8hPF0BeAD9uIA2IgJovTMFDR9NPT8pHyU,3718
 airflow/providers/teradata/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/teradata/hooks/teradata.py,sha256=v7jCwOTBlAMtnR5cAakkNn0WSYGELcEACmV8I7yxTW0,10851
 airflow/providers/teradata/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
 airflow/providers/teradata/operators/teradata.py,sha256=chuojW__qKu2RdWQpKSF3QN0rbTWB9TsAece8cKj0AU,3846
-airflow/providers/teradata/operators/teradata_compute_cluster.py,sha256=CpjhWvFcBn31ysAR1HF1kTDF4Sy4V8feIdZyMpy0FfA,22046
+airflow/providers/teradata/operators/teradata_compute_cluster.py,sha256=4c_EGQYcwuuXO62u_rwmyyXJ9Ph2rjYPCdjhV_lxJMk,21934
 airflow/providers/teradata/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/teradata/transfers/azure_blob_to_teradata.py,sha256=Kcm97lJXAkNSdQf9u3yYY9iq9JtjB_Sr96nHqJBLNlQ,5652
 airflow/providers/teradata/transfers/s3_to_teradata.py,sha256=kWqTg9Z2Sl16BWdJWO3OXrnZveVBrj5byVuZtggfBbY,5674
@@ -14,7 +14,7 @@ airflow/providers/teradata/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL
 airflow/providers/teradata/triggers/teradata_compute_cluster.py,sha256=hjMTnOpqlbByTtmNdJ9usK7hilEAz4tFXpJoENgFhyo,6987
 airflow/providers/teradata/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
 airflow/providers/teradata/utils/constants.py,sha256=ro1FVNsAakal8_uX27aN0DTVO0T9FG4fv9HzBIY2I-w,2253
-apache_airflow_providers_teradata-3.0.2rc1.dist-info/entry_points.txt,sha256=JbigXoUoKVSNWG-_-029FCCuehMOmAvuSnNGZ9Bz1Kc,104
-apache_airflow_providers_teradata-3.0.2rc1.dist-info/WHEEL,sha256=_2ozNFCLWc93bK4WKHCO-eDUENDlo-dgc9cU3qokYO4,82
-apache_airflow_providers_teradata-3.0.2rc1.dist-info/METADATA,sha256=w13mR0AWBT1N5BTxWP8OWo8WLcPiARntC0qnaDPLfN0,5796
-apache_airflow_providers_teradata-3.0.2rc1.dist-info/RECORD,,
+apache_airflow_providers_teradata-3.0.3.dist-info/entry_points.txt,sha256=JbigXoUoKVSNWG-_-029FCCuehMOmAvuSnNGZ9Bz1Kc,104
+apache_airflow_providers_teradata-3.0.3.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_teradata-3.0.3.dist-info/METADATA,sha256=O3nwKWI-jwf4ynft1VdkuE3KkR3XGrESCueUv4R0EiU,5783
+apache_airflow_providers_teradata-3.0.3.dist-info/RECORD,,

apache_airflow_providers_teradata-3.0.2rc1.dist-info/WHEEL → apache_airflow_providers_teradata-3.0.3.dist-info/WHEEL
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: flit 3.11.0
+Generator: flit 3.12.0
 Root-Is-Purelib: true
 Tag: py3-none-any