apache-airflow-providers-apache-hive 7.0.1rc1.tar.gz → 8.0.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/PKG-INFO +10 -9
  2. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/README.rst +4 -4
  3. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/__init__.py +1 -1
  4. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/get_provider_info.py +1 -0
  5. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/hooks/hive.py +30 -28
  6. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/operators/hive.py +0 -8
  7. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/operators/hive_stats.py +0 -10
  8. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/transfers/hive_to_mysql.py +1 -0
  9. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/transfers/hive_to_samba.py +1 -0
  10. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/transfers/mssql_to_hive.py +1 -0
  11. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/transfers/mysql_to_hive.py +1 -0
  12. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/transfers/s3_to_hive.py +1 -0
  13. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/transfers/vertica_to_hive.py +1 -0
  14. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/pyproject.toml +6 -5
  15. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/LICENSE +0 -0
  16. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/hooks/__init__.py +0 -0
  17. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/macros/__init__.py +0 -0
  18. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/macros/hive.py +0 -0
  19. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/operators/__init__.py +0 -0
  20. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/plugins/__init__.py +0 -0
  21. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/plugins/hive.py +0 -0
  22. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/sensors/__init__.py +0 -0
  23. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/sensors/hive_partition.py +0 -0
  24. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/sensors/metastore_partition.py +0 -0
  25. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/sensors/named_hive_partition.py +0 -0
  26. {apache_airflow_providers_apache_hive-7.0.1rc1 → apache_airflow_providers_apache_hive-8.0.0}/airflow/providers/apache/hive/transfers/__init__.py +0 -0
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-apache-hive
-Version: 7.0.1rc1
+Version: 8.0.0
 Summary: Provider package apache-airflow-providers-apache-hive for Apache Airflow
 Keywords: airflow-provider,apache.hive,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -19,9 +19,10 @@ Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow-providers-common-sql>=1.3.1.dev0
-Requires-Dist: apache-airflow>=2.6.0.dev0
+Requires-Dist: apache-airflow-providers-common-sql>=1.3.1
+Requires-Dist: apache-airflow>=2.6.0
 Requires-Dist: hmsclient>=0.1.0
 Requires-Dist: pandas>=1.2.5,<2.2
 Requires-Dist: pyhive[hive_pure_sasl]>=0.7.0
@@ -34,8 +35,8 @@ Requires-Dist: apache-airflow-providers-presto ; extra == "presto"
 Requires-Dist: apache-airflow-providers-samba ; extra == "samba"
 Requires-Dist: apache-airflow-providers-vertica ; extra == "vertica"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/7.0.1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/7.0.1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.0.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.0.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -92,7 +93,7 @@ Provides-Extra: vertica
 
 Package ``apache-airflow-providers-apache-hive``
 
-Release: ``7.0.1.rc1``
+Release: ``8.0.0``
 
 
 `Apache Hive <https://hive.apache.org/>`__
@@ -105,7 +106,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
 are in ``airflow.providers.apache.hive`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/7.0.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.0.0/>`_.
 
 Installation
 ------------
@@ -114,7 +115,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-apache-hive``
 
-The package supports the following python versions: 3.8,3.9,3.10,3.11
+The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
 
 Requirements
 ------------
@@ -156,4 +157,4 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/7.0.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.0.0/changelog.html>`_.
--- a/README.rst
+++ b/README.rst
@@ -42,7 +42,7 @@
 
 Package ``apache-airflow-providers-apache-hive``
 
-Release: ``7.0.1.rc1``
+Release: ``8.0.0``
 
 
 `Apache Hive <https://hive.apache.org/>`__
@@ -55,7 +55,7 @@ This is a provider package for ``apache.hive`` provider. All classes for this pr
 are in ``airflow.providers.apache.hive`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/7.0.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.0.0/>`_.
 
 Installation
 ------------
@@ -64,7 +64,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-apache-hive``
 
-The package supports the following python versions: 3.8,3.9,3.10,3.11
+The package supports the following python versions: 3.8,3.9,3.10,3.11,3.12
 
 Requirements
 ------------
@@ -106,4 +106,4 @@ Dependent package
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/7.0.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.0.0/changelog.html>`_.
--- a/airflow/providers/apache/hive/__init__.py
+++ b/airflow/providers/apache/hive/__init__.py
@@ -27,7 +27,7 @@ import packaging.version
 
 __all__ = ["__version__"]
 
-__version__ = "7.0.1"
+__version__ = "8.0.0"
 
 try:
     from airflow import __version__ as airflow_version
--- a/airflow/providers/apache/hive/get_provider_info.py
+++ b/airflow/providers/apache/hive/get_provider_info.py
@@ -30,6 +30,7 @@ def get_provider_info():
         "state": "ready",
         "source-date-epoch": 1709554960,
         "versions": [
+            "8.0.0",
             "7.0.1",
             "7.0.0",
             "6.4.2",
--- a/airflow/providers/apache/hive/hooks/hive.py
+++ b/airflow/providers/apache/hive/hooks/hive.py
@@ -23,12 +23,9 @@ import re
 import socket
 import subprocess
 import time
-import warnings
 from tempfile import NamedTemporaryFile, TemporaryDirectory
 from typing import TYPE_CHECKING, Any, Iterable, Mapping
 
-from airflow.exceptions import AirflowProviderDeprecationWarning
-
 if TYPE_CHECKING:
     import pandas as pd
 
@@ -116,6 +113,7 @@ class HiveCliHook(BaseHook):
         self.mapred_queue_priority = mapred_queue_priority
         self.mapred_job_name = mapred_job_name
         self.proxy_user = proxy_user
+        self.high_availability = self.conn.extra_dejson.get("high_availability", False)
 
     @classmethod
     def get_connection_form_widgets(cls) -> dict[str, Any]:
@@ -125,11 +123,12 @@ class HiveCliHook(BaseHook):
         from wtforms import BooleanField, StringField
 
         return {
-            "use_beeline": BooleanField(lazy_gettext("Use Beeline"), default=False),
+            "use_beeline": BooleanField(lazy_gettext("Use Beeline"), default=True),
             "proxy_user": StringField(lazy_gettext("Proxy User"), widget=BS3TextFieldWidget(), default=""),
             "principal": StringField(
                 lazy_gettext("Principal"), widget=BS3TextFieldWidget(), default="hive/_HOST@EXAMPLE.COM"
            ),
+            "high_availability": BooleanField(lazy_gettext("High Availability"), default=False),
         }
 
     @classmethod
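Two defaults change in the connection form above: ``use_beeline`` now defaults to True for newly created connections, and a ``high_availability`` toggle appears. At runtime the hook still reads both flags from the connection extras, so opting back into the legacy CLI stays possible. A minimal sketch, with a hypothetical connection id and host:

import json

from airflow.models.connection import Connection

# Hypothetical connection that opts back into the legacy "hive" CLI binary;
# HiveCliHook reads use_beeline from extra_dejson at runtime, so only new
# connections created through the form pick up the flipped default.
conn = Connection(
    conn_id="hive_cli_legacy",
    conn_type="hive_cli",
    host="hiveserver.example.com",
    port=10000,
    schema="default",
    extra=json.dumps({"use_beeline": False}),
)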
@@ -159,7 +158,12 @@ class HiveCliHook(BaseHook):
         if self.use_beeline:
             hive_bin = "beeline"
             self._validate_beeline_parameters(conn)
-            jdbc_url = f"jdbc:hive2://{conn.host}:{conn.port}/{conn.schema}"
+            if self.high_availability:
+                jdbc_url = f"jdbc:hive2://{conn.host}/{conn.schema}"
+                self.log.info("High Availability set, setting JDBC url as %s", jdbc_url)
+            else:
+                jdbc_url = f"jdbc:hive2://{conn.host}:{conn.port}/{conn.schema}"
+                self.log.info("High Availability not set, setting JDBC url as %s", jdbc_url)
             if conf.get("core", "security") == "kerberos":
                 template = conn.extra_dejson.get("principal", "hive/_HOST@EXAMPLE.COM")
                 if "_HOST" in template:
@@ -170,6 +174,10 @@ class HiveCliHook(BaseHook):
                 if ";" in proxy_user:
                     raise RuntimeError("The proxy_user should not contain the ';' character")
                 jdbc_url += f";principal={template};{proxy_user}"
+                if self.high_availability:
+                    if not jdbc_url.endswith(";"):
+                        jdbc_url += ";"
+                    jdbc_url += "serviceDiscoveryMode=zooKeeper;ssl=true;zooKeeperNamespace=hiveserver2"
             elif self.auth:
                 jdbc_url += ";auth=" + self.auth
 
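Combined with the new ``high_availability`` attribute, these hunks drop the port from the JDBC URL in HA mode and, under Kerberos, append the ZooKeeper service-discovery suffix, so the connection host is expected to hold the ZooKeeper quorum. A minimal configuration sketch, with hypothetical hostnames:

import json

from airflow.models.connection import Connection

# Hypothetical HA connection. In HA mode host validation is skipped, so the
# ':' characters of a ZooKeeper quorum are allowed; with Kerberos security
# enabled the hook appends
#   ;serviceDiscoveryMode=zooKeeper;ssl=true;zooKeeperNamespace=hiveserver2
conn = Connection(
    conn_id="hive_cli_ha",
    conn_type="hive_cli",
    host="zk1.example.com:2181,zk2.example.com:2181,zk3.example.com:2181",
    schema="default",
    extra=json.dumps({"use_beeline": True, "high_availability": True}),
)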
@@ -186,18 +194,28 @@ class HiveCliHook(BaseHook):
         return [hive_bin, *cmd_extra, *hive_params_list]
 
     def _validate_beeline_parameters(self, conn):
-        if ":" in conn.host or "/" in conn.host or ";" in conn.host:
-            raise Exception(
+        if self.high_availability:
+            if ";" in conn.schema:
+                raise ValueError(
+                    f"The schema used in beeline command ({conn.schema}) should not contain ';' character)"
+                )
+            return
+        elif ":" in conn.host or "/" in conn.host or ";" in conn.host:
+            raise ValueError(
                 f"The host used in beeline command ({conn.host}) should not contain ':/;' characters)"
             )
         try:
             int_port = int(conn.port)
             if not 0 < int_port <= 65535:
-                raise Exception(f"The port used in beeline command ({conn.port}) should be in range 0-65535)")
+                raise ValueError(
+                    f"The port used in beeline command ({conn.port}) should be in range 0-65535)"
+                )
         except (ValueError, TypeError) as e:
-            raise Exception(f"The port used in beeline command ({conn.port}) should be a valid integer: {e})")
+            raise ValueError(
+                f"The port used in beeline command ({conn.port}) should be a valid integer: {e})"
+            )
         if ";" in conn.schema:
-            raise Exception(
+            raise ValueError(
                 f"The schema used in beeline command ({conn.schema}) should not contain ';' character)"
             )
 
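One behavioral nuance in the hunk above: the validation errors changed type from bare ``Exception`` to ``ValueError``, and in HA mode only the schema is validated (host validation is skipped so a quorum string with ':' characters passes). A hedged sketch of catching the new type, assuming the connection id exists:

from airflow.providers.apache.hive.hooks.hive import HiveCliHook

hook = HiveCliHook(hive_cli_conn_id="hive_cli_default")
try:
    hook.run_cli("SHOW DATABASES")
except ValueError as err:
    # A bad host, port, or schema now surfaces as ValueError; 7.x raised a
    # bare Exception here, so existing `except Exception` handlers still
    # work, but narrower handlers are now possible.
    print(f"Invalid beeline parameters: {err}")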
@@ -518,11 +536,13 @@ class HiveMetastoreHook(BaseHook):
     def __getstate__(self) -> dict[str, Any]:
         # This is for pickling to work despite the thrift hive client not
         # being picklable
+        """Serialize object and omit non-serializable attributes."""
         state = dict(self.__dict__)
         del state["metastore"]
         return state
 
     def __setstate__(self, d: dict[str, Any]) -> None:
+        """Deserialize object and restore non-serializable attributes."""
         self.__dict__.update(d)
         self.__dict__["metastore"] = self.get_metastore_client()
 
@@ -538,15 +558,6 @@ class HiveMetastoreHook(BaseHook):
         if not host:
             raise AirflowException("Failed to locate the valid server.")
 
-        if "authMechanism" in conn.extra_dejson:
-            warnings.warn(
-                "The 'authMechanism' option is deprecated. Please use 'auth_mechanism'.",
-                AirflowProviderDeprecationWarning,
-                stacklevel=2,
-            )
-            conn.extra_dejson["auth_mechanism"] = conn.extra_dejson["authMechanism"]
-            del conn.extra_dejson["authMechanism"]
-
         auth_mechanism = conn.extra_dejson.get("auth_mechanism", "NOSASL")
 
         if conf.get("core", "security") == "kerberos":
@@ -849,15 +860,6 @@ class HiveServer2Hook(DbApiHook):
 
         db = self.get_connection(self.hiveserver2_conn_id)  # type: ignore
 
-        if "authMechanism" in db.extra_dejson:
-            warnings.warn(
-                "The 'authMechanism' option is deprecated. Please use 'auth_mechanism'.",
-                AirflowProviderDeprecationWarning,
-                stacklevel=2,
-            )
-            db.extra_dejson["auth_mechanism"] = db.extra_dejson["authMechanism"]
-            del db.extra_dejson["authMechanism"]
-
         auth_mechanism = db.extra_dejson.get("auth_mechanism", "NONE")
         if auth_mechanism == "NONE" and db.login is None:
             # we need to give a username
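Both hunks above remove the last ``authMechanism`` fallback: 7.x rewrote the camel-case key into ``auth_mechanism`` with a deprecation warning, while 8.0.0 simply ignores it. Connections still using the old spelling need their extras updated; a sketch with a hypothetical connection:

import json

from airflow.models.connection import Connection

# 7.x rewrote {"authMechanism": ...} into auth_mechanism and warned;
# 8.0.0 drops the shim, so only the snake_case key is consulted.
conn = Connection(
    conn_id="hiveserver2_default",
    conn_type="hiveserver2",
    host="hiveserver.example.com",
    port=10000,
    schema="default",
    extra=json.dumps({"auth_mechanism": "NOSASL"}),  # was {"authMechanism": "NOSASL"}
)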
--- a/airflow/providers/apache/hive/operators/hive.py
+++ b/airflow/providers/apache/hive/operators/hive.py
@@ -22,10 +22,7 @@ import re
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Sequence
 
-from deprecated.classic import deprecated
-
 from airflow.configuration import conf
-from airflow.exceptions import AirflowProviderDeprecationWarning
 from airflow.models import BaseOperator
 from airflow.providers.apache.hive.hooks.hive import HiveCliHook
 from airflow.utils import operator_helpers
@@ -131,11 +128,6 @@ class HiveOperator(BaseOperator):
             proxy_user=self.proxy_user,
         )
 
-    @deprecated(reason="use `hook` property instead.", category=AirflowProviderDeprecationWarning)
-    def get_hook(self) -> HiveCliHook:
-        """Get Hive cli hook."""
-        return self.hook
-
     def prepare_template(self) -> None:
         if self.hiveconf_jinja_translate:
             self.hql = re.sub(r"(\$\{(hiveconf:)?([ a-zA-Z0-9_]*)\})", r"{{ \g<3> }}", self.hql)
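With the deprecated ``get_hook()`` removed, the cached ``hook`` property is the only accessor for the operator's ``HiveCliHook``, as the old deprecation message already advised:

from airflow.providers.apache.hive.operators.hive import HiveOperator

op = HiveOperator(task_id="run_hql", hql="SHOW TABLES")
# Resolving the hook looks up the referenced connection (hive_cli_default
# unless overridden), which must exist in the metadata DB.
cli_hook = op.hook          # 8.0.0
# cli_hook = op.get_hook()  # 7.x spelling, removed in 8.0.0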
--- a/airflow/providers/apache/hive/operators/hive_stats.py
+++ b/airflow/providers/apache/hive/operators/hive_stats.py
@@ -18,7 +18,6 @@
 from __future__ import annotations
 
 import json
-import warnings
 from typing import TYPE_CHECKING, Any, Callable, Sequence
 
 from airflow.exceptions import AirflowException
@@ -80,15 +79,6 @@ class HiveStatsCollectionOperator(BaseOperator):
         dttm: str = "{{ logical_date.isoformat() }}",
         **kwargs: Any,
     ) -> None:
-        if "col_blacklist" in kwargs:
-            warnings.warn(
-                f"col_blacklist kwarg passed to {self.__class__.__name__} "
-                f"(task_id: {kwargs.get('task_id')}) is deprecated, "
-                f"please rename it to excluded_columns instead",
-                category=FutureWarning,
-                stacklevel=2,
-            )
-            excluded_columns = kwargs.pop("col_blacklist")
         super().__init__(**kwargs)
         self.table = table
         self.partition = partition
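Since the shim above is gone, a leftover ``col_blacklist`` kwarg now reaches ``BaseOperator`` and is rejected as an invalid argument instead of being silently renamed. DAGs should pass ``excluded_columns`` directly; a minimal sketch with hypothetical table and column names:

from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator

stats = HiveStatsCollectionOperator(
    task_id="collect_stats",
    table="my_db.my_table",            # hypothetical table
    partition={"ds": "{{ ds }}"},
    excluded_columns=["raw_payload"],  # was col_blacklist=... before 8.0.0
)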
--- a/airflow/providers/apache/hive/transfers/hive_to_mysql.py
+++ b/airflow/providers/apache/hive/transfers/hive_to_mysql.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains an operator to move data from Hive to MySQL."""
+
 from __future__ import annotations
 
 from tempfile import NamedTemporaryFile
--- a/airflow/providers/apache/hive/transfers/hive_to_samba.py
+++ b/airflow/providers/apache/hive/transfers/hive_to_samba.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains an operator to move data from Hive to Samba."""
+
 from __future__ import annotations
 
 from tempfile import NamedTemporaryFile
--- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains an operator to move data from MSSQL to Hive."""
+
 from __future__ import annotations
 
 import csv
--- a/airflow/providers/apache/hive/transfers/mysql_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/mysql_to_hive.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains an operator to move data from MySQL to Hive."""
+
 from __future__ import annotations
 
 import csv
--- a/airflow/providers/apache/hive/transfers/s3_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/s3_to_hive.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains an operator to move data from an S3 bucket to Hive."""
+
 from __future__ import annotations
 
 import bz2
--- a/airflow/providers/apache/hive/transfers/vertica_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/vertica_to_hive.py
@@ -16,6 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """This module contains an operator to move data from Vertica to Hive."""
+
 from __future__ import annotations
 
 import csv
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -28,7 +28,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-apache-hive"
-version = "7.0.1.rc1"
+version = "8.0.0"
 description = "Provider package apache-airflow-providers-apache-hive for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -51,12 +51,13 @@ classifiers = [
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
     "Topic :: System :: Monitoring",
 ]
 requires-python = "~=3.8"
 dependencies = [
-    "apache-airflow-providers-common-sql>=1.3.1.dev0",
-    "apache-airflow>=2.6.0.dev0",
+    "apache-airflow-providers-common-sql>=1.3.1",
+    "apache-airflow>=2.6.0",
     "hmsclient>=0.1.0",
     "pandas>=1.2.5,<2.2",
     "pyhive[hive_pure_sasl]>=0.7.0",
@@ -64,8 +65,8 @@ dependencies = [
 ]
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/7.0.1"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/7.0.1/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.0.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-apache-hive/8.0.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"