apache-airflow-providers-elasticsearch 6.0.0rc2__py3-none-any.whl → 6.1.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only. In summary, the release bumps the provider from 6.0.0rc2 to 6.1.0, adds a PEP 249-style SQL cursor to the Elasticsearch hook, and adds optional direct writing of task logs to Elasticsearch.

--- airflow/providers/elasticsearch/LICENSE
+++ airflow/providers/elasticsearch/LICENSE
@@ -199,55 +199,3 @@ distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-
-============================================================================
-APACHE AIRFLOW SUBCOMPONENTS:
-
-The Apache Airflow project contains subcomponents with separate copyright
-notices and license terms. Your use of the source code for the these
-subcomponents is subject to the terms and conditions of the following
-licenses.
-
-
-========================================================================
-Third party Apache 2.0 licenses
-========================================================================
-
-The following components are provided under the Apache 2.0 License.
-See project link for details. The text of each license is also included
-at 3rd-party-licenses/LICENSE-[project].txt.
-
-(ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
-(ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
-(ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
-(ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
-
-========================================================================
-MIT licenses
-========================================================================
-
-The following components are provided under the MIT License. See project link for details.
-The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
-
-(MIT License) jquery v3.5.1 (https://jquery.org/license/)
-(MIT License) dagre-d3 v0.6.4 (https://github.com/cpettitt/dagre-d3)
-(MIT License) bootstrap v3.4.1 (https://github.com/twbs/bootstrap/)
-(MIT License) d3-tip v0.9.1 (https://github.com/Caged/d3-tip)
-(MIT License) dataTables v1.10.25 (https://datatables.net)
-(MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/)
-(MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock)
-(MIT License) MomentJS v2.24.0 (http://momentjs.com/)
-(MIT License) eonasdan-bootstrap-datetimepicker v4.17.49 (https://github.com/eonasdan/bootstrap-datetimepicker/)
-
-========================================================================
-BSD 3-Clause licenses
-========================================================================
-The following components are provided under the BSD 3-Clause license. See project links for details.
-The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
-
-(BSD 3 License) d3 v5.16.0 (https://d3js.org)
-(BSD 3 License) d3-shape v2.1.0 (https://github.com/d3/d3-shape)
-(BSD 3 License) cgroupspy 0.2.1 (https://github.com/cloudsigma/cgroupspy)
-
-========================================================================
-See 3rd-party-licenses/LICENSES-ui.txt for packages used in `/airflow/www`

--- airflow/providers/elasticsearch/__init__.py
+++ airflow/providers/elasticsearch/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "6.0.0"
+__version__ = "6.1.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.9.0"

--- airflow/providers/elasticsearch/get_provider_info.py
+++ airflow/providers/elasticsearch/get_provider_info.py
@@ -15,8 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-# OVERWRITTEN WHEN PREPARING PACKAGES.
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
 #
 # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
 # `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -28,8 +27,9 @@ def get_provider_info():
         "name": "Elasticsearch",
         "description": "`Elasticsearch <https://www.elastic.co/elasticsearch>`__\n",
         "state": "ready",
-        "source-date-epoch": 1734533734,
+        "source-date-epoch": 1739959618,
         "versions": [
+            "6.1.0",
             "6.0.0",
             "5.5.3",
             "5.5.2",
@@ -75,16 +75,11 @@ def get_provider_info():
             "1.0.1",
             "1.0.0",
         ],
-        "dependencies": [
-            "apache-airflow>=2.9.0",
-            "apache-airflow-providers-common-sql>=1.20.0",
-            "elasticsearch>=8.10,<9",
-        ],
         "integrations": [
             {
                 "integration-name": "Elasticsearch",
                 "external-doc-url": "https://www.elastic.co/elasticsearch",
-                "logo": "/integration-logos/elasticsearch/Elasticsearch.png",
+                "logo": "/docs/integration-logos/Elasticsearch.png",
                 "tags": ["software"],
             }
         ],
@@ -141,6 +136,20 @@ def get_provider_info():
                         "example": None,
                         "default": "False",
                     },
+                    "write_to_es": {
+                        "description": "Write the task logs to the ElasticSearch\n",
+                        "version_added": "5.5.4",
+                        "type": "string",
+                        "example": None,
+                        "default": "False",
+                    },
+                    "target_index": {
+                        "description": "Name of the index to write to, when enabling writing the task logs to the ElasticSearch\n",
+                        "version_added": "5.5.4",
+                        "type": "string",
+                        "example": None,
+                        "default": "airflow-logs",
+                    },
                     "json_format": {
                         "description": "Instead of the default log formatter, write the log lines as JSON\n",
                         "version_added": "1.10.4",
@@ -205,4 +214,9 @@ def get_provider_info():
                 },
             },
         },
+        "dependencies": [
+            "apache-airflow>=2.9.0",
+            "apache-airflow-providers-common-sql>=1.20.0",
+            "elasticsearch>=8.10,<9",
+        ],
     }
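
The two new options above back the direct log-shipping feature added to the task handler further down. A minimal sketch of enabling them, assuming the options live in the `[elasticsearch]` config section like the handler's existing options and using Airflow's standard `AIRFLOW__{SECTION}__{KEY}` environment variables; the index name is illustrative:

```python
import os

# Direct writes require JSON-formatted log lines (see the handler docstring below).
os.environ["AIRFLOW__ELASTICSEARCH__JSON_FORMAT"] = "True"
# New in this release: ship task logs straight to Elasticsearch on handler close...
os.environ["AIRFLOW__ELASTICSEARCH__WRITE_TO_ES"] = "True"
# ...into this index (the provider default is "airflow-logs").
os.environ["AIRFLOW__ELASTICSEARCH__TARGET_INDEX"] = "my-airflow-task-logs"
```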

--- airflow/providers/elasticsearch/hooks/elasticsearch.py
+++ airflow/providers/elasticsearch/hooks/elasticsearch.py
@@ -17,14 +17,14 @@
 # under the License.
 from __future__ import annotations
 
+from collections.abc import Iterable, Mapping
 from functools import cached_property
 from typing import TYPE_CHECKING, Any
 from urllib import parse
 
-from elasticsearch import Elasticsearch
-
 from airflow.hooks.base import BaseHook
 from airflow.providers.common.sql.hooks.sql import DbApiHook
+from elasticsearch import Elasticsearch
 
 if TYPE_CHECKING:
     from elastic_transport import ObjectApiResponse
@@ -43,6 +43,73 @@ def connect(
     return ESConnection(host, port, user, password, scheme, **kwargs)
 
 
+class ElasticsearchSQLCursor:
+    """A PEP 249-like Cursor class for Elasticsearch SQL API."""
+
+    def __init__(self, es: Elasticsearch, **kwargs):
+        self.es = es
+        self.body = {
+            "fetch_size": kwargs.get("fetch_size", 1000),
+            "field_multi_value_leniency": kwargs.get("field_multi_value_leniency", False),
+        }
+        self._response: ObjectApiResponse | None = None
+
+    @property
+    def response(self) -> ObjectApiResponse:
+        return self._response or {}  # type: ignore
+
+    @response.setter
+    def response(self, value):
+        self._response = value
+
+    @property
+    def cursor(self):
+        return self.response.get("cursor")
+
+    @property
+    def rows(self):
+        return self.response.get("rows", [])
+
+    @property
+    def rowcount(self) -> int:
+        return len(self.rows)
+
+    @property
+    def description(self) -> list[tuple]:
+        return [(column["name"], column["type"]) for column in self.response.get("columns", [])]
+
+    def execute(
+        self, statement: str, params: Iterable | Mapping[str, Any] | None = None
+    ) -> ObjectApiResponse:
+        self.body["query"] = statement
+        if params:
+            self.body["params"] = params
+        self.response = self.es.sql.query(body=self.body)
+        if self.cursor:
+            self.body["cursor"] = self.cursor
+        else:
+            self.body.pop("cursor", None)
+        return self.response
+
+    def fetchone(self):
+        if self.rows:
+            return self.rows[0]
+        return None
+
+    def fetchmany(self, size: int | None = None):
+        raise NotImplementedError()
+
+    def fetchall(self):
+        results = self.rows
+        while self.cursor:
+            self.execute(statement=self.body["query"])
+            results.extend(self.rows)
+        return results
+
+    def close(self):
+        self._response = None
+
+
 class ESConnection:
     """wrapper class for elasticsearch.Elasticsearch."""
 
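
The new `ElasticsearchSQLCursor` wraps the Elasticsearch SQL API in a PEP 249-like interface, carrying the server-side cursor token from one `sql.query` call to the next. A minimal sketch of driving it directly, assuming a reachable cluster at localhost:9200; the index and column names are made up:

```python
from elasticsearch import Elasticsearch

from airflow.providers.elasticsearch.hooks.elasticsearch import ElasticsearchSQLCursor

es = Elasticsearch("http://localhost:9200")  # assumed local cluster
cursor = ElasticsearchSQLCursor(es, fetch_size=500)
cursor.execute("SELECT order_id, total FROM orders LIMIT 10")  # hypothetical index
print(cursor.description)  # [(column_name, column_type), ...] from the response
print(cursor.fetchall())   # re-issues the query with the cursor token until drained
cursor.close()
```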
@@ -68,9 +135,19 @@ class ESConnection:
         else:
             self.es = Elasticsearch(self.url, **self.kwargs)
 
-    def execute_sql(self, query: str) -> ObjectApiResponse:
-        sql_query = {"query": query}
-        return self.es.sql.query(body=sql_query)
+    def cursor(self) -> ElasticsearchSQLCursor:
+        return ElasticsearchSQLCursor(self.es, **self.kwargs)
+
+    def close(self):
+        self.es.close()
+
+    def commit(self):
+        pass
+
+    def execute_sql(
+        self, query: str, params: Iterable | Mapping[str, Any] | None = None
+    ) -> ObjectApiResponse:
+        return self.cursor().execute(query, params)
 
 
 class ElasticsearchSQLHook(DbApiHook):
@@ -85,13 +162,13 @@ class ElasticsearchSQLHook(DbApiHook):
 
     conn_name_attr = "elasticsearch_conn_id"
     default_conn_name = "elasticsearch_default"
+    connector = ESConnection
     conn_type = "elasticsearch"
     hook_name = "Elasticsearch"
 
     def __init__(self, schema: str = "http", connection: AirflowConnection | None = None, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.schema = schema
-        self._connection = connection
 
     def get_conn(self) -> ESConnection:
         """Return an elasticsearch connection object."""
@@ -105,11 +182,10 @@ class ElasticsearchSQLHook(DbApiHook):
             "scheme": conn.schema or "http",
         }
 
-        if conn.extra_dejson.get("http_compress", False):
-            conn_args["http_compress"] = bool(["http_compress"])
+        conn_args.update(conn.extra_dejson)
 
-        if conn.extra_dejson.get("timeout", False):
-            conn_args["timeout"] = conn.extra_dejson["timeout"]
+        if conn_args.get("http_compress", False):
+            conn_args["http_compress"] = bool(conn_args["http_compress"])
 
         return connect(**conn_args)
 
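
Note the behavioral change in `get_conn` above: rather than cherry-picking `http_compress` and `timeout`, every key in the connection's Extra JSON is now merged into `conn_args` and forwarded to the client (this also fixes the old `bool(["http_compress"])` call, which evaluated to `True` whenever the key existed). A sketch of a connection exercising the passthrough; the extras shown are illustrative:

```python
from airflow.models.connection import Connection

conn = Connection(
    conn_id="elasticsearch_default",
    conn_type="elasticsearch",
    host="localhost",
    port=9200,
    extra='{"http_compress": 1, "request_timeout": 30}',
)
# get_conn() now runs conn_args.update(conn.extra_dejson), so request_timeout
# reaches elasticsearch.Elasticsearch unchanged, and http_compress is coerced
# to a proper bool before connect(**conn_args) is called.
```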

--- airflow/providers/elasticsearch/log/es_task_handler.py
+++ airflow/providers/elasticsearch/log/es_task_handler.py
@@ -19,7 +19,11 @@ from __future__ import annotations
 
 import contextlib
 import inspect
+import json
 import logging
+import os
+import pathlib
+import shutil
 import sys
 import time
 from collections import defaultdict
@@ -27,11 +31,10 @@ from operator import attrgetter
 from typing import TYPE_CHECKING, Any, Callable, Literal
 from urllib.parse import quote, urlparse
 
-# Using `from elasticsearch import *` would break elasticsearch mocking used in unit test.
-import elasticsearch
 import pendulum
-from elasticsearch.exceptions import NotFoundError
 
+# Using `from elasticsearch import *` would break elasticsearch mocking used in unit test.
+import elasticsearch
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
 from airflow.models.dagrun import DagRun
@@ -43,6 +46,8 @@ from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import ExternalLoggingMixin, LoggingMixin
 from airflow.utils.module_loading import import_string
 from airflow.utils.session import create_session
+from elasticsearch import helpers
+from elasticsearch.exceptions import NotFoundError
 
 if TYPE_CHECKING:
     from datetime import datetime
@@ -106,10 +111,13 @@ class ElasticsearchTaskHandler(FileTaskHandler, ExternalLoggingMixin, LoggingMix
     """
    ElasticsearchTaskHandler is a python log handler that reads logs from Elasticsearch.
 
-    Note that Airflow does not handle the indexing of logs into Elasticsearch. Instead,
+    Note that Airflow by default does not handle the indexing of logs into Elasticsearch. Instead,
     Airflow flushes logs into local files. Additional software setup is required to index
     the logs into Elasticsearch, such as using Filebeat and Logstash.
 
+    Airflow can be configured to support directly writing logging to Elasticsearch. To enable this feature,
+    set `json_format` and `write_to_es` to `True`.
+
     To efficiently query and sort Elasticsearch results, this handler assumes each
     log message has a field `log_id` consists of ti primary keys:
     `log_id = {dag_id}-{task_id}-{logical_date}-{try_number}`
@@ -136,6 +144,8 @@ class ElasticsearchTaskHandler(FileTaskHandler, ExternalLoggingMixin, LoggingMix
         write_stdout: bool,
         json_format: bool,
         json_fields: str,
+        write_to_es: bool = False,
+        target_index: str = "airflow-logs",
         host_field: str = "host",
         offset_field: str = "offset",
         host: str = "http://localhost:9200",
@@ -166,6 +176,11 @@ class ElasticsearchTaskHandler(FileTaskHandler, ExternalLoggingMixin, LoggingMix
         self.index_patterns = index_patterns
         self.index_patterns_callable = index_patterns_callable
         self.context_set = False
+        self.write_to_es = write_to_es
+        self.target_index = target_index
+        self.delete_local_copy = kwargs.get(
+            "delete_local_copy", conf.getboolean("logging", "delete_local_logs")
+        )
 
         self.formatter: logging.Formatter
         self.handler: logging.FileHandler | logging.StreamHandler  # type: ignore[assignment]
@@ -226,7 +241,8 @@ class ElasticsearchTaskHandler(FileTaskHandler, ExternalLoggingMixin, LoggingMix
         else:
             if TYPE_CHECKING:
                 assert dag is not None
-            data_interval = dag.get_run_data_interval(dag_run)
+            # TODO: Task-SDK: Where should this function be?
+            data_interval = dag.get_run_data_interval(dag_run)  # type: ignore[attr-defined]
 
         if self.json_format:
             data_interval_start = self._clean_date(data_interval[0])
@@ -428,9 +444,11 @@ class ElasticsearchTaskHandler(FileTaskHandler, ExternalLoggingMixin, LoggingMix
             extras={
                 "dag_id": str(ti.dag_id),
                 "task_id": str(ti.task_id),
-                date_key: self._clean_date(ti.logical_date)
-                if AIRFLOW_V_3_0_PLUS
-                else self._clean_date(ti.execution_date),
+                date_key: (
+                    self._clean_date(ti.logical_date)
+                    if AIRFLOW_V_3_0_PLUS
+                    else self._clean_date(ti.execution_date)
+                ),
                 "try_number": str(ti.try_number),
                 "log_id": self._render_log_id(ti, ti.try_number),
             },
@@ -480,6 +498,18 @@ class ElasticsearchTaskHandler(FileTaskHandler, ExternalLoggingMixin, LoggingMix
             self.handler.close()
             sys.stdout = sys.__stdout__
 
+        if self.write_to_es and not self.write_stdout:
+            full_path = self.handler.baseFilename  # type: ignore[union-attr]
+            log_relative_path = pathlib.Path(full_path).relative_to(self.local_base).as_posix()
+            local_loc = os.path.join(self.local_base, log_relative_path)
+            if os.path.exists(local_loc):
+                # read log and remove old logs to get just the latest additions
+                log = pathlib.Path(local_loc).read_text()
+                log_lines = self._parse_raw_log(log)
+                success = self._write_to_es(log_lines)
+                if success and self.delete_local_copy:
+                    shutil.rmtree(os.path.dirname(local_loc))
+
         super().close()
 
         self.closed = True
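
With the change above, `close()` now reads the local JSON log file back, bulk-indexes its lines (via the new helpers shown in the next hunk), and optionally deletes the local copy. When `delete_local_copy` is not passed in, the handler falls back to the core logging option, which can be inspected the same way the constructor does:

```python
from airflow.configuration import conf

# Fallback used by the handler when delete_local_copy is not passed explicitly;
# the local file is removed only after a successful bulk upload.
print(conf.getboolean("logging", "delete_local_logs"))
```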
@@ -599,6 +629,31 @@ class ElasticsearchTaskHandler(FileTaskHandler, ExternalLoggingMixin, LoggingMix
         callback: type[Hit] | Callable[..., Any] = getattr(doc_class, "from_es", doc_class)
         return callback(hit)
 
+    def _parse_raw_log(self, log: str) -> list[dict[str, Any]]:
+        logs = log.split("\n")
+        parsed_logs = []
+        for line in logs:
+            # Make sure line is not empty
+            if line.strip():
+                parsed_logs.append(json.loads(line))
+
+        return parsed_logs
+
+    def _write_to_es(self, log_lines: list[dict[str, Any]]) -> bool:
+        """
+        Write the log to ElasticSearch; return `True` or fails silently and return `False`.
+
+        :param log_lines: the log_lines to write to the ElasticSearch.
+        """
+        # Prepare the bulk request for Elasticsearch
+        bulk_actions = [{"_index": self.target_index, "_source": log} for log in log_lines]
+        try:
+            _ = helpers.bulk(self.client, bulk_actions)
+            return True
+        except Exception as e:
+            self.log.exception("Unable to insert logs into Elasticsearch. Reason: %s", str(e))
+            return False
+
 
 def getattr_nested(obj, item, default):
     """

--- apache_airflow_providers_elasticsearch-6.0.0rc2.dist-info/METADATA
+++ apache_airflow_providers_elasticsearch-6.1.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: apache-airflow-providers-elasticsearch
-Version: 6.0.0rc2
+Version: 6.1.0
 Summary: Provider package apache-airflow-providers-elasticsearch for Apache Airflow
 Keywords: airflow-provider,elasticsearch,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,35 +20,18 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow-providers-common-sql>=1.20.0rc0
-Requires-Dist: apache-airflow>=2.9.0rc0
+Requires-Dist: apache-airflow>=2.9.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
 Requires-Dist: elasticsearch>=8.10,<9
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-elasticsearch/6.0.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-elasticsearch/6.0.0
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-elasticsearch/6.1.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-elasticsearch/6.1.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://x.com/ApacheAirflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 
-.. Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements. See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership. The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License. You may obtain a copy of the License at
-
-.. http://www.apache.org/licenses/LICENSE-2.0
-
-.. Unless required by applicable law or agreed to in writing,
-   software distributed under the License is distributed on an
-   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-   KIND, either express or implied. See the License for the
-   specific language governing permissions and limitations
-   under the License.
-
 .. Licensed to the Apache Software Foundation (ASF) under one
    or more contributor license agreements. See the NOTICE file
    distributed with this work for additional information
@@ -66,8 +49,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
    specific language governing permissions and limitations
    under the License.
 
-.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-   OVERWRITTEN WHEN PREPARING PACKAGES.
+.. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
 
 .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
    `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -75,7 +57,7 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 
 Package ``apache-airflow-providers-elasticsearch``
 
-Release: ``6.0.0.rc2``
+Release: ``6.1.0``
 
 
 `Elasticsearch <https://www.elastic.co/elasticsearch>`__
@@ -88,7 +70,7 @@ This is a provider package for ``elasticsearch`` provider. All classes for this
 are in ``airflow.providers.elasticsearch`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-elasticsearch/6.0.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-elasticsearch/6.1.0/>`_.
 
 Installation
 ------------
@@ -130,4 +112,5 @@ Dependent package
 ============================================================================================================ ==============
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-elasticsearch/6.0.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-elasticsearch/6.1.0/changelog.html>`_.
+

--- /dev/null
+++ apache_airflow_providers_elasticsearch-6.1.0.dist-info/RECORD
@@ -0,0 +1,14 @@
+airflow/providers/elasticsearch/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+airflow/providers/elasticsearch/__init__.py,sha256=GVpn7piAsUSkiENwO4yklyQuFU5fN1SNPMoqQCV92CE,1500
+airflow/providers/elasticsearch/get_provider_info.py,sha256=UMTCgN-jybvty3C7QztzaerWDRkSgdgyzBk8n2k80C4,9322
+airflow/providers/elasticsearch/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
+airflow/providers/elasticsearch/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+airflow/providers/elasticsearch/hooks/elasticsearch.py,sha256=ac_IfhPVccHPhUWzGs9sWRqZC1hnYs-JTauQLdv-mYQ,7870
+airflow/providers/elasticsearch/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/elasticsearch/log/es_json_formatter.py,sha256=DwWPDJtZLr_6Mdae1-XVEgmE1XErFIanSzxWovs50ig,1796
+airflow/providers/elasticsearch/log/es_response.py,sha256=LdMBuVBTydaC42HooYSttAjTK-CpPA4r_KHl38msMnk,6046
+airflow/providers/elasticsearch/log/es_task_handler.py,sha256=JpYm8GGKUVaXKBgeQKdF7z09W_gicrRGzJG7j3KcmZ8,27406
+apache_airflow_providers_elasticsearch-6.1.0.dist-info/entry_points.txt,sha256=jpgAUVmTsdtWQ4nru2FJQKP9JBN4OPHK-ybfYc3_BOs,109
+apache_airflow_providers_elasticsearch-6.1.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+apache_airflow_providers_elasticsearch-6.1.0.dist-info/METADATA,sha256=M1lPsPnqqSuC8HAPhTqngujClY0-cElnGv7ruZhbwnk,5232
+apache_airflow_providers_elasticsearch-6.1.0.dist-info/RECORD,,

--- apache_airflow_providers_elasticsearch-6.0.0rc2.dist-info/RECORD
+++ /dev/null
@@ -1,14 +0,0 @@
-airflow/providers/elasticsearch/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
-airflow/providers/elasticsearch/__init__.py,sha256=fmylEM7BkJquAI8XVb8elGOtTIkzza6oQ9LEMpxCM6U,1500
-airflow/providers/elasticsearch/get_provider_info.py,sha256=SY9_0oTY1ciySePMvByw4d7wiacaJgjIeMd8mOoflic,8637
-airflow/providers/elasticsearch/version_compat.py,sha256=aHg90_DtgoSnQvILFICexMyNlHlALBdaeWqkX3dFDug,1605
-airflow/providers/elasticsearch/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/elasticsearch/hooks/elasticsearch.py,sha256=-J3X2BHHncI1iV8nZ959057-kPR9ecgEw9B-QqNUupY,5812
-airflow/providers/elasticsearch/log/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/elasticsearch/log/es_json_formatter.py,sha256=DwWPDJtZLr_6Mdae1-XVEgmE1XErFIanSzxWovs50ig,1796
-airflow/providers/elasticsearch/log/es_response.py,sha256=LdMBuVBTydaC42HooYSttAjTK-CpPA4r_KHl38msMnk,6046
-airflow/providers/elasticsearch/log/es_task_handler.py,sha256=kLXTrkwcDN3sOFaBLDxLGcyKegnSNG5wvA9H9RQDsd4,25054
-apache_airflow_providers_elasticsearch-6.0.0rc2.dist-info/entry_points.txt,sha256=jpgAUVmTsdtWQ4nru2FJQKP9JBN4OPHK-ybfYc3_BOs,109
-apache_airflow_providers_elasticsearch-6.0.0rc2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
-apache_airflow_providers_elasticsearch-6.0.0rc2.dist-info/METADATA,sha256=HrUFsB-FmwoQzg9rUxx-v7l9aqTCx07L3eZa0qB5PRI,6070
-apache_airflow_providers_elasticsearch-6.0.0rc2.dist-info/RECORD,,