recurvedata-lib 0.1.492__py2.py3-none-any.whl → 0.1.496__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of recurvedata-lib might be problematic.
- recurvedata/__version__.py +1 -1
- recurvedata/client/client.py +7 -94
- recurvedata/connectors/config_schema.py +20 -0
- recurvedata/connectors/connectors/wecom.py +66 -0
- recurvedata/connectors/dbapi.py +17 -9
- recurvedata/schedulers/airflow_db_process.py +25 -15
- {recurvedata_lib-0.1.492.dist-info → recurvedata_lib-0.1.496.dist-info}/METADATA +2 -1
- {recurvedata_lib-0.1.492.dist-info → recurvedata_lib-0.1.496.dist-info}/RECORD +10 -9
- {recurvedata_lib-0.1.492.dist-info → recurvedata_lib-0.1.496.dist-info}/WHEEL +0 -0
- {recurvedata_lib-0.1.492.dist-info → recurvedata_lib-0.1.496.dist-info}/entry_points.txt +0 -0
recurvedata/__version__.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.1.492"
+__version__ = "0.1.496"
recurvedata/client/client.py
CHANGED
@@ -1,8 +1,6 @@
 import json
 import logging
-import os
 import time
-from pathlib import Path
 from typing import Any, TypeVar, overload

 import httpx
@@ -26,90 +24,13 @@ class Client:
         config = AgentConfig.load()
         self.set_config(config)

-    @property
-    def is_offline_mode(self) -> bool:
-        """Check if offline mode is enabled via environment variable"""
-        return os.environ.get("RECURVE_OFFLINE_MODE", "").lower() in ("true", "1")
-
-    @property
-    def offline_data_path(self) -> Path:
-        """Get the offline data directory path"""
-        offline_path = os.environ.get("RECURVE_OFFLINE_DATA_PATH", "offline_data")
-        return Path(offline_path)
-
     def set_config(self, config: AgentConfig):
         self._config = config
-
-
-
-
-
-                headers={"User-Agent": f"RecurveLib/{__version__}"},
-            )
-        else:
-            self._client = None
-
-    def _resolve_offline_file_path(self, path: str, **kwargs) -> Path:
-        """Convert API path to local file path with parameterized support"""
-        # Remove leading /api/ prefix: /api/executor/connection -> executor/connection
-        if path.startswith("/api/"):
-            clean_path = path[5:]  # Remove "/api/" prefix
-        else:
-            clean_path = path.lstrip("/")
-
-        # Extract parameters from kwargs
-        params = kwargs.get("params", {})
-
-        # CORE OPERATOR APIs with parameter-based file structure:
-
-        # 1. get_connection() API - parameterized by project_id + connection_name
-        if clean_path == "executor/connection":
-            project_id = params.get("project_id", "0")
-            connection_name = params.get("name", "default")
-            return self.offline_data_path / "executor/connection" / str(project_id) / f"{connection_name}.json"
-
-        # 2. get_py_conn_configs() API - parameterized by project_id + project_connection_name
-        elif clean_path == "executor/python-conn-configs":
-            project_id = params.get("project_id", "0")
-            # Python configs use project_connection_name as the key (fallback to other param names for compatibility)
-            # Handle empty strings properly - treat them as None/missing
-            project_connection_name = (params.get("project_connection_name") or
-                                       params.get("project_conn_name") or
-                                       params.get("pyenv_name") or
-                                       "default")
-            return self.offline_data_path / "executor/python-conn-configs" / str(project_id) / f"{project_connection_name}.json"
-
-        # For any other APIs, raise error do not support offline mode
-        raise APIError(f"Offline mode: {path} is not supported")
-
-    def _read_offline_data(self, method: str, path: str, response_model_class: type[ResponseModelType] | None = None, **kwargs) -> Any:
-        """Read API response from local JSON file"""
-        file_path = self._resolve_offline_file_path(path, **kwargs)
-
-        logger.info(f"🔌 Offline mode: Reading from {file_path}")
-
-        try:
-            if not file_path.exists():
-                logger.error(f"Offline data file not found: {file_path}")
-                raise APIError(f"Offline mode: Required data file not found: {file_path}")
-
-            with open(file_path, 'r') as f:
-                resp_content = json.load(f)
-
-            # Handle response model validation (same logic as online mode)
-            if response_model_class is not None:
-                if "code" in resp_content:
-                    return response_model_class.model_validate(resp_content["data"])
-                return response_model_class.model_validate(resp_content)
-
-            return resp_content.get("data", resp_content)
-
-        except APIError:
-            raise  # Re-raise APIError as-is
-        except Exception as e:
-            logger.error(f"Error reading offline data from {file_path}: {e}")
-            raise APIError(f"Offline mode: Failed to read data file {file_path}: {e}")
-
+        self._client = httpx.Client(
+            base_url=config.server_url,
+            timeout=config.request_timeout,
+            headers={"User-Agent": f"RecurveLib/{__version__}"},
+        )

     @overload
     def request(self, method: str, path: str, response_model_class: None = None, retries: int = 3, **kwargs) -> Any:
@@ -135,11 +56,6 @@ class Client:
         retries: int = 1,
         **kwargs,
     ) -> Any:
-        # Route to offline mode if enabled
-        if self.is_offline_mode:
-            return self._read_offline_data(method, path, response_model_class, **kwargs)
-
-        # Original online mode logic
         self.prepare_header(kwargs)
         pre_err: httpx.HTTPStatusError | None = None
         for attempt in range(retries):
@@ -227,11 +143,8 @@ class Client:
         )

     def close(self):
-
-            self._client.close()
+        self._client.close()

     @property
     def base_url(self) -> str:
-
-            return "offline://localhost"
-        return str(self._client.base_url) if self._client else ""
+        return str(self._client.base_url)
recurvedata/connectors/config_schema.py
CHANGED

@@ -1570,6 +1570,26 @@ ALL_CONFIG_SCHEMA_DCT = {
         "group": ["destination"],
         "test_required": True,
     },
+    "recurvedata.connectors.connectors.wecom": {
+        "type": "wecom",
+        "ui_type": "WeCom",
+        "category": ["others"],
+        "config_schema": {
+            "type": "object",
+            "properties": {
+                "webhook_url": {"type": "string", "title": _l("WeCom Webhook URL")},
+            },
+            "order": [
+                "webhook_url",
+            ],
+            "required": [
+                "webhook_url",
+            ],
+        },
+        "enabled": True,
+        "group": ["integration"],
+        "test_required": True,
+    },
 }

 # auto generated finish
recurvedata/connectors/connectors/wecom.py
ADDED

@@ -0,0 +1,66 @@
+from urllib.parse import urlparse
+
+import requests
+
+from recurvedata.connectors._register import register_connector_class
+from recurvedata.connectors.base import RecurveConnectorBase
+from recurvedata.consts import ConnectorGroup
+from recurvedata.core.translation import _l
+
+CONNECTION_TYPE = "wecom"
+UI_CONNECTION_TYPE = "WeCom"
+
+
+@register_connector_class([CONNECTION_TYPE, UI_CONNECTION_TYPE])
+class WeCom(RecurveConnectorBase):
+    connection_type = CONNECTION_TYPE
+    ui_connection_type = UI_CONNECTION_TYPE
+    group = [ConnectorGroup.INTEGRATION]
+
+    config_schema = {
+        "type": "object",
+        "properties": {
+            "webhook_url": {"type": "string", "title": _l("WeCom Webhook URL")},
+        },
+        "order": [
+            "webhook_url",
+        ],
+        "required": [
+            "webhook_url",
+        ],
+    }
+
+    @property
+    def webhook_url(self):
+        return self.conf["webhook_url"]
+
+    @property
+    def wecom_conf(self):
+        if hasattr(self, "_wecom_conf"):
+            return self._wecom_conf
+        self.init_config()
+        return self._wecom_conf
+
+    def test_connection(self):
+        """
+        Test WeCom webhook connection.
+        Validate URL format and network connectivity without sending actual messages.
+        """
+        # Validate URL format
+        parsed_url = urlparse(self.webhook_url)
+        if not parsed_url.scheme or not parsed_url.netloc:
+            raise ValueError("Invalid webhook URL format")
+
+        if parsed_url.scheme != "https":
+            raise ValueError("WeCom webhook URL must use HTTPS")
+
+        # Verify WeCom domain
+        if "qyapi.weixin.qq.com" not in parsed_url.netloc:
+            raise ValueError("Invalid WeCom webhook URL domain")
+
+        # Test network connectivity (use HEAD request without sending message body)
+        try:
+            requests.head(self.webhook_url, timeout=10, allow_redirects=True)
+            # Connection test only, status code not checked (HEAD may return 405)
+        except requests.exceptions.RequestException as e:
+            raise ValueError(f"Failed to connect to WeCom webhook: {str(e)}")
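The new connector only checks that the webhook URL is well formed, uses HTTPS, points at qyapi.weixin.qq.com, and is reachable; it never posts a message. A minimal standalone sketch of the same checks follows; the helper name and the example key are hypothetical and not part of the package:

from urllib.parse import urlparse

import requests


def check_wecom_webhook(webhook_url: str) -> None:
    # Mirrors the validation flow of WeCom.test_connection: format, scheme, domain, reachability
    parsed = urlparse(webhook_url)
    if not parsed.scheme or not parsed.netloc:
        raise ValueError("Invalid webhook URL format")
    if parsed.scheme != "https":
        raise ValueError("WeCom webhook URL must use HTTPS")
    if "qyapi.weixin.qq.com" not in parsed.netloc:
        raise ValueError("Invalid WeCom webhook URL domain")
    try:
        # HEAD request: tests connectivity without sending a message; status code is not checked
        requests.head(webhook_url, timeout=10, allow_redirects=True)
    except requests.exceptions.RequestException as exc:
        raise ValueError(f"Failed to connect to WeCom webhook: {exc}")


# Hypothetical webhook key, for illustration only
check_wecom_webhook("https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=example")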
recurvedata/connectors/dbapi.py
CHANGED
@@ -397,19 +397,27 @@ class DBAPIBase(RecurveConnectorBase):
         set_env_dbt_password(self.password or "")

     @classmethod
-    def order_sql(cls, sql: str, orders: list[dict[str, str]] = None, return_sql: bool = True):
-        """
-        order the sql by the orders
-        """
+    def get_dialect(cls):
         # dialect impala -> hive, cuz there is no dialect 'impala' in sqlglot
-        dialect = "hive" if cls.connection_type == "impala" else (cls.connection_type or None)
+        return "hive" if cls.connection_type == "impala" else (cls.connection_type or None)
+
+    @classmethod
+    def clean_sql(cls, sql):
+        dialect = cls.get_dialect()
         # Parse the SQL query
         parsed = sqlglot.parse_one(sql, read=dialect)
         # since some sql dialects have special identifier, we need to use the dialect to generate the clean sql
-        clean_sql = parsed.sql(dialect=dialect, comments=False)
+        return parsed.sql(dialect=dialect, comments=False)
+
+    @classmethod
+    def order_sql(cls, sql: str, orders: list[dict[str, str]] = None, return_sql: bool = True):
+        """
+        order the sql by the orders
+        """
+        dialect = cls.get_dialect()
         # Wrap the entire query with a subquery
         alias = "_recurve_limit_subquery"
-        subquery = exp.Subquery(this=clean_sql, alias=alias)
+        subquery = exp.Subquery(this=cls.clean_sql(sql), alias=alias)

         # Create a new SELECT statement with the subquery and the LIMIT clause
         outer_select = exp.select("*").from_(subquery)
@@ -436,7 +444,7 @@ class DBAPIBase(RecurveConnectorBase):
         no validation on sql.
         If the sql is DML, then execute it will raise an error.
         """
-        dialect = "hive" if cls.connection_type == "impala" else (cls.connection_type or None)
+        dialect = cls.get_dialect()

         outer_select = cls.order_sql(sql, orders, return_sql=False)

@@ -456,7 +464,7 @@ class DBAPIBase(RecurveConnectorBase):
         no validation on sql.
         If the sql is DML, then execute it will raise an error.
         """
-        return f"SELECT COUNT(1) FROM ({sql}) AS cnt_subquery"
+        return f"SELECT COUNT(1) FROM ({cls.clean_sql(sql)}) AS cnt_subquery"


 @dataclass
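This refactor extracts get_dialect() and clean_sql() so that order_sql() and the COUNT(1) wrapper share one dialect mapping and one comment-stripping pass. A rough standalone sketch of that sqlglot pipeline, using a made-up query and the hive dialect (what get_dialect() would return for an impala connection); the exact rendered SQL may vary slightly by sqlglot version:

import sqlglot
from sqlglot import exp

dialect = "hive"  # impala is mapped to hive; other connectors use their own connection_type
sql = "SELECT id, name FROM users -- trailing comment"

# clean_sql(): re-render the query in the target dialect with comments stripped
clean = sqlglot.parse_one(sql, read=dialect).sql(dialect=dialect, comments=False)

# order_sql(): wrap the cleaned query in an aliased subquery before ORDER BY / LIMIT are attached
subquery = exp.Subquery(this=clean, alias="_recurve_limit_subquery")
outer_select = exp.select("*").from_(subquery)

print(outer_select.sql(dialect=dialect))
# roughly: SELECT * FROM (SELECT id, name FROM users) AS _recurve_limit_subquery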
recurvedata/schedulers/airflow_db_process.py
CHANGED

@@ -148,12 +148,15 @@ class AirflowDbService:
         max_execution_date: datetime.datetime | None,
         failed_only: bool,
     ):
-        drs: list[DagRun] = DagRun.find(
-            dag_id=dag.dag_id,
-            run_id=run_id,
-            execution_start_date=min_execution_date,
-            execution_end_date=max_execution_date,
-        )
+        kwargs = {
+            "dag_id": dag.dag_id,
+            "run_id": run_id,
+            "execution_start_date": min_execution_date,
+            "execution_end_date": max_execution_date,
+        }
+        if min_execution_date or max_execution_date:
+            kwargs.pop("run_id")
+        drs: list[DagRun] = DagRun.find(**kwargs)
         if not drs:
             logger.info(f"skip rerun, no dag_run found for {dag.dag_id} at {run_id}")
             return
@@ -182,14 +185,19 @@ class AirflowDbService:
         include_downstream: bool,
         failed_only: bool,
     ):
-        drs: list[DagRun] = DagRun.find(
-            dag_id=dag.dag_id,
-            run_id=run_id,
-            execution_start_date=min_execution_date,
-            execution_end_date=max_execution_date,
-        )
+        kwargs = {
+            "dag_id": dag.dag_id,
+            "run_id": run_id,
+            "execution_start_date": min_execution_date,
+            "execution_end_date": max_execution_date,
+        }
+        if min_execution_date or max_execution_date:
+            kwargs.pop("run_id")
+        drs: list[DagRun] = DagRun.find(**kwargs)
         if not drs:
-            logger.info(
+            logger.info(
+                f"skip rerun, no dag_run found for {dag.dag_id} at {run_id}, or between {min_execution_date} and {max_execution_date}"
+            )
             return
         clear_start_date = min([dr.execution_date for dr in drs])
         clear_end_date = max([dr.execution_date for dr in drs])
@@ -205,14 +213,16 @@ class AirflowDbService:
             for task_id in clear_task_ids:
                 if task_id in dag.task_dict:
                     task = dag.task_dict[task_id]
-
+                    # Use Airflow's built-in method to get all upstream tasks
+                    upstream_task_ids = task.get_flat_relative_ids(upstream=True)
                     expanded_task_ids.update(upstream_task_ids)

         if include_downstream:
             for task_id in clear_task_ids:
                 if task_id in dag.task_dict:
                     task = dag.task_dict[task_id]
-
+                    # Use Airflow's built-in method to get all downstream tasks
+                    downstream_task_ids = task.get_flat_relative_ids(upstream=False)
                     expanded_task_ids.update(downstream_task_ids)

         clear_task_ids = list(expanded_task_ids)
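The reworked lookup builds the DagRun.find() arguments as a dict so that run_id is dropped whenever an execution-date window is supplied, letting the window rather than a single run drive which DagRuns are cleared. A small illustration of just that selection logic, with plain Python in place of Airflow; the helper name and DAG id are made up for illustration:

import datetime


def build_find_kwargs(dag_id, run_id, min_execution_date, max_execution_date):
    # Same shape as the kwargs passed to DagRun.find() in the diff above
    kwargs = {
        "dag_id": dag_id,
        "run_id": run_id,
        "execution_start_date": min_execution_date,
        "execution_end_date": max_execution_date,
    }
    # A date window takes precedence over a single run_id
    if min_execution_date or max_execution_date:
        kwargs.pop("run_id")
    return kwargs


# Only a run_id: that run is looked up directly
print(build_find_kwargs("demo_dag", "manual__2024-01-01T00:00:00", None, None))

# With a date window: run_id is removed and the window drives the lookup
print(build_find_kwargs(
    "demo_dag",
    "manual__2024-01-01T00:00:00",
    datetime.datetime(2024, 1, 1),
    datetime.datetime(2024, 1, 7),
))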
{recurvedata_lib-0.1.492.dist-info → recurvedata_lib-0.1.496.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: recurvedata-lib
-Version: 0.1.492
+Version: 0.1.496
 Summary: Common Library for ReOrc Data Platform
 Author-email: Reorc Team <contact@recurvedata.com>
 Requires-Dist: croniter
@@ -77,6 +77,7 @@ Requires-Dist: dbt-duckdb==1.8.4; extra == 'dbt'
 Requires-Dist: dbt-extractor==0.5.1; extra == 'dbt'
 Requires-Dist: dbt-fabric==1.9.4; extra == 'dbt'
 Requires-Dist: dbt-impala==1.8.0; extra == 'dbt'
+Requires-Dist: dbt-mysql==1.8.0+recurve.1; extra == 'dbt'
 Requires-Dist: dbt-postgres==1.8.2; extra == 'dbt'
 Requires-Dist: dbt-redshift==1.8.1; extra == 'dbt'
 Requires-Dist: dbt-semantic-interfaces==0.5.1; extra == 'dbt'
{recurvedata_lib-0.1.492.dist-info → recurvedata_lib-0.1.496.dist-info}/RECORD
CHANGED

@@ -1,5 +1,5 @@
 recurvedata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-recurvedata/__version__.py,sha256=
+recurvedata/__version__.py,sha256=Y8Rd0ubG5uL_xudTnGRSLi5Dxc73nGvHR1VMlDgyh0Y,24
 recurvedata/config.py,sha256=rbpccM6qr8ekdEC5p7XtsivayxmL64-Nb9ogrcWMgX8,3848
 recurvedata/consts.py,sha256=y5BuAHBrz1jAcS5NgZxnrkfomQv3_5hvgafYwpLKpV8,1224
 recurvedata/error_codes.py,sha256=y4OLrs0_2iLWdvQJEV10m-414uPkUdm4v0D7bE8iWOM,2303
@@ -7,15 +7,15 @@ recurvedata/exceptions.py,sha256=-Jtm1MXk06ViNOP176MRELFOujjYkZI_IkZY4hzwhRo,187
 recurvedata/provider_manager.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 recurvedata/schema.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 recurvedata/client/__init__.py,sha256=FnX9HH-2dXADluNfucg98JPMfruMoBpN9ER9lZkVQvQ,49
-recurvedata/client/client.py,sha256=
+recurvedata/client/client.py,sha256=Z3JccGzQbRoWr0qnfvUam2_7zxOnyFSn1wV5A5F6jUI,5613
 recurvedata/client/server_client.py,sha256=bZ55S_tk_fI3JDLU3txha2HKbS4hKUG6jLehj3HnQc0,3033
 recurvedata/connectors/__init__.py,sha256=1VpGyGu9FA7lAvKZv0Z8j9ZzSi4i-L3_PyLcxdbrfs4,577
 recurvedata/connectors/_register.py,sha256=7NYVIJk9PufhTJWyj7JkPt9p66Cc1ieCBPpFi24cMwo,1639
 recurvedata/connectors/base.py,sha256=tuCUq2hij8GknWUKs1sP8HC_JZQie2csxoy3s-7Bb_4,3714
-recurvedata/connectors/config_schema.py,sha256=
+recurvedata/connectors/config_schema.py,sha256=fcTSyeVRA3tKvhD_kAvjk9lKo9TZlbDYRrIw9DKLkfk,62854
 recurvedata/connectors/const.py,sha256=3Zl4wZ0AP6t9x63QoRM6GMG6ZDgTQW-JacOv7lJBcwI,10153
 recurvedata/connectors/datasource.py,sha256=w09SskSeNJjNc7qUEp0N3UV-YnMX2flzTutg1BaLqO0,5573
-recurvedata/connectors/dbapi.py,sha256=
+recurvedata/connectors/dbapi.py,sha256=I9KlaOdWwDrOtZeDAZdBU1MxmkARnLCkZDnY_Vm9llI,14293
 recurvedata/connectors/fs.py,sha256=ZZlDq4Bd7aA09ZarZwyWrzFEwUgG5GHm8JPctHw7Ako,1460
 recurvedata/connectors/ftp.py,sha256=9H_xON87ESpatU5e8kCwwbYwLRnIH1ClqazIbDe-YfU,1091
 recurvedata/connectors/object_store.py,sha256=Fd3jGnUQ79sNuC8L-0nSDxwHh32emU0RuIqTAOZx4IM,1714
@@ -60,6 +60,7 @@ recurvedata/connectors/connectors/spark.py,sha256=zaTrsA0fgIHLLR3LZ_D9pd3KIccZBe
 recurvedata/connectors/connectors/starrocks.py,sha256=IdrlcKz0vUASokdR4QVjZqCXoVZ_dFBEdJUnDlMaScQ,6028
 recurvedata/connectors/connectors/tencent_cos.py,sha256=1f_31aNW8lznuygE_N7tQbK9PToGIRUkFnmhHSRmq54,1392
 recurvedata/connectors/connectors/tidb.py,sha256=2VkZ8x6fhmbcGRc7ekTwOADGgWiRFVHr4tnzuQVfqDU,1924
+recurvedata/connectors/connectors/wecom.py,sha256=1n6DZeaSEEWDiLwuMWIxx1QYpOPJPKQUACUtGcgEvUQ,2134
 recurvedata/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 recurvedata/core/config.py,sha256=_wmsWpPNgZE-mUSn1u3C7m4Zf5j2XEhZLhtUY5llbYo,1436
 recurvedata/core/configurable.py,sha256=RekN9mY46Sb_IPLW5aciNg0xF8E47CtMbHPOsar3tfU,719
@@ -256,7 +257,7 @@ recurvedata/providers/flywheel/__init__.py,sha256=47DEQpj8HBSa-_TImW-5
 recurvedata/providers/mysql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 recurvedata/schedulers/__init__.py,sha256=euiId3-BC5JpPmXb2gSzXBycfLMCJyvjn3c5D2ZqK8U,21
 recurvedata/schedulers/airflow.py,sha256=lX3nc-m2vrwuNuSABL560oaMR6t9G5mwhrKCExmrQDw,36198
-recurvedata/schedulers/airflow_db_process.py,sha256=
+recurvedata/schedulers/airflow_db_process.py,sha256=9Xm6SN6PFrrWMES5UHnsutCVNRg2Yq_BzhVjlFFAy0E,13968
 recurvedata/schedulers/airflow_operators.py,sha256=-AfLwT9Oi8_JDCclc2XhspZ13cbTcu6ftj6yBw2-AWw,2011
 recurvedata/schedulers/airflow_plugin.py,sha256=aUE1YxXJuMR4hL2hJ1XailBw3wA-V_ljYBVQz39MvYE,242
 recurvedata/schedulers/airflow_trigger_dag_patch.py,sha256=bUVYRYDozgYeLcpQEmLZsNKoFbVS2PvuSwnpbIyTS0U,4300
@@ -327,7 +328,7 @@ recurvedata/utils/singleton.py,sha256=15PaK2nP9H5PyO26IZzQPpfzlW5h_Bp1NHA6QPb4H0
 recurvedata/utils/sql.py,sha256=u3XRPv8_vsrMFMm-O1xyV63ZXChAFVHmJj2_xbRwcNg,264
 recurvedata/utils/timeout.py,sha256=U5ssSgoyVRqop9P8vmyI3BJI-OnMH2k22PdzTh-JN4c,780
 recurvedata/utils/tracing.py,sha256=gpK8q00ZjZmI81YpgQtDBPLzBvVSYpPA0sIq4wqnvBc,472
-recurvedata_lib-0.1.
-recurvedata_lib-0.1.
-recurvedata_lib-0.1.
-recurvedata_lib-0.1.
+recurvedata_lib-0.1.496.dist-info/METADATA,sha256=BiSQLgBoteUjIHtU3IBWdDhKF_rr8CkQ7cB5rqu7avM,27801
+recurvedata_lib-0.1.496.dist-info/WHEEL,sha256=tkmg4JIqwd9H8mL30xA7crRmoStyCtGp0VWshokd1Jc,105
+recurvedata_lib-0.1.496.dist-info/entry_points.txt,sha256=4KBBIfooz3wqXBoLlidRRP4_r36JUCnIF4BFn4igtms,209
+recurvedata_lib-0.1.496.dist-info/RECORD,,
{recurvedata_lib-0.1.492.dist-info → recurvedata_lib-0.1.496.dist-info}/WHEEL
File without changes

{recurvedata_lib-0.1.492.dist-info → recurvedata_lib-0.1.496.dist-info}/entry_points.txt
File without changes