feldera 0.32.0__py3-none-any.whl → 0.34.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of feldera might be problematic.
- feldera/enums.py +42 -0
- feldera/pipeline.py +404 -31
- feldera/pipeline_builder.py +2 -2
- feldera/rest/feldera_client.py +103 -34
- feldera/rest/pipeline.py +1 -1
- {feldera-0.32.0.dist-info → feldera-0.34.0.dist-info}/METADATA +2 -2
- {feldera-0.32.0.dist-info → feldera-0.34.0.dist-info}/RECORD +9 -9
- {feldera-0.32.0.dist-info → feldera-0.34.0.dist-info}/WHEEL +1 -1
- {feldera-0.32.0.dist-info → feldera-0.34.0.dist-info}/top_level.txt +0 -0
feldera/enums.py
CHANGED
@@ -1,4 +1,5 @@
 from enum import Enum
+from typing import Optional
 
 
 class CompilationProfile(Enum):
@@ -190,3 +191,44 @@ class PipelineStatus(Enum):
 
     def __eq__(self, other):
         return self.value == other.value
+
+
+class ProgramStatus(Enum):
+    Pending = 1
+    CompilingSql = 2
+    SqlCompiled = 3
+    CompilingRust = 4
+    Success = 5
+    SqlError = 6
+    RustError = 7
+    SystemError = 8
+
+    def __init__(self, value):
+        self.error: Optional[dict] = None
+        self._value_ = value
+
+    @staticmethod
+    def from_value(value):
+        error = None
+        if isinstance(value, dict):
+            error = value
+            value = list(value.keys())[0]
+
+        for member in ProgramStatus:
+            if member.name.lower() == value.lower():
+                member.error = error
+                return member
+        raise ValueError(f"Unknown value '{value}' for enum {ProgramStatus.__name__}")
+
+    def __eq__(self, other):
+        return self.value == other.value
+
+    def __str__(self):
+        return self.name + (f": ({self.error})" if self.error else "")
+
+    def get_error(self) -> Optional[dict]:
+        """
+        Returns the compilation error, if any.
+        """
+
+        return self.error
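The new ProgramStatus enum attaches an optional compilation-error payload to the status value. Below is a minimal sketch of how from_value behaves, based on the code above; the error payload shape shown here is illustrative, not taken from the API:

from feldera.enums import ProgramStatus

# Plain string values map to members case-insensitively.
status = ProgramStatus.from_value("success")
assert status == ProgramStatus.Success
assert status.get_error() is None

# Error states may arrive as a dict keyed by the status name; the whole
# dict is kept so the compilation error can be inspected afterwards.
status = ProgramStatus.from_value({"SqlError": {"message": "syntax error"}})
assert status == ProgramStatus.SqlError
print(status.get_error())  # {'SqlError': {'message': 'syntax error'}}
print(status)              # "SqlError: (...)" via the new __str__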
feldera/pipeline.py
CHANGED
@@ -1,4 +1,7 @@
+import logging
 import time
+from datetime import datetime
+
 import pandas
 
 from typing import List, Dict, Callable, Optional, Generator, Mapping, Any
@@ -6,21 +9,28 @@ from collections import deque
 from queue import Queue
 
 from feldera.rest.errors import FelderaAPIError
-from feldera.enums import PipelineStatus
+from feldera.enums import PipelineStatus, ProgramStatus
 from feldera.rest.pipeline import Pipeline as InnerPipeline
 from feldera.rest.feldera_client import FelderaClient
 from feldera._callback_runner import _CallbackRunnerInstruction, CallbackRunner
 from feldera.output_handler import OutputHandler
 from feldera._helpers import ensure_dataframe_has_columns, chunk_dataframe
+from feldera.rest.sql_table import SQLTable
+from feldera.rest.sql_view import SQLView
 
 
 class Pipeline:
-    def __init__(self,
-        self.name = name
+    def __init__(self, client: FelderaClient):
         self.client: FelderaClient = client
         self._inner: InnerPipeline | None = None
         self.views_tx: List[Dict[str, Queue]] = []
 
+    @staticmethod
+    def _from_inner(inner: InnerPipeline, client: FelderaClient) -> "Pipeline":
+        pipeline = Pipeline(client)
+        pipeline._inner = inner
+        return pipeline
+
     def __setup_output_listeners(self):
         """
         Internal function used to set up the output listeners.
@@ -35,15 +45,23 @@ class Pipeline:
            # block until the callback runner is ready
            queue.join()
 
+    def refresh(self):
+        """
+        Calls the backend to get the updated, latest version of the pipeline.
+
+        :raises FelderaConnectionError: If there is an issue connecting to the backend.
+        """
+
+        self._inner = self.client.get_pipeline(self.name)
+
     def status(self) -> PipelineStatus:
         """
         Return the current status of the pipeline.
         """
 
         try:
-
-            self._inner
-            return PipelineStatus.from_str(inner.deployment_status)
+            self.refresh()
+            return PipelineStatus.from_str(self._inner.deployment_status)
 
         except FelderaAPIError as err:
             if err.status_code == 404:
@@ -55,9 +73,18 @@ class Pipeline:
         """
         Push all rows in a pandas DataFrame to the pipeline.
 
+        The pipeline must either be in RUNNING or PAUSED states to push data.
+        An error will be raised if the pipeline is in any other state.
+
+        The dataframe must have the same columns as the table in the pipeline.
+
         :param table_name: The name of the table to insert data into.
         :param df: The pandas DataFrame to be pushed to the pipeline.
         :param force: `True` to push data even if the pipeline is paused. `False` by default.
+
+        :raises ValueError: If the table does not exist in the pipeline.
+        :raises RuntimeError: If the pipeline is not in a valid state to push data.
+        :raises RuntimeError: If the pipeline is paused and force is not set to `True`.
         """
 
         status = self.status()
@@ -77,7 +104,7 @@ class Pipeline:
            tbl.name.lower() for tbl in pipeline.tables
        ]:
            raise ValueError(
-                f"Cannot push to table '{table_name}'
+                f"Cannot push to table '{table_name}': table with this name does not exist in the '{self.name}' pipeline"
            )
        else:
            # consider validating the schema here
@@ -104,14 +131,25 @@ class Pipeline:
         """
         Push this JSON data to the specified table of the pipeline.
 
+        The pipeline must either be in RUNNING or PAUSED states to push data.
+        An error will be raised if the pipeline is in any other state.
+
         :param table_name: The name of the table to push data into.
         :param data: The JSON encoded data to be pushed to the pipeline. The data should be in the form:
            `{'col1': 'val1', 'col2': 'val2'}` or `[{'col1': 'val1', 'col2': 'val2'}, {'col1': 'val1', 'col2': 'val2'}]`
         :param update_format: The update format of the JSON data to be pushed to the pipeline. Must be one of:
            "raw", "insert_delete". <https://docs.feldera.com/formats/json#the-insertdelete-format>
         :param force: `True` to push data even if the pipeline is paused. `False` by default.
+
+        :raises ValueError: If the update format is invalid.
+        :raises FelderaAPIError: If the pipeline is not in a valid state to push data.
+        :raises RuntimeError: If the pipeline is paused and `force` is not set to `True`.
         """
 
+        status = self.status()
+        if not force and status == PipelineStatus.PAUSED:
+            raise RuntimeError("Pipeline is paused, set force=True to push data")
+
         if update_format not in ["raw", "insert_delete"]:
             ValueError("update_format must be one of raw or insert_delete")
 
@@ -126,11 +164,54 @@ class Pipeline:
            force=force,
        )
 
+    def pause_connector(self, table_name: str, connector_name: str):
+        """
+        Pause the specified input connector.
+
+        Connectors allow feldera to fetch data from a source or write data to a sink.
+        This method allows users to **PAUSE** a specific **INPUT** connector.
+        All connectors are RUNNING by default.
+
+        Refer to the connector documentation for more information:
+        <https://docs.feldera.com/connectors/#input-connector-orchestration>
+
+        :param table_name: The name of the table that the connector is attached to.
+        :param connector_name: The name of the connector to pause.
+
+        :raises FelderaAPIError: If the connector is not found, or if the pipeline is not running.
+        """
+
+        self.client.pause_connector(self.name, table_name, connector_name)
+
+    def resume_connector(self, table_name: str, connector_name: str):
+        """
+        Resume the specified connector.
+
+        Connectors allow feldera to fetch data from a source or write data to a sink.
+        This method allows users to **RESUME / START** a specific **INPUT** connector.
+        All connectors are RUNNING by default.
+
+        Refer to the connector documentation for more information:
+        <https://docs.feldera.com/connectors/#input-connector-orchestration>
+
+        :param table_name: The name of the table that the connector is attached to.
+        :param connector_name: The name of the connector to resume.
+
+        :raises FelderaAPIError: If the connector is not found, or if the pipeline is not running.
+        """
+
+        self.client.resume_connector(self.name, table_name, connector_name)
+
     def listen(self, view_name: str) -> OutputHandler:
         """
-
+        Follow the change stream (i.e., the output) of the provided view.
+        Returns an output handler to read the changes.
+
         When the pipeline is shutdown, these listeners are dropped.
 
+        You must call this method before starting the pipeline to get the entire output of the view.
+        If this method is called once the pipeline has started, you will only get the output from that point onwards.
+
         :param view_name: The name of the view to listen to.
         """
 
@@ -151,6 +232,9 @@ class Pipeline:
         """
         Run the given callback on each chunk of the output of the specified view.
 
+        You must call this method before starting the pipeline to operate on the entire output.
+        You can call this method after the pipeline has started, but you will only get the output from that point onwards.
+
         :param view_name: The name of the view.
         :param callback: The callback to run on each chunk. The callback should take two arguments:
 
@@ -176,7 +260,9 @@ class Pipeline:
         handler = CallbackRunner(self.client, self.name, view_name, callback, queue)
         handler.start()
 
-    def wait_for_completion(
+    def wait_for_completion(
+        self, shutdown: bool = False, timeout_s: Optional[float] = None
+    ):
         """
         Block until the pipeline has completed processing all input records.
 
@@ -191,6 +277,8 @@ class Pipeline:
        Kafka, that does not issue the end-of-input notification.
 
        :param shutdown: If True, the pipeline will be shutdown after completion. False by default.
+        :param timeout_s: Optional. The maximum time (in seconds) to wait for the pipeline to complete.
+            The default is None, which means wait indefinitely.
 
        :raises RuntimeError: If the pipeline returns unknown metrics.
        """
@@ -202,7 +290,19 @@ class Pipeline:
        ]:
            raise RuntimeError("Pipeline must be running to wait for completion")
 
+        start_time = time.monotonic()
+
        while True:
+            if timeout_s is not None:
+                elapsed = time.monotonic() - start_time
+                if elapsed > timeout_s:
+                    raise TimeoutError(
+                        f"timeout ({timeout_s}s) reached while waiting for pipeline '{self.name}' to complete"
+                    )
+                logging.debug(
+                    f"waiting for pipeline {self.name} to complete: elapsed time {elapsed}s, timeout: {timeout_s}s"
+                )
+
            metrics: dict = self.client.get_pipeline_stats(self.name).get(
                "global_metrics"
            )
@@ -221,35 +321,66 @@ class Pipeline:
        if shutdown:
            self.shutdown()
 
-    def
+    def __failed_check(self, next):
+        """
+        Checks if the pipeline is in FAILED state and raises an error if it is.
+        :meta private:
+        """
+        status = self.status()
+        if status == PipelineStatus.FAILED:
+            deployment_error = self.client.get_pipeline(self.name).deployment_error
+            error_msg = deployment_error.get("message", "")
+            raise RuntimeError(
+                f"""Cannot {next} pipeline '{self.name}' in FAILED state.
+The pipeline must be in SHUTDOWN state before it can be started, but it is currently in FAILED state.
+Use `Pipeline.shutdown()` method to shut down the pipeline.
+Error Message:
+{error_msg}"""
+            )
+
+    def start(self, timeout_s: Optional[float] = None):
         """
         .. _start:
 
         Starts this pipeline.
 
-
+        The pipeline must be in SHUTDOWN state to start.
+        If the pipeline is in any other state, an error will be raised.
+        If the pipeline is in PAUSED state, use `.meth:resume` instead.
+        If the pipeline is in FAILED state, it must be shutdown before starting it again.
+
+        :param timeout_s: The maximum time (in seconds) to wait for the pipeline to start.
+
+        :raises RuntimeError: If the pipeline is not in SHUTDOWN state.
         """
 
+        self.__failed_check("start")
        status = self.status()
        if status != PipelineStatus.SHUTDOWN:
            raise RuntimeError(
-                f"pipeline {self.name} in state {str(status.name)}
-
-
-
+                f"""Cannot start pipeline '{self.name}' in state '{str(status.name)}'.
+The pipeline must be in SHUTDOWN state before it can be started.
+You can either shut down the pipeline using the `Pipeline.shutdown()` method or use `Pipeline.resume()` to \
+resume a paused pipeline."""
            )
 
-        self.client.pause_pipeline(
+        self.client.pause_pipeline(
+            self.name, "Unable to START the pipeline.", timeout_s
+        )
        self.__setup_output_listeners()
-        self.resume()
+        self.resume(timeout_s)
 
-    def restart(self):
+    def restart(self, timeout_s: Optional[float] = None):
         """
         Restarts the pipeline.
+
+        This method **SHUTS DOWN** the pipeline regardless of its current state and then starts it again.
+
+        :param timeout_s: The maximum time (in seconds) to wait for the pipeline to restart.
         """
 
-        self.shutdown()
-        self.start()
+        self.shutdown(timeout_s)
+        self.start(timeout_s)
 
     def wait_for_idle(
         self,
@@ -328,16 +459,28 @@ class Pipeline:
                raise RuntimeError(f"waiting for idle reached timeout ({timeout_s}s)")
            time.sleep(poll_interval_s)
 
-    def pause(self):
+    def pause(self, timeout_s: Optional[float] = None):
         """
         Pause the pipeline.
+
+        The pipeline can only transition to the PAUSED state from the RUNNING state.
+        If the pipeline is already paused, it will remain in the PAUSED state.
+
+        :param timeout_s: The maximum time (in seconds) to wait for the pipeline to pause.
+
+        :raises FelderaAPIError: If the pipeline is in FAILED state.
         """
 
-        self.
+        self.__failed_check("pause")
+        self.client.pause_pipeline(self.name, timeout_s=timeout_s)
 
-    def shutdown(self):
+    def shutdown(self, timeout_s: Optional[float] = None):
         """
         Shut down the pipeline.
+
+        Shuts down the pipeline regardless of its current state.
+
+        :param timeout_s: The maximum time (in seconds) to wait for the pipeline to shut down.
         """
 
         if len(self.views_tx) > 0:
@@ -347,18 +490,27 @@ class Pipeline:
                # block until the callback runner has been stopped
                queue.join()
 
-        self.client.shutdown_pipeline(self.name)
+        self.client.shutdown_pipeline(self.name, timeout_s=timeout_s)
 
-    def resume(self):
+    def resume(self, timeout_s: Optional[float] = None):
         """
-        Resumes the pipeline.
+        Resumes the pipeline from the PAUSED state. If the pipeline is already running, it will remain in the RUNNING state.
+
+        :param timeout_s: The maximum time (in seconds) to wait for the pipeline to shut down.
+
+        :raises FelderaAPIError: If the pipeline is in FAILED state.
         """
 
-        self.
+        self.__failed_check("resume")
+        self.client.start_pipeline(self.name, timeout_s=timeout_s)
 
     def delete(self):
         """
         Deletes the pipeline.
+
+        The pipeline must be shutdown before it can be deleted.
+
+        :raises FelderaAPIError: If the pipeline is not in SHUTDOWN state.
         """
 
         self.client.delete_pipeline(self.name)
@@ -374,23 +526,39 @@ class Pipeline:
 
        try:
            inner = client.get_pipeline(name)
-
-            pipeline.__inner = inner
-            return pipeline
+            return Pipeline._from_inner(inner, client)
        except FelderaAPIError as err:
            if err.status_code == 404:
                raise RuntimeError(f"Pipeline with name {name} not found")
 
+    def checkpoint(self):
+        """
+        Checkpoints this pipeline, if fault-tolerance is enabled.
+        Fault Tolerance in Feldera: <https://docs.feldera.com/fault-tolerance/>
+
+        :raises FelderaAPIError: If checkpointing is not enabled.
+        """
+
+        self.client.checkpoint_pipeline(self.name)
+
     def query(self, query: str) -> Generator[Mapping[str, Any], None, None]:
         """
-        Executes an ad-hoc SQL query on this pipeline and returns the
+        Executes an ad-hoc SQL query on this pipeline and returns a generator that yields the rows of the result as Python dictionaries.
         For ``INSERT`` and ``DELETE`` queries, consider using :meth:`.execute` instead.
+        All floating-point numbers are deserialized as Decimal objects to avoid precision loss.
+
+        Note:
+            You can only ``SELECT`` from materialized tables and views.
 
        Important:
            This method is lazy. It returns a generator and is not evaluated until you consume the result.
 
        :param query: The SQL query to be executed.
        :return: A generator that yields the rows of the result as Python dictionaries.
+
+        :raises FelderaAPIError: If the pipeline is not in a RUNNING or PAUSED state.
+        :raises FelderaAPIError: If querying a non materialized table or view.
+        :raises FelderaAPIError: If the query is invalid.
        """
 
        return self.client.query_as_json(self.name, query)
@@ -400,8 +568,15 @@ class Pipeline:
        Executes an ad-hoc SQL query on this pipeline and saves the result to the specified path as a parquet file.
        If the extension isn't `parquet`, it will be automatically appended to `path`.
 
+        Note:
+            You can only ``SELECT`` from materialized tables and views.
+
        :param query: The SQL query to be executed.
        :param path: The path of the parquet file.
+
+        :raises FelderaAPIError: If the pipeline is not in a RUNNING or PAUSED state.
+        :raises FelderaAPIError: If querying a non materialized table or view.
+        :raises FelderaAPIError: If the query is invalid.
        """
 
        self.client.query_as_parquet(self.name, query, path)
@@ -410,11 +585,18 @@ class Pipeline:
        """
        Executes a SQL query on this pipeline and returns the result as a formatted string.
 
+        Note:
+            You can only ``SELECT`` from materialized tables and views.
+
        Important:
            This method is lazy. It returns a generator and is not evaluated until you consume the result.
 
        :param query: The SQL query to be executed.
        :return: A generator that yields a string representing the query result in a human-readable, tabular format.
+
+        :raises FelderaAPIError: If the pipeline is not in a RUNNING or PAUSED state.
+        :raises FelderaAPIError: If querying a non materialized table or view.
+        :raises FelderaAPIError: If the query is invalid.
        """
 
        return self.client.query_as_text(self.name, query)
@@ -429,8 +611,199 @@ class Pipeline:
        confirmation of successful query execution, but does not require the query result.
        If the query fails, an exception will be raised.
 
+        Important:
+            If you try to ``INSERT`` or ``DELETE`` data from a table while the pipeline is paused,
+            it will block until the pipeline is resumed.
+
        :param query: The SQL query to be executed.
+
+        :raises FelderaAPIError: If the pipeline is not in a RUNNING state.
+        :raises FelderaAPIError: If the query is invalid.
        """
 
        gen = self.query_tabular(query)
        deque(gen, maxlen=0)
+
+    @property
+    def name(self) -> str:
+        """
+        Return the name of the pipeline.
+        """
+
+        return self._inner.name
+
+    def program_code(self) -> str:
+        """
+        Return the program SQL code of the pipeline.
+        """
+
+        self.refresh()
+        return self._inner.program_code
+
+    def program_status(self) -> ProgramStatus:
+        """
+        Return the program status of the pipeline.
+
+        Program status is the status of compilation of this SQL program.
+        We first compile the SQL program to Rust code, and then compile the Rust code to a binary.
+        """
+
+        self.refresh()
+        return ProgramStatus.from_value(self._inner.program_status)
+
+    def program_status_since(self) -> datetime:
+        """
+        Return the timestamp when the current program status was set.
+        """
+
+        self.refresh()
+        return datetime.fromisoformat(self._inner.program_status_since)
+
+    def udf_rust(self) -> str:
+        """
+        Return the Rust code for UDFs.
+        """
+
+        self.refresh()
+        return self._inner.udf_rust
+
+    def udf_toml(self) -> str:
+        """
+        Return the Rust dependencies required by UDFs (in the TOML format).
+        """
+
+        self.refresh()
+        return self._inner.udf_toml
+
+    def program_config(self) -> Mapping[str, Any]:
+        """
+        Return the program config of the pipeline.
+        """
+
+        self.refresh()
+        return self._inner.program_config
+
+    def runtime_config(self) -> Mapping[str, Any]:
+        """
+        Return the runtime config of the pipeline.
+        """
+
+        self.refresh()
+        return self._inner.runtime_config
+
+    def id(self) -> str:
+        """
+        Return the ID of the pipeline.
+        """
+
+        self.refresh()
+        return self._inner.id
+
+    def description(self) -> str:
+        """
+        Return the description of the pipeline.
+        """
+
+        self.refresh()
+        return self._inner.description
+
+    def tables(self) -> List[SQLTable]:
+        """
+        Return the tables of the pipeline.
+        """
+
+        self.refresh()
+        return self._inner.tables
+
+    def views(self) -> List[SQLView]:
+        """
+        Return the views of the pipeline.
+        """
+
+        self.refresh()
+        return self._inner.views
+
+    def created_at(self) -> datetime:
+        """
+        Return the creation time of the pipeline.
+        """
+
+        self.refresh()
+        return datetime.fromisoformat(self._inner.created_at)
+
+    def version(self) -> int:
+        """
+        Return the version of the pipeline.
+        """
+
+        self.refresh()
+        return self._inner.version
+
+    def program_version(self) -> int:
+        """
+        Return the program version of the pipeline.
+        """
+
+        self.refresh()
+        return self._inner.program_version
+
+    def deployment_status_since(self) -> datetime:
+        """
+        Return the timestamp when the current deployment status of the pipeline was set.
+        """
+
+        self.refresh()
+        return datetime.fromisoformat(self._inner.deployment_status_since)
+
+    def deployment_config(self) -> Mapping[str, Any]:
+        """
+        Return the deployment config of the pipeline.
+        """
+
+        self.refresh()
+        return self._inner.deployment_config
+
+    def deployment_desired_status(self) -> PipelineStatus:
+        """
+        Return the desired deployment status of the pipeline.
+        This is the next state that the pipeline should transition to.
+        """
+
+        self.refresh()
+        return PipelineStatus.from_str(self._inner.deployment_desired_status)
+
+    def deployment_error(self) -> Mapping[str, Any]:
+        """
+        Return the deployment error of the pipeline.
+        Returns an empty string if there is no error.
+        """
+
+        self.refresh()
+        return self._inner.deployment_error
+
+    def deployment_location(self) -> str:
+        """
+        Return the deployment location of the pipeline.
+        Deployment location is the location where the pipeline can be reached at runtime (a TCP port number or a URI).
+        """
+
+        self.refresh()
+        return self._inner.deployment_location
+
+    def program_binary_url(self) -> str:
+        """
+        Return the program binary URL of the pipeline.
+        This is the URL where the compiled program binary can be downloaded from.
+        """
+
+        self.refresh()
+        return self._inner.program_binary_url
+
+    def program_info(self) -> Mapping[str, Any]:
+        """
+        Return the program info of the pipeline.
+        This is the output returned by the SQL compiler, including: the list of input and output connectors, the generated Rust code for the pipeline, and the SQL program schema.
+        """
+
+        self.refresh()
+        return self._inner.program_info
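Taken together, the additions to Pipeline cover per-connector orchestration, bounded waits, checkpoints, and ad-hoc queries. The following is a hedged usage sketch; the pipeline object is assumed to have been created via PipelineBuilder, and the table ("t"), connector ("kafka_in"), and view ("v") names are illustrative:

from feldera.rest.feldera_client import FelderaClient
from feldera.pipeline import Pipeline

client = FelderaClient.localhost()       # new convenience constructor
pipeline: Pipeline = ...                  # e.g. returned by PipelineBuilder.create()

pipeline.start(timeout_s=300)             # start() now accepts a bounded wait

# Pause a single input connector instead of the whole pipeline.
pipeline.pause_connector("t", "kafka_in")
pipeline.resume_connector("t", "kafka_in")

# Raises TimeoutError if the input is not fully processed within 10 minutes.
pipeline.wait_for_completion(shutdown=False, timeout_s=600)

# Lazy ad-hoc query against a materialized view; rows arrive as dicts,
# with floating-point values deserialized as Decimal.
for row in pipeline.query("SELECT * FROM v"):
    print(row)

pipeline.shutdown(timeout_s=60)           # bounded shutdown as well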
feldera/pipeline_builder.py
CHANGED
@@ -66,7 +66,7 @@ class PipelineBuilder:
        )
 
        inner = self.client.create_pipeline(inner)
-        pipeline = Pipeline(
+        pipeline = Pipeline(self.client)
        pipeline._inner = inner
 
        return pipeline
@@ -103,7 +103,7 @@ class PipelineBuilder:
        )
 
        inner = self.client.create_or_update_pipeline(inner)
-        pipeline = Pipeline(
+        pipeline = Pipeline(self.client)
        pipeline._inner = inner
 
        return pipeline
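The builder now hands only the client to Pipeline's constructor and attaches the REST-layer object afterwards; the pipeline name is read from that inner object (the new name property) instead of being stored separately. A sketch of the equivalent wiring, assuming a pipeline named "example" already exists on the backend:

from feldera.pipeline import Pipeline
from feldera.rest.feldera_client import FelderaClient

client = FelderaClient.localhost()

# Roughly what the builder does after create_pipeline()/create_or_update_pipeline():
inner = client.get_pipeline("example")    # REST-layer Pipeline object
pipeline = Pipeline(client)
pipeline._inner = inner                    # or: Pipeline._from_inner(inner, client)

assert pipeline.name == "example"          # `name` is derived from the inner object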
feldera/rest/feldera_client.py
CHANGED
@@ -7,6 +7,7 @@ from decimal import Decimal
 from typing import Generator
 
 from feldera.rest.config import Config
+from feldera.rest.errors import FelderaTimeoutError
 from feldera.rest.pipeline import Pipeline
 from feldera.rest._httprequests import HttpRequests
 
@@ -27,7 +28,7 @@ class FelderaClient:
         self,
         url: str,
         api_key: Optional[str] = None,
-        timeout: Optional[
+        timeout: Optional[float] = None,
     ) -> None:
         """
         :param url: The url to Feldera API (ex: https://try.feldera.com)
@@ -36,7 +37,7 @@ class FelderaClient:
            out.
         """
 
-        self.config = Config(url, api_key, timeout)
+        self.config = Config(url, api_key, timeout=timeout)
         self.http = HttpRequests(self.config)
 
         try:
@@ -45,6 +46,14 @@ class FelderaClient:
            logging.error(f"Failed to connect to Feldera API: {e}")
            raise e
 
+    @staticmethod
+    def localhost(port: int = 8080) -> "FelderaClient":
+        """
+        Create a FelderaClient that connects to the local Feldera instance
+        """
+
+        return FelderaClient(f"http://localhost:{port}")
+
     def get_pipeline(self, pipeline_name) -> Pipeline:
         """
         Get a pipeline by name
@@ -188,18 +197,30 @@ class FelderaClient:
 
        return resp
 
-    def start_pipeline(self, pipeline_name: str):
+    def start_pipeline(self, pipeline_name: str, timeout_s: Optional[float] = 300):
         """
-        Start a pipeline
 
         :param pipeline_name: The name of the pipeline to start
+        :param timeout_s: The amount of time in seconds to wait for the pipeline to start. 300 seconds by default.
         """
 
+        if timeout_s is None:
+            timeout_s = 300
+
        self.http.post(
            path=f"/pipelines/{pipeline_name}/start",
        )
 
+        start_time = time.monotonic()
+
        while True:
+            if timeout_s is not None:
+                elapsed = time.monotonic() - start_time
+                if elapsed > timeout_s:
+                    raise TimeoutError(
+                        f"Timed out waiting for pipeline {pipeline_name} to start"
+                    )
+
            resp = self.get_pipeline(pipeline_name)
            status = resp.deployment_status
 
@@ -217,13 +238,23 @@ Reason: The pipeline is in a FAILED state due to the following error:
            )
            time.sleep(0.1)
 
-    def pause_pipeline(
+    def pause_pipeline(
+        self,
+        pipeline_name: str,
+        error_message: str = None,
+        timeout_s: Optional[float] = 300,
+    ):
         """
         Stop a pipeline
 
         :param pipeline_name: The name of the pipeline to stop
         :param error_message: The error message to show if the pipeline is in FAILED state
+        :param timeout_s: The amount of time in seconds to wait for the pipeline to pause. 300 seconds by default.
         """
+
+        if timeout_s is None:
+            timeout_s = 300
+
        self.http.post(
            path=f"/pipelines/{pipeline_name}/pause",
        )
@@ -231,7 +262,16 @@ Reason: The pipeline is in a FAILED state due to the following error:
        if error_message is None:
            error_message = "Unable to PAUSE the pipeline.\n"
 
+        start_time = time.monotonic()
+
        while True:
+            if timeout_s is not None:
+                elapsed = time.monotonic() - start_time
+                if elapsed > timeout_s:
+                    raise TimeoutError(
+                        f"Timed out waiting for pipeline {pipeline_name} to pause"
+                    )
+
            resp = self.get_pipeline(pipeline_name)
            status = resp.deployment_status
 
@@ -249,44 +289,24 @@ Reason: The pipeline is in a FAILED state due to the following error:
                )
            time.sleep(0.1)
 
-    def shutdown_pipeline(self, pipeline_name: str):
+    def shutdown_pipeline(self, pipeline_name: str, timeout_s: Optional[float] = 300):
         """
         Shutdown a pipeline
 
         :param pipeline_name: The name of the pipeline to shut down
+        :param timeout_s: The amount of time in seconds to wait for the pipeline to shut down. Default is 15 seconds.
         """
 
-
-
-        )
-
-        start = time.time()
-        timeout = 15
-
-        while time.time() - start < timeout:
-            status = self.get_pipeline(pipeline_name).deployment_status
+        if timeout_s is None:
+            timeout_s = 300
 
-            if status == "Shutdown":
-                return
-
-            logging.debug(
-                "still shutting down %s, waiting for 100 more milliseconds",
-                pipeline_name,
-            )
-            time.sleep(0.1)
-
-        # retry sending shutdown request as the pipline hasn't shutdown yet
-        logging.debug(
-            "pipeline %s hasn't shutdown after %s s, retrying", pipeline_name, timeout
-        )
        self.http.post(
            path=f"/pipelines/{pipeline_name}/shutdown",
        )
 
-        start = time.
-        timeout = 5
+        start = time.monotonic()
 
-        while time.
+        while time.monotonic() - start < timeout_s:
            status = self.get_pipeline(pipeline_name).deployment_status
 
            if status == "Shutdown":
@@ -298,7 +318,9 @@ Reason: The pipeline is in a FAILED state due to the following error:
            )
            time.sleep(0.1)
 
-        raise
+        raise FelderaTimeoutError(
+            f"timeout error: pipeline '{pipeline_name}' did not shutdown in {timeout_s} seconds"
+        )
 
     def checkpoint_pipeline(self, pipeline_name: str):
         """
@@ -435,12 +457,12 @@ Reason: The pipeline is in a FAILED state due to the following error:
            stream=True,
        )
 
-        end = time.
+        end = time.monotonic() + timeout if timeout else None
 
        # Using the default chunk size below makes `iter_lines` extremely
        # inefficient when dealing with long lines.
        for chunk in resp.iter_lines(chunk_size=50000000):
-            if end and time.
+            if end and time.monotonic() > end:
                break
            if chunk:
                yield json.loads(chunk, parse_float=Decimal)
@@ -514,6 +536,7 @@ Reason: The pipeline is in a FAILED state due to the following error:
        """
        Executes an ad-hoc query on the specified pipeline and returns the result as a generator that yields
        rows of the query as Python dictionaries.
+        All floating-point numbers are deserialized as Decimal objects to avoid precision loss.
 
        :param pipeline_name: The name of the pipeline to query.
        :param query: The SQL query to be executed.
@@ -534,3 +557,49 @@ Reason: The pipeline is in a FAILED state due to the following error:
        for chunk in resp.iter_lines(chunk_size=50000000):
            if chunk:
                yield json.loads(chunk, parse_float=Decimal)
+
+    def pause_connector(self, pipeline_name, table_name, connector_name):
+        """
+        Pause the specified input connector.
+
+        Connectors allow feldera to fetch data from a source or write data to a sink.
+        This method allows users to **PAUSE** a specific **INPUT** connector.
+        All connectors are RUNNING by default.
+
+        Refer to the connector documentation for more information:
+        <https://docs.feldera.com/connectors/#input-connector-orchestration>
+
+        :param pipeline_name: The name of the pipeline.
+        :param table_name: The name of the table associated with this connector.
+        :param connector_name: The name of the connector.
+
+        :raises FelderaAPIError: If the connector cannot be found, or if the pipeline is not running.
+        """
+
+        self.http.post(
+            path=f"/pipelines/{pipeline_name}/tables/{table_name}/connectors/{connector_name}/pause",
+        )
+
+    def resume_connector(
+        self, pipeline_name: str, table_name: str, connector_name: str
+    ):
+        """
+        Resume the specified connector.
+
+        Connectors allow feldera to fetch data from a source or write data to a sink.
+        This method allows users to **RESUME / START** a specific **INPUT** connector.
+        All connectors are RUNNING by default.
+
+        Refer to the connector documentation for more information:
+        <https://docs.feldera.com/connectors/#input-connector-orchestration>
+
+        :param pipeline_name: The name of the pipeline.
+        :param table_name: The name of the table associated with this connector.
+        :param connector_name: The name of the connector.
+
+        :raises FelderaAPIError: If the connector cannot be found, or if the pipeline is not running.
+        """
+
+        self.http.post(
+            path=f"/pipelines/{pipeline_name}/tables/{table_name}/connectors/{connector_name}/start",
+        )
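At the REST-client level, the release adds a localhost() convenience constructor, bounded start/pause/shutdown polling, and connector pause/resume endpoints. A short sketch of driving FelderaClient directly; the pipeline, table, and connector names are illustrative and must already exist on the target instance:

from feldera.rest.feldera_client import FelderaClient
from feldera.rest.errors import FelderaTimeoutError

client = FelderaClient.localhost()   # same as FelderaClient("http://localhost:8080")

try:
    # Polls the deployment status and gives up after timeout_s seconds.
    client.start_pipeline("example", timeout_s=120)

    # Pause/resume a single input connector attached to table "t".
    client.pause_connector("example", "t", "kafka_in")
    client.resume_connector("example", "t", "kafka_in")

    client.shutdown_pipeline("example", timeout_s=60)
except FelderaTimeoutError as err:
    # shutdown_pipeline raises FelderaTimeoutError on timeout
    print(f"shutdown did not finish in time: {err}")
except TimeoutError as err:
    # start_pipeline/pause_pipeline raise TimeoutError on timeout
    print(f"timed out: {err}")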
feldera/rest/pipeline.py
CHANGED
@@ -37,7 +37,7 @@ class Pipeline:
        self.description: Optional[str] = description
        self.program_config: Mapping[str, Any] = program_config
        self.runtime_config: Mapping[str, Any] = runtime_config
-        self.id: Optional[str] =
+        self.id: Optional[str] = None
        self.tables: list[SQLTable] = []
        self.views: list[SQLView] = []
        self.deployment_status: Optional[str] = None
{feldera-0.32.0.dist-info → feldera-0.34.0.dist-info}/RECORD
CHANGED
@@ -1,20 +1,20 @@
 feldera/__init__.py,sha256=PxkgCtEAuFwo4u8NGEDio-bF3M-GnbeV45tAQVoBbqE,297
 feldera/_callback_runner.py,sha256=Tdf6BXN4zppyoy8t_y-Ooa3B0wEfvyezMHU9jxY2ZhA,4713
 feldera/_helpers.py,sha256=TuaJPQdAnRV9K5bG7-DCAr45b2JxsZyrwkZBJf1806M,2684
-feldera/enums.py,sha256=
+feldera/enums.py,sha256=tI48tTF65AU5ZLem_IDnC5ycPVMKMv591lW2T__U4C8,7281
 feldera/output_handler.py,sha256=64J3ljhOaKIhxdjOKYi-BUz_HnMwROfmN8eE-btYygU,1930
-feldera/pipeline.py,sha256
-feldera/pipeline_builder.py,sha256=
+feldera/pipeline.py,sha256=yDU5vSRk-2I5P1VeV_Ix5rCDLbjNIkIYEP_gA2vWSIU,29705
+feldera/pipeline_builder.py,sha256=4rmklRZ0-otvTUb-HTESfNsJopEK-E2jxpJXiYlKpps,3664
 feldera/runtime_config.py,sha256=PfYXsrLrs5Duty-7x3dGDf2uvp5hwp3Yb5n3bRQtLVk,2898
 feldera/rest/__init__.py,sha256=Eg-EKUU3RSTDcdxTR_7wNDnCly8VpXEzsZCQUmf-y2M,308
 feldera/rest/_httprequests.py,sha256=y3RxFn4BCTKbUztO1LN2CWXgGA93dIIV5VLdyiWQWuQ,6181
 feldera/rest/config.py,sha256=84Lj2QX6SYNZJdBfrCHPMh29Nj4MY7nRB-uddytx_ok,795
 feldera/rest/errors.py,sha256=b4i2JjrbSmej7jdko_FL8UeXklLKenSipwMT80jowaM,1720
-feldera/rest/feldera_client.py,sha256=
-feldera/rest/pipeline.py,sha256=
+feldera/rest/feldera_client.py,sha256=kTNN3DaxQ5gnCpTYAggpdlMpBKJJ15bYh_WXI6FzGzU,20570
+feldera/rest/pipeline.py,sha256=o6BFLL3DuurvAhneX1LH7mLjbvX3dn4lCXziYRciUI4,2788
 feldera/rest/sql_table.py,sha256=qrw-YwMzx5T81zDefNO1KOx7EyypFz1vPwGBzSUB7kc,652
 feldera/rest/sql_view.py,sha256=hN12mPM0mvwLCIPYywpb12s9Hd2Ws31IlTMXPriMisw,644
-feldera-0.
-feldera-0.
-feldera-0.
-feldera-0.
+feldera-0.34.0.dist-info/METADATA,sha256=C60x6jVIUQBT33nIVHMx3saeoXJpyfYzDMLtC23i9oQ,2582
+feldera-0.34.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+feldera-0.34.0.dist-info/top_level.txt,sha256=fB6yTqrQiO6RCbY1xP2T_mpPoTjDFtJvkJJodiee7d0,8
+feldera-0.34.0.dist-info/RECORD,,

{feldera-0.32.0.dist-info → feldera-0.34.0.dist-info}/top_level.txt
File without changes