vellum-workflow-server 1.4.0.post1__py3-none-any.whl → 1.4.1.post2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of vellum-workflow-server might be problematic.
- {vellum_workflow_server-1.4.0.post1.dist-info → vellum_workflow_server-1.4.1.post2.dist-info}/METADATA +2 -2
- {vellum_workflow_server-1.4.0.post1.dist-info → vellum_workflow_server-1.4.1.post2.dist-info}/RECORD +7 -7
- workflow_server/api/tests/test_workflow_view_stream_workflow_route.py +49 -0
- workflow_server/api/workflow_view.py +5 -1
- workflow_server/core/executor.py +32 -20
- {vellum_workflow_server-1.4.0.post1.dist-info → vellum_workflow_server-1.4.1.post2.dist-info}/WHEEL +0 -0
- {vellum_workflow_server-1.4.0.post1.dist-info → vellum_workflow_server-1.4.1.post2.dist-info}/entry_points.txt +0 -0
{vellum_workflow_server-1.4.0.post1.dist-info → vellum_workflow_server-1.4.1.post2.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vellum-workflow-server
-Version: 1.4.0.post1
+Version: 1.4.1.post2
 Summary:
 License: AGPL
 Requires-Python: >=3.9.0,<4
@@ -29,7 +29,7 @@ Requires-Dist: pyjwt (==2.10.0)
 Requires-Dist: python-dotenv (==1.0.1)
 Requires-Dist: retrying (==1.3.4)
 Requires-Dist: sentry-sdk[flask] (==2.20.0)
-Requires-Dist: vellum-ai (==1.4.
+Requires-Dist: vellum-ai (==1.4.1)
 Description-Content-Type: text/markdown
 
 # Vellum Workflow Runner Server
{vellum_workflow_server-1.4.0.post1.dist-info → vellum_workflow_server-1.4.1.post2.dist-info}/RECORD
RENAMED
@@ -5,14 +5,14 @@ workflow_server/api/healthz_view.py,sha256=itiRvBDBXncrw8Kbbc73UZLwqMAhgHOR3uSre
 workflow_server/api/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 workflow_server/api/tests/test_input_display_mapping.py,sha256=drBZqMudFyB5wgiUOcMgRXz7E7ge-Qgxbstw4E4f0zE,2211
 workflow_server/api/tests/test_workflow_view.py,sha256=RlAw1tHeIlnOXGrFQN-w3EOLPZkhp6Dfy6d1r7kU5oc,22573
-workflow_server/api/tests/test_workflow_view_stream_workflow_route.py,sha256=
-workflow_server/api/workflow_view.py,sha256=
+workflow_server/api/tests/test_workflow_view_stream_workflow_route.py,sha256=taNOFnToCIMolJbr1bJNG88ZiB47YOTZb4fFxkE-rx4,28820
+workflow_server/api/workflow_view.py,sha256=_WhjNgimTPoS10C-npRWDfJixzg4eHTJ5xIKACStZf4,21943
 workflow_server/code_exec_runner.py,sha256=lBnMIorPZL8zZBye6TjeCIs06WTJM7P2HR07B1fjJJI,2533
 workflow_server/config.py,sha256=qmmTr6ty3ZN5LDOFs3TfUxYshYe6Mmn_LanplHHeE9Q,1796
 workflow_server/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 workflow_server/core/cancel_workflow.py,sha256=Ffkc3mzmrdMEUcD-sHfEhX4IwVrka-E--SxKA1dUfIU,2185
 workflow_server/core/events.py,sha256=24MA66DVQuaLJJcZrS8IL1Zq4Ohi9CoouKZ5VgoH3Cs,1402
-workflow_server/core/executor.py,sha256=
+workflow_server/core/executor.py,sha256=BqGEHVIo8w-wn-_Gfz0gFcZLXbj3VFPH3BmoHqoCJa8,18475
 workflow_server/core/utils.py,sha256=aIpSINstLGslP2PIoDLM82_1GlJ1uC_0AIrP-V7Yobo,3230
 workflow_server/core/workflow_executor_context.py,sha256=w3OhV_AXpgh7AxpjEsc0vo-IJypgJcgr5DXJCqGptOU,1587
 workflow_server/server.py,sha256=QBU12AaAfAgLqfCDBd24qIJl_mbheiq0-hfcWV7rZM4,1234
@@ -28,7 +28,7 @@ workflow_server/utils/tests/test_sentry_integration.py,sha256=LGmWiaLhFrx-jslrRj
 workflow_server/utils/tests/test_system_utils.py,sha256=_4GwXvVvU5BrATxUEWwQIPg0bzQXMWBtiBmjP8MTxJM,4314
 workflow_server/utils/tests/test_utils.py,sha256=0Nq6du8o-iBtTrip9_wgHES53JSiJbVdSXaBnPobw3s,6930
 workflow_server/utils/utils.py,sha256=ZPoM1Suhid22dpB8oEFLux8wx-9iyzmSfWuYxSCrgWk,4774
-vellum_workflow_server-1.4.
-vellum_workflow_server-1.4.
-vellum_workflow_server-1.4.
-vellum_workflow_server-1.4.
+vellum_workflow_server-1.4.1.post2.dist-info/METADATA,sha256=IO3NXiuPcht-UfZP1Zz8z97yIRaPkYsoMcVvhWp0jFc,2273
+vellum_workflow_server-1.4.1.post2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_workflow_server-1.4.1.post2.dist-info/entry_points.txt,sha256=uB_0yPkr7YV6RhEXzvFReUM8P4OQBlVXD6TN6eb9-oc,277
+vellum_workflow_server-1.4.1.post2.dist-info/RECORD,,
workflow_server/api/tests/test_workflow_view_stream_workflow_route.py
CHANGED
@@ -10,6 +10,8 @@ from uuid import uuid4
 
 import requests_mock
 
+from vellum.workflows.emitters.base import WorkflowEvent
+from vellum.workflows.emitters.vellum_emitter import VellumEmitter
 from workflow_server.code_exec_runner import run_code_exec_stream
 from workflow_server.server import create_app
 from workflow_server.utils.system_utils import get_active_process_count
@@ -912,3 +914,50 @@ class Workflow(BaseWorkflow):
 
     # AND we get the expected timeout error message
     assert response_data == {"detail": "Request timed out trying to initiate the Workflow"}
+
+
+@pytest.mark.parametrize("non_process_stream_types", [code_exec_stream, flask_stream_disable_process_wrapper])
+def test_stream_workflow_route__vembda_emitting_calls_monitoring_api(non_process_stream_types):
+    """
+    Tests that the monitoring API is called when vembda emitting is enabled.
+    """
+
+    # GIVEN a valid request body with vembda emitting enabled
+    span_id = uuid4()
+    request_body = {
+        "execution_id": str(span_id),
+        "inputs": [],
+        "environment_api_key": "test",
+        "module": "workflow",
+        "timeout": 360,
+        "feature_flags": {"vembda-event-emitting-enabled": True},
+        "files": {
+            "__init__.py": "",
+            "workflow.py": """\
+from vellum.workflows import BaseWorkflow
+
+class Workflow(BaseWorkflow):
+    class Outputs(BaseWorkflow.Outputs):
+        foo = "hello"
+""",
+        },
+    }
+    emitted_events = []
+
+    def send_events(self, events: list[WorkflowEvent]) -> None:
+        for event in events:
+            emitted_events.append(event)
+
+    VellumEmitter._send_events = send_events
+
+    # WHEN we call the stream route with mocked monitoring API
+    status_code, events = non_process_stream_types(request_body)
+
+    # THEN we get a 200 response
+    assert status_code == 200, events
+
+    # AND the expected workflow events were emitted
+    event_names = [event.name for event in emitted_events]
+    assert len(event_names) == 2, "Should include 2 events"
+    assert "workflow.execution.initiated" in event_names, "Should include workflow.execution.initiated event"
+    assert "workflow.execution.fulfilled" in event_names, "Should include workflow.execution.fulfilled event"
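The new test works by patching VellumEmitter._send_events at the class level and collecting whatever the emitter tries to send during the run. Below is a minimal, self-contained sketch of that capture pattern; the Event and Emitter classes are stand-ins for illustration (not the real VellumEmitter API), and the restore step at the end is an addition the diffed test does not perform.

# Sketch of the class-level patch-and-capture pattern used by the new test.
# Event and Emitter are stand-ins; only the patching technique mirrors the diff.
from dataclasses import dataclass
from typing import Callable, List


@dataclass
class Event:
    name: str


class Emitter:
    """Stand-in for an emitter whose _send_events would normally call a remote API."""

    def _send_events(self, events: List[Event]) -> None:
        raise RuntimeError("would call the monitoring API")

    def emit(self, event: Event) -> None:
        self._send_events([event])


def test_capture_emitted_events() -> None:
    captured: List[Event] = []
    original: Callable = Emitter._send_events

    # Patch at the class level, the same way the test replaces VellumEmitter._send_events.
    def send_events(self, events: List[Event]) -> None:
        captured.extend(events)

    Emitter._send_events = send_events
    try:
        emitter = Emitter()
        emitter.emit(Event(name="workflow.execution.initiated"))
        emitter.emit(Event(name="workflow.execution.fulfilled"))
    finally:
        # Restore the original method so other tests are unaffected.
        Emitter._send_events = original

    assert [e.name for e in captured] == [
        "workflow.execution.initiated",
        "workflow.execution.fulfilled",
    ]


if __name__ == "__main__":
    test_capture_emitted_events()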
workflow_server/api/workflow_view.py
CHANGED
@@ -23,6 +23,7 @@ from vellum_ee.workflows.display.types import WorkflowDisplayContext
 from vellum_ee.workflows.display.workflows import BaseWorkflowDisplay
 from vellum_ee.workflows.server.virtual_file_loader import VirtualFileFinder
 
+from vellum.workflows import BaseWorkflow
 from vellum.workflows.exceptions import WorkflowInitializationException
 from vellum.workflows.nodes import BaseNode
 from workflow_server.config import ENABLE_PROCESS_WRAPPER, MEMORY_LIMIT_MB
@@ -458,7 +459,10 @@ def serialize_route() -> Response:
     try:
         result = BaseWorkflowDisplay.serialize_module(namespace, client=client)
     except Exception as e:
-        raise WorkflowInitializationException(
+        raise WorkflowInitializationException(
+            message=str(e),
+            workflow_definition=BaseWorkflow,
+        ) from e
 
     return Response(
         json.dumps(result.model_dump()),
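The substantive change in workflow_view.py is that serialization failures are now re-raised as WorkflowInitializationException with explicit message and workflow_definition keyword arguments, chained to the original error. A minimal sketch of that wrapping pattern, assuming the vellum SDK is installed; serialize_or_fail and its placeholder failure are hypothetical and exist only to show the shape of the raise seen in the diff.

from vellum.workflows import BaseWorkflow
from vellum.workflows.exceptions import WorkflowInitializationException


def serialize_or_fail(namespace: str) -> dict:
    # Hypothetical stand-in for the serialization call in serialize_route.
    try:
        raise ValueError(f"could not serialize module {namespace!r}")  # placeholder failure
    except Exception as e:
        # Mirror the diff: wrap the failure with a message and a fallback workflow definition,
        # keeping the original exception reachable via __cause__ through "raise ... from e".
        raise WorkflowInitializationException(
            message=str(e),
            workflow_definition=BaseWorkflow,
        ) from e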
workflow_server/core/executor.py
CHANGED
@@ -122,11 +122,6 @@ def _stream_workflow_wrapper(
         logger.exception(e)
         queue.put(serialize_vembda_rejected_event(executor_context, "Internal Server Error"))
 
-    emitter_thread = next(
-        (t for t in threading.enumerate() if t.name.endswith(".background_thread") and t.is_alive()), None
-    )
-    if emitter_thread:
-        emitter_thread.join()
     queue.put(STREAM_FINISHED_EVENT)
 
     exit(0)
@@ -174,7 +169,7 @@ def stream_workflow(
     cancel_signal: Optional[ThreadingEvent] = None,
 ) -> tuple[Iterator[dict], UUID]:
     workflow, namespace = _gather_workflow(executor_context)
-    workflow_inputs = _get_workflow_inputs(executor_context)
+    workflow_inputs = _get_workflow_inputs(executor_context, workflow.__class__)
     display_context = _gather_display_context(workflow, namespace)
     workflow_state = (
         workflow.deserialize_state(
@@ -243,6 +238,16 @@ def stream_workflow(
     finally:
         cancel_watcher_kill_switch.set()
 
+        emitter_thread = next(
+            (t for t in threading.enumerate() if t.name.endswith(".background_thread") and t.is_alive()), None
+        )
+        if emitter_thread:
+            emitter_thread.join()
+
+        timer_thread = next((t for t in threading.enumerate() if t.name.startswith("Thread-")), None)
+        if timer_thread:
+            timer_thread.join()
+
     return (
         _call_stream(
             executor_context=executor_context,
@@ -258,21 +263,18 @@ def stream_node(
     executor_context: NodeExecutorContext,
     disable_redirect: bool = True,
 ) -> Iterator[dict]:
-    namespace = _get_file_namespace(executor_context)
-
     def call_node() -> Generator[dict[str, Any], Any, None]:
-
-        workflow_context = _create_workflow_context(executor_context)
+        workflow, namespace = _gather_workflow(executor_context)
        node_module = importlib.import_module(f"{namespace}.{executor_context.node_module}")
 
         Node = getattr(node_module, executor_context.node_name)
 
-        workflow_inputs = _get_workflow_inputs(executor_context)
+        workflow_inputs = _get_workflow_inputs(executor_context, workflow.__class__)
         workflow_state = _get_workflow_state(executor_context, workflow_inputs=workflow_inputs)
 
         node = Node(
             state=workflow_state,
-            context=
+            context=workflow._context,
         )
 
         executor_context.stream_start_time = time.time_ns()
@@ -339,7 +341,7 @@ def _call_stream(
     yield vembda_fulfilled_event.model_dump(mode="json")
 
 
-def _create_workflow(executor_context:
+def _create_workflow(executor_context: BaseExecutorContext, namespace: str) -> BaseWorkflow:
     workflow_context = _create_workflow_context(executor_context)
     Workflow = BaseWorkflow.load_from_module(namespace)
     VembdaExecutionFulfilledEvent.model_rebuild(
@@ -431,7 +433,9 @@ def _dump_event(event: BaseEvent, executor_context: BaseExecutorContext, client:
     return dump
 
 
-def _get_workflow_inputs(executor_context: BaseExecutorContext) -> Optional[BaseInputs]:
+def _get_workflow_inputs(
+    executor_context: BaseExecutorContext, workflow_class: Type[BaseWorkflow]
+) -> Optional[BaseInputs]:
     if not executor_context.inputs:
         return None
 
@@ -443,17 +447,22 @@ def _get_workflow_inputs(executor_context: BaseExecutorContext) -> Optional[Base
     try:
         inputs_module = importlib.import_module(inputs_module_path)
     except Exception as e:
-        raise WorkflowInitializationException(
+        raise WorkflowInitializationException(
+            message=f"Failed to initialize workflow inputs: {e}",
+            workflow_definition=workflow_class,
+        ) from e
 
     if not hasattr(inputs_module, "Inputs"):
         raise WorkflowInitializationException(
-            f"Inputs module {inputs_module_path} does not have a required Inputs class"
+            message=f"Inputs module {inputs_module_path} does not have a required Inputs class",
+            workflow_definition=workflow_class,
         )
 
     if not issubclass(inputs_module.Inputs, BaseInputs):
         raise WorkflowInitializationException(
-            f"""The class {inputs_module_path}.Inputs was expected to be a subclass of BaseInputs, \
-but found {inputs_module.Inputs.__class__.__name__}"""
+            message=f"""The class {inputs_module_path}.Inputs was expected to be a subclass of BaseInputs, \
+but found {inputs_module.Inputs.__class__.__name__}""",
+            workflow_definition=workflow_class,
        )
 
     return inputs_module.Inputs(**executor_context.inputs)
@@ -493,7 +502,7 @@ def _get_run_from_node(executor_context: WorkflowExecutorContext, workflow: Base
     return None
 
 
-def _gather_workflow(context: WorkflowExecutorContext) -> Tuple[BaseWorkflow, str]:
+def _gather_workflow(context: BaseExecutorContext) -> Tuple[BaseWorkflow, str]:
     try:
         namespace = _get_file_namespace(context)
         if namespace != LOCAL_WORKFLOW_MODULE:
@@ -505,7 +514,10 @@ def _gather_workflow(context: WorkflowExecutorContext) -> Tuple[BaseWorkflow, st
         return workflow, namespace
     except Exception as e:
         logger.exception("Failed to initialize Workflow")
-        raise WorkflowInitializationException(
+        raise WorkflowInitializationException(
+            message=f"Failed to initialize workflow: {e}",
+            workflow_definition=BaseWorkflow,
+        ) from e
 
 
 def _gather_display_context(workflow: BaseWorkflow, namespace: str) -> Optional["WorkflowEventDisplayContext"]:
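The main behavioral change in executor.py is where the emitter background thread gets joined: the lookup moved out of the process wrapper and into stream_workflow's finally block, alongside a newly joined timer thread. A self-contained stdlib sketch of that name-based lookup and join follows; the worker function and thread name are stand-ins, and only the enumerate/join logic mirrors the diff.

# Illustration of joining a named background thread before returning, as the diff
# now does in stream_workflow's finally block. The worker is a stand-in.
import threading
import time


def emitter_worker() -> None:
    time.sleep(0.1)  # pretend to flush buffered events, e.g. to a monitoring API


def run_with_cleanup() -> None:
    thread = threading.Thread(target=emitter_worker, name="vellum.background_thread")
    thread.start()
    try:
        pass  # the workflow stream would be consumed here
    finally:
        # Find a live thread whose name ends with ".background_thread" and wait for it.
        emitter_thread = next(
            (t for t in threading.enumerate() if t.name.endswith(".background_thread") and t.is_alive()),
            None,
        )
        if emitter_thread:
            emitter_thread.join()


if __name__ == "__main__":
    run_with_cleanup()
    print("emitter thread joined before returning")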
{vellum_workflow_server-1.4.0.post1.dist-info → vellum_workflow_server-1.4.1.post2.dist-info}/WHEEL
RENAMED
File without changes