vellum-workflow-server 0.14.73.post2__py3-none-any.whl → 0.14.73.post3__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registry.

Potentially problematic release.

This version of vellum-workflow-server might be problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vellum-workflow-server
-Version: 0.14.73.post2
+Version: 0.14.73.post3
 Summary:
 License: AGPL
 Requires-Python: >=3.9.0,<4
@@ -6,13 +6,13 @@ workflow_server/api/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJW
 workflow_server/api/tests/test_input_display_mapping.py,sha256=drBZqMudFyB5wgiUOcMgRXz7E7ge-Qgxbstw4E4f0zE,2211
 workflow_server/api/tests/test_workflow_view.py,sha256=2nscM_QsYPHkkTG8_JhNbE2LmGL5FQKXEtaLLjXouw0,14591
 workflow_server/api/tests/test_workflow_view_stream_workflow_route.py,sha256=cUVF3tqV8Ay91YfFr2lT2oXONWRN823Nk5M3EOC8wE8,24133
-workflow_server/api/workflow_view.py,sha256=bvkDQ9h267hqWsHSElplg83NGHfoLuR8VaLlMh6zJ9s,15295
-workflow_server/code_exec_runner.py,sha256=XSs4w_b0vDUt0HqSakc26Gxx9aoG2wmwOo-JGVL5QJ4,2388
+workflow_server/api/workflow_view.py,sha256=9_ZI7TGlTRmeNiO47juTRx5TBpAjVmUEGx5C_VqTYMI,14993
+workflow_server/code_exec_runner.py,sha256=njBK48zVUwhAjvap_KY1so-D0UjKgR4UihDuKow3JnM,2274
 workflow_server/config.py,sha256=K5Tavm7wiqCZt0RWWue7zzb8N6e8aWnFOTNlBqEJPcI,1330
 workflow_server/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 workflow_server/core/cancel_workflow.py,sha256=Ffkc3mzmrdMEUcD-sHfEhX4IwVrka-E--SxKA1dUfIU,2185
 workflow_server/core/events.py,sha256=24MA66DVQuaLJJcZrS8IL1Zq4Ohi9CoouKZ5VgoH3Cs,1402
-workflow_server/core/executor.py,sha256=j78gYOgaeJ5Z25nCefnPg4fUPdTLW0tcwaZeWQf0DP0,16588
+workflow_server/core/executor.py,sha256=AjY4qlTxv8T6gq73KXGlWb8gU97xr-p1RMxgbPSg1O0,16288
 workflow_server/core/utils.py,sha256=lgzxkAEjEXPxGXXQlUYTYuCdHht-eDJJmHj5AhEb3_o,1500
 workflow_server/core/workflow_executor_context.py,sha256=a-v48GJbOWUh4JIf_bNwDX-BvfKkg4xwRSPEyRVQmp4,1373
 workflow_server/server.py,sha256=QBU12AaAfAgLqfCDBd24qIJl_mbheiq0-hfcWV7rZM4,1234
@@ -21,13 +21,14 @@ workflow_server/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3h
 workflow_server/utils/exit_handler.py,sha256=_FacDVi4zc3bfTA3D2mJsISePlJ8jpLrnGVo5-xZQFs,743
 workflow_server/utils/log_proxy.py,sha256=nugi6fOgAYKX2X9DIc39TG366rsmmDUPoEtG3gzma_Y,3088
 workflow_server/utils/oom_killer.py,sha256=4Sag_iRQWqbp62iIBn6nKP-pxUHguOF93DdVXZTtJDk,2809
-workflow_server/utils/sentry.py,sha256=Pr3xKvHdk0XFSpXgy-55bWI4J3bbf_36gjDyLOs7oVU,855
+workflow_server/utils/sentry.py,sha256=pqx3X_4W3yOzmz8QMJYUEi39skIKWtrTN5nyFhaPkbk,1597
 workflow_server/utils/system_utils.py,sha256=fTzbdpmZ-0bXiNBLYYQdNJWtFAItZgIH8cLJdoXDuQQ,2114
 workflow_server/utils/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+workflow_server/utils/tests/test_sentry_integration.py,sha256=LGmWiaLhFrx-jslrRjRq9JY6Z5ShLZyx_N_L0-FU6OI,2100
 workflow_server/utils/tests/test_system_utils.py,sha256=MdBxI9gxUOpR_JBAHpEz6dGFY6JjxhMSM2oExpqFvNA,4314
 workflow_server/utils/tests/test_utils.py,sha256=qwK5Rmy3RQyjtlUrYAuGuDlBeRzZKsf1yS-y2IpUizQ,6452
 workflow_server/utils/utils.py,sha256=Wqqn-1l2ugkGgy5paWWdt0AVxAyPMQCYcnRSSOMjXlA,4355
-vellum_workflow_server-0.14.73.post2.dist-info/METADATA,sha256=7J8qwzytgU5lH36SRdvkqMfN35req2uxbKYeZJhVCQI,2243
-vellum_workflow_server-0.14.73.post2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-vellum_workflow_server-0.14.73.post2.dist-info/entry_points.txt,sha256=uB_0yPkr7YV6RhEXzvFReUM8P4OQBlVXD6TN6eb9-oc,277
-vellum_workflow_server-0.14.73.post2.dist-info/RECORD,,
+vellum_workflow_server-0.14.73.post3.dist-info/METADATA,sha256=WI6P39NDrxKvYn99EqvupmJvsghS7m2bvR-InCMll1U,2243
+vellum_workflow_server-0.14.73.post3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_workflow_server-0.14.73.post3.dist-info/entry_points.txt,sha256=uB_0yPkr7YV6RhEXzvFReUM8P4OQBlVXD6TN6eb9-oc,277
+vellum_workflow_server-0.14.73.post3.dist-info/RECORD,,
@@ -15,7 +15,6 @@ from typing import Any, Dict, Generator, Iterator, Union, cast
 
 from flask import Blueprint, Response, current_app as app, request, stream_with_context
 from pydantic import ValidationError
-import sentry_sdk
 from vellum_ee.workflows.display.nodes.get_node_display_class import get_node_display_class
 from vellum_ee.workflows.display.types import WorkflowDisplayContext
 
@@ -115,7 +114,6 @@ def stream_workflow_route() -> Response:
         )
         increment_process_count(1)
     except Exception as e:
-        sentry_sdk.set_tag("vellum_trace_id", str(context.trace_id))
         logger.exception(e)
 
         process_output_queue.put(create_vembda_rejected_event(context, traceback.format_exc()))
@@ -202,7 +200,6 @@ def stream_workflow_route() -> Response:
 
             continue
         except Exception as e:
-            sentry_sdk.set_tag("vellum_trace_id", str(context.trace_id))
            logger.exception(e)
            break
 
@@ -300,7 +297,6 @@ def stream_node_route() -> Response:
                 # This happens when theres a problem with the stream function call
                 # itself not the workflow runner
                 yield create_vembda_rejected_event(context, "Internal Server Error")
-                sentry_sdk.set_tag("vellum_trace_id", str(context.trace_id))
                 app.logger.exception(stream_future.exception())
                 break
             else:
@@ -377,7 +373,6 @@ def get_version_route() -> tuple[dict, int]:
 
         resp["nodes"] = nodes
     except Exception as e:
-        sentry_sdk.set_tag("vellum_trace_id", "unknown")
         logger.exception(f"Failed to discover nodes: {str(e)}")
         resp["nodes"] = []
 
@@ -5,8 +5,6 @@ import os
 from uuid import uuid4
 from typing import Optional
 
-import sentry_sdk
-
 from vellum.workflows.exceptions import WorkflowInitializationException
 from workflow_server.api.workflow_view import get_workflow_request_context
 from workflow_server.core.events import VembdaExecutionInitiatedBody, VembdaExecutionInitiatedEvent
@@ -56,7 +54,6 @@ def run_code_exec_stream() -> None:
         fulfilled_event = serialize_vembda_rejected_event(context, str(e))
         print(f"{_EVENT_LINE}{fulfilled_event}")  # noqa: T201
     except Exception as e:
-        sentry_sdk.set_tag("vellum_trace_id", str(context.trace_id) if context else "unknown")
         logger.exception(e)
 
         event = serialize_vembda_rejected_event(context, "Internal Server Error")
@@ -15,7 +15,6 @@ from uuid import UUID, uuid4
 from typing import Any, Callable, Generator, Iterator, Optional, Tuple, Type
 
 from pebble import concurrent
-import sentry_sdk
 from vellum_ee.workflows.display.workflows import BaseWorkflowDisplay
 from vellum_ee.workflows.server.virtual_file_loader import VirtualFileFinder
 
@@ -68,7 +67,6 @@ def _stream_node_wrapper(executor_context: NodeExecutorContext, queue: Queue) ->
         for event in stream_node(executor_context=executor_context):
             queue.put(event)
     except Exception as e:
-        sentry_sdk.set_tag("vellum_trace_id", str(executor_context.trace_id))
         logger.exception(e)
         queue.put(
             VembdaExecutionFulfilledEvent(
@@ -106,7 +104,6 @@ def _stream_workflow_wrapper(executor_context: WorkflowExecutorContext, queue: Q
         if not span_id_emitted:
             queue.put(f"{SPAN_ID_EVENT}:{uuid4()}")
 
-        sentry_sdk.set_tag("vellum_trace_id", str(executor_context.trace_id))
         logger.exception(e)
         queue.put(serialize_vembda_rejected_event(executor_context, "Internal Server Error"))
         queue.put(STREAM_FINISHED_EVENT)
@@ -456,7 +453,6 @@ def _gather_workflow(context: WorkflowExecutorContext) -> Tuple[BaseWorkflow, st
         )
         return workflow, namespace
     except Exception as e:
-        sentry_sdk.set_tag("vellum_trace_id", str(context.trace_id))
         logger.exception("Failed to initialize Workflow")
         raise WorkflowInitializationException(f"Failed to initialize workflow: {e}") from e
 
@@ -465,6 +461,5 @@ def _gather_display_context(workflow: BaseWorkflow, namespace: str) -> Optional[
     try:
         return BaseWorkflowDisplay.gather_event_display_context(namespace, workflow.__class__)
     except Exception:
-        sentry_sdk.set_tag("vellum_trace_id", "unknown")
         logger.exception("Unable to Parse Workflow Display Context")
         return None
@@ -5,10 +5,44 @@ import sentry_sdk
 from sentry_sdk.integrations.logging import LoggingIntegration
 
 
+def _tag_trace_id(event: dict) -> None:
+    if "request" not in event:
+        return
+
+    if not isinstance(event["request"], dict):
+        return
+
+    url = event["request"].get("url")
+    if not isinstance(url, str):
+        return
+
+    if not url.endswith("/workflow/stream"):
+        return
+
+    body = event["request"].get("data")
+    if not isinstance(body, dict):
+        return
+
+    execution_context = body.get("execution_context")
+    if not isinstance(execution_context, dict):
+        return
+
+    trace_id = execution_context.get("trace_id")
+    if not isinstance(trace_id, str):
+        return
+
+    if "tags" not in event:
+        event["tags"] = {}
+
+    event["tags"]["vellum_trace_id"] = trace_id
+
+
 def before_send(event: dict, hint: dict) -> Optional[dict]:
     if "exc_info" in hint:
         _, _, _ = hint["exc_info"]
 
+    _tag_trace_id(event)
+
     return event
 
 
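Note: the per-call-site sentry_sdk.set_tag("vellum_trace_id", ...) lines removed from workflow_view.py, code_exec_runner.py, and executor.py above are consolidated into this single before_send hook, which derives the tag from the /workflow/stream request payload. A minimal sketch of how such a hook is typically registered with the Sentry SDK follows; the init_sentry helper name, the SENTRY_DSN lookup, and the logging levels are assumptions for illustration and are not taken from this diff.

import logging
import os

import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration

from workflow_server.utils.sentry import before_send  # hook shown in the hunk above


def init_sentry() -> None:
    # Hypothetical wiring helper; only documented sentry_sdk.init arguments are used.
    dsn = os.environ.get("SENTRY_DSN")
    if not dsn:
        return
    sentry_sdk.init(
        dsn=dsn,
        before_send=before_send,  # tags events with vellum_trace_id via _tag_trace_id
        integrations=[LoggingIntegration(level=logging.INFO, event_level=logging.ERROR)],
    )

With a hook like this in place, any event captured for a /workflow/stream request carries the vellum_trace_id tag without each call site having to set it.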
@@ -0,0 +1,69 @@
+import pytest
+from uuid import uuid4
+
+from workflow_server.server import create_app
+
+
+@pytest.fixture
+def mock_sentry_capture_envelope(mocker):
+    mock_transport = mocker.patch("sentry_sdk.client.make_transport")
+    return mock_transport.return_value.capture_envelope
+
+
+def test_sentry_integration_with_workflow_endpoints(monkeypatch, mock_sentry_capture_envelope):
+    # GIVEN sentry is configured
+    monkeypatch.setenv("SENTRY_DSN", "https://test-dsn@sentry.io/1234567890")
+
+    # AND our /workflow/stream endpoint raises an exception
+    def mock_get_version():
+        raise Exception("Test exception")
+
+    monkeypatch.setattr("workflow_server.api.workflow_view.get_version", mock_get_version)
+
+    # AND we have a mock trace_id
+    trace_id = str(uuid4())
+
+    # AND we have a mock request body
+    body = {
+        "execution_id": uuid4(),
+        "inputs": [],
+        "environment_api_key": "test",
+        "module": "workflow",
+        "timeout": 360,
+        "files": {
+            "__init__.py": "",
+            "workflow.py": """\
+from vellum.workflows import BaseWorkflow
+
+class Workflow(BaseWorkflow):
+    pass
+""",
+        },
+        "execution_context": {
+            "trace_id": trace_id,
+            "parent_context": {
+                "type": "API_REQUEST",
+                "span_id": str(uuid4()),
+                "parent": None,
+            },
+        },
+    }
+
+    # WHEN we call the /workflow/version endpoint
+    flask_app = create_app()
+
+    with flask_app.test_client() as test_client:
+        response = test_client.post("/workflow/stream", json=body)
+
+    # THEN we get a 500 error
+    assert response.status_code == 500
+
+    # AND sentry captures the error with the correct data
+    assert mock_sentry_capture_envelope.call_count == 1
+    envelope = mock_sentry_capture_envelope.call_args[0][0]
+    event = envelope.get_event()
+    assert event["level"] == "error"
+    assert "Test exception" in event["exception"]["values"][0]["value"]
+
+    # AND the trace_id is tagged
+    assert event["tags"]["vellum_trace_id"] == trace_id
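As a usage illustration of the new behavior (not part of the package), a hand-built Sentry event for a /workflow/stream request comes back from before_send with the vellum_trace_id tag applied; the event shape below mirrors the request payload exercised by the test above, and the URL host is a placeholder.

from workflow_server.utils.sentry import before_send  # module path taken from the RECORD above

# Minimal Sentry-style event for a /workflow/stream request (shape assumed for illustration).
event = {
    "request": {
        "url": "https://vembda.example.com/workflow/stream",  # placeholder host
        "data": {"execution_context": {"trace_id": "11111111-2222-3333-4444-555555555555"}},
    },
}

processed = before_send(event, hint={})  # no exc_info in the hint, so only tagging runs
assert processed is not None
assert processed["tags"]["vellum_trace_id"] == "11111111-2222-3333-4444-555555555555"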