projectdavid 1.33.11-py3-none-any.whl → 1.33.13-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of projectdavid might be problematic; more details are available on the registry's page for this release.

projectdavid/clients/synchronous_inference_wrapper.py

@@ -11,10 +11,15 @@ LOG = UtilsInterface.LoggingUtility()
 
 
 class SynchronousInferenceStream:
+    # ------------------------------------------------------------ #
+    # GLOBAL EVENT LOOP (single hidden thread for sync wrapper)
+    # ------------------------------------------------------------ #
     _GLOBAL_LOOP = asyncio.new_event_loop()
     asyncio.set_event_loop(_GLOBAL_LOOP)
 
-    # ────────────────────────────────────────────────────────────
+    # ------------------------------------------------------------ #
+    # Init / setup
+    # ------------------------------------------------------------ #
     def __init__(self, inference) -> None:
         self.inference_client = inference
         self.user_id: Optional[str] = None
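
The hunk above pins one long-lived event loop at class level; later hunks drive the async generator with _GLOBAL_LOOP.run_until_complete(asyncio.wait_for(...)). Below is a minimal, self-contained sketch of that sync-over-async pattern using only the standard library; the names _LOOP, _numbers and numbers_sync are illustrative stand-ins, not part of the SDK.

import asyncio
from typing import AsyncIterator, Iterator

# One long-lived loop, reused for every chunk (stand-in for _GLOBAL_LOOP).
_LOOP = asyncio.new_event_loop()
asyncio.set_event_loop(_LOOP)

async def _numbers() -> AsyncIterator[int]:
    """Stand-in for the async inference stream."""
    for i in range(3):
        await asyncio.sleep(0)  # pretend network latency
        yield i

def numbers_sync(timeout_per_chunk: float = 5.0) -> Iterator[int]:
    """Pull items out of the async generator one at a time, synchronously."""
    agen = _numbers().__aiter__()
    while True:
        try:
            yield _LOOP.run_until_complete(
                asyncio.wait_for(agen.__anext__(), timeout=timeout_per_chunk)
            )
        except StopAsyncIteration:
            break

print(list(numbers_sync()))  # [0, 1, 2]

Reusing a single loop avoids creating and tearing one down per chunk, which is also why the class ships the shutdown_loop() house-keeping method visible at the end of this diff.
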
@@ -24,7 +29,6 @@ class SynchronousInferenceStream:
         self.run_id: Optional[str] = None
         self.api_key: Optional[str] = None
 
-    # ────────────────────────────────────────────────────────────
     def setup(
         self,
         user_id: str,
@@ -34,6 +38,7 @@ class SynchronousInferenceStream:
         run_id: str,
         api_key: str,
     ) -> None:
+        """Populate IDs once, so callers only provide provider/model."""
         self.user_id = user_id
         self.thread_id = thread_id
         self.assistant_id = assistant_id
@@ -41,8 +46,10 @@ class SynchronousInferenceStream:
         self.run_id = run_id
         self.api_key = api_key
 
-    # ────────────────────────────────────────────────────────────
-    def stream_chunks(
+    # ------------------------------------------------------------ #
+    # Core sync-to-async streaming wrapper
+    # ------------------------------------------------------------ #
+    def stream_chunks(  # noqa: PLR0915
         self,
         provider: str,
         model: str,
@@ -51,9 +58,15 @@ class SynchronousInferenceStream:
         timeout_per_chunk: float = 280.0,
         suppress_fc: bool = True,
     ) -> Generator[dict, None, None]:
+        """
+        Sync generator that mirrors async `inference_client.stream_inference_response`
+        but (optionally) removes raw <fc> … </fc> output *and* JSON
+        `{"type": "function_call" …}` objects from the stream.
+        """
 
         resolved_api_key = api_key or self.api_key
 
+        # ---------- async inner generator -------------------------------- #
         async def _stream_chunks_async():
             async for chk in self.inference_client.stream_inference_response(
                 provider=provider,
@@ -68,46 +81,58 @@ class SynchronousInferenceStream:
 
         agen = _stream_chunks_async().__aiter__()
 
-        # ── build the <fc> suppressor chain ─────────────────────────
+        # ---------- FC-suppressor plumbing -------------------------------- #
         if suppress_fc:
             _suppressor = FunctionCallSuppressor()
             _peek_gate = PeekGate(_suppressor)
 
-            def _filter_text(txt: str) -> str:  # noqa: D401
+            def _filter_text(txt: str) -> str:
                 return _peek_gate.feed(txt)
 
+            LOG.debug("[SyncStream] Function-call suppression ACTIVE")
         else:
 
-            def _filter_text(txt: str) -> str:  # noqa: D401
+            def _filter_text(txt: str) -> str:
                 return txt
 
-        # helper drain **all** residual bytes from the chain
+            LOG.debug("[SyncStream] Function-call suppression DISABLED")
+
+        # ---------- helper to flush residual buffered text ---------------- #
         def _drain_filters() -> Optional[dict]:
             if not suppress_fc:
                 return None
             parts: list[str] = []
             while True:
-                out = _filter_text("")  # empty feed ⇒ “give me whatever’s left”
+                out = _filter_text("")
                 if not out:
                     break
                 parts.append(out)
+            if not _peek_gate.suppressing and _peek_gate.buf:
+                parts.append(_peek_gate.buf)
+                _peek_gate.buf = ""
             if parts:
-                return {"type": "content", "content": "".join(parts)}
+                return {
+                    "type": "content",
+                    "content": "".join(parts),
+                    "run_id": self.run_id,
+                }
             return None
 
-        # ── main loop ─────────────────────────────────────────────
+        # ---------- main sync loop ---------------------------------------- #
         while True:
             try:
                 chunk = self._GLOBAL_LOOP.run_until_complete(
                     asyncio.wait_for(agen.__anext__(), timeout=timeout_per_chunk)
                 )
 
-                # drop provider-labelled function_call objects
-                if suppress_fc and chunk.get("type") == "function_call":
-                    LOG.debug("[SUPPRESS] provider function_call dropped")
+                # Always attach run_id for front-end helpers
+                chunk["run_id"] = self.run_id
+
+                # ----- bypass filters for status / code-exec related -------- #
+                if chunk.get("type") == "status":
+                    yield chunk
                     continue
 
-                # ② hot-code & file-preview payloads always pass
                 if chunk.get("type") in ("hot_code", "hot_code_output"):
                     yield chunk
                     continue
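
The _drain_filters helper above exists because the PeekGate → FunctionCallSuppressor chain buffers text while it decides whether it is inside an <fc> … </fc> block; at end-of-stream, timeout, or error that buffered text has to be flushed or it would be lost. The new lines additionally flush _peek_gate.buf directly and tag the flushed chunk with run_id. A rough sketch of the drain idea, with a hypothetical BufferingFilter standing in for the real utils classes (whose full API is not visible in this diff):

from typing import Optional

class BufferingFilter:
    """Illustrative stand-in for the PeekGate/FunctionCallSuppressor chain."""

    def __init__(self) -> None:
        self.buf = ""             # text held back while undecided
        self.suppressing = False

    def feed(self, text: str) -> str:
        if text:
            self.buf += text      # keep buffering while undecided
            return ""
        out, self.buf = self.buf, ""  # empty feed means "flush what you have"
        return out

def drain(filt: BufferingFilter, run_id: Optional[str] = None) -> Optional[dict]:
    """Mirror of the _drain_filters() idea: emit residual text as one final chunk."""
    parts: list[str] = []
    while True:
        out = filt.feed("")
        if not out:
            break
        parts.append(out)
    if parts:
        return {"type": "content", "content": "".join(parts), "run_id": run_id}
    return None
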
@@ -119,35 +144,46 @@ class SynchronousInferenceStream:
                     yield chunk
                     continue
 
-                # ordinary TEXT content run through the <fc> filter
+                # ----- NEW: swallow raw JSON function_call objects ---------- #
+                if suppress_fc and chunk.get("type") == "function_call":
+                    LOG.debug(
+                        "[SyncStream] Swallowing JSON function_call chunk: %s",
+                        chunk.get("name") or "<unnamed>",
+                    )
+                    continue
+
+                # ----- text-level suppression ------------------------------- #
                 if isinstance(chunk.get("content"), str):
                     chunk["content"] = _filter_text(chunk["content"])
                     if chunk["content"] == "":
-                        continue  # fully suppressed / still buffering
+                        # Entire segment was inside <fc> … </fc>
+                        continue
 
-                # ④ everything else streams unchanged
                 yield chunk
 
-            # ─────────── graceful endings ───────────
             except StopAsyncIteration:
                 if tail := _drain_filters():
                     yield tail
-                LOG.info("Stream completed normally.")
+                LOG.info("[SyncStream] Stream completed normally.")
                 break
 
            except asyncio.TimeoutError:
                 if tail := _drain_filters():
                     yield tail
-                LOG.error("[TimeoutError] Chunk wait expired aborting stream.")
+                LOG.error("[SyncStream] Timeout waiting for next chunk.")
                 break
 
             except Exception as exc:  # noqa: BLE001
                 if tail := _drain_filters():
                     yield tail
-                LOG.error("Unexpected streaming error: %s", exc, exc_info=True)
+                LOG.error(
+                    "[SyncStream] Unexpected streaming error: %s", exc, exc_info=True
+                )
                 break
 
-    # ────────────────────────────────────────────────────────────
+    # ------------------------------------------------------------ #
+    # House-keeping
+    # ------------------------------------------------------------ #
     @classmethod
     def shutdown_loop(cls) -> None:
         if cls._GLOBAL_LOOP and not cls._GLOBAL_LOOP.is_closed():
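
Taken together, the changes above mean every yielded chunk now carries run_id, status chunks bypass the filters, and JSON function_call chunks are swallowed when suppress_fc=True. A hypothetical consumer of stream_chunks() could dispatch on the chunk types visible in this diff (not an exhaustive list) roughly like this:

from typing import Iterable

def render(chunks: Iterable[dict]) -> str:
    """Hypothetical consumer; chunk 'type' values are those visible in this diff."""
    text_parts: list[str] = []
    for chunk in chunks:
        kind = chunk.get("type")
        if kind == "status":
            print(f"[status] run={chunk.get('run_id')}")    # progress updates
        elif kind in ("hot_code", "hot_code_output"):
            print(f"[{kind}] {chunk.get('content', '')}")   # code-exec payloads
        elif kind == "content":
            text_parts.append(chunk.get("content", ""))     # assistant text
        # "function_call" chunks only appear when suppress_fc=False
    return "".join(text_parts)

# usage sketch (assumes a stream object configured via setup()):
# text = render(stream.stream_chunks(provider="…", model="…"))
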
projectdavid-1.33.11.dist-info/METADATA → projectdavid-1.33.13.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: projectdavid
-Version: 1.33.11
+Version: 1.33.13
 Summary: Python SDK for interacting with the Entities Assistant API.
 Author-email: Francis Neequaye Armah <francis.neequaye@projectdavid.co.uk>
 License: PolyForm Noncommercial License 1.0.0

projectdavid-1.33.11.dist-info/RECORD → projectdavid-1.33.13.dist-info/RECORD

@@ -16,7 +16,7 @@ projectdavid/clients/files_client.py,sha256=XkIDzbQFGDrd88taf0Kouc_4YJOPIYEHiIyW
 projectdavid/clients/inference_client.py,sha256=xz4ACPv5Tkis604QxO5mJX1inH_TGDfQP-31geETYpE,6609
 projectdavid/clients/messages_client.py,sha256=467xeIt3VYs6cG8-bl-eDRi_auWOPmfd5tSJDmQSJUI,17232
 projectdavid/clients/runs.py,sha256=-fXOq5L9w2efDPmZkNxb0s2yjl6oN0XN4_aLXqaeceo,25270
-projectdavid/clients/synchronous_inference_wrapper.py,sha256=smbKBkwJZHX2LCqb0pO9RS7N7GFmruzCfmxrQsmJv_o,6204
+projectdavid/clients/synchronous_inference_wrapper.py,sha256=qh94rtNlLqgIxiA_ZbQ1ncOwQTi9aBj5os3sMExLh4E,7070
 projectdavid/clients/threads_client.py,sha256=ekzU5w14zftmtmFkiec3NC90Of-_KVSUY1qH9cmfSFg,6771
 projectdavid/clients/tools_client.py,sha256=GkCVOmwpAoPqVt6aYmH0G1HIFha3iEwR9IIf9teR0j8,11487
 projectdavid/clients/users_client.py,sha256=eCuUb9qvyH1GUFhZu6TRL9zdoK-qzHSs8-Vmrk_0mmg,13729
@@ -35,8 +35,8 @@ projectdavid/utils/monitor_launcher.py,sha256=3YAgJdeuaUvq3JGvpA4ymqFsAnk29nH5q9
 projectdavid/utils/peek_gate.py,sha256=5whMRnDOQjATRpThWDJkvY9ScXuJ7Sd_-9rvGgXeTAQ,2532
 projectdavid/utils/run_monitor.py,sha256=F_WkqIP-qnWH-4llIbileWWLfRj2Q1Cg-ni23SR1rec,3786
 projectdavid/utils/vector_search_formatter.py,sha256=YTe3HPGec26qGY7uxY8_GS8lc4QaN6aNXMzkl29nZpI,1735
-projectdavid-1.33.11.dist-info/licenses/LICENSE,sha256=_8yjiEGttpS284BkfhXxfERqTRZW_tUaHiBB0GTJTMg,4563
-projectdavid-1.33.11.dist-info/METADATA,sha256=aqETkoAw4NxCKZg56GnWf4QNVMdl4-mCZb0f78ej4EQ,11555
-projectdavid-1.33.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-projectdavid-1.33.11.dist-info/top_level.txt,sha256=kil8GU4s7qYRfNnzGnFHhZnSNRSxgNG-J4HLgQMmMtw,13
-projectdavid-1.33.11.dist-info/RECORD,,
+projectdavid-1.33.13.dist-info/licenses/LICENSE,sha256=_8yjiEGttpS284BkfhXxfERqTRZW_tUaHiBB0GTJTMg,4563
+projectdavid-1.33.13.dist-info/METADATA,sha256=wFCKMGJBgK8yku6jOoE2IPsdB9-kfIGL1kBYF89yfNM,11555
+projectdavid-1.33.13.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+projectdavid-1.33.13.dist-info/top_level.txt,sha256=kil8GU4s7qYRfNnzGnFHhZnSNRSxgNG-J4HLgQMmMtw,13
+projectdavid-1.33.13.dist-info/RECORD,,
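
The METADATA and RECORD changes are the mechanical side of the release: the version string is bumped, and the hash and size entry for synchronous_inference_wrapper.py changes because the file grew from 6204 to 7070 bytes. Each RECORD line has the form path,sha256=<digest>,<size>, where the digest is the urlsafe-base64 encoding of the file's SHA-256 hash with the trailing '=' padding stripped. A small sketch for checking an entry against an unpacked wheel:

import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    """Rebuild a wheel RECORD line (path,sha256=<digest>,<size>) for a file."""
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode()},{len(data)}"

# Run from the root of the unpacked 1.33.13 wheel, this should reproduce the
# new line ending in qh94…,7070 for synchronous_inference_wrapper.py:
# print(record_entry("projectdavid/clients/synchronous_inference_wrapper.py"))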