dasein-core 0.2.10__py3-none-any.whl → 0.2.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dasein/api.py +145 -5
- {dasein_core-0.2.10.dist-info → dasein_core-0.2.12.dist-info}/METADATA +1 -1
- {dasein_core-0.2.10.dist-info → dasein_core-0.2.12.dist-info}/RECORD +6 -6
- {dasein_core-0.2.10.dist-info → dasein_core-0.2.12.dist-info}/WHEEL +0 -0
- {dasein_core-0.2.10.dist-info → dasein_core-0.2.12.dist-info}/licenses/LICENSE +0 -0
- {dasein_core-0.2.10.dist-info → dasein_core-0.2.12.dist-info}/top_level.txt +0 -0
dasein/api.py
CHANGED
@@ -881,6 +881,9 @@ class CognateProxy:
         # Initialize KPI tracking
         self._last_run_kpis = None
 
+        # Initialize wrapped LLM (will be set by _wrap_agent_llm if applicable)
+        self._wrapped_llm = None
+
         # Wrap the agent's LLM with our trace capture wrapper
         self._wrap_agent_llm()
 
@@ -1680,7 +1683,69 @@ Follow these rules when planning your actions."""
         self._deadletter_fn = None
 
     def _wrap_agent_llm(self):
-        """
+        """Wrap LLM instance (for SQL agent callbacks) AND monkey-patch (for Pipecleaner)."""
+        try:
+            # STEP 1: Wrap the main agent LLM with DaseinLLMWrapper (captures all _generate calls)
+            # This is critical for SQL agents where callbacks don't propagate properly
+            llm = self._find_llm_recursively(self._agent, max_depth=5)
+            if llm:
+                wrapped_llm = DaseinLLMWrapper(llm, self._callback_handler, verbose=self._verbose)
+                # Replace the original LLM with our wrapped version
+                self._replace_llm_in_structure(self._agent, llm, wrapped_llm, max_depth=5)
+                self._wrapped_llm = wrapped_llm
+                self._vprint(f"[DASEIN][WRAPPER] Successfully wrapped {type(llm).__name__} LLM")
+            else:
+                self._vprint(f"[DASEIN][WRAPPER] Could not find any LLM in agent structure")
+                self._wrapped_llm = None
+
+            # STEP 2: Monkey-patch LLM classes for Pipecleaner deduplication
+            # This is critical for research agents with Summary calls that need deduplication
+            self._monkey_patch_llm_classes()
+
+        except Exception as e:
+            self._vprint(f"[DASEIN][WRAPPER] Failed to wrap agent LLM: {e}")
+            import traceback
+            traceback.print_exc()
+            self._wrapped_llm = None
+
+    def _replace_llm_in_structure(self, obj, original_llm, wrapped_llm, max_depth=5, path=""):
+        """Replace the original LLM with wrapped LLM in the structure."""
+        if max_depth <= 0:
+            return
+
+        # Special handling for RunnableSequence - check steps
+        if hasattr(obj, 'steps') and hasattr(obj, '__iter__'):
+            for i, step in enumerate(obj.steps):
+                if step is original_llm:
+                    self._vprint(f"[DASEIN][WRAPPER] Replacing LLM at {path}.steps[{i}]")
+                    obj.steps[i] = wrapped_llm
+                    return
+                # Check if step has bound attribute (RunnableBinding)
+                if hasattr(step, 'bound') and step.bound is original_llm:
+                    self._vprint(f"[DASEIN][WRAPPER] Replacing LLM at {path}.steps[{i}].bound")
+                    step.bound = wrapped_llm
+                    return
+                # Recursively search in the step
+                self._replace_llm_in_structure(step, original_llm, wrapped_llm, max_depth - 1, f"{path}.steps[{i}]")
+
+        # Search in attributes
+        for attr_name in dir(obj):
+            if attr_name.startswith('_'):
+                continue
+            try:
+                attr_value = getattr(obj, attr_name)
+                if attr_value is original_llm:
+                    self._vprint(f"[DASEIN][WRAPPER] Replacing LLM at {path}.{attr_name}")
+                    setattr(obj, attr_name, wrapped_llm)
+                    return
+                # Recursively search in the attribute
+                if hasattr(attr_value, '__dict__') or hasattr(attr_value, '__iter__'):
+                    self._replace_llm_in_structure(attr_value, original_llm, wrapped_llm, max_depth - 1, f"{path}.{attr_name}")
+            except:
+                continue
+
+    def _monkey_patch_llm_classes(self):
+        """Monkey-patch ALL LLM classes found in agent + tools for Pipecleaner deduplication."""
         try:
             # Find ALL LLMs in agent structure + tools
             print(f"[DASEIN][WRAPPER] Searching for ALL LLMs in agent+tools...")
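
Note (not part of the diff): a minimal, self-contained sketch of the recursive search-and-replace pattern that the new _replace_llm_in_structure helper follows. The names below (EchoLLM, WrappedLLM, replace_in_structure, Agent) are illustrative stand-ins, not the packaged implementation, which additionally handles RunnableSequence.steps and RunnableBinding.bound and logs each replacement.

# Illustrative sketch only -- assumes plain Python objects, not LangChain runnables.
class EchoLLM:
    def invoke(self, prompt):
        return prompt

class WrappedLLM:
    """Stand-in for DaseinLLMWrapper: delegates to the inner LLM after a capture hook."""
    def __init__(self, inner):
        self.inner = inner
    def invoke(self, prompt):
        print("[trace]", prompt)              # capture point
        return self.inner.invoke(prompt)

def replace_in_structure(obj, original, replacement, max_depth=5):
    """Walk public attributes and swap `original` for `replacement` by identity."""
    if max_depth <= 0:
        return
    for name in dir(obj):
        if name.startswith("_"):
            continue
        try:
            value = getattr(obj, name)
        except Exception:
            continue
        if value is original:
            setattr(obj, name, replacement)
            return
        if hasattr(value, "__dict__"):
            replace_in_structure(value, original, replacement, max_depth - 1)

class Agent:
    def __init__(self, llm):
        self.llm = llm

llm = EchoLLM()
agent = Agent(llm)
replace_in_structure(agent, llm, WrappedLLM(llm))
assert isinstance(agent.llm, WrappedLLM)
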
@@ -1713,10 +1778,10 @@ Follow these rules when planning your actions."""
             print(f"[DASEIN][WRAPPER] Patching {llm_class.__name__} (found in {location})...")
 
             # Check what methods the LLM class has
-            #
+            # Patch both user-facing AND internal methods since SQL agents bypass invoke
             print(f"[DASEIN][WRAPPER] Checking LLM methods...")
             methods_to_patch = []
-            for method in ['invoke', 'ainvoke']: #
+            for method in ['invoke', 'ainvoke', '_generate', '_agenerate']: # Include internal methods for SQL agents
                 if hasattr(llm_class, method):
                     print(f"[DASEIN][WRAPPER] - Has {method}")
                     methods_to_patch.append(method)
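
Note (not part of the diff): the hunk above widens the patched surface from the user-facing invoke/ainvoke to the internal _generate/_agenerate, because SQL agents call the internal methods directly. A minimal sketch of class-level patching over several method names; FakeLLM and patch_methods are illustrative, not the packaged code.

import functools

class FakeLLM:
    def invoke(self, prompt):
        return f"echo: {prompt}"
    def _generate(self, prompts):
        return [f"echo: {p}" for p in prompts]

def patch_methods(llm_class, method_names, before):
    """Wrap each method that exists on the class so `before` runs on every call."""
    for name in method_names:
        if not hasattr(llm_class, name):
            continue                           # e.g. FakeLLM has no ainvoke/_agenerate
        original = getattr(llm_class, name)

        @functools.wraps(original)
        def shim(self, *args, __orig=original, __name=name, **kwargs):
            before(__name, args)               # pre-call hook (tracing, dedupe, ...)
            return __orig(self, *args, **kwargs)

        setattr(llm_class, name, shim)

patch_methods(FakeLLM, ["invoke", "ainvoke", "_generate", "_agenerate"],
              before=lambda name, args: print(f"[patch] {name} called"))

print(FakeLLM().invoke("hi"))         # [patch] invoke called, then "echo: hi"
print(FakeLLM()._generate(["hi"]))    # [patch] _generate called, then ["echo: hi"]
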
@@ -1811,7 +1876,7 @@ Follow these rules when planning your actions."""
                         prompt_strings.append(msg.content)
                     elif isinstance(msg, str):
                         prompt_strings.append(msg)
-
+                    else:
                         prompt_strings.append(str(msg))
 
                 # =============================================================
@@ -1929,6 +1994,7 @@ Follow these rules when planning your actions."""
                         break
 
                 if should_dedupe:
+                    try:
                         # Deduplicate each prompt
                         from .pipecleaner import get_or_create_corpus
                         import hashlib
@@ -1969,14 +2035,53 @@ Follow these rules when planning your actions."""
                             kwargs['messages'] = messages_to_dedupe
                         elif 'prompts' in kwargs:
                             kwargs['prompts'] = messages_to_dedupe
+                    except Exception as e:
+                        print(f"[🔥 HOTPATH] ⚠️ Deduplication error: {e}")
+                        import traceback
+                        traceback.print_exc()
                 except Exception as e:
-                    print(f"[🔥 HOTPATH] ⚠️
+                    print(f"[🔥 HOTPATH] ⚠️ Error in pipecleaner preprocessing: {e}")
                     import traceback
                     traceback.print_exc()
 
                 try:
+                    # CRITICAL FIX: Ensure callbacks propagate to _generate/_agenerate
+                    # AgentExecutor doesn't pass run_manager to these internal methods
+                    # So we need to manually inject the callback handler
+                    if meth_name in ['_generate', '_agenerate'] and callback_handler:
+                        if 'run_manager' not in kwargs and hasattr(callback_handler, 'on_llm_start'):
+                            # Manually trigger on_llm_start since no run_manager
+                            import uuid
+                            run_id = uuid.uuid4()
+                            # Extract messages for on_llm_start
+                            messages = args[0] if args else []
+                            prompts = []
+                            for msg in (messages if isinstance(messages, list) else [messages]):
+                                if hasattr(msg, 'content'):
+                                    prompts.append(str(msg.content))
+                                else:
+                                    prompts.append(str(msg))
+
+                            # Call on_llm_start
+                            callback_handler.on_llm_start(
+                                serialized={'name': type(self_llm).__name__},
+                                prompts=prompts,
+                                run_id=run_id
+                            )
+
+                            # Store run_id for on_llm_end
+                            if not hasattr(self_llm, '_dasein_pending_run_ids'):
+                                self_llm._dasein_pending_run_ids = []
+                            self_llm._dasein_pending_run_ids.append(run_id)
+
                     result = await orig_method(self_llm, *args, **kwargs)
 
+                    # Call on_llm_end if we called on_llm_start
+                    if meth_name in ['_generate', '_agenerate'] and callback_handler:
+                        if hasattr(self_llm, '_dasein_pending_run_ids') and self_llm._dasein_pending_run_ids:
+                            run_id = self_llm._dasein_pending_run_ids.pop(0)
+                            callback_handler.on_llm_end(result, run_id=run_id)
+
                     # 🚨 MICROTURN ENFORCEMENT - DISABLED
                     # Microturn can interfere with tool execution, so it's disabled
                     # TODO: Re-enable with proper gating if needed for specific use cases
@@ -2205,8 +2310,43 @@ Follow these rules when planning your actions."""
                     traceback.print_exc()
 
                 try:
+                    # CRITICAL FIX: Ensure callbacks propagate to _generate/_agenerate
+                    # AgentExecutor doesn't pass run_manager to these internal methods
+                    # So we need to manually inject the callback handler
+                    if meth_name in ['_generate', '_agenerate'] and callback_handler:
+                        if 'run_manager' not in kwargs and hasattr(callback_handler, 'on_llm_start'):
+                            # Manually trigger on_llm_start since no run_manager
+                            import uuid
+                            run_id = uuid.uuid4()
+                            # Extract messages for on_llm_start
+                            messages = args[0] if args else []
+                            prompts = []
+                            for msg in (messages if isinstance(messages, list) else [messages]):
+                                if hasattr(msg, 'content'):
+                                    prompts.append(str(msg.content))
+                                else:
+                                    prompts.append(str(msg))
+
+                            # Call on_llm_start
+                            callback_handler.on_llm_start(
+                                serialized={'name': type(self_llm).__name__},
+                                prompts=prompts,
+                                run_id=run_id
+                            )
+
+                            # Store run_id for on_llm_end
+                            if not hasattr(self_llm, '_dasein_pending_run_ids'):
+                                self_llm._dasein_pending_run_ids = []
+                            self_llm._dasein_pending_run_ids.append(run_id)
+
                     result = orig_method(self_llm, *args, **kwargs)
 
+                    # Call on_llm_end if we called on_llm_start
+                    if meth_name in ['_generate', '_agenerate'] and callback_handler:
+                        if hasattr(self_llm, '_dasein_pending_run_ids') and self_llm._dasein_pending_run_ids:
+                            run_id = self_llm._dasein_pending_run_ids.pop(0)
+                            callback_handler.on_llm_end(result, run_id=run_id)
+
                     # 🚨 MICROTURN ENFORCEMENT - DISABLED (can interfere with tool execution)
                     # TODO: Re-enable with proper gating if needed
 
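
Note (not part of the diff): the two blocks above (async and sync variants) implement the same pattern. When a patched _generate/_agenerate runs without a run_manager, on_llm_start is fired manually with a fresh run_id, the run_id is queued on the LLM instance, and on_llm_end is fired with the same run_id after the original method returns. A minimal sketch under assumed names (RecordingHandler, FakeLLM, patched_generate are illustrative; the shipped code targets LangChain's callback handler interface):

import uuid

class RecordingHandler:
    def on_llm_start(self, serialized, prompts, run_id):
        print(f"start {run_id}: {prompts}")
    def on_llm_end(self, result, run_id):
        print(f"end   {run_id}: {result}")

class FakeLLM:
    def _generate(self, messages, **kwargs):
        return " / ".join(messages)

def patched_generate(self_llm, *args, handler=None, **kwargs):
    """Bracket the original _generate with start/end events when no run_manager is supplied."""
    run_id = uuid.uuid4()
    messages = args[0] if args else []
    prompts = [str(m) for m in (messages if isinstance(messages, list) else [messages])]
    if handler and "run_manager" not in kwargs:
        handler.on_llm_start(serialized={"name": type(self_llm).__name__},
                             prompts=prompts, run_id=run_id)
    result = FakeLLM._generate(self_llm, *args, **kwargs)   # stands in for orig_method
    if handler and "run_manager" not in kwargs:
        handler.on_llm_end(result, run_id=run_id)
    return result

print(patched_generate(FakeLLM(), ["hello", "world"], handler=RecordingHandler()))
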
{dasein_core-0.2.10.dist-info → dasein_core-0.2.12.dist-info}/RECORD
CHANGED
@@ -1,6 +1,6 @@
 dasein/__init__.py,sha256=RY0lhaaWB6yJ_5YMRmaHDvQ0eFbc0BGbYNe5OVyxzYE,2316
 dasein/advice_format.py,sha256=5-h4J24L_B2Y9dlmyDuIYtmPCWOGAYoinBEXqpcNg2s,5386
-dasein/api.py,sha256=
+dasein/api.py,sha256=dDOtZVXnfZ3VbiY6N_1u2m3JtiSUwmKC9AO6gxevHxY,256483
 dasein/capture.py,sha256=D4DvknI2wbmVup5WqvNcgw-zW5riEstYG81Rl98uz6o,110942
 dasein/config.py,sha256=lXO8JG4RXbodn3gT5yEnuB0VRwWdrRVwhX3Rm06IZmU,1957
 dasein/events.py,sha256=mG-lnOvQoZUhXbrPSjrG4RME6ywUcbSZ04PscoJ15GI,12896
@@ -45,7 +45,7 @@ dasein/services/post_run_client.py,sha256=UjK3eqf7oWGSuWkKe0vQmeMS0yUUOhYFD4-SZ7
 dasein/services/pre_run_client.py,sha256=tXmz_PQaSfq0xwypiWUAqNkXOmREZ6EwXLC4OM89J-A,4317
 dasein/services/service_adapter.py,sha256=YHk41lR3PXh8WTmxOzzwKf6hwPYGqIdApI92vQKlkAY,7350
 dasein/services/service_config.py,sha256=8_4tpV4mZvfaOc5_yyHbOyL4rYsPHzkLTEY1rtYgLs8,1629
-dasein_core-0.2.
+dasein_core-0.2.12.dist-info/licenses/LICENSE,sha256=7FHjIFEKl_3hSc3tGUVEWmufC_3oi8rh_2zVuL7jMKs,1091
 dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/LICENSE,sha256=OTPBdpebaLxtC8yQLH1sEw8dEn9Hbxe6XNuo2Zz9ABI,1056
 dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/LICENSES_SOURCES,sha256=INnfrNIVESJR8VNB7dGkex-Yvzk6IS8Q8ZT_3H7pipA,2347
 dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/METADATA,sha256=-vGqRxa_M2RwKtLjBhc4JlBQdJ3k7CwOnseT_ReYcic,2958
@@ -53,7 +53,7 @@ dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/RECORD,sha256=dDb6U7
 dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
 dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/entry_points.txt,sha256=OkWs-KxPJtDdpvIFCVXzDC9ECtejhPxv7pP3Tgk2cNg,47
 dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/top_level.txt,sha256=56OIuRbEuhr12HsM9XpCMnTtHRMgNC5Hje4Xeo8wF2c,15
-dasein_core-0.2.
-dasein_core-0.2.
-dasein_core-0.2.
-dasein_core-0.2.
+dasein_core-0.2.12.dist-info/METADATA,sha256=43WrPvrzNviksaZxsnZNnLh0nPU7vBO8-rdI2J9R-JM,10297
+dasein_core-0.2.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dasein_core-0.2.12.dist-info/top_level.txt,sha256=6yYY9kltjvvPsg9K6KyMKRtzEr5qM7sHXN7VzmrDtp0,7
+dasein_core-0.2.12.dist-info/RECORD,,
{dasein_core-0.2.10.dist-info → dasein_core-0.2.12.dist-info}/WHEEL
File without changes
{dasein_core-0.2.10.dist-info → dasein_core-0.2.12.dist-info}/licenses/LICENSE
File without changes
{dasein_core-0.2.10.dist-info → dasein_core-0.2.12.dist-info}/top_level.txt
File without changes