dasein-core 0.2.12.tar.gz → 0.2.14.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. {dasein_core-0.2.12/src/dasein_core.egg-info → dasein_core-0.2.14}/PKG-INFO +1 -1
  2. {dasein_core-0.2.12 → dasein_core-0.2.14}/pyproject.toml +1 -1
  3. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/api.py +26 -73
  4. {dasein_core-0.2.12 → dasein_core-0.2.14/src/dasein_core.egg-info}/PKG-INFO +1 -1
  5. {dasein_core-0.2.12 → dasein_core-0.2.14}/LICENSE +0 -0
  6. {dasein_core-0.2.12 → dasein_core-0.2.14}/MANIFEST.in +0 -0
  7. {dasein_core-0.2.12 → dasein_core-0.2.14}/README.md +0 -0
  8. {dasein_core-0.2.12 → dasein_core-0.2.14}/examples/dasein_examples.ipynb +0 -0
  9. {dasein_core-0.2.12 → dasein_core-0.2.14}/setup.cfg +0 -0
  10. {dasein_core-0.2.12 → dasein_core-0.2.14}/setup.py +0 -0
  11. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/__init__.py +0 -0
  12. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/advice_format.py +0 -0
  13. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/capture.py +0 -0
  14. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/config.py +0 -0
  15. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/events.py +0 -0
  16. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/extractors.py +0 -0
  17. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/injection_strategies.py +0 -0
  18. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/injector.py +0 -0
  19. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/microturn.py +0 -0
  20. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/__init__.py +0 -0
  21. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/LICENSE +0 -0
  22. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/LICENSES_SOURCES +0 -0
  23. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/README.md +0 -0
  24. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/accuracy.json +0 -0
  25. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/attribute_ruler/patterns +0 -0
  26. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/config.cfg +0 -0
  27. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/lemmatizer/lookups/lookups.bin +0 -0
  28. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/meta.json +0 -0
  29. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/ner/cfg +0 -0
  30. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/ner/model +0 -0
  31. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/ner/moves +0 -0
  32. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/parser/cfg +0 -0
  33. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/parser/model +0 -0
  34. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/parser/moves +0 -0
  35. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/senter/cfg +0 -0
  36. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/senter/model +0 -0
  37. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/tagger/cfg +0 -0
  38. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/tagger/model +0 -0
  39. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/tok2vec/cfg +0 -0
  40. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/tok2vec/model +0 -0
  41. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/tokenizer +0 -0
  42. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/vocab/key2row +0 -0
  43. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/vocab/lookups.bin +0 -0
  44. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/vocab/strings.json +0 -0
  45. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/vocab/vectors +0 -0
  46. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/en_core_web_sm-3.7.1/vocab/vectors.cfg +0 -0
  47. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm/meta.json +0 -0
  48. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/LICENSE +0 -0
  49. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/LICENSES_SOURCES +0 -0
  50. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/METADATA +0 -0
  51. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/RECORD +0 -0
  52. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/WHEEL +0 -0
  53. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/entry_points.txt +0 -0
  54. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/models/en_core_web_sm/en_core_web_sm-3.7.1.dist-info/top_level.txt +0 -0
  55. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/pipecleaner.py +0 -0
  56. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/services/__init__.py +0 -0
  57. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/services/post_run_client.py +0 -0
  58. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/services/pre_run_client.py +0 -0
  59. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/services/service_adapter.py +0 -0
  60. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/services/service_config.py +0 -0
  61. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/trace_buffer.py +0 -0
  62. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/types.py +0 -0
  63. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein/wrappers.py +0 -0
  64. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein_core.egg-info/SOURCES.txt +0 -0
  65. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein_core.egg-info/dependency_links.txt +0 -0
  66. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein_core.egg-info/requires.txt +0 -0
  67. {dasein_core-0.2.12 → dasein_core-0.2.14}/src/dasein_core.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dasein-core
- Version: 0.2.12
+ Version: 0.2.14
  Summary: Universal memory for agentic AI. Attach a brain to any LangChain/LangGraph agent in a single line.
  Author-email: Dasein Team <support@dasein.ai>
  License: MIT
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "dasein-core"
- version = "0.2.12"
+ version = "0.2.14"
  description = "Universal memory for agentic AI. Attach a brain to any LangChain/LangGraph agent in a single line."
  readme = "README.md"
  requires-python = ">=3.8"
@@ -887,6 +887,11 @@ class CognateProxy:
  # Wrap the agent's LLM with our trace capture wrapper
  self._wrap_agent_llm()

+ # CRITICAL: Update langgraph_params to use wrapped LLM for recreation
+ if self._is_langgraph and self._langgraph_params and self._wrapped_llm:
+     print(f" [DASEIN][WRAPPER] Updating langgraph_params to use wrapped LLM")
+     self._langgraph_params['model'] = self._wrapped_llm
+
  # Inject universal dead-letter tool
  self._inject_deadletter_tool()

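The hunk above fixes a stale-reference issue: the wrapped LLM was installed on the live agent, but the saved LangGraph constructor params still pointed at the raw model, so a recreated graph would drop tracing. A minimal, runnable sketch of the pattern, where WrappedLLM, build_agent, and ProxySketch are hypothetical stand-ins rather than the package's actual classes:

# Sketch only: keep stored constructor params in sync with a wrapped model.
class WrappedLLM:
    def __init__(self, inner):
        self.inner = inner  # the raw model being traced

def build_agent(model, tools):
    return {"model": model, "tools": tools}  # placeholder for the real agent factory

class ProxySketch:
    def __init__(self, model, tools):
        self._params = {"model": model, "tools": tools}  # saved for later recreation
        self._wrapped = WrappedLLM(model)                # tracing wrapper around the LLM
        self._params["model"] = self._wrapped            # rebuilds now see the wrapper

    def recreate_agent(self):
        return build_agent(**self._params)

agent = ProxySketch(model="raw-llm", tools=[]).recreate_agent()
assert isinstance(agent["model"], WrappedLLM)  # recreation picks up the wrapper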
@@ -1754,14 +1759,24 @@ Follow these rules when planning your actions."""
  # 1. Search in agent
  agent_llm = self._find_llm_recursively(self._agent, max_depth=5)
  if agent_llm:
-     all_llms.append(('agent', agent_llm))
+     # CRITICAL: If this is a DaseinLLMWrapper, we need to patch the INNER LLM for pipecleaner!
+     # But we skip patching the wrapper itself to avoid double callbacks
+     if isinstance(agent_llm, DaseinLLMWrapper) and hasattr(agent_llm, '_llm'):
+         print(f"[DASEIN][WRAPPER] Found DaseinLLMWrapper, patching inner LLM: {type(agent_llm._llm).__name__}")
+         all_llms.append(('agent_inner', agent_llm._llm))
+     else:
+         all_llms.append(('agent', agent_llm))

  # 2. Search in tools (where Summary LLM lives!)
  if hasattr(self._agent, 'tools'):
      for i, tool in enumerate(self._agent.tools or []):
          tool_llm = self._find_llm_recursively(tool, max_depth=3, path=f"tools[{i}]")
          if tool_llm:
-             all_llms.append((f'tool_{i}_{getattr(tool, "name", "unknown")}', tool_llm))
+             # Skip if it's a DaseinLLMWrapper (already handles callbacks)
+             if isinstance(tool_llm, DaseinLLMWrapper):
+                 print(f"[DASEIN][WRAPPER] Found DaseinLLMWrapper in tool - skipping")
+             else:
+                 all_llms.append((f'tool_{i}_{getattr(tool, "name", "unknown")}', tool_llm))

  print(f"[DASEIN][WRAPPER] Found {len(all_llms)} LLM(s)")
  for location, llm in all_llms:
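The change above applies an unwrap-before-patch rule during LLM discovery: if the object found is already the callback-emitting DaseinLLMWrapper, the pipecleaner patch targets the inner model (or is skipped entirely for tools), so callbacks never fire twice. A rough, self-contained sketch of that selection logic, with made-up names (WrapperSketch, collect_patch_targets):

# Sketch only: choose patch targets, unwrapping a known wrapper and skipping duplicates.
class WrapperSketch:                      # stands in for DaseinLLMWrapper
    def __init__(self, inner):
        self._llm = inner

def collect_patch_targets(agent_llm, tool_llms, wrapper_cls=WrapperSketch):
    targets = []
    if isinstance(agent_llm, wrapper_cls) and hasattr(agent_llm, "_llm"):
        targets.append(("agent_inner", agent_llm._llm))   # patch the raw model inside
    elif agent_llm is not None:
        targets.append(("agent", agent_llm))
    for name, llm in tool_llms:
        if isinstance(llm, wrapper_cls):
            continue                                      # wrapper already owns callbacks
        targets.append((name, llm))
    return targets

print(collect_patch_targets(WrapperSketch("gemini"), [("tool_0_search", "raw-tool-llm")]))
# -> [('agent_inner', 'gemini'), ('tool_0_search', 'raw-tool-llm')]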
@@ -2045,43 +2060,10 @@ Follow these rules when planning your actions."""
      traceback.print_exc()

  try:
-     # CRITICAL FIX: Ensure callbacks propagate to _generate/_agenerate
-     # AgentExecutor doesn't pass run_manager to these internal methods
-     # So we need to manually inject the callback handler
-     if meth_name in ['_generate', '_agenerate'] and callback_handler:
-         if 'run_manager' not in kwargs and hasattr(callback_handler, 'on_llm_start'):
-             # Manually trigger on_llm_start since no run_manager
-             import uuid
-             run_id = uuid.uuid4()
-             # Extract messages for on_llm_start
-             messages = args[0] if args else []
-             prompts = []
-             for msg in (messages if isinstance(messages, list) else [messages]):
-                 if hasattr(msg, 'content'):
-                     prompts.append(str(msg.content))
-                 else:
-                     prompts.append(str(msg))
-
-             # Call on_llm_start
-             callback_handler.on_llm_start(
-                 serialized={'name': type(self_llm).__name__},
-                 prompts=prompts,
-                 run_id=run_id
-             )
-
-             # Store run_id for on_llm_end
-             if not hasattr(self_llm, '_dasein_pending_run_ids'):
-                 self_llm._dasein_pending_run_ids = []
-             self_llm._dasein_pending_run_ids.append(run_id)
-
+     # NOTE: Manual callback injection DISABLED - DaseinLLMWrapper handles callbacks
+     # The patched methods ONLY do pipecleaner deduplication, not callbacks
      result = await orig_method(self_llm, *args, **kwargs)

-     # Call on_llm_end if we called on_llm_start
-     if meth_name in ['_generate', '_agenerate'] and callback_handler:
-         if hasattr(self_llm, '_dasein_pending_run_ids') and self_llm._dasein_pending_run_ids:
-             run_id = self_llm._dasein_pending_run_ids.pop(0)
-             callback_handler.on_llm_end(result, run_id=run_id)
-
      # 🚨 MICROTURN ENFORCEMENT - DISABLED
      # Microturn can interfere with tool execution, so it's disabled
      # TODO: Re-enable with proper gating if needed for specific use cases
@@ -2310,43 +2292,10 @@ Follow these rules when planning your actions."""
      traceback.print_exc()

  try:
-     # CRITICAL FIX: Ensure callbacks propagate to _generate/_agenerate
-     # AgentExecutor doesn't pass run_manager to these internal methods
-     # So we need to manually inject the callback handler
-     if meth_name in ['_generate', '_agenerate'] and callback_handler:
-         if 'run_manager' not in kwargs and hasattr(callback_handler, 'on_llm_start'):
-             # Manually trigger on_llm_start since no run_manager
-             import uuid
-             run_id = uuid.uuid4()
-             # Extract messages for on_llm_start
-             messages = args[0] if args else []
-             prompts = []
-             for msg in (messages if isinstance(messages, list) else [messages]):
-                 if hasattr(msg, 'content'):
-                     prompts.append(str(msg.content))
-                 else:
-                     prompts.append(str(msg))
-
-             # Call on_llm_start
-             callback_handler.on_llm_start(
-                 serialized={'name': type(self_llm).__name__},
-                 prompts=prompts,
-                 run_id=run_id
-             )
-
-             # Store run_id for on_llm_end
-             if not hasattr(self_llm, '_dasein_pending_run_ids'):
-                 self_llm._dasein_pending_run_ids = []
-             self_llm._dasein_pending_run_ids.append(run_id)
-
+     # NOTE: Manual callback injection DISABLED - DaseinLLMWrapper handles callbacks
+     # The patched methods ONLY do pipecleaner deduplication, not callbacks
      result = orig_method(self_llm, *args, **kwargs)

-     # Call on_llm_end if we called on_llm_start
-     if meth_name in ['_generate', '_agenerate'] and callback_handler:
-         if hasattr(self_llm, '_dasein_pending_run_ids') and self_llm._dasein_pending_run_ids:
-             run_id = self_llm._dasein_pending_run_ids.pop(0)
-             callback_handler.on_llm_end(result, run_id=run_id)
-
      # 🚨 MICROTURN ENFORCEMENT - DISABLED (can interfere with tool execution)
      # TODO: Re-enable with proper gating if needed

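The two hunks above (async and sync variants of the same patch) delete the hand-rolled on_llm_start/on_llm_end emission; callback bookkeeping is left to DaseinLLMWrapper, and the patched _generate/_agenerate now only rewrite inputs. A minimal sketch of that division of labor, using hypothetical names (FakeLLM, patch_generate) rather than the package's actual patching code:

# Sketch only: the method patch transforms inputs; a separate wrapper owns callbacks.
class FakeLLM:
    def _generate(self, messages, **kwargs):
        return f"reply to {messages}"

def dedupe(messages):
    seen, out = set(), []
    for m in messages:
        if m not in seen:
            seen.add(m)
            out.append(m)
    return out

def patch_generate(llm_cls, transform):
    original = llm_cls._generate

    def patched(self, messages, **kwargs):
        return original(self, transform(messages), **kwargs)  # no callback calls here

    llm_cls._generate = patched

patch_generate(FakeLLM, dedupe)
print(FakeLLM()._generate(["same chunk", "same chunk", "new chunk"]))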
@@ -3841,7 +3790,11 @@ Follow these rules when planning your actions."""
  print(f"[DASEIN] 🔧 Pre-loading embedding model for pipecleaner (found filter search rules)...")
  from .pipecleaner import _get_embedding_model
  try:
-     _get_embedding_model() # Warm up the model
+     # Suppress protobuf warnings from sentence-transformers
+     import warnings
+     with warnings.catch_warnings():
+         warnings.filterwarnings('ignore', category=Warning)
+         _get_embedding_model() # Warm up the model
      print(f"[DASEIN] ✅ Embedding model pre-loaded successfully")
  except Exception as e:
      print(f"[DASEIN] ⚠️ Failed to pre-load embedding model: {e}")
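The warm-up change above uses the standard library's scoped-filter idiom: warnings.catch_warnings() saves and restores the filter state, so the blanket ignore only applies while the embedding model loads. A small self-contained example of the same idiom (noisy_load is a placeholder, not the real loader):

# Sketch only: suppress warnings for a single noisy call, then restore normal behavior.
import warnings

def noisy_load():
    warnings.warn("protobuf gencode version mismatch")  # stands in for the model import

with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=Warning)
    noisy_load()   # warning suppressed inside the block
noisy_load()       # outside the block, the usual filters apply again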
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dasein-core
- Version: 0.2.12
+ Version: 0.2.14
  Summary: Universal memory for agentic AI. Attach a brain to any LangChain/LangGraph agent in a single line.
  Author-email: Dasein Team <support@dasein.ai>
  License: MIT