spaik-sdk 0.6.2__py3-none-any.whl → 0.6.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
spaik_sdk/agent/base_agent.py CHANGED
@@ -1,4 +1,5 @@
  import asyncio
+ import uuid
  from abc import ABC
  from typing import Any, AsyncGenerator, Dict, List, Optional, Type, TypeVar
 
@@ -57,9 +58,16 @@ class BaseAgent(ABC):
  cost_provider: Optional[CostProvider] = None,
  ):
  logger.debug("Initializing BaseAgent")
+ # Generate unique instance ID for trace correlation
+ self.agent_instance_id: str = str(uuid.uuid4())
  self.prompt_loader = prompt_loader or get_prompt_loader(prompt_loader_mode)
  self.system_prompt = system_prompt or self._get_system_prompt(system_prompt_args, system_prompt_version)
- self.trace = trace or AgentTrace(self.system_prompt, self.__class__.__name__, trace_sink=trace_sink)
+ self.trace = trace or AgentTrace(
+ self.system_prompt,
+ self.__class__.__name__,
+ trace_sink=trace_sink,
+ agent_instance_id=self.agent_instance_id,
+ )
  self.thread_container = thread_container or ThreadContainer(self.system_prompt)
  self.tools = tools or self._create_tools(tool_providers)
  self.llm_config = llm_config or self.create_llm_config(llm_model, reasoning)
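
For context: when a BaseAgent builds its own AgentTrace, both objects share the same UUID, so application logs can be correlated with saved traces. A minimal, hypothetical usage sketch (MyAgent stands in for any concrete BaseAgent subclass in the consuming application):

```python
import logging

logger = logging.getLogger(__name__)

# Hypothetical BaseAgent subclass defined by the consuming application.
agent = MyAgent()

# The agent and the trace it created carry the same UUID4 string, so it can be
# attached to log records (or spans) for correlation with the saved trace.
logger.info("agent initialized", extra={"agent_instance_id": agent.agent_instance_id})
assert agent.trace.agent_instance_id == agent.agent_instance_id
```

If a pre-built AgentTrace is passed in instead, the two IDs are independent (the trace keeps its own).
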
spaik_sdk/config/env.py CHANGED
@@ -1,5 +1,6 @@
  import os
  from typing import Dict
+ from typing import Optional as OptionalType
 
  from spaik_sdk.models.llm_model import LLMModel
  from spaik_sdk.models.model_registry import ModelRegistry
@@ -46,8 +47,16 @@ class EnvConfig:
  def get_prompt_loader_mode(self) -> PromptLoaderMode:
  return PromptLoaderMode.from_name(self.get_key("PROMPT_LOADER_MODE", "local"))
 
- def get_trace_sink_mode(self) -> TraceSinkMode:
- return TraceSinkMode.from_name(self.get_key("TRACE_SINK_MODE", "local"))
+ def get_trace_sink_mode(self) -> OptionalType[TraceSinkMode]:
+ """Get the trace sink mode from environment variable.
+
+ Returns:
+ TraceSinkMode.LOCAL if TRACE_SINK_MODE=local,
+ TraceSinkMode.NOOP if TRACE_SINK_MODE=noop,
+ None if TRACE_SINK_MODE is not set or empty (allows global/default behavior).
+ """
+ mode_str = self.get_key("TRACE_SINK_MODE", "", required=False)
+ return TraceSinkMode.from_name(mode_str)
 
  def get_credentials_provider_type(self) -> str:
  return self.get_key("CREDENTIALS_PROVIDER_TYPE", "env")
spaik_sdk/models/factories/google_factory.py CHANGED
@@ -22,6 +22,11 @@ class GoogleModelFactory(BaseModelFactory):
  if config.reasoning:
  model_config["thinking_budget"] = config.reasoning_budget_tokens
  model_config["include_thoughts"] = True
+ else:
+ # Gemini models have thinking enabled by default, so we must explicitly
+ # set thinking_budget=0 to disable it (omitting the parameter doesn't work)
+ model_config["thinking_budget"] = 0
+ model_config["include_thoughts"] = False
 
  # Handle streaming - Google models use disable_streaming instead of streaming
  if not config.streaming:
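
A standalone sketch of the effect of this branch (illustrative only, not the factory's actual code): with reasoning disabled, an explicit zero thinking budget is emitted rather than the keys being omitted.

```python
# `config` is assumed to expose the same fields the factory reads
# (reasoning, reasoning_budget_tokens); values are illustrative.
def gemini_thinking_kwargs(config) -> dict:
    if config.reasoning:
        return {
            "thinking_budget": config.reasoning_budget_tokens,
            "include_thoughts": True,
        }
    # Gemini enables thinking by default, so disabling requires an explicit 0;
    # leaving the key out would keep thinking on.
    return {"thinking_budget": 0, "include_thoughts": False}
```
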
spaik_sdk/models/factories/openai_factory.py CHANGED
@@ -36,15 +36,21 @@ class OpenAIModelFactory(BaseModelFactory):
  if config.tool_usage:
  model_config["model_kwargs"] = {"parallel_tool_calls": True}
 
- # Add model-specific configurations for reasoning models
+ # Handle reasoning configuration based on user preference (config.reasoning)
+ # and model capability (config.model.reasoning)
  if config.model.reasoning:
- # Enable Responses API for reasoning models
+ # Model supports reasoning - check user preference
  model_config["use_responses_api"] = True
 
- # Configure reasoning through model_kwargs as per LangChain docs
- if config.reasoning_summary:
- model_config["model_kwargs"] = {"reasoning": {"effort": config.reasoning_effort, "summary": config.reasoning_summary}}
+ if config.reasoning:
+ # User wants reasoning enabled - use configured effort
+ if config.reasoning_summary:
+ model_config["model_kwargs"] = {"reasoning": {"effort": config.reasoning_effort, "summary": config.reasoning_summary}}
+ else:
+ # User wants reasoning disabled - use model's minimum effort level
+ model_config["model_kwargs"] = {"reasoning": {"effort": config.model.reasoning_min_effort}}
  else:
+ # Model doesn't support reasoning
  model_config["temperature"] = config.temperature
 
  return model_config
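
The new branching can be summarised as a small decision function. This is a sketch mirroring the diff, not the factory's actual code, and assumes a config object with the fields referenced above:

```python
def resolve_openai_reasoning_config(config) -> dict:
    """Sketch of the effort-resolution logic added above (illustrative)."""
    if not config.model.reasoning:
        # Model doesn't support reasoning: plain sampling temperature applies.
        return {"temperature": config.temperature}

    model_config = {"use_responses_api": True}
    if config.reasoning:
        # Reasoning requested: pass the configured effort (and summary, if any).
        if config.reasoning_summary:
            model_config["model_kwargs"] = {
                "reasoning": {
                    "effort": config.reasoning_effort,
                    "summary": config.reasoning_summary,
                }
            }
    else:
        # Reasoning-capable model with reasoning turned off: fall back to the
        # model's minimum effort (e.g. "minimal" for the GPT-5 family).
        model_config["model_kwargs"] = {
            "reasoning": {"effort": config.model.reasoning_min_effort}
        }
    return model_config
```
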
spaik_sdk/models/llm_model.py CHANGED
@@ -8,6 +8,7 @@ class LLMModel:
  family: str
  name: str
  reasoning: bool = True
+ reasoning_min_effort: str = "none" # Minimum reasoning effort: 'none' or 'minimal'
  prompt_caching: bool = False
 
  def __str__(self) -> str:
spaik_sdk/models/model_registry.py CHANGED
@@ -25,9 +25,9 @@ class ModelRegistry:
  GPT_4O = LLMModel(family=LLMFamilies.OPENAI, name="gpt-4o", reasoning=False, prompt_caching=True)
  O4_MINI = LLMModel(family=LLMFamilies.OPENAI, name="o4-mini")
  O4_MINI_APRIL_2025 = LLMModel(family=LLMFamilies.OPENAI, name="o4-mini-2025-04-16")
- GPT_5 = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5", reasoning=True, prompt_caching=True)
- GPT_5_MINI = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5-mini", reasoning=True, prompt_caching=True)
- GPT_5_NANO = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5-nano", reasoning=True, prompt_caching=True)
+ GPT_5 = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5", reasoning=True, reasoning_min_effort="minimal", prompt_caching=True)
+ GPT_5_MINI = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5-mini", reasoning=True, reasoning_min_effort="minimal", prompt_caching=True)
+ GPT_5_NANO = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5-nano", reasoning=True, reasoning_min_effort="minimal", prompt_caching=True)
  GPT_5_1 = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5.1", reasoning=True, prompt_caching=True)
  GPT_5_1_CODEX = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5.1-codex", reasoning=True, prompt_caching=True)
  GPT_5_1_CODEX_MINI = LLMModel(family=LLMFamilies.OPENAI, name="gpt-5.1-codex-mini", reasoning=True, prompt_caching=True)
spaik_sdk/tracing/__init__.py CHANGED
@@ -1,6 +1,7 @@
  from spaik_sdk.tracing.agent_trace import AgentTrace
- from spaik_sdk.tracing.get_trace_sink import get_trace_sink
+ from spaik_sdk.tracing.get_trace_sink import configure_tracing, get_trace_sink
  from spaik_sdk.tracing.local_trace_sink import LocalTraceSink
+ from spaik_sdk.tracing.noop_trace_sink import NoOpTraceSink
  from spaik_sdk.tracing.trace_sink import TraceSink
  from spaik_sdk.tracing.trace_sink_mode import TraceSinkMode
 
@@ -8,6 +9,8 @@ __all__ = [
  "AgentTrace",
  "TraceSink",
  "LocalTraceSink",
+ "NoOpTraceSink",
  "TraceSinkMode",
+ "configure_tracing",
  "get_trace_sink",
  ]
spaik_sdk/tracing/agent_trace.py CHANGED
@@ -1,5 +1,6 @@
  import json
  import time
+ import uuid
  from typing import Optional, Type
 
  from pydantic import BaseModel
@@ -15,12 +16,15 @@ class AgentTrace:
  system_prompt: str,
  save_name: Optional[str] = None,
  trace_sink: Optional[TraceSink] = None,
+ agent_instance_id: Optional[str] = None,
  ):
  self.system_prompt: str = system_prompt
  self._start_time_monotonic: float = time.monotonic()
  self._steps: list[tuple[float, str]] = []
  self.save_name: Optional[str] = save_name
  self._trace_sink: TraceSink = trace_sink or get_trace_sink()
+ # Generate UUID if not provided (backward compatibility)
+ self.agent_instance_id: str = agent_instance_id or str(uuid.uuid4())
 
  def add_step(self, step_content: str) -> None:
  current_time_monotonic: float = time.monotonic()
@@ -69,4 +73,6 @@ class AgentTrace:
 
  def save(self, name: str) -> None:
  trace_content = self.to_string(include_system_prompt=False)
- self._trace_sink.save_trace(name, trace_content, self.system_prompt)
+ self._trace_sink.save_trace(
+ name, trace_content, self.system_prompt, self.agent_instance_id
+ )
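
A small usage sketch of the extended constructor (values illustrative); save() now forwards the instance ID to the sink:

```python
import uuid

from spaik_sdk.tracing import AgentTrace, LocalTraceSink

trace = AgentTrace(
    system_prompt="You are a helpful assistant.",
    save_name="demo_agent",
    trace_sink=LocalTraceSink(traces_dir="traces"),
    agent_instance_id=str(uuid.uuid4()),  # optional; generated if omitted
)
trace.add_step("fetched user profile")
trace.save("demo_agent")  # passes agent_instance_id through to save_trace()
```
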
spaik_sdk/tracing/get_trace_sink.py CHANGED
@@ -1,15 +1,77 @@
  from typing import Optional
 
  from spaik_sdk.tracing.local_trace_sink import LocalTraceSink
+ from spaik_sdk.tracing.noop_trace_sink import NoOpTraceSink
  from spaik_sdk.tracing.trace_sink import TraceSink
  from spaik_sdk.tracing.trace_sink_mode import TraceSinkMode
 
+ # Module-level storage for globally configured trace sink
+ _global_trace_sink: Optional[TraceSink] = None
+
+
+ def configure_tracing(sink: Optional[TraceSink]) -> None:
+ """Configure the global trace sink used by all agents.
+
+ Call this once at application startup to set a custom trace sink
+ that will be used by all subsequently created agents.
+
+ Resolution order:
+ 1. TRACE_SINK_MODE=local env var -> LocalTraceSink (escape hatch)
+ 2. TRACE_SINK_MODE=noop env var -> NoOpTraceSink
+ 3. Global sink set via this function -> the configured sink
+ 4. No configuration -> NoOpTraceSink (silent default)
+
+ Args:
+ sink: The TraceSink to use globally, or None to clear the global
+ configuration (reverts to default no-op behavior).
+
+ Example:
+ from spaik_sdk.tracing import configure_tracing, LocalTraceSink
+
+ # At application startup
+ configure_tracing(LocalTraceSink(traces_dir="my_traces"))
+
+ # Or with a custom sink for observability
+ configure_tracing(MyDatadogTraceSink())
+
+ # Clear global config
+ configure_tracing(None)
+ """
+ global _global_trace_sink
+ _global_trace_sink = sink
+
 
  def get_trace_sink(mode: Optional[TraceSinkMode] = None) -> TraceSink:
+ """Get the appropriate trace sink based on configuration.
+
+ Resolution order:
+ 1. If mode parameter is provided, use that mode
+ 2. Check TRACE_SINK_MODE env var (LOCAL or NOOP override everything)
+ 3. Check for globally configured sink via configure_tracing()
+ 4. Default to NoOpTraceSink (silent no-op)
+
+ Args:
+ mode: Optional explicit mode to use (overrides all other config).
+
+ Returns:
+ The appropriate TraceSink instance.
+ """
  # Lazy import to avoid circular dependency with env.py
  from spaik_sdk.config.env import env_config
 
- mode = mode or env_config.get_trace_sink_mode()
+ # If explicit mode parameter provided, use it
+ if mode is None:
+ mode = env_config.get_trace_sink_mode()
+
+ # Step 1-2: Check env var mode (LOCAL or NOOP)
  if mode == TraceSinkMode.LOCAL:
  return LocalTraceSink()
- raise ValueError(f"Unknown TraceSinkMode: {mode}")
+ if mode == TraceSinkMode.NOOP:
+ return NoOpTraceSink()
+
+ # Step 3: Check global sink
+ if _global_trace_sink is not None:
+ return _global_trace_sink
+
+ # Step 4: Default to no-op
+ return NoOpTraceSink()
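
A sketch of the documented resolution order in practice, assuming TRACE_SINK_MODE is not set in the environment:

```python
from spaik_sdk.tracing import LocalTraceSink, NoOpTraceSink, configure_tracing, get_trace_sink

assert isinstance(get_trace_sink(), NoOpTraceSink)      # nothing configured -> silent no-op
configure_tracing(LocalTraceSink(traces_dir="traces"))  # global sink for all agents
assert isinstance(get_trace_sink(), LocalTraceSink)
configure_tracing(None)                                 # clear the global configuration
assert isinstance(get_trace_sink(), NoOpTraceSink)
# Setting TRACE_SINK_MODE=local or TRACE_SINK_MODE=noop would override the
# globally configured sink, per the resolution order above.
```
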
spaik_sdk/tracing/local_trace_sink.py CHANGED
@@ -10,7 +10,14 @@ class LocalTraceSink(TraceSink):
  def __init__(self, traces_dir: Optional[str] = None):
  self.traces_dir = traces_dir or "traces"
 
- def save_trace(self, name: str, trace_content: str, system_prompt: str) -> None:
+ def save_trace(
+ self,
+ name: str,
+ trace_content: str,
+ system_prompt: str,
+ agent_instance_id: Optional[str] = None,
+ ) -> None:
+ # agent_instance_id is accepted but intentionally ignored - file naming is unchanged
  os.makedirs(self.traces_dir, exist_ok=True)
 
  trace_path = os.path.join(self.traces_dir, f"{name}.txt")
spaik_sdk/tracing/noop_trace_sink.py ADDED
@@ -0,0 +1,21 @@
+ from typing import Optional
+
+ from spaik_sdk.tracing.trace_sink import TraceSink
+
+
+ class NoOpTraceSink(TraceSink):
+ """TraceSink implementation that does nothing.
+
+ Used as the default when no tracing is configured, ensuring traces
+ are silently discarded without any side effects.
+ """
+
+ def save_trace(
+ self,
+ name: str,
+ trace_content: str,
+ system_prompt: str,
+ agent_instance_id: Optional[str] = None,
+ ) -> None:
+ """Do nothing - silently discard the trace."""
+ pass
spaik_sdk/tracing/trace_sink.py CHANGED
@@ -1,4 +1,5 @@
  from abc import ABC, abstractmethod
+ from typing import Optional
 
 
  class TraceSink(ABC):
@@ -8,12 +9,21 @@ class TraceSink(ABC):
  """
 
  @abstractmethod
- def save_trace(self, name: str, trace_content: str, system_prompt: str) -> None:
+ def save_trace(
+ self,
+ name: str,
+ trace_content: str,
+ system_prompt: str,
+ agent_instance_id: Optional[str] = None,
+ ) -> None:
  """Save a trace with its system prompt.
 
  Args:
  name: Identifier for the trace (e.g., agent class name)
  trace_content: The formatted trace content (without system prompt)
  system_prompt: The system prompt used for the agent
+ agent_instance_id: Optional UUID identifying the agent instance for correlation.
+ Custom sinks can use this to correlate traces in observability
+ backends (e.g., Datadog, OpenTelemetry).
  """
  pass
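
As the docstring suggests, a custom sink can use agent_instance_id to tag traces for an observability backend. A hedged sketch of such a sink (the logging target is only an example, not part of the package):

```python
import logging
from typing import Optional

from spaik_sdk.tracing import TraceSink, configure_tracing


class LoggingTraceSink(TraceSink):
    """Example sink that logs traces tagged with the agent instance ID."""

    def save_trace(
        self,
        name: str,
        trace_content: str,
        system_prompt: str,
        agent_instance_id: Optional[str] = None,
    ) -> None:
        logging.getLogger("traces").info(
            "trace=%s agent_instance_id=%s chars=%d",
            name,
            agent_instance_id,
            len(trace_content),
        )


# Register it globally at application startup.
configure_tracing(LoggingTraceSink())
```
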
spaik_sdk/tracing/trace_sink_mode.py CHANGED
@@ -1,14 +1,28 @@
  from enum import Enum
+ from typing import Optional
 
 
  class TraceSinkMode(Enum):
  LOCAL = "local"
+ NOOP = "noop"
 
  @classmethod
- def from_name(cls, name: str) -> "TraceSinkMode":
+ def from_name(cls, name: Optional[str]) -> Optional["TraceSinkMode"]:
+ """Convert a string name to a TraceSinkMode.
+
+ Args:
+ name: The mode name ("local", "noop") or None/empty string.
+
+ Returns:
+ The corresponding TraceSinkMode, or None if name is empty/None.
+ Returns None for unrecognized values (silent fallthrough to default behavior).
+ """
+ if not name:
+ return None
+
  for mode in cls:
- if mode.value == name:
+ if mode.value == name.lower():
  return mode
 
- available_modes = [mode.value for mode in cls]
- raise ValueError(f"Unknown TraceSinkMode '{name}'. Available: {', '.join(available_modes)}")
+ # Unknown values fall through to None (let get_trace_sink handle default)
+ return None
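
Behaviour sketch of the relaxed parser (follows directly from the code above):

```python
from spaik_sdk.tracing import TraceSinkMode

assert TraceSinkMode.from_name("LOCAL") is TraceSinkMode.LOCAL  # now case-insensitive
assert TraceSinkMode.from_name("noop") is TraceSinkMode.NOOP
assert TraceSinkMode.from_name("") is None                      # unset/empty env var
assert TraceSinkMode.from_name("datadog") is None               # unknown value -> default handling
```
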
spaik_sdk-0.6.2.dist-info/METADATA → spaik_sdk-0.6.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: spaik-sdk
- Version: 0.6.2
+ Version: 0.6.4
  Summary: Python SDK for building AI agents with multi-LLM support, streaming, and production-ready infrastructure
  Project-URL: Homepage, https://github.com/siilisolutions/spaik-sdk
  Project-URL: Repository, https://github.com/siilisolutions/spaik-sdk
@@ -54,6 +54,8 @@ Description-Content-Type: text/markdown
 
  Python SDK for building AI agents with multi-LLM support, streaming, and production infrastructure.
 
+ Spaik SDK is an open-source project developed by engineers at Siili Solutions Oyj. This is not an official Siili product.
+
  ## Installation
 
  ```bash
@@ -376,4 +378,4 @@ MessageBlockType.ERROR # Error message
 
  ## License
 
- MIT - Copyright (c) 2025 Siili Solutions Oyj
+ MIT - Copyright (c) 2026 Siili Solutions Oyj
spaik_sdk-0.6.2.dist-info/RECORD → spaik_sdk-0.6.4.dist-info/RECORD CHANGED
@@ -1,7 +1,7 @@
  spaik_sdk/__init__.py,sha256=UhJdqPEBVFTlyWHPicbcpcvOuOqmObenwnJv_GkPbVA,576
  spaik_sdk/py.typed,sha256=Nqnn8clbgv-5l0PgxcTOldg8mkMKrFn4TvPL-rYUUGg,1
  spaik_sdk/agent/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- spaik_sdk/agent/base_agent.py,sha256=5X1b2Z7LjxF-y5XVxZCsURXMztCR14zsj2fPvu0bdB0,10808
+ spaik_sdk/agent/base_agent.py,sha256=sk1Q3ediUHm6hOVIIAv90XwXAL7pSrlLcDIm7qtG6z0,11037
  spaik_sdk/attachments/__init__.py,sha256=ckqaKkl8rCVg-V8hNkE_RG57peYkt1zMWTszSRgWZeE,678
  spaik_sdk/attachments/builder.py,sha256=WgB14KcZ491KqjY6QMeIYXS18KElqsnvO-XAc7wuP0s,1758
  spaik_sdk/attachments/file_storage_provider.py,sha256=3EKDCCfhKi2iDpLR3BMsKt9KCR8iFvUZz2LV8cMFs3s,692
@@ -21,7 +21,7 @@ spaik_sdk/audio/providers/google_tts.py,sha256=-7gohXszE_A3sFQbiMp3kk1VZBhGFyLkx
  spaik_sdk/audio/providers/openai_stt.py,sha256=ZD44obwqOTmvnmvPcD6RQu4cL6B4rWkMmSmLwlEeoQY,2078
  spaik_sdk/audio/providers/openai_tts.py,sha256=SHfav2hgPnk84Dy784XcJHAGQ7PgUcUuwvq2eU4ceW0,3436
  spaik_sdk/config/credentials_provider.py,sha256=mfaAUb8yRr9VEHqwxxIGlbXv-_v7ZnmaBxqDmq9SRys,299
- spaik_sdk/config/env.py,sha256=vidp6zmJ_Xy2brmxkGQFeoDmkGKr_6h3jBO9x9Ydb1o,2361
+ spaik_sdk/config/env.py,sha256=H4xdFhRqoE64Rj6J0DM_J_zaq10seh2WpDRldNmxaZI,2754
  spaik_sdk/config/env_credentials_provider.py,sha256=Y4Tti-T3IAAFQDV2rTsBaa3mngZcJ6RBY6Pk1TMmJRM,307
  spaik_sdk/config/get_credentials_provider.py,sha256=D2EF3ezConXlCmNvl9mQTSoR3BsIlDmxKM0a7jgGym8,516
  spaik_sdk/image_gen/__init__.py,sha256=AEujgMjNTj0y0x5EPsbX8IV9MB_UXDRjf3nc3Sc7Ou4,245
@@ -55,15 +55,15 @@ spaik_sdk/llm/streaming/streaming_state_manager.py,sha256=Nsfcf9umUk2WAVdsank3a_
  spaik_sdk/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  spaik_sdk/models/llm_config.py,sha256=ICIY_y_-loD7X0h-0_sjJUufxPpZgrUKfEQ5uVxA740,1641
  spaik_sdk/models/llm_families.py,sha256=mtsMfih5FbkX52eFMkMSfjiSIpkroZ_uqe8JlreJscQ,169
- spaik_sdk/models/llm_model.py,sha256=8cUyfRyguinno-GdVqvSlf9J6aylEgjoYiCRV56JdzM,352
+ spaik_sdk/models/llm_model.py,sha256=DO8wlN4Gj_AB_lxTpqzQGDABvDgyOI3JcQda_J-UiKU,440
  spaik_sdk/models/llm_wrapper.py,sha256=CB07qSPJUWScN3hj1SO_9qi8QQ7Zg5p53JLnXFZ4O6A,929
- spaik_sdk/models/model_registry.py,sha256=CxucIOjp9Pf8gRnp5jOSKZMpyxx8P1s2io2qdTJi5k8,7818
+ spaik_sdk/models/model_registry.py,sha256=bzMi-_amifpuE-YNkK2lAPDYOiAPX0Unz2xNzhATEQM,7914
  spaik_sdk/models/factories/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  spaik_sdk/models/factories/anthropic_factory.py,sha256=1Dmn-RBTmDHnXVCt2D3xx1Xw9OVMaecTk1kBFur8prs,1330
  spaik_sdk/models/factories/base_model_factory.py,sha256=iLTUhVh7G4l9TiQdFlq9HU-cs-t5uaul4SeoIdSFNww,2845
- spaik_sdk/models/factories/google_factory.py,sha256=2HpsTqrNy-xfk9d1lLU62YxTBjZyZYxWVg-N9ViAPu4,1199
+ spaik_sdk/models/factories/google_factory.py,sha256=5Xc-I6h_SwPJFaSVu0m87O2xcRgWns0OwURK_hFUpbg,1486
  spaik_sdk/models/factories/ollama_factory.py,sha256=7RXPlbF7b-wJA45FluSGkVIzMXdHToxxGQ99KatiLfs,1674
- spaik_sdk/models/factories/openai_factory.py,sha256=kdHnVbpvJ2g2sTqVFbSQzqPKNlbXR_rLTz2Dwv8z0h8,2116
+ spaik_sdk/models/factories/openai_factory.py,sha256=LvKx5ueuL3uROfI453BQSq6vuLFHJwMzLVqIbeg4G9s,2489
  spaik_sdk/models/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  spaik_sdk/models/providers/anthropic_provider.py,sha256=NY_K8cbph5PKMRdKWOY4iw2C6wUZtnZ4id8DGwuKv1M,1329
  spaik_sdk/models/providers/azure_provider.py,sha256=Ddzhmi1TCWleAMJx1qoagxxMEfv8Tot46h15eocS-Bg,1453
@@ -148,14 +148,15 @@ spaik_sdk/tools/tool_provider.py,sha256=m34O2Yw-k40NDCxU8OzBR9AZz4cnM439KKYo62l2
  spaik_sdk/tools/impl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  spaik_sdk/tools/impl/mcp_tool_provider.py,sha256=g2ULJkTW7J3nyXI43RdrTj-AzFtMiGCP5Aog79vKnMI,2750
  spaik_sdk/tools/impl/search_tool_provider.py,sha256=fi8SBmvC7--n0cCNaTz6PhEe6Bf4RkyrxYkqKzEDlY4,515
- spaik_sdk/tracing/__init__.py,sha256=il8zLt8OCPOk15O9TIDbHDspMUfQ41Tc_rJyHk3yefI,401
- spaik_sdk/tracing/agent_trace.py,sha256=074iqitiYuq71-hPWUZ42KIJlg1ss0_eCEWmUuZmHjY,2790
- spaik_sdk/tracing/get_trace_sink.py,sha256=O_CrRutRaJ-hOPx5oQI1O7ROGkqCh0Xzu77hejuQUiQ,558
- spaik_sdk/tracing/local_trace_sink.py,sha256=4Z3Txo_VOj69OvREILxeiKpLBjd_0l-Mk6ut4kS5GyE,803
- spaik_sdk/tracing/trace_sink.py,sha256=J_jl53G84PJhtbhwYIfJZX-ikC2o4xxM7svgH0RWu6E,611
- spaik_sdk/tracing/trace_sink_mode.py,sha256=e2hRBrerRugdzjfRTtIDWMqbSygCRAo5TExFX4OLrJg,388
+ spaik_sdk/tracing/__init__.py,sha256=kmLFmy1Lb7bS2sryIFoFaknxYXlyuswuP_4gHmwEtv0,526
+ spaik_sdk/tracing/agent_trace.py,sha256=ORxHCiilgElebx7O5CyZtwgeWTiruB7sFky98xgsf_M,3039
+ spaik_sdk/tracing/get_trace_sink.py,sha256=ZPg8pVLS1BbY0PwXJXC-O8qSvUvQSzmDb4SWRtsRSSc,2573
+ spaik_sdk/tracing/local_trace_sink.py,sha256=QTqkzDv8S0cLtRvScwPmejnj6EpccYaHFjd7KkP9Xrk,984
+ spaik_sdk/tracing/noop_trace_sink.py,sha256=AxImIYh8MPzISTp6qDp8ShtWyPLVqLRwsh7yyAVDSjs,540
+ spaik_sdk/tracing/trace_sink.py,sha256=LU6aF848Kz2hMZuz0q6l-4IaD0sC-ex0AKFk8mVTSR4,984
+ spaik_sdk/tracing/trace_sink_mode.py,sha256=74qiL4P3sNVGM3_DUkWKlqlu9UvT928NLKTskD_vxgk,791
  spaik_sdk/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  spaik_sdk/utils/init_logger.py,sha256=htxNtHMxRXVNAXBbS9f6Wmd0aET7kl3ClJ062b3YHmQ,791
- spaik_sdk-0.6.2.dist-info/METADATA,sha256=K8e5p37BKW5a_W0KPtO93GmMGTwRlzI7dSXd6ZoZHms,9962
- spaik_sdk-0.6.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- spaik_sdk-0.6.2.dist-info/RECORD,,
+ spaik_sdk-0.6.4.dist-info/METADATA,sha256=Si8vJmRo18czibAqovNT87HsVqPl9nxqz-ztBYG90DM,10085
+ spaik_sdk-0.6.4.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ spaik_sdk-0.6.4.dist-info/RECORD,,