langwatch 0.7.2__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
langwatch/__init__.py CHANGED
@@ -8,6 +8,8 @@ from .login import login
  from .state import get_api_key, get_endpoint
  from .__version__ import __version__
  from .utils.initialization import ensure_setup, setup
+ from .prompts.types import FetchPolicy
+
 
  # Type hints for IntelliSense (only imported for typing)
  from typing import TYPE_CHECKING
@@ -153,5 +155,5 @@ __all__ = [
      "evaluations",
      "langchain",
      "dspy",
-     "prompts",
+     "FetchPolicy",
  ]
langwatch/__version__.py CHANGED
@@ -1,3 +1,3 @@
  """Version information for LangWatch."""
 
- __version__ = "0.7.2" # x-release-please-version
+ __version__ = "0.8.0" # x-release-please-version
langwatch/prompts/__init__.py CHANGED
@@ -1,5 +1,36 @@
  from .prompt_facade import PromptsFacade
+ from .types import FetchPolicy
 
  __all__ = [
      "PromptsFacade",
+     "FetchPolicy",
  ]
+
+ # Cached PromptsFacade instance for module-level method delegation
+ _facade_instance: PromptsFacade | None = None
+
+
+ def _get_facade() -> PromptsFacade:
+     """Get or create the cached PromptsFacade instance."""
+     global _facade_instance
+     if _facade_instance is None:
+         _facade_instance = PromptsFacade.from_global()
+     return _facade_instance
+
+
+ def __getattr__(name: str):
+     """
+     Delegate attribute access to PromptsFacade instance.
+
+     This allows langwatch.prompts to work both as:
+     - A module (for submodule access like `from langwatch.prompts.types import FetchPolicy`)
+     - A facade (for method access like `langwatch.prompts.get(...)`)
+
+     When Python imports `langwatch.prompts.types`, it stores `langwatch.prompts` as a module
+     in sys.modules, which shadows the lazy-loaded PromptsFacade instance from langwatch.__getattr__.
+     This __getattr__ ensures method calls still work by delegating to a PromptsFacade instance.
+     """
+     facade = _get_facade()
+     if hasattr(facade, name):
+         return getattr(facade, name)
+     raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
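For context, a minimal sketch of the two access styles this `__getattr__` delegation is meant to support; `langwatch.setup()` and the prompt handle `"my-prompt"` are illustrative placeholders, not part of the diff:

```python
import langwatch
# Submodule access: langwatch.prompts still resolves as a regular module
from langwatch.prompts.types import FetchPolicy

langwatch.setup()  # hypothetical minimal setup; actual arguments depend on your configuration

# Facade access: unknown attributes on the module are delegated to a PromptsFacade instance
prompt = langwatch.prompts.get("my-prompt", fetch_policy=FetchPolicy.MATERIALIZED_FIRST)
```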
langwatch/prompts/errors.py CHANGED
@@ -139,7 +139,8 @@ def unwrap_response(
          error_detail = f": {msg}" if msg else ""
          raise ValueError(f"Invalid prompt request{error_detail}")
      if status == 401:
-         raise RuntimeError("Authentication error")
+         error_detail = f": {msg}" if msg else ""
+         raise RuntimeError(f"Authentication error{error_detail}")
      if status >= 500:
          error_detail = f" - {msg}" if msg else ""
          raise RuntimeError(
langwatch/prompts/local_loader.py CHANGED
@@ -25,8 +25,13 @@ class LocalPromptLoader:
      """Loads prompts from local files in CLI format."""
 
      def __init__(self, base_path: Optional[Path] = None):
-         """Initialize with base path (defaults to current working directory)."""
-         self.base_path = base_path or Path.cwd()
+         """Initialize with base path (defaults to current working directory at load time)."""
+         self._base_path = base_path
+
+     @property
+     def base_path(self) -> Path:
+         """Get the base path, defaulting to current working directory if not set."""
+         return self._base_path or Path.cwd()
 
      def load_prompt(self, prompt_id: str) -> Optional[PromptData]:
          """
langwatch/prompts/prompt_facade.py CHANGED
@@ -8,7 +8,8 @@ or when API is unavailable.
 
  Follows the facade pattern to coordinate between LocalPromptLoader and PromptApiService.
  """
- from typing import Dict, List, Literal, Optional
+ from typing import Any, Dict, List, Literal, Optional
+ import time
  from langwatch.generated.langwatch_rest_api_client.client import (
      Client as LangWatchRestApiClient,
  )
@@ -16,9 +17,13 @@ from langwatch.generated.langwatch_rest_api_client.client import (
  from langwatch.utils.initialization import ensure_setup
  from langwatch.state import get_instance
  from .prompt import Prompt
+ from .types import FetchPolicy
  from .prompt_api_service import PromptApiService
  from .local_loader import LocalPromptLoader
  from .types import MessageDict, InputDict, OutputDict
+ from logging import getLogger
+
+ logger = getLogger(__name__)
 
 
  class PromptsFacade:
@@ -34,6 +39,7 @@ class PromptsFacade:
          """Initialize the prompt service facade with dependencies."""
          self._api_service = PromptApiService(rest_api_client)
          self._local_loader = LocalPromptLoader()
+         self._cache: Dict[str, Dict[str, Any]] = {}
 
      @classmethod
      def from_global(cls) -> "PromptsFacade":
@@ -46,17 +52,43 @@ class PromptsFacade:
          )
          return cls(instance.rest_api_client)
 
-     def get(self, prompt_id: str, version_number: Optional[int] = None) -> Prompt:
+     def get(
+         self,
+         prompt_id: str,
+         version_number: Optional[int] = None,
+         fetch_policy: Optional[FetchPolicy] = None,
+         cache_ttl_minutes: Optional[int] = None,
+     ) -> Prompt:
          """
-         Retrieve a prompt by its ID with guaranteed availability.
+         Retrieve a prompt by its ID with configurable fetch policy.
 
-         Tries local files first, then falls back to API.
-         You can optionally specify a version number to get a specific version of the prompt.
+         Args:
+             prompt_id: The prompt ID to retrieve
+             version_number: Optional specific version number to retrieve
+             fetch_policy: How to fetch the prompt. Defaults to MATERIALIZED_FIRST.
+             cache_ttl_minutes: Cache TTL in minutes (only used with CACHE_TTL policy). Defaults to 5.
 
          Raises:
              ValueError: If the prompt is not found (404 error).
              RuntimeError: If the API call fails for other reasons (auth, server errors, etc.).
          """
+         fetch_policy = fetch_policy or FetchPolicy.MATERIALIZED_FIRST
+
+         if fetch_policy == FetchPolicy.MATERIALIZED_ONLY:
+             return self._get_materialized_only(prompt_id)
+         elif fetch_policy == FetchPolicy.ALWAYS_FETCH:
+             return self._get_always_fetch(prompt_id, version_number)
+         elif fetch_policy == FetchPolicy.CACHE_TTL:
+             return self._get_cache_ttl(
+                 prompt_id, version_number, cache_ttl_minutes or 5
+             )
+         else: # MATERIALIZED_FIRST (default)
+             return self._get_materialized_first(prompt_id, version_number)
+
+     def _get_materialized_first(
+         self, prompt_id: str, version_number: Optional[int] = None
+     ) -> Prompt:
+         """Get prompt using MATERIALIZED_FIRST policy (local first, API fallback)."""
          # Try to load from local files first
          local_data = self._local_loader.load_prompt(prompt_id)
          if local_data is not None:
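A hedged usage sketch of the new `get()` signature, showing how each policy is selected; the prompt handle is a placeholder and the facade is assumed to have been configured via the SDK's normal setup:

```python
from langwatch.prompts import PromptsFacade
from langwatch.prompts.types import FetchPolicy

prompts = PromptsFacade.from_global()

# Default: local materialized file first, API as fallback
p1 = prompts.get("my-prompt")

# Always ask the API first, falling back to local files if the call fails
p2 = prompts.get("my-prompt", fetch_policy=FetchPolicy.ALWAYS_FETCH)

# Serve from an in-memory cache, refreshing from the API every 10 minutes
p3 = prompts.get("my-prompt", fetch_policy=FetchPolicy.CACHE_TTL, cache_ttl_minutes=10)

# Never touch the network; raise ValueError if no local copy exists
p4 = prompts.get("my-prompt", fetch_policy=FetchPolicy.MATERIALIZED_ONLY)
```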
@@ -66,6 +98,64 @@ class PromptsFacade:
          api_data = self._api_service.get(prompt_id, version_number)
          return Prompt(api_data)
 
+     def _get_materialized_only(self, prompt_id: str) -> Prompt:
+         """Get prompt using MATERIALIZED_ONLY policy (local only, no API calls)."""
+         local_data = self._local_loader.load_prompt(prompt_id)
+         if local_data is not None:
+             return Prompt(local_data)
+
+         raise ValueError(f"Prompt '{prompt_id}' not found in materialized files")
+
+     def _get_always_fetch(
+         self, prompt_id: str, version_number: Optional[int] = None
+     ) -> Prompt:
+         """Get prompt using ALWAYS_FETCH policy (API first, local fallback)."""
+         try:
+             api_data = self._api_service.get(prompt_id, version_number)
+             return Prompt(api_data)
+         except Exception:
+             # Fall back to local if API fails
+             local_data = self._local_loader.load_prompt(prompt_id)
+             if local_data is not None:
+                 return Prompt(local_data)
+             raise ValueError(f"Prompt '{prompt_id}' not found locally or on server")
+
+     def _get_cache_ttl(
+         self,
+         prompt_id: str,
+         version_number: Optional[int] = None,
+         cache_ttl_minutes: int = 5,
+     ) -> Prompt:
+         """Get prompt using CACHE_TTL policy (cache with TTL, fallback to local)."""
+         cache_key = f"{prompt_id}::version:{version_number or ''}"
+         ttl_ms = cache_ttl_minutes * 60 * 1000
+         now = time.time() * 1000 # Convert to milliseconds
+
+         cached = self._cache.get(cache_key)
+         if cached and now - cached["timestamp"] < ttl_ms:
+             return Prompt(cached["data"])
+
+         try:
+             api_data = self._api_service.get(prompt_id, version_number)
+             self._cache[cache_key] = {"data": api_data, "timestamp": now}
+             return Prompt(api_data)
+         except Exception:
+             logger.warning(
+                 f"Failed to fetch prompt '{prompt_id}' from API, falling back to local",
+                 exc_info=True,
+                 stack_info=True,
+                 extra={
+                     "prompt_id": prompt_id,
+                     "version_number": version_number,
+                     "cache_ttl_minutes": cache_ttl_minutes,
+                 },
+             )
+             # Fall back to local if API fails
+             local_data = self._local_loader.load_prompt(prompt_id)
+             if local_data is not None:
+                 return Prompt(local_data)
+             raise ValueError(f"Prompt '{prompt_id}' not found locally or on server")
+
      def create(
          self,
          handle: str,
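The cache bookkeeping in `_get_cache_ttl` reduces to a timestamped dict entry per `(prompt_id, version)` pair. A self-contained sketch of the same expiry arithmetic, with names local to this example rather than taken from the SDK:

```python
import time
from typing import Any, Dict, Optional

_cache: Dict[str, Dict[str, Any]] = {}

def is_fresh(prompt_id: str, version_number: Optional[int], ttl_minutes: int = 5) -> bool:
    """Return True if a cached entry exists and is younger than the TTL."""
    cache_key = f"{prompt_id}::version:{version_number or ''}"
    ttl_ms = ttl_minutes * 60 * 1000          # TTL expressed in milliseconds
    now_ms = time.time() * 1000               # wall-clock time in milliseconds
    cached = _cache.get(cache_key)
    return bool(cached) and (now_ms - cached["timestamp"]) < ttl_ms
```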
langwatch/prompts/types/__init__.py CHANGED
@@ -8,6 +8,9 @@ organized by their purpose and scope.
  # Core prompt data structure
  from .prompt_data import PromptData
 
+ # Fetch policy
+ from .fetch_policy import FetchPolicy
+
  # Standardized data structures
  from .structures import (
      MessageDict,
@@ -24,4 +27,5 @@ __all__ = [
      "InputDict",
      "OutputDict",
      "ResponseFormatDict",
+     "FetchPolicy",
  ]
langwatch/prompts/types/fetch_policy.py ADDED
@@ -0,0 +1,26 @@
+ from enum import Enum
+
+
+ class FetchPolicy(Enum):
+     """
+     Fetch policy for prompt retrieval.
+
+     Controls how prompts are fetched and cached.
+     """
+
+     # Use local file if available, otherwise fetch from API (default)
+     MATERIALIZED_FIRST = "MATERIALIZED_FIRST"
+
+     # Always try API first, fall back to materialized
+     ALWAYS_FETCH = "ALWAYS_FETCH"
+
+     # Fetch every X minutes, use materialized between fetches
+     CACHE_TTL = "CACHE_TTL"
+
+     # Never fetch, use materialized files only
+     MATERIALIZED_ONLY = "MATERIALIZED_ONLY"
+
+
+ __all__ = [
+     "FetchPolicy",
+ ]
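Because each member's value equals its name, a policy read from configuration can be mapped straight onto the enum; a small sketch, where the environment variable is hypothetical and not something the SDK reads itself:

```python
import os
from langwatch.prompts.types import FetchPolicy

# PROMPT_FETCH_POLICY is an illustrative variable name chosen for this example
raw = os.environ.get("PROMPT_FETCH_POLICY", "MATERIALIZED_FIRST")
policy = FetchPolicy(raw)  # value lookup works because member values mirror member names
print(policy)              # FetchPolicy.MATERIALIZED_FIRST by default
```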
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: langwatch
- Version: 0.7.2
+ Version: 0.8.0
  Summary: LangWatch Python SDK, for monitoring your LLMs
  Author-email: Langwatch Engineers <engineering@langwatch.ai>
  License: MIT
@@ -1,5 +1,5 @@
- langwatch/__init__.py,sha256=TzPHzqCFGZJByI3sAIKrNB33Qi4PqVmgYDZuBwPnhPc,4222
- langwatch/__version__.py,sha256=xd61VXC9KB2dkBlREW18eiU4DnZ96PU7rNiffqTsMn0,91
+ langwatch/__init__.py,sha256=GMq4SV2Tz2i0JD05shqnw2lBW5cgMx4Zzo141hp106k,4266
+ langwatch/__version__.py,sha256=vwG-FkKYsGya3EzNIEIMvFwGCI2K-gXNyKE3Y88jHc4,91
  langwatch/attributes.py,sha256=nXdI_G85wQQCAdAcwjCiLYdEYj3wATmfgCmhlf6dVIk,3910
  langwatch/batch_evaluation.py,sha256=piez7TYqUZPb9NlIShTuTPmSzrZqX-vm2Grz_NGXe04,16078
  langwatch/client.py,sha256=WTNcYSik7kZ2kH-qGDnhbMTosc8e_Xhab_lZlfh5TC8,25559
@@ -390,16 +390,16 @@ langwatch/generated/langwatch_rest_api_client/models/search_request_filters.py,s
  langwatch/generated/langwatch_rest_api_client/models/search_response.py,sha256=zDYmJ8bFBSJyF9D3cEn_ffrey-ITIfwr-_7eu72zLyk,2832
  langwatch/generated/langwatch_rest_api_client/models/timestamps.py,sha256=-nRKUPZTAJQNxiKz128xF7DKgZNbFo4G3mr5xNXrkaw,2173
  langwatch/generated/langwatch_rest_api_client/models/trace.py,sha256=K9Lc_EQOrJ2dqMXx9EpiUXReT1_uYF7WRfYyhlfbi3I,7537
- langwatch/prompts/__init__.py,sha256=2BcFhTrcCfxMxDGc5gd09oNZdzY_wlcAGFlt4Mz2lv4,77
- langwatch/prompts/errors.py,sha256=9WKzLiOLJAm7DhtKc9c74oquh0IyDQOHNs6xeTFiQRg,5060
- langwatch/prompts/local_loader.py,sha256=JL2l1v8ymyhi8LvTndVIqhzP7XDHm4HXKygYDvSaV5w,6596
+ langwatch/prompts/__init__.py,sha256=OGf3BrzVsTZnSPYSqSqe_eWmGIBRiOUlovOduYzKCx4,1259
+ langwatch/prompts/errors.py,sha256=kmaGeA1QPot9Ii5lgooxmAFlvUPOGjAnzzPBuw4h6Bw,5124
+ langwatch/prompts/local_loader.py,sha256=luvk39Kc4S-x4lBSms-AoWqg0Im_Uu-Lcpt3cRJFmdY,6774
  langwatch/prompts/prompt.py,sha256=SgLDo9hO-CuRE-AZ8zx9v7-KqjiabiW8GzD9jdx1IoA,6914
  langwatch/prompts/prompt_api_service.py,sha256=tHhwIRjUBSM43_jwDAoGCHJjvvqVeSCrUPwcwMvUHho,9823
- langwatch/prompts/prompt_facade.py,sha256=kf-IXFWX_39Z5WD8nmKzWpO13pYwqMl12qfVfyw4A3E,5228
- langwatch/prompts/types.py,sha256=p1bRMvfCCpGGiVwzFtQijVtWl5GWugL_vBOFc4B2348,269
+ langwatch/prompts/prompt_facade.py,sha256=fc16TB4ZBhKpwXVx9SNTJxLvQVms4eu4w0ZOH4Kk55w,8950
  langwatch/prompts/decorators/prompt_service_tracing.py,sha256=uSYw0vExo7AuxbcCRnxbYl6UOfOQSC0IsisSqYy153Y,2395
  langwatch/prompts/decorators/prompt_tracing.py,sha256=x_PQvJlGbGF1h2HtGNiqaZ8K1qNd1jRf5pTOBTQx-7M,3963
- langwatch/prompts/types/__init__.py,sha256=uEnjOQC4LkLMWQ0fXfKe573xKOvoMdPgC6uY-yo9B_g,506
+ langwatch/prompts/types/__init__.py,sha256=jwaFV4VJHrOE6dm6yyLtWk6_7dqZpR5uZjN1cswtga4,579
+ langwatch/prompts/types/fetch_policy.py,sha256=AyS4J2x4IYjlcFo-6hSJY16awhll6hK2c3jncLUgZxc,578
  langwatch/prompts/types/prompt_data.py,sha256=g_EQ94-PGfa4Ptwd3e2rMqoIZiX052MEEZKyF77m9D0,3137
  langwatch/prompts/types/structures.py,sha256=cB94bn-qhFgHHYXcrmJV6Bk9idk5ZmyfXhFNQAaXw-M,951
  langwatch/telemetry/context.py,sha256=q0hUG9PM3aifIr6ZRuuNNbsGtcAImu9Pv2XTKUp3CGc,4029
@@ -415,6 +415,6 @@ langwatch/utils/initialization.py,sha256=1KoZmkHOvGEVF0j-4t4xRQdA_2C_SPiF7qFXqEG
  langwatch/utils/module.py,sha256=KLBNOK3mA9gCSifCcQX_lOtU48BJQDWvFKtF6NMvwVA,688
  langwatch/utils/transformation.py,sha256=76MGXyrYTxM0Yri36NJqLK-XxL4BBYdmKWAXXlw3D4Q,7690
  langwatch/utils/utils.py,sha256=ZCOSie4o9LdJ7odshNfCNjmgwgQ27ojc5ENqt1rXuSs,596
- langwatch-0.7.2.dist-info/METADATA,sha256=QeRx1wldxr7sSCCONIKDKVcmdmENX8yK-_TLIO-J9Ig,13192
- langwatch-0.7.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- langwatch-0.7.2.dist-info/RECORD,,
+ langwatch-0.8.0.dist-info/METADATA,sha256=RVtrzq4jjZAzk9s80xxjXz4Wd_grAzp0vRJ73oN7WNs,13192
+ langwatch-0.8.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ langwatch-0.8.0.dist-info/RECORD,,
langwatch/prompts/types.py DELETED
@@ -1,16 +0,0 @@
- # Re-export types for backward compatibility
- from .types import (
-     PromptData,
-     MessageDict,
-     InputDict,
-     OutputDict,
-     ResponseFormatDict,
- )
-
- __all__ = [
-     "PromptData",
-     "MessageDict",
-     "InputDict",
-     "OutputDict",
-     "ResponseFormatDict",
- ]