lucidicai 3.4.0__tar.gz → 3.4.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lucidicai-3.4.0 → lucidicai-3.4.2}/PKG-INFO +1 -1
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/__init__.py +1 -1
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/api/client.py +26 -0
- lucidicai-3.4.2/lucidicai/api/resources/prompt.py +342 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai.egg-info/PKG-INFO +1 -1
- {lucidicai-3.4.0 → lucidicai-3.4.2}/setup.py +1 -1
- lucidicai-3.4.0/lucidicai/api/resources/prompt.py +0 -162
- {lucidicai-3.4.0 → lucidicai-3.4.2}/README.md +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/api/__init__.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/api/resources/__init__.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/api/resources/dataset.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/api/resources/evals.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/api/resources/event.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/api/resources/experiment.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/api/resources/feature_flag.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/api/resources/session.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/client.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/core/__init__.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/core/config.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/core/errors.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/core/types.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/integrations/__init__.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/integrations/livekit.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/__init__.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/context.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/decorators.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/error_boundary.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/event.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/event_builder.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/features/__init__.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/features/dataset.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/features/feature_flag.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/init.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/session.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/sdk/shutdown_manager.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/session_obj.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/__init__.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/context_bridge.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/context_capture_processor.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/extract.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/litellm_bridge.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/lucidic_exporter.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/openai_agents_instrumentor.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/openai_patch.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/openai_uninstrument.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/telemetry_init.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/telemetry_manager.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/utils/__init__.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/utils/model_pricing.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/telemetry/utils/provider.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/utils/__init__.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/utils/logger.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/utils/serialization.py +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai.egg-info/SOURCES.txt +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai.egg-info/dependency_links.txt +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai.egg-info/requires.txt +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai.egg-info/top_level.txt +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/setup.cfg +0 -0
- {lucidicai-3.4.0 → lucidicai-3.4.2}/tests/test_event_creation.py +0 -0
{lucidicai-3.4.0 → lucidicai-3.4.2}/lucidicai/api/client.py

@@ -207,6 +207,19 @@ class HttpClient:
         data = self._add_timestamp(data)
         return self.request("PUT", endpoint, json=data)

+    def patch(self, endpoint: str, data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+        """Make a synchronous PATCH request.
+
+        Args:
+            endpoint: API endpoint (without base URL)
+            data: Request body data
+
+        Returns:
+            Response data as dictionary
+        """
+        data = self._add_timestamp(data)
+        return self.request("PATCH", endpoint, json=data)
+
     def delete(self, endpoint: str, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
         """Make a synchronous DELETE request.

@@ -301,6 +314,19 @@ class HttpClient:
         data = self._add_timestamp(data)
         return await self.arequest("PUT", endpoint, json=data)

+    async def apatch(self, endpoint: str, data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
+        """Make an asynchronous PATCH request.
+
+        Args:
+            endpoint: API endpoint (without base URL)
+            data: Request body data
+
+        Returns:
+            Response data as dictionary
+        """
+        data = self._add_timestamp(data)
+        return await self.arequest("PATCH", endpoint, json=data)
+
     async def adelete(self, endpoint: str, params: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
         """Make an asynchronous DELETE request.

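Both new helpers mirror the existing put()/aput() pair: the body is passed through _add_timestamp() and dispatched with the PATCH verb via request()/arequest(). Below is a minimal sketch of how they might be called, assuming an already-configured HttpClient instance (its construction is outside this diff); the prompt name and metadata values are made up for illustration.

```python
from typing import Any, Dict

from lucidicai.api.client import HttpClient


def mark_reviewed(http: HttpClient, agent_id: str) -> Dict[str, Any]:
    # Synchronous PATCH; patch() stamps the body via _add_timestamp() before sending.
    return http.patch(
        "sdk/prompts",
        data={
            "agent_id": agent_id,
            "prompt_name": "greeting",  # hypothetical prompt name
            "label": "production",
            "metadata": {"reviewed": True},
        },
    )


async def amark_reviewed(http: HttpClient, agent_id: str) -> Dict[str, Any]:
    # Asynchronous variant: same payload, awaited through apatch().
    return await http.apatch(
        "sdk/prompts",
        data={
            "agent_id": agent_id,
            "prompt_name": "greeting",
            "label": "production",
            "metadata": {"reviewed": True},
        },
    )
```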
lucidicai-3.4.2/lucidicai/api/resources/prompt.py (new file)

@@ -0,0 +1,342 @@
+"""Prompt resource API operations."""
+import logging
+import time
+from dataclasses import dataclass
+from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING
+
+from ..client import HttpClient
+
+if TYPE_CHECKING:
+    from ...core.config import SDKConfig
+
+logger = logging.getLogger("Lucidic")
+
+
+@dataclass
+class Prompt:
+    """Represents a prompt retrieved from the Lucidic prompt database."""
+
+    raw_content: str
+    content: str
+    metadata: Dict[str, Any]
+
+    def __str__(self) -> str:
+        return self.content
+
+    def replace_variables(self, variables: Dict[str, Any]) -> "Prompt":
+        """Replace template variables in the prompt content.
+
+        Replaces {{key}} placeholders in raw_content with the provided
+        variable values and updates content.
+
+        Args:
+            variables: Dictionary mapping variable names to their values.
+
+        Returns:
+            self, for method chaining.
+        """
+        content = self.raw_content
+        for key, value in variables.items():
+            content = content.replace(f"{{{{{key}}}}}", str(value))
+        self.content = content
+        return self
+
+
+class PromptResource:
+    """Handle prompt-related API operations."""
+
+    def __init__(self, http: HttpClient, config: "SDKConfig", production: bool = False):
+        """Initialize prompt resource.
+
+        Args:
+            http: HTTP client instance
+            config: SDK configuration
+            production: Whether to suppress errors in production mode
+        """
+        self.http = http
+        self._config = config
+        self._production = production
+        self._cache: Dict[Tuple[str, str], Dict[str, Any]] = {}
+
+    def _invalidate_cache(self, prompt_name: str, label: Optional[str] = None) -> None:
+        """Invalidate cached prompt entries.
+
+        Args:
+            prompt_name: Name of the prompt to invalidate.
+            label: If provided, only invalidate the specific (prompt_name, label) entry.
+                If None, invalidate all entries matching prompt_name.
+        """
+        if label is not None:
+            self._cache.pop((prompt_name, label), None)
+        else:
+            keys_to_remove = [k for k in self._cache if k[0] == prompt_name]
+            for k in keys_to_remove:
+                del self._cache[k]
+
+    def _is_cache_valid(self, cache_key: Tuple[str, str], cache_ttl: int) -> bool:
+        """Check if a cached prompt is still valid.
+
+        Args:
+            cache_key: The (prompt_name, label) tuple
+            cache_ttl: Cache TTL in seconds (-1 = indefinite, 0 = no cache)
+
+        Returns:
+            True if cache is valid, False otherwise
+        """
+        if cache_ttl == 0:
+            return False
+        if cache_key not in self._cache:
+            return False
+        if cache_ttl == -1:
+            return True
+        cached = self._cache[cache_key]
+        return (time.time() - cached["timestamp"]) < cache_ttl
+
+    def get(
+        self,
+        prompt_name: str,
+        variables: Optional[Dict[str, Any]] = None,
+        label: str = "production",
+        cache_ttl: int = 0,
+    ) -> Prompt:
+        """Get a prompt from the prompt database.
+
+        Args:
+            prompt_name: Name of the prompt.
+            variables: Variables to interpolate into the prompt.
+            label: Prompt version label (default: "production").
+            cache_ttl: Cache TTL in seconds. 0 = no cache, -1 = cache indefinitely,
+                positive value = seconds before refetching.
+
+        Returns:
+            A Prompt object with raw_content, content (with variables replaced),
+            and metadata. Use str(prompt) for backward-compatible string access.
+        """
+        try:
+            cache_key = (prompt_name, label)
+
+            # Check cache
+            if self._is_cache_valid(cache_key, cache_ttl):
+                raw_content = self._cache[cache_key]["content"]
+                metadata = self._cache[cache_key]["metadata"]
+            else:
+                response = self.http.get(
+                    "sdk/prompts",
+                    {"prompt_name": prompt_name, "label": label, "agent_id": self._config.agent_id},
+                )
+                raw_content = response.get("prompt_content", "")
+                metadata = response.get("metadata", {})
+
+                # Store in cache if caching is enabled
+                if cache_ttl != 0:
+                    self._cache[cache_key] = {
+                        "content": raw_content,
+                        "metadata": metadata,
+                        "timestamp": time.time(),
+                    }
+
+            prompt = Prompt(raw_content=raw_content, content=raw_content, metadata=metadata)
+            if variables:
+                prompt.replace_variables(variables)
+            return prompt
+        except Exception as e:
+            if self._production:
+                logger.error(f"[PromptResource] Failed to get prompt: {e}")
+                return Prompt(raw_content="", content="", metadata={})
+            raise
+
+    async def aget(
+        self,
+        prompt_name: str,
+        variables: Optional[Dict[str, Any]] = None,
+        label: str = "production",
+        cache_ttl: int = 0,
+    ) -> Prompt:
+        """Get a prompt from the prompt database (asynchronous).
+
+        See get() for full documentation.
+        """
+        try:
+            cache_key = (prompt_name, label)
+
+            # Check cache
+            if self._is_cache_valid(cache_key, cache_ttl):
+                raw_content = self._cache[cache_key]["content"]
+                metadata = self._cache[cache_key]["metadata"]
+            else:
+                response = await self.http.aget(
+                    "sdk/prompts",
+                    {"prompt_name": prompt_name, "label": label, "agent_id": self._config.agent_id},
+                )
+                raw_content = response.get("prompt_content", "")
+                metadata = response.get("metadata", {})
+
+                # Store in cache if caching is enabled
+                if cache_ttl != 0:
+                    self._cache[cache_key] = {
+                        "content": raw_content,
+                        "metadata": metadata,
+                        "timestamp": time.time(),
+                    }
+
+            prompt = Prompt(raw_content=raw_content, content=raw_content, metadata=metadata)
+            if variables:
+                prompt.replace_variables(variables)
+            return prompt
+        except Exception as e:
+            if self._production:
+                logger.error(f"[PromptResource] Failed to get prompt: {e}")
+                return Prompt(raw_content="", content="", metadata={})
+            raise
+
+    def update(
+        self,
+        prompt_name: str,
+        prompt_content: str,
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        labels: Optional[List[str]] = None,
+    ) -> Prompt:
+        """Update a prompt, creating a new immutable version.
+
+        Args:
+            prompt_name: Name of the prompt to update.
+            prompt_content: New content for the prompt.
+            description: Optional description for the prompt version.
+            metadata: Optional metadata dict to attach to the prompt version.
+            labels: Optional list of labels to assign to the new version.
+
+        Returns:
+            A Prompt object with the new content and metadata from the response.
+        """
+        try:
+            body: Dict[str, Any] = {
+                "agent_id": self._config.agent_id,
+                "prompt_name": prompt_name,
+                "prompt_content": prompt_content,
+            }
+            if description is not None:
+                body["description"] = description
+            if metadata is not None:
+                body["metadata"] = metadata
+            if labels is not None:
+                body["labels"] = labels
+
+            response = self.http.put("sdk/prompts", data=body)
+            response_metadata = response.get("metadata", {})
+
+            self._invalidate_cache(prompt_name)
+
+            return Prompt(raw_content=prompt_content, content=prompt_content, metadata=response_metadata)
+        except Exception as e:
+            if self._production:
+                logger.error(f"[PromptResource] Failed to update prompt: {e}")
+                return Prompt(raw_content="", content="", metadata={})
+            raise
+
+    async def aupdate(
+        self,
+        prompt_name: str,
+        prompt_content: str,
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        labels: Optional[List[str]] = None,
+    ) -> Prompt:
+        """Update a prompt, creating a new immutable version (asynchronous).
+
+        See update() for full documentation.
+        """
+        try:
+            body: Dict[str, Any] = {
+                "agent_id": self._config.agent_id,
+                "prompt_name": prompt_name,
+                "prompt_content": prompt_content,
+            }
+            if description is not None:
+                body["description"] = description
+            if metadata is not None:
+                body["metadata"] = metadata
+            if labels is not None:
+                body["labels"] = labels
+
+            response = await self.http.aput("sdk/prompts", data=body)
+            response_metadata = response.get("metadata", {})
+
+            self._invalidate_cache(prompt_name)
+
+            return Prompt(raw_content=prompt_content, content=prompt_content, metadata=response_metadata)
+        except Exception as e:
+            if self._production:
+                logger.error(f"[PromptResource] Failed to update prompt: {e}")
+                return Prompt(raw_content="", content="", metadata={})
+            raise
+
+    def update_metadata(
+        self,
+        prompt_name: str,
+        label: str,
+        metadata: Dict[str, Any],
+    ) -> Prompt:
+        """Update metadata on an existing prompt version.
+
+        Sends a PATCH request to update only the metadata for the prompt version
+        identified by (prompt_name, label). The prompt content is not returned
+        by this endpoint, so the returned Prompt will have empty content fields.
+
+        Args:
+            prompt_name: Name of the prompt.
+            label: Label identifying the prompt version to update.
+            metadata: Metadata dict to set on the prompt version.
+
+        Returns:
+            A Prompt object with empty content and the updated metadata.
+        """
+        try:
+            body: Dict[str, Any] = {
+                "agent_id": self._config.agent_id,
+                "prompt_name": prompt_name,
+                "label": label,
+                "metadata": metadata,
+            }
+
+            response = self.http.patch("sdk/prompts", data=body)
+            response_metadata = response.get("metadata", {})
+
+            self._invalidate_cache(prompt_name, label)
+
+            return Prompt(raw_content="", content="", metadata=response_metadata)
+        except Exception as e:
+            if self._production:
+                logger.error(f"[PromptResource] Failed to update prompt metadata: {e}")
+                return Prompt(raw_content="", content="", metadata={})
+            raise
+
+    async def aupdate_metadata(
+        self,
+        prompt_name: str,
+        label: str,
+        metadata: Dict[str, Any],
+    ) -> Prompt:
+        """Update metadata on an existing prompt version (asynchronous).
+
+        See update_metadata() for full documentation.
+        """
+        try:
+            body: Dict[str, Any] = {
+                "agent_id": self._config.agent_id,
+                "prompt_name": prompt_name,
+                "label": label,
+                "metadata": metadata,
+            }
+
+            response = await self.http.apatch("sdk/prompts", data=body)
+            response_metadata = response.get("metadata", {})
+
+            self._invalidate_cache(prompt_name, label)
+
+            return Prompt(raw_content="", content="", metadata=response_metadata)
+        except Exception as e:
+            if self._production:
+                logger.error(f"[PromptResource] Failed to update prompt metadata: {e}")
+                return Prompt(raw_content="", content="", metadata={})
+            raise
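The new module keeps Prompt backward compatible (str(prompt) still yields the interpolated content) and adds replace_variables() for chaining, per-(name, label) cache invalidation, update()/aupdate() backed by PUT on sdk/prompts, and update_metadata()/aupdate_metadata() backed by the new PATCH helpers. An illustrative sketch of that surface, assuming http and config are an already-initialized HttpClient and SDKConfig (their setup is outside this diff) and using made-up prompt names, variables, and labels:

```python
from lucidicai.api.resources.prompt import PromptResource


def demo_prompt_api(http, config) -> None:
    # production=True logs failures and returns empty Prompt objects instead of raising.
    prompts = PromptResource(http, config, production=True)

    # Fetch the "production"-labelled version, cache it for five minutes, and
    # interpolate {{user_name}} in one call ("greeting" is a made-up name).
    prompt = prompts.get("greeting", variables={"user_name": "Ada"}, cache_ttl=300)
    print(str(prompt))         # interpolated content
    print(prompt.raw_content)  # original template, {{user_name}} left intact

    # Publish a new immutable version; this also invalidates cached copies of "greeting".
    prompts.update(
        "greeting",
        "Hello {{user_name}}, welcome back!",
        description="friendlier wording",
        labels=["staging"],
    )

    # PATCH only the metadata on the version currently labelled "production".
    prompts.update_metadata("greeting", "production", {"reviewed": True})
```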
lucidicai-3.4.0/lucidicai/api/resources/prompt.py (deleted)

@@ -1,162 +0,0 @@
-"""Prompt resource API operations."""
-import logging
-import time
-from dataclasses import dataclass
-from typing import Any, Dict, Optional, Tuple, TYPE_CHECKING
-
-from ..client import HttpClient
-
-if TYPE_CHECKING:
-    from ...core.config import SDKConfig
-
-logger = logging.getLogger("Lucidic")
-
-
-@dataclass
-class Prompt:
-    """Represents a prompt retrieved from the Lucidic prompt database."""
-
-    raw_content: str
-    content: str
-    metadata: Dict[str, Any]
-
-    def __str__(self) -> str:
-        return self.content
-
-
-class PromptResource:
-    """Handle prompt-related API operations."""
-
-    def __init__(self, http: HttpClient, config: "SDKConfig", production: bool = False):
-        """Initialize prompt resource.
-
-        Args:
-            http: HTTP client instance
-            config: SDK configuration
-            production: Whether to suppress errors in production mode
-        """
-        self.http = http
-        self._config = config
-        self._production = production
-        self._cache: Dict[Tuple[str, str], Dict[str, Any]] = {}
-
-    def _is_cache_valid(self, cache_key: Tuple[str, str], cache_ttl: int) -> bool:
-        """Check if a cached prompt is still valid.
-
-        Args:
-            cache_key: The (prompt_name, label) tuple
-            cache_ttl: Cache TTL in seconds (-1 = indefinite, 0 = no cache)
-
-        Returns:
-            True if cache is valid, False otherwise
-        """
-        if cache_ttl == 0:
-            return False
-        if cache_key not in self._cache:
-            return False
-        if cache_ttl == -1:
-            return True
-        cached = self._cache[cache_key]
-        return (time.time() - cached["timestamp"]) < cache_ttl
-
-    def get(
-        self,
-        prompt_name: str,
-        variables: Optional[Dict[str, Any]] = None,
-        label: str = "production",
-        cache_ttl: int = 0,
-    ) -> Prompt:
-        """Get a prompt from the prompt database.
-
-        Args:
-            prompt_name: Name of the prompt.
-            variables: Variables to interpolate into the prompt.
-            label: Prompt version label (default: "production").
-            cache_ttl: Cache TTL in seconds. 0 = no cache, -1 = cache indefinitely,
-                positive value = seconds before refetching.
-
-        Returns:
-            A Prompt object with raw_content, content (with variables replaced),
-            and metadata. Use str(prompt) for backward-compatible string access.
-        """
-        try:
-            cache_key = (prompt_name, label)
-
-            # Check cache
-            if self._is_cache_valid(cache_key, cache_ttl):
-                raw_content = self._cache[cache_key]["content"]
-                metadata = self._cache[cache_key]["metadata"]
-            else:
-                response = self.http.get(
-                    "getprompt",
-                    {"prompt_name": prompt_name, "label": label, "agent_id": self._config.agent_id},
-                )
-                raw_content = response.get("prompt_content", "")
-                metadata = response.get("metadata", {})
-
-                # Store in cache if caching is enabled
-                if cache_ttl != 0:
-                    self._cache[cache_key] = {
-                        "content": raw_content,
-                        "metadata": metadata,
-                        "timestamp": time.time(),
-                    }
-
-            # Replace variables
-            content = raw_content
-            if variables:
-                for key, value in variables.items():
-                    content = content.replace(f"{{{{{key}}}}}", str(value))
-
-            return Prompt(raw_content=raw_content, content=content, metadata=metadata)
-        except Exception as e:
-            if self._production:
-                logger.error(f"[PromptResource] Failed to get prompt: {e}")
-                return Prompt(raw_content="", content="", metadata={})
-            raise
-
-    async def aget(
-        self,
-        prompt_name: str,
-        variables: Optional[Dict[str, Any]] = None,
-        label: str = "production",
-        cache_ttl: int = 0,
-    ) -> Prompt:
-        """Get a prompt from the prompt database (asynchronous).
-
-        See get() for full documentation.
-        """
-        try:
-            cache_key = (prompt_name, label)
-
-            # Check cache
-            if self._is_cache_valid(cache_key, cache_ttl):
-                raw_content = self._cache[cache_key]["content"]
-                metadata = self._cache[cache_key]["metadata"]
-            else:
-                response = await self.http.aget(
-                    "getprompt",
-                    {"prompt_name": prompt_name, "label": label, "agent_id": self._config.agent_id},
-                )
-                raw_content = response.get("prompt_content", "")
-                metadata = response.get("metadata", {})
-
-                # Store in cache if caching is enabled
-                if cache_ttl != 0:
-                    self._cache[cache_key] = {
-                        "content": raw_content,
-                        "metadata": metadata,
-                        "timestamp": time.time(),
-                    }
-
-            content = raw_content
-            if variables:
-                for key, value in variables.items():
-                    content = content.replace(f"{{{{{key}}}}}", str(value))
-
-            return Prompt(raw_content=raw_content, content=content, metadata=metadata)
-        except Exception as e:
-            if self._production:
-                logger.error(f"[PromptResource] Failed to get prompt: {e}")
-                return Prompt(raw_content="", content="", metadata={})
-            raise