lucidicai 3.3.1__py3-none-any.whl → 3.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lucidicai/__init__.py CHANGED
@@ -34,11 +34,14 @@ from .core.errors import (
     FeatureFlagError,
 )
 
+# Prompt object
+from .api.resources.prompt import Prompt
+
 # Integrations
 from .integrations.livekit import setup_livekit
 
 # Version
-__version__ = "3.3.1"
+__version__ = "3.4.0"
 
 # All exports
 __all__ = [
@@ -53,6 +56,8 @@ __all__ = [
     "InvalidOperationError",
     "PromptError",
     "FeatureFlagError",
+    # Prompt object
+    "Prompt",
     # Integrations
     "setup_livekit",
     # Version
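
Taken together, these __init__.py changes make the new Prompt dataclass importable from the package root and bump the reported version. A minimal sketch of what the export looks like to a caller (the instance here is constructed by hand purely for illustration; in normal use Prompt objects come back from the prompt API shown below):

    from lucidicai import Prompt, __version__

    prompt = Prompt(
        raw_content="Hello {{name}}",         # template as stored in the prompt database
        content="Hello Ada",                  # template with variables substituted
        metadata={"label": "production"},     # assumed shape; whatever the backend returns
    )
    assert str(prompt) == prompt.content      # __str__ keeps string-style callers working
    print(__version__)                        # "3.4.0"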
lucidicai/api/resources/prompt.py CHANGED
@@ -1,6 +1,7 @@
 """Prompt resource API operations."""
 import logging
 import time
+from dataclasses import dataclass
 from typing import Any, Dict, Optional, Tuple, TYPE_CHECKING
 
 from ..client import HttpClient
@@ -11,6 +12,18 @@ if TYPE_CHECKING:
 logger = logging.getLogger("Lucidic")
 
 
+@dataclass
+class Prompt:
+    """Represents a prompt retrieved from the Lucidic prompt database."""
+
+    raw_content: str
+    content: str
+    metadata: Dict[str, Any]
+
+    def __str__(self) -> str:
+        return self.content
+
+
 class PromptResource:
     """Handle prompt-related API operations."""
 
@@ -52,7 +65,7 @@ class PromptResource:
         variables: Optional[Dict[str, Any]] = None,
         label: str = "production",
         cache_ttl: int = 0,
-    ) -> str:
+    ) -> Prompt:
         """Get a prompt from the prompt database.
 
         Args:
@@ -63,38 +76,43 @@ class PromptResource:
                 positive value = seconds before refetching.
 
         Returns:
-            The prompt content with variables interpolated.
+            A Prompt object with raw_content, content (with variables replaced),
+            and metadata. Use str(prompt) for backward-compatible string access.
         """
         try:
             cache_key = (prompt_name, label)
 
             # Check cache
             if self._is_cache_valid(cache_key, cache_ttl):
-                prompt = self._cache[cache_key]["content"]
+                raw_content = self._cache[cache_key]["content"]
+                metadata = self._cache[cache_key]["metadata"]
             else:
                 response = self.http.get(
                     "getprompt",
                     {"prompt_name": prompt_name, "label": label, "agent_id": self._config.agent_id},
                 )
-                prompt = response.get("prompt_content", "")
+                raw_content = response.get("prompt_content", "")
+                metadata = response.get("metadata", {})
 
                 # Store in cache if caching is enabled
                 if cache_ttl != 0:
                     self._cache[cache_key] = {
-                        "content": prompt,
+                        "content": raw_content,
+                        "metadata": metadata,
                         "timestamp": time.time(),
                     }
 
             # Replace variables
+            content = raw_content
             if variables:
                 for key, value in variables.items():
-                    prompt = prompt.replace(f"{{{{{key}}}}}", str(value))
+                    content = content.replace(f"{{{{{key}}}}}", str(value))
 
-            return prompt
+            return Prompt(raw_content=raw_content, content=content, metadata=metadata)
         except Exception as e:
             if self._production:
                 logger.error(f"[PromptResource] Failed to get prompt: {e}")
-                return ""
+                return Prompt(raw_content="", content="", metadata={})
             raise
 
     async def aget(
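
The variable substitution in get() is plain string replacement of double-brace placeholders. A standalone sketch of the same loop, with a made-up template and variables for illustration:

    raw_content = "Summarize {{doc_title}} in {{word_limit}} words."
    variables = {"doc_title": "the Q3 report", "word_limit": 50}

    content = raw_content
    for key, value in variables.items():
        # f"{{{{{key}}}}}" renders as the literal token {{key}}, e.g. {{doc_title}}
        content = content.replace(f"{{{{{key}}}}}", str(value))

    # content == "Summarize the Q3 report in 50 words."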
@@ -103,7 +121,7 @@ class PromptResource:
         variables: Optional[Dict[str, Any]] = None,
         label: str = "production",
         cache_ttl: int = 0,
-    ) -> str:
+    ) -> Prompt:
         """Get a prompt from the prompt database (asynchronous).
 
         See get() for full documentation.
@@ -113,28 +131,32 @@ class PromptResource:
 
             # Check cache
             if self._is_cache_valid(cache_key, cache_ttl):
-                prompt = self._cache[cache_key]["content"]
+                raw_content = self._cache[cache_key]["content"]
+                metadata = self._cache[cache_key]["metadata"]
             else:
                 response = await self.http.aget(
                     "getprompt",
                     {"prompt_name": prompt_name, "label": label, "agent_id": self._config.agent_id},
                 )
-                prompt = response.get("prompt_content", "")
+                raw_content = response.get("prompt_content", "")
+                metadata = response.get("metadata", {})
 
                 # Store in cache if caching is enabled
                 if cache_ttl != 0:
                     self._cache[cache_key] = {
-                        "content": prompt,
+                        "content": raw_content,
+                        "metadata": metadata,
                         "timestamp": time.time(),
                     }
 
+            content = raw_content
             if variables:
                 for key, value in variables.items():
-                    prompt = prompt.replace(f"{{{{{key}}}}}", str(value))
+                    content = content.replace(f"{{{{{key}}}}}", str(value))
 
-            return prompt
+            return Prompt(raw_content=raw_content, content=content, metadata=metadata)
         except Exception as e:
             if self._production:
                 logger.error(f"[PromptResource] Failed to get prompt: {e}")
-                return ""
+                return Prompt(raw_content="", content="", metadata={})
             raise
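
Both get() and aget() now return a Prompt instead of a bare string, with str(prompt) preserving the old behavior (including the empty-string fallback on errors in production mode). A hedged usage sketch, assuming prompts is an already-constructed PromptResource instance; how that instance is obtained is outside this diff:

    import asyncio

    async def fetch_greeting(prompts):
        prompt = await prompts.aget(
            "greeting",                      # hypothetical prompt name
            variables={"name": "Ada"},
            label="production",
            cache_ttl=60,                    # reuse the cached copy for 60 seconds
        )
        print(prompt.content)                # text with variables substituted
        print(prompt.raw_content)            # original template
        print(prompt.metadata)               # backend-provided metadata
        return str(prompt)                   # backward-compatible string access

    # asyncio.run(fetch_greeting(prompts)) once a PromptResource is available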
lucidicai-3.3.1.dist-info/METADATA → lucidicai-3.4.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lucidicai
-Version: 3.3.1
+Version: 3.4.0
 Summary: Lucidic AI Python SDK
 Author: Andy Liang
 Author-email: andy@lucidic.ai
lucidicai-3.3.1.dist-info/RECORD → lucidicai-3.4.0.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
-lucidicai/__init__.py,sha256=Dur2YTPgHbjFM09QQ36SHOHE0B62uFb5dIUHKo4oy7g,1284
+lucidicai/__init__.py,sha256=F60cdLYiZ2P2xh5B1GXzIUFckj-HUwGZimMOAfepXUk,1376
 lucidicai/action.py,sha256=sPRd1hTIVXDqnvG9ZXWEipUFh0bsXcE0Fm7RVqmVccM,237
 lucidicai/client.py,sha256=BGKP91_Oj5kHQU0osYf1T_BWakL8KIhs0AgUc5X99sU,15104
 lucidicai/constants.py,sha256=zN8O7TjoRHRlaGa9CZUWppS73rhzKGwaEkF9XMTV0Cg,1160
@@ -26,7 +26,7 @@ lucidicai/api/resources/evals.py,sha256=_3nLE6dMLht844mWw7kl_hctjv5JIuC6MP06YWUg
 lucidicai/api/resources/event.py,sha256=GTIU5sIbLNTWAHk4rB120xWTRkhnraz9JNfamEygyNo,14267
 lucidicai/api/resources/experiment.py,sha256=fOIKJ5d89bHJBVZ3wjbhY_6XF3kLHz9TE3BVPA5pNpA,3563
 lucidicai/api/resources/feature_flag.py,sha256=ii412DIkZCEAhrXdGydcpQKveqGlFq4NlgdmWQnU83c,2259
-lucidicai/api/resources/prompt.py,sha256=5pIV3vTfOQkovQpINohDVseY678E04CWz2E5V4NA-i8,4625
+lucidicai/api/resources/prompt.py,sha256=KAvpgWuLzo4HhSHy1vSBApNNplNAfMNyjYjv0fsurHM,5637
 lucidicai/api/resources/session.py,sha256=jW_bftHdunhLHl_3-k0nqB5FrtLhlFeCF0tMFE82nNw,20761
 lucidicai/core/__init__.py,sha256=b0YQkd8190Y_GgwUcmf0tOiSLARd7L4kq4jwfhhGAyI,39
 lucidicai/core/config.py,sha256=q4h-yR35Ay_3znL7vavri6ScfeM69RjHShNNzjoQthc,10194
@@ -93,7 +93,7 @@ lucidicai/utils/images.py,sha256=z8mlIKgFfrIbuk-l4L2rB62uw_uPO79sHPXPY7eLu2A,128
 lucidicai/utils/logger.py,sha256=R3B3gSee64F6UVHUrShihBq_O7W7bgfrBiVDXTO3Isg,4777
 lucidicai/utils/queue.py,sha256=8DQwnGw7pINEJ0dNSkB0PhdPW-iBQQ-YZg23poe4umE,17323
 lucidicai/utils/serialization.py,sha256=KdOREZd7XBxFBAZ86DePMfYPzSVyKr4RcgUa82aFxrs,820
-lucidicai-3.3.1.dist-info/METADATA,sha256=-VsGzxsuN4ux1cLJN6jcbEO2cOv2G756BdsAWY8uiio,902
-lucidicai-3.3.1.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-lucidicai-3.3.1.dist-info/top_level.txt,sha256=vSSdM3lclF4I5tyVC0xxUk8eIRnnYXMe1hW-eO91HUo,10
-lucidicai-3.3.1.dist-info/RECORD,,
+lucidicai-3.4.0.dist-info/METADATA,sha256=8-0BGkaSBc5YCjeVpDV6QFTCgZiCWDnzeY3Y00RcvOg,902
+lucidicai-3.4.0.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
+lucidicai-3.4.0.dist-info/top_level.txt,sha256=vSSdM3lclF4I5tyVC0xxUk8eIRnnYXMe1hW-eO91HUo,10
+lucidicai-3.4.0.dist-info/RECORD,,