deepeval 3.6.3__py3-none-any.whl → 3.6.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
deepeval/_version.py CHANGED
@@ -1 +1 @@
- __version__: str = "3.6.3"
+ __version__: str = "3.6.4"
deepeval/prompt/prompt.py CHANGED
@@ -8,6 +8,7 @@ import os
  from pydantic import BaseModel
  import asyncio
  import portalocker
+ import threading

  from deepeval.prompt.api import (
      PromptHttpResponse,
@@ -20,15 +21,39 @@ from deepeval.prompt.api import (
  from deepeval.prompt.utils import interpolate_text
  from deepeval.confident.api import Api, Endpoints, HttpMethods
  from deepeval.constants import HIDDEN_DIR
- from deepeval.utils import (
-     get_or_create_event_loop,
-     get_or_create_general_event_loop,
- )

  CACHE_FILE_NAME = f"{HIDDEN_DIR}/.deepeval-prompt-cache.json"
  VERSION_CACHE_KEY = "version"
  LABEL_CACHE_KEY = "label"

+ # Global background event loop for polling
+ _polling_loop: Optional[asyncio.AbstractEventLoop] = None
+ _polling_thread: Optional[threading.Thread] = None
+ _polling_loop_lock = threading.Lock()
+
+
+ def _get_or_create_polling_loop() -> asyncio.AbstractEventLoop:
+     """Get or create a background event loop for polling that runs in a daemon thread."""
+     global _polling_loop, _polling_thread
+
+     with _polling_loop_lock:
+         if _polling_loop is None or not _polling_loop.is_running():
+
+             def run_loop():
+                 global _polling_loop
+                 _polling_loop = asyncio.new_event_loop()
+                 asyncio.set_event_loop(_polling_loop)
+                 _polling_loop.run_forever()
+
+             _polling_thread = threading.Thread(target=run_loop, daemon=True)
+             _polling_thread.start()
+
+             # Wait for loop to be ready
+             while _polling_loop is None:
+                 time.sleep(0.01)
+
+     return _polling_loop
+

  class CustomEncoder(json.JSONEncoder):
      def default(self, obj):
@@ -80,11 +105,22 @@ class Prompt:
          self._version = None
          self._polling_tasks: Dict[str, Dict[str, asyncio.Task]] = {}
          self._refresh_map: Dict[str, Dict[str, int]] = {}
+         self._lock = (
+             threading.Lock()
+         )  # Protect instance attributes from race conditions
          if template:
              self._type = PromptType.TEXT
          elif messages_template:
              self._type = PromptType.LIST

+     def __del__(self):
+         """Cleanup polling tasks when instance is destroyed"""
+         try:
+             self._stop_polling()
+         except Exception:
+             # Suppress exceptions during cleanup to avoid issues in interpreter shutdown
+             pass
+
      @property
      def version(self):
          if self._version is not None and self._version != "latest":
@@ -100,33 +136,37 @@
          self._version = value

      def interpolate(self, **kwargs):
-         if self._type == PromptType.TEXT:
-             if self._text_template is None:
+         with self._lock:
+             prompt_type = self._type
+             text_template = self._text_template
+             messages_template = self._messages_template
+             interpolation_type = self._interpolation_type
+
+         if prompt_type == PromptType.TEXT:
+             if text_template is None:
                  raise TypeError(
                      "Unable to interpolate empty prompt template. Please pull a prompt from Confident AI or set template manually to continue."
                  )

-             return interpolate_text(
-                 self._interpolation_type, self._text_template, **kwargs
-             )
+             return interpolate_text(interpolation_type, text_template, **kwargs)

-         elif self._type == PromptType.LIST:
-             if self._messages_template is None:
+         elif prompt_type == PromptType.LIST:
+             if messages_template is None:
                  raise TypeError(
                      "Unable to interpolate empty prompt template messages. Please pull a prompt from Confident AI or set template manually to continue."
                  )

              interpolated_messages = []
-             for message in self._messages_template:
+             for message in messages_template:
                  interpolated_content = interpolate_text(
-                     self._interpolation_type, message.content, **kwargs
+                     interpolation_type, message.content, **kwargs
                  )
                  interpolated_messages.append(
                      {"role": message.role, "content": interpolated_content}
                  )
              return interpolated_messages
          else:
-             raise ValueError(f"Unsupported prompt type: {self._type}")
+             raise ValueError(f"Unsupported prompt type: {prompt_type}")

      def _get_versions(self) -> List:
          if self.alias is None:
@@ -272,15 +312,16 @@
              if not cached_prompt:
                  raise ValueError("Unable to fetch prompt and load from cache")

-             self.version = cached_prompt.version
-             self.label = cached_prompt.label
-             self._text_template = cached_prompt.template
-             self._messages_template = cached_prompt.messages_template
-             self._prompt_version_id = cached_prompt.prompt_version_id
-             self._type = PromptType(cached_prompt.type)
-             self._interpolation_type = PromptInterpolationType(
-                 cached_prompt.interpolation_type
-             )
+             with self._lock:
+                 self.version = cached_prompt.version
+                 self.label = cached_prompt.label
+                 self._text_template = cached_prompt.template
+                 self._messages_template = cached_prompt.messages_template
+                 self._prompt_version_id = cached_prompt.prompt_version_id
+                 self._type = PromptType(cached_prompt.type)
+                 self._interpolation_type = PromptInterpolationType(
+                     cached_prompt.interpolation_type
+                 )

              end_time = time.perf_counter()
              time_taken = format(end_time - start_time, ".2f")
@@ -300,7 +341,6 @@
      ):
          should_write_on_first_fetch = False
          if refresh:
-             default_to_cache = True
              # Check if we need to bootstrap the cache
              cached_prompt = self._read_from_cache(
                  self.alias, version=version, label=label
@@ -316,12 +356,10 @@
              )

          # Manage background prompt polling
-         loop = get_or_create_general_event_loop()
-         if loop.is_running():
-             loop.create_task(self.create_polling_task(version, label, refresh))
-         else:
-             loop.run_until_complete(
-                 self.create_polling_task(version, label, refresh)
+         if refresh:
+             loop = _get_or_create_polling_loop()
+             asyncio.run_coroutine_threadsafe(
+                 self.create_polling_task(version, label, refresh), loop
              )

          if default_to_cache:
@@ -330,15 +368,20 @@
                      self.alias, version=version, label=label
                  )
                  if cached_prompt:
-                     self.version = cached_prompt.version
-                     self.label = cached_prompt.label
-                     self._text_template = cached_prompt.template
-                     self._messages_template = cached_prompt.messages_template
-                     self._prompt_version_id = cached_prompt.prompt_version_id
-                     self._type = PromptType(cached_prompt.type)
-                     self._interpolation_type = PromptInterpolationType(
-                         cached_prompt.interpolation_type
-                     )
+                     with self._lock:
+                         self.version = cached_prompt.version
+                         self.label = cached_prompt.label
+                         self._text_template = cached_prompt.template
+                         self._messages_template = (
+                             cached_prompt.messages_template
+                         )
+                         self._prompt_version_id = (
+                             cached_prompt.prompt_version_id
+                         )
+                         self._type = PromptType(cached_prompt.type)
+                         self._interpolation_type = PromptInterpolationType(
+                             cached_prompt.interpolation_type
+                         )
                      return
              except:
                  pass
@@ -402,13 +445,14 @@
                  return
              raise

-         self.version = response.version
-         self.label = response.label
-         self._text_template = response.text
-         self._messages_template = response.messages
-         self._prompt_version_id = response.id
-         self._type = response.type
-         self._interpolation_type = response.interpolation_type
+         with self._lock:
+             self.version = response.version
+             self.label = response.label
+             self._text_template = response.text
+             self._messages_template = response.messages
+             self._prompt_version_id = response.id
+             self._type = response.type
+             self._interpolation_type = response.interpolation_type

          end_time = time.perf_counter()
          time_taken = format(end_time - start_time, ".2f")
@@ -483,11 +527,7 @@
          version: Optional[str],
          label: Optional[str],
          refresh: Optional[int] = 60,
-         default_to_cache: bool = True,
      ):
-         if version is None and label is None:
-             return
-
          # If polling task doesn't exist, start it
          CACHE_KEY = LABEL_CACHE_KEY if label else VERSION_CACHE_KEY
          cache_value = label if label else version
@@ -506,9 +546,7 @@
          self._refresh_map[CACHE_KEY][cache_value] = refresh
          if not polling_task:
              self._polling_tasks[CACHE_KEY][cache_value] = (
-                 asyncio.create_task(
-                     self.poll(version, label, default_to_cache)
-                 )
+                 asyncio.create_task(self.poll(version, label))
              )

          # If invalid `refresh`, stop the task
@@ -524,24 +562,12 @@
          self,
          version: Optional[str] = None,
          label: Optional[str] = None,
-         default_to_cache: bool = True,
      ):
+         CACHE_KEY = LABEL_CACHE_KEY if label else VERSION_CACHE_KEY
+         cache_value = label if label else version
+
          while True:
-             if default_to_cache:
-                 cached_prompt = self._read_from_cache(
-                     self.alias, version=version, label=label
-                 )
-                 if cached_prompt:
-                     self.version = cached_prompt.version
-                     self.label = cached_prompt.label
-                     self._text_template = cached_prompt.template
-                     self._messages_template = cached_prompt.messages_template
-                     self._prompt_version_id = cached_prompt.prompt_version_id
-                     self._type = PromptType(cached_prompt.type)
-                     self._interpolation_type = PromptInterpolationType(
-                         cached_prompt.interpolation_type
-                     )
-                     return
+             await asyncio.sleep(self._refresh_map[CACHE_KEY][cache_value])

              api = Api()
              try:
@@ -573,22 +599,43 @@
                      type=data["type"],
                      interpolation_type=data["interpolationType"],
                  )
-                 if default_to_cache:
-                     self._write_to_cache(
-                         cache_key=(
-                             LABEL_CACHE_KEY if label else VERSION_CACHE_KEY
-                         ),
-                         version=response.version,
-                         label=response.label,
-                         text_template=response.text,
-                         messages_template=response.messages,
-                         prompt_version_id=response.id,
-                         type=response.type,
-                         interpolation_type=response.interpolation_type,
-                     )
-             except Exception as e:
+
+                 # Update the cache with fresh data from server
+                 self._write_to_cache(
+                     cache_key=CACHE_KEY,
+                     version=response.version,
+                     label=response.label,
+                     text_template=response.text,
+                     messages_template=response.messages,
+                     prompt_version_id=response.id,
+                     type=response.type,
+                     interpolation_type=response.interpolation_type,
+                 )
+
+                 # Update in-memory properties with fresh data (thread-safe)
+                 with self._lock:
+                     self.version = response.version
+                     self.label = response.label
+                     self._text_template = response.text
+                     self._messages_template = response.messages
+                     self._prompt_version_id = response.id
+                     self._type = response.type
+                     self._interpolation_type = response.interpolation_type
+
+             except Exception:
                  pass

-             CACHE_KEY = LABEL_CACHE_KEY if label else VERSION_CACHE_KEY
-             cache_value = label if label else version
-             await asyncio.sleep(self._refresh_map[CACHE_KEY][cache_value])
+     def _stop_polling(self):
+         loop = _polling_loop
+         if not loop or not loop.is_running():
+             return
+
+         # Stop all polling tasks
+         for ck in list(self._polling_tasks.keys()):
+             for cv in list(self._polling_tasks[ck].keys()):
+                 task = self._polling_tasks[ck][cv]
+                 if task and not task.done():
+                     loop.call_soon_threadsafe(task.cancel)
+             self._polling_tasks[ck].clear()
+             self._refresh_map[ck].clear()
+         return
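
The prompt.py changes above replace the old get_or_create_general_event_loop flow with a single background event loop running in a daemon thread: coroutines are handed to it with asyncio.run_coroutine_threadsafe, and cancellation crosses threads via call_soon_threadsafe. The standalone sketch below illustrates that pattern in isolation; it is not deepeval's API, and the names _get_or_create_background_loop, poll, and "my-prompt" are made up for illustration.

import asyncio
import threading
import time
from typing import Optional

# Module-level loop state, mirroring the _polling_loop / _polling_thread globals above.
_loop: Optional[asyncio.AbstractEventLoop] = None
_thread: Optional[threading.Thread] = None
_loop_lock = threading.Lock()


def _get_or_create_background_loop() -> asyncio.AbstractEventLoop:
    """Lazily start a daemon thread that runs an asyncio event loop forever."""
    global _loop, _thread
    with _loop_lock:
        if _loop is None or not _loop.is_running():

            def run_loop():
                global _loop
                _loop = asyncio.new_event_loop()
                asyncio.set_event_loop(_loop)
                _loop.run_forever()

            _thread = threading.Thread(target=run_loop, daemon=True)
            _thread.start()
            while _loop is None:  # wait for the thread to create the loop
                time.sleep(0.01)
    return _loop


async def poll(name: str, interval: float) -> None:
    """Toy polling coroutine: wakes up periodically, analogous to Prompt.poll."""
    while True:
        await asyncio.sleep(interval)
        print(f"refreshed {name}")


if __name__ == "__main__":
    loop = _get_or_create_background_loop()
    # Schedule work onto the background loop from the synchronous main thread.
    future = asyncio.run_coroutine_threadsafe(poll("my-prompt", 0.5), loop)
    time.sleep(2)  # the main thread stays free for ordinary synchronous work
    # Cancelling the returned concurrent.futures.Future propagates to the task;
    # for a raw asyncio.Task, use loop.call_soon_threadsafe(task.cancel) instead.
    future.cancel()

Because the loop thread is a daemon, it does not keep the process alive, which matches the diff's choice of daemon=True for _polling_thread.
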
deepeval-3.6.3.dist-info/METADATA → deepeval-3.6.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: deepeval
- Version: 3.6.3
+ Version: 3.6.4
  Summary: The LLM Evaluation Framework
  Home-page: https://github.com/confident-ai/deepeval
  License: Apache-2.0
deepeval-3.6.3.dist-info/RECORD → deepeval-3.6.4.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
  deepeval/__init__.py,sha256=6fsb813LD_jNhqR-xZnSdE5E-KsBbC3tc4oIg5ZMgTw,2115
- deepeval/_version.py,sha256=1BsEnmEpD1mtVjCYoXBeguVgrKPAi3TRpS_a7ndu4XU,27
+ deepeval/_version.py,sha256=7aJWTxY4XnqpfnHnpzOHDXIjM0FFMGZTYkyt2xqUalQ,27
  deepeval/annotation/__init__.py,sha256=ZFhUVNNuH_YgQSZJ-m5E9iUb9TkAkEV33a6ouMDZ8EI,111
  deepeval/annotation/annotation.py,sha256=3j3-syeJepAcEj3u3e4T_BeRDzNr7yXGDIoNQGMKpwQ,2298
  deepeval/annotation/api.py,sha256=EYN33ACVzVxsFleRYm60KB4Exvff3rPJKt1VBuuX970,2147
@@ -405,7 +405,7 @@ deepeval/plugins/plugin.py,sha256=_dwsdx4Dg9DbXxK3f7zJY4QWTJQWc7QE1HmIg2Zjjag,15
  deepeval/progress_context.py,sha256=ZSKpxrE9sdgt9G3REKnVeXAv7GJXHHVGgLynpG1Pudw,3557
  deepeval/prompt/__init__.py,sha256=M99QTWdxOfiNeySGCSqN873Q80PPxqRvjLq4_Mw-X1w,49
  deepeval/prompt/api.py,sha256=665mLKiq8irXWV8kM9P_qFJipdCYZUNQFwW8AkA3itM,1777
- deepeval/prompt/prompt.py,sha256=w2BmKtSzXxobjSlBQqUjdAB0Zwe6IYaLjLg7KQvVDXE,21999
+ deepeval/prompt/prompt.py,sha256=JjPm7rB-3rnTs8oEQT4EEwnqQqE8ZFNXebVngEOWsI4,23537
  deepeval/prompt/utils.py,sha256=Ermw9P-1-T5wQ5uYuj5yWgdj7pVB_JLw8D37Qvmh9ok,1938
  deepeval/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  deepeval/red_teaming/README.md,sha256=BY5rAdpp3-sMMToEKwq0Nsd9ivkGDzPE16DeDb8GY7U,154
@@ -461,8 +461,8 @@ deepeval/tracing/tracing.py,sha256=xZEyuxdGY259nQaDkGp_qO7Avriv8hrf4L15ZfeMNV8,4
  deepeval/tracing/types.py,sha256=l_utWKerNlE5H3mOKpeUJLsvpP3cMyjH7HRANNgTmSQ,5306
  deepeval/tracing/utils.py,sha256=SLnks8apGlrV6uVnvFVl2mWYABEkvXbPXnQvq3KaU_o,7943
  deepeval/utils.py,sha256=-_o3W892u7naX4Y7a8if4mP0Rtkgtapg6Krr1ZBpj0o,17197
- deepeval-3.6.3.dist-info/LICENSE.md,sha256=0ATkuLv6QgsJTBODUHC5Rak_PArA6gv2t7inJzNTP38,11352
- deepeval-3.6.3.dist-info/METADATA,sha256=BoRZ6BEBPwkypse9Xzw8gRlsezwSrDKsT5RO9C3thQc,18754
- deepeval-3.6.3.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
- deepeval-3.6.3.dist-info/entry_points.txt,sha256=fVr8UphXTfJe9I2rObmUtfU3gkSrYeM0pLy-NbJYg10,94
- deepeval-3.6.3.dist-info/RECORD,,
+ deepeval-3.6.4.dist-info/LICENSE.md,sha256=0ATkuLv6QgsJTBODUHC5Rak_PArA6gv2t7inJzNTP38,11352
+ deepeval-3.6.4.dist-info/METADATA,sha256=oZQnVgn7bI4TUmgA7W_fsoflHL4RuT23O7oBkoo5XcM,18754
+ deepeval-3.6.4.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
+ deepeval-3.6.4.dist-info/entry_points.txt,sha256=fVr8UphXTfJe9I2rObmUtfU3gkSrYeM0pLy-NbJYg10,94
+ deepeval-3.6.4.dist-info/RECORD,,