lm-deluge 0.0.25__tar.gz → 0.0.26__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of lm-deluge might be problematic.
Files changed (62)
  1. {lm_deluge-0.0.25/src/lm_deluge.egg-info → lm_deluge-0.0.26}/PKG-INFO +1 -1
  2. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/pyproject.toml +1 -1
  3. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/file.py +3 -2
  4. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/request_context.py +11 -6
  5. {lm_deluge-0.0.25 → lm_deluge-0.0.26/src/lm_deluge.egg-info}/PKG-INFO +1 -1
  6. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/LICENSE +0 -0
  7. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/README.md +0 -0
  8. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/setup.cfg +0 -0
  9. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/__init__.py +0 -0
  10. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/agent.py +0 -0
  11. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/__init__.py +0 -0
  12. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/anthropic.py +0 -0
  13. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/base.py +0 -0
  14. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/bedrock.py +0 -0
  15. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/common.py +0 -0
  16. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/deprecated/bedrock.py +0 -0
  17. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/deprecated/cohere.py +0 -0
  18. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/deprecated/deepseek.py +0 -0
  19. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/deprecated/mistral.py +0 -0
  20. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/deprecated/vertex.py +0 -0
  21. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/gemini.py +0 -0
  22. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/mistral.py +0 -0
  23. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/openai.py +0 -0
  24. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/api_requests/response.py +0 -0
  25. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/batches.py +0 -0
  26. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/built_in_tools/anthropic/__init__.py +0 -0
  27. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/built_in_tools/anthropic/bash.py +0 -0
  28. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/built_in_tools/anthropic/computer_use.py +0 -0
  29. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/built_in_tools/anthropic/editor.py +0 -0
  30. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/built_in_tools/base.py +0 -0
  31. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/built_in_tools/openai.py +0 -0
  32. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/cache.py +0 -0
  33. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/client.py +0 -0
  34. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/config.py +0 -0
  35. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/embed.py +0 -0
  36. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/errors.py +0 -0
  37. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/gemini_limits.py +0 -0
  38. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/image.py +0 -0
  39. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/llm_tools/__init__.py +0 -0
  40. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/llm_tools/classify.py +0 -0
  41. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/llm_tools/extract.py +0 -0
  42. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/llm_tools/locate.py +0 -0
  43. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/llm_tools/ocr.py +0 -0
  44. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/llm_tools/score.py +0 -0
  45. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/llm_tools/translate.py +0 -0
  46. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/models.py +0 -0
  47. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/prompt.py +0 -0
  48. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/rerank.py +0 -0
  49. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/tool.py +0 -0
  50. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/tracker.py +0 -0
  51. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/usage.py +0 -0
  52. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/util/json.py +0 -0
  53. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/util/logprobs.py +0 -0
  54. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/util/spatial.py +0 -0
  55. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/util/validation.py +0 -0
  56. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge/util/xml.py +0 -0
  57. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge.egg-info/SOURCES.txt +0 -0
  58. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge.egg-info/dependency_links.txt +0 -0
  59. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge.egg-info/requires.txt +0 -0
  60. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/src/lm_deluge.egg-info/top_level.txt +0 -0
  61. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/tests/test_builtin_tools.py +0 -0
  62. {lm_deluge-0.0.25 → lm_deluge-0.0.26}/tests/test_native_mcp_server.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lm_deluge
-Version: 0.0.25
+Version: 0.0.26
 Summary: Python utility for using LLM API models.
 Author-email: Benjamin Anderson <ben@trytaylor.ai>
 Requires-Python: >=3.10
pyproject.toml
@@ -3,7 +3,7 @@ requires = ["setuptools", "wheel"]

 [project]
 name = "lm_deluge"
-version = "0.0.25"
+version = "0.0.26"
 authors = [{ name = "Benjamin Anderson", email = "ben@trytaylor.ai" }]
 description = "Python utility for using LLM API models."
 readme = "README.md"
src/lm_deluge/file.py
@@ -1,3 +1,4 @@
+from functools import cached_property
 import os
 import io
 import requests
@@ -68,13 +69,13 @@ class File:
             return encoded
         return f"data:{self._mime()};base64,{encoded}"

-    @property
+    @cached_property
     def fingerprint(self) -> str:
         # Hash the file contents for fingerprinting
         file_bytes = self._bytes()
         return xxhash.xxh64(file_bytes).hexdigest()

-    @property
+    @cached_property
     def size(self) -> int:
         """Return file size in bytes."""
         return len(self._bytes())
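Note on the file.py change: switching fingerprint and size from @property to functools.cached_property means the file bytes are read and hashed only on first access, after which the result is memoized on the instance. A minimal sketch of that behavior, using a toy stand-in class (ExampleFile and its fields are illustrative, not the library's real File API):

from functools import cached_property
import xxhash  # same hashing library the diffed code uses

class ExampleFile:
    def __init__(self, data: bytes):
        self._data = data
        self.hash_calls = 0  # counts how often the hash body actually runs

    @cached_property
    def fingerprint(self) -> str:
        # With @property this body would run on every access;
        # with @cached_property it runs once and the result is cached.
        self.hash_calls += 1
        return xxhash.xxh64(self._data).hexdigest()

f = ExampleFile(b"hello world")
assert f.fingerprint == f.fingerprint  # second access hits the cache
assert f.hash_calls == 1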
src/lm_deluge/request_context.py
@@ -1,4 +1,5 @@
 from dataclasses import dataclass, field
+from functools import cached_property
 from typing import Any, Callable

 from .config import SamplingParams
@@ -39,14 +40,18 @@ class RequestContext:

     # Computed properties
     cache_key: str = field(init=False)
-    num_tokens: int = field(init=False)
+    # num_tokens: int = field(init=False)

-    def __post_init__(self):
-        # Compute cache key from prompt fingerprint
-        self.cache_key = self.prompt.fingerprint
+    # def __post_init__(self):
+    #     # Compute cache key from prompt fingerprint
+    #     # self.cache_key = self.prompt.fingerprint

-        # Compute token count
-        self.num_tokens = self.prompt.count_tokens(self.sampling_params.max_new_tokens)
+    #     # Compute token count
+    #     self.num_tokens =
+
+    @cached_property
+    def num_tokens(self):
+        return self.prompt.count_tokens(self.sampling_params.max_new_tokens)

     def maybe_callback(self, response, tracker):
         if not self.callback:
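Note on the request_context.py change: num_tokens is no longer an eagerly computed dataclass field set in __post_init__; it is now a functools.cached_property, so token counting runs only if the value is actually used, and at most once per RequestContext. A rough sketch of the pattern under simplified assumptions (the Prompt stand-in and its count_tokens method are invented for illustration, not the library's real objects):

from dataclasses import dataclass
from functools import cached_property

@dataclass
class Prompt:
    text: str

    def count_tokens(self, max_new_tokens: int) -> int:
        # crude whitespace "tokenizer", only for the example
        return len(self.text.split()) + max_new_tokens

@dataclass
class Context:
    prompt: Prompt
    max_new_tokens: int = 100

    # cached_property works on an ordinary (non-slots) dataclass because
    # instances keep a __dict__ where the memoized value is stored.
    @cached_property
    def num_tokens(self) -> int:
        return self.prompt.count_tokens(self.max_new_tokens)

ctx = Context(Prompt("hello world"))
print(ctx.num_tokens)  # computed on first access, then cached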
src/lm_deluge.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lm_deluge
-Version: 0.0.25
+Version: 0.0.26
 Summary: Python utility for using LLM API models.
 Author-email: Benjamin Anderson <ben@trytaylor.ai>
 Requires-Python: >=3.10