lm-deluge 0.0.4__tar.gz → 0.0.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of lm-deluge might be problematic. See the registry's advisory page for more details.

Files changed (43)
  1. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/PKG-INFO +1 -1
  2. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/pyproject.toml +1 -1
  3. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/anthropic.py +0 -2
  4. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/cohere.py +2 -6
  5. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge.egg-info/PKG-INFO +1 -1
  6. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/README.md +0 -0
  7. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/setup.cfg +0 -0
  8. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/__init__.py +0 -0
  9. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/__init__.py +0 -0
  10. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/base.py +0 -0
  11. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/common.py +0 -0
  12. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/deprecated/bedrock.py +0 -0
  13. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/deprecated/deepseek.py +0 -0
  14. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/deprecated/mistral.py +0 -0
  15. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/google.py +0 -0
  16. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/openai.py +0 -0
  17. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/api_requests/vertex.py +0 -0
  18. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/cache.py +0 -0
  19. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/client.py +0 -0
  20. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/embed.py +0 -0
  21. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/errors.py +0 -0
  22. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/gemini_limits.py +0 -0
  23. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/image.py +0 -0
  24. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/llm_tools/__init__.py +0 -0
  25. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/llm_tools/extract.py +0 -0
  26. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/llm_tools/score.py +0 -0
  27. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/llm_tools/translate.py +0 -0
  28. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/models.py +0 -0
  29. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/prompt.py +0 -0
  30. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/rerank.py +0 -0
  31. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/sampling_params.py +0 -0
  32. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/tool.py +0 -0
  33. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/tracker.py +0 -0
  34. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/util/json.py +0 -0
  35. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/util/logprobs.py +0 -0
  36. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/util/pdf.py +0 -0
  37. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/util/validation.py +0 -0
  38. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge/util/xml.py +0 -0
  39. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge.egg-info/SOURCES.txt +0 -0
  40. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge.egg-info/dependency_links.txt +0 -0
  41. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge.egg-info/requires.txt +0 -0
  42. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/src/lm_deluge.egg-info/top_level.txt +0 -0
  43. {lm_deluge-0.0.4 → lm_deluge-0.0.5}/tests/test_heal_json.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lm_deluge
3
- Version: 0.0.4
3
+ Version: 0.0.5
4
4
  Summary: Python utility for using LLM API models.
5
5
  Author-email: Benjamin Anderson <ben@trytaylor.ai>
6
6
  Requires-Python: >=3.10
@@ -3,7 +3,7 @@ requires = ["setuptools", "wheel"]
3
3
 
4
4
  [project]
5
5
  name = "lm_deluge"
6
- version = "0.0.4"
6
+ version = "0.0.5"
7
7
  authors = [{ name = "Benjamin Anderson", email = "ben@trytaylor.ai" }]
8
8
  description = "Python utility for using LLM API models."
9
9
  readme = "README.md"
@@ -119,9 +119,7 @@ class AnthropicRequest(APIRequestBase):
119
119
  if status_code >= 200 and status_code < 300:
120
120
  try:
121
121
  data = await http_response.json()
122
- print("response data:", data)
123
122
  content = data["content"] # [0]["text"]
124
- print("content is length", len(content))
125
123
  for item in content:
126
124
  if item["type"] == "text":
127
125
  completion = item["text"]
@@ -55,7 +55,7 @@ class CohereRequest(APIRequestBase):
55
55
 
56
56
  self.model = APIModel.from_registry(model_name)
57
57
  self.url = f"{self.model.api_base}/chat"
58
- self.system_message, chat_history, last_user_message = prompt.to_cohere()
58
+ messages = prompt.to_cohere()
59
59
 
60
60
  self.request_header = {
61
61
  "Authorization": f"bearer {os.getenv(self.model.api_key_env_var)}",
@@ -65,16 +65,12 @@ class CohereRequest(APIRequestBase):
65
65
 
66
66
  self.request_json = {
67
67
  "model": self.model.name,
68
- "chat_history": chat_history,
69
- "message": last_user_message,
68
+ "messages": messages,
70
69
  "temperature": sampling_params.temperature,
71
70
  "top_p": sampling_params.top_p,
72
71
  "max_tokens": sampling_params.max_new_tokens,
73
72
  }
74
73
 
75
- if self.system_message:
76
- self.request_json["preamble"] = self.system_message
77
-
78
74
  async def handle_response(self, http_response: ClientResponse) -> APIResponse:
79
75
  is_error = False
80
76
  error_message = None
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lm_deluge
3
- Version: 0.0.4
3
+ Version: 0.0.5
4
4
  Summary: Python utility for using LLM API models.
5
5
  Author-email: Benjamin Anderson <ben@trytaylor.ai>
6
6
  Requires-Python: >=3.10
File without changes
File without changes