not-again-ai 0.8.0.tar.gz → 0.8.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/PKG-INFO +4 -4
  2. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/pyproject.toml +4 -4
  3. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/openai_api/tokens.py +4 -0
  4. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/LICENSE +0 -0
  5. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/README.md +0 -0
  6. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/__init__.py +0 -0
  7. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/base/__init__.py +0 -0
  8. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/base/file_system.py +0 -0
  9. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/base/parallel.py +0 -0
  10. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/__init__.py +0 -0
  11. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/chat_completion.py +0 -0
  12. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/ollama/__init__.py +0 -0
  13. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/ollama/chat_completion.py +0 -0
  14. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/ollama/ollama_client.py +0 -0
  15. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/ollama/service.py +0 -0
  16. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/openai_api/__init__.py +0 -0
  17. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/openai_api/chat_completion.py +0 -0
  18. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/openai_api/context_management.py +0 -0
  19. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/openai_api/embeddings.py +0 -0
  20. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/openai_api/openai_client.py +0 -0
  21. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/openai_api/prompts.py +0 -0
  22. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/py.typed +0 -0
  23. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/statistics/__init__.py +0 -0
  24. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/statistics/dependence.py +0 -0
  25. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/viz/__init__.py +0 -0
  26. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/viz/barplots.py +0 -0
  27. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/viz/distributions.py +0 -0
  28. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/viz/scatterplot.py +0 -0
  29. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/viz/time_series.py +0 -0
  30. {not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/viz/utils.py +0 -0
{not_again_ai-0.8.0 → not_again_ai-0.8.1}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: not-again-ai
-Version: 0.8.0
+Version: 0.8.1
 Summary: Designed to once and for all collect all the little things that come up over and over again in AI projects and put them in one place.
 Home-page: https://github.com/DaveCoDev/not-again-ai
 License: MIT
@@ -21,14 +21,14 @@ Provides-Extra: llm
 Provides-Extra: statistics
 Provides-Extra: viz
 Requires-Dist: numpy (>=1.26.4,<2.0.0) ; extra == "statistics" or extra == "viz"
-Requires-Dist: ollama (>=0.1.9,<0.2.0) ; extra == "llm"
-Requires-Dist: openai (>=1.25.1,<2.0.0) ; extra == "llm"
+Requires-Dist: ollama (>=0.2.0,<0.3.0) ; extra == "llm"
+Requires-Dist: openai (>=1.29.0,<2.0.0) ; extra == "llm"
 Requires-Dist: pandas (>=2.2.2,<3.0.0) ; extra == "viz"
 Requires-Dist: python-liquid (>=1.12.1,<2.0.0) ; extra == "llm"
 Requires-Dist: scikit-learn (>=1.4.2,<2.0.0) ; extra == "statistics"
 Requires-Dist: scipy (>=1.13.0,<2.0.0) ; extra == "statistics"
 Requires-Dist: seaborn (>=0.13.2,<0.14.0) ; extra == "viz"
-Requires-Dist: tiktoken (>=0.6.0,<0.7.0) ; extra == "llm"
+Requires-Dist: tiktoken (>=0.7.0,<0.8.0) ; extra == "llm"
 Project-URL: Documentation, https://github.com/DaveCoDev/not-again-ai
 Project-URL: Repository, https://github.com/DaveCoDev/not-again-ai
 Description-Content-Type: text/markdown
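The extra == "llm" markers on the bumped Requires-Dist lines mean the new ollama, openai, and tiktoken floors only apply when the llm extra is installed. A minimal sketch of how such a marker evaluates, using the third-party packaging library (not part of not-again-ai); the requirement string below mirrors the updated tiktoken line in PEP 508 form:

from packaging.requirements import Requirement

# Same constraint as the new Requires-Dist line for tiktoken, written without
# the optional parentheses that PKG-INFO uses.
req = Requirement('tiktoken>=0.7.0,<0.8.0; extra == "llm"')
print(req.name, req.specifier)                       # the name and version window
print(req.marker.evaluate({"extra": "llm"}))         # True: pulled in with the llm extra
print(req.marker.evaluate({"extra": "statistics"}))  # False: skipped for other extras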
{not_again_ai-0.8.0 → not_again_ai-0.8.1}/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "not-again-ai"
-version = "0.8.0"
+version = "0.8.1"
 description = "Designed to once and for all collect all the little things that come up over and over again in AI projects and put them in one place."
 authors = ["DaveCoDev <dave.co.dev@gmail.com>"]
 license = "MIT"
@@ -28,14 +28,14 @@ python = "^3.11, <3.13"
 
 # Optional dependencies are defined here, and groupings are defined below.
 numpy = { version = "^1.26.4", optional = true }
-ollama = { version = "^0.1.9", optional = true }
-openai = { version = "^1.25.1", optional = true }
+ollama = { version = "^0.2.0", optional = true }
+openai = { version = "^1.29.0", optional = true }
 pandas = { version = "^2.2.2", optional = true }
 python-liquid = { version = "^1.12.1", optional = true }
 scipy = { version = "^1.13.0", optional = true }
 scikit-learn = { version = "^1.4.2", optional = true }
 seaborn = { version = "^0.13.2", optional = true }
-tiktoken = { version = "^0.6.0", optional = true }
+tiktoken = { version = "^0.7.0", optional = true }
 
 [tool.poetry.extras]
 llm = ["ollama", "openai", "python-liquid", "tiktoken"]
{not_again_ai-0.8.0 → not_again_ai-0.8.1}/src/not_again_ai/llm/openai_api/tokens.py
@@ -82,6 +82,8 @@ def num_tokens_from_messages(messages: list[dict[str, str]], model: str = "gpt-3
         "gpt-4-0125-preview",
         "gpt-4-turbo",
         "gpt-4-turbo-2024-04-09",
+        "gpt-4o",
+        "gpt-4o-2024-05-13",
     }:
         tokens_per_message = 3  # every message follows <|start|>{role/name}\n{content}<|end|>\n
         tokens_per_name = 1  # if there's a name, the role is omitted
@@ -91,6 +93,8 @@ def num_tokens_from_messages(messages: list[dict[str, str]], model: str = "gpt-3
     # Approximate catch-all. Assumes future versions of 3.5 and 4 will have the same token counts as the 0613 versions.
     elif "gpt-3.5-turbo" in model:
         return num_tokens_from_messages(messages, model="gpt-3.5-turbo-0613")
+    elif "gpt-4o" in model:
+        return num_tokens_from_messages(messages, model="gpt-4o-2024-05-13")
     elif "gpt-4" in model:
         return num_tokens_from_messages(messages, model="gpt-4-0613")
     else:
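Net effect of the tokens.py change: gpt-4o model names are counted with the same accounting as the other chat models (3 tokens per message, plus 1 when a name field is present, plus the encoded content), and any other "gpt-4o" variant falls back to the gpt-4o-2024-05-13 snapshot. A minimal usage sketch, assuming not-again-ai 0.8.1 is installed with the llm extra and that the function is importable from the module path shown in the file list above:

from not_again_ai.llm.openai_api.tokens import num_tokens_from_messages

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "How many tokens is this conversation?"},
]

# "gpt-4o" now hits the new elif branch and is counted like "gpt-4o-2024-05-13".
print(num_tokens_from_messages(messages, model="gpt-4o"))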