not-again-ai 0.5.0__tar.gz → 0.5.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/PKG-INFO +2 -2
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/README.md +1 -1
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/pyproject.toml +4 -1
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/llm/chat_completion.py +2 -3
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/llm/chat_completion_vision.py +2 -3
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/llm/prompts.py +1 -1
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/LICENSE +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/__init__.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/base/__init__.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/base/file_system.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/base/parallel.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/llm/__init__.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/llm/context_management.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/llm/embeddings.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/llm/openai_client.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/llm/tokens.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/py.typed +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/statistics/__init__.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/statistics/dependence.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/viz/__init__.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/viz/barplots.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/viz/distributions.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/viz/scatterplot.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/viz/time_series.py +0 -0
- {not_again_ai-0.5.0 → not_again_ai-0.5.1}/src/not_again_ai/viz/utils.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: not-again-ai
-Version: 0.5.0
+Version: 0.5.1
 Summary: Designed to once and for all collect all the little things that come up over and over again in AI projects and put them in one place.
 Home-page: https://github.com/DaveCoDev/not-again-ai
 License: MIT
@@ -76,7 +76,7 @@ The package is split into subpackages, so you can install only the parts you nee
 The base package includes only functions that have minimal external dependencies and are useful in a variety of situations such as parallelization and filesystem operations.
 
 ## LLM (Large Language Model)
-[README](https://github.com/DaveCoDev/not-again-ai/blob/main/readmes/llm.md)
+[README](https://github.com/DaveCoDev/not-again-ai/blob/main/readmes/llm.md), [Example Notebooks](https://github.com/DaveCoDev/not-again-ai/blob/main/notebooks/llm/)
 
 Supports OpenAI chat completions and text embeddings. Includes functions for creating chat completion prompts, token management, and context management.
 
@@ -42,7 +42,7 @@ The package is split into subpackages, so you can install only the parts you nee
 The base package includes only functions that have minimal external dependencies and are useful in a variety of situations such as parallelization and filesystem operations.
 
 ## LLM (Large Language Model)
-[README](https://github.com/DaveCoDev/not-again-ai/blob/main/readmes/llm.md)
+[README](https://github.com/DaveCoDev/not-again-ai/blob/main/readmes/llm.md), [Example Notebooks](https://github.com/DaveCoDev/not-again-ai/blob/main/notebooks/llm/)
 
 Supports OpenAI chat completions and text embeddings. Includes functions for creating chat completion prompts, token management, and context management.
 
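The LLM subpackage described in the README entry above exposes chat completion helpers (see `src/not_again_ai/llm/chat_completion.py` further down in this diff). A minimal sketch of how it might be called; the keyword names (`messages`, `model`, `client`, `max_tokens`) and the `openai_client` helper are assumptions drawn from the file layout, not confirmed by this diff:

```python
# Sketch only: parameter names and the openai_client helper are assumptions.
from not_again_ai.llm.openai_client import openai_client
from not_again_ai.llm.chat_completion import chat_completion

client = openai_client()  # assumed helper that builds an OpenAI client from environment credentials

messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "Explain what a token is in one sentence."},
]

response = chat_completion(messages=messages, model="gpt-3.5-turbo", client=client, max_tokens=100)

# Keys documented in the chat_completion docstring shown later in this diff.
print(response["message"])
print(response["prompt_tokens"], response["completion_tokens"])
```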
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "not-again-ai"
-version = "0.5.0"
+version = "0.5.1"
 description = "Designed to once and for all collect all the little things that come up over and over again in AI projects and put them in one place."
 authors = ["DaveCoDev <dave.co.dev@gmail.com>"]
 license = "MIT"
@@ -41,6 +41,9 @@ llm = ["openai", "python-liquid", "tiktoken"]
 statistics = ["numpy", "scikit-learn", "scipy"]
 viz = ["numpy", "pandas", "seaborn"]
 
+[tool.poetry.dev-dependencies]
+ipykernel = "*"
+
 [tool.poetry.group.nox.dependencies]
 nox-poetry = "*"
 
@@ -64,9 +64,8 @@ def chat_completion(
             this will be a list of dictionaries containing the token, logprob, and bytes for each token in the message.
         'choices' (list[dict], optional): A list of chat completion choices if n > 1 where each dict contains the above fields.
         'completion_tokens' (int): The number of tokens used by the model to generate the completion.
-            NOTE: If n > 1 this is the sum of all completions
-        'prompt_tokens' (int): The number of tokens in the
-            NOTE: If n > 1 this is the sum of all completions and thus will be same value in each dict.
+            NOTE: If n > 1 this is the sum of all completions.
+        'prompt_tokens' (int): The number of tokens in the messages sent to the model.
         'system_fingerprint' (str, optional): If seed is set, a unique identifier for the model used to generate the response.
     """
     response_format = {"type": "json_object"} if json_mode else None
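The corrected docstring above states that `completion_tokens` is summed across all completions when `n > 1`, while `prompt_tokens` counts only the tokens in the messages sent to the model. A short sketch of usage accounting under that convention; the call itself is assumed, and only the keys read from the result are taken from the docstring:

```python
# Assumed call; the keys read from `response` are the ones documented in the docstring above.
response = chat_completion(messages=messages, model="gpt-3.5-turbo", client=client, n=3)

# completion_tokens already sums over all n completions, so total usage is a single addition.
total_tokens = response["prompt_tokens"] + response["completion_tokens"]

# With n > 1, per-choice fields live under 'choices'; each dict repeats the fields above.
for choice in response.get("choices", []):
    print(choice["message"])

print(f"Approximate tokens billed: {total_tokens}")
```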
@@ -41,9 +41,8 @@ def chat_completion_vision(
         'message' (str | dict): The content of the generated assistant message.
         'choices' (list[dict], optional): A list of chat completion choices if n > 1 where each dict contains the above fields.
         'completion_tokens' (int): The number of tokens used by the model to generate the completion.
-            NOTE: If n > 1 this is the sum of all completions
-        'prompt_tokens' (int): The number of tokens in the
-            NOTE: If n > 1 this is the sum of all completions and thus will be same value in each dict.
+            NOTE: If n > 1 this is the sum of all completions.
+        'prompt_tokens' (int): The number of tokens in the messages sent to the model.
         'system_fingerprint' (str, optional): If seed is set, a unique identifier for the model used to generate the response.
     """
     kwargs.update(
@@ -127,7 +127,7 @@ def create_image_url(image_path: Path) -> str:
 
 
 def chat_prompt_vision(messages_unformatted: list[dict[str, Any]], variables: dict[str, str]) -> list[dict[str, Any]]:
-    """Formats a list of messages for OpenAI's chat completion API for vision models only using Liquid templating.
+    """Formats a list of messages for OpenAI's chat completion API, for vision models only, using Liquid templating.
 
     Args:
         messages_unformatted (list[dict[str, list[dict[str, Path | str]] | str]]):
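The signatures of `create_image_url` and `chat_prompt_vision` appear in the hunk above. A sketch of how the two might be combined, assuming Liquid placeholders such as `{{ topic }}` in message content are filled from the `variables` dict; the image-entry keys (`type`, `image_url`, `url`) are illustrative assumptions, not confirmed by this diff:

```python
from pathlib import Path

from not_again_ai.llm.prompts import chat_prompt_vision, create_image_url

# Text content may contain Liquid placeholders; the image-entry key names here are illustrative.
messages_unformatted = [
    {"role": "system", "content": "You describe images about {{ topic }}."},
    {
        "role": "user",
        "content": [
            {"type": "text", "text": "What is shown in this picture?"},
            {"type": "image_url", "image_url": {"url": create_image_url(Path("cat.jpg"))}},
        ],
    },
]

# Substitute the Liquid variables to produce messages ready for a vision chat completion call.
messages = chat_prompt_vision(messages_unformatted, variables={"topic": "animals"})
```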