not-again-ai 0.4.0__tar.gz → 0.4.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/PKG-INFO +10 -1
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/README.md +9 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/pyproject.toml +1 -1
- not_again_ai-0.4.1/src/not_again_ai/llm/embeddings.py +62 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/LICENSE +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/__init__.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/base/__init__.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/base/file_system.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/base/parallel.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/llm/__init__.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/llm/chat_completion.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/llm/context_management.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/llm/openai_client.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/llm/prompts.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/llm/tokens.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/py.typed +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/statistics/__init__.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/statistics/dependence.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/viz/__init__.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/viz/barplots.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/viz/distributions.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/viz/scatterplot.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/viz/time_series.py +0 -0
- {not_again_ai-0.4.0 → not_again_ai-0.4.1}/src/not_again_ai/viz/utils.py +0 -0
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: not-again-ai
|
3
|
-
Version: 0.4.0
|
3
|
+
Version: 0.4.1
|
4
4
|
Summary: Designed to once and for all collect all the little things that come up over and over again in AI projects and put them in one place.
|
5
5
|
Home-page: https://github.com/DaveCoDev/not-again-ai
|
6
6
|
License: MIT
|
@@ -79,6 +79,15 @@ The base package includes only functions that have minimal external dependencies
|
|
79
79
|
## LLM (Large Language Model)
|
80
80
|
[README](https://github.com/DaveCoDev/not-again-ai/blob/main/readmes/llm.md)
|
81
81
|
|
82
|
+
Supports OpenAI chat completions and text embeddings. Includes functions for creating chat completion prompts, token management, and context management.
|
83
|
+
|
84
|
+
One example:
|
85
|
+
```python
|
86
|
+
client = openai_client()
|
87
|
+
messages = [{"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "Hello!"}]
|
88
|
+
response = chat_completion(messages=messages, model="gpt-3.5-turbo", max_tokens=100, client=client)["message"]
|
89
|
+
>>> "Hello! How can I help you today?"
|
90
|
+
```
|
82
91
|
|
83
92
|
## Statistics
|
84
93
|
[README](https://github.com/DaveCoDev/not-again-ai/blob/main/readmes/statistics.md)
|
@@ -44,6 +44,15 @@ The base package includes only functions that have minimal external dependencies
|
|
44
44
|
## LLM (Large Language Model)
|
45
45
|
[README](https://github.com/DaveCoDev/not-again-ai/blob/main/readmes/llm.md)
|
46
46
|
|
47
|
+
Supports OpenAI chat completions and text embeddings. Includes functions for creating chat completion prompts, token management, and context management.
|
48
|
+
|
49
|
+
One example:
|
50
|
+
```python
|
51
|
+
client = openai_client()
|
52
|
+
messages = [{"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "Hello!"}]
|
53
|
+
response = chat_completion(messages=messages, model="gpt-3.5-turbo", max_tokens=100, client=client)["message"]
|
54
|
+
>>> "Hello! How can I help you today?"
|
55
|
+
```
|
47
56
|
|
48
57
|
## Statistics
|
49
58
|
[README](https://github.com/DaveCoDev/not-again-ai/blob/main/readmes/statistics.md)
|
@@ -1,6 +1,6 @@
|
|
1
1
|
[tool.poetry]
|
2
2
|
name = "not-again-ai"
|
3
|
-
version = "0.4.0"
|
3
|
+
version = "0.4.1"
|
4
4
|
description = "Designed to once and for all collect all the little things that come up over and over again in AI projects and put them in one place."
|
5
5
|
authors = ["DaveCoDev <dave.co.dev@gmail.com>"]
|
6
6
|
license = "MIT"
|
@@ -0,0 +1,62 @@
|
|
1
|
+
from typing import Any
|
2
|
+
|
3
|
+
from openai import OpenAI
|
4
|
+
|
5
|
+
|
6
|
+
def embed_text(
    text: str | list[str],
    client: OpenAI,
    model: str = "text-embedding-3-large",
    dimensions: int | None = None,
    encoding_format: str = "float",
    **kwargs: Any,
) -> list[float] | str | list[list[float]] | list[str]:
    """Generates an embedding vector for a given text using OpenAI's API.

    Args:
        text (str | list[str]): The input text to be embedded. Each text should not exceed 8191 tokens,
            which is the max for V2 and V3 models.
        client (OpenAI): The OpenAI client used to interact with the API.
        model (str, optional): The ID of the model to use for embedding.
            Defaults to "text-embedding-3-large".
            Choose from text-embedding-3-small, text-embedding-3-large, text-embedding-ada-002.
            See https://platform.openai.com/docs/models/embeddings for more details.
        dimensions (int | None, optional): The number of dimensions for the output embeddings.
            This is only supported in "text-embedding-3" and later models. Defaults to None.
        encoding_format (str, optional): The format for the returned embeddings. Can be either
            "float" or "base64". Defaults to "float".
        **kwargs (Any): Any additional parameters to forward to ``client.embeddings.create``
            (e.g. ``user``).

    Returns:
        list[float] | str | list[list[float]] | list[str]: The embedding vector represented as a
            list of floats or base64 encoded string. If multiple text inputs are provided, a list
            of embedding vectors is returned. The length and format of the vector depend on the
            model, encoding_format, and dimensions.

    Raises:
        ValueError: If 'text-embedding-ada-002' model is used and dimensions are specified,
            as this model does not support specifying dimensions.

    Example:
        client = OpenAI()
        embedding = embed_text("Example text", client, model="text-embedding-ada-002")
    """
    if model == "text-embedding-ada-002" and dimensions:
        # text-embedding-ada-002 does not support dimensions
        raise ValueError("text-embedding-ada-002 does not support dimensions")

    # Build the request in a separate dict and merge in caller-supplied kwargs.
    # BUG FIX: the original rebound `kwargs` to a fresh dict, which silently
    # discarded every extra keyword argument the caller passed through **kwargs.
    request: dict[str, Any] = {
        "model": model,
        "input": text,
        "encoding_format": encoding_format,
        **kwargs,
    }
    if dimensions:
        request["dimensions"] = dimensions

    response = client.embeddings.create(**request)

    # One embedding per input text, in the same order as the inputs.
    embeddings = [item.embedding for item in response.data]

    # Unwrap the list when a single text was embedded, preserving the original contract.
    if len(embeddings) == 1:
        return embeddings[0]

    return embeddings
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|