not-again-ai 0.17.0__py3-none-any.whl → 0.18.0__py3-none-any.whl
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- not_again_ai/llm/__init__.py +0 -15
- not_again_ai/llm/chat_completion/interface.py +4 -0
- not_again_ai/llm/chat_completion/providers/anthropic_api.py +180 -0
- not_again_ai/llm/chat_completion/types.py +5 -1
- not_again_ai/llm/prompting/compile_prompt.py +3 -3
- {not_again_ai-0.17.0.dist-info → not_again_ai-0.18.0.dist-info}/METADATA +66 -164
- {not_again_ai-0.17.0.dist-info → not_again_ai-0.18.0.dist-info}/RECORD +12 -11
- {not_again_ai-0.17.0.dist-info → not_again_ai-0.18.0.dist-info}/WHEEL +1 -1
- {not_again_ai-0.17.0.dist-info → not_again_ai-0.18.0.dist-info/licenses}/LICENSE +0 -0
not_again_ai/llm/__init__.py
CHANGED
@@ -1,15 +0,0 @@
-import importlib.util
-
-if (
-    importlib.util.find_spec("liquid") is None
-    or importlib.util.find_spec("openai") is None
-    or importlib.util.find_spec("tiktoken") is None
-):
-    raise ImportError(
-        "not_again_ai.llm requires the 'llm' extra to be installed. "
-        "You can install it using 'pip install not_again_ai[llm]'."
-    )
-else:
-    import liquid  # noqa: F401
-    import openai  # noqa: F401
-    import tiktoken  # noqa: F401
not_again_ai/llm/chat_completion/interface.py
CHANGED
@@ -1,6 +1,7 @@
 from collections.abc import AsyncGenerator, Callable
 from typing import Any
 
+from not_again_ai.llm.chat_completion.providers.anthropic_api import anthropic_chat_completion
 from not_again_ai.llm.chat_completion.providers.ollama_api import ollama_chat_completion, ollama_chat_completion_stream
 from not_again_ai.llm.chat_completion.providers.openai_api import openai_chat_completion, openai_chat_completion_stream
 from not_again_ai.llm.chat_completion.types import ChatCompletionChunk, ChatCompletionRequest, ChatCompletionResponse
@@ -28,6 +29,8 @@ def chat_completion(
         return openai_chat_completion(request, client)
     elif provider == "ollama":
         return ollama_chat_completion(request, client)
+    elif provider == "anthropic":
+        return anthropic_chat_completion(request, client)
     else:
         raise ValueError(f"Provider {provider} not supported")
 
@@ -41,6 +44,7 @@ async def chat_completion_stream(
     - `openai` - OpenAI
     - `azure_openai` - Azure OpenAI
     - `ollama` - Ollama
+    - `anthropic` - Anthropic
 
     Args:
         request: Request parameter object
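To make the dispatch above concrete, here is a minimal usage sketch; it is not taken from the package docs. The keyword names `request`, `provider`, and `client` and the `max_completion_tokens` mapping are visible in this diff, while `SystemMessage`, `UserMessage`, and the model string are assumptions for illustration.

```python
# Minimal sketch (assumptions noted); requires the 'llm' extra and an Anthropic API key.
from not_again_ai.llm.chat_completion.interface import chat_completion
from not_again_ai.llm.chat_completion.providers.anthropic_api import anthropic_client
from not_again_ai.llm.chat_completion.types import ChatCompletionRequest, SystemMessage, UserMessage  # message classes assumed

client = anthropic_client()  # falls back to the ANTHROPIC_API_KEY environment variable
request = ChatCompletionRequest(
    model="claude-3-7-sonnet-latest",  # hypothetical model name
    messages=[
        SystemMessage(content="You are terse."),
        UserMessage(content="Say hello."),
    ],
    max_completion_tokens=100,  # renamed to Anthropic's max_tokens by the provider
)
response = chat_completion(request=request, provider="anthropic", client=client)
print(response.choices[0].message.content)
```

The same request object can be routed to `openai`, `azure_openai`, or `ollama` by swapping only the provider string and client, which is the point of the shared interface.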
not_again_ai/llm/chat_completion/providers/anthropic_api.py
ADDED
@@ -0,0 +1,180 @@
+from collections.abc import Callable
+import os
+import time
+from typing import Any
+
+from anthropic import Anthropic
+from anthropic.types import Message
+
+from not_again_ai.llm.chat_completion.types import (
+    AssistantMessage,
+    ChatCompletionChoice,
+    ChatCompletionRequest,
+    ChatCompletionResponse,
+    Function,
+    ToolCall,
+)
+
+ANTHROPIC_PARAMETER_MAP = {
+    "max_completion_tokens": "max_tokens",
+}
+
+
+def anthropic_chat_completion(request: ChatCompletionRequest, client: Callable[..., Any]) -> ChatCompletionResponse:
+    """Anthropic chat completion function.
+
+    TODO
+    - Image messages
+    - Thinking
+    - Citations
+    - Stop sequences
+    - Documents
+    """
+    kwargs = request.model_dump(mode="json", exclude_none=True)
+
+    # For each key in ANTHROPIC_PARAMETER_MAP
+    # If it is not None, set the key in kwargs to the value of the corresponding value in ANTHROPIC_PARAMETER_MAP
+    # If it is None, remove that key from kwargs
+    for key, value in ANTHROPIC_PARAMETER_MAP.items():
+        if value is not None and key in kwargs:
+            kwargs[value] = kwargs.pop(key)
+        elif value is None and key in kwargs:
+            del kwargs[key]
+
+    # Handle messages
+    # Any system messages need to be removed from messages and concatenated into a single string (in order)
+    # Any tool messages need to be converted to a special user message
+    # Any assistant messages with tool calls need to be converted.
+    system = ""
+    new_messages = []
+    for message in kwargs["messages"]:
+        if message["role"] == "system":
+            system += message["content"] + "\n"
+        elif message["role"] == "tool":
+            new_messages.append(
+                {
+                    "role": "user",
+                    "content": [
+                        {
+                            "type": "tool_result",
+                            "tool_use_id": message["name"],
+                            "content": message["content"],
+                        }
+                    ],
+                }
+            )
+        elif message["role"] == "assistant":
+            content = []
+            if message.get("content", None):
+                content.append(
+                    {
+                        "type": "text",
+                        "content": message["content"],
+                    }
+                )
+            for tool_call in message.get("tool_calls", []):
+                content.append(
+                    {
+                        "type": "tool_use",
+                        "id": tool_call["id"],
+                        "name": tool_call["function"]["name"],
+                        "input": tool_call["function"]["arguments"],
+                    }
+                )
+            new_messages.append(
+                {
+                    "role": "assistant",
+                    "content": content,
+                }
+            )
+        else:
+            new_messages.append(message)
+    kwargs["messages"] = new_messages
+    system = system.strip()
+    if system:
+        kwargs["system"] = system
+
+    # Handle tool choice and parallel tool calls
+    if kwargs.get("tool_choice") is not None:
+        tool_choice_value = kwargs.pop("tool_choice")
+        tool_choice = {}
+        if tool_choice_value == "none":
+            tool_choice["type"] = "none"
+        elif tool_choice_value in ["auto", "any"]:
+            tool_choice["type"] = "auto"
+            if kwargs.get("parallel_tool_calls") is not None:
+                tool_choice["disable_parallel_tool_use"] = str(not kwargs["parallel_tool_calls"])
+        else:
+            tool_choice["name"] = tool_choice_value
+            tool_choice["type"] = "tool"
+            if kwargs.get("parallel_tool_calls") is not None:
+                tool_choice["disable_parallel_tool_use"] = str(not kwargs["parallel_tool_calls"])
+        kwargs["tool_choice"] = tool_choice
+    kwargs.pop("parallel_tool_calls", None)
+
+    start_time = time.time()
+    response: Message = client(**kwargs)
+    end_time = time.time()
+    response_duration = round(end_time - start_time, 4)
+
+    tool_calls: list[ToolCall] = []
+    assistant_message = ""
+    for block in response.content:
+        if block.type == "text":
+            assistant_message += block.text
+        elif block.type == "tool_use":
+            tool_calls.append(
+                ToolCall(
+                    id=block.id,
+                    function=Function(
+                        name=block.name,
+                        arguments=block.input,  # type: ignore
+                    ),
+                )
+            )
+
+    choice = ChatCompletionChoice(
+        message=AssistantMessage(
+            content=assistant_message,
+            tool_calls=tool_calls,
+        ),
+        finish_reason=response.stop_reason or "stop",
+    )
+
+    chat_completion_response = ChatCompletionResponse(
+        choices=[choice],
+        errors="",
+        completion_tokens=response.usage.output_tokens,
+        prompt_tokens=response.usage.input_tokens,
+        cache_read_input_tokens=response.usage.cache_read_input_tokens,
+        cache_creation_input_tokens=response.usage.cache_creation_input_tokens,
+        response_duration=response_duration,
+    )
+    return chat_completion_response
+
+
+def create_client_callable(client_class: type[Anthropic], **client_args: Any) -> Callable[..., Any]:
+    """Creates a callable that instantiates and uses an Anthropic client.
+
+    Args:
+        client_class: The Anthropic client class to instantiate
+        **client_args: Arguments to pass to the client constructor
+
+    Returns:
+        A callable that creates a client and returns completion results
+    """
+    filtered_args = {k: v for k, v in client_args.items() if v is not None}
+
+    def client_callable(**kwargs: Any) -> Any:
+        client = client_class(**filtered_args)
+        completion = client.beta.messages.create(**kwargs)
+        return completion
+
+    return client_callable
+
+
+def anthropic_client(api_key: str | None = None) -> Callable[..., Any]:
+    if not api_key:
+        api_key = os.environ.get("ANTHROPIC_API_KEY")
+    client_callable = create_client_callable(Anthropic, api_key=api_key)
+    return client_callable
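Since the message-conversion loop is the heart of the new provider, here is a hedged illustration (not from the package) of the shape translation it performs. The input uses hypothetical OpenAI-style message dicts, as produced by `request.model_dump()`; the output mirrors what the loop appends before calling the client.

```python
# OpenAI-style messages going in (hypothetical example data):
openai_style = [
    {"role": "system", "content": "Be terse."},
    {
        "role": "assistant",
        "tool_calls": [
            {"id": "call_1", "function": {"name": "get_weather", "arguments": {"city": "Boston"}}}
        ],
    },
    {"role": "tool", "name": "call_1", "content": "72F and sunny"},
]

# Roughly what anthropic_chat_completion builds from them before calling the client callable:
anthropic_style_kwargs = {
    "system": "Be terse.",  # system messages are pulled out and concatenated
    "messages": [
        {
            "role": "assistant",
            "content": [
                {"type": "tool_use", "id": "call_1", "name": "get_weather", "input": {"city": "Boston"}}
            ],
        },
        {
            "role": "user",  # tool results become user messages carrying a tool_result block
            "content": [
                {"type": "tool_result", "tool_use_id": "call_1", "content": "72F and sunny"}
            ],
        },
    ],
}
```

Separately, `max_completion_tokens` is renamed to Anthropic's `max_tokens` via `ANTHROPIC_PARAMETER_MAP`, and `parallel_tool_calls` is never passed through directly; when a `tool_choice` is given it is expressed as `disable_parallel_tool_use`.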
not_again_ai/llm/chat_completion/types.py
CHANGED
@@ -138,7 +138,9 @@ class ChatCompletionRequest(BaseModel):
 
 class ChatCompletionChoice(BaseModel):
     message: AssistantMessage
-    finish_reason: Literal[
+    finish_reason: Literal[
+        "stop", "length", "tool_calls", "content_filter", "end_turn", "max_tokens", "stop_sequence", "tool_use"
+    ]
 
     json_message: dict[str, Any] | None = Field(default=None)
     logprobs: list[dict[str, Any] | list[dict[str, Any]]] | None = Field(default=None)
@@ -155,6 +157,8 @@ class ChatCompletionResponse(BaseModel):
     prompt_tokens: int
     completion_detailed_tokens: dict[str, int] | None = Field(default=None)
     prompt_detailed_tokens: dict[str, int] | None = Field(default=None)
+    cache_read_input_tokens: int | None = Field(default=None)
+    cache_creation_input_tokens: int | None = Field(default=None)
     response_duration: float
 
     system_fingerprint: str | None = Field(default=None)
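For orientation, a small sketch of how the widened `finish_reason` and the new cache-token fields are used. It mirrors the construction in `anthropic_api.py` above; any field or default not visible in this diff is assumed.

```python
# Sketch only: the field set mirrors what anthropic_api.py passes to ChatCompletionResponse.
from not_again_ai.llm.chat_completion.types import (
    AssistantMessage,
    ChatCompletionChoice,
    ChatCompletionResponse,
)

choice = ChatCompletionChoice(
    message=AssistantMessage(content="Hello!"),
    finish_reason="end_turn",  # an Anthropic stop reason, now allowed by the wider Literal
)
response = ChatCompletionResponse(
    choices=[choice],
    errors="",
    completion_tokens=12,
    prompt_tokens=34,
    cache_read_input_tokens=0,      # new optional field
    cache_creation_input_tokens=8,  # new optional field
    response_duration=0.42,
)
print(response.model_dump(exclude_none=True))
```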
not_again_ai/llm/prompting/compile_prompt.py
CHANGED
@@ -5,7 +5,7 @@ import mimetypes
 from pathlib import Path
 from typing import Any
 
-from liquid import
+from liquid import render
 from openai.lib._pydantic import to_strict_json_schema
 from pydantic import BaseModel
 
@@ -15,7 +15,7 @@ from not_again_ai.llm.chat_completion.types import MessageT
 def _apply_templates(value: Any, variables: dict[str, str]) -> Any:
     """Recursively applies Liquid templating to all string fields within the given value."""
     if isinstance(value, str):
-        return
+        return render(value, **variables)
     elif isinstance(value, list):
         return [_apply_templates(item, variables) for item in value]
     elif isinstance(value, dict):
@@ -31,7 +31,7 @@ def _apply_templates(value: Any, variables: dict[str, str]) -> Any:
 
 def compile_messages(messages: Sequence[MessageT], variables: dict[str, str]) -> Sequence[MessageT]:
     """Compiles messages using Liquid templating and the provided variables.
-    Calls
+    Calls render(content_part, **variables) on each text content part.
 
     Args:
         messages: List of MessageT where content can contain Liquid templates.
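The import change reflects python-liquid 2.x (as now required by the `llm` extra), which exposes a module-level `render()`; `_apply_templates` calls it on every string it finds. A tiny sketch of that call, with the template text being just an example:

```python
from liquid import render  # python-liquid >= 2.0, per the updated 'llm' extra

variables = {"name": "Ada", "tone": "friendly"}
print(render("Write a {{ tone }} greeting for {{ name }}.", **variables))
# -> Write a friendly greeting for Ada.
```

`compile_messages` applies the same call recursively, so templates nested inside message content parts are rendered as well.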
{not_again_ai-0.17.0.dist-info → not_again_ai-0.18.0.dist-info}/METADATA
CHANGED
@@ -1,11 +1,13 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: not-again-ai
-Version: 0.
+Version: 0.18.0
 Summary: Designed to once and for all collect all the little things that come up over and over again in AI projects and put them in one place.
-
-
-
-
+Project-URL: Homepage, https://github.com/DaveCoDev/not-again-ai
+Project-URL: Documentation, https://davecodev.github.io/not-again-ai/
+Project-URL: Repository, https://github.com/DaveCoDev/not-again-ai
+Author-email: DaveCoDev <dave.co.dev@gmail.com>
+License-Expression: MIT
+License-File: LICENSE
 Classifier: Development Status :: 3 - Alpha
 Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: Science/Research
@@ -16,52 +18,55 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Typing :: Typed
+Requires-Python: >=3.11
+Requires-Dist: loguru>=0.7
+Requires-Dist: pydantic>=2.10
 Provides-Extra: data
+Requires-Dist: playwright<2.0,>=1.51; extra == 'data'
+Requires-Dist: pytest-playwright<1.0,>=0.7; extra == 'data'
 Provides-Extra: llm
+Requires-Dist: anthropic<1.0,>=0.49; extra == 'llm'
+Requires-Dist: azure-identity<2.0,>=1.21; extra == 'llm'
+Requires-Dist: ollama<1.0,>=0.4; extra == 'llm'
+Requires-Dist: openai<2.0,>=1.68; extra == 'llm'
+Requires-Dist: python-liquid<3.0,>=2.0; extra == 'llm'
+Requires-Dist: tiktoken<1.0,>=0.9; extra == 'llm'
 Provides-Extra: statistics
+Requires-Dist: numpy<3.0,>=2.2; extra == 'statistics'
+Requires-Dist: scikit-learn<2.0,>=1.6; extra == 'statistics'
+Requires-Dist: scipy>=1.15; extra == 'statistics'
 Provides-Extra: viz
-Requires-Dist:
-Requires-Dist:
-Requires-Dist:
-Requires-Dist: numpy (>=2.2) ; extra == "viz"
-Requires-Dist: ollama (>=0.4) ; extra == "llm"
-Requires-Dist: openai (>=1) ; extra == "llm"
-Requires-Dist: pandas (>=2.2) ; extra == "viz"
-Requires-Dist: playwright (>=1.50) ; extra == "data"
-Requires-Dist: pydantic (>=2.10)
-Requires-Dist: pytest-playwright (>=0.7) ; extra == "data"
-Requires-Dist: python-liquid (>=1.12) ; extra == "llm"
-Requires-Dist: scikit-learn (>=1.6) ; extra == "statistics"
-Requires-Dist: scipy (>=1.15) ; extra == "statistics"
-Requires-Dist: seaborn (>=0.13) ; extra == "viz"
-Requires-Dist: tiktoken (>=0.8) ; extra == "llm"
-Project-URL: Documentation, https://davecodev.github.io/not-again-ai/
-Project-URL: Homepage, https://github.com/DaveCoDev/not-again-ai
-Project-URL: Repository, https://github.com/DaveCoDev/not-again-ai
+Requires-Dist: numpy<3.0,>=2.2; extra == 'viz'
+Requires-Dist: pandas<3.0,>=2.2; extra == 'viz'
+Requires-Dist: seaborn<1.0,>=0.13; extra == 'viz'
 Description-Content-Type: text/markdown
 
 # not-again-ai
 
 [![GitHub Actions][github-actions-badge]](https://github.com/johnthagen/python-blueprint/actions)
-[![
+[![uv][uv-badge]](https://github.com/astral-sh/uv)
 [![Nox][nox-badge]](https://github.com/wntrblm/nox)
 [![Ruff][ruff-badge]](https://github.com/astral-sh/ruff)
 [![Type checked with mypy][mypy-badge]](https://mypy-lang.org/)
 
 [github-actions-badge]: https://github.com/johnthagen/python-blueprint/workflows/python/badge.svg
-[
+[uv-badge]: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/uv/main/assets/badge/v0.json
 [nox-badge]: https://img.shields.io/badge/%F0%9F%A6%8A-Nox-D85E00.svg
 [black-badge]: https://img.shields.io/badge/code%20style-black-000000.svg
 [ruff-badge]: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json
 [mypy-badge]: https://www.mypy-lang.org/static/mypy_badge.svg
 
-**not-again-ai** is a collection of various building blocks that come up over and over again when developing AI products.
+**not-again-ai** is a collection of various building blocks that come up over and over again when developing AI products.
+The key goals of this package are to have simple, yet flexible interfaces and to minimize dependencies.
+It is encouraged to also **a)** use this as a template for your own Python package.
+**b)** instead of installing the package, copy and paste functions into your own projects.
+We make this easier by limiting the number of dependencies and use an MIT license.
 
 **Documentation** available within individual **[notebooks](notebooks)**, docstrings within the source, or auto-generated at [DaveCoDev.github.io/not-again-ai/](https://DaveCoDev.github.io/not-again-ai/).
 
 # Installation
 
-Requires: Python 3.11, or 3.12
+Requires: Python 3.11, or 3.12 which can be installed with [uv](https://docs.astral.sh/uv/getting-started/installation/) by running the command `uv python install 3.12`
 
 Install the entire package from [PyPI](https://pypi.org/project/not-again-ai/) with:
 
@@ -111,52 +116,35 @@ The package is split into subpackages, so you can install only the parts you need.
 
 # Development Information
 
-
+This package uses [uv](https://docs.astral.sh/uv/) to manage dependencies and
+isolated [Python virtual environments](https://docs.python.org/3/library/venv.html).
 
-
-
-
+To proceed,
+[install uv globally](https://docs.astral.sh/uv/getting-started/installation/)
+onto your system.
 
-
+To install a specific version of Python:
 
-```
-
-```
-
-(Optional) configure Poetry to use an in-project virtual environment.
-```bash
-$ poetry config virtualenvs.in-project true
+```shell
+uv python install 3.12
 ```
 
 ## Dependencies
 
 Dependencies are defined in [`pyproject.toml`](./pyproject.toml) and specific versions are locked
-into [`
+into [`uv.lock`](./uv.lock). This allows for exact reproducible environments across
 all machines that use the project, both during development and in production.
 
-To
-
-```bash
-$ poetry update
-```
-
-To install all dependencies (with all extra dependencies) into an isolated virtual environment:
-
-```bash
-$ poetry sync --all-extras
-```
+To install all dependencies into an isolated virtual environment:
 
-
-
-
-```bash
-$ poetry shell
+```shell
+uv sync --all-extras
 ```
 
-To
+To upgrade all dependencies to their latest versions:
 
-```
-
+```shell
+uv lock --upgrade
 ```
 
 ## Packaging
@@ -164,48 +152,40 @@ To deactivate the environment:
 This project is designed as a Python package, meaning that it can be bundled up and redistributed
 as a single compressed file.
 
-Packaging is configured by
-
-- [`pyproject.toml`](./pyproject.toml)
+Packaging is configured by the [`pyproject.toml`](./pyproject.toml).
 
 To package the project as both a
 [source distribution](https://packaging.python.org/en/latest/flow/#the-source-distribution-sdist) and
 a [wheel](https://packaging.python.org/en/latest/specifications/binary-distribution-format/):
 
 ```bash
-$
+$ uv build
 ```
 
 This will generate `dist/not-again-ai-<version>.tar.gz` and `dist/not_again_ai-<version>-py3-none-any.whl`.
 
-Read more about the [advantages of wheels](https://pythonwheels.com/) to understand why generating
-wheel distributions are important.
 
 ## Publish Distributions to PyPI
 
 Source and wheel redistributable packages can
-be [published to PyPI](https://
+be [published to PyPI](https://docs.astral.sh/uv/guides/package/) or installed
 directly from the filesystem using `pip`.
 
-```
-
+```shell
+uv publish
 ```
 
 # Enforcing Code Quality
 
-Automated code quality checks are performed using
-
-[`
-
-guide checking, type checking and documentation generation.
-
-> Note: `nox` is installed into the virtual environment automatically by the `poetry sync`
-> command above. Run `poetry shell` to activate the virtual environment.
+Automated code quality checks are performed using [Nox](https://nox.thea.codes/en/stable/). Nox
+will automatically create virtual environments and run commands based on
+[`noxfile.py`](./noxfile.py) for unit testing, PEP 8 style guide checking, type checking and
+documentation generation.
 
 To run all default sessions:
 
-```
-
+```shell
+uv run nox
 ```
 
 ## Unit Testing
@@ -237,7 +217,7 @@ pytest and code coverage are configured in [`pyproject.toml`](./pyproject.toml).
 To run selected tests:
 
 ```bash
-(.venv) $ nox -s test -- -k "test_web"
+(.venv) $ uv run nox -s test -- -k "test_web"
 ```
 
 ## Code Style Checking
@@ -251,13 +231,13 @@ code. PEP 8 code compliance is verified using [Ruff][Ruff]. Ruff is configured in
 To lint code, run:
 
 ```bash
-(.venv) $ nox -s lint
+(.venv) $ uv run nox -s lint
 ```
 
 To automatically fix fixable lint errors, run:
 
 ```bash
-(.venv) $ nox -s lint_fix
+(.venv) $ uv run nox -s lint_fix
 ```
 
 ## Automated Code Formatting
@@ -267,13 +247,13 @@ To automatically fix fixable lint errors, run:
 To automatically format code, run:
 
 ```bash
-(.venv) $ nox -s fmt
+(.venv) $ uv run nox -s fmt
 ```
 
 To verify code has been formatted, such as in a CI job:
 
 ```bash
-(.venv) $ nox -s fmt_check
+(.venv) $ uv run nox -s fmt_check
 ```
 
 ## Type Checking
@@ -293,11 +273,9 @@ def factorial(n: int) -> int:
 mypy is configured in [`pyproject.toml`](./pyproject.toml). To type check code, run:
 
 ```bash
-(.venv) $ nox -s type_check
+(.venv) $ uv run nox -s type_check
 ```
 
-See also [awesome-python-typing](https://github.com/typeddjango/awesome-python-typing).
-
 ### Distributing Type Annotations
 
 [PEP 561](https://www.python.org/dev/peps/pep-0561/) defines how a Python package should
@@ -313,7 +291,7 @@ installed package to indicate that inline type annotations should be checked.
 Check for typos using [typos](https://github.com/crate-ci/typos)
 
 ```bash
-(.venv) $ nox -s typos
+(.venv) $ uv run nox -s typos
 ```
 
 ## Continuous Integration
@@ -331,81 +309,5 @@ Install the [Ruff extension](https://marketplace.visualstudio.com/items?itemName
 
 Default settings are configured in [`.vscode/settings.json`](./.vscode/settings.json) which will enable Ruff with consistent settings.
 
-# Generating Documentation
-
-## Generating a User Guide
-
-[Material for MkDocs](https://squidfunk.github.io/mkdocs-material/) is a powerful static site
-generator that combines easy-to-write Markdown, with a number of Markdown extensions that increase
-the power of Markdown. This makes it a great fit for user guides and other technical documentation.
-
-The example MkDocs project included in this project is configured to allow the built documentation
-to be hosted at any URL or viewed offline from the file system.
-
-To build the user guide, run,
-
-```bash
-(.venv) $ nox -s docs
-```
-
-and open `docs/user_guide/site/index.html` using a web browser.
-
-To build the user guide, additionally validating external URLs, run:
-
-```bash
-(.venv) $ nox -s docs_check_urls
-```
-
-To build the user guide in a format suitable for viewing directly from the file system, run:
-
-```bash
-(.venv) $ nox -s docs_offline
-```
-
-To build and serve the user guide with automatic rebuilding as you change the contents,
-run:
-
-```bash
-(.venv) $ nox -s docs_serve
-```
-
-and open <http://127.0.0.1:8000> in a browser.
-
-Each time the `main` Git branch is updated, the
-[`.github/workflows/pages.yml`](.github/workflows/pages.yml) GitHub Action will
-automatically build the user guide and publish it to [GitHub Pages](https://pages.github.com/).
-This is configured in the `docs_github_pages` Nox session.
-
-## Generating API Documentation
-
-This project uses [mkdocstrings](https://github.com/mkdocstrings/mkdocstrings) plugin for
-MkDocs, which renders
-[Google-style docstrings](https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html)
-into an MkDocs project. Google-style docstrings provide a good mix of easy-to-read docstrings in
-code as well as nicely-rendered output.
-
-```python
-"""Computes the factorial through a recursive algorithm.
-
-Args:
-    n: A positive input value.
-
-Raises:
-    InvalidFactorialError: If n is less than 0.
-
-Returns:
-    Computed factorial.
-"""
-```
-
-## Misc
-
-If you get a `Failed to create the collection: Prompt dismissed..` error when running `poetry update` on Ubuntu, try setting the following environment variable:
-
-```bash
-export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring
-```
-
 # Attributions
 [python-blueprint](https://github.com/johnthagen/python-blueprint) for the Python package skeleton.
-
{not_again_ai-0.17.0.dist-info → not_again_ai-0.18.0.dist-info}/RECORD
CHANGED
@@ -1,29 +1,30 @@
 not_again_ai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+not_again_ai/py.typed,sha256=UaCuPFa3H8UAakbt-5G8SPacldTOGvJv18pPjUJ5gDY,93
 not_again_ai/base/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 not_again_ai/base/file_system.py,sha256=KNQmacO4Q__CQuq2oPzWrg3rQO48n3evglc9bNiP7KM,949
 not_again_ai/base/parallel.py,sha256=fcYhKBYBWvob84iKp3O93wvFFdXeidljZsShgBLTNGA,3448
 not_again_ai/data/__init__.py,sha256=1jF6mwvtB2PT7IEc3xpbRtZm3g3Lyf8zUqH4AEE4qlQ,244
 not_again_ai/data/web.py,sha256=wjx9cc33jcoJBGonYCIpwygPBFOwz7F-dx_ominmbnI,1838
-not_again_ai/llm/__init__.py,sha256=
+not_again_ai/llm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 not_again_ai/llm/chat_completion/__init__.py,sha256=HozawvdRkTFgq8XR16GJUHN1ukEa4Ya68wVPVrl-afs,250
-not_again_ai/llm/chat_completion/interface.py,sha256=
+not_again_ai/llm/chat_completion/interface.py,sha256=xRZXQ75dxrkt5WNtOTtrAa2Oy4ZB-PG2WihW9FBmW-s,2525
+not_again_ai/llm/chat_completion/types.py,sha256=Z_pQjVK_7rEvAE2fj5srKxHPFJBLPV8e0iCFibnzT7M,5596
 not_again_ai/llm/chat_completion/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+not_again_ai/llm/chat_completion/providers/anthropic_api.py,sha256=_-NPc5pfhsRwwy-GYc1vAiyc0agmGLyo5_7-mcPEnBU,6189
 not_again_ai/llm/chat_completion/providers/ollama_api.py,sha256=Puo2eE2VynvZOoqrUlNYtPgRGCRMVa8syc3TfBxS1hs,10661
 not_again_ai/llm/chat_completion/providers/openai_api.py,sha256=1wdeV50KYX_KIf2uofsICKYoHVSvj4kTRpS1Vuw3NSQ,17887
-not_again_ai/llm/chat_completion/types.py,sha256=yjSrcR9N5hrrMQAjzNvRIfQXQ-lVRgZfrIoKuhMbmjo,5399
 not_again_ai/llm/embedding/__init__.py,sha256=wscUfROukvw0M0vYccfaVTdXV0P-eICAT5mqM0LaHHc,182
 not_again_ai/llm/embedding/interface.py,sha256=Hj3UiktXEeCUeMwpIDtRkwBfKgaJSnJvclLNyjwUAtE,1144
+not_again_ai/llm/embedding/types.py,sha256=J4FFLx35Aow2kOaafDReeY9cUNqhWMjaAk5gXkX7SVk,506
 not_again_ai/llm/embedding/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 not_again_ai/llm/embedding/providers/ollama_api.py,sha256=m-OCis9WAUT2baGsGVPzejlive40eSNyO6tHmPh6joM,3201
 not_again_ai/llm/embedding/providers/openai_api.py,sha256=JFFqbq0O5snIEnr9VESdp5xehikQBPbs7nwyE6acFsY,5441
-not_again_ai/llm/embedding/types.py,sha256=J4FFLx35Aow2kOaafDReeY9cUNqhWMjaAk5gXkX7SVk,506
 not_again_ai/llm/prompting/__init__.py,sha256=7YnHro1yH01FLGnao27WyqQDFjNYf9npE5UxoR9YrUU,84
-not_again_ai/llm/prompting/compile_prompt.py,sha256=
+not_again_ai/llm/prompting/compile_prompt.py,sha256=uBn655yTiQ325z1CUgnkU2k7ICIvaYRJOm50B7w2lSs,4683
 not_again_ai/llm/prompting/interface.py,sha256=SMKYabmu3zTWbEDukU6aLU_JQ88apeBWWOF_qZ0s3ww,1783
+not_again_ai/llm/prompting/types.py,sha256=xz70dnawL9rji7Zr1_mOekY-uUlvKJJf7k9nXJsOXc4,1219
 not_again_ai/llm/prompting/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 not_again_ai/llm/prompting/providers/openai_tiktoken.py,sha256=8YrEiK3ZHyKVGiVsJ_Rd6eVdISIvcub7ooj-HB7Prsc,4536
-not_again_ai/llm/prompting/types.py,sha256=xz70dnawL9rji7Zr1_mOekY-uUlvKJJf7k9nXJsOXc4,1219
-not_again_ai/py.typed,sha256=UaCuPFa3H8UAakbt-5G8SPacldTOGvJv18pPjUJ5gDY,93
 not_again_ai/statistics/__init__.py,sha256=gA8r9JQFbFSN0ykrHy4G1IQgcky4f2eM5Oo24oVI5Ik,466
 not_again_ai/statistics/dependence.py,sha256=4xaniMkLlTjdXcNVXdwepEAiZ-WaaGYfR9haJC1lU2Q,4434
 not_again_ai/viz/__init__.py,sha256=MeaWae_QRbDEHJ4MWYoY1-Ad6S0FhSDaRhQncS2cpSc,447
@@ -32,7 +33,7 @@ not_again_ai/viz/distributions.py,sha256=OyWwJaNI6lMRm_iSrhq-CORLNvXfeuLSgDtVo3u
 not_again_ai/viz/scatterplot.py,sha256=5CUOWeknbBOaZPeX9oPin5sBkRKEwk8qeFH45R-9LlY,2292
 not_again_ai/viz/time_series.py,sha256=pOGZqXp_2nd6nKo-PUQNCtmMh__69jxQ6bQibTGLwZA,5212
 not_again_ai/viz/utils.py,sha256=hN7gwxtBt3U6jQni2K8j5m5pCXpaJDoNzGhBBikEU28,238
-not_again_ai-0.
-not_again_ai-0.
-not_again_ai-0.
-not_again_ai-0.
+not_again_ai-0.18.0.dist-info/METADATA,sha256=n41TWZaLvs_XPNbSEQojju9DI4nPhkjE055xX7ZJGjQ,12021
+not_again_ai-0.18.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+not_again_ai-0.18.0.dist-info/licenses/LICENSE,sha256=btjOgNGpp-ux5xOo1Gx1MddxeWtT9sof3s3Nui29QfA,1071
+not_again_ai-0.18.0.dist-info/RECORD,,
{not_again_ai-0.17.0.dist-info → not_again_ai-0.18.0.dist-info/licenses}/LICENSE
File without changes