not-again-ai 0.12.0__py3-none-any.whl → 0.12.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- not_again_ai/local_llm/chat_completion.py +4 -4
- {not_again_ai-0.12.0.dist-info → not_again_ai-0.12.1.dist-info}/METADATA +7 -3
- {not_again_ai-0.12.0.dist-info → not_again_ai-0.12.1.dist-info}/RECORD +6 -6
- {not_again_ai-0.12.0.dist-info → not_again_ai-0.12.1.dist-info}/LICENSE +0 -0
- {not_again_ai-0.12.0.dist-info → not_again_ai-0.12.1.dist-info}/WHEEL +0 -0
- {not_again_ai-0.12.0.dist-info → not_again_ai-0.12.1.dist-info}/entry_points.txt +0 -0
not_again_ai/local_llm/chat_completion.py

@@ -2,7 +2,7 @@ from typing import Any
 
 from azure.ai.inference import ChatCompletionsClient
 from ollama import Client
-from openai import OpenAI
+from openai import AzureOpenAI, OpenAI
 
 from not_again_ai.llm.gh_models import chat_completion as chat_completion_gh_models
 from not_again_ai.llm.openai_api import chat_completion as chat_completion_openai

@@ -12,7 +12,7 @@ from not_again_ai.local_llm.ollama import chat_completion as chat_completion_ollama
 def chat_completion(
     messages: list[dict[str, Any]],
     model: str,
-    client: OpenAI | Client | ChatCompletionsClient,
+    client: OpenAI | AzureOpenAI | Client | ChatCompletionsClient,
     tools: list[dict[str, Any]] | None = None,
     max_tokens: int | None = None,
     temperature: float = 0.7,

@@ -27,7 +27,7 @@ def chat_completion(
     Args:
         messages (list[dict[str, Any]]): A list of messages to send to the model.
         model (str): The model name to use.
-        client (OpenAI | Client): The client object to use for chat completion.
+        client (OpenAI | AzureOpenAI | Client | ChatCompletionsClient): The client object to use for chat completion.
         tools (list[dict[str, Any]], optional):A list of tools the model may call.
             Use this to provide a list of functions the model may generate JSON inputs for. Defaults to None.
         max_tokens (int, optional): The maximum number of tokens to generate.

@@ -48,7 +48,7 @@ def chat_completion(
             extras (dict): This will contain any additional fields returned by corresponding provider.
     """
     # Determine which chat_completion function to call based on the client type
-    if isinstance(client, OpenAI):
+    if isinstance(client, OpenAI | AzureOpenAI):
         response = chat_completion_openai.chat_completion(
             messages=messages,
             model=model,
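In effect, `chat_completion` now also accepts an `AzureOpenAI` client and routes it through the same OpenAI-compatible path. A minimal usage sketch, assuming the standard `openai` package client constructors; the model name is a placeholder:

```python
from openai import AzureOpenAI, OpenAI

from not_again_ai.local_llm.chat_completion import chat_completion

# Either client type now satisfies the isinstance(client, OpenAI | AzureOpenAI)
# check above and is dispatched to chat_completion_openai.chat_completion.
client: OpenAI | AzureOpenAI = OpenAI()  # or an AzureOpenAI instance (see the AOAI setup below)

response = chat_completion(
    messages=[{"role": "user", "content": "Hello!"}],
    model="gpt-4o-mini",  # placeholder model/deployment name
    client=client,
    max_tokens=100,
)
```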
{not_again_ai-0.12.0.dist-info → not_again_ai-0.12.1.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: not-again-ai
-Version: 0.12.0
+Version: 0.12.1
 Summary: Designed to once and for all collect all the little things that come up over and over again in AI projects and put them in one place.
 Home-page: https://github.com/DaveCoDev/not-again-ai
 License: MIT

@@ -27,7 +27,7 @@ Requires-Dist: jinja2 (>=3.1,<4.0) ; extra == "local-llm"
 Requires-Dist: loguru (==0.7.2)
 Requires-Dist: numpy (>=1.26,<2.0) ; extra == "statistics" or extra == "viz"
 Requires-Dist: ollama (>=0.3,<0.4) ; extra == "local-llm"
-Requires-Dist: openai (>=1.
+Requires-Dist: openai (>=1.41,<2.0) ; extra == "llm"
 Requires-Dist: pandas (>=2.2,<3.0) ; extra == "viz"
 Requires-Dist: pydantic (>=2.8,<3.0) ; extra == "llm"
 Requires-Dist: python-liquid (>=1.12,<2.0) ; extra == "llm"

@@ -35,7 +35,7 @@ Requires-Dist: scikit-learn (>=1.5,<2.0) ; extra == "statistics"
 Requires-Dist: scipy (>=1.14,<2.0) ; extra == "statistics"
 Requires-Dist: seaborn (>=0.13,<0.14) ; extra == "viz"
 Requires-Dist: tiktoken (>=0.7,<0.8) ; extra == "llm"
-Requires-Dist: transformers (>=4.
+Requires-Dist: transformers (>=4.44,<5.0) ; extra == "local-llm"
 Project-URL: Documentation, https://github.com/DaveCoDev/not-again-ai
 Project-URL: Repository, https://github.com/DaveCoDev/not-again-ai
 Description-Content-Type: text/markdown

@@ -77,6 +77,10 @@ The package is split into subpackages, so you can install only the parts you need
 1. OpenAI API
     1. Go to https://platform.openai.com/settings/profile?tab=api-keys to get your API key.
     1. (Optional) Set the `OPENAI_API_KEY` and the `OPENAI_ORG_ID` environment variables.
+1. Azure OpenAI (AOAI)
+    1. Using AOAI requires using Entra ID authentication. See https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/managed-identity for how to set this up for your AOAI deployment.
+    1. Requires the correct role assigned to your user account and being signed into the Azure CLI.
+    1. (Optional) Set the `AZURE_OPENAI_ENDPOINT` environment variable.
 1. GitHub Models
     1. Get a Personal Access Token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable. The token does not need any permissions.
     1. Check the [Github Marketplace](https://github.com/marketplace/models) to see which models are available.
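The new AOAI setup steps added to the README translate into client construction roughly like the following sketch. Authentication follows the Entra ID guidance (signed in via the Azure CLI with the appropriate role assignment); the API version shown is a placeholder assumption:

```python
import os

from azure.identity import DefaultAzureCredential, get_bearer_token_provider
from openai import AzureOpenAI

# Entra ID token provider per the linked managed-identity guidance.
token_provider = get_bearer_token_provider(
    DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
)

client = AzureOpenAI(
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],  # the optional env var from the README
    azure_ad_token_provider=token_provider,
    api_version="2024-06-01",  # placeholder API version
)
```

This client can then be passed as `client` to `chat_completion` as shown in the earlier sketch.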
{not_again_ai-0.12.0.dist-info → not_again_ai-0.12.1.dist-info}/RECORD

@@ -14,7 +14,7 @@ not_again_ai/llm/openai_api/openai_client.py,sha256=AK9SDBkpP94u5Q73-Q5i5HRPQh_D
 not_again_ai/llm/openai_api/prompts.py,sha256=B62xs3WKaTv7SfT_TVC-PqO9oeWWpO0xS4_oxW9MYMQ,7093
 not_again_ai/llm/openai_api/tokens.py,sha256=RYBzl5vqE_MzWM60QbWC_6X9YOQoOgBOeR-68rM34II,4421
 not_again_ai/local_llm/__init__.py,sha256=BsUn39U3QQaw6yomQHfp_HIPHRIBoMAgjcP3CDADx04,882
-not_again_ai/local_llm/chat_completion.py,sha256=
+not_again_ai/local_llm/chat_completion.py,sha256=PmICXrGZJXIuqY00ULBGi2bKnPG8ticqTXZHSTzZK9o,4828
 not_again_ai/local_llm/huggingface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 not_again_ai/local_llm/huggingface/chat_completion.py,sha256=Y6uMbxLG8TaMVi3hJGrMl_G9Y1N_0dld5Kv1iqYnoao,2300
 not_again_ai/local_llm/huggingface/helpers.py,sha256=YPr8KbQ8Ac_Mn_nBcrFuL3bCl-IuDCdaRvYVCocy8Gk,734

@@ -35,8 +35,8 @@ not_again_ai/viz/distributions.py,sha256=OyWwJaNI6lMRm_iSrhq-CORLNvXfeuLSgDtVo3u
 not_again_ai/viz/scatterplot.py,sha256=5CUOWeknbBOaZPeX9oPin5sBkRKEwk8qeFH45R-9LlY,2292
 not_again_ai/viz/time_series.py,sha256=pOGZqXp_2nd6nKo-PUQNCtmMh__69jxQ6bQibTGLwZA,5212
 not_again_ai/viz/utils.py,sha256=hN7gwxtBt3U6jQni2K8j5m5pCXpaJDoNzGhBBikEU28,238
-not_again_ai-0.12.
-not_again_ai-0.12.
-not_again_ai-0.12.
-not_again_ai-0.12.
-not_again_ai-0.12.
+not_again_ai-0.12.1.dist-info/LICENSE,sha256=btjOgNGpp-ux5xOo1Gx1MddxeWtT9sof3s3Nui29QfA,1071
+not_again_ai-0.12.1.dist-info/METADATA,sha256=VydzFufICQyP6paN15KJTudJi6rSpwWn5H_W1v46p6Y,16389
+not_again_ai-0.12.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+not_again_ai-0.12.1.dist-info/entry_points.txt,sha256=EMJegugnmJUd-jMUA_qIRMIPAasbei8gP6O4-ER0BxQ,61
+not_again_ai-0.12.1.dist-info/RECORD,,
{not_again_ai-0.12.0.dist-info → not_again_ai-0.12.1.dist-info}/LICENSE: file without changes
{not_again_ai-0.12.0.dist-info → not_again_ai-0.12.1.dist-info}/WHEEL: file without changes
{not_again_ai-0.12.0.dist-info → not_again_ai-0.12.1.dist-info}/entry_points.txt: file without changes