together 1.2.10__tar.gz → 1.2.11__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. {together-1.2.10 → together-1.2.11}/PKG-INFO +1 -1
  2. {together-1.2.10 → together-1.2.11}/pyproject.toml +2 -2
  3. {together-1.2.10 → together-1.2.11}/src/together/legacy/complete.py +4 -4
  4. {together-1.2.10 → together-1.2.11}/src/together/legacy/embeddings.py +1 -1
  5. {together-1.2.10 → together-1.2.11}/src/together/legacy/images.py +1 -1
  6. {together-1.2.10 → together-1.2.11}/src/together/resources/chat/completions.py +4 -0
  7. {together-1.2.10 → together-1.2.11}/src/together/resources/completions.py +5 -1
  8. {together-1.2.10 → together-1.2.11}/src/together/resources/embeddings.py +5 -1
  9. {together-1.2.10 → together-1.2.11}/src/together/resources/images.py +6 -0
  10. {together-1.2.10 → together-1.2.11}/src/together/resources/rerank.py +4 -0
  11. {together-1.2.10 → together-1.2.11}/LICENSE +0 -0
  12. {together-1.2.10 → together-1.2.11}/README.md +0 -0
  13. {together-1.2.10 → together-1.2.11}/src/together/__init__.py +0 -0
  14. {together-1.2.10 → together-1.2.11}/src/together/abstract/__init__.py +0 -0
  15. {together-1.2.10 → together-1.2.11}/src/together/abstract/api_requestor.py +0 -0
  16. {together-1.2.10 → together-1.2.11}/src/together/cli/__init__.py +0 -0
  17. {together-1.2.10 → together-1.2.11}/src/together/cli/api/__init__.py +0 -0
  18. {together-1.2.10 → together-1.2.11}/src/together/cli/api/chat.py +0 -0
  19. {together-1.2.10 → together-1.2.11}/src/together/cli/api/completions.py +0 -0
  20. {together-1.2.10 → together-1.2.11}/src/together/cli/api/files.py +0 -0
  21. {together-1.2.10 → together-1.2.11}/src/together/cli/api/finetune.py +0 -0
  22. {together-1.2.10 → together-1.2.11}/src/together/cli/api/images.py +0 -0
  23. {together-1.2.10 → together-1.2.11}/src/together/cli/api/models.py +0 -0
  24. {together-1.2.10 → together-1.2.11}/src/together/cli/cli.py +0 -0
  25. {together-1.2.10 → together-1.2.11}/src/together/client.py +0 -0
  26. {together-1.2.10 → together-1.2.11}/src/together/constants.py +0 -0
  27. {together-1.2.10 → together-1.2.11}/src/together/error.py +0 -0
  28. {together-1.2.10 → together-1.2.11}/src/together/filemanager.py +0 -0
  29. {together-1.2.10 → together-1.2.11}/src/together/legacy/__init__.py +0 -0
  30. {together-1.2.10 → together-1.2.11}/src/together/legacy/base.py +0 -0
  31. {together-1.2.10 → together-1.2.11}/src/together/legacy/files.py +0 -0
  32. {together-1.2.10 → together-1.2.11}/src/together/legacy/finetune.py +0 -0
  33. {together-1.2.10 → together-1.2.11}/src/together/legacy/models.py +0 -0
  34. {together-1.2.10 → together-1.2.11}/src/together/resources/__init__.py +0 -0
  35. {together-1.2.10 → together-1.2.11}/src/together/resources/chat/__init__.py +0 -0
  36. {together-1.2.10 → together-1.2.11}/src/together/resources/files.py +0 -0
  37. {together-1.2.10 → together-1.2.11}/src/together/resources/finetune.py +0 -0
  38. {together-1.2.10 → together-1.2.11}/src/together/resources/models.py +0 -0
  39. {together-1.2.10 → together-1.2.11}/src/together/together_response.py +0 -0
  40. {together-1.2.10 → together-1.2.11}/src/together/types/__init__.py +0 -0
  41. {together-1.2.10 → together-1.2.11}/src/together/types/abstract.py +0 -0
  42. {together-1.2.10 → together-1.2.11}/src/together/types/chat_completions.py +0 -0
  43. {together-1.2.10 → together-1.2.11}/src/together/types/common.py +0 -0
  44. {together-1.2.10 → together-1.2.11}/src/together/types/completions.py +0 -0
  45. {together-1.2.10 → together-1.2.11}/src/together/types/embeddings.py +0 -0
  46. {together-1.2.10 → together-1.2.11}/src/together/types/error.py +0 -0
  47. {together-1.2.10 → together-1.2.11}/src/together/types/files.py +0 -0
  48. {together-1.2.10 → together-1.2.11}/src/together/types/finetune.py +0 -0
  49. {together-1.2.10 → together-1.2.11}/src/together/types/images.py +0 -0
  50. {together-1.2.10 → together-1.2.11}/src/together/types/models.py +0 -0
  51. {together-1.2.10 → together-1.2.11}/src/together/types/rerank.py +0 -0
  52. {together-1.2.10 → together-1.2.11}/src/together/utils/__init__.py +0 -0
  53. {together-1.2.10 → together-1.2.11}/src/together/utils/_log.py +0 -0
  54. {together-1.2.10 → together-1.2.11}/src/together/utils/api_helpers.py +0 -0
  55. {together-1.2.10 → together-1.2.11}/src/together/utils/files.py +0 -0
  56. {together-1.2.10 → together-1.2.11}/src/together/utils/tools.py +0 -0
  57. {together-1.2.10 → together-1.2.11}/src/together/version.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: together
3
- Version: 1.2.10
3
+ Version: 1.2.11
4
4
  Summary: Python client for Together's Cloud Platform!
5
5
  Home-page: https://github.com/togethercomputer/together-python
6
6
  License: Apache-2.0
@@ -12,7 +12,7 @@ build-backend = "poetry.masonry.api"
12
12
 
13
13
  [tool.poetry]
14
14
  name = "together"
15
- version = "1.2.10"
15
+ version = "1.2.11"
16
16
  authors = [
17
17
  "Together AI <support@together.ai>"
18
18
  ]
@@ -55,7 +55,7 @@ types-tqdm = "^4.65.0.0"
55
55
  types-tabulate = "^0.9.0.3"
56
56
  pre-commit = "3.5.0"
57
57
  types-requests = "^2.31.0.20240218"
58
- pyarrow-stubs = "^10.0.1.7"
58
+ pyarrow-stubs = ">=10.0.1.7,<20240831.0.0.0"
59
59
  mypy = "^1.9.0"
60
60
 
61
61
  [tool.poetry.group.tests]
@@ -14,7 +14,7 @@ class Complete:
14
14
  def create(
15
15
  cls,
16
16
  prompt: str,
17
- **kwargs,
17
+ **kwargs: Any,
18
18
  ) -> Dict[str, Any]:
19
19
  """Legacy completion function."""
20
20
 
@@ -36,7 +36,7 @@ class Complete:
36
36
  def create_streaming(
37
37
  cls,
38
38
  prompt: str,
39
- **kwargs,
39
+ **kwargs: Any,
40
40
  ) -> Iterator[Dict[str, Any]]:
41
41
  """Legacy streaming completion function."""
42
42
 
@@ -59,7 +59,7 @@ class Completion:
59
59
  def create(
60
60
  cls,
61
61
  prompt: str,
62
- **kwargs,
62
+ **kwargs: Any,
63
63
  ) -> CompletionResponse | Iterator[CompletionChunk]:
64
64
  """Completion function."""
65
65
 
@@ -79,7 +79,7 @@ class AsyncComplete:
79
79
  async def create(
80
80
  cls,
81
81
  prompt: str,
82
- **kwargs,
82
+ **kwargs: Any,
83
83
  ) -> CompletionResponse | AsyncGenerator[CompletionChunk, None]:
84
84
  """Async completion function."""
85
85
 
@@ -11,7 +11,7 @@ class Embeddings:
11
11
  def create(
12
12
  cls,
13
13
  input: str,
14
- **kwargs,
14
+ **kwargs: Any,
15
15
  ) -> Dict[str, Any]:
16
16
  """Legacy embeddings function."""
17
17
 
@@ -11,7 +11,7 @@ class Image:
11
11
  def create(
12
12
  cls,
13
13
  prompt: str,
14
- **kwargs,
14
+ **kwargs: Any,
15
15
  ) -> Dict[str, Any]:
16
16
  """Legacy image function."""
17
17
 
@@ -40,6 +40,7 @@ class ChatCompletions:
40
40
  response_format: Dict[str, str | Dict[str, Any]] | None = None,
41
41
  tools: Dict[str, str | Dict[str, Any]] | None = None,
42
42
  tool_choice: str | Dict[str, str | Dict[str, str]] | None = None,
43
+ **kwargs: Any,
43
44
  ) -> ChatCompletionResponse | Iterator[ChatCompletionChunk]:
44
45
  """
45
46
  Method to generate completions based on a given prompt using a specified model.
@@ -131,6 +132,7 @@ class ChatCompletions:
131
132
  response_format=response_format,
132
133
  tools=tools,
133
134
  tool_choice=tool_choice,
135
+ **kwargs,
134
136
  ).model_dump(exclude_none=True)
135
137
 
136
138
  response, _, _ = requestor.request(
@@ -177,6 +179,7 @@ class AsyncChatCompletions:
177
179
  response_format: Dict[str, Any] | None = None,
178
180
  tools: Dict[str, str | Dict[str, str | Dict[str, Any]]] | None = None,
179
181
  tool_choice: str | Dict[str, str | Dict[str, str]] | None = None,
182
+ **kwargs: Any,
180
183
  ) -> AsyncGenerator[ChatCompletionChunk, None] | ChatCompletionResponse:
181
184
  """
182
185
  Async method to generate completions based on a given prompt using a specified model.
@@ -268,6 +271,7 @@ class AsyncChatCompletions:
268
271
  response_format=response_format,
269
272
  tools=tools,
270
273
  tool_choice=tool_choice,
274
+ **kwargs,
271
275
  ).model_dump(exclude_none=True)
272
276
 
273
277
  response, _, _ = await requestor.arequest(
@@ -1,6 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
- from typing import AsyncGenerator, Dict, Iterator, List
3
+ from typing import AsyncGenerator, Dict, Iterator, List, Any
4
4
 
5
5
  from together.abstract import api_requestor
6
6
  from together.together_response import TogetherResponse
@@ -37,6 +37,7 @@ class Completions:
37
37
  echo: bool | None = None,
38
38
  n: int | None = None,
39
39
  safety_model: str | None = None,
40
+ **kwargs: Any,
40
41
  ) -> CompletionResponse | Iterator[CompletionChunk]:
41
42
  """
42
43
  Method to generate completions based on a given prompt using a specified model.
@@ -113,6 +114,7 @@ class Completions:
113
114
  echo=echo,
114
115
  n=n,
115
116
  safety_model=safety_model,
117
+ **kwargs,
116
118
  ).model_dump(exclude_none=True)
117
119
 
118
120
  response, _, _ = requestor.request(
@@ -156,6 +158,7 @@ class AsyncCompletions:
156
158
  echo: bool | None = None,
157
159
  n: int | None = None,
158
160
  safety_model: str | None = None,
161
+ **kwargs: Any,
159
162
  ) -> AsyncGenerator[CompletionChunk, None] | CompletionResponse:
160
163
  """
161
164
  Async method to generate completions based on a given prompt using a specified model.
@@ -232,6 +235,7 @@ class AsyncCompletions:
232
235
  echo=echo,
233
236
  n=n,
234
237
  safety_model=safety_model,
238
+ **kwargs,
235
239
  ).model_dump(exclude_none=True)
236
240
 
237
241
  response, _, _ = await requestor.arequest(
@@ -1,6 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
- from typing import List
3
+ from typing import List, Any
4
4
 
5
5
  from together.abstract import api_requestor
6
6
  from together.together_response import TogetherResponse
@@ -21,6 +21,7 @@ class Embeddings:
21
21
  *,
22
22
  input: str | List[str],
23
23
  model: str,
24
+ **kwargs: Any,
24
25
  ) -> EmbeddingResponse:
25
26
  """
26
27
  Method to generate completions based on a given prompt using a specified model.
@@ -40,6 +41,7 @@ class Embeddings:
40
41
  parameter_payload = EmbeddingRequest(
41
42
  input=input,
42
43
  model=model,
44
+ **kwargs,
43
45
  ).model_dump(exclude_none=True)
44
46
 
45
47
  response, _, _ = requestor.request(
@@ -65,6 +67,7 @@ class AsyncEmbeddings:
65
67
  *,
66
68
  input: str | List[str],
67
69
  model: str,
70
+ **kwargs: Any,
68
71
  ) -> EmbeddingResponse:
69
72
  """
70
73
  Async method to generate completions based on a given prompt using a specified model.
@@ -84,6 +87,7 @@ class AsyncEmbeddings:
84
87
  parameter_payload = EmbeddingRequest(
85
88
  input=input,
86
89
  model=model,
90
+ **kwargs,
87
91
  ).model_dump(exclude_none=True)
88
92
 
89
93
  response, _, _ = await requestor.arequest(
@@ -1,5 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
+ from typing import Any
4
+
3
5
  from together.abstract import api_requestor
4
6
  from together.together_response import TogetherResponse
5
7
  from together.types import (
@@ -25,6 +27,7 @@ class Images:
25
27
  height: int | None = 1024,
26
28
  width: int | None = 1024,
27
29
  negative_prompt: str | None = None,
30
+ **kwargs: Any,
28
31
  ) -> ImageResponse:
29
32
  """
30
33
  Method to generate images based on a given prompt using a specified model.
@@ -67,6 +70,7 @@ class Images:
67
70
  height=height,
68
71
  width=width,
69
72
  negative_prompt=negative_prompt,
73
+ **kwargs,
70
74
  ).model_dump(exclude_none=True)
71
75
 
72
76
  response, _, _ = requestor.request(
@@ -98,6 +102,7 @@ class AsyncImages:
98
102
  height: int | None = 1024,
99
103
  width: int | None = 1024,
100
104
  negative_prompt: str | None = None,
105
+ **kwargs: Any,
101
106
  ) -> ImageResponse:
102
107
  """
103
108
  Async method to generate images based on a given prompt using a specified model.
@@ -140,6 +145,7 @@ class AsyncImages:
140
145
  height=height,
141
146
  width=width,
142
147
  negative_prompt=negative_prompt,
148
+ **kwargs,
143
149
  ).model_dump(exclude_none=True)
144
150
 
145
151
  response, _, _ = await requestor.arequest(
@@ -25,6 +25,7 @@ class Rerank:
25
25
  top_n: int | None = None,
26
26
  return_documents: bool = False,
27
27
  rank_fields: List[str] | None = None,
28
+ **kwargs: Any,
28
29
  ) -> RerankResponse:
29
30
  """
30
31
  Method to generate completions based on a given prompt using a specified model.
@@ -52,6 +53,7 @@ class Rerank:
52
53
  top_n=top_n,
53
54
  return_documents=return_documents,
54
55
  rank_fields=rank_fields,
56
+ **kwargs,
55
57
  ).model_dump(exclude_none=True)
56
58
 
57
59
  response, _, _ = requestor.request(
@@ -81,6 +83,7 @@ class AsyncRerank:
81
83
  top_n: int | None = None,
82
84
  return_documents: bool = False,
83
85
  rank_fields: List[str] | None = None,
86
+ **kwargs: Any,
84
87
  ) -> RerankResponse:
85
88
  """
86
89
  Async method to generate completions based on a given prompt using a specified model.
@@ -108,6 +111,7 @@ class AsyncRerank:
108
111
  top_n=top_n,
109
112
  return_documents=return_documents,
110
113
  rank_fields=rank_fields,
114
+ **kwargs,
111
115
  ).model_dump(exclude_none=True)
112
116
 
113
117
  response, _, _ = await requestor.arequest(
File without changes
File without changes