together-0.2.8.tar.gz → together-0.2.10.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. {together-0.2.8 → together-0.2.10}/PKG-INFO +1 -11
  2. {together-0.2.8 → together-0.2.10}/README.md +0 -10
  3. {together-0.2.8 → together-0.2.10}/pyproject.toml +1 -1
  4. {together-0.2.8 → together-0.2.10}/src/together/__init__.py +23 -0
  5. {together-0.2.8 → together-0.2.10}/src/together/commands/complete.py +9 -0
  6. {together-0.2.8 → together-0.2.10}/src/together/commands/embeddings.py +1 -2
  7. {together-0.2.8 → together-0.2.10}/src/together/complete.py +4 -0
  8. together-0.2.10/src/together/embeddings.py +64 -0
  9. {together-0.2.8 → together-0.2.10}/src/together/utils.py +14 -18
  10. together-0.2.8/src/together/embeddings.py +0 -35
  11. {together-0.2.8 → together-0.2.10}/LICENSE +0 -0
  12. {together-0.2.8 → together-0.2.10}/src/together/cli/__init__.py +0 -0
  13. {together-0.2.8 → together-0.2.10}/src/together/cli/cli.py +0 -0
  14. {together-0.2.8 → together-0.2.10}/src/together/commands/__init__.py +0 -0
  15. {together-0.2.8 → together-0.2.10}/src/together/commands/chat.py +0 -0
  16. {together-0.2.8 → together-0.2.10}/src/together/commands/files.py +0 -0
  17. {together-0.2.8 → together-0.2.10}/src/together/commands/finetune.py +0 -0
  18. {together-0.2.8 → together-0.2.10}/src/together/commands/image.py +0 -0
  19. {together-0.2.8 → together-0.2.10}/src/together/commands/models.py +0 -0
  20. {together-0.2.8 → together-0.2.10}/src/together/error.py +0 -0
  21. {together-0.2.8 → together-0.2.10}/src/together/files.py +0 -0
  22. {together-0.2.8 → together-0.2.10}/src/together/finetune.py +0 -0
  23. {together-0.2.8 → together-0.2.10}/src/together/image.py +0 -0
  24. {together-0.2.8 → together-0.2.10}/src/together/models.py +0 -0
  25. {together-0.2.8 → together-0.2.10}/src/together/tools/__init__.py +0 -0
  26. {together-0.2.8 → together-0.2.10}/src/together/tools/conversation.py +0 -0
  27. {together-0.2.8 → together-0.2.10}/src/together/types.py +0 -0
  28. {together-0.2.8 → together-0.2.10}/src/together/version.py +0 -0
{together-0.2.8 → together-0.2.10}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: together
-Version: 0.2.8
+Version: 0.2.10
 Summary: Python client for Together's Cloud Platform!
 Home-page: https://github.com/togethercomputer/together
 License: Apache-2.0
@@ -481,16 +481,6 @@ print(output_text)
 Space Robots are a great way to get your kids interested in science. After all, they are the future!
 ```

-## Embeddings API
-
-Embeddings are vector representations of sequences. You can use these vectors for measuring the overall similarity between texts. Embeddings are useful for tasks such as search and retrieval.
-
-```python
-resp = together.Embeddings.create("embed this sentence into a single vector", model="togethercomputer/bert-base-uncased")
-
-print(resp['data'][0]['embedding']) # [0.06659205, 0.07896972, 0.007910785 ........]
-```
-
 ## Colab Tutorial

 Follow along in our Colab (Google Colaboratory) Notebook Tutorial [Example Finetuning Project](https://colab.research.google.com/drive/11DwtftycpDSgp3Z1vnV-Cy68zvkGZL4K?usp=sharing).
{together-0.2.8 → together-0.2.10}/README.md

@@ -453,16 +453,6 @@ print(output_text)
 Space Robots are a great way to get your kids interested in science. After all, they are the future!
 ```

-## Embeddings API
-
-Embeddings are vector representations of sequences. You can use these vectors for measuring the overall similarity between texts. Embeddings are useful for tasks such as search and retrieval.
-
-```python
-resp = together.Embeddings.create("embed this sentence into a single vector", model="togethercomputer/bert-base-uncased")
-
-print(resp['data'][0]['embedding']) # [0.06659205, 0.07896972, 0.007910785 ........]
-```
-
 ## Colab Tutorial

 Follow along in our Colab (Google Colaboratory) Notebook Tutorial [Example Finetuning Project](https://colab.research.google.com/drive/11DwtftycpDSgp3Z1vnV-Cy68zvkGZL4K?usp=sharing).
{together-0.2.8 → together-0.2.10}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "poetry.masonry.api"

 [tool.poetry]
 name = "together"
-version = "0.2.8"
+version = "0.2.10"
 authors = [
     "Together AI <support@together.ai>"
 ]
{together-0.2.8 → together-0.2.10}/src/together/__init__.py

@@ -1,6 +1,7 @@
 import os
 import sys
 import urllib.parse
+from typing import Type

 from .version import VERSION

@@ -41,6 +42,27 @@ from .image import Image
 from .models import Models


+class Together:
+    complete: Type[Complete]
+    completion: Type[Completion]
+    embeddings: Type[Embeddings]
+    files: Type[Files]
+    finetune: Type[Finetune]
+    image: Type[Image]
+    models: Type[Models]
+
+    def __init__(
+        self,
+    ) -> None:
+        self.complete = Complete
+        self.completion = Completion
+        self.embeddings = Embeddings
+        self.files = Files
+        self.finetune = Finetune
+        self.image = Image
+        self.models = Models
+
+
 __all__ = [
     "api_key",
     "api_base",
@@ -63,4 +85,5 @@ __all__ = [
     "MISSING_API_KEY_MESSAGE",
     "BACKOFF_FACTOR",
     "min_samples",
+    "Together",
 ]
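The new `Together` class simply exposes the existing module-level classes (`Complete`, `Embeddings`, `Files`, and so on) as attributes, giving a single client-style entry point. A minimal usage sketch, assuming the API key is set as in the README; the prompt and model name below are placeholders, not taken from this diff:

```python
import together

together.api_key = "xxxxx"  # placeholder; set your real key as in the README

client = together.Together()

# client.complete is the Complete class itself, so this is equivalent to
# calling together.Complete.create(...) directly
output = client.complete.create(
    prompt="Space Robots are",            # placeholder prompt
    model="togethercomputer/llama-2-7b",  # illustrative model name
)
```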
{together-0.2.8 → together-0.2.10}/src/together/commands/complete.py

@@ -90,6 +90,13 @@ def add_parser(subparsers: argparse._SubParsersAction[argparse.ArgumentParser])
         action="store_true",
         help="temperature for the LM",
     )
+    subparser.add_argument(
+        "--safety-model",
+        "-sm",
+        default=None,
+        type=str,
+        help="The name of the safety model to use for moderation.",
+    )
     subparser.set_defaults(func=_run_complete)


@@ -142,6 +149,7 @@ def _run_complete(args: argparse.Namespace) -> None:
             top_k=args.top_k,
             repetition_penalty=args.repetition_penalty,
             logprobs=args.logprobs,
+            safety_model=args.safety_model,
         )
     except together.AuthenticationError:
         logger.critical(together.MISSING_API_KEY_MESSAGE)
@@ -159,6 +167,7 @@ def _run_complete(args: argparse.Namespace) -> None:
             top_p=args.top_p,
             top_k=args.top_k,
             repetition_penalty=args.repetition_penalty,
+            safety_model=args.safety_model,
             raw=args.raw,
         ):
             if not args.raw:
{together-0.2.8 → together-0.2.10}/src/together/commands/embeddings.py

@@ -1,7 +1,6 @@
 from __future__ import annotations

 import argparse
-import json

 import together
 from together import Embeddings
@@ -42,7 +41,7 @@ def _run_complete(args: argparse.Namespace) -> None:
             model=args.model,
         )

-        print(json.dumps(response, indent=4))
+        print([e.embedding for e in response.data])
     except together.AuthenticationError:
         logger.critical(together.MISSING_API_KEY_MESSAGE)
         exit(0)
{together-0.2.8 → together-0.2.10}/src/together/complete.py

@@ -24,6 +24,7 @@ class Complete:
         logprobs: Optional[int] = None,
         api_key: Optional[str] = None,
         cast: bool = False,
+        safety_model: Optional[str] = None,
     ) -> Union[Dict[str, Any], TogetherResponse]:
         if model == "":
             model = together.default_text_model
@@ -38,6 +39,7 @@ class Complete:
             "stop": stop,
             "repetition_penalty": repetition_penalty,
             "logprobs": logprobs,
+            "safety_model": safety_model,
         }

         # send request
@@ -70,6 +72,7 @@ class Complete:
         raw: Optional[bool] = False,
         api_key: Optional[str] = None,
         cast: Optional[bool] = False,
+        safety_model: Optional[str] = None,
     ) -> Union[Iterator[str], Iterator[TogetherResponse]]:
         """
         Prints streaming responses and returns the completed text.
@@ -88,6 +91,7 @@ class Complete:
             "stop": stop,
             "repetition_penalty": repetition_penalty,
             "stream_tokens": True,
+            "safety_model": safety_model,
         }

         # send request
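Both the blocking and streaming completion paths now accept an optional `safety_model` that is forwarded in the request payload (and exposed on the CLI as `--safety-model`/`-sm`). A hedged sketch of passing it from Python; the prompt, model, and safety-model identifiers are illustrative and not taken from this diff:

```python
import together

together.api_key = "xxxxx"  # placeholder

output = together.Complete.create(
    prompt="Tell me about space robots",       # illustrative prompt
    model="togethercomputer/llama-2-7b",       # illustrative model name
    safety_model="Meta-Llama/Llama-Guard-7b",  # illustrative safety-model name
)
```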
together-0.2.10/src/together/embeddings.py (added)

@@ -0,0 +1,64 @@
+import concurrent.futures
+from typing import Any, Dict, List, Optional, Union
+
+import together
+from together.utils import create_post_request, get_logger
+
+
+logger = get_logger(str(__name__))
+
+
+class DataItem:
+    def __init__(self, embedding: List[float]):
+        self.embedding = embedding
+
+
+class EmbeddingsOutput:
+    def __init__(self, data: List[DataItem]):
+        self.data = data
+
+
+class Embeddings:
+    @classmethod
+    def create(
+        cls,
+        input: Union[str, List[str]],
+        model: Optional[str] = "",
+    ) -> EmbeddingsOutput:
+        if model == "":
+            model = together.default_embedding_model
+
+        if isinstance(input, str):
+            parameter_payload = {
+                "input": input,
+                "model": model,
+            }
+
+            response = cls._process_input(parameter_payload)
+
+            return EmbeddingsOutput([DataItem(response["data"][0]["embedding"])])
+
+        elif isinstance(input, list):
+            # If input is a list, process each string concurrently
+            with concurrent.futures.ThreadPoolExecutor() as executor:
+                parameter_payloads = [{"input": item, "model": model} for item in input]
+                results = list(executor.map(cls._process_input, parameter_payloads))
+
+            return EmbeddingsOutput(
+                [DataItem(item["data"][0]["embedding"]) for item in results]
+            )
+
+    @classmethod
+    def _process_input(cls, parameter_payload: Dict[str, Any]) -> Dict[str, Any]:
+        # send request
+        response = create_post_request(
+            url=together.api_base_embeddings, json=parameter_payload
+        )
+
+        # return the json as a DotDict
+        try:
+            response_json = dict(response.json())
+        except Exception as e:
+            raise together.JSONError(e, http_status=response.status_code)
+
+        return response_json
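The rewritten embeddings module is a breaking change for callers: `Embeddings.create` now returns an `EmbeddingsOutput` whose `data` attribute holds `DataItem` objects (attribute access) instead of a plain dict, and it also accepts a list of strings that is fanned out across a thread pool, one request per string. A small usage sketch; the single-input model name is reused from the README example that this release removes:

```python
from together import Embeddings

# single string -> EmbeddingsOutput with one DataItem
resp = Embeddings.create(
    "embed this sentence into a single vector",
    model="togethercomputer/bert-base-uncased",
)
print(resp.data[0].embedding[:3])  # first few floats of the vector

# list of strings -> one concurrent request per item, results kept in input order
batch = Embeddings.create(["first sentence", "second sentence"])
print(len(batch.data))  # 2
```

The 0.2.8 dict-style access (`resp['data'][0]['embedding']`) no longer works, which is why the CLI above switched from `json.dumps(response, ...)` to `[e.embedding for e in response.data]` and why the old README snippet was dropped.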
{together-0.2.8 → together-0.2.10}/src/together/utils.py

@@ -75,6 +75,18 @@ def parse_timestamp(timestamp: str) -> datetime:
     raise ValueError("Timestamp does not match any expected format")


+def response_status_exception(response: requests.Response) -> None:
+    if response.status_code == 429:
+        raise together.RateLimitError(
+            message="Too many requests received. Please pace your requests."
+        )
+    elif response.status_code == 500:
+        raise Exception("server encountered an unexpected condition")
+    elif response.status_code == 401:
+        raise Exception("invalid authentication credentials")
+    response.raise_for_status()
+
+
 def create_post_request(
     url: str,
     headers: Optional[Dict[Any, Any]] = None,
@@ -99,15 +111,7 @@ def create_post_request(
     except requests.exceptions.RequestException as e:
         raise together.ResponseError(e)

-    if response.status_code == 429:
-        raise together.RateLimitError(
-            message="Too many requests received. Please pace your requests."
-        )
-    elif response.status_code == 500:
-        raise Exception("Invalid API key supplied.")
-    elif response.status_code == 401:
-        raise Exception("API Key not supplied")
-    response.raise_for_status()
+    response_status_exception(response)

     return response

@@ -139,15 +143,7 @@ def create_get_request(
     except requests.exceptions.RequestException as e:
         raise together.ResponseError(e)

-    if response.status_code == 429:
-        raise together.RateLimitError(
-            message="Too many requests received. Please pace your requests."
-        )
-    elif response.status_code == 500:
-        raise Exception("Invalid API key supplied.")
-    elif response.status_code == 401:
-        raise Exception("API Key not supplied")
-    response.raise_for_status()
+    response_status_exception(response)

     return response
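The duplicated status-code handling in `create_post_request` and `create_get_request` is consolidated into `response_status_exception`, which also fixes the misleading 500/401 messages from 0.2.8 ("Invalid API key supplied." / "API Key not supplied"). A rough sketch of the resulting behaviour, assuming you already have a `requests.Response` in hand; the endpoint used here is illustrative only:

```python
import requests
import together
from together.utils import response_status_exception

resp = requests.get(together.api_base)  # any Together endpoint; illustrative only

try:
    response_status_exception(resp)
except together.RateLimitError:
    print("429: too many requests, back off and retry")
except Exception as err:
    # 500 -> "server encountered an unexpected condition"
    # 401 -> "invalid authentication credentials"
    # any other non-2xx status falls through to response.raise_for_status()
    print(err)
```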
 
together-0.2.8/src/together/embeddings.py (removed)

@@ -1,35 +0,0 @@
-from typing import Any, Dict, Optional
-
-import together
-from together.utils import create_post_request, get_logger
-
-
-logger = get_logger(str(__name__))
-
-
-class Embeddings:
-    @classmethod
-    def create(
-        self,
-        input: str,
-        model: Optional[str] = "",
-    ) -> Dict[str, Any]:
-        if model == "":
-            model = together.default_embedding_model
-
-        parameter_payload = {
-            "input": input,
-            "model": model,
-        }
-
-        # send request
-        response = create_post_request(
-            url=together.api_base_embeddings, json=parameter_payload
-        )
-
-        try:
-            response_json = dict(response.json())
-
-        except Exception as e:
-            raise together.JSONError(e, http_status=response.status_code)
-        return response_json
The remaining 18 files listed above with +0 −0 are unchanged between 0.2.8 and 0.2.10.